| Field | Type | Range |
| --- | --- | --- |
| repo_name | stringclasses | 1 value |
| pr_number | int64 | 4.12k to 11.2k |
| pr_title | stringlengths | 9 to 107 |
| pr_description | stringlengths | 107 to 5.48k |
| author | stringlengths | 4 to 18 |
| date_created | timestamp[ns, tz=UTC] | |
| date_merged | timestamp[ns, tz=UTC] | |
| previous_commit | stringlengths | 40 to 40 |
| pr_commit | stringlengths | 40 to 40 |
| query | stringlengths | 118 to 5.52k |
| before_content | stringlengths | 0 to 7.93M |
| after_content | stringlengths | 0 to 7.93M |
| label | int64 | -1 to 1 |
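A minimal sketch of how rows with this schema could be inspected, assuming the dump comes from a Hugging Face dataset; the dataset id used below is a placeholder, not the real name:

```python
# Hypothetical loading sketch -- "org/pr-code-pairs" is a placeholder id, not the real dataset name.
from datasets import load_dataset

ds = load_dataset("org/pr-code-pairs", split="train")  # assumption: hosted as a Hugging Face dataset
row = ds[0]
# Each row pairs PR metadata with one file's content before and after the PR commit.
print(row["repo_name"], row["pr_number"], row["pr_title"])
print(len(row["before_content"]), len(row["after_content"]), row["label"])
```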
TheAlgorithms/Python
7,417
Remove references to depreciated QasmSimulator
### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
tianyizheng02
2022-10-19T03:31:43Z
2022-10-19T20:12:44Z
50da472ddcdc2d79d1ad325ec05cda3558802fda
2859d4bf3aa96737a4715c65d4a9051d9c62d24d
Remove references to depreciated QasmSimulator. ### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
""" Luhn Algorithm """ from __future__ import annotations def is_luhn(string: str) -> bool: """ Perform Luhn validation on an input string Algorithm: * Double every other digit starting from 2nd last digit. * Subtract 9 if number is greater than 9. * Sum the numbers * >>> test_cases = (79927398710, 79927398711, 79927398712, 79927398713, ... 79927398714, 79927398715, 79927398716, 79927398717, 79927398718, ... 79927398719) >>> [is_luhn(str(test_case)) for test_case in test_cases] [False, False, False, True, False, False, False, False, False, False] """ check_digit: int _vector: list[str] = list(string) __vector, check_digit = _vector[:-1], int(_vector[-1]) vector: list[int] = [int(digit) for digit in __vector] vector.reverse() for i, digit in enumerate(vector): if i & 1 == 0: doubled: int = digit * 2 if doubled > 9: doubled -= 9 check_digit += doubled else: check_digit += digit return check_digit % 10 == 0 if __name__ == "__main__": import doctest doctest.testmod() assert is_luhn("79927398713") assert not is_luhn("79927398714")
""" Luhn Algorithm """ from __future__ import annotations def is_luhn(string: str) -> bool: """ Perform Luhn validation on an input string Algorithm: * Double every other digit starting from 2nd last digit. * Subtract 9 if number is greater than 9. * Sum the numbers * >>> test_cases = (79927398710, 79927398711, 79927398712, 79927398713, ... 79927398714, 79927398715, 79927398716, 79927398717, 79927398718, ... 79927398719) >>> [is_luhn(str(test_case)) for test_case in test_cases] [False, False, False, True, False, False, False, False, False, False] """ check_digit: int _vector: list[str] = list(string) __vector, check_digit = _vector[:-1], int(_vector[-1]) vector: list[int] = [int(digit) for digit in __vector] vector.reverse() for i, digit in enumerate(vector): if i & 1 == 0: doubled: int = digit * 2 if doubled > 9: doubled -= 9 check_digit += doubled else: check_digit += digit return check_digit % 10 == 0 if __name__ == "__main__": import doctest doctest.testmod() assert is_luhn("79927398713") assert not is_luhn("79927398714")
-1
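The PR in the row above swaps the legacy `qasm_simulator` backend for `aer_simulator`. A minimal sketch of what that replacement looks like in calling code, assuming an older Qiskit release (with qiskit-aer installed) where `qiskit.Aer` and `qiskit.execute` are still available:

```python
# Sketch of the backend swap described in the PR, assuming an older Qiskit
# release where qiskit.Aer and qiskit.execute still exist.
import qiskit as q

circuit = q.QuantumCircuit(1, 1)
circuit.h(0)
circuit.measure(0, 0)

# Before: the legacy backend that raises deprecation warnings.
# backend = q.Aer.get_backend("qasm_simulator")
# After: the replacement used by the PR.
backend = q.Aer.get_backend("aer_simulator")

counts = q.execute(circuit, backend, shots=1000).result().get_counts()
print(counts)  # roughly even split between "0" and "1"
```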
TheAlgorithms/Python
7,417
Remove references to depreciated QasmSimulator
### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
tianyizheng02
2022-10-19T03:31:43Z
2022-10-19T20:12:44Z
50da472ddcdc2d79d1ad325ec05cda3558802fda
2859d4bf3aa96737a4715c65d4a9051d9c62d24d
Remove references to depreciated QasmSimulator. ### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
# A naive recursive implementation of 0-1 Knapsack Problem This overview is taken from: https://en.wikipedia.org/wiki/Knapsack_problem --- ## Overview The knapsack problem is a problem in combinatorial optimization: Given a set of items, each with a weight and a value, determine the number of each item to include in a collection so that the total weight is less than or equal to a given limit and the total value is as large as possible. It derives its name from the problem faced by someone who is constrained by a fixed-size knapsack and must fill it with the most valuable items. The problem often arises in resource allocation where the decision makers have to choose from a set of non-divisible projects or tasks under a fixed budget or time constraint, respectively. The knapsack problem has been studied for more than a century, with early works dating as far back as 1897 The name "knapsack problem" dates back to the early works of mathematician Tobias Dantzig (1884–1956), and refers to the commonplace problem of packing the most valuable or useful items without overloading the luggage. --- ## Documentation This module uses docstrings to enable the use of Python's in-built `help(...)` function. For instance, try `help(Vector)`, `help(unit_basis_vector)`, and `help(CLASSNAME.METHODNAME)`. --- ## Usage Import the module `knapsack.py` from the **.** directory into your project. --- ## Tests `.` contains Python unit tests which can be run with `python3 -m unittest -v`.
# A naive recursive implementation of 0-1 Knapsack Problem This overview is taken from: https://en.wikipedia.org/wiki/Knapsack_problem --- ## Overview The knapsack problem is a problem in combinatorial optimization: Given a set of items, each with a weight and a value, determine the number of each item to include in a collection so that the total weight is less than or equal to a given limit and the total value is as large as possible. It derives its name from the problem faced by someone who is constrained by a fixed-size knapsack and must fill it with the most valuable items. The problem often arises in resource allocation where the decision makers have to choose from a set of non-divisible projects or tasks under a fixed budget or time constraint, respectively. The knapsack problem has been studied for more than a century, with early works dating as far back as 1897 The name "knapsack problem" dates back to the early works of mathematician Tobias Dantzig (1884–1956), and refers to the commonplace problem of packing the most valuable or useful items without overloading the luggage. --- ## Documentation This module uses docstrings to enable the use of Python's in-built `help(...)` function. For instance, try `help(Vector)`, `help(unit_basis_vector)`, and `help(CLASSNAME.METHODNAME)`. --- ## Usage Import the module `knapsack.py` from the **.** directory into your project. --- ## Tests `.` contains Python unit tests which can be run with `python3 -m unittest -v`.
-1
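The README above describes the 0-1 knapsack problem; a short sketch of the naive recursive formulation it refers to (illustrative only, not the repository's own implementation):

```python
# Naive recursive 0-1 knapsack: for each item, take the better of skipping it
# or including it (when it still fits). Runs in exponential time.
def knapsack(capacity: int, weights: list[int], values: list[int], n: int) -> int:
    if n == 0 or capacity == 0:
        return 0
    if weights[n - 1] > capacity:  # item n-1 cannot fit, so skip it
        return knapsack(capacity, weights, values, n - 1)
    return max(
        knapsack(capacity, weights, values, n - 1),  # skip item n-1
        values[n - 1] + knapsack(capacity - weights[n - 1], weights, values, n - 1),  # take it
    )


if __name__ == "__main__":
    values = [60, 100, 120]
    weights = [10, 20, 30]
    print(knapsack(50, weights, values, len(values)))  # 220
```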
TheAlgorithms/Python
7,417
Remove references to depreciated QasmSimulator
### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
tianyizheng02
2022-10-19T03:31:43Z
2022-10-19T20:12:44Z
50da472ddcdc2d79d1ad325ec05cda3558802fda
2859d4bf3aa96737a4715c65d4a9051d9c62d24d
Remove references to depreciated QasmSimulator. ### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
""" Project Euler Problem 686: https://projecteuler.net/problem=686 2^7 = 128 is the first power of two whose leading digits are "12". The next power of two whose leading digits are "12" is 2^80. Define p(L,n) to be the nth-smallest value of j such that the base 10 representation of 2^j begins with the digits of L. So p(12, 1) = 7 and p(12, 2) = 80. You are given that p(123, 45) = 12710. Find p(123, 678910). """ import math def log_difference(number: int) -> float: """ This function returns the decimal value of a number multiplied with log(2) Since the problem is on powers of two, finding the powers of two with large exponents is time consuming. Hence we use log to reduce compute time. We can find out that the first power of 2 with starting digits 123 is 90. Computing 2^90 is time consuming. Hence we find log(2^90) = 90*log(2) = 27.092699609758302 But we require only the decimal part to determine whether the power starts with 123. So we just return the decimal part of the log product. Therefore we return 0.092699609758302 >>> log_difference(90) 0.092699609758302 >>> log_difference(379) 0.090368356648852 """ log_number = math.log(2, 10) * number difference = round((log_number - int(log_number)), 15) return difference def solution(number: int = 678910) -> int: """ This function calculates the power of two which is nth (n = number) smallest value of power of 2 such that the starting digits of the 2^power is 123. For example the powers of 2 for which starting digits is 123 are: 90, 379, 575, 864, 1060, 1545, 1741, 2030, 2226, 2515 and so on. 90 is the first power of 2 whose starting digits are 123, 379 is second power of 2 whose starting digits are 123, and so on. So if number = 10, then solution returns 2515 as we observe from above series. We will define a lowerbound and upperbound. lowerbound = log(1.23), upperbound = log(1.24) because we need to find the powers that yield 123 as starting digits. log(1.23) = 0.08990511143939792, log(1,24) = 0.09342168516223506. We use 1.23 and not 12.3 or 123, because log(1.23) yields only decimal value which is less than 1. log(12.3) will be same decimal value but 1 added to it which is log(12.3) = 1.093421685162235. We observe that decimal value remains same no matter 1.23 or 12.3 Since we use the function log_difference(), which returns the value that is only decimal part, using 1.23 is logical. If we see, 90*log(2) = 27.092699609758302, decimal part = 0.092699609758302, which is inside the range of lowerbound and upperbound. If we compute the difference between all the powers which lead to 123 starting digits is as follows: 379 - 90 = 289 575 - 379 = 196 864 - 575 = 289 1060 - 864 = 196 We see a pattern here. The difference is either 196 or 289 = 196 + 93. Hence to optimize the algorithm we will increment by 196 or 93 depending upon the log_difference() value. Let's take for example 90. Since 90 is the first power leading to staring digits as 123, we will increment iterator by 196. Because the difference between any two powers leading to 123 as staring digits is greater than or equal to 196. After incrementing by 196 we get 286. log_difference(286) = 0.09457875989861 which is greater than upperbound. The next power is 379, and we need to add 93 to get there. The iterator will now become 379, which is the next power leading to 123 as starting digits. Let's take 1060. We increment by 196, we get 1256. log_difference(1256) = 0.09367455396034, Which is greater than upperbound hence we increment by 93. Now iterator is 1349. 
log_difference(1349) = 0.08946415071057 which is less than lowerbound. The next power is 1545 and we need to add 196 to get 1545. Conditions are as follows: 1) If we find a power whose log_difference() is in the range of lower and upperbound, we will increment by 196. which implies that the power is a number which will lead to 123 as starting digits. 2) If we find a power, whose log_difference() is greater than or equal upperbound, we will increment by 93. 3) if log_difference() < lowerbound, we increment by 196. Reference to the above logic: https://math.stackexchange.com/questions/4093970/powers-of-2-starting-with-123-does-a-pattern-exist >>> solution(1000) 284168 >>> solution(56000) 15924915 >>> solution(678910) 193060223 """ power_iterator = 90 position = 0 lower_limit = math.log(1.23, 10) upper_limit = math.log(1.24, 10) previous_power = 0 while position < number: difference = log_difference(power_iterator) if difference >= upper_limit: power_iterator += 93 elif difference < lower_limit: power_iterator += 196 else: previous_power = power_iterator power_iterator += 196 position += 1 return previous_power if __name__ == "__main__": import doctest doctest.testmod() print(f"{solution() = }")
""" Project Euler Problem 686: https://projecteuler.net/problem=686 2^7 = 128 is the first power of two whose leading digits are "12". The next power of two whose leading digits are "12" is 2^80. Define p(L,n) to be the nth-smallest value of j such that the base 10 representation of 2^j begins with the digits of L. So p(12, 1) = 7 and p(12, 2) = 80. You are given that p(123, 45) = 12710. Find p(123, 678910). """ import math def log_difference(number: int) -> float: """ This function returns the decimal value of a number multiplied with log(2) Since the problem is on powers of two, finding the powers of two with large exponents is time consuming. Hence we use log to reduce compute time. We can find out that the first power of 2 with starting digits 123 is 90. Computing 2^90 is time consuming. Hence we find log(2^90) = 90*log(2) = 27.092699609758302 But we require only the decimal part to determine whether the power starts with 123. So we just return the decimal part of the log product. Therefore we return 0.092699609758302 >>> log_difference(90) 0.092699609758302 >>> log_difference(379) 0.090368356648852 """ log_number = math.log(2, 10) * number difference = round((log_number - int(log_number)), 15) return difference def solution(number: int = 678910) -> int: """ This function calculates the power of two which is nth (n = number) smallest value of power of 2 such that the starting digits of the 2^power is 123. For example the powers of 2 for which starting digits is 123 are: 90, 379, 575, 864, 1060, 1545, 1741, 2030, 2226, 2515 and so on. 90 is the first power of 2 whose starting digits are 123, 379 is second power of 2 whose starting digits are 123, and so on. So if number = 10, then solution returns 2515 as we observe from above series. We will define a lowerbound and upperbound. lowerbound = log(1.23), upperbound = log(1.24) because we need to find the powers that yield 123 as starting digits. log(1.23) = 0.08990511143939792, log(1,24) = 0.09342168516223506. We use 1.23 and not 12.3 or 123, because log(1.23) yields only decimal value which is less than 1. log(12.3) will be same decimal value but 1 added to it which is log(12.3) = 1.093421685162235. We observe that decimal value remains same no matter 1.23 or 12.3 Since we use the function log_difference(), which returns the value that is only decimal part, using 1.23 is logical. If we see, 90*log(2) = 27.092699609758302, decimal part = 0.092699609758302, which is inside the range of lowerbound and upperbound. If we compute the difference between all the powers which lead to 123 starting digits is as follows: 379 - 90 = 289 575 - 379 = 196 864 - 575 = 289 1060 - 864 = 196 We see a pattern here. The difference is either 196 or 289 = 196 + 93. Hence to optimize the algorithm we will increment by 196 or 93 depending upon the log_difference() value. Let's take for example 90. Since 90 is the first power leading to staring digits as 123, we will increment iterator by 196. Because the difference between any two powers leading to 123 as staring digits is greater than or equal to 196. After incrementing by 196 we get 286. log_difference(286) = 0.09457875989861 which is greater than upperbound. The next power is 379, and we need to add 93 to get there. The iterator will now become 379, which is the next power leading to 123 as starting digits. Let's take 1060. We increment by 196, we get 1256. log_difference(1256) = 0.09367455396034, Which is greater than upperbound hence we increment by 93. Now iterator is 1349. 
log_difference(1349) = 0.08946415071057 which is less than lowerbound. The next power is 1545 and we need to add 196 to get 1545. Conditions are as follows: 1) If we find a power whose log_difference() is in the range of lower and upperbound, we will increment by 196. which implies that the power is a number which will lead to 123 as starting digits. 2) If we find a power, whose log_difference() is greater than or equal upperbound, we will increment by 93. 3) if log_difference() < lowerbound, we increment by 196. Reference to the above logic: https://math.stackexchange.com/questions/4093970/powers-of-2-starting-with-123-does-a-pattern-exist >>> solution(1000) 284168 >>> solution(56000) 15924915 >>> solution(678910) 193060223 """ power_iterator = 90 position = 0 lower_limit = math.log(1.23, 10) upper_limit = math.log(1.24, 10) previous_power = 0 while position < number: difference = log_difference(power_iterator) if difference >= upper_limit: power_iterator += 93 elif difference < lower_limit: power_iterator += 196 else: previous_power = power_iterator power_iterator += 196 position += 1 return previous_power if __name__ == "__main__": import doctest doctest.testmod() print(f"{solution() = }")
-1
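A quick numerical check of the log trick used in the docstrings above: only the fractional part of j * log10(2) is needed to decide whether 2^j starts with 123.

```python
import math

j = 90
frac = (j * math.log10(2)) % 1  # fractional part of log10(2**90), ~0.0927
print(math.log10(1.23) <= frac < math.log10(1.24))  # True: 2**90 starts with 123
print(str(2**90)[:3])                               # "123"
```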
TheAlgorithms/Python
7,417
Remove references to depreciated QasmSimulator
### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
tianyizheng02
2022-10-19T03:31:43Z
2022-10-19T20:12:44Z
50da472ddcdc2d79d1ad325ec05cda3558802fda
2859d4bf3aa96737a4715c65d4a9051d9c62d24d
Remove references to depreciated QasmSimulator. ### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
def hamming_distance(string1: str, string2: str) -> int: """Calculate the Hamming distance between two equal length strings In information theory, the Hamming distance between two strings of equal length is the number of positions at which the corresponding symbols are different. https://en.wikipedia.org/wiki/Hamming_distance Args: string1 (str): Sequence 1 string2 (str): Sequence 2 Returns: int: Hamming distance >>> hamming_distance("python", "python") 0 >>> hamming_distance("karolin", "kathrin") 3 >>> hamming_distance("00000", "11111") 5 >>> hamming_distance("karolin", "kath") Traceback (most recent call last): ... ValueError: String lengths must match! """ if len(string1) != len(string2): raise ValueError("String lengths must match!") count = 0 for char1, char2 in zip(string1, string2): if char1 != char2: count += 1 return count if __name__ == "__main__": import doctest doctest.testmod()
def hamming_distance(string1: str, string2: str) -> int: """Calculate the Hamming distance between two equal length strings In information theory, the Hamming distance between two strings of equal length is the number of positions at which the corresponding symbols are different. https://en.wikipedia.org/wiki/Hamming_distance Args: string1 (str): Sequence 1 string2 (str): Sequence 2 Returns: int: Hamming distance >>> hamming_distance("python", "python") 0 >>> hamming_distance("karolin", "kathrin") 3 >>> hamming_distance("00000", "11111") 5 >>> hamming_distance("karolin", "kath") Traceback (most recent call last): ... ValueError: String lengths must match! """ if len(string1) != len(string2): raise ValueError("String lengths must match!") count = 0 for char1, char2 in zip(string1, string2): if char1 != char2: count += 1 return count if __name__ == "__main__": import doctest doctest.testmod()
-1
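The counting loop in `hamming_distance` above can also be written as a single `sum` over mismatched positions; a compact equivalent sketch:

```python
def hamming_distance_compact(string1: str, string2: str) -> int:
    """Count positions where the two equal-length strings differ."""
    if len(string1) != len(string2):
        raise ValueError("String lengths must match!")
    return sum(char1 != char2 for char1, char2 in zip(string1, string2))


assert hamming_distance_compact("karolin", "kathrin") == 3
assert hamming_distance_compact("00000", "11111") == 5
```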
TheAlgorithms/Python
7,417
Remove references to depreciated QasmSimulator
### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
tianyizheng02
2022-10-19T03:31:43Z
2022-10-19T20:12:44Z
50da472ddcdc2d79d1ad325ec05cda3558802fda
2859d4bf3aa96737a4715c65d4a9051d9c62d24d
Remove references to depreciated QasmSimulator. ### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
""" Implementation of median filter algorithm """ from cv2 import COLOR_BGR2GRAY, cvtColor, imread, imshow, waitKey from numpy import divide, int8, multiply, ravel, sort, zeros_like def median_filter(gray_img, mask=3): """ :param gray_img: gray image :param mask: mask size :return: image with median filter """ # set image borders bd = int(mask / 2) # copy image size median_img = zeros_like(gray_img) for i in range(bd, gray_img.shape[0] - bd): for j in range(bd, gray_img.shape[1] - bd): # get mask according with mask kernel = ravel(gray_img[i - bd : i + bd + 1, j - bd : j + bd + 1]) # calculate mask median median = sort(kernel)[int8(divide((multiply(mask, mask)), 2) + 1)] median_img[i, j] = median return median_img if __name__ == "__main__": # read original image img = imread("../image_data/lena.jpg") # turn image in gray scale value gray = cvtColor(img, COLOR_BGR2GRAY) # get values with two different mask size median3x3 = median_filter(gray, 3) median5x5 = median_filter(gray, 5) # show result images imshow("median filter with 3x3 mask", median3x3) imshow("median filter with 5x5 mask", median5x5) waitKey(0)
""" Implementation of median filter algorithm """ from cv2 import COLOR_BGR2GRAY, cvtColor, imread, imshow, waitKey from numpy import divide, int8, multiply, ravel, sort, zeros_like def median_filter(gray_img, mask=3): """ :param gray_img: gray image :param mask: mask size :return: image with median filter """ # set image borders bd = int(mask / 2) # copy image size median_img = zeros_like(gray_img) for i in range(bd, gray_img.shape[0] - bd): for j in range(bd, gray_img.shape[1] - bd): # get mask according with mask kernel = ravel(gray_img[i - bd : i + bd + 1, j - bd : j + bd + 1]) # calculate mask median median = sort(kernel)[int8(divide((multiply(mask, mask)), 2) + 1)] median_img[i, j] = median return median_img if __name__ == "__main__": # read original image img = imread("../image_data/lena.jpg") # turn image in gray scale value gray = cvtColor(img, COLOR_BGR2GRAY) # get values with two different mask size median3x3 = median_filter(gray, 3) median5x5 = median_filter(gray, 5) # show result images imshow("median filter with 3x3 mask", median3x3) imshow("median filter with 5x5 mask", median5x5) waitKey(0)
-1
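The filter above sorts each mask x mask window and indexes into the sorted values (the center of a sorted window of n values sits at index n // 2). A minimal sketch of the same neighbourhood-median idea using `numpy.median` directly:

```python
import numpy as np


def median_filter_np(gray_img: np.ndarray, mask: int = 3) -> np.ndarray:
    """Median-filter a grayscale image, leaving a border of width mask // 2 untouched."""
    bd = mask // 2
    out = np.zeros_like(gray_img)
    for i in range(bd, gray_img.shape[0] - bd):
        for j in range(bd, gray_img.shape[1] - bd):
            window = gray_img[i - bd : i + bd + 1, j - bd : j + bd + 1]
            # For an odd mask the median is an actual pixel value, so the cast is exact.
            out[i, j] = np.median(window)
    return out


if __name__ == "__main__":
    rng = np.random.default_rng(0)
    noisy = rng.integers(0, 256, size=(32, 32), dtype=np.uint8)
    print(median_filter_np(noisy, 3).shape)  # (32, 32)
```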
TheAlgorithms/Python
7,417
Remove references to depreciated QasmSimulator
### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
tianyizheng02
2022-10-19T03:31:43Z
2022-10-19T20:12:44Z
50da472ddcdc2d79d1ad325ec05cda3558802fda
2859d4bf3aa96737a4715c65d4a9051d9c62d24d
Remove references to depreciated QasmSimulator. ### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
-1
TheAlgorithms/Python
7,417
Remove references to depreciated QasmSimulator
### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
tianyizheng02
2022-10-19T03:31:43Z
2022-10-19T20:12:44Z
50da472ddcdc2d79d1ad325ec05cda3558802fda
2859d4bf3aa96737a4715c65d4a9051d9c62d24d
Remove references to depreciated QasmSimulator. ### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
[before_content / after_content: binary JPEG (JFIF) image data, not representable as text; omitted]
(40NPHDP*Gր(}Au$ 0F CCNqKC3LDJN 4)O֐u45&ajJ *­eH'= 曜r-֩O_:Ҋ Qa~=j\qQ\Mҕ[<eIii'4Hzoje*2>s@ -GI/ QE"\H@qBS `=мsHi@sژ ;I&9N 3T| LribQ#iJC#M:1|n?r[NV=(bTOA@f[:R1QS4O=hn(4 S@iᚔLB ՟REZ,JH #}rH|ІĻSA%-y=!H<(n)ry>HeJ|} 2CLD/O7/j84'i 6j7  PMH6sj櫉֩j<bdR@4iԆ I4?Z nqH,=*0њ,jJPrqOf}i|Lv hj\G0ݠ*arn"{AFᏺ(44Hn9-粊<N.$ꏊR1@<ҤĂGSRL db%T0hTT1FE ֤p83EM)zb"NOݜ Rz GIRz 8(i `T EԻ}MN*"[N)E JAS@}heJ8`u4[S4,7SZ ۩Jp1R gIijF<*CQS@4M1 )fI@jD}` yҠ |[8YEM$MSzUr3@y*9պlsPvoN*V$KOHdClPF=iZpN0 c9yT()@ tSElVph UZ7jBS锹ҌJeQE)GJJ((()i)hGZ:T#L(H?u5Hd|>ҟqH=s@ ޠR]T;@fE(aK۱J =)ϥ0#S\|:BA<)Ž75T ր1E.@FԩQ@b֥4_Z@4/p{敗+?*O4 :P')c }iVBs@ɣAJ֑0cҕ>C,B1V2zmFpqM8.IoZ`2n M{ B h̛J09'q-@JJʹ BF^ ?z%RX4?zq:)J\b,I皳 ~'44s.c4 ceX1`֚W\,CPlX(X@2jd  ☈Aw fI@;* ԟVɤp4#BjZHiL '\x\2c)Å*#rӀwx>']q({fBygbMA&M<h$u(AJ,)0jҫ?ZlKܵ D&Oz>"a]SY|ύkMl "^yrqJG(9 )l.H1F(v"'iSyG"iIh֡,0MZcDTr(AnjId L(8<Rp0h?)B(1~Mxӓ41 ҧҬF UcjҀ";P fA+ϥyZ@?֐EV6o*%W[T+=E=) ɤ2X5ԱP?Zh9UHj 4R) :PxRMNJ5[gҍ8;S-U UTzT@%S[{Ӏx9џZ@M᾵ FC'}Zrj1cޔǸxG0 "rcZa'֐3ǭ}Mn&)fN*183& mJ&ކ!gCS F;4"Sd"S9Ba$32ifP84NzV@O9gi Si08 !>6tL%ZrCd }q,b>1>"VșV׭T\~isM!r)E4Hw~lTt> pLjW;}i3ځ :K@QN)њ;~:SKIEɢ\HF4nɠ4f(A&֐ R'lVSIɢUAM1[ *Vu5 OCLӖM6iDL{P_'4A4-)`w61ڀ|[>n3 /=E2yѱޝ1ɦ;PpsNEj,O46u 9 .rj{D;) DN9{ zژr;摺qBmZF7RGր,'+85 8F}@P!aS׉SؾGvhui@irEB =) *\dr F)hP(7iiri ғ% Ecq040h"N'8s'5(< 1Q,TvbkiƛcHdd0pGaE ֕p#hFSrΐ zTyΕY)U:'SKM ){Ph(^ԔQ@Q@Q@Q@Q@NRJx8xoZJZ(Ǎ_ AXOZz Z(g>.Te$OIޠB!¿Y' pۗzH g&&,0MJ.1֞@Y;Q8;xXcFdu4G)v/{Wa;~&"zyH:ѷz4X:n ǽDy(5:E :Tndl~ZݛREĚǥ*g'<џ7L:46'$XzTkYWKA#=jK¦;quJnɨ6xv+3cp(C8恴 q;QێF@= Oր&Psh:T_#g'S֓#4ǩh bm5ZCSԢ4-!mJUĄ,!0/Br)!OJM4쏚!Oʑw` L'vjq$'Q%BݠrJ8'vtzE܄U?-ɈԊ|*ýDQSz昆}*W.X`fqދj?oVcTJĕ^: C l =ϵ;$@EH>?'')8)sM64*ǫzHB9!Xćc I"*J.XG>8 i ty(BsB12}(Y@ i D !ݎ6u& G>wE#*` 8_:rii)4i"Ҝ @ jHW=@TfʊXsE7i2nqCt;m\Y.zԷ 03j$ 4hȦ17*'Xi7ay14jF{ݘbI#4gd 棤VjUnYN8YQT{Uv8AP0)ϥ)<(hH(m eYHF@ovkjA?@AOsQȠ h}j֥<P!9*@i颤;NX{fBSّlb7ݤ4&"r[M cLZl:'A@֘)ddïZ[>Ԇ=L=R[,HpTRtT>gRGMGRGiU?ZKiW7֤STǠT1A!֫ݏ~duGҀ+bSBqE.(%>/M9>#iq@Ģb\Rb J=sF)h(ǽ-{Z7`QEPWڎ{QE"kѵiMczygր})žGHd_'}i_/@ Z<'=MX=K w},UI-XšG Ssi ~Ev4LCL{ OŽ= 04+NϽ8@Fiw rh'rṩ <Sdo9.5('zI@)h#KHey5<Àj6SILIj0*I>F .)S! 4yJ>jr9rhCc(Q@!斣f?$|mhG4vMKs֡e\#皘tP*ZCn))@^c*~aKL~H>64/Sr^Tz KIޝLBRi((((((hոWa tP1h }L{ BFcyHR4VHjUQHnTt  roʤXc_ր*;u?=Q2ϽLUS^t@=M<"oΟ׽&M<L1J<{bU64 v[.}N}Bަ4 )YjRrM@M9"!Ҫ^08Y9'l)W&*Z 4R(sN_4&8@H*?zUF1֓`޸$bϞ\ 7q'1]bqi0Rn)t2 dRb^NA>h=*EzPGCKzP @[ғ-'Ғٳ׌ 9`F: U'ژ m)i:7UOUf"* d})8i"!Ԅ`Tj~x&jsLw S8j3i3~lT(IܜReMH,wuℂxY47ҝYWb9'$m8Ǹi KmV:dc#マhb/Ou]PL]ƞcJaUbnJ%+0!x_Λas30xS ӦjhS|=*Vm U=Yc V}sQE^̒7̠4T.SFB* $#yvB#:~hܯQ@۟49`v%]} 1jͿOҀ3hԻ?tM1bZ@n#.89(At.@=,*qL*t*2jz9P!$ QʁSG.}* ħMO,OƤBXL_znԌ E8zRvM4JOiw`S ӈ&yiv`t =Bz@ fG97(ECS:BXS٧)4hZiZC$JQrOJWpUzeH~Us@\8J>) _0tR6yאGE RIFh@pMZPíTQMEKL6?zM.ALH! 
硫߽DOVFݞqL9Jqnv3Hbީ}*M#L@:7X>0P ̞ +MvH@N1444Jr}n *'4tHi ZZnhC)(`-RhZJ(3E0uy (yޏ1i)b/:zѽ}hh޾֐֍ր.Kr;Gj)j2$@݀}M7fh@uDR@4Pi9J( \RPN#r4}zT@ &,vOO^@~cKZ(9Oˊ*OJCJIz`P}{$T4uP) 9zS$Ccw \ZJJbpppw%%.j]Ƙ:4dQ@>$+0  R ZC>R/Lњ;Siݩ(VzZ0s_΀E3_Ώ-}:@:~Dcyay?SLjz/ST ޝښzҎ-4J((Z((((( mIˊ5`t]GV Bii&EYhQO LjtXbH$ԛ@h8~to\)571-ˌuԛGrMCW /QN:cҥfA>}hh>: JVMv 2z @Ti GIu4I䌏jzNpF)ӭY+K,q8Sl&t"U[j/8fc隈' R]Iڢ4FsQ@ FD44 R!ri7yQM;ge7u F\{QMԪ %J @UPy"1*, cҐ1ҤCPaKH)E!?r*`ppbq@iK`nQJ)$#m@iNM8oҝiH&Uk0RyV# ƔcU'aCy+0xM?p44(Sh_5a?#VvHZ%>5hfw}EODe "Ԇ< f;/CFÑP)!nR)V l7JI,^PLþTbjRjESoWBz!\g~JE `{f-n*X*$nܮz+iHXvL<S8|1@JH'Tj*ڞb) @RUip}i]dp x'S jol><g5%Dx[mRyBA5q1GP\?JT&;#bƄ!3sLSFM;xSHHj62OB)\P;`4`bSMSRPq59Zu)HbÒcj"p iQNP)Ͳ|RsZC@pqTxԌyci革 \џjJ(ϵ=h(vIoćE"*(Q&M#HqɌSycM0 >5!1X4LR}ihO0ԿJxү-}~jGҫ buAc4I`rZq>P䓎i C@ T+ ^!}G9(}*?7ր#ޑNKߑ*&xsV4Q@R Z())hQK@PE;:FK3L()sFhXf<z<O<M%{Ldjd?0J+RI@ 3N"m&Ki6ѶޢIm4 v>ݴb==4`rMTX4`pRn>E#~ .>])QETS6R 4v48S<u)(~ m%-%1Q@%PwSJZ($QՄL1X pZkvQoZu%)ݩ6ڛ@%9QCP2}[U=ha%1 %Q@Q@Q@Q@Q@(!P)OJhQ 2/JN% kr~Iڐ HzR Qҗ9b:&=$gI64nhS(:TfBh`jXp[>?5MS@i()4 Zi#"ʏzZk)K3|SC _z3<<ܒqBF0((@sR_{REh1<x<M0QRғ< 6 L4c+@]XSO)SBΐ'ǘ0s@LdR}Tqu /7h7Tg8yO(z 0=((G18Mu9:3sNl}C䃃J (d*+ĂNiݥaBV MJ#|Ӣ SpO4D7qQ&%.rsQN08Q@$CY a?@V6>8P: Z"]EOQ/CRG`>Q6}*EK@~t :(D`YV`T9>玢)QOxOLe%0jN6㌊*F] 1~IQZ<~ӎzGS6X?Ow%1>e<}664RҖ'bS)@!jWXU [5ݼUÊQ(}SO* (P9W#sIF#aUYp; 8H'ޘ]HAA!Qҙ@!)1&Gt~\S]p@ Gj#=s@v T|R)R4HGJC)4J'qK9Cz 䎔 Xmm) @iqrX.[i14!F='ކmCٷ+TKz<4 K0y#d< O% !W4T7(Q1 Lz)GMKL@ ғtݠJbܹ@VufTfiXGDX1ڹ4~N4 aBʐt}iZY:ҙgvsݞ(*֚T=8sJ\@#& QJHjb<)|ƞsHӐ(ÇϵAJxONMGPX("=h4PEPEPKH)hHhEA`RfQLQH)h@-JrEJ܎MB:Լ@ &) JZJZb QZ(PbR@ Q@)@*TI %)QE!8s6ڮiۘi$_Q6z`@N5ӗ562( NrҀ!J:U.&%@i@>`';h袀!ަ*4T֓-h%VzN}MZjK۸F}Z0=haɥ6Z]5gCF@hG#4y1pT[}h lSe /8w4oS@i_M.)Wpy2p1[Ҁ( ?hy ٠ *yLrM1(tCш TUgפ6Oف Tm"ҚHhN-E8Zy3 ¬ŠzPt!1I!/Jwdv95&N: ݱZct'~!ɠB3KJ`4 ̀U̹9L@*)G$Rp _)6#> #u>lP'bK79bSJ7г`RqRF6OHsdTM @9Aǹ2mJ^^8昇JvqM'&48Aڐ?7>TrGq8jUoS Ua0%ܹC=3L Wx2)1/zW8`'ʓLCLJ9ChzpRLgOr5*#hS"=0dR?5 P: b'"ƫ4((($XڢN$_qdJ(vqR LdV)`C|}\gm[c$ԴW"#@=@5\٩bw`QpLpjAS5u-OT-KL;N_2{aL\b*ҕrHs8.FE9~-" QQNp bX).ߙzd?LP;ҍ}GP9-!'Ufw0@_VD]G5lt0v0vK@U$@եT)I> -! ڔsj&`2q@ sS˷~#y~5t_W1܁J}HMm_J\AE`zQE-b bPJ)(bьELQKE%1@ LSI@)h f84fx4GƤl.h)i MpzPzeY|ǚ5-P!{IE)ԙM+ҨcR}im9i(((ii)E-4Ӹ4)OAI@ E JMPh(i){ 2s@ zQڔOzԼNMJ a<R ;Rw1hbE'44P)(E ZJZJ\RQJx<ultqҐ袊*cMTf!4f`1SsK'Jfiiؠe<S_GPI@4TFǥ1n=ezpE SIO('m Ҕ})$ҌZy>&=qL{Tw!F-&FKE&SF((̪%Q@P`zQԴPB 6pih}}?bޚJBƀ"-Ȥ-Sy^(xi6zQ@cޗ߭IMҀAK7p)vz]4wZ7Dr@?b3:t*64(RE4j\/֦4j͓"KSPH~c@&iێ-p#j@P8 rDzC) ?"S.Tr}>RT*|)&QOzQƣ{So,zP>ojjj1cQPKKӕz_9b=EsPG6Ijj<zSL*oޔyLyԢ"GޔwIQI7'$$w=ژe*:QJgb:S|(vWեc$M;>%x[jˑNmHlѵ¶)bx+JW0aW/})<u-*D Ґ=B*@{ҙ:ҩ"zzP:Rb (PEPEPEPHQީ|:ݧPEPrqQS9o2uTKL2JaC'c4>,Pxcm:h^ZrvҘc ")@(/Z{!RZTQI4(#9m+AP7pPp ޮTMIE%!U[Zp&њMhD**gWPE֚U<@<=P*Ƒf*r1QK3H=k/aqLOFL4GjȐUD85j7 7I8QKHbP2ܚ/hU "<<3Qv˂x460'2;SX;Ҁ&l"N+4\b54zP>\ѹiB*98zRԊn,jUphqPj-dپD?("#r;Ƥh[Qʓ) opD;ӢehIrY#JzrG $#EBڥj#0@mZ>i[ڑaEiAHv vᾢW#ҙ$}#ުQKoK@ E-%QE lZu#d o|R)VAZ@B-42OP)>i \Z^}E6w>H J9Z`R@ zRRIHh40Pir=i4 ?~qA>/{gږ Fy@8'n(cҎvyN)jF$ JNiwRf4dsJ dR}(ӀAA44у@3KI(KEtI p's91by''&4(%GO2 (AX;Gzu" CQ)h$ү*sIHLh+Hd.q#91,.=:"lsSNUw?p}hP9 hmB@  JR5 g4\3IT2jX,X`)*\:PXU)%>;ϵ)D?.M&k9RMBBIedԉ* v"Tڪcf/T~r12)F 1vYM:K1]Ɂ@Pv\ M޿ !o>=ij+cޥJ(x} )#q J(sKڒNU)ր-v+<Ս*3EhQP)o)lRԔDϞ s8Zi)ьDG6#R{E-叭S5~!@Lj^F:Q+R9“Y9c֨ ƁdSqE&E.ERdRPwȣZ@R⒎=hqZOƗ#ցsJhȤZ)#֌ZZB2E)7P7{@LёL((%R_ZoqŒv--&FBNb;g֌R(u&hiA!)񟚙NN0iK@ZuPEPEPE%4G.J~K.hH鑌gOij9Odii)R%KzSjWc @QE%-Z( b$ubCTif*j)N?:s*)y( ~pO@>/L\Q(1*WjހhA<: $UPIK@~@oR! 
^{ kgҀ 4ҌP:bxQPO^dtQHM:Ռ)&E3AqO&^94(iA=ih݋K}:Z(6/J(f#dEcJњh@Im9!s FhPG(@D?xjl,ɠK3@84fx4v7-GojJ3EP 3@ AisG4 c@'s2G)9nl>l>ԸOV~ (ނ@ )0/SI4MJy۷HqR{хz`&h-Jv 4Ǡ~4曚3@ )xҎSqGԯ1jf#i@Nv  `.M&M4>PQS}h)M֍{QNi<R@2M0mњZJ(2@ KIE-=Xjih9Xt@Ppig8@9 ,)zP+0IɦGJH CӲ WJ-NhȡV@M:b =)ЂK*\ @HMpƥ1E5 !M&1uD"b>n$@€'*Jl\ P;aҁ GU Tr$硠DU>4bd~O,L@.(OxϼtH :P!?VtWQL–;r3PpGyncaVIur1jkK%/x0)6)Bs@Ƀ!N0ԊMVJIqWf(QKI@ƻm\9bMZj@TќT"J`MUR­un)<Pd<♂xMH hG$LyICVP=*z>{ƀ-jnZgf7SjSҢjBx/='}j B:V8jy4PhbPhQm(1F)PhQ@ i4sRPMEEIQǥ2~=1@ JJҖ8b8qR\bInq@ s'6\h0\Rb֔)ޙN0ZZ( )3Z((QRQTxN@XԮp\IEr 646X=**Z)): )r .( })i(TI;SUIsM&iנ!O84 uH#Qytk&C 'պ/%)|}TSU(oJW\'U-˜ѸP[8#SsOGIRX)EZT:RQEQEp)3מ)P;I48NH yE4zbE秿ZR~c:3&Kv)i(P(e9<Tlޤ9͏ԞtޥؿTv:?SPfLf~T|Դ vRъJ)qF((~PzWi)p_Zc n IzbN(F8ѓKFh0O!SNi2NRl>[RfOZ66/rz‘ǽ;R0(ƅqA(6.Z]zPlyJL.ܸI=)H8㎴9Ͻ!>`P>6?T@ojR( NqKϠ:^=h֗+@ 441\qG@ )s@M1ɧ 0}B;>9cJ<jq`zOҒ\-& RR6<ғ6rUU*r֞MRS&(޽E8tP:Q@@ KIK@SK`u-0h@Ғ@:h$ҮHnU$M JIhfNp()#_1S(L' OրNhLf9 hK7y'<lRE@1NPBъd PyOjSKҡ44GH?׊Ua9U)d{ EGS,"I*̻9^ XccU#8w`A<` z@<j>PcH,'Sby<SA)z7ҚqF}(O=M!w \LZ]PȥR4XESZ_,b'Ҁ,tG;0q) Ji%4XR@!P9|R`zUܿ3 @ FKz@>l>hҍOygc4ҧg4[1CݿJM?E@qڛ@hQ~юh,/M+sKR9)2)p(֚W9_@RM8e}JZ:J( IE-PIK@MoCS[}@ viǥDiIB%=IEݎ %nv&(*k ]͞¬R"D'F(bҢ9UV@K.BM!4I43*(yL R\t{ѽ)w{zS`RnZ7M}ie?3zRoOSHdrz7U1>)e j=@ I@ _WTOUvt~R2 C-b{PAFp?41{<f<1W!H"#>b+n4n(en 9U*5 g<R< S"NkS5n.jCE3'@QL=aS<G=(J)AKzPi`S<4yh 毡_C@X}5}(W$WӼ@J<@Xu5} jj?4z<h QQҏ8zP$J%bJ)gg h ^w4yp)<io+yoj?2w '8N}h<M!O{Z?LBah?FS//.ߗ+Q?R?Pz!#IhUzOM@ڽɡ 7bxѵE) ;cڤ 0ZQ(843FMl1KK}hؾNEZzѷ qli̜u3)qؠJߊ2G ҍV4ƀHHF*Fs@ 6⏓ڔ Ǹ"8/)QhM;ܑPA֗pQNIbmvhȦ@@)dҚ())M9"gUˊY ¯@ ]McN Ԇ*i)qF((S s )A<S֝ހ Z6ъ(PFPFE-t E-E.Ph#4Q@KIFhh4f (--74fE%&hsc4O90$vj9j)wP"6c`)f,t,~eE¨6s'ڕ`.>Q5]ﲐ2DV0z@ .ǩ4ci袐`.(sLhQV{sM)4'P48PhJ 0i;#SFA)Is3Ԁ⁀U>Ue@oҥ#y#4h҄ӕ8LC`3-HN$L=*B})s(֙SIQ4JO>qIRތSuڔzQj3@ z~dK?Te?ZO2?ȤȠ!<b9<bÝ||1Mq楈ozv1C=)G֑#ڀ#R@i5?$@ :sa&&uj`%iw)&Kz4PfX}h}hz* ֗{zb۽Q޵j)y GE3r6I`S$I4J{zo>ڒG:U}M I&h +9U S ٨i(R֓x 7M(۩3IE2S&A4Ѕc@P~(3IRPQ1@(4b𢖀(zR p Ps:Й!\Tx8S`GƆH868@)sh=(o1qPPI=MQE8R(wtRIލX.KލX.KѼTQ`.FQQEQQEj(\CE n1ޠz7ZɷQ@b{QF*(\yormRoQQEϭ&z,'FA\QaܛڨF(P_EA7 Sl}zsErBsڙi:LVykM-wykMϹ-hX̏SA#@'ԛѨ}$xQ)F=E ⍿/CM ñI4Lh(9J~Mf41 qȠO<?MJRz<_ѵ}(6c—jRP~8@VҀ#ڿޣjz}M&4֟*LКP>(Ԙ_SQoʀ$}M# })Aw._‘~у@ j? L}h'";2&OÜM'uPbRE!QE%.h%SB(qp}(A?PR J.}V9,zQj%d)J.v5V4X b}~Оѓ@|I秭S rz><Y>{O _5?*}s@O j\ 'bxU RzP1?(ޟC4dzPFdzPFdzPFdPF#Ҁ47/_ >*6vaT=)2=(<+?s@l??:v^ IP"]D rEMC{y02i΍7 fc@JiqLD"BZo<S@F$1Okg4? Frj*r r3io$PKҏEZ.ѐ3@vg5o qW)@嗌ցU gZP i(dR:p@>sHcZ 0jĀ$!>Ҥ4]ڤj-jFݴ(hT)Cu8JS@ ;$~\M.pOZh4ޛK& jTz<g-QGPm_JJ^h0o}qI}.8jSӊChh4QLAKH z >) ;u> ]ݼvz6Q*X@$8"*O$`0 F)>༑@ QցJ:PR6JI hFrzRP(ƒGPQEQEQE%-P԰TU$mhCڡoi& IE (J((((((bK@ҍ£ppIp0sFhm¡.i.ETY_Z7EEI}iZ#֗pp9=j ӷJh۞O &FzRF5X( ((((((((((((((c(S\PK7b'm;ޓԘz|V35^#bpp 7AJOϹ()y4``n~T~tT7QIIZM֓u?ZN(&}njBM;CHHݠ-(# ]ҀcҞ ~w{PE*3F٠QL}wJhy?K)r})hҟA'Ҁ)ץ?-i'zPS}ݤ$SE(ҌfLCZu#c(%{JE#oJcLr)z@ J)M6JJRiP:Q@Z1@*B1E<ɑfh J)h)x88({E)r(RR(Rb\Z0}h0h1@((PQE-%-%-.iQҎ Y(dP:Ri0}*L6zQ)jTSs3vOti@&,j87nեP?:V(UQoU[iQP"!"0袬}VzUOb_O&3 [5ZQ2?JBC&:E IE1G^(ɥ\yk=8HcVqV{sQ:/^hGҝ\b<@IT~X  Sf@#x;ޔRdH&qnGaORp(1=j<J#@ = ԴQj^=ڀaKhh+G@#PH)0)ޥ~~ژ sJvz4eO  jXH18=zʁڤAݨ>Obfz3(i'qP1ɫ vV Z)֘RP(})0jN(#ǽ!Zmi<pi:`RcFM(  JZ\QjL{яzv=Z0=jE\qPx`zԝ?R?* ~oCP({LP ? v(ǽL"M%7bE6u1OPqE-!@ KIE(A\ (1F)ih0i>Y=ȧo_(sHh4c`%RQK1@ E(PQK1@ E.((J)hZ(2h)hRSHh \Q_Δc@ ǡK6~~րAҀ њ\QP(/(z N? CҦ QxczPF}Qǭ5n8I΀M)s?:2}@Qsd@7z\ IR^}h[֍Ɲ~4ؚ֚҇A} >R6@(*Soj` GKOMM7laɣqw ?Iǥ;&xmoR;zRv5=)<ŠjM941} Sނi}8)^ŠNhɥ'ހXH2 xv43NҚz 417ɈWM]L*(S=(QJԔ ZJQ@ KIE-*jJIa@簦g=x@ 覔'8Jǥ +')|sR}hhGp}hǹ;<Ԡz)W==jZj-'@ -}>|(ǠG>(z <Hh PfJjѵ}(/)?ɥZ~./ZB&F"ϡt_5.1+N-A?iw'58ڗ_+</5b@j= +nN oU~Th?*8=)An++< qn d){ޚzLb%]ϥZN*! 
1@RT< 0=o2}@ w~RNq"cU sSodrq@ }3&sVRBsU@jCTbq0-1 tJ,8C%C!aSM|WJ}V\S!`sLzTv>g)(qWFTWڐry0rjCCLh=(=)i(ҌZ3@ϵRm}(@z 0=7EOjG pЧ BR2QA4AQFiTR2R>CiH1)֛Ndi)h(QzZ(1IE7hmstLq-|qM4 >gG})6/Hi)ЋA@O 8u7N1@mG-`zRb$3 @?VRGpi hhXoΏ,y:v`?*0ycL ~tU|΢@$;(mi6Ͻ.qHW`7֌ZvFn=Rb (0}hS6RѶm%ivm.=h@c.)p69EM.@ Q wj](} .ӂkzg&a;kzymGޔ{7F=ymGޔѴz5 7S{S=oyGSv/&K} RZoBoZJ1SKGc_J]@'pi'M}.@Ozb 6#ھGjh+(?h٧O(|ycl_:wѤ@ 6MZ@ qtޗl^OO)p?GKVp?h?,-(Ro¤){ 4`RLB` LAN>nZ\QFGB` 9dz'>}.G- ϠPs(2=E0!"[Hsԛhq]縥@ <M qK@ϸjfGKP_jOڌOҗ#=^=?J8~TŠ>ޟKH]}h5O=)zS=h0}ha&#R61ڀdуK@4`яz3I@ z<04;{R})=&[̬0fޢԀbL0Wb>C$X&jO#P=Mb Jw4|QK)1MSՀ6۟qUZڐdRdR}M>>p, T@ hϽ U h^hG.@PzKF#֖ (Q@ހHsS0e;qI&(Ϡp=hAE4:Ӂf Dg5.j7\wQH4*Pnی͟JHaQ񞆤ր:[@4,z]žSR2>gnhwzk!YNsLvF `{Tcm#<T;~/9S8 }j{G&&SH0R74 C OcBҙA& N* ҥ1ZPGFd*=(;s3J̓@ VfiNA:3@nqLib)}}- \w4mP=hbQ})h(EPHTI@ mg\PJN)Qӽ&q@aƐh08Nϱot4@N@+UjT5T@ħ'^U@ ZAK@ IKI@ KEQEQETz@\sJqjC xGSÞBb3E-0 {f;))hTȬ*9*:4@IaHqKIUyI? cVt 49;sL9 4!rx⌜7<tb)p=ipLԌ0?MҀc<M!SF4)v8" p|v2xM@d]ǵ/ޔ '#4lI#ޔ{ԛ[RGz\{`n4hqU,2;S4u4֗u8F@nnQ@FKIbI @ F) @ҁ ^h4 9PF-7֓oFi9mJޣ}@ F=(W҅RcJ'ir} EF(2I4(<eZ]@ z >oAN< v|rTO~QRP6*uݞ4( &)B3֍KF&m_AFFLjQ}(()GE.h=( 6ҌJM;ʝM8ٱS 'JUV4ր#ii֗bzj_=0oj}!_sFhh=OK0=(6΍tSFޖ;Pm_J (2 l064( p ;%"FH(4~`S03KXwlg$})hؾ7MУ4Qr:!5I)&*bG?4y*i>+LCS1OsLfp>b~cҀW45epm<l& h )i8 gJ]T@O#PtsJ>)ӎi&(@p 9<14hn~RhF1p%=sO ihR65 nǭ!Ҁޣ⛿1N<Fwg"":TKϝ67~M)E2n#>f3NrPE+_#4ݓ@!i0c l`.iE3GOPzRғ PJZJXvbi'cV5< S :`ƤJ[HaLsڟM4ol 'JG3(I@G6S2G< (5CVHҐ*@$1إz@\p=iodќ bA$v#YҀaG5 ȦqL@TsQS&֐4G9#=Kژ yB0'AAiAݞƙR"uQESQE4PIKE&4PcяsKE7h4QEH8PsA'<R9FR2c<7i4 cә0:GөJZJ( ( ( ( e1S[4/ !#:?Z}!u,>*P\Ղ@iZL@)v=)HԌlo_-)|3G޴O))˹z)<}e~>f2>DHQ}O FṧlQ"~*$Ňjp*PN@496NҘ6Nܾ$JVe8Sw/-&:>nzY"nP(NFMZHȔhKEEi ikѵz@oAMsۊyV?Ms}pRWh#w{Pgu(ϩ (q@Z1@cޛ߭>W&ux`bZ23֜(5 w.H4InR @lQFE`zq &>E(l@ bi7fHzR|IFrH*L{T@庭;'GQwoҀ,1=>T`N޾evzTo_ʍѼ{{Tnyz҆Ҁp4nš̤cQFh1N;fƖMƌihgڊ(gڊ(a1Eя-JBO40ɎOZf(OE/Ҁk{ӷ1!KxP õ8n PsQZi捧Ɩ`&?ޥ ϭ>)Q4MAJ^}):R )iih(Ґ0 m(:P @,_JIE/4&#ҝóR-DdRWib(j>)LJ4ފQZB1ސb3L(px'֡0/F-qސ IZ2=(<=}iXԳf+%xKGӷZ ŒiɎˊ@8&09|HHIݤ,TiYM9 zQC`cb.X(sPA-4{}*%)Fx@ǀq5\yf.m!aiJ8!l3J7mӲW8>_@NsNI@'azq4ؠn#Jz @?Ͽ@ڛ٨8HSH4(C@~R´Mѽ@4(GC)=K(|j2{c v~l;z\ܚލQH~sQl*NLn-@ ֛| 'ҍG?(Rzaj=ަ%s֙=3PgmsSW@RhqrO҂cbi`c3bZӛnLBJvWGS(4 ^qH({9>c(I)u4 #'i@QLAIcKE 'Ҝ:IE'ҏR@ԼR@zaiP!7sKG4GZMԌ/4cc1@>u!,|ԍBdeG) QM\o~Ss)r}('ҌJZ( (TpjHbGMݖ : %!e S #JN:fU!{I@bNCzufh۰&iVFQG=7R}h4$_z"@wzT5=)wvך >hPA2SWq@l ~^i)`{Pri`zSa~U^=(5<Ru+`ShJZUƧ|85@ jUsҬcu-3cށ9qQ)pNM3&[PKcK1LDX?R`M ! M9#Jց@Ɯ{P{R9RdҀH =E 'Ҕ{9.y-S 4u'4J)7ԛj}!i7qHhzdRc4Bytg<NhSX )~n9r>*r) 6<iO ʇ+RJ)A<pi 䓚";⡌e'&hأ݃֗b!p(&ť  k v#GIjZ)L3h9G@ )9EPEP (QEM$s@?ܣ{rCv"H)rޔa(~pJ>? 
Z(2ނ-(zR@ e-ojCSI@ `OaJ 4ӭ8gPz3G4z@  -'8 ўh1h ֓ ^0Ə,ttҀ-A ixA*:"0S=)<Ƞ*6eJfUz!sL2( ֔PԀN;Q֛҅4xԘ4/֤RARAv -TU*Ǒҡ<ADls@b'4RY N3;8;q4 nJqڎ(3IsR=)2(Rs2(bi)H4dw(w#Md 7/b#ҏ(gp<%7_N(ؾҏ==)6vPқN*Bh嗌KTa۲RvJK5y(7zP:Td@HwWSȤ3 1;¢34ߕNʘY f8f?W2N47KP!;Z!֖1POae?(WSP: ]N/I7=B2?)CT 43M<i ISJ<4`pr[x㏭hQǭ''S[|27𧒸S>(JzI݁@)iqvsQځ=4>J@zC5"p< e&h-!3KFi3@ E&is@fE-4}h&4dsG4!#4`0$PhdfѓSsM *} cA9җ$f"QOu:~YȢ>*<ށSbn @KrzTC;qSuUր&'qLˑm?Z37ʨ1R7Zc ǨQޤa&qP=1ӌEQ dJG?B@S@tb >OPsJ1X.L'qq?*ɌsӰO/dZ @* u61R;5O@)c3HbcM; H\9!F1֜0sMUd P(i?\QN=h ;q@ <Aӊj 1 (+sVGJUJZ(`)e4 #=-fsA_sN#i6ޘKh=v9 xpݞi7b4ߟڃiw籠p#zP<=)ޔcҗ@ ѐ)xRmoRmoPnO;'(`|у 0=hj0-H֣iV>zIz03֙ޏ,{)&74mq@ 8LOt6sJv>R Z0)xSp ܵ' ;Ը&JJր 7OJppz BziH_ځ *(((QEQEQE L{֣ ~?ɣjQ`{4>6/DzSo;bSAoӁ= ^hEhQINhÌr9<уFvh[iP>E")iR}ihXf6Gzj IO4ljRoO0p!CQ?4ѿJSN;L ΀#ze9iSӔdɧ)HR4 zR=)}iy#Ґj0=iyj  Ppi4Kړj`!잝);5,AJa@ALK{uQFi}]Ҋ( >fQj  N1Lǩ9INU Z0AhKpVԅ)ܞGcUc#([ځr"xn-35gz0ڗwUޤT ԄݩJGoUcZN(ɒ/zC$~irh_2!4:, M!+i3M&(Y)y@TåVRqҬJZkPQ)dOҀ6zf1S0bE5 49v 3̐1O"chcdt_LT;eN)$-6ydzSG* SCR3)㠦'3R LP@rhq@2ihfבփFjC۳R`Ә!0hǽ-PbR@ 1A&[oAG(qF)>oj>oQ@ߛPhSy4)4yjFIJO4p))`ND\-5ȠRu4x(=Z7{q04d@  (#ր uj%R,};那S8O})f8IPT2sP!(֊Pxy#H}?wZCсS85/GVjN¨DNgŌq{D;'+uP)qh,4!~_2H'ڣ$Niɵ9fɤNԠqJp{𡛿ALV$` (9N KgAzQ39G&M4IFGV0*oq4X4RQ@^VNb("Rj~AR*<zPPmmJ`7`4<Bz=vQ{\{l8I)3HI4wRdQ( ڗw7g9ȥ3րw(#' LѺ 0=hr !$t[email protected];zT}G\)vpz>X(@)9͋Hs/م7jϟœ71PkFjPPm|jgp(9O)7!4`yXUǥCHxqi'>/'H()qҌ0PS|)r,R}i7xF)I`p{Iɠ(E&((h)6uݢhhhh E& (ԛw&-bѵKzP0AJ PE(@QEQE0 )(ǹ7xA4s)i /CK@ JZJ`5.F(bq"_Z74j3Z2`?4$֚NJ@?ojiE3i;ilRJ)MhP)Ӆ5@&Ph})ۗҘ 3N=)d`)=q@ĥ&qP;qJwhG94S< @_J*__6i q#w'40^5чNh8Ὡ=@s@ A:яzZoz\QL;qҀa@8cPL$=)6J{ߍ&}/*C7KF(!=0zZC8C|sR<=8=)<:- ђSv)3Jg''@Xw60hYqf(LK/(F~UϥK/) i4>7ڂώ#} TMelE23KzɓdtcqsSdRSO.}4ҀaS9j? T94a1פ~ߝ+7vߠ3(M7_zRԞdc02!yA=%Y8a,JXR4Դbb -:RQ@8=)ȣz gM!1LBbu%'4s@ KMhhG42)4bFE&(.Eb"LQ\kSHTe9Ti1@b*FHXt(%4iM1 꿥4S֐#R {Tg$6_QI/8 7imSF@=((ԑ}#޸ a;OʒS& !{35)RGިΣ43 DӊÑP"2ԙ&ˎ♟j`(y!NAӀR wc$fSȧ9*il*0֒c Jz*Hri `7M<N0ր@##<"<xNHz&*ocL,qJ(4RPRfvd!n %%`*AV(2h#d4}i4~4gRzQLCr=EAF7#֐;=(d)>ԃ<RQG"ˑހg)9aspiSހR:mcҐ'A M S;jE46ހ4^[{P <(򏨠DtTWTOBPXG݊?)P#%0e^RykޤC|+E<@'=(<m< CJasPP*Mx#=ObA:TJ(ZkzҿhKMQN,=0h})Iހi }Z\iC rڒi QE1Q@Q@Š(()(RQ@i {Rsځ/ƓdԀM_z0J z @ KH)iQEQE(ZZC&?4i͇9'9I )N4ZT翥-4.y-Ÿ)>bҗ G@K84ȧr) G C"31)#Jqt GY"/ƓTh2F) M-ڛ@ J(OT)[zR})>h9}EH"XLAK%}( RIJ1ސu81sB4Դ;E<J!9X8/P$EI4AG4 <bh۩7b8@ KTax֓xM7/zk5#lQIKL(((((i( \Q@ IE❸S)*GZRj|UWH`c@)#t/ޡ)sP\@XCIujx,ws@wc8y@| T~(oC@ #?b<7|wP((+]i)Łi4RQ@fZ)3K3@$fր,֚ԄP:Hd\JAi)(()h( JZJ(CƁPIϭ&OƂؠRsIHc;֕@ LЀJBLC6ۥ;`#8Ґҝ(@2aՓM'@i<3@-Mh#4HS ڛg4g PQ2j2ca2}?ZC*byJD݃Upw4ߴe4_dkQsSݧҀJ_0ycp)e|dMI<Rf,HuhP~bd'(}SRLaqK wJJv (gRpq`R`у@ E&撓 w1j 9sSfZ MJ:R9撊)J)(( ((d *1FZxL.x)[k/i%u\#tw/+@ 0iN\ZiO}( c F¼ BO>Gzq4(3A4ZCJL҆ i(JecQ$M=7'uAR!hWu7oJWD`dsPeaQsN*H@ =F"'Ҕ(؆sޣM*co4 v<GJvbM`AFM#4!QE0}h P 4܏P ^Q\њLQb4Q1@bPE ( (Qi؊v(>QNE/FGpRQLъ1@4fQ4bP &sJx@nqKRRh'%Ҝ((A:Ӗ1vJ0Qj0e6)&9) R2z{_Ǝ?L&ҰL1CLy"[N 79)~j@w4/4,8}hac4"r4Қ)@ Ji)R1tSOZV@'Ru9 rz[U@9ۘJ#uIDDsM R\TX>(\&RLPZBPQzMz1SGcM0J]ѷPh#ǽ%RPQKF@ E8#loJmKP(?iܚJ\RQ@ )⣩ހIN*Fj.6CnjdqJX`PWmRb0)LCqM8,Oj@BQ*i%V@;M3J@XFP,)K8aQ{Rb ΙRbޏvqX#֗/j)LQKE%PQR@袀 ZJ(qN==iZvE!GZfiF)f&4df3IZ(3FiqF(3FihуNF :n 4(QKz GIHcBAO?0J1*!jCKHaKM=(ڧ֓bR݀;6/WQ@ :Qz 6QP;b0*XG+{)jO2 72gQ e`2A)^Vqiz@('H`iE%;AvQ "*HmQ==Yp) 6Ɓ`N C1Rc;ؓڕB}i8sQQRGZn(h>Ԙ-)+zBu@ QAumi L0i,3HP` TbLS `.2i))L99֌ZZ(1GKE7p'b)qDP&jTi&A@ `1qHrp8RO| 2}*D, >➮TpR'?M /zbnZpZF 4haQF  )i)hZ3Hc|ncqңc) M(#)hbEsնpT}MHc ޵#3185\? 
ӡ'h@sME15)|ՠCҝLiҦ)" ÚmSh E% Z)(BIE-%f (Lњ6AI'GKEG.PEbbP Q@-&(Q1@!58`RR*u44`q(dfE1i4`bPK摲80Izu-1 hi YHii`u%!G=)q=TbR dsB43S8.;S.h%&iqI@E( v^N) ړ&ZbCԽhZHB@ cӊ37R2 RoR>nƁڛcRdNhϭ=l`ր@CQs֤ez.G)iK)PGȨ <V>(֓qǽ.4.>EE2v>}pfSR1SH⧋`_z>6UlLJNޥj{znSjR7^WCSsV )Rb"VljUDQiKn1ǵ !Dm ҈$`8oS??9XR}h/iȣb(ȣq@hq3Fip(&h @@8rǚLʪ1)PiqKE&(i9944RsFhSrhɠQқAɠ'&8C5JܚPF0 bh!)3Ji3@ F(.(CF)F3HM&M/"2xCzvzHbORt5CSM(l*OZhL^}(8/1 ri1qLHbb1Jr:A@:ӆ2JqHbf8LoÎ=b“M444vHi78&t4\FM1@uѓ@$)P 7q&ԔƌLi1TZ@DP:1:sLȦуLBQL0hsFLJ]n4PFMPCKHh[)\<giAvf2zC"æ*3ry5-+C2A/L@ })vJ>oz0M.hԥu4m=&zJJvF&M4'4f9 ( QZJznqǭOSR4&(2s7z2S =2j>_CFWޘ N:R7# 1C QI !qIҁIEF=M}APsKϥ0&pPcڥ (ۅ/?&}Ɠu+.E&)7Q1F(b њ(4f(h 1G4s@(P1Eb(2=i2(hPE.(%RQF(QEzF(tӳM#wJE $У(@Q(KSRHc~ojOSHp:Iįܾp )p=*F=$hZy ]ڢp3OTQ|r  y(=:m4LRޔCQMS0)[ c;Qb~ojvHH@f!^WY)$=1RPi=IMniϡs@RS77J}%7qhRO4e-/>}E%hM7bh@ . )ri71NSsI@p))PTTP}CQQ@9Vzycyݚe8--9Xg74ҐBy0{T~aȠdjPVpF1Qx@ XzԞKy0#VN7RjI1\gH$! ֓dd(3v4O&rx<;4AFh\~MRMAb7Rmn.ᜑIJc4h_33Fh3Fh4f((b(qF)3FhqF)3FhSsFhh''4ަ=1Jސ ZibM<`M#h4!ڐgڀ)sH(`ъ\ ƃ8Q@AKN=(-@RJNiJ%lPl0qJ揥0$WE7S1@ MdRg&:JŒTXc;Ҍ45 9 ԧ9KA4.Woi3@\с@i3GqHbϥ'93C }Oj^}(8❌v`SQ-((4Q)\LP {.EGcfԆJFM094!9v}!уNF :n6bf4 FiM'?H=9i P) 3HO>C FXh贻#Q tS|Gh;|&qϭѓIRJ? ? `QERQ@?yEJ9hF4psO^P1I!\M4 ڀsڗ+IgO 7i*Q4j3S <c5W'֔<qFڀ%րJ*?1J{=s*eR9h<G4`1`ъZJ1F(Q(b 4fLKE%b6M)6SK Z((`zQzP0dzѴzQHppҗҀzѸߺ)vZB@@K(4f!8%0OӷJ^fڛPb'h+HH< hOzpP RfȨv)3QN"izTѠLg r)0-1MRz8!SS0jRɎ44 P1@4f N&41I{PI<f#i ~ڢ>j@I#b9 r(*.h 7 3`zvR) 㸧ɐTcހҀ)HZ8)xZ8z2(`zQF(ǽ/'ϥ<Hq֙ (E0PG^~ 9&)p@撐G(IiU N@ZNzvyCց ޴FjO)sKHN20WR.|4ɓ@NF~Ť?z zO*ަSBZ)LњZ8ѓN(4fPsFi`Prh;b2iإ̚3RȀ`♊mQmQmӱI@ 4sG4bQ\RsG44f Lњ\ 0)3Fh KNMi @ғ qG6SғpRzQ@ E;!cJ1M7>߻@ F=qI(t&h )r91 0U6zS%6( zRʘR+`N.jF)Yph(Iә02 7$8ސ *L` )CޝފM㱡~C(OZP41@f›EhfiP'%6Ni“ݣlIH(8q<LS@ub4fQ\&%;)Pm3:ǰLp9z֠c5;03FCR~5[ޏ$8T<3(OT_?.ސ<҄b*0s4ipE'HaѸA>&hQF(8 jESi ]MsӳZ149..i ݤiCL8ly1Gލ)7P!'yy+)Cs@l7_Z`.0{JuHiF7 (ϜM89P{h)QEQE(((LњZ)3Fhh@&i(3Fh њ(Q))qF(9\QNhP2A敩Tq/ujZ6iԇ'4s@9bӨ SM{Qp.yq@)9Ӹw: C%%E@A c5/&"jMzHGLS'T!آZ)(iPFKbi&&w"bDi)tV:YqOr) &n,w)u?h fu/j2&Fh0zSK(=)PzSHg7 SF#|ɣ8} &G%4PъL3E.(ǽ%4(`(1@4ǥ#J7LFiwL*i2޴qqn`9@~PpBXT@LQh8%yc[#fh u7IGG/qgHgS̚9C(2JҌyzSK{S4`ѺbRn4dܚ2hأE;8ъvhȦSq@\x*Ԛfrǻy+@ `RPfG4Q@ G4f9\њL1KG4-.(hJ$qGP(r aL"4fQb qKwhPfъ(()җ4nM1l`ԁ=iCNUC9)S2jHYW;@BF):TX|J4H*%85 9!$sIJZ1F) 4f \Rdњ. jvs@1N(Sn)qKP`Sy?Jh&48 MqIfS1֚INShE7bZ()@ 9(mIAHb8,piҟ6;TX$4yl9ѰzɎ85G3I})=h(4S3 4i(=ޔ.hv5O|SwSE8 iKҙN\h&qK@ G8I㞤 ҔqFُJ7/a@m=Pc-EixO(EU|{P짵.W)GG1ͣҌ/23FGj@M*TBm=f41hfсF/PfZ()9Q@ idP`ъ\2(1F)w M@ 1Iu4444ff\zE!8┚Oz3Iޖ3Fh(sFh~NMZFej Hc=) y}i*,xS1Jz4ڑ1A֠$ɫl5]4&*(>xBE))hQE%f\PJ3F2(84f)ojMƔ6^MmE2EM/4Rl'?(p(2Iǭ81IPdQKQϨ()y`sG4{с~4~4(N)sI? _“&^}(4PF=(hchIj~Ii}(CҞ۲y8oSqzAvPzSeh>(E5 '5)U `\p)HqLBQ\Q@(f Q3Fh.)4QMQM.hi9u94)>iO34`ъ3Fh4Rf^(Ȥ4@@@E: )M(B{Ұ`sM O&LBԔP3KFE%4`b\њLQ\9IbioF\t4ho0Rnԇ/.h@$d<R9.rzwPuj \iSb69N9IE i3Fhq(&hɠbA Z1F()h.((h%m%@=.)`fisIBl(7q/F(Q!3ILPFiњo44`ъ7Q1F(;(<tؤ1drB8ysB01ޝ b:qTQ47Hi=77.6Zn{Ѹ&34fhھ)W֝#EEIhھ.FES8 i=&[ҀSDc7̷Oo94_a/<=)&y9}iprinD aMZ6s֐ ,T 9A L|#BĎiZ榱aޟQzPY4sRzRAIN E>bpCw撊\њJ(b њ(4f\PfPf1@ 1@ 1@ R@ E3bFh4bI@ 1KE&)qEbQEQE)iPi@ɠKQ@PEQEZLK@42{Ȧ56M((i)ENKRPD= u@cfbEOLIhBc .M d0-%P3F(fR4ϵ74Pi{b}㎔N'Xڐ e=4-Bhq.Fud@ H(p>~tgNȢU#8(QEQJN=iBjz\/)P( (zѵ}ivMҀI(@ }Atmi 7M'?Ln$F uYJMI @2)Ѽ׊C,bjg$R3P)\zSKIv ^3E1&iPE6Z)(-bPESѶE;mhSѶG5 \)Ѡ-:84b`7tQ}%&(b((3@ E&i3@\EPNM.M8d)r֓Ҁb(qF))h4bJ\QLK\RfZ &i3@G)WZ64$bp)2i׽7qXu5&NzSMAHdc#S dԓi0: xJZbQz0h()i9-PRF( ѓKQ&isGq@isIK(4f&\z)◥H8-IN.h%.h%4PbPQ@ J(@a$vsچ$&1vFI7ZwGL4y~y~( Mzm=iPTC~.G5%ELbɢ%cӭGzE>y4sOZCE(S&Ǩadi! Qp&yQNyb)&MMIh޵ǽ)`=SNQ:bNO"$PQK1@ E.(%Q1KLњ\QLњ\QLK((4f Q3Fh4f (&hhZ)s@у@`у@4& . 
.\n)+cZp?/=iE(bPy96fFR$jO"PLQp$^ҙsqK(=ғyqHFh=ӗcCyfi W !cc>F3B# H4͹<SBu-%%:mRQEp&2,4gҒN2zN+ Jm2BI>PTˎE !6KzMր _g(r=(ȣh*= c4)Lh)v8hZ6^)7Z\\1) ֝H+@"I ԛ/FV ޔ ǭdP1qdRn@ zR?2Jy:?4aۍdn3GJ@< pN4兏^)H >ړm8 y@6ҷAFp9&6њ7Sm&MQn7P(♚3@fh>(挊efE;uPq] e4PEQERR@ K1F( 1F(1F(>`dgᶐT5)l)]™@h%4-<QF(fRњ]pM6_—)Rq@BM5ORb2zP9%7m/NsJcocHMI a dʬ7/K3lQP"Q椦!4`R@JJ0hPQEQE.i(=h ( ( Q(ғQE iqɦqMQ'qLBQIȣ4њvh74&hQEڛKE1ԅi"kuԑ(,w$g ( (Q@`ѴPE*1f @ kzQPbO)ym>08@pv^j6rhݩM.wwc7<t&@ȥP'̟Z\ӿ (\B a9Rܣ"jez\<cFRLD{  IE7iNM9I(qM%&(٣4R4nFLQ]fQ3Fh(4qFEQFhњ(4fL@ 3II@ 3IE.h&(.h&(.hfъZL1hh^J|Z(b (;Pip(qp'Ҩ=z nF1M@i1*P9[?bīQBc@CҊTnP(((@Q@ NQ))8#QK3@&Ls@ k q= zҚd}%%)Ԙ)EE2vHF((Qv@ PQO*(P*LJMhGG ;bҗR^()&E)lS蹨>2;h%$P40P684J'9NZAȆXҒqR9ɠƧցI iw,Q`bh(Nv;SXf(O\RњLQ\њLQ\њLQ3FhQ@Q()&(b (.()h4f(LњZ)@)@4s44X I@7"3E~Ef Fi.h&i3@ɣ4њvi)3FhM-742$t"> fƞ) +撘K<~TSǙIu®`ZvfT`d A8)iOƗ4RsIEP@4(٣4(sFi(.(ާN()1 ;S1R/21F)h!1F)hQEf4RњJZ3E4f'ѹ})Jzy~hsK}h<j<j6J<jO0R%74o4'_J@(`J{Hp4[ޔ>(C҅3q (Fi"a.qN<ۜaC4CJ7ɧgb(6(-;u) @<Z)(4fQ@ 3M њ(4f(Q@Q@Q@ E-/)Q@ E.h%4f\њJZ3Fhњ(4f (DSHzPGҊ@~a\I3@ E&is@hQE.i6>!:iJ!ƅ&ON eGК6cCJ"B((P3E%qMP ZgSRQH`RcRo@J2zhzpa@ L9ɦE%-9M4EdӁm4QE%f1i( \撀3@ gJ2}hR`nHjTPgMP]>jvͣzZ q!E (ccI֔Ԝѽq@ h3җ7ҌS1 1I\Qn7PF)3@fh?8f"3@ȥ GEHN*(FeԛPu%fER@ E6Z((\QJ)qF((b\QJ)qF()h-6Z((Q@ INr({M+|џjCI#-%1p~pz":AHdjQRSfC@ E((S@ EPEPEPE.(%M.(4("⛃N0F)sFi1F(&hqF)3FhqF)3FhqF)3I@⒒)M "iZ7v(AI`fޔ vFH zS>Mҗ(>Z2P|Zuߗ҄+J})UT3ސ'x&Iz1@FO(CKe,P3KOzNF֜)ؠ  2h]КNihihJ)hJ)h((4f(((ZJ((Fhf1K3@ 1K3@ 1K3@ 1KL1FhZO%PQNE;b()6h`s)@")ך ) էJi[!h*%PQKI@Q@ E%RQ@^қIQ@ IҊC@ z$0i㸧Sw!⁋! ր`RP`zQR@ SJ( PR4NN4 QR`Q@ڗ#۞=(:)̼IP!7i.h)H9Q@`҅#Ґ@ KOUSs@';f:sMvxzR4A4Crh^M!@I.i)(J)Q@ ERQ@(PIE-PQEQ1@%.(I@ E;b\QJ)qF((.(Q@Z((44bQ3@(4bQ3@(.)(>J()\w440  *.nɸ_)\wdMJC:ԵJFiZ1Jh#RQER@ E-R@ KEfQ@ Q@%-=ÁKE# ҒJ`63Fi(4f (-4Qڌњi9zSJRoa})1@o4p*;PrpӷZ7Z`3-IRdz61ڐ '5,{vё@sFKRo(\Roo=h W攚[fU99Z" 1J)hhhZ((6v(6SF(SE7SN(Sn(;((њn)qK3@ 44PbR@(P1Eb~cO0}GJpRrJ\ьъ3I`-4JZfM4Q@LneH>d9HdtLAE QԈxH84$ojY9ȦȨ#SR@ E-R@ K4)N3%.8(!fb zӃ(GNԡxpPo`PwRn4(ѓK q2@֔II@h$S(nȦR16]ԛ4m47RhI8ɠJP`H@I<P8muPhaidTX0z擊^}Qځ֝ @ 8J`@ %PQKE%PQKE&(-PbQ@Q@Q@Q@Q@Q@Q@Q@-%RQ@ Fi( њ((xT-JBc4(F)dPqF)٤&)qFhn.(3@(4Aj Qӓ@ N'&4:MMjhPtE# )GzZb4fb4fb4fQ@ E%RQZ)3Fhh4PERc'KJ) ZJZ& fN<J`6v(1 N(((S:m4(44 L1KMn&J>I@}(im/!;bQRE.KQP2_%6y~ SRaX%GzӶz6Jn&?``DtT&@QJ@4Tnivg8O R惃G#WiR dd((Z)(-4f\њJ(3E%RRKIE.(%RQ@ E%@fh?)f3Lf((7N((N@ShqG(Q6Z(<'=cI M!ZZ):RcJ)TC &OH}$JEPEPEPFx(ii=n:SM.zi $650rifo1i}pqC4;ChaE-%%(hq/(dbE(84eb1Q@:據}!E* {(:4 @<EoJv7t@ sϵ uZ`u}z֐8R1fE-4Rf\њm-.h%4P4Q@hb њ1F(J(4RfI3@ E&h-њu@E:4PQ@ 3I\P3F(&h.(&h)hE&(-)+ӉSiHbLbNO-M'<J9 %1%R9$G)+i L(ɠ @\qMQ74fFi4RfъLњ\QL@ KMfQUJŠZ)M)J)(PbQ@ E-Q@Q@!Pg9M}jR)0=) fFzQ7y4S g4a'(C@FN(j5XւO91OOgpP2ޗOmoZ6ZvG3aM.F( OZR)N<S iJ( (ADɢ& ' {RRL(LQ)h1F)hLQZ3@ \Q3@(4bQ3@(&h.(&i3@3@"E;4fK@ IEQEQEQEQF(Q1@\Rt4QK\Ph;b1NSKmQ@җuI'8IӊJCi-fcQGS@G֣)"4BbQEQEQERQ@Q@( BLf&; CE=c"LAE5& M Oz\PM'ր$GAi0}iqI1L"84ZCrhzӎ1@ Ku4`*Ҟ](4:Cu@J֘jU9C@ f0iOɠ.z i"18<Z)6 4tg{HؠfmƀRSQEQEQERQ@ E%QEQE4Q@hb RPQK1@ E.(%RPQK1@ E-bE&(--&h-QMQ@ 3IE.h%4Pv4f\њJ(sRV2GZWԆ6=crZJ RI9w櫞M! Zb ( ( ( ( ( ZJ(iib ?QG):`:PiQE1Q@RfLњ(4PIE-%P;R@ z0޴dszP1qh)77ҁ =)iz>z}1F@֙ѷހǜS’8 F~!-MH`#0EPhxq7vA&F))I (+2M&fN9 f*Uzn8h'4 њ(4((( ( ( JZ(((1F( Q(fRfњuњuњuњuњ}!SsE/QN2~h2~h2\(RQE9[ ҙFq@ Ai*C.{t:QHbҙJiT(@ 0bSWaES()i((((pB}*h ۥR'ڒAb QE-&hҖ4 nO'4n 7" Ѻ`mɤ,hvсH2hQIGhȤ'"ÓK ^hR`Ґ)qF.(=8`{J(JPlh=)(M%Pކ֐?革`HK1LBQK(( Q(b(()i(ZJJ(h( (-44f\IE-%PEPEPEPEPEPEPEPIKE%(Hj*0i iӱHzS#l=V|HdP\SSъufb44RfZ)(\QI:@^Tpnho6Sh!ShRQEQEQEQIE-Z) c&@&h&hPQK8@@SJ28j>E!sN1M+ӟZaQR<`E8!@ R&գjPCbRzQLAzѺ('SKLF(4fњ(((4f ZLњZ)3Fhi(LњZ)3FhhZ)(Z)()s@ )h(PQKE%RQIE--64SsFhQEQE($S2)$}2:J^$ 4LH)GZNR 3dQPNhRR@%-%RQEQEz( dQ#ѸPX9! 
&z i<b .=M.y1:ъ(&(-fNM-%(8ϭ% JZ(F1@8PhKs@ ;[җ2iyȤ HN>ƛ˸ c Phh${S@F Y7 i1J܀i3@8Q֓4s@NZ}iG%fRfE4QFhQ3@J(h ( ( ( ( ( ( ( ( ( )qF((S1FhQJZ)((ZJ)h(E%(@,I,y*:C C-*. -*K4AQ@NQFi3@ E&hf4f4f1Fi3@ ZnhRfZJ)(sNA( 12S*b6@-SRQ@ E%-PEQEQER-(ZMu()PvTӨGњGуRf4fNXَ=T4]0=i a)؃4XfI@ E%RQ@ E%QEQEQEQEQEQEQEQEQEQEQEQKE%PQKEQEQEQEQK@ E-R@ E-%PEPEPEPEPEPE-QEQEpԀf}hRf族Z@-8-3"ޔ rdu#abE)$EXdRRLZJ( ( (<RrNdTí~'қA9Fiy(8HG=h+i 9JQ :SXzPtS砦%z 1@ INhZ]]mJ]h3IOhIE<IPi{RQw7zYs`t(QEn S`9iۛ4( IErzR8=8@_Cс֙EQEQEQEQEQEQEQEQEQEQEQK@ E-PbR@ Z(()h((J)i((fLњZ)3FhhRRfZ)(fsҜ0y4{OJCXBcM?'@Ѧ3SHњ)(QI@ IKE%Q@ IKI@ E%QEQEPQKER$(Q)ÚCQHҌSm1 1K3Lb4fLfE74f3M%?4fE;4fE;44f\QEQKJU`  ;S*Uu<H2(1XcސȨ_ bKIh=(h&jMQnF@n>n>RɥњuQ@ 1KE&(;PqF)ih(-PbR@ 1KE&(((()( ( ( ( ( ( (QEQEQEQERQ@ E%RQ@((“ޘiA" 1p3ړ4)Ei@cqM y**txTHRQEQEQEQEQEu8RH$E*e`N!)JLsNV(R92s e9:40(`⒘((((3I@ E g4):nM:n 4 j.QM(Q@iiw`p1HyE➧=F:S;R:lRuMLBQEQEQEQEQEQEQEQEQEQEQEQEQE1F(b(\њJ(4RQ@ E%RQ@Q@QEQKE%-Q@ J )(H9cMcԃQRb !@"M' 0}iBTeNE0Յ;`% C*p9UuQL)((((i(()i(UJ1;%I 8F[4b ( ( ( 1KE&(.h&(;4fmisFh6ъ\f Q3FhpPwJM杁F3qɩ0=(2:Q6Q9f 677Mƀ#}(i47ag:MmPRQ@ Fi(@J1@ E&)q@b(((((((( ( )h(Z()h((((((((QEQIE-Pʚe=՚(lSh!I&N qInHۛڛLAEPEPEPEPGZ(U\&y!404s!QE%-PIE-2LQEQEQEQ(+M(iYٻgPm)hPEPEPEPE: r=cQE1Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@-%RQ@ E%fK@hRQ@Q@Q@Q@RE%RQ@ E%-QE%PREPQKE%).9^r) ojS44Kcb"!ejC+6N7E&iiQERP()i((((H: GzV5#8 GSQ'-%SQEQEQEQEQEQEQEQKE%PEPQKE%-PRGCQSʠ`$QMnO1QS(\PQK1@ F)hRP1KI1E4QI3@ E&h-(ԙ4f\њJ(sIEQEQERQ@ E%RQ@ E%RQ@ E%RQ@ Fi(PEQEQE%-PIE-Q@Q@Q@ EPEPRR*]ǭLw5)994QE$]*G8CQER(jFW*QEQEQEQEQEc4u84v5Hw # IKIL(((vM4h((3EPEPEPJ((()h(ZJUϵ!P}i Lӳ=)4(qRS ii(((((((((((((((((((((((()qIE)4((JZJ( E6򷱦*y:sڐ ;I4` zTR_84M&LAEQ@Q@Q@RQK1@ E.(%6v(Niv=) I;qf(=Jb1N1N&hfJ(hJ(i( ( (%ހpq(qGNߘRhǛa>'PTt4HJͦ'?4R-1f4P)((Z((RPQK((Z((Z3@(4b3I@Q@Q@Q@RQEQEQEQEQEQEQ@ E%QEQERRRQ@ E%))hZ5RpRc")J)qEM?4n)2ej)i*QEQEQEQڊ(( T'5'M(ɦ31 %SRQ@ IEQE>M%QEQEQEQEPKIE-PIEQEQEQEQF(S)SK€3(ʛJNi)(((((((J(h(((((((((((((J(hRJZ(){QE)h(W:6bޜkM  :SMI?1R ө>b\bR1Fhf\QFi(i)i(qFEQLsFM:2hzө><IқE.h%&(ZJ((RR❊(- BtS doi.Hdy4T.E#SoZ\њ`&҈}h'}i|ӓAsM3F!(((ZZJ(h 3E%.h%RQ@ IEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQE( (֤q GJES0hҶ3@ )⫯&^&2)W 2 )(hJ(hJU8<{ԑ 9֐в}eJF娨B JZ)R@ E-R@?֙O_E2 ( ( ( ( ( ( ( ( ( ( (fRcsHb'Z@q@M1Q@Q@RPEQERR@%-QEQEQIE-QE%-PEPEPEPE-%R@ E-QEQEQEQEQE-PIE-PE-%QEQE-:?L!R9˚Ná!*SȀEEJ ^EF (5`,e*H:RQIEPKIE-J)h()3@ E%(E.iPE%%;q@ - Q(8\њLQ@ E&h-:ni(٤%QEQE"[GьvRQLv)=(((((((((((((((((((((((Z)((((((()h(Z(Z))hT# 6rǟJh'=hWqrsSJE\ W:RmM6)(( ( ( (7@E^໎ODxә eQEQERQ@RP\ziO& ( ( ( ( ( ( ( ( ( ( ( U<QؠҰ2:Ai (n|4QI@PEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPERQ@ IEQEQEQEQE-%Z((Z(4xHbxiϽ(By< JPQWɤ$S@y>]ңNZ)(((((((Z~AM-.I@84--\ҙN744-:)P)3IE.h%((((((*6DH͹H)&h ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ZJ(h(*Hr GNPx4ƛ@M1PlLhsVAȨ\(@ih((((֒ZCMǥ q֞T0CE)8 (h(! 
m0i)M2 ( ( ( ( ( ( ( ( ( ( ( (N E(i(((((((J(h(((J(i(J(i( Z((((J(J( ( ( ( \EPQKED8jaHbOZCKHh(Pj&rJv1#ȠbQMnQIE1Q@Q@Q@RQEQEQERQ@ Fi(-E&OPREQERQ@Q@Q@Q@Q@Q@Q@Q@Q@Q@(KIEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQE8jm Sj^}21H`I5/QUҐȜa,T(`QEQE%PQJzSS@)9M/qM!99b ( ( ( P2i)z% ri( ( ( ( JZ(()h ( ( ( )(Z\P:=M!HNiG4bJ(hJ(h((((())h(((((((((J(hJ(h((hJ((0i!q3S(r:RQL ruo /#M#ҐyfP0EBh斚LAERQ@Q@PQE-%PEPEPEPEPEQ@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@ EPEPEP)۳֛E;4JPPaI֤CHhAX" gҀ!)((ր7JU84<1֙y!4BQ)(((()eHӓPh(()(h(PEPEPEPIKE%PG֕}}) SɢJ)h))h(((((((((()(hJ(i( ( ( ( ( ( ( ( ( ( ( )i((Zr68=(tiG8/zC#E1jXs K s8v'^@-QE1Q@Q@RIEQEREQEQEQEQEQ@-%RRIKI@Q@Q@Q@Q@PQKE%-PEPEPIKE%Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@ EPEPQKEPIKJa@ӣjӣHzS_sj7o#EUJZ((4QEQ@n|ڒR3JQn JOCESQEQEScM)mQ@Q@Q@Q@Q@Q@Q@Q@Q@Q@))MiJUNƚ:R) J(((((((((J(hJ(((((((((JZJ((((((((i( ( ^ ɥ' ^I&Nï(˟œ؉ڨX0 hQKE1 E-RQEPIE-PIE-PIEQEQEQK@ E-QE%-%-%RPEPKEQE%-Q@Q@Q@RPRPEPEPEPEQK@ EPEPQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQE-Q@Q@Q@f|zi2gc42^5/ZnԁQKEPJ)i(((h=)BAoN! i '%QEQEQEQE4J( (J(hJ(h((()i(J@/USzp84 J("Z)(Z)(((((((((((((((((((((((((tr8C/qHbRP"gQy:PJ*縠dtRg(`QEQEQE4Q@Q@Q@Q@Q@Q@Q@Q@ EPIE-QERQ@ IEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQE&w'G@8xÏza8NNG7AO' QREEU(((րNM4Ґsm:OM (JZ)()i(zH=OJ ɠZJZ)( (((JZ(((^Hi p4`VmS(((()h()h(ZJ((((((((()h((((Z((((IHi {Oo2 ?;"}Z8BQKE0Z((Z(( JZ(((((((()h((((((((((((((((((((()(h(((((((((((((((((((JZ(((((((WK!9i16v={TN*@QEP(((@z UwsaҐƱsIEQEQEPQERi)z Q@%-QEQEQEQEQEQEQEQE-(hQHNM/Ai Z)(!h(((((((((((((((((J)hJ)h((((((0(:6'Q1 ,Rw()hQL(((((((J(h((((((((((((((((((Z)(ZJ((((Z)((Z)(Z)(ZJ(Z((((((Z))h)h ( ( ( ( ( ( ( (((((Q֟'j(2:|tQCJ~OJ( m%SQE0(QE!OL=h (((((=PEPEPQEQEQERE%-PEPEPEPIEQH^Q@ iQL((()h J(((((()h(((((((((((((((((()h'z(})Q@}C4Q@QLBREQEQH)QEQERQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQ@ EPEPEPEPQEQEQEQEQEQEQEQEQEREQE%Q@-PIEQERE%Q@Q@ IEQ@Q@Q@-PQEQEQEQEQEQEQEQEQEQE%Q@
JFIFP7<F<2PFAFZUP_xxnnx"1,@h@TY!65]0XUZZxixJJ" }!1AQa"q2#BR$3br %&'()*456789:CDEFGHIJSTUVWXYZcdefghijstuvwxyz w!1AQaq"2B #3Rbr $4%&'()*56789:CDEFGHIJSTUVWXYZcdefghijstuvwxyz ?uU%PR(bREEPJ|P(_I)jZAS*IOL%S(L[b,hV%1QL)(hJ((QEQEQE%-PIE-PIE-QEQEP> (Q@Q@ :JQPEPEPEPEPEPERQ@ E%RQ@ E%RQ@ E%QEQEQEQEQEPIE-PQEQERQ@Q@Q@ IEQE(((@Q@ QҊ=(G'ZLhXU5T4 `zA(EPEPEPEPREQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQE ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( (EPEP(aEP!i)i(BJT"zA™OhAEZ( |?t)aI zeAIE(QEQI@ E%QEQEQEQEQEQEQEQEQERIE92dShaEP ((W%*)(hJ(h ( (Q@QH(Z(((J(()i(Z)(Z( ( J((EPEPEPEPEPEPEPM~F)Ԙ;RUR~^iOZCh`Os KsvSQE(@QE0 ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( (Q@Q@Q@QL(EPEPEPEPEPEPEPEPEPEPER@ E-R@ EPEPEPER@ EPEP1{RRPEP Q@•>%*}L8u29z>0t )PNB?֓K/AQT}ʆ (b ( ( JZ(((((((((((CҜi(@Š(AEPHzR@jJruJ)hJ)hJ)hJ)h(Z((( (Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@R`Xo'UvSTw?V#Z"k?['**0EQE0 (QL(((((((((((((((((((((((((((((((((((((ES((()QEQEQEQEQERQ@ E%QEQEQEQEQE%PREPE ( JZ()h Z(J)<H> P)(9>0uGSPzLZ):84I,7(j rABQE1RPIEQEQEQEQE%-PEPEPEPEPEPEPEP@Q@)(hU84wHN0i2= .6( ((((( ( (J(h ((EPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPGz);4"*v?z#ZZFX)D?QT_ t1E%QE0 (QL((((ES)QEQE-%-%-PQKE%PQKI@Q@QL(ES((((((()QE(@QE0 (Q@QL)iRPES)QE(@QE0 ( ( ( ( ( ( ( ( ( ( (Q@Q@-%QEQEQEQEQE:)(QEQE ZJZ))M%9XShQZBs)J:Q@M=)24RE( ~M,' u5,DxCbQE%2B(((((((((((((((}@Š)(QEQE5SۢJ~'(!hJ(h())h(((( (Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@֊; La}ꝿ ci`s'҆$OuGA+G҄ ZZ(=i(`QEQEQEQEQEQEQEQEQEQE ( Z((((J)hJ)h((ZJ( ( ( (Q@QH(ER((h(((((J)hJ)h()QE(`QE (Q@Q@Q@Q@Q@Q@QH(Z))h (Z( (RRi(Кyԃ4#'x3`u AXƛJ&wP:@QLAAh%XJ>zXOK(S#* ( JZ((Z(( JZ((((((((J) 4%S 7ݩTOqQEB ( ( ( ( ZJ(h EPEPEPEPIKI@QL(((((((((((((((((=)OZLd{TCI hs}i Hq_*k5 B-)=1 ES()QE((((Z)R@Q@ KEQEQEQKI@Q@PQKE%QEQEQEQEQEQEQEQEQER@ E-Q@Q@%-Q@Q@Q@Q@RQKE%PQKE%PQKI@ E-R@ E-RPEPKIK@%-R@ EPEPES(()RQ@ EPQKE:(bQEQEP(E J?zLԏ=4ɧRv4 u4SޒS֒QE1Q@J>Pj85=JCEDE%RQ@Q@Q@Q@Q@Q@Q@Q@QL(((()%/PQL(~!tJR؎(QEQEQEQEQEQERQ@ E%- ( ( JZ((`QEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQE (}A R+ TX8R9V)UMc֝ķP<jwh@RL(Z((Z()h ( ( ( (((((aEPE-R@ E-R@&)bF)أ)hRPQKEZ((PEPEP0(QE (Z((bJ)hE-JZ((aEPE-R@ E-QE%P!( (((Z((())h(ZJ( ( Z((QIK@’(P0E)яS*HI OM,h֖R%/sILAJ>Z )(bՃ5[)"+Ah$J)i((((((()RR@Q@ EPEPEPEPEPz% (Q@ n)?-Fv?v5(AEPEPEPEPEPEPEPEPEPKIE-))hQKI@Q@Q@Q@Q@Q@Q@Q@Q@RQKE%(E-J)hJkSGݓIީ  \\Ue'Zo?W:`6ElY[{))q1X POҠN=P/ cUQEQEQEQEQEQEQEQEQER@Q@Q@Q@Š)h((Z@QEQER@ J)hPIKE%PRRL(ZJ(((@Q@Q@Q@ E-QE0( ZJZ@PQE))h((((PEQE-(Q@QEQEQEQEQEQE (QEP(%RRG“Qԝ"}JX =QHes֒t"J`)IJyKIK@=G֬jxS5,D1I6QEPQKE%QEQEQEQEQEQE(J)h))i(Gޢ(`QE#}y)jCEKESQE(((Z((Z((((()h ( JZ(()QEQEQEQEQEQEQEQEQE-%PQKE%PQKA@jCKR5!_Aڧ?BzPiH;S|\dz}*V O?{Ҡ$ u(E-P%PEPEPEPEPEPE-R@ĢZJZ(Z(( E-QE0 (EPEPKIE-PEPEPERQ@ E%-RQ@ E%RQ@ E%RQ@ E%QEQI@ E%RQ@ IERRS(hK@ E%(`Q@Q@Q@(4fL@ E%QEQEQEQEQEQEQEQEQEQE ( RQ@LQ{RB*|_zOPR !ɦS֐QE(wP4Q@= OPԱƆ2@ŠJ)((((((((((((((CNn)>QEhݩyKEQEQEQEQEQEQEQER@ E-R@ E-RPER@ E-R@ E-R@ E-R@Q@Q@Q@Q@Q@Q@Q@Š)h(=)i;ZkSҐ_BzTWPІ(Ii'U vv۟/{7ҫ}I(*QE (Q@Q@Q@R(Z()h JZ(((Z(J)i((ZJ)h ( (JZ)(((EPEPERQ@ E%RRP!fE:mњmf\IE-PQEPEQEQEQEQE%-%P( ( ( ( ( ( ( ( ( ( ( ( ( Z(((QE (tcSSirƗQޡ&oii>SHd&ҚNB JZ(PzSHbQE-:ɦi1Y~5 NPPQLAEPEPEPEPEPEPEPEPEQEQK@ E-R@ E-PQBi1E-%P_R(((((((((Z(R@ E-R@ E-R@ KEQER@ E-R@ E-R@ E-Q@Q@Q@%-QEQފZ@ҖPMGR?Eڄ0Iiz@K'{7ҫ[}OګmM AREQE%-PEPEPEPEPEPE-QEQEQEQERR@ E-QEQE%QEQEQEQE%PQKI@QL(J)h((R@ E-%QERPEPEPEPEPEPEPE-R@ E-R@ E-R@Q@%-QK@ ERPEQK@ EPEPER@ KEQEQEQ@(`i(4P &&5/H#I OS)}"jGp/j(QEҏR"Ģ)(OQ@8`i1PT4 bRRLBQKE%PQKE%-PE(4QEQEQEQEQEQEQEQEQERF( ( </ftԣIƆwS(QEQEQEQEQEQEQEQEQEQERQ@ E%RQ@ŢZ( ( ( ( ( ( ( ( ( ( ( (4PEP@hGj@ҖPOQv[OQHaB'x>0'oکoզsU &=)i;P:SQEQEQEQEQEQEQEQEPIE-Q@Q@Q@Q@Q@Q@Q@ EPEPEPEPEPE%RQ@ E%-QEPQKI@QLJ)hJ)hJ)hJ)hJ)h(ZJZ(((((QK@ 
KEQEQEQE-%P1((AEPEPE- JZ()(({Kم+4Ou'.($.*S$ )}m>/CL)}N !REPBLT`M& H=̠1 FS5XO> }RS$!QEQ!EPEPEPEP~%QEQEQEQEQEQEQEQEQE/RwzJxP (MJMDpԩ); Zb ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( (J(hERQ@ E%RRKIEQE (!H -#t4+JEI 4)MqL =RNH 1 EPEPEPE%QEQEQEQEQEQEQEQEQE-%PEPEPEPEPEPEPEPEPIKE%PQEQE)i(@-PIEQE-PIE-PIE-Q@%-QE0 ( ( ( (Q@QL(Z%PQKEZ((Z(()h(R@Ģ )h((J0(Bb}M- 2޴n?RRi<SM ` c7htC֢O"2VqCaA434&(!}ip)M%0 ssMҊHiiƐ"X褗tJ)QEDQ@Q@Q@Q@ i)֛@Q@Q@Q@Q@Q@Q@Q@RQKE%!ihnڒ JHaESOgQ5$t)1(QEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEM HaHvL P[PJHlU((`?+cUT`f􂊡 KM4Q@ E%QERQHZ)((Z)(Z)(Z)(IE-PIE-PIFhhZ)(Z)(-4RfZJ(`PRQ@ E%RQ@-%Z)(bIE-PIE.h%RQ@ E%-QEQEQEQEQE%PEPEPEPEPER)QEQEQE- Z((Z((Z()h(EPEPR0M?“Cފ(!0DIi!J i)M% ({(S !4tRIj(3R>QEQ!EPEPEPEP6Oқ@Š(AEPEPEPEPIE-PE%-Q@RP#RVԔ-QL5$]? OjH&43(RR@ E-RPEPEPEPEPE-%QEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQGjJ(R7iihfR*S7P8wTo?Zxi@³YWTc_ƒ`R^U%)QIE-PIE-PњJ(sFi(PIE-Q@RPEQEQEQEQEQIK@PIE-QEQEQEQEQERQ@ E% ZJ((AE J)hJ)hJZ((((()h( EPE-R@ E-R@ E-R@ KEQERR@Q@Q@Q@Q@Q@Q@RQKE%-Pi)i))i((@d?.*4:N `'z|_xZ|_x`d!KEP%/j(( sMu@i CIKILH*QU/ARƶ JB(((('Sӓқ@Q@Q@Q@Q@Q@Q@Q@Q@RQEăߊSHx0 mSTr}RG~HosH)OSH)Z( (Q@Q@Š(AIKE%PQERREP1( EPEPEPEPEPEPEPEPEPER@ E-%QEQEQEQE-!@ KRP0=)iC@"vT*9STi b4Oi@UQOz8$2U%/jN(Z((Z((((()h ))h)i)h(( ( ( ( ( ( ( ( ( ( ( ZJ(RREPEPEPEPEPERJ)hJ)hJZ(((((((((ZJ(((Z((Z((ZnpphsEREQEQE6)h(4!41Oe>&KڟQڐ袊Q@P /#қJ{ JSILna>ҦEKxjIze4!(J)hrwS)i (b ( ( (()h ( (Q@Q@=)ǕSMJsŠ(dUI =?ƒAES((((((((((((((((Z(( KEQE ((J)i(QKE%7zSԮ1SzI);ӶJn<P%0 CZC?@LԏB:JPM~Zw5}h|f©g9ƐɨE@(R4}HE; Z\tp)ۇaMlQAb ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ZJ(hE%QEQEQEQEQE ZJ(hJ(hJ(h((((((((((((((H2)> iV)hh((mQLAEPEP2T**EIE`zm:i0DJ^QT P((BP8`iq!HcRR J1J2CC/jT!0)(((rjm9~Si (b ( ( (Q@Q@Q@Q@Q@Q@*aIBI@ z 2OiPƅi;ӟozQE0 ( ( ( ( (Q@Q@Š((((((((((((((E-R@ EPEP`P:KE'= 㞦ڀQ?JZk h$m*#}L{T@TvJH1ʌ( $Z Ҫȥ8Df«;&=E!QF)iL q@ {QϭV 9E`QE PEPEPEPE-R@ E- J)hBQKE%PQKE%PQKEZ(R@ E-R@ E- J)hBQKEZ((QEQEQEQEQEQEQEQEPIEJ(h((((((EPES E%QEQEQEQEQEQEQEO@Ob?ՊZ(((J(E./P3I'>\LE i$vSƣ5$=h`9zjCE=l %rTii(G1))S%RȤ8SP硨(@Š(EP (QEMjm Z)QE))hJ)hJ)h`%R(E7ZJ^ %6OiWU(cC|z@(JZ(J)h`%QEQEQEQEQEQEQEQEQEQEQEQEQEQEQE(((((J;ր(ޚvHvQTCaڣ$5'j`(>˹Ui~.VKEwh@QE0 ( ( (J)hQKE%PQKE%PQKE%PQKE%PQKE%PQKE%PQKE%PQKE%PQKE4)un40(*)qIL(Z)R@ E-R@ E-R@ E-R@ E-R@ E-R@ E-R@ E-R@ E-R@ E-Q@Q@Q@Q@Q@Q@Q@PS~%0PKBPE-R@ E-(ZJ|cϥ2pi0C_Si[J Q@:/i&OQ81RRQE0 )i)! R0M7i4)iP}qP^EFxCchJ)hpNph(((((((((((_G=E>_>}Jt}) ~M?o4QEQEQEQEQEQEQEQEQERR@ E-R@ E-R@ E-R@ E-R@ E-R@ E-Q@Q@Q@Q@Q@ E-((O/a@%- HvHr%_D*_d>#}Hxz0$ʏX_׏E $Z)SPiiJ(QEQEQE-%PQKE%PQKE%PQKE%PQKEQEQEQEQEQEQEPREQEPEPEP[֥֥1SR@ E-R@Q@Q@%-QEQEQEQERR@ E-R@ E-QEQEQEQEQEQEQEQEQEQEQEQEQEQE )K'Kޑ?ՊZ@QE0 ( (ESRJQHSDjSt0DTQ@ E-R%,_xTR}ꖢi )(())hJ9A@z@#tn%lzNVNԐ((((gM3EPEPEPEPEPEPEPEPEPEPEP8}=i7ZdpүAU>=/ҚiWM EQEQEPEQEQEQER@ E-R@ E-R@ E-Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@ KIږޘrMo@G xWS #QI'ܠ = -Z? N$ONtS(PEP (((((((QEQEQE(((QEQEQEQE (Q@Q@Q@Q@ ~ZNn, ES((((((((((((((QEQE (QE ( (Q@Q@Š((((((OljC*4S@Ob?ՊZ@QE0 ( (E%-R;RE1F\SH}RRSQE}IK40DR66( ((((_FHa-P!5+ڎ35+}HdTRLBQEQEQEgiQKE%QKE%PQEQEQKE%Q@Q@Q@Q@ ' <}1MHce RIүAZm8M  ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( (Q@((aEPEPEPEPEҊCҀR- (Sܧ(WSJxUS қ'4Sd$ORCWSGTdҀ-rZ(` (((AEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPIOKE/(@5Z7qڥaœd2޴Q@[֌S3E'|EPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEP?JjK/G(SXObQL(m-P (ROSHbRw!h)bԔ}0TM chb ( )i(H*qJ(9үǥ%$6"JPTPzLAEPEPEPM)QEQEQEQEQEQEQEQEQEQEQEGbiH0Ғ_iW)Zm1J(((((J(hJ(h(J(h(((((((((((Q@“@- `zQHihtSҁ_GԃWQ>S_?~}*? 
u>fKOV*J(`!KAGj(=(Q@Š((AEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPE*B1N+}Ť1RRJ (Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@ iO4uAJpKH- (Q@Q@ EP )'D!Ph (=)bԆ?i  EDxh )i((- O֒ÃJCAB*T?(xiQ@ KEQ@?pSiNv JJZ((((JZ(((J)i((("ZFo/Aڑz!ˏzJz)wK@(((((((J)hJ)h )h (JZ((((((((Nhi?ҖzR1;wBS[SOݤ2ocTteS{PMi> "QUWSTYXpjNQE0 AKH:Z( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( JZ(t-C'ݩ@GE-) -'zZ(Q@Q@Š((((((((((((((((((((((((?ĥs!\=$V)iXES((QEH~ GO2 C֖-QE@8O50PQژZ)(J((n84Ҕ= k}O԰40CMݨ E%RQ@ E%.q8AE (Q@Q@Q@S@bzPKEPEPIKE%-44lJoyk@֑'Z9P6 ?{iC". !ax3iAzquZ7TS7ǽ---PEPEPEPEPEPEPEP05% (Q@((((((((((((bwC҃PzR AKH)h-PR7ݥn(AO'MH'4lrΨEtfYԆ=(h)iRw(()h(((((((((((((((((((((t~4 EZN((((((((((((((((((((((((AERQ@ IE `VR8"`zW-9ՊZEAIH#֌J)Z)(((M&4#i! ((;TšdQ2%S(@ (PBA@D'HH=!vOJ (b (()E60(QERQ@ E%W/U߅ KIE-QEQEQK@ EPEPc+"4!PxF{TZ=M))i(((((Z( ( (p||)RQEQEQEQEQEQEQEQEQEQEQEQEQEQEK@=E/Rww:Ph(LJJGǭ %TN4@ i}q >?PnWSY{PnTݪ o~=ZZ`PE  (EPEPEPEPEPEPEPE%Z)(bQEQER@Q@Q@ KERPKIK@RPIK@P_*>< #EPEPEPEPEPKIE-PIEJ(RQ@Z)(Z)(Z)((((((((((((41749/Pqh)(f-F}jJ((;Sӄ!) ZJb ( (Ah4LDz¡=M ( (E)JZCҀNޛ<47ސɇJ!8ɦ aESQERRP0(J)hqږnIUm░LBQKEQEQE%-%-PQKI@ 84Úiڐ'FITmҀEP (((((((=)i HTTT-QLAEPEPEPEPEQ@Q@Q@Q@Q@Q@Q@Q@z(h((H((?-)4SH OgjxSMdzdjуUg7 V*ʳޘzZJZ@QE0K@; CKI@ EPEPEPEPEPE9( ( (Z)((((((Z)(AEP0(֦O_M/AH袊bRR0(QEQGj(((((JZ)(Z)(Z)(Z)(Z))h((((ZJ( (ZJ)hn)x0AJtпtPt!W--"ť1(?_H:Siސ ES( P!z#2<ϥ,(Q@qM֐aIߩdTqT'J( JZJEQE HIZ֐AzЭ ( (=(IJݾCҊ'RRQLBIK@S_ҒB@5$MךZ#S  +ARRP )(i -!@ U'USLY'1jCVۥISۥ:Q@E((J(RQ@Z)(ZQIJxP1:?Qh)i( JZ((Z((Z((Z((ZJ((4E0 (Pr#@I,i㘪:^>w InZ0Oր$koYihZ( KHzR)PE(=(J(hO#+)E "8SiQEQEQE(QEQEi)GzJ(Q@Q@Q@PQqc~4@+FRTTn<P(bJ(i)OZ( %(E%PQKE%PQKE%PQKE%PQuPbR@P!(E-JZ(aEPEPER((((((M?}2_JG_(~ࢀ (#ilWO?mC)*(IsZuݨ9a*M $^슅♒i 4fL^bZ(l(#8tOZtOS@%-%0 ( ZJZ))i( (PEzt<) MIڢ^(@ ESQM. C2JI8L}M!7jm#7)SV8Y>T+ )lE[~v 4(8ɠ tUFYAJRN0ǚEVdpVN^zJɻ28<f&|f Uti'4p)iWB器I@X= CP`i +L *//pE/IE4H(e=E) -!($aLt_{$F >?~hhzQL(AEPEPEPEPEPEPEPN_)~ X52je ()h((RP!hJ(h( ( ZJ(h j(@E9oqNiQފ`PhZktM4UOִ UYր$YUoU`-%wKIE0Q@R((%))(QE()я) Z)(WQԹ)(((((( ((((JZ( 40yƐǯQf.r՗ c9z)Ž 2QL)h4 JQ֊@%-S((((((((()QEQEQEQEQEQEQEQEQEQEQEQES$)s2 Mh/PEP%9?ZsvE\dž ML(2zxLULM[#UR0S7t&* [:IFF@/ݠH(=(GJ50P (QE ZJ) (PMi=J҃MӏZC5HI朝() u9K?U@y`ךzTK:y)U1Oof$R@AM2q@Y<R 51@2QCJIb(+q#NǷ4J0z>1LqPS5֛F(_4'y5.(F=i<@ǵXYA$EԎ*<J6 deA!0[xJW@4q7=)QOqP#(E@#p4%d䊁ԈrhAo"d^6Nxy@TO7T]N)CFz⋁ ȥqObcU(DXE1Pp)i(L(QEQEQEQEQEQEQESSiG4 tuS)QEQEQEQEQEQEQEQEQEQEQEQEQEQEui3sHb (Rwa74Ґ`gzGQ3s@-5A}jƨ7V떭7?Znq*VJC֗RRRw@ Gz(@FtR ( (}jZF*Qi܀xKEGw7!IqLQE0 ( ( ( ( ( ^’((( (p2ij)2vE \a[4GKIE-JZJZ@%((GzZ( ER)QEQIK@RPIK@PEQEQEQE(((LњZ)Z( ( ( ( ( ( s1%FzG#@_(~ࢀ (?ZW֜~(_)h(xQJ~o3;:RmB1LyL)0B(R:'/AHݾ55Bzb(ER)^ԔvRiG.}ME$ÊC)}掟 >lUIOWgT[?ZQ0jEnINF)Ԕ"A@ ?xՓҫmJh_Jjpix{R rjaS$P9A@v)_0t hiW"??S1a@X)i$C *hh=)|hEH iNH,Jk(Rj``RtR(QEQEQEQEQEQEQESӏHb[LL( ((((((((((((((4~>OUӤ ESRw =iSC@H? H?ӇJk(KT7֯MQ?}\Q0>tzPAOۑFzxjZ +CL *JBF)ih6mĞH=%))(: `$ wcL 1ERiiЂ((()QEQE)OZ;z)()hjB@ETi]_ʁ' &D) j/!E4hݣCn3}v7:xoF43 tI?w?•Y@aE+\1(pi|=9yO"OJ_!(y>w@ޣZiiVK: (QO^z p?N׸5>(4%CޟLs֟P!R%0 ( ( ( ( ( ( ( ( ( ( ( k8Pi!i+ $QI2|nOS39@' -n)j3GJ`--%Z)(IE0J(h?2~zG)iZ)QEOK?Zy8ON֞(&J!i{d?ΔPE!L)硦ТR8(=E=E>=MMP)QEQE;QE AKH)h'cOF>\!ƖACҔyE>5}}@~(oˊs6@;M4u4 J((4 O75]9aa GtP@{SPR}R,v >*HtПtS[KEiGJFKH)NaL3nYb|T<?MzSGWZԆzSQ43KȌz@ƕnsRR) -QLAEPEPEPEPEPEPEPN?ui1cLjeQE1Q@Q@Q@Q@Q@Š((AEP0(QE (QEQE!KHz^!ZsM?y~@ Ik1I }MQ+}ip9CڍI44\ #WQd |HS FoMQoZo&ZS@ N`'#E[pF *&4gJ?-Nd8ɢ@*A֗%(&;L$UE23֊(^@2:}2:QE*RR6RQ@QL((((E%*dÎ\DMaҀ QL(RPE! 
RntZjԄڀ)}RGҒ0ӓP`QE-60j)VWQM(<HU%x\87zQE0 k}}i> /ҊE?(E֘ E3x覣nE((QE (Q@Q@RP0$pqU]YZb0i;QHI'\S<Szҁ $i6〦1dԨ1L(Q@Q@Q@Q@)SbV)d8(ԏM-5Z(t}((rZlK/8q!f&q@OJp`s4Jx@ E;czSqCJ{t4҆ҎAHzQҐT'!(`QEQEQE R Z`#)A}w@(|gFFy߂1OsesN* C1ۯZ\ՈU |ƃPMh CKHhQځҀ) G6hW5 u-wsNd)QCIݥ=)1@RO%:22A(|T|T@ETÌdZv~eo#}(iQE$GZtGҁFbq<{R/48R(E1-b ( ( ( ( (Q@((SRH?4z5Hq:(b ( ( ( ( ( ( )2=i @QL2zSA0qI@QL((*Q֐u)Mr)֣kP7MہI}i49ɤnT/&$A(\.i)[oeT}Ai45Tajݷ?Zyh%WWRO֭,hv zQ֐oIږ?~QHX)ƣ21Ru&W?61^XR7zYN%CC 1_nCRR)()M'iScScCCHcҙNNJm4 )QEQEQEQEQERRiG?JVCE+Fh|~yޖzf2?z.lFI6=?whVB8EQޜ(A@%ܪ=6pqڦ27FiAE!.ۀ\2ޣ) ?{R*O5(QE0 kN<@ ()ȿ } :r8yUMhSIǟJ< a@'@Fh((((((4"taSjBd1ӱ@ySTsKɤKsDEKhz(QL(((LviE2ޤ)lS7MJFE5mKǖ_ZJ)z}#` @SegN1Fris@!5K9E6.\+}) csM2hLU#EhJ[H:S7*AҀ p5!)i(⪳Ƭ5Edo>5N-CHziw[)q@hH[$h\dzS#]H! RΙ]r:P@n8i7aqCN.j-4E0'R)"g Mv;~(ZU~4SGZwjJ() -%PSAޒs>?ju5 pP@K#"T@ 78o?i;4fi4dzOAQo4ۡIyˣz[h_M_)OYSW, @ :{әf4}Ҧڢ~Hc js P Jè8Jڢ6xY B)jjhsiV-QUiwZ.;PddҸXEWt&HޝTBoQO)@:iaKfRf* r}H~Ч' [S)QL((AEP0((J0)/8,*)9=YIjNQOH8P23U[w'Z^EBFQE0 ( (); uq/֒n$cLBL˓މֽ!90 9n^< 3 n I( `)3LS[4LSʚ.Cvpj'ހFAjQpPJ0(ǥ!<@ ҕš`N@ #@OSJ(FW3}w4Y^i?cޘztGjN}_E91L?qi()N}))"bCJJZ) USשS)QLJ(hzөr+G?F7)>P;0i|cڅjWJ P#L^9 9 RMSRP~IOU%(f!jy?Ry?TS\&})Q7TR '5=³*?5X(Y?)H?@ 00UʊhyjBI=jE!q6 Ӊˀ;ri[9zԋҒ@Sh~4iM ﶢ>I+qM@P 85&F@E9)g4֐XT8py`QE0 ( ( ( ( ( F<RMVS N皅% Q`f P MJD q@*((((@E/ARcޣ~gQRsM)sy'>JJN}dT ďA@K7QHxRn(x)(uHsukK".4PGLĂ*njqҦEDpp(S XЄb`'ph7 RqGR'j͔|jDÐh4!.i3 lJ{ B<ϭxެ?{Q@ʧE–NXW bE?Dy?ZGGQG!#2Ƙ~M~Tq GRf4C@9hPbJsS (ǑMSJH rsH޵ru$1.94ohLT] 4MDy4Q4tb4SwaQf+#=Mo C'=4a3ߧ/5>2)>z4ޕ!AJy?Zjxހ9CMR?2NʎR;OqUzUv>M@ ~4̊xǕzn0$S'ڑS?JtC(G9?c:*hy2}-R:T&3L+I=4yb6jhPz>A@Y)%bB}*!S2gҁ[!>椏o50%OfOOF@ShJ(hJ(h)ޟG! @!eҜ@=KIKLdz2=EAК*FZNZrMuRShA(=EC)Q:ފ|41(KW551 23F)aRDhaO>#*2*V?*7POP=~NN֩rۣ}j3}ir}GbZ?$$f6zSS7րh/f4gh)i(ӛS?IB(~z aJNI4:O IݤS֒ƛ@?JmX)h~4EhRR)INNq@蠌QLaCCRa8?3@r_‘: )SK@'sKLҀR7,iր/ I4ƒ/@^1Zu3A@ E%P  ,xPd5#ASKT*s((RQ@ H܌QMv &H ̋ZIJ;з5ԜLA{ȿJX~{-:.-Q@QLCnѾ{nJ$IwvUhOcBh9Ⅹ#Z?Ɛ dT4h(ER(()QE))h#*rSKAx bI<ȗjөZ*&G֘nXyM4n@jN@;59c@āU7`3E#48]ib'q4JcjMĞPM:v})3;GҒp$yPrF*-I(C);$Q@ EUQEQE*VGJxUҦE!iGZANjFL* >8UЄ– 8HY@+*$RF¥ : (J)0< 72֯zU 7ց1p[F"1ͪJ84u#ր-/FZOډ>2#OӳF2˜?Y4V?()3@Nhh%IL@,=\ػ TS Nf*G*c4n!K~"c"Po$ M2uЌe(y7@ H RdPrjh({݀h|)| C"eTpjIfT4e{WS֚()~NOi7ژ1=,\4f݋>q)yzCy\ڀ"[}hn[H>V"їP ~>wDL4AN^ ?8SԱCRTqt5%!j5=BziP)WQKACQ<T0PqJO+J P"ݏ;I5#@֛HGOO&() (AEP0( ε5B*c!d=QPiA4PyɣWL_jF&:LZ2=jZ)2=ir=h@/z;ZK'MSs74צSR PRuNpj6?7Ofsڐda8 XJ~ǭY_& # *>/`MoѾJ_j?kAmBˎ?P?.j<!ޙRdzd`f~cR1#q 3Tr I1E;(玦tJJ0RcMn>JmUʹQf#I9楋cU~)i :өѽhny1 S6R A(_+KHtcScSSg@QVQ@ ~4O6: Xei[4P1FҖP0 cD{S E⛒zUryx-lZ;,H#Tܚv buUcdv89U=iDL,Z- G޵ؒh Cbm9YnEDe$*X]{Sojt{Cҕ8sCSIJxPKIE0:-"t?ZZ@ 斊`VhRjyr*4PP`>|Ԥdj -: (Rɠ%Cޕ HQL1TT+7_=sҀ%ixRz@HOCK@QLESa ֥r1P)Rja_HcRK d[LBx( (/jJ\wqM"sHL,oN2MXRr1Ujx_өh>Pz%QE0/AfjNI"nH ÊZL`ҞZJ*J(hxUҪhtHZQIJ:ԌիZLN'mL*Ģ.Ҝ: `*cI@R%2o^U NJEXڑj< 9Vc:ԸҀ$S5=N=Z24+m9qL"_4Pf>'}(MGEHejii֓'֒\IE8RAS7TXMhYU#ڎjY$gO栩X|RPHPԇE@W,5 Im.O%bH.; i9*P"j 7), U:GJg4F T@ N) ͚scC:ȍ#&`l2 &=@˴ؚ =J.?'HxqNRa}&1ӊvQ%CSO|Tm93Ek I~u29P=7z/@%8D .6jZCjC5@E0$ Z9>(!?ROU0m9i (M@*>MB{`)p,Fxd asN$ '4f(A@@ PE- E-!qSB CE!s֌SHz@ N#M2 fJWM$>,Zt$֙ZE$1=h`3 }iO!@ #Р<)ːRZpǵ!Gց$bl(31#Vi;-֍Rkt?J`j.OJ 4~u>vGZ{T|<ȧt9r;vh]Nq@BCzR‚OqN4x4lT~G8;HOg<>4M4j5Ǜ4 Si=Q8? 
#OqU}MYU}sUh!zwZV(ސ\*9Iy-qzTs@S[ P1STU*}¢J̻pj?1Oj3LCc)M;ZBq%%<`<Zm #dMPJd)ѩc€&STo#R/*I$ci$ƀ# u4ӐzP4 SjGCHAHwxQjYAF P&'?kF)U1LCG4 N")vJc9\}{M^IcH(4l&^J٥ALCW NAF§(niլ b95T)1-tRS\%JM:fRO)iͰrObP:)@~47d z~RR5R7?Ji-QLBqɦIU5 J\`dRQpM 4IE.h>ԔfI"$|j %F&`p'IBg9HLQQ H6_f B2<T `T()i(QE9($Ru R4R/C@GZ8|Q"(`(ɩj=9 0$=ҒRPEStZ}2CO-71S Dl) 0V -;<JJFblƧ'\,WO*Цr)å&4:u^MU>Պyh*Ӳ@‍APU2lR2 `Q$f^Ѐ: (/zPyݪ5V-1#ڪOkÊG!`S9I:SQn>=@X@2>QE0 ( (Z1@ EQE(:mQ@0yiێ1)(*VWQ8jJJZ(cR*(b8Q@@?Ju&J֊*ŨꚯV͟A@SKI;L H5-#vn=MSaqWۆSUcÇ>x5T)#=*{vہMl8<1#|Z\#f' f9lU ?ѹƋˬ1Qzi2}h\q) #H.$LDSiNk4iXdQfyb*V*A1G}iNgTmjYFnM"4?#h>QS'Q02)1N,.h'w*6|EW|0. _cP9ɤ@I #p@S XW.q  i[M4)U4b$bL< hT^)>q@ oi)OSF(KH-!=R~F&QLR ZaLx@qHzJOh*h_:o!ӎ#AOd<- ю:椀mS1\THccZI:5*)P!#TwMGzB@Ms)%8@PJ[4H>U4Mn߻aU|jhKEn1)M)e{FUG14T;M0#J2Bs*_|AmǽOuWQH Hv?SP6J_I٧N?}f r3u4 '[S&Z)}'(*;ը (`{d\f<TQ"zP) QLzUSg3G8>"P;ޤjAT4$Sh͌g h4&O%PI@X) 4Phph< Z!ɢqA@<QRE@М&\8\ҟW3@i%Ƞ }*!Hr1M`ģsP0 g?!z+srZgJU@ PS)ȡ2AS!>i%G4Z Y93M4:R( (Pz(4)sJ)PǠ-%)h xSW*x4放 v1btqALJi(=)J*Ӛ ɠ1G N=7"P?oi:Rq$RHJt44G&=0[`S"yzzj*9Ddf*)5xԕԔ>?/J|phHZF-M3q#=i jV}P#5a8LCDˌGBFf *U@ť^@3J*jcT,N{UTRhR3RAcCOB:Հ^ , )hQT ցMTp):|jqU Ug=W~1QIL)hFXT1ڢՍs@*b* Zn$4(EQEQEQEQEQEQER~ҙO2ٰ֒zJfajH)iJ@-#t= 0tzWPACrxPcG"o0Kv2ֈT"NaM(iހ"v撥Wpi2xJUZU||dǵW1dS*sS>V8H)ܣS\ ݲqր-ѓR{TyOUmF3@ ⒃E2ySg"Sօrq毡A4JɣxE:WZGځb5"n{[4Y=6?4a"efBzJvzѶ'- )sHd*]'TLJJq`)OVڤz;jt@KTQz})2B9 HZIM6IfDpfcCcSÑڙܑJ]1wB|SsP}G9 QM#JJ}MMѤPSRXLT78GqPH1JYsx␌Y(=C21Na\^iz/җҜi|('zXe_j\0=zTG4x!a:Tem‘ޜP7{I)A5W?1T0 |CmJs ?Dt 4_P, %% xg1ӭ'9q1{UOɏJՈ@RQEz=MHz=M2s>\ӦμR3M٥"brh`(4 $8)fA1R$bɤd&戗j=EI(S( u2HhZJ(bSH((\)i-!ҊJZd>Pȴ}S\ rƀKڒ"ޏ.piqC7+5,Z-6xҢ8tHJ)a?-B*E?gbbԧ$UʧaJp(QWgE1?\ giOt(&E0P))E(iH4\ %#E4RAH)sE94uRsJ0P)Y<PhT4ɥ?tRf@ 3!֛J4sNh4fL^uNBy894$sE֚0O4PJ4ݾ)Pii @I28"_ր%*xm^s4t cQ0G}i dTѱ R'zӈgHjjE1UҙO/zrӗMQ-IQMhe9QQ4@EPb.RZ &0ŏzX#w ff:#-dݪܱH))GAHL#M;) -PՈB* )Q@ EPIE(85<sAE\1NɫvtNPPQE( (((((Q@~'LK@ {RRZj^Qå5Oi E@Ӹ$mҀ+ ܌R tdE@UG+/CWitT<h=jf-@ϽDȧV 9='OC;j`3Jr7 _7p($@Ȧ_z@Y+ǜU c<ziRN>B5/JPE!ˎGOf;O֐OsLfh۸7#V  U'$U RBS'j]G:`4(J@hJlR\BC u؀j|r1gI(jWW8TG坘ڧ'+Mp"Q巭KHH\2=I'N'SI'#ژyg֓aPAE&${U2w*7i!&J4Gz6Fi4~u<jhjG#Xm Q#A!l֑=( '֒o=F8aҏ6G4 Afܱ1y}PH L Ib~S@;kNG4t5?֐iXPݨc6ÎN9eLrIq '(T]Pm۔ 9#~fCiHdD"&~;C@!Xݤ&8j^h$E[?.=CSt4L{)_5nH<ua֪*J3b\P l`=6?ڧ$Lqߊb"^3G]QH f8fZ.! j<%R_W9ZuQ@OQOQ"mC9h8-184~)jaB- & J:11~SPm^jMXSPF2)GԓJv֝ E0  ) Z(QE bQE-'zCKސ!W- h))M%%-%(AN~sH84u5j{JEIEP!e=MQHcj*SE@œ^w/&h$FN5RZeCS[)h4LOZ)ERfLJ&64 RP NztQEȦFi4֡SL4 a\wIAQ{f 0ZF:I MLm2@SRA40Kޞp@'b*qqQh)GݤS)A+@ CLLOϥ<D+qN q֟*5=[r)v@ ^iO!5 OZc)i(t jSPN>dcSsO84ʼnߎ*Fˁځ N)HH)W!T .O+@B)C ,cޟK@^Z5 h@IځցTՑUaDҁȦ"H K$G#LF +*=iL8#7ASSIqiv:O+Rjt5,0'wJ1NV+ҁ;rZă@J9q@f4ٽEK7&֋X5 sRn9fѻHmX 1Sbq"3E*`Q)٘<}@>N9.@/CtՓ#0;sSl}AbjЕjT $";P>_zQ֐9>۠=CvmR~+@G3Hm=2?~ qƘlxaL vܰF@{B4x~?7Z x9!4|?*}aG0 ԢH)@ rsKEWT$!^.c 򑊋<a0`)4i(hp}i9ACHvMw|{NP! 
y8i>jȔǽK }*0 8 9T +F< JNv3LU*xҝvt%)?/@CՁOWXdR!S1 $LHi Zg*9UmX.NLS@pKlchQ1QR :皌dù$Ȥk[ӊhEN:Q@( XU_*1ؚm71:%yT9c;qO5 'qC$b2x$ƙ1H}(R&8 qHH~0,×V,sPSvO@>iցf'=0N~Y5H`xr=~4*,{};tDθ4Kv1*Hň=i9<եaO)GCEQ#2|3i}FP=<qY(j|Sސ5y)C 1@!qN~GaI@җzDn05b߽A1*KSG@֥fswk<ԃ1p~9%!UZr1+c5c+GT3 &ZJ[j;Qr)~*v?<n@RQGz*R/Zd똁z⮰j4!6qQñZ)-pBM l6E< I$dHT6A枍sL\6HZd]I4\Gsȥaޢȩ1@MTU3,2] Rb\/) @64ݤE.R𢀸COϠ+)iC)7!))szHjz j|DLv@Ý}f Q]gr*jSOIJg4r(f@t_&)CPj:qrzh1E!mIPt+ SJe@n!h@Mod~Rɦݧ7\SE+s nXLnj>594&/OaڐqRzE&8P(@yaT}*6\7CJ v0ʴ-#ixvT`Ѽ E9g&M.N1RI)H $hm' %"LŜ@ɠPvy$TxO9n 1)&sHmG~PFCp;U^ ON)pb>9>nIG@ HXA84&G4m QR@`FO8W&ߴӄt'0i#T'u9i1MS\T&D{ub8Aii71P84Ɛʠ`hU)PqNYR>qցMfS O9sRZP\Jԏ\ VGZ1R@;wԝ@z2z6j\A٠ } IQ)  ˊd@Oemph/֑<Ұ(c7QL"݃ q/€f=L,npiaz|$rj#֜BCDcsC6)I6F*0֕/֛@@D @)%$F$@4a9H͖Ƞ S^⫩K#(ҹʰ JT`@8f9&^"8ENwfc԰[4WY5,p9'oPpH]9cC|?fRl̀4ᑼޛ ]: bH۶I$D 1>UQ@ Āv\g#򦚋 ֲ94j쓞LGycidA1bHFF 0*OG 7nFOTXm r9<PM*_Ln>[=)6JNċ@{}*vA&ӣҒ.m{Sĕ9l޴%Jgƨ=E[<ҩCPv^ @@4ZVA@[OWN? TRP4?9AF(?h72}i)qUa\|nZF|(.}A曊)`p@Q;Q ӈE!GJA֠ogT :UeUv:RsIިi֚:ӗh2sM~dbf"iŽ" zҞ_1)'-Jx\R@T(1&OAS<. Ҡ=֢jRsDИM!4Ҹ )}A/qF)i){P1 4*'ǵ>UE|4lF=k6hFNLj.y3)?u @=X~4n p9@ 30)>jDNiP>"#5 x|p*9h_5 409AO!:(uJѕgmQ@A]P2mCN Cyr<[CI$6iӀeҀ*7VU0 P2j~cS/Y~|AjXn=jyOU4 s1=M394)Z))y ?+T&LjXWq"?LZxLT5:(ӻHFh=5 h7;H @ъo<Ӷ)i mJb;S6TӏJC"Fڑ'EvъwA|a`Q}.&hI$$3ڜjԹ>a J.?٨mI(и C99mt?*1@A!Qyt\,LeAS<zR I8"~\m)}|qE/Ҟ>=*<@ 7|nHsFM C@Sn>8!`=UTp9,D gm4O*eWh'GQ)6f&nlRyuy yZڤ) m*l*']JC4R7N(YqR`Si('8j`vb&I(iTTPLӨ.irsKE RZ)}h nv;6bNo/_M҂yX@Cƚt7N)2 &9Z~9"2E*}\R a&81b4bPzϙBi[4g}KLf JڛVsc(IzS3@ Ojoe1jAґ1J(Qs}cS|£ ЀjC*$9<B&iU،!Romyy:*~Sv%s֜%8L*Bސ9%HbHW" }ޤS{}i]pاB78 pSXTa=J!)7E>~ojiOg5'jiFB(QKޗmQNJ %Q@ ))qi(((`(b`F)hX2\ub Sy( b`qL>w"L_&ybIIUCiÚp84yQ1A?(7J^UMER0P0JFhA'8KQ(oqVFFThpG4,*Gi|(SLiqNң2ć+cfN1FژɒyI'@@J`R@☐"1J鎔xu'9Fr7JbZHp_iix4Ԋ㯭U;+jD:Ӑ.NiR0h 2vjnzvȏ 1w儕^3SJTi€GzFӊCփ҄Qڊ^ա"RP(QE0{ԯʫ^R<<Tå((PY3?Jd;qVP~F:Ԉ51Ԩ%^sLDl6~RiZ)\bbGZH؏ҫ֬Ie8rqR8)֧7XlO"V_-'BVdc(l]#VyČb_AHU]S#P?&i1IiQBÀCO "GQQv@XzSYsKOT&] =), RPҧ QK*@4b6ǭH'14S6@( Y= U4^}3iжjk&xNj6@S҆zQ Ұ\!c Jq$5KL JG?{KM8giRb#: #)(_( NsdQR$ԆU( c5\MPaq@)(SVo~e~ր_8d4nI4{Rq(ґ~U04)ۜp=酶 96`+p3H $╆|q@)f'L@ s<xBiR0n*X>1wb'HP=8yd*pPfNi4 T42i1Ohi f( iJLsRPJbo%)F$qMf/€*@H)րd= <@jQSLBn>1'9If3➽8 ӯJH-<n{taC=3?jH<SqMMcҁ yvʩ&K+I94iWޔ)JTv4GqE 1ONifE#I7z.X4\QNEiy)q֙EOݦ惜P)⒀ ( EQQF{67xH Em?Y~5*L~Ci5A54ET?0q&4%Ƞ)x4sO g=hE RqL3@(F )O$PnlSLf^S4JRyg""isL rhɢJzRILh~hC7 RC)8<iy(?1nKd&,Kd.<N*7Pi]I횗$4J=q@/ZH-*C%UwS8.XvQ yH*(Dۓ iaK;_SV]pv['<\R)3- UH Jo|SjMڀ0i`#)=GJ(52 m:=(Q@Q@ E%(I@Q@Q@Q@-%-N*֧N ^֎Fe=}̞۔( 6~">檞@7?֫!i 8i@) 6Mc@NaaP4]TXEPڤyZnh,QS5T(\c85&aLUp?) v O F "ə1ޤxTu@ OOUd 2ADK9O?9, *T/>s _l[GZX֚GԓTX%EI)sd {T06q@GޤPx5 RԘȁ4ph4PsM<kpbR4)h)P'>^lŏJOa&i=hePR!zVO CUxFwFN5)OZp*9@*)K)"tF@:V"%_ZEq@noZPzҼF6sHupir8 Qi~S(,.YXPK'B) DǞ(FNNh M*"qY E4t`vӋ `\h]$˚<! 
(40NPHDP*Gր(}Au$ 0F CCNqKC3LDJN 4)O֐u45&ajJ *­eH'= 曜r-֩O_:Ҋ Qa~=j\qQ\Mҕ[<eIii'4Hzoje*2>s@ -GI/ QE"\H@qBS `=мsHi@sژ ;I&9N 3T| LribQ#iJC#M:1|n?r[NV=(bTOA@f[:R1QS4O=hn(4 S@iᚔLB ՟REZ,JH #}rH|ІĻSA%-y=!H<(n)ry>HeJ|} 2CLD/O7/j84'i 6j7  PMH6sj櫉֩j<bdR@4iԆ I4?Z nqH,=*0њ,jJPrqOf}i|Lv hj\G0ݠ*arn"{AFᏺ(44Hn9-粊<N.$ꏊR1@<ҤĂGSRL db%T0hTT1FE ֤p83EM)zb"NOݜ Rz GIRz 8(i `T EԻ}MN*"[N)E JAS@}heJ8`u4[S4,7SZ ۩Jp1R gIijF<*CQS@4M1 )fI@jD}` yҠ |[8YEM$MSzUr3@y*9պlsPvoN*V$KOHdClPF=iZpN0 c9yT()@ tSElVph UZ7jBS锹ҌJeQE)GJJ((()i)hGZ:T#L(H?u5Hd|>ҟqH=s@ ޠR]T;@fE(aK۱J =)ϥ0#S\|:BA<)Ž75T ր1E.@FԩQ@b֥4_Z@4/p{敗+?*O4 :P')c }iVBs@ɣAJ֑0cҕ>C,B1V2zmFpqM8.IoZ`2n M{ B h̛J09'q-@JJʹ BF^ ?z%RX4?zq:)J\b,I皳 ~'44s.c4 ceX1`֚W\,CPlX(X@2jd  ☈Aw fI@;* ԟVɤp4#BjZHiL '\x\2c)Å*#rӀwx>']q({fBygbMA&M<h$u(AJ,)0jҫ?ZlKܵ D&Oz>"a]SY|ύkMl "^yrqJG(9 )l.H1F(v"'iSyG"iIh֡,0MZcDTr(AnjId L(8<Rp0h?)B(1~Mxӓ41 ҧҬF UcjҀ";P fA+ϥyZ@?֐EV6o*%W[T+=E=) ɤ2X5ԱP?Zh9UHj 4R) :PxRMNJ5[gҍ8;S-U UTzT@%S[{Ӏx9џZ@M᾵ FC'}Zrj1cޔǸxG0 "rcZa'֐3ǭ}Mn&)fN*183& mJ&ކ!gCS F;4"Sd"S9Ba$32ifP84NzV@O9gi Si08 !>6tL%ZrCd }q,b>1>"VșV׭T\~isM!r)E4Hw~lTt> pLjW;}i3ځ :K@QN)њ;~:SKIEɢ\HF4nɠ4f(A&֐ R'lVSIɢUAM1[ *Vu5 OCLӖM6iDL{P_'4A4-)`w61ڀ|[>n3 /=E2yѱޝ1ɦ;PpsNEj,O46u 9 .rj{D;) DN9{ zژr;摺qBmZF7RGր,'+85 8F}@P!aS׉SؾGvhui@irEB =) *\dr F)hP(7iiri ғ% Ecq040h"N'8s'5(< 1Q,TvbkiƛcHdd0pGaE ֕p#hFSrΐ zTyΕY)U:'SKM ){Ph(^ԔQ@Q@Q@Q@Q@NRJx8xoZJZ(Ǎ_ AXOZz Z(g>.Te$OIޠB!¿Y' pۗzH g&&,0MJ.1֞@Y;Q8;xXcFdu4G)v/{Wa;~&"zyH:ѷz4X:n ǽDy(5:E :Tndl~ZݛREĚǥ*g'<џ7L:46'$XzTkYWKA#=jK¦;quJnɨ6xv+3cp(C8恴 q;QێF@= Oր&Psh:T_#g'S֓#4ǩh bm5ZCSԢ4-!mJUĄ,!0/Br)!OJM4쏚!Oʑw` L'vjq$'Q%BݠrJ8'vtzE܄U?-ɈԊ|*ýDQSz昆}*W.X`fqދj?oVcTJĕ^: C l =ϵ;$@EH>?'')8)sM64*ǫzHB9!Xćc I"*J.XG>8 i ty(BsB12}(Y@ i D !ݎ6u& G>wE#*` 8_:rii)4i"Ҝ @ jHW=@TfʊXsE7i2nqCt;m\Y.zԷ 03j$ 4hȦ17*'Xi7ay14jF{ݘbI#4gd 棤VjUnYN8YQT{Uv8AP0)ϥ)<(hH(m eYHF@ovkjA?@AOsQȠ h}j֥<P!9*@i颤;NX{fBSّlb7ݤ4&"r[M cLZl:'A@֘)ddïZ[>Ԇ=L=R[,HpTRtT>gRGMGRGiU?ZKiW7֤STǠT1A!֫ݏ~duGҀ+bSBqE.(%>/M9>#iq@Ģb\Rb J=sF)h(ǽ-{Z7`QEPWڎ{QE"kѵiMczygր})žGHd_'}i_/@ Z<'=MX=K w},UI-XšG Ssi ~Ev4LCL{ OŽ= 04+NϽ8@Fiw rh'rṩ <Sdo9.5('zI@)h#KHey5<Àj6SILIj0*I>F .)S! 4yJ>jr9rhCc(Q@!斣f?$|mhG4vMKs֡e\#皘tP*ZCn))@^c*~aKL~H>64/Sr^Tz KIޝLBRi((((((hոWa tP1h }L{ BFcyHR4VHjUQHnTt  roʤXc_ր*;u?=Q2ϽLUS^t@=M<"oΟ׽&M<L1J<{bU64 v[.}N}Bަ4 )YjRrM@M9"!Ҫ^08Y9'l)W&*Z 4R(sN_4&8@H*?zUF1֓`޸$bϞ\ 7q'1]bqi0Rn)t2 dRb^NA>h=*EzPGCKzP @[ғ-'Ғٳ׌ 9`F: U'ژ m)i:7UOUf"* d})8i"!Ԅ`Tj~x&jsLw S8j3i3~lT(IܜReMH,wuℂxY47ҝYWb9'$m8Ǹi KmV:dc#マhb/Ou]PL]ƞcJaUbnJ%+0!x_Λas30xS ӦjhS|=*Vm U=Yc V}sQE^̒7̠4T.SFB* $#yvB#:~hܯQ@۟49`v%]} 1jͿOҀ3hԻ?tM1bZ@n#.89(At.@=,*qL*t*2jz9P!$ QʁSG.}* ħMO,OƤBXL_znԌ E8zRvM4JOiw`S ӈ&yiv`t =Bz@ fG97(ECS:BXS٧)4hZiZC$JQrOJWpUzeH~Us@\8J>) _0tR6yאGE RIFh@pMZPíTQMEKL6?zM.ALH! 
-1
TheAlgorithms/Python
7,417
Remove references to depreciated QasmSimulator
### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
tianyizheng02
2022-10-19T03:31:43Z
2022-10-19T20:12:44Z
50da472ddcdc2d79d1ad325ec05cda3558802fda
2859d4bf3aa96737a4715c65d4a9051d9c62d24d
Remove references to depreciated QasmSimulator. ### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
# Compression

Data compression is everywhere; you need it to store data without taking up too much space. Either the compression loses some data (then we talk about lossy compression, such as .jpg) or it does not (and then it is lossless compression, such as .png).

Lossless compression is mainly used for archival purposes, as it allows storing data without losing information about the archived file. On the other hand, lossy compression is used for transferring files where quality isn't necessarily required (e.g. images on Twitter).

* <https://www.sciencedirect.com/topics/computer-science/compression-algorithm>
* <https://en.wikipedia.org/wiki/Data_compression>
* <https://en.wikipedia.org/wiki/Pigeonhole_principle>
-1
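The pr_description fields above describe swapping the legacy `qasm_simulator` backend for `aer_simulator`. The files changed by the PR are not included in this dump, so the following is only a minimal hedged sketch of that pattern, assuming a 2022-era qiskit installation with the Aer provider (`qiskit-aer`) available; the circuit itself is made up for illustration.

```python
# Minimal sketch of the backend swap described above (not a file from the PR).
# Assumes qiskit with the Aer provider installed.
from qiskit import Aer, QuantumCircuit, execute

circuit = QuantumCircuit(1, 1)
circuit.h(0)           # put the single qubit into superposition
circuit.measure(0, 0)  # measure into the classical bit

# Formerly: Aer.get_backend("qasm_simulator")  (legacy QasmSimulator, now deprecated)
backend = Aer.get_backend("aer_simulator")

counts = execute(circuit, backend, shots=1000).result().get_counts()
print(counts)  # roughly an even split between '0' and '1'
```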
TheAlgorithms/Python
7,417
Remove references to depreciated QasmSimulator
### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
tianyizheng02
2022-10-19T03:31:43Z
2022-10-19T20:12:44Z
50da472ddcdc2d79d1ad325ec05cda3558802fda
2859d4bf3aa96737a4715c65d4a9051d9c62d24d
Remove references to depreciated QasmSimulator. ### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
""" Greatest Common Divisor. Wikipedia reference: https://en.wikipedia.org/wiki/Greatest_common_divisor gcd(a, b) = gcd(a, -b) = gcd(-a, b) = gcd(-a, -b) by definition of divisibility """ def greatest_common_divisor(a: int, b: int) -> int: """ Calculate Greatest Common Divisor (GCD). >>> greatest_common_divisor(24, 40) 8 >>> greatest_common_divisor(1, 1) 1 >>> greatest_common_divisor(1, 800) 1 >>> greatest_common_divisor(11, 37) 1 >>> greatest_common_divisor(3, 5) 1 >>> greatest_common_divisor(16, 4) 4 >>> greatest_common_divisor(-3, 9) 3 >>> greatest_common_divisor(9, -3) 3 >>> greatest_common_divisor(3, -9) 3 >>> greatest_common_divisor(-3, -9) 3 """ return abs(b) if a == 0 else greatest_common_divisor(b % a, a) def gcd_by_iterative(x: int, y: int) -> int: """ Below method is more memory efficient because it does not create additional stack frames for recursive functions calls (as done in the above method). >>> gcd_by_iterative(24, 40) 8 >>> greatest_common_divisor(24, 40) == gcd_by_iterative(24, 40) True >>> gcd_by_iterative(-3, -9) 3 >>> gcd_by_iterative(3, -9) 3 >>> gcd_by_iterative(1, -800) 1 >>> gcd_by_iterative(11, 37) 1 """ while y: # --> when y=0 then loop will terminate and return x as final GCD. x, y = y, x % y return abs(x) def main(): """ Call Greatest Common Divisor function. """ try: nums = input("Enter two integers separated by comma (,): ").split(",") num_1 = int(nums[0]) num_2 = int(nums[1]) print( f"greatest_common_divisor({num_1}, {num_2}) = " f"{greatest_common_divisor(num_1, num_2)}" ) print(f"By iterative gcd({num_1}, {num_2}) = {gcd_by_iterative(num_1, num_2)}") except (IndexError, UnboundLocalError, ValueError): print("Wrong input") if __name__ == "__main__": main()
""" Greatest Common Divisor. Wikipedia reference: https://en.wikipedia.org/wiki/Greatest_common_divisor gcd(a, b) = gcd(a, -b) = gcd(-a, b) = gcd(-a, -b) by definition of divisibility """ def greatest_common_divisor(a: int, b: int) -> int: """ Calculate Greatest Common Divisor (GCD). >>> greatest_common_divisor(24, 40) 8 >>> greatest_common_divisor(1, 1) 1 >>> greatest_common_divisor(1, 800) 1 >>> greatest_common_divisor(11, 37) 1 >>> greatest_common_divisor(3, 5) 1 >>> greatest_common_divisor(16, 4) 4 >>> greatest_common_divisor(-3, 9) 3 >>> greatest_common_divisor(9, -3) 3 >>> greatest_common_divisor(3, -9) 3 >>> greatest_common_divisor(-3, -9) 3 """ return abs(b) if a == 0 else greatest_common_divisor(b % a, a) def gcd_by_iterative(x: int, y: int) -> int: """ Below method is more memory efficient because it does not create additional stack frames for recursive functions calls (as done in the above method). >>> gcd_by_iterative(24, 40) 8 >>> greatest_common_divisor(24, 40) == gcd_by_iterative(24, 40) True >>> gcd_by_iterative(-3, -9) 3 >>> gcd_by_iterative(3, -9) 3 >>> gcd_by_iterative(1, -800) 1 >>> gcd_by_iterative(11, 37) 1 """ while y: # --> when y=0 then loop will terminate and return x as final GCD. x, y = y, x % y return abs(x) def main(): """ Call Greatest Common Divisor function. """ try: nums = input("Enter two integers separated by comma (,): ").split(",") num_1 = int(nums[0]) num_2 = int(nums[1]) print( f"greatest_common_divisor({num_1}, {num_2}) = " f"{greatest_common_divisor(num_1, num_2)}" ) print(f"By iterative gcd({num_1}, {num_2}) = {gcd_by_iterative(num_1, num_2)}") except (IndexError, UnboundLocalError, ValueError): print("Wrong input") if __name__ == "__main__": main()
-1
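The GCD module above relies on Euclid's algorithm, repeatedly replacing the pair (x, y) with (y, x % y). A quick illustrative trace of the iterative version on the doctest inputs 24 and 40 (this snippet is not part of the original file, just a worked example):

```python
# Trace of gcd_by_iterative(24, 40): each step applies x, y = y, x % y.
x, y = 24, 40
while y:
    print(x, y)
    x, y = y, x % y
print("gcd =", abs(x))
# Printed pairs: (24, 40), (40, 24), (24, 16), (16, 8); then y == 0 and gcd = 8,
# matching the doctest greatest_common_divisor(24, 40) == 8.
```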
TheAlgorithms/Python
7,417
Remove references to depreciated QasmSimulator
### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
tianyizheng02
2022-10-19T03:31:43Z
2022-10-19T20:12:44Z
50da472ddcdc2d79d1ad325ec05cda3558802fda
2859d4bf3aa96737a4715c65d4a9051d9c62d24d
Remove references to depreciated QasmSimulator. ### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
import numpy as np

"""
Here I implemented the scoring functions.
MAE, MSE, RMSE, RMSLE are included.

Those are used for calculating differences between
predicted values and actual values.

Metrics are slightly differentiated. Sometimes squared, rooted,
even log is used.

Using log and roots can be perceived as tools for penalizing big
errors. However, using appropriate metrics depends on the situations,
and types of data
"""


# Mean Absolute Error
def mae(predict, actual):
    """
    Examples(rounded for precision):
    >>> actual = [1,2,3];predict = [1,4,3]
    >>> np.around(mae(predict,actual),decimals = 2)
    0.67

    >>> actual = [1,1,1];predict = [1,1,1]
    >>> mae(predict,actual)
    0.0
    """
    predict = np.array(predict)
    actual = np.array(actual)

    difference = abs(predict - actual)
    score = difference.mean()

    return score


# Mean Squared Error
def mse(predict, actual):
    """
    Examples(rounded for precision):
    >>> actual = [1,2,3];predict = [1,4,3]
    >>> np.around(mse(predict,actual),decimals = 2)
    1.33

    >>> actual = [1,1,1];predict = [1,1,1]
    >>> mse(predict,actual)
    0.0
    """
    predict = np.array(predict)
    actual = np.array(actual)

    difference = predict - actual
    square_diff = np.square(difference)

    score = square_diff.mean()
    return score


# Root Mean Squared Error
def rmse(predict, actual):
    """
    Examples(rounded for precision):
    >>> actual = [1,2,3];predict = [1,4,3]
    >>> np.around(rmse(predict,actual),decimals = 2)
    1.15

    >>> actual = [1,1,1];predict = [1,1,1]
    >>> rmse(predict,actual)
    0.0
    """
    predict = np.array(predict)
    actual = np.array(actual)

    difference = predict - actual
    square_diff = np.square(difference)
    mean_square_diff = square_diff.mean()
    score = np.sqrt(mean_square_diff)
    return score


# Root Mean Square Logarithmic Error
def rmsle(predict, actual):
    """
    Examples(rounded for precision):
    >>> actual = [10,10,30];predict = [10,2,30]
    >>> np.around(rmsle(predict,actual),decimals = 2)
    0.75

    >>> actual = [1,1,1];predict = [1,1,1]
    >>> rmsle(predict,actual)
    0.0
    """
    predict = np.array(predict)
    actual = np.array(actual)

    log_predict = np.log(predict + 1)
    log_actual = np.log(actual + 1)

    difference = log_predict - log_actual
    square_diff = np.square(difference)
    mean_square_diff = square_diff.mean()

    score = np.sqrt(mean_square_diff)

    return score


# Mean Bias Deviation
def mbd(predict, actual):
    """
    This value is Negative, if the model underpredicts,
    positive, if it overpredicts.

    Example(rounded for precision):

    Here the model overpredicts
    >>> actual = [1,2,3];predict = [2,3,4]
    >>> np.around(mbd(predict,actual),decimals = 2)
    50.0

    Here the model underpredicts
    >>> actual = [1,2,3];predict = [0,1,1]
    >>> np.around(mbd(predict,actual),decimals = 2)
    -66.67
    """
    predict = np.array(predict)
    actual = np.array(actual)

    difference = predict - actual
    numerator = np.sum(difference) / len(predict)
    denumerator = np.sum(actual) / len(predict)
    # print(numerator, denumerator)
    score = float(numerator) / denumerator * 100

    return score


def manual_accuracy(predict, actual):
    return np.mean(np.array(actual) == np.array(predict))
-1
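To make the doctest numbers in the scoring module concrete, here is a small standalone recomputation of MAE, MSE and RMSE on the same vectors used in the doctests. It uses only numpy and deliberately does not import the module, since its import path is not given in this dump.

```python
import numpy as np

actual = np.array([1, 2, 3])
predict = np.array([1, 4, 3])

mae = np.abs(predict - actual).mean()     # (0 + 2 + 0) / 3 = 0.666...
mse = np.square(predict - actual).mean()  # (0 + 4 + 0) / 3 = 1.333...
rmse = np.sqrt(mse)                       # sqrt(1.333...) ≈ 1.1547

print(round(mae, 2), round(mse, 2), round(rmse, 2))  # 0.67 1.33 1.15, as in the doctests
```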
TheAlgorithms/Python
7,417
Remove references to depreciated QasmSimulator
### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
tianyizheng02
2022-10-19T03:31:43Z
2022-10-19T20:12:44Z
50da472ddcdc2d79d1ad325ec05cda3558802fda
2859d4bf3aa96737a4715c65d4a9051d9c62d24d
Remove references to depreciated QasmSimulator. ### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
"""Breath First Search (BFS) can be used when finding the shortest path from a given source node to a target node in an unweighted graph. """ from __future__ import annotations graph = { "A": ["B", "C", "E"], "B": ["A", "D", "E"], "C": ["A", "F", "G"], "D": ["B"], "E": ["A", "B", "D"], "F": ["C"], "G": ["C"], } class Graph: def __init__(self, graph: dict[str, list[str]], source_vertex: str) -> None: """ Graph is implemented as dictionary of adjacency lists. Also, Source vertex have to be defined upon initialization. """ self.graph = graph # mapping node to its parent in resulting breadth first tree self.parent: dict[str, str | None] = {} self.source_vertex = source_vertex def breath_first_search(self) -> None: """ This function is a helper for running breath first search on this graph. >>> g = Graph(graph, "G") >>> g.breath_first_search() >>> g.parent {'G': None, 'C': 'G', 'A': 'C', 'F': 'C', 'B': 'A', 'E': 'A', 'D': 'B'} """ visited = {self.source_vertex} self.parent[self.source_vertex] = None queue = [self.source_vertex] # first in first out queue while queue: vertex = queue.pop(0) for adjacent_vertex in self.graph[vertex]: if adjacent_vertex not in visited: visited.add(adjacent_vertex) self.parent[adjacent_vertex] = vertex queue.append(adjacent_vertex) def shortest_path(self, target_vertex: str) -> str: """ This shortest path function returns a string, describing the result: 1.) No path is found. The string is a human readable message to indicate this. 2.) The shortest path is found. The string is in the form `v1(->v2->v3->...->vn)`, where v1 is the source vertex and vn is the target vertex, if it exists separately. >>> g = Graph(graph, "G") >>> g.breath_first_search() Case 1 - No path is found. >>> g.shortest_path("Foo") 'No path from vertex:G to vertex:Foo' Case 2 - The path is found. >>> g.shortest_path("D") 'G->C->A->B->D' >>> g.shortest_path("G") 'G' """ if target_vertex == self.source_vertex: return self.source_vertex target_vertex_parent = self.parent.get(target_vertex) if target_vertex_parent is None: return f"No path from vertex:{self.source_vertex} to vertex:{target_vertex}" return self.shortest_path(target_vertex_parent) + f"->{target_vertex}" if __name__ == "__main__": g = Graph(graph, "G") g.breath_first_search() print(g.shortest_path("D")) print(g.shortest_path("G")) print(g.shortest_path("Foo"))
"""Breath First Search (BFS) can be used when finding the shortest path from a given source node to a target node in an unweighted graph. """ from __future__ import annotations graph = { "A": ["B", "C", "E"], "B": ["A", "D", "E"], "C": ["A", "F", "G"], "D": ["B"], "E": ["A", "B", "D"], "F": ["C"], "G": ["C"], } class Graph: def __init__(self, graph: dict[str, list[str]], source_vertex: str) -> None: """ Graph is implemented as dictionary of adjacency lists. Also, Source vertex have to be defined upon initialization. """ self.graph = graph # mapping node to its parent in resulting breadth first tree self.parent: dict[str, str | None] = {} self.source_vertex = source_vertex def breath_first_search(self) -> None: """ This function is a helper for running breath first search on this graph. >>> g = Graph(graph, "G") >>> g.breath_first_search() >>> g.parent {'G': None, 'C': 'G', 'A': 'C', 'F': 'C', 'B': 'A', 'E': 'A', 'D': 'B'} """ visited = {self.source_vertex} self.parent[self.source_vertex] = None queue = [self.source_vertex] # first in first out queue while queue: vertex = queue.pop(0) for adjacent_vertex in self.graph[vertex]: if adjacent_vertex not in visited: visited.add(adjacent_vertex) self.parent[adjacent_vertex] = vertex queue.append(adjacent_vertex) def shortest_path(self, target_vertex: str) -> str: """ This shortest path function returns a string, describing the result: 1.) No path is found. The string is a human readable message to indicate this. 2.) The shortest path is found. The string is in the form `v1(->v2->v3->...->vn)`, where v1 is the source vertex and vn is the target vertex, if it exists separately. >>> g = Graph(graph, "G") >>> g.breath_first_search() Case 1 - No path is found. >>> g.shortest_path("Foo") 'No path from vertex:G to vertex:Foo' Case 2 - The path is found. >>> g.shortest_path("D") 'G->C->A->B->D' >>> g.shortest_path("G") 'G' """ if target_vertex == self.source_vertex: return self.source_vertex target_vertex_parent = self.parent.get(target_vertex) if target_vertex_parent is None: return f"No path from vertex:{self.source_vertex} to vertex:{target_vertex}" return self.shortest_path(target_vertex_parent) + f"->{target_vertex}" if __name__ == "__main__": g = Graph(graph, "G") g.breath_first_search() print(g.shortest_path("D")) print(g.shortest_path("G")) print(g.shortest_path("Foo"))
-1
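The record above describes swapping the legacy QASM backend for the Aer simulator in the `quantum/` scripts. Below is a minimal, hypothetical sketch of that kind of swap; it is not the PR's actual diff, and it assumes the 2022-era Qiskit API (qiskit plus qiskit-aer, where `qiskit.Aer` and `qiskit.execute` are still available).

```python
# Hypothetical sketch only -- not taken from the PR's diff.
# Assumes 2022-era qiskit + qiskit-aer, where qiskit.Aer and qiskit.execute exist.
import qiskit as q


def coin_flip(shots: int = 1000) -> q.result.counts.Counts:
    circuit = q.QuantumCircuit(1, 1)
    circuit.h(0)               # put the qubit into superposition
    circuit.measure([0], [0])  # measure into the classical bit

    # Old, deprecated form: q.Aer.get_backend("qasm_simulator")
    backend = q.Aer.get_backend("aer_simulator")

    job = q.execute(circuit, backend, shots=shots)
    return job.result().get_counts(circuit)


if __name__ == "__main__":
    print(coin_flip())  # roughly {'0': 500, '1': 500}
```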
TheAlgorithms/Python
7,417
Remove references to depreciated QasmSimulator
### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
tianyizheng02
2022-10-19T03:31:43Z
2022-10-19T20:12:44Z
50da472ddcdc2d79d1ad325ec05cda3558802fda
2859d4bf3aa96737a4715c65d4a9051d9c62d24d
Remove references to depreciated QasmSimulator. ### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
# Source : https://computersciencewiki.org/index.php/Max-pooling_/_Pooling # Importing the libraries import numpy as np from PIL import Image # Maxpooling Function def maxpooling(arr: np.ndarray, size: int, stride: int) -> np.ndarray: """ This function is used to perform maxpooling on the input array of 2D matrix(image) Args: arr: numpy array size: size of pooling matrix stride: the number of pixels shifts over the input matrix Returns: numpy array of maxpooled matrix Sample Input Output: >>> maxpooling([[1,2,3,4],[5,6,7,8],[9,10,11,12],[13,14,15,16]], 2, 2) array([[ 6., 8.], [14., 16.]]) >>> maxpooling([[147, 180, 122],[241, 76, 32],[126, 13, 157]], 2, 1) array([[241., 180.], [241., 157.]]) """ arr = np.array(arr) if arr.shape[0] != arr.shape[1]: raise ValueError("The input array is not a square matrix") i = 0 j = 0 mat_i = 0 mat_j = 0 # compute the shape of the output matrix maxpool_shape = (arr.shape[0] - size) // stride + 1 # initialize the output matrix with zeros of shape maxpool_shape updated_arr = np.zeros((maxpool_shape, maxpool_shape)) while i < arr.shape[0]: if i + size > arr.shape[0]: # if the end of the matrix is reached, break break while j < arr.shape[1]: # if the end of the matrix is reached, break if j + size > arr.shape[1]: break # compute the maximum of the pooling matrix updated_arr[mat_i][mat_j] = np.max(arr[i : i + size, j : j + size]) # shift the pooling matrix by stride of column pixels j += stride mat_j += 1 # shift the pooling matrix by stride of row pixels i += stride mat_i += 1 # reset the column index to 0 j = 0 mat_j = 0 return updated_arr # Averagepooling Function def avgpooling(arr: np.ndarray, size: int, stride: int) -> np.ndarray: """ This function is used to perform avgpooling on the input array of 2D matrix(image) Args: arr: numpy array size: size of pooling matrix stride: the number of pixels shifts over the input matrix Returns: numpy array of avgpooled matrix Sample Input Output: >>> avgpooling([[1,2,3,4],[5,6,7,8],[9,10,11,12],[13,14,15,16]], 2, 2) array([[ 3., 5.], [11., 13.]]) >>> avgpooling([[147, 180, 122],[241, 76, 32],[126, 13, 157]], 2, 1) array([[161., 102.], [114., 69.]]) """ arr = np.array(arr) if arr.shape[0] != arr.shape[1]: raise ValueError("The input array is not a square matrix") i = 0 j = 0 mat_i = 0 mat_j = 0 # compute the shape of the output matrix avgpool_shape = (arr.shape[0] - size) // stride + 1 # initialize the output matrix with zeros of shape avgpool_shape updated_arr = np.zeros((avgpool_shape, avgpool_shape)) while i < arr.shape[0]: # if the end of the matrix is reached, break if i + size > arr.shape[0]: break while j < arr.shape[1]: # if the end of the matrix is reached, break if j + size > arr.shape[1]: break # compute the average of the pooling matrix updated_arr[mat_i][mat_j] = int(np.average(arr[i : i + size, j : j + size])) # shift the pooling matrix by stride of column pixels j += stride mat_j += 1 # shift the pooling matrix by stride of row pixels i += stride mat_i += 1 # reset the column index to 0 j = 0 mat_j = 0 return updated_arr # Main Function if __name__ == "__main__": from doctest import testmod testmod(name="avgpooling", verbose=True) # Loading the image image = Image.open("path_to_image") # Converting the image to numpy array and maxpooling, displaying the result # Ensure that the image is a square matrix Image.fromarray(maxpooling(np.array(image), size=3, stride=2)).show() # Converting the image to numpy array and averagepooling, displaying the result # Ensure that the image is a square matrix Image.fromarray(avgpooling(np.array(image), size=3, stride=2)).show()
# Source : https://computersciencewiki.org/index.php/Max-pooling_/_Pooling # Importing the libraries import numpy as np from PIL import Image # Maxpooling Function def maxpooling(arr: np.ndarray, size: int, stride: int) -> np.ndarray: """ This function is used to perform maxpooling on the input array of 2D matrix(image) Args: arr: numpy array size: size of pooling matrix stride: the number of pixels shifts over the input matrix Returns: numpy array of maxpooled matrix Sample Input Output: >>> maxpooling([[1,2,3,4],[5,6,7,8],[9,10,11,12],[13,14,15,16]], 2, 2) array([[ 6., 8.], [14., 16.]]) >>> maxpooling([[147, 180, 122],[241, 76, 32],[126, 13, 157]], 2, 1) array([[241., 180.], [241., 157.]]) """ arr = np.array(arr) if arr.shape[0] != arr.shape[1]: raise ValueError("The input array is not a square matrix") i = 0 j = 0 mat_i = 0 mat_j = 0 # compute the shape of the output matrix maxpool_shape = (arr.shape[0] - size) // stride + 1 # initialize the output matrix with zeros of shape maxpool_shape updated_arr = np.zeros((maxpool_shape, maxpool_shape)) while i < arr.shape[0]: if i + size > arr.shape[0]: # if the end of the matrix is reached, break break while j < arr.shape[1]: # if the end of the matrix is reached, break if j + size > arr.shape[1]: break # compute the maximum of the pooling matrix updated_arr[mat_i][mat_j] = np.max(arr[i : i + size, j : j + size]) # shift the pooling matrix by stride of column pixels j += stride mat_j += 1 # shift the pooling matrix by stride of row pixels i += stride mat_i += 1 # reset the column index to 0 j = 0 mat_j = 0 return updated_arr # Averagepooling Function def avgpooling(arr: np.ndarray, size: int, stride: int) -> np.ndarray: """ This function is used to perform avgpooling on the input array of 2D matrix(image) Args: arr: numpy array size: size of pooling matrix stride: the number of pixels shifts over the input matrix Returns: numpy array of avgpooled matrix Sample Input Output: >>> avgpooling([[1,2,3,4],[5,6,7,8],[9,10,11,12],[13,14,15,16]], 2, 2) array([[ 3., 5.], [11., 13.]]) >>> avgpooling([[147, 180, 122],[241, 76, 32],[126, 13, 157]], 2, 1) array([[161., 102.], [114., 69.]]) """ arr = np.array(arr) if arr.shape[0] != arr.shape[1]: raise ValueError("The input array is not a square matrix") i = 0 j = 0 mat_i = 0 mat_j = 0 # compute the shape of the output matrix avgpool_shape = (arr.shape[0] - size) // stride + 1 # initialize the output matrix with zeros of shape avgpool_shape updated_arr = np.zeros((avgpool_shape, avgpool_shape)) while i < arr.shape[0]: # if the end of the matrix is reached, break if i + size > arr.shape[0]: break while j < arr.shape[1]: # if the end of the matrix is reached, break if j + size > arr.shape[1]: break # compute the average of the pooling matrix updated_arr[mat_i][mat_j] = int(np.average(arr[i : i + size, j : j + size])) # shift the pooling matrix by stride of column pixels j += stride mat_j += 1 # shift the pooling matrix by stride of row pixels i += stride mat_i += 1 # reset the column index to 0 j = 0 mat_j = 0 return updated_arr # Main Function if __name__ == "__main__": from doctest import testmod testmod(name="avgpooling", verbose=True) # Loading the image image = Image.open("path_to_image") # Converting the image to numpy array and maxpooling, displaying the result # Ensure that the image is a square matrix Image.fromarray(maxpooling(np.array(image), size=3, stride=2)).show() # Converting the image to numpy array and averagepooling, displaying the result # Ensure that the image is a square matrix Image.fromarray(avgpooling(np.array(image), size=3, stride=2)).show()
-1
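As an aside to the pooling file in the record above: the pooled output is a square of side (n - size) // stride + 1, which is exactly the maxpool_shape / avgpool_shape expression in that file. A small illustrative numpy check (not part of the repository file) follows; for the non-overlapping case where stride equals size, the same result can also be obtained with a reshape-and-reduce identity.

```python
import numpy as np


def pooled_side(n: int, size: int, stride: int) -> int:
    """Side length of the pooled output for an n x n input."""
    return (n - size) // stride + 1


if __name__ == "__main__":
    print(pooled_side(4, size=2, stride=2))  # 2, as in the 4x4 doctest
    print(pooled_side(3, size=2, stride=1))  # 2, as in the 3x3 doctest

    # Non-overlapping 2x2 max pooling of a 4x4 array via reshape + max over the window axes.
    arr = np.arange(16, dtype=float).reshape(4, 4)
    print(arr.reshape(2, 2, 2, 2).max(axis=(1, 3)))  # [[ 5.  7.] [13. 15.]]
```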
TheAlgorithms/Python
7,417
Remove references to depreciated QasmSimulator
### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
tianyizheng02
2022-10-19T03:31:43Z
2022-10-19T20:12:44Z
50da472ddcdc2d79d1ad325ec05cda3558802fda
2859d4bf3aa96737a4715c65d4a9051d9c62d24d
Remove references to depreciated QasmSimulator. ### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
-1
TheAlgorithms/Python
7,417
Remove references to depreciated QasmSimulator
### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
tianyizheng02
2022-10-19T03:31:43Z
2022-10-19T20:12:44Z
50da472ddcdc2d79d1ad325ec05cda3558802fda
2859d4bf3aa96737a4715c65d4a9051d9c62d24d
Remove references to depreciated QasmSimulator. ### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
# floyd_warshall.py """ The problem is to find the shortest distance between all pairs of vertices in a weighted directed graph that can have negative edge weights. """ def _print_dist(dist, v): print("\nThe shortest path matrix using Floyd Warshall algorithm\n") for i in range(v): for j in range(v): if dist[i][j] != float("inf"): print(int(dist[i][j]), end="\t") else: print("INF", end="\t") print() def floyd_warshall(graph, v): """ :param graph: 2D array calculated from weight[edge[i, j]] :type graph: List[List[float]] :param v: number of vertices :type v: int :return: shortest distance between all vertex pairs distance[u][v] will contain the shortest distance from vertex u to v. 1. For all edges from v to n, distance[i][j] = weight(edge(i, j)). 3. The algorithm then performs distance[i][j] = min(distance[i][j], distance[i][k] + distance[k][j]) for each possible pair i, j of vertices. 4. The above is repeated for each vertex k in the graph. 5. Whenever distance[i][j] is given a new minimum value, next vertex[i][j] is updated to the next vertex[i][k]. """ dist = [[float("inf") for _ in range(v)] for _ in range(v)] for i in range(v): for j in range(v): dist[i][j] = graph[i][j] # check vertex k against all other vertices (i, j) for k in range(v): # looping through rows of graph array for i in range(v): # looping through columns of graph array for j in range(v): if ( dist[i][k] != float("inf") and dist[k][j] != float("inf") and dist[i][k] + dist[k][j] < dist[i][j] ): dist[i][j] = dist[i][k] + dist[k][j] _print_dist(dist, v) return dist, v if __name__ == "__main__": v = int(input("Enter number of vertices: ")) e = int(input("Enter number of edges: ")) graph = [[float("inf") for i in range(v)] for j in range(v)] for i in range(v): graph[i][i] = 0.0 # src and dst are indices that must be within the array size graph[e][v] # failure to follow this will result in an error for i in range(e): print("\nEdge ", i + 1) src = int(input("Enter source:")) dst = int(input("Enter destination:")) weight = float(input("Enter weight:")) graph[src][dst] = weight floyd_warshall(graph, v) # Example Input # Enter number of vertices: 3 # Enter number of edges: 2 # # generated graph from vertex and edge inputs # [[inf, inf, inf], [inf, inf, inf], [inf, inf, inf]] # [[0.0, inf, inf], [inf, 0.0, inf], [inf, inf, 0.0]] # specify source, destination and weight for edge #1 # Edge 1 # Enter source:1 # Enter destination:2 # Enter weight:2 # specify source, destination and weight for edge #2 # Edge 2 # Enter source:2 # Enter destination:1 # Enter weight:1 # # Expected Output from the vertice, edge and src, dst, weight inputs!! # 0 INF INF # INF 0 2 # INF 1 0
# floyd_warshall.py """ The problem is to find the shortest distance between all pairs of vertices in a weighted directed graph that can have negative edge weights. """ def _print_dist(dist, v): print("\nThe shortest path matrix using Floyd Warshall algorithm\n") for i in range(v): for j in range(v): if dist[i][j] != float("inf"): print(int(dist[i][j]), end="\t") else: print("INF", end="\t") print() def floyd_warshall(graph, v): """ :param graph: 2D array calculated from weight[edge[i, j]] :type graph: List[List[float]] :param v: number of vertices :type v: int :return: shortest distance between all vertex pairs distance[u][v] will contain the shortest distance from vertex u to v. 1. For all edges from v to n, distance[i][j] = weight(edge(i, j)). 3. The algorithm then performs distance[i][j] = min(distance[i][j], distance[i][k] + distance[k][j]) for each possible pair i, j of vertices. 4. The above is repeated for each vertex k in the graph. 5. Whenever distance[i][j] is given a new minimum value, next vertex[i][j] is updated to the next vertex[i][k]. """ dist = [[float("inf") for _ in range(v)] for _ in range(v)] for i in range(v): for j in range(v): dist[i][j] = graph[i][j] # check vertex k against all other vertices (i, j) for k in range(v): # looping through rows of graph array for i in range(v): # looping through columns of graph array for j in range(v): if ( dist[i][k] != float("inf") and dist[k][j] != float("inf") and dist[i][k] + dist[k][j] < dist[i][j] ): dist[i][j] = dist[i][k] + dist[k][j] _print_dist(dist, v) return dist, v if __name__ == "__main__": v = int(input("Enter number of vertices: ")) e = int(input("Enter number of edges: ")) graph = [[float("inf") for i in range(v)] for j in range(v)] for i in range(v): graph[i][i] = 0.0 # src and dst are indices that must be within the array size graph[e][v] # failure to follow this will result in an error for i in range(e): print("\nEdge ", i + 1) src = int(input("Enter source:")) dst = int(input("Enter destination:")) weight = float(input("Enter weight:")) graph[src][dst] = weight floyd_warshall(graph, v) # Example Input # Enter number of vertices: 3 # Enter number of edges: 2 # # generated graph from vertex and edge inputs # [[inf, inf, inf], [inf, inf, inf], [inf, inf, inf]] # [[0.0, inf, inf], [inf, 0.0, inf], [inf, inf, 0.0]] # specify source, destination and weight for edge #1 # Edge 1 # Enter source:1 # Enter destination:2 # Enter weight:2 # specify source, destination and weight for edge #2 # Edge 2 # Enter source:2 # Enter destination:1 # Enter weight:1 # # Expected Output from the vertice, edge and src, dst, weight inputs!! # 0 INF INF # INF 0 2 # INF 1 0
-1
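The floyd_warshall driver in the record above only exercises the algorithm through interactive input(). Below is a small, self-contained sketch (not part of the repository file) that hard-codes the same 3-vertex example from the comments and runs the same triple relaxation loop.

```python
INF = float("inf")

# 3 vertices; edge 1 -> 2 with weight 2 and edge 2 -> 1 with weight 1,
# mirroring the example session in the comments above.
dist = [
    [0.0, INF, INF],
    [INF, 0.0, 2.0],
    [INF, 1.0, 0.0],
]

for k in range(3):          # intermediate vertex
    for i in range(3):      # source vertex
        for j in range(3):  # destination vertex
            if dist[i][k] + dist[k][j] < dist[i][j]:
                dist[i][j] = dist[i][k] + dist[k][j]

print(dist)  # [[0.0, inf, inf], [inf, 0.0, 2.0], [inf, 1.0, 0.0]]
```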
TheAlgorithms/Python
7,417
Remove references to depreciated QasmSimulator
### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
tianyizheng02
2022-10-19T03:31:43Z
2022-10-19T20:12:44Z
50da472ddcdc2d79d1ad325ec05cda3558802fda
2859d4bf3aa96737a4715c65d4a9051d9c62d24d
Remove references to depreciated QasmSimulator. ### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
-1
TheAlgorithms/Python
7,417
Remove references to depreciated QasmSimulator
### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
tianyizheng02
2022-10-19T03:31:43Z
2022-10-19T20:12:44Z
50da472ddcdc2d79d1ad325ec05cda3558802fda
2859d4bf3aa96737a4715c65d4a9051d9c62d24d
Remove references to depreciated QasmSimulator. ### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
""" Project Euler Problem 3: https://projecteuler.net/problem=3 Largest prime factor The prime factors of 13195 are 5, 7, 13 and 29. What is the largest prime factor of the number 600851475143? References: - https://en.wikipedia.org/wiki/Prime_number#Unique_factorization """ import math def is_prime(number: int) -> bool: """Checks to see if a number is a prime in O(sqrt(n)). A number is prime if it has exactly two factors: 1 and itself. Returns boolean representing primality of given number (i.e., if the result is true, then the number is indeed prime else it is not). >>> is_prime(2) True >>> is_prime(3) True >>> is_prime(27) False >>> is_prime(2999) True >>> is_prime(0) False >>> is_prime(1) False """ if 1 < number < 4: # 2 and 3 are primes return True elif number < 2 or number % 2 == 0 or number % 3 == 0: # Negatives, 0, 1, all even numbers, all multiples of 3 are not primes return False # All primes number are in format of 6k +/- 1 for i in range(5, int(math.sqrt(number) + 1), 6): if number % i == 0 or number % (i + 2) == 0: return False return True def solution(n: int = 600851475143) -> int: """ Returns the largest prime factor of a given number n. >>> solution(13195) 29 >>> solution(10) 5 >>> solution(17) 17 >>> solution(3.4) 3 >>> solution(0) Traceback (most recent call last): ... ValueError: Parameter n must be greater than or equal to one. >>> solution(-17) Traceback (most recent call last): ... ValueError: Parameter n must be greater than or equal to one. >>> solution([]) Traceback (most recent call last): ... TypeError: Parameter n must be int or castable to int. >>> solution("asd") Traceback (most recent call last): ... TypeError: Parameter n must be int or castable to int. """ try: n = int(n) except (TypeError, ValueError): raise TypeError("Parameter n must be int or castable to int.") if n <= 0: raise ValueError("Parameter n must be greater than or equal to one.") max_number = 0 if is_prime(n): return n while n % 2 == 0: n //= 2 if is_prime(n): return n for i in range(3, int(math.sqrt(n)) + 1, 2): if n % i == 0: if is_prime(n // i): max_number = n // i break elif is_prime(i): max_number = i return max_number if __name__ == "__main__": print(f"{solution() = }")
""" Project Euler Problem 3: https://projecteuler.net/problem=3 Largest prime factor The prime factors of 13195 are 5, 7, 13 and 29. What is the largest prime factor of the number 600851475143? References: - https://en.wikipedia.org/wiki/Prime_number#Unique_factorization """ import math def is_prime(number: int) -> bool: """Checks to see if a number is a prime in O(sqrt(n)). A number is prime if it has exactly two factors: 1 and itself. Returns boolean representing primality of given number (i.e., if the result is true, then the number is indeed prime else it is not). >>> is_prime(2) True >>> is_prime(3) True >>> is_prime(27) False >>> is_prime(2999) True >>> is_prime(0) False >>> is_prime(1) False """ if 1 < number < 4: # 2 and 3 are primes return True elif number < 2 or number % 2 == 0 or number % 3 == 0: # Negatives, 0, 1, all even numbers, all multiples of 3 are not primes return False # All primes number are in format of 6k +/- 1 for i in range(5, int(math.sqrt(number) + 1), 6): if number % i == 0 or number % (i + 2) == 0: return False return True def solution(n: int = 600851475143) -> int: """ Returns the largest prime factor of a given number n. >>> solution(13195) 29 >>> solution(10) 5 >>> solution(17) 17 >>> solution(3.4) 3 >>> solution(0) Traceback (most recent call last): ... ValueError: Parameter n must be greater than or equal to one. >>> solution(-17) Traceback (most recent call last): ... ValueError: Parameter n must be greater than or equal to one. >>> solution([]) Traceback (most recent call last): ... TypeError: Parameter n must be int or castable to int. >>> solution("asd") Traceback (most recent call last): ... TypeError: Parameter n must be int or castable to int. """ try: n = int(n) except (TypeError, ValueError): raise TypeError("Parameter n must be int or castable to int.") if n <= 0: raise ValueError("Parameter n must be greater than or equal to one.") max_number = 0 if is_prime(n): return n while n % 2 == 0: n //= 2 if is_prime(n): return n for i in range(3, int(math.sqrt(n)) + 1, 2): if n % i == 0: if is_prime(n // i): max_number = n // i break elif is_prime(i): max_number = i return max_number if __name__ == "__main__": print(f"{solution() = }")
-1
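For the Project Euler 3 solution above, a more compact trial-division sketch gives the same answers; it is illustrative only and not the repository's implementation.

```python
def largest_prime_factor(n: int) -> int:
    """Strip out factors from smallest to largest; what survives is the largest prime factor."""
    factor = 2
    largest = 1
    while factor * factor <= n:
        while n % factor == 0:
            largest = factor
            n //= factor
        factor += 1
    return n if n > 1 else largest


if __name__ == "__main__":
    print(largest_prime_factor(13195))         # 29
    print(largest_prime_factor(600851475143))  # 6857
```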
TheAlgorithms/Python
7,417
Remove references to depreciated QasmSimulator
### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
tianyizheng02
2022-10-19T03:31:43Z
2022-10-19T20:12:44Z
50da472ddcdc2d79d1ad325ec05cda3558802fda
2859d4bf3aa96737a4715c65d4a9051d9c62d24d
Remove references to depreciated QasmSimulator. ### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
-1
TheAlgorithms/Python
7,417
Remove references to depreciated QasmSimulator
### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
tianyizheng02
2022-10-19T03:31:43Z
2022-10-19T20:12:44Z
50da472ddcdc2d79d1ad325ec05cda3558802fda
2859d4bf3aa96737a4715c65d4a9051d9c62d24d
Remove references to depreciated QasmSimulator. ### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
-1
TheAlgorithms/Python
7,417
Remove references to depreciated QasmSimulator
### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
tianyizheng02
2022-10-19T03:31:43Z
2022-10-19T20:12:44Z
50da472ddcdc2d79d1ad325ec05cda3558802fda
2859d4bf3aa96737a4715c65d4a9051d9c62d24d
Remove references to depreciated QasmSimulator. ### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
""" Problem 46: https://projecteuler.net/problem=46 It was proposed by Christian Goldbach that every odd composite number can be written as the sum of a prime and twice a square. 9 = 7 + 2 × 12 15 = 7 + 2 × 22 21 = 3 + 2 × 32 25 = 7 + 2 × 32 27 = 19 + 2 × 22 33 = 31 + 2 × 12 It turns out that the conjecture was false. What is the smallest odd composite that cannot be written as the sum of a prime and twice a square? """ from __future__ import annotations import math def is_prime(number: int) -> bool: """Checks to see if a number is a prime in O(sqrt(n)). A number is prime if it has exactly two factors: 1 and itself. >>> is_prime(0) False >>> is_prime(1) False >>> is_prime(2) True >>> is_prime(3) True >>> is_prime(27) False >>> is_prime(87) False >>> is_prime(563) True >>> is_prime(2999) True >>> is_prime(67483) False """ if 1 < number < 4: # 2 and 3 are primes return True elif number < 2 or number % 2 == 0 or number % 3 == 0: # Negatives, 0, 1, all even numbers, all multiples of 3 are not primes return False # All primes number are in format of 6k +/- 1 for i in range(5, int(math.sqrt(number) + 1), 6): if number % i == 0 or number % (i + 2) == 0: return False return True odd_composites = [num for num in range(3, 100001, 2) if not is_prime(num)] def compute_nums(n: int) -> list[int]: """ Returns a list of first n odd composite numbers which do not follow the conjecture. >>> compute_nums(1) [5777] >>> compute_nums(2) [5777, 5993] >>> compute_nums(0) Traceback (most recent call last): ... ValueError: n must be >= 0 >>> compute_nums("a") Traceback (most recent call last): ... ValueError: n must be an integer >>> compute_nums(1.1) Traceback (most recent call last): ... ValueError: n must be an integer """ if not isinstance(n, int): raise ValueError("n must be an integer") if n <= 0: raise ValueError("n must be >= 0") list_nums = [] for num in range(len(odd_composites)): i = 0 while 2 * i * i <= odd_composites[num]: rem = odd_composites[num] - 2 * i * i if is_prime(rem): break i += 1 else: list_nums.append(odd_composites[num]) if len(list_nums) == n: return list_nums return [] def solution() -> int: """Return the solution to the problem""" return compute_nums(1)[0] if __name__ == "__main__": print(f"{solution() = }")
""" Problem 46: https://projecteuler.net/problem=46 It was proposed by Christian Goldbach that every odd composite number can be written as the sum of a prime and twice a square. 9 = 7 + 2 × 12 15 = 7 + 2 × 22 21 = 3 + 2 × 32 25 = 7 + 2 × 32 27 = 19 + 2 × 22 33 = 31 + 2 × 12 It turns out that the conjecture was false. What is the smallest odd composite that cannot be written as the sum of a prime and twice a square? """ from __future__ import annotations import math def is_prime(number: int) -> bool: """Checks to see if a number is a prime in O(sqrt(n)). A number is prime if it has exactly two factors: 1 and itself. >>> is_prime(0) False >>> is_prime(1) False >>> is_prime(2) True >>> is_prime(3) True >>> is_prime(27) False >>> is_prime(87) False >>> is_prime(563) True >>> is_prime(2999) True >>> is_prime(67483) False """ if 1 < number < 4: # 2 and 3 are primes return True elif number < 2 or number % 2 == 0 or number % 3 == 0: # Negatives, 0, 1, all even numbers, all multiples of 3 are not primes return False # All primes number are in format of 6k +/- 1 for i in range(5, int(math.sqrt(number) + 1), 6): if number % i == 0 or number % (i + 2) == 0: return False return True odd_composites = [num for num in range(3, 100001, 2) if not is_prime(num)] def compute_nums(n: int) -> list[int]: """ Returns a list of first n odd composite numbers which do not follow the conjecture. >>> compute_nums(1) [5777] >>> compute_nums(2) [5777, 5993] >>> compute_nums(0) Traceback (most recent call last): ... ValueError: n must be >= 0 >>> compute_nums("a") Traceback (most recent call last): ... ValueError: n must be an integer >>> compute_nums(1.1) Traceback (most recent call last): ... ValueError: n must be an integer """ if not isinstance(n, int): raise ValueError("n must be an integer") if n <= 0: raise ValueError("n must be >= 0") list_nums = [] for num in range(len(odd_composites)): i = 0 while 2 * i * i <= odd_composites[num]: rem = odd_composites[num] - 2 * i * i if is_prime(rem): break i += 1 else: list_nums.append(odd_composites[num]) if len(list_nums) == n: return list_nums return [] def solution() -> int: """Return the solution to the problem""" return compute_nums(1)[0] if __name__ == "__main__": print(f"{solution() = }")
-1
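To make the Problem 46 search above a little more concrete, the sketch below (illustrative, not the repository file) checks a single odd composite against Goldbach's other conjecture with plain trial division: 33 decomposes as 31 + 2·1², while 5777, the answer found above, does not decompose at all.

```python
def is_prime_simple(n: int) -> bool:
    if n < 2:
        return False
    d = 2
    while d * d <= n:
        if n % d == 0:
            return False
        d += 1
    return True


def decomposes(odd_composite: int) -> bool:
    """True if odd_composite == prime + 2*k*k for some k >= 0."""
    k = 0
    while 2 * k * k <= odd_composite:
        if is_prime_simple(odd_composite - 2 * k * k):
            return True
        k += 1
    return False


if __name__ == "__main__":
    print(decomposes(33))    # True: 33 = 31 + 2 * 1**2
    print(decomposes(5777))  # False: the smallest counterexample
```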
TheAlgorithms/Python
7,417
Remove references to depreciated QasmSimulator
### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
tianyizheng02
2022-10-19T03:31:43Z
2022-10-19T20:12:44Z
50da472ddcdc2d79d1ad325ec05cda3558802fda
2859d4bf3aa96737a4715c65d4a9051d9c62d24d
Remove references to depreciated QasmSimulator. ### Describe your change: Replaced instances of `qiskit.Aer.get_backend("qasm_simulator")` in `quantum/` with `q.Aer.get_backend("aer_simulator")`, as the former is depreciated and raises warnings (Qiskit's [documentation](https://qiskit.org/documentation/apidoc/aer_provider.html) says that `QasmSimulator` is legacy). This PR edits multiple code files because they all raise the same warning and are mentioned in the same GitHub issue. Fixes #7308 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
""" Algorithm that merges two sorted linked lists into one sorted linked list. """ from __future__ import annotations from collections.abc import Iterable, Iterator from dataclasses import dataclass test_data_odd = (3, 9, -11, 0, 7, 5, 1, -1) test_data_even = (4, 6, 2, 0, 8, 10, 3, -2) @dataclass class Node: data: int next: Node | None class SortedLinkedList: def __init__(self, ints: Iterable[int]) -> None: self.head: Node | None = None for i in sorted(ints, reverse=True): self.head = Node(i, self.head) def __iter__(self) -> Iterator[int]: """ >>> tuple(SortedLinkedList(test_data_odd)) == tuple(sorted(test_data_odd)) True >>> tuple(SortedLinkedList(test_data_even)) == tuple(sorted(test_data_even)) True """ node = self.head while node: yield node.data node = node.next def __len__(self) -> int: """ >>> for i in range(3): ... len(SortedLinkedList(range(i))) == i True True True >>> len(SortedLinkedList(test_data_odd)) 8 """ return len(tuple(iter(self))) def __str__(self) -> str: """ >>> str(SortedLinkedList([])) '' >>> str(SortedLinkedList(test_data_odd)) '-11 -> -1 -> 0 -> 1 -> 3 -> 5 -> 7 -> 9' >>> str(SortedLinkedList(test_data_even)) '-2 -> 0 -> 2 -> 3 -> 4 -> 6 -> 8 -> 10' """ return " -> ".join([str(node) for node in self]) def merge_lists( sll_one: SortedLinkedList, sll_two: SortedLinkedList ) -> SortedLinkedList: """ >>> SSL = SortedLinkedList >>> merged = merge_lists(SSL(test_data_odd), SSL(test_data_even)) >>> len(merged) 16 >>> str(merged) '-11 -> -2 -> -1 -> 0 -> 0 -> 1 -> 2 -> 3 -> 3 -> 4 -> 5 -> 6 -> 7 -> 8 -> 9 -> 10' >>> list(merged) == list(sorted(test_data_odd + test_data_even)) True """ return SortedLinkedList(list(sll_one) + list(sll_two)) if __name__ == "__main__": import doctest doctest.testmod() SSL = SortedLinkedList print(merge_lists(SSL(test_data_odd), SSL(test_data_even)))
""" Algorithm that merges two sorted linked lists into one sorted linked list. """ from __future__ import annotations from collections.abc import Iterable, Iterator from dataclasses import dataclass test_data_odd = (3, 9, -11, 0, 7, 5, 1, -1) test_data_even = (4, 6, 2, 0, 8, 10, 3, -2) @dataclass class Node: data: int next: Node | None class SortedLinkedList: def __init__(self, ints: Iterable[int]) -> None: self.head: Node | None = None for i in sorted(ints, reverse=True): self.head = Node(i, self.head) def __iter__(self) -> Iterator[int]: """ >>> tuple(SortedLinkedList(test_data_odd)) == tuple(sorted(test_data_odd)) True >>> tuple(SortedLinkedList(test_data_even)) == tuple(sorted(test_data_even)) True """ node = self.head while node: yield node.data node = node.next def __len__(self) -> int: """ >>> for i in range(3): ... len(SortedLinkedList(range(i))) == i True True True >>> len(SortedLinkedList(test_data_odd)) 8 """ return len(tuple(iter(self))) def __str__(self) -> str: """ >>> str(SortedLinkedList([])) '' >>> str(SortedLinkedList(test_data_odd)) '-11 -> -1 -> 0 -> 1 -> 3 -> 5 -> 7 -> 9' >>> str(SortedLinkedList(test_data_even)) '-2 -> 0 -> 2 -> 3 -> 4 -> 6 -> 8 -> 10' """ return " -> ".join([str(node) for node in self]) def merge_lists( sll_one: SortedLinkedList, sll_two: SortedLinkedList ) -> SortedLinkedList: """ >>> SSL = SortedLinkedList >>> merged = merge_lists(SSL(test_data_odd), SSL(test_data_even)) >>> len(merged) 16 >>> str(merged) '-11 -> -2 -> -1 -> 0 -> 0 -> 1 -> 2 -> 3 -> 3 -> 4 -> 5 -> 6 -> 7 -> 8 -> 9 -> 10' >>> list(merged) == list(sorted(test_data_odd + test_data_even)) True """ return SortedLinkedList(list(sll_one) + list(sll_two)) if __name__ == "__main__": import doctest doctest.testmod() SSL = SortedLinkedList print(merge_lists(SSL(test_data_odd), SSL(test_data_even)))
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
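For context, the rev bumps listed above are produced by pre-commit's own updater; a minimal, hypothetical sketch (not part of this PR) of reproducing them locally from Python, assuming `pre-commit` is installed and the repository contains the `.pre-commit-config.yaml` shown below:

```python
import subprocess

# Bump every hook's `rev:` to its latest tag (what pre-commit.ci automates) ...
subprocess.run(["pre-commit", "autoupdate"], check=True)
# ... then re-run all hooks against the whole repository to verify the result.
subprocess.run(["pre-commit", "run", "--all-files"], check=True)
```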
repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.3.0 hooks: - id: check-executables-have-shebangs - id: check-yaml - id: end-of-file-fixer types: [python] - id: trailing-whitespace exclude: | (?x)^( data_structures/heap/binomial_heap.py )$ - id: requirements-txt-fixer - repo: https://github.com/psf/black rev: 22.10.0 hooks: - id: black - repo: https://github.com/PyCQA/isort rev: 5.10.1 hooks: - id: isort args: - --profile=black - repo: https://github.com/asottile/pyupgrade rev: v3.0.0 hooks: - id: pyupgrade args: - --py310-plus - repo: https://github.com/PyCQA/flake8 rev: 5.0.4 hooks: - id: flake8 # See .flake8 for args additional_dependencies: - flake8-bugbear - flake8-builtins - flake8-broken-line - flake8-comprehensions - pep8-naming - yesqa - repo: https://github.com/pre-commit/mirrors-mypy rev: v0.982 hooks: - id: mypy args: - --ignore-missing-imports - --install-types # See mirrors-mypy README.md - --non-interactive additional_dependencies: [types-requests] - repo: https://github.com/codespell-project/codespell rev: v2.2.1 hooks: - id: codespell args: - --ignore-words-list=ans,crate,damon,fo,followings,hist,iff,mater,secant,som,sur,tim,zar - --skip="./.*,./strings/dictionary.txt,./strings/words.txt,./project_euler/problem_022/p022_names.txt" exclude: | (?x)^( strings/dictionary.txt | strings/words.txt | project_euler/problem_022/p022_names.txt )$ - repo: local hooks: - id: validate-filenames name: Validate filenames entry: ./scripts/validate_filenames.py language: script pass_filenames: false
repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.3.0 hooks: - id: check-executables-have-shebangs - id: check-yaml - id: end-of-file-fixer types: [python] - id: trailing-whitespace exclude: | (?x)^( data_structures/heap/binomial_heap.py )$ - id: requirements-txt-fixer - repo: https://github.com/psf/black rev: 22.10.0 hooks: - id: black - repo: https://github.com/PyCQA/isort rev: 5.10.1 hooks: - id: isort args: - --profile=black - repo: https://github.com/asottile/pyupgrade rev: v3.1.0 hooks: - id: pyupgrade args: - --py310-plus - repo: https://github.com/PyCQA/flake8 rev: 5.0.4 hooks: - id: flake8 # See .flake8 for args additional_dependencies: - flake8-bugbear - flake8-builtins - flake8-broken-line - flake8-comprehensions - pep8-naming - yesqa - repo: https://github.com/pre-commit/mirrors-mypy rev: v0.982 hooks: - id: mypy args: - --ignore-missing-imports - --install-types # See mirrors-mypy README.md - --non-interactive additional_dependencies: [types-requests] - repo: https://github.com/codespell-project/codespell rev: v2.2.2 hooks: - id: codespell args: - --ignore-words-list=ans,crate,damon,fo,followings,hist,iff,mater,secant,som,sur,tim,zar exclude: | (?x)^( ciphers/prehistoric_men.txt | strings/dictionary.txt | strings/words.txt | project_euler/problem_022/p022_names.txt )$ - repo: local hooks: - id: validate-filenames name: Validate filenames entry: ./scripts/validate_filenames.py language: script pass_filenames: false
1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
## Arithmetic Analysis * [Bisection](arithmetic_analysis/bisection.py) * [Gaussian Elimination](arithmetic_analysis/gaussian_elimination.py) * [In Static Equilibrium](arithmetic_analysis/in_static_equilibrium.py) * [Intersection](arithmetic_analysis/intersection.py) * [Jacobi Iteration Method](arithmetic_analysis/jacobi_iteration_method.py) * [Lu Decomposition](arithmetic_analysis/lu_decomposition.py) * [Newton Forward Interpolation](arithmetic_analysis/newton_forward_interpolation.py) * [Newton Method](arithmetic_analysis/newton_method.py) * [Newton Raphson](arithmetic_analysis/newton_raphson.py) * [Newton Raphson New](arithmetic_analysis/newton_raphson_new.py) * [Secant Method](arithmetic_analysis/secant_method.py) ## Audio Filters * [Butterworth Filter](audio_filters/butterworth_filter.py) * [Equal Loudness Filter](audio_filters/equal_loudness_filter.py) * [Iir Filter](audio_filters/iir_filter.py) * [Show Response](audio_filters/show_response.py) ## Backtracking * [All Combinations](backtracking/all_combinations.py) * [All Permutations](backtracking/all_permutations.py) * [All Subsequences](backtracking/all_subsequences.py) * [Coloring](backtracking/coloring.py) * [Hamiltonian Cycle](backtracking/hamiltonian_cycle.py) * [Knight Tour](backtracking/knight_tour.py) * [Minimax](backtracking/minimax.py) * [N Queens](backtracking/n_queens.py) * [N Queens Math](backtracking/n_queens_math.py) * [Rat In Maze](backtracking/rat_in_maze.py) * [Sudoku](backtracking/sudoku.py) * [Sum Of Subsets](backtracking/sum_of_subsets.py) ## Bit Manipulation * [Binary And Operator](bit_manipulation/binary_and_operator.py) * [Binary Count Setbits](bit_manipulation/binary_count_setbits.py) * [Binary Count Trailing Zeros](bit_manipulation/binary_count_trailing_zeros.py) * [Binary Or Operator](bit_manipulation/binary_or_operator.py) * [Binary Shifts](bit_manipulation/binary_shifts.py) * [Binary Twos Complement](bit_manipulation/binary_twos_complement.py) * [Binary Xor Operator](bit_manipulation/binary_xor_operator.py) * [Count 1S Brian Kernighan Method](bit_manipulation/count_1s_brian_kernighan_method.py) * [Count Number Of One Bits](bit_manipulation/count_number_of_one_bits.py) * [Gray Code Sequence](bit_manipulation/gray_code_sequence.py) * [Reverse Bits](bit_manipulation/reverse_bits.py) * [Single Bit Manipulation Operations](bit_manipulation/single_bit_manipulation_operations.py) ## Blockchain * [Chinese Remainder Theorem](blockchain/chinese_remainder_theorem.py) * [Diophantine Equation](blockchain/diophantine_equation.py) * [Modular Division](blockchain/modular_division.py) ## Boolean Algebra * [Norgate](boolean_algebra/norgate.py) * [Quine Mc Cluskey](boolean_algebra/quine_mc_cluskey.py) ## Cellular Automata * [Conways Game Of Life](cellular_automata/conways_game_of_life.py) * [Game Of Life](cellular_automata/game_of_life.py) * [Nagel Schrekenberg](cellular_automata/nagel_schrekenberg.py) * [One Dimensional](cellular_automata/one_dimensional.py) ## Ciphers * [A1Z26](ciphers/a1z26.py) * [Affine Cipher](ciphers/affine_cipher.py) * [Atbash](ciphers/atbash.py) * [Baconian Cipher](ciphers/baconian_cipher.py) * [Base16](ciphers/base16.py) * [Base32](ciphers/base32.py) * [Base64](ciphers/base64.py) * [Base85](ciphers/base85.py) * [Beaufort Cipher](ciphers/beaufort_cipher.py) * [Bifid](ciphers/bifid.py) * [Brute Force Caesar Cipher](ciphers/brute_force_caesar_cipher.py) * [Caesar Cipher](ciphers/caesar_cipher.py) * [Cryptomath Module](ciphers/cryptomath_module.py) * [Decrypt Caesar With Chi 
Squared](ciphers/decrypt_caesar_with_chi_squared.py) * [Deterministic Miller Rabin](ciphers/deterministic_miller_rabin.py) * [Diffie](ciphers/diffie.py) * [Diffie Hellman](ciphers/diffie_hellman.py) * [Elgamal Key Generator](ciphers/elgamal_key_generator.py) * [Enigma Machine2](ciphers/enigma_machine2.py) * [Hill Cipher](ciphers/hill_cipher.py) * [Mixed Keyword Cypher](ciphers/mixed_keyword_cypher.py) * [Mono Alphabetic Ciphers](ciphers/mono_alphabetic_ciphers.py) * [Morse Code](ciphers/morse_code.py) * [Onepad Cipher](ciphers/onepad_cipher.py) * [Playfair Cipher](ciphers/playfair_cipher.py) * [Polybius](ciphers/polybius.py) * [Porta Cipher](ciphers/porta_cipher.py) * [Rabin Miller](ciphers/rabin_miller.py) * [Rail Fence Cipher](ciphers/rail_fence_cipher.py) * [Rot13](ciphers/rot13.py) * [Rsa Cipher](ciphers/rsa_cipher.py) * [Rsa Factorization](ciphers/rsa_factorization.py) * [Rsa Key Generator](ciphers/rsa_key_generator.py) * [Shuffled Shift Cipher](ciphers/shuffled_shift_cipher.py) * [Simple Keyword Cypher](ciphers/simple_keyword_cypher.py) * [Simple Substitution Cipher](ciphers/simple_substitution_cipher.py) * [Trafid Cipher](ciphers/trafid_cipher.py) * [Transposition Cipher](ciphers/transposition_cipher.py) * [Transposition Cipher Encrypt Decrypt File](ciphers/transposition_cipher_encrypt_decrypt_file.py) * [Vigenere Cipher](ciphers/vigenere_cipher.py) * [Xor Cipher](ciphers/xor_cipher.py) ## Compression * [Burrows Wheeler](compression/burrows_wheeler.py) * [Huffman](compression/huffman.py) * [Lempel Ziv](compression/lempel_ziv.py) * [Lempel Ziv Decompress](compression/lempel_ziv_decompress.py) * [Peak Signal To Noise Ratio](compression/peak_signal_to_noise_ratio.py) * [Run Length Encoding](compression/run_length_encoding.py) ## Computer Vision * [Cnn Classification](computer_vision/cnn_classification.py) * [Flip Augmentation](computer_vision/flip_augmentation.py) * [Harris Corner](computer_vision/harris_corner.py) * [Horn Schunck](computer_vision/horn_schunck.py) * [Mean Threshold](computer_vision/mean_threshold.py) * [Mosaic Augmentation](computer_vision/mosaic_augmentation.py) * [Pooling Functions](computer_vision/pooling_functions.py) ## Conversions * [Astronomical Length Scale Conversion](conversions/astronomical_length_scale_conversion.py) * [Binary To Decimal](conversions/binary_to_decimal.py) * [Binary To Hexadecimal](conversions/binary_to_hexadecimal.py) * [Binary To Octal](conversions/binary_to_octal.py) * [Decimal To Any](conversions/decimal_to_any.py) * [Decimal To Binary](conversions/decimal_to_binary.py) * [Decimal To Binary Recursion](conversions/decimal_to_binary_recursion.py) * [Decimal To Hexadecimal](conversions/decimal_to_hexadecimal.py) * [Decimal To Octal](conversions/decimal_to_octal.py) * [Excel Title To Column](conversions/excel_title_to_column.py) * [Hex To Bin](conversions/hex_to_bin.py) * [Hexadecimal To Decimal](conversions/hexadecimal_to_decimal.py) * [Length Conversion](conversions/length_conversion.py) * [Molecular Chemistry](conversions/molecular_chemistry.py) * [Octal To Decimal](conversions/octal_to_decimal.py) * [Prefix Conversions](conversions/prefix_conversions.py) * [Prefix Conversions String](conversions/prefix_conversions_string.py) * [Pressure Conversions](conversions/pressure_conversions.py) * [Rgb Hsv Conversion](conversions/rgb_hsv_conversion.py) * [Roman Numerals](conversions/roman_numerals.py) * [Speed Conversions](conversions/speed_conversions.py) * [Temperature Conversions](conversions/temperature_conversions.py) * [Volume 
Conversions](conversions/volume_conversions.py) * [Weight Conversion](conversions/weight_conversion.py) ## Data Structures * Binary Tree * [Avl Tree](data_structures/binary_tree/avl_tree.py) * [Basic Binary Tree](data_structures/binary_tree/basic_binary_tree.py) * [Binary Search Tree](data_structures/binary_tree/binary_search_tree.py) * [Binary Search Tree Recursive](data_structures/binary_tree/binary_search_tree_recursive.py) * [Binary Tree Mirror](data_structures/binary_tree/binary_tree_mirror.py) * [Binary Tree Node Sum](data_structures/binary_tree/binary_tree_node_sum.py) * [Binary Tree Traversals](data_structures/binary_tree/binary_tree_traversals.py) * [Fenwick Tree](data_structures/binary_tree/fenwick_tree.py) * [Inorder Tree Traversal 2022](data_structures/binary_tree/inorder_tree_traversal_2022.py) * [Lazy Segment Tree](data_structures/binary_tree/lazy_segment_tree.py) * [Lowest Common Ancestor](data_structures/binary_tree/lowest_common_ancestor.py) * [Maximum Fenwick Tree](data_structures/binary_tree/maximum_fenwick_tree.py) * [Merge Two Binary Trees](data_structures/binary_tree/merge_two_binary_trees.py) * [Non Recursive Segment Tree](data_structures/binary_tree/non_recursive_segment_tree.py) * [Number Of Possible Binary Trees](data_structures/binary_tree/number_of_possible_binary_trees.py) * [Red Black Tree](data_structures/binary_tree/red_black_tree.py) * [Segment Tree](data_structures/binary_tree/segment_tree.py) * [Segment Tree Other](data_structures/binary_tree/segment_tree_other.py) * [Treap](data_structures/binary_tree/treap.py) * [Wavelet Tree](data_structures/binary_tree/wavelet_tree.py) * Disjoint Set * [Alternate Disjoint Set](data_structures/disjoint_set/alternate_disjoint_set.py) * [Disjoint Set](data_structures/disjoint_set/disjoint_set.py) * Hashing * [Double Hash](data_structures/hashing/double_hash.py) * [Hash Table](data_structures/hashing/hash_table.py) * [Hash Table With Linked List](data_structures/hashing/hash_table_with_linked_list.py) * Number Theory * [Prime Numbers](data_structures/hashing/number_theory/prime_numbers.py) * [Quadratic Probing](data_structures/hashing/quadratic_probing.py) * Heap * [Binomial Heap](data_structures/heap/binomial_heap.py) * [Heap](data_structures/heap/heap.py) * [Heap Generic](data_structures/heap/heap_generic.py) * [Max Heap](data_structures/heap/max_heap.py) * [Min Heap](data_structures/heap/min_heap.py) * [Randomized Heap](data_structures/heap/randomized_heap.py) * [Skew Heap](data_structures/heap/skew_heap.py) * Linked List * [Circular Linked List](data_structures/linked_list/circular_linked_list.py) * [Deque Doubly](data_structures/linked_list/deque_doubly.py) * [Doubly Linked List](data_structures/linked_list/doubly_linked_list.py) * [Doubly Linked List Two](data_structures/linked_list/doubly_linked_list_two.py) * [From Sequence](data_structures/linked_list/from_sequence.py) * [Has Loop](data_structures/linked_list/has_loop.py) * [Is Palindrome](data_structures/linked_list/is_palindrome.py) * [Merge Two Lists](data_structures/linked_list/merge_two_lists.py) * [Middle Element Of Linked List](data_structures/linked_list/middle_element_of_linked_list.py) * [Print Reverse](data_structures/linked_list/print_reverse.py) * [Singly Linked List](data_structures/linked_list/singly_linked_list.py) * [Skip List](data_structures/linked_list/skip_list.py) * [Swap Nodes](data_structures/linked_list/swap_nodes.py) * Queue * [Circular Queue](data_structures/queue/circular_queue.py) * [Circular Queue Linked 
List](data_structures/queue/circular_queue_linked_list.py) * [Double Ended Queue](data_structures/queue/double_ended_queue.py) * [Linked Queue](data_structures/queue/linked_queue.py) * [Priority Queue Using List](data_structures/queue/priority_queue_using_list.py) * [Queue On List](data_structures/queue/queue_on_list.py) * [Queue On Pseudo Stack](data_structures/queue/queue_on_pseudo_stack.py) * Stacks * [Balanced Parentheses](data_structures/stacks/balanced_parentheses.py) * [Dijkstras Two Stack Algorithm](data_structures/stacks/dijkstras_two_stack_algorithm.py) * [Evaluate Postfix Notations](data_structures/stacks/evaluate_postfix_notations.py) * [Infix To Postfix Conversion](data_structures/stacks/infix_to_postfix_conversion.py) * [Infix To Prefix Conversion](data_structures/stacks/infix_to_prefix_conversion.py) * [Next Greater Element](data_structures/stacks/next_greater_element.py) * [Postfix Evaluation](data_structures/stacks/postfix_evaluation.py) * [Prefix Evaluation](data_structures/stacks/prefix_evaluation.py) * [Stack](data_structures/stacks/stack.py) * [Stack With Doubly Linked List](data_structures/stacks/stack_with_doubly_linked_list.py) * [Stack With Singly Linked List](data_structures/stacks/stack_with_singly_linked_list.py) * [Stock Span Problem](data_structures/stacks/stock_span_problem.py) * Trie * [Trie](data_structures/trie/trie.py) ## Digital Image Processing * [Change Brightness](digital_image_processing/change_brightness.py) * [Change Contrast](digital_image_processing/change_contrast.py) * [Convert To Negative](digital_image_processing/convert_to_negative.py) * Dithering * [Burkes](digital_image_processing/dithering/burkes.py) * Edge Detection * [Canny](digital_image_processing/edge_detection/canny.py) * Filters * [Bilateral Filter](digital_image_processing/filters/bilateral_filter.py) * [Convolve](digital_image_processing/filters/convolve.py) * [Gabor Filter](digital_image_processing/filters/gabor_filter.py) * [Gaussian Filter](digital_image_processing/filters/gaussian_filter.py) * [Local Binary Pattern](digital_image_processing/filters/local_binary_pattern.py) * [Median Filter](digital_image_processing/filters/median_filter.py) * [Sobel Filter](digital_image_processing/filters/sobel_filter.py) * Histogram Equalization * [Histogram Stretch](digital_image_processing/histogram_equalization/histogram_stretch.py) * [Index Calculation](digital_image_processing/index_calculation.py) * Morphological Operations * [Dilation Operation](digital_image_processing/morphological_operations/dilation_operation.py) * [Erosion Operation](digital_image_processing/morphological_operations/erosion_operation.py) * Resize * [Resize](digital_image_processing/resize/resize.py) * Rotation * [Rotation](digital_image_processing/rotation/rotation.py) * [Sepia](digital_image_processing/sepia.py) * [Test Digital Image Processing](digital_image_processing/test_digital_image_processing.py) ## Divide And Conquer * [Closest Pair Of Points](divide_and_conquer/closest_pair_of_points.py) * [Convex Hull](divide_and_conquer/convex_hull.py) * [Heaps Algorithm](divide_and_conquer/heaps_algorithm.py) * [Heaps Algorithm Iterative](divide_and_conquer/heaps_algorithm_iterative.py) * [Inversions](divide_and_conquer/inversions.py) * [Kth Order Statistic](divide_and_conquer/kth_order_statistic.py) * [Max Difference Pair](divide_and_conquer/max_difference_pair.py) * [Max Subarray Sum](divide_and_conquer/max_subarray_sum.py) * [Mergesort](divide_and_conquer/mergesort.py) * [Peak](divide_and_conquer/peak.py) * 
[Power](divide_and_conquer/power.py) * [Strassen Matrix Multiplication](divide_and_conquer/strassen_matrix_multiplication.py) ## Dynamic Programming * [Abbreviation](dynamic_programming/abbreviation.py) * [All Construct](dynamic_programming/all_construct.py) * [Bitmask](dynamic_programming/bitmask.py) * [Catalan Numbers](dynamic_programming/catalan_numbers.py) * [Climbing Stairs](dynamic_programming/climbing_stairs.py) * [Edit Distance](dynamic_programming/edit_distance.py) * [Factorial](dynamic_programming/factorial.py) * [Fast Fibonacci](dynamic_programming/fast_fibonacci.py) * [Fibonacci](dynamic_programming/fibonacci.py) * [Floyd Warshall](dynamic_programming/floyd_warshall.py) * [Integer Partition](dynamic_programming/integer_partition.py) * [Iterating Through Submasks](dynamic_programming/iterating_through_submasks.py) * [Knapsack](dynamic_programming/knapsack.py) * [Longest Common Subsequence](dynamic_programming/longest_common_subsequence.py) * [Longest Increasing Subsequence](dynamic_programming/longest_increasing_subsequence.py) * [Longest Increasing Subsequence O(Nlogn)](dynamic_programming/longest_increasing_subsequence_o(nlogn).py) * [Longest Sub Array](dynamic_programming/longest_sub_array.py) * [Matrix Chain Order](dynamic_programming/matrix_chain_order.py) * [Max Non Adjacent Sum](dynamic_programming/max_non_adjacent_sum.py) * [Max Sub Array](dynamic_programming/max_sub_array.py) * [Max Sum Contiguous Subsequence](dynamic_programming/max_sum_contiguous_subsequence.py) * [Minimum Coin Change](dynamic_programming/minimum_coin_change.py) * [Minimum Cost Path](dynamic_programming/minimum_cost_path.py) * [Minimum Partition](dynamic_programming/minimum_partition.py) * [Minimum Steps To One](dynamic_programming/minimum_steps_to_one.py) * [Optimal Binary Search Tree](dynamic_programming/optimal_binary_search_tree.py) * [Rod Cutting](dynamic_programming/rod_cutting.py) * [Subset Generation](dynamic_programming/subset_generation.py) * [Sum Of Subset](dynamic_programming/sum_of_subset.py) ## Electronics * [Carrier Concentration](electronics/carrier_concentration.py) * [Coulombs Law](electronics/coulombs_law.py) * [Electric Power](electronics/electric_power.py) * [Ohms Law](electronics/ohms_law.py) ## File Transfer * [Receive File](file_transfer/receive_file.py) * [Send File](file_transfer/send_file.py) * Tests * [Test Send File](file_transfer/tests/test_send_file.py) ## Financial * [Equated Monthly Installments](financial/equated_monthly_installments.py) * [Interest](financial/interest.py) ## Fractals * [Julia Sets](fractals/julia_sets.py) * [Koch Snowflake](fractals/koch_snowflake.py) * [Mandelbrot](fractals/mandelbrot.py) * [Sierpinski Triangle](fractals/sierpinski_triangle.py) ## Fuzzy Logic * [Fuzzy Operations](fuzzy_logic/fuzzy_operations.py) ## Genetic Algorithm * [Basic String](genetic_algorithm/basic_string.py) ## Geodesy * [Haversine Distance](geodesy/haversine_distance.py) * [Lamberts Ellipsoidal Distance](geodesy/lamberts_ellipsoidal_distance.py) ## Graphics * [Bezier Curve](graphics/bezier_curve.py) * [Vector3 For 2D Rendering](graphics/vector3_for_2d_rendering.py) ## Graphs * [A Star](graphs/a_star.py) * [Articulation Points](graphs/articulation_points.py) * [Basic Graphs](graphs/basic_graphs.py) * [Bellman Ford](graphs/bellman_ford.py) * [Bfs Shortest Path](graphs/bfs_shortest_path.py) * [Bfs Zero One Shortest Path](graphs/bfs_zero_one_shortest_path.py) * [Bidirectional A Star](graphs/bidirectional_a_star.py) * [Bidirectional Breadth First 
Search](graphs/bidirectional_breadth_first_search.py) * [Boruvka](graphs/boruvka.py) * [Breadth First Search](graphs/breadth_first_search.py) * [Breadth First Search 2](graphs/breadth_first_search_2.py) * [Breadth First Search Shortest Path](graphs/breadth_first_search_shortest_path.py) * [Check Bipartite Graph Bfs](graphs/check_bipartite_graph_bfs.py) * [Check Bipartite Graph Dfs](graphs/check_bipartite_graph_dfs.py) * [Check Cycle](graphs/check_cycle.py) * [Connected Components](graphs/connected_components.py) * [Depth First Search](graphs/depth_first_search.py) * [Depth First Search 2](graphs/depth_first_search_2.py) * [Dijkstra](graphs/dijkstra.py) * [Dijkstra 2](graphs/dijkstra_2.py) * [Dijkstra Algorithm](graphs/dijkstra_algorithm.py) * [Dinic](graphs/dinic.py) * [Directed And Undirected (Weighted) Graph](graphs/directed_and_undirected_(weighted)_graph.py) * [Edmonds Karp Multiple Source And Sink](graphs/edmonds_karp_multiple_source_and_sink.py) * [Eulerian Path And Circuit For Undirected Graph](graphs/eulerian_path_and_circuit_for_undirected_graph.py) * [Even Tree](graphs/even_tree.py) * [Finding Bridges](graphs/finding_bridges.py) * [Frequent Pattern Graph Miner](graphs/frequent_pattern_graph_miner.py) * [G Topological Sort](graphs/g_topological_sort.py) * [Gale Shapley Bigraph](graphs/gale_shapley_bigraph.py) * [Graph List](graphs/graph_list.py) * [Graph Matrix](graphs/graph_matrix.py) * [Graphs Floyd Warshall](graphs/graphs_floyd_warshall.py) * [Greedy Best First](graphs/greedy_best_first.py) * [Greedy Min Vertex Cover](graphs/greedy_min_vertex_cover.py) * [Kahns Algorithm Long](graphs/kahns_algorithm_long.py) * [Kahns Algorithm Topo](graphs/kahns_algorithm_topo.py) * [Karger](graphs/karger.py) * [Markov Chain](graphs/markov_chain.py) * [Matching Min Vertex Cover](graphs/matching_min_vertex_cover.py) * [Minimum Path Sum](graphs/minimum_path_sum.py) * [Minimum Spanning Tree Boruvka](graphs/minimum_spanning_tree_boruvka.py) * [Minimum Spanning Tree Kruskal](graphs/minimum_spanning_tree_kruskal.py) * [Minimum Spanning Tree Kruskal2](graphs/minimum_spanning_tree_kruskal2.py) * [Minimum Spanning Tree Prims](graphs/minimum_spanning_tree_prims.py) * [Minimum Spanning Tree Prims2](graphs/minimum_spanning_tree_prims2.py) * [Multi Heuristic Astar](graphs/multi_heuristic_astar.py) * [Page Rank](graphs/page_rank.py) * [Prim](graphs/prim.py) * [Random Graph Generator](graphs/random_graph_generator.py) * [Scc Kosaraju](graphs/scc_kosaraju.py) * [Strongly Connected Components](graphs/strongly_connected_components.py) * [Tarjans Scc](graphs/tarjans_scc.py) * Tests * [Test Min Spanning Tree Kruskal](graphs/tests/test_min_spanning_tree_kruskal.py) * [Test Min Spanning Tree Prim](graphs/tests/test_min_spanning_tree_prim.py) ## Greedy Methods * [Fractional Knapsack](greedy_methods/fractional_knapsack.py) * [Fractional Knapsack 2](greedy_methods/fractional_knapsack_2.py) * [Optimal Merge Pattern](greedy_methods/optimal_merge_pattern.py) ## Hashes * [Adler32](hashes/adler32.py) * [Chaos Machine](hashes/chaos_machine.py) * [Djb2](hashes/djb2.py) * [Enigma Machine](hashes/enigma_machine.py) * [Hamming Code](hashes/hamming_code.py) * [Luhn](hashes/luhn.py) * [Md5](hashes/md5.py) * [Sdbm](hashes/sdbm.py) * [Sha1](hashes/sha1.py) * [Sha256](hashes/sha256.py) ## Knapsack * [Greedy Knapsack](knapsack/greedy_knapsack.py) * [Knapsack](knapsack/knapsack.py) * Tests * [Test Greedy Knapsack](knapsack/tests/test_greedy_knapsack.py) * [Test Knapsack](knapsack/tests/test_knapsack.py) ## Linear Algebra * Src * 
[Conjugate Gradient](linear_algebra/src/conjugate_gradient.py) * [Lib](linear_algebra/src/lib.py) * [Polynom For Points](linear_algebra/src/polynom_for_points.py) * [Power Iteration](linear_algebra/src/power_iteration.py) * [Rayleigh Quotient](linear_algebra/src/rayleigh_quotient.py) * [Schur Complement](linear_algebra/src/schur_complement.py) * [Test Linear Algebra](linear_algebra/src/test_linear_algebra.py) * [Transformations 2D](linear_algebra/src/transformations_2d.py) ## Machine Learning * [Astar](machine_learning/astar.py) * [Data Transformations](machine_learning/data_transformations.py) * [Decision Tree](machine_learning/decision_tree.py) * Forecasting * [Run](machine_learning/forecasting/run.py) * [Gaussian Naive Bayes](machine_learning/gaussian_naive_bayes.py) * [Gradient Boosting Regressor](machine_learning/gradient_boosting_regressor.py) * [Gradient Descent](machine_learning/gradient_descent.py) * [K Means Clust](machine_learning/k_means_clust.py) * [K Nearest Neighbours](machine_learning/k_nearest_neighbours.py) * [Knn Sklearn](machine_learning/knn_sklearn.py) * [Linear Discriminant Analysis](machine_learning/linear_discriminant_analysis.py) * [Linear Regression](machine_learning/linear_regression.py) * Local Weighted Learning * [Local Weighted Learning](machine_learning/local_weighted_learning/local_weighted_learning.py) * [Logistic Regression](machine_learning/logistic_regression.py) * Lstm * [Lstm Prediction](machine_learning/lstm/lstm_prediction.py) * [Multilayer Perceptron Classifier](machine_learning/multilayer_perceptron_classifier.py) * [Polymonial Regression](machine_learning/polymonial_regression.py) * [Random Forest Classifier](machine_learning/random_forest_classifier.py) * [Random Forest Regressor](machine_learning/random_forest_regressor.py) * [Scoring Functions](machine_learning/scoring_functions.py) * [Self Organizing Map](machine_learning/self_organizing_map.py) * [Sequential Minimum Optimization](machine_learning/sequential_minimum_optimization.py) * [Similarity Search](machine_learning/similarity_search.py) * [Support Vector Machines](machine_learning/support_vector_machines.py) * [Word Frequency Functions](machine_learning/word_frequency_functions.py) ## Maths * [3N Plus 1](maths/3n_plus_1.py) * [Abs](maths/abs.py) * [Abs Max](maths/abs_max.py) * [Abs Min](maths/abs_min.py) * [Add](maths/add.py) * [Aliquot Sum](maths/aliquot_sum.py) * [Allocation Number](maths/allocation_number.py) * [Area](maths/area.py) * [Area Under Curve](maths/area_under_curve.py) * [Armstrong Numbers](maths/armstrong_numbers.py) * [Average Absolute Deviation](maths/average_absolute_deviation.py) * [Average Mean](maths/average_mean.py) * [Average Median](maths/average_median.py) * [Average Mode](maths/average_mode.py) * [Bailey Borwein Plouffe](maths/bailey_borwein_plouffe.py) * [Basic Maths](maths/basic_maths.py) * [Binary Exp Mod](maths/binary_exp_mod.py) * [Binary Exponentiation](maths/binary_exponentiation.py) * [Binary Exponentiation 2](maths/binary_exponentiation_2.py) * [Binary Exponentiation 3](maths/binary_exponentiation_3.py) * [Binomial Coefficient](maths/binomial_coefficient.py) * [Binomial Distribution](maths/binomial_distribution.py) * [Bisection](maths/bisection.py) * [Carmichael Number](maths/carmichael_number.py) * [Catalan Number](maths/catalan_number.py) * [Ceil](maths/ceil.py) * [Check Polygon](maths/check_polygon.py) * [Chudnovsky Algorithm](maths/chudnovsky_algorithm.py) * [Collatz Sequence](maths/collatz_sequence.py) * [Combinations](maths/combinations.py) * 
[Decimal Isolate](maths/decimal_isolate.py) * [Double Factorial Iterative](maths/double_factorial_iterative.py) * [Double Factorial Recursive](maths/double_factorial_recursive.py) * [Entropy](maths/entropy.py) * [Euclidean Distance](maths/euclidean_distance.py) * [Euclidean Gcd](maths/euclidean_gcd.py) * [Euler Method](maths/euler_method.py) * [Euler Modified](maths/euler_modified.py) * [Eulers Totient](maths/eulers_totient.py) * [Extended Euclidean Algorithm](maths/extended_euclidean_algorithm.py) * [Factorial Iterative](maths/factorial_iterative.py) * [Factorial Recursive](maths/factorial_recursive.py) * [Factors](maths/factors.py) * [Fermat Little Theorem](maths/fermat_little_theorem.py) * [Fibonacci](maths/fibonacci.py) * [Find Max](maths/find_max.py) * [Find Max Recursion](maths/find_max_recursion.py) * [Find Min](maths/find_min.py) * [Find Min Recursion](maths/find_min_recursion.py) * [Floor](maths/floor.py) * [Gamma](maths/gamma.py) * [Gamma Recursive](maths/gamma_recursive.py) * [Gaussian](maths/gaussian.py) * [Greatest Common Divisor](maths/greatest_common_divisor.py) * [Greedy Coin Change](maths/greedy_coin_change.py) * [Hamming Numbers](maths/hamming_numbers.py) * [Hardy Ramanujanalgo](maths/hardy_ramanujanalgo.py) * [Integration By Simpson Approx](maths/integration_by_simpson_approx.py) * [Is Ip V4 Address Valid](maths/is_ip_v4_address_valid.py) * [Is Square Free](maths/is_square_free.py) * [Jaccard Similarity](maths/jaccard_similarity.py) * [Kadanes](maths/kadanes.py) * [Karatsuba](maths/karatsuba.py) * [Krishnamurthy Number](maths/krishnamurthy_number.py) * [Kth Lexicographic Permutation](maths/kth_lexicographic_permutation.py) * [Largest Of Very Large Numbers](maths/largest_of_very_large_numbers.py) * [Largest Subarray Sum](maths/largest_subarray_sum.py) * [Least Common Multiple](maths/least_common_multiple.py) * [Line Length](maths/line_length.py) * [Lucas Lehmer Primality Test](maths/lucas_lehmer_primality_test.py) * [Lucas Series](maths/lucas_series.py) * [Matrix Exponentiation](maths/matrix_exponentiation.py) * [Max Sum Sliding Window](maths/max_sum_sliding_window.py) * [Median Of Two Arrays](maths/median_of_two_arrays.py) * [Miller Rabin](maths/miller_rabin.py) * [Mobius Function](maths/mobius_function.py) * [Modular Exponential](maths/modular_exponential.py) * [Monte Carlo](maths/monte_carlo.py) * [Monte Carlo Dice](maths/monte_carlo_dice.py) * [Nevilles Method](maths/nevilles_method.py) * [Newton Raphson](maths/newton_raphson.py) * [Number Of Digits](maths/number_of_digits.py) * [Numerical Integration](maths/numerical_integration.py) * [Perfect Cube](maths/perfect_cube.py) * [Perfect Number](maths/perfect_number.py) * [Perfect Square](maths/perfect_square.py) * [Persistence](maths/persistence.py) * [Pi Monte Carlo Estimation](maths/pi_monte_carlo_estimation.py) * [Points Are Collinear 3D](maths/points_are_collinear_3d.py) * [Pollard Rho](maths/pollard_rho.py) * [Polynomial Evaluation](maths/polynomial_evaluation.py) * [Power Using Recursion](maths/power_using_recursion.py) * [Prime Check](maths/prime_check.py) * [Prime Factors](maths/prime_factors.py) * [Prime Numbers](maths/prime_numbers.py) * [Prime Sieve Eratosthenes](maths/prime_sieve_eratosthenes.py) * [Primelib](maths/primelib.py) * [Proth Number](maths/proth_number.py) * [Pythagoras](maths/pythagoras.py) * [Qr Decomposition](maths/qr_decomposition.py) * [Quadratic Equations Complex Numbers](maths/quadratic_equations_complex_numbers.py) * [Radians](maths/radians.py) * [Radix2 Fft](maths/radix2_fft.py) * 
[Relu](maths/relu.py) * [Runge Kutta](maths/runge_kutta.py) * [Segmented Sieve](maths/segmented_sieve.py) * Series * [Arithmetic](maths/series/arithmetic.py) * [Geometric](maths/series/geometric.py) * [Geometric Series](maths/series/geometric_series.py) * [Harmonic](maths/series/harmonic.py) * [Harmonic Series](maths/series/harmonic_series.py) * [Hexagonal Numbers](maths/series/hexagonal_numbers.py) * [P Series](maths/series/p_series.py) * [Sieve Of Eratosthenes](maths/sieve_of_eratosthenes.py) * [Sigmoid](maths/sigmoid.py) * [Simpson Rule](maths/simpson_rule.py) * [Sin](maths/sin.py) * [Sock Merchant](maths/sock_merchant.py) * [Softmax](maths/softmax.py) * [Square Root](maths/square_root.py) * [Sum Of Arithmetic Series](maths/sum_of_arithmetic_series.py) * [Sum Of Digits](maths/sum_of_digits.py) * [Sum Of Geometric Progression](maths/sum_of_geometric_progression.py) * [Sylvester Sequence](maths/sylvester_sequence.py) * [Test Prime Check](maths/test_prime_check.py) * [Trapezoidal Rule](maths/trapezoidal_rule.py) * [Triplet Sum](maths/triplet_sum.py) * [Two Pointer](maths/two_pointer.py) * [Two Sum](maths/two_sum.py) * [Ugly Numbers](maths/ugly_numbers.py) * [Volume](maths/volume.py) * [Weird Number](maths/weird_number.py) * [Zellers Congruence](maths/zellers_congruence.py) ## Matrix * [Binary Search Matrix](matrix/binary_search_matrix.py) * [Count Islands In Matrix](matrix/count_islands_in_matrix.py) * [Inverse Of Matrix](matrix/inverse_of_matrix.py) * [Matrix Class](matrix/matrix_class.py) * [Matrix Operation](matrix/matrix_operation.py) * [Nth Fibonacci Using Matrix Exponentiation](matrix/nth_fibonacci_using_matrix_exponentiation.py) * [Rotate Matrix](matrix/rotate_matrix.py) * [Searching In Sorted Matrix](matrix/searching_in_sorted_matrix.py) * [Sherman Morrison](matrix/sherman_morrison.py) * [Spiral Print](matrix/spiral_print.py) * Tests * [Test Matrix Operation](matrix/tests/test_matrix_operation.py) ## Networking Flow * [Ford Fulkerson](networking_flow/ford_fulkerson.py) * [Minimum Cut](networking_flow/minimum_cut.py) ## Neural Network * [2 Hidden Layers Neural Network](neural_network/2_hidden_layers_neural_network.py) * [Back Propagation Neural Network](neural_network/back_propagation_neural_network.py) * [Convolution Neural Network](neural_network/convolution_neural_network.py) * [Perceptron](neural_network/perceptron.py) ## Other * [Activity Selection](other/activity_selection.py) * [Alternative List Arrange](other/alternative_list_arrange.py) * [Check Strong Password](other/check_strong_password.py) * [Davisb Putnamb Logemannb Loveland](other/davisb_putnamb_logemannb_loveland.py) * [Dijkstra Bankers Algorithm](other/dijkstra_bankers_algorithm.py) * [Doomsday](other/doomsday.py) * [Fischer Yates Shuffle](other/fischer_yates_shuffle.py) * [Gauss Easter](other/gauss_easter.py) * [Graham Scan](other/graham_scan.py) * [Greedy](other/greedy.py) * [Least Recently Used](other/least_recently_used.py) * [Lfu Cache](other/lfu_cache.py) * [Linear Congruential Generator](other/linear_congruential_generator.py) * [Lru Cache](other/lru_cache.py) * [Magicdiamondpattern](other/magicdiamondpattern.py) * [Maximum Subarray](other/maximum_subarray.py) * [Nested Brackets](other/nested_brackets.py) * [Password Generator](other/password_generator.py) * [Scoring Algorithm](other/scoring_algorithm.py) * [Sdes](other/sdes.py) * [Tower Of Hanoi](other/tower_of_hanoi.py) ## Physics * [Horizontal Projectile Motion](physics/horizontal_projectile_motion.py) * [Lorentz Transformation Four 
Vector](physics/lorentz_transformation_four_vector.py) * [N Body Simulation](physics/n_body_simulation.py) * [Newtons Law Of Gravitation](physics/newtons_law_of_gravitation.py) * [Newtons Second Law Of Motion](physics/newtons_second_law_of_motion.py) ## Project Euler * Problem 001 * [Sol1](project_euler/problem_001/sol1.py) * [Sol2](project_euler/problem_001/sol2.py) * [Sol3](project_euler/problem_001/sol3.py) * [Sol4](project_euler/problem_001/sol4.py) * [Sol5](project_euler/problem_001/sol5.py) * [Sol6](project_euler/problem_001/sol6.py) * [Sol7](project_euler/problem_001/sol7.py) * Problem 002 * [Sol1](project_euler/problem_002/sol1.py) * [Sol2](project_euler/problem_002/sol2.py) * [Sol3](project_euler/problem_002/sol3.py) * [Sol4](project_euler/problem_002/sol4.py) * [Sol5](project_euler/problem_002/sol5.py) * Problem 003 * [Sol1](project_euler/problem_003/sol1.py) * [Sol2](project_euler/problem_003/sol2.py) * [Sol3](project_euler/problem_003/sol3.py) * Problem 004 * [Sol1](project_euler/problem_004/sol1.py) * [Sol2](project_euler/problem_004/sol2.py) * Problem 005 * [Sol1](project_euler/problem_005/sol1.py) * [Sol2](project_euler/problem_005/sol2.py) * Problem 006 * [Sol1](project_euler/problem_006/sol1.py) * [Sol2](project_euler/problem_006/sol2.py) * [Sol3](project_euler/problem_006/sol3.py) * [Sol4](project_euler/problem_006/sol4.py) * Problem 007 * [Sol1](project_euler/problem_007/sol1.py) * [Sol2](project_euler/problem_007/sol2.py) * [Sol3](project_euler/problem_007/sol3.py) * Problem 008 * [Sol1](project_euler/problem_008/sol1.py) * [Sol2](project_euler/problem_008/sol2.py) * [Sol3](project_euler/problem_008/sol3.py) * Problem 009 * [Sol1](project_euler/problem_009/sol1.py) * [Sol2](project_euler/problem_009/sol2.py) * [Sol3](project_euler/problem_009/sol3.py) * Problem 010 * [Sol1](project_euler/problem_010/sol1.py) * [Sol2](project_euler/problem_010/sol2.py) * [Sol3](project_euler/problem_010/sol3.py) * Problem 011 * [Sol1](project_euler/problem_011/sol1.py) * [Sol2](project_euler/problem_011/sol2.py) * Problem 012 * [Sol1](project_euler/problem_012/sol1.py) * [Sol2](project_euler/problem_012/sol2.py) * Problem 013 * [Sol1](project_euler/problem_013/sol1.py) * Problem 014 * [Sol1](project_euler/problem_014/sol1.py) * [Sol2](project_euler/problem_014/sol2.py) * Problem 015 * [Sol1](project_euler/problem_015/sol1.py) * Problem 016 * [Sol1](project_euler/problem_016/sol1.py) * [Sol2](project_euler/problem_016/sol2.py) * Problem 017 * [Sol1](project_euler/problem_017/sol1.py) * Problem 018 * [Solution](project_euler/problem_018/solution.py) * Problem 019 * [Sol1](project_euler/problem_019/sol1.py) * Problem 020 * [Sol1](project_euler/problem_020/sol1.py) * [Sol2](project_euler/problem_020/sol2.py) * [Sol3](project_euler/problem_020/sol3.py) * [Sol4](project_euler/problem_020/sol4.py) * Problem 021 * [Sol1](project_euler/problem_021/sol1.py) * Problem 022 * [Sol1](project_euler/problem_022/sol1.py) * [Sol2](project_euler/problem_022/sol2.py) * Problem 023 * [Sol1](project_euler/problem_023/sol1.py) * Problem 024 * [Sol1](project_euler/problem_024/sol1.py) * Problem 025 * [Sol1](project_euler/problem_025/sol1.py) * [Sol2](project_euler/problem_025/sol2.py) * [Sol3](project_euler/problem_025/sol3.py) * Problem 026 * [Sol1](project_euler/problem_026/sol1.py) * Problem 027 * [Sol1](project_euler/problem_027/sol1.py) * Problem 028 * [Sol1](project_euler/problem_028/sol1.py) * Problem 029 * [Sol1](project_euler/problem_029/sol1.py) * Problem 030 * 
[Sol1](project_euler/problem_030/sol1.py) * Problem 031 * [Sol1](project_euler/problem_031/sol1.py) * [Sol2](project_euler/problem_031/sol2.py) * Problem 032 * [Sol32](project_euler/problem_032/sol32.py) * Problem 033 * [Sol1](project_euler/problem_033/sol1.py) * Problem 034 * [Sol1](project_euler/problem_034/sol1.py) * Problem 035 * [Sol1](project_euler/problem_035/sol1.py) * Problem 036 * [Sol1](project_euler/problem_036/sol1.py) * Problem 037 * [Sol1](project_euler/problem_037/sol1.py) * Problem 038 * [Sol1](project_euler/problem_038/sol1.py) * Problem 039 * [Sol1](project_euler/problem_039/sol1.py) * Problem 040 * [Sol1](project_euler/problem_040/sol1.py) * Problem 041 * [Sol1](project_euler/problem_041/sol1.py) * Problem 042 * [Solution42](project_euler/problem_042/solution42.py) * Problem 043 * [Sol1](project_euler/problem_043/sol1.py) * Problem 044 * [Sol1](project_euler/problem_044/sol1.py) * Problem 045 * [Sol1](project_euler/problem_045/sol1.py) * Problem 046 * [Sol1](project_euler/problem_046/sol1.py) * Problem 047 * [Sol1](project_euler/problem_047/sol1.py) * Problem 048 * [Sol1](project_euler/problem_048/sol1.py) * Problem 049 * [Sol1](project_euler/problem_049/sol1.py) * Problem 050 * [Sol1](project_euler/problem_050/sol1.py) * Problem 051 * [Sol1](project_euler/problem_051/sol1.py) * Problem 052 * [Sol1](project_euler/problem_052/sol1.py) * Problem 053 * [Sol1](project_euler/problem_053/sol1.py) * Problem 054 * [Sol1](project_euler/problem_054/sol1.py) * [Test Poker Hand](project_euler/problem_054/test_poker_hand.py) * Problem 055 * [Sol1](project_euler/problem_055/sol1.py) * Problem 056 * [Sol1](project_euler/problem_056/sol1.py) * Problem 057 * [Sol1](project_euler/problem_057/sol1.py) * Problem 058 * [Sol1](project_euler/problem_058/sol1.py) * Problem 059 * [Sol1](project_euler/problem_059/sol1.py) * Problem 062 * [Sol1](project_euler/problem_062/sol1.py) * Problem 063 * [Sol1](project_euler/problem_063/sol1.py) * Problem 064 * [Sol1](project_euler/problem_064/sol1.py) * Problem 065 * [Sol1](project_euler/problem_065/sol1.py) * Problem 067 * [Sol1](project_euler/problem_067/sol1.py) * [Sol2](project_euler/problem_067/sol2.py) * Problem 068 * [Sol1](project_euler/problem_068/sol1.py) * Problem 069 * [Sol1](project_euler/problem_069/sol1.py) * Problem 070 * [Sol1](project_euler/problem_070/sol1.py) * Problem 071 * [Sol1](project_euler/problem_071/sol1.py) * Problem 072 * [Sol1](project_euler/problem_072/sol1.py) * [Sol2](project_euler/problem_072/sol2.py) * Problem 074 * [Sol1](project_euler/problem_074/sol1.py) * [Sol2](project_euler/problem_074/sol2.py) * Problem 075 * [Sol1](project_euler/problem_075/sol1.py) * Problem 076 * [Sol1](project_euler/problem_076/sol1.py) * Problem 077 * [Sol1](project_euler/problem_077/sol1.py) * Problem 078 * [Sol1](project_euler/problem_078/sol1.py) * Problem 080 * [Sol1](project_euler/problem_080/sol1.py) * Problem 081 * [Sol1](project_euler/problem_081/sol1.py) * Problem 085 * [Sol1](project_euler/problem_085/sol1.py) * Problem 086 * [Sol1](project_euler/problem_086/sol1.py) * Problem 087 * [Sol1](project_euler/problem_087/sol1.py) * Problem 089 * [Sol1](project_euler/problem_089/sol1.py) * Problem 091 * [Sol1](project_euler/problem_091/sol1.py) * Problem 092 * [Sol1](project_euler/problem_092/sol1.py) * Problem 097 * [Sol1](project_euler/problem_097/sol1.py) * Problem 099 * [Sol1](project_euler/problem_099/sol1.py) * Problem 101 * [Sol1](project_euler/problem_101/sol1.py) * Problem 102 * [Sol1](project_euler/problem_102/sol1.py) * 
Problem 107 * [Sol1](project_euler/problem_107/sol1.py) * Problem 109 * [Sol1](project_euler/problem_109/sol1.py) * Problem 112 * [Sol1](project_euler/problem_112/sol1.py) * Problem 113 * [Sol1](project_euler/problem_113/sol1.py) * Problem 114 * [Sol1](project_euler/problem_114/sol1.py) * Problem 115 * [Sol1](project_euler/problem_115/sol1.py) * Problem 116 * [Sol1](project_euler/problem_116/sol1.py) * Problem 119 * [Sol1](project_euler/problem_119/sol1.py) * Problem 120 * [Sol1](project_euler/problem_120/sol1.py) * Problem 121 * [Sol1](project_euler/problem_121/sol1.py) * Problem 123 * [Sol1](project_euler/problem_123/sol1.py) * Problem 125 * [Sol1](project_euler/problem_125/sol1.py) * Problem 129 * [Sol1](project_euler/problem_129/sol1.py) * Problem 135 * [Sol1](project_euler/problem_135/sol1.py) * Problem 144 * [Sol1](project_euler/problem_144/sol1.py) * Problem 145 * [Sol1](project_euler/problem_145/sol1.py) * Problem 173 * [Sol1](project_euler/problem_173/sol1.py) * Problem 174 * [Sol1](project_euler/problem_174/sol1.py) * Problem 180 * [Sol1](project_euler/problem_180/sol1.py) * Problem 188 * [Sol1](project_euler/problem_188/sol1.py) * Problem 191 * [Sol1](project_euler/problem_191/sol1.py) * Problem 203 * [Sol1](project_euler/problem_203/sol1.py) * Problem 205 * [Sol1](project_euler/problem_205/sol1.py) * Problem 206 * [Sol1](project_euler/problem_206/sol1.py) * Problem 207 * [Sol1](project_euler/problem_207/sol1.py) * Problem 234 * [Sol1](project_euler/problem_234/sol1.py) * Problem 301 * [Sol1](project_euler/problem_301/sol1.py) * Problem 493 * [Sol1](project_euler/problem_493/sol1.py) * Problem 551 * [Sol1](project_euler/problem_551/sol1.py) * Problem 587 * [Sol1](project_euler/problem_587/sol1.py) * Problem 686 * [Sol1](project_euler/problem_686/sol1.py) ## Quantum * [Deutsch Jozsa](quantum/deutsch_jozsa.py) * [Half Adder](quantum/half_adder.py) * [Not Gate](quantum/not_gate.py) * [Quantum Entanglement](quantum/quantum_entanglement.py) * [Ripple Adder Classic](quantum/ripple_adder_classic.py) * [Single Qubit Measure](quantum/single_qubit_measure.py) ## Scheduling * [First Come First Served](scheduling/first_come_first_served.py) * [Highest Response Ratio Next](scheduling/highest_response_ratio_next.py) * [Job Sequencing With Deadline](scheduling/job_sequencing_with_deadline.py) * [Multi Level Feedback Queue](scheduling/multi_level_feedback_queue.py) * [Non Preemptive Shortest Job First](scheduling/non_preemptive_shortest_job_first.py) * [Round Robin](scheduling/round_robin.py) * [Shortest Job First](scheduling/shortest_job_first.py) ## Searches * [Binary Search](searches/binary_search.py) * [Binary Tree Traversal](searches/binary_tree_traversal.py) * [Double Linear Search](searches/double_linear_search.py) * [Double Linear Search Recursion](searches/double_linear_search_recursion.py) * [Fibonacci Search](searches/fibonacci_search.py) * [Hill Climbing](searches/hill_climbing.py) * [Interpolation Search](searches/interpolation_search.py) * [Jump Search](searches/jump_search.py) * [Linear Search](searches/linear_search.py) * [Quick Select](searches/quick_select.py) * [Sentinel Linear Search](searches/sentinel_linear_search.py) * [Simple Binary Search](searches/simple_binary_search.py) * [Simulated Annealing](searches/simulated_annealing.py) * [Tabu Search](searches/tabu_search.py) * [Ternary Search](searches/ternary_search.py) ## Sorts * [Bead Sort](sorts/bead_sort.py) * [Bitonic Sort](sorts/bitonic_sort.py) * [Bogo Sort](sorts/bogo_sort.py) * [Bubble Sort](sorts/bubble_sort.py) * 
[Bucket Sort](sorts/bucket_sort.py) * [Circle Sort](sorts/circle_sort.py) * [Cocktail Shaker Sort](sorts/cocktail_shaker_sort.py) * [Comb Sort](sorts/comb_sort.py) * [Counting Sort](sorts/counting_sort.py) * [Cycle Sort](sorts/cycle_sort.py) * [Double Sort](sorts/double_sort.py) * [Dutch National Flag Sort](sorts/dutch_national_flag_sort.py) * [Exchange Sort](sorts/exchange_sort.py) * [External Sort](sorts/external_sort.py) * [Gnome Sort](sorts/gnome_sort.py) * [Heap Sort](sorts/heap_sort.py) * [Insertion Sort](sorts/insertion_sort.py) * [Intro Sort](sorts/intro_sort.py) * [Iterative Merge Sort](sorts/iterative_merge_sort.py) * [Merge Insertion Sort](sorts/merge_insertion_sort.py) * [Merge Sort](sorts/merge_sort.py) * [Msd Radix Sort](sorts/msd_radix_sort.py) * [Natural Sort](sorts/natural_sort.py) * [Odd Even Sort](sorts/odd_even_sort.py) * [Odd Even Transposition Parallel](sorts/odd_even_transposition_parallel.py) * [Odd Even Transposition Single Threaded](sorts/odd_even_transposition_single_threaded.py) * [Pancake Sort](sorts/pancake_sort.py) * [Patience Sort](sorts/patience_sort.py) * [Pigeon Sort](sorts/pigeon_sort.py) * [Pigeonhole Sort](sorts/pigeonhole_sort.py) * [Quick Sort](sorts/quick_sort.py) * [Quick Sort 3 Partition](sorts/quick_sort_3_partition.py) * [Radix Sort](sorts/radix_sort.py) * [Random Normal Distribution Quicksort](sorts/random_normal_distribution_quicksort.py) * [Random Pivot Quick Sort](sorts/random_pivot_quick_sort.py) * [Recursive Bubble Sort](sorts/recursive_bubble_sort.py) * [Recursive Insertion Sort](sorts/recursive_insertion_sort.py) * [Recursive Mergesort Array](sorts/recursive_mergesort_array.py) * [Recursive Quick Sort](sorts/recursive_quick_sort.py) * [Selection Sort](sorts/selection_sort.py) * [Shell Sort](sorts/shell_sort.py) * [Shrink Shell Sort](sorts/shrink_shell_sort.py) * [Slowsort](sorts/slowsort.py) * [Stooge Sort](sorts/stooge_sort.py) * [Strand Sort](sorts/strand_sort.py) * [Tim Sort](sorts/tim_sort.py) * [Topological Sort](sorts/topological_sort.py) * [Tree Sort](sorts/tree_sort.py) * [Unknown Sort](sorts/unknown_sort.py) * [Wiggle Sort](sorts/wiggle_sort.py) ## Strings * [Aho Corasick](strings/aho_corasick.py) * [Alternative String Arrange](strings/alternative_string_arrange.py) * [Anagrams](strings/anagrams.py) * [Autocomplete Using Trie](strings/autocomplete_using_trie.py) * [Barcode Validator](strings/barcode_validator.py) * [Boyer Moore Search](strings/boyer_moore_search.py) * [Can String Be Rearranged As Palindrome](strings/can_string_be_rearranged_as_palindrome.py) * [Capitalize](strings/capitalize.py) * [Check Anagrams](strings/check_anagrams.py) * [Check Pangram](strings/check_pangram.py) * [Credit Card Validator](strings/credit_card_validator.py) * [Detecting English Programmatically](strings/detecting_english_programmatically.py) * [Dna](strings/dna.py) * [Frequency Finder](strings/frequency_finder.py) * [Hamming Distance](strings/hamming_distance.py) * [Indian Phone Validator](strings/indian_phone_validator.py) * [Is Contains Unique Chars](strings/is_contains_unique_chars.py) * [Is Palindrome](strings/is_palindrome.py) * [Jaro Winkler](strings/jaro_winkler.py) * [Join](strings/join.py) * [Knuth Morris Pratt](strings/knuth_morris_pratt.py) * [Levenshtein Distance](strings/levenshtein_distance.py) * [Lower](strings/lower.py) * [Manacher](strings/manacher.py) * [Min Cost String Conversion](strings/min_cost_string_conversion.py) * [Naive String Search](strings/naive_string_search.py) * [Ngram](strings/ngram.py) * 
[Palindrome](strings/palindrome.py) * [Prefix Function](strings/prefix_function.py) * [Rabin Karp](strings/rabin_karp.py) * [Remove Duplicate](strings/remove_duplicate.py) * [Reverse Letters](strings/reverse_letters.py) * [Reverse Long Words](strings/reverse_long_words.py) * [Reverse Words](strings/reverse_words.py) * [Snake Case To Camel Pascal Case](strings/snake_case_to_camel_pascal_case.py) * [Split](strings/split.py) * [Upper](strings/upper.py) * [Wave](strings/wave.py) * [Wildcard Pattern Matching](strings/wildcard_pattern_matching.py) * [Word Occurrence](strings/word_occurrence.py) * [Word Patterns](strings/word_patterns.py) * [Z Function](strings/z_function.py) ## Web Programming * [Co2 Emission](web_programming/co2_emission.py) * [Covid Stats Via Xpath](web_programming/covid_stats_via_xpath.py) * [Crawl Google Results](web_programming/crawl_google_results.py) * [Crawl Google Scholar Citation](web_programming/crawl_google_scholar_citation.py) * [Currency Converter](web_programming/currency_converter.py) * [Current Stock Price](web_programming/current_stock_price.py) * [Current Weather](web_programming/current_weather.py) * [Daily Horoscope](web_programming/daily_horoscope.py) * [Download Images From Google Query](web_programming/download_images_from_google_query.py) * [Emails From Url](web_programming/emails_from_url.py) * [Fetch Anime And Play](web_programming/fetch_anime_and_play.py) * [Fetch Bbc News](web_programming/fetch_bbc_news.py) * [Fetch Github Info](web_programming/fetch_github_info.py) * [Fetch Jobs](web_programming/fetch_jobs.py) * [Fetch Quotes](web_programming/fetch_quotes.py) * [Fetch Well Rx Price](web_programming/fetch_well_rx_price.py) * [Get Imdb Top 250 Movies Csv](web_programming/get_imdb_top_250_movies_csv.py) * [Get Imdbtop](web_programming/get_imdbtop.py) * [Get Top Hn Posts](web_programming/get_top_hn_posts.py) * [Get User Tweets](web_programming/get_user_tweets.py) * [Giphy](web_programming/giphy.py) * [Instagram Crawler](web_programming/instagram_crawler.py) * [Instagram Pic](web_programming/instagram_pic.py) * [Instagram Video](web_programming/instagram_video.py) * [Nasa Data](web_programming/nasa_data.py) * [Open Google Results](web_programming/open_google_results.py) * [Random Anime Character](web_programming/random_anime_character.py) * [Recaptcha Verification](web_programming/recaptcha_verification.py) * [Reddit](web_programming/reddit.py) * [Search Books By Isbn](web_programming/search_books_by_isbn.py) * [Slack Message](web_programming/slack_message.py) * [Test Fetch Github Info](web_programming/test_fetch_github_info.py) * [World Covid19 Stats](web_programming/world_covid19_stats.py)
## Arithmetic Analysis * [Bisection](arithmetic_analysis/bisection.py) * [Gaussian Elimination](arithmetic_analysis/gaussian_elimination.py) * [In Static Equilibrium](arithmetic_analysis/in_static_equilibrium.py) * [Intersection](arithmetic_analysis/intersection.py) * [Jacobi Iteration Method](arithmetic_analysis/jacobi_iteration_method.py) * [Lu Decomposition](arithmetic_analysis/lu_decomposition.py) * [Newton Forward Interpolation](arithmetic_analysis/newton_forward_interpolation.py) * [Newton Method](arithmetic_analysis/newton_method.py) * [Newton Raphson](arithmetic_analysis/newton_raphson.py) * [Newton Raphson New](arithmetic_analysis/newton_raphson_new.py) * [Secant Method](arithmetic_analysis/secant_method.py) ## Audio Filters * [Butterworth Filter](audio_filters/butterworth_filter.py) * [Equal Loudness Filter](audio_filters/equal_loudness_filter.py) * [Iir Filter](audio_filters/iir_filter.py) * [Show Response](audio_filters/show_response.py) ## Backtracking * [All Combinations](backtracking/all_combinations.py) * [All Permutations](backtracking/all_permutations.py) * [All Subsequences](backtracking/all_subsequences.py) * [Coloring](backtracking/coloring.py) * [Hamiltonian Cycle](backtracking/hamiltonian_cycle.py) * [Knight Tour](backtracking/knight_tour.py) * [Minimax](backtracking/minimax.py) * [N Queens](backtracking/n_queens.py) * [N Queens Math](backtracking/n_queens_math.py) * [Rat In Maze](backtracking/rat_in_maze.py) * [Sudoku](backtracking/sudoku.py) * [Sum Of Subsets](backtracking/sum_of_subsets.py) ## Bit Manipulation * [Binary And Operator](bit_manipulation/binary_and_operator.py) * [Binary Count Setbits](bit_manipulation/binary_count_setbits.py) * [Binary Count Trailing Zeros](bit_manipulation/binary_count_trailing_zeros.py) * [Binary Or Operator](bit_manipulation/binary_or_operator.py) * [Binary Shifts](bit_manipulation/binary_shifts.py) * [Binary Twos Complement](bit_manipulation/binary_twos_complement.py) * [Binary Xor Operator](bit_manipulation/binary_xor_operator.py) * [Count 1S Brian Kernighan Method](bit_manipulation/count_1s_brian_kernighan_method.py) * [Count Number Of One Bits](bit_manipulation/count_number_of_one_bits.py) * [Gray Code Sequence](bit_manipulation/gray_code_sequence.py) * [Reverse Bits](bit_manipulation/reverse_bits.py) * [Single Bit Manipulation Operations](bit_manipulation/single_bit_manipulation_operations.py) ## Blockchain * [Chinese Remainder Theorem](blockchain/chinese_remainder_theorem.py) * [Diophantine Equation](blockchain/diophantine_equation.py) * [Modular Division](blockchain/modular_division.py) ## Boolean Algebra * [Norgate](boolean_algebra/norgate.py) * [Quine Mc Cluskey](boolean_algebra/quine_mc_cluskey.py) ## Cellular Automata * [Conways Game Of Life](cellular_automata/conways_game_of_life.py) * [Game Of Life](cellular_automata/game_of_life.py) * [Nagel Schrekenberg](cellular_automata/nagel_schrekenberg.py) * [One Dimensional](cellular_automata/one_dimensional.py) ## Ciphers * [A1Z26](ciphers/a1z26.py) * [Affine Cipher](ciphers/affine_cipher.py) * [Atbash](ciphers/atbash.py) * [Baconian Cipher](ciphers/baconian_cipher.py) * [Base16](ciphers/base16.py) * [Base32](ciphers/base32.py) * [Base64](ciphers/base64.py) * [Base85](ciphers/base85.py) * [Beaufort Cipher](ciphers/beaufort_cipher.py) * [Bifid](ciphers/bifid.py) * [Brute Force Caesar Cipher](ciphers/brute_force_caesar_cipher.py) * [Caesar Cipher](ciphers/caesar_cipher.py) * [Cryptomath Module](ciphers/cryptomath_module.py) * [Decrypt Caesar With Chi 
Squared](ciphers/decrypt_caesar_with_chi_squared.py) * [Deterministic Miller Rabin](ciphers/deterministic_miller_rabin.py) * [Diffie](ciphers/diffie.py) * [Diffie Hellman](ciphers/diffie_hellman.py) * [Elgamal Key Generator](ciphers/elgamal_key_generator.py) * [Enigma Machine2](ciphers/enigma_machine2.py) * [Hill Cipher](ciphers/hill_cipher.py) * [Mixed Keyword Cypher](ciphers/mixed_keyword_cypher.py) * [Mono Alphabetic Ciphers](ciphers/mono_alphabetic_ciphers.py) * [Morse Code](ciphers/morse_code.py) * [Onepad Cipher](ciphers/onepad_cipher.py) * [Playfair Cipher](ciphers/playfair_cipher.py) * [Polybius](ciphers/polybius.py) * [Porta Cipher](ciphers/porta_cipher.py) * [Rabin Miller](ciphers/rabin_miller.py) * [Rail Fence Cipher](ciphers/rail_fence_cipher.py) * [Rot13](ciphers/rot13.py) * [Rsa Cipher](ciphers/rsa_cipher.py) * [Rsa Factorization](ciphers/rsa_factorization.py) * [Rsa Key Generator](ciphers/rsa_key_generator.py) * [Shuffled Shift Cipher](ciphers/shuffled_shift_cipher.py) * [Simple Keyword Cypher](ciphers/simple_keyword_cypher.py) * [Simple Substitution Cipher](ciphers/simple_substitution_cipher.py) * [Trafid Cipher](ciphers/trafid_cipher.py) * [Transposition Cipher](ciphers/transposition_cipher.py) * [Transposition Cipher Encrypt Decrypt File](ciphers/transposition_cipher_encrypt_decrypt_file.py) * [Vigenere Cipher](ciphers/vigenere_cipher.py) * [Xor Cipher](ciphers/xor_cipher.py) ## Compression * [Burrows Wheeler](compression/burrows_wheeler.py) * [Huffman](compression/huffman.py) * [Lempel Ziv](compression/lempel_ziv.py) * [Lempel Ziv Decompress](compression/lempel_ziv_decompress.py) * [Peak Signal To Noise Ratio](compression/peak_signal_to_noise_ratio.py) * [Run Length Encoding](compression/run_length_encoding.py) ## Computer Vision * [Cnn Classification](computer_vision/cnn_classification.py) * [Flip Augmentation](computer_vision/flip_augmentation.py) * [Harris Corner](computer_vision/harris_corner.py) * [Horn Schunck](computer_vision/horn_schunck.py) * [Mean Threshold](computer_vision/mean_threshold.py) * [Mosaic Augmentation](computer_vision/mosaic_augmentation.py) * [Pooling Functions](computer_vision/pooling_functions.py) ## Conversions * [Astronomical Length Scale Conversion](conversions/astronomical_length_scale_conversion.py) * [Binary To Decimal](conversions/binary_to_decimal.py) * [Binary To Hexadecimal](conversions/binary_to_hexadecimal.py) * [Binary To Octal](conversions/binary_to_octal.py) * [Decimal To Any](conversions/decimal_to_any.py) * [Decimal To Binary](conversions/decimal_to_binary.py) * [Decimal To Binary Recursion](conversions/decimal_to_binary_recursion.py) * [Decimal To Hexadecimal](conversions/decimal_to_hexadecimal.py) * [Decimal To Octal](conversions/decimal_to_octal.py) * [Excel Title To Column](conversions/excel_title_to_column.py) * [Hex To Bin](conversions/hex_to_bin.py) * [Hexadecimal To Decimal](conversions/hexadecimal_to_decimal.py) * [Length Conversion](conversions/length_conversion.py) * [Molecular Chemistry](conversions/molecular_chemistry.py) * [Octal To Decimal](conversions/octal_to_decimal.py) * [Prefix Conversions](conversions/prefix_conversions.py) * [Prefix Conversions String](conversions/prefix_conversions_string.py) * [Pressure Conversions](conversions/pressure_conversions.py) * [Rgb Hsv Conversion](conversions/rgb_hsv_conversion.py) * [Roman Numerals](conversions/roman_numerals.py) * [Speed Conversions](conversions/speed_conversions.py) * [Temperature Conversions](conversions/temperature_conversions.py) * [Volume 
Conversions](conversions/volume_conversions.py) * [Weight Conversion](conversions/weight_conversion.py) ## Data Structures * Binary Tree * [Avl Tree](data_structures/binary_tree/avl_tree.py) * [Basic Binary Tree](data_structures/binary_tree/basic_binary_tree.py) * [Binary Search Tree](data_structures/binary_tree/binary_search_tree.py) * [Binary Search Tree Recursive](data_structures/binary_tree/binary_search_tree_recursive.py) * [Binary Tree Mirror](data_structures/binary_tree/binary_tree_mirror.py) * [Binary Tree Node Sum](data_structures/binary_tree/binary_tree_node_sum.py) * [Binary Tree Traversals](data_structures/binary_tree/binary_tree_traversals.py) * [Fenwick Tree](data_structures/binary_tree/fenwick_tree.py) * [Inorder Tree Traversal 2022](data_structures/binary_tree/inorder_tree_traversal_2022.py) * [Lazy Segment Tree](data_structures/binary_tree/lazy_segment_tree.py) * [Lowest Common Ancestor](data_structures/binary_tree/lowest_common_ancestor.py) * [Maximum Fenwick Tree](data_structures/binary_tree/maximum_fenwick_tree.py) * [Merge Two Binary Trees](data_structures/binary_tree/merge_two_binary_trees.py) * [Non Recursive Segment Tree](data_structures/binary_tree/non_recursive_segment_tree.py) * [Number Of Possible Binary Trees](data_structures/binary_tree/number_of_possible_binary_trees.py) * [Red Black Tree](data_structures/binary_tree/red_black_tree.py) * [Segment Tree](data_structures/binary_tree/segment_tree.py) * [Segment Tree Other](data_structures/binary_tree/segment_tree_other.py) * [Treap](data_structures/binary_tree/treap.py) * [Wavelet Tree](data_structures/binary_tree/wavelet_tree.py) * Disjoint Set * [Alternate Disjoint Set](data_structures/disjoint_set/alternate_disjoint_set.py) * [Disjoint Set](data_structures/disjoint_set/disjoint_set.py) * Hashing * [Double Hash](data_structures/hashing/double_hash.py) * [Hash Table](data_structures/hashing/hash_table.py) * [Hash Table With Linked List](data_structures/hashing/hash_table_with_linked_list.py) * Number Theory * [Prime Numbers](data_structures/hashing/number_theory/prime_numbers.py) * [Quadratic Probing](data_structures/hashing/quadratic_probing.py) * Heap * [Binomial Heap](data_structures/heap/binomial_heap.py) * [Heap](data_structures/heap/heap.py) * [Heap Generic](data_structures/heap/heap_generic.py) * [Max Heap](data_structures/heap/max_heap.py) * [Min Heap](data_structures/heap/min_heap.py) * [Randomized Heap](data_structures/heap/randomized_heap.py) * [Skew Heap](data_structures/heap/skew_heap.py) * Linked List * [Circular Linked List](data_structures/linked_list/circular_linked_list.py) * [Deque Doubly](data_structures/linked_list/deque_doubly.py) * [Doubly Linked List](data_structures/linked_list/doubly_linked_list.py) * [Doubly Linked List Two](data_structures/linked_list/doubly_linked_list_two.py) * [From Sequence](data_structures/linked_list/from_sequence.py) * [Has Loop](data_structures/linked_list/has_loop.py) * [Is Palindrome](data_structures/linked_list/is_palindrome.py) * [Merge Two Lists](data_structures/linked_list/merge_two_lists.py) * [Middle Element Of Linked List](data_structures/linked_list/middle_element_of_linked_list.py) * [Print Reverse](data_structures/linked_list/print_reverse.py) * [Singly Linked List](data_structures/linked_list/singly_linked_list.py) * [Skip List](data_structures/linked_list/skip_list.py) * [Swap Nodes](data_structures/linked_list/swap_nodes.py) * Queue * [Circular Queue](data_structures/queue/circular_queue.py) * [Circular Queue Linked 
List](data_structures/queue/circular_queue_linked_list.py) * [Double Ended Queue](data_structures/queue/double_ended_queue.py) * [Linked Queue](data_structures/queue/linked_queue.py) * [Priority Queue Using List](data_structures/queue/priority_queue_using_list.py) * [Queue On List](data_structures/queue/queue_on_list.py) * [Queue On Pseudo Stack](data_structures/queue/queue_on_pseudo_stack.py) * Stacks * [Balanced Parentheses](data_structures/stacks/balanced_parentheses.py) * [Dijkstras Two Stack Algorithm](data_structures/stacks/dijkstras_two_stack_algorithm.py) * [Evaluate Postfix Notations](data_structures/stacks/evaluate_postfix_notations.py) * [Infix To Postfix Conversion](data_structures/stacks/infix_to_postfix_conversion.py) * [Infix To Prefix Conversion](data_structures/stacks/infix_to_prefix_conversion.py) * [Next Greater Element](data_structures/stacks/next_greater_element.py) * [Postfix Evaluation](data_structures/stacks/postfix_evaluation.py) * [Prefix Evaluation](data_structures/stacks/prefix_evaluation.py) * [Stack](data_structures/stacks/stack.py) * [Stack With Doubly Linked List](data_structures/stacks/stack_with_doubly_linked_list.py) * [Stack With Singly Linked List](data_structures/stacks/stack_with_singly_linked_list.py) * [Stock Span Problem](data_structures/stacks/stock_span_problem.py) * Trie * [Trie](data_structures/trie/trie.py) ## Digital Image Processing * [Change Brightness](digital_image_processing/change_brightness.py) * [Change Contrast](digital_image_processing/change_contrast.py) * [Convert To Negative](digital_image_processing/convert_to_negative.py) * Dithering * [Burkes](digital_image_processing/dithering/burkes.py) * Edge Detection * [Canny](digital_image_processing/edge_detection/canny.py) * Filters * [Bilateral Filter](digital_image_processing/filters/bilateral_filter.py) * [Convolve](digital_image_processing/filters/convolve.py) * [Gabor Filter](digital_image_processing/filters/gabor_filter.py) * [Gaussian Filter](digital_image_processing/filters/gaussian_filter.py) * [Local Binary Pattern](digital_image_processing/filters/local_binary_pattern.py) * [Median Filter](digital_image_processing/filters/median_filter.py) * [Sobel Filter](digital_image_processing/filters/sobel_filter.py) * Histogram Equalization * [Histogram Stretch](digital_image_processing/histogram_equalization/histogram_stretch.py) * [Index Calculation](digital_image_processing/index_calculation.py) * Morphological Operations * [Dilation Operation](digital_image_processing/morphological_operations/dilation_operation.py) * [Erosion Operation](digital_image_processing/morphological_operations/erosion_operation.py) * Resize * [Resize](digital_image_processing/resize/resize.py) * Rotation * [Rotation](digital_image_processing/rotation/rotation.py) * [Sepia](digital_image_processing/sepia.py) * [Test Digital Image Processing](digital_image_processing/test_digital_image_processing.py) ## Divide And Conquer * [Closest Pair Of Points](divide_and_conquer/closest_pair_of_points.py) * [Convex Hull](divide_and_conquer/convex_hull.py) * [Heaps Algorithm](divide_and_conquer/heaps_algorithm.py) * [Heaps Algorithm Iterative](divide_and_conquer/heaps_algorithm_iterative.py) * [Inversions](divide_and_conquer/inversions.py) * [Kth Order Statistic](divide_and_conquer/kth_order_statistic.py) * [Max Difference Pair](divide_and_conquer/max_difference_pair.py) * [Max Subarray Sum](divide_and_conquer/max_subarray_sum.py) * [Mergesort](divide_and_conquer/mergesort.py) * [Peak](divide_and_conquer/peak.py) * 
[Power](divide_and_conquer/power.py) * [Strassen Matrix Multiplication](divide_and_conquer/strassen_matrix_multiplication.py) ## Dynamic Programming * [Abbreviation](dynamic_programming/abbreviation.py) * [All Construct](dynamic_programming/all_construct.py) * [Bitmask](dynamic_programming/bitmask.py) * [Catalan Numbers](dynamic_programming/catalan_numbers.py) * [Climbing Stairs](dynamic_programming/climbing_stairs.py) * [Edit Distance](dynamic_programming/edit_distance.py) * [Factorial](dynamic_programming/factorial.py) * [Fast Fibonacci](dynamic_programming/fast_fibonacci.py) * [Fibonacci](dynamic_programming/fibonacci.py) * [Floyd Warshall](dynamic_programming/floyd_warshall.py) * [Integer Partition](dynamic_programming/integer_partition.py) * [Iterating Through Submasks](dynamic_programming/iterating_through_submasks.py) * [Knapsack](dynamic_programming/knapsack.py) * [Longest Common Subsequence](dynamic_programming/longest_common_subsequence.py) * [Longest Increasing Subsequence](dynamic_programming/longest_increasing_subsequence.py) * [Longest Increasing Subsequence O(Nlogn)](dynamic_programming/longest_increasing_subsequence_o(nlogn).py) * [Longest Sub Array](dynamic_programming/longest_sub_array.py) * [Matrix Chain Order](dynamic_programming/matrix_chain_order.py) * [Max Non Adjacent Sum](dynamic_programming/max_non_adjacent_sum.py) * [Max Sub Array](dynamic_programming/max_sub_array.py) * [Max Sum Contiguous Subsequence](dynamic_programming/max_sum_contiguous_subsequence.py) * [Minimum Coin Change](dynamic_programming/minimum_coin_change.py) * [Minimum Cost Path](dynamic_programming/minimum_cost_path.py) * [Minimum Partition](dynamic_programming/minimum_partition.py) * [Minimum Steps To One](dynamic_programming/minimum_steps_to_one.py) * [Optimal Binary Search Tree](dynamic_programming/optimal_binary_search_tree.py) * [Rod Cutting](dynamic_programming/rod_cutting.py) * [Subset Generation](dynamic_programming/subset_generation.py) * [Sum Of Subset](dynamic_programming/sum_of_subset.py) ## Electronics * [Carrier Concentration](electronics/carrier_concentration.py) * [Coulombs Law](electronics/coulombs_law.py) * [Electric Power](electronics/electric_power.py) * [Ohms Law](electronics/ohms_law.py) ## File Transfer * [Receive File](file_transfer/receive_file.py) * [Send File](file_transfer/send_file.py) * Tests * [Test Send File](file_transfer/tests/test_send_file.py) ## Financial * [Equated Monthly Installments](financial/equated_monthly_installments.py) * [Interest](financial/interest.py) ## Fractals * [Julia Sets](fractals/julia_sets.py) * [Koch Snowflake](fractals/koch_snowflake.py) * [Mandelbrot](fractals/mandelbrot.py) * [Sierpinski Triangle](fractals/sierpinski_triangle.py) ## Fuzzy Logic * [Fuzzy Operations](fuzzy_logic/fuzzy_operations.py) ## Genetic Algorithm * [Basic String](genetic_algorithm/basic_string.py) ## Geodesy * [Haversine Distance](geodesy/haversine_distance.py) * [Lamberts Ellipsoidal Distance](geodesy/lamberts_ellipsoidal_distance.py) ## Graphics * [Bezier Curve](graphics/bezier_curve.py) * [Vector3 For 2D Rendering](graphics/vector3_for_2d_rendering.py) ## Graphs * [A Star](graphs/a_star.py) * [Articulation Points](graphs/articulation_points.py) * [Basic Graphs](graphs/basic_graphs.py) * [Bellman Ford](graphs/bellman_ford.py) * [Bfs Shortest Path](graphs/bfs_shortest_path.py) * [Bfs Zero One Shortest Path](graphs/bfs_zero_one_shortest_path.py) * [Bidirectional A Star](graphs/bidirectional_a_star.py) * [Bidirectional Breadth First 
Search](graphs/bidirectional_breadth_first_search.py) * [Boruvka](graphs/boruvka.py) * [Breadth First Search](graphs/breadth_first_search.py) * [Breadth First Search 2](graphs/breadth_first_search_2.py) * [Breadth First Search Shortest Path](graphs/breadth_first_search_shortest_path.py) * [Check Bipartite Graph Bfs](graphs/check_bipartite_graph_bfs.py) * [Check Bipartite Graph Dfs](graphs/check_bipartite_graph_dfs.py) * [Check Cycle](graphs/check_cycle.py) * [Connected Components](graphs/connected_components.py) * [Depth First Search](graphs/depth_first_search.py) * [Depth First Search 2](graphs/depth_first_search_2.py) * [Dijkstra](graphs/dijkstra.py) * [Dijkstra 2](graphs/dijkstra_2.py) * [Dijkstra Algorithm](graphs/dijkstra_algorithm.py) * [Dinic](graphs/dinic.py) * [Directed And Undirected (Weighted) Graph](graphs/directed_and_undirected_(weighted)_graph.py) * [Edmonds Karp Multiple Source And Sink](graphs/edmonds_karp_multiple_source_and_sink.py) * [Eulerian Path And Circuit For Undirected Graph](graphs/eulerian_path_and_circuit_for_undirected_graph.py) * [Even Tree](graphs/even_tree.py) * [Finding Bridges](graphs/finding_bridges.py) * [Frequent Pattern Graph Miner](graphs/frequent_pattern_graph_miner.py) * [G Topological Sort](graphs/g_topological_sort.py) * [Gale Shapley Bigraph](graphs/gale_shapley_bigraph.py) * [Graph List](graphs/graph_list.py) * [Graph Matrix](graphs/graph_matrix.py) * [Graphs Floyd Warshall](graphs/graphs_floyd_warshall.py) * [Greedy Best First](graphs/greedy_best_first.py) * [Greedy Min Vertex Cover](graphs/greedy_min_vertex_cover.py) * [Kahns Algorithm Long](graphs/kahns_algorithm_long.py) * [Kahns Algorithm Topo](graphs/kahns_algorithm_topo.py) * [Karger](graphs/karger.py) * [Markov Chain](graphs/markov_chain.py) * [Matching Min Vertex Cover](graphs/matching_min_vertex_cover.py) * [Minimum Path Sum](graphs/minimum_path_sum.py) * [Minimum Spanning Tree Boruvka](graphs/minimum_spanning_tree_boruvka.py) * [Minimum Spanning Tree Kruskal](graphs/minimum_spanning_tree_kruskal.py) * [Minimum Spanning Tree Kruskal2](graphs/minimum_spanning_tree_kruskal2.py) * [Minimum Spanning Tree Prims](graphs/minimum_spanning_tree_prims.py) * [Minimum Spanning Tree Prims2](graphs/minimum_spanning_tree_prims2.py) * [Multi Heuristic Astar](graphs/multi_heuristic_astar.py) * [Page Rank](graphs/page_rank.py) * [Prim](graphs/prim.py) * [Random Graph Generator](graphs/random_graph_generator.py) * [Scc Kosaraju](graphs/scc_kosaraju.py) * [Strongly Connected Components](graphs/strongly_connected_components.py) * [Tarjans Scc](graphs/tarjans_scc.py) * Tests * [Test Min Spanning Tree Kruskal](graphs/tests/test_min_spanning_tree_kruskal.py) * [Test Min Spanning Tree Prim](graphs/tests/test_min_spanning_tree_prim.py) ## Greedy Methods * [Fractional Knapsack](greedy_methods/fractional_knapsack.py) * [Fractional Knapsack 2](greedy_methods/fractional_knapsack_2.py) * [Optimal Merge Pattern](greedy_methods/optimal_merge_pattern.py) ## Hashes * [Adler32](hashes/adler32.py) * [Chaos Machine](hashes/chaos_machine.py) * [Djb2](hashes/djb2.py) * [Enigma Machine](hashes/enigma_machine.py) * [Hamming Code](hashes/hamming_code.py) * [Luhn](hashes/luhn.py) * [Md5](hashes/md5.py) * [Sdbm](hashes/sdbm.py) * [Sha1](hashes/sha1.py) * [Sha256](hashes/sha256.py) ## Knapsack * [Greedy Knapsack](knapsack/greedy_knapsack.py) * [Knapsack](knapsack/knapsack.py) * Tests * [Test Greedy Knapsack](knapsack/tests/test_greedy_knapsack.py) * [Test Knapsack](knapsack/tests/test_knapsack.py) ## Linear Algebra * Src * 
[Conjugate Gradient](linear_algebra/src/conjugate_gradient.py) * [Lib](linear_algebra/src/lib.py) * [Polynom For Points](linear_algebra/src/polynom_for_points.py) * [Power Iteration](linear_algebra/src/power_iteration.py) * [Rayleigh Quotient](linear_algebra/src/rayleigh_quotient.py) * [Schur Complement](linear_algebra/src/schur_complement.py) * [Test Linear Algebra](linear_algebra/src/test_linear_algebra.py) * [Transformations 2D](linear_algebra/src/transformations_2d.py) ## Machine Learning * [Astar](machine_learning/astar.py) * [Data Transformations](machine_learning/data_transformations.py) * [Decision Tree](machine_learning/decision_tree.py) * Forecasting * [Run](machine_learning/forecasting/run.py) * [Gaussian Naive Bayes](machine_learning/gaussian_naive_bayes.py) * [Gradient Boosting Regressor](machine_learning/gradient_boosting_regressor.py) * [Gradient Descent](machine_learning/gradient_descent.py) * [K Means Clust](machine_learning/k_means_clust.py) * [K Nearest Neighbours](machine_learning/k_nearest_neighbours.py) * [Knn Sklearn](machine_learning/knn_sklearn.py) * [Linear Discriminant Analysis](machine_learning/linear_discriminant_analysis.py) * [Linear Regression](machine_learning/linear_regression.py) * Local Weighted Learning * [Local Weighted Learning](machine_learning/local_weighted_learning/local_weighted_learning.py) * [Logistic Regression](machine_learning/logistic_regression.py) * Lstm * [Lstm Prediction](machine_learning/lstm/lstm_prediction.py) * [Multilayer Perceptron Classifier](machine_learning/multilayer_perceptron_classifier.py) * [Polymonial Regression](machine_learning/polymonial_regression.py) * [Random Forest Classifier](machine_learning/random_forest_classifier.py) * [Random Forest Regressor](machine_learning/random_forest_regressor.py) * [Scoring Functions](machine_learning/scoring_functions.py) * [Self Organizing Map](machine_learning/self_organizing_map.py) * [Sequential Minimum Optimization](machine_learning/sequential_minimum_optimization.py) * [Similarity Search](machine_learning/similarity_search.py) * [Support Vector Machines](machine_learning/support_vector_machines.py) * [Word Frequency Functions](machine_learning/word_frequency_functions.py) ## Maths * [3N Plus 1](maths/3n_plus_1.py) * [Abs](maths/abs.py) * [Abs Max](maths/abs_max.py) * [Abs Min](maths/abs_min.py) * [Add](maths/add.py) * [Aliquot Sum](maths/aliquot_sum.py) * [Allocation Number](maths/allocation_number.py) * [Area](maths/area.py) * [Area Under Curve](maths/area_under_curve.py) * [Armstrong Numbers](maths/armstrong_numbers.py) * [Average Absolute Deviation](maths/average_absolute_deviation.py) * [Average Mean](maths/average_mean.py) * [Average Median](maths/average_median.py) * [Average Mode](maths/average_mode.py) * [Bailey Borwein Plouffe](maths/bailey_borwein_plouffe.py) * [Basic Maths](maths/basic_maths.py) * [Binary Exp Mod](maths/binary_exp_mod.py) * [Binary Exponentiation](maths/binary_exponentiation.py) * [Binary Exponentiation 2](maths/binary_exponentiation_2.py) * [Binary Exponentiation 3](maths/binary_exponentiation_3.py) * [Binomial Coefficient](maths/binomial_coefficient.py) * [Binomial Distribution](maths/binomial_distribution.py) * [Bisection](maths/bisection.py) * [Carmichael Number](maths/carmichael_number.py) * [Catalan Number](maths/catalan_number.py) * [Ceil](maths/ceil.py) * [Check Polygon](maths/check_polygon.py) * [Chudnovsky Algorithm](maths/chudnovsky_algorithm.py) * [Collatz Sequence](maths/collatz_sequence.py) * [Combinations](maths/combinations.py) * 
[Decimal Isolate](maths/decimal_isolate.py) * [Double Factorial Iterative](maths/double_factorial_iterative.py) * [Double Factorial Recursive](maths/double_factorial_recursive.py) * [Entropy](maths/entropy.py) * [Euclidean Distance](maths/euclidean_distance.py) * [Euclidean Gcd](maths/euclidean_gcd.py) * [Euler Method](maths/euler_method.py) * [Euler Modified](maths/euler_modified.py) * [Eulers Totient](maths/eulers_totient.py) * [Extended Euclidean Algorithm](maths/extended_euclidean_algorithm.py) * [Factorial Iterative](maths/factorial_iterative.py) * [Factorial Recursive](maths/factorial_recursive.py) * [Factors](maths/factors.py) * [Fermat Little Theorem](maths/fermat_little_theorem.py) * [Fibonacci](maths/fibonacci.py) * [Find Max](maths/find_max.py) * [Find Max Recursion](maths/find_max_recursion.py) * [Find Min](maths/find_min.py) * [Find Min Recursion](maths/find_min_recursion.py) * [Floor](maths/floor.py) * [Gamma](maths/gamma.py) * [Gamma Recursive](maths/gamma_recursive.py) * [Gaussian](maths/gaussian.py) * [Greatest Common Divisor](maths/greatest_common_divisor.py) * [Greedy Coin Change](maths/greedy_coin_change.py) * [Hamming Numbers](maths/hamming_numbers.py) * [Hardy Ramanujanalgo](maths/hardy_ramanujanalgo.py) * [Integration By Simpson Approx](maths/integration_by_simpson_approx.py) * [Is Ip V4 Address Valid](maths/is_ip_v4_address_valid.py) * [Is Square Free](maths/is_square_free.py) * [Jaccard Similarity](maths/jaccard_similarity.py) * [Kadanes](maths/kadanes.py) * [Karatsuba](maths/karatsuba.py) * [Krishnamurthy Number](maths/krishnamurthy_number.py) * [Kth Lexicographic Permutation](maths/kth_lexicographic_permutation.py) * [Largest Of Very Large Numbers](maths/largest_of_very_large_numbers.py) * [Largest Subarray Sum](maths/largest_subarray_sum.py) * [Least Common Multiple](maths/least_common_multiple.py) * [Line Length](maths/line_length.py) * [Lucas Lehmer Primality Test](maths/lucas_lehmer_primality_test.py) * [Lucas Series](maths/lucas_series.py) * [Matrix Exponentiation](maths/matrix_exponentiation.py) * [Max Sum Sliding Window](maths/max_sum_sliding_window.py) * [Median Of Two Arrays](maths/median_of_two_arrays.py) * [Miller Rabin](maths/miller_rabin.py) * [Mobius Function](maths/mobius_function.py) * [Modular Exponential](maths/modular_exponential.py) * [Monte Carlo](maths/monte_carlo.py) * [Monte Carlo Dice](maths/monte_carlo_dice.py) * [Nevilles Method](maths/nevilles_method.py) * [Newton Raphson](maths/newton_raphson.py) * [Number Of Digits](maths/number_of_digits.py) * [Numerical Integration](maths/numerical_integration.py) * [Perfect Cube](maths/perfect_cube.py) * [Perfect Number](maths/perfect_number.py) * [Perfect Square](maths/perfect_square.py) * [Persistence](maths/persistence.py) * [Pi Monte Carlo Estimation](maths/pi_monte_carlo_estimation.py) * [Points Are Collinear 3D](maths/points_are_collinear_3d.py) * [Pollard Rho](maths/pollard_rho.py) * [Polynomial Evaluation](maths/polynomial_evaluation.py) * [Power Using Recursion](maths/power_using_recursion.py) * [Prime Check](maths/prime_check.py) * [Prime Factors](maths/prime_factors.py) * [Prime Numbers](maths/prime_numbers.py) * [Prime Sieve Eratosthenes](maths/prime_sieve_eratosthenes.py) * [Primelib](maths/primelib.py) * [Proth Number](maths/proth_number.py) * [Pythagoras](maths/pythagoras.py) * [Qr Decomposition](maths/qr_decomposition.py) * [Quadratic Equations Complex Numbers](maths/quadratic_equations_complex_numbers.py) * [Radians](maths/radians.py) * [Radix2 Fft](maths/radix2_fft.py) * 
[Relu](maths/relu.py) * [Runge Kutta](maths/runge_kutta.py) * [Segmented Sieve](maths/segmented_sieve.py) * Series * [Arithmetic](maths/series/arithmetic.py) * [Geometric](maths/series/geometric.py) * [Geometric Series](maths/series/geometric_series.py) * [Harmonic](maths/series/harmonic.py) * [Harmonic Series](maths/series/harmonic_series.py) * [Hexagonal Numbers](maths/series/hexagonal_numbers.py) * [P Series](maths/series/p_series.py) * [Sieve Of Eratosthenes](maths/sieve_of_eratosthenes.py) * [Sigmoid](maths/sigmoid.py) * [Simpson Rule](maths/simpson_rule.py) * [Sin](maths/sin.py) * [Sock Merchant](maths/sock_merchant.py) * [Softmax](maths/softmax.py) * [Square Root](maths/square_root.py) * [Sum Of Arithmetic Series](maths/sum_of_arithmetic_series.py) * [Sum Of Digits](maths/sum_of_digits.py) * [Sum Of Geometric Progression](maths/sum_of_geometric_progression.py) * [Sylvester Sequence](maths/sylvester_sequence.py) * [Test Prime Check](maths/test_prime_check.py) * [Trapezoidal Rule](maths/trapezoidal_rule.py) * [Triplet Sum](maths/triplet_sum.py) * [Two Pointer](maths/two_pointer.py) * [Two Sum](maths/two_sum.py) * [Ugly Numbers](maths/ugly_numbers.py) * [Volume](maths/volume.py) * [Weird Number](maths/weird_number.py) * [Zellers Congruence](maths/zellers_congruence.py) ## Matrix * [Binary Search Matrix](matrix/binary_search_matrix.py) * [Count Islands In Matrix](matrix/count_islands_in_matrix.py) * [Inverse Of Matrix](matrix/inverse_of_matrix.py) * [Matrix Class](matrix/matrix_class.py) * [Matrix Operation](matrix/matrix_operation.py) * [Nth Fibonacci Using Matrix Exponentiation](matrix/nth_fibonacci_using_matrix_exponentiation.py) * [Rotate Matrix](matrix/rotate_matrix.py) * [Searching In Sorted Matrix](matrix/searching_in_sorted_matrix.py) * [Sherman Morrison](matrix/sherman_morrison.py) * [Spiral Print](matrix/spiral_print.py) * Tests * [Test Matrix Operation](matrix/tests/test_matrix_operation.py) ## Networking Flow * [Ford Fulkerson](networking_flow/ford_fulkerson.py) * [Minimum Cut](networking_flow/minimum_cut.py) ## Neural Network * [2 Hidden Layers Neural Network](neural_network/2_hidden_layers_neural_network.py) * [Back Propagation Neural Network](neural_network/back_propagation_neural_network.py) * [Convolution Neural Network](neural_network/convolution_neural_network.py) * [Perceptron](neural_network/perceptron.py) ## Other * [Activity Selection](other/activity_selection.py) * [Alternative List Arrange](other/alternative_list_arrange.py) * [Check Strong Password](other/check_strong_password.py) * [Davisb Putnamb Logemannb Loveland](other/davisb_putnamb_logemannb_loveland.py) * [Dijkstra Bankers Algorithm](other/dijkstra_bankers_algorithm.py) * [Doomsday](other/doomsday.py) * [Fischer Yates Shuffle](other/fischer_yates_shuffle.py) * [Gauss Easter](other/gauss_easter.py) * [Graham Scan](other/graham_scan.py) * [Greedy](other/greedy.py) * [Least Recently Used](other/least_recently_used.py) * [Lfu Cache](other/lfu_cache.py) * [Linear Congruential Generator](other/linear_congruential_generator.py) * [Lru Cache](other/lru_cache.py) * [Magicdiamondpattern](other/magicdiamondpattern.py) * [Maximum Subarray](other/maximum_subarray.py) * [Nested Brackets](other/nested_brackets.py) * [Password Generator](other/password_generator.py) * [Scoring Algorithm](other/scoring_algorithm.py) * [Sdes](other/sdes.py) * [Tower Of Hanoi](other/tower_of_hanoi.py) ## Physics * [Casimir Effect](physics/casimir_effect.py) * [Horizontal Projectile Motion](physics/horizontal_projectile_motion.py) * 
[Lorentz Transformation Four Vector](physics/lorentz_transformation_four_vector.py) * [N Body Simulation](physics/n_body_simulation.py) * [Newtons Law Of Gravitation](physics/newtons_law_of_gravitation.py) * [Newtons Second Law Of Motion](physics/newtons_second_law_of_motion.py) ## Project Euler * Problem 001 * [Sol1](project_euler/problem_001/sol1.py) * [Sol2](project_euler/problem_001/sol2.py) * [Sol3](project_euler/problem_001/sol3.py) * [Sol4](project_euler/problem_001/sol4.py) * [Sol5](project_euler/problem_001/sol5.py) * [Sol6](project_euler/problem_001/sol6.py) * [Sol7](project_euler/problem_001/sol7.py) * Problem 002 * [Sol1](project_euler/problem_002/sol1.py) * [Sol2](project_euler/problem_002/sol2.py) * [Sol3](project_euler/problem_002/sol3.py) * [Sol4](project_euler/problem_002/sol4.py) * [Sol5](project_euler/problem_002/sol5.py) * Problem 003 * [Sol1](project_euler/problem_003/sol1.py) * [Sol2](project_euler/problem_003/sol2.py) * [Sol3](project_euler/problem_003/sol3.py) * Problem 004 * [Sol1](project_euler/problem_004/sol1.py) * [Sol2](project_euler/problem_004/sol2.py) * Problem 005 * [Sol1](project_euler/problem_005/sol1.py) * [Sol2](project_euler/problem_005/sol2.py) * Problem 006 * [Sol1](project_euler/problem_006/sol1.py) * [Sol2](project_euler/problem_006/sol2.py) * [Sol3](project_euler/problem_006/sol3.py) * [Sol4](project_euler/problem_006/sol4.py) * Problem 007 * [Sol1](project_euler/problem_007/sol1.py) * [Sol2](project_euler/problem_007/sol2.py) * [Sol3](project_euler/problem_007/sol3.py) * Problem 008 * [Sol1](project_euler/problem_008/sol1.py) * [Sol2](project_euler/problem_008/sol2.py) * [Sol3](project_euler/problem_008/sol3.py) * Problem 009 * [Sol1](project_euler/problem_009/sol1.py) * [Sol2](project_euler/problem_009/sol2.py) * [Sol3](project_euler/problem_009/sol3.py) * Problem 010 * [Sol1](project_euler/problem_010/sol1.py) * [Sol2](project_euler/problem_010/sol2.py) * [Sol3](project_euler/problem_010/sol3.py) * Problem 011 * [Sol1](project_euler/problem_011/sol1.py) * [Sol2](project_euler/problem_011/sol2.py) * Problem 012 * [Sol1](project_euler/problem_012/sol1.py) * [Sol2](project_euler/problem_012/sol2.py) * Problem 013 * [Sol1](project_euler/problem_013/sol1.py) * Problem 014 * [Sol1](project_euler/problem_014/sol1.py) * [Sol2](project_euler/problem_014/sol2.py) * Problem 015 * [Sol1](project_euler/problem_015/sol1.py) * Problem 016 * [Sol1](project_euler/problem_016/sol1.py) * [Sol2](project_euler/problem_016/sol2.py) * Problem 017 * [Sol1](project_euler/problem_017/sol1.py) * Problem 018 * [Solution](project_euler/problem_018/solution.py) * Problem 019 * [Sol1](project_euler/problem_019/sol1.py) * Problem 020 * [Sol1](project_euler/problem_020/sol1.py) * [Sol2](project_euler/problem_020/sol2.py) * [Sol3](project_euler/problem_020/sol3.py) * [Sol4](project_euler/problem_020/sol4.py) * Problem 021 * [Sol1](project_euler/problem_021/sol1.py) * Problem 022 * [Sol1](project_euler/problem_022/sol1.py) * [Sol2](project_euler/problem_022/sol2.py) * Problem 023 * [Sol1](project_euler/problem_023/sol1.py) * Problem 024 * [Sol1](project_euler/problem_024/sol1.py) * Problem 025 * [Sol1](project_euler/problem_025/sol1.py) * [Sol2](project_euler/problem_025/sol2.py) * [Sol3](project_euler/problem_025/sol3.py) * Problem 026 * [Sol1](project_euler/problem_026/sol1.py) * Problem 027 * [Sol1](project_euler/problem_027/sol1.py) * Problem 028 * [Sol1](project_euler/problem_028/sol1.py) * Problem 029 * [Sol1](project_euler/problem_029/sol1.py) * Problem 030 * 
[Sol1](project_euler/problem_030/sol1.py) * Problem 031 * [Sol1](project_euler/problem_031/sol1.py) * [Sol2](project_euler/problem_031/sol2.py) * Problem 032 * [Sol32](project_euler/problem_032/sol32.py) * Problem 033 * [Sol1](project_euler/problem_033/sol1.py) * Problem 034 * [Sol1](project_euler/problem_034/sol1.py) * Problem 035 * [Sol1](project_euler/problem_035/sol1.py) * Problem 036 * [Sol1](project_euler/problem_036/sol1.py) * Problem 037 * [Sol1](project_euler/problem_037/sol1.py) * Problem 038 * [Sol1](project_euler/problem_038/sol1.py) * Problem 039 * [Sol1](project_euler/problem_039/sol1.py) * Problem 040 * [Sol1](project_euler/problem_040/sol1.py) * Problem 041 * [Sol1](project_euler/problem_041/sol1.py) * Problem 042 * [Solution42](project_euler/problem_042/solution42.py) * Problem 043 * [Sol1](project_euler/problem_043/sol1.py) * Problem 044 * [Sol1](project_euler/problem_044/sol1.py) * Problem 045 * [Sol1](project_euler/problem_045/sol1.py) * Problem 046 * [Sol1](project_euler/problem_046/sol1.py) * Problem 047 * [Sol1](project_euler/problem_047/sol1.py) * Problem 048 * [Sol1](project_euler/problem_048/sol1.py) * Problem 049 * [Sol1](project_euler/problem_049/sol1.py) * Problem 050 * [Sol1](project_euler/problem_050/sol1.py) * Problem 051 * [Sol1](project_euler/problem_051/sol1.py) * Problem 052 * [Sol1](project_euler/problem_052/sol1.py) * Problem 053 * [Sol1](project_euler/problem_053/sol1.py) * Problem 054 * [Sol1](project_euler/problem_054/sol1.py) * [Test Poker Hand](project_euler/problem_054/test_poker_hand.py) * Problem 055 * [Sol1](project_euler/problem_055/sol1.py) * Problem 056 * [Sol1](project_euler/problem_056/sol1.py) * Problem 057 * [Sol1](project_euler/problem_057/sol1.py) * Problem 058 * [Sol1](project_euler/problem_058/sol1.py) * Problem 059 * [Sol1](project_euler/problem_059/sol1.py) * Problem 062 * [Sol1](project_euler/problem_062/sol1.py) * Problem 063 * [Sol1](project_euler/problem_063/sol1.py) * Problem 064 * [Sol1](project_euler/problem_064/sol1.py) * Problem 065 * [Sol1](project_euler/problem_065/sol1.py) * Problem 067 * [Sol1](project_euler/problem_067/sol1.py) * [Sol2](project_euler/problem_067/sol2.py) * Problem 068 * [Sol1](project_euler/problem_068/sol1.py) * Problem 069 * [Sol1](project_euler/problem_069/sol1.py) * Problem 070 * [Sol1](project_euler/problem_070/sol1.py) * Problem 071 * [Sol1](project_euler/problem_071/sol1.py) * Problem 072 * [Sol1](project_euler/problem_072/sol1.py) * [Sol2](project_euler/problem_072/sol2.py) * Problem 074 * [Sol1](project_euler/problem_074/sol1.py) * [Sol2](project_euler/problem_074/sol2.py) * Problem 075 * [Sol1](project_euler/problem_075/sol1.py) * Problem 076 * [Sol1](project_euler/problem_076/sol1.py) * Problem 077 * [Sol1](project_euler/problem_077/sol1.py) * Problem 078 * [Sol1](project_euler/problem_078/sol1.py) * Problem 080 * [Sol1](project_euler/problem_080/sol1.py) * Problem 081 * [Sol1](project_euler/problem_081/sol1.py) * Problem 085 * [Sol1](project_euler/problem_085/sol1.py) * Problem 086 * [Sol1](project_euler/problem_086/sol1.py) * Problem 087 * [Sol1](project_euler/problem_087/sol1.py) * Problem 089 * [Sol1](project_euler/problem_089/sol1.py) * Problem 091 * [Sol1](project_euler/problem_091/sol1.py) * Problem 092 * [Sol1](project_euler/problem_092/sol1.py) * Problem 097 * [Sol1](project_euler/problem_097/sol1.py) * Problem 099 * [Sol1](project_euler/problem_099/sol1.py) * Problem 101 * [Sol1](project_euler/problem_101/sol1.py) * Problem 102 * [Sol1](project_euler/problem_102/sol1.py) * 
Problem 107 * [Sol1](project_euler/problem_107/sol1.py) * Problem 109 * [Sol1](project_euler/problem_109/sol1.py) * Problem 112 * [Sol1](project_euler/problem_112/sol1.py) * Problem 113 * [Sol1](project_euler/problem_113/sol1.py) * Problem 114 * [Sol1](project_euler/problem_114/sol1.py) * Problem 115 * [Sol1](project_euler/problem_115/sol1.py) * Problem 116 * [Sol1](project_euler/problem_116/sol1.py) * Problem 119 * [Sol1](project_euler/problem_119/sol1.py) * Problem 120 * [Sol1](project_euler/problem_120/sol1.py) * Problem 121 * [Sol1](project_euler/problem_121/sol1.py) * Problem 123 * [Sol1](project_euler/problem_123/sol1.py) * Problem 125 * [Sol1](project_euler/problem_125/sol1.py) * Problem 129 * [Sol1](project_euler/problem_129/sol1.py) * Problem 135 * [Sol1](project_euler/problem_135/sol1.py) * Problem 144 * [Sol1](project_euler/problem_144/sol1.py) * Problem 145 * [Sol1](project_euler/problem_145/sol1.py) * Problem 173 * [Sol1](project_euler/problem_173/sol1.py) * Problem 174 * [Sol1](project_euler/problem_174/sol1.py) * Problem 180 * [Sol1](project_euler/problem_180/sol1.py) * Problem 188 * [Sol1](project_euler/problem_188/sol1.py) * Problem 191 * [Sol1](project_euler/problem_191/sol1.py) * Problem 203 * [Sol1](project_euler/problem_203/sol1.py) * Problem 205 * [Sol1](project_euler/problem_205/sol1.py) * Problem 206 * [Sol1](project_euler/problem_206/sol1.py) * Problem 207 * [Sol1](project_euler/problem_207/sol1.py) * Problem 234 * [Sol1](project_euler/problem_234/sol1.py) * Problem 301 * [Sol1](project_euler/problem_301/sol1.py) * Problem 493 * [Sol1](project_euler/problem_493/sol1.py) * Problem 551 * [Sol1](project_euler/problem_551/sol1.py) * Problem 587 * [Sol1](project_euler/problem_587/sol1.py) * Problem 686 * [Sol1](project_euler/problem_686/sol1.py) ## Quantum * [Deutsch Jozsa](quantum/deutsch_jozsa.py) * [Half Adder](quantum/half_adder.py) * [Not Gate](quantum/not_gate.py) * [Q Full Adder](quantum/q_full_adder.py) * [Quantum Entanglement](quantum/quantum_entanglement.py) * [Ripple Adder Classic](quantum/ripple_adder_classic.py) * [Single Qubit Measure](quantum/single_qubit_measure.py) ## Scheduling * [First Come First Served](scheduling/first_come_first_served.py) * [Highest Response Ratio Next](scheduling/highest_response_ratio_next.py) * [Job Sequencing With Deadline](scheduling/job_sequencing_with_deadline.py) * [Multi Level Feedback Queue](scheduling/multi_level_feedback_queue.py) * [Non Preemptive Shortest Job First](scheduling/non_preemptive_shortest_job_first.py) * [Round Robin](scheduling/round_robin.py) * [Shortest Job First](scheduling/shortest_job_first.py) ## Searches * [Binary Search](searches/binary_search.py) * [Binary Tree Traversal](searches/binary_tree_traversal.py) * [Double Linear Search](searches/double_linear_search.py) * [Double Linear Search Recursion](searches/double_linear_search_recursion.py) * [Fibonacci Search](searches/fibonacci_search.py) * [Hill Climbing](searches/hill_climbing.py) * [Interpolation Search](searches/interpolation_search.py) * [Jump Search](searches/jump_search.py) * [Linear Search](searches/linear_search.py) * [Quick Select](searches/quick_select.py) * [Sentinel Linear Search](searches/sentinel_linear_search.py) * [Simple Binary Search](searches/simple_binary_search.py) * [Simulated Annealing](searches/simulated_annealing.py) * [Tabu Search](searches/tabu_search.py) * [Ternary Search](searches/ternary_search.py) ## Sorts * [Bead Sort](sorts/bead_sort.py) * [Bitonic Sort](sorts/bitonic_sort.py) * [Bogo 
Sort](sorts/bogo_sort.py) * [Bubble Sort](sorts/bubble_sort.py) * [Bucket Sort](sorts/bucket_sort.py) * [Circle Sort](sorts/circle_sort.py) * [Cocktail Shaker Sort](sorts/cocktail_shaker_sort.py) * [Comb Sort](sorts/comb_sort.py) * [Counting Sort](sorts/counting_sort.py) * [Cycle Sort](sorts/cycle_sort.py) * [Double Sort](sorts/double_sort.py) * [Dutch National Flag Sort](sorts/dutch_national_flag_sort.py) * [Exchange Sort](sorts/exchange_sort.py) * [External Sort](sorts/external_sort.py) * [Gnome Sort](sorts/gnome_sort.py) * [Heap Sort](sorts/heap_sort.py) * [Insertion Sort](sorts/insertion_sort.py) * [Intro Sort](sorts/intro_sort.py) * [Iterative Merge Sort](sorts/iterative_merge_sort.py) * [Merge Insertion Sort](sorts/merge_insertion_sort.py) * [Merge Sort](sorts/merge_sort.py) * [Msd Radix Sort](sorts/msd_radix_sort.py) * [Natural Sort](sorts/natural_sort.py) * [Odd Even Sort](sorts/odd_even_sort.py) * [Odd Even Transposition Parallel](sorts/odd_even_transposition_parallel.py) * [Odd Even Transposition Single Threaded](sorts/odd_even_transposition_single_threaded.py) * [Pancake Sort](sorts/pancake_sort.py) * [Patience Sort](sorts/patience_sort.py) * [Pigeon Sort](sorts/pigeon_sort.py) * [Pigeonhole Sort](sorts/pigeonhole_sort.py) * [Quick Sort](sorts/quick_sort.py) * [Quick Sort 3 Partition](sorts/quick_sort_3_partition.py) * [Radix Sort](sorts/radix_sort.py) * [Random Normal Distribution Quicksort](sorts/random_normal_distribution_quicksort.py) * [Random Pivot Quick Sort](sorts/random_pivot_quick_sort.py) * [Recursive Bubble Sort](sorts/recursive_bubble_sort.py) * [Recursive Insertion Sort](sorts/recursive_insertion_sort.py) * [Recursive Mergesort Array](sorts/recursive_mergesort_array.py) * [Recursive Quick Sort](sorts/recursive_quick_sort.py) * [Selection Sort](sorts/selection_sort.py) * [Shell Sort](sorts/shell_sort.py) * [Shrink Shell Sort](sorts/shrink_shell_sort.py) * [Slowsort](sorts/slowsort.py) * [Stooge Sort](sorts/stooge_sort.py) * [Strand Sort](sorts/strand_sort.py) * [Tim Sort](sorts/tim_sort.py) * [Topological Sort](sorts/topological_sort.py) * [Tree Sort](sorts/tree_sort.py) * [Unknown Sort](sorts/unknown_sort.py) * [Wiggle Sort](sorts/wiggle_sort.py) ## Strings * [Aho Corasick](strings/aho_corasick.py) * [Alternative String Arrange](strings/alternative_string_arrange.py) * [Anagrams](strings/anagrams.py) * [Autocomplete Using Trie](strings/autocomplete_using_trie.py) * [Barcode Validator](strings/barcode_validator.py) * [Boyer Moore Search](strings/boyer_moore_search.py) * [Can String Be Rearranged As Palindrome](strings/can_string_be_rearranged_as_palindrome.py) * [Capitalize](strings/capitalize.py) * [Check Anagrams](strings/check_anagrams.py) * [Check Pangram](strings/check_pangram.py) * [Credit Card Validator](strings/credit_card_validator.py) * [Detecting English Programmatically](strings/detecting_english_programmatically.py) * [Dna](strings/dna.py) * [Frequency Finder](strings/frequency_finder.py) * [Hamming Distance](strings/hamming_distance.py) * [Indian Phone Validator](strings/indian_phone_validator.py) * [Is Contains Unique Chars](strings/is_contains_unique_chars.py) * [Is Palindrome](strings/is_palindrome.py) * [Jaro Winkler](strings/jaro_winkler.py) * [Join](strings/join.py) * [Knuth Morris Pratt](strings/knuth_morris_pratt.py) * [Levenshtein Distance](strings/levenshtein_distance.py) * [Lower](strings/lower.py) * [Manacher](strings/manacher.py) * [Min Cost String Conversion](strings/min_cost_string_conversion.py) * [Naive String 
Search](strings/naive_string_search.py) * [Ngram](strings/ngram.py) * [Palindrome](strings/palindrome.py) * [Prefix Function](strings/prefix_function.py) * [Rabin Karp](strings/rabin_karp.py) * [Remove Duplicate](strings/remove_duplicate.py) * [Reverse Letters](strings/reverse_letters.py) * [Reverse Long Words](strings/reverse_long_words.py) * [Reverse Words](strings/reverse_words.py) * [Snake Case To Camel Pascal Case](strings/snake_case_to_camel_pascal_case.py) * [Split](strings/split.py) * [Upper](strings/upper.py) * [Wave](strings/wave.py) * [Wildcard Pattern Matching](strings/wildcard_pattern_matching.py) * [Word Occurrence](strings/word_occurrence.py) * [Word Patterns](strings/word_patterns.py) * [Z Function](strings/z_function.py) ## Web Programming * [Co2 Emission](web_programming/co2_emission.py) * [Covid Stats Via Xpath](web_programming/covid_stats_via_xpath.py) * [Crawl Google Results](web_programming/crawl_google_results.py) * [Crawl Google Scholar Citation](web_programming/crawl_google_scholar_citation.py) * [Currency Converter](web_programming/currency_converter.py) * [Current Stock Price](web_programming/current_stock_price.py) * [Current Weather](web_programming/current_weather.py) * [Daily Horoscope](web_programming/daily_horoscope.py) * [Download Images From Google Query](web_programming/download_images_from_google_query.py) * [Emails From Url](web_programming/emails_from_url.py) * [Fetch Anime And Play](web_programming/fetch_anime_and_play.py) * [Fetch Bbc News](web_programming/fetch_bbc_news.py) * [Fetch Github Info](web_programming/fetch_github_info.py) * [Fetch Jobs](web_programming/fetch_jobs.py) * [Fetch Quotes](web_programming/fetch_quotes.py) * [Fetch Well Rx Price](web_programming/fetch_well_rx_price.py) * [Get Imdb Top 250 Movies Csv](web_programming/get_imdb_top_250_movies_csv.py) * [Get Imdbtop](web_programming/get_imdbtop.py) * [Get Top Hn Posts](web_programming/get_top_hn_posts.py) * [Get User Tweets](web_programming/get_user_tweets.py) * [Giphy](web_programming/giphy.py) * [Instagram Crawler](web_programming/instagram_crawler.py) * [Instagram Pic](web_programming/instagram_pic.py) * [Instagram Video](web_programming/instagram_video.py) * [Nasa Data](web_programming/nasa_data.py) * [Open Google Results](web_programming/open_google_results.py) * [Random Anime Character](web_programming/random_anime_character.py) * [Recaptcha Verification](web_programming/recaptcha_verification.py) * [Reddit](web_programming/reddit.py) * [Search Books By Isbn](web_programming/search_books_by_isbn.py) * [Slack Message](web_programming/slack_message.py) * [Test Fetch Github Info](web_programming/test_fetch_github_info.py) * [World Covid19 Stats](web_programming/world_covid19_stats.py)
1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
# Locally Weighted Linear Regression It is a non-parametric ML algorithm that, unlike **linear regression**, does not learn a fixed set of parameters. \ So, here comes a question: what is *linear regression*? \ **Linear regression** is a supervised learning algorithm used for computing linear relationships between input (X) and output (Y). \ ### Terminology Involved number_of_features(i) = Number of features involved. \ number_of_training_examples(m) = Number of training examples. \ output_sequence(y) = Output Sequence. \ $\theta$ $^T$ x = predicted point. \ J($\theta$) = Cost function of the point. The steps involved in ordinary linear regression are: Training phase: Compute $\theta$ to minimize the cost. \ J($\theta$) = $\sum_{i=1}^m$ (($\theta$)$^T$ $x^i$ - $y^i$)$^2$ Predict output: for a given query point x, \ return: ($\theta$)$^T$ x <img src="https://miro.medium.com/max/700/1*FZsLp8yTULf77qrp0Qd91g.png" alt="Linear Regression"> This training phase is possible when data points are linear, but there again comes a question: can we predict a non-linear relationship between x and y, as shown below? <img src="https://miro.medium.com/max/700/1*DHYvJg55uN-Kj8jHaxDKvQ.png" alt="Non-linear Data"> <br /> <br /> So, here comes the role of a non-parametric algorithm, which doesn't compute predictions based on a fixed set of parameters. Rather, parameters $\theta$ are computed individually for each query point/data point x. <br /> <br /> While computing $\theta$, a higher "preferance" is given to points in the vicinity of x than to points farther from x. Cost Function J($\theta$) = $\sum_{i=1}^m$ $w^i$ (($\theta$)$^T$ $x^i$ - $y^i$)$^2$ $w^i$ is a non-negative weight associated with the training point $x^i$. \ $w^i$ is large for $x^i$'s lying closer to the query point $x_i$. \ $w^i$ is small for $x^i$'s lying farther from the query point $x_i$. A typical weight can be computed using \ $w^i$ = $\exp$(-$\frac{(x^i-x)(x^i-x)^T}{2\tau^2}$) where $\tau$ is the bandwidth parameter that controls how quickly $w^i$ falls off with distance from x. Let's look at an example: Suppose we had a query point x=5.0 and training points $x^1$=4.9 and $x^2$=3.0, then we can calculate the weights as: $w^i$ = $\exp$(-$\frac{(x^i-x)(x^i-x)^T}{2\tau^2}$) with $\tau$=0.5 $w^1$ = $\exp$(-$\frac{(4.9-5)^2}{2(0.5)^2}$) = 0.9802 $w^2$ = $\exp$(-$\frac{(3-5)^2}{2(0.5)^2}$) = 0.000335 So, J($\theta$) = 0.9802*($\theta$ $^T$ $x^1$ - $y^1$) + 0.000335*($\theta$ $^T$ $x^2$ - $y^2$) So, hereby we can conclude that the weights fall exponentially as the distance between x & $x^i$ increases, and so does the contribution of the error in prediction for $x^i$ to the cost. Steps involved in LWL are: \ Compute $\theta$ to minimize the cost. J($\theta$) = $\sum_{i=1}^m$ $w^i$ (($\theta$)$^T$ $x^i$ - $y^i$)$^2$ \ Predict output: for a given query point x, \ return: $\theta$ $^T$ x <img src="https://miro.medium.com/max/700/1*H3QS05Q1GJtY-tiBL00iug.png" alt="LWL">
# Locally Weighted Linear Regression It is a non-parametric ML algorithm that, unlike **linear regression**, does not learn a fixed set of parameters. \ So, here comes a question: what is *linear regression*? \ **Linear regression** is a supervised learning algorithm used for computing linear relationships between input (X) and output (Y). \ ### Terminology Involved number_of_features(i) = Number of features involved. \ number_of_training_examples(m) = Number of training examples. \ output_sequence(y) = Output Sequence. \ $\theta$ $^T$ x = predicted point. \ J($\theta$) = Cost function of the point. The steps involved in ordinary linear regression are: Training phase: Compute $\theta$ to minimize the cost. \ J($\theta$) = $\sum_{i=1}^m$ (($\theta$)$^T$ $x^i$ - $y^i$)$^2$ Predict output: for a given query point x, \ return: ($\theta$)$^T$ x <img src="https://miro.medium.com/max/700/1*FZsLp8yTULf77qrp0Qd91g.png" alt="Linear Regression"> This training phase is possible when data points are linear, but there again comes a question: can we predict a non-linear relationship between x and y, as shown below? <img src="https://miro.medium.com/max/700/1*DHYvJg55uN-Kj8jHaxDKvQ.png" alt="Non-linear Data"> <br /> <br /> So, here comes the role of a non-parametric algorithm, which doesn't compute predictions based on a fixed set of parameters. Rather, parameters $\theta$ are computed individually for each query point/data point x. <br /> <br /> While computing $\theta$, a higher preference is given to points in the vicinity of x than to points farther from x. Cost Function J($\theta$) = $\sum_{i=1}^m$ $w^i$ (($\theta$)$^T$ $x^i$ - $y^i$)$^2$ $w^i$ is a non-negative weight associated with the training point $x^i$. \ $w^i$ is large for $x^i$'s lying closer to the query point $x_i$. \ $w^i$ is small for $x^i$'s lying farther from the query point $x_i$. A typical weight can be computed using \ $w^i$ = $\exp$(-$\frac{(x^i-x)(x^i-x)^T}{2\tau^2}$) where $\tau$ is the bandwidth parameter that controls how quickly $w^i$ falls off with distance from x. Let's look at an example: Suppose we had a query point x=5.0 and training points $x^1$=4.9 and $x^2$=3.0, then we can calculate the weights as: $w^i$ = $\exp$(-$\frac{(x^i-x)(x^i-x)^T}{2\tau^2}$) with $\tau$=0.5 $w^1$ = $\exp$(-$\frac{(4.9-5)^2}{2(0.5)^2}$) = 0.9802 $w^2$ = $\exp$(-$\frac{(3-5)^2}{2(0.5)^2}$) = 0.000335 So, J($\theta$) = 0.9802*($\theta$ $^T$ $x^1$ - $y^1$) + 0.000335*($\theta$ $^T$ $x^2$ - $y^2$) So, hereby we can conclude that the weights fall exponentially as the distance between x & $x^i$ increases, and so does the contribution of the error in prediction for $x^i$ to the cost. Steps involved in LWL are: \ Compute $\theta$ to minimize the cost. J($\theta$) = $\sum_{i=1}^m$ $w^i$ (($\theta$)$^T$ $x^i$ - $y^i$)$^2$ \ Predict output: for a given query point x, \ return: $\theta$ $^T$ x <img src="https://miro.medium.com/max/700/1*H3QS05Q1GJtY-tiBL00iug.png" alt="LWL">
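Minimizing the weighted cost above has the closed form $\theta$ = (X$^T$ W X)$^{-1}$ X$^T$ W y with W = diag(w$^1$, ..., w$^m$). Below is a minimal NumPy sketch of that idea; the function name, the bias-column design matrix, and the synthetic data are illustrative assumptions, not the repository's implementation.

```python
import numpy as np


def local_weight_regression(
    x_train: np.ndarray, y_train: np.ndarray, query: np.ndarray, tau: float
) -> float:
    """Predict y at `query` by fitting theta with Gaussian weights centred on `query`."""
    # w^i = exp(-||x^i - query||^2 / (2 * tau^2)) for every training point
    weights = np.exp(-np.sum((x_train - query) ** 2, axis=1) / (2 * tau**2))
    w = np.diag(weights)
    # Closed-form weighted least squares: theta = (X^T W X)^-1 X^T W y
    theta = np.linalg.pinv(x_train.T @ w @ x_train) @ x_train.T @ w @ y_train
    return float(query @ theta)


if __name__ == "__main__":
    rng = np.random.default_rng(0)
    x = np.linspace(0, 6, 50)
    y = np.sin(x) + rng.normal(scale=0.1, size=x.shape)
    x_mat = np.column_stack([np.ones_like(x), x])  # add a bias column
    # Predict at x = 5.0 (query vector is [bias, x]); small tau -> very local fit
    print(local_weight_regression(x_mat, y, np.array([1.0, 5.0]), tau=0.5))
```

A small tau makes the fit track local curvature closely; a large tau makes it approach ordinary linear regression.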
1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
""" References: wikipedia:square free number python/black : True flake8 : True """ from __future__ import annotations def is_square_free(factors: list[int]) -> bool: """ # doctest: +NORMALIZE_WHITESPACE This functions takes a list of prime factors as input. returns True if the factors are square free. >>> is_square_free([1, 1, 2, 3, 4]) False These are wrong but should return some value it simply checks for repition in the numbers. >>> is_square_free([1, 3, 4, 'sd', 0.0]) True >>> is_square_free([1, 0.5, 2, 0.0]) True >>> is_square_free([1, 2, 2, 5]) False >>> is_square_free('asd') True >>> is_square_free(24) Traceback (most recent call last): ... TypeError: 'int' object is not iterable """ return len(set(factors)) == len(factors) if __name__ == "__main__": import doctest doctest.testmod()
""" References: wikipedia:square free number python/black : True flake8 : True """ from __future__ import annotations def is_square_free(factors: list[int]) -> bool: """ # doctest: +NORMALIZE_WHITESPACE This functions takes a list of prime factors as input. returns True if the factors are square free. >>> is_square_free([1, 1, 2, 3, 4]) False These are wrong but should return some value it simply checks for repetition in the numbers. >>> is_square_free([1, 3, 4, 'sd', 0.0]) True >>> is_square_free([1, 0.5, 2, 0.0]) True >>> is_square_free([1, 2, 2, 5]) False >>> is_square_free('asd') True >>> is_square_free(24) Traceback (most recent call last): ... TypeError: 'int' object is not iterable """ return len(set(factors)) == len(factors) if __name__ == "__main__": import doctest doctest.testmod()
1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
""" Author: Alexander Joslin GitHub: github.com/echoaj Explanation: https://medium.com/@haleesammar/implemented-in-js-dijkstras-2-stack- algorithm-for-evaluating-mathematical-expressions-fc0837dae1ea We can use Dijkstra's two stack algorithm to solve an equation such as: (5 + ((4 * 2) * (2 + 3))) THESE ARE THE ALGORITHM'S RULES: RULE 1: Scan the expression from left to right. When an operand is encountered, push it onto the operand stack. RULE 2: When an operator is encountered in the expression, push it onto the operator stack. RULE 3: When a left parenthesis is encountered in the expression, ignore it. RULE 4: When a right parenthesis is encountered in the expression, pop an operator off the operator stack. The two operands it must operate on must be the last two operands pushed onto the operand stack. We therefore pop the operand stack twice, perform the operation, and push the result back onto the operand stack so it will be available for use as an operand of the next operator popped off the operator stack. RULE 5: When the entire infix expression has been scanned, the value left on the operand stack represents the value of the expression. NOTE: It only works with whole numbers. """ __author__ = "Alexander Joslin" import operator as op from .stack import Stack def dijkstras_two_stack_algorithm(equation: str) -> int: """ DocTests >>> dijkstras_two_stack_algorithm("(5 + 3)") 8 >>> dijkstras_two_stack_algorithm("((9 - (2 + 9)) + (8 - 1))") 5 >>> dijkstras_two_stack_algorithm("((((3 - 2) - (2 + 3)) + (2 - 4)) + 3)") -3 :param equation: a string :return: result: an integer """ operators = {"*": op.mul, "/": op.truediv, "+": op.add, "-": op.sub} operand_stack: Stack[int] = Stack() operator_stack: Stack[str] = Stack() for i in equation: if i.isdigit(): # RULE 1 operand_stack.push(int(i)) elif i in operators: # RULE 2 operator_stack.push(i) elif i == ")": # RULE 4 opr = operator_stack.peek() operator_stack.pop() num1 = operand_stack.peek() operand_stack.pop() num2 = operand_stack.peek() operand_stack.pop() total = operators[opr](num2, num1) operand_stack.push(total) # RULE 5 return operand_stack.peek() if __name__ == "__main__": equation = "(5 + ((4 * 2) * (2 + 3)))" # answer = 45 print(f"{equation} = {dijkstras_two_stack_algorithm(equation)}")
""" Author: Alexander Joslin GitHub: github.com/echoaj Explanation: https://medium.com/@haleesammar/implemented-in-js-dijkstras-2-stack- algorithm-for-evaluating-mathematical-expressions-fc0837dae1ea We can use Dijkstra's two stack algorithm to solve an equation such as: (5 + ((4 * 2) * (2 + 3))) THESE ARE THE ALGORITHM'S RULES: RULE 1: Scan the expression from left to right. When an operand is encountered, push it onto the operand stack. RULE 2: When an operator is encountered in the expression, push it onto the operator stack. RULE 3: When a left parenthesis is encountered in the expression, ignore it. RULE 4: When a right parenthesis is encountered in the expression, pop an operator off the operator stack. The two operands it must operate on must be the last two operands pushed onto the operand stack. We therefore pop the operand stack twice, perform the operation, and push the result back onto the operand stack so it will be available for use as an operand of the next operator popped off the operator stack. RULE 5: When the entire infix expression has been scanned, the value left on the operand stack represents the value of the expression. NOTE: It only works with whole numbers. """ __author__ = "Alexander Joslin" import operator as op from .stack import Stack def dijkstras_two_stack_algorithm(equation: str) -> int: """ DocTests >>> dijkstras_two_stack_algorithm("(5 + 3)") 8 >>> dijkstras_two_stack_algorithm("((9 - (2 + 9)) + (8 - 1))") 5 >>> dijkstras_two_stack_algorithm("((((3 - 2) - (2 + 3)) + (2 - 4)) + 3)") -3 :param equation: a string :return: result: an integer """ operators = {"*": op.mul, "/": op.truediv, "+": op.add, "-": op.sub} operand_stack: Stack[int] = Stack() operator_stack: Stack[str] = Stack() for i in equation: if i.isdigit(): # RULE 1 operand_stack.push(int(i)) elif i in operators: # RULE 2 operator_stack.push(i) elif i == ")": # RULE 4 opr = operator_stack.peek() operator_stack.pop() num1 = operand_stack.peek() operand_stack.pop() num2 = operand_stack.peek() operand_stack.pop() total = operators[opr](num2, num1) operand_stack.push(total) # RULE 5 return operand_stack.peek() if __name__ == "__main__": equation = "(5 + ((4 * 2) * (2 + 3)))" # answer = 45 print(f"{equation} = {dijkstras_two_stack_algorithm(equation)}")
-1
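A brief usage sketch for the two-stack record above. The import path is hypothetical (the file itself uses a relative import for Stack), and note that operands must be single digits, since the parser converts one character at a time.

from dijkstras_two_stack_algorithm import dijkstras_two_stack_algorithm  # hypothetical import path

print(dijkstras_two_stack_algorithm("(3 + (2 * 4))"))        # 11
print(dijkstras_two_stack_algorithm("((1 + 2) * (3 + 4))"))  # 21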
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
""" Project Euler Problem 5: https://projecteuler.net/problem=5 Smallest multiple 2520 is the smallest number that can be divided by each of the numbers from 1 to 10 without any remainder. What is the smallest positive number that is _evenly divisible_ by all of the numbers from 1 to 20? References: - https://en.wiktionary.org/wiki/evenly_divisible - https://en.wikipedia.org/wiki/Euclidean_algorithm - https://en.wikipedia.org/wiki/Least_common_multiple """ def greatest_common_divisor(x: int, y: int) -> int: """ Euclidean Greatest Common Divisor algorithm >>> greatest_common_divisor(0, 0) 0 >>> greatest_common_divisor(23, 42) 1 >>> greatest_common_divisor(15, 33) 3 >>> greatest_common_divisor(12345, 67890) 15 """ return x if y == 0 else greatest_common_divisor(y, x % y) def lcm(x: int, y: int) -> int: """ Least Common Multiple. Using the property that lcm(a, b) * greatest_common_divisor(a, b) = a*b >>> lcm(3, 15) 15 >>> lcm(1, 27) 27 >>> lcm(13, 27) 351 >>> lcm(64, 48) 192 """ return (x * y) // greatest_common_divisor(x, y) def solution(n: int = 20) -> int: """ Returns the smallest positive number that is evenly divisible (divisible with no remainder) by all of the numbers from 1 to n. >>> solution(10) 2520 >>> solution(15) 360360 >>> solution(22) 232792560 """ g = 1 for i in range(1, n + 1): g = lcm(g, i) return g if __name__ == "__main__": print(f"{solution() = }")
""" Project Euler Problem 5: https://projecteuler.net/problem=5 Smallest multiple 2520 is the smallest number that can be divided by each of the numbers from 1 to 10 without any remainder. What is the smallest positive number that is _evenly divisible_ by all of the numbers from 1 to 20? References: - https://en.wiktionary.org/wiki/evenly_divisible - https://en.wikipedia.org/wiki/Euclidean_algorithm - https://en.wikipedia.org/wiki/Least_common_multiple """ def greatest_common_divisor(x: int, y: int) -> int: """ Euclidean Greatest Common Divisor algorithm >>> greatest_common_divisor(0, 0) 0 >>> greatest_common_divisor(23, 42) 1 >>> greatest_common_divisor(15, 33) 3 >>> greatest_common_divisor(12345, 67890) 15 """ return x if y == 0 else greatest_common_divisor(y, x % y) def lcm(x: int, y: int) -> int: """ Least Common Multiple. Using the property that lcm(a, b) * greatest_common_divisor(a, b) = a*b >>> lcm(3, 15) 15 >>> lcm(1, 27) 27 >>> lcm(13, 27) 351 >>> lcm(64, 48) 192 """ return (x * y) // greatest_common_divisor(x, y) def solution(n: int = 20) -> int: """ Returns the smallest positive number that is evenly divisible (divisible with no remainder) by all of the numbers from 1 to n. >>> solution(10) 2520 >>> solution(15) 360360 >>> solution(22) 232792560 """ g = 1 for i in range(1, n + 1): g = lcm(g, i) return g if __name__ == "__main__": print(f"{solution() = }")
-1
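As a sanity check on the record above, the same answer can be reproduced with the standard library (Python 3.9+); this sketch is illustrative and not part of the repository file.

from functools import reduce
from math import lcm

# The smallest number evenly divisible by every integer from 1 to 20 is lcm(1, 2, ..., 20).
print(reduce(lcm, range(1, 21)))  # 232792560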
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
""" Champernowne's constant Problem 40 An irrational decimal fraction is created by concatenating the positive integers: 0.123456789101112131415161718192021... It can be seen that the 12th digit of the fractional part is 1. If dn represents the nth digit of the fractional part, find the value of the following expression. d1 × d10 × d100 × d1000 × d10000 × d100000 × d1000000 """ def solution(): """Returns >>> solution() 210 """ constant = [] i = 1 while len(constant) < 1e6: constant.append(str(i)) i += 1 constant = "".join(constant) return ( int(constant[0]) * int(constant[9]) * int(constant[99]) * int(constant[999]) * int(constant[9999]) * int(constant[99999]) * int(constant[999999]) ) if __name__ == "__main__": print(solution())
""" Champernowne's constant Problem 40 An irrational decimal fraction is created by concatenating the positive integers: 0.123456789101112131415161718192021... It can be seen that the 12th digit of the fractional part is 1. If dn represents the nth digit of the fractional part, find the value of the following expression. d1 × d10 × d100 × d1000 × d10000 × d100000 × d1000000 """ def solution(): """Returns >>> solution() 210 """ constant = [] i = 1 while len(constant) < 1e6: constant.append(str(i)) i += 1 constant = "".join(constant) return ( int(constant[0]) * int(constant[9]) * int(constant[99]) * int(constant[999]) * int(constant[9999]) * int(constant[99999]) * int(constant[999999]) ) if __name__ == "__main__": print(solution())
-1
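A memory-lighter sketch of the same computation as the record above, walking the concatenated digits without materialising the million-character string; purely illustrative and not part of the repository file.

from itertools import count

def champernowne_digit(position: int) -> int:
    # Digit at the given 1-indexed position of 0.123456789101112...
    seen = 0
    for i in count(1):
        s = str(i)
        if seen + len(s) >= position:
            return int(s[position - seen - 1])
        seen += len(s)
    raise AssertionError("unreachable")

product = 1
for p in (1, 10, 100, 1_000, 10_000, 100_000, 1_000_000):
    product *= champernowne_digit(p)
print(product)  # 210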
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
from __future__ import annotations from collections import Counter from random import random class MarkovChainGraphUndirectedUnweighted: """ Undirected Unweighted Graph for running Markov Chain Algorithm """ def __init__(self): self.connections = {} def add_node(self, node: str) -> None: self.connections[node] = {} def add_transition_probability( self, node1: str, node2: str, probability: float ) -> None: if node1 not in self.connections: self.add_node(node1) if node2 not in self.connections: self.add_node(node2) self.connections[node1][node2] = probability def get_nodes(self) -> list[str]: return list(self.connections) def transition(self, node: str) -> str: current_probability = 0 random_value = random() for dest in self.connections[node]: current_probability += self.connections[node][dest] if current_probability > random_value: return dest return "" def get_transitions( start: str, transitions: list[tuple[str, str, float]], steps: int ) -> dict[str, int]: """ Running Markov Chain algorithm and calculating the number of times each node is visited >>> transitions = [ ... ('a', 'a', 0.9), ... ('a', 'b', 0.075), ... ('a', 'c', 0.025), ... ('b', 'a', 0.15), ... ('b', 'b', 0.8), ... ('b', 'c', 0.05), ... ('c', 'a', 0.25), ... ('c', 'b', 0.25), ... ('c', 'c', 0.5) ... ] >>> result = get_transitions('a', transitions, 5000) >>> result['a'] > result['b'] > result['c'] True """ graph = MarkovChainGraphUndirectedUnweighted() for node1, node2, probability in transitions: graph.add_transition_probability(node1, node2, probability) visited = Counter(graph.get_nodes()) node = start for _ in range(steps): node = graph.transition(node) visited[node] += 1 return visited if __name__ == "__main__": import doctest doctest.testmod()
from __future__ import annotations from collections import Counter from random import random class MarkovChainGraphUndirectedUnweighted: """ Undirected Unweighted Graph for running Markov Chain Algorithm """ def __init__(self): self.connections = {} def add_node(self, node: str) -> None: self.connections[node] = {} def add_transition_probability( self, node1: str, node2: str, probability: float ) -> None: if node1 not in self.connections: self.add_node(node1) if node2 not in self.connections: self.add_node(node2) self.connections[node1][node2] = probability def get_nodes(self) -> list[str]: return list(self.connections) def transition(self, node: str) -> str: current_probability = 0 random_value = random() for dest in self.connections[node]: current_probability += self.connections[node][dest] if current_probability > random_value: return dest return "" def get_transitions( start: str, transitions: list[tuple[str, str, float]], steps: int ) -> dict[str, int]: """ Running Markov Chain algorithm and calculating the number of times each node is visited >>> transitions = [ ... ('a', 'a', 0.9), ... ('a', 'b', 0.075), ... ('a', 'c', 0.025), ... ('b', 'a', 0.15), ... ('b', 'b', 0.8), ... ('b', 'c', 0.05), ... ('c', 'a', 0.25), ... ('c', 'b', 0.25), ... ('c', 'c', 0.5) ... ] >>> result = get_transitions('a', transitions, 5000) >>> result['a'] > result['b'] > result['c'] True """ graph = MarkovChainGraphUndirectedUnweighted() for node1, node2, probability in transitions: graph.add_transition_probability(node1, node2, probability) visited = Counter(graph.get_nodes()) node = start for _ in range(steps): node = graph.transition(node) visited[node] += 1 return visited if __name__ == "__main__": import doctest doctest.testmod()
-1
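A quick usage sketch for the Markov-chain record above, assuming get_transitions from that file is in scope; exact counts vary between runs because transitions are sampled randomly.

transitions = [
    ("sunny", "sunny", 0.8),
    ("sunny", "rainy", 0.2),
    ("rainy", "sunny", 0.4),
    ("rainy", "rainy", 0.6),
]
counts = get_transitions("sunny", transitions, 1000)
print(counts)                             # e.g. Counter({'sunny': ..., 'rainy': ...})
print(counts["sunny"] > counts["rainy"])  # True on most runs, since the stationary distribution favours 'sunny'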
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
""" This is a pure Python implementation of the merge-insertion sort algorithm Source: https://en.wikipedia.org/wiki/Graham_scan For doctests run following command: python3 -m doctest -v graham_scan.py """ from __future__ import annotations from collections import deque from enum import Enum from math import atan2, degrees from sys import maxsize def graham_scan(points: list[tuple[int, int]]) -> list[tuple[int, int]]: """Pure implementation of graham scan algorithm in Python :param points: The unique points on coordinates. :return: The points on convex hell. Examples: >>> graham_scan([(9, 6), (3, 1), (0, 0), (5, 5), (5, 2), (7, 0), (3, 3), (1, 4)]) [(0, 0), (7, 0), (9, 6), (5, 5), (1, 4)] >>> graham_scan([(0, 0), (1, 0), (1, 1), (0, 1)]) [(0, 0), (1, 0), (1, 1), (0, 1)] >>> graham_scan([(0, 0), (1, 1), (2, 2), (3, 3), (-1, 2)]) [(0, 0), (1, 1), (2, 2), (3, 3), (-1, 2)] >>> graham_scan([(-100, 20), (99, 3), (1, 10000001), (5133186, -25), (-66, -4)]) [(5133186, -25), (1, 10000001), (-100, 20), (-66, -4)] """ if len(points) <= 2: # There is no convex hull raise ValueError("graham_scan: argument must contain more than 3 points.") if len(points) == 3: return points # find the lowest and the most left point minidx = 0 miny, minx = maxsize, maxsize for i, point in enumerate(points): x = point[0] y = point[1] if y < miny: miny = y minx = x minidx = i if y == miny: if x < minx: minx = x minidx = i # remove the lowest and the most left point from points for preparing for sort points.pop(minidx) def angle_comparer(point: tuple[int, int], minx: int, miny: int) -> float: """Return the angle toward to point from (minx, miny) :param point: The target point minx: The starting point's x miny: The starting point's y :return: the angle Examples: >>> angle_comparer((1,1), 0, 0) 45.0 >>> angle_comparer((100,1), 10, 10) -5.710593137499642 >>> angle_comparer((5,5), 2, 3) 33.690067525979785 """ # sort the points accorgind to the angle from the lowest and the most left point x = point[0] y = point[1] angle = degrees(atan2(y - miny, x - minx)) return angle sorted_points = sorted(points, key=lambda point: angle_comparer(point, minx, miny)) # This insert actually costs complexity, # and you should instead add (minx, miny) into stack later. # I'm using insert just for easy understanding. sorted_points.insert(0, (minx, miny)) # traversal from the lowest and the most left point in anti-clockwise direction # if direction gets right, the previous point is not the convex hull. class Direction(Enum): left = 1 straight = 2 right = 3 def check_direction( starting: tuple[int, int], via: tuple[int, int], target: tuple[int, int] ) -> Direction: """Return the direction toward to the line from via to target from starting :param starting: The starting point via: The via point target: The target point :return: the Direction Examples: >>> check_direction((1,1), (2,2), (3,3)) Direction.straight >>> check_direction((60,1), (-50,199), (30,2)) Direction.left >>> check_direction((0,0), (5,5), (10,0)) Direction.right """ x0, y0 = starting x1, y1 = via x2, y2 = target via_angle = degrees(atan2(y1 - y0, x1 - x0)) if via_angle < 0: via_angle += 360 target_angle = degrees(atan2(y2 - y0, x2 - x0)) if target_angle < 0: target_angle += 360 # t- # \ \ # \ v # \| # s # via_angle is always lower than target_angle, if direction is left. # If they are same, it means they are on a same line of convex hull. 
if target_angle > via_angle: return Direction.left elif target_angle == via_angle: return Direction.straight else: return Direction.right stack: deque[tuple[int, int]] = deque() stack.append(sorted_points[0]) stack.append(sorted_points[1]) stack.append(sorted_points[2]) # In any ways, the first 3 points line are towards left. # Because we sort them the angle from minx, miny. current_direction = Direction.left for i in range(3, len(sorted_points)): while True: starting = stack[-2] via = stack[-1] target = sorted_points[i] next_direction = check_direction(starting, via, target) if next_direction == Direction.left: current_direction = Direction.left break if next_direction == Direction.straight: if current_direction == Direction.left: # We keep current_direction as left. # Because if the straight line keeps as straight, # we want to know if this straight line is towards left. break elif current_direction == Direction.right: # If the straight line is towards right, # every previous points on those straigh line is not convex hull. stack.pop() if next_direction == Direction.right: stack.pop() stack.append(sorted_points[i]) return list(stack)
""" This is a pure Python implementation of the merge-insertion sort algorithm Source: https://en.wikipedia.org/wiki/Graham_scan For doctests run following command: python3 -m doctest -v graham_scan.py """ from __future__ import annotations from collections import deque from enum import Enum from math import atan2, degrees from sys import maxsize def graham_scan(points: list[tuple[int, int]]) -> list[tuple[int, int]]: """Pure implementation of graham scan algorithm in Python :param points: The unique points on coordinates. :return: The points on convex hell. Examples: >>> graham_scan([(9, 6), (3, 1), (0, 0), (5, 5), (5, 2), (7, 0), (3, 3), (1, 4)]) [(0, 0), (7, 0), (9, 6), (5, 5), (1, 4)] >>> graham_scan([(0, 0), (1, 0), (1, 1), (0, 1)]) [(0, 0), (1, 0), (1, 1), (0, 1)] >>> graham_scan([(0, 0), (1, 1), (2, 2), (3, 3), (-1, 2)]) [(0, 0), (1, 1), (2, 2), (3, 3), (-1, 2)] >>> graham_scan([(-100, 20), (99, 3), (1, 10000001), (5133186, -25), (-66, -4)]) [(5133186, -25), (1, 10000001), (-100, 20), (-66, -4)] """ if len(points) <= 2: # There is no convex hull raise ValueError("graham_scan: argument must contain more than 3 points.") if len(points) == 3: return points # find the lowest and the most left point minidx = 0 miny, minx = maxsize, maxsize for i, point in enumerate(points): x = point[0] y = point[1] if y < miny: miny = y minx = x minidx = i if y == miny: if x < minx: minx = x minidx = i # remove the lowest and the most left point from points for preparing for sort points.pop(minidx) def angle_comparer(point: tuple[int, int], minx: int, miny: int) -> float: """Return the angle toward to point from (minx, miny) :param point: The target point minx: The starting point's x miny: The starting point's y :return: the angle Examples: >>> angle_comparer((1,1), 0, 0) 45.0 >>> angle_comparer((100,1), 10, 10) -5.710593137499642 >>> angle_comparer((5,5), 2, 3) 33.690067525979785 """ # sort the points accorgind to the angle from the lowest and the most left point x = point[0] y = point[1] angle = degrees(atan2(y - miny, x - minx)) return angle sorted_points = sorted(points, key=lambda point: angle_comparer(point, minx, miny)) # This insert actually costs complexity, # and you should instead add (minx, miny) into stack later. # I'm using insert just for easy understanding. sorted_points.insert(0, (minx, miny)) # traversal from the lowest and the most left point in anti-clockwise direction # if direction gets right, the previous point is not the convex hull. class Direction(Enum): left = 1 straight = 2 right = 3 def check_direction( starting: tuple[int, int], via: tuple[int, int], target: tuple[int, int] ) -> Direction: """Return the direction toward to the line from via to target from starting :param starting: The starting point via: The via point target: The target point :return: the Direction Examples: >>> check_direction((1,1), (2,2), (3,3)) Direction.straight >>> check_direction((60,1), (-50,199), (30,2)) Direction.left >>> check_direction((0,0), (5,5), (10,0)) Direction.right """ x0, y0 = starting x1, y1 = via x2, y2 = target via_angle = degrees(atan2(y1 - y0, x1 - x0)) if via_angle < 0: via_angle += 360 target_angle = degrees(atan2(y2 - y0, x2 - x0)) if target_angle < 0: target_angle += 360 # t- # \ \ # \ v # \| # s # via_angle is always lower than target_angle, if direction is left. # If they are same, it means they are on a same line of convex hull. 
if target_angle > via_angle: return Direction.left elif target_angle == via_angle: return Direction.straight else: return Direction.right stack: deque[tuple[int, int]] = deque() stack.append(sorted_points[0]) stack.append(sorted_points[1]) stack.append(sorted_points[2]) # In any ways, the first 3 points line are towards left. # Because we sort them the angle from minx, miny. current_direction = Direction.left for i in range(3, len(sorted_points)): while True: starting = stack[-2] via = stack[-1] target = sorted_points[i] next_direction = check_direction(starting, via, target) if next_direction == Direction.left: current_direction = Direction.left break if next_direction == Direction.straight: if current_direction == Direction.left: # We keep current_direction as left. # Because if the straight line keeps as straight, # we want to know if this straight line is towards left. break elif current_direction == Direction.right: # If the straight line is towards right, # every previous points on those straigh line is not convex hull. stack.pop() if next_direction == Direction.right: stack.pop() stack.append(sorted_points[i]) return list(stack)
-1
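A small usage sketch for the convex-hull record above (assuming graham_scan from that file is in scope); it simply illustrates that the resulting hull does not depend on the input order of the points.

square = [(0, 1), (1, 1), (1, 0), (0, 0)]
print(graham_scan(square))  # [(0, 0), (1, 0), (1, 1), (0, 1)], the same hull as in the doctest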
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
#!/usr/bin/env python3 import os from collections.abc import Iterator def good_file_paths(top_dir: str = ".") -> Iterator[str]: for dir_path, dir_names, filenames in os.walk(top_dir): dir_names[:] = [d for d in dir_names if d != "scripts" and d[0] not in "._"] for filename in filenames: if filename == "__init__.py": continue if os.path.splitext(filename)[1] in (".py", ".ipynb"): yield os.path.join(dir_path, filename).lstrip("./") def md_prefix(i): return f"{i * ' '}*" if i else "\n##" def print_path(old_path: str, new_path: str) -> str: old_parts = old_path.split(os.sep) for i, new_part in enumerate(new_path.split(os.sep)): if i + 1 > len(old_parts) or old_parts[i] != new_part: if new_part: print(f"{md_prefix(i)} {new_part.replace('_', ' ').title()}") return new_path def print_directory_md(top_dir: str = ".") -> None: old_path = "" for filepath in sorted(good_file_paths(top_dir)): filepath, filename = os.path.split(filepath) if filepath != old_path: old_path = print_path(old_path, filepath) indent = (filepath.count(os.sep) + 1) if filepath else 0 url = "/".join((filepath, filename)).replace(" ", "%20") filename = os.path.splitext(filename.replace("_", " ").title())[0] print(f"{md_prefix(indent)} [{filename}]({url})") if __name__ == "__main__": print_directory_md(".")
#!/usr/bin/env python3 import os from collections.abc import Iterator def good_file_paths(top_dir: str = ".") -> Iterator[str]: for dir_path, dir_names, filenames in os.walk(top_dir): dir_names[:] = [d for d in dir_names if d != "scripts" and d[0] not in "._"] for filename in filenames: if filename == "__init__.py": continue if os.path.splitext(filename)[1] in (".py", ".ipynb"): yield os.path.join(dir_path, filename).lstrip("./") def md_prefix(i): return f"{i * ' '}*" if i else "\n##" def print_path(old_path: str, new_path: str) -> str: old_parts = old_path.split(os.sep) for i, new_part in enumerate(new_path.split(os.sep)): if i + 1 > len(old_parts) or old_parts[i] != new_part: if new_part: print(f"{md_prefix(i)} {new_part.replace('_', ' ').title()}") return new_path def print_directory_md(top_dir: str = ".") -> None: old_path = "" for filepath in sorted(good_file_paths(top_dir)): filepath, filename = os.path.split(filepath) if filepath != old_path: old_path = print_path(old_path, filepath) indent = (filepath.count(os.sep) + 1) if filepath else 0 url = "/".join((filepath, filename)).replace(" ", "%20") filename = os.path.splitext(filename.replace("_", " ").title())[0] print(f"{md_prefix(indent)} [{filename}]({url})") if __name__ == "__main__": print_directory_md(".")
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
# floyd_warshall.py """ The problem is to find the shortest distance between all pairs of vertices in a weighted directed graph that can have negative edge weights. """ def _print_dist(dist, v): print("\nThe shortest path matrix using Floyd Warshall algorithm\n") for i in range(v): for j in range(v): if dist[i][j] != float("inf"): print(int(dist[i][j]), end="\t") else: print("INF", end="\t") print() def floyd_warshall(graph, v): """ :param graph: 2D array calculated from weight[edge[i, j]] :type graph: List[List[float]] :param v: number of vertices :type v: int :return: shortest distance between all vertex pairs distance[u][v] will contain the shortest distance from vertex u to v. 1. For all edges from v to n, distance[i][j] = weight(edge(i, j)). 3. The algorithm then performs distance[i][j] = min(distance[i][j], distance[i][k] + distance[k][j]) for each possible pair i, j of vertices. 4. The above is repeated for each vertex k in the graph. 5. Whenever distance[i][j] is given a new minimum value, next vertex[i][j] is updated to the next vertex[i][k]. """ dist = [[float("inf") for _ in range(v)] for _ in range(v)] for i in range(v): for j in range(v): dist[i][j] = graph[i][j] # check vertex k against all other vertices (i, j) for k in range(v): # looping through rows of graph array for i in range(v): # looping through columns of graph array for j in range(v): if ( dist[i][k] != float("inf") and dist[k][j] != float("inf") and dist[i][k] + dist[k][j] < dist[i][j] ): dist[i][j] = dist[i][k] + dist[k][j] _print_dist(dist, v) return dist, v if __name__ == "__main__": v = int(input("Enter number of vertices: ")) e = int(input("Enter number of edges: ")) graph = [[float("inf") for i in range(v)] for j in range(v)] for i in range(v): graph[i][i] = 0.0 # src and dst are indices that must be within the array size graph[e][v] # failure to follow this will result in an error for i in range(e): print("\nEdge ", i + 1) src = int(input("Enter source:")) dst = int(input("Enter destination:")) weight = float(input("Enter weight:")) graph[src][dst] = weight floyd_warshall(graph, v) # Example Input # Enter number of vertices: 3 # Enter number of edges: 2 # # generated graph from vertex and edge inputs # [[inf, inf, inf], [inf, inf, inf], [inf, inf, inf]] # [[0.0, inf, inf], [inf, 0.0, inf], [inf, inf, 0.0]] # specify source, destination and weight for edge #1 # Edge 1 # Enter source:1 # Enter destination:2 # Enter weight:2 # specify source, destination and weight for edge #2 # Edge 2 # Enter source:2 # Enter destination:1 # Enter weight:1 # # Expected Output from the vertice, edge and src, dst, weight inputs!! # 0 INF INF # INF 0 2 # INF 1 0
# floyd_warshall.py """ The problem is to find the shortest distance between all pairs of vertices in a weighted directed graph that can have negative edge weights. """ def _print_dist(dist, v): print("\nThe shortest path matrix using Floyd Warshall algorithm\n") for i in range(v): for j in range(v): if dist[i][j] != float("inf"): print(int(dist[i][j]), end="\t") else: print("INF", end="\t") print() def floyd_warshall(graph, v): """ :param graph: 2D array calculated from weight[edge[i, j]] :type graph: List[List[float]] :param v: number of vertices :type v: int :return: shortest distance between all vertex pairs distance[u][v] will contain the shortest distance from vertex u to v. 1. For all edges from v to n, distance[i][j] = weight(edge(i, j)). 3. The algorithm then performs distance[i][j] = min(distance[i][j], distance[i][k] + distance[k][j]) for each possible pair i, j of vertices. 4. The above is repeated for each vertex k in the graph. 5. Whenever distance[i][j] is given a new minimum value, next vertex[i][j] is updated to the next vertex[i][k]. """ dist = [[float("inf") for _ in range(v)] for _ in range(v)] for i in range(v): for j in range(v): dist[i][j] = graph[i][j] # check vertex k against all other vertices (i, j) for k in range(v): # looping through rows of graph array for i in range(v): # looping through columns of graph array for j in range(v): if ( dist[i][k] != float("inf") and dist[k][j] != float("inf") and dist[i][k] + dist[k][j] < dist[i][j] ): dist[i][j] = dist[i][k] + dist[k][j] _print_dist(dist, v) return dist, v if __name__ == "__main__": v = int(input("Enter number of vertices: ")) e = int(input("Enter number of edges: ")) graph = [[float("inf") for i in range(v)] for j in range(v)] for i in range(v): graph[i][i] = 0.0 # src and dst are indices that must be within the array size graph[e][v] # failure to follow this will result in an error for i in range(e): print("\nEdge ", i + 1) src = int(input("Enter source:")) dst = int(input("Enter destination:")) weight = float(input("Enter weight:")) graph[src][dst] = weight floyd_warshall(graph, v) # Example Input # Enter number of vertices: 3 # Enter number of edges: 2 # # generated graph from vertex and edge inputs # [[inf, inf, inf], [inf, inf, inf], [inf, inf, inf]] # [[0.0, inf, inf], [inf, 0.0, inf], [inf, inf, 0.0]] # specify source, destination and weight for edge #1 # Edge 1 # Enter source:1 # Enter destination:2 # Enter weight:2 # specify source, destination and weight for edge #2 # Edge 2 # Enter source:2 # Enter destination:1 # Enter weight:1 # # Expected Output from the vertice, edge and src, dst, weight inputs!! # 0 INF INF # INF 0 2 # INF 1 0
-1
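A non-interactive sketch for the Floyd-Warshall record above, building the adjacency matrix directly instead of reading it from standard input (assuming floyd_warshall from that file is in scope).

INF = float("inf")
graph = [
    [0.0, 2.0, INF],  # edge 0 -> 1 with weight 2
    [INF, 0.0, 1.0],  # edge 1 -> 2 with weight 1
    [4.0, INF, 0.0],  # edge 2 -> 0 with weight 4
]
dist, _ = floyd_warshall(graph, 3)  # also prints the distance matrix
print(dist[0][2])  # 3.0, reached via the path 0 -> 1 -> 2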
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
""" This is a python3 implementation of binary search tree using recursion To run tests: python -m unittest binary_search_tree_recursive.py To run an example: python binary_search_tree_recursive.py """ from __future__ import annotations import unittest from collections.abc import Iterator class Node: def __init__(self, label: int, parent: Node | None) -> None: self.label = label self.parent = parent self.left: Node | None = None self.right: Node | None = None class BinarySearchTree: def __init__(self) -> None: self.root: Node | None = None def empty(self) -> None: """ Empties the tree >>> t = BinarySearchTree() >>> assert t.root is None >>> t.put(8) >>> assert t.root is not None """ self.root = None def is_empty(self) -> bool: """ Checks if the tree is empty >>> t = BinarySearchTree() >>> t.is_empty() True >>> t.put(8) >>> t.is_empty() False """ return self.root is None def put(self, label: int) -> None: """ Put a new node in the tree >>> t = BinarySearchTree() >>> t.put(8) >>> assert t.root.parent is None >>> assert t.root.label == 8 >>> t.put(10) >>> assert t.root.right.parent == t.root >>> assert t.root.right.label == 10 >>> t.put(3) >>> assert t.root.left.parent == t.root >>> assert t.root.left.label == 3 """ self.root = self._put(self.root, label) def _put(self, node: Node | None, label: int, parent: Node | None = None) -> Node: if node is None: node = Node(label, parent) else: if label < node.label: node.left = self._put(node.left, label, node) elif label > node.label: node.right = self._put(node.right, label, node) else: raise Exception(f"Node with label {label} already exists") return node def search(self, label: int) -> Node: """ Searches a node in the tree >>> t = BinarySearchTree() >>> t.put(8) >>> t.put(10) >>> node = t.search(8) >>> assert node.label == 8 >>> node = t.search(3) Traceback (most recent call last): ... Exception: Node with label 3 does not exist """ return self._search(self.root, label) def _search(self, node: Node | None, label: int) -> Node: if node is None: raise Exception(f"Node with label {label} does not exist") else: if label < node.label: node = self._search(node.left, label) elif label > node.label: node = self._search(node.right, label) return node def remove(self, label: int) -> None: """ Removes a node in the tree >>> t = BinarySearchTree() >>> t.put(8) >>> t.put(10) >>> t.remove(8) >>> assert t.root.label == 10 >>> t.remove(3) Traceback (most recent call last): ... 
Exception: Node with label 3 does not exist """ node = self.search(label) if node.right and node.left: lowest_node = self._get_lowest_node(node.right) lowest_node.left = node.left lowest_node.right = node.right node.left.parent = lowest_node if node.right: node.right.parent = lowest_node self._reassign_nodes(node, lowest_node) elif not node.right and node.left: self._reassign_nodes(node, node.left) elif node.right and not node.left: self._reassign_nodes(node, node.right) else: self._reassign_nodes(node, None) def _reassign_nodes(self, node: Node, new_children: Node | None) -> None: if new_children: new_children.parent = node.parent if node.parent: if node.parent.right == node: node.parent.right = new_children else: node.parent.left = new_children else: self.root = new_children def _get_lowest_node(self, node: Node) -> Node: if node.left: lowest_node = self._get_lowest_node(node.left) else: lowest_node = node self._reassign_nodes(node, node.right) return lowest_node def exists(self, label: int) -> bool: """ Checks if a node exists in the tree >>> t = BinarySearchTree() >>> t.put(8) >>> t.put(10) >>> t.exists(8) True >>> t.exists(3) False """ try: self.search(label) return True except Exception: return False def get_max_label(self) -> int: """ Gets the max label inserted in the tree >>> t = BinarySearchTree() >>> t.get_max_label() Traceback (most recent call last): ... Exception: Binary search tree is empty >>> t.put(8) >>> t.put(10) >>> t.get_max_label() 10 """ if self.root is None: raise Exception("Binary search tree is empty") node = self.root while node.right is not None: node = node.right return node.label def get_min_label(self) -> int: """ Gets the min label inserted in the tree >>> t = BinarySearchTree() >>> t.get_min_label() Traceback (most recent call last): ... 
Exception: Binary search tree is empty >>> t.put(8) >>> t.put(10) >>> t.get_min_label() 8 """ if self.root is None: raise Exception("Binary search tree is empty") node = self.root while node.left is not None: node = node.left return node.label def inorder_traversal(self) -> Iterator[Node]: """ Return the inorder traversal of the tree >>> t = BinarySearchTree() >>> [i.label for i in t.inorder_traversal()] [] >>> t.put(8) >>> t.put(10) >>> t.put(9) >>> [i.label for i in t.inorder_traversal()] [8, 9, 10] """ return self._inorder_traversal(self.root) def _inorder_traversal(self, node: Node | None) -> Iterator[Node]: if node is not None: yield from self._inorder_traversal(node.left) yield node yield from self._inorder_traversal(node.right) def preorder_traversal(self) -> Iterator[Node]: """ Return the preorder traversal of the tree >>> t = BinarySearchTree() >>> [i.label for i in t.preorder_traversal()] [] >>> t.put(8) >>> t.put(10) >>> t.put(9) >>> [i.label for i in t.preorder_traversal()] [8, 10, 9] """ return self._preorder_traversal(self.root) def _preorder_traversal(self, node: Node | None) -> Iterator[Node]: if node is not None: yield node yield from self._preorder_traversal(node.left) yield from self._preorder_traversal(node.right) class BinarySearchTreeTest(unittest.TestCase): @staticmethod def _get_binary_search_tree() -> BinarySearchTree: r""" 8 / \ 3 10 / \ \ 1 6 14 / \ / 4 7 13 \ 5 """ t = BinarySearchTree() t.put(8) t.put(3) t.put(6) t.put(1) t.put(10) t.put(14) t.put(13) t.put(4) t.put(7) t.put(5) return t def test_put(self) -> None: t = BinarySearchTree() assert t.is_empty() t.put(8) r""" 8 """ assert t.root is not None assert t.root.parent is None assert t.root.label == 8 t.put(10) r""" 8 \ 10 """ assert t.root.right is not None assert t.root.right.parent == t.root assert t.root.right.label == 10 t.put(3) r""" 8 / \ 3 10 """ assert t.root.left is not None assert t.root.left.parent == t.root assert t.root.left.label == 3 t.put(6) r""" 8 / \ 3 10 \ 6 """ assert t.root.left.right is not None assert t.root.left.right.parent == t.root.left assert t.root.left.right.label == 6 t.put(1) r""" 8 / \ 3 10 / \ 1 6 """ assert t.root.left.left is not None assert t.root.left.left.parent == t.root.left assert t.root.left.left.label == 1 with self.assertRaises(Exception): # noqa: B017 t.put(1) def test_search(self) -> None: t = self._get_binary_search_tree() node = t.search(6) assert node.label == 6 node = t.search(13) assert node.label == 13 with self.assertRaises(Exception): # noqa: B017 t.search(2) def test_remove(self) -> None: t = self._get_binary_search_tree() t.remove(13) r""" 8 / \ 3 10 / \ \ 1 6 14 / \ 4 7 \ 5 """ assert t.root is not None assert t.root.right is not None assert t.root.right.right is not None assert t.root.right.right.right is None assert t.root.right.right.left is None t.remove(7) r""" 8 / \ 3 10 / \ \ 1 6 14 / 4 \ 5 """ assert t.root.left is not None assert t.root.left.right is not None assert t.root.left.right.left is not None assert t.root.left.right.right is None assert t.root.left.right.left.label == 4 t.remove(6) r""" 8 / \ 3 10 / \ \ 1 4 14 \ 5 """ assert t.root.left.left is not None assert t.root.left.right.right is not None assert t.root.left.left.label == 1 assert t.root.left.right.label == 4 assert t.root.left.right.right.label == 5 assert t.root.left.right.left is None assert t.root.left.left.parent == t.root.left assert t.root.left.right.parent == t.root.left t.remove(3) r""" 8 / \ 4 10 / \ \ 1 5 14 """ assert t.root is not None assert t.root.left.label == 
4 assert t.root.left.right.label == 5 assert t.root.left.left.label == 1 assert t.root.left.parent == t.root assert t.root.left.left.parent == t.root.left assert t.root.left.right.parent == t.root.left t.remove(4) r""" 8 / \ 5 10 / \ 1 14 """ assert t.root.left is not None assert t.root.left.left is not None assert t.root.left.label == 5 assert t.root.left.right is None assert t.root.left.left.label == 1 assert t.root.left.parent == t.root assert t.root.left.left.parent == t.root.left def test_remove_2(self) -> None: t = self._get_binary_search_tree() t.remove(3) r""" 8 / \ 4 10 / \ \ 1 6 14 / \ / 5 7 13 """ assert t.root is not None assert t.root.left is not None assert t.root.left.left is not None assert t.root.left.right is not None assert t.root.left.right.left is not None assert t.root.left.right.right is not None assert t.root.left.label == 4 assert t.root.left.right.label == 6 assert t.root.left.left.label == 1 assert t.root.left.right.right.label == 7 assert t.root.left.right.left.label == 5 assert t.root.left.parent == t.root assert t.root.left.right.parent == t.root.left assert t.root.left.left.parent == t.root.left assert t.root.left.right.left.parent == t.root.left.right def test_empty(self) -> None: t = self._get_binary_search_tree() t.empty() assert t.root is None def test_is_empty(self) -> None: t = self._get_binary_search_tree() assert not t.is_empty() t.empty() assert t.is_empty() def test_exists(self) -> None: t = self._get_binary_search_tree() assert t.exists(6) assert not t.exists(-1) def test_get_max_label(self) -> None: t = self._get_binary_search_tree() assert t.get_max_label() == 14 t.empty() with self.assertRaises(Exception): # noqa: B017 t.get_max_label() def test_get_min_label(self) -> None: t = self._get_binary_search_tree() assert t.get_min_label() == 1 t.empty() with self.assertRaises(Exception): # noqa: B017 t.get_min_label() def test_inorder_traversal(self) -> None: t = self._get_binary_search_tree() inorder_traversal_nodes = [i.label for i in t.inorder_traversal()] assert inorder_traversal_nodes == [1, 3, 4, 5, 6, 7, 8, 10, 13, 14] def test_preorder_traversal(self) -> None: t = self._get_binary_search_tree() preorder_traversal_nodes = [i.label for i in t.preorder_traversal()] assert preorder_traversal_nodes == [8, 3, 1, 6, 4, 5, 7, 10, 14, 13] def binary_search_tree_example() -> None: r""" Example 8 / \ 3 10 / \ \ 1 6 14 / \ / 4 7 13 \ 5 Example After Deletion 4 / \ 1 7 \ 5 """ t = BinarySearchTree() t.put(8) t.put(3) t.put(6) t.put(1) t.put(10) t.put(14) t.put(13) t.put(4) t.put(7) t.put(5) print( """ 8 / \\ 3 10 / \\ \\ 1 6 14 / \\ / 4 7 13 \\ 5 """ ) print("Label 6 exists:", t.exists(6)) print("Label 13 exists:", t.exists(13)) print("Label -1 exists:", t.exists(-1)) print("Label 12 exists:", t.exists(12)) # Prints all the elements of the list in inorder traversal inorder_traversal_nodes = [i.label for i in t.inorder_traversal()] print("Inorder traversal:", inorder_traversal_nodes) # Prints all the elements of the list in preorder traversal preorder_traversal_nodes = [i.label for i in t.preorder_traversal()] print("Preorder traversal:", preorder_traversal_nodes) print("Max. label:", t.get_max_label()) print("Min. 
label:", t.get_min_label()) # Delete elements print("\nDeleting elements 13, 10, 8, 3, 6, 14") print( """ 4 / \\ 1 7 \\ 5 """ ) t.remove(13) t.remove(10) t.remove(8) t.remove(3) t.remove(6) t.remove(14) # Prints all the elements of the list in inorder traversal after delete inorder_traversal_nodes = [i.label for i in t.inorder_traversal()] print("Inorder traversal after delete:", inorder_traversal_nodes) # Prints all the elements of the list in preorder traversal after delete preorder_traversal_nodes = [i.label for i in t.preorder_traversal()] print("Preorder traversal after delete:", preorder_traversal_nodes) print("Max. label:", t.get_max_label()) print("Min. label:", t.get_min_label()) if __name__ == "__main__": binary_search_tree_example()
""" This is a python3 implementation of binary search tree using recursion To run tests: python -m unittest binary_search_tree_recursive.py To run an example: python binary_search_tree_recursive.py """ from __future__ import annotations import unittest from collections.abc import Iterator class Node: def __init__(self, label: int, parent: Node | None) -> None: self.label = label self.parent = parent self.left: Node | None = None self.right: Node | None = None class BinarySearchTree: def __init__(self) -> None: self.root: Node | None = None def empty(self) -> None: """ Empties the tree >>> t = BinarySearchTree() >>> assert t.root is None >>> t.put(8) >>> assert t.root is not None """ self.root = None def is_empty(self) -> bool: """ Checks if the tree is empty >>> t = BinarySearchTree() >>> t.is_empty() True >>> t.put(8) >>> t.is_empty() False """ return self.root is None def put(self, label: int) -> None: """ Put a new node in the tree >>> t = BinarySearchTree() >>> t.put(8) >>> assert t.root.parent is None >>> assert t.root.label == 8 >>> t.put(10) >>> assert t.root.right.parent == t.root >>> assert t.root.right.label == 10 >>> t.put(3) >>> assert t.root.left.parent == t.root >>> assert t.root.left.label == 3 """ self.root = self._put(self.root, label) def _put(self, node: Node | None, label: int, parent: Node | None = None) -> Node: if node is None: node = Node(label, parent) else: if label < node.label: node.left = self._put(node.left, label, node) elif label > node.label: node.right = self._put(node.right, label, node) else: raise Exception(f"Node with label {label} already exists") return node def search(self, label: int) -> Node: """ Searches a node in the tree >>> t = BinarySearchTree() >>> t.put(8) >>> t.put(10) >>> node = t.search(8) >>> assert node.label == 8 >>> node = t.search(3) Traceback (most recent call last): ... Exception: Node with label 3 does not exist """ return self._search(self.root, label) def _search(self, node: Node | None, label: int) -> Node: if node is None: raise Exception(f"Node with label {label} does not exist") else: if label < node.label: node = self._search(node.left, label) elif label > node.label: node = self._search(node.right, label) return node def remove(self, label: int) -> None: """ Removes a node in the tree >>> t = BinarySearchTree() >>> t.put(8) >>> t.put(10) >>> t.remove(8) >>> assert t.root.label == 10 >>> t.remove(3) Traceback (most recent call last): ... 
Exception: Node with label 3 does not exist """ node = self.search(label) if node.right and node.left: lowest_node = self._get_lowest_node(node.right) lowest_node.left = node.left lowest_node.right = node.right node.left.parent = lowest_node if node.right: node.right.parent = lowest_node self._reassign_nodes(node, lowest_node) elif not node.right and node.left: self._reassign_nodes(node, node.left) elif node.right and not node.left: self._reassign_nodes(node, node.right) else: self._reassign_nodes(node, None) def _reassign_nodes(self, node: Node, new_children: Node | None) -> None: if new_children: new_children.parent = node.parent if node.parent: if node.parent.right == node: node.parent.right = new_children else: node.parent.left = new_children else: self.root = new_children def _get_lowest_node(self, node: Node) -> Node: if node.left: lowest_node = self._get_lowest_node(node.left) else: lowest_node = node self._reassign_nodes(node, node.right) return lowest_node def exists(self, label: int) -> bool: """ Checks if a node exists in the tree >>> t = BinarySearchTree() >>> t.put(8) >>> t.put(10) >>> t.exists(8) True >>> t.exists(3) False """ try: self.search(label) return True except Exception: return False def get_max_label(self) -> int: """ Gets the max label inserted in the tree >>> t = BinarySearchTree() >>> t.get_max_label() Traceback (most recent call last): ... Exception: Binary search tree is empty >>> t.put(8) >>> t.put(10) >>> t.get_max_label() 10 """ if self.root is None: raise Exception("Binary search tree is empty") node = self.root while node.right is not None: node = node.right return node.label def get_min_label(self) -> int: """ Gets the min label inserted in the tree >>> t = BinarySearchTree() >>> t.get_min_label() Traceback (most recent call last): ... 
Exception: Binary search tree is empty >>> t.put(8) >>> t.put(10) >>> t.get_min_label() 8 """ if self.root is None: raise Exception("Binary search tree is empty") node = self.root while node.left is not None: node = node.left return node.label def inorder_traversal(self) -> Iterator[Node]: """ Return the inorder traversal of the tree >>> t = BinarySearchTree() >>> [i.label for i in t.inorder_traversal()] [] >>> t.put(8) >>> t.put(10) >>> t.put(9) >>> [i.label for i in t.inorder_traversal()] [8, 9, 10] """ return self._inorder_traversal(self.root) def _inorder_traversal(self, node: Node | None) -> Iterator[Node]: if node is not None: yield from self._inorder_traversal(node.left) yield node yield from self._inorder_traversal(node.right) def preorder_traversal(self) -> Iterator[Node]: """ Return the preorder traversal of the tree >>> t = BinarySearchTree() >>> [i.label for i in t.preorder_traversal()] [] >>> t.put(8) >>> t.put(10) >>> t.put(9) >>> [i.label for i in t.preorder_traversal()] [8, 10, 9] """ return self._preorder_traversal(self.root) def _preorder_traversal(self, node: Node | None) -> Iterator[Node]: if node is not None: yield node yield from self._preorder_traversal(node.left) yield from self._preorder_traversal(node.right) class BinarySearchTreeTest(unittest.TestCase): @staticmethod def _get_binary_search_tree() -> BinarySearchTree: r""" 8 / \ 3 10 / \ \ 1 6 14 / \ / 4 7 13 \ 5 """ t = BinarySearchTree() t.put(8) t.put(3) t.put(6) t.put(1) t.put(10) t.put(14) t.put(13) t.put(4) t.put(7) t.put(5) return t def test_put(self) -> None: t = BinarySearchTree() assert t.is_empty() t.put(8) r""" 8 """ assert t.root is not None assert t.root.parent is None assert t.root.label == 8 t.put(10) r""" 8 \ 10 """ assert t.root.right is not None assert t.root.right.parent == t.root assert t.root.right.label == 10 t.put(3) r""" 8 / \ 3 10 """ assert t.root.left is not None assert t.root.left.parent == t.root assert t.root.left.label == 3 t.put(6) r""" 8 / \ 3 10 \ 6 """ assert t.root.left.right is not None assert t.root.left.right.parent == t.root.left assert t.root.left.right.label == 6 t.put(1) r""" 8 / \ 3 10 / \ 1 6 """ assert t.root.left.left is not None assert t.root.left.left.parent == t.root.left assert t.root.left.left.label == 1 with self.assertRaises(Exception): # noqa: B017 t.put(1) def test_search(self) -> None: t = self._get_binary_search_tree() node = t.search(6) assert node.label == 6 node = t.search(13) assert node.label == 13 with self.assertRaises(Exception): # noqa: B017 t.search(2) def test_remove(self) -> None: t = self._get_binary_search_tree() t.remove(13) r""" 8 / \ 3 10 / \ \ 1 6 14 / \ 4 7 \ 5 """ assert t.root is not None assert t.root.right is not None assert t.root.right.right is not None assert t.root.right.right.right is None assert t.root.right.right.left is None t.remove(7) r""" 8 / \ 3 10 / \ \ 1 6 14 / 4 \ 5 """ assert t.root.left is not None assert t.root.left.right is not None assert t.root.left.right.left is not None assert t.root.left.right.right is None assert t.root.left.right.left.label == 4 t.remove(6) r""" 8 / \ 3 10 / \ \ 1 4 14 \ 5 """ assert t.root.left.left is not None assert t.root.left.right.right is not None assert t.root.left.left.label == 1 assert t.root.left.right.label == 4 assert t.root.left.right.right.label == 5 assert t.root.left.right.left is None assert t.root.left.left.parent == t.root.left assert t.root.left.right.parent == t.root.left t.remove(3) r""" 8 / \ 4 10 / \ \ 1 5 14 """ assert t.root is not None assert t.root.left.label == 
4 assert t.root.left.right.label == 5 assert t.root.left.left.label == 1 assert t.root.left.parent == t.root assert t.root.left.left.parent == t.root.left assert t.root.left.right.parent == t.root.left t.remove(4) r""" 8 / \ 5 10 / \ 1 14 """ assert t.root.left is not None assert t.root.left.left is not None assert t.root.left.label == 5 assert t.root.left.right is None assert t.root.left.left.label == 1 assert t.root.left.parent == t.root assert t.root.left.left.parent == t.root.left def test_remove_2(self) -> None: t = self._get_binary_search_tree() t.remove(3) r""" 8 / \ 4 10 / \ \ 1 6 14 / \ / 5 7 13 """ assert t.root is not None assert t.root.left is not None assert t.root.left.left is not None assert t.root.left.right is not None assert t.root.left.right.left is not None assert t.root.left.right.right is not None assert t.root.left.label == 4 assert t.root.left.right.label == 6 assert t.root.left.left.label == 1 assert t.root.left.right.right.label == 7 assert t.root.left.right.left.label == 5 assert t.root.left.parent == t.root assert t.root.left.right.parent == t.root.left assert t.root.left.left.parent == t.root.left assert t.root.left.right.left.parent == t.root.left.right def test_empty(self) -> None: t = self._get_binary_search_tree() t.empty() assert t.root is None def test_is_empty(self) -> None: t = self._get_binary_search_tree() assert not t.is_empty() t.empty() assert t.is_empty() def test_exists(self) -> None: t = self._get_binary_search_tree() assert t.exists(6) assert not t.exists(-1) def test_get_max_label(self) -> None: t = self._get_binary_search_tree() assert t.get_max_label() == 14 t.empty() with self.assertRaises(Exception): # noqa: B017 t.get_max_label() def test_get_min_label(self) -> None: t = self._get_binary_search_tree() assert t.get_min_label() == 1 t.empty() with self.assertRaises(Exception): # noqa: B017 t.get_min_label() def test_inorder_traversal(self) -> None: t = self._get_binary_search_tree() inorder_traversal_nodes = [i.label for i in t.inorder_traversal()] assert inorder_traversal_nodes == [1, 3, 4, 5, 6, 7, 8, 10, 13, 14] def test_preorder_traversal(self) -> None: t = self._get_binary_search_tree() preorder_traversal_nodes = [i.label for i in t.preorder_traversal()] assert preorder_traversal_nodes == [8, 3, 1, 6, 4, 5, 7, 10, 14, 13] def binary_search_tree_example() -> None: r""" Example 8 / \ 3 10 / \ \ 1 6 14 / \ / 4 7 13 \ 5 Example After Deletion 4 / \ 1 7 \ 5 """ t = BinarySearchTree() t.put(8) t.put(3) t.put(6) t.put(1) t.put(10) t.put(14) t.put(13) t.put(4) t.put(7) t.put(5) print( """ 8 / \\ 3 10 / \\ \\ 1 6 14 / \\ / 4 7 13 \\ 5 """ ) print("Label 6 exists:", t.exists(6)) print("Label 13 exists:", t.exists(13)) print("Label -1 exists:", t.exists(-1)) print("Label 12 exists:", t.exists(12)) # Prints all the elements of the list in inorder traversal inorder_traversal_nodes = [i.label for i in t.inorder_traversal()] print("Inorder traversal:", inorder_traversal_nodes) # Prints all the elements of the list in preorder traversal preorder_traversal_nodes = [i.label for i in t.preorder_traversal()] print("Preorder traversal:", preorder_traversal_nodes) print("Max. label:", t.get_max_label()) print("Min. 
label:", t.get_min_label()) # Delete elements print("\nDeleting elements 13, 10, 8, 3, 6, 14") print( """ 4 / \\ 1 7 \\ 5 """ ) t.remove(13) t.remove(10) t.remove(8) t.remove(3) t.remove(6) t.remove(14) # Prints all the elements of the list in inorder traversal after delete inorder_traversal_nodes = [i.label for i in t.inorder_traversal()] print("Inorder traversal after delete:", inorder_traversal_nodes) # Prints all the elements of the list in preorder traversal after delete preorder_traversal_nodes = [i.label for i in t.preorder_traversal()] print("Preorder traversal after delete:", preorder_traversal_nodes) print("Max. label:", t.get_max_label()) print("Min. label:", t.get_min_label()) if __name__ == "__main__": binary_search_tree_example()
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
from bisect import bisect from itertools import accumulate def frac_knapsack(vl, wt, w, n): """ >>> frac_knapsack([60, 100, 120], [10, 20, 30], 50, 3) 240.0 """ r = sorted(zip(vl, wt), key=lambda x: x[0] / x[1], reverse=True) vl, wt = [i[0] for i in r], [i[1] for i in r] acc = list(accumulate(wt)) k = bisect(acc, w) return ( 0 if k == 0 else sum(vl[:k]) + (w - acc[k - 1]) * (vl[k]) / (wt[k]) if k != n else sum(vl[:k]) ) if __name__ == "__main__": import doctest doctest.testmod()
from bisect import bisect from itertools import accumulate def frac_knapsack(vl, wt, w, n): """ >>> frac_knapsack([60, 100, 120], [10, 20, 30], 50, 3) 240.0 """ r = sorted(zip(vl, wt), key=lambda x: x[0] / x[1], reverse=True) vl, wt = [i[0] for i in r], [i[1] for i in r] acc = list(accumulate(wt)) k = bisect(acc, w) return ( 0 if k == 0 else sum(vl[:k]) + (w - acc[k - 1]) * (vl[k]) / (wt[k]) if k != n else sum(vl[:k]) ) if __name__ == "__main__": import doctest doctest.testmod()
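The `bisect`/`accumulate` one-liner above is compact but terse; a sketch of the same greedy idea written as an explicit loop (illustrative only, not the repository function) may make the value-to-weight ordering easier to follow:

```python
def fractional_knapsack(values, weights, capacity):
    """Greedy fractional knapsack: take items by value density until full."""
    # Sort items by value per unit weight, best first.
    items = sorted(zip(values, weights), key=lambda vw: vw[0] / vw[1], reverse=True)
    total = 0.0
    for value, weight in items:
        if capacity >= weight:  # the whole item fits
            total += value
            capacity -= weight
        else:  # take only the fraction that still fits, then stop
            total += value * capacity / weight
            break
    return total


print(fractional_knapsack([60, 100, 120], [10, 20, 30], 50))  # 240.0
```

With capacity 50 the loop takes the 60- and 100-value items whole and two thirds of the last one, reproducing the 240.0 from the doctest above.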
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
from __future__ import annotations from cmath import sqrt def quadratic_roots(a: int, b: int, c: int) -> tuple[complex, complex]: """ Given the numerical coefficients a, b and c, calculates the roots for any quadratic equation of the form ax^2 + bx + c >>> quadratic_roots(a=1, b=3, c=-4) (1.0, -4.0) >>> quadratic_roots(5, 6, 1) (-0.2, -1.0) >>> quadratic_roots(1, -6, 25) ((3+4j), (3-4j)) """ if a == 0: raise ValueError("Coefficient 'a' must not be zero.") delta = b * b - 4 * a * c root_1 = (-b + sqrt(delta)) / (2 * a) root_2 = (-b - sqrt(delta)) / (2 * a) return ( root_1.real if not root_1.imag else root_1, root_2.real if not root_2.imag else root_2, ) def main(): solution1, solution2 = quadratic_roots(a=5, b=6, c=1) print(f"The solutions are: {solution1} and {solution2}") if __name__ == "__main__": main()
from __future__ import annotations from cmath import sqrt def quadratic_roots(a: int, b: int, c: int) -> tuple[complex, complex]: """ Given the numerical coefficients a, b and c, calculates the roots for any quadratic equation of the form ax^2 + bx + c >>> quadratic_roots(a=1, b=3, c=-4) (1.0, -4.0) >>> quadratic_roots(5, 6, 1) (-0.2, -1.0) >>> quadratic_roots(1, -6, 25) ((3+4j), (3-4j)) """ if a == 0: raise ValueError("Coefficient 'a' must not be zero.") delta = b * b - 4 * a * c root_1 = (-b + sqrt(delta)) / (2 * a) root_2 = (-b - sqrt(delta)) / (2 * a) return ( root_1.real if not root_1.imag else root_1, root_2.real if not root_2.imag else root_2, ) def main(): solution1, solution2 = quadratic_roots(a=5, b=6, c=1) print(f"The solutions are: {solution1} and {solution2}") if __name__ == "__main__": main()
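As a quick sanity check of the formula used above, the discriminant b*b - 4*a*c decides whether the roots are real. A tiny worked example using only `cmath` from the standard library (illustrative, separate from the `quadratic_roots` function itself):

```python
from cmath import sqrt

a, b, c = 5, 6, 1
delta = b * b - 4 * a * c  # discriminant: 36 - 20 = 16 > 0, so two real roots
roots = ((-b + sqrt(delta)) / (2 * a), (-b - sqrt(delta)) / (2 * a))
print(roots)  # ((-0.2+0j), (-1+0j)) -- real parts match quadratic_roots(5, 6, 1)
```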
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
import numpy as np def qr_householder(a): """Return a QR-decomposition of the matrix A using Householder reflection. The QR-decomposition decomposes the matrix A of shape (m, n) into an orthogonal matrix Q of shape (m, m) and an upper triangular matrix R of shape (m, n). Note that the matrix A does not have to be square. This method of decomposing A uses the Householder reflection, which is numerically stable and of complexity O(n^3). https://en.wikipedia.org/wiki/QR_decomposition#Using_Householder_reflections Arguments: A -- a numpy.ndarray of shape (m, n) Note: several optimizations can be made for numeric efficiency, but this is intended to demonstrate how it would be represented in a mathematics textbook. In cases where efficiency is particularly important, an optimized version from BLAS should be used. >>> A = np.array([[12, -51, 4], [6, 167, -68], [-4, 24, -41]], dtype=float) >>> Q, R = qr_householder(A) >>> # check that the decomposition is correct >>> np.allclose(Q@R, A) True >>> # check that Q is orthogonal >>> np.allclose([email protected], np.eye(A.shape[0])) True >>> np.allclose(Q.T@Q, np.eye(A.shape[0])) True >>> # check that R is upper triangular >>> np.allclose(np.triu(R), R) True """ m, n = a.shape t = min(m, n) q = np.eye(m) r = a.copy() for k in range(t - 1): # select a column of modified matrix A': x = r[k:, [k]] # construct first basis vector e1 = np.zeros_like(x) e1[0] = 1.0 # determine scaling factor alpha = np.linalg.norm(x) # construct vector v for Householder reflection v = x + np.sign(x[0]) * alpha * e1 v /= np.linalg.norm(v) # construct the Householder matrix q_k = np.eye(m - k) - 2.0 * v @ v.T # pad with ones and zeros as necessary q_k = np.block([[np.eye(k), np.zeros((k, m - k))], [np.zeros((m - k, k)), q_k]]) q = q @ q_k.T r = q_k @ r return q, r if __name__ == "__main__": import doctest doctest.testmod()
import numpy as np def qr_householder(a): """Return a QR-decomposition of the matrix A using Householder reflection. The QR-decomposition decomposes the matrix A of shape (m, n) into an orthogonal matrix Q of shape (m, m) and an upper triangular matrix R of shape (m, n). Note that the matrix A does not have to be square. This method of decomposing A uses the Householder reflection, which is numerically stable and of complexity O(n^3). https://en.wikipedia.org/wiki/QR_decomposition#Using_Householder_reflections Arguments: A -- a numpy.ndarray of shape (m, n) Note: several optimizations can be made for numeric efficiency, but this is intended to demonstrate how it would be represented in a mathematics textbook. In cases where efficiency is particularly important, an optimized version from BLAS should be used. >>> A = np.array([[12, -51, 4], [6, 167, -68], [-4, 24, -41]], dtype=float) >>> Q, R = qr_householder(A) >>> # check that the decomposition is correct >>> np.allclose(Q@R, A) True >>> # check that Q is orthogonal >>> np.allclose([email protected], np.eye(A.shape[0])) True >>> np.allclose(Q.T@Q, np.eye(A.shape[0])) True >>> # check that R is upper triangular >>> np.allclose(np.triu(R), R) True """ m, n = a.shape t = min(m, n) q = np.eye(m) r = a.copy() for k in range(t - 1): # select a column of modified matrix A': x = r[k:, [k]] # construct first basis vector e1 = np.zeros_like(x) e1[0] = 1.0 # determine scaling factor alpha = np.linalg.norm(x) # construct vector v for Householder reflection v = x + np.sign(x[0]) * alpha * e1 v /= np.linalg.norm(v) # construct the Householder matrix q_k = np.eye(m - k) - 2.0 * v @ v.T # pad with ones and zeros as necessary q_k = np.block([[np.eye(k), np.zeros((k, m - k))], [np.zeros((m - k, k)), q_k]]) q = q @ q_k.T r = q_k @ r return q, r if __name__ == "__main__": import doctest doctest.testmod()
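A brief cross-check of the same factorization properties, hedged as an illustration: NumPy ships its own decomposition (`numpy.linalg.qr`), so comparing on the matrix from the doctest is an easy way to confirm what Q and R should satisfy:

```python
import numpy as np

a = np.array([[12, -51, 4], [6, 167, -68], [-4, 24, -41]], dtype=float)
q, r = np.linalg.qr(a)  # reference decomposition from NumPy

print(np.allclose(q @ r, a))            # True: Q @ R reconstructs A
print(np.allclose(q.T @ q, np.eye(3)))  # True: Q is orthogonal
print(np.allclose(np.triu(r), r))       # True: R is upper triangular
```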
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
""" Wikipedia: https://en.wikipedia.org/wiki/Enigma_machine Video explanation: https://youtu.be/QwQVMqfoB2E Also check out Numberphile's and Computerphile's videos on this topic This module contains function 'enigma' which emulates the famous Enigma machine from WWII. Module includes: - enigma function - showcase of function usage - 9 randomly generated rotors - reflector (aka static rotor) - original alphabet Created by TrapinchO """ from __future__ import annotations RotorPositionT = tuple[int, int, int] RotorSelectionT = tuple[str, str, str] # used alphabet -------------------------- # from string.ascii_uppercase abc = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" # -------------------------- default selection -------------------------- # rotors -------------------------- rotor1 = "EGZWVONAHDCLFQMSIPJBYUKXTR" rotor2 = "FOBHMDKEXQNRAULPGSJVTYICZW" rotor3 = "ZJXESIUQLHAVRMDOYGTNFWPBKC" # reflector -------------------------- reflector = { "A": "N", "N": "A", "B": "O", "O": "B", "C": "P", "P": "C", "D": "Q", "Q": "D", "E": "R", "R": "E", "F": "S", "S": "F", "G": "T", "T": "G", "H": "U", "U": "H", "I": "V", "V": "I", "J": "W", "W": "J", "K": "X", "X": "K", "L": "Y", "Y": "L", "M": "Z", "Z": "M", } # -------------------------- extra rotors -------------------------- rotor4 = "RMDJXFUWGISLHVTCQNKYPBEZOA" rotor5 = "SGLCPQWZHKXAREONTFBVIYJUDM" rotor6 = "HVSICLTYKQUBXDWAJZOMFGPREN" rotor7 = "RZWQHFMVDBKICJLNTUXAGYPSOE" rotor8 = "LFKIJODBEGAMQPXVUHYSTCZRWN" rotor9 = "KOAEGVDHXPQZMLFTYWJNBRCIUS" def _validator( rotpos: RotorPositionT, rotsel: RotorSelectionT, pb: str ) -> tuple[RotorPositionT, RotorSelectionT, dict[str, str]]: """ Checks if the values can be used for the 'enigma' function >>> _validator((1,1,1), (rotor1, rotor2, rotor3), 'POLAND') ((1, 1, 1), ('EGZWVONAHDCLFQMSIPJBYUKXTR', 'FOBHMDKEXQNRAULPGSJVTYICZW', \ 'ZJXESIUQLHAVRMDOYGTNFWPBKC'), \ {'P': 'O', 'O': 'P', 'L': 'A', 'A': 'L', 'N': 'D', 'D': 'N'}) :param rotpos: rotor_positon :param rotsel: rotor_selection :param pb: plugb -> validated and transformed :return: (rotpos, rotsel, pb) """ # Checks if there are 3 unique rotors unique_rotsel = len(set(rotsel)) if unique_rotsel < 3: raise Exception(f"Please use 3 unique rotors (not {unique_rotsel})") # Checks if rotor positions are valid rotorpos1, rotorpos2, rotorpos3 = rotpos if not 0 < rotorpos1 <= len(abc): raise ValueError( "First rotor position is not within range of 1..26 (" f"{rotorpos1}" ) if not 0 < rotorpos2 <= len(abc): raise ValueError( "Second rotor position is not within range of 1..26 (" f"{rotorpos2})" ) if not 0 < rotorpos3 <= len(abc): raise ValueError( "Third rotor position is not within range of 1..26 (" f"{rotorpos3})" ) # Validates string and returns dict pbdict = _plugboard(pb) return rotpos, rotsel, pbdict def _plugboard(pbstring: str) -> dict[str, str]: """ https://en.wikipedia.org/wiki/Enigma_machine#Plugboard >>> _plugboard('PICTURES') {'P': 'I', 'I': 'P', 'C': 'T', 'T': 'C', 'U': 'R', 'R': 'U', 'E': 'S', 'S': 'E'} >>> _plugboard('POLAND') {'P': 'O', 'O': 'P', 'L': 'A', 'A': 'L', 'N': 'D', 'D': 'N'} In the code, 'pb' stands for 'plugboard' Pairs can be separated by spaces :param pbstring: string containing plugboard setting for the Enigma machine :return: dictionary containing converted pairs """ # tests the input string if it # a) is type string # b) has even length (so pairs can be made) if not isinstance(pbstring, str): raise TypeError(f"Plugboard setting isn't type string ({type(pbstring)})") elif len(pbstring) % 2 != 0: raise Exception(f"Odd number of symbols 
({len(pbstring)})") elif pbstring == "": return {} pbstring.replace(" ", "") # Checks if all characters are unique tmppbl = set() for i in pbstring: if i not in abc: raise Exception(f"'{i}' not in list of symbols") elif i in tmppbl: raise Exception(f"Duplicate symbol ({i})") else: tmppbl.add(i) del tmppbl # Created the dictionary pb = {} for j in range(0, len(pbstring) - 1, 2): pb[pbstring[j]] = pbstring[j + 1] pb[pbstring[j + 1]] = pbstring[j] return pb def enigma( text: str, rotor_position: RotorPositionT, rotor_selection: RotorSelectionT = (rotor1, rotor2, rotor3), plugb: str = "", ) -> str: """ The only difference with real-world enigma is that I allowed string input. All characters are converted to uppercase. (non-letter symbol are ignored) How it works: (for every letter in the message) - Input letter goes into the plugboard. If it is connected to another one, switch it. - Letter goes through 3 rotors. Each rotor can be represented as 2 sets of symbol, where one is shuffled. Each symbol from the first set has corresponding symbol in the second set and vice versa. example: | ABCDEFGHIJKLMNOPQRSTUVWXYZ | e.g. F=D and D=F | VKLEPDBGRNWTFCJOHQAMUZYIXS | - Symbol then goes through reflector (static rotor). There it is switched with paired symbol The reflector can be represented as2 sets, each with half of the alphanet. There are usually 10 pairs of letters. Example: | ABCDEFGHIJKLM | e.g. E is paired to X | ZYXWVUTSRQPON | so when E goes in X goes out and vice versa - Letter then goes through the rotors again - If the letter is connected to plugboard, it is switched. - Return the letter >>> enigma('Hello World!', (1, 2, 1), plugb='pictures') 'KORYH JUHHI!' >>> enigma('KORYH, juhhi!', (1, 2, 1), plugb='pictures') 'HELLO, WORLD!' >>> enigma('hello world!', (1, 1, 1), plugb='pictures') 'FPNCZ QWOBU!' 
>>> enigma('FPNCZ QWOBU', (1, 1, 1), plugb='pictures') 'HELLO WORLD' :param text: input message :param rotor_position: tuple with 3 values in range 1..26 :param rotor_selection: tuple with 3 rotors () :param plugb: string containing plugboard configuration (default '') :return: en/decrypted string """ text = text.upper() rotor_position, rotor_selection, plugboard = _validator( rotor_position, rotor_selection, plugb.upper() ) rotorpos1, rotorpos2, rotorpos3 = rotor_position rotor1, rotor2, rotor3 = rotor_selection rotorpos1 -= 1 rotorpos2 -= 1 rotorpos3 -= 1 result = [] # encryption/decryption process -------------------------- for symbol in text: if symbol in abc: # 1st plugboard -------------------------- if symbol in plugboard: symbol = plugboard[symbol] # rotor ra -------------------------- index = abc.index(symbol) + rotorpos1 symbol = rotor1[index % len(abc)] # rotor rb -------------------------- index = abc.index(symbol) + rotorpos2 symbol = rotor2[index % len(abc)] # rotor rc -------------------------- index = abc.index(symbol) + rotorpos3 symbol = rotor3[index % len(abc)] # reflector -------------------------- # this is the reason you don't need another machine to decipher symbol = reflector[symbol] # 2nd rotors symbol = abc[rotor3.index(symbol) - rotorpos3] symbol = abc[rotor2.index(symbol) - rotorpos2] symbol = abc[rotor1.index(symbol) - rotorpos1] # 2nd plugboard if symbol in plugboard: symbol = plugboard[symbol] # moves/resets rotor positions rotorpos1 += 1 if rotorpos1 >= len(abc): rotorpos1 = 0 rotorpos2 += 1 if rotorpos2 >= len(abc): rotorpos2 = 0 rotorpos3 += 1 if rotorpos3 >= len(abc): rotorpos3 = 0 # else: # pass # Error could be also raised # raise ValueError( # 'Invalid symbol('+repr(symbol)+')') result.append(symbol) return "".join(result) if __name__ == "__main__": message = "This is my Python script that emulates the Enigma machine from WWII." rotor_pos = (1, 1, 1) pb = "pictures" rotor_sel = (rotor2, rotor4, rotor8) en = enigma(message, rotor_pos, rotor_sel, pb) print("Encrypted message:", en) print("Decrypted message:", enigma(en, rotor_pos, rotor_sel, pb))
""" Wikipedia: https://en.wikipedia.org/wiki/Enigma_machine Video explanation: https://youtu.be/QwQVMqfoB2E Also check out Numberphile's and Computerphile's videos on this topic This module contains function 'enigma' which emulates the famous Enigma machine from WWII. Module includes: - enigma function - showcase of function usage - 9 randomly generated rotors - reflector (aka static rotor) - original alphabet Created by TrapinchO """ from __future__ import annotations RotorPositionT = tuple[int, int, int] RotorSelectionT = tuple[str, str, str] # used alphabet -------------------------- # from string.ascii_uppercase abc = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" # -------------------------- default selection -------------------------- # rotors -------------------------- rotor1 = "EGZWVONAHDCLFQMSIPJBYUKXTR" rotor2 = "FOBHMDKEXQNRAULPGSJVTYICZW" rotor3 = "ZJXESIUQLHAVRMDOYGTNFWPBKC" # reflector -------------------------- reflector = { "A": "N", "N": "A", "B": "O", "O": "B", "C": "P", "P": "C", "D": "Q", "Q": "D", "E": "R", "R": "E", "F": "S", "S": "F", "G": "T", "T": "G", "H": "U", "U": "H", "I": "V", "V": "I", "J": "W", "W": "J", "K": "X", "X": "K", "L": "Y", "Y": "L", "M": "Z", "Z": "M", } # -------------------------- extra rotors -------------------------- rotor4 = "RMDJXFUWGISLHVTCQNKYPBEZOA" rotor5 = "SGLCPQWZHKXAREONTFBVIYJUDM" rotor6 = "HVSICLTYKQUBXDWAJZOMFGPREN" rotor7 = "RZWQHFMVDBKICJLNTUXAGYPSOE" rotor8 = "LFKIJODBEGAMQPXVUHYSTCZRWN" rotor9 = "KOAEGVDHXPQZMLFTYWJNBRCIUS" def _validator( rotpos: RotorPositionT, rotsel: RotorSelectionT, pb: str ) -> tuple[RotorPositionT, RotorSelectionT, dict[str, str]]: """ Checks if the values can be used for the 'enigma' function >>> _validator((1,1,1), (rotor1, rotor2, rotor3), 'POLAND') ((1, 1, 1), ('EGZWVONAHDCLFQMSIPJBYUKXTR', 'FOBHMDKEXQNRAULPGSJVTYICZW', \ 'ZJXESIUQLHAVRMDOYGTNFWPBKC'), \ {'P': 'O', 'O': 'P', 'L': 'A', 'A': 'L', 'N': 'D', 'D': 'N'}) :param rotpos: rotor_positon :param rotsel: rotor_selection :param pb: plugb -> validated and transformed :return: (rotpos, rotsel, pb) """ # Checks if there are 3 unique rotors unique_rotsel = len(set(rotsel)) if unique_rotsel < 3: raise Exception(f"Please use 3 unique rotors (not {unique_rotsel})") # Checks if rotor positions are valid rotorpos1, rotorpos2, rotorpos3 = rotpos if not 0 < rotorpos1 <= len(abc): raise ValueError( "First rotor position is not within range of 1..26 (" f"{rotorpos1}" ) if not 0 < rotorpos2 <= len(abc): raise ValueError( "Second rotor position is not within range of 1..26 (" f"{rotorpos2})" ) if not 0 < rotorpos3 <= len(abc): raise ValueError( "Third rotor position is not within range of 1..26 (" f"{rotorpos3})" ) # Validates string and returns dict pbdict = _plugboard(pb) return rotpos, rotsel, pbdict def _plugboard(pbstring: str) -> dict[str, str]: """ https://en.wikipedia.org/wiki/Enigma_machine#Plugboard >>> _plugboard('PICTURES') {'P': 'I', 'I': 'P', 'C': 'T', 'T': 'C', 'U': 'R', 'R': 'U', 'E': 'S', 'S': 'E'} >>> _plugboard('POLAND') {'P': 'O', 'O': 'P', 'L': 'A', 'A': 'L', 'N': 'D', 'D': 'N'} In the code, 'pb' stands for 'plugboard' Pairs can be separated by spaces :param pbstring: string containing plugboard setting for the Enigma machine :return: dictionary containing converted pairs """ # tests the input string if it # a) is type string # b) has even length (so pairs can be made) if not isinstance(pbstring, str): raise TypeError(f"Plugboard setting isn't type string ({type(pbstring)})") elif len(pbstring) % 2 != 0: raise Exception(f"Odd number of symbols 
({len(pbstring)})") elif pbstring == "": return {} pbstring.replace(" ", "") # Checks if all characters are unique tmppbl = set() for i in pbstring: if i not in abc: raise Exception(f"'{i}' not in list of symbols") elif i in tmppbl: raise Exception(f"Duplicate symbol ({i})") else: tmppbl.add(i) del tmppbl # Created the dictionary pb = {} for j in range(0, len(pbstring) - 1, 2): pb[pbstring[j]] = pbstring[j + 1] pb[pbstring[j + 1]] = pbstring[j] return pb def enigma( text: str, rotor_position: RotorPositionT, rotor_selection: RotorSelectionT = (rotor1, rotor2, rotor3), plugb: str = "", ) -> str: """ The only difference with real-world enigma is that I allowed string input. All characters are converted to uppercase. (non-letter symbol are ignored) How it works: (for every letter in the message) - Input letter goes into the plugboard. If it is connected to another one, switch it. - Letter goes through 3 rotors. Each rotor can be represented as 2 sets of symbol, where one is shuffled. Each symbol from the first set has corresponding symbol in the second set and vice versa. example: | ABCDEFGHIJKLMNOPQRSTUVWXYZ | e.g. F=D and D=F | VKLEPDBGRNWTFCJOHQAMUZYIXS | - Symbol then goes through reflector (static rotor). There it is switched with paired symbol The reflector can be represented as2 sets, each with half of the alphanet. There are usually 10 pairs of letters. Example: | ABCDEFGHIJKLM | e.g. E is paired to X | ZYXWVUTSRQPON | so when E goes in X goes out and vice versa - Letter then goes through the rotors again - If the letter is connected to plugboard, it is switched. - Return the letter >>> enigma('Hello World!', (1, 2, 1), plugb='pictures') 'KORYH JUHHI!' >>> enigma('KORYH, juhhi!', (1, 2, 1), plugb='pictures') 'HELLO, WORLD!' >>> enigma('hello world!', (1, 1, 1), plugb='pictures') 'FPNCZ QWOBU!' 
>>> enigma('FPNCZ QWOBU', (1, 1, 1), plugb='pictures') 'HELLO WORLD' :param text: input message :param rotor_position: tuple with 3 values in range 1..26 :param rotor_selection: tuple with 3 rotors () :param plugb: string containing plugboard configuration (default '') :return: en/decrypted string """ text = text.upper() rotor_position, rotor_selection, plugboard = _validator( rotor_position, rotor_selection, plugb.upper() ) rotorpos1, rotorpos2, rotorpos3 = rotor_position rotor1, rotor2, rotor3 = rotor_selection rotorpos1 -= 1 rotorpos2 -= 1 rotorpos3 -= 1 result = [] # encryption/decryption process -------------------------- for symbol in text: if symbol in abc: # 1st plugboard -------------------------- if symbol in plugboard: symbol = plugboard[symbol] # rotor ra -------------------------- index = abc.index(symbol) + rotorpos1 symbol = rotor1[index % len(abc)] # rotor rb -------------------------- index = abc.index(symbol) + rotorpos2 symbol = rotor2[index % len(abc)] # rotor rc -------------------------- index = abc.index(symbol) + rotorpos3 symbol = rotor3[index % len(abc)] # reflector -------------------------- # this is the reason you don't need another machine to decipher symbol = reflector[symbol] # 2nd rotors symbol = abc[rotor3.index(symbol) - rotorpos3] symbol = abc[rotor2.index(symbol) - rotorpos2] symbol = abc[rotor1.index(symbol) - rotorpos1] # 2nd plugboard if symbol in plugboard: symbol = plugboard[symbol] # moves/resets rotor positions rotorpos1 += 1 if rotorpos1 >= len(abc): rotorpos1 = 0 rotorpos2 += 1 if rotorpos2 >= len(abc): rotorpos2 = 0 rotorpos3 += 1 if rotorpos3 >= len(abc): rotorpos3 = 0 # else: # pass # Error could be also raised # raise ValueError( # 'Invalid symbol('+repr(symbol)+')') result.append(symbol) return "".join(result) if __name__ == "__main__": message = "This is my Python script that emulates the Enigma machine from WWII." rotor_pos = (1, 1, 1) pb = "pictures" rotor_sel = (rotor2, rotor4, rotor8) en = enigma(message, rotor_pos, rotor_sel, pb) print("Encrypted message:", en) print("Decrypted message:", enigma(en, rotor_pos, rotor_sel, pb))
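The plugboard described above is just a symmetric letter swap applied before and after the rotors. A tiny self-contained sketch of that single idea (not the full machine, and independent of the module's own `_plugboard` helper; `plugboard_map` is a hypothetical name used only here):

```python
def plugboard_map(pairs: str) -> dict[str, str]:
    """Build a symmetric mapping from a string of letter pairs, e.g. 'POLAND'."""
    mapping: dict[str, str] = {}
    for first, second in zip(pairs[::2], pairs[1::2]):
        mapping[first] = second
        mapping[second] = first
    return mapping


pb = plugboard_map("POLAND")
print(pb["P"], pb["O"])  # O P  -> the swap works in both directions
print("".join(pb.get(ch, ch) for ch in "PLAN"))  # OALD: unmapped letters pass through
```

Applying the same mapping twice restores the original letter, which is why the machine runs the plugboard once on the way in and once on the way out.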
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
# Arithmetic analysis Arithmetic analysis is a branch of mathematics that deals with solving linear equations. * <https://en.wikipedia.org/wiki/System_of_linear_equations> * <https://en.wikipedia.org/wiki/Gaussian_elimination> * <https://en.wikipedia.org/wiki/Root-finding_algorithms>
# Arithmetic analysis Arithmetic analysis is a branch of mathematics that deals with solving linear equations. * <https://en.wikipedia.org/wiki/System_of_linear_equations> * <https://en.wikipedia.org/wiki/Gaussian_elimination> * <https://en.wikipedia.org/wiki/Root-finding_algorithms>
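Since the README above points to systems of linear equations, a minimal illustration of solving one such system with NumPy's existing `linalg.solve` (purely an example, not part of the directory):

```python
import numpy as np

# Solve the 2x2 system:  3x + 2y = 12,  x - y = 1
coefficients = np.array([[3.0, 2.0], [1.0, -1.0]])
constants = np.array([12.0, 1.0])
solution = np.linalg.solve(coefficients, constants)
print(solution)  # approximately [2.8 1.8], i.e. x = 2.8, y = 1.8
```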
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
from math import asin, atan, cos, radians, sin, sqrt, tan AXIS_A = 6378137.0 AXIS_B = 6356752.314245 RADIUS = 6378137 def haversine_distance(lat1: float, lon1: float, lat2: float, lon2: float) -> float: """ Calculate great circle distance between two points in a sphere, given longitudes and latitudes https://en.wikipedia.org/wiki/Haversine_formula We know that the globe is "sort of" spherical, so a path between two points isn't exactly a straight line. We need to account for the Earth's curvature when calculating distance from point A to B. This effect is negligible for small distances but adds up as distance increases. The Haversine method treats the earth as a sphere which allows us to "project" the two points A and B onto the surface of that sphere and approximate the spherical distance between them. Since the Earth is not a perfect sphere, other methods which model the Earth's ellipsoidal nature are more accurate but a quick and modifiable computation like Haversine can be handy for shorter range distances. Args: lat1, lon1: latitude and longitude of coordinate 1 lat2, lon2: latitude and longitude of coordinate 2 Returns: geographical distance between two points in metres >>> from collections import namedtuple >>> point_2d = namedtuple("point_2d", "lat lon") >>> SAN_FRANCISCO = point_2d(37.774856, -122.424227) >>> YOSEMITE = point_2d(37.864742, -119.537521) >>> f"{haversine_distance(*SAN_FRANCISCO, *YOSEMITE):0,.0f} meters" '254,352 meters' """ # CONSTANTS per WGS84 https://en.wikipedia.org/wiki/World_Geodetic_System # Distance in metres(m) # Equation parameters # Equation https://en.wikipedia.org/wiki/Haversine_formula#Formulation flattening = (AXIS_A - AXIS_B) / AXIS_A phi_1 = atan((1 - flattening) * tan(radians(lat1))) phi_2 = atan((1 - flattening) * tan(radians(lat2))) lambda_1 = radians(lon1) lambda_2 = radians(lon2) # Equation sin_sq_phi = sin((phi_2 - phi_1) / 2) sin_sq_lambda = sin((lambda_2 - lambda_1) / 2) # Square both values sin_sq_phi *= sin_sq_phi sin_sq_lambda *= sin_sq_lambda h_value = sqrt(sin_sq_phi + (cos(phi_1) * cos(phi_2) * sin_sq_lambda)) return 2 * RADIUS * asin(h_value) if __name__ == "__main__": import doctest doctest.testmod()
from math import asin, atan, cos, radians, sin, sqrt, tan AXIS_A = 6378137.0 AXIS_B = 6356752.314245 RADIUS = 6378137 def haversine_distance(lat1: float, lon1: float, lat2: float, lon2: float) -> float: """ Calculate great circle distance between two points in a sphere, given longitudes and latitudes https://en.wikipedia.org/wiki/Haversine_formula We know that the globe is "sort of" spherical, so a path between two points isn't exactly a straight line. We need to account for the Earth's curvature when calculating distance from point A to B. This effect is negligible for small distances but adds up as distance increases. The Haversine method treats the earth as a sphere which allows us to "project" the two points A and B onto the surface of that sphere and approximate the spherical distance between them. Since the Earth is not a perfect sphere, other methods which model the Earth's ellipsoidal nature are more accurate but a quick and modifiable computation like Haversine can be handy for shorter range distances. Args: lat1, lon1: latitude and longitude of coordinate 1 lat2, lon2: latitude and longitude of coordinate 2 Returns: geographical distance between two points in metres >>> from collections import namedtuple >>> point_2d = namedtuple("point_2d", "lat lon") >>> SAN_FRANCISCO = point_2d(37.774856, -122.424227) >>> YOSEMITE = point_2d(37.864742, -119.537521) >>> f"{haversine_distance(*SAN_FRANCISCO, *YOSEMITE):0,.0f} meters" '254,352 meters' """ # CONSTANTS per WGS84 https://en.wikipedia.org/wiki/World_Geodetic_System # Distance in metres(m) # Equation parameters # Equation https://en.wikipedia.org/wiki/Haversine_formula#Formulation flattening = (AXIS_A - AXIS_B) / AXIS_A phi_1 = atan((1 - flattening) * tan(radians(lat1))) phi_2 = atan((1 - flattening) * tan(radians(lat2))) lambda_1 = radians(lon1) lambda_2 = radians(lon2) # Equation sin_sq_phi = sin((phi_2 - phi_1) / 2) sin_sq_lambda = sin((lambda_2 - lambda_1) / 2) # Square both values sin_sq_phi *= sin_sq_phi sin_sq_lambda *= sin_sq_lambda h_value = sqrt(sin_sq_phi + (cos(phi_1) * cos(phi_2) * sin_sq_lambda)) return 2 * RADIUS * asin(h_value) if __name__ == "__main__": import doctest doctest.testmod()
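The function above folds in a flattening correction; the textbook spherical haversine (mean Earth radius, no flattening) is shorter and a useful point of comparison. A self-contained sketch, clearly an approximation rather than the repository's variant:

```python
from math import asin, cos, radians, sin, sqrt

EARTH_RADIUS_M = 6371000  # mean Earth radius; an approximation, not the WGS84 ellipsoid


def spherical_haversine(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
    """Great-circle distance on a perfect sphere, in metres."""
    phi1, phi2 = radians(lat1), radians(lat2)
    d_phi = radians(lat2 - lat1)
    d_lambda = radians(lon2 - lon1)
    h = sin(d_phi / 2) ** 2 + cos(phi1) * cos(phi2) * sin(d_lambda / 2) ** 2
    return 2 * EARTH_RADIUS_M * asin(sqrt(h))


# San Francisco to Yosemite Valley; expect roughly 254 km, close to the doctest above.
print(round(spherical_haversine(37.774856, -122.424227, 37.864742, -119.537521)))
```

The small gap between this figure and the 254,352 m in the doctest is the effect of the flattening correction the repository version applies.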
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
# Print all subset combinations of n element in given set of r element. def combination_util(arr, n, r, index, data, i): """ Current combination is ready to be printed, print it arr[] ---> Input Array data[] ---> Temporary array to store current combination start & end ---> Staring and Ending indexes in arr[] index ---> Current index in data[] r ---> Size of a combination to be printed """ if index == r: for j in range(r): print(data[j], end=" ") print(" ") return # When no more elements are there to put in data[] if i >= n: return # current is included, put next at next location data[index] = arr[i] combination_util(arr, n, r, index + 1, data, i + 1) # current is excluded, replace it with # next (Note that i+1 is passed, but # index is not changed) combination_util(arr, n, r, index, data, i + 1) # The main function that prints all combinations # of size r in arr[] of size n. This function # mainly uses combinationUtil() def print_combination(arr, n, r): # A temporary array to store all combination one by one data = [0] * r # Print all combination using temporary array 'data[]' combination_util(arr, n, r, 0, data, 0) # Driver function to check for above function arr = [10, 20, 30, 40, 50] print_combination(arr, len(arr), 3) # This code is contributed by Ambuj sahu
# Print all subset combinations of n element in given set of r element. def combination_util(arr, n, r, index, data, i): """ Current combination is ready to be printed, print it arr[] ---> Input Array data[] ---> Temporary array to store current combination start & end ---> Staring and Ending indexes in arr[] index ---> Current index in data[] r ---> Size of a combination to be printed """ if index == r: for j in range(r): print(data[j], end=" ") print(" ") return # When no more elements are there to put in data[] if i >= n: return # current is included, put next at next location data[index] = arr[i] combination_util(arr, n, r, index + 1, data, i + 1) # current is excluded, replace it with # next (Note that i+1 is passed, but # index is not changed) combination_util(arr, n, r, index, data, i + 1) # The main function that prints all combinations # of size r in arr[] of size n. This function # mainly uses combinationUtil() def print_combination(arr, n, r): # A temporary array to store all combination one by one data = [0] * r # Print all combination using temporary array 'data[]' combination_util(arr, n, r, 0, data, 0) # Driver function to check for above function arr = [10, 20, 30, 40, 50] print_combination(arr, len(arr), 3) # This code is contributed by Ambuj sahu
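The recursive include/exclude routine above reproduces what the standard library already offers; a quick cross-check with `itertools.combinations` (a standard-library function) on the same input:

```python
from itertools import combinations

arr = [10, 20, 30, 40, 50]
for combo in combinations(arr, 3):
    print(*combo)
# First lines: "10 20 30", "10 20 40", "10 20 50", ... in the same order
# produced by the recursive combination_util above.
```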
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
#!/usr/bin/env python3 from __future__ import annotations import random from collections.abc import Iterable from typing import Any, Generic, TypeVar T = TypeVar("T", bound=bool) class RandomizedHeapNode(Generic[T]): """ One node of the randomized heap. Contains the value and references to two children. """ def __init__(self, value: T) -> None: self._value: T = value self.left: RandomizedHeapNode[T] | None = None self.right: RandomizedHeapNode[T] | None = None @property def value(self) -> T: """Return the value of the node.""" return self._value @staticmethod def merge( root1: RandomizedHeapNode[T] | None, root2: RandomizedHeapNode[T] | None ) -> RandomizedHeapNode[T] | None: """Merge 2 nodes together.""" if not root1: return root2 if not root2: return root1 if root1.value > root2.value: root1, root2 = root2, root1 if random.choice([True, False]): root1.left, root1.right = root1.right, root1.left root1.left = RandomizedHeapNode.merge(root1.left, root2) return root1 class RandomizedHeap(Generic[T]): """ A data structure that allows inserting a new value and to pop the smallest values. Both operations take O(logN) time where N is the size of the structure. Wiki: https://en.wikipedia.org/wiki/Randomized_meldable_heap >>> RandomizedHeap([2, 3, 1, 5, 1, 7]).to_sorted_list() [1, 1, 2, 3, 5, 7] >>> rh = RandomizedHeap() >>> rh.pop() Traceback (most recent call last): ... IndexError: Can't get top element for the empty heap. >>> rh.insert(1) >>> rh.insert(-1) >>> rh.insert(0) >>> rh.to_sorted_list() [-1, 0, 1] """ def __init__(self, data: Iterable[T] | None = ()) -> None: """ >>> rh = RandomizedHeap([3, 1, 3, 7]) >>> rh.to_sorted_list() [1, 3, 3, 7] """ self._root: RandomizedHeapNode[T] | None = None if data: for item in data: self.insert(item) def insert(self, value: T) -> None: """ Insert the value into the heap. >>> rh = RandomizedHeap() >>> rh.insert(3) >>> rh.insert(1) >>> rh.insert(3) >>> rh.insert(7) >>> rh.to_sorted_list() [1, 3, 3, 7] """ self._root = RandomizedHeapNode.merge(self._root, RandomizedHeapNode(value)) def pop(self) -> T | None: """ Pop the smallest value from the heap and return it. >>> rh = RandomizedHeap([3, 1, 3, 7]) >>> rh.pop() 1 >>> rh.pop() 3 >>> rh.pop() 3 >>> rh.pop() 7 >>> rh.pop() Traceback (most recent call last): ... IndexError: Can't get top element for the empty heap. """ result = self.top() if self._root is None: return None self._root = RandomizedHeapNode.merge(self._root.left, self._root.right) return result def top(self) -> T: """ Return the smallest value from the heap. >>> rh = RandomizedHeap() >>> rh.insert(3) >>> rh.top() 3 >>> rh.insert(1) >>> rh.top() 1 >>> rh.insert(3) >>> rh.top() 1 >>> rh.insert(7) >>> rh.top() 1 """ if not self._root: raise IndexError("Can't get top element for the empty heap.") return self._root.value def clear(self) -> None: """ Clear the heap. >>> rh = RandomizedHeap([3, 1, 3, 7]) >>> rh.clear() >>> rh.pop() Traceback (most recent call last): ... IndexError: Can't get top element for the empty heap. """ self._root = None def to_sorted_list(self) -> list[Any]: """ Returns sorted list containing all the values in the heap. >>> rh = RandomizedHeap([3, 1, 3, 7]) >>> rh.to_sorted_list() [1, 3, 3, 7] """ result = [] while self: result.append(self.pop()) return result def __bool__(self) -> bool: """ Check if the heap is not empty. >>> rh = RandomizedHeap() >>> bool(rh) False >>> rh.insert(1) >>> bool(rh) True >>> rh.clear() >>> bool(rh) False """ return self._root is not None if __name__ == "__main__": import doctest doctest.testmod()
#!/usr/bin/env python3 from __future__ import annotations import random from collections.abc import Iterable from typing import Any, Generic, TypeVar T = TypeVar("T", bound=bool) class RandomizedHeapNode(Generic[T]): """ One node of the randomized heap. Contains the value and references to two children. """ def __init__(self, value: T) -> None: self._value: T = value self.left: RandomizedHeapNode[T] | None = None self.right: RandomizedHeapNode[T] | None = None @property def value(self) -> T: """Return the value of the node.""" return self._value @staticmethod def merge( root1: RandomizedHeapNode[T] | None, root2: RandomizedHeapNode[T] | None ) -> RandomizedHeapNode[T] | None: """Merge 2 nodes together.""" if not root1: return root2 if not root2: return root1 if root1.value > root2.value: root1, root2 = root2, root1 if random.choice([True, False]): root1.left, root1.right = root1.right, root1.left root1.left = RandomizedHeapNode.merge(root1.left, root2) return root1 class RandomizedHeap(Generic[T]): """ A data structure that allows inserting a new value and to pop the smallest values. Both operations take O(logN) time where N is the size of the structure. Wiki: https://en.wikipedia.org/wiki/Randomized_meldable_heap >>> RandomizedHeap([2, 3, 1, 5, 1, 7]).to_sorted_list() [1, 1, 2, 3, 5, 7] >>> rh = RandomizedHeap() >>> rh.pop() Traceback (most recent call last): ... IndexError: Can't get top element for the empty heap. >>> rh.insert(1) >>> rh.insert(-1) >>> rh.insert(0) >>> rh.to_sorted_list() [-1, 0, 1] """ def __init__(self, data: Iterable[T] | None = ()) -> None: """ >>> rh = RandomizedHeap([3, 1, 3, 7]) >>> rh.to_sorted_list() [1, 3, 3, 7] """ self._root: RandomizedHeapNode[T] | None = None if data: for item in data: self.insert(item) def insert(self, value: T) -> None: """ Insert the value into the heap. >>> rh = RandomizedHeap() >>> rh.insert(3) >>> rh.insert(1) >>> rh.insert(3) >>> rh.insert(7) >>> rh.to_sorted_list() [1, 3, 3, 7] """ self._root = RandomizedHeapNode.merge(self._root, RandomizedHeapNode(value)) def pop(self) -> T | None: """ Pop the smallest value from the heap and return it. >>> rh = RandomizedHeap([3, 1, 3, 7]) >>> rh.pop() 1 >>> rh.pop() 3 >>> rh.pop() 3 >>> rh.pop() 7 >>> rh.pop() Traceback (most recent call last): ... IndexError: Can't get top element for the empty heap. """ result = self.top() if self._root is None: return None self._root = RandomizedHeapNode.merge(self._root.left, self._root.right) return result def top(self) -> T: """ Return the smallest value from the heap. >>> rh = RandomizedHeap() >>> rh.insert(3) >>> rh.top() 3 >>> rh.insert(1) >>> rh.top() 1 >>> rh.insert(3) >>> rh.top() 1 >>> rh.insert(7) >>> rh.top() 1 """ if not self._root: raise IndexError("Can't get top element for the empty heap.") return self._root.value def clear(self) -> None: """ Clear the heap. >>> rh = RandomizedHeap([3, 1, 3, 7]) >>> rh.clear() >>> rh.pop() Traceback (most recent call last): ... IndexError: Can't get top element for the empty heap. """ self._root = None def to_sorted_list(self) -> list[Any]: """ Returns sorted list containing all the values in the heap. >>> rh = RandomizedHeap([3, 1, 3, 7]) >>> rh.to_sorted_list() [1, 3, 3, 7] """ result = [] while self: result.append(self.pop()) return result def __bool__(self) -> bool: """ Check if the heap is not empty. >>> rh = RandomizedHeap() >>> bool(rh) False >>> rh.insert(1) >>> bool(rh) True >>> rh.clear() >>> bool(rh) False """ return self._root is not None if __name__ == "__main__": import doctest doctest.testmod()
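For comparison, the same insert/pop-minimum behaviour is available through the standard library's `heapq` module (a binary heap rather than a randomized meldable heap); a minimal sketch:

```python
import heapq

heap: list[int] = []
for value in (2, 3, 1, 5, 1, 7):
    heapq.heappush(heap, value)  # O(log n) insert

# Repeatedly popping the minimum yields the values in sorted order.
sorted_out = [heapq.heappop(heap) for _ in range(len(heap))]
print(sorted_out)  # [1, 1, 2, 3, 5, 7] -- same result as RandomizedHeap.to_sorted_list()
```

The randomized meldable heap's advantage over `heapq` is the cheap merge of two whole heaps, which the binary-heap module does not offer directly.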
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
""" Project Euler Problem 85: https://projecteuler.net/problem=85 By counting carefully it can be seen that a rectangular grid measuring 3 by 2 contains eighteen rectangles.  Although there exists no rectangular grid that contains exactly two million rectangles, find the area of the grid with the nearest solution. Solution: For a grid with side-lengths a and b, the number of rectangles contained in the grid is [a*(a+1)/2] * [b*(b+1)/2)], which happens to be the product of the a-th and b-th triangle numbers. So to find the solution grid (a,b), we need to find the two triangle numbers whose product is closest to two million. Denote these two triangle numbers Ta and Tb. We want their product Ta*Tb to be as close as possible to 2m. Assuming that the best solution is fairly close to 2m, We can assume that both Ta and Tb are roughly bounded by 2m. Since Ta = a(a+1)/2, we can assume that a (and similarly b) are roughly bounded by sqrt(2 * 2m) = 2000. Since this is a rough bound, to be on the safe side we add 10%. Therefore we start by generating all the triangle numbers Ta for 1 <= a <= 2200. This can be done iteratively since the ith triangle number is the sum of 1,2, ... ,i, and so T(i) = T(i-1) + i. We then search this list of triangle numbers for the two that give a product closest to our target of two million. Rather than testing every combination of 2 elements of the list, which would find the result in quadratic time, we can find the best pair in linear time. We iterate through the list of triangle numbers using enumerate() so we have a and Ta. Since we want Ta * Tb to be as close as possible to 2m, we know that Tb needs to be roughly 2m / Ta. Using the formula Tb = b*(b+1)/2 as well as the quadratic formula, we can solve for b: b is roughly (-1 + sqrt(1 + 8 * 2m / Ta)) / 2. Since the closest integers to this estimate will give product closest to 2m, we only need to consider the integers above and below. It's then a simple matter to get the triangle numbers corresponding to those integers, calculate the product Ta * Tb, compare that product to our target 2m, and keep track of the (a,b) pair that comes the closest. Reference: https://en.wikipedia.org/wiki/Triangular_number https://en.wikipedia.org/wiki/Quadratic_formula """ from __future__ import annotations from math import ceil, floor, sqrt def solution(target: int = 2000000) -> int: """ Find the area of the grid which contains as close to two million rectangles as possible. 
>>> solution(20) 6 >>> solution(2000) 72 >>> solution(2000000000) 86595 """ triangle_numbers: list[int] = [0] idx: int for idx in range(1, ceil(sqrt(target * 2) * 1.1)): triangle_numbers.append(triangle_numbers[-1] + idx) # we want this to be as close as possible to target best_product: int = 0 # the area corresponding to the grid that gives the product closest to target area: int = 0 # an estimate of b, using the quadratic formula b_estimate: float # the largest integer less than b_estimate b_floor: int # the largest integer less than b_estimate b_ceil: int # the triangle number corresponding to b_floor triangle_b_first_guess: int # the triangle number corresponding to b_ceil triangle_b_second_guess: int for idx_a, triangle_a in enumerate(triangle_numbers[1:], 1): b_estimate = (-1 + sqrt(1 + 8 * target / triangle_a)) / 2 b_floor = floor(b_estimate) b_ceil = ceil(b_estimate) triangle_b_first_guess = triangle_numbers[b_floor] triangle_b_second_guess = triangle_numbers[b_ceil] if abs(target - triangle_b_first_guess * triangle_a) < abs( target - best_product ): best_product = triangle_b_first_guess * triangle_a area = idx_a * b_floor if abs(target - triangle_b_second_guess * triangle_a) < abs( target - best_product ): best_product = triangle_b_second_guess * triangle_a area = idx_a * b_ceil return area if __name__ == "__main__": print(f"{solution() = }")
""" Project Euler Problem 85: https://projecteuler.net/problem=85 By counting carefully it can be seen that a rectangular grid measuring 3 by 2 contains eighteen rectangles.  Although there exists no rectangular grid that contains exactly two million rectangles, find the area of the grid with the nearest solution. Solution: For a grid with side-lengths a and b, the number of rectangles contained in the grid is [a*(a+1)/2] * [b*(b+1)/2)], which happens to be the product of the a-th and b-th triangle numbers. So to find the solution grid (a,b), we need to find the two triangle numbers whose product is closest to two million. Denote these two triangle numbers Ta and Tb. We want their product Ta*Tb to be as close as possible to 2m. Assuming that the best solution is fairly close to 2m, We can assume that both Ta and Tb are roughly bounded by 2m. Since Ta = a(a+1)/2, we can assume that a (and similarly b) are roughly bounded by sqrt(2 * 2m) = 2000. Since this is a rough bound, to be on the safe side we add 10%. Therefore we start by generating all the triangle numbers Ta for 1 <= a <= 2200. This can be done iteratively since the ith triangle number is the sum of 1,2, ... ,i, and so T(i) = T(i-1) + i. We then search this list of triangle numbers for the two that give a product closest to our target of two million. Rather than testing every combination of 2 elements of the list, which would find the result in quadratic time, we can find the best pair in linear time. We iterate through the list of triangle numbers using enumerate() so we have a and Ta. Since we want Ta * Tb to be as close as possible to 2m, we know that Tb needs to be roughly 2m / Ta. Using the formula Tb = b*(b+1)/2 as well as the quadratic formula, we can solve for b: b is roughly (-1 + sqrt(1 + 8 * 2m / Ta)) / 2. Since the closest integers to this estimate will give product closest to 2m, we only need to consider the integers above and below. It's then a simple matter to get the triangle numbers corresponding to those integers, calculate the product Ta * Tb, compare that product to our target 2m, and keep track of the (a,b) pair that comes the closest. Reference: https://en.wikipedia.org/wiki/Triangular_number https://en.wikipedia.org/wiki/Quadratic_formula """ from __future__ import annotations from math import ceil, floor, sqrt def solution(target: int = 2000000) -> int: """ Find the area of the grid which contains as close to two million rectangles as possible. 
>>> solution(20) 6 >>> solution(2000) 72 >>> solution(2000000000) 86595 """ triangle_numbers: list[int] = [0] idx: int for idx in range(1, ceil(sqrt(target * 2) * 1.1)): triangle_numbers.append(triangle_numbers[-1] + idx) # we want this to be as close as possible to target best_product: int = 0 # the area corresponding to the grid that gives the product closest to target area: int = 0 # an estimate of b, using the quadratic formula b_estimate: float # the largest integer less than b_estimate b_floor: int # the largest integer less than b_estimate b_ceil: int # the triangle number corresponding to b_floor triangle_b_first_guess: int # the triangle number corresponding to b_ceil triangle_b_second_guess: int for idx_a, triangle_a in enumerate(triangle_numbers[1:], 1): b_estimate = (-1 + sqrt(1 + 8 * target / triangle_a)) / 2 b_floor = floor(b_estimate) b_ceil = ceil(b_estimate) triangle_b_first_guess = triangle_numbers[b_floor] triangle_b_second_guess = triangle_numbers[b_ceil] if abs(target - triangle_b_first_guess * triangle_a) < abs( target - best_product ): best_product = triangle_b_first_guess * triangle_a area = idx_a * b_floor if abs(target - triangle_b_second_guess * triangle_a) < abs( target - best_product ): best_product = triangle_b_second_guess * triangle_a area = idx_a * b_ceil return area if __name__ == "__main__": print(f"{solution() = }")
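A quick numeric check of the triangle-number identity the solution relies on: the 3-by-2 grid from the problem statement should contain T(3) * T(2) = 6 * 3 = 18 rectangles. An illustrative snippet:

```python
def triangle(n: int) -> int:
    """n-th triangle number: 1 + 2 + ... + n."""
    return n * (n + 1) // 2


def rectangles_in_grid(a: int, b: int) -> int:
    """Number of axis-aligned rectangles in an a-by-b grid."""
    return triangle(a) * triangle(b)


print(rectangles_in_grid(3, 2))   # 18, as stated in the problem
print(rectangles_in_grid(36, 77)) # 1999998, the count behind the well-known answer 2772
```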
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
def factorial(n: int) -> int: """ Calculate the factorial of a positive integer https://en.wikipedia.org/wiki/Factorial >>> import math >>> all(factorial(i) == math.factorial(i) for i in range(20)) True >>> factorial(0.1) Traceback (most recent call last): ... ValueError: factorial() only accepts integral values >>> factorial(-1) Traceback (most recent call last): ... ValueError: factorial() not defined for negative values """ if not isinstance(n, int): raise ValueError("factorial() only accepts integral values") if n < 0: raise ValueError("factorial() not defined for negative values") return 1 if n == 0 or n == 1 else n * factorial(n - 1) if __name__ == "__main__": import doctest doctest.testmod()
def factorial(n: int) -> int: """ Calculate the factorial of a positive integer https://en.wikipedia.org/wiki/Factorial >>> import math >>> all(factorial(i) == math.factorial(i) for i in range(20)) True >>> factorial(0.1) Traceback (most recent call last): ... ValueError: factorial() only accepts integral values >>> factorial(-1) Traceback (most recent call last): ... ValueError: factorial() not defined for negative values """ if not isinstance(n, int): raise ValueError("factorial() only accepts integral values") if n < 0: raise ValueError("factorial() not defined for negative values") return 1 if n == 0 or n == 1 else n * factorial(n - 1) if __name__ == "__main__": import doctest doctest.testmod()
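A small added note on the recursive form above: each call adds a stack frame, so inputs past Python's default recursion limit (about 1000) raise RecursionError. The iterative sketch below, which keeps the same validation, is illustrative only and not part of the original file.

import math


def factorial_iterative(n: int) -> int:
    # Same contract as the recursive factorial above, implemented with a loop.
    if not isinstance(n, int):
        raise ValueError("factorial() only accepts integral values")
    if n < 0:
        raise ValueError("factorial() not defined for negative values")
    result = 1
    for i in range(2, n + 1):
        result *= i
    return result


assert factorial_iterative(10) == math.factorial(10) == 3628800
assert factorial_iterative(2000) > 0  # fine here, past the default recursion limit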
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
# https://en.wikipedia.org/wiki/Hill_climbing import math class SearchProblem: """ An interface to define search problems. The interface will be illustrated using the example of mathematical function. """ def __init__(self, x: int, y: int, step_size: int, function_to_optimize): """ The constructor of the search problem. x: the x coordinate of the current search state. y: the y coordinate of the current search state. step_size: size of the step to take when looking for neighbors. function_to_optimize: a function to optimize having the signature f(x, y). """ self.x = x self.y = y self.step_size = step_size self.function = function_to_optimize def score(self) -> int: """ Returns the output of the function called with current x and y coordinates. >>> def test_function(x, y): ... return x + y >>> SearchProblem(0, 0, 1, test_function).score() # 0 + 0 = 0 0 >>> SearchProblem(5, 7, 1, test_function).score() # 5 + 7 = 12 12 """ return self.function(self.x, self.y) def get_neighbors(self): """ Returns a list of coordinates of neighbors adjacent to the current coordinates. Neighbors: | 0 | 1 | 2 | | 3 | _ | 4 | | 5 | 6 | 7 | """ step_size = self.step_size return [ SearchProblem(x, y, step_size, self.function) for x, y in ( (self.x - step_size, self.y - step_size), (self.x - step_size, self.y), (self.x - step_size, self.y + step_size), (self.x, self.y - step_size), (self.x, self.y + step_size), (self.x + step_size, self.y - step_size), (self.x + step_size, self.y), (self.x + step_size, self.y + step_size), ) ] def __hash__(self): """ hash the string representation of the current search state. """ return hash(str(self)) def __eq__(self, obj): """ Check if the 2 objects are equal. """ if isinstance(obj, SearchProblem): return hash(str(self)) == hash(str(obj)) return False def __str__(self): """ string representation of the current search state. >>> str(SearchProblem(0, 0, 1, None)) 'x: 0 y: 0' >>> str(SearchProblem(2, 5, 1, None)) 'x: 2 y: 5' """ return f"x: {self.x} y: {self.y}" def hill_climbing( search_prob, find_max: bool = True, max_x: float = math.inf, min_x: float = -math.inf, max_y: float = math.inf, min_y: float = -math.inf, visualization: bool = False, max_iter: int = 10000, ) -> SearchProblem: """ Implementation of the hill climbling algorithm. We start with a given state, find all its neighbors, move towards the neighbor which provides the maximum (or minimum) change. We keep doing this until we are at a state where we do not have any neighbors which can improve the solution. Args: search_prob: The search state at the start. find_max: If True, the algorithm should find the maximum else the minimum. max_x, min_x, max_y, min_y: the maximum and minimum bounds of x and y. visualization: If True, a matplotlib graph is displayed. max_iter: number of times to run the iteration. Returns a search state having the maximum (or minimum) score. 
""" current_state = search_prob scores = [] # list to store the current score at each iteration iterations = 0 solution_found = False visited = set() while not solution_found and iterations < max_iter: visited.add(current_state) iterations += 1 current_score = current_state.score() scores.append(current_score) neighbors = current_state.get_neighbors() max_change = -math.inf min_change = math.inf next_state = None # to hold the next best neighbor for neighbor in neighbors: if neighbor in visited: continue # do not want to visit the same state again if ( neighbor.x > max_x or neighbor.x < min_x or neighbor.y > max_y or neighbor.y < min_y ): continue # neighbor outside our bounds change = neighbor.score() - current_score if find_max: # finding max # going to direction with greatest ascent if change > max_change and change > 0: max_change = change next_state = neighbor else: # finding min # to direction with greatest descent if change < min_change and change < 0: min_change = change next_state = neighbor if next_state is not None: # we found at least one neighbor which improved the current state current_state = next_state else: # since we have no neighbor that improves the solution we stop the search solution_found = True if visualization: from matplotlib import pyplot as plt plt.plot(range(iterations), scores) plt.xlabel("Iterations") plt.ylabel("Function values") plt.show() return current_state if __name__ == "__main__": import doctest doctest.testmod() def test_f1(x, y): return (x**2) + (y**2) # starting the problem with initial coordinates (3, 4) prob = SearchProblem(x=3, y=4, step_size=1, function_to_optimize=test_f1) local_min = hill_climbing(prob, find_max=False) print( "The minimum score for f(x, y) = x^2 + y^2 found via hill climbing: " f"{local_min.score()}" ) # starting the problem with initial coordinates (12, 47) prob = SearchProblem(x=12, y=47, step_size=1, function_to_optimize=test_f1) local_min = hill_climbing( prob, find_max=False, max_x=100, min_x=5, max_y=50, min_y=-5, visualization=True ) print( "The minimum score for f(x, y) = x^2 + y^2 with the domain 100 > x > 5 " f"and 50 > y > - 5 found via hill climbing: {local_min.score()}" ) def test_f2(x, y): return (3 * x**2) - (6 * y) prob = SearchProblem(x=3, y=4, step_size=1, function_to_optimize=test_f1) local_min = hill_climbing(prob, find_max=True) print( "The maximum score for f(x, y) = x^2 + y^2 found via hill climbing: " f"{local_min.score()}" )
# https://en.wikipedia.org/wiki/Hill_climbing import math class SearchProblem: """ An interface to define search problems. The interface will be illustrated using the example of mathematical function. """ def __init__(self, x: int, y: int, step_size: int, function_to_optimize): """ The constructor of the search problem. x: the x coordinate of the current search state. y: the y coordinate of the current search state. step_size: size of the step to take when looking for neighbors. function_to_optimize: a function to optimize having the signature f(x, y). """ self.x = x self.y = y self.step_size = step_size self.function = function_to_optimize def score(self) -> int: """ Returns the output of the function called with current x and y coordinates. >>> def test_function(x, y): ... return x + y >>> SearchProblem(0, 0, 1, test_function).score() # 0 + 0 = 0 0 >>> SearchProblem(5, 7, 1, test_function).score() # 5 + 7 = 12 12 """ return self.function(self.x, self.y) def get_neighbors(self): """ Returns a list of coordinates of neighbors adjacent to the current coordinates. Neighbors: | 0 | 1 | 2 | | 3 | _ | 4 | | 5 | 6 | 7 | """ step_size = self.step_size return [ SearchProblem(x, y, step_size, self.function) for x, y in ( (self.x - step_size, self.y - step_size), (self.x - step_size, self.y), (self.x - step_size, self.y + step_size), (self.x, self.y - step_size), (self.x, self.y + step_size), (self.x + step_size, self.y - step_size), (self.x + step_size, self.y), (self.x + step_size, self.y + step_size), ) ] def __hash__(self): """ hash the string representation of the current search state. """ return hash(str(self)) def __eq__(self, obj): """ Check if the 2 objects are equal. """ if isinstance(obj, SearchProblem): return hash(str(self)) == hash(str(obj)) return False def __str__(self): """ string representation of the current search state. >>> str(SearchProblem(0, 0, 1, None)) 'x: 0 y: 0' >>> str(SearchProblem(2, 5, 1, None)) 'x: 2 y: 5' """ return f"x: {self.x} y: {self.y}" def hill_climbing( search_prob, find_max: bool = True, max_x: float = math.inf, min_x: float = -math.inf, max_y: float = math.inf, min_y: float = -math.inf, visualization: bool = False, max_iter: int = 10000, ) -> SearchProblem: """ Implementation of the hill climbling algorithm. We start with a given state, find all its neighbors, move towards the neighbor which provides the maximum (or minimum) change. We keep doing this until we are at a state where we do not have any neighbors which can improve the solution. Args: search_prob: The search state at the start. find_max: If True, the algorithm should find the maximum else the minimum. max_x, min_x, max_y, min_y: the maximum and minimum bounds of x and y. visualization: If True, a matplotlib graph is displayed. max_iter: number of times to run the iteration. Returns a search state having the maximum (or minimum) score. 
""" current_state = search_prob scores = [] # list to store the current score at each iteration iterations = 0 solution_found = False visited = set() while not solution_found and iterations < max_iter: visited.add(current_state) iterations += 1 current_score = current_state.score() scores.append(current_score) neighbors = current_state.get_neighbors() max_change = -math.inf min_change = math.inf next_state = None # to hold the next best neighbor for neighbor in neighbors: if neighbor in visited: continue # do not want to visit the same state again if ( neighbor.x > max_x or neighbor.x < min_x or neighbor.y > max_y or neighbor.y < min_y ): continue # neighbor outside our bounds change = neighbor.score() - current_score if find_max: # finding max # going to direction with greatest ascent if change > max_change and change > 0: max_change = change next_state = neighbor else: # finding min # to direction with greatest descent if change < min_change and change < 0: min_change = change next_state = neighbor if next_state is not None: # we found at least one neighbor which improved the current state current_state = next_state else: # since we have no neighbor that improves the solution we stop the search solution_found = True if visualization: from matplotlib import pyplot as plt plt.plot(range(iterations), scores) plt.xlabel("Iterations") plt.ylabel("Function values") plt.show() return current_state if __name__ == "__main__": import doctest doctest.testmod() def test_f1(x, y): return (x**2) + (y**2) # starting the problem with initial coordinates (3, 4) prob = SearchProblem(x=3, y=4, step_size=1, function_to_optimize=test_f1) local_min = hill_climbing(prob, find_max=False) print( "The minimum score for f(x, y) = x^2 + y^2 found via hill climbing: " f"{local_min.score()}" ) # starting the problem with initial coordinates (12, 47) prob = SearchProblem(x=12, y=47, step_size=1, function_to_optimize=test_f1) local_min = hill_climbing( prob, find_max=False, max_x=100, min_x=5, max_y=50, min_y=-5, visualization=True ) print( "The minimum score for f(x, y) = x^2 + y^2 with the domain 100 > x > 5 " f"and 50 > y > - 5 found via hill climbing: {local_min.score()}" ) def test_f2(x, y): return (3 * x**2) - (6 * y) prob = SearchProblem(x=3, y=4, step_size=1, function_to_optimize=test_f1) local_min = hill_climbing(prob, find_max=True) print( "The maximum score for f(x, y) = x^2 + y^2 found via hill climbing: " f"{local_min.score()}" )
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
""" Pure Python implementations of a Fixed Priority Queue and an Element Priority Queue using Python lists. """ class OverFlowError(Exception): pass class UnderFlowError(Exception): pass class FixedPriorityQueue: """ Tasks can be added to a Priority Queue at any time and in any order but when Tasks are removed then the Task with the highest priority is removed in FIFO order. In code we will use three levels of priority with priority zero Tasks being the most urgent (high priority) and priority 2 tasks being the least urgent. Examples >>> fpq = FixedPriorityQueue() >>> fpq.enqueue(0, 10) >>> fpq.enqueue(1, 70) >>> fpq.enqueue(0, 100) >>> fpq.enqueue(2, 1) >>> fpq.enqueue(2, 5) >>> fpq.enqueue(1, 7) >>> fpq.enqueue(2, 4) >>> fpq.enqueue(1, 64) >>> fpq.enqueue(0, 128) >>> print(fpq) Priority 0: [10, 100, 128] Priority 1: [70, 7, 64] Priority 2: [1, 5, 4] >>> fpq.dequeue() 10 >>> fpq.dequeue() 100 >>> fpq.dequeue() 128 >>> fpq.dequeue() 70 >>> fpq.dequeue() 7 >>> print(fpq) Priority 0: [] Priority 1: [64] Priority 2: [1, 5, 4] >>> fpq.dequeue() 64 >>> fpq.dequeue() 1 >>> fpq.dequeue() 5 >>> fpq.dequeue() 4 >>> fpq.dequeue() Traceback (most recent call last): ... data_structures.queue.priority_queue_using_list.UnderFlowError: All queues are empty >>> print(fpq) Priority 0: [] Priority 1: [] Priority 2: [] """ def __init__(self): self.queues = [ [], [], [], ] def enqueue(self, priority: int, data: int) -> None: """ Add an element to a queue based on its priority. If the priority is invalid ValueError is raised. If the queue is full an OverFlowError is raised. """ try: if len(self.queues[priority]) >= 100: raise OverflowError("Maximum queue size is 100") self.queues[priority].append(data) except IndexError: raise ValueError("Valid priorities are 0, 1, and 2") def dequeue(self) -> int: """ Return the highest priority element in FIFO order. If the queue is empty then an under flow exception is raised. """ for queue in self.queues: if queue: return queue.pop(0) raise UnderFlowError("All queues are empty") def __str__(self) -> str: return "\n".join(f"Priority {i}: {q}" for i, q in enumerate(self.queues)) class ElementPriorityQueue: """ Element Priority Queue is the same as Fixed Priority Queue except that the value of the element itself is the priority. The rules for priorities are the same the as Fixed Priority Queue. >>> epq = ElementPriorityQueue() >>> epq.enqueue(10) >>> epq.enqueue(70) >>> epq.enqueue(4) >>> epq.enqueue(1) >>> epq.enqueue(5) >>> epq.enqueue(7) >>> epq.enqueue(4) >>> epq.enqueue(64) >>> epq.enqueue(128) >>> print(epq) [10, 70, 4, 1, 5, 7, 4, 64, 128] >>> epq.dequeue() 1 >>> epq.dequeue() 4 >>> epq.dequeue() 4 >>> epq.dequeue() 5 >>> epq.dequeue() 7 >>> epq.dequeue() 10 >>> print(epq) [70, 64, 128] >>> epq.dequeue() 64 >>> epq.dequeue() 70 >>> epq.dequeue() 128 >>> epq.dequeue() Traceback (most recent call last): ... data_structures.queue.priority_queue_using_list.UnderFlowError: The queue is empty >>> print(epq) [] """ def __init__(self): self.queue = [] def enqueue(self, data: int) -> None: """ This function enters the element into the queue If the queue is full an Exception is raised saying Over Flow! """ if len(self.queue) == 100: raise OverFlowError("Maximum queue size is 100") self.queue.append(data) def dequeue(self) -> int: """ Return the highest priority element in FIFO order. If the queue is empty then an under flow exception is raised. 
""" if not self.queue: raise UnderFlowError("The queue is empty") else: data = min(self.queue) self.queue.remove(data) return data def __str__(self) -> str: """ Prints all the elements within the Element Priority Queue """ return str(self.queue) def fixed_priority_queue(): fpq = FixedPriorityQueue() fpq.enqueue(0, 10) fpq.enqueue(1, 70) fpq.enqueue(0, 100) fpq.enqueue(2, 1) fpq.enqueue(2, 5) fpq.enqueue(1, 7) fpq.enqueue(2, 4) fpq.enqueue(1, 64) fpq.enqueue(0, 128) print(fpq) print(fpq.dequeue()) print(fpq.dequeue()) print(fpq.dequeue()) print(fpq.dequeue()) print(fpq.dequeue()) print(fpq) print(fpq.dequeue()) print(fpq.dequeue()) print(fpq.dequeue()) print(fpq.dequeue()) print(fpq.dequeue()) def element_priority_queue(): epq = ElementPriorityQueue() epq.enqueue(10) epq.enqueue(70) epq.enqueue(100) epq.enqueue(1) epq.enqueue(5) epq.enqueue(7) epq.enqueue(4) epq.enqueue(64) epq.enqueue(128) print(epq) print(epq.dequeue()) print(epq.dequeue()) print(epq.dequeue()) print(epq.dequeue()) print(epq.dequeue()) print(epq) print(epq.dequeue()) print(epq.dequeue()) print(epq.dequeue()) print(epq.dequeue()) print(epq.dequeue()) if __name__ == "__main__": fixed_priority_queue() element_priority_queue()
""" Pure Python implementations of a Fixed Priority Queue and an Element Priority Queue using Python lists. """ class OverFlowError(Exception): pass class UnderFlowError(Exception): pass class FixedPriorityQueue: """ Tasks can be added to a Priority Queue at any time and in any order but when Tasks are removed then the Task with the highest priority is removed in FIFO order. In code we will use three levels of priority with priority zero Tasks being the most urgent (high priority) and priority 2 tasks being the least urgent. Examples >>> fpq = FixedPriorityQueue() >>> fpq.enqueue(0, 10) >>> fpq.enqueue(1, 70) >>> fpq.enqueue(0, 100) >>> fpq.enqueue(2, 1) >>> fpq.enqueue(2, 5) >>> fpq.enqueue(1, 7) >>> fpq.enqueue(2, 4) >>> fpq.enqueue(1, 64) >>> fpq.enqueue(0, 128) >>> print(fpq) Priority 0: [10, 100, 128] Priority 1: [70, 7, 64] Priority 2: [1, 5, 4] >>> fpq.dequeue() 10 >>> fpq.dequeue() 100 >>> fpq.dequeue() 128 >>> fpq.dequeue() 70 >>> fpq.dequeue() 7 >>> print(fpq) Priority 0: [] Priority 1: [64] Priority 2: [1, 5, 4] >>> fpq.dequeue() 64 >>> fpq.dequeue() 1 >>> fpq.dequeue() 5 >>> fpq.dequeue() 4 >>> fpq.dequeue() Traceback (most recent call last): ... data_structures.queue.priority_queue_using_list.UnderFlowError: All queues are empty >>> print(fpq) Priority 0: [] Priority 1: [] Priority 2: [] """ def __init__(self): self.queues = [ [], [], [], ] def enqueue(self, priority: int, data: int) -> None: """ Add an element to a queue based on its priority. If the priority is invalid ValueError is raised. If the queue is full an OverFlowError is raised. """ try: if len(self.queues[priority]) >= 100: raise OverflowError("Maximum queue size is 100") self.queues[priority].append(data) except IndexError: raise ValueError("Valid priorities are 0, 1, and 2") def dequeue(self) -> int: """ Return the highest priority element in FIFO order. If the queue is empty then an under flow exception is raised. """ for queue in self.queues: if queue: return queue.pop(0) raise UnderFlowError("All queues are empty") def __str__(self) -> str: return "\n".join(f"Priority {i}: {q}" for i, q in enumerate(self.queues)) class ElementPriorityQueue: """ Element Priority Queue is the same as Fixed Priority Queue except that the value of the element itself is the priority. The rules for priorities are the same the as Fixed Priority Queue. >>> epq = ElementPriorityQueue() >>> epq.enqueue(10) >>> epq.enqueue(70) >>> epq.enqueue(4) >>> epq.enqueue(1) >>> epq.enqueue(5) >>> epq.enqueue(7) >>> epq.enqueue(4) >>> epq.enqueue(64) >>> epq.enqueue(128) >>> print(epq) [10, 70, 4, 1, 5, 7, 4, 64, 128] >>> epq.dequeue() 1 >>> epq.dequeue() 4 >>> epq.dequeue() 4 >>> epq.dequeue() 5 >>> epq.dequeue() 7 >>> epq.dequeue() 10 >>> print(epq) [70, 64, 128] >>> epq.dequeue() 64 >>> epq.dequeue() 70 >>> epq.dequeue() 128 >>> epq.dequeue() Traceback (most recent call last): ... data_structures.queue.priority_queue_using_list.UnderFlowError: The queue is empty >>> print(epq) [] """ def __init__(self): self.queue = [] def enqueue(self, data: int) -> None: """ This function enters the element into the queue If the queue is full an Exception is raised saying Over Flow! """ if len(self.queue) == 100: raise OverFlowError("Maximum queue size is 100") self.queue.append(data) def dequeue(self) -> int: """ Return the highest priority element in FIFO order. If the queue is empty then an under flow exception is raised. 
""" if not self.queue: raise UnderFlowError("The queue is empty") else: data = min(self.queue) self.queue.remove(data) return data def __str__(self) -> str: """ Prints all the elements within the Element Priority Queue """ return str(self.queue) def fixed_priority_queue(): fpq = FixedPriorityQueue() fpq.enqueue(0, 10) fpq.enqueue(1, 70) fpq.enqueue(0, 100) fpq.enqueue(2, 1) fpq.enqueue(2, 5) fpq.enqueue(1, 7) fpq.enqueue(2, 4) fpq.enqueue(1, 64) fpq.enqueue(0, 128) print(fpq) print(fpq.dequeue()) print(fpq.dequeue()) print(fpq.dequeue()) print(fpq.dequeue()) print(fpq.dequeue()) print(fpq) print(fpq.dequeue()) print(fpq.dequeue()) print(fpq.dequeue()) print(fpq.dequeue()) print(fpq.dequeue()) def element_priority_queue(): epq = ElementPriorityQueue() epq.enqueue(10) epq.enqueue(70) epq.enqueue(100) epq.enqueue(1) epq.enqueue(5) epq.enqueue(7) epq.enqueue(4) epq.enqueue(64) epq.enqueue(128) print(epq) print(epq.dequeue()) print(epq.dequeue()) print(epq.dequeue()) print(epq.dequeue()) print(epq.dequeue()) print(epq) print(epq.dequeue()) print(epq.dequeue()) print(epq.dequeue()) print(epq.dequeue()) print(epq.dequeue()) if __name__ == "__main__": fixed_priority_queue() element_priority_queue()
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
from __future__ import annotations def modular_division(a: int, b: int, n: int) -> int: """ Modular Division : An efficient algorithm for dividing b by a modulo n. GCD ( Greatest Common Divisor ) or HCF ( Highest Common Factor ) Given three integers a, b, and n, such that gcd(a,n)=1 and n>1, the algorithm should return an integer x such that 0≤x≤n−1, and b/a=x(modn) (that is, b=ax(modn)). Theorem: a has a multiplicative inverse modulo n iff gcd(a,n) = 1 This find x = b*a^(-1) mod n Uses ExtendedEuclid to find the inverse of a >>> modular_division(4,8,5) 2 >>> modular_division(3,8,5) 1 >>> modular_division(4, 11, 5) 4 """ assert n > 1 and a > 0 and greatest_common_divisor(a, n) == 1 (d, t, s) = extended_gcd(n, a) # Implemented below x = (b * s) % n return x def invert_modulo(a: int, n: int) -> int: """ This function find the inverses of a i.e., a^(-1) >>> invert_modulo(2, 5) 3 >>> invert_modulo(8,7) 1 """ (b, x) = extended_euclid(a, n) # Implemented below if b < 0: b = (b % n + n) % n return b # ------------------ Finding Modular division using invert_modulo ------------------- def modular_division2(a: int, b: int, n: int) -> int: """ This function used the above inversion of a to find x = (b*a^(-1))mod n >>> modular_division2(4,8,5) 2 >>> modular_division2(3,8,5) 1 >>> modular_division2(4, 11, 5) 4 """ s = invert_modulo(a, n) x = (b * s) % n return x def extended_gcd(a: int, b: int) -> tuple[int, int, int]: """ Extended Euclid's Algorithm : If d divides a and b and d = a*x + b*y for integers x and y, then d = gcd(a,b) >>> extended_gcd(10, 6) (2, -1, 2) >>> extended_gcd(7, 5) (1, -2, 3) ** extended_gcd function is used when d = gcd(a,b) is required in output """ assert a >= 0 and b >= 0 if b == 0: d, x, y = a, 1, 0 else: (d, p, q) = extended_gcd(b, a % b) x = q y = p - q * (a // b) assert a % d == 0 and b % d == 0 assert d == a * x + b * y return (d, x, y) def extended_euclid(a: int, b: int) -> tuple[int, int]: """ Extended Euclid >>> extended_euclid(10, 6) (-1, 2) >>> extended_euclid(7, 5) (-2, 3) """ if b == 0: return (1, 0) (x, y) = extended_euclid(b, a % b) k = a // b return (y, x - k * y) def greatest_common_divisor(a: int, b: int) -> int: """ Euclid's Lemma : d divides a and b, if and only if d divides a-b and b Euclid's Algorithm >>> greatest_common_divisor(7,5) 1 Note : In number theory, two integers a and b are said to be relatively prime, mutually prime, or co-prime if the only positive integer (factor) that divides both of them is 1 i.e., gcd(a,b) = 1. >>> greatest_common_divisor(121, 11) 11 """ if a < b: a, b = b, a while a % b != 0: a, b = b, a % b return b if __name__ == "__main__": from doctest import testmod testmod(name="modular_division", verbose=True) testmod(name="modular_division2", verbose=True) testmod(name="invert_modulo", verbose=True) testmod(name="extended_gcd", verbose=True) testmod(name="extended_euclid", verbose=True) testmod(name="greatest_common_divisor", verbose=True)
from __future__ import annotations def modular_division(a: int, b: int, n: int) -> int: """ Modular Division : An efficient algorithm for dividing b by a modulo n. GCD ( Greatest Common Divisor ) or HCF ( Highest Common Factor ) Given three integers a, b, and n, such that gcd(a,n)=1 and n>1, the algorithm should return an integer x such that 0≤x≤n−1, and b/a=x(modn) (that is, b=ax(modn)). Theorem: a has a multiplicative inverse modulo n iff gcd(a,n) = 1 This find x = b*a^(-1) mod n Uses ExtendedEuclid to find the inverse of a >>> modular_division(4,8,5) 2 >>> modular_division(3,8,5) 1 >>> modular_division(4, 11, 5) 4 """ assert n > 1 and a > 0 and greatest_common_divisor(a, n) == 1 (d, t, s) = extended_gcd(n, a) # Implemented below x = (b * s) % n return x def invert_modulo(a: int, n: int) -> int: """ This function find the inverses of a i.e., a^(-1) >>> invert_modulo(2, 5) 3 >>> invert_modulo(8,7) 1 """ (b, x) = extended_euclid(a, n) # Implemented below if b < 0: b = (b % n + n) % n return b # ------------------ Finding Modular division using invert_modulo ------------------- def modular_division2(a: int, b: int, n: int) -> int: """ This function used the above inversion of a to find x = (b*a^(-1))mod n >>> modular_division2(4,8,5) 2 >>> modular_division2(3,8,5) 1 >>> modular_division2(4, 11, 5) 4 """ s = invert_modulo(a, n) x = (b * s) % n return x def extended_gcd(a: int, b: int) -> tuple[int, int, int]: """ Extended Euclid's Algorithm : If d divides a and b and d = a*x + b*y for integers x and y, then d = gcd(a,b) >>> extended_gcd(10, 6) (2, -1, 2) >>> extended_gcd(7, 5) (1, -2, 3) ** extended_gcd function is used when d = gcd(a,b) is required in output """ assert a >= 0 and b >= 0 if b == 0: d, x, y = a, 1, 0 else: (d, p, q) = extended_gcd(b, a % b) x = q y = p - q * (a // b) assert a % d == 0 and b % d == 0 assert d == a * x + b * y return (d, x, y) def extended_euclid(a: int, b: int) -> tuple[int, int]: """ Extended Euclid >>> extended_euclid(10, 6) (-1, 2) >>> extended_euclid(7, 5) (-2, 3) """ if b == 0: return (1, 0) (x, y) = extended_euclid(b, a % b) k = a // b return (y, x - k * y) def greatest_common_divisor(a: int, b: int) -> int: """ Euclid's Lemma : d divides a and b, if and only if d divides a-b and b Euclid's Algorithm >>> greatest_common_divisor(7,5) 1 Note : In number theory, two integers a and b are said to be relatively prime, mutually prime, or co-prime if the only positive integer (factor) that divides both of them is 1 i.e., gcd(a,b) = 1. >>> greatest_common_divisor(121, 11) 11 """ if a < b: a, b = b, a while a % b != 0: a, b = b, a % b return b if __name__ == "__main__": from doctest import testmod testmod(name="modular_division", verbose=True) testmod(name="modular_division2", verbose=True) testmod(name="invert_modulo", verbose=True) testmod(name="extended_gcd", verbose=True) testmod(name="extended_euclid", verbose=True) testmod(name="greatest_common_divisor", verbose=True)
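A worked illustration added alongside the module above: dividing 8 by 4 modulo 5 means multiplying 8 by the inverse of 4 modulo 5. That inverse is 4, because 4 * 4 = 16 = 3 * 5 + 1, so 8 * 4 = 32 leaves remainder 2, matching modular_division(4, 8, 5) == 2 in the doctests. On Python 3.8+ the built-in pow gives the same inverse:

a, b, n = 4, 8, 5
inverse = pow(a, -1, n)   # modular inverse via three-argument pow (Python 3.8+)
print(inverse)            # 4
print((b * inverse) % n)  # 2, the same result as modular_division(4, 8, 5)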
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
#!/bin/sh # # An example hook script to prepare the commit log message. # Called by "git commit" with the name of the file that has the # commit message, followed by the description of the commit # message's source. The hook's purpose is to edit the commit # message file. If the hook fails with a non-zero status, # the commit is aborted. # # To enable this hook, rename this file to "prepare-commit-msg". # This hook includes three examples. The first one removes the # "# Please enter the commit message..." help message. # # The second includes the output of "git diff --name-status -r" # into the message, just before the "git status" output. It is # commented because it doesn't cope with --amend or with squashed # commits. # # The third example adds a Signed-off-by line to the message, that can # still be edited. This is rarely a good idea. COMMIT_MSG_FILE=$1 COMMIT_SOURCE=$2 SHA1=$3 /usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE" # case "$COMMIT_SOURCE,$SHA1" in # ,|template,) # /usr/bin/perl -i.bak -pe ' # print "\n" . `git diff --cached --name-status -r` # if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;; # *) ;; # esac # SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') # git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE" # if test -z "$COMMIT_SOURCE" # then # /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE" # fi
#!/bin/sh # # An example hook script to prepare the commit log message. # Called by "git commit" with the name of the file that has the # commit message, followed by the description of the commit # message's source. The hook's purpose is to edit the commit # message file. If the hook fails with a non-zero status, # the commit is aborted. # # To enable this hook, rename this file to "prepare-commit-msg". # This hook includes three examples. The first one removes the # "# Please enter the commit message..." help message. # # The second includes the output of "git diff --name-status -r" # into the message, just before the "git status" output. It is # commented because it doesn't cope with --amend or with squashed # commits. # # The third example adds a Signed-off-by line to the message, that can # still be edited. This is rarely a good idea. COMMIT_MSG_FILE=$1 COMMIT_SOURCE=$2 SHA1=$3 /usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE" # case "$COMMIT_SOURCE,$SHA1" in # ,|template,) # /usr/bin/perl -i.bak -pe ' # print "\n" . `git diff --cached --name-status -r` # if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;; # *) ;; # esac # SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p') # git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE" # if test -z "$COMMIT_SOURCE" # then # /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE" # fi
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
9caf4784aada17dc75348f77cc8c356df503c0f3 https://github.com/TheAlgorithms/Python
9caf4784aada17dc75348f77cc8c356df503c0f3 https://github.com/TheAlgorithms/Python
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
""" Project Euler Problem 9: https://projecteuler.net/problem=9 Special Pythagorean triplet A Pythagorean triplet is a set of three natural numbers, a < b < c, for which, a^2 + b^2 = c^2 For example, 3^2 + 4^2 = 9 + 16 = 25 = 5^2. There exists exactly one Pythagorean triplet for which a + b + c = 1000. Find the product a*b*c. References: - https://en.wikipedia.org/wiki/Pythagorean_triple """ def solution(n: int = 1000) -> int: """ Return the product of a,b,c which are Pythagorean Triplet that satisfies the following: 1. a < b < c 2. a**2 + b**2 = c**2 3. a + b + c = n >>> solution(36) 1620 >>> solution(126) 66780 """ product = -1 candidate = 0 for a in range(1, n // 3): # Solving the two equations a**2+b**2=c**2 and a+b+c=N eliminating c b = (n * n - 2 * a * n) // (2 * n - 2 * a) c = n - a - b if c * c == (a * a + b * b): candidate = a * b * c if candidate >= product: product = candidate return product if __name__ == "__main__": print(f"{solution() = }")
""" Project Euler Problem 9: https://projecteuler.net/problem=9 Special Pythagorean triplet A Pythagorean triplet is a set of three natural numbers, a < b < c, for which, a^2 + b^2 = c^2 For example, 3^2 + 4^2 = 9 + 16 = 25 = 5^2. There exists exactly one Pythagorean triplet for which a + b + c = 1000. Find the product a*b*c. References: - https://en.wikipedia.org/wiki/Pythagorean_triple """ def solution(n: int = 1000) -> int: """ Return the product of a,b,c which are Pythagorean Triplet that satisfies the following: 1. a < b < c 2. a**2 + b**2 = c**2 3. a + b + c = n >>> solution(36) 1620 >>> solution(126) 66780 """ product = -1 candidate = 0 for a in range(1, n // 3): # Solving the two equations a**2+b**2=c**2 and a+b+c=N eliminating c b = (n * n - 2 * a * n) // (2 * n - 2 * a) c = n - a - b if c * c == (a * a + b * b): candidate = a * b * c if candidate >= product: product = candidate return product if __name__ == "__main__": print(f"{solution() = }")
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
""" Convert Base 10 (Decimal) Values to Hexadecimal Representations """ # set decimal value for each hexadecimal digit values = { 0: "0", 1: "1", 2: "2", 3: "3", 4: "4", 5: "5", 6: "6", 7: "7", 8: "8", 9: "9", 10: "a", 11: "b", 12: "c", 13: "d", 14: "e", 15: "f", } def decimal_to_hexadecimal(decimal: float) -> str: """ take integer decimal value, return hexadecimal representation as str beginning with 0x >>> decimal_to_hexadecimal(5) '0x5' >>> decimal_to_hexadecimal(15) '0xf' >>> decimal_to_hexadecimal(37) '0x25' >>> decimal_to_hexadecimal(255) '0xff' >>> decimal_to_hexadecimal(4096) '0x1000' >>> decimal_to_hexadecimal(999098) '0xf3eba' >>> # negatives work too >>> decimal_to_hexadecimal(-256) '-0x100' >>> # floats are acceptable if equivalent to an int >>> decimal_to_hexadecimal(17.0) '0x11' >>> # other floats will error >>> decimal_to_hexadecimal(16.16) # doctest: +ELLIPSIS Traceback (most recent call last): ... AssertionError >>> # strings will error as well >>> decimal_to_hexadecimal('0xfffff') # doctest: +ELLIPSIS Traceback (most recent call last): ... AssertionError >>> # results are the same when compared to Python's default hex function >>> decimal_to_hexadecimal(-256) == hex(-256) True """ assert type(decimal) in (int, float) and decimal == int(decimal) decimal = int(decimal) hexadecimal = "" negative = False if decimal < 0: negative = True decimal *= -1 while decimal > 0: decimal, remainder = divmod(decimal, 16) hexadecimal = values[remainder] + hexadecimal hexadecimal = "0x" + hexadecimal if negative: hexadecimal = "-" + hexadecimal return hexadecimal if __name__ == "__main__": import doctest doctest.testmod()
""" Convert Base 10 (Decimal) Values to Hexadecimal Representations """ # set decimal value for each hexadecimal digit values = { 0: "0", 1: "1", 2: "2", 3: "3", 4: "4", 5: "5", 6: "6", 7: "7", 8: "8", 9: "9", 10: "a", 11: "b", 12: "c", 13: "d", 14: "e", 15: "f", } def decimal_to_hexadecimal(decimal: float) -> str: """ take integer decimal value, return hexadecimal representation as str beginning with 0x >>> decimal_to_hexadecimal(5) '0x5' >>> decimal_to_hexadecimal(15) '0xf' >>> decimal_to_hexadecimal(37) '0x25' >>> decimal_to_hexadecimal(255) '0xff' >>> decimal_to_hexadecimal(4096) '0x1000' >>> decimal_to_hexadecimal(999098) '0xf3eba' >>> # negatives work too >>> decimal_to_hexadecimal(-256) '-0x100' >>> # floats are acceptable if equivalent to an int >>> decimal_to_hexadecimal(17.0) '0x11' >>> # other floats will error >>> decimal_to_hexadecimal(16.16) # doctest: +ELLIPSIS Traceback (most recent call last): ... AssertionError >>> # strings will error as well >>> decimal_to_hexadecimal('0xfffff') # doctest: +ELLIPSIS Traceback (most recent call last): ... AssertionError >>> # results are the same when compared to Python's default hex function >>> decimal_to_hexadecimal(-256) == hex(-256) True """ assert type(decimal) in (int, float) and decimal == int(decimal) decimal = int(decimal) hexadecimal = "" negative = False if decimal < 0: negative = True decimal *= -1 while decimal > 0: decimal, remainder = divmod(decimal, 16) hexadecimal = values[remainder] + hexadecimal hexadecimal = "0x" + hexadecimal if negative: hexadecimal = "-" + hexadecimal return hexadecimal if __name__ == "__main__": import doctest doctest.testmod()
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
""" Lychrel numbers Problem 55: https://projecteuler.net/problem=55 If we take 47, reverse and add, 47 + 74 = 121, which is palindromic. Not all numbers produce palindromes so quickly. For example, 349 + 943 = 1292, 1292 + 2921 = 4213 4213 + 3124 = 7337 That is, 349 took three iterations to arrive at a palindrome. Although no one has proved it yet, it is thought that some numbers, like 196, never produce a palindrome. A number that never forms a palindrome through the reverse and add process is called a Lychrel number. Due to the theoretical nature of these numbers, and for the purpose of this problem, we shall assume that a number is Lychrel until proven otherwise. In addition you are given that for every number below ten-thousand, it will either (i) become a palindrome in less than fifty iterations, or, (ii) no one, with all the computing power that exists, has managed so far to map it to a palindrome. In fact, 10677 is the first number to be shown to require over fifty iterations before producing a palindrome: 4668731596684224866951378664 (53 iterations, 28-digits). Surprisingly, there are palindromic numbers that are themselves Lychrel numbers; the first example is 4994. How many Lychrel numbers are there below ten-thousand? """ def is_palindrome(n: int) -> bool: """ Returns True if a number is palindrome. >>> is_palindrome(12567321) False >>> is_palindrome(1221) True >>> is_palindrome(9876789) True """ return str(n) == str(n)[::-1] def sum_reverse(n: int) -> int: """ Returns the sum of n and reverse of n. >>> sum_reverse(123) 444 >>> sum_reverse(3478) 12221 >>> sum_reverse(12) 33 """ return int(n) + int(str(n)[::-1]) def solution(limit: int = 10000) -> int: """ Returns the count of all lychrel numbers below limit. >>> solution(10000) 249 >>> solution(5000) 76 >>> solution(1000) 13 """ lychrel_nums = [] for num in range(1, limit): iterations = 0 a = num while iterations < 50: num = sum_reverse(num) iterations += 1 if is_palindrome(num): break else: lychrel_nums.append(a) return len(lychrel_nums) if __name__ == "__main__": print(f"{solution() = }")
""" Lychrel numbers Problem 55: https://projecteuler.net/problem=55 If we take 47, reverse and add, 47 + 74 = 121, which is palindromic. Not all numbers produce palindromes so quickly. For example, 349 + 943 = 1292, 1292 + 2921 = 4213 4213 + 3124 = 7337 That is, 349 took three iterations to arrive at a palindrome. Although no one has proved it yet, it is thought that some numbers, like 196, never produce a palindrome. A number that never forms a palindrome through the reverse and add process is called a Lychrel number. Due to the theoretical nature of these numbers, and for the purpose of this problem, we shall assume that a number is Lychrel until proven otherwise. In addition you are given that for every number below ten-thousand, it will either (i) become a palindrome in less than fifty iterations, or, (ii) no one, with all the computing power that exists, has managed so far to map it to a palindrome. In fact, 10677 is the first number to be shown to require over fifty iterations before producing a palindrome: 4668731596684224866951378664 (53 iterations, 28-digits). Surprisingly, there are palindromic numbers that are themselves Lychrel numbers; the first example is 4994. How many Lychrel numbers are there below ten-thousand? """ def is_palindrome(n: int) -> bool: """ Returns True if a number is palindrome. >>> is_palindrome(12567321) False >>> is_palindrome(1221) True >>> is_palindrome(9876789) True """ return str(n) == str(n)[::-1] def sum_reverse(n: int) -> int: """ Returns the sum of n and reverse of n. >>> sum_reverse(123) 444 >>> sum_reverse(3478) 12221 >>> sum_reverse(12) 33 """ return int(n) + int(str(n)[::-1]) def solution(limit: int = 10000) -> int: """ Returns the count of all lychrel numbers below limit. >>> solution(10000) 249 >>> solution(5000) 76 >>> solution(1000) 13 """ lychrel_nums = [] for num in range(1, limit): iterations = 0 a = num while iterations < 50: num = sum_reverse(num) iterations += 1 if is_palindrome(num): break else: lychrel_nums.append(a) return len(lychrel_nums) if __name__ == "__main__": print(f"{solution() = }")
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
""" Project Euler Problem 80: https://projecteuler.net/problem=80 Author: Sandeep Gupta Problem statement: For the first one hundred natural numbers, find the total of the digital sums of the first one hundred decimal digits for all the irrational square roots. Time: 5 October 2020, 18:30 """ import decimal def solution() -> int: """ To evaluate the sum, Used decimal python module to calculate the decimal places up to 100, the most important thing would be take calculate a few extra places for decimal otherwise there will be rounding error. >>> solution() 40886 """ answer = 0 decimal_context = decimal.Context(prec=105) for i in range(2, 100): number = decimal.Decimal(i) sqrt_number = number.sqrt(decimal_context) if len(str(sqrt_number)) > 1: answer += int(str(sqrt_number)[0]) sqrt_number_str = str(sqrt_number)[2:101] answer += sum(int(x) for x in sqrt_number_str) return answer if __name__ == "__main__": import doctest doctest.testmod() print(f"{solution() = }")
""" Project Euler Problem 80: https://projecteuler.net/problem=80 Author: Sandeep Gupta Problem statement: For the first one hundred natural numbers, find the total of the digital sums of the first one hundred decimal digits for all the irrational square roots. Time: 5 October 2020, 18:30 """ import decimal def solution() -> int: """ To evaluate the sum, Used decimal python module to calculate the decimal places up to 100, the most important thing would be take calculate a few extra places for decimal otherwise there will be rounding error. >>> solution() 40886 """ answer = 0 decimal_context = decimal.Context(prec=105) for i in range(2, 100): number = decimal.Decimal(i) sqrt_number = number.sqrt(decimal_context) if len(str(sqrt_number)) > 1: answer += int(str(sqrt_number)[0]) sqrt_number_str = str(sqrt_number)[2:101] answer += sum(int(x) for x in sqrt_number_str) return answer if __name__ == "__main__": import doctest doctest.testmod() print(f"{solution() = }")
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
ref: refs/remotes/origin/master
ref: refs/remotes/origin/master
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
def is_palindrome(s: str) -> bool: """ Determine whether the string is palindrome :param s: :return: Boolean >>> is_palindrome("a man a plan a canal panama".replace(" ", "")) True >>> is_palindrome("Hello") False >>> is_palindrome("Able was I ere I saw Elba") True >>> is_palindrome("racecar") True >>> is_palindrome("Mr. Owl ate my metal worm?") True """ # Since Punctuation, capitalization, and spaces are usually ignored while checking # Palindrome, we first remove them from our string. s = "".join([character for character in s.lower() if character.isalnum()]) return s == s[::-1] if __name__ == "__main__": s = input("Enter string to determine whether its palindrome or not: ").strip() if is_palindrome(s): print("Given string is palindrome") else: print("Given string is not palindrome")
def is_palindrome(s: str) -> bool: """ Determine whether the string is palindrome :param s: :return: Boolean >>> is_palindrome("a man a plan a canal panama".replace(" ", "")) True >>> is_palindrome("Hello") False >>> is_palindrome("Able was I ere I saw Elba") True >>> is_palindrome("racecar") True >>> is_palindrome("Mr. Owl ate my metal worm?") True """ # Since Punctuation, capitalization, and spaces are usually ignored while checking # Palindrome, we first remove them from our string. s = "".join([character for character in s.lower() if character.isalnum()]) return s == s[::-1] if __name__ == "__main__": s = input("Enter string to determine whether its palindrome or not: ").strip() if is_palindrome(s): print("Given string is palindrome") else: print("Given string is not palindrome")
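A short added variant of the same check (a sketch, not the original implementation): two indices moving inwards avoid building the reversed copy of the cleaned string.

def is_palindrome_two_pointer(s: str) -> bool:
    # Same normalisation as above, then compare characters from both ends inward.
    s = "".join(character for character in s.lower() if character.isalnum())
    left, right = 0, len(s) - 1
    while left < right:
        if s[left] != s[right]:
            return False
        left += 1
        right -= 1
    return True


assert is_palindrome_two_pointer("Mr. Owl ate my metal worm?")
assert not is_palindrome_two_pointer("Hello")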
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
"""For reference https://en.wikipedia.org/wiki/Odd%E2%80%93even_sort """ def odd_even_sort(input_list: list) -> list: """this algorithm uses the same idea of bubblesort, but by first dividing in two phase (odd and even). Originally developed for use on parallel processors with local interconnections. :param collection: mutable ordered sequence of elements :return: same collection in ascending order Examples: >>> odd_even_sort([5 , 4 ,3 ,2 ,1]) [1, 2, 3, 4, 5] >>> odd_even_sort([]) [] >>> odd_even_sort([-10 ,-1 ,10 ,2]) [-10, -1, 2, 10] >>> odd_even_sort([1 ,2 ,3 ,4]) [1, 2, 3, 4] """ is_sorted = False while is_sorted is False: # Until all the indices are traversed keep looping is_sorted = True for i in range(0, len(input_list) - 1, 2): # iterating over all even indices if input_list[i] > input_list[i + 1]: input_list[i], input_list[i + 1] = input_list[i + 1], input_list[i] # swapping if elements not in order is_sorted = False for i in range(1, len(input_list) - 1, 2): # iterating over all odd indices if input_list[i] > input_list[i + 1]: input_list[i], input_list[i + 1] = input_list[i + 1], input_list[i] # swapping if elements not in order is_sorted = False return input_list if __name__ == "__main__": print("Enter list to be sorted") input_list = [int(x) for x in input().split()] # inputing elements of the list in one line sorted_list = odd_even_sort(input_list) print("The sorted list is") print(sorted_list)
"""For reference https://en.wikipedia.org/wiki/Odd%E2%80%93even_sort """ def odd_even_sort(input_list: list) -> list: """this algorithm uses the same idea of bubblesort, but by first dividing in two phase (odd and even). Originally developed for use on parallel processors with local interconnections. :param collection: mutable ordered sequence of elements :return: same collection in ascending order Examples: >>> odd_even_sort([5 , 4 ,3 ,2 ,1]) [1, 2, 3, 4, 5] >>> odd_even_sort([]) [] >>> odd_even_sort([-10 ,-1 ,10 ,2]) [-10, -1, 2, 10] >>> odd_even_sort([1 ,2 ,3 ,4]) [1, 2, 3, 4] """ is_sorted = False while is_sorted is False: # Until all the indices are traversed keep looping is_sorted = True for i in range(0, len(input_list) - 1, 2): # iterating over all even indices if input_list[i] > input_list[i + 1]: input_list[i], input_list[i + 1] = input_list[i + 1], input_list[i] # swapping if elements not in order is_sorted = False for i in range(1, len(input_list) - 1, 2): # iterating over all odd indices if input_list[i] > input_list[i + 1]: input_list[i], input_list[i + 1] = input_list[i + 1], input_list[i] # swapping if elements not in order is_sorted = False return input_list if __name__ == "__main__": print("Enter list to be sorted") input_list = [int(x) for x in input().split()] # inputing elements of the list in one line sorted_list = odd_even_sort(input_list) print("The sorted list is") print(sorted_list)
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
# Author: Phyllipe Bezerra (https://github.com/pmba) clothes = { 0: "underwear", 1: "pants", 2: "belt", 3: "suit", 4: "shoe", 5: "socks", 6: "shirt", 7: "tie", 8: "watch", } graph = [[1, 4], [2, 4], [3], [], [], [4], [2, 7], [3], []] visited = [0 for x in range(len(graph))] stack = [] def print_stack(stack, clothes): order = 1 while stack: current_clothing = stack.pop() print(order, clothes[current_clothing]) order += 1 def depth_first_search(u, visited, graph): visited[u] = 1 for v in graph[u]: if not visited[v]: depth_first_search(v, visited, graph) stack.append(u) def topological_sort(graph, visited): for v in range(len(graph)): if not visited[v]: depth_first_search(v, visited, graph) if __name__ == "__main__": topological_sort(graph, visited) print(stack) print_stack(stack, clothes)
# Author: Phyllipe Bezerra (https://github.com/pmba) clothes = { 0: "underwear", 1: "pants", 2: "belt", 3: "suit", 4: "shoe", 5: "socks", 6: "shirt", 7: "tie", 8: "watch", } graph = [[1, 4], [2, 4], [3], [], [], [4], [2, 7], [3], []] visited = [0 for x in range(len(graph))] stack = [] def print_stack(stack, clothes): order = 1 while stack: current_clothing = stack.pop() print(order, clothes[current_clothing]) order += 1 def depth_first_search(u, visited, graph): visited[u] = 1 for v in graph[u]: if not visited[v]: depth_first_search(v, visited, graph) stack.append(u) def topological_sort(graph, visited): for v in range(len(graph)): if not visited[v]: depth_first_search(v, visited, graph) if __name__ == "__main__": topological_sort(graph, visited) print(stack) print_stack(stack, clothes)
-1
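The record above topologically sorts the clothes dependency graph with a depth-first search and a stack. A minimal standalone sketch of the same reverse-post-order idea, assuming a small hypothetical DAG and an invented helper name `topo_order`:

```py
from __future__ import annotations


def topo_order(adj: dict[int, list[int]]) -> list[int]:
    """Return one valid topological order of a DAG given as an adjacency dict."""
    seen: set[int] = set()
    order: list[int] = []

    def dfs(node: int) -> None:
        seen.add(node)
        for neighbour in adj.get(node, []):
            if neighbour not in seen:
                dfs(neighbour)
        order.append(node)  # post-order: a node is appended after everything it points to

    for node in adj:
        if node not in seen:
            dfs(node)
    return order[::-1]  # reverse post-order is a topological order


# 0 -> 1 -> 3 and 0 -> 2 -> 3
print(topo_order({0: [1, 2], 1: [3], 2: [3], 3: []}))  # [0, 2, 1, 3]
```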
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
-1
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
# https://en.wikipedia.org/wiki/Charge_carrier_density # https://www.pveducation.org/pvcdrom/pn-junctions/equilibrium-carrier-concentration # http://www.ece.utep.edu/courses/ee3329/ee3329/Studyguide/ToC/Fundamentals/Carriers/concentrations.html from __future__ import annotations def carrier_concentration( electron_conc: float, hole_conc: float, intrinsic_conc: float, ) -> tuple: """ This function can calculate any one of the three - 1. Electron Concentration 2, Hole Concentration 3. Intrinsic Concentration given the other two. Examples - >>> carrier_concentration(electron_conc=25, hole_conc=100, intrinsic_conc=0) ('intrinsic_conc', 50.0) >>> carrier_concentration(electron_conc=0, hole_conc=1600, intrinsic_conc=200) ('electron_conc', 25.0) >>> carrier_concentration(electron_conc=1000, hole_conc=0, intrinsic_conc=1200) ('hole_conc', 1440.0) >>> carrier_concentration(electron_conc=1000, hole_conc=400, intrinsic_conc=1200) Traceback (most recent call last): File "<stdin>", line 37, in <module> ValueError: You cannot supply more or less than 2 values >>> carrier_concentration(electron_conc=-1000, hole_conc=0, intrinsic_conc=1200) Traceback (most recent call last): File "<stdin>", line 40, in <module> ValueError: Electron concentration cannot be negative in a semiconductor >>> carrier_concentration(electron_conc=0, hole_conc=-400, intrinsic_conc=1200) Traceback (most recent call last): File "<stdin>", line 44, in <module> ValueError: Hole concentration cannot be negative in a semiconductor >>> carrier_concentration(electron_conc=0, hole_conc=400, intrinsic_conc=-1200) Traceback (most recent call last): File "<stdin>", line 48, in <module> ValueError: Intrinsic concentration cannot be negative in a semiconductor """ if (electron_conc, hole_conc, intrinsic_conc).count(0) != 1: raise ValueError("You cannot supply more or less than 2 values") elif electron_conc < 0: raise ValueError("Electron concentration cannot be negative in a semiconductor") elif hole_conc < 0: raise ValueError("Hole concentration cannot be negative in a semiconductor") elif intrinsic_conc < 0: raise ValueError( "Intrinsic concentration cannot be negative in a semiconductor" ) elif electron_conc == 0: return ( "electron_conc", intrinsic_conc**2 / hole_conc, ) elif hole_conc == 0: return ( "hole_conc", intrinsic_conc**2 / electron_conc, ) elif intrinsic_conc == 0: return ( "intrinsic_conc", (electron_conc * hole_conc) ** 0.5, ) else: return (-1, -1) if __name__ == "__main__": import doctest doctest.testmod()
# https://en.wikipedia.org/wiki/Charge_carrier_density # https://www.pveducation.org/pvcdrom/pn-junctions/equilibrium-carrier-concentration # http://www.ece.utep.edu/courses/ee3329/ee3329/Studyguide/ToC/Fundamentals/Carriers/concentrations.html from __future__ import annotations def carrier_concentration( electron_conc: float, hole_conc: float, intrinsic_conc: float, ) -> tuple: """ This function can calculate any one of the three - 1. Electron Concentration 2, Hole Concentration 3. Intrinsic Concentration given the other two. Examples - >>> carrier_concentration(electron_conc=25, hole_conc=100, intrinsic_conc=0) ('intrinsic_conc', 50.0) >>> carrier_concentration(electron_conc=0, hole_conc=1600, intrinsic_conc=200) ('electron_conc', 25.0) >>> carrier_concentration(electron_conc=1000, hole_conc=0, intrinsic_conc=1200) ('hole_conc', 1440.0) >>> carrier_concentration(electron_conc=1000, hole_conc=400, intrinsic_conc=1200) Traceback (most recent call last): File "<stdin>", line 37, in <module> ValueError: You cannot supply more or less than 2 values >>> carrier_concentration(electron_conc=-1000, hole_conc=0, intrinsic_conc=1200) Traceback (most recent call last): File "<stdin>", line 40, in <module> ValueError: Electron concentration cannot be negative in a semiconductor >>> carrier_concentration(electron_conc=0, hole_conc=-400, intrinsic_conc=1200) Traceback (most recent call last): File "<stdin>", line 44, in <module> ValueError: Hole concentration cannot be negative in a semiconductor >>> carrier_concentration(electron_conc=0, hole_conc=400, intrinsic_conc=-1200) Traceback (most recent call last): File "<stdin>", line 48, in <module> ValueError: Intrinsic concentration cannot be negative in a semiconductor """ if (electron_conc, hole_conc, intrinsic_conc).count(0) != 1: raise ValueError("You cannot supply more or less than 2 values") elif electron_conc < 0: raise ValueError("Electron concentration cannot be negative in a semiconductor") elif hole_conc < 0: raise ValueError("Hole concentration cannot be negative in a semiconductor") elif intrinsic_conc < 0: raise ValueError( "Intrinsic concentration cannot be negative in a semiconductor" ) elif electron_conc == 0: return ( "electron_conc", intrinsic_conc**2 / hole_conc, ) elif hole_conc == 0: return ( "hole_conc", intrinsic_conc**2 / electron_conc, ) elif intrinsic_conc == 0: return ( "intrinsic_conc", (electron_conc * hole_conc) ** 0.5, ) else: return (-1, -1) if __name__ == "__main__": import doctest doctest.testmod()
-1
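The carrier_concentration function above is a direct application of the mass-action law, electron_conc * hole_conc = intrinsic_conc**2; each branch simply solves that identity for the missing quantity. A quick check that mirrors the doctests (illustration only):

```py
# Mass-action law used above: electron_conc * hole_conc == intrinsic_conc ** 2
print((25 * 100) ** 0.5)   # 50.0   -> carrier_concentration(25, 100, 0)
print(200**2 / 1600)       # 25.0   -> carrier_concentration(0, 1600, 200)
print(1200**2 / 1000)      # 1440.0 -> carrier_concentration(1000, 0, 1200)
```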
TheAlgorithms/Python
7,387
[pre-commit.ci] pre-commit autoupdate
<!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
pre-commit-ci[bot]
2022-10-17T19:35:07Z
2022-10-17T19:59:26Z
a34b756fd40e5cdfb69abc06dcd42f5f1b5fa21e
0c7c5fa7b0161a7433467240155356c93ae106b8
[pre-commit.ci] pre-commit autoupdate. <!--pre-commit.ci start--> updates: - [github.com/asottile/pyupgrade: v3.0.0 → v3.1.0](https://github.com/asottile/pyupgrade/compare/v3.0.0...v3.1.0) - [github.com/codespell-project/codespell: v2.2.1 → v2.2.2](https://github.com/codespell-project/codespell/compare/v2.2.1...v2.2.2) <!--pre-commit.ci end-->
-1
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
import os import random import sys from . import cryptomath_module as cryptomath from . import rabin_miller min_primitive_root = 3 # I have written my code naively same as definition of primitive root # however every time I run this program, memory exceeded... # so I used 4.80 Algorithm in # Handbook of Applied Cryptography(CRC Press, ISBN : 0-8493-8523-7, October 1996) # and it seems to run nicely! def primitive_root(p_val: int) -> int: print("Generating primitive root of p") while True: g = random.randrange(3, p_val) if pow(g, 2, p_val) == 1: continue if pow(g, p_val, p_val) == 1: continue return g def generate_key(key_size: int) -> tuple[tuple[int, int, int, int], tuple[int, int]]: print("Generating prime p...") p = rabin_miller.generate_large_prime(key_size) # select large prime number. e_1 = primitive_root(p) # one primitive root on modulo p. d = random.randrange(3, p) # private_key -> have to be greater than 2 for safety. e_2 = cryptomath.find_mod_inverse(pow(e_1, d, p), p) public_key = (key_size, e_1, e_2, p) private_key = (key_size, d) return public_key, private_key def make_key_files(name: str, key_size: int) -> None: if os.path.exists(f"{name}_pubkey.txt") or os.path.exists(f"{name}_privkey.txt"): print("\nWARNING:") print( '"%s_pubkey.txt" or "%s_privkey.txt" already exists. \n' "Use a different name or delete these files and re-run this program." % (name, name) ) sys.exit() public_key, private_key = generate_key(key_size) print(f"\nWriting public key to file {name}_pubkey.txt...") with open(f"{name}_pubkey.txt", "w") as fo: fo.write( "%d,%d,%d,%d" % (public_key[0], public_key[1], public_key[2], public_key[3]) ) print(f"Writing private key to file {name}_privkey.txt...") with open(f"{name}_privkey.txt", "w") as fo: fo.write("%d,%d" % (private_key[0], private_key[1])) def main() -> None: print("Making key files...") make_key_files("elgamal", 2048) print("Key files generation successful") if __name__ == "__main__": main()
import os import random import sys from . import cryptomath_module as cryptomath from . import rabin_miller min_primitive_root = 3 # I have written my code naively same as definition of primitive root # however every time I run this program, memory exceeded... # so I used 4.80 Algorithm in # Handbook of Applied Cryptography(CRC Press, ISBN : 0-8493-8523-7, October 1996) # and it seems to run nicely! def primitive_root(p_val: int) -> int: print("Generating primitive root of p") while True: g = random.randrange(3, p_val) if pow(g, 2, p_val) == 1: continue if pow(g, p_val, p_val) == 1: continue return g def generate_key(key_size: int) -> tuple[tuple[int, int, int, int], tuple[int, int]]: print("Generating prime p...") p = rabin_miller.generate_large_prime(key_size) # select large prime number. e_1 = primitive_root(p) # one primitive root on modulo p. d = random.randrange(3, p) # private_key -> have to be greater than 2 for safety. e_2 = cryptomath.find_mod_inverse(pow(e_1, d, p), p) public_key = (key_size, e_1, e_2, p) private_key = (key_size, d) return public_key, private_key def make_key_files(name: str, key_size: int) -> None: if os.path.exists(f"{name}_pubkey.txt") or os.path.exists(f"{name}_privkey.txt"): print("\nWARNING:") print( f'"{name}_pubkey.txt" or "{name}_privkey.txt" already exists. \n' "Use a different name or delete these files and re-run this program." ) sys.exit() public_key, private_key = generate_key(key_size) print(f"\nWriting public key to file {name}_pubkey.txt...") with open(f"{name}_pubkey.txt", "w") as fo: fo.write(f"{public_key[0]},{public_key[1]},{public_key[2]},{public_key[3]}") print(f"Writing private key to file {name}_privkey.txt...") with open(f"{name}_privkey.txt", "w") as fo: fo.write(f"{private_key[0]},{private_key[1]}") def main() -> None: print("Making key files...") make_key_files("elgamal", 2048) print("Key files generation successful") if __name__ == "__main__": main()
1
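In the ElGamal generator above, e_2 is the modular inverse of e_1**d mod p, so the key parts satisfy e_2 * e_1**d == 1 (mod p). A toy-sized sanity check of that relation, with hand-picked numbers standing in for the generated prime, primitive root and private exponent (not a secure key):

```py
# Toy-sized check of the key relation built above: e_2 * e_1**d == 1 (mod p).
# Every number here is a hand-picked stand-in; real keys use a large random prime.
p = 23            # stands in for the generated prime
e_1 = 5           # stands in for the primitive root
d = 7             # stands in for the private exponent
e_2 = pow(pow(e_1, d, p), -1, p)   # what cryptomath.find_mod_inverse computes
assert e_2 * pow(e_1, d, p) % p == 1
print(p, e_1, d, e_2)              # 23 5 7 19
```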
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
import os import random import sys from . import cryptomath_module as cryptoMath # noqa: N812 from . import rabin_miller as rabinMiller # noqa: N812 def main() -> None: print("Making key files...") make_key_files("rsa", 1024) print("Key files generation successful.") def generate_key(key_size: int) -> tuple[tuple[int, int], tuple[int, int]]: print("Generating prime p...") p = rabinMiller.generate_large_prime(key_size) print("Generating prime q...") q = rabinMiller.generate_large_prime(key_size) n = p * q print("Generating e that is relatively prime to (p - 1) * (q - 1)...") while True: e = random.randrange(2 ** (key_size - 1), 2 ** (key_size)) if cryptoMath.gcd(e, (p - 1) * (q - 1)) == 1: break print("Calculating d that is mod inverse of e...") d = cryptoMath.find_mod_inverse(e, (p - 1) * (q - 1)) public_key = (n, e) private_key = (n, d) return (public_key, private_key) def make_key_files(name: str, key_size: int) -> None: if os.path.exists(f"{name}_pubkey.txt") or os.path.exists(f"{name}_privkey.txt"): print("\nWARNING:") print( '"%s_pubkey.txt" or "%s_privkey.txt" already exists. \n' "Use a different name or delete these files and re-run this program." % (name, name) ) sys.exit() public_key, private_key = generate_key(key_size) print(f"\nWriting public key to file {name}_pubkey.txt...") with open(f"{name}_pubkey.txt", "w") as out_file: out_file.write(f"{key_size},{public_key[0]},{public_key[1]}") print(f"Writing private key to file {name}_privkey.txt...") with open(f"{name}_privkey.txt", "w") as out_file: out_file.write(f"{key_size},{private_key[0]},{private_key[1]}") if __name__ == "__main__": main()
import os import random import sys from . import cryptomath_module as cryptoMath # noqa: N812 from . import rabin_miller as rabinMiller # noqa: N812 def main() -> None: print("Making key files...") make_key_files("rsa", 1024) print("Key files generation successful.") def generate_key(key_size: int) -> tuple[tuple[int, int], tuple[int, int]]: print("Generating prime p...") p = rabinMiller.generate_large_prime(key_size) print("Generating prime q...") q = rabinMiller.generate_large_prime(key_size) n = p * q print("Generating e that is relatively prime to (p - 1) * (q - 1)...") while True: e = random.randrange(2 ** (key_size - 1), 2 ** (key_size)) if cryptoMath.gcd(e, (p - 1) * (q - 1)) == 1: break print("Calculating d that is mod inverse of e...") d = cryptoMath.find_mod_inverse(e, (p - 1) * (q - 1)) public_key = (n, e) private_key = (n, d) return (public_key, private_key) def make_key_files(name: str, key_size: int) -> None: if os.path.exists(f"{name}_pubkey.txt") or os.path.exists(f"{name}_privkey.txt"): print("\nWARNING:") print( f'"{name}_pubkey.txt" or "{name}_privkey.txt" already exists. \n' "Use a different name or delete these files and re-run this program." ) sys.exit() public_key, private_key = generate_key(key_size) print(f"\nWriting public key to file {name}_pubkey.txt...") with open(f"{name}_pubkey.txt", "w") as out_file: out_file.write(f"{key_size},{public_key[0]},{public_key[1]}") print(f"Writing private key to file {name}_privkey.txt...") with open(f"{name}_privkey.txt", "w") as out_file: out_file.write(f"{key_size},{private_key[0]},{private_key[1]}") if __name__ == "__main__": main()
1
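The RSA generator above outputs a public pair (n, e) and a private pair (n, d) with e * d == 1 mod (p - 1) * (q - 1), which is what makes decryption undo encryption. A toy round-trip with the classic small primes 61 and 53 (illustration only, far too small to be secure):

```py
# Toy RSA round-trip: the same (n, e)/(n, d) structure the generator above
# produces, at a size that is not secure.
p, q = 61, 53
n = p * q                   # 3233
phi = (p - 1) * (q - 1)     # 3120
e = 17                      # chosen coprime to phi
d = pow(e, -1, phi)         # 2753, modular inverse of e (Python 3.8+)
message = 65
ciphertext = pow(message, e, n)          # 2790
assert pow(ciphertext, d, n) == message  # decryption undoes encryption
print(ciphertext, pow(ciphertext, d, n))
```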
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
""" Author : Turfa Auliarachman Date : October 12, 2016 This is a pure Python implementation of Dynamic Programming solution to the edit distance problem. The problem is : Given two strings A and B. Find the minimum number of operations to string B such that A = B. The permitted operations are removal, insertion, and substitution. """ class EditDistance: """ Use : solver = EditDistance() editDistanceResult = solver.solve(firstString, secondString) """ def __init__(self): self.__prepare__() def __prepare__(self, n=0, m=0): self.dp = [[-1 for y in range(0, m)] for x in range(0, n)] def __solve_dp(self, x, y): if x == -1: return y + 1 elif y == -1: return x + 1 elif self.dp[x][y] > -1: return self.dp[x][y] else: if self.a[x] == self.b[y]: self.dp[x][y] = self.__solve_dp(x - 1, y - 1) else: self.dp[x][y] = 1 + min( self.__solve_dp(x, y - 1), self.__solve_dp(x - 1, y), self.__solve_dp(x - 1, y - 1), ) return self.dp[x][y] def solve(self, a, b): if isinstance(a, bytes): a = a.decode("ascii") if isinstance(b, bytes): b = b.decode("ascii") self.a = str(a) self.b = str(b) self.__prepare__(len(a), len(b)) return self.__solve_dp(len(a) - 1, len(b) - 1) def min_distance_bottom_up(word1: str, word2: str) -> int: """ >>> min_distance_bottom_up("intention", "execution") 5 >>> min_distance_bottom_up("intention", "") 9 >>> min_distance_bottom_up("", "") 0 """ m = len(word1) n = len(word2) dp = [[0 for _ in range(n + 1)] for _ in range(m + 1)] for i in range(m + 1): for j in range(n + 1): if i == 0: # first string is empty dp[i][j] = j elif j == 0: # second string is empty dp[i][j] = i elif ( word1[i - 1] == word2[j - 1] ): # last character of both substing is equal dp[i][j] = dp[i - 1][j - 1] else: insert = dp[i][j - 1] delete = dp[i - 1][j] replace = dp[i - 1][j - 1] dp[i][j] = 1 + min(insert, delete, replace) return dp[m][n] if __name__ == "__main__": solver = EditDistance() print("****************** Testing Edit Distance DP Algorithm ******************") print() S1 = input("Enter the first string: ").strip() S2 = input("Enter the second string: ").strip() print() print("The minimum Edit Distance is: %d" % (solver.solve(S1, S2))) print("The minimum Edit Distance is: %d" % (min_distance_bottom_up(S1, S2))) print() print("*************** End of Testing Edit Distance DP Algorithm ***************")
""" Author : Turfa Auliarachman Date : October 12, 2016 This is a pure Python implementation of Dynamic Programming solution to the edit distance problem. The problem is : Given two strings A and B. Find the minimum number of operations to string B such that A = B. The permitted operations are removal, insertion, and substitution. """ class EditDistance: """ Use : solver = EditDistance() editDistanceResult = solver.solve(firstString, secondString) """ def __init__(self): self.__prepare__() def __prepare__(self, n=0, m=0): self.dp = [[-1 for y in range(0, m)] for x in range(0, n)] def __solve_dp(self, x, y): if x == -1: return y + 1 elif y == -1: return x + 1 elif self.dp[x][y] > -1: return self.dp[x][y] else: if self.a[x] == self.b[y]: self.dp[x][y] = self.__solve_dp(x - 1, y - 1) else: self.dp[x][y] = 1 + min( self.__solve_dp(x, y - 1), self.__solve_dp(x - 1, y), self.__solve_dp(x - 1, y - 1), ) return self.dp[x][y] def solve(self, a, b): if isinstance(a, bytes): a = a.decode("ascii") if isinstance(b, bytes): b = b.decode("ascii") self.a = str(a) self.b = str(b) self.__prepare__(len(a), len(b)) return self.__solve_dp(len(a) - 1, len(b) - 1) def min_distance_bottom_up(word1: str, word2: str) -> int: """ >>> min_distance_bottom_up("intention", "execution") 5 >>> min_distance_bottom_up("intention", "") 9 >>> min_distance_bottom_up("", "") 0 """ m = len(word1) n = len(word2) dp = [[0 for _ in range(n + 1)] for _ in range(m + 1)] for i in range(m + 1): for j in range(n + 1): if i == 0: # first string is empty dp[i][j] = j elif j == 0: # second string is empty dp[i][j] = i elif ( word1[i - 1] == word2[j - 1] ): # last character of both substing is equal dp[i][j] = dp[i - 1][j - 1] else: insert = dp[i][j - 1] delete = dp[i - 1][j] replace = dp[i - 1][j - 1] dp[i][j] = 1 + min(insert, delete, replace) return dp[m][n] if __name__ == "__main__": solver = EditDistance() print("****************** Testing Edit Distance DP Algorithm ******************") print() S1 = input("Enter the first string: ").strip() S2 = input("Enter the second string: ").strip() print() print(f"The minimum Edit Distance is: {solver.solve(S1, S2)}") print(f"The minimum Edit Distance is: {min_distance_bottom_up(S1, S2)}") print() print("*************** End of Testing Edit Distance DP Algorithm ***************")
1
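Both edit-distance implementations above use the same recurrence: copy the diagonal value when the last characters match, otherwise take 1 + min(insert, delete, replace). The sketch below (an invented helper that mirrors min_distance_bottom_up) prints the whole table for a small pair so the recurrence can be checked by hand:

```py
def edit_distance_table(word1: str, word2: str) -> list:
    """Build the full DP table used by min_distance_bottom_up (illustration)."""
    m, n = len(word1), len(word2)
    dp = [[0] * (n + 1) for _ in range(m + 1)]
    for i in range(m + 1):
        for j in range(n + 1):
            if i == 0:
                dp[i][j] = j                          # insert j characters
            elif j == 0:
                dp[i][j] = i                          # delete i characters
            elif word1[i - 1] == word2[j - 1]:
                dp[i][j] = dp[i - 1][j - 1]           # characters match, no cost
            else:
                dp[i][j] = 1 + min(dp[i][j - 1],      # insert
                                   dp[i - 1][j],      # delete
                                   dp[i - 1][j - 1])  # replace
    return dp


for row in edit_distance_table("cat", "cut"):
    print(row)
# the last cell is 1: replacing 'a' with 'u' turns "cat" into "cut"
```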
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
""" Simple multithreaded algorithm to show how the 4 phases of a genetic algorithm works (Evaluation, Selection, Crossover and Mutation) https://en.wikipedia.org/wiki/Genetic_algorithm Author: D4rkia """ from __future__ import annotations import random # Maximum size of the population. bigger could be faster but is more memory expensive N_POPULATION = 200 # Number of elements selected in every generation for evolution the selection takes # place from the best to the worst of that generation must be smaller than N_POPULATION N_SELECTED = 50 # Probability that an element of a generation can mutate changing one of its genes this # guarantees that all genes will be used during evolution MUTATION_PROBABILITY = 0.4 # just a seed to improve randomness required by the algorithm random.seed(random.randint(0, 1000)) def basic(target: str, genes: list[str], debug: bool = True) -> tuple[int, int, str]: """ Verify that the target contains no genes besides the ones inside genes variable. >>> from string import ascii_lowercase >>> basic("doctest", ascii_lowercase, debug=False)[2] 'doctest' >>> genes = list(ascii_lowercase) >>> genes.remove("e") >>> basic("test", genes) Traceback (most recent call last): ... ValueError: ['e'] is not in genes list, evolution cannot converge >>> genes.remove("s") >>> basic("test", genes) Traceback (most recent call last): ... ValueError: ['e', 's'] is not in genes list, evolution cannot converge >>> genes.remove("t") >>> basic("test", genes) Traceback (most recent call last): ... ValueError: ['e', 's', 't'] is not in genes list, evolution cannot converge """ # Verify if N_POPULATION is bigger than N_SELECTED if N_POPULATION < N_SELECTED: raise ValueError(f"{N_POPULATION} must be bigger than {N_SELECTED}") # Verify that the target contains no genes besides the ones inside genes variable. 
not_in_genes_list = sorted({c for c in target if c not in genes}) if not_in_genes_list: raise ValueError( f"{not_in_genes_list} is not in genes list, evolution cannot converge" ) # Generate random starting population population = [] for _ in range(N_POPULATION): population.append("".join([random.choice(genes) for i in range(len(target))])) # Just some logs to know what the algorithms is doing generation, total_population = 0, 0 # This loop will end when we will find a perfect match for our target while True: generation += 1 total_population += len(population) # Random population created now it's time to evaluate def evaluate(item: str, main_target: str = target) -> tuple[str, float]: """ Evaluate how similar the item is with the target by just counting each char in the right position >>> evaluate("Helxo Worlx", Hello World) ["Helxo Worlx", 9] """ score = len( [g for position, g in enumerate(item) if g == main_target[position]] ) return (item, float(score)) # noqa: B023 # Adding a bit of concurrency can make everything faster, # # import concurrent.futures # population_score: list[tuple[str, float]] = [] # with concurrent.futures.ThreadPoolExecutor( # max_workers=NUM_WORKERS) as executor: # futures = {executor.submit(evaluate, item) for item in population} # concurrent.futures.wait(futures) # population_score = [item.result() for item in futures] # # but with a simple algorithm like this will probably be slower # we just need to call evaluate for every item inside population population_score = [evaluate(item) for item in population] # Check if there is a matching evolution population_score = sorted(population_score, key=lambda x: x[1], reverse=True) if population_score[0][0] == target: return (generation, total_population, population_score[0][0]) # Print the Best result every 10 generation # just to know that the algorithm is working if debug and generation % 10 == 0: print( f"\nGeneration: {generation}" f"\nTotal Population:{total_population}" f"\nBest score: {population_score[0][1]}" f"\nBest string: {population_score[0][0]}" ) # Flush the old population keeping some of the best evolutions # Keeping this avoid regression of evolution population_best = population[: int(N_POPULATION / 3)] population.clear() population.extend(population_best) # Normalize population score from 0 to 1 population_score = [ (item, score / len(target)) for item, score in population_score ] # Select, Crossover and Mutate a new population def select(parent_1: tuple[str, float]) -> list[str]: """Select the second parent and generate new population""" pop = [] # Generate more child proportionally to the fitness score child_n = int(parent_1[1] * 100) + 1 child_n = 10 if child_n >= 10 else child_n for _ in range(child_n): parent_2 = population_score[ # noqa: B023 random.randint(0, N_SELECTED) ][0] child_1, child_2 = crossover(parent_1[0], parent_2) # Append new string to the population list pop.append(mutate(child_1)) pop.append(mutate(child_2)) return pop def crossover(parent_1: str, parent_2: str) -> tuple[str, str]: """Slice and combine two string in a random point""" random_slice = random.randint(0, len(parent_1) - 1) child_1 = parent_1[:random_slice] + parent_2[random_slice:] child_2 = parent_2[:random_slice] + parent_1[random_slice:] return (child_1, child_2) def mutate(child: str) -> str: """Mutate a random gene of a child with another one from the list""" child_list = list(child) if random.uniform(0, 1) < MUTATION_PROBABILITY: child_list[random.randint(0, len(child)) - 1] = random.choice(genes) return 
"".join(child_list) # This is Selection for i in range(N_SELECTED): population.extend(select(population_score[int(i)])) # Check if the population has already reached the maximum value and if so, # break the cycle. if this check is disabled the algorithm will take # forever to compute large strings but will also calculate small string in # a lot fewer generations if len(population) > N_POPULATION: break if __name__ == "__main__": target_str = ( "This is a genetic algorithm to evaluate, combine, evolve, and mutate a string!" ) genes_list = list( " ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklm" "nopqrstuvwxyz.,;!?+-*#@^'èéòà€ù=)(&%$£/\\" ) print( "\nGeneration: %s\nTotal Population: %s\nTarget: %s" % basic(target_str, genes_list) )
""" Simple multithreaded algorithm to show how the 4 phases of a genetic algorithm works (Evaluation, Selection, Crossover and Mutation) https://en.wikipedia.org/wiki/Genetic_algorithm Author: D4rkia """ from __future__ import annotations import random # Maximum size of the population. bigger could be faster but is more memory expensive N_POPULATION = 200 # Number of elements selected in every generation for evolution the selection takes # place from the best to the worst of that generation must be smaller than N_POPULATION N_SELECTED = 50 # Probability that an element of a generation can mutate changing one of its genes this # guarantees that all genes will be used during evolution MUTATION_PROBABILITY = 0.4 # just a seed to improve randomness required by the algorithm random.seed(random.randint(0, 1000)) def basic(target: str, genes: list[str], debug: bool = True) -> tuple[int, int, str]: """ Verify that the target contains no genes besides the ones inside genes variable. >>> from string import ascii_lowercase >>> basic("doctest", ascii_lowercase, debug=False)[2] 'doctest' >>> genes = list(ascii_lowercase) >>> genes.remove("e") >>> basic("test", genes) Traceback (most recent call last): ... ValueError: ['e'] is not in genes list, evolution cannot converge >>> genes.remove("s") >>> basic("test", genes) Traceback (most recent call last): ... ValueError: ['e', 's'] is not in genes list, evolution cannot converge >>> genes.remove("t") >>> basic("test", genes) Traceback (most recent call last): ... ValueError: ['e', 's', 't'] is not in genes list, evolution cannot converge """ # Verify if N_POPULATION is bigger than N_SELECTED if N_POPULATION < N_SELECTED: raise ValueError(f"{N_POPULATION} must be bigger than {N_SELECTED}") # Verify that the target contains no genes besides the ones inside genes variable. 
not_in_genes_list = sorted({c for c in target if c not in genes}) if not_in_genes_list: raise ValueError( f"{not_in_genes_list} is not in genes list, evolution cannot converge" ) # Generate random starting population population = [] for _ in range(N_POPULATION): population.append("".join([random.choice(genes) for i in range(len(target))])) # Just some logs to know what the algorithms is doing generation, total_population = 0, 0 # This loop will end when we will find a perfect match for our target while True: generation += 1 total_population += len(population) # Random population created now it's time to evaluate def evaluate(item: str, main_target: str = target) -> tuple[str, float]: """ Evaluate how similar the item is with the target by just counting each char in the right position >>> evaluate("Helxo Worlx", Hello World) ["Helxo Worlx", 9] """ score = len( [g for position, g in enumerate(item) if g == main_target[position]] ) return (item, float(score)) # noqa: B023 # Adding a bit of concurrency can make everything faster, # # import concurrent.futures # population_score: list[tuple[str, float]] = [] # with concurrent.futures.ThreadPoolExecutor( # max_workers=NUM_WORKERS) as executor: # futures = {executor.submit(evaluate, item) for item in population} # concurrent.futures.wait(futures) # population_score = [item.result() for item in futures] # # but with a simple algorithm like this will probably be slower # we just need to call evaluate for every item inside population population_score = [evaluate(item) for item in population] # Check if there is a matching evolution population_score = sorted(population_score, key=lambda x: x[1], reverse=True) if population_score[0][0] == target: return (generation, total_population, population_score[0][0]) # Print the Best result every 10 generation # just to know that the algorithm is working if debug and generation % 10 == 0: print( f"\nGeneration: {generation}" f"\nTotal Population:{total_population}" f"\nBest score: {population_score[0][1]}" f"\nBest string: {population_score[0][0]}" ) # Flush the old population keeping some of the best evolutions # Keeping this avoid regression of evolution population_best = population[: int(N_POPULATION / 3)] population.clear() population.extend(population_best) # Normalize population score from 0 to 1 population_score = [ (item, score / len(target)) for item, score in population_score ] # Select, Crossover and Mutate a new population def select(parent_1: tuple[str, float]) -> list[str]: """Select the second parent and generate new population""" pop = [] # Generate more child proportionally to the fitness score child_n = int(parent_1[1] * 100) + 1 child_n = 10 if child_n >= 10 else child_n for _ in range(child_n): parent_2 = population_score[ # noqa: B023 random.randint(0, N_SELECTED) ][0] child_1, child_2 = crossover(parent_1[0], parent_2) # Append new string to the population list pop.append(mutate(child_1)) pop.append(mutate(child_2)) return pop def crossover(parent_1: str, parent_2: str) -> tuple[str, str]: """Slice and combine two string in a random point""" random_slice = random.randint(0, len(parent_1) - 1) child_1 = parent_1[:random_slice] + parent_2[random_slice:] child_2 = parent_2[:random_slice] + parent_1[random_slice:] return (child_1, child_2) def mutate(child: str) -> str: """Mutate a random gene of a child with another one from the list""" child_list = list(child) if random.uniform(0, 1) < MUTATION_PROBABILITY: child_list[random.randint(0, len(child)) - 1] = random.choice(genes) return 
"".join(child_list) # This is Selection for i in range(N_SELECTED): population.extend(select(population_score[int(i)])) # Check if the population has already reached the maximum value and if so, # break the cycle. if this check is disabled the algorithm will take # forever to compute large strings but will also calculate small string in # a lot fewer generations if len(population) > N_POPULATION: break if __name__ == "__main__": target_str = ( "This is a genetic algorithm to evaluate, combine, evolve, and mutate a string!" ) genes_list = list( " ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklm" "nopqrstuvwxyz.,;!?+-*#@^'èéòà€ù=)(&%$£/\\" ) generation, population, target = basic(target_str, genes_list) print( f"\nGeneration: {generation}\nTotal Population: {population}\nTarget: {target}" )
1
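The crossover and mutate helpers above drive the evolution step: crossover splices two parents at a random cut point, and mutate occasionally rewrites one gene. A seeded standalone sketch of the two operations (gene set, parents and seed are all illustrative, not taken from the recorded file):

```py
import random

random.seed(42)  # fixed seed so the illustration is reproducible

GENES = "abcdefghijklmnopqrstuvwxyz "
MUTATION_PROBABILITY = 0.4


def crossover(parent_1: str, parent_2: str) -> tuple:
    """Cut both parents at the same random point and swap the tails."""
    cut = random.randint(0, len(parent_1) - 1)
    return parent_1[:cut] + parent_2[cut:], parent_2[:cut] + parent_1[cut:]


def mutate(child: str) -> str:
    """With probability MUTATION_PROBABILITY, replace one random gene."""
    genes = list(child)
    if random.random() < MUTATION_PROBABILITY:
        genes[random.randrange(len(genes))] = random.choice(GENES)
    return "".join(genes)


child_1, child_2 = crossover("aaaaaaaa", "bbbbbbbb")
print(child_1, child_2)                  # e.g. a prefix of one parent + tail of the other
print(mutate(child_1), mutate(child_2))  # occasionally one character differs
```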
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
class Graph: """ Data structure to store graphs (based on adjacency lists) """ def __init__(self): self.num_vertices = 0 self.num_edges = 0 self.adjacency = {} def add_vertex(self, vertex): """ Adds a vertex to the graph """ if vertex not in self.adjacency: self.adjacency[vertex] = {} self.num_vertices += 1 def add_edge(self, head, tail, weight): """ Adds an edge to the graph """ self.add_vertex(head) self.add_vertex(tail) if head == tail: return self.adjacency[head][tail] = weight self.adjacency[tail][head] = weight def distinct_weight(self): """ For Boruvks's algorithm the weights should be distinct Converts the weights to be distinct """ edges = self.get_edges() for edge in edges: head, tail, weight = edge edges.remove((tail, head, weight)) for i in range(len(edges)): edges[i] = list(edges[i]) edges.sort(key=lambda e: e[2]) for i in range(len(edges) - 1): if edges[i][2] >= edges[i + 1][2]: edges[i + 1][2] = edges[i][2] + 1 for edge in edges: head, tail, weight = edge self.adjacency[head][tail] = weight self.adjacency[tail][head] = weight def __str__(self): """ Returns string representation of the graph """ string = "" for tail in self.adjacency: for head in self.adjacency[tail]: weight = self.adjacency[head][tail] string += "%d -> %d == %d\n" % (head, tail, weight) return string.rstrip("\n") def get_edges(self): """ Returna all edges in the graph """ output = [] for tail in self.adjacency: for head in self.adjacency[tail]: output.append((tail, head, self.adjacency[head][tail])) return output def get_vertices(self): """ Returns all vertices in the graph """ return self.adjacency.keys() @staticmethod def build(vertices=None, edges=None): """ Builds a graph from the given set of vertices and edges """ g = Graph() if vertices is None: vertices = [] if edges is None: edge = [] for vertex in vertices: g.add_vertex(vertex) for edge in edges: g.add_edge(*edge) return g class UnionFind: """ Disjoint set Union and Find for Boruvka's algorithm """ def __init__(self): self.parent = {} self.rank = {} def __len__(self): return len(self.parent) def make_set(self, item): if item in self.parent: return self.find(item) self.parent[item] = item self.rank[item] = 0 return item def find(self, item): if item not in self.parent: return self.make_set(item) if item != self.parent[item]: self.parent[item] = self.find(self.parent[item]) return self.parent[item] def union(self, item1, item2): root1 = self.find(item1) root2 = self.find(item2) if root1 == root2: return root1 if self.rank[root1] > self.rank[root2]: self.parent[root2] = root1 return root1 if self.rank[root1] < self.rank[root2]: self.parent[root1] = root2 return root2 if self.rank[root1] == self.rank[root2]: self.rank[root1] += 1 self.parent[root2] = root1 return root1 @staticmethod def boruvka_mst(graph): """ Implementation of Boruvka's algorithm >>> g = Graph() >>> g = Graph.build([0, 1, 2, 3], [[0, 1, 1], [0, 2, 1],[2, 3, 1]]) >>> g.distinct_weight() >>> bg = Graph.boruvka_mst(g) >>> print(bg) 1 -> 0 == 1 2 -> 0 == 2 0 -> 1 == 1 0 -> 2 == 2 3 -> 2 == 3 2 -> 3 == 3 """ num_components = graph.num_vertices union_find = Graph.UnionFind() mst_edges = [] while num_components > 1: cheap_edge = {} for vertex in graph.get_vertices(): cheap_edge[vertex] = -1 edges = graph.get_edges() for edge in edges: head, tail, weight = edge edges.remove((tail, head, weight)) for edge in edges: head, tail, weight = edge set1 = union_find.find(head) set2 = union_find.find(tail) if set1 != set2: if cheap_edge[set1] == -1 or cheap_edge[set1][2] > weight: cheap_edge[set1] = 
[head, tail, weight] if cheap_edge[set2] == -1 or cheap_edge[set2][2] > weight: cheap_edge[set2] = [head, tail, weight] for vertex in cheap_edge: if cheap_edge[vertex] != -1: head, tail, weight = cheap_edge[vertex] if union_find.find(head) != union_find.find(tail): union_find.union(head, tail) mst_edges.append(cheap_edge[vertex]) num_components = num_components - 1 mst = Graph.build(edges=mst_edges) return mst
class Graph: """ Data structure to store graphs (based on adjacency lists) """ def __init__(self): self.num_vertices = 0 self.num_edges = 0 self.adjacency = {} def add_vertex(self, vertex): """ Adds a vertex to the graph """ if vertex not in self.adjacency: self.adjacency[vertex] = {} self.num_vertices += 1 def add_edge(self, head, tail, weight): """ Adds an edge to the graph """ self.add_vertex(head) self.add_vertex(tail) if head == tail: return self.adjacency[head][tail] = weight self.adjacency[tail][head] = weight def distinct_weight(self): """ For Boruvks's algorithm the weights should be distinct Converts the weights to be distinct """ edges = self.get_edges() for edge in edges: head, tail, weight = edge edges.remove((tail, head, weight)) for i in range(len(edges)): edges[i] = list(edges[i]) edges.sort(key=lambda e: e[2]) for i in range(len(edges) - 1): if edges[i][2] >= edges[i + 1][2]: edges[i + 1][2] = edges[i][2] + 1 for edge in edges: head, tail, weight = edge self.adjacency[head][tail] = weight self.adjacency[tail][head] = weight def __str__(self): """ Returns string representation of the graph """ string = "" for tail in self.adjacency: for head in self.adjacency[tail]: weight = self.adjacency[head][tail] string += f"{head} -> {tail} == {weight}\n" return string.rstrip("\n") def get_edges(self): """ Returna all edges in the graph """ output = [] for tail in self.adjacency: for head in self.adjacency[tail]: output.append((tail, head, self.adjacency[head][tail])) return output def get_vertices(self): """ Returns all vertices in the graph """ return self.adjacency.keys() @staticmethod def build(vertices=None, edges=None): """ Builds a graph from the given set of vertices and edges """ g = Graph() if vertices is None: vertices = [] if edges is None: edge = [] for vertex in vertices: g.add_vertex(vertex) for edge in edges: g.add_edge(*edge) return g class UnionFind: """ Disjoint set Union and Find for Boruvka's algorithm """ def __init__(self): self.parent = {} self.rank = {} def __len__(self): return len(self.parent) def make_set(self, item): if item in self.parent: return self.find(item) self.parent[item] = item self.rank[item] = 0 return item def find(self, item): if item not in self.parent: return self.make_set(item) if item != self.parent[item]: self.parent[item] = self.find(self.parent[item]) return self.parent[item] def union(self, item1, item2): root1 = self.find(item1) root2 = self.find(item2) if root1 == root2: return root1 if self.rank[root1] > self.rank[root2]: self.parent[root2] = root1 return root1 if self.rank[root1] < self.rank[root2]: self.parent[root1] = root2 return root2 if self.rank[root1] == self.rank[root2]: self.rank[root1] += 1 self.parent[root2] = root1 return root1 @staticmethod def boruvka_mst(graph): """ Implementation of Boruvka's algorithm >>> g = Graph() >>> g = Graph.build([0, 1, 2, 3], [[0, 1, 1], [0, 2, 1],[2, 3, 1]]) >>> g.distinct_weight() >>> bg = Graph.boruvka_mst(g) >>> print(bg) 1 -> 0 == 1 2 -> 0 == 2 0 -> 1 == 1 0 -> 2 == 2 3 -> 2 == 3 2 -> 3 == 3 """ num_components = graph.num_vertices union_find = Graph.UnionFind() mst_edges = [] while num_components > 1: cheap_edge = {} for vertex in graph.get_vertices(): cheap_edge[vertex] = -1 edges = graph.get_edges() for edge in edges: head, tail, weight = edge edges.remove((tail, head, weight)) for edge in edges: head, tail, weight = edge set1 = union_find.find(head) set2 = union_find.find(tail) if set1 != set2: if cheap_edge[set1] == -1 or cheap_edge[set1][2] > weight: cheap_edge[set1] = [head, 
tail, weight] if cheap_edge[set2] == -1 or cheap_edge[set2][2] > weight: cheap_edge[set2] = [head, tail, weight] for vertex in cheap_edge: if cheap_edge[vertex] != -1: head, tail, weight = cheap_edge[vertex] if union_find.find(head) != union_find.find(tail): union_find.union(head, tail) mst_edges.append(cheap_edge[vertex]) num_components = num_components - 1 mst = Graph.build(edges=mst_edges) return mst
1
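Graph.boruvka_mst above repeatedly picks the cheapest edge leaving each component and merges components through union-find until one component remains. A compact standalone sketch of the same idea on a plain edge list, assuming a connected graph with distinct weights (function and variable names are illustrative, not the class above):

```py
def boruvka(num_vertices: int, edges: list) -> list:
    """Boruvka's MST on an edge list [(u, v, weight), ...] with distinct weights."""
    parent = list(range(num_vertices))

    def find(x: int) -> int:
        while parent[x] != x:
            parent[x] = parent[parent[x]]  # path halving
            x = parent[x]
        return x

    mst, components = [], num_vertices
    while components > 1:
        cheapest = [None] * num_vertices   # cheapest outgoing edge per component root
        for u, v, w in edges:
            ru, rv = find(u), find(v)
            if ru != rv:
                if cheapest[ru] is None or w < cheapest[ru][2]:
                    cheapest[ru] = (u, v, w)
                if cheapest[rv] is None or w < cheapest[rv][2]:
                    cheapest[rv] = (u, v, w)
        for edge in cheapest:
            if edge is not None:
                u, v, w = edge
                ru, rv = find(u), find(v)
                if ru != rv:               # still two components: merge them
                    parent[ru] = rv
                    mst.append(edge)
                    components -= 1
    return mst


print(boruvka(4, [(0, 1, 1), (0, 2, 2), (2, 3, 3), (1, 3, 4)]))
# [(0, 1, 1), (0, 2, 2), (2, 3, 3)] -- total weight 6
```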
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
""" Linear regression is the most basic type of regression commonly used for predictive analysis. The idea is pretty simple: we have a dataset and we have features associated with it. Features should be chosen very cautiously as they determine how much our model will be able to make future predictions. We try to set the weight of these features, over many iterations, so that they best fit our dataset. In this particular code, I had used a CSGO dataset (ADR vs Rating). We try to best fit a line through dataset and estimate the parameters. """ import numpy as np import requests def collect_dataset(): """Collect dataset of CSGO The dataset contains ADR vs Rating of a Player :return : dataset obtained from the link, as matrix """ response = requests.get( "https://raw.githubusercontent.com/yashLadha/" + "The_Math_of_Intelligence/master/Week1/ADRvs" + "Rating.csv" ) lines = response.text.splitlines() data = [] for item in lines: item = item.split(",") data.append(item) data.pop(0) # This is for removing the labels from the list dataset = np.matrix(data) return dataset def run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta): """Run steep gradient descent and updates the Feature vector accordingly_ :param data_x : contains the dataset :param data_y : contains the output associated with each data-entry :param len_data : length of the data_ :param alpha : Learning rate of the model :param theta : Feature vector (weight's for our model) ;param return : Updated Feature's, using curr_features - alpha_ * gradient(w.r.t. feature) """ n = len_data prod = np.dot(theta, data_x.transpose()) prod -= data_y.transpose() sum_grad = np.dot(prod, data_x) theta = theta - (alpha / n) * sum_grad return theta def sum_of_square_error(data_x, data_y, len_data, theta): """Return sum of square error for error calculation :param data_x : contains our dataset :param data_y : contains the output (result vector) :param len_data : len of the dataset :param theta : contains the feature vector :return : sum of square error computed from given feature's """ prod = np.dot(theta, data_x.transpose()) prod -= data_y.transpose() sum_elem = np.sum(np.square(prod)) error = sum_elem / (2 * len_data) return error def run_linear_regression(data_x, data_y): """Implement Linear regression over the dataset :param data_x : contains our dataset :param data_y : contains the output (result vector) :return : feature for line of best fit (Feature vector) """ iterations = 100000 alpha = 0.0001550 no_features = data_x.shape[1] len_data = data_x.shape[0] - 1 theta = np.zeros((1, no_features)) for i in range(0, iterations): theta = run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta) error = sum_of_square_error(data_x, data_y, len_data, theta) print("At Iteration %d - Error is %.5f " % (i + 1, error)) return theta def main(): """Driver function""" data = collect_dataset() len_data = data.shape[0] data_x = np.c_[np.ones(len_data), data[:, :-1]].astype(float) data_y = data[:, -1].astype(float) theta = run_linear_regression(data_x, data_y) len_result = theta.shape[1] print("Resultant Feature vector : ") for i in range(0, len_result): print(f"{theta[0, i]:.5f}") if __name__ == "__main__": main()
""" Linear regression is the most basic type of regression commonly used for predictive analysis. The idea is pretty simple: we have a dataset and we have features associated with it. Features should be chosen very cautiously as they determine how much our model will be able to make future predictions. We try to set the weight of these features, over many iterations, so that they best fit our dataset. In this particular code, I had used a CSGO dataset (ADR vs Rating). We try to best fit a line through dataset and estimate the parameters. """ import numpy as np import requests def collect_dataset(): """Collect dataset of CSGO The dataset contains ADR vs Rating of a Player :return : dataset obtained from the link, as matrix """ response = requests.get( "https://raw.githubusercontent.com/yashLadha/" + "The_Math_of_Intelligence/master/Week1/ADRvs" + "Rating.csv" ) lines = response.text.splitlines() data = [] for item in lines: item = item.split(",") data.append(item) data.pop(0) # This is for removing the labels from the list dataset = np.matrix(data) return dataset def run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta): """Run steep gradient descent and updates the Feature vector accordingly_ :param data_x : contains the dataset :param data_y : contains the output associated with each data-entry :param len_data : length of the data_ :param alpha : Learning rate of the model :param theta : Feature vector (weight's for our model) ;param return : Updated Feature's, using curr_features - alpha_ * gradient(w.r.t. feature) """ n = len_data prod = np.dot(theta, data_x.transpose()) prod -= data_y.transpose() sum_grad = np.dot(prod, data_x) theta = theta - (alpha / n) * sum_grad return theta def sum_of_square_error(data_x, data_y, len_data, theta): """Return sum of square error for error calculation :param data_x : contains our dataset :param data_y : contains the output (result vector) :param len_data : len of the dataset :param theta : contains the feature vector :return : sum of square error computed from given feature's """ prod = np.dot(theta, data_x.transpose()) prod -= data_y.transpose() sum_elem = np.sum(np.square(prod)) error = sum_elem / (2 * len_data) return error def run_linear_regression(data_x, data_y): """Implement Linear regression over the dataset :param data_x : contains our dataset :param data_y : contains the output (result vector) :return : feature for line of best fit (Feature vector) """ iterations = 100000 alpha = 0.0001550 no_features = data_x.shape[1] len_data = data_x.shape[0] - 1 theta = np.zeros((1, no_features)) for i in range(0, iterations): theta = run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta) error = sum_of_square_error(data_x, data_y, len_data, theta) print(f"At Iteration {i + 1} - Error is {error:.5f}") return theta def main(): """Driver function""" data = collect_dataset() len_data = data.shape[0] data_x = np.c_[np.ones(len_data), data[:, :-1]].astype(float) data_y = data[:, -1].astype(float) theta = run_linear_regression(data_x, data_y) len_result = theta.shape[1] print("Resultant Feature vector : ") for i in range(0, len_result): print(f"{theta[0, i]:.5f}") if __name__ == "__main__": main()
1
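A quick, self-contained sketch of the one behavioural nuance in this row's %-to-f-string change: the old iteration message carried a trailing space that the f-string version drops, while the numeric formatting itself is unchanged. The values below are made up for illustration.

```py
i, error = 0, 0.123456789
old = "At Iteration %d - Error is %.5f " % (i + 1, error)   # note the trailing space
new = f"At Iteration {i + 1} - Error is {error:.5f}"
assert old.rstrip() == new == "At Iteration 1 - Error is 0.12346"
```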
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
from __future__ import annotations from typing import Any class Matrix: """ <class Matrix> Matrix structure. """ def __init__(self, row: int, column: int, default_value: float = 0) -> None: """ <method Matrix.__init__> Initialize matrix with given size and default value. Example: >>> a = Matrix(2, 3, 1) >>> a Matrix consist of 2 rows and 3 columns [1, 1, 1] [1, 1, 1] """ self.row, self.column = row, column self.array = [[default_value for c in range(column)] for r in range(row)] def __str__(self) -> str: """ <method Matrix.__str__> Return string representation of this matrix. """ # Prefix s = "Matrix consist of %d rows and %d columns\n" % (self.row, self.column) # Make string identifier max_element_length = 0 for row_vector in self.array: for obj in row_vector: max_element_length = max(max_element_length, len(str(obj))) string_format_identifier = "%%%ds" % (max_element_length,) # Make string and return def single_line(row_vector: list[float]) -> str: nonlocal string_format_identifier line = "[" line += ", ".join(string_format_identifier % (obj,) for obj in row_vector) line += "]" return line s += "\n".join(single_line(row_vector) for row_vector in self.array) return s def __repr__(self) -> str: return str(self) def validate_indicies(self, loc: tuple[int, int]) -> bool: """ <method Matrix.validate_indicies> Check if given indices are valid to pick element from matrix. Example: >>> a = Matrix(2, 6, 0) >>> a.validate_indicies((2, 7)) False >>> a.validate_indicies((0, 0)) True """ if not (isinstance(loc, (list, tuple)) and len(loc) == 2): return False elif not (0 <= loc[0] < self.row and 0 <= loc[1] < self.column): return False else: return True def __getitem__(self, loc: tuple[int, int]) -> Any: """ <method Matrix.__getitem__> Return array[row][column] where loc = (row, column). Example: >>> a = Matrix(3, 2, 7) >>> a[1, 0] 7 """ assert self.validate_indicies(loc) return self.array[loc[0]][loc[1]] def __setitem__(self, loc: tuple[int, int], value: float) -> None: """ <method Matrix.__setitem__> Set array[row][column] = value where loc = (row, column). Example: >>> a = Matrix(2, 3, 1) >>> a[1, 2] = 51 >>> a Matrix consist of 2 rows and 3 columns [ 1, 1, 1] [ 1, 1, 51] """ assert self.validate_indicies(loc) self.array[loc[0]][loc[1]] = value def __add__(self, another: Matrix) -> Matrix: """ <method Matrix.__add__> Return self + another. Example: >>> a = Matrix(2, 1, -4) >>> b = Matrix(2, 1, 3) >>> a+b Matrix consist of 2 rows and 1 columns [-1] [-1] """ # Validation assert isinstance(another, Matrix) assert self.row == another.row and self.column == another.column # Add result = Matrix(self.row, self.column) for r in range(self.row): for c in range(self.column): result[r, c] = self[r, c] + another[r, c] return result def __neg__(self) -> Matrix: """ <method Matrix.__neg__> Return -self. Example: >>> a = Matrix(2, 2, 3) >>> a[0, 1] = a[1, 0] = -2 >>> -a Matrix consist of 2 rows and 2 columns [-3, 2] [ 2, -3] """ result = Matrix(self.row, self.column) for r in range(self.row): for c in range(self.column): result[r, c] = -self[r, c] return result def __sub__(self, another: Matrix) -> Matrix: return self + (-another) def __mul__(self, another: int | float | Matrix) -> Matrix: """ <method Matrix.__mul__> Return self * another. 
Example: >>> a = Matrix(2, 3, 1) >>> a[0,2] = a[1,2] = 3 >>> a * -2 Matrix consist of 2 rows and 3 columns [-2, -2, -6] [-2, -2, -6] """ if isinstance(another, (int, float)): # Scalar multiplication result = Matrix(self.row, self.column) for r in range(self.row): for c in range(self.column): result[r, c] = self[r, c] * another return result elif isinstance(another, Matrix): # Matrix multiplication assert self.column == another.row result = Matrix(self.row, another.column) for r in range(self.row): for c in range(another.column): for i in range(self.column): result[r, c] += self[r, i] * another[i, c] return result else: raise TypeError(f"Unsupported type given for another ({type(another)})") def transpose(self) -> Matrix: """ <method Matrix.transpose> Return self^T. Example: >>> a = Matrix(2, 3) >>> for r in range(2): ... for c in range(3): ... a[r,c] = r*c ... >>> a.transpose() Matrix consist of 3 rows and 2 columns [0, 0] [0, 1] [0, 2] """ result = Matrix(self.column, self.row) for r in range(self.row): for c in range(self.column): result[c, r] = self[r, c] return result def sherman_morrison(self, u: Matrix, v: Matrix) -> Any: """ <method Matrix.sherman_morrison> Apply Sherman-Morrison formula in O(n^2). To learn this formula, please look this: https://en.wikipedia.org/wiki/Sherman%E2%80%93Morrison_formula This method returns (A + uv^T)^(-1) where A^(-1) is self. Returns None if it's impossible to calculate. Warning: This method doesn't check if self is invertible. Make sure self is invertible before execute this method. Example: >>> ainv = Matrix(3, 3, 0) >>> for i in range(3): ainv[i,i] = 1 ... >>> u = Matrix(3, 1, 0) >>> u[0,0], u[1,0], u[2,0] = 1, 2, -3 >>> v = Matrix(3, 1, 0) >>> v[0,0], v[1,0], v[2,0] = 4, -2, 5 >>> ainv.sherman_morrison(u, v) Matrix consist of 3 rows and 3 columns [ 1.2857142857142856, -0.14285714285714285, 0.3571428571428571] [ 0.5714285714285714, 0.7142857142857143, 0.7142857142857142] [ -0.8571428571428571, 0.42857142857142855, -0.0714285714285714] """ # Size validation assert isinstance(u, Matrix) and isinstance(v, Matrix) assert self.row == self.column == u.row == v.row # u, v should be column vector assert u.column == v.column == 1 # u, v should be column vector # Calculate v_t = v.transpose() numerator_factor = (v_t * self * u)[0, 0] + 1 if numerator_factor == 0: return None # It's not invertable return self - ((self * u) * (v_t * self) * (1.0 / numerator_factor)) # Testing if __name__ == "__main__": def test1() -> None: # a^(-1) ainv = Matrix(3, 3, 0) for i in range(3): ainv[i, i] = 1 print(f"a^(-1) is {ainv}") # u, v u = Matrix(3, 1, 0) u[0, 0], u[1, 0], u[2, 0] = 1, 2, -3 v = Matrix(3, 1, 0) v[0, 0], v[1, 0], v[2, 0] = 4, -2, 5 print(f"u is {u}") print(f"v is {v}") print("uv^T is %s" % (u * v.transpose())) # Sherman Morrison print(f"(a + uv^T)^(-1) is {ainv.sherman_morrison(u, v)}") def test2() -> None: import doctest doctest.testmod() test2()
from __future__ import annotations from typing import Any class Matrix: """ <class Matrix> Matrix structure. """ def __init__(self, row: int, column: int, default_value: float = 0) -> None: """ <method Matrix.__init__> Initialize matrix with given size and default value. Example: >>> a = Matrix(2, 3, 1) >>> a Matrix consist of 2 rows and 3 columns [1, 1, 1] [1, 1, 1] """ self.row, self.column = row, column self.array = [[default_value for c in range(column)] for r in range(row)] def __str__(self) -> str: """ <method Matrix.__str__> Return string representation of this matrix. """ # Prefix s = f"Matrix consist of {self.row} rows and {self.column} columns\n" # Make string identifier max_element_length = 0 for row_vector in self.array: for obj in row_vector: max_element_length = max(max_element_length, len(str(obj))) string_format_identifier = f"%{max_element_length}s" # Make string and return def single_line(row_vector: list[float]) -> str: nonlocal string_format_identifier line = "[" line += ", ".join(string_format_identifier % (obj,) for obj in row_vector) line += "]" return line s += "\n".join(single_line(row_vector) for row_vector in self.array) return s def __repr__(self) -> str: return str(self) def validate_indicies(self, loc: tuple[int, int]) -> bool: """ <method Matrix.validate_indicies> Check if given indices are valid to pick element from matrix. Example: >>> a = Matrix(2, 6, 0) >>> a.validate_indicies((2, 7)) False >>> a.validate_indicies((0, 0)) True """ if not (isinstance(loc, (list, tuple)) and len(loc) == 2): return False elif not (0 <= loc[0] < self.row and 0 <= loc[1] < self.column): return False else: return True def __getitem__(self, loc: tuple[int, int]) -> Any: """ <method Matrix.__getitem__> Return array[row][column] where loc = (row, column). Example: >>> a = Matrix(3, 2, 7) >>> a[1, 0] 7 """ assert self.validate_indicies(loc) return self.array[loc[0]][loc[1]] def __setitem__(self, loc: tuple[int, int], value: float) -> None: """ <method Matrix.__setitem__> Set array[row][column] = value where loc = (row, column). Example: >>> a = Matrix(2, 3, 1) >>> a[1, 2] = 51 >>> a Matrix consist of 2 rows and 3 columns [ 1, 1, 1] [ 1, 1, 51] """ assert self.validate_indicies(loc) self.array[loc[0]][loc[1]] = value def __add__(self, another: Matrix) -> Matrix: """ <method Matrix.__add__> Return self + another. Example: >>> a = Matrix(2, 1, -4) >>> b = Matrix(2, 1, 3) >>> a+b Matrix consist of 2 rows and 1 columns [-1] [-1] """ # Validation assert isinstance(another, Matrix) assert self.row == another.row and self.column == another.column # Add result = Matrix(self.row, self.column) for r in range(self.row): for c in range(self.column): result[r, c] = self[r, c] + another[r, c] return result def __neg__(self) -> Matrix: """ <method Matrix.__neg__> Return -self. Example: >>> a = Matrix(2, 2, 3) >>> a[0, 1] = a[1, 0] = -2 >>> -a Matrix consist of 2 rows and 2 columns [-3, 2] [ 2, -3] """ result = Matrix(self.row, self.column) for r in range(self.row): for c in range(self.column): result[r, c] = -self[r, c] return result def __sub__(self, another: Matrix) -> Matrix: return self + (-another) def __mul__(self, another: int | float | Matrix) -> Matrix: """ <method Matrix.__mul__> Return self * another. 
Example: >>> a = Matrix(2, 3, 1) >>> a[0,2] = a[1,2] = 3 >>> a * -2 Matrix consist of 2 rows and 3 columns [-2, -2, -6] [-2, -2, -6] """ if isinstance(another, (int, float)): # Scalar multiplication result = Matrix(self.row, self.column) for r in range(self.row): for c in range(self.column): result[r, c] = self[r, c] * another return result elif isinstance(another, Matrix): # Matrix multiplication assert self.column == another.row result = Matrix(self.row, another.column) for r in range(self.row): for c in range(another.column): for i in range(self.column): result[r, c] += self[r, i] * another[i, c] return result else: raise TypeError(f"Unsupported type given for another ({type(another)})") def transpose(self) -> Matrix: """ <method Matrix.transpose> Return self^T. Example: >>> a = Matrix(2, 3) >>> for r in range(2): ... for c in range(3): ... a[r,c] = r*c ... >>> a.transpose() Matrix consist of 3 rows and 2 columns [0, 0] [0, 1] [0, 2] """ result = Matrix(self.column, self.row) for r in range(self.row): for c in range(self.column): result[c, r] = self[r, c] return result def sherman_morrison(self, u: Matrix, v: Matrix) -> Any: """ <method Matrix.sherman_morrison> Apply Sherman-Morrison formula in O(n^2). To learn this formula, please look this: https://en.wikipedia.org/wiki/Sherman%E2%80%93Morrison_formula This method returns (A + uv^T)^(-1) where A^(-1) is self. Returns None if it's impossible to calculate. Warning: This method doesn't check if self is invertible. Make sure self is invertible before execute this method. Example: >>> ainv = Matrix(3, 3, 0) >>> for i in range(3): ainv[i,i] = 1 ... >>> u = Matrix(3, 1, 0) >>> u[0,0], u[1,0], u[2,0] = 1, 2, -3 >>> v = Matrix(3, 1, 0) >>> v[0,0], v[1,0], v[2,0] = 4, -2, 5 >>> ainv.sherman_morrison(u, v) Matrix consist of 3 rows and 3 columns [ 1.2857142857142856, -0.14285714285714285, 0.3571428571428571] [ 0.5714285714285714, 0.7142857142857143, 0.7142857142857142] [ -0.8571428571428571, 0.42857142857142855, -0.0714285714285714] """ # Size validation assert isinstance(u, Matrix) and isinstance(v, Matrix) assert self.row == self.column == u.row == v.row # u, v should be column vector assert u.column == v.column == 1 # u, v should be column vector # Calculate v_t = v.transpose() numerator_factor = (v_t * self * u)[0, 0] + 1 if numerator_factor == 0: return None # It's not invertable return self - ((self * u) * (v_t * self) * (1.0 / numerator_factor)) # Testing if __name__ == "__main__": def test1() -> None: # a^(-1) ainv = Matrix(3, 3, 0) for i in range(3): ainv[i, i] = 1 print(f"a^(-1) is {ainv}") # u, v u = Matrix(3, 1, 0) u[0, 0], u[1, 0], u[2, 0] = 1, 2, -3 v = Matrix(3, 1, 0) v[0, 0], v[1, 0], v[2, 0] = 4, -2, 5 print(f"u is {u}") print(f"v is {v}") print(f"uv^T is {u * v.transpose()}") # Sherman Morrison print(f"(a + uv^T)^(-1) is {ainv.sherman_morrison(u, v)}") def test2() -> None: import doctest doctest.testmod() test2()
1
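The least obvious conversion in this row is the width-format identifier in `Matrix.__str__`. A small sketch (with an arbitrary width) showing that the old `%%%ds` trick and the new f-string build the same printf-style template:

```py
max_element_length = 5  # arbitrary width, for illustration only
old_identifier = "%%%ds" % (max_element_length,)  # "%%" -> "%", "%d" -> "5"
new_identifier = f"%{max_element_length}s"
assert old_identifier == new_identifier == "%5s"
assert old_identifier % ("42",) == "   42"  # right-aligned in a 5-character field
```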
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
#!/usr/bin/python """ A Framework of Back Propagation Neural Network(BP) model Easy to use: * add many layers as you want !!! * clearly see how the loss decreasing Easy to expand: * more activation functions * more loss functions * more optimization method Author: Stephen Lee Github : https://github.com/RiptideBo Date: 2017.11.23 """ import numpy as np from matplotlib import pyplot as plt def sigmoid(x): return 1 / (1 + np.exp(-1 * x)) class DenseLayer: """ Layers of BP neural network """ def __init__( self, units, activation=None, learning_rate=None, is_input_layer=False ): """ common connected layer of bp network :param units: numbers of neural units :param activation: activation function :param learning_rate: learning rate for paras :param is_input_layer: whether it is input layer or not """ self.units = units self.weight = None self.bias = None self.activation = activation if learning_rate is None: learning_rate = 0.3 self.learn_rate = learning_rate self.is_input_layer = is_input_layer def initializer(self, back_units): self.weight = np.asmatrix(np.random.normal(0, 0.5, (self.units, back_units))) self.bias = np.asmatrix(np.random.normal(0, 0.5, self.units)).T if self.activation is None: self.activation = sigmoid def cal_gradient(self): # activation function may be sigmoid or linear if self.activation == sigmoid: gradient_mat = np.dot(self.output, (1 - self.output).T) gradient_activation = np.diag(np.diag(gradient_mat)) else: gradient_activation = 1 return gradient_activation def forward_propagation(self, xdata): self.xdata = xdata if self.is_input_layer: # input layer self.wx_plus_b = xdata self.output = xdata return xdata else: self.wx_plus_b = np.dot(self.weight, self.xdata) - self.bias self.output = self.activation(self.wx_plus_b) return self.output def back_propagation(self, gradient): gradient_activation = self.cal_gradient() # i * i 维 gradient = np.asmatrix(np.dot(gradient.T, gradient_activation)) self._gradient_weight = np.asmatrix(self.xdata) self._gradient_bias = -1 self._gradient_x = self.weight self.gradient_weight = np.dot(gradient.T, self._gradient_weight.T) self.gradient_bias = gradient * self._gradient_bias self.gradient = np.dot(gradient, self._gradient_x).T # upgrade: the Negative gradient direction self.weight = self.weight - self.learn_rate * self.gradient_weight self.bias = self.bias - self.learn_rate * self.gradient_bias.T # updates the weights and bias according to learning rate (0.3 if undefined) return self.gradient class BPNN: """ Back Propagation Neural Network model """ def __init__(self): self.layers = [] self.train_mse = [] self.fig_loss = plt.figure() self.ax_loss = self.fig_loss.add_subplot(1, 1, 1) def add_layer(self, layer): self.layers.append(layer) def build(self): for i, layer in enumerate(self.layers[:]): if i < 1: layer.is_input_layer = True else: layer.initializer(self.layers[i - 1].units) def summary(self): for i, layer in enumerate(self.layers[:]): print("------- layer %d -------" % i) print("weight.shape ", np.shape(layer.weight)) print("bias.shape ", np.shape(layer.bias)) def train(self, xdata, ydata, train_round, accuracy): self.train_round = train_round self.accuracy = accuracy self.ax_loss.hlines(self.accuracy, 0, self.train_round * 1.1) x_shape = np.shape(xdata) for _ in range(train_round): all_loss = 0 for row in range(x_shape[0]): _xdata = np.asmatrix(xdata[row, :]).T _ydata = np.asmatrix(ydata[row, :]).T # forward propagation for layer in self.layers: _xdata = layer.forward_propagation(_xdata) loss, gradient = self.cal_loss(_ydata, 
_xdata) all_loss = all_loss + loss # back propagation: the input_layer does not upgrade for layer in self.layers[:0:-1]: gradient = layer.back_propagation(gradient) mse = all_loss / x_shape[0] self.train_mse.append(mse) self.plot_loss() if mse < self.accuracy: print("----达到精度----") return mse def cal_loss(self, ydata, ydata_): self.loss = np.sum(np.power((ydata - ydata_), 2)) self.loss_gradient = 2 * (ydata_ - ydata) # vector (shape is the same as _ydata.shape) return self.loss, self.loss_gradient def plot_loss(self): if self.ax_loss.lines: self.ax_loss.lines.remove(self.ax_loss.lines[0]) self.ax_loss.plot(self.train_mse, "r-") plt.ion() plt.xlabel("step") plt.ylabel("loss") plt.show() plt.pause(0.1) def example(): x = np.random.randn(10, 10) y = np.asarray( [ [0.8, 0.4], [0.4, 0.3], [0.34, 0.45], [0.67, 0.32], [0.88, 0.67], [0.78, 0.77], [0.55, 0.66], [0.55, 0.43], [0.54, 0.1], [0.1, 0.5], ] ) model = BPNN() for i in (10, 20, 30, 2): model.add_layer(DenseLayer(i)) model.build() model.summary() model.train(xdata=x, ydata=y, train_round=100, accuracy=0.01) if __name__ == "__main__": example()
#!/usr/bin/python """ A Framework of Back Propagation Neural Network(BP) model Easy to use: * add many layers as you want !!! * clearly see how the loss decreasing Easy to expand: * more activation functions * more loss functions * more optimization method Author: Stephen Lee Github : https://github.com/RiptideBo Date: 2017.11.23 """ import numpy as np from matplotlib import pyplot as plt def sigmoid(x): return 1 / (1 + np.exp(-1 * x)) class DenseLayer: """ Layers of BP neural network """ def __init__( self, units, activation=None, learning_rate=None, is_input_layer=False ): """ common connected layer of bp network :param units: numbers of neural units :param activation: activation function :param learning_rate: learning rate for paras :param is_input_layer: whether it is input layer or not """ self.units = units self.weight = None self.bias = None self.activation = activation if learning_rate is None: learning_rate = 0.3 self.learn_rate = learning_rate self.is_input_layer = is_input_layer def initializer(self, back_units): self.weight = np.asmatrix(np.random.normal(0, 0.5, (self.units, back_units))) self.bias = np.asmatrix(np.random.normal(0, 0.5, self.units)).T if self.activation is None: self.activation = sigmoid def cal_gradient(self): # activation function may be sigmoid or linear if self.activation == sigmoid: gradient_mat = np.dot(self.output, (1 - self.output).T) gradient_activation = np.diag(np.diag(gradient_mat)) else: gradient_activation = 1 return gradient_activation def forward_propagation(self, xdata): self.xdata = xdata if self.is_input_layer: # input layer self.wx_plus_b = xdata self.output = xdata return xdata else: self.wx_plus_b = np.dot(self.weight, self.xdata) - self.bias self.output = self.activation(self.wx_plus_b) return self.output def back_propagation(self, gradient): gradient_activation = self.cal_gradient() # i * i 维 gradient = np.asmatrix(np.dot(gradient.T, gradient_activation)) self._gradient_weight = np.asmatrix(self.xdata) self._gradient_bias = -1 self._gradient_x = self.weight self.gradient_weight = np.dot(gradient.T, self._gradient_weight.T) self.gradient_bias = gradient * self._gradient_bias self.gradient = np.dot(gradient, self._gradient_x).T # upgrade: the Negative gradient direction self.weight = self.weight - self.learn_rate * self.gradient_weight self.bias = self.bias - self.learn_rate * self.gradient_bias.T # updates the weights and bias according to learning rate (0.3 if undefined) return self.gradient class BPNN: """ Back Propagation Neural Network model """ def __init__(self): self.layers = [] self.train_mse = [] self.fig_loss = plt.figure() self.ax_loss = self.fig_loss.add_subplot(1, 1, 1) def add_layer(self, layer): self.layers.append(layer) def build(self): for i, layer in enumerate(self.layers[:]): if i < 1: layer.is_input_layer = True else: layer.initializer(self.layers[i - 1].units) def summary(self): for i, layer in enumerate(self.layers[:]): print(f"------- layer {i} -------") print("weight.shape ", np.shape(layer.weight)) print("bias.shape ", np.shape(layer.bias)) def train(self, xdata, ydata, train_round, accuracy): self.train_round = train_round self.accuracy = accuracy self.ax_loss.hlines(self.accuracy, 0, self.train_round * 1.1) x_shape = np.shape(xdata) for _ in range(train_round): all_loss = 0 for row in range(x_shape[0]): _xdata = np.asmatrix(xdata[row, :]).T _ydata = np.asmatrix(ydata[row, :]).T # forward propagation for layer in self.layers: _xdata = layer.forward_propagation(_xdata) loss, gradient = self.cal_loss(_ydata, 
_xdata) all_loss = all_loss + loss # back propagation: the input_layer does not upgrade for layer in self.layers[:0:-1]: gradient = layer.back_propagation(gradient) mse = all_loss / x_shape[0] self.train_mse.append(mse) self.plot_loss() if mse < self.accuracy: print("----达到精度----") return mse def cal_loss(self, ydata, ydata_): self.loss = np.sum(np.power((ydata - ydata_), 2)) self.loss_gradient = 2 * (ydata_ - ydata) # vector (shape is the same as _ydata.shape) return self.loss, self.loss_gradient def plot_loss(self): if self.ax_loss.lines: self.ax_loss.lines.remove(self.ax_loss.lines[0]) self.ax_loss.plot(self.train_mse, "r-") plt.ion() plt.xlabel("step") plt.ylabel("loss") plt.show() plt.pause(0.1) def example(): x = np.random.randn(10, 10) y = np.asarray( [ [0.8, 0.4], [0.4, 0.3], [0.34, 0.45], [0.67, 0.32], [0.88, 0.67], [0.78, 0.77], [0.55, 0.66], [0.55, 0.43], [0.54, 0.1], [0.1, 0.5], ] ) model = BPNN() for i in (10, 20, 30, 2): model.add_layer(DenseLayer(i)) model.build() model.summary() model.train(xdata=x, ydata=y, train_round=100, accuracy=0.01) if __name__ == "__main__": example()
1
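Beyond the formatting change, this row's layer code leans on the sigmoid activation. A tiny standalone check (inputs chosen arbitrarily) of the function `DenseLayer` defaults to, plus the `out * (1 - out)` derivative term that `cal_gradient` builds its diagonal from:

```py
import numpy as np

def sigmoid(x):
    # same default activation as DenseLayer above
    return 1 / (1 + np.exp(-1 * x))

out = sigmoid(np.array([-2.0, 0.0, 2.0]))
print(out)              # sigmoid(0) is exactly 0.5
print(out * (1 - out))  # element-wise sigmoid derivative at those outputs
```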
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
""" - - - - - -- - - - - - - - - - - - - - - - - - - - - - - Name - - CNN - Convolution Neural Network For Photo Recognizing Goal - - Recognize Handing Writing Word Photo Detail:Total 5 layers neural network * Convolution layer * Pooling layer * Input layer layer of BP * Hidden layer of BP * Output layer of BP Author: Stephen Lee Github: [email protected] Date: 2017.9.20 - - - - - -- - - - - - - - - - - - - - - - - - - - - - - """ import pickle import numpy as np from matplotlib import pyplot as plt class CNN: def __init__( self, conv1_get, size_p1, bp_num1, bp_num2, bp_num3, rate_w=0.2, rate_t=0.2 ): """ :param conv1_get: [a,c,d],size, number, step of convolution kernel :param size_p1: pooling size :param bp_num1: units number of flatten layer :param bp_num2: units number of hidden layer :param bp_num3: units number of output layer :param rate_w: rate of weight learning :param rate_t: rate of threshold learning """ self.num_bp1 = bp_num1 self.num_bp2 = bp_num2 self.num_bp3 = bp_num3 self.conv1 = conv1_get[:2] self.step_conv1 = conv1_get[2] self.size_pooling1 = size_p1 self.rate_weight = rate_w self.rate_thre = rate_t self.w_conv1 = [ np.mat(-1 * np.random.rand(self.conv1[0], self.conv1[0]) + 0.5) for i in range(self.conv1[1]) ] self.wkj = np.mat(-1 * np.random.rand(self.num_bp3, self.num_bp2) + 0.5) self.vji = np.mat(-1 * np.random.rand(self.num_bp2, self.num_bp1) + 0.5) self.thre_conv1 = -2 * np.random.rand(self.conv1[1]) + 1 self.thre_bp2 = -2 * np.random.rand(self.num_bp2) + 1 self.thre_bp3 = -2 * np.random.rand(self.num_bp3) + 1 def save_model(self, save_path): # save model dict with pickle model_dic = { "num_bp1": self.num_bp1, "num_bp2": self.num_bp2, "num_bp3": self.num_bp3, "conv1": self.conv1, "step_conv1": self.step_conv1, "size_pooling1": self.size_pooling1, "rate_weight": self.rate_weight, "rate_thre": self.rate_thre, "w_conv1": self.w_conv1, "wkj": self.wkj, "vji": self.vji, "thre_conv1": self.thre_conv1, "thre_bp2": self.thre_bp2, "thre_bp3": self.thre_bp3, } with open(save_path, "wb") as f: pickle.dump(model_dic, f) print(f"Model saved: {save_path}") @classmethod def read_model(cls, model_path): # read saved model with open(model_path, "rb") as f: model_dic = pickle.load(f) conv_get = model_dic.get("conv1") conv_get.append(model_dic.get("step_conv1")) size_p1 = model_dic.get("size_pooling1") bp1 = model_dic.get("num_bp1") bp2 = model_dic.get("num_bp2") bp3 = model_dic.get("num_bp3") r_w = model_dic.get("rate_weight") r_t = model_dic.get("rate_thre") # create model instance conv_ins = CNN(conv_get, size_p1, bp1, bp2, bp3, r_w, r_t) # modify model parameter conv_ins.w_conv1 = model_dic.get("w_conv1") conv_ins.wkj = model_dic.get("wkj") conv_ins.vji = model_dic.get("vji") conv_ins.thre_conv1 = model_dic.get("thre_conv1") conv_ins.thre_bp2 = model_dic.get("thre_bp2") conv_ins.thre_bp3 = model_dic.get("thre_bp3") return conv_ins def sig(self, x): return 1 / (1 + np.exp(-1 * x)) def do_round(self, x): return round(x, 3) def convolute(self, data, convs, w_convs, thre_convs, conv_step): # convolution process size_conv = convs[0] num_conv = convs[1] size_data = np.shape(data)[0] # get the data slice of original image data, data_focus data_focus = [] for i_focus in range(0, size_data - size_conv + 1, conv_step): for j_focus in range(0, size_data - size_conv + 1, conv_step): focus = data[ i_focus : i_focus + size_conv, j_focus : j_focus + size_conv ] data_focus.append(focus) # calculate the feature map of every single kernel, and saved as list of matrix data_featuremap = [] 
size_feature_map = int((size_data - size_conv) / conv_step + 1) for i_map in range(num_conv): featuremap = [] for i_focus in range(len(data_focus)): net_focus = ( np.sum(np.multiply(data_focus[i_focus], w_convs[i_map])) - thre_convs[i_map] ) featuremap.append(self.sig(net_focus)) featuremap = np.asmatrix(featuremap).reshape( size_feature_map, size_feature_map ) data_featuremap.append(featuremap) # expanding the data slice to One dimenssion focus1_list = [] for each_focus in data_focus: focus1_list.extend(self.Expand_Mat(each_focus)) focus_list = np.asarray(focus1_list) return focus_list, data_featuremap def pooling(self, featuremaps, size_pooling, pooling_type="average_pool"): # pooling process size_map = len(featuremaps[0]) size_pooled = int(size_map / size_pooling) featuremap_pooled = [] for i_map in range(len(featuremaps)): feature_map = featuremaps[i_map] map_pooled = [] for i_focus in range(0, size_map, size_pooling): for j_focus in range(0, size_map, size_pooling): focus = feature_map[ i_focus : i_focus + size_pooling, j_focus : j_focus + size_pooling, ] if pooling_type == "average_pool": # average pooling map_pooled.append(np.average(focus)) elif pooling_type == "max_pooling": # max pooling map_pooled.append(np.max(focus)) map_pooled = np.asmatrix(map_pooled).reshape(size_pooled, size_pooled) featuremap_pooled.append(map_pooled) return featuremap_pooled def _expand(self, data): # expanding three dimension data to one dimension list data_expanded = [] for i in range(len(data)): shapes = np.shape(data[i]) data_listed = data[i].reshape(1, shapes[0] * shapes[1]) data_listed = data_listed.getA().tolist()[0] data_expanded.extend(data_listed) data_expanded = np.asarray(data_expanded) return data_expanded def _expand_mat(self, data_mat): # expanding matrix to one dimension list data_mat = np.asarray(data_mat) shapes = np.shape(data_mat) data_expanded = data_mat.reshape(1, shapes[0] * shapes[1]) return data_expanded def _calculate_gradient_from_pool( self, out_map, pd_pool, num_map, size_map, size_pooling ): """ calculate the gradient from the data slice of pool layer pd_pool: list of matrix out_map: the shape of data slice(size_map*size_map) return: pd_all: list of matrix, [num, size_map, size_map] """ pd_all = [] i_pool = 0 for i_map in range(num_map): pd_conv1 = np.ones((size_map, size_map)) for i in range(0, size_map, size_pooling): for j in range(0, size_map, size_pooling): pd_conv1[i : i + size_pooling, j : j + size_pooling] = pd_pool[ i_pool ] i_pool = i_pool + 1 pd_conv2 = np.multiply( pd_conv1, np.multiply(out_map[i_map], (1 - out_map[i_map])) ) pd_all.append(pd_conv2) return pd_all def train( self, patterns, datas_train, datas_teach, n_repeat, error_accuracy, draw_e=bool ): # model traning print("----------------------Start Training-------------------------") print((" - - Shape: Train_Data ", np.shape(datas_train))) print((" - - Shape: Teach_Data ", np.shape(datas_teach))) rp = 0 all_mse = [] mse = 10000 while rp < n_repeat and mse >= error_accuracy: error_count = 0 print("-------------Learning Time %d--------------" % rp) for p in range(len(datas_train)): # print('------------Learning Image: %d--------------'%p) data_train = np.asmatrix(datas_train[p]) data_teach = np.asarray(datas_teach[p]) data_focus1, data_conved1 = self.convolute( data_train, self.conv1, self.w_conv1, self.thre_conv1, conv_step=self.step_conv1, ) data_pooled1 = self.pooling(data_conved1, self.size_pooling1) shape_featuremap1 = np.shape(data_conved1) """ print(' -----original shape ', np.shape(data_train)) 
print(' ---- after convolution ',np.shape(data_conv1)) print(' -----after pooling ',np.shape(data_pooled1)) """ data_bp_input = self._expand(data_pooled1) bp_out1 = data_bp_input bp_net_j = np.dot(bp_out1, self.vji.T) - self.thre_bp2 bp_out2 = self.sig(bp_net_j) bp_net_k = np.dot(bp_out2, self.wkj.T) - self.thre_bp3 bp_out3 = self.sig(bp_net_k) # --------------Model Leaning ------------------------ # calculate error and gradient--------------- pd_k_all = np.multiply( (data_teach - bp_out3), np.multiply(bp_out3, (1 - bp_out3)) ) pd_j_all = np.multiply( np.dot(pd_k_all, self.wkj), np.multiply(bp_out2, (1 - bp_out2)) ) pd_i_all = np.dot(pd_j_all, self.vji) pd_conv1_pooled = pd_i_all / (self.size_pooling1 * self.size_pooling1) pd_conv1_pooled = pd_conv1_pooled.T.getA().tolist() pd_conv1_all = self._calculate_gradient_from_pool( data_conved1, pd_conv1_pooled, shape_featuremap1[0], shape_featuremap1[1], self.size_pooling1, ) # weight and threshold learning process--------- # convolution layer for k_conv in range(self.conv1[1]): pd_conv_list = self._expand_mat(pd_conv1_all[k_conv]) delta_w = self.rate_weight * np.dot(pd_conv_list, data_focus1) self.w_conv1[k_conv] = self.w_conv1[k_conv] + delta_w.reshape( (self.conv1[0], self.conv1[0]) ) self.thre_conv1[k_conv] = ( self.thre_conv1[k_conv] - np.sum(pd_conv1_all[k_conv]) * self.rate_thre ) # all connected layer self.wkj = self.wkj + pd_k_all.T * bp_out2 * self.rate_weight self.vji = self.vji + pd_j_all.T * bp_out1 * self.rate_weight self.thre_bp3 = self.thre_bp3 - pd_k_all * self.rate_thre self.thre_bp2 = self.thre_bp2 - pd_j_all * self.rate_thre # calculate the sum error of all single image errors = np.sum(abs(data_teach - bp_out3)) error_count += errors # print(' ----Teach ',data_teach) # print(' ----BP_output ',bp_out3) rp = rp + 1 mse = error_count / patterns all_mse.append(mse) def draw_error(): yplot = [error_accuracy for i in range(int(n_repeat * 1.2))] plt.plot(all_mse, "+-") plt.plot(yplot, "r--") plt.xlabel("Learning Times") plt.ylabel("All_mse") plt.grid(True, alpha=0.5) plt.show() print("------------------Training Complished---------------------") print((" - - Training epoch: ", rp, f" - - Mse: {mse:.6f}")) if draw_e: draw_error() return mse def predict(self, datas_test): # model predict produce_out = [] print("-------------------Start Testing-------------------------") print((" - - Shape: Test_Data ", np.shape(datas_test))) for p in range(len(datas_test)): data_test = np.asmatrix(datas_test[p]) data_focus1, data_conved1 = self.convolute( data_test, self.conv1, self.w_conv1, self.thre_conv1, conv_step=self.step_conv1, ) data_pooled1 = self.pooling(data_conved1, self.size_pooling1) data_bp_input = self._expand(data_pooled1) bp_out1 = data_bp_input bp_net_j = bp_out1 * self.vji.T - self.thre_bp2 bp_out2 = self.sig(bp_net_j) bp_net_k = bp_out2 * self.wkj.T - self.thre_bp3 bp_out3 = self.sig(bp_net_k) produce_out.extend(bp_out3.getA().tolist()) res = [list(map(self.do_round, each)) for each in produce_out] return np.asarray(res) def convolution(self, data): # return the data of image after convoluting process so we can check it out data_test = np.asmatrix(data) data_focus1, data_conved1 = self.convolute( data_test, self.conv1, self.w_conv1, self.thre_conv1, conv_step=self.step_conv1, ) data_pooled1 = self.pooling(data_conved1, self.size_pooling1) return data_conved1, data_pooled1 if __name__ == "__main__": """ I will put the example on other file """
""" - - - - - -- - - - - - - - - - - - - - - - - - - - - - - Name - - CNN - Convolution Neural Network For Photo Recognizing Goal - - Recognize Handing Writing Word Photo Detail:Total 5 layers neural network * Convolution layer * Pooling layer * Input layer layer of BP * Hidden layer of BP * Output layer of BP Author: Stephen Lee Github: [email protected] Date: 2017.9.20 - - - - - -- - - - - - - - - - - - - - - - - - - - - - - """ import pickle import numpy as np from matplotlib import pyplot as plt class CNN: def __init__( self, conv1_get, size_p1, bp_num1, bp_num2, bp_num3, rate_w=0.2, rate_t=0.2 ): """ :param conv1_get: [a,c,d],size, number, step of convolution kernel :param size_p1: pooling size :param bp_num1: units number of flatten layer :param bp_num2: units number of hidden layer :param bp_num3: units number of output layer :param rate_w: rate of weight learning :param rate_t: rate of threshold learning """ self.num_bp1 = bp_num1 self.num_bp2 = bp_num2 self.num_bp3 = bp_num3 self.conv1 = conv1_get[:2] self.step_conv1 = conv1_get[2] self.size_pooling1 = size_p1 self.rate_weight = rate_w self.rate_thre = rate_t self.w_conv1 = [ np.mat(-1 * np.random.rand(self.conv1[0], self.conv1[0]) + 0.5) for i in range(self.conv1[1]) ] self.wkj = np.mat(-1 * np.random.rand(self.num_bp3, self.num_bp2) + 0.5) self.vji = np.mat(-1 * np.random.rand(self.num_bp2, self.num_bp1) + 0.5) self.thre_conv1 = -2 * np.random.rand(self.conv1[1]) + 1 self.thre_bp2 = -2 * np.random.rand(self.num_bp2) + 1 self.thre_bp3 = -2 * np.random.rand(self.num_bp3) + 1 def save_model(self, save_path): # save model dict with pickle model_dic = { "num_bp1": self.num_bp1, "num_bp2": self.num_bp2, "num_bp3": self.num_bp3, "conv1": self.conv1, "step_conv1": self.step_conv1, "size_pooling1": self.size_pooling1, "rate_weight": self.rate_weight, "rate_thre": self.rate_thre, "w_conv1": self.w_conv1, "wkj": self.wkj, "vji": self.vji, "thre_conv1": self.thre_conv1, "thre_bp2": self.thre_bp2, "thre_bp3": self.thre_bp3, } with open(save_path, "wb") as f: pickle.dump(model_dic, f) print(f"Model saved: {save_path}") @classmethod def read_model(cls, model_path): # read saved model with open(model_path, "rb") as f: model_dic = pickle.load(f) conv_get = model_dic.get("conv1") conv_get.append(model_dic.get("step_conv1")) size_p1 = model_dic.get("size_pooling1") bp1 = model_dic.get("num_bp1") bp2 = model_dic.get("num_bp2") bp3 = model_dic.get("num_bp3") r_w = model_dic.get("rate_weight") r_t = model_dic.get("rate_thre") # create model instance conv_ins = CNN(conv_get, size_p1, bp1, bp2, bp3, r_w, r_t) # modify model parameter conv_ins.w_conv1 = model_dic.get("w_conv1") conv_ins.wkj = model_dic.get("wkj") conv_ins.vji = model_dic.get("vji") conv_ins.thre_conv1 = model_dic.get("thre_conv1") conv_ins.thre_bp2 = model_dic.get("thre_bp2") conv_ins.thre_bp3 = model_dic.get("thre_bp3") return conv_ins def sig(self, x): return 1 / (1 + np.exp(-1 * x)) def do_round(self, x): return round(x, 3) def convolute(self, data, convs, w_convs, thre_convs, conv_step): # convolution process size_conv = convs[0] num_conv = convs[1] size_data = np.shape(data)[0] # get the data slice of original image data, data_focus data_focus = [] for i_focus in range(0, size_data - size_conv + 1, conv_step): for j_focus in range(0, size_data - size_conv + 1, conv_step): focus = data[ i_focus : i_focus + size_conv, j_focus : j_focus + size_conv ] data_focus.append(focus) # calculate the feature map of every single kernel, and saved as list of matrix data_featuremap = [] 
size_feature_map = int((size_data - size_conv) / conv_step + 1) for i_map in range(num_conv): featuremap = [] for i_focus in range(len(data_focus)): net_focus = ( np.sum(np.multiply(data_focus[i_focus], w_convs[i_map])) - thre_convs[i_map] ) featuremap.append(self.sig(net_focus)) featuremap = np.asmatrix(featuremap).reshape( size_feature_map, size_feature_map ) data_featuremap.append(featuremap) # expanding the data slice to One dimenssion focus1_list = [] for each_focus in data_focus: focus1_list.extend(self.Expand_Mat(each_focus)) focus_list = np.asarray(focus1_list) return focus_list, data_featuremap def pooling(self, featuremaps, size_pooling, pooling_type="average_pool"): # pooling process size_map = len(featuremaps[0]) size_pooled = int(size_map / size_pooling) featuremap_pooled = [] for i_map in range(len(featuremaps)): feature_map = featuremaps[i_map] map_pooled = [] for i_focus in range(0, size_map, size_pooling): for j_focus in range(0, size_map, size_pooling): focus = feature_map[ i_focus : i_focus + size_pooling, j_focus : j_focus + size_pooling, ] if pooling_type == "average_pool": # average pooling map_pooled.append(np.average(focus)) elif pooling_type == "max_pooling": # max pooling map_pooled.append(np.max(focus)) map_pooled = np.asmatrix(map_pooled).reshape(size_pooled, size_pooled) featuremap_pooled.append(map_pooled) return featuremap_pooled def _expand(self, data): # expanding three dimension data to one dimension list data_expanded = [] for i in range(len(data)): shapes = np.shape(data[i]) data_listed = data[i].reshape(1, shapes[0] * shapes[1]) data_listed = data_listed.getA().tolist()[0] data_expanded.extend(data_listed) data_expanded = np.asarray(data_expanded) return data_expanded def _expand_mat(self, data_mat): # expanding matrix to one dimension list data_mat = np.asarray(data_mat) shapes = np.shape(data_mat) data_expanded = data_mat.reshape(1, shapes[0] * shapes[1]) return data_expanded def _calculate_gradient_from_pool( self, out_map, pd_pool, num_map, size_map, size_pooling ): """ calculate the gradient from the data slice of pool layer pd_pool: list of matrix out_map: the shape of data slice(size_map*size_map) return: pd_all: list of matrix, [num, size_map, size_map] """ pd_all = [] i_pool = 0 for i_map in range(num_map): pd_conv1 = np.ones((size_map, size_map)) for i in range(0, size_map, size_pooling): for j in range(0, size_map, size_pooling): pd_conv1[i : i + size_pooling, j : j + size_pooling] = pd_pool[ i_pool ] i_pool = i_pool + 1 pd_conv2 = np.multiply( pd_conv1, np.multiply(out_map[i_map], (1 - out_map[i_map])) ) pd_all.append(pd_conv2) return pd_all def train( self, patterns, datas_train, datas_teach, n_repeat, error_accuracy, draw_e=bool ): # model traning print("----------------------Start Training-------------------------") print((" - - Shape: Train_Data ", np.shape(datas_train))) print((" - - Shape: Teach_Data ", np.shape(datas_teach))) rp = 0 all_mse = [] mse = 10000 while rp < n_repeat and mse >= error_accuracy: error_count = 0 print(f"-------------Learning Time {rp}--------------") for p in range(len(datas_train)): # print('------------Learning Image: %d--------------'%p) data_train = np.asmatrix(datas_train[p]) data_teach = np.asarray(datas_teach[p]) data_focus1, data_conved1 = self.convolute( data_train, self.conv1, self.w_conv1, self.thre_conv1, conv_step=self.step_conv1, ) data_pooled1 = self.pooling(data_conved1, self.size_pooling1) shape_featuremap1 = np.shape(data_conved1) """ print(' -----original shape ', np.shape(data_train)) 
print(' ---- after convolution ',np.shape(data_conv1)) print(' -----after pooling ',np.shape(data_pooled1)) """ data_bp_input = self._expand(data_pooled1) bp_out1 = data_bp_input bp_net_j = np.dot(bp_out1, self.vji.T) - self.thre_bp2 bp_out2 = self.sig(bp_net_j) bp_net_k = np.dot(bp_out2, self.wkj.T) - self.thre_bp3 bp_out3 = self.sig(bp_net_k) # --------------Model Leaning ------------------------ # calculate error and gradient--------------- pd_k_all = np.multiply( (data_teach - bp_out3), np.multiply(bp_out3, (1 - bp_out3)) ) pd_j_all = np.multiply( np.dot(pd_k_all, self.wkj), np.multiply(bp_out2, (1 - bp_out2)) ) pd_i_all = np.dot(pd_j_all, self.vji) pd_conv1_pooled = pd_i_all / (self.size_pooling1 * self.size_pooling1) pd_conv1_pooled = pd_conv1_pooled.T.getA().tolist() pd_conv1_all = self._calculate_gradient_from_pool( data_conved1, pd_conv1_pooled, shape_featuremap1[0], shape_featuremap1[1], self.size_pooling1, ) # weight and threshold learning process--------- # convolution layer for k_conv in range(self.conv1[1]): pd_conv_list = self._expand_mat(pd_conv1_all[k_conv]) delta_w = self.rate_weight * np.dot(pd_conv_list, data_focus1) self.w_conv1[k_conv] = self.w_conv1[k_conv] + delta_w.reshape( (self.conv1[0], self.conv1[0]) ) self.thre_conv1[k_conv] = ( self.thre_conv1[k_conv] - np.sum(pd_conv1_all[k_conv]) * self.rate_thre ) # all connected layer self.wkj = self.wkj + pd_k_all.T * bp_out2 * self.rate_weight self.vji = self.vji + pd_j_all.T * bp_out1 * self.rate_weight self.thre_bp3 = self.thre_bp3 - pd_k_all * self.rate_thre self.thre_bp2 = self.thre_bp2 - pd_j_all * self.rate_thre # calculate the sum error of all single image errors = np.sum(abs(data_teach - bp_out3)) error_count += errors # print(' ----Teach ',data_teach) # print(' ----BP_output ',bp_out3) rp = rp + 1 mse = error_count / patterns all_mse.append(mse) def draw_error(): yplot = [error_accuracy for i in range(int(n_repeat * 1.2))] plt.plot(all_mse, "+-") plt.plot(yplot, "r--") plt.xlabel("Learning Times") plt.ylabel("All_mse") plt.grid(True, alpha=0.5) plt.show() print("------------------Training Complished---------------------") print((" - - Training epoch: ", rp, f" - - Mse: {mse:.6f}")) if draw_e: draw_error() return mse def predict(self, datas_test): # model predict produce_out = [] print("-------------------Start Testing-------------------------") print((" - - Shape: Test_Data ", np.shape(datas_test))) for p in range(len(datas_test)): data_test = np.asmatrix(datas_test[p]) data_focus1, data_conved1 = self.convolute( data_test, self.conv1, self.w_conv1, self.thre_conv1, conv_step=self.step_conv1, ) data_pooled1 = self.pooling(data_conved1, self.size_pooling1) data_bp_input = self._expand(data_pooled1) bp_out1 = data_bp_input bp_net_j = bp_out1 * self.vji.T - self.thre_bp2 bp_out2 = self.sig(bp_net_j) bp_net_k = bp_out2 * self.wkj.T - self.thre_bp3 bp_out3 = self.sig(bp_net_k) produce_out.extend(bp_out3.getA().tolist()) res = [list(map(self.do_round, each)) for each in produce_out] return np.asarray(res) def convolution(self, data): # return the data of image after convoluting process so we can check it out data_test = np.asmatrix(data) data_focus1, data_conved1 = self.convolute( data_test, self.conv1, self.w_conv1, self.thre_conv1, conv_step=self.step_conv1, ) data_pooled1 = self.pooling(data_conved1, self.size_pooling1) return data_conved1, data_pooled1 if __name__ == "__main__": """ I will put the example on other file """
1
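The pooling method in the convolutional-network code above shrinks each feature map by taking the average (or maximum) over non-overlapping square windows. As a standalone illustration of that idea, here is a minimal NumPy sketch; the function name `pool2d` and the toy input are assumptions made for this note and are not part of the repository file.

```py
import numpy as np


def pool2d(feature_map: np.ndarray, size: int, mode: str = "average") -> np.ndarray:
    """Pool a square feature map over non-overlapping size x size windows."""
    out_dim = feature_map.shape[0] // size
    pooled = np.empty((out_dim, out_dim))
    for i in range(out_dim):
        for j in range(out_dim):
            window = feature_map[i * size : (i + 1) * size, j * size : (j + 1) * size]
            # "average" mirrors the average_pool branch, "max" the max_pooling branch
            pooled[i, j] = window.mean() if mode == "average" else window.max()
    return pooled


if __name__ == "__main__":
    fm = np.arange(16, dtype=float).reshape(4, 4)
    print(pool2d(fm, 2))         # means of each 2x2 window
    print(pool2d(fm, 2, "max"))  # maxima of each 2x2 window
```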
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
-1
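For readers unfamiliar with the flake8-pep3101 rule referenced in this PR, a few illustrative conversions from printf-style (modulo) formatting to f-strings follow. These examples are constructed here for explanation only and are not taken from the PR's diff.

```py
name, count, ratio = "world", 3, 0.12345

# Before: %-formatting, which flake8-pep3101 flags
greeting = "Hello %s" % name
summary = "%d items (%.2f%% of total)" % (count, ratio * 100)

# After: equivalent f-strings
greeting = f"Hello {name}"
summary = f"{count} items ({ratio * 100:.2f}% of total)"

print(greeting)
print(summary)
```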
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
-1
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
def is_palindrome(s: str) -> bool: """ Determine whether the string is palindrome :param s: :return: Boolean >>> is_palindrome("a man a plan a canal panama".replace(" ", "")) True >>> is_palindrome("Hello") False >>> is_palindrome("Able was I ere I saw Elba") True >>> is_palindrome("racecar") True >>> is_palindrome("Mr. Owl ate my metal worm?") True """ # Since Punctuation, capitalization, and spaces are usually ignored while checking # Palindrome, we first remove them from our string. s = "".join([character for character in s.lower() if character.isalnum()]) return s == s[::-1] if __name__ == "__main__": s = input("Enter string to determine whether its palindrome or not: ").strip() if is_palindrome(s): print("Given string is palindrome") else: print("Given string is not palindrome")
def is_palindrome(s: str) -> bool: """ Determine whether the string is palindrome :param s: :return: Boolean >>> is_palindrome("a man a plan a canal panama".replace(" ", "")) True >>> is_palindrome("Hello") False >>> is_palindrome("Able was I ere I saw Elba") True >>> is_palindrome("racecar") True >>> is_palindrome("Mr. Owl ate my metal worm?") True """ # Since Punctuation, capitalization, and spaces are usually ignored while checking # Palindrome, we first remove them from our string. s = "".join([character for character in s.lower() if character.isalnum()]) return s == s[::-1] if __name__ == "__main__": s = input("Enter string to determine whether its palindrome or not: ").strip() if is_palindrome(s): print("Given string is palindrome") else: print("Given string is not palindrome")
-1
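The palindrome check above builds a filtered copy of the string and compares it with its reverse. A common alternative with the same behaviour is a two-pointer scan that skips non-alphanumeric characters in place, trading the extra copy for O(1) additional space. The sketch below only illustrates that variant and is not part of the repository file.

```py
def is_palindrome_two_pointers(s: str) -> bool:
    """Two-pointer palindrome check ignoring case, spaces and punctuation."""
    left, right = 0, len(s) - 1
    while left < right:
        # Skip characters that are not letters or digits
        while left < right and not s[left].isalnum():
            left += 1
        while left < right and not s[right].isalnum():
            right -= 1
        if s[left].lower() != s[right].lower():
            return False
        left += 1
        right -= 1
    return True


assert is_palindrome_two_pointers("Mr. Owl ate my metal worm?")
assert not is_palindrome_two_pointers("Hello")
```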
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
-1
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
def snake_to_camel_case(input_str: str, use_pascal: bool = False) -> str: """ Transforms a snake_case given string to camelCase (or PascalCase if indicated) (defaults to not use Pascal) >>> snake_to_camel_case("some_random_string") 'someRandomString' >>> snake_to_camel_case("some_random_string", use_pascal=True) 'SomeRandomString' >>> snake_to_camel_case("some_random_string_with_numbers_123") 'someRandomStringWithNumbers123' >>> snake_to_camel_case("some_random_string_with_numbers_123", use_pascal=True) 'SomeRandomStringWithNumbers123' >>> snake_to_camel_case(123) Traceback (most recent call last): ... ValueError: Expected string as input, found <class 'int'> >>> snake_to_camel_case("some_string", use_pascal="True") Traceback (most recent call last): ... ValueError: Expected boolean as use_pascal parameter, found <class 'str'> """ if not isinstance(input_str, str): raise ValueError(f"Expected string as input, found {type(input_str)}") if not isinstance(use_pascal, bool): raise ValueError( f"Expected boolean as use_pascal parameter, found {type(use_pascal)}" ) words = input_str.split("_") start_index = 0 if use_pascal else 1 words_to_capitalize = words[start_index:] capitalized_words = [word[0].upper() + word[1:] for word in words_to_capitalize] initial_word = "" if use_pascal else words[0] return "".join([initial_word] + capitalized_words) if __name__ == "__main__": from doctest import testmod testmod()
def snake_to_camel_case(input_str: str, use_pascal: bool = False) -> str: """ Transforms a snake_case given string to camelCase (or PascalCase if indicated) (defaults to not use Pascal) >>> snake_to_camel_case("some_random_string") 'someRandomString' >>> snake_to_camel_case("some_random_string", use_pascal=True) 'SomeRandomString' >>> snake_to_camel_case("some_random_string_with_numbers_123") 'someRandomStringWithNumbers123' >>> snake_to_camel_case("some_random_string_with_numbers_123", use_pascal=True) 'SomeRandomStringWithNumbers123' >>> snake_to_camel_case(123) Traceback (most recent call last): ... ValueError: Expected string as input, found <class 'int'> >>> snake_to_camel_case("some_string", use_pascal="True") Traceback (most recent call last): ... ValueError: Expected boolean as use_pascal parameter, found <class 'str'> """ if not isinstance(input_str, str): raise ValueError(f"Expected string as input, found {type(input_str)}") if not isinstance(use_pascal, bool): raise ValueError( f"Expected boolean as use_pascal parameter, found {type(use_pascal)}" ) words = input_str.split("_") start_index = 0 if use_pascal else 1 words_to_capitalize = words[start_index:] capitalized_words = [word[0].upper() + word[1:] for word in words_to_capitalize] initial_word = "" if use_pascal else words[0] return "".join([initial_word] + capitalized_words) if __name__ == "__main__": from doctest import testmod testmod()
-1
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
""" Created on Mon Feb 26 14:29:11 2018 @author: Christian Bender @license: MIT-license This module contains some useful classes and functions for dealing with linear algebra in python. Overview: - class Vector - function zero_vector(dimension) - function unit_basis_vector(dimension, pos) - function axpy(scalar, vector1, vector2) - function random_vector(N, a, b) - class Matrix - function square_zero_matrix(N) - function random_matrix(W, H, a, b) """ from __future__ import annotations import math import random from collections.abc import Collection from typing import overload class Vector: """ This class represents a vector of arbitrary size. You need to give the vector components. Overview of the methods: __init__(components: Collection[float] | None): init the vector __len__(): gets the size of the vector (number of components) __str__(): returns a string representation __add__(other: Vector): vector addition __sub__(other: Vector): vector subtraction __mul__(other: float): scalar multiplication __mul__(other: Vector): dot product set(components: Collection[float]): changes the vector components copy(): copies this vector and returns it component(i): gets the i-th component (0-indexed) change_component(pos: int, value: float): changes specified component euclidean_length(): returns the euclidean length of the vector angle(other: Vector, deg: bool): returns the angle between two vectors TODO: compare-operator """ def __init__(self, components: Collection[float] | None = None) -> None: """ input: components or nothing simple constructor for init the vector """ if components is None: components = [] self.__components = list(components) def __len__(self) -> int: """ returns the size of the vector """ return len(self.__components) def __str__(self) -> str: """ returns a string representation of the vector """ return "(" + ",".join(map(str, self.__components)) + ")" def __add__(self, other: Vector) -> Vector: """ input: other vector assumes: other vector has the same size returns a new vector that represents the sum. """ size = len(self) if size == len(other): result = [self.__components[i] + other.component(i) for i in range(size)] return Vector(result) else: raise Exception("must have the same size") def __sub__(self, other: Vector) -> Vector: """ input: other vector assumes: other vector has the same size returns a new vector that represents the difference. """ size = len(self) if size == len(other): result = [self.__components[i] - other.component(i) for i in range(size)] return Vector(result) else: # error case raise Exception("must have the same size") @overload def __mul__(self, other: float) -> Vector: ... @overload def __mul__(self, other: Vector) -> float: ... def __mul__(self, other: float | Vector) -> float | Vector: """ mul implements the scalar multiplication and the dot-product """ if isinstance(other, float) or isinstance(other, int): ans = [c * other for c in self.__components] return Vector(ans) elif isinstance(other, Vector) and len(self) == len(other): size = len(self) prods = [self.__components[i] * other.component(i) for i in range(size)] return sum(prods) else: # error case raise Exception("invalid operand!") def set(self, components: Collection[float]) -> None: """ input: new components changes the components of the vector. replaces the components with newer one. """ if len(components) > 0: self.__components = list(components) else: raise Exception("please give any vector") def copy(self) -> Vector: """ copies this vector and returns it. 
""" return Vector(self.__components) def component(self, i: int) -> float: """ input: index (0-indexed) output: the i-th component of the vector. """ if type(i) is int and -len(self.__components) <= i < len(self.__components): return self.__components[i] else: raise Exception("index out of range") def change_component(self, pos: int, value: float) -> None: """ input: an index (pos) and a value changes the specified component (pos) with the 'value' """ # precondition assert -len(self.__components) <= pos < len(self.__components) self.__components[pos] = value def euclidean_length(self) -> float: """ returns the euclidean length of the vector >>> Vector([2, 3, 4]).euclidean_length() 5.385164807134504 >>> Vector([1]).euclidean_length() 1.0 >>> Vector([0, -1, -2, -3, 4, 5, 6]).euclidean_length() 9.539392014169456 >>> Vector([]).euclidean_length() Traceback (most recent call last): ... Exception: Vector is empty """ if len(self.__components) == 0: raise Exception("Vector is empty") squares = [c**2 for c in self.__components] return math.sqrt(sum(squares)) def angle(self, other: Vector, deg: bool = False) -> float: """ find angle between two Vector (self, Vector) >>> Vector([3, 4, -1]).angle(Vector([2, -1, 1])) 1.4906464636572374 >>> Vector([3, 4, -1]).angle(Vector([2, -1, 1]), deg = True) 85.40775111366095 >>> Vector([3, 4, -1]).angle(Vector([2, -1])) Traceback (most recent call last): ... Exception: invalid operand! """ num = self * other den = self.euclidean_length() * other.euclidean_length() if deg: return math.degrees(math.acos(num / den)) else: return math.acos(num / den) def zero_vector(dimension: int) -> Vector: """ returns a zero-vector of size 'dimension' """ # precondition assert isinstance(dimension, int) return Vector([0] * dimension) def unit_basis_vector(dimension: int, pos: int) -> Vector: """ returns a unit basis vector with a One at index 'pos' (indexing at 0) """ # precondition assert isinstance(dimension, int) and (isinstance(pos, int)) ans = [0] * dimension ans[pos] = 1 return Vector(ans) def axpy(scalar: float, x: Vector, y: Vector) -> Vector: """ input: a 'scalar' and two vectors 'x' and 'y' output: a vector computes the axpy operation """ # precondition assert ( isinstance(x, Vector) and isinstance(y, Vector) and (isinstance(scalar, int) or isinstance(scalar, float)) ) return x * scalar + y def random_vector(n: int, a: int, b: int) -> Vector: """ input: size (N) of the vector. random range (a,b) output: returns a random vector of size N, with random integer components between 'a' and 'b'. """ random.seed(None) ans = [random.randint(a, b) for _ in range(n)] return Vector(ans) class Matrix: """ class: Matrix This class represents an arbitrary matrix. Overview of the methods: __init__(): __str__(): returns a string representation __add__(other: Matrix): matrix addition __sub__(other: Matrix): matrix subtraction __mul__(other: float): scalar multiplication __mul__(other: Vector): vector multiplication height() : returns height width() : returns width component(x: int, y: int): returns specified component change_component(x: int, y: int, value: float): changes specified component minor(x: int, y: int): returns minor along (x, y) cofactor(x: int, y: int): returns cofactor along (x, y) determinant() : returns determinant """ def __init__(self, matrix: list[list[float]], w: int, h: int) -> None: """ simple constructor for initializing the matrix with components. 
""" self.__matrix = matrix self.__width = w self.__height = h def __str__(self) -> str: """ returns a string representation of this matrix. """ ans = "" for i in range(self.__height): ans += "|" for j in range(self.__width): if j < self.__width - 1: ans += str(self.__matrix[i][j]) + "," else: ans += str(self.__matrix[i][j]) + "|\n" return ans def __add__(self, other: Matrix) -> Matrix: """ implements matrix addition. """ if self.__width == other.width() and self.__height == other.height(): matrix = [] for i in range(self.__height): row = [ self.__matrix[i][j] + other.component(i, j) for j in range(self.__width) ] matrix.append(row) return Matrix(matrix, self.__width, self.__height) else: raise Exception("matrix must have the same dimension!") def __sub__(self, other: Matrix) -> Matrix: """ implements matrix subtraction. """ if self.__width == other.width() and self.__height == other.height(): matrix = [] for i in range(self.__height): row = [ self.__matrix[i][j] - other.component(i, j) for j in range(self.__width) ] matrix.append(row) return Matrix(matrix, self.__width, self.__height) else: raise Exception("matrices must have the same dimension!") @overload def __mul__(self, other: float) -> Matrix: ... @overload def __mul__(self, other: Vector) -> Vector: ... def __mul__(self, other: float | Vector) -> Vector | Matrix: """ implements the matrix-vector multiplication. implements the matrix-scalar multiplication """ if isinstance(other, Vector): # matrix-vector if len(other) == self.__width: ans = zero_vector(self.__height) for i in range(self.__height): prods = [ self.__matrix[i][j] * other.component(j) for j in range(self.__width) ] ans.change_component(i, sum(prods)) return ans else: raise Exception( "vector must have the same size as the " "number of columns of the matrix!" 
) elif isinstance(other, int) or isinstance(other, float): # matrix-scalar matrix = [ [self.__matrix[i][j] * other for j in range(self.__width)] for i in range(self.__height) ] return Matrix(matrix, self.__width, self.__height) def height(self) -> int: """ getter for the height """ return self.__height def width(self) -> int: """ getter for the width """ return self.__width def component(self, x: int, y: int) -> float: """ returns the specified (x,y) component """ if 0 <= x < self.__height and 0 <= y < self.__width: return self.__matrix[x][y] else: raise Exception("change_component: indices out of bounds") def change_component(self, x: int, y: int, value: float) -> None: """ changes the x-y component of this matrix """ if 0 <= x < self.__height and 0 <= y < self.__width: self.__matrix[x][y] = value else: raise Exception("change_component: indices out of bounds") def minor(self, x: int, y: int) -> float: """ returns the minor along (x, y) """ if self.__height != self.__width: raise Exception("Matrix is not square") minor = self.__matrix[:x] + self.__matrix[x + 1 :] for i in range(len(minor)): minor[i] = minor[i][:y] + minor[i][y + 1 :] return Matrix(minor, self.__width - 1, self.__height - 1).determinant() def cofactor(self, x: int, y: int) -> float: """ returns the cofactor (signed minor) along (x, y) """ if self.__height != self.__width: raise Exception("Matrix is not square") if 0 <= x < self.__height and 0 <= y < self.__width: return (-1) ** (x + y) * self.minor(x, y) else: raise Exception("Indices out of bounds") def determinant(self) -> float: """ returns the determinant of an nxn matrix using Laplace expansion """ if self.__height != self.__width: raise Exception("Matrix is not square") if self.__height < 1: raise Exception("Matrix has no element") elif self.__height == 1: return self.__matrix[0][0] elif self.__height == 2: return ( self.__matrix[0][0] * self.__matrix[1][1] - self.__matrix[0][1] * self.__matrix[1][0] ) else: cofactor_prods = [ self.__matrix[0][y] * self.cofactor(0, y) for y in range(self.__width) ] return sum(cofactor_prods) def square_zero_matrix(n: int) -> Matrix: """ returns a square zero-matrix of dimension NxN """ ans: list[list[float]] = [[0] * n for _ in range(n)] return Matrix(ans, n, n) def random_matrix(width: int, height: int, a: int, b: int) -> Matrix: """ returns a random matrix WxH with integer components between 'a' and 'b' """ random.seed(None) matrix: list[list[float]] = [ [random.randint(a, b) for _ in range(width)] for _ in range(height) ] return Matrix(matrix, width, height)
""" Created on Mon Feb 26 14:29:11 2018 @author: Christian Bender @license: MIT-license This module contains some useful classes and functions for dealing with linear algebra in python. Overview: - class Vector - function zero_vector(dimension) - function unit_basis_vector(dimension, pos) - function axpy(scalar, vector1, vector2) - function random_vector(N, a, b) - class Matrix - function square_zero_matrix(N) - function random_matrix(W, H, a, b) """ from __future__ import annotations import math import random from collections.abc import Collection from typing import overload class Vector: """ This class represents a vector of arbitrary size. You need to give the vector components. Overview of the methods: __init__(components: Collection[float] | None): init the vector __len__(): gets the size of the vector (number of components) __str__(): returns a string representation __add__(other: Vector): vector addition __sub__(other: Vector): vector subtraction __mul__(other: float): scalar multiplication __mul__(other: Vector): dot product set(components: Collection[float]): changes the vector components copy(): copies this vector and returns it component(i): gets the i-th component (0-indexed) change_component(pos: int, value: float): changes specified component euclidean_length(): returns the euclidean length of the vector angle(other: Vector, deg: bool): returns the angle between two vectors TODO: compare-operator """ def __init__(self, components: Collection[float] | None = None) -> None: """ input: components or nothing simple constructor for init the vector """ if components is None: components = [] self.__components = list(components) def __len__(self) -> int: """ returns the size of the vector """ return len(self.__components) def __str__(self) -> str: """ returns a string representation of the vector """ return "(" + ",".join(map(str, self.__components)) + ")" def __add__(self, other: Vector) -> Vector: """ input: other vector assumes: other vector has the same size returns a new vector that represents the sum. """ size = len(self) if size == len(other): result = [self.__components[i] + other.component(i) for i in range(size)] return Vector(result) else: raise Exception("must have the same size") def __sub__(self, other: Vector) -> Vector: """ input: other vector assumes: other vector has the same size returns a new vector that represents the difference. """ size = len(self) if size == len(other): result = [self.__components[i] - other.component(i) for i in range(size)] return Vector(result) else: # error case raise Exception("must have the same size") @overload def __mul__(self, other: float) -> Vector: ... @overload def __mul__(self, other: Vector) -> float: ... def __mul__(self, other: float | Vector) -> float | Vector: """ mul implements the scalar multiplication and the dot-product """ if isinstance(other, float) or isinstance(other, int): ans = [c * other for c in self.__components] return Vector(ans) elif isinstance(other, Vector) and len(self) == len(other): size = len(self) prods = [self.__components[i] * other.component(i) for i in range(size)] return sum(prods) else: # error case raise Exception("invalid operand!") def set(self, components: Collection[float]) -> None: """ input: new components changes the components of the vector. replaces the components with newer one. """ if len(components) > 0: self.__components = list(components) else: raise Exception("please give any vector") def copy(self) -> Vector: """ copies this vector and returns it. 
""" return Vector(self.__components) def component(self, i: int) -> float: """ input: index (0-indexed) output: the i-th component of the vector. """ if type(i) is int and -len(self.__components) <= i < len(self.__components): return self.__components[i] else: raise Exception("index out of range") def change_component(self, pos: int, value: float) -> None: """ input: an index (pos) and a value changes the specified component (pos) with the 'value' """ # precondition assert -len(self.__components) <= pos < len(self.__components) self.__components[pos] = value def euclidean_length(self) -> float: """ returns the euclidean length of the vector >>> Vector([2, 3, 4]).euclidean_length() 5.385164807134504 >>> Vector([1]).euclidean_length() 1.0 >>> Vector([0, -1, -2, -3, 4, 5, 6]).euclidean_length() 9.539392014169456 >>> Vector([]).euclidean_length() Traceback (most recent call last): ... Exception: Vector is empty """ if len(self.__components) == 0: raise Exception("Vector is empty") squares = [c**2 for c in self.__components] return math.sqrt(sum(squares)) def angle(self, other: Vector, deg: bool = False) -> float: """ find angle between two Vector (self, Vector) >>> Vector([3, 4, -1]).angle(Vector([2, -1, 1])) 1.4906464636572374 >>> Vector([3, 4, -1]).angle(Vector([2, -1, 1]), deg = True) 85.40775111366095 >>> Vector([3, 4, -1]).angle(Vector([2, -1])) Traceback (most recent call last): ... Exception: invalid operand! """ num = self * other den = self.euclidean_length() * other.euclidean_length() if deg: return math.degrees(math.acos(num / den)) else: return math.acos(num / den) def zero_vector(dimension: int) -> Vector: """ returns a zero-vector of size 'dimension' """ # precondition assert isinstance(dimension, int) return Vector([0] * dimension) def unit_basis_vector(dimension: int, pos: int) -> Vector: """ returns a unit basis vector with a One at index 'pos' (indexing at 0) """ # precondition assert isinstance(dimension, int) and (isinstance(pos, int)) ans = [0] * dimension ans[pos] = 1 return Vector(ans) def axpy(scalar: float, x: Vector, y: Vector) -> Vector: """ input: a 'scalar' and two vectors 'x' and 'y' output: a vector computes the axpy operation """ # precondition assert ( isinstance(x, Vector) and isinstance(y, Vector) and (isinstance(scalar, int) or isinstance(scalar, float)) ) return x * scalar + y def random_vector(n: int, a: int, b: int) -> Vector: """ input: size (N) of the vector. random range (a,b) output: returns a random vector of size N, with random integer components between 'a' and 'b'. """ random.seed(None) ans = [random.randint(a, b) for _ in range(n)] return Vector(ans) class Matrix: """ class: Matrix This class represents an arbitrary matrix. Overview of the methods: __init__(): __str__(): returns a string representation __add__(other: Matrix): matrix addition __sub__(other: Matrix): matrix subtraction __mul__(other: float): scalar multiplication __mul__(other: Vector): vector multiplication height() : returns height width() : returns width component(x: int, y: int): returns specified component change_component(x: int, y: int, value: float): changes specified component minor(x: int, y: int): returns minor along (x, y) cofactor(x: int, y: int): returns cofactor along (x, y) determinant() : returns determinant """ def __init__(self, matrix: list[list[float]], w: int, h: int) -> None: """ simple constructor for initializing the matrix with components. 
""" self.__matrix = matrix self.__width = w self.__height = h def __str__(self) -> str: """ returns a string representation of this matrix. """ ans = "" for i in range(self.__height): ans += "|" for j in range(self.__width): if j < self.__width - 1: ans += str(self.__matrix[i][j]) + "," else: ans += str(self.__matrix[i][j]) + "|\n" return ans def __add__(self, other: Matrix) -> Matrix: """ implements matrix addition. """ if self.__width == other.width() and self.__height == other.height(): matrix = [] for i in range(self.__height): row = [ self.__matrix[i][j] + other.component(i, j) for j in range(self.__width) ] matrix.append(row) return Matrix(matrix, self.__width, self.__height) else: raise Exception("matrix must have the same dimension!") def __sub__(self, other: Matrix) -> Matrix: """ implements matrix subtraction. """ if self.__width == other.width() and self.__height == other.height(): matrix = [] for i in range(self.__height): row = [ self.__matrix[i][j] - other.component(i, j) for j in range(self.__width) ] matrix.append(row) return Matrix(matrix, self.__width, self.__height) else: raise Exception("matrices must have the same dimension!") @overload def __mul__(self, other: float) -> Matrix: ... @overload def __mul__(self, other: Vector) -> Vector: ... def __mul__(self, other: float | Vector) -> Vector | Matrix: """ implements the matrix-vector multiplication. implements the matrix-scalar multiplication """ if isinstance(other, Vector): # matrix-vector if len(other) == self.__width: ans = zero_vector(self.__height) for i in range(self.__height): prods = [ self.__matrix[i][j] * other.component(j) for j in range(self.__width) ] ans.change_component(i, sum(prods)) return ans else: raise Exception( "vector must have the same size as the " "number of columns of the matrix!" 
) elif isinstance(other, int) or isinstance(other, float): # matrix-scalar matrix = [ [self.__matrix[i][j] * other for j in range(self.__width)] for i in range(self.__height) ] return Matrix(matrix, self.__width, self.__height) def height(self) -> int: """ getter for the height """ return self.__height def width(self) -> int: """ getter for the width """ return self.__width def component(self, x: int, y: int) -> float: """ returns the specified (x,y) component """ if 0 <= x < self.__height and 0 <= y < self.__width: return self.__matrix[x][y] else: raise Exception("change_component: indices out of bounds") def change_component(self, x: int, y: int, value: float) -> None: """ changes the x-y component of this matrix """ if 0 <= x < self.__height and 0 <= y < self.__width: self.__matrix[x][y] = value else: raise Exception("change_component: indices out of bounds") def minor(self, x: int, y: int) -> float: """ returns the minor along (x, y) """ if self.__height != self.__width: raise Exception("Matrix is not square") minor = self.__matrix[:x] + self.__matrix[x + 1 :] for i in range(len(minor)): minor[i] = minor[i][:y] + minor[i][y + 1 :] return Matrix(minor, self.__width - 1, self.__height - 1).determinant() def cofactor(self, x: int, y: int) -> float: """ returns the cofactor (signed minor) along (x, y) """ if self.__height != self.__width: raise Exception("Matrix is not square") if 0 <= x < self.__height and 0 <= y < self.__width: return (-1) ** (x + y) * self.minor(x, y) else: raise Exception("Indices out of bounds") def determinant(self) -> float: """ returns the determinant of an nxn matrix using Laplace expansion """ if self.__height != self.__width: raise Exception("Matrix is not square") if self.__height < 1: raise Exception("Matrix has no element") elif self.__height == 1: return self.__matrix[0][0] elif self.__height == 2: return ( self.__matrix[0][0] * self.__matrix[1][1] - self.__matrix[0][1] * self.__matrix[1][0] ) else: cofactor_prods = [ self.__matrix[0][y] * self.cofactor(0, y) for y in range(self.__width) ] return sum(cofactor_prods) def square_zero_matrix(n: int) -> Matrix: """ returns a square zero-matrix of dimension NxN """ ans: list[list[float]] = [[0] * n for _ in range(n)] return Matrix(ans, n, n) def random_matrix(width: int, height: int, a: int, b: int) -> Matrix: """ returns a random matrix WxH with integer components between 'a' and 'b' """ random.seed(None) matrix: list[list[float]] = [ [random.randint(a, b) for _ in range(width)] for _ in range(height) ] return Matrix(matrix, width, height)
-1
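As a quick illustration of how the Vector and Matrix classes defined above fit together, here is a small usage sketch. It assumes the class definitions above are already in scope; the concrete numbers are made up for the example.

```py
u = Vector([1, 2, 3])
v = Vector([4, 5, 6])

print(u + v)  # (5,7,9)  vector addition
print(u * 2)  # (2,4,6)  scalar multiplication
print(u * v)  # 32       dot product: 1*4 + 2*5 + 3*6

m = Matrix([[1, 0], [0, 2]], 2, 2)
w = Vector([3, 4])
print(m * w)            # (3,8)  matrix-vector product
print(m.determinant())  # 2
```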
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
from __future__ import annotations from .abs import abs_val def abs_min(x: list[int]) -> int: """ >>> abs_min([0,5,1,11]) 0 >>> abs_min([3,-10,-2]) -2 >>> abs_min([]) Traceback (most recent call last): ... ValueError: abs_min() arg is an empty sequence """ if len(x) == 0: raise ValueError("abs_min() arg is an empty sequence") j = x[0] for i in x: if abs_val(i) < abs_val(j): j = i return j def main(): a = [-3, -1, 2, -11] print(abs_min(a)) # = -1 if __name__ == "__main__": import doctest doctest.testmod(verbose=True) main()
from __future__ import annotations from .abs import abs_val def abs_min(x: list[int]) -> int: """ >>> abs_min([0,5,1,11]) 0 >>> abs_min([3,-10,-2]) -2 >>> abs_min([]) Traceback (most recent call last): ... ValueError: abs_min() arg is an empty sequence """ if len(x) == 0: raise ValueError("abs_min() arg is an empty sequence") j = x[0] for i in x: if abs_val(i) < abs_val(j): j = i return j def main(): a = [-3, -1, 2, -11] print(abs_min(a)) # = -1 if __name__ == "__main__": import doctest doctest.testmod(verbose=True) main()
-1
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
""" Project Euler Problem 1: https://projecteuler.net/problem=1 Multiples of 3 and 5 If we list all the natural numbers below 10 that are multiples of 3 or 5, we get 3, 5, 6 and 9. The sum of these multiples is 23. Find the sum of all the multiples of 3 or 5 below 1000. """ def solution(n: int = 1000) -> int: """ Returns the sum of all the multiples of 3 or 5 below n. A straightforward pythonic solution using list comprehension. >>> solution(3) 0 >>> solution(4) 3 >>> solution(10) 23 >>> solution(600) 83700 """ return sum(i for i in range(n) if i % 3 == 0 or i % 5 == 0) if __name__ == "__main__": print(f"{solution() = }")
""" Project Euler Problem 1: https://projecteuler.net/problem=1 Multiples of 3 and 5 If we list all the natural numbers below 10 that are multiples of 3 or 5, we get 3, 5, 6 and 9. The sum of these multiples is 23. Find the sum of all the multiples of 3 or 5 below 1000. """ def solution(n: int = 1000) -> int: """ Returns the sum of all the multiples of 3 or 5 below n. A straightforward pythonic solution using list comprehension. >>> solution(3) 0 >>> solution(4) 3 >>> solution(10) 23 >>> solution(600) 83700 """ return sum(i for i in range(n) if i % 3 == 0 or i % 5 == 0) if __name__ == "__main__": print(f"{solution() = }")
-1
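The list-comprehension solution above runs in O(n). The same answer can also be computed in O(1) with inclusion-exclusion: add the multiples of 3 and the multiples of 5, then subtract the multiples of 15, which were counted twice. The sketch below is a cross-check written for this note, not one of the project's solution files.

```py
def sum_of_multiples_below(k: int, n: int) -> int:
    """Sum of all positive multiples of k strictly below n."""
    m = (n - 1) // k  # number of multiples of k below n
    return k * m * (m + 1) // 2  # k * (1 + 2 + ... + m)


def solution_closed_form(n: int = 1000) -> int:
    return (
        sum_of_multiples_below(3, n)
        + sum_of_multiples_below(5, n)
        - sum_of_multiples_below(15, n)
    )


assert solution_closed_form(10) == 23
assert solution_closed_form(1000) == 233168
```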
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
# An island in a matrix is a group of linked areas, all having the same value. # This code counts the number of islands in a given matrix, including diagonal # connections. class Matrix: # Public class to implement a graph def __init__(self, row: int, col: int, graph: list[list[bool]]) -> None: self.ROW = row self.COL = col self.graph = graph def is_safe(self, i: int, j: int, visited: list[list[bool]]) -> bool: return ( 0 <= i < self.ROW and 0 <= j < self.COL and not visited[i][j] and self.graph[i][j] ) def diffs(self, i: int, j: int, visited: list[list[bool]]) -> None: # Checking all 8 elements surrounding nth element row_nbr = [-1, -1, -1, 0, 0, 1, 1, 1] # Coordinate order col_nbr = [-1, 0, 1, -1, 1, -1, 0, 1] visited[i][j] = True # Make those cells visited for k in range(8): if self.is_safe(i + row_nbr[k], j + col_nbr[k], visited): self.diffs(i + row_nbr[k], j + col_nbr[k], visited) def count_islands(self) -> int: # And finally, count all islands. visited = [[False for j in range(self.COL)] for i in range(self.ROW)] count = 0 for i in range(self.ROW): for j in range(self.COL): if visited[i][j] is False and self.graph[i][j] == 1: self.diffs(i, j, visited) count += 1 return count
# An island in matrix is a group of linked areas, all having the same value. # This code counts number of islands in a given matrix, with including diagonal # connections. class Matrix: # Public class to implement a graph def __init__(self, row: int, col: int, graph: list[list[bool]]) -> None: self.ROW = row self.COL = col self.graph = graph def is_safe(self, i: int, j: int, visited: list[list[bool]]) -> bool: return ( 0 <= i < self.ROW and 0 <= j < self.COL and not visited[i][j] and self.graph[i][j] ) def diffs(self, i: int, j: int, visited: list[list[bool]]) -> None: # Checking all 8 elements surrounding nth element row_nbr = [-1, -1, -1, 0, 0, 1, 1, 1] # Coordinate order col_nbr = [-1, 0, 1, -1, 1, -1, 0, 1] visited[i][j] = True # Make those cells visited for k in range(8): if self.is_safe(i + row_nbr[k], j + col_nbr[k], visited): self.diffs(i + row_nbr[k], j + col_nbr[k], visited) def count_islands(self) -> int: # And finally, count all islands. visited = [[False for j in range(self.COL)] for i in range(self.ROW)] count = 0 for i in range(self.ROW): for j in range(self.COL): if visited[i][j] is False and self.graph[i][j] == 1: self.diffs(i, j, visited) count += 1 return count
-1
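The island-counting `Matrix` class embedded in the record above can be exercised with a small grid. A minimal, hypothetical driver — not part of the original file, and assuming the `Matrix` class above is in scope — might look like this:

```py
# Hypothetical driver for the Matrix class above; the 5x5 grid is an assumption
# chosen so that diagonal connectivity matters.
grid = [
    [1, 1, 0, 0, 0],
    [0, 1, 0, 0, 1],
    [1, 0, 0, 1, 1],
    [0, 0, 0, 0, 0],
    [1, 0, 1, 0, 1],
]
matrix = Matrix(row=5, col=5, graph=grid)
print(matrix.count_islands())  # 5: diagonal links merge several groups of 1s
```

Because `is_safe` checks all eight neighbours, the cell at (2, 0) joins the block in the top-left corner instead of forming its own island, which is why the count is 5 rather than 6.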
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
""" Problem 72 Counting fractions: https://projecteuler.net/problem=72 Description: Consider the fraction, n/d, where n and d are positive integers. If n<d and HCF(n,d)=1, it is called a reduced proper fraction. If we list the set of reduced proper fractions for d ≤ 8 in ascending order of size, we get: 1/8, 1/7, 1/6, 1/5, 1/4, 2/7, 1/3, 3/8, 2/5, 3/7, 1/2, 4/7, 3/5, 5/8, 2/3, 5/7, 3/4, 4/5, 5/6, 6/7, 7/8 It can be seen that there are 21 elements in this set. How many elements would be contained in the set of reduced proper fractions for d ≤ 1,000,000? Solution: Number of numbers between 1 and n that are coprime to n is given by the Euler's Totient function, phi(n). So, the answer is simply the sum of phi(n) for 2 <= n <= 1,000,000 Sum of phi(d), for all d|n = n. This result can be used to find phi(n) using a sieve. Time: 1 sec """ def solution(limit: int = 1_000_000) -> int: """ Returns an integer, the solution to the problem >>> solution(10) 31 >>> solution(100) 3043 >>> solution(1_000) 304191 """ phi = [i - 1 for i in range(limit + 1)] for i in range(2, limit + 1): if phi[i] == i - 1: for j in range(2 * i, limit + 1, i): phi[j] -= phi[j] // i return sum(phi[2 : limit + 1]) if __name__ == "__main__": print(solution())
""" Problem 72 Counting fractions: https://projecteuler.net/problem=72 Description: Consider the fraction, n/d, where n and d are positive integers. If n<d and HCF(n,d)=1, it is called a reduced proper fraction. If we list the set of reduced proper fractions for d ≤ 8 in ascending order of size, we get: 1/8, 1/7, 1/6, 1/5, 1/4, 2/7, 1/3, 3/8, 2/5, 3/7, 1/2, 4/7, 3/5, 5/8, 2/3, 5/7, 3/4, 4/5, 5/6, 6/7, 7/8 It can be seen that there are 21 elements in this set. How many elements would be contained in the set of reduced proper fractions for d ≤ 1,000,000? Solution: Number of numbers between 1 and n that are coprime to n is given by the Euler's Totient function, phi(n). So, the answer is simply the sum of phi(n) for 2 <= n <= 1,000,000 Sum of phi(d), for all d|n = n. This result can be used to find phi(n) using a sieve. Time: 1 sec """ def solution(limit: int = 1_000_000) -> int: """ Returns an integer, the solution to the problem >>> solution(10) 31 >>> solution(100) 3043 >>> solution(1_000) 304191 """ phi = [i - 1 for i in range(limit + 1)] for i in range(2, limit + 1): if phi[i] == i - 1: for j in range(2 * i, limit + 1, i): phi[j] -= phi[j] // i return sum(phi[2 : limit + 1]) if __name__ == "__main__": print(solution())
-1
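The sieve in the Project Euler 72 solution above can be sanity-checked against the d ≤ 8 case quoted in the problem statement (21 reduced proper fractions). The snippet below is only an illustration of that check, not part of the original file:

```py
# Run the same phi sieve as solution() for limit = 8 and compare with the
# 21 fractions listed in the problem statement.
limit = 8
phi = [i - 1 for i in range(limit + 1)]
for i in range(2, limit + 1):
    if phi[i] == i - 1:  # i is prime, so subtract the 1/i share from its multiples
        for j in range(2 * i, limit + 1, i):
            phi[j] -= phi[j] // i
print(phi[2:])       # [1, 2, 2, 4, 2, 6, 4]  ->  phi(2) .. phi(8)
print(sum(phi[2:]))  # 21, matching the reduced proper fractions for d <= 8
```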
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
""" The stock span problem is a financial problem where we have a series of n daily price quotes for a stock and we need to calculate span of stock's price for all n days. The span Si of the stock's price on a given day i is defined as the maximum number of consecutive days just before the given day, for which the price of the stock on the current day is less than or equal to its price on the given day. """ def calculation_span(price, s): n = len(price) # Create a stack and push index of fist element to it st = [] st.append(0) # Span value of first element is always 1 s[0] = 1 # Calculate span values for rest of the elements for i in range(1, n): # Pop elements from stack while stack is not # empty and top of stack is smaller than price[i] while len(st) > 0 and price[st[0]] <= price[i]: st.pop() # If stack becomes empty, then price[i] is greater # than all elements on left of it, i.e. price[0], # price[1], ..price[i-1]. Else the price[i] is # greater than elements after top of stack s[i] = i + 1 if len(st) <= 0 else (i - st[0]) # Push this element to stack st.append(i) # A utility function to print elements of array def print_array(arr, n): for i in range(0, n): print(arr[i], end=" ") # Driver program to test above function price = [10, 4, 5, 90, 120, 80] S = [0 for i in range(len(price) + 1)] # Fill the span values in array S[] calculation_span(price, S) # Print the calculated span values print_array(S, len(price))
""" The stock span problem is a financial problem where we have a series of n daily price quotes for a stock and we need to calculate span of stock's price for all n days. The span Si of the stock's price on a given day i is defined as the maximum number of consecutive days just before the given day, for which the price of the stock on the current day is less than or equal to its price on the given day. """ def calculation_span(price, s): n = len(price) # Create a stack and push index of fist element to it st = [] st.append(0) # Span value of first element is always 1 s[0] = 1 # Calculate span values for rest of the elements for i in range(1, n): # Pop elements from stack while stack is not # empty and top of stack is smaller than price[i] while len(st) > 0 and price[st[0]] <= price[i]: st.pop() # If stack becomes empty, then price[i] is greater # than all elements on left of it, i.e. price[0], # price[1], ..price[i-1]. Else the price[i] is # greater than elements after top of stack s[i] = i + 1 if len(st) <= 0 else (i - st[0]) # Push this element to stack st.append(i) # A utility function to print elements of array def print_array(arr, n): for i in range(0, n): print(arr[i], end=" ") # Driver program to test above function price = [10, 4, 5, 90, 120, 80] S = [0 for i in range(len(price) + 1)] # Fill the span values in array S[] calculation_span(price, S) # Print the calculated span values print_array(S, len(price))
-1
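The stock-span idea in the record above is often written against the top of the index stack. A short, self-contained sketch of that formulation — an illustration only, not taken from the PR — is:

```py
# Self-contained stock-span sketch that compares against the top of the index
# stack (st[-1]); for the driver prices used above it yields the span sequence
# [1, 1, 2, 4, 5, 1].
def stock_spans(prices: list[int]) -> list[int]:
    spans = [0] * len(prices)
    st: list[int] = []  # stack of indices of days that still bound a span
    for i, price in enumerate(prices):
        # Pop indices whose price is less than or equal to the current price
        while st and prices[st[-1]] <= price:
            st.pop()
        spans[i] = i + 1 if not st else i - st[-1]
        st.append(i)
    return spans


print(stock_spans([10, 4, 5, 90, 120, 80]))  # [1, 1, 2, 4, 5, 1]
```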
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
""" Conway's Game of Life implemented in Python. https://en.wikipedia.org/wiki/Conway%27s_Game_of_Life """ from __future__ import annotations from PIL import Image # Define glider example GLIDER = [ [0, 1, 0, 0, 0, 0, 0, 0], [0, 0, 1, 0, 0, 0, 0, 0], [1, 1, 1, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0], ] # Define blinker example BLINKER = [[0, 1, 0], [0, 1, 0], [0, 1, 0]] def new_generation(cells: list[list[int]]) -> list[list[int]]: """ Generates the next generation for a given state of Conway's Game of Life. >>> new_generation(BLINKER) [[0, 0, 0], [1, 1, 1], [0, 0, 0]] """ next_generation = [] for i in range(len(cells)): next_generation_row = [] for j in range(len(cells[i])): # Get the number of live neighbours neighbour_count = 0 if i > 0 and j > 0: neighbour_count += cells[i - 1][j - 1] if i > 0: neighbour_count += cells[i - 1][j] if i > 0 and j < len(cells[i]) - 1: neighbour_count += cells[i - 1][j + 1] if j > 0: neighbour_count += cells[i][j - 1] if j < len(cells[i]) - 1: neighbour_count += cells[i][j + 1] if i < len(cells) - 1 and j > 0: neighbour_count += cells[i + 1][j - 1] if i < len(cells) - 1: neighbour_count += cells[i + 1][j] if i < len(cells) - 1 and j < len(cells[i]) - 1: neighbour_count += cells[i + 1][j + 1] # Rules of the game of life (excerpt from Wikipedia): # 1. Any live cell with two or three live neighbours survives. # 2. Any dead cell with three live neighbours becomes a live cell. # 3. All other live cells die in the next generation. # Similarly, all other dead cells stay dead. alive = cells[i][j] == 1 if ( (alive and 2 <= neighbour_count <= 3) or not alive and neighbour_count == 3 ): next_generation_row.append(1) else: next_generation_row.append(0) next_generation.append(next_generation_row) return next_generation def generate_images(cells: list[list[int]], frames: int) -> list[Image.Image]: """ Generates a list of images of subsequent Game of Life states. """ images = [] for _ in range(frames): # Create output image img = Image.new("RGB", (len(cells[0]), len(cells))) pixels = img.load() # Save cells to image for x in range(len(cells)): for y in range(len(cells[0])): colour = 255 - cells[y][x] * 255 pixels[x, y] = (colour, colour, colour) # Save image images.append(img) cells = new_generation(cells) return images if __name__ == "__main__": images = generate_images(GLIDER, 16) images[0].save("out.gif", save_all=True, append_images=images[1:])
""" Conway's Game of Life implemented in Python. https://en.wikipedia.org/wiki/Conway%27s_Game_of_Life """ from __future__ import annotations from PIL import Image # Define glider example GLIDER = [ [0, 1, 0, 0, 0, 0, 0, 0], [0, 0, 1, 0, 0, 0, 0, 0], [1, 1, 1, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0], ] # Define blinker example BLINKER = [[0, 1, 0], [0, 1, 0], [0, 1, 0]] def new_generation(cells: list[list[int]]) -> list[list[int]]: """ Generates the next generation for a given state of Conway's Game of Life. >>> new_generation(BLINKER) [[0, 0, 0], [1, 1, 1], [0, 0, 0]] """ next_generation = [] for i in range(len(cells)): next_generation_row = [] for j in range(len(cells[i])): # Get the number of live neighbours neighbour_count = 0 if i > 0 and j > 0: neighbour_count += cells[i - 1][j - 1] if i > 0: neighbour_count += cells[i - 1][j] if i > 0 and j < len(cells[i]) - 1: neighbour_count += cells[i - 1][j + 1] if j > 0: neighbour_count += cells[i][j - 1] if j < len(cells[i]) - 1: neighbour_count += cells[i][j + 1] if i < len(cells) - 1 and j > 0: neighbour_count += cells[i + 1][j - 1] if i < len(cells) - 1: neighbour_count += cells[i + 1][j] if i < len(cells) - 1 and j < len(cells[i]) - 1: neighbour_count += cells[i + 1][j + 1] # Rules of the game of life (excerpt from Wikipedia): # 1. Any live cell with two or three live neighbours survives. # 2. Any dead cell with three live neighbours becomes a live cell. # 3. All other live cells die in the next generation. # Similarly, all other dead cells stay dead. alive = cells[i][j] == 1 if ( (alive and 2 <= neighbour_count <= 3) or not alive and neighbour_count == 3 ): next_generation_row.append(1) else: next_generation_row.append(0) next_generation.append(next_generation_row) return next_generation def generate_images(cells: list[list[int]], frames: int) -> list[Image.Image]: """ Generates a list of images of subsequent Game of Life states. """ images = [] for _ in range(frames): # Create output image img = Image.new("RGB", (len(cells[0]), len(cells))) pixels = img.load() # Save cells to image for x in range(len(cells)): for y in range(len(cells[0])): colour = 255 - cells[y][x] * 255 pixels[x, y] = (colour, colour, colour) # Save image images.append(img) cells = new_generation(cells) return images if __name__ == "__main__": images = generate_images(GLIDER, 16) images[0].save("out.gif", save_all=True, append_images=images[1:])
-1
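The blinker defined in the Game of Life record above oscillates with period two, which gives a quick check of `new_generation` beyond the single doctest. A hedged illustration, assuming `new_generation` and `BLINKER` from that file are in scope:

```py
# Apply new_generation twice to the vertical blinker; it should return to its
# starting configuration (a period-2 oscillator).
step1 = new_generation(BLINKER)  # [[0, 0, 0], [1, 1, 1], [0, 0, 0]]
step2 = new_generation(step1)
print(step2 == BLINKER)          # True
```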
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
-1
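The modulo-to-f-string conversion described in the PR above applies equally to format specifiers and multiple values. A small illustration (these exact lines are not from the PR diff):

```py
name, score = "Ada", 0.91234
print("User %s scored %.2f%%" % (name, score * 100))  # old modulo formatting
print(f"User {name} scored {score * 100:.2f}%")       # equivalent f-string
```

Both lines print `User Ada scored 91.23%`; note that the f-string no longer needs `%%` to escape a literal percent sign.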
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
""" Project Euler Problem 38: https://projecteuler.net/problem=38 Take the number 192 and multiply it by each of 1, 2, and 3: 192 × 1 = 192 192 × 2 = 384 192 × 3 = 576 By concatenating each product we get the 1 to 9 pandigital, 192384576. We will call 192384576 the concatenated product of 192 and (1,2,3) The same can be achieved by starting with 9 and multiplying by 1, 2, 3, 4, and 5, giving the pandigital, 918273645, which is the concatenated product of 9 and (1,2,3,4,5). What is the largest 1 to 9 pandigital 9-digit number that can be formed as the concatenated product of an integer with (1,2, ... , n) where n > 1? Solution: Since n>1, the largest candidate for the solution will be a concactenation of a 4-digit number and its double, a 5-digit number. Let a be the 4-digit number. a has 4 digits => 1000 <= a < 10000 2a has 5 digits => 10000 <= 2a < 100000 => 5000 <= a < 10000 The concatenation of a with 2a = a * 10^5 + 2a so our candidate for a given a is 100002 * a. We iterate through the search space 5000 <= a < 10000 in reverse order, calculating the candidates for each a and checking if they are 1-9 pandigital. In case there are no 4-digit numbers that satisfy this property, we check the 3-digit numbers with a similar formula (the example a=192 gives a lower bound on the length of a): a has 3 digits, etc... => 100 <= a < 334, candidate = a * 10^6 + 2a * 10^3 + 3a = 1002003 * a """ from __future__ import annotations def is_9_pandigital(n: int) -> bool: """ Checks whether n is a 9-digit 1 to 9 pandigital number. >>> is_9_pandigital(12345) False >>> is_9_pandigital(156284973) True >>> is_9_pandigital(1562849733) False """ s = str(n) return len(s) == 9 and set(s) == set("123456789") def solution() -> int | None: """ Return the largest 1 to 9 pandigital 9-digital number that can be formed as the concatenated product of an integer with (1,2,...,n) where n > 1. """ for base_num in range(9999, 4999, -1): candidate = 100002 * base_num if is_9_pandigital(candidate): return candidate for base_num in range(333, 99, -1): candidate = 1002003 * base_num if is_9_pandigital(candidate): return candidate return None if __name__ == "__main__": print(f"{solution() = }")
""" Project Euler Problem 38: https://projecteuler.net/problem=38 Take the number 192 and multiply it by each of 1, 2, and 3: 192 × 1 = 192 192 × 2 = 384 192 × 3 = 576 By concatenating each product we get the 1 to 9 pandigital, 192384576. We will call 192384576 the concatenated product of 192 and (1,2,3) The same can be achieved by starting with 9 and multiplying by 1, 2, 3, 4, and 5, giving the pandigital, 918273645, which is the concatenated product of 9 and (1,2,3,4,5). What is the largest 1 to 9 pandigital 9-digit number that can be formed as the concatenated product of an integer with (1,2, ... , n) where n > 1? Solution: Since n>1, the largest candidate for the solution will be a concactenation of a 4-digit number and its double, a 5-digit number. Let a be the 4-digit number. a has 4 digits => 1000 <= a < 10000 2a has 5 digits => 10000 <= 2a < 100000 => 5000 <= a < 10000 The concatenation of a with 2a = a * 10^5 + 2a so our candidate for a given a is 100002 * a. We iterate through the search space 5000 <= a < 10000 in reverse order, calculating the candidates for each a and checking if they are 1-9 pandigital. In case there are no 4-digit numbers that satisfy this property, we check the 3-digit numbers with a similar formula (the example a=192 gives a lower bound on the length of a): a has 3 digits, etc... => 100 <= a < 334, candidate = a * 10^6 + 2a * 10^3 + 3a = 1002003 * a """ from __future__ import annotations def is_9_pandigital(n: int) -> bool: """ Checks whether n is a 9-digit 1 to 9 pandigital number. >>> is_9_pandigital(12345) False >>> is_9_pandigital(156284973) True >>> is_9_pandigital(1562849733) False """ s = str(n) return len(s) == 9 and set(s) == set("123456789") def solution() -> int | None: """ Return the largest 1 to 9 pandigital 9-digital number that can be formed as the concatenated product of an integer with (1,2,...,n) where n > 1. """ for base_num in range(9999, 4999, -1): candidate = 100002 * base_num if is_9_pandigital(candidate): return candidate for base_num in range(333, 99, -1): candidate = 1002003 * base_num if is_9_pandigital(candidate): return candidate return None if __name__ == "__main__": print(f"{solution() = }")
-1
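The candidate formula in the Project Euler 38 write-up above can be checked against the a = 192 example from the problem statement. The snippet below is only a sanity check and assumes `is_9_pandigital` from that file is in scope:

```py
a = 192
candidate = 1002003 * a            # equals a * 10**6 + 2 * a * 10**3 + 3 * a
print(candidate)                   # 192384576, the concatenated product of 192 and (1, 2, 3)
print(is_9_pandigital(candidate))  # True
```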
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
-1
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
from __future__ import annotations from typing import Generic, TypeVar T = TypeVar("T") class StackOverflowError(BaseException): pass class StackUnderflowError(BaseException): pass class Stack(Generic[T]): """A stack is an abstract data type that serves as a collection of elements with two principal operations: push() and pop(). push() adds an element to the top of the stack, and pop() removes an element from the top of a stack. The order in which elements come off of a stack are Last In, First Out (LIFO). https://en.wikipedia.org/wiki/Stack_(abstract_data_type) """ def __init__(self, limit: int = 10): self.stack: list[T] = [] self.limit = limit def __bool__(self) -> bool: return bool(self.stack) def __str__(self) -> str: return str(self.stack) def push(self, data: T) -> None: """Push an element to the top of the stack.""" if len(self.stack) >= self.limit: raise StackOverflowError self.stack.append(data) def pop(self) -> T: """ Pop an element off of the top of the stack. >>> Stack().pop() Traceback (most recent call last): ... data_structures.stacks.stack.StackUnderflowError """ if not self.stack: raise StackUnderflowError return self.stack.pop() def peek(self) -> T: """ Peek at the top-most element of the stack. >>> Stack().pop() Traceback (most recent call last): ... data_structures.stacks.stack.StackUnderflowError """ if not self.stack: raise StackUnderflowError return self.stack[-1] def is_empty(self) -> bool: """Check if a stack is empty.""" return not bool(self.stack) def is_full(self) -> bool: return self.size() == self.limit def size(self) -> int: """Return the size of the stack.""" return len(self.stack) def __contains__(self, item: T) -> bool: """Check if item is in stack""" return item in self.stack def test_stack() -> None: """ >>> test_stack() """ stack: Stack[int] = Stack(10) assert bool(stack) is False assert stack.is_empty() is True assert stack.is_full() is False assert str(stack) == "[]" try: _ = stack.pop() raise AssertionError() # This should not happen except StackUnderflowError: assert True # This should happen try: _ = stack.peek() raise AssertionError() # This should not happen except StackUnderflowError: assert True # This should happen for i in range(10): assert stack.size() == i stack.push(i) assert bool(stack) assert not stack.is_empty() assert stack.is_full() assert str(stack) == str(list(range(10))) assert stack.pop() == 9 assert stack.peek() == 8 stack.push(100) assert str(stack) == str([0, 1, 2, 3, 4, 5, 6, 7, 8, 100]) try: stack.push(200) raise AssertionError() # This should not happen except StackOverflowError: assert True # This should happen assert not stack.is_empty() assert stack.size() == 10 assert 5 in stack assert 55 not in stack if __name__ == "__main__": test_stack()
from __future__ import annotations from typing import Generic, TypeVar T = TypeVar("T") class StackOverflowError(BaseException): pass class StackUnderflowError(BaseException): pass class Stack(Generic[T]): """A stack is an abstract data type that serves as a collection of elements with two principal operations: push() and pop(). push() adds an element to the top of the stack, and pop() removes an element from the top of a stack. The order in which elements come off of a stack are Last In, First Out (LIFO). https://en.wikipedia.org/wiki/Stack_(abstract_data_type) """ def __init__(self, limit: int = 10): self.stack: list[T] = [] self.limit = limit def __bool__(self) -> bool: return bool(self.stack) def __str__(self) -> str: return str(self.stack) def push(self, data: T) -> None: """Push an element to the top of the stack.""" if len(self.stack) >= self.limit: raise StackOverflowError self.stack.append(data) def pop(self) -> T: """ Pop an element off of the top of the stack. >>> Stack().pop() Traceback (most recent call last): ... data_structures.stacks.stack.StackUnderflowError """ if not self.stack: raise StackUnderflowError return self.stack.pop() def peek(self) -> T: """ Peek at the top-most element of the stack. >>> Stack().pop() Traceback (most recent call last): ... data_structures.stacks.stack.StackUnderflowError """ if not self.stack: raise StackUnderflowError return self.stack[-1] def is_empty(self) -> bool: """Check if a stack is empty.""" return not bool(self.stack) def is_full(self) -> bool: return self.size() == self.limit def size(self) -> int: """Return the size of the stack.""" return len(self.stack) def __contains__(self, item: T) -> bool: """Check if item is in stack""" return item in self.stack def test_stack() -> None: """ >>> test_stack() """ stack: Stack[int] = Stack(10) assert bool(stack) is False assert stack.is_empty() is True assert stack.is_full() is False assert str(stack) == "[]" try: _ = stack.pop() raise AssertionError() # This should not happen except StackUnderflowError: assert True # This should happen try: _ = stack.peek() raise AssertionError() # This should not happen except StackUnderflowError: assert True # This should happen for i in range(10): assert stack.size() == i stack.push(i) assert bool(stack) assert not stack.is_empty() assert stack.is_full() assert str(stack) == str(list(range(10))) assert stack.pop() == 9 assert stack.peek() == 8 stack.push(100) assert str(stack) == str([0, 1, 2, 3, 4, 5, 6, 7, 8, 100]) try: stack.push(200) raise AssertionError() # This should not happen except StackOverflowError: assert True # This should happen assert not stack.is_empty() assert stack.size() == 10 assert 5 in stack assert 55 not in stack if __name__ == "__main__": test_stack()
-1
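Beyond the assertions in `test_stack`, the generic `Stack` in the record above supports a typed usage pattern. A brief, hypothetical example (assuming the class is in scope):

```py
stack: Stack[str] = Stack(limit=3)
stack.push("a")
stack.push("b")
print(stack.peek())  # b     (top element, returned without removing it)
print(stack.pop())   # b     (now removed from the stack)
print("a" in stack)  # True  (membership via __contains__)
```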
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
-1
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
-1
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
# flake8: noqa """ Binomial Heap Reference: Advanced Data Structures, Peter Brass """ class Node: """ Node in a doubly-linked binomial tree, containing: - value - size of left subtree - link to left, right and parent nodes """ def __init__(self, val): self.val = val # Number of nodes in left subtree self.left_tree_size = 0 self.left = None self.right = None self.parent = None def mergeTrees(self, other): """ In-place merge of two binomial trees of equal size. Returns the root of the resulting tree """ assert self.left_tree_size == other.left_tree_size, "Unequal Sizes of Blocks" if self.val < other.val: other.left = self.right other.parent = None if self.right: self.right.parent = other self.right = other self.left_tree_size = self.left_tree_size * 2 + 1 return self else: self.left = other.right self.parent = None if other.right: other.right.parent = self other.right = self other.left_tree_size = other.left_tree_size * 2 + 1 return other class BinomialHeap: r""" Min-oriented priority queue implemented with the Binomial Heap data structure implemented with the BinomialHeap class. It supports: - Insert element in a heap with n elements: Guaranteed logn, amoratized 1 - Merge (meld) heaps of size m and n: O(logn + logm) - Delete Min: O(logn) - Peek (return min without deleting it): O(1) Example: Create a random permutation of 30 integers to be inserted and 19 of them deleted >>> import numpy as np >>> permutation = np.random.permutation(list(range(30))) Create a Heap and insert the 30 integers __init__() test >>> first_heap = BinomialHeap() 30 inserts - insert() test >>> for number in permutation: ... first_heap.insert(number) Size test >>> print(first_heap.size) 30 Deleting - delete() test >>> for i in range(25): ... print(first_heap.deleteMin(), end=" ") 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 Create a new Heap >>> second_heap = BinomialHeap() >>> vals = [17, 20, 31, 34] >>> for value in vals: ... second_heap.insert(value) The heap should have the following structure: 17 / \ # 31 / \ 20 34 / \ / \ # # # # preOrder() test >>> print(second_heap.preOrder()) [(17, 0), ('#', 1), (31, 1), (20, 2), ('#', 3), ('#', 3), (34, 2), ('#', 3), ('#', 3)] printing Heap - __str__() test >>> print(second_heap) 17 -# -31 --20 ---# ---# --34 ---# ---# mergeHeaps() test >>> merged = second_heap.mergeHeaps(first_heap) >>> merged.peek() 17 values in merged heap; (merge is inplace) >>> while not first_heap.isEmpty(): ... print(first_heap.deleteMin(), end=" ") 17 20 25 26 27 28 29 31 34 """ def __init__(self, bottom_root=None, min_node=None, heap_size=0): self.size = heap_size self.bottom_root = bottom_root self.min_node = min_node def mergeHeaps(self, other): """ In-place merge of two binomial heaps. 
Both of them become the resulting merged heap """ # Empty heaps corner cases if other.size == 0: return if self.size == 0: self.size = other.size self.bottom_root = other.bottom_root self.min_node = other.min_node return # Update size self.size = self.size + other.size # Update min.node if self.min_node.val > other.min_node.val: self.min_node = other.min_node # Merge # Order roots by left_subtree_size combined_roots_list = [] i, j = self.bottom_root, other.bottom_root while i or j: if i and ((not j) or i.left_tree_size < j.left_tree_size): combined_roots_list.append((i, True)) i = i.parent else: combined_roots_list.append((j, False)) j = j.parent # Insert links between them for i in range(len(combined_roots_list) - 1): if combined_roots_list[i][1] != combined_roots_list[i + 1][1]: combined_roots_list[i][0].parent = combined_roots_list[i + 1][0] combined_roots_list[i + 1][0].left = combined_roots_list[i][0] # Consecutively merge roots with same left_tree_size i = combined_roots_list[0][0] while i.parent: if ( (i.left_tree_size == i.parent.left_tree_size) and (not i.parent.parent) ) or ( i.left_tree_size == i.parent.left_tree_size and i.left_tree_size != i.parent.parent.left_tree_size ): # Neighbouring Nodes previous_node = i.left next_node = i.parent.parent # Merging trees i = i.mergeTrees(i.parent) # Updating links i.left = previous_node i.parent = next_node if previous_node: previous_node.parent = i if next_node: next_node.left = i else: i = i.parent # Updating self.bottom_root while i.left: i = i.left self.bottom_root = i # Update other other.size = self.size other.bottom_root = self.bottom_root other.min_node = self.min_node # Return the merged heap return self def insert(self, val): """ insert a value in the heap """ if self.size == 0: self.bottom_root = Node(val) self.size = 1 self.min_node = self.bottom_root else: # Create new node new_node = Node(val) # Update size self.size += 1 # update min_node if val < self.min_node.val: self.min_node = new_node # Put new_node as a bottom_root in heap self.bottom_root.left = new_node new_node.parent = self.bottom_root self.bottom_root = new_node # Consecutively merge roots with same left_tree_size while ( self.bottom_root.parent and self.bottom_root.left_tree_size == self.bottom_root.parent.left_tree_size ): # Next node next_node = self.bottom_root.parent.parent # Merge self.bottom_root = self.bottom_root.mergeTrees(self.bottom_root.parent) # Update Links self.bottom_root.parent = next_node self.bottom_root.left = None if next_node: next_node.left = self.bottom_root def peek(self): """ return min element without deleting it """ return self.min_node.val def isEmpty(self): return self.size == 0 def deleteMin(self): """ delete min element and return it """ # assert not self.isEmpty(), "Empty Heap" # Save minimal value min_value = self.min_node.val # Last element in heap corner case if self.size == 1: # Update size self.size = 0 # Update bottom root self.bottom_root = None # Update min_node self.min_node = None return min_value # No right subtree corner case # The structure of the tree implies that this should be the bottom root # and there is at least one other root if self.min_node.right is None: # Update size self.size -= 1 # Update bottom root self.bottom_root = self.bottom_root.parent self.bottom_root.left = None # Update min_node self.min_node = self.bottom_root i = self.bottom_root.parent while i: if i.val < self.min_node.val: self.min_node = i i = i.parent return min_value # General case # Find the BinomialHeap of the right subtree of 
min_node bottom_of_new = self.min_node.right bottom_of_new.parent = None min_of_new = bottom_of_new size_of_new = 1 # Size, min_node and bottom_root while bottom_of_new.left: size_of_new = size_of_new * 2 + 1 bottom_of_new = bottom_of_new.left if bottom_of_new.val < min_of_new.val: min_of_new = bottom_of_new # Corner case of single root on top left path if (not self.min_node.left) and (not self.min_node.parent): self.size = size_of_new self.bottom_root = bottom_of_new self.min_node = min_of_new # print("Single root, multiple nodes case") return min_value # Remaining cases # Construct heap of right subtree newHeap = BinomialHeap( bottom_root=bottom_of_new, min_node=min_of_new, heap_size=size_of_new ) # Update size self.size = self.size - 1 - size_of_new # Neighbour nodes previous_node = self.min_node.left next_node = self.min_node.parent # Initialize new bottom_root and min_node self.min_node = previous_node or next_node self.bottom_root = next_node # Update links of previous_node and search below for new min_node and # bottom_root if previous_node: previous_node.parent = next_node # Update bottom_root and search for min_node below self.bottom_root = previous_node self.min_node = previous_node while self.bottom_root.left: self.bottom_root = self.bottom_root.left if self.bottom_root.val < self.min_node.val: self.min_node = self.bottom_root if next_node: next_node.left = previous_node # Search for new min_node above min_node i = next_node while i: if i.val < self.min_node.val: self.min_node = i i = i.parent # Merge heaps self.mergeHeaps(newHeap) return min_value def preOrder(self): """ Returns the Pre-order representation of the heap including values of nodes plus their level distance from the root; Empty nodes appear as # """ # Find top root top_root = self.bottom_root while top_root.parent: top_root = top_root.parent # preorder heap_preOrder = [] self.__traversal(top_root, heap_preOrder) return heap_preOrder def __traversal(self, curr_node, preorder, level=0): """ Pre-order traversal of nodes """ if curr_node: preorder.append((curr_node.val, level)) self.__traversal(curr_node.left, preorder, level + 1) self.__traversal(curr_node.right, preorder, level + 1) else: preorder.append(("#", level)) def __str__(self): """ Overwriting str for a pre-order print of nodes in heap; Performance is poor, so use only for small examples """ if self.isEmpty(): return "" preorder_heap = self.preOrder() return "\n".join(("-" * level + str(value)) for value, level in preorder_heap) # Unit Tests if __name__ == "__main__": import doctest doctest.testmod()
# flake8: noqa """ Binomial Heap Reference: Advanced Data Structures, Peter Brass """ class Node: """ Node in a doubly-linked binomial tree, containing: - value - size of left subtree - link to left, right and parent nodes """ def __init__(self, val): self.val = val # Number of nodes in left subtree self.left_tree_size = 0 self.left = None self.right = None self.parent = None def mergeTrees(self, other): """ In-place merge of two binomial trees of equal size. Returns the root of the resulting tree """ assert self.left_tree_size == other.left_tree_size, "Unequal Sizes of Blocks" if self.val < other.val: other.left = self.right other.parent = None if self.right: self.right.parent = other self.right = other self.left_tree_size = self.left_tree_size * 2 + 1 return self else: self.left = other.right self.parent = None if other.right: other.right.parent = self other.right = self other.left_tree_size = other.left_tree_size * 2 + 1 return other class BinomialHeap: r""" Min-oriented priority queue implemented with the Binomial Heap data structure implemented with the BinomialHeap class. It supports: - Insert element in a heap with n elements: Guaranteed logn, amoratized 1 - Merge (meld) heaps of size m and n: O(logn + logm) - Delete Min: O(logn) - Peek (return min without deleting it): O(1) Example: Create a random permutation of 30 integers to be inserted and 19 of them deleted >>> import numpy as np >>> permutation = np.random.permutation(list(range(30))) Create a Heap and insert the 30 integers __init__() test >>> first_heap = BinomialHeap() 30 inserts - insert() test >>> for number in permutation: ... first_heap.insert(number) Size test >>> print(first_heap.size) 30 Deleting - delete() test >>> for i in range(25): ... print(first_heap.deleteMin(), end=" ") 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 Create a new Heap >>> second_heap = BinomialHeap() >>> vals = [17, 20, 31, 34] >>> for value in vals: ... second_heap.insert(value) The heap should have the following structure: 17 / \ # 31 / \ 20 34 / \ / \ # # # # preOrder() test >>> print(second_heap.preOrder()) [(17, 0), ('#', 1), (31, 1), (20, 2), ('#', 3), ('#', 3), (34, 2), ('#', 3), ('#', 3)] printing Heap - __str__() test >>> print(second_heap) 17 -# -31 --20 ---# ---# --34 ---# ---# mergeHeaps() test >>> merged = second_heap.mergeHeaps(first_heap) >>> merged.peek() 17 values in merged heap; (merge is inplace) >>> while not first_heap.isEmpty(): ... print(first_heap.deleteMin(), end=" ") 17 20 25 26 27 28 29 31 34 """ def __init__(self, bottom_root=None, min_node=None, heap_size=0): self.size = heap_size self.bottom_root = bottom_root self.min_node = min_node def mergeHeaps(self, other): """ In-place merge of two binomial heaps. 
Both of them become the resulting merged heap """ # Empty heaps corner cases if other.size == 0: return if self.size == 0: self.size = other.size self.bottom_root = other.bottom_root self.min_node = other.min_node return # Update size self.size = self.size + other.size # Update min.node if self.min_node.val > other.min_node.val: self.min_node = other.min_node # Merge # Order roots by left_subtree_size combined_roots_list = [] i, j = self.bottom_root, other.bottom_root while i or j: if i and ((not j) or i.left_tree_size < j.left_tree_size): combined_roots_list.append((i, True)) i = i.parent else: combined_roots_list.append((j, False)) j = j.parent # Insert links between them for i in range(len(combined_roots_list) - 1): if combined_roots_list[i][1] != combined_roots_list[i + 1][1]: combined_roots_list[i][0].parent = combined_roots_list[i + 1][0] combined_roots_list[i + 1][0].left = combined_roots_list[i][0] # Consecutively merge roots with same left_tree_size i = combined_roots_list[0][0] while i.parent: if ( (i.left_tree_size == i.parent.left_tree_size) and (not i.parent.parent) ) or ( i.left_tree_size == i.parent.left_tree_size and i.left_tree_size != i.parent.parent.left_tree_size ): # Neighbouring Nodes previous_node = i.left next_node = i.parent.parent # Merging trees i = i.mergeTrees(i.parent) # Updating links i.left = previous_node i.parent = next_node if previous_node: previous_node.parent = i if next_node: next_node.left = i else: i = i.parent # Updating self.bottom_root while i.left: i = i.left self.bottom_root = i # Update other other.size = self.size other.bottom_root = self.bottom_root other.min_node = self.min_node # Return the merged heap return self def insert(self, val): """ insert a value in the heap """ if self.size == 0: self.bottom_root = Node(val) self.size = 1 self.min_node = self.bottom_root else: # Create new node new_node = Node(val) # Update size self.size += 1 # update min_node if val < self.min_node.val: self.min_node = new_node # Put new_node as a bottom_root in heap self.bottom_root.left = new_node new_node.parent = self.bottom_root self.bottom_root = new_node # Consecutively merge roots with same left_tree_size while ( self.bottom_root.parent and self.bottom_root.left_tree_size == self.bottom_root.parent.left_tree_size ): # Next node next_node = self.bottom_root.parent.parent # Merge self.bottom_root = self.bottom_root.mergeTrees(self.bottom_root.parent) # Update Links self.bottom_root.parent = next_node self.bottom_root.left = None if next_node: next_node.left = self.bottom_root def peek(self): """ return min element without deleting it """ return self.min_node.val def isEmpty(self): return self.size == 0 def deleteMin(self): """ delete min element and return it """ # assert not self.isEmpty(), "Empty Heap" # Save minimal value min_value = self.min_node.val # Last element in heap corner case if self.size == 1: # Update size self.size = 0 # Update bottom root self.bottom_root = None # Update min_node self.min_node = None return min_value # No right subtree corner case # The structure of the tree implies that this should be the bottom root # and there is at least one other root if self.min_node.right is None: # Update size self.size -= 1 # Update bottom root self.bottom_root = self.bottom_root.parent self.bottom_root.left = None # Update min_node self.min_node = self.bottom_root i = self.bottom_root.parent while i: if i.val < self.min_node.val: self.min_node = i i = i.parent return min_value # General case # Find the BinomialHeap of the right subtree of 
min_node bottom_of_new = self.min_node.right bottom_of_new.parent = None min_of_new = bottom_of_new size_of_new = 1 # Size, min_node and bottom_root while bottom_of_new.left: size_of_new = size_of_new * 2 + 1 bottom_of_new = bottom_of_new.left if bottom_of_new.val < min_of_new.val: min_of_new = bottom_of_new # Corner case of single root on top left path if (not self.min_node.left) and (not self.min_node.parent): self.size = size_of_new self.bottom_root = bottom_of_new self.min_node = min_of_new # print("Single root, multiple nodes case") return min_value # Remaining cases # Construct heap of right subtree newHeap = BinomialHeap( bottom_root=bottom_of_new, min_node=min_of_new, heap_size=size_of_new ) # Update size self.size = self.size - 1 - size_of_new # Neighbour nodes previous_node = self.min_node.left next_node = self.min_node.parent # Initialize new bottom_root and min_node self.min_node = previous_node or next_node self.bottom_root = next_node # Update links of previous_node and search below for new min_node and # bottom_root if previous_node: previous_node.parent = next_node # Update bottom_root and search for min_node below self.bottom_root = previous_node self.min_node = previous_node while self.bottom_root.left: self.bottom_root = self.bottom_root.left if self.bottom_root.val < self.min_node.val: self.min_node = self.bottom_root if next_node: next_node.left = previous_node # Search for new min_node above min_node i = next_node while i: if i.val < self.min_node.val: self.min_node = i i = i.parent # Merge heaps self.mergeHeaps(newHeap) return min_value def preOrder(self): """ Returns the Pre-order representation of the heap including values of nodes plus their level distance from the root; Empty nodes appear as # """ # Find top root top_root = self.bottom_root while top_root.parent: top_root = top_root.parent # preorder heap_preOrder = [] self.__traversal(top_root, heap_preOrder) return heap_preOrder def __traversal(self, curr_node, preorder, level=0): """ Pre-order traversal of nodes """ if curr_node: preorder.append((curr_node.val, level)) self.__traversal(curr_node.left, preorder, level + 1) self.__traversal(curr_node.right, preorder, level + 1) else: preorder.append(("#", level)) def __str__(self): """ Overwriting str for a pre-order print of nodes in heap; Performance is poor, so use only for small examples """ if self.isEmpty(): return "" preorder_heap = self.preOrder() return "\n".join(("-" * level + str(value)) for value, level in preorder_heap) # Unit Tests if __name__ == "__main__": import doctest doctest.testmod()
-1
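The before_content and after_content fields of the record above both carry the repository's binomial heap module. The sketch below is a minimal usage example, not part of the record: it assumes the BinomialHeap class from the record has been saved as binomial_heap.py (a hypothetical filename), and the values in the comments follow from the class's own doctests.

```py
# Minimal sketch, assuming the BinomialHeap class from the record above has
# been saved as binomial_heap.py (hypothetical filename).
from binomial_heap import BinomialHeap

heap = BinomialHeap()
for value in (17, 20, 31, 34):
    heap.insert(value)            # guaranteed O(log n), amortized O(1)

print(heap.peek())                # 17 -- minimum, not removed
print(heap.size)                  # 4

other = BinomialHeap()
for value in (5, 40):
    other.insert(value)

heap.mergeHeaps(other)            # in-place meld, O(log n + log m)
while not heap.isEmpty():
    print(heap.deleteMin(), end=" ")   # 5 17 20 31 34 40
```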
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
""" https://en.wikipedia.org/wiki/Atbash """ import string def atbash_slow(sequence: str) -> str: """ >>> atbash_slow("ABCDEFG") 'ZYXWVUT' >>> atbash_slow("aW;;123BX") 'zD;;123YC' """ output = "" for i in sequence: extract = ord(i) if 65 <= extract <= 90: output += chr(155 - extract) elif 97 <= extract <= 122: output += chr(219 - extract) else: output += i return output def atbash(sequence: str) -> str: """ >>> atbash("ABCDEFG") 'ZYXWVUT' >>> atbash("aW;;123BX") 'zD;;123YC' """ letters = string.ascii_letters letters_reversed = string.ascii_lowercase[::-1] + string.ascii_uppercase[::-1] return "".join( letters_reversed[letters.index(c)] if c in letters else c for c in sequence ) def benchmark() -> None: """Let's benchmark them side-by-side...""" from timeit import timeit print("Running performance benchmarks...") print( "> atbash_slow()", timeit( "atbash_slow(printable)", setup="from string import printable ; from __main__ import atbash_slow", ), "seconds", ) print( "> atbash()", timeit( "atbash(printable)", setup="from string import printable ; from __main__ import atbash", ), "seconds", ) if __name__ == "__main__": for example in ("ABCDEFGH", "123GGjj", "testStringtest", "with space"): print(f"{example} encrypted in atbash: {atbash(example)}") benchmark()
""" https://en.wikipedia.org/wiki/Atbash """ import string def atbash_slow(sequence: str) -> str: """ >>> atbash_slow("ABCDEFG") 'ZYXWVUT' >>> atbash_slow("aW;;123BX") 'zD;;123YC' """ output = "" for i in sequence: extract = ord(i) if 65 <= extract <= 90: output += chr(155 - extract) elif 97 <= extract <= 122: output += chr(219 - extract) else: output += i return output def atbash(sequence: str) -> str: """ >>> atbash("ABCDEFG") 'ZYXWVUT' >>> atbash("aW;;123BX") 'zD;;123YC' """ letters = string.ascii_letters letters_reversed = string.ascii_lowercase[::-1] + string.ascii_uppercase[::-1] return "".join( letters_reversed[letters.index(c)] if c in letters else c for c in sequence ) def benchmark() -> None: """Let's benchmark them side-by-side...""" from timeit import timeit print("Running performance benchmarks...") print( "> atbash_slow()", timeit( "atbash_slow(printable)", setup="from string import printable ; from __main__ import atbash_slow", ), "seconds", ) print( "> atbash()", timeit( "atbash(printable)", setup="from string import printable ; from __main__ import atbash", ), "seconds", ) if __name__ == "__main__": for example in ("ABCDEFGH", "123GGjj", "testStringtest", "with space"): print(f"{example} encrypted in atbash: {atbash(example)}") benchmark()
-1
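This row of PR 7,339 records the Atbash cipher module unchanged (its before and after contents are identical). The snippet below is an independent sketch of the reversed-alphabet mapping that module implements, using str.translate rather than the record's atbash()/atbash_slow() functions; the first expected value matches the module's doctest.

```py
import string

# Build the reversed-alphabet table once and apply it with str.translate;
# characters outside the table (digits, punctuation) pass through unchanged.
letters = string.ascii_lowercase + string.ascii_uppercase
reversed_letters = string.ascii_lowercase[::-1] + string.ascii_uppercase[::-1]
ATBASH = str.maketrans(letters, reversed_letters)

def atbash_translate(text: str) -> str:
    return text.translate(ATBASH)

print(atbash_translate("ABCDEFG"))                       # ZYXWVUT, as in the record's doctest
print(atbash_translate(atbash_translate("aW;;123BX")))   # round-trips: Atbash is an involution
```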
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
-1
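This row of the same PR carries no file contents, only metadata and the description. Since the description's point is replacing %-style formatting with f-strings (flake8 pep3101), the check below simply confirms the two forms in its own example are equivalent; the name value is a placeholder for illustration, not taken from the PR.

```py
# The substitution described in the PR, on its own "Hello %s" example.
name = "Alice"  # placeholder value, not from the PR

old_style = "Hello %s" % name
f_string = f"Hello {name}"

assert old_style == f_string
print(old_style, "|", f_string)   # Hello Alice | Hello Alice
```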
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
import math def rearrange(bit_string_32): """[summary] Regroups the given binary string. Arguments: bitString32 {[string]} -- [32 bit binary] Raises: ValueError -- [if the given string not are 32 bit binary string] Returns: [string] -- [32 bit binary string] >>> rearrange('1234567890abcdfghijklmnopqrstuvw') 'pqrstuvwhijklmno90abcdfg12345678' """ if len(bit_string_32) != 32: raise ValueError("Need length 32") new_string = "" for i in [3, 2, 1, 0]: new_string += bit_string_32[8 * i : 8 * i + 8] return new_string def reformat_hex(i): """[summary] Converts the given integer into 8-digit hex number. Arguments: i {[int]} -- [integer] >>> reformat_hex(666) '9a020000' """ hexrep = format(i, "08x") thing = "" for i in [3, 2, 1, 0]: thing += hexrep[2 * i : 2 * i + 2] return thing def pad(bit_string): """[summary] Fills up the binary string to a 512 bit binary string Arguments: bitString {[string]} -- [binary string] Returns: [string] -- [binary string] """ start_length = len(bit_string) bit_string += "1" while len(bit_string) % 512 != 448: bit_string += "0" last_part = format(start_length, "064b") bit_string += rearrange(last_part[32:]) + rearrange(last_part[:32]) return bit_string def get_block(bit_string): """[summary] Iterator: Returns by each call a list of length 16 with the 32 bit integer blocks. Arguments: bit_string {[string]} -- [binary string >= 512] """ curr_pos = 0 while curr_pos < len(bit_string): curr_part = bit_string[curr_pos : curr_pos + 512] my_splits = [] for i in range(16): my_splits.append(int(rearrange(curr_part[32 * i : 32 * i + 32]), 2)) yield my_splits curr_pos += 512 def not32(i): """ >>> not32(34) 4294967261 """ i_str = format(i, "032b") new_str = "" for c in i_str: new_str += "1" if c == "0" else "0" return int(new_str, 2) def sum32(a, b): return (a + b) % 2**32 def leftrot32(i, s): return (i << s) ^ (i >> (32 - s)) def md5me(test_string): """[summary] Returns a 32-bit hash code of the string 'testString' Arguments: testString {[string]} -- [message] """ bs = "" for i in test_string: bs += format(ord(i), "08b") bs = pad(bs) tvals = [int(2**32 * abs(math.sin(i + 1))) for i in range(64)] a0 = 0x67452301 b0 = 0xEFCDAB89 c0 = 0x98BADCFE d0 = 0x10325476 s = [ 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21, ] for m in get_block(bs): a = a0 b = b0 c = c0 d = d0 for i in range(64): if i <= 15: # f = (B & C) | (not32(B) & D) f = d ^ (b & (c ^ d)) g = i elif i <= 31: # f = (D & B) | (not32(D) & C) f = c ^ (d & (b ^ c)) g = (5 * i + 1) % 16 elif i <= 47: f = b ^ c ^ d g = (3 * i + 5) % 16 else: f = c ^ (b | not32(d)) g = (7 * i) % 16 dtemp = d d = c c = b b = sum32(b, leftrot32((a + f + tvals[i] + m[g]) % 2**32, s[i])) a = dtemp a0 = sum32(a0, a) b0 = sum32(b0, b) c0 = sum32(c0, c) d0 = sum32(d0, d) digest = reformat_hex(a0) + reformat_hex(b0) + reformat_hex(c0) + reformat_hex(d0) return digest def test(): assert md5me("") == "d41d8cd98f00b204e9800998ecf8427e" assert ( md5me("The quick brown fox jumps over the lazy dog") == "9e107d9d372bb6826bd81d3542a419d6" ) print("Success.") if __name__ == "__main__": test() import doctest doctest.testmod()
import math def rearrange(bit_string_32): """[summary] Regroups the given binary string. Arguments: bitString32 {[string]} -- [32 bit binary] Raises: ValueError -- [if the given string not are 32 bit binary string] Returns: [string] -- [32 bit binary string] >>> rearrange('1234567890abcdfghijklmnopqrstuvw') 'pqrstuvwhijklmno90abcdfg12345678' """ if len(bit_string_32) != 32: raise ValueError("Need length 32") new_string = "" for i in [3, 2, 1, 0]: new_string += bit_string_32[8 * i : 8 * i + 8] return new_string def reformat_hex(i): """[summary] Converts the given integer into 8-digit hex number. Arguments: i {[int]} -- [integer] >>> reformat_hex(666) '9a020000' """ hexrep = format(i, "08x") thing = "" for i in [3, 2, 1, 0]: thing += hexrep[2 * i : 2 * i + 2] return thing def pad(bit_string): """[summary] Fills up the binary string to a 512 bit binary string Arguments: bitString {[string]} -- [binary string] Returns: [string] -- [binary string] """ start_length = len(bit_string) bit_string += "1" while len(bit_string) % 512 != 448: bit_string += "0" last_part = format(start_length, "064b") bit_string += rearrange(last_part[32:]) + rearrange(last_part[:32]) return bit_string def get_block(bit_string): """[summary] Iterator: Returns by each call a list of length 16 with the 32 bit integer blocks. Arguments: bit_string {[string]} -- [binary string >= 512] """ curr_pos = 0 while curr_pos < len(bit_string): curr_part = bit_string[curr_pos : curr_pos + 512] my_splits = [] for i in range(16): my_splits.append(int(rearrange(curr_part[32 * i : 32 * i + 32]), 2)) yield my_splits curr_pos += 512 def not32(i): """ >>> not32(34) 4294967261 """ i_str = format(i, "032b") new_str = "" for c in i_str: new_str += "1" if c == "0" else "0" return int(new_str, 2) def sum32(a, b): return (a + b) % 2**32 def leftrot32(i, s): return (i << s) ^ (i >> (32 - s)) def md5me(test_string): """[summary] Returns a 32-bit hash code of the string 'testString' Arguments: testString {[string]} -- [message] """ bs = "" for i in test_string: bs += format(ord(i), "08b") bs = pad(bs) tvals = [int(2**32 * abs(math.sin(i + 1))) for i in range(64)] a0 = 0x67452301 b0 = 0xEFCDAB89 c0 = 0x98BADCFE d0 = 0x10325476 s = [ 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21, ] for m in get_block(bs): a = a0 b = b0 c = c0 d = d0 for i in range(64): if i <= 15: # f = (B & C) | (not32(B) & D) f = d ^ (b & (c ^ d)) g = i elif i <= 31: # f = (D & B) | (not32(D) & C) f = c ^ (d & (b ^ c)) g = (5 * i + 1) % 16 elif i <= 47: f = b ^ c ^ d g = (3 * i + 5) % 16 else: f = c ^ (b | not32(d)) g = (7 * i) % 16 dtemp = d d = c c = b b = sum32(b, leftrot32((a + f + tvals[i] + m[g]) % 2**32, s[i])) a = dtemp a0 = sum32(a0, a) b0 = sum32(b0, b) c0 = sum32(c0, c) d0 = sum32(d0, d) digest = reformat_hex(a0) + reformat_hex(b0) + reformat_hex(c0) + reformat_hex(d0) return digest def test(): assert md5me("") == "d41d8cd98f00b204e9800998ecf8427e" assert ( md5me("The quick brown fox jumps over the lazy dog") == "9e107d9d372bb6826bd81d3542a419d6" ) print("Success.") if __name__ == "__main__": test() import doctest doctest.testmod()
-1
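The record above carries the repository's educational MD5 routine, again unchanged by this PR. A quick sanity check is to compare it against the standard library; the sketch below assumes the record's module has been saved as md5_local.py (hypothetical filename) and only feeds it ASCII text, which is all md5me() handles. The two messages are the ones the record's own test() asserts on.

```py
import hashlib

# Cross-check md5me() against hashlib for ASCII input; sketch only, assuming
# the module from the record is saved as md5_local.py (hypothetical filename).
from md5_local import md5me

for message in ("", "The quick brown fox jumps over the lazy dog"):
    expected = hashlib.md5(message.encode("ascii")).hexdigest()
    assert md5me(message) == expected, message
    print(repr(message[:20]), "->", expected)
```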
TheAlgorithms/Python
7,339
Follow Flake8 pep3101 and remove modulo formatting
### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
CaedenPH
2022-10-16T20:16:16Z
2022-10-16T20:50:11Z
7f6e0b656f6362e452b11d06acde50b8b81cb31a
f15cc2f01c2a4124ff6dc0843c728a546f9d9f79
Follow Flake8 pep3101 and remove modulo formatting. ### Describe your change: Follow Flake8 pep3101 and remove modulo formatting (replace with f-strings) For example ```py "Hello %s" % name ``` goes to ```py f"Hello {name}" ``` * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms have a URL in its comments that points to Wikipedia or other similar explanation. * [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
from collections import defaultdict from graphs.minimum_spanning_tree_prims import prisms_algorithm as mst def test_prim_successful_result(): num_nodes, num_edges = 9, 14 # noqa: F841 edges = [ [0, 1, 4], [0, 7, 8], [1, 2, 8], [7, 8, 7], [7, 6, 1], [2, 8, 2], [8, 6, 6], [2, 3, 7], [2, 5, 4], [6, 5, 2], [3, 5, 14], [3, 4, 9], [5, 4, 10], [1, 7, 11], ] adjancency = defaultdict(list) for node1, node2, cost in edges: adjancency[node1].append([node2, cost]) adjancency[node2].append([node1, cost]) result = mst(adjancency) expected = [ [7, 6, 1], [2, 8, 2], [6, 5, 2], [0, 1, 4], [2, 5, 4], [2, 3, 7], [0, 7, 8], [3, 4, 9], ] for answer in expected: edge = tuple(answer[:2]) reverse = tuple(edge[::-1]) assert edge in result or reverse in result
from collections import defaultdict from graphs.minimum_spanning_tree_prims import prisms_algorithm as mst def test_prim_successful_result(): num_nodes, num_edges = 9, 14 # noqa: F841 edges = [ [0, 1, 4], [0, 7, 8], [1, 2, 8], [7, 8, 7], [7, 6, 1], [2, 8, 2], [8, 6, 6], [2, 3, 7], [2, 5, 4], [6, 5, 2], [3, 5, 14], [3, 4, 9], [5, 4, 10], [1, 7, 11], ] adjancency = defaultdict(list) for node1, node2, cost in edges: adjancency[node1].append([node2, cost]) adjancency[node2].append([node1, cost]) result = mst(adjancency) expected = [ [7, 6, 1], [2, 8, 2], [6, 5, 2], [0, 1, 4], [2, 5, 4], [2, 3, 7], [0, 7, 8], [3, 4, 9], ] for answer in expected: edge = tuple(answer[:2]) reverse = tuple(edge[::-1]) assert edge in result or reverse in result
-1
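The final record is a pytest-style check that builds a defaultdict(list) adjacency structure and asserts that every expected MST edge appears in the output of the repository's prisms_algorithm. The sketch below is not that function; it is a self-contained heapq-based Prim's over the same adjacency shape, included only to illustrate the structure the test verifies, run on a hypothetical smaller edge list.

```py
import heapq
from collections import defaultdict

def prim_mst(adjacency: dict, start: int = 0) -> list[tuple[int, int, int]]:
    """Lazy Prim's over an adjacency dict of node -> [[neighbour, cost], ...]."""
    visited = {start}
    frontier = [(cost, start, to) for to, cost in adjacency[start]]
    heapq.heapify(frontier)
    mst = []
    while frontier:
        cost, frm, to = heapq.heappop(frontier)
        if to in visited:
            continue
        visited.add(to)
        mst.append((frm, to, cost))
        for nxt, nxt_cost in adjacency[to]:
            if nxt not in visited:
                heapq.heappush(frontier, (nxt_cost, to, nxt))
    return mst

# Hypothetical smaller edge list in the same [node1, node2, cost] format as the test.
adjacency = defaultdict(list)
for node1, node2, cost in ([0, 1, 4], [0, 7, 8], [1, 2, 8], [7, 6, 1], [2, 8, 2], [6, 5, 2]):
    adjacency[node1].append([node2, cost])
    adjacency[node2].append([node1, cost])

print(prim_mst(adjacency))   # one valid minimum spanning tree of this small graph
```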