repo_name (stringclasses, 1 value) | pr_number (int64, 4.12k to 11.2k) | pr_title (stringlengths, 9 to 107) | pr_description (stringlengths, 107 to 5.48k) | author (stringlengths, 4 to 18) | date_created (unknown) | date_merged (unknown) | previous_commit (stringlengths, 40) | pr_commit (stringlengths, 40) | query (stringlengths, 118 to 5.52k) | before_content (stringlengths, 0 to 7.93M) | after_content (stringlengths, 0 to 7.93M) | label (int64, -1 to 1)
---|---|---|---|---|---|---|---|---|---|---|---|---|
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| def is_palindrome(head):
if not head:
return True
# split the list to two parts
fast, slow = head.next, head
while fast and fast.next:
fast = fast.next.next
slow = slow.next
second = slow.next
slow.next = None  # Detach the first half; the comparison below works even if this is skipped
# reverse the second part
node = None
while second:
nxt = second.next
second.next = node
node = second
second = nxt
# compare two parts
# second part has the same or one less node
while node:
if node.val != head.val:
return False
node = node.next
head = head.next
return True
def is_palindrome_stack(head):
if not head or not head.next:
return True
# 1. Get the midpoint (slow)
slow = fast = cur = head
while fast and fast.next:
fast, slow = fast.next.next, slow.next
# 2. Push the second half into the stack
stack = [slow.val]
while slow.next:
slow = slow.next
stack.append(slow.val)
# 3. Comparison
while stack:
if stack.pop() != cur.val:
return False
cur = cur.next
return True
def is_palindrome_dict(head):
if not head or not head.next:
return True
d = {}
pos = 0
while head:
if head.val in d:
d[head.val].append(pos)
else:
d[head.val] = [pos]
head = head.next
pos += 1
checksum = pos - 1
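# In a palindrome, the positions of each value must pair up so that every pair sums to len - 1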
middle = 0
for v in d.values():
if len(v) % 2 != 0:
middle += 1
else:
step = 0
for i in range(0, len(v)):
if v[i] + v[len(v) - 1 - step] != checksum:
return False
step += 1
if middle > 1:
return False
return True
| def is_palindrome(head):
if not head:
return True
# split the list to two parts
fast, slow = head.next, head
while fast and fast.next:
fast = fast.next.next
slow = slow.next
second = slow.next
slow.next = None  # Detach the first half; the comparison below works even if this is skipped
# reverse the second part
node = None
while second:
nxt = second.next
second.next = node
node = second
second = nxt
# compare two parts
# second part has the same or one less node
while node:
if node.val != head.val:
return False
node = node.next
head = head.next
return True
def is_palindrome_stack(head):
if not head or not head.next:
return True
# 1. Get the midpoint (slow)
slow = fast = cur = head
while fast and fast.next:
fast, slow = fast.next.next, slow.next
# 2. Push the second half into the stack
stack = [slow.val]
while slow.next:
slow = slow.next
stack.append(slow.val)
# 3. Comparison
while stack:
if stack.pop() != cur.val:
return False
cur = cur.next
return True
def is_palindrome_dict(head):
if not head or not head.next:
return True
d = {}
pos = 0
while head:
if head.val in d:
d[head.val].append(pos)
else:
d[head.val] = [pos]
head = head.next
pos += 1
checksum = pos - 1
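# In a palindrome, the positions of each value must pair up so that every pair sums to len - 1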
middle = 0
for v in d.values():
if len(v) % 2 != 0:
middle += 1
else:
step = 0
for i in range(0, len(v)):
if v[i] + v[len(v) - 1 - step] != checksum:
return False
step += 1
if middle > 1:
return False
return True
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
An edge is a bridge if, after removing it, the count of connected components in the
graph increases by one. Bridges represent vulnerabilities in a connected network and
are useful for designing reliable networks. For example, in a wired computer network,
an articulation point indicates the critical computers and a bridge indicates the
critical wires or connections.
For more details, refer to this article:
https://www.geeksforgeeks.org/bridge-in-a-graph/
"""
def __get_demo_graph(index):
return [
{
0: [1, 2],
1: [0, 2],
2: [0, 1, 3, 5],
3: [2, 4],
4: [3],
5: [2, 6, 8],
6: [5, 7],
7: [6, 8],
8: [5, 7],
},
{
0: [6],
1: [9],
2: [4, 5],
3: [4],
4: [2, 3],
5: [2],
6: [0, 7],
7: [6],
8: [],
9: [1],
},
{
0: [4],
1: [6],
2: [],
3: [5, 6, 7],
4: [0, 6],
5: [3, 8, 9],
6: [1, 3, 4, 7],
7: [3, 6, 8, 9],
8: [5, 7],
9: [5, 7],
},
{
0: [1, 3],
1: [0, 2, 4],
2: [1, 3, 4],
3: [0, 2, 4],
4: [1, 2, 3],
},
][index]
def compute_bridges(graph: dict[int, list[int]]) -> list[tuple[int, int]]:
"""
Return the list of undirected graph bridges [(a1, b1), ..., (ak, bk)]; ai <= bi
>>> compute_bridges(__get_demo_graph(0))
[(3, 4), (2, 3), (2, 5)]
>>> compute_bridges(__get_demo_graph(1))
[(6, 7), (0, 6), (1, 9), (3, 4), (2, 4), (2, 5)]
>>> compute_bridges(__get_demo_graph(2))
[(1, 6), (4, 6), (0, 4)]
>>> compute_bridges(__get_demo_graph(3))
[]
>>> compute_bridges({})
[]
"""
id_ = 0
n = len(graph) # No of vertices in graph
low = [0] * n
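# low[v]: smallest id reachable from v's DFS subtree via back edges; used in the bridge test below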
visited = [False] * n
def dfs(at, parent, bridges, id_):
visited[at] = True
low[at] = id_
id_ += 1
for to in graph[at]:
if to == parent:
pass
elif not visited[to]:
dfs(to, at, bridges, id_)
low[at] = min(low[at], low[to])
if id_ <= low[to]:
bridges.append((at, to) if at < to else (to, at))
else:
# This edge is a back edge and cannot be a bridge
low[at] = min(low[at], low[to])
bridges: list[tuple[int, int]] = []
for i in range(n):
if not visited[i]:
dfs(i, -1, bridges, id_)
return bridges
if __name__ == "__main__":
import doctest
doctest.testmod()
| """
An edge is a bridge if, after removing it, the count of connected components in the
graph increases by one. Bridges represent vulnerabilities in a connected network and
are useful for designing reliable networks. For example, in a wired computer network,
an articulation point indicates the critical computers and a bridge indicates the
critical wires or connections.
For more details, refer to this article:
https://www.geeksforgeeks.org/bridge-in-a-graph/
"""
def __get_demo_graph(index):
return [
{
0: [1, 2],
1: [0, 2],
2: [0, 1, 3, 5],
3: [2, 4],
4: [3],
5: [2, 6, 8],
6: [5, 7],
7: [6, 8],
8: [5, 7],
},
{
0: [6],
1: [9],
2: [4, 5],
3: [4],
4: [2, 3],
5: [2],
6: [0, 7],
7: [6],
8: [],
9: [1],
},
{
0: [4],
1: [6],
2: [],
3: [5, 6, 7],
4: [0, 6],
5: [3, 8, 9],
6: [1, 3, 4, 7],
7: [3, 6, 8, 9],
8: [5, 7],
9: [5, 7],
},
{
0: [1, 3],
1: [0, 2, 4],
2: [1, 3, 4],
3: [0, 2, 4],
4: [1, 2, 3],
},
][index]
def compute_bridges(graph: dict[int, list[int]]) -> list[tuple[int, int]]:
"""
Return the list of undirected graph bridges [(a1, b1), ..., (ak, bk)]; ai <= bi
>>> compute_bridges(__get_demo_graph(0))
[(3, 4), (2, 3), (2, 5)]
>>> compute_bridges(__get_demo_graph(1))
[(6, 7), (0, 6), (1, 9), (3, 4), (2, 4), (2, 5)]
>>> compute_bridges(__get_demo_graph(2))
[(1, 6), (4, 6), (0, 4)]
>>> compute_bridges(__get_demo_graph(3))
[]
>>> compute_bridges({})
[]
"""
id_ = 0
n = len(graph) # No of vertices in graph
low = [0] * n
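# low[v]: smallest id reachable from v's DFS subtree via back edges; used in the bridge test below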
visited = [False] * n
def dfs(at, parent, bridges, id_):
visited[at] = True
low[at] = id_
id_ += 1
for to in graph[at]:
if to == parent:
pass
elif not visited[to]:
dfs(to, at, bridges, id_)
low[at] = min(low[at], low[to])
if id_ <= low[to]:
bridges.append((at, to) if at < to else (to, at))
else:
# This edge is a back edge and cannot be a bridge
low[at] = min(low[at], low[to])
bridges: list[tuple[int, int]] = []
for i in range(n):
if not visited[i]:
dfs(i, -1, bridges, id_)
return bridges
if __name__ == "__main__":
import doctest
doctest.testmod()
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| def bin_exp_mod(a, n, b):
"""
>>> bin_exp_mod(3, 4, 5)
1
>>> bin_exp_mod(7, 13, 10)
7
"""
# mod b
assert b != 0, "The modulus b must not be 0"
if n == 0:
return 1
if n % 2 == 1:
return (bin_exp_mod(a, n - 1, b) * a) % b
r = bin_exp_mod(a, n // 2, b)  # integer division keeps n an int
return (r * r) % b
if __name__ == "__main__":
try:
BASE = int(input("Enter Base : ").strip())
POWER = int(input("Enter Power : ").strip())
MODULO = int(input("Enter Modulo : ").strip())
except ValueError:
print("Invalid literal for integer")
print(bin_exp_mod(BASE, POWER, MODULO))
| def bin_exp_mod(a, n, b):
"""
>>> bin_exp_mod(3, 4, 5)
1
>>> bin_exp_mod(7, 13, 10)
7
"""
# mod b
assert b != 0, "The modulus b must not be 0"
if n == 0:
return 1
if n % 2 == 1:
return (bin_exp_mod(a, n - 1, b) * a) % b
r = bin_exp_mod(a, n // 2, b)  # integer division keeps n an int
return (r * r) % b
if __name__ == "__main__":
try:
BASE = int(input("Enter Base : ").strip())
POWER = int(input("Enter Power : ").strip())
MODULO = int(input("Enter Modulo : ").strip())
except ValueError:
print("Invalid literal for integer")
print(bin_exp_mod(BASE, POWER, MODULO))
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
Author : Mehdi ALAOUI
This is a pure Python implementation of a Dynamic Programming solution to the
longest increasing subsequence problem for a given sequence.
The problem is:
Given an array, find the longest increasing subsequence in that given array and
return it.
Example: [10, 22, 9, 33, 21, 50, 41, 60, 80] as input will return
[10, 22, 33, 41, 60, 80] as output
"""
from __future__ import annotations
def longest_subsequence(array: list[int]) -> list[int]: # This function is recursive
"""
Some examples
>>> longest_subsequence([10, 22, 9, 33, 21, 50, 41, 60, 80])
[10, 22, 33, 41, 60, 80]
>>> longest_subsequence([4, 8, 7, 5, 1, 12, 2, 3, 9])
[1, 2, 3, 9]
>>> longest_subsequence([9, 8, 7, 6, 5, 7])
[8]
>>> longest_subsequence([1, 1, 1])
[1, 1, 1]
>>> longest_subsequence([])
[]
"""
array_length = len(array)
# If the array contains only one element, we return it (it's the stop condition of
# recursion)
if array_length <= 1:
return array
# Else
pivot = array[0]
is_found = False
i = 1
longest_subseq: list[int] = []
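# Case 1: drop the pivot and build the best subsequence starting at the first element smaller than it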
while not is_found and i < array_length:
if array[i] < pivot:
is_found = True
temp_array = [element for element in array[i:] if element >= array[i]]
temp_array = longest_subsequence(temp_array)
if len(temp_array) > len(longest_subseq):
longest_subseq = temp_array
else:
i += 1
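# Case 2: keep the pivot and recurse on the remaining elements that are >= pivot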
temp_array = [element for element in array[1:] if element >= pivot]
temp_array = [pivot, *longest_subsequence(temp_array)]
if len(temp_array) > len(longest_subseq):
return temp_array
else:
return longest_subseq
if __name__ == "__main__":
import doctest
doctest.testmod()
| """
Author : Mehdi ALAOUI
This is a pure Python implementation of a Dynamic Programming solution to the
longest increasing subsequence problem for a given sequence.
The problem is:
Given an array, find the longest increasing subsequence in that given array and
return it.
Example: [10, 22, 9, 33, 21, 50, 41, 60, 80] as input will return
[10, 22, 33, 41, 60, 80] as output
"""
from __future__ import annotations
def longest_subsequence(array: list[int]) -> list[int]: # This function is recursive
"""
Some examples
>>> longest_subsequence([10, 22, 9, 33, 21, 50, 41, 60, 80])
[10, 22, 33, 41, 60, 80]
>>> longest_subsequence([4, 8, 7, 5, 1, 12, 2, 3, 9])
[1, 2, 3, 9]
>>> longest_subsequence([9, 8, 7, 6, 5, 7])
[8]
>>> longest_subsequence([1, 1, 1])
[1, 1, 1]
>>> longest_subsequence([])
[]
"""
array_length = len(array)
# If the array contains only one element, we return it (it's the stop condition of
# recursion)
if array_length <= 1:
return array
# Else
pivot = array[0]
is_found = False
i = 1
longest_subseq: list[int] = []
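# Case 1: drop the pivot and build the best subsequence starting at the first element smaller than it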
while not is_found and i < array_length:
if array[i] < pivot:
is_found = True
temp_array = [element for element in array[i:] if element >= array[i]]
temp_array = longest_subsequence(temp_array)
if len(temp_array) > len(longest_subseq):
longest_subseq = temp_array
else:
i += 1
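# Case 2: keep the pivot and recurse on the remaining elements that are >= pivot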
temp_array = [element for element in array[1:] if element >= pivot]
temp_array = [pivot, *longest_subsequence(temp_array)]
if len(temp_array) > len(longest_subseq):
return temp_array
else:
return longest_subseq
if __name__ == "__main__":
import doctest
doctest.testmod()
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
Simulate the evolution of a highway with only one road that is a loop.
The highway is divided into cells; each cell can hold at most one car.
The highway is a loop so when a car comes to one end, it will come out on the other.
Each car is represented by its speed (from 0 to 5).
Some information about speed:
-1 means that the cell on the highway is empty
0 to 5 are the speeds of the cars, with 0 being the lowest and 5 the highest
highway: list[int] Where every position and speed of every car will be stored
probability The probability that a driver will slow down
initial_speed The speed of the cars at the start
frequency How many cells there are between two cars at the start
max_speed The maximum speed a car can go to
number_of_cells How many cells there are in the highway
number_of_update How many times the position will be updated
More information here: https://en.wikipedia.org/wiki/Nagel%E2%80%93Schreckenberg_model
Examples for doctest:
>>> simulate(construct_highway(6, 3, 0), 2, 0, 2)
[[0, -1, -1, 0, -1, -1], [-1, 1, -1, -1, 1, -1], [-1, -1, 1, -1, -1, 1]]
>>> simulate(construct_highway(5, 2, -2), 3, 0, 2)
[[0, -1, 0, -1, 0], [0, -1, 0, -1, -1], [0, -1, -1, 1, -1], [-1, 1, -1, 0, -1]]
"""
from random import randint, random
def construct_highway(
number_of_cells: int,
frequency: int,
initial_speed: int,
random_frequency: bool = False,
random_speed: bool = False,
max_speed: int = 5,
) -> list:
"""
Build the highway following the parameters given
>>> construct_highway(10, 2, 6)
[[6, -1, 6, -1, 6, -1, 6, -1, 6, -1]]
>>> construct_highway(10, 10, 2)
[[2, -1, -1, -1, -1, -1, -1, -1, -1, -1]]
"""
highway = [[-1] * number_of_cells] # Create a highway without any car
i = 0
initial_speed = max(initial_speed, 0)
while i < number_of_cells:
highway[0][i] = (
randint(0, max_speed) if random_speed else initial_speed
) # Place the cars
i += (
randint(1, max_speed * 2) if random_frequency else frequency
) # Arbitrary number, may need tuning
return highway
def get_distance(highway_now: list, car_index: int) -> int:
"""
Get the distance between a car (at index car_index) and the next car
>>> get_distance([6, -1, 6, -1, 6], 2)
1
>>> get_distance([2, -1, -1, -1, 3, 1, 0, 1, 3, 2], 0)
3
>>> get_distance([-1, -1, -1, -1, 2, -1, -1, -1, 3], -1)
4
"""
distance = 0
cells = highway_now[car_index + 1 :]
for cell in cells:  # Iterate over the cells in front of the car
if cell != -1:  # If the cell is not empty then
return distance # we have the distance we wanted
distance += 1
# Here if the car is near the end of the highway
return distance + get_distance(highway_now, -1)
def update(highway_now: list, probability: float, max_speed: int) -> list:
"""
Update the speed of the cars
>>> update([-1, -1, -1, -1, -1, 2, -1, -1, -1, -1, 3], 0.0, 5)
[-1, -1, -1, -1, -1, 3, -1, -1, -1, -1, 4]
>>> update([-1, -1, 2, -1, -1, -1, -1, 3], 0.0, 5)
[-1, -1, 3, -1, -1, -1, -1, 1]
"""
number_of_cells = len(highway_now)
# Before calculations, the highway is empty
next_highway = [-1] * number_of_cells
for car_index in range(number_of_cells):
if highway_now[car_index] != -1:
# Add 1 to the current speed of the car and cap the speed
next_highway[car_index] = min(highway_now[car_index] + 1, max_speed)
# Number of empty cell before the next car
dn = get_distance(highway_now, car_index) - 1
# We can't have the car causing an accident
next_highway[car_index] = min(next_highway[car_index], dn)
if random() < probability:
# Randomly, a driver will slow down
next_highway[car_index] = max(next_highway[car_index] - 1, 0)
return next_highway
def simulate(
highway: list, number_of_update: int, probability: float, max_speed: int
) -> list:
"""
The main function, it will simulate the evolution of the highway
>>> simulate([[-1, 2, -1, -1, -1, 3]], 2, 0.0, 3)
[[-1, 2, -1, -1, -1, 3], [-1, -1, -1, 2, -1, 0], [1, -1, -1, 0, -1, -1]]
>>> simulate([[-1, 2, -1, 3]], 4, 0.0, 3)
[[-1, 2, -1, 3], [-1, 0, -1, 0], [-1, 0, -1, 0], [-1, 0, -1, 0], [-1, 0, -1, 0]]
"""
number_of_cells = len(highway[0])
for i in range(number_of_update):
next_speeds_calculated = update(highway[i], probability, max_speed)
real_next_speeds = [-1] * number_of_cells
for car_index in range(number_of_cells):
speed = next_speeds_calculated[car_index]
if speed != -1:
# Change the position based on the speed (with % to create the loop)
index = (car_index + speed) % number_of_cells
# Commit the change of position
real_next_speeds[index] = speed
highway.append(real_next_speeds)
return highway
if __name__ == "__main__":
import doctest
doctest.testmod()
| """
Simulate the evolution of a highway with only one road that is a loop.
The highway is divided into cells; each cell can hold at most one car.
The highway is a loop so when a car comes to one end, it will come out on the other.
Each car is represented by its speed (from 0 to 5).
Some information about speed:
-1 means that the cell on the highway is empty
0 to 5 are the speeds of the cars, with 0 being the lowest and 5 the highest
highway: list[int] Where every position and speed of every car will be stored
probability The probability that a driver will slow down
initial_speed The speed of the cars at the start
frequency How many cells there are between two cars at the start
max_speed The maximum speed a car can go to
number_of_cells How many cells there are in the highway
number_of_update How many times the position will be updated
More information here: https://en.wikipedia.org/wiki/Nagel%E2%80%93Schreckenberg_model
Examples for doctest:
>>> simulate(construct_highway(6, 3, 0), 2, 0, 2)
[[0, -1, -1, 0, -1, -1], [-1, 1, -1, -1, 1, -1], [-1, -1, 1, -1, -1, 1]]
>>> simulate(construct_highway(5, 2, -2), 3, 0, 2)
[[0, -1, 0, -1, 0], [0, -1, 0, -1, -1], [0, -1, -1, 1, -1], [-1, 1, -1, 0, -1]]
"""
from random import randint, random
def construct_highway(
number_of_cells: int,
frequency: int,
initial_speed: int,
random_frequency: bool = False,
random_speed: bool = False,
max_speed: int = 5,
) -> list:
"""
Build the highway following the parameters given
>>> construct_highway(10, 2, 6)
[[6, -1, 6, -1, 6, -1, 6, -1, 6, -1]]
>>> construct_highway(10, 10, 2)
[[2, -1, -1, -1, -1, -1, -1, -1, -1, -1]]
"""
highway = [[-1] * number_of_cells] # Create a highway without any car
i = 0
initial_speed = max(initial_speed, 0)
while i < number_of_cells:
highway[0][i] = (
randint(0, max_speed) if random_speed else initial_speed
) # Place the cars
i += (
randint(1, max_speed * 2) if random_frequency else frequency
) # Arbitrary number, may need tuning
return highway
def get_distance(highway_now: list, car_index: int) -> int:
"""
Get the distance between a car (at index car_index) and the next car
>>> get_distance([6, -1, 6, -1, 6], 2)
1
>>> get_distance([2, -1, -1, -1, 3, 1, 0, 1, 3, 2], 0)
3
>>> get_distance([-1, -1, -1, -1, 2, -1, -1, -1, 3], -1)
4
"""
distance = 0
cells = highway_now[car_index + 1 :]
for cell in cells:  # Iterate over the cells in front of the car
if cell != -1:  # If the cell is not empty then
return distance # we have the distance we wanted
distance += 1
# Here if the car is near the end of the highway
return distance + get_distance(highway_now, -1)
def update(highway_now: list, probability: float, max_speed: int) -> list:
"""
Update the speed of the cars
>>> update([-1, -1, -1, -1, -1, 2, -1, -1, -1, -1, 3], 0.0, 5)
[-1, -1, -1, -1, -1, 3, -1, -1, -1, -1, 4]
>>> update([-1, -1, 2, -1, -1, -1, -1, 3], 0.0, 5)
[-1, -1, 3, -1, -1, -1, -1, 1]
"""
number_of_cells = len(highway_now)
# Before calculations, the highway is empty
next_highway = [-1] * number_of_cells
for car_index in range(number_of_cells):
if highway_now[car_index] != -1:
# Add 1 to the current speed of the car and cap the speed
next_highway[car_index] = min(highway_now[car_index] + 1, max_speed)
# Number of empty cell before the next car
dn = get_distance(highway_now, car_index) - 1
# We can't have the car causing an accident
next_highway[car_index] = min(next_highway[car_index], dn)
if random() < probability:
# Randomly, a driver will slow down
next_highway[car_index] = max(next_highway[car_index] - 1, 0)
return next_highway
def simulate(
highway: list, number_of_update: int, probability: float, max_speed: int
) -> list:
"""
The main function, it will simulate the evolution of the highway
>>> simulate([[-1, 2, -1, -1, -1, 3]], 2, 0.0, 3)
[[-1, 2, -1, -1, -1, 3], [-1, -1, -1, 2, -1, 0], [1, -1, -1, 0, -1, -1]]
>>> simulate([[-1, 2, -1, 3]], 4, 0.0, 3)
[[-1, 2, -1, 3], [-1, 0, -1, 0], [-1, 0, -1, 0], [-1, 0, -1, 0], [-1, 0, -1, 0]]
"""
number_of_cells = len(highway[0])
for i in range(number_of_update):
next_speeds_calculated = update(highway[i], probability, max_speed)
real_next_speeds = [-1] * number_of_cells
for car_index in range(number_of_cells):
speed = next_speeds_calculated[car_index]
if speed != -1:
# Change the position based on the speed (with % to create the loop)
index = (car_index + speed) % number_of_cells
# Commit the change of position
real_next_speeds[index] = speed
highway.append(real_next_speeds)
return highway
if __name__ == "__main__":
import doctest
doctest.testmod()
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| class BinaryHeap:
"""
A max-heap implementation in Python
>>> binary_heap = BinaryHeap()
>>> binary_heap.insert(6)
>>> binary_heap.insert(10)
>>> binary_heap.insert(15)
>>> binary_heap.insert(12)
>>> binary_heap.pop()
15
>>> binary_heap.pop()
12
>>> binary_heap.get_list
[10, 6]
>>> len(binary_heap)
2
"""
def __init__(self):
self.__heap = [0]
self.__size = 0
def __swap_up(self, i: int) -> None:
"""Swap the element up"""
temporary = self.__heap[i]
while i // 2 > 0:
if self.__heap[i] > self.__heap[i // 2]:
self.__heap[i] = self.__heap[i // 2]
self.__heap[i // 2] = temporary
i //= 2
def insert(self, value: int) -> None:
"""Insert new element"""
self.__heap.append(value)
self.__size += 1
self.__swap_up(self.__size)
def __swap_down(self, i: int) -> None:
"""Swap the element down"""
while self.__size >= 2 * i:
if 2 * i + 1 > self.__size:
bigger_child = 2 * i
else:
if self.__heap[2 * i] > self.__heap[2 * i + 1]:
bigger_child = 2 * i
else:
bigger_child = 2 * i + 1
temporary = self.__heap[i]
if self.__heap[i] < self.__heap[bigger_child]:
self.__heap[i] = self.__heap[bigger_child]
self.__heap[bigger_child] = temporary
i = bigger_child
def pop(self) -> int:
"""Pop the root element"""
max_value = self.__heap[1]
self.__heap[1] = self.__heap[self.__size]
self.__size -= 1
self.__heap.pop()
self.__swap_down(1)
return max_value
@property
def get_list(self):
return self.__heap[1:]
def __len__(self):
"""Length of the array"""
return self.__size
if __name__ == "__main__":
import doctest
doctest.testmod()
# create an instance of BinaryHeap
binary_heap = BinaryHeap()
binary_heap.insert(6)
binary_heap.insert(10)
binary_heap.insert(15)
binary_heap.insert(12)
# pop the root (the max value, because it is a max-heap)
print(binary_heap.pop()) # 15
print(binary_heap.pop()) # 12
# get the list and size after operations
print(binary_heap.get_list)
print(len(binary_heap))
| class BinaryHeap:
"""
A max-heap implementation in Python
>>> binary_heap = BinaryHeap()
>>> binary_heap.insert(6)
>>> binary_heap.insert(10)
>>> binary_heap.insert(15)
>>> binary_heap.insert(12)
>>> binary_heap.pop()
15
>>> binary_heap.pop()
12
>>> binary_heap.get_list
[10, 6]
>>> len(binary_heap)
2
"""
def __init__(self):
self.__heap = [0]
self.__size = 0
def __swap_up(self, i: int) -> None:
"""Swap the element up"""
temporary = self.__heap[i]
while i // 2 > 0:
if self.__heap[i] > self.__heap[i // 2]:
self.__heap[i] = self.__heap[i // 2]
self.__heap[i // 2] = temporary
i //= 2
def insert(self, value: int) -> None:
"""Insert new element"""
self.__heap.append(value)
self.__size += 1
self.__swap_up(self.__size)
def __swap_down(self, i: int) -> None:
"""Swap the element down"""
while self.__size >= 2 * i:
if 2 * i + 1 > self.__size:
bigger_child = 2 * i
else:
if self.__heap[2 * i] > self.__heap[2 * i + 1]:
bigger_child = 2 * i
else:
bigger_child = 2 * i + 1
temporary = self.__heap[i]
if self.__heap[i] < self.__heap[bigger_child]:
self.__heap[i] = self.__heap[bigger_child]
self.__heap[bigger_child] = temporary
i = bigger_child
def pop(self) -> int:
"""Pop the root element"""
max_value = self.__heap[1]
self.__heap[1] = self.__heap[self.__size]
self.__size -= 1
self.__heap.pop()
self.__swap_down(1)
return max_value
@property
def get_list(self):
return self.__heap[1:]
def __len__(self):
"""Length of the array"""
return self.__size
if __name__ == "__main__":
import doctest
doctest.testmod()
# create an instance of BinaryHeap
binary_heap = BinaryHeap()
binary_heap.insert(6)
binary_heap.insert(10)
binary_heap.insert(15)
binary_heap.insert(12)
# pop the root (the max value, because it is a max-heap)
print(binary_heap.pop()) # 15
print(binary_heap.pop()) # 12
# get the list and size after operations
print(binary_heap.get_list)
print(len(binary_heap))
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| -1 |
||
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
This is a pure Python implementation of the P-Series algorithm
https://en.wikipedia.org/wiki/Harmonic_series_(mathematics)#P-series
For doctests run following command:
python -m doctest -v p_series.py
or
python3 -m doctest -v p_series.py
For manual testing run:
python3 p_series.py
"""
from __future__ import annotations
def p_series(nth_term: int | float | str, power: int | float | str) -> list[str]:
"""
Pure Python implementation of P-Series algorithm
:return: The P-Series starting from 1 to last (nth) term
Examples:
>>> p_series(5, 2)
['1', '1 / 4', '1 / 9', '1 / 16', '1 / 25']
>>> p_series(-5, 2)
[]
>>> p_series(5, -2)
['1', '1 / 0.25', '1 / 0.1111111111111111', '1 / 0.0625', '1 / 0.04']
>>> p_series("", 1000)
['']
>>> p_series(0, 0)
[]
>>> p_series(1, 1)
['1']
"""
if nth_term == "":
return [""]
nth_term = int(nth_term)
power = int(power)
series: list[str] = []
for temp in range(int(nth_term)):
series.append(f"1 / {pow(temp + 1, int(power))}" if series else "1")
return series
if __name__ == "__main__":
import doctest
doctest.testmod()
nth_term = int(input("Enter the last number (nth term) of the P-Series"))
power = int(input("Enter the power for P-Series"))
print("Formula of P-Series => 1+1/2^p+1/3^p ..... 1/n^p")
print(p_series(nth_term, power))
| """
This is a pure Python implementation of the P-Series algorithm
https://en.wikipedia.org/wiki/Harmonic_series_(mathematics)#P-series
For doctests run following command:
python -m doctest -v p_series.py
or
python3 -m doctest -v p_series.py
For manual testing run:
python3 p_series.py
"""
from __future__ import annotations
def p_series(nth_term: int | float | str, power: int | float | str) -> list[str]:
"""
Pure Python implementation of P-Series algorithm
:return: The P-Series starting from 1 to last (nth) term
Examples:
>>> p_series(5, 2)
['1', '1 / 4', '1 / 9', '1 / 16', '1 / 25']
>>> p_series(-5, 2)
[]
>>> p_series(5, -2)
['1', '1 / 0.25', '1 / 0.1111111111111111', '1 / 0.0625', '1 / 0.04']
>>> p_series("", 1000)
['']
>>> p_series(0, 0)
[]
>>> p_series(1, 1)
['1']
"""
if nth_term == "":
return [""]
nth_term = int(nth_term)
power = int(power)
series: list[str] = []
for temp in range(int(nth_term)):
series.append(f"1 / {pow(temp + 1, int(power))}" if series else "1")
return series
if __name__ == "__main__":
import doctest
doctest.testmod()
nth_term = int(input("Enter the last number (nth term) of the P-Series"))
power = int(input("Enter the power for P-Series"))
print("Formula of P-Series => 1+1/2^p+1/3^p ..... 1/n^p")
print(p_series(nth_term, power))
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """Get the site emails from URL."""
from __future__ import annotations
__author__ = "Muhammad Umer Farooq"
__license__ = "MIT"
__version__ = "1.0.0"
__maintainer__ = "Muhammad Umer Farooq"
__email__ = "[email protected]"
__status__ = "Alpha"
import re
from html.parser import HTMLParser
from urllib import parse
import requests
class Parser(HTMLParser):
def __init__(self, domain: str) -> None:
super().__init__()
self.urls: list[str] = []
self.domain = domain
def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None:
"""
This function parses the HTML to extract URLs from anchor tags
"""
# Only parse the 'anchor' tag.
if tag == "a":
# Check the list of defined attributes.
for name, value in attrs:
# If href is defined and is neither empty nor '#', record it.
if name == "href" and value != "#" and value != "":
# If not already in urls.
if value not in self.urls:
url = parse.urljoin(self.domain, value)
self.urls.append(url)
# Get main domain name (example.com)
def get_domain_name(url: str) -> str:
"""
This function gets the main domain name
>>> get_domain_name("https://a.b.c.d/e/f?g=h,i=j#k")
'c.d'
>>> get_domain_name("Not a URL!")
''
"""
return ".".join(get_sub_domain_name(url).split(".")[-2:])
# Get sub domain name (sub.example.com)
def get_sub_domain_name(url: str) -> str:
"""
>>> get_sub_domain_name("https://a.b.c.d/e/f?g=h,i=j#k")
'a.b.c.d'
>>> get_sub_domain_name("Not a URL!")
''
"""
return parse.urlparse(url).netloc
def emails_from_url(url: str = "https://github.com") -> list[str]:
"""
This function takes a URL and returns all the valid email addresses found via its links
"""
# Get the base domain from the url
domain = get_domain_name(url)
# Initialize the parser
parser = Parser(domain)
try:
# Open URL
r = requests.get(url)
# pass the raw HTML to the parser to get links
parser.feed(r.text)
# Get links and loop through
valid_emails = set()
for link in parser.urls:
# open URL.
# read = requests.get(link)
try:
read = requests.get(link)
# Get the valid email.
emails = re.findall("[a-zA-Z0-9]+@" + domain, read.text)
# If not in list then append it.
for email in emails:
valid_emails.add(email)
except ValueError:
pass
except ValueError:
raise SystemExit(1)
# Finally return a sorted list of email addresses with no duplicates.
return sorted(valid_emails)
if __name__ == "__main__":
emails = emails_from_url("https://github.com")
print(f"{len(emails)} emails found:")
print("\n".join(sorted(emails)))
| """Get the site emails from URL."""
from __future__ import annotations
__author__ = "Muhammad Umer Farooq"
__license__ = "MIT"
__version__ = "1.0.0"
__maintainer__ = "Muhammad Umer Farooq"
__email__ = "[email protected]"
__status__ = "Alpha"
import re
from html.parser import HTMLParser
from urllib import parse
import requests
class Parser(HTMLParser):
def __init__(self, domain: str) -> None:
super().__init__()
self.urls: list[str] = []
self.domain = domain
def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None:
"""
This function parses the HTML to extract URLs from anchor tags
"""
# Only parse the 'anchor' tag.
if tag == "a":
# Check the list of defined attributes.
for name, value in attrs:
# If href is defined and is neither empty nor '#', record it.
if name == "href" and value != "#" and value != "":
# If not already in urls.
if value not in self.urls:
url = parse.urljoin(self.domain, value)
self.urls.append(url)
# Get main domain name (example.com)
def get_domain_name(url: str) -> str:
"""
This function gets the main domain name
>>> get_domain_name("https://a.b.c.d/e/f?g=h,i=j#k")
'c.d'
>>> get_domain_name("Not a URL!")
''
"""
return ".".join(get_sub_domain_name(url).split(".")[-2:])
# Get sub domain name (sub.example.com)
def get_sub_domain_name(url: str) -> str:
"""
>>> get_sub_domain_name("https://a.b.c.d/e/f?g=h,i=j#k")
'a.b.c.d'
>>> get_sub_domain_name("Not a URL!")
''
"""
return parse.urlparse(url).netloc
def emails_from_url(url: str = "https://github.com") -> list[str]:
"""
This function takes a URL and returns all the valid email addresses found via its links
"""
# Get the base domain from the url
domain = get_domain_name(url)
# Initialize the parser
parser = Parser(domain)
try:
# Open URL
r = requests.get(url)
# pass the raw HTML to the parser to get links
parser.feed(r.text)
# Get links and loop through
valid_emails = set()
for link in parser.urls:
# open URL.
# read = requests.get(link)
try:
read = requests.get(link)
# Get the valid email.
emails = re.findall("[a-zA-Z0-9]+@" + domain, read.text)
# If not in list then append it.
for email in emails:
valid_emails.add(email)
except ValueError:
pass
except ValueError:
raise SystemExit(1)
# Finally return a sorted list of email addresses with no duplicates.
return sorted(valid_emails)
if __name__ == "__main__":
emails = emails_from_url("https://github.com")
print(f"{len(emails)} emails found:")
print("\n".join(sorted(emails)))
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
Problem Description:
Given a binary tree, return its mirror.
"""
def binary_tree_mirror_dict(binary_tree_mirror_dictionary: dict, root: int):
if not root or root not in binary_tree_mirror_dictionary:
return
left_child, right_child = binary_tree_mirror_dictionary[root][:2]
binary_tree_mirror_dictionary[root] = [right_child, left_child]
binary_tree_mirror_dict(binary_tree_mirror_dictionary, left_child)
binary_tree_mirror_dict(binary_tree_mirror_dictionary, right_child)
def binary_tree_mirror(binary_tree: dict, root: int = 1) -> dict:
"""
>>> binary_tree_mirror({ 1: [2,3], 2: [4,5], 3: [6,7], 7: [8,9]}, 1)
{1: [3, 2], 2: [5, 4], 3: [7, 6], 7: [9, 8]}
>>> binary_tree_mirror({ 1: [2,3], 2: [4,5], 3: [6,7], 4: [10,11]}, 1)
{1: [3, 2], 2: [5, 4], 3: [7, 6], 4: [11, 10]}
>>> binary_tree_mirror({ 1: [2,3], 2: [4,5], 3: [6,7], 4: [10,11]}, 5)
Traceback (most recent call last):
...
ValueError: root 5 is not present in the binary_tree
>>> binary_tree_mirror({}, 5)
Traceback (most recent call last):
...
ValueError: binary tree cannot be empty
"""
if not binary_tree:
raise ValueError("binary tree cannot be empty")
if root not in binary_tree:
raise ValueError(f"root {root} is not present in the binary_tree")
binary_tree_mirror_dictionary = dict(binary_tree)
binary_tree_mirror_dict(binary_tree_mirror_dictionary, root)
return binary_tree_mirror_dictionary
if __name__ == "__main__":
binary_tree = {1: [2, 3], 2: [4, 5], 3: [6, 7], 7: [8, 9]}
print(f"Binary tree: {binary_tree}")
    binary_tree_mirror_dictionary = binary_tree_mirror(binary_tree, 1)
print(f"Binary tree mirror: {binary_tree_mirror_dictionary}")
| """
Problem Description:
Given a binary tree, return its mirror.
"""
def binary_tree_mirror_dict(binary_tree_mirror_dictionary: dict, root: int):
if not root or root not in binary_tree_mirror_dictionary:
return
left_child, right_child = binary_tree_mirror_dictionary[root][:2]
binary_tree_mirror_dictionary[root] = [right_child, left_child]
binary_tree_mirror_dict(binary_tree_mirror_dictionary, left_child)
binary_tree_mirror_dict(binary_tree_mirror_dictionary, right_child)
def binary_tree_mirror(binary_tree: dict, root: int = 1) -> dict:
"""
>>> binary_tree_mirror({ 1: [2,3], 2: [4,5], 3: [6,7], 7: [8,9]}, 1)
{1: [3, 2], 2: [5, 4], 3: [7, 6], 7: [9, 8]}
>>> binary_tree_mirror({ 1: [2,3], 2: [4,5], 3: [6,7], 4: [10,11]}, 1)
{1: [3, 2], 2: [5, 4], 3: [7, 6], 4: [11, 10]}
>>> binary_tree_mirror({ 1: [2,3], 2: [4,5], 3: [6,7], 4: [10,11]}, 5)
Traceback (most recent call last):
...
ValueError: root 5 is not present in the binary_tree
>>> binary_tree_mirror({}, 5)
Traceback (most recent call last):
...
ValueError: binary tree cannot be empty
"""
if not binary_tree:
raise ValueError("binary tree cannot be empty")
if root not in binary_tree:
raise ValueError(f"root {root} is not present in the binary_tree")
binary_tree_mirror_dictionary = dict(binary_tree)
binary_tree_mirror_dict(binary_tree_mirror_dictionary, root)
return binary_tree_mirror_dictionary
if __name__ == "__main__":
binary_tree = {1: [2, 3], 2: [4, 5], 3: [6, 7], 7: [8, 9]}
print(f"Binary tree: {binary_tree}")
    binary_tree_mirror_dictionary = binary_tree_mirror(binary_tree, 1)
print(f"Binary tree mirror: {binary_tree_mirror_dictionary}")
| -1 |
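For contrast with the dictionary-based implementation above, here is a self-contained sketch of the same mirroring idea on a node-based binary tree; it is an illustration, not part of the original file, and the Node class below is defined only for this example.

# Node-based mirror sketch (illustrative only; the file above mirrors a dict).
from __future__ import annotations
from dataclasses import dataclass


@dataclass
class Node:
    value: int
    left: Node | None = None
    right: Node | None = None


def mirror(node: Node | None) -> Node | None:
    # Swap the children recursively; an empty subtree mirrors to itself.
    if node is not None:
        node.left, node.right = mirror(node.right), mirror(node.left)
    return node


root = Node(1, Node(2, Node(4), Node(5)), Node(3))
mirror(root)
print(root.left.value, root.right.value)              # 3 2
print(root.right.left.value, root.right.right.value)  # 5 4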
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
Project Euler Problem 27
https://projecteuler.net/problem=27
Problem Statement:
Euler discovered the remarkable quadratic formula:
n² + n + 41
It turns out that the formula will produce 40 primes for the consecutive values
n = 0 to 39. However, when n = 40, 40² + 40 + 41 = 40(40 + 1) + 41 is divisible
by 41, and certainly when n = 41, 41² + 41 + 41 is clearly divisible by 41.
The incredible formula n² − 79n + 1601 was discovered, which produces 80 primes
for the consecutive values n = 0 to 79. The product of the coefficients, −79 and
1601, is −126479.
Considering quadratics of the form:
n² + an + b, where |a| < 1000 and |b| < 1000
where |n| is the modulus/absolute value of n, e.g. |11| = 11 and |−4| = 4
Find the product of the coefficients, a and b, for the quadratic expression that
produces the maximum number of primes for consecutive values of n, starting with
n = 0.
"""
import math
def is_prime(number: int) -> bool:
"""Checks to see if a number is a prime in O(sqrt(n)).
A number is prime if it has exactly two factors: 1 and itself.
Returns boolean representing primality of given number num (i.e., if the
result is true, then the number is indeed prime else it is not).
>>> is_prime(2)
True
>>> is_prime(3)
True
>>> is_prime(27)
False
>>> is_prime(2999)
True
>>> is_prime(0)
False
>>> is_prime(1)
False
>>> is_prime(-10)
False
"""
if 1 < number < 4:
# 2 and 3 are primes
return True
elif number < 2 or number % 2 == 0 or number % 3 == 0:
# Negatives, 0, 1, all even numbers, all multiples of 3 are not primes
return False
    # All prime numbers greater than 3 are of the form 6k ± 1
for i in range(5, int(math.sqrt(number) + 1), 6):
if number % i == 0 or number % (i + 2) == 0:
return False
return True
def solution(a_limit: int = 1000, b_limit: int = 1000) -> int:
"""
>>> solution(1000, 1000)
-59231
>>> solution(200, 1000)
-59231
>>> solution(200, 200)
-4925
>>> solution(-1000, 1000)
0
>>> solution(-1000, -1000)
0
"""
longest = [0, 0, 0] # length, a, b
for a in range((a_limit * -1) + 1, a_limit):
for b in range(2, b_limit):
if is_prime(b):
count = 0
n = 0
while is_prime((n**2) + (a * n) + b):
count += 1
n += 1
if count > longest[0]:
longest = [count, a, b]
ans = longest[1] * longest[2]
return ans
if __name__ == "__main__":
print(solution(1000, 1000))
| """
Project Euler Problem 27
https://projecteuler.net/problem=27
Problem Statement:
Euler discovered the remarkable quadratic formula:
n² + n + 41
It turns out that the formula will produce 40 primes for the consecutive values
n = 0 to 39. However, when n = 40, 40² + 40 + 41 = 40(40 + 1) + 41 is divisible
by 41, and certainly when n = 41, 41² + 41 + 41 is clearly divisible by 41.
The incredible formula n² − 79n + 1601 was discovered, which produces 80 primes
for the consecutive values n = 0 to 79. The product of the coefficients, −79 and
1601, is −126479.
Considering quadratics of the form:
n² + an + b, where |a| < 1000 and |b| < 1000
where |n| is the modulus/absolute value of n, e.g. |11| = 11 and |−4| = 4
Find the product of the coefficients, a and b, for the quadratic expression that
produces the maximum number of primes for consecutive values of n, starting with
n = 0.
"""
import math
def is_prime(number: int) -> bool:
"""Checks to see if a number is a prime in O(sqrt(n)).
A number is prime if it has exactly two factors: 1 and itself.
Returns boolean representing primality of given number num (i.e., if the
result is true, then the number is indeed prime else it is not).
>>> is_prime(2)
True
>>> is_prime(3)
True
>>> is_prime(27)
False
>>> is_prime(2999)
True
>>> is_prime(0)
False
>>> is_prime(1)
False
>>> is_prime(-10)
False
"""
if 1 < number < 4:
# 2 and 3 are primes
return True
elif number < 2 or number % 2 == 0 or number % 3 == 0:
# Negatives, 0, 1, all even numbers, all multiples of 3 are not primes
return False
    # All prime numbers greater than 3 are of the form 6k ± 1
for i in range(5, int(math.sqrt(number) + 1), 6):
if number % i == 0 or number % (i + 2) == 0:
return False
return True
def solution(a_limit: int = 1000, b_limit: int = 1000) -> int:
"""
>>> solution(1000, 1000)
-59231
>>> solution(200, 1000)
-59231
>>> solution(200, 200)
-4925
>>> solution(-1000, 1000)
0
>>> solution(-1000, -1000)
0
"""
longest = [0, 0, 0] # length, a, b
for a in range((a_limit * -1) + 1, a_limit):
for b in range(2, b_limit):
if is_prime(b):
count = 0
n = 0
while is_prime((n**2) + (a * n) + b):
count += 1
n += 1
if count > longest[0]:
longest = [count, a, b]
ans = longest[1] * longest[2]
return ans
if __name__ == "__main__":
print(solution(1000, 1000))
| -1 |
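A quick, self-contained sanity check of the claim in the problem statement: n² + n + 41 is prime for n = 0 to 39 and composite at n = 40. The tiny trial-division primality test below is defined just for this sketch.

# Self-contained check of the 40-consecutive-primes claim for n^2 + n + 41.
def is_prime_simple(number: int) -> bool:
    if number < 2:
        return False
    return all(number % divisor for divisor in range(2, int(number**0.5) + 1))


n = 0
while is_prime_simple(n * n + n + 41):
    n += 1
print(n)  # 40: the first composite value, 40^2 + 40 + 41 = 41^2, occurs at n = 40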
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """ Problem Statement (Digit Fifth Powers): https://projecteuler.net/problem=30
Surprisingly there are only three numbers that can be written as the sum of fourth
powers of their digits:
1634 = 1^4 + 6^4 + 3^4 + 4^4
8208 = 8^4 + 2^4 + 0^4 + 8^4
9474 = 9^4 + 4^4 + 7^4 + 4^4
As 1 = 1^4 is not a sum it is not included.
The sum of these numbers is 1634 + 8208 + 9474 = 19316.
Find the sum of all the numbers that can be written as the sum of fifth powers of their
digits.
9^5 = 59049
59049 * 7 = 413343 (which is only a 6-digit number),
so any number with 7 or more digits (greater than 999999) is rejected.
Also, 59049 * 3 = 177147 already exceeds the largest 3-digit number,
so the search is restricted to numbers greater than 999,
and hence to numbers between 1000 and 1000000
"""
DIGITS_FIFTH_POWER = {str(digit): digit**5 for digit in range(10)}
def digits_fifth_powers_sum(number: int) -> int:
"""
>>> digits_fifth_powers_sum(1234)
1300
"""
return sum(DIGITS_FIFTH_POWER[digit] for digit in str(number))
def solution() -> int:
return sum(
number
for number in range(1000, 1000000)
if number == digits_fifth_powers_sum(number)
)
if __name__ == "__main__":
print(solution())
| """ Problem Statement (Digit Fifth Powers): https://projecteuler.net/problem=30
Surprisingly there are only three numbers that can be written as the sum of fourth
powers of their digits:
1634 = 1^4 + 6^4 + 3^4 + 4^4
8208 = 8^4 + 2^4 + 0^4 + 8^4
9474 = 9^4 + 4^4 + 7^4 + 4^4
As 1 = 1^4 is not a sum it is not included.
The sum of these numbers is 1634 + 8208 + 9474 = 19316.
Find the sum of all the numbers that can be written as the sum of fifth powers of their
digits.
9^5 = 59049
59049 * 7 = 413343 (which is only a 6-digit number),
so any number with 7 or more digits (greater than 999999) is rejected.
Also, 59049 * 3 = 177147 already exceeds the largest 3-digit number,
so the search is restricted to numbers greater than 999,
and hence to numbers between 1000 and 1000000
"""
DIGITS_FIFTH_POWER = {str(digit): digit**5 for digit in range(10)}
def digits_fifth_powers_sum(number: int) -> int:
"""
>>> digits_fifth_powers_sum(1234)
1300
"""
return sum(DIGITS_FIFTH_POWER[digit] for digit in str(number))
def solution() -> int:
return sum(
number
for number in range(1000, 1000000)
if number == digits_fifth_powers_sum(number)
)
if __name__ == "__main__":
print(solution())
| -1 |
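The same technique can be cross-checked against the fourth-power example quoted in the problem statement; this self-contained sketch should reproduce the stated total of 19316.

# Cross-check with the fourth-power case from the problem statement.
DIGITS_FOURTH_POWER = {str(digit): digit**4 for digit in range(10)}


def digits_fourth_powers_sum(number: int) -> int:
    return sum(DIGITS_FOURTH_POWER[digit] for digit in str(number))


# 5 * 9^4 = 32805, so no solution can have more than 5 digits.
matches = [n for n in range(10, 100000) if n == digits_fourth_powers_sum(n)]
print(matches)       # [1634, 8208, 9474]
print(sum(matches))  # 19316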
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| from __future__ import annotations
from typing import Generic, TypeVar
T = TypeVar("T")
class DisjointSetTreeNode(Generic[T]):
# Disjoint Set Node to store the parent and rank
def __init__(self, data: T) -> None:
self.data = data
self.parent = self
self.rank = 0
class DisjointSetTree(Generic[T]):
# Disjoint Set DataStructure
def __init__(self) -> None:
# map from node name to the node object
self.map: dict[T, DisjointSetTreeNode[T]] = {}
def make_set(self, data: T) -> None:
# create a new set with x as its member
self.map[data] = DisjointSetTreeNode(data)
def find_set(self, data: T) -> DisjointSetTreeNode[T]:
# find the set x belongs to (with path-compression)
elem_ref = self.map[data]
if elem_ref != elem_ref.parent:
elem_ref.parent = self.find_set(elem_ref.parent.data)
return elem_ref.parent
def link(
self, node1: DisjointSetTreeNode[T], node2: DisjointSetTreeNode[T]
) -> None:
# helper function for union operation
if node1.rank > node2.rank:
node2.parent = node1
else:
node1.parent = node2
if node1.rank == node2.rank:
node2.rank += 1
def union(self, data1: T, data2: T) -> None:
# merge 2 disjoint sets
self.link(self.find_set(data1), self.find_set(data2))
class GraphUndirectedWeighted(Generic[T]):
def __init__(self) -> None:
# connections: map from the node to the neighbouring nodes (with weights)
self.connections: dict[T, dict[T, int]] = {}
def add_node(self, node: T) -> None:
        # add a node ONLY if it's not already present in the graph
if node not in self.connections:
self.connections[node] = {}
def add_edge(self, node1: T, node2: T, weight: int) -> None:
# add an edge with the given weight
self.add_node(node1)
self.add_node(node2)
self.connections[node1][node2] = weight
self.connections[node2][node1] = weight
def kruskal(self) -> GraphUndirectedWeighted[T]:
# Kruskal's Algorithm to generate a Minimum Spanning Tree (MST) of a graph
"""
Details: https://en.wikipedia.org/wiki/Kruskal%27s_algorithm
Example:
>>> g1 = GraphUndirectedWeighted[int]()
>>> g1.add_edge(1, 2, 1)
>>> g1.add_edge(2, 3, 2)
>>> g1.add_edge(3, 4, 1)
>>> g1.add_edge(3, 5, 100) # Removed in MST
>>> g1.add_edge(4, 5, 5)
>>> assert 5 in g1.connections[3]
>>> mst = g1.kruskal()
>>> assert 5 not in mst.connections[3]
>>> g2 = GraphUndirectedWeighted[str]()
>>> g2.add_edge('A', 'B', 1)
>>> g2.add_edge('B', 'C', 2)
>>> g2.add_edge('C', 'D', 1)
>>> g2.add_edge('C', 'E', 100) # Removed in MST
>>> g2.add_edge('D', 'E', 5)
>>> assert 'E' in g2.connections["C"]
>>> mst = g2.kruskal()
>>> assert 'E' not in mst.connections['C']
"""
# getting the edges in ascending order of weights
edges = []
seen = set()
for start in self.connections:
for end in self.connections[start]:
if (start, end) not in seen:
seen.add((end, start))
edges.append((start, end, self.connections[start][end]))
edges.sort(key=lambda x: x[2])
# creating the disjoint set
disjoint_set = DisjointSetTree[T]()
for node in self.connections:
disjoint_set.make_set(node)
# MST generation
num_edges = 0
index = 0
graph = GraphUndirectedWeighted[T]()
while num_edges < len(self.connections) - 1:
u, v, w = edges[index]
index += 1
parent_u = disjoint_set.find_set(u)
parent_v = disjoint_set.find_set(v)
if parent_u != parent_v:
num_edges += 1
graph.add_edge(u, v, w)
disjoint_set.union(u, v)
return graph
| from __future__ import annotations
from typing import Generic, TypeVar
T = TypeVar("T")
class DisjointSetTreeNode(Generic[T]):
# Disjoint Set Node to store the parent and rank
def __init__(self, data: T) -> None:
self.data = data
self.parent = self
self.rank = 0
class DisjointSetTree(Generic[T]):
# Disjoint Set DataStructure
def __init__(self) -> None:
# map from node name to the node object
self.map: dict[T, DisjointSetTreeNode[T]] = {}
def make_set(self, data: T) -> None:
# create a new set with x as its member
self.map[data] = DisjointSetTreeNode(data)
def find_set(self, data: T) -> DisjointSetTreeNode[T]:
# find the set x belongs to (with path-compression)
elem_ref = self.map[data]
if elem_ref != elem_ref.parent:
elem_ref.parent = self.find_set(elem_ref.parent.data)
return elem_ref.parent
def link(
self, node1: DisjointSetTreeNode[T], node2: DisjointSetTreeNode[T]
) -> None:
# helper function for union operation
if node1.rank > node2.rank:
node2.parent = node1
else:
node1.parent = node2
if node1.rank == node2.rank:
node2.rank += 1
def union(self, data1: T, data2: T) -> None:
# merge 2 disjoint sets
self.link(self.find_set(data1), self.find_set(data2))
class GraphUndirectedWeighted(Generic[T]):
def __init__(self) -> None:
# connections: map from the node to the neighbouring nodes (with weights)
self.connections: dict[T, dict[T, int]] = {}
def add_node(self, node: T) -> None:
        # add a node ONLY if it's not already present in the graph
if node not in self.connections:
self.connections[node] = {}
def add_edge(self, node1: T, node2: T, weight: int) -> None:
# add an edge with the given weight
self.add_node(node1)
self.add_node(node2)
self.connections[node1][node2] = weight
self.connections[node2][node1] = weight
def kruskal(self) -> GraphUndirectedWeighted[T]:
# Kruskal's Algorithm to generate a Minimum Spanning Tree (MST) of a graph
"""
Details: https://en.wikipedia.org/wiki/Kruskal%27s_algorithm
Example:
>>> g1 = GraphUndirectedWeighted[int]()
>>> g1.add_edge(1, 2, 1)
>>> g1.add_edge(2, 3, 2)
>>> g1.add_edge(3, 4, 1)
>>> g1.add_edge(3, 5, 100) # Removed in MST
>>> g1.add_edge(4, 5, 5)
>>> assert 5 in g1.connections[3]
>>> mst = g1.kruskal()
>>> assert 5 not in mst.connections[3]
>>> g2 = GraphUndirectedWeighted[str]()
>>> g2.add_edge('A', 'B', 1)
>>> g2.add_edge('B', 'C', 2)
>>> g2.add_edge('C', 'D', 1)
>>> g2.add_edge('C', 'E', 100) # Removed in MST
>>> g2.add_edge('D', 'E', 5)
>>> assert 'E' in g2.connections["C"]
>>> mst = g2.kruskal()
>>> assert 'E' not in mst.connections['C']
"""
# getting the edges in ascending order of weights
edges = []
seen = set()
for start in self.connections:
for end in self.connections[start]:
if (start, end) not in seen:
seen.add((end, start))
edges.append((start, end, self.connections[start][end]))
edges.sort(key=lambda x: x[2])
# creating the disjoint set
disjoint_set = DisjointSetTree[T]()
for node in self.connections:
disjoint_set.make_set(node)
# MST generation
num_edges = 0
index = 0
graph = GraphUndirectedWeighted[T]()
while num_edges < len(self.connections) - 1:
u, v, w = edges[index]
index += 1
parent_u = disjoint_set.find_set(u)
parent_v = disjoint_set.find_set(v)
if parent_u != parent_v:
num_edges += 1
graph.add_edge(u, v, w)
disjoint_set.union(u, v)
return graph
| -1 |
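A small usage sketch, assuming GraphUndirectedWeighted from the code above is in scope; it builds a four-node weighted graph and prints the minimum spanning tree's edges and total weight.

# Usage sketch; assumes GraphUndirectedWeighted above is in scope.
g = GraphUndirectedWeighted[str]()
for u, v, w in [("A", "B", 1), ("B", "C", 4), ("A", "C", 2), ("C", "D", 3), ("B", "D", 7)]:
    g.add_edge(u, v, w)

mst = g.kruskal()
edges = {
    tuple(sorted((u, v))): w
    for u in mst.connections
    for v, w in mst.connections[u].items()
}
print(edges)                # {('A', 'B'): 1, ('A', 'C'): 2, ('C', 'D'): 3}
print(sum(edges.values()))  # 6, the total weight of the MST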
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| def binary_search(array: list, lower_bound: int, upper_bound: int, value: int) -> int:
"""
    This function carries out binary search on a 1d array and
    returns -1 if the value does not exist
array: A 1d sorted array
value : the value meant to be searched
>>> matrix = [1, 4, 7, 11, 15]
>>> binary_search(matrix, 0, len(matrix) - 1, 1)
0
>>> binary_search(matrix, 0, len(matrix) - 1, 23)
-1
"""
r = int((lower_bound + upper_bound) // 2)
if array[r] == value:
return r
if lower_bound >= upper_bound:
return -1
if array[r] < value:
return binary_search(array, r + 1, upper_bound, value)
else:
return binary_search(array, lower_bound, r - 1, value)
def mat_bin_search(value: int, matrix: list) -> list:
"""
    This function loops over a 2d matrix and calls binary_search on
    the selected 1d array and returns [-1, -1] if the value does not exist
value : value meant to be searched
matrix = a sorted 2d matrix
>>> matrix = [[1, 4, 7, 11, 15],
... [2, 5, 8, 12, 19],
... [3, 6, 9, 16, 22],
... [10, 13, 14, 17, 24],
... [18, 21, 23, 26, 30]]
>>> target = 1
>>> mat_bin_search(target, matrix)
[0, 0]
>>> target = 34
>>> mat_bin_search(target, matrix)
[-1, -1]
"""
index = 0
if matrix[index][0] == value:
return [index, 0]
while index < len(matrix) and matrix[index][0] < value:
r = binary_search(matrix[index], 0, len(matrix[index]) - 1, value)
if r != -1:
return [index, r]
index += 1
return [-1, -1]
if __name__ == "__main__":
import doctest
doctest.testmod()
| def binary_search(array: list, lower_bound: int, upper_bound: int, value: int) -> int:
"""
    This function carries out binary search on a 1d array and
    returns -1 if the value does not exist
array: A 1d sorted array
value : the value meant to be searched
>>> matrix = [1, 4, 7, 11, 15]
>>> binary_search(matrix, 0, len(matrix) - 1, 1)
0
>>> binary_search(matrix, 0, len(matrix) - 1, 23)
-1
"""
r = int((lower_bound + upper_bound) // 2)
if array[r] == value:
return r
if lower_bound >= upper_bound:
return -1
if array[r] < value:
return binary_search(array, r + 1, upper_bound, value)
else:
return binary_search(array, lower_bound, r - 1, value)
def mat_bin_search(value: int, matrix: list) -> list:
"""
    This function loops over a 2d matrix and calls binary_search on
    the selected 1d array and returns [-1, -1] if the value does not exist
value : value meant to be searched
matrix = a sorted 2d matrix
>>> matrix = [[1, 4, 7, 11, 15],
... [2, 5, 8, 12, 19],
... [3, 6, 9, 16, 22],
... [10, 13, 14, 17, 24],
... [18, 21, 23, 26, 30]]
>>> target = 1
>>> mat_bin_search(target, matrix)
[0, 0]
>>> target = 34
>>> mat_bin_search(target, matrix)
[-1, -1]
"""
index = 0
if matrix[index][0] == value:
return [index, 0]
while index < len(matrix) and matrix[index][0] < value:
r = binary_search(matrix[index], 0, len(matrix[index]) - 1, value)
if r != -1:
return [index, r]
index += 1
return [-1, -1]
if __name__ == "__main__":
import doctest
doctest.testmod()
| -1 |
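A short usage sketch, assuming binary_search and mat_bin_search above are in scope; because each candidate row is scanned with a binary search, the overall cost is O(m log n) for an m x n matrix.

# Usage sketch; assumes binary_search and mat_bin_search above are in scope.
grid = [
    [1, 4, 7, 11],
    [2, 5, 8, 12],
    [3, 6, 9, 16],
]
for target in (5, 16, 10):
    print(target, mat_bin_search(target, grid))
# 5 [1, 1]
# 16 [2, 3]
# 10 [-1, -1]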
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| #!/usr/bin/python
""" Author: OMKAR PATHAK """
from __future__ import annotations
from queue import Queue
class Graph:
def __init__(self) -> None:
self.vertices: dict[int, list[int]] = {}
def print_graph(self) -> None:
"""
        prints adjacency list representation of graph
>>> g = Graph()
>>> g.print_graph()
>>> g.add_edge(0, 1)
>>> g.print_graph()
0 : 1
"""
for i in self.vertices:
print(i, " : ", " -> ".join([str(j) for j in self.vertices[i]]))
def add_edge(self, from_vertex: int, to_vertex: int) -> None:
"""
adding the edge between two vertices
>>> g = Graph()
>>> g.print_graph()
>>> g.add_edge(0, 1)
>>> g.print_graph()
0 : 1
"""
if from_vertex in self.vertices:
self.vertices[from_vertex].append(to_vertex)
else:
self.vertices[from_vertex] = [to_vertex]
def bfs(self, start_vertex: int) -> set[int]:
"""
>>> g = Graph()
>>> g.add_edge(0, 1)
>>> g.add_edge(0, 1)
>>> g.add_edge(0, 2)
>>> g.add_edge(1, 2)
>>> g.add_edge(2, 0)
>>> g.add_edge(2, 3)
>>> g.add_edge(3, 3)
>>> sorted(g.bfs(2))
[0, 1, 2, 3]
"""
# initialize set for storing already visited vertices
visited = set()
# create a first in first out queue to store all the vertices for BFS
queue: Queue = Queue()
# mark the source node as visited and enqueue it
visited.add(start_vertex)
queue.put(start_vertex)
while not queue.empty():
vertex = queue.get()
            # loop through all adjacent vertices and enqueue each one not yet visited
for adjacent_vertex in self.vertices[vertex]:
if adjacent_vertex not in visited:
queue.put(adjacent_vertex)
visited.add(adjacent_vertex)
return visited
if __name__ == "__main__":
from doctest import testmod
testmod(verbose=True)
g = Graph()
g.add_edge(0, 1)
g.add_edge(0, 2)
g.add_edge(1, 2)
g.add_edge(2, 0)
g.add_edge(2, 3)
g.add_edge(3, 3)
g.print_graph()
# 0 : 1 -> 2
# 1 : 2
# 2 : 0 -> 3
# 3 : 3
assert sorted(g.bfs(2)) == [0, 1, 2, 3]
| #!/usr/bin/python
""" Author: OMKAR PATHAK """
from __future__ import annotations
from queue import Queue
class Graph:
def __init__(self) -> None:
self.vertices: dict[int, list[int]] = {}
def print_graph(self) -> None:
"""
        prints adjacency list representation of graph
>>> g = Graph()
>>> g.print_graph()
>>> g.add_edge(0, 1)
>>> g.print_graph()
0 : 1
"""
for i in self.vertices:
print(i, " : ", " -> ".join([str(j) for j in self.vertices[i]]))
def add_edge(self, from_vertex: int, to_vertex: int) -> None:
"""
adding the edge between two vertices
>>> g = Graph()
>>> g.print_graph()
>>> g.add_edge(0, 1)
>>> g.print_graph()
0 : 1
"""
if from_vertex in self.vertices:
self.vertices[from_vertex].append(to_vertex)
else:
self.vertices[from_vertex] = [to_vertex]
def bfs(self, start_vertex: int) -> set[int]:
"""
>>> g = Graph()
>>> g.add_edge(0, 1)
>>> g.add_edge(0, 1)
>>> g.add_edge(0, 2)
>>> g.add_edge(1, 2)
>>> g.add_edge(2, 0)
>>> g.add_edge(2, 3)
>>> g.add_edge(3, 3)
>>> sorted(g.bfs(2))
[0, 1, 2, 3]
"""
# initialize set for storing already visited vertices
visited = set()
# create a first in first out queue to store all the vertices for BFS
queue: Queue = Queue()
# mark the source node as visited and enqueue it
visited.add(start_vertex)
queue.put(start_vertex)
while not queue.empty():
vertex = queue.get()
            # loop through all adjacent vertices and enqueue each one not yet visited
for adjacent_vertex in self.vertices[vertex]:
if adjacent_vertex not in visited:
queue.put(adjacent_vertex)
visited.add(adjacent_vertex)
return visited
if __name__ == "__main__":
from doctest import testmod
testmod(verbose=True)
g = Graph()
g.add_edge(0, 1)
g.add_edge(0, 2)
g.add_edge(1, 2)
g.add_edge(2, 0)
g.add_edge(2, 3)
g.add_edge(3, 3)
g.print_graph()
# 0 : 1 -> 2
# 1 : 2
# 2 : 0 -> 3
# 3 : 3
assert sorted(g.bfs(2)) == [0, 1, 2, 3]
| -1 |
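The same queue-based traversal can also return shortest hop counts; the sketch below is self-contained (it uses a plain adjacency dict rather than the Graph class above) and the example graph is illustrative only.

# Self-contained sketch: BFS shortest hop counts on an adjacency dict.
from collections import deque


def bfs_distances(adjacency: dict[int, list[int]], source: int) -> dict[int, int]:
    distances = {source: 0}
    queue = deque([source])
    while queue:
        vertex = queue.popleft()
        for neighbour in adjacency.get(vertex, []):
            if neighbour not in distances:  # first visit gives the shortest hop count
                distances[neighbour] = distances[vertex] + 1
                queue.append(neighbour)
    return distances


adjacency = {0: [1, 2], 1: [2], 2: [0, 3], 3: [3]}
print(bfs_distances(adjacency, 2))  # {2: 0, 0: 1, 3: 1, 1: 2}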
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| import math
def rearrange(bit_string_32):
"""[summary]
Regroups the given binary string.
Arguments:
bitString32 {[string]} -- [32 bit binary]
Raises:
ValueError -- [if the given string not are 32 bit binary string]
Returns:
[string] -- [32 bit binary string]
>>> rearrange('1234567890abcdfghijklmnopqrstuvw')
'pqrstuvwhijklmno90abcdfg12345678'
"""
if len(bit_string_32) != 32:
raise ValueError("Need length 32")
new_string = ""
for i in [3, 2, 1, 0]:
new_string += bit_string_32[8 * i : 8 * i + 8]
return new_string
def reformat_hex(i):
"""[summary]
    Converts the given integer into an 8-digit hex string with the byte order reversed.
Arguments:
i {[int]} -- [integer]
>>> reformat_hex(666)
'9a020000'
"""
hexrep = format(i, "08x")
thing = ""
for i in [3, 2, 1, 0]:
thing += hexrep[2 * i : 2 * i + 2]
return thing
def pad(bit_string):
"""[summary]
    Pads the binary string so that its length is a multiple of 512 bits
Arguments:
bitString {[string]} -- [binary string]
Returns:
[string] -- [binary string]
"""
start_length = len(bit_string)
bit_string += "1"
while len(bit_string) % 512 != 448:
bit_string += "0"
last_part = format(start_length, "064b")
bit_string += rearrange(last_part[32:]) + rearrange(last_part[:32])
return bit_string
def get_block(bit_string):
"""[summary]
Iterator:
Returns by each call a list of length 16 with the 32 bit
integer blocks.
Arguments:
bit_string {[string]} -- [binary string >= 512]
"""
curr_pos = 0
while curr_pos < len(bit_string):
curr_part = bit_string[curr_pos : curr_pos + 512]
my_splits = []
for i in range(16):
my_splits.append(int(rearrange(curr_part[32 * i : 32 * i + 32]), 2))
yield my_splits
curr_pos += 512
def not32(i):
"""
>>> not32(34)
4294967261
"""
i_str = format(i, "032b")
new_str = ""
for c in i_str:
new_str += "1" if c == "0" else "0"
return int(new_str, 2)
def sum32(a, b):
return (a + b) % 2**32
def leftrot32(i, s):
return (i << s) ^ (i >> (32 - s))
def md5me(test_string):
"""[summary]
    Returns the 128-bit MD5 hash of the string 'testString' as a 32-character hex string
Arguments:
testString {[string]} -- [message]
"""
bs = ""
for i in test_string:
bs += format(ord(i), "08b")
bs = pad(bs)
tvals = [int(2**32 * abs(math.sin(i + 1))) for i in range(64)]
a0 = 0x67452301
b0 = 0xEFCDAB89
c0 = 0x98BADCFE
d0 = 0x10325476
s = [
7,
12,
17,
22,
7,
12,
17,
22,
7,
12,
17,
22,
7,
12,
17,
22,
5,
9,
14,
20,
5,
9,
14,
20,
5,
9,
14,
20,
5,
9,
14,
20,
4,
11,
16,
23,
4,
11,
16,
23,
4,
11,
16,
23,
4,
11,
16,
23,
6,
10,
15,
21,
6,
10,
15,
21,
6,
10,
15,
21,
6,
10,
15,
21,
]
for m in get_block(bs):
a = a0
b = b0
c = c0
d = d0
for i in range(64):
if i <= 15:
# f = (B & C) | (not32(B) & D)
f = d ^ (b & (c ^ d))
g = i
elif i <= 31:
# f = (D & B) | (not32(D) & C)
f = c ^ (d & (b ^ c))
g = (5 * i + 1) % 16
elif i <= 47:
f = b ^ c ^ d
g = (3 * i + 5) % 16
else:
f = c ^ (b | not32(d))
g = (7 * i) % 16
dtemp = d
d = c
c = b
b = sum32(b, leftrot32((a + f + tvals[i] + m[g]) % 2**32, s[i]))
a = dtemp
a0 = sum32(a0, a)
b0 = sum32(b0, b)
c0 = sum32(c0, c)
d0 = sum32(d0, d)
digest = reformat_hex(a0) + reformat_hex(b0) + reformat_hex(c0) + reformat_hex(d0)
return digest
def test():
assert md5me("") == "d41d8cd98f00b204e9800998ecf8427e"
assert (
md5me("The quick brown fox jumps over the lazy dog")
== "9e107d9d372bb6826bd81d3542a419d6"
)
print("Success.")
if __name__ == "__main__":
test()
import doctest
doctest.testmod()
| import math
def rearrange(bit_string_32):
"""[summary]
Regroups the given binary string.
Arguments:
bitString32 {[string]} -- [32 bit binary]
Raises:
ValueError -- [if the given string not are 32 bit binary string]
Returns:
[string] -- [32 bit binary string]
>>> rearrange('1234567890abcdfghijklmnopqrstuvw')
'pqrstuvwhijklmno90abcdfg12345678'
"""
if len(bit_string_32) != 32:
raise ValueError("Need length 32")
new_string = ""
for i in [3, 2, 1, 0]:
new_string += bit_string_32[8 * i : 8 * i + 8]
return new_string
def reformat_hex(i):
"""[summary]
    Converts the given integer into an 8-digit hex string with the byte order reversed.
Arguments:
i {[int]} -- [integer]
>>> reformat_hex(666)
'9a020000'
"""
hexrep = format(i, "08x")
thing = ""
for i in [3, 2, 1, 0]:
thing += hexrep[2 * i : 2 * i + 2]
return thing
def pad(bit_string):
"""[summary]
    Pads the binary string so that its length is a multiple of 512 bits
Arguments:
bitString {[string]} -- [binary string]
Returns:
[string] -- [binary string]
"""
start_length = len(bit_string)
bit_string += "1"
while len(bit_string) % 512 != 448:
bit_string += "0"
last_part = format(start_length, "064b")
bit_string += rearrange(last_part[32:]) + rearrange(last_part[:32])
return bit_string
def get_block(bit_string):
"""[summary]
Iterator:
Returns by each call a list of length 16 with the 32 bit
integer blocks.
Arguments:
bit_string {[string]} -- [binary string >= 512]
"""
curr_pos = 0
while curr_pos < len(bit_string):
curr_part = bit_string[curr_pos : curr_pos + 512]
my_splits = []
for i in range(16):
my_splits.append(int(rearrange(curr_part[32 * i : 32 * i + 32]), 2))
yield my_splits
curr_pos += 512
def not32(i):
"""
>>> not32(34)
4294967261
"""
i_str = format(i, "032b")
new_str = ""
for c in i_str:
new_str += "1" if c == "0" else "0"
return int(new_str, 2)
def sum32(a, b):
return (a + b) % 2**32
def leftrot32(i, s):
return (i << s) ^ (i >> (32 - s))
def md5me(test_string):
"""[summary]
    Returns the 128-bit MD5 hash of the string 'testString' as a 32-character hex string
Arguments:
testString {[string]} -- [message]
"""
bs = ""
for i in test_string:
bs += format(ord(i), "08b")
bs = pad(bs)
tvals = [int(2**32 * abs(math.sin(i + 1))) for i in range(64)]
a0 = 0x67452301
b0 = 0xEFCDAB89
c0 = 0x98BADCFE
d0 = 0x10325476
s = [
7,
12,
17,
22,
7,
12,
17,
22,
7,
12,
17,
22,
7,
12,
17,
22,
5,
9,
14,
20,
5,
9,
14,
20,
5,
9,
14,
20,
5,
9,
14,
20,
4,
11,
16,
23,
4,
11,
16,
23,
4,
11,
16,
23,
4,
11,
16,
23,
6,
10,
15,
21,
6,
10,
15,
21,
6,
10,
15,
21,
6,
10,
15,
21,
]
for m in get_block(bs):
a = a0
b = b0
c = c0
d = d0
for i in range(64):
if i <= 15:
# f = (B & C) | (not32(B) & D)
f = d ^ (b & (c ^ d))
g = i
elif i <= 31:
# f = (D & B) | (not32(D) & C)
f = c ^ (d & (b ^ c))
g = (5 * i + 1) % 16
elif i <= 47:
f = b ^ c ^ d
g = (3 * i + 5) % 16
else:
f = c ^ (b | not32(d))
g = (7 * i) % 16
dtemp = d
d = c
c = b
b = sum32(b, leftrot32((a + f + tvals[i] + m[g]) % 2**32, s[i]))
a = dtemp
a0 = sum32(a0, a)
b0 = sum32(b0, b)
c0 = sum32(c0, c)
d0 = sum32(d0, d)
digest = reformat_hex(a0) + reformat_hex(b0) + reformat_hex(c0) + reformat_hex(d0)
return digest
def test():
assert md5me("") == "d41d8cd98f00b204e9800998ecf8427e"
assert (
md5me("The quick brown fox jumps over the lazy dog")
== "9e107d9d372bb6826bd81d3542a419d6"
)
print("Success.")
if __name__ == "__main__":
test()
import doctest
doctest.testmod()
| -1 |
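A quick consistency check of the implementation above against the standard library, assuming md5me is in scope; hashlib.md5 serves as the reference for a few ASCII messages.

# Cross-check md5me (assumed in scope from the code above) against hashlib.
import hashlib

for message in ("", "abc", "The quick brown fox jumps over the lazy dog"):
    assert md5me(message) == hashlib.md5(message.encode()).hexdigest()
    print(repr(message), "->", md5me(message))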
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
Project Euler Problem 64: https://projecteuler.net/problem=64
All square roots are periodic when written as continued fractions.
For example, let us consider sqrt(23).
It can be seen that the sequence is repeating.
For conciseness, we use the notation sqrt(23)=[4;(1,3,1,8)],
to indicate that the block (1,3,1,8) repeats indefinitely.
Exactly four continued fractions, for N<=13, have an odd period.
How many continued fractions for N<=10000 have an odd period?
References:
- https://en.wikipedia.org/wiki/Continued_fraction
"""
from math import floor, sqrt
def continuous_fraction_period(n: int) -> int:
"""
Returns the continued fraction period of a number n.
>>> continuous_fraction_period(2)
1
>>> continuous_fraction_period(5)
1
>>> continuous_fraction_period(7)
4
>>> continuous_fraction_period(11)
2
>>> continuous_fraction_period(13)
5
"""
numerator = 0.0
denominator = 1.0
root = int(sqrt(n))
integer_part = root
period = 0
while integer_part != 2 * root:
numerator = denominator * integer_part - numerator
denominator = (n - numerator**2) / denominator
integer_part = int((root + numerator) / denominator)
period += 1
return period
def solution(n: int = 10000) -> int:
"""
    Returns the count of numbers <= n whose continued fraction period is odd.
This function calls continuous_fraction_period for numbers which are
not perfect squares.
This is checked in if sr - floor(sr) != 0 statement.
If an odd period is returned by continuous_fraction_period,
count_odd_periods is increased by 1.
>>> solution(2)
1
>>> solution(5)
2
>>> solution(7)
2
>>> solution(11)
3
>>> solution(13)
4
"""
count_odd_periods = 0
for i in range(2, n + 1):
sr = sqrt(i)
if sr - floor(sr) != 0 and continuous_fraction_period(i) % 2 == 1:
count_odd_periods += 1
return count_odd_periods
if __name__ == "__main__":
print(f"{solution(int(input().strip()))}")
| """
Project Euler Problem 64: https://projecteuler.net/problem=64
All square roots are periodic when written as continued fractions.
For example, let us consider sqrt(23).
It can be seen that the sequence is repeating.
For conciseness, we use the notation sqrt(23)=[4;(1,3,1,8)],
to indicate that the block (1,3,1,8) repeats indefinitely.
Exactly four continued fractions, for N<=13, have an odd period.
How many continued fractions for N<=10000 have an odd period?
References:
- https://en.wikipedia.org/wiki/Continued_fraction
"""
from math import floor, sqrt
def continuous_fraction_period(n: int) -> int:
"""
Returns the continued fraction period of a number n.
>>> continuous_fraction_period(2)
1
>>> continuous_fraction_period(5)
1
>>> continuous_fraction_period(7)
4
>>> continuous_fraction_period(11)
2
>>> continuous_fraction_period(13)
5
"""
numerator = 0.0
denominator = 1.0
root = int(sqrt(n))
integer_part = root
period = 0
while integer_part != 2 * root:
numerator = denominator * integer_part - numerator
denominator = (n - numerator**2) / denominator
integer_part = int((root + numerator) / denominator)
period += 1
return period
def solution(n: int = 10000) -> int:
"""
    Returns the count of numbers <= n whose continued fraction period is odd.
This function calls continuous_fraction_period for numbers which are
not perfect squares.
This is checked in if sr - floor(sr) != 0 statement.
If an odd period is returned by continuous_fraction_period,
count_odd_periods is increased by 1.
>>> solution(2)
1
>>> solution(5)
2
>>> solution(7)
2
>>> solution(11)
3
>>> solution(13)
4
"""
count_odd_periods = 0
for i in range(2, n + 1):
sr = sqrt(i)
if sr - floor(sr) != 0 and continuous_fraction_period(i) % 2 == 1:
count_odd_periods += 1
return count_odd_periods
if __name__ == "__main__":
print(f"{solution(int(input().strip()))}")
| -1 |
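A self-contained sketch that reproduces the sqrt(23) = [4; (1, 3, 1, 8)] expansion quoted above, using the standard recurrence for the continued fraction of a square root (the same recurrence the period counter relies on).

# Continued fraction coefficients of sqrt(n); for n = 23 the block (1, 3, 1, 8) repeats.
from math import isqrt


def sqrt_continued_fraction(n: int, terms: int = 8) -> list[int]:
    a0 = isqrt(n)
    if a0 * a0 == n:  # perfect square: no periodic part
        return [a0]
    coefficients, m, d, a = [a0], 0, 1, a0
    for _ in range(terms):
        m = d * a - m
        d = (n - m * m) // d
        a = (a0 + m) // d
        coefficients.append(a)
    return coefficients


print(sqrt_continued_fraction(23))  # [4, 1, 3, 1, 8, 1, 3, 1, 8]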
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| from __future__ import annotations
from random import random
class Node:
"""
Treap's node
Treap is a binary tree by value and heap by priority
"""
def __init__(self, value: int | None = None):
self.value = value
self.prior = random()
self.left: Node | None = None
self.right: Node | None = None
def __repr__(self) -> str:
from pprint import pformat
if self.left is None and self.right is None:
return f"'{self.value}: {self.prior:.5}'"
else:
return pformat(
{f"{self.value}: {self.prior:.5}": (self.left, self.right)}, indent=1
)
def __str__(self) -> str:
value = str(self.value) + " "
left = str(self.left or "")
right = str(self.right or "")
return value + left + right
def split(root: Node | None, value: int) -> tuple[Node | None, Node | None]:
"""
    We split the current tree by a value into two trees:
    the left tree contains all values less than or equal to the split value,
    the right tree contains all values strictly greater than the split value
"""
if root is None: # None tree is split into 2 Nones
return None, None
elif root.value is None:
return None, None
else:
if value < root.value:
"""
Right tree's root will be current node.
            Now we split (with the same value) the current node's left son
Left tree: left part of that split
Right tree's left son: right part of that split
"""
left, root.left = split(root.left, value)
return left, root
else:
"""
Just symmetric to previous case
"""
root.right, right = split(root.right, value)
return root, right
def merge(left: Node | None, right: Node | None) -> Node | None:
"""
We merge 2 trees into one.
Note: all left tree's values must be less than all right tree's
"""
if (not left) or (not right): # If one node is None, return the other
return left or right
elif left.prior < right.prior:
"""
        Left will be the root because its priority value is smaller (min-heap by priority)
Now we need to merge left's right son and right tree
"""
left.right = merge(left.right, right)
return left
else:
"""
Symmetric as well
"""
right.left = merge(left, right.left)
return right
def insert(root: Node | None, value: int) -> Node | None:
"""
Insert element
Split current tree with a value into left, right,
Insert new node into the middle
Merge left, node, right into root
"""
node = Node(value)
left, right = split(root, value)
return merge(merge(left, node), right)
def erase(root: Node | None, value: int) -> Node | None:
"""
    Erase element
    Split all nodes with values less than the given value into left,
    split all nodes with values greater than it into right,
    then merge left and right, dropping every node equal to the value
"""
left, right = split(root, value - 1)
_, right = split(right, value)
return merge(left, right)
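# Illustrative aside (not part of the original module): a quick demo of the
# insert()/erase() functions defined above, using throwaway names; erase(root, 4)
# drops every node holding 4.
_demo_root = None
for _demo_value in (1, 4, 4, 7):
    _demo_root = insert(_demo_root, _demo_value)
_demo_root = erase(_demo_root, 4)
# inorder(_demo_root) would now print: 1,7,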
def inorder(root: Node | None) -> None:
"""
Just recursive print of a tree
"""
if not root: # None
return
else:
inorder(root.left)
print(root.value, end=",")
inorder(root.right)
def interact_treap(root: Node | None, args: str) -> Node | None:
"""
Commands:
+ value to add value into treap
- value to erase all nodes with value
>>> root = interact_treap(None, "+1")
>>> inorder(root)
1,
>>> root = interact_treap(root, "+3 +5 +17 +19 +2 +16 +4 +0")
>>> inorder(root)
0,1,2,3,4,5,16,17,19,
>>> root = interact_treap(root, "+4 +4 +4")
>>> inorder(root)
0,1,2,3,4,4,4,4,5,16,17,19,
>>> root = interact_treap(root, "-0")
>>> inorder(root)
1,2,3,4,4,4,4,5,16,17,19,
>>> root = interact_treap(root, "-4")
>>> inorder(root)
1,2,3,5,16,17,19,
>>> root = interact_treap(root, "=0")
Unknown command
"""
for arg in args.split():
if arg[0] == "+":
root = insert(root, int(arg[1:]))
elif arg[0] == "-":
root = erase(root, int(arg[1:]))
else:
print("Unknown command")
return root
def main() -> None:
"""After each command, program prints treap"""
root = None
print(
"enter numbers to create a tree, + value to add value into treap, "
"- value to erase all nodes with value. 'q' to quit. "
)
args = input()
while args != "q":
root = interact_treap(root, args)
print(root)
args = input()
print("good by!")
if __name__ == "__main__":
import doctest
doctest.testmod()
main()
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8
| # Author: Phyllipe Bezerra (https://github.com/pmba)
clothes = {
0: "underwear",
1: "pants",
2: "belt",
3: "suit",
4: "shoe",
5: "socks",
6: "shirt",
7: "tie",
8: "watch",
}
graph = [[1, 4], [2, 4], [3], [], [], [4], [2, 7], [3], []]
visited = [0 for x in range(len(graph))]
stack = []
def print_stack(stack, clothes):
order = 1
while stack:
current_clothing = stack.pop()
print(order, clothes[current_clothing])
order += 1
def depth_first_search(u, visited, graph):
visited[u] = 1
for v in graph[u]:
if not visited[v]:
depth_first_search(v, visited, graph)
stack.append(u)
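# Added note: depth_first_search appends u only after all of its descendants
# have been visited, so the module-level `stack` ends up in DFS post-order;
# popping it in print_stack therefore yields a valid dressing (topological) order.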
def topological_sort(graph, visited):
for v in range(len(graph)):
if not visited[v]:
depth_first_search(v, visited, graph)
if __name__ == "__main__":
topological_sort(graph, visited)
print(stack)
print_stack(stack, clothes)
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8
| from typing import Any
def viterbi(
observations_space: list,
states_space: list,
initial_probabilities: dict,
transition_probabilities: dict,
emission_probabilities: dict,
) -> list:
"""
    Viterbi Algorithm: finds the most likely sequence of hidden states
    given a sequence of observations.
    https://en.wikipedia.org/wiki/Viterbi_algorithm
Wikipedia example
>>> observations = ["normal", "cold", "dizzy"]
>>> states = ["Healthy", "Fever"]
>>> start_p = {"Healthy": 0.6, "Fever": 0.4}
>>> trans_p = {
... "Healthy": {"Healthy": 0.7, "Fever": 0.3},
... "Fever": {"Healthy": 0.4, "Fever": 0.6},
... }
>>> emit_p = {
... "Healthy": {"normal": 0.5, "cold": 0.4, "dizzy": 0.1},
... "Fever": {"normal": 0.1, "cold": 0.3, "dizzy": 0.6},
... }
>>> viterbi(observations, states, start_p, trans_p, emit_p)
['Healthy', 'Healthy', 'Fever']
>>> viterbi((), states, start_p, trans_p, emit_p)
Traceback (most recent call last):
...
ValueError: There's an empty parameter
>>> viterbi(observations, (), start_p, trans_p, emit_p)
Traceback (most recent call last):
...
ValueError: There's an empty parameter
>>> viterbi(observations, states, {}, trans_p, emit_p)
Traceback (most recent call last):
...
ValueError: There's an empty parameter
>>> viterbi(observations, states, start_p, {}, emit_p)
Traceback (most recent call last):
...
ValueError: There's an empty parameter
>>> viterbi(observations, states, start_p, trans_p, {})
Traceback (most recent call last):
...
ValueError: There's an empty parameter
>>> viterbi("invalid", states, start_p, trans_p, emit_p)
Traceback (most recent call last):
...
ValueError: observations_space must be a list
>>> viterbi(["valid", 123], states, start_p, trans_p, emit_p)
Traceback (most recent call last):
...
ValueError: observations_space must be a list of strings
>>> viterbi(observations, "invalid", start_p, trans_p, emit_p)
Traceback (most recent call last):
...
ValueError: states_space must be a list
>>> viterbi(observations, ["valid", 123], start_p, trans_p, emit_p)
Traceback (most recent call last):
...
ValueError: states_space must be a list of strings
>>> viterbi(observations, states, "invalid", trans_p, emit_p)
Traceback (most recent call last):
...
ValueError: initial_probabilities must be a dict
>>> viterbi(observations, states, {2:2}, trans_p, emit_p)
Traceback (most recent call last):
...
ValueError: initial_probabilities all keys must be strings
>>> viterbi(observations, states, {"a":2}, trans_p, emit_p)
Traceback (most recent call last):
...
ValueError: initial_probabilities all values must be float
>>> viterbi(observations, states, start_p, "invalid", emit_p)
Traceback (most recent call last):
...
ValueError: transition_probabilities must be a dict
>>> viterbi(observations, states, start_p, {"a":2}, emit_p)
Traceback (most recent call last):
...
ValueError: transition_probabilities all values must be dict
>>> viterbi(observations, states, start_p, {2:{2:2}}, emit_p)
Traceback (most recent call last):
...
ValueError: transition_probabilities all keys must be strings
>>> viterbi(observations, states, start_p, {"a":{2:2}}, emit_p)
Traceback (most recent call last):
...
ValueError: transition_probabilities all keys must be strings
>>> viterbi(observations, states, start_p, {"a":{"b":2}}, emit_p)
Traceback (most recent call last):
...
ValueError: transition_probabilities nested dictionary all values must be float
>>> viterbi(observations, states, start_p, trans_p, "invalid")
Traceback (most recent call last):
...
ValueError: emission_probabilities must be a dict
>>> viterbi(observations, states, start_p, trans_p, None)
Traceback (most recent call last):
...
ValueError: There's an empty parameter
"""
_validation(
observations_space,
states_space,
initial_probabilities,
transition_probabilities,
emission_probabilities,
)
# Creates data structures and fill initial step
probabilities: dict = {}
pointers: dict = {}
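    # Added note: both tables are keyed by (state, observation value), which
    # assumes the observations in the sequence are distinct; a repeated
    # observation would reuse (and overwrite) the same key.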
for state in states_space:
observation = observations_space[0]
probabilities[(state, observation)] = (
initial_probabilities[state] * emission_probabilities[state][observation]
)
pointers[(state, observation)] = None
# Fills the data structure with the probabilities of
# different transitions and pointers to previous states
for o in range(1, len(observations_space)):
observation = observations_space[o]
prior_observation = observations_space[o - 1]
for state in states_space:
# Calculates the argmax for probability function
arg_max = ""
max_probability = -1
for k_state in states_space:
probability = (
probabilities[(k_state, prior_observation)]
* transition_probabilities[k_state][state]
* emission_probabilities[state][observation]
)
if probability > max_probability:
max_probability = probability
arg_max = k_state
# Update probabilities and pointers dicts
probabilities[(state, observation)] = (
probabilities[(arg_max, prior_observation)]
* transition_probabilities[arg_max][state]
* emission_probabilities[state][observation]
)
pointers[(state, observation)] = arg_max
# The final observation
final_observation = observations_space[len(observations_space) - 1]
# argmax for given final observation
arg_max = ""
max_probability = -1
for k_state in states_space:
probability = probabilities[(k_state, final_observation)]
if probability > max_probability:
max_probability = probability
arg_max = k_state
last_state = arg_max
# Process pointers backwards
previous = last_state
result = []
for o in range(len(observations_space) - 1, -1, -1):
result.append(previous)
previous = pointers[previous, observations_space[o]]
result.reverse()
return result
def _validation(
observations_space: Any,
states_space: Any,
initial_probabilities: Any,
transition_probabilities: Any,
emission_probabilities: Any,
) -> None:
"""
>>> observations = ["normal", "cold", "dizzy"]
>>> states = ["Healthy", "Fever"]
>>> start_p = {"Healthy": 0.6, "Fever": 0.4}
>>> trans_p = {
... "Healthy": {"Healthy": 0.7, "Fever": 0.3},
... "Fever": {"Healthy": 0.4, "Fever": 0.6},
... }
>>> emit_p = {
... "Healthy": {"normal": 0.5, "cold": 0.4, "dizzy": 0.1},
... "Fever": {"normal": 0.1, "cold": 0.3, "dizzy": 0.6},
... }
>>> _validation(observations, states, start_p, trans_p, emit_p)
>>> _validation([], states, start_p, trans_p, emit_p)
Traceback (most recent call last):
...
ValueError: There's an empty parameter
"""
_validate_not_empty(
observations_space,
states_space,
initial_probabilities,
transition_probabilities,
emission_probabilities,
)
_validate_lists(observations_space, states_space)
_validate_dicts(
initial_probabilities, transition_probabilities, emission_probabilities
)
def _validate_not_empty(
observations_space: Any,
states_space: Any,
initial_probabilities: Any,
transition_probabilities: Any,
emission_probabilities: Any,
) -> None:
"""
>>> _validate_not_empty(["a"], ["b"], {"c":0.5},
... {"d": {"e": 0.6}}, {"f": {"g": 0.7}})
>>> _validate_not_empty(["a"], ["b"], {"c":0.5}, {}, {"f": {"g": 0.7}})
Traceback (most recent call last):
...
ValueError: There's an empty parameter
>>> _validate_not_empty(["a"], ["b"], None, {"d": {"e": 0.6}}, {"f": {"g": 0.7}})
Traceback (most recent call last):
...
ValueError: There's an empty parameter
"""
if not all(
[
observations_space,
states_space,
initial_probabilities,
transition_probabilities,
emission_probabilities,
]
):
raise ValueError("There's an empty parameter")
def _validate_lists(observations_space: Any, states_space: Any) -> None:
"""
>>> _validate_lists(["a"], ["b"])
>>> _validate_lists(1234, ["b"])
Traceback (most recent call last):
...
ValueError: observations_space must be a list
>>> _validate_lists(["a"], [3])
Traceback (most recent call last):
...
ValueError: states_space must be a list of strings
"""
_validate_list(observations_space, "observations_space")
_validate_list(states_space, "states_space")
def _validate_list(_object: Any, var_name: str) -> None:
"""
>>> _validate_list(["a"], "mock_name")
>>> _validate_list("a", "mock_name")
Traceback (most recent call last):
...
ValueError: mock_name must be a list
>>> _validate_list([0.5], "mock_name")
Traceback (most recent call last):
...
ValueError: mock_name must be a list of strings
"""
if not isinstance(_object, list):
raise ValueError(f"{var_name} must be a list")
else:
for x in _object:
if not isinstance(x, str):
raise ValueError(f"{var_name} must be a list of strings")
def _validate_dicts(
initial_probabilities: Any,
transition_probabilities: Any,
emission_probabilities: Any,
) -> None:
"""
>>> _validate_dicts({"c":0.5}, {"d": {"e": 0.6}}, {"f": {"g": 0.7}})
>>> _validate_dicts("invalid", {"d": {"e": 0.6}}, {"f": {"g": 0.7}})
Traceback (most recent call last):
...
ValueError: initial_probabilities must be a dict
>>> _validate_dicts({"c":0.5}, {2: {"e": 0.6}}, {"f": {"g": 0.7}})
Traceback (most recent call last):
...
ValueError: transition_probabilities all keys must be strings
>>> _validate_dicts({"c":0.5}, {"d": {"e": 0.6}}, {"f": {2: 0.7}})
Traceback (most recent call last):
...
ValueError: emission_probabilities all keys must be strings
>>> _validate_dicts({"c":0.5}, {"d": {"e": 0.6}}, {"f": {"g": "h"}})
Traceback (most recent call last):
...
ValueError: emission_probabilities nested dictionary all values must be float
"""
_validate_dict(initial_probabilities, "initial_probabilities", float)
_validate_nested_dict(transition_probabilities, "transition_probabilities")
_validate_nested_dict(emission_probabilities, "emission_probabilities")
def _validate_nested_dict(_object: Any, var_name: str) -> None:
"""
>>> _validate_nested_dict({"a":{"b": 0.5}}, "mock_name")
>>> _validate_nested_dict("invalid", "mock_name")
Traceback (most recent call last):
...
ValueError: mock_name must be a dict
>>> _validate_nested_dict({"a": 8}, "mock_name")
Traceback (most recent call last):
...
ValueError: mock_name all values must be dict
>>> _validate_nested_dict({"a":{2: 0.5}}, "mock_name")
Traceback (most recent call last):
...
ValueError: mock_name all keys must be strings
>>> _validate_nested_dict({"a":{"b": 4}}, "mock_name")
Traceback (most recent call last):
...
ValueError: mock_name nested dictionary all values must be float
"""
_validate_dict(_object, var_name, dict)
for x in _object.values():
_validate_dict(x, var_name, float, True)
def _validate_dict(
_object: Any, var_name: str, value_type: type, nested: bool = False
) -> None:
"""
>>> _validate_dict({"b": 0.5}, "mock_name", float)
>>> _validate_dict("invalid", "mock_name", float)
Traceback (most recent call last):
...
ValueError: mock_name must be a dict
>>> _validate_dict({"a": 8}, "mock_name", dict)
Traceback (most recent call last):
...
ValueError: mock_name all values must be dict
>>> _validate_dict({2: 0.5}, "mock_name",float, True)
Traceback (most recent call last):
...
ValueError: mock_name all keys must be strings
>>> _validate_dict({"b": 4}, "mock_name", float,True)
Traceback (most recent call last):
...
ValueError: mock_name nested dictionary all values must be float
"""
if not isinstance(_object, dict):
raise ValueError(f"{var_name} must be a dict")
if not all(isinstance(x, str) for x in _object):
raise ValueError(f"{var_name} all keys must be strings")
if not all(isinstance(x, value_type) for x in _object.values()):
nested_text = "nested dictionary " if nested else ""
raise ValueError(
f"{var_name} {nested_text}all values must be {value_type.__name__}"
)
if __name__ == "__main__":
from doctest import testmod
testmod()
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8
| from __future__ import annotations
def mean(nums: list) -> float:
"""
Find mean of a list of numbers.
Wiki: https://en.wikipedia.org/wiki/Mean
>>> mean([3, 6, 9, 12, 15, 18, 21])
12.0
>>> mean([5, 10, 15, 20, 25, 30, 35])
20.0
>>> mean([1, 2, 3, 4, 5, 6, 7, 8])
4.5
>>> mean([])
Traceback (most recent call last):
...
ValueError: List is empty
"""
if not nums:
raise ValueError("List is empty")
return sum(nums) / len(nums)
if __name__ == "__main__":
import doctest
doctest.testmod()
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8
| from __future__ import annotations
import collections
import pprint
from pathlib import Path
def signature(word: str) -> str:
"""Return a word sorted
>>> signature("test")
'estt'
>>> signature("this is a test")
' aehiisssttt'
>>> signature("finaltest")
'aefilnstt'
"""
return "".join(sorted(word))
def anagram(my_word: str) -> list[str]:
"""Return every anagram of the given word
>>> anagram('test')
['sett', 'stet', 'test']
>>> anagram('this is a test')
[]
>>> anagram('final')
['final']
"""
return word_by_signature[signature(my_word)]
data: str = Path(__file__).parent.joinpath("words.txt").read_text(encoding="utf-8")
word_list = sorted({word.strip().lower() for word in data.splitlines()})
word_by_signature = collections.defaultdict(list)
for word in word_list:
word_by_signature[signature(word)].append(word)
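# Illustrative aside (not part of the original script): two words land in the
# same bucket exactly when sorting their letters gives the same signature.
assert signature("sett") == signature("test") == "estt"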
if __name__ == "__main__":
all_anagrams = {word: anagram(word) for word in word_list if len(anagram(word)) > 1}
with open("anagrams.txt", "w") as file:
file.write("all_anagrams = \n ")
file.write(pprint.pformat(all_anagrams))
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8
| """
CAUTION: You may get a json.decoding error.
This works for some of us but fails for others.
"""
from datetime import datetime
import requests
from rich import box
from rich import console as rich_console
from rich import table as rich_table
LIMIT = 10
TODAY = datetime.now()
API_URL = (
"https://www.forbes.com/forbesapi/person/rtb/0/position/true.json"
"?fields=personName,gender,source,countryOfCitizenship,birthDate,finalWorth"
f"&limit={LIMIT}"
)
def calculate_age(unix_date: int) -> str:
"""Calculates age from given unix time format.
Returns:
Age as string
>>> calculate_age(-657244800000)
'73'
>>> calculate_age(46915200000)
'51'
"""
birthdate = datetime.fromtimestamp(unix_date / 1000).date()
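    # Added note: the comparison below is True (1) when this year's birthday
    # has not yet occurred, so the age is rounded down correctly.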
return str(
TODAY.year
- birthdate.year
- ((TODAY.month, TODAY.day) < (birthdate.month, birthdate.day))
)
def get_forbes_real_time_billionaires() -> list[dict[str, str]]:
"""Get top 10 realtime billionaires using forbes API.
Returns:
List of top 10 realtime billionaires data.
"""
    response_json = requests.get(API_URL, timeout=10).json()  # timeout added so the call cannot hang forever
return [
{
"Name": person["personName"],
"Source": person["source"],
"Country": person["countryOfCitizenship"],
"Gender": person["gender"],
"Worth ($)": f"{person['finalWorth'] / 1000:.1f} Billion",
"Age": calculate_age(person["birthDate"]),
}
for person in response_json["personList"]["personsLists"]
]
def display_billionaires(forbes_billionaires: list[dict[str, str]]) -> None:
"""Display Forbes real time billionaires in a rich table.
Args:
forbes_billionaires (list): Forbes top 10 real time billionaires
"""
table = rich_table.Table(
title=f"Forbes Top {LIMIT} Real Time Billionaires at {TODAY:%Y-%m-%d %H:%M}",
style="green",
highlight=True,
box=box.SQUARE,
)
for key in forbes_billionaires[0]:
table.add_column(key)
for billionaire in forbes_billionaires:
table.add_row(*billionaire.values())
rich_console.Console().print(table)
if __name__ == "__main__":
display_billionaires(get_forbes_real_time_billionaires())
| """
CAUTION: You may get a json.decoding error.
This works for some of us but fails for others.
"""
from datetime import datetime
import requests
from rich import box
from rich import console as rich_console
from rich import table as rich_table
LIMIT = 10
TODAY = datetime.now()
API_URL = (
"https://www.forbes.com/forbesapi/person/rtb/0/position/true.json"
"?fields=personName,gender,source,countryOfCitizenship,birthDate,finalWorth"
f"&limit={LIMIT}"
)
def calculate_age(unix_date: int) -> str:
"""Calculates age from given unix time format.
Returns:
Age as string
>>> calculate_age(-657244800000)
'73'
>>> calculate_age(46915200000)
'51'
"""
birthdate = datetime.fromtimestamp(unix_date / 1000).date()
return str(
TODAY.year
- birthdate.year
- ((TODAY.month, TODAY.day) < (birthdate.month, birthdate.day))
)
def get_forbes_real_time_billionaires() -> list[dict[str, str]]:
"""Get top 10 realtime billionaires using forbes API.
Returns:
List of top 10 realtime billionaires data.
"""
response_json = requests.get(API_URL).json()
return [
{
"Name": person["personName"],
"Source": person["source"],
"Country": person["countryOfCitizenship"],
"Gender": person["gender"],
"Worth ($)": f"{person['finalWorth'] / 1000:.1f} Billion",
"Age": calculate_age(person["birthDate"]),
}
for person in response_json["personList"]["personsLists"]
]
def display_billionaires(forbes_billionaires: list[dict[str, str]]) -> None:
"""Display Forbes real time billionaires in a rich table.
Args:
forbes_billionaires (list): Forbes top 10 real time billionaires
"""
table = rich_table.Table(
title=f"Forbes Top {LIMIT} Real Time Billionaires at {TODAY:%Y-%m-%d %H:%M}",
style="green",
highlight=True,
box=box.SQUARE,
)
for key in forbes_billionaires[0]:
table.add_column(key)
for billionaire in forbes_billionaires:
table.add_row(*billionaire.values())
rich_console.Console().print(table)
if __name__ == "__main__":
display_billionaires(get_forbes_real_time_billionaires())
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8
| """
https://en.wikipedia.org/wiki/Strongly_connected_component
Finding strongly connected components in directed graph
"""
test_graph_1 = {0: [2, 3], 1: [0], 2: [1], 3: [4], 4: []}
test_graph_2 = {0: [1, 2, 3], 1: [2], 2: [0], 3: [4], 4: [5], 5: [3]}
def topology_sort(
graph: dict[int, list[int]], vert: int, visited: list[bool]
) -> list[int]:
"""
    Use depth first search to order the vertices by finishing time.
    At this stage the graph is the original (not reversed) one
>>> topology_sort(test_graph_1, 0, 5 * [False])
[1, 2, 4, 3, 0]
>>> topology_sort(test_graph_2, 0, 6 * [False])
[2, 1, 5, 4, 3, 0]
"""
visited[vert] = True
order = []
for neighbour in graph[vert]:
if not visited[neighbour]:
order += topology_sort(graph, neighbour, visited)
order.append(vert)
return order
def find_components(
reversed_graph: dict[int, list[int]], vert: int, visited: list[bool]
) -> list[int]:
"""
    Use depth first search to find strongly connected
    vertices. Here the graph passed in is the reversed one
>>> find_components({0: [1], 1: [2], 2: [0]}, 0, 5 * [False])
[0, 1, 2]
>>> find_components({0: [2], 1: [0], 2: [0, 1]}, 0, 6 * [False])
[0, 2, 1]
"""
visited[vert] = True
component = [vert]
for neighbour in reversed_graph[vert]:
if not visited[neighbour]:
component += find_components(reversed_graph, neighbour, visited)
return component
def strongly_connected_components(graph: dict[int, list[int]]) -> list[list[int]]:
"""
This function takes graph as a parameter
and then returns the list of strongly connected components
>>> strongly_connected_components(test_graph_1)
[[0, 1, 2], [3], [4]]
>>> strongly_connected_components(test_graph_2)
[[0, 2, 1], [3, 5, 4]]
"""
visited = len(graph) * [False]
reversed_graph: dict[int, list[int]] = {vert: [] for vert in range(len(graph))}
for vert, neighbours in graph.items():
for neighbour in neighbours:
reversed_graph[neighbour].append(vert)
order = []
for i, was_visited in enumerate(visited):
if not was_visited:
order += topology_sort(graph, i, visited)
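    # Added note: second pass of the algorithm -- visit vertices in reverse
    # finishing order on the reversed graph; each DFS tree found below is one
    # strongly connected component.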
components_list = []
visited = len(graph) * [False]
for i in range(len(graph)):
vert = order[len(graph) - i - 1]
if not visited[vert]:
component = find_components(reversed_graph, vert, visited)
components_list.append(component)
return components_list
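# --- Illustrative usage (a minimal sketch, not part of the original module) ---
# `demo_graph` is a made-up example; the functions above implement the
# Kosaraju-style two-pass approach (DFS ordering, then DFS on the reversed graph).
if __name__ == "__main__":
    demo_graph = {0: [1], 1: [2], 2: [0, 3], 3: [4], 4: []}
    print(strongly_connected_components(demo_graph))  # expected: [[0, 2, 1], [3], [4]]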
| """
https://en.wikipedia.org/wiki/Strongly_connected_component
Finding strongly connected components in directed graph
"""
test_graph_1 = {0: [2, 3], 1: [0], 2: [1], 3: [4], 4: []}
test_graph_2 = {0: [1, 2, 3], 1: [2], 2: [0], 3: [4], 4: [5], 5: [3]}
def topology_sort(
graph: dict[int, list[int]], vert: int, visited: list[bool]
) -> list[int]:
"""
    Use depth first search to order the graph's vertices by DFS finishing time
    At this stage the graph is the same as the input graph
>>> topology_sort(test_graph_1, 0, 5 * [False])
[1, 2, 4, 3, 0]
>>> topology_sort(test_graph_2, 0, 6 * [False])
[2, 1, 5, 4, 3, 0]
"""
visited[vert] = True
order = []
for neighbour in graph[vert]:
if not visited[neighbour]:
order += topology_sort(graph, neighbour, visited)
order.append(vert)
return order
def find_components(
reversed_graph: dict[int, list[int]], vert: int, visited: list[bool]
) -> list[int]:
"""
    Use depth first search to find strongly connected
    vertices. Here the graph passed in is the reversed graph
>>> find_components({0: [1], 1: [2], 2: [0]}, 0, 5 * [False])
[0, 1, 2]
>>> find_components({0: [2], 1: [0], 2: [0, 1]}, 0, 6 * [False])
[0, 2, 1]
"""
visited[vert] = True
component = [vert]
for neighbour in reversed_graph[vert]:
if not visited[neighbour]:
component += find_components(reversed_graph, neighbour, visited)
return component
def strongly_connected_components(graph: dict[int, list[int]]) -> list[list[int]]:
"""
This function takes graph as a parameter
and then returns the list of strongly connected components
>>> strongly_connected_components(test_graph_1)
[[0, 1, 2], [3], [4]]
>>> strongly_connected_components(test_graph_2)
[[0, 2, 1], [3, 5, 4]]
"""
visited = len(graph) * [False]
reversed_graph: dict[int, list[int]] = {vert: [] for vert in range(len(graph))}
for vert, neighbours in graph.items():
for neighbour in neighbours:
reversed_graph[neighbour].append(vert)
order = []
for i, was_visited in enumerate(visited):
if not was_visited:
order += topology_sort(graph, i, visited)
components_list = []
visited = len(graph) * [False]
for i in range(len(graph)):
vert = order[len(graph) - i - 1]
if not visited[vert]:
component = find_components(reversed_graph, vert, visited)
components_list.append(component)
return components_list
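# --- Illustrative usage (a minimal sketch, not part of the original module) ---
# `demo_graph` is a made-up example; the functions above implement the
# Kosaraju-style two-pass approach (DFS ordering, then DFS on the reversed graph).
if __name__ == "__main__":
    demo_graph = {0: [1], 1: [2], 2: [0, 3], 3: [4], 4: []}
    print(strongly_connected_components(demo_graph))  # expected: [[0, 2, 1], [3], [4]]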
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
Problem 34: https://projecteuler.net/problem=34
145 is a curious number, as 1! + 4! + 5! = 1 + 24 + 120 = 145.
Find the sum of all numbers which are equal to the sum of the factorial of their digits.
Note: As 1! = 1 and 2! = 2 are not sums they are not included.
"""
from math import factorial
DIGIT_FACTORIAL = {str(d): factorial(d) for d in range(10)}
def sum_of_digit_factorial(n: int) -> int:
"""
Returns the sum of the factorial of digits in n
>>> sum_of_digit_factorial(15)
121
>>> sum_of_digit_factorial(0)
1
"""
return sum(DIGIT_FACTORIAL[d] for d in str(n))
def solution() -> int:
"""
Returns the sum of all numbers whose
sum of the factorials of all digits
add up to the number itself.
>>> solution()
40730
"""
limit = 7 * factorial(9) + 1
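    # Added explanatory note on the bound above: a d-digit number has a
    # digit-factorial sum of at most d * 9!, and for d >= 8 that maximum
    # (8 * 9! = 2,903,040) has only 7 digits, so no number above 7 * 9!
    # can equal its own digit-factorial sum.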
return sum(i for i in range(3, limit) if sum_of_digit_factorial(i) == i)
if __name__ == "__main__":
print(f"{solution() = }")
| """
Problem 34: https://projecteuler.net/problem=34
145 is a curious number, as 1! + 4! + 5! = 1 + 24 + 120 = 145.
Find the sum of all numbers which are equal to the sum of the factorial of their digits.
Note: As 1! = 1 and 2! = 2 are not sums they are not included.
"""
from math import factorial
DIGIT_FACTORIAL = {str(d): factorial(d) for d in range(10)}
def sum_of_digit_factorial(n: int) -> int:
"""
Returns the sum of the factorial of digits in n
>>> sum_of_digit_factorial(15)
121
>>> sum_of_digit_factorial(0)
1
"""
return sum(DIGIT_FACTORIAL[d] for d in str(n))
def solution() -> int:
"""
Returns the sum of all numbers whose
sum of the factorials of all digits
add up to the number itself.
>>> solution()
40730
"""
limit = 7 * factorial(9) + 1
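    # Added explanatory note on the bound above: a d-digit number has a
    # digit-factorial sum of at most d * 9!, and for d >= 8 that maximum
    # (8 * 9! = 2,903,040) has only 7 digits, so no number above 7 * 9!
    # can equal its own digit-factorial sum.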
return sum(i for i in range(3, limit) if sum_of_digit_factorial(i) == i)
if __name__ == "__main__":
print(f"{solution() = }")
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
Kadane's algorithm to get maximum subarray sum
https://medium.com/@rsinghal757/kadanes-algorithm-dynamic-programming-how-and-why-does-it-work-3fd8849ed73d
https://en.wikipedia.org/wiki/Maximum_subarray_problem
"""
test_data: tuple = ([-2, -8, -9], [2, 8, 9], [-1, 0, 1], [0, 0], [])
def negative_exist(arr: list) -> int:
"""
>>> negative_exist([-2,-8,-9])
-2
>>> [negative_exist(arr) for arr in test_data]
[-2, 0, 0, 0, 0]
"""
arr = arr or [0]
max_number = arr[0]
for i in arr:
if i >= 0:
return 0
elif max_number <= i:
max_number = i
return max_number
def kadanes(arr: list) -> int:
"""
    If negative_exist() returns 0, this function computes the maximum subarray sum;
    otherwise it returns the (negative) value returned by negative_exist().
    For example: arr = [2, 3, -9, 8, -2]
    Initially max_sum and max_till_element are both 0. Walking through the array,
    max_till_element accumulates a running sum; whenever it exceeds max_sum,
    max_sum is updated, and whenever the running sum drops below 0 it is reset
    to 0. After the whole pass, max_sum is returned.
    So the output for the above arr is 8
>>> kadanes([2, 3, -9, 8, -2])
8
>>> [kadanes(arr) for arr in test_data]
[-2, 19, 1, 0, 0]
"""
max_sum = negative_exist(arr)
if max_sum < 0:
return max_sum
max_sum = 0
max_till_element = 0
for i in arr:
max_till_element += i
max_sum = max(max_sum, max_till_element)
max_till_element = max(max_till_element, 0)
return max_sum
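# A minimal sketch (not part of the original module) of the classic single-pass
# Kadane recurrence: track the best subarray sum ending at each position. It
# assumes a non-empty list and handles all-negative inputs without a pre-scan.
def kadanes_classic(arr: list) -> int:
    """
    >>> kadanes_classic([2, 3, -9, 8, -2])
    8
    >>> kadanes_classic([-2, -8, -9])
    -2
    """
    best = current = arr[0]
    for value in arr[1:]:
        current = max(value, current + value)  # extend the run or restart at value
        best = max(best, current)
    return best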
if __name__ == "__main__":
try:
        print("Enter integer values separated by spaces")
arr = [int(x) for x in input().split()]
print(f"Maximum subarray sum of {arr} is {kadanes(arr)}")
except ValueError:
print("Please enter integer values.")
| """
Kadane's algorithm to get maximum subarray sum
https://medium.com/@rsinghal757/kadanes-algorithm-dynamic-programming-how-and-why-does-it-work-3fd8849ed73d
https://en.wikipedia.org/wiki/Maximum_subarray_problem
"""
test_data: tuple = ([-2, -8, -9], [2, 8, 9], [-1, 0, 1], [0, 0], [])
def negative_exist(arr: list) -> int:
"""
>>> negative_exist([-2,-8,-9])
-2
>>> [negative_exist(arr) for arr in test_data]
[-2, 0, 0, 0, 0]
"""
arr = arr or [0]
max_number = arr[0]
for i in arr:
if i >= 0:
return 0
elif max_number <= i:
max_number = i
return max_number
def kadanes(arr: list) -> int:
"""
    If negative_exist() returns 0, this function computes the maximum subarray sum;
    otherwise it returns the (negative) value returned by negative_exist().
    For example: arr = [2, 3, -9, 8, -2]
    Initially max_sum and max_till_element are both 0. Walking through the array,
    max_till_element accumulates a running sum; whenever it exceeds max_sum,
    max_sum is updated, and whenever the running sum drops below 0 it is reset
    to 0. After the whole pass, max_sum is returned.
    So the output for the above arr is 8
>>> kadanes([2, 3, -9, 8, -2])
8
>>> [kadanes(arr) for arr in test_data]
[-2, 19, 1, 0, 0]
"""
max_sum = negative_exist(arr)
if max_sum < 0:
return max_sum
max_sum = 0
max_till_element = 0
for i in arr:
max_till_element += i
max_sum = max(max_sum, max_till_element)
max_till_element = max(max_till_element, 0)
return max_sum
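# A minimal sketch (not part of the original module) of the classic single-pass
# Kadane recurrence: track the best subarray sum ending at each position. It
# assumes a non-empty list and handles all-negative inputs without a pre-scan.
def kadanes_classic(arr: list) -> int:
    """
    >>> kadanes_classic([2, 3, -9, 8, -2])
    8
    >>> kadanes_classic([-2, -8, -9])
    -2
    """
    best = current = arr[0]
    for value in arr[1:]:
        current = max(value, current + value)  # extend the run or restart at value
        best = max(best, current)
    return best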
if __name__ == "__main__":
try:
        print("Enter integer values separated by spaces")
arr = [int(x) for x in input().split()]
print(f"Maximum subarray sum of {arr} is {kadanes(arr)}")
except ValueError:
print("Please enter integer values.")
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
https://en.wikipedia.org/wiki/Rayleigh_quotient
"""
from typing import Any
import numpy as np
def is_hermitian(matrix: np.ndarray) -> bool:
"""
Checks if a matrix is Hermitian.
>>> import numpy as np
>>> A = np.array([
... [2, 2+1j, 4],
... [2-1j, 3, 1j],
... [4, -1j, 1]])
>>> is_hermitian(A)
True
>>> A = np.array([
... [2, 2+1j, 4+1j],
... [2-1j, 3, 1j],
... [4, -1j, 1]])
>>> is_hermitian(A)
False
"""
return np.array_equal(matrix, matrix.conjugate().T)
def rayleigh_quotient(a: np.ndarray, v: np.ndarray) -> Any:
"""
Returns the Rayleigh quotient of a Hermitian matrix A and
vector v.
>>> import numpy as np
>>> A = np.array([
... [1, 2, 4],
... [2, 3, -1],
... [4, -1, 1]
... ])
>>> v = np.array([
... [1],
... [2],
... [3]
... ])
>>> rayleigh_quotient(A, v)
array([[3.]])
"""
v_star = v.conjugate().T
v_star_dot = v_star.dot(a)
assert isinstance(v_star_dot, np.ndarray)
return (v_star_dot.dot(v)) / (v_star.dot(v))
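# An added illustrative check (a sketch, not part of the original module): for an
# eigenvector of a Hermitian matrix the Rayleigh quotient equals the matching
# eigenvalue; the diagonal matrix and vector below are made-up example values.
def eigenvector_demo() -> None:
    a = np.array([[2.0, 0.0], [0.0, 5.0]])
    v = np.array([[0.0], [1.0]])  # eigenvector of `a` for eigenvalue 5
    assert np.isclose(rayleigh_quotient(a, v), 5.0)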
def tests() -> None:
a = np.array([[2, 2 + 1j, 4], [2 - 1j, 3, 1j], [4, -1j, 1]])
v = np.array([[1], [2], [3]])
assert is_hermitian(a), f"{a} is not hermitian."
print(rayleigh_quotient(a, v))
a = np.array([[1, 2, 4], [2, 3, -1], [4, -1, 1]])
assert is_hermitian(a), f"{a} is not hermitian."
assert rayleigh_quotient(a, v) == float(3)
if __name__ == "__main__":
import doctest
doctest.testmod()
tests()
| """
https://en.wikipedia.org/wiki/Rayleigh_quotient
"""
from typing import Any
import numpy as np
def is_hermitian(matrix: np.ndarray) -> bool:
"""
Checks if a matrix is Hermitian.
>>> import numpy as np
>>> A = np.array([
... [2, 2+1j, 4],
... [2-1j, 3, 1j],
... [4, -1j, 1]])
>>> is_hermitian(A)
True
>>> A = np.array([
... [2, 2+1j, 4+1j],
... [2-1j, 3, 1j],
... [4, -1j, 1]])
>>> is_hermitian(A)
False
"""
return np.array_equal(matrix, matrix.conjugate().T)
def rayleigh_quotient(a: np.ndarray, v: np.ndarray) -> Any:
"""
Returns the Rayleigh quotient of a Hermitian matrix A and
vector v.
>>> import numpy as np
>>> A = np.array([
... [1, 2, 4],
... [2, 3, -1],
... [4, -1, 1]
... ])
>>> v = np.array([
... [1],
... [2],
... [3]
... ])
>>> rayleigh_quotient(A, v)
array([[3.]])
"""
v_star = v.conjugate().T
v_star_dot = v_star.dot(a)
assert isinstance(v_star_dot, np.ndarray)
return (v_star_dot.dot(v)) / (v_star.dot(v))
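# An added illustrative check (a sketch, not part of the original module): for an
# eigenvector of a Hermitian matrix the Rayleigh quotient equals the matching
# eigenvalue; the diagonal matrix and vector below are made-up example values.
def eigenvector_demo() -> None:
    a = np.array([[2.0, 0.0], [0.0, 5.0]])
    v = np.array([[0.0], [1.0]])  # eigenvector of `a` for eigenvalue 5
    assert np.isclose(rayleigh_quotient(a, v), 5.0)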
def tests() -> None:
a = np.array([[2, 2 + 1j, 4], [2 - 1j, 3, 1j], [4, -1j, 1]])
v = np.array([[1], [2], [3]])
assert is_hermitian(a), f"{a} is not hermitian."
print(rayleigh_quotient(a, v))
a = np.array([[1, 2, 4], [2, 3, -1], [4, -1, 1]])
assert is_hermitian(a), f"{a} is not hermitian."
assert rayleigh_quotient(a, v) == float(3)
if __name__ == "__main__":
import doctest
doctest.testmod()
tests()
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
Get the citation ("Cited by") information from Google Scholar
using the title and year of publication, and the volume and pages of the journal.
"""
import requests
from bs4 import BeautifulSoup
def get_citation(base_url: str, params: dict) -> str:
"""
    Return the "Cited by" link text for the matching result.
"""
soup = BeautifulSoup(requests.get(base_url, params=params).content, "html.parser")
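    # Added explanation (based on Google Scholar's markup at the time of writing,
    # which may change): "gs_ri" wraps a single search result, "gs_fl" holds its
    # footer links, and the third anchor there is usually the "Cited by N" link.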
div = soup.find("div", attrs={"class": "gs_ri"})
anchors = div.find("div", attrs={"class": "gs_fl"}).find_all("a")
return anchors[2].get_text()
if __name__ == "__main__":
params = {
"title": (
"Precisely geometry controlled microsupercapacitors for ultrahigh areal "
"capacitance, volumetric capacitance, and energy density"
),
"journal": "Chem. Mater.",
"volume": 30,
"pages": "3979-3990",
"year": 2018,
"hl": "en",
}
print(get_citation("https://scholar.google.com/scholar_lookup", params=params))
| """
Get the citation ("Cited by") information from Google Scholar
using the title and year of publication, and the volume and pages of the journal.
"""
import requests
from bs4 import BeautifulSoup
def get_citation(base_url: str, params: dict) -> str:
"""
    Return the "Cited by" link text for the matching result.
"""
soup = BeautifulSoup(requests.get(base_url, params=params).content, "html.parser")
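    # Added explanation (based on Google Scholar's markup at the time of writing,
    # which may change): "gs_ri" wraps a single search result, "gs_fl" holds its
    # footer links, and the third anchor there is usually the "Cited by N" link.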
div = soup.find("div", attrs={"class": "gs_ri"})
anchors = div.find("div", attrs={"class": "gs_fl"}).find_all("a")
return anchors[2].get_text()
if __name__ == "__main__":
params = {
"title": (
"Precisely geometry controlled microsupercapacitors for ultrahigh areal "
"capacitance, volumetric capacitance, and energy density"
),
"journal": "Chem. Mater.",
"volume": 30,
"pages": "3979-3990",
"year": 2018,
"hl": "en",
}
print(get_citation("https://scholar.google.com/scholar_lookup", params=params))
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
Sum of all nodes in a binary tree.
Python implementation:
O(n) time complexity - Recurses through :meth:`depth_first_search`
with each element.
O(n) space complexity - At any point in time maximum number of stack
frames that could be in memory is `n`
"""
from __future__ import annotations
from collections.abc import Iterator
class Node:
"""
A Node has a value variable and pointers to Nodes to its left and right.
"""
def __init__(self, value: int) -> None:
self.value = value
self.left: Node | None = None
self.right: Node | None = None
class BinaryTreeNodeSum:
r"""
The below tree looks like this
10
/ \
5 -3
/ / \
12 8 0
>>> tree = Node(10)
>>> sum(BinaryTreeNodeSum(tree))
10
>>> tree.left = Node(5)
>>> sum(BinaryTreeNodeSum(tree))
15
>>> tree.right = Node(-3)
>>> sum(BinaryTreeNodeSum(tree))
12
>>> tree.left.left = Node(12)
>>> sum(BinaryTreeNodeSum(tree))
24
>>> tree.right.left = Node(8)
>>> tree.right.right = Node(0)
>>> sum(BinaryTreeNodeSum(tree))
32
"""
def __init__(self, tree: Node) -> None:
self.tree = tree
def depth_first_search(self, node: Node | None) -> int:
if node is None:
return 0
return node.value + (
self.depth_first_search(node.left) + self.depth_first_search(node.right)
)
def __iter__(self) -> Iterator[int]:
yield self.depth_first_search(self.tree)
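# A minimal iterative sketch (not part of the original module): the same sum
# computed with an explicit stack, which avoids Python's recursion limit on
# very deep trees.
def iterative_node_sum(tree: Node | None) -> int:
    total = 0
    stack = [tree] if tree else []
    while stack:
        node = stack.pop()
        total += node.value
        if node.left:
            stack.append(node.left)
        if node.right:
            stack.append(node.right)
    return total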
if __name__ == "__main__":
import doctest
doctest.testmod()
| """
Sum of all nodes in a binary tree.
Python implementation:
O(n) time complexity - Recurses through :meth:`depth_first_search`
with each element.
O(n) space complexity - At any point in time maximum number of stack
frames that could be in memory is `n`
"""
from __future__ import annotations
from collections.abc import Iterator
class Node:
"""
A Node has a value variable and pointers to Nodes to its left and right.
"""
def __init__(self, value: int) -> None:
self.value = value
self.left: Node | None = None
self.right: Node | None = None
class BinaryTreeNodeSum:
r"""
The below tree looks like this
10
/ \
5 -3
/ / \
12 8 0
>>> tree = Node(10)
>>> sum(BinaryTreeNodeSum(tree))
10
>>> tree.left = Node(5)
>>> sum(BinaryTreeNodeSum(tree))
15
>>> tree.right = Node(-3)
>>> sum(BinaryTreeNodeSum(tree))
12
>>> tree.left.left = Node(12)
>>> sum(BinaryTreeNodeSum(tree))
24
>>> tree.right.left = Node(8)
>>> tree.right.right = Node(0)
>>> sum(BinaryTreeNodeSum(tree))
32
"""
def __init__(self, tree: Node) -> None:
self.tree = tree
def depth_first_search(self, node: Node | None) -> int:
if node is None:
return 0
return node.value + (
self.depth_first_search(node.left) + self.depth_first_search(node.right)
)
def __iter__(self) -> Iterator[int]:
yield self.depth_first_search(self.tree)
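# A minimal iterative sketch (not part of the original module): the same sum
# computed with an explicit stack, which avoids Python's recursion limit on
# very deep trees.
def iterative_node_sum(tree: Node | None) -> int:
    total = 0
    stack = [tree] if tree else []
    while stack:
        node = stack.pop()
        total += node.value
        if node.left:
            stack.append(node.left)
        if node.right:
            stack.append(node.right)
    return total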
if __name__ == "__main__":
import doctest
doctest.testmod()
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
This program prints the matrix in spiral form.
This problem has been solved through a recursive approach.
The matrix must satisfy the conditions below:
i) the matrix should be only one or two dimensional
ii) all rows should have the same number of columns
"""
def check_matrix(matrix: list[list[int]]) -> bool:
    # must be a non-empty list in which every row has the same length
matrix = [list(row) for row in matrix]
if matrix and isinstance(matrix, list):
if isinstance(matrix[0], list):
prev_len = 0
for row in matrix:
if prev_len == 0:
prev_len = len(row)
result = True
else:
result = prev_len == len(row)
else:
result = True
else:
result = False
return result
def spiral_print_clockwise(a: list[list[int]]) -> None:
"""
>>> spiral_print_clockwise([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]])
1
2
3
4
8
12
11
10
9
5
6
7
"""
if check_matrix(a) and len(a) > 0:
a = [list(row) for row in a]
mat_row = len(a)
if isinstance(a[0], list):
mat_col = len(a[0])
else:
for dat in a:
print(dat)
return
        # horizontal printing, increasing
for i in range(0, mat_col):
print(a[0][i])
# vertical printing down
for i in range(1, mat_row):
print(a[i][mat_col - 1])
        # horizontal printing, decreasing
if mat_row > 1:
for i in range(mat_col - 2, -1, -1):
print(a[mat_row - 1][i])
# vertical printing up
for i in range(mat_row - 2, 0, -1):
print(a[i][0])
remain_mat = [row[1 : mat_col - 1] for row in a[1 : mat_row - 1]]
if len(remain_mat) > 0:
spiral_print_clockwise(remain_mat)
else:
return
else:
print("Not a valid matrix")
return
# Other Easy to understand Approach
def spiral_traversal(matrix: list[list]) -> list[int]:
"""
>>> spiral_traversal([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]])
[1, 2, 3, 4, 8, 12, 11, 10, 9, 5, 6, 7]
Example:
matrix = [[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]]
Algorithm:
    Step 1. First pop the row at index 0 (which is [1, 2, 3, 4]) and concatenate
            the output of [step 2] to it.
    Step 2. Now perform the matrix's transpose operation (change rows to columns
            and vice versa) and reverse the resultant matrix.
    Step 3. Pass the output of [step 2] to the same recursive function until the
            base case is hit.
Dry Run:
Stage 1.
[1, 2, 3, 4] + spiral_traversal([
[8, 12], [7, 11], [6, 10], [5, 9]]
])
Stage 2.
[1, 2, 3, 4, 8, 12] + spiral_traversal([
[11, 10, 9], [7, 6, 5]
])
Stage 3.
[1, 2, 3, 4, 8, 12, 11, 10, 9] + spiral_traversal([
[5], [6], [7]
])
Stage 4.
[1, 2, 3, 4, 8, 12, 11, 10, 9, 5] + spiral_traversal([
[5], [6], [7]
])
Stage 5.
[1, 2, 3, 4, 8, 12, 11, 10, 9, 5] + spiral_traversal([[6, 7]])
Stage 6.
[1, 2, 3, 4, 8, 12, 11, 10, 9, 5, 6, 7] + spiral_traversal([])
"""
if matrix:
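        # Added note: zip(*matrix) transposes the remaining rows and [::-1] reverses
        # them - a 90-degree counterclockwise rotation - so the next call's first row
        # is the old rightmost column read top to bottom.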
return list(matrix.pop(0)) + spiral_traversal(list(zip(*matrix))[::-1])
else:
return []
# driver code
if __name__ == "__main__":
import doctest
doctest.testmod()
a = [[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]]
spiral_print_clockwise(a)
| """
This program prints the matrix in spiral form.
This problem has been solved through a recursive approach.
The matrix must satisfy the conditions below:
i) the matrix should be only one or two dimensional
ii) all rows should have the same number of columns
"""
def check_matrix(matrix: list[list[int]]) -> bool:
    # must be a non-empty list in which every row has the same length
matrix = [list(row) for row in matrix]
if matrix and isinstance(matrix, list):
if isinstance(matrix[0], list):
prev_len = 0
for row in matrix:
if prev_len == 0:
prev_len = len(row)
result = True
else:
result = prev_len == len(row)
else:
result = True
else:
result = False
return result
def spiral_print_clockwise(a: list[list[int]]) -> None:
"""
>>> spiral_print_clockwise([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]])
1
2
3
4
8
12
11
10
9
5
6
7
"""
if check_matrix(a) and len(a) > 0:
a = [list(row) for row in a]
mat_row = len(a)
if isinstance(a[0], list):
mat_col = len(a[0])
else:
for dat in a:
print(dat)
return
        # horizontal printing, increasing
for i in range(0, mat_col):
print(a[0][i])
# vertical printing down
for i in range(1, mat_row):
print(a[i][mat_col - 1])
        # horizontal printing, decreasing
if mat_row > 1:
for i in range(mat_col - 2, -1, -1):
print(a[mat_row - 1][i])
# vertical printing up
for i in range(mat_row - 2, 0, -1):
print(a[i][0])
remain_mat = [row[1 : mat_col - 1] for row in a[1 : mat_row - 1]]
if len(remain_mat) > 0:
spiral_print_clockwise(remain_mat)
else:
return
else:
print("Not a valid matrix")
return
# Other Easy to understand Approach
def spiral_traversal(matrix: list[list]) -> list[int]:
"""
>>> spiral_traversal([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]])
[1, 2, 3, 4, 8, 12, 11, 10, 9, 5, 6, 7]
Example:
matrix = [[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]]
Algorithm:
    Step 1. First pop the row at index 0 (which is [1, 2, 3, 4]) and concatenate
            the output of [step 2] to it.
    Step 2. Now perform the matrix's transpose operation (change rows to columns
            and vice versa) and reverse the resultant matrix.
    Step 3. Pass the output of [step 2] to the same recursive function until the
            base case is hit.
Dry Run:
Stage 1.
[1, 2, 3, 4] + spiral_traversal([
[8, 12], [7, 11], [6, 10], [5, 9]]
])
Stage 2.
[1, 2, 3, 4, 8, 12] + spiral_traversal([
[11, 10, 9], [7, 6, 5]
])
Stage 3.
[1, 2, 3, 4, 8, 12, 11, 10, 9] + spiral_traversal([
[5], [6], [7]
])
Stage 4.
[1, 2, 3, 4, 8, 12, 11, 10, 9, 5] + spiral_traversal([
[5], [6], [7]
])
Stage 5.
[1, 2, 3, 4, 8, 12, 11, 10, 9, 5] + spiral_traversal([[6, 7]])
Stage 6.
[1, 2, 3, 4, 8, 12, 11, 10, 9, 5, 6, 7] + spiral_traversal([])
"""
if matrix:
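        # Added note: zip(*matrix) transposes the remaining rows and [::-1] reverses
        # them - a 90-degree counterclockwise rotation - so the next call's first row
        # is the old rightmost column read top to bottom.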
return list(matrix.pop(0)) + spiral_traversal(list(zip(*matrix))[::-1])
else:
return []
# driver code
if __name__ == "__main__":
import doctest
doctest.testmod()
a = [[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]]
spiral_print_clockwise(a)
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| def elf_hash(data: str) -> int:
"""
Implementation of ElfHash Algorithm, a variant of PJW hash function.
>>> elf_hash('lorem ipsum')
253956621
"""
hash_ = x = 0
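    # Added explanation of the loop below: shift the running hash left by 4 bits
    # and add the next character's code; whenever the top nibble (bits 28-31)
    # becomes non-zero, XOR it down into bits 4-7 and clear it so the hash stays
    # within 32 bits.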
for letter in data:
hash_ = (hash_ << 4) + ord(letter)
x = hash_ & 0xF0000000
if x != 0:
hash_ ^= x >> 24
hash_ &= ~x
return hash_
if __name__ == "__main__":
import doctest
doctest.testmod()
| def elf_hash(data: str) -> int:
"""
Implementation of ElfHash Algorithm, a variant of PJW hash function.
>>> elf_hash('lorem ipsum')
253956621
"""
hash_ = x = 0
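    # Added explanation of the loop below: shift the running hash left by 4 bits
    # and add the next character's code; whenever the top nibble (bits 28-31)
    # becomes non-zero, XOR it down into bits 4-7 and clear it so the hash stays
    # within 32 bits.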
for letter in data:
hash_ = (hash_ << 4) + ord(letter)
x = hash_ & 0xF0000000
if x != 0:
hash_ ^= x >> 24
hash_ &= ~x
return hash_
if __name__ == "__main__":
import doctest
doctest.testmod()
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
A Trie/Prefix Tree is a kind of search tree used to provide quick lookup
of words/patterns in a set of words. A basic Trie however has O(n^2) space complexity,
which can make it impractical for large word sets. It provides O(max(search_string,
length of longest word)) lookup time, making it an optimal approach when space is not an issue.
"""
class TrieNode:
def __init__(self) -> None:
self.nodes: dict[str, TrieNode] = {} # Mapping from char to TrieNode
self.is_leaf = False
def insert_many(self, words: list[str]) -> None:
"""
Inserts a list of words into the Trie
:param words: list of string words
:return: None
"""
for word in words:
self.insert(word)
def insert(self, word: str) -> None:
"""
Inserts a word into the Trie
:param word: word to be inserted
:return: None
"""
curr = self
for char in word:
if char not in curr.nodes:
curr.nodes[char] = TrieNode()
curr = curr.nodes[char]
curr.is_leaf = True
def find(self, word: str) -> bool:
"""
Tries to find word in a Trie
:param word: word to look for
:return: Returns True if word is found, False otherwise
"""
curr = self
for char in word:
if char not in curr.nodes:
return False
curr = curr.nodes[char]
return curr.is_leaf
def delete(self, word: str) -> None:
"""
Deletes a word in a Trie
:param word: word to delete
:return: None
"""
def _delete(curr: TrieNode, word: str, index: int) -> bool:
if index == len(word):
# If word does not exist
if not curr.is_leaf:
return False
curr.is_leaf = False
return len(curr.nodes) == 0
char = word[index]
char_node = curr.nodes.get(char)
# If char not in current trie node
if not char_node:
return False
# Flag to check if node can be deleted
delete_curr = _delete(char_node, word, index + 1)
if delete_curr:
del curr.nodes[char]
return len(curr.nodes) == 0
return delete_curr
_delete(self, word, 0)
def print_words(node: TrieNode, word: str) -> None:
"""
Prints all the words in a Trie
:param node: root node of Trie
:param word: Word variable should be empty at start
:return: None
"""
if node.is_leaf:
print(word, end=" ")
for key, value in node.nodes.items():
print_words(value, word + key)
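# A small illustrative helper (a sketch, not part of the original module):
# collect the words stored under `node` into a list instead of printing them.
def collect_words(node: TrieNode, prefix: str = "") -> list[str]:
    words = [prefix] if node.is_leaf else []
    for char, child in node.nodes.items():
        words.extend(collect_words(child, prefix + char))
    return words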
def test_trie() -> bool:
words = "banana bananas bandana band apple all beast".split()
root = TrieNode()
root.insert_many(words)
# print_words(root, "")
assert all(root.find(word) for word in words)
assert root.find("banana")
assert not root.find("bandanas")
assert not root.find("apps")
assert root.find("apple")
assert root.find("all")
root.delete("all")
assert not root.find("all")
root.delete("banana")
assert not root.find("banana")
assert root.find("bananas")
return True
def print_results(msg: str, passes: bool) -> None:
print(str(msg), "works!" if passes else "doesn't work :(")
def pytests() -> None:
assert test_trie()
def main() -> None:
"""
>>> pytests()
"""
print_results("Testing trie functionality", test_trie())
if __name__ == "__main__":
main()
| """
A Trie/Prefix Tree is a kind of search tree used to provide quick lookup
of words/patterns in a set of words. A basic Trie however has O(n^2) space complexity,
which can make it impractical for large word sets. It provides O(max(search_string,
length of longest word)) lookup time, making it an optimal approach when space is not an issue.
"""
class TrieNode:
def __init__(self) -> None:
self.nodes: dict[str, TrieNode] = {} # Mapping from char to TrieNode
self.is_leaf = False
def insert_many(self, words: list[str]) -> None:
"""
Inserts a list of words into the Trie
:param words: list of string words
:return: None
"""
for word in words:
self.insert(word)
def insert(self, word: str) -> None:
"""
Inserts a word into the Trie
:param word: word to be inserted
:return: None
"""
curr = self
for char in word:
if char not in curr.nodes:
curr.nodes[char] = TrieNode()
curr = curr.nodes[char]
curr.is_leaf = True
def find(self, word: str) -> bool:
"""
Tries to find word in a Trie
:param word: word to look for
:return: Returns True if word is found, False otherwise
"""
curr = self
for char in word:
if char not in curr.nodes:
return False
curr = curr.nodes[char]
return curr.is_leaf
def delete(self, word: str) -> None:
"""
Deletes a word in a Trie
:param word: word to delete
:return: None
"""
def _delete(curr: TrieNode, word: str, index: int) -> bool:
if index == len(word):
# If word does not exist
if not curr.is_leaf:
return False
curr.is_leaf = False
return len(curr.nodes) == 0
char = word[index]
char_node = curr.nodes.get(char)
# If char not in current trie node
if not char_node:
return False
# Flag to check if node can be deleted
delete_curr = _delete(char_node, word, index + 1)
if delete_curr:
del curr.nodes[char]
return len(curr.nodes) == 0
return delete_curr
_delete(self, word, 0)
def print_words(node: TrieNode, word: str) -> None:
"""
Prints all the words in a Trie
:param node: root node of Trie
:param word: Word variable should be empty at start
:return: None
"""
if node.is_leaf:
print(word, end=" ")
for key, value in node.nodes.items():
print_words(value, word + key)
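# A small illustrative helper (a sketch, not part of the original module):
# collect the words stored under `node` into a list instead of printing them.
def collect_words(node: TrieNode, prefix: str = "") -> list[str]:
    words = [prefix] if node.is_leaf else []
    for char, child in node.nodes.items():
        words.extend(collect_words(child, prefix + char))
    return words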
def test_trie() -> bool:
words = "banana bananas bandana band apple all beast".split()
root = TrieNode()
root.insert_many(words)
# print_words(root, "")
assert all(root.find(word) for word in words)
assert root.find("banana")
assert not root.find("bandanas")
assert not root.find("apps")
assert root.find("apple")
assert root.find("all")
root.delete("all")
assert not root.find("all")
root.delete("banana")
assert not root.find("banana")
assert root.find("bananas")
return True
def print_results(msg: str, passes: bool) -> None:
print(str(msg), "works!" if passes else "doesn't work :(")
def pytests() -> None:
assert test_trie()
def main() -> None:
"""
>>> pytests()
"""
print_results("Testing trie functionality", test_trie())
if __name__ == "__main__":
main()
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # Min heap data structure
# with decrease key functionality - in O(log(n)) time
class Node:
def __init__(self, name, val):
self.name = name
self.val = val
def __str__(self):
return f"{self.__class__.__name__}({self.name}, {self.val})"
def __lt__(self, other):
return self.val < other.val
class MinHeap:
"""
>>> r = Node("R", -1)
>>> b = Node("B", 6)
>>> a = Node("A", 3)
>>> x = Node("X", 1)
>>> e = Node("E", 4)
>>> print(b)
Node(B, 6)
>>> myMinHeap = MinHeap([r, b, a, x, e])
>>> myMinHeap.decrease_key(b, -17)
>>> print(b)
Node(B, -17)
>>> myMinHeap["B"]
-17
"""
def __init__(self, array):
self.idx_of_element = {}
self.heap_dict = {}
self.heap = self.build_heap(array)
def __getitem__(self, key):
return self.get_value(key)
def get_parent_idx(self, idx):
return (idx - 1) // 2
def get_left_child_idx(self, idx):
return idx * 2 + 1
def get_right_child_idx(self, idx):
return idx * 2 + 2
def get_value(self, key):
return self.heap_dict[key]
def build_heap(self, array):
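        # Added note: bottom-up (Floyd-style) construction - record each node's index
        # and value, then sift down every internal node from the last parent up to
        # the root.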
last_idx = len(array) - 1
start_from = self.get_parent_idx(last_idx)
for idx, i in enumerate(array):
self.idx_of_element[i] = idx
self.heap_dict[i.name] = i.val
for i in range(start_from, -1, -1):
self.sift_down(i, array)
return array
# this is min-heapify method
def sift_down(self, idx, array):
while True:
l = self.get_left_child_idx(idx) # noqa: E741
r = self.get_right_child_idx(idx)
smallest = idx
if l < len(array) and array[l] < array[idx]:
smallest = l
if r < len(array) and array[r] < array[smallest]:
smallest = r
if smallest != idx:
array[idx], array[smallest] = array[smallest], array[idx]
(
self.idx_of_element[array[idx]],
self.idx_of_element[array[smallest]],
) = (
self.idx_of_element[array[smallest]],
self.idx_of_element[array[idx]],
)
idx = smallest
else:
break
def sift_up(self, idx):
p = self.get_parent_idx(idx)
while p >= 0 and self.heap[p] > self.heap[idx]:
self.heap[p], self.heap[idx] = self.heap[idx], self.heap[p]
self.idx_of_element[self.heap[p]], self.idx_of_element[self.heap[idx]] = (
self.idx_of_element[self.heap[idx]],
self.idx_of_element[self.heap[p]],
)
idx = p
p = self.get_parent_idx(idx)
def peek(self):
return self.heap[0]
def remove(self):
self.heap[0], self.heap[-1] = self.heap[-1], self.heap[0]
self.idx_of_element[self.heap[0]], self.idx_of_element[self.heap[-1]] = (
self.idx_of_element[self.heap[-1]],
self.idx_of_element[self.heap[0]],
)
x = self.heap.pop()
del self.idx_of_element[x]
self.sift_down(0, self.heap)
return x
def insert(self, node):
self.heap.append(node)
self.idx_of_element[node] = len(self.heap) - 1
self.heap_dict[node.name] = node.val
self.sift_up(len(self.heap) - 1)
def is_empty(self):
return len(self.heap) == 0
def decrease_key(self, node, new_value):
assert (
self.heap[self.idx_of_element[node]].val > new_value
        ), "newValue must be less than the current value"
node.val = new_value
self.heap_dict[node.name] = new_value
self.sift_up(self.idx_of_element[node])
# USAGE
r = Node("R", -1)
b = Node("B", 6)
a = Node("A", 3)
x = Node("X", 1)
e = Node("E", 4)
# Use one of these two ways to generate Min-Heap
# Generating Min-Heap from array
my_min_heap = MinHeap([r, b, a, x, e])
# Generating Min-Heap by Insert method
# myMinHeap.insert(a)
# myMinHeap.insert(b)
# myMinHeap.insert(x)
# myMinHeap.insert(r)
# myMinHeap.insert(e)
# Before
print("Min Heap - before decrease key")
for i in my_min_heap.heap:
print(i)
print("Min Heap - After decrease key of node [B -> -17]")
my_min_heap.decrease_key(b, -17)
# After
for i in my_min_heap.heap:
print(i)
if __name__ == "__main__":
import doctest
doctest.testmod()
| # Min heap data structure
# with decrease key functionality - in O(log(n)) time
class Node:
def __init__(self, name, val):
self.name = name
self.val = val
def __str__(self):
return f"{self.__class__.__name__}({self.name}, {self.val})"
def __lt__(self, other):
return self.val < other.val
class MinHeap:
"""
>>> r = Node("R", -1)
>>> b = Node("B", 6)
>>> a = Node("A", 3)
>>> x = Node("X", 1)
>>> e = Node("E", 4)
>>> print(b)
Node(B, 6)
>>> myMinHeap = MinHeap([r, b, a, x, e])
>>> myMinHeap.decrease_key(b, -17)
>>> print(b)
Node(B, -17)
>>> myMinHeap["B"]
-17
"""
def __init__(self, array):
self.idx_of_element = {}
self.heap_dict = {}
self.heap = self.build_heap(array)
def __getitem__(self, key):
return self.get_value(key)
def get_parent_idx(self, idx):
return (idx - 1) // 2
def get_left_child_idx(self, idx):
return idx * 2 + 1
def get_right_child_idx(self, idx):
return idx * 2 + 2
def get_value(self, key):
return self.heap_dict[key]
def build_heap(self, array):
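        # Added note: bottom-up (Floyd-style) construction - record each node's index
        # and value, then sift down every internal node from the last parent up to
        # the root.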
last_idx = len(array) - 1
start_from = self.get_parent_idx(last_idx)
for idx, i in enumerate(array):
self.idx_of_element[i] = idx
self.heap_dict[i.name] = i.val
for i in range(start_from, -1, -1):
self.sift_down(i, array)
return array
# this is min-heapify method
def sift_down(self, idx, array):
while True:
l = self.get_left_child_idx(idx) # noqa: E741
r = self.get_right_child_idx(idx)
smallest = idx
if l < len(array) and array[l] < array[idx]:
smallest = l
if r < len(array) and array[r] < array[smallest]:
smallest = r
if smallest != idx:
array[idx], array[smallest] = array[smallest], array[idx]
(
self.idx_of_element[array[idx]],
self.idx_of_element[array[smallest]],
) = (
self.idx_of_element[array[smallest]],
self.idx_of_element[array[idx]],
)
idx = smallest
else:
break
def sift_up(self, idx):
p = self.get_parent_idx(idx)
while p >= 0 and self.heap[p] > self.heap[idx]:
self.heap[p], self.heap[idx] = self.heap[idx], self.heap[p]
self.idx_of_element[self.heap[p]], self.idx_of_element[self.heap[idx]] = (
self.idx_of_element[self.heap[idx]],
self.idx_of_element[self.heap[p]],
)
idx = p
p = self.get_parent_idx(idx)
def peek(self):
return self.heap[0]
def remove(self):
self.heap[0], self.heap[-1] = self.heap[-1], self.heap[0]
self.idx_of_element[self.heap[0]], self.idx_of_element[self.heap[-1]] = (
self.idx_of_element[self.heap[-1]],
self.idx_of_element[self.heap[0]],
)
x = self.heap.pop()
del self.idx_of_element[x]
self.sift_down(0, self.heap)
return x
def insert(self, node):
self.heap.append(node)
self.idx_of_element[node] = len(self.heap) - 1
self.heap_dict[node.name] = node.val
self.sift_up(len(self.heap) - 1)
def is_empty(self):
return len(self.heap) == 0
def decrease_key(self, node, new_value):
assert (
self.heap[self.idx_of_element[node]].val > new_value
), "newValue must be less that current value"
node.val = new_value
self.heap_dict[node.name] = new_value
self.sift_up(self.idx_of_element[node])
# USAGE
r = Node("R", -1)
b = Node("B", 6)
a = Node("A", 3)
x = Node("X", 1)
e = Node("E", 4)
# Use one of these two ways to generate Min-Heap
# Generating Min-Heap from array
my_min_heap = MinHeap([r, b, a, x, e])
# Generating Min-Heap by Insert method
# myMinHeap.insert(a)
# myMinHeap.insert(b)
# myMinHeap.insert(x)
# myMinHeap.insert(r)
# myMinHeap.insert(e)
# Before
print("Min Heap - before decrease key")
for i in my_min_heap.heap:
print(i)
print("Min Heap - After decrease key of node [B -> -17]")
my_min_heap.decrease_key(b, -17)
# After
for i in my_min_heap.heap:
print(i)
if __name__ == "__main__":
import doctest
doctest.testmod()
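# A small usage sketch (added for illustration, not part of the original module).
# It assumes the Node and MinHeap classes defined above and shows how
# decrease_key() drives a Dijkstra-style priority update: the node whose value
# is lowered bubbles up and becomes the new minimum once the old one is removed.
def _decrease_key_demo() -> None:
    start, a, b, c = Node("S", 0), Node("A", 10), Node("B", 7), Node("C", 15)
    heap = MinHeap([start, a, b, c])
    assert heap.peek() is start
    heap.decrease_key(c, 2)  # pretend a shorter path to C was found
    heap.remove()  # pop S (value 0)
    assert heap.peek() is c  # C (now value 2) has overtaken A and B
if __name__ == "__main__":
    _decrease_key_demo()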
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
One of the several implementations of Lempel–Ziv–Welch compression algorithm
https://en.wikipedia.org/wiki/Lempel%E2%80%93Ziv%E2%80%93Welch
"""
import math
import os
import sys
def read_file_binary(file_path: str) -> str:
"""
Reads given file as bytes and returns them as a long string
"""
result = ""
try:
with open(file_path, "rb") as binary_file:
data = binary_file.read()
for dat in data:
curr_byte = f"{dat:08b}"
result += curr_byte
return result
except OSError:
print("File not accessible")
sys.exit()
def add_key_to_lexicon(
lexicon: dict[str, str], curr_string: str, index: int, last_match_id: str
) -> None:
"""
Adds new strings (curr_string + "0", curr_string + "1") to the lexicon
"""
lexicon.pop(curr_string)
lexicon[curr_string + "0"] = last_match_id
if math.log2(index).is_integer():
for curr_key in lexicon:
lexicon[curr_key] = "0" + lexicon[curr_key]
lexicon[curr_string + "1"] = bin(index)[2:]
def compress_data(data_bits: str) -> str:
"""
Compresses given data_bits using Lempel–Ziv–Welch compression algorithm
and returns the result as a string
"""
lexicon = {"0": "0", "1": "1"}
result, curr_string = "", ""
index = len(lexicon)
for i in range(len(data_bits)):
curr_string += data_bits[i]
if curr_string not in lexicon:
continue
last_match_id = lexicon[curr_string]
result += last_match_id
add_key_to_lexicon(lexicon, curr_string, index, last_match_id)
index += 1
curr_string = ""
while curr_string != "" and curr_string not in lexicon:
curr_string += "0"
if curr_string != "":
last_match_id = lexicon[curr_string]
result += last_match_id
return result
def add_file_length(source_path: str, compressed: str) -> str:
"""
    Adds the given file's length, encoded with Elias gamma coding, in front of
    the compressed string
"""
file_length = os.path.getsize(source_path)
file_length_binary = bin(file_length)[2:]
length_length = len(file_length_binary)
return "0" * (length_length - 1) + file_length_binary + compressed
def write_file_binary(file_path: str, to_write: str) -> None:
"""
Writes given to_write string (should only consist of 0's and 1's) as bytes in the
file
"""
byte_length = 8
try:
with open(file_path, "wb") as opened_file:
result_byte_array = [
to_write[i : i + byte_length]
for i in range(0, len(to_write), byte_length)
]
if len(result_byte_array[-1]) % byte_length == 0:
result_byte_array.append("10000000")
else:
result_byte_array[-1] += "1" + "0" * (
byte_length - len(result_byte_array[-1]) - 1
)
for elem in result_byte_array:
opened_file.write(int(elem, 2).to_bytes(1, byteorder="big"))
except OSError:
print("File not accessible")
sys.exit()
def compress(source_path: str, destination_path: str) -> None:
"""
Reads source file, compresses it and writes the compressed result in destination
file
"""
data_bits = read_file_binary(source_path)
compressed = compress_data(data_bits)
compressed = add_file_length(source_path, compressed)
write_file_binary(destination_path, compressed)
if __name__ == "__main__":
compress(sys.argv[1], sys.argv[2])
| """
One of the several implementations of Lempel–Ziv–Welch compression algorithm
https://en.wikipedia.org/wiki/Lempel%E2%80%93Ziv%E2%80%93Welch
"""
import math
import os
import sys
def read_file_binary(file_path: str) -> str:
"""
Reads given file as bytes and returns them as a long string
"""
result = ""
try:
with open(file_path, "rb") as binary_file:
data = binary_file.read()
for dat in data:
curr_byte = f"{dat:08b}"
result += curr_byte
return result
except OSError:
print("File not accessible")
sys.exit()
def add_key_to_lexicon(
lexicon: dict[str, str], curr_string: str, index: int, last_match_id: str
) -> None:
"""
Adds new strings (curr_string + "0", curr_string + "1") to the lexicon
"""
lexicon.pop(curr_string)
lexicon[curr_string + "0"] = last_match_id
if math.log2(index).is_integer():
for curr_key in lexicon:
lexicon[curr_key] = "0" + lexicon[curr_key]
lexicon[curr_string + "1"] = bin(index)[2:]
def compress_data(data_bits: str) -> str:
"""
Compresses given data_bits using Lempel–Ziv–Welch compression algorithm
and returns the result as a string
"""
lexicon = {"0": "0", "1": "1"}
result, curr_string = "", ""
index = len(lexicon)
for i in range(len(data_bits)):
curr_string += data_bits[i]
if curr_string not in lexicon:
continue
last_match_id = lexicon[curr_string]
result += last_match_id
add_key_to_lexicon(lexicon, curr_string, index, last_match_id)
index += 1
curr_string = ""
while curr_string != "" and curr_string not in lexicon:
curr_string += "0"
if curr_string != "":
last_match_id = lexicon[curr_string]
result += last_match_id
return result
def add_file_length(source_path: str, compressed: str) -> str:
"""
    Adds the given file's length, encoded with Elias gamma coding, in front of
    the compressed string
"""
file_length = os.path.getsize(source_path)
file_length_binary = bin(file_length)[2:]
length_length = len(file_length_binary)
return "0" * (length_length - 1) + file_length_binary + compressed
def write_file_binary(file_path: str, to_write: str) -> None:
"""
Writes given to_write string (should only consist of 0's and 1's) as bytes in the
file
"""
byte_length = 8
try:
with open(file_path, "wb") as opened_file:
result_byte_array = [
to_write[i : i + byte_length]
for i in range(0, len(to_write), byte_length)
]
if len(result_byte_array[-1]) % byte_length == 0:
result_byte_array.append("10000000")
else:
result_byte_array[-1] += "1" + "0" * (
byte_length - len(result_byte_array[-1]) - 1
)
for elem in result_byte_array:
opened_file.write(int(elem, 2).to_bytes(1, byteorder="big"))
except OSError:
print("File not accessible")
sys.exit()
def compress(source_path: str, destination_path: str) -> None:
"""
Reads source file, compresses it and writes the compressed result in destination
file
"""
data_bits = read_file_binary(source_path)
compressed = compress_data(data_bits)
compressed = add_file_length(source_path, compressed)
write_file_binary(destination_path, compressed)
if __name__ == "__main__":
compress(sys.argv[1], sys.argv[2])
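# A small usage sketch (added for illustration, not part of the original module).
# It assumes the functions defined above: it writes a repetitive temporary file,
# compresses it with compress(), and prints both sizes. Call _lzw_demo() manually
# (e.g. after importing this module); the __main__ block above expects CLI paths.
def _lzw_demo() -> None:
    import tempfile
    with tempfile.TemporaryDirectory() as tmp_dir:
        source = os.path.join(tmp_dir, "source.bin")
        destination = os.path.join(tmp_dir, "source.lzw")
        with open(source, "wb") as out_file:
            out_file.write(b"ABABABAB" * 512)  # repetitive data compresses well
        compress(source, destination)
        print(os.path.getsize(source), "->", os.path.getsize(destination))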
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """ Luhn Algorithm """
from __future__ import annotations
def is_luhn(string: str) -> bool:
"""
Perform Luhn validation on an input string
Algorithm:
* Double every other digit starting from 2nd last digit.
* Subtract 9 if number is greater than 9.
* Sum the numbers
    * Validate: the number is valid if the total (including the check digit) is divisible by 10.
>>> test_cases = (79927398710, 79927398711, 79927398712, 79927398713,
... 79927398714, 79927398715, 79927398716, 79927398717, 79927398718,
... 79927398719)
>>> [is_luhn(str(test_case)) for test_case in test_cases]
[False, False, False, True, False, False, False, False, False, False]
"""
check_digit: int
_vector: list[str] = list(string)
__vector, check_digit = _vector[:-1], int(_vector[-1])
vector: list[int] = [int(digit) for digit in __vector]
vector.reverse()
for i, digit in enumerate(vector):
if i & 1 == 0:
doubled: int = digit * 2
if doubled > 9:
doubled -= 9
check_digit += doubled
else:
check_digit += digit
return check_digit % 10 == 0
if __name__ == "__main__":
import doctest
doctest.testmod()
assert is_luhn("79927398713")
assert not is_luhn("79927398714")
| """ Luhn Algorithm """
from __future__ import annotations
def is_luhn(string: str) -> bool:
"""
Perform Luhn validation on an input string
Algorithm:
* Double every other digit starting from 2nd last digit.
* Subtract 9 if number is greater than 9.
* Sum the numbers
    * Validate: the number is valid if the total (including the check digit) is divisible by 10.
>>> test_cases = (79927398710, 79927398711, 79927398712, 79927398713,
... 79927398714, 79927398715, 79927398716, 79927398717, 79927398718,
... 79927398719)
>>> [is_luhn(str(test_case)) for test_case in test_cases]
[False, False, False, True, False, False, False, False, False, False]
"""
check_digit: int
_vector: list[str] = list(string)
__vector, check_digit = _vector[:-1], int(_vector[-1])
vector: list[int] = [int(digit) for digit in __vector]
vector.reverse()
for i, digit in enumerate(vector):
if i & 1 == 0:
doubled: int = digit * 2
if doubled > 9:
doubled -= 9
check_digit += doubled
else:
check_digit += digit
return check_digit % 10 == 0
if __name__ == "__main__":
import doctest
doctest.testmod()
assert is_luhn("79927398713")
assert not is_luhn("79927398714")
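# A worked example (added for illustration, not part of the original module).
# It assumes is_luhn() defined above and spells out the checksum arithmetic for
# 79927398713: the doubled digits contribute 2 + 7 + 6 + 4 + 9 and the rest
# contribute 7 + 9 + 7 + 9 + 7, giving 67; adding the check digit 3 gives 70,
# which is divisible by 10, so the number is valid.
def _luhn_walkthrough(number: str = "79927398713") -> int:
    digits = [int(digit) for digit in number]
    total = digits.pop()  # start from the check digit
    for position, digit in enumerate(reversed(digits)):
        if position % 2 == 0:  # every other digit, starting with the 2nd last
            digit *= 2
            if digit > 9:
                digit -= 9
        total += digit
    assert (total % 10 == 0) == is_luhn(number)
    return total
if __name__ == "__main__":
    print(_luhn_walkthrough())  # 70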
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| -1 |
||
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
The Jaccard similarity coefficient is a commonly used indicator of the
similarity between two sets. Let U be a set and A and B be subsets of U,
then the Jaccard index/similarity is defined to be the ratio of the number
of elements of their intersection and the number of elements of their union.
Inspired by Wikipedia and
the book Mining of Massive Datasets [MMDS 2nd Edition, Chapter 3]
https://en.wikipedia.org/wiki/Jaccard_index
https://mmds.org
Jaccard similarity is widely used with MinHashing.
"""
def jaccard_similarity(set_a, set_b, alternative_union=False):
"""
Finds the jaccard similarity between two sets.
    Essentially, it is the intersection over the union.
    The alternative way to calculate this is to take the union as the sum of the
    number of items in the two sets. This leads to the jaccard similarity of a
    set with itself being 1/2 instead of 1. [MMDS 2nd Edition, Page 77]
Parameters:
:set_a (set,list,tuple): A non-empty set/list
:set_b (set,list,tuple): A non-empty set/list
:alternativeUnion (boolean): If True, use sum of number of
items as union
Output:
(float) The jaccard similarity between the two sets.
Examples:
>>> set_a = {'a', 'b', 'c', 'd', 'e'}
>>> set_b = {'c', 'd', 'e', 'f', 'h', 'i'}
>>> jaccard_similarity(set_a, set_b)
0.375
>>> jaccard_similarity(set_a, set_a)
1.0
>>> jaccard_similarity(set_a, set_a, True)
0.5
>>> set_a = ['a', 'b', 'c', 'd', 'e']
>>> set_b = ('c', 'd', 'e', 'f', 'h', 'i')
>>> jaccard_similarity(set_a, set_b)
0.375
"""
if isinstance(set_a, set) and isinstance(set_b, set):
intersection = len(set_a.intersection(set_b))
if alternative_union:
union = len(set_a) + len(set_b)
else:
union = len(set_a.union(set_b))
return intersection / union
if isinstance(set_a, (list, tuple)) and isinstance(set_b, (list, tuple)):
intersection = [element for element in set_a if element in set_b]
if alternative_union:
union = len(set_a) + len(set_b)
return len(intersection) / union
else:
union = set_a + [element for element in set_b if element not in set_a]
return len(intersection) / len(union)
return None
if __name__ == "__main__":
set_a = {"a", "b", "c", "d", "e"}
set_b = {"c", "d", "e", "f", "h", "i"}
print(jaccard_similarity(set_a, set_b))
| """
The Jaccard similarity coefficient is a commonly used indicator of the
similarity between two sets. Let U be a set and A and B be subsets of U,
then the Jaccard index/similarity is defined to be the ratio of the number
of elements of their intersection and the number of elements of their union.
Inspired by Wikipedia and
the book Mining of Massive Datasets [MMDS 2nd Edition, Chapter 3]
https://en.wikipedia.org/wiki/Jaccard_index
https://mmds.org
Jaccard similarity is widely used with MinHashing.
"""
def jaccard_similarity(set_a, set_b, alternative_union=False):
"""
Finds the jaccard similarity between two sets.
    Essentially, it is the intersection over the union.
    The alternative way to calculate this is to take the union as the sum of the
    number of items in the two sets. This leads to the jaccard similarity of a
    set with itself being 1/2 instead of 1. [MMDS 2nd Edition, Page 77]
Parameters:
:set_a (set,list,tuple): A non-empty set/list
:set_b (set,list,tuple): A non-empty set/list
:alternativeUnion (boolean): If True, use sum of number of
items as union
Output:
(float) The jaccard similarity between the two sets.
Examples:
>>> set_a = {'a', 'b', 'c', 'd', 'e'}
>>> set_b = {'c', 'd', 'e', 'f', 'h', 'i'}
>>> jaccard_similarity(set_a, set_b)
0.375
>>> jaccard_similarity(set_a, set_a)
1.0
>>> jaccard_similarity(set_a, set_a, True)
0.5
>>> set_a = ['a', 'b', 'c', 'd', 'e']
>>> set_b = ('c', 'd', 'e', 'f', 'h', 'i')
>>> jaccard_similarity(set_a, set_b)
0.375
"""
if isinstance(set_a, set) and isinstance(set_b, set):
intersection = len(set_a.intersection(set_b))
if alternative_union:
union = len(set_a) + len(set_b)
else:
union = len(set_a.union(set_b))
return intersection / union
if isinstance(set_a, (list, tuple)) and isinstance(set_b, (list, tuple)):
intersection = [element for element in set_a if element in set_b]
if alternative_union:
union = len(set_a) + len(set_b)
return len(intersection) / union
else:
union = set_a + [element for element in set_b if element not in set_a]
return len(intersection) / len(union)
return None
if __name__ == "__main__":
set_a = {"a", "b", "c", "d", "e"}
set_b = {"c", "d", "e", "f", "h", "i"}
print(jaccard_similarity(set_a, set_b))
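# A worked example (added for illustration, not part of the original module).
# It assumes jaccard_similarity() defined above: for the sets used in __main__,
# |A & B| = 3 and |A | B| = 8, so the index is 3 / 8 = 0.375, while the
# "alternative union" variant divides by len(A) + len(B) = 11 instead.
def _jaccard_walkthrough() -> None:
    sample_a = {"a", "b", "c", "d", "e"}
    sample_b = {"c", "d", "e", "f", "h", "i"}
    intersection_size = len(sample_a & sample_b)  # 3
    union_size = len(sample_a | sample_b)  # 8
    assert jaccard_similarity(sample_a, sample_b) == intersection_size / union_size
    assert jaccard_similarity(sample_a, sample_b, True) == intersection_size / (
        len(sample_a) + len(sample_b)
    )
if __name__ == "__main__":
    _jaccard_walkthrough()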
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
Linear regression is the most basic type of regression commonly used for
predictive analysis. The idea is pretty simple: we have a dataset and we have
features associated with it. Features should be chosen very cautiously
as they determine how much our model will be able to make future predictions.
We try to set the weight of these features, over many iterations, so that they best
fit our dataset. In this particular code, we use a CSGO dataset (ADR vs
Rating). We try to fit a line of best fit through the dataset and estimate the parameters.
"""
import numpy as np
import requests
def collect_dataset():
"""Collect dataset of CSGO
The dataset contains ADR vs Rating of a Player
:return : dataset obtained from the link, as matrix
"""
response = requests.get(
"https://raw.githubusercontent.com/yashLadha/The_Math_of_Intelligence/"
"master/Week1/ADRvsRating.csv"
)
lines = response.text.splitlines()
data = []
for item in lines:
item = item.split(",")
data.append(item)
data.pop(0) # This is for removing the labels from the list
dataset = np.matrix(data)
return dataset
def run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta):
"""Run steep gradient descent and updates the Feature vector accordingly_
:param data_x : contains the dataset
:param data_y : contains the output associated with each data-entry
:param len_data : length of the data_
:param alpha : Learning rate of the model
:param theta : Feature vector (weight's for our model)
    :return : Updated features, using
curr_features - alpha_ * gradient(w.r.t. feature)
"""
n = len_data
prod = np.dot(theta, data_x.transpose())
prod -= data_y.transpose()
sum_grad = np.dot(prod, data_x)
theta = theta - (alpha / n) * sum_grad
return theta
def sum_of_square_error(data_x, data_y, len_data, theta):
"""Return sum of square error for error calculation
:param data_x : contains our dataset
:param data_y : contains the output (result vector)
:param len_data : len of the dataset
:param theta : contains the feature vector
:return : sum of square error computed from given feature's
"""
prod = np.dot(theta, data_x.transpose())
prod -= data_y.transpose()
sum_elem = np.sum(np.square(prod))
error = sum_elem / (2 * len_data)
return error
def run_linear_regression(data_x, data_y):
"""Implement Linear regression over the dataset
:param data_x : contains our dataset
:param data_y : contains the output (result vector)
:return : feature for line of best fit (Feature vector)
"""
iterations = 100000
alpha = 0.0001550
no_features = data_x.shape[1]
len_data = data_x.shape[0] - 1
theta = np.zeros((1, no_features))
for i in range(0, iterations):
theta = run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta)
error = sum_of_square_error(data_x, data_y, len_data, theta)
print(f"At Iteration {i + 1} - Error is {error:.5f}")
return theta
def mean_absolute_error(predicted_y, original_y):
"""Return sum of square error for error calculation
:param predicted_y : contains the output of prediction (result vector)
:param original_y : contains values of expected outcome
:return : mean absolute error computed from given feature's
"""
total = sum(abs(y - predicted_y[i]) for i, y in enumerate(original_y))
return total / len(original_y)
def main():
"""Driver function"""
data = collect_dataset()
len_data = data.shape[0]
data_x = np.c_[np.ones(len_data), data[:, :-1]].astype(float)
data_y = data[:, -1].astype(float)
theta = run_linear_regression(data_x, data_y)
len_result = theta.shape[1]
print("Resultant Feature vector : ")
for i in range(0, len_result):
print(f"{theta[0, i]:.5f}")
if __name__ == "__main__":
main()
| """
Linear regression is the most basic type of regression commonly used for
predictive analysis. The idea is pretty simple: we have a dataset and we have
features associated with it. Features should be chosen very cautiously
as they determine how much our model will be able to make future predictions.
We try to set the weight of these features, over many iterations, so that they best
fit our dataset. In this particular code, we use a CSGO dataset (ADR vs
Rating). We try to fit a line of best fit through the dataset and estimate the parameters.
"""
import numpy as np
import requests
def collect_dataset():
"""Collect dataset of CSGO
The dataset contains ADR vs Rating of a Player
:return : dataset obtained from the link, as matrix
"""
response = requests.get(
"https://raw.githubusercontent.com/yashLadha/The_Math_of_Intelligence/"
"master/Week1/ADRvsRating.csv"
)
lines = response.text.splitlines()
data = []
for item in lines:
item = item.split(",")
data.append(item)
data.pop(0) # This is for removing the labels from the list
dataset = np.matrix(data)
return dataset
def run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta):
"""Run steep gradient descent and updates the Feature vector accordingly_
:param data_x : contains the dataset
:param data_y : contains the output associated with each data-entry
:param len_data : length of the data_
:param alpha : Learning rate of the model
:param theta : Feature vector (weight's for our model)
    :return : Updated features, using
curr_features - alpha_ * gradient(w.r.t. feature)
"""
n = len_data
prod = np.dot(theta, data_x.transpose())
prod -= data_y.transpose()
sum_grad = np.dot(prod, data_x)
theta = theta - (alpha / n) * sum_grad
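    # i.e. theta <- theta - (alpha / n) * (theta @ X.T - y.T) @ X, the batch
    # gradient step for the squared-error cost J = sum((theta @ X.T - y.T)^2) / (2n)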
return theta
def sum_of_square_error(data_x, data_y, len_data, theta):
"""Return sum of square error for error calculation
:param data_x : contains our dataset
:param data_y : contains the output (result vector)
:param len_data : len of the dataset
:param theta : contains the feature vector
:return : sum of square error computed from given feature's
"""
prod = np.dot(theta, data_x.transpose())
prod -= data_y.transpose()
sum_elem = np.sum(np.square(prod))
error = sum_elem / (2 * len_data)
return error
def run_linear_regression(data_x, data_y):
"""Implement Linear regression over the dataset
:param data_x : contains our dataset
:param data_y : contains the output (result vector)
:return : feature for line of best fit (Feature vector)
"""
iterations = 100000
alpha = 0.0001550
no_features = data_x.shape[1]
len_data = data_x.shape[0] - 1
theta = np.zeros((1, no_features))
for i in range(0, iterations):
theta = run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta)
error = sum_of_square_error(data_x, data_y, len_data, theta)
print(f"At Iteration {i + 1} - Error is {error:.5f}")
return theta
def mean_absolute_error(predicted_y, original_y):
"""Return sum of square error for error calculation
:param predicted_y : contains the output of prediction (result vector)
:param original_y : contains values of expected outcome
:return : mean absolute error computed from given feature's
"""
total = sum(abs(y - predicted_y[i]) for i, y in enumerate(original_y))
return total / len(original_y)
def main():
"""Driver function"""
data = collect_dataset()
len_data = data.shape[0]
data_x = np.c_[np.ones(len_data), data[:, :-1]].astype(float)
data_y = data[:, -1].astype(float)
theta = run_linear_regression(data_x, data_y)
len_result = theta.shape[1]
print("Resultant Feature vector : ")
for i in range(0, len_result):
print(f"{theta[0, i]:.5f}")
if __name__ == "__main__":
main()
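# A small offline sketch (added for illustration, not part of the original module).
# It reuses run_steep_gradient_descent() and sum_of_square_error() on a tiny
# synthetic dataset (y = 1 + 2x) instead of the downloaded CSGO CSV, so it runs
# without network access. Call _synthetic_fit_demo() manually; the learned
# parameters should approach [1, 2].
def _synthetic_fit_demo() -> None:
    x = np.arange(0, 10, dtype=float)
    data_x = np.c_[np.ones(len(x)), x]  # bias column + single feature
    data_y = (1.0 + 2.0 * x).reshape(-1, 1)  # column vector, as in main()
    theta = np.zeros((1, data_x.shape[1]))
    for _ in range(10000):
        theta = run_steep_gradient_descent(data_x, data_y, len(x), 0.01, theta)
    print("fitted parameters :", theta)
    print("final SSE :", sum_of_square_error(data_x, data_y, len(x), theta))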
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
Hash map with open addressing.
https://en.wikipedia.org/wiki/Hash_table
Another hash map implementation, with a good explanation.
Modern Dictionaries by Raymond Hettinger
https://www.youtube.com/watch?v=p33CVV29OG8
"""
from collections.abc import Iterator, MutableMapping
from dataclasses import dataclass
from typing import Generic, TypeVar
KEY = TypeVar("KEY")
VAL = TypeVar("VAL")
@dataclass(frozen=True, slots=True)
class _Item(Generic[KEY, VAL]):
key: KEY
val: VAL
class _DeletedItem(_Item):
def __init__(self) -> None:
super().__init__(None, None)
def __bool__(self) -> bool:
return False
_deleted = _DeletedItem()
class HashMap(MutableMapping[KEY, VAL]):
"""
Hash map with open addressing.
"""
def __init__(
self, initial_block_size: int = 8, capacity_factor: float = 0.75
) -> None:
self._initial_block_size = initial_block_size
self._buckets: list[_Item | None] = [None] * initial_block_size
assert 0.0 < capacity_factor < 1.0
self._capacity_factor = capacity_factor
self._len = 0
def _get_bucket_index(self, key: KEY) -> int:
return hash(key) % len(self._buckets)
def _get_next_ind(self, ind: int) -> int:
"""
Get next index.
Implements linear open addressing.
"""
return (ind + 1) % len(self._buckets)
def _try_set(self, ind: int, key: KEY, val: VAL) -> bool:
"""
Try to add value to the bucket.
        If the bucket is empty or already holds the same key, the value is
        inserted and True is returned. If the bucket holds another key or a
        deleted placeholder, the next bucket has to be checked instead.
"""
stored = self._buckets[ind]
if not stored:
self._buckets[ind] = _Item(key, val)
self._len += 1
return True
elif stored.key == key:
self._buckets[ind] = _Item(key, val)
return True
else:
return False
def _is_full(self) -> bool:
"""
Return true if we have reached safe capacity.
So we need to increase the number of buckets to avoid collisions.
"""
limit = len(self._buckets) * self._capacity_factor
return len(self) >= int(limit)
def _is_sparse(self) -> bool:
"""Return true if we need twice fewer buckets when we have now."""
if len(self._buckets) <= self._initial_block_size:
return False
limit = len(self._buckets) * self._capacity_factor / 2
return len(self) < limit
def _resize(self, new_size: int) -> None:
old_buckets = self._buckets
self._buckets = [None] * new_size
self._len = 0
for item in old_buckets:
if item:
self._add_item(item.key, item.val)
def _size_up(self) -> None:
self._resize(len(self._buckets) * 2)
def _size_down(self) -> None:
self._resize(len(self._buckets) // 2)
def _iterate_buckets(self, key: KEY) -> Iterator[int]:
ind = self._get_bucket_index(key)
for _ in range(len(self._buckets)):
yield ind
ind = self._get_next_ind(ind)
def _add_item(self, key: KEY, val: VAL) -> None:
for ind in self._iterate_buckets(key):
if self._try_set(ind, key, val):
break
def __setitem__(self, key: KEY, val: VAL) -> None:
if self._is_full():
self._size_up()
self._add_item(key, val)
def __delitem__(self, key: KEY) -> None:
for ind in self._iterate_buckets(key):
item = self._buckets[ind]
if item is None:
raise KeyError(key)
if item is _deleted:
continue
if item.key == key:
self._buckets[ind] = _deleted
self._len -= 1
break
if self._is_sparse():
self._size_down()
def __getitem__(self, key: KEY) -> VAL:
for ind in self._iterate_buckets(key):
item = self._buckets[ind]
if item is None:
break
if item is _deleted:
continue
if item.key == key:
return item.val
raise KeyError(key)
def __len__(self) -> int:
return self._len
def __iter__(self) -> Iterator[KEY]:
yield from (item.key for item in self._buckets if item)
def __repr__(self) -> str:
val_string = " ,".join(
f"{item.key}: {item.val}" for item in self._buckets if item
)
return f"HashMap({val_string})"
| """
Hash map with open addressing.
https://en.wikipedia.org/wiki/Hash_table
Another hash map implementation, with a good explanation.
Modern Dictionaries by Raymond Hettinger
https://www.youtube.com/watch?v=p33CVV29OG8
"""
from collections.abc import Iterator, MutableMapping
from dataclasses import dataclass
from typing import Generic, TypeVar
KEY = TypeVar("KEY")
VAL = TypeVar("VAL")
@dataclass(frozen=True, slots=True)
class _Item(Generic[KEY, VAL]):
key: KEY
val: VAL
class _DeletedItem(_Item):
def __init__(self) -> None:
super().__init__(None, None)
def __bool__(self) -> bool:
return False
_deleted = _DeletedItem()
class HashMap(MutableMapping[KEY, VAL]):
"""
Hash map with open addressing.
"""
def __init__(
self, initial_block_size: int = 8, capacity_factor: float = 0.75
) -> None:
self._initial_block_size = initial_block_size
self._buckets: list[_Item | None] = [None] * initial_block_size
assert 0.0 < capacity_factor < 1.0
self._capacity_factor = capacity_factor
self._len = 0
def _get_bucket_index(self, key: KEY) -> int:
return hash(key) % len(self._buckets)
def _get_next_ind(self, ind: int) -> int:
"""
Get next index.
Implements linear open addressing.
"""
return (ind + 1) % len(self._buckets)
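        # Example (illustrative): with 8 buckets, probing from index 6 visits 6, 7, 0, 1, ...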
def _try_set(self, ind: int, key: KEY, val: VAL) -> bool:
"""
Try to add value to the bucket.
        If the bucket is empty or already holds the same key, the value is
        inserted and True is returned. If the bucket holds another key or a
        deleted placeholder, the next bucket has to be checked instead.
"""
stored = self._buckets[ind]
if not stored:
self._buckets[ind] = _Item(key, val)
self._len += 1
return True
elif stored.key == key:
self._buckets[ind] = _Item(key, val)
return True
else:
return False
def _is_full(self) -> bool:
"""
Return true if we have reached safe capacity.
So we need to increase the number of buckets to avoid collisions.
"""
limit = len(self._buckets) * self._capacity_factor
return len(self) >= int(limit)
def _is_sparse(self) -> bool:
"""Return true if we need twice fewer buckets when we have now."""
if len(self._buckets) <= self._initial_block_size:
return False
limit = len(self._buckets) * self._capacity_factor / 2
return len(self) < limit
def _resize(self, new_size: int) -> None:
old_buckets = self._buckets
self._buckets = [None] * new_size
self._len = 0
for item in old_buckets:
if item:
self._add_item(item.key, item.val)
def _size_up(self) -> None:
self._resize(len(self._buckets) * 2)
def _size_down(self) -> None:
self._resize(len(self._buckets) // 2)
def _iterate_buckets(self, key: KEY) -> Iterator[int]:
ind = self._get_bucket_index(key)
for _ in range(len(self._buckets)):
yield ind
ind = self._get_next_ind(ind)
def _add_item(self, key: KEY, val: VAL) -> None:
for ind in self._iterate_buckets(key):
if self._try_set(ind, key, val):
break
def __setitem__(self, key: KEY, val: VAL) -> None:
if self._is_full():
self._size_up()
self._add_item(key, val)
def __delitem__(self, key: KEY) -> None:
for ind in self._iterate_buckets(key):
item = self._buckets[ind]
if item is None:
raise KeyError(key)
if item is _deleted:
continue
if item.key == key:
self._buckets[ind] = _deleted
self._len -= 1
break
if self._is_sparse():
self._size_down()
def __getitem__(self, key: KEY) -> VAL:
for ind in self._iterate_buckets(key):
item = self._buckets[ind]
if item is None:
break
if item is _deleted:
continue
if item.key == key:
return item.val
raise KeyError(key)
def __len__(self) -> int:
return self._len
def __iter__(self) -> Iterator[KEY]:
yield from (item.key for item in self._buckets if item)
def __repr__(self) -> str:
val_string = " ,".join(
f"{item.key}: {item.val}" for item in self._buckets if item
)
return f"HashMap({val_string})"
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| {
"001": "c0b20f4665d0388d564f0b6ecf3edc9f9480cb15fff87198b95701d9f5fe1f7b",
"002": "1f5882e19314ac13acca52ad5503184b3cb1fd8dbeea82e0979d799af2361704",
"003": "5c09f0554518a413e58e6bc5964ba90655713483d0b2bbc94572ad6b0b4dda28",
"004": "aa74f52b4c428d89606b411bc165eb81a6266821ecc9b4f30cdb70c5c930f4d9",
"005": "1ba90ab11bfb2d2400545337212b0de2a5c7f399215175ade6396e91388912b1",
"006": "537942be3eb323c507623a6a73fa87bf5aeb97b7c7422993a82aa7c15f6d9cd6",
"007": "ecbe74e25cfa4763dbc304ccac2ffb9912e9625cd9993a84bd0dd6d7dc0ca021",
"008": "b9fb30b6553415e9150051ce5710a93d0f55b22557c0068d8e16619a388f145a",
"009": "d912d9d473ef86f12da1fb2011c5c0c155bd3a0ebdb4bbd7ea275cecdcb63731",
"010": "bed2d160e02f0540f19a64ca738aacb79cfcd08ba7e2421567b16cb6e7e3e90e",
"011": "9ded5bc849d33e477aa9c944138d34f0aacc485a372e84464e8a572712a5b7da",
"012": "3e7be445b6c19e6db58c2482005c1f78cb74011a4279249ca632011a9f1b61a2",
"013": "3cb265a96c5645a9ad11d47551f015c25f3f99792c951617656d84626fbc4868",
"014": "78a262dd40eba0f7195686ec7f3891a39437523456f8d16fa9065a34409eeac6",
"015": "7b8f812ca89e311e1b16b903de76fa7b0800a939b3028d9dc4d35f6fa4050281",
"016": "a6f988d30328bd706c66f8ac0d92aac21dd732149cdd69cb31f459dca20c5abe",
"017": "1a455b216c6e916943acf3fa4c7e57a7a5cac66d97cc51befca810c223ef9c23",
"018": "fde3f2e7127f6810eb4160bf7bb0563240d78c9d75a9a590b6d6244748a7f4ff",
"019": "284de502c9847342318c17d474733ef468fbdbe252cddf6e4b4be0676706d9d0",
"020": "c86a2932e1c79343a3c16fb218b9944791aaeedd3e30c87d1c7f505c0e588f7c",
"021": "e8c6ef4a1736a245b5682e0262c5c43862cfb233ca5e286be2f5bb4d8a974ecf",
"022": "85148c096c25e3ed3da55c7e9c89448018b0f5f53ad8d042129c33d9beac6736",
"023": "42e2552a2f589e021824339e2508629ffa00b3489ea467f47e77a1ea97e735c9",
"024": "4677b3d9daa3b30a9665e4558f826e04f7833dda886b8ef24f7176519a0db537",
"025": "7d398da8791745001b3d1c41030676d1c036687eb1ab32e0b5a1832e7579c073",
"026": "fbe10beedf9d29cf53137ba38859ffd1dbe7642cedb7ef0a102a3ab109b47842",
"027": "e4110e0852a2f70703f0081fc91c4a20f595919a038729cb37c564d68b875c6f",
"028": "261171a770d594f6a7fc76c1a839eda7f6dd4e9495e00e75048578fc86d8adf0",
"029": "a207c35d8417aeed4c9e78bcf83f936cd8191c702893be62aa690ce16bc909ca",
"030": "46e68e4199ab0a663ab306651528b06756556c9f0d8b819095af45e036dfbe6b",
"031": "8de34b4ba97b184c7a2096b9266776175242b87d67bc8d77d7289be6f70cd105",
"032": "0d246750daa7f1b367a21f55da454ddc8f62e0a95d163062e9b9273320d5130f",
"033": "ad57366865126e55649ecb23ae1d48887544976efea46a48eb5d85a6eeb4d306",
"034": "728b8d7d6d5d34cad9cbb7c3ea15f807ae57144594b1740b3c73b82314ccd1ed",
"035": "02d20bbd7e394ad5999a4cebabac9619732c343a4cac99470c03e23ba2bdc2bc",
"036": "9480c0160719234b57defc0681c0949a175ffb3ff4a3bf5e8163ac843f383f35",
"037": "e9800abda89919edac504e90dac91f95e0778e3ba0f21a0bac4e77a84766eaaf",
"038": "b2004522103364a6e842b9d042c0707d79af68dec7810078729d061fb7948912",
"039": "fd0f7e53c5b02b688a57ee37f3d52065cb168a7b9fd5a3abd93d37e1559fbd30",
"040": "d29d53701d3c859e29e1b90028eec1ca8e2f29439198b6e036c60951fb458aa1",
"041": "bf05020e70de94e26dba112bb6fb7b0755db5ca88c7225e99187c5a08c8a0428",
"042": "79d6eaa2676189eb927f2e16a70091474078e2117c3fc607d35cdc6b591ef355",
"043": "6512f20c244844b6130204379601855098826afa1b55ff91c293c853ddf67db5",
"044": "97e2524fd3796e83b06c0f89fdcb16e4c544e76e9c0496f57ac84834869f4cc3",
"045": "8b0300d71656b9cf0716318be9453c99a13bb8644d227fd683d06124e6a28b35",
"046": "8485ee802cc628b8cbd82476133d11b57af87e00711516a703525a9af0193b12",
"047": "c7274da71333bd93201fa1e05b1ed54e0074d83f259bd7148c70ddc43082bde1",
"048": "743d17cbff06ab458b99ecbb32e1d6bb9a7ff2ac804118f7743177dd969cfc61",
"049": "47c6094ff1ff6e37788def89190c8256619ef1511681c503fea02c171569d16e",
"050": "6ee74ef623df9fb69facd30b91ed78fe70370462bb267097f0dfeef9d9b057bb",
"051": "d17cec28356b4f9a7f1ec0f20cca4c89e270aeb0e75d70d485b05bb1f28e9f6d",
"052": "ebd72b510911af3e254a030cd891cb804e1902189eee7a0f6199472eb5e4dba2",
"053": "9705cc6128a60cc22581217b715750a6053b2ddda67cc3af7e14803b27cf0c1f",
"054": "12e2c8df501501b2bb531e941a737ffa7a2a491e849c5c5841e3b6132291bc35",
"055": "9f484139a27415ae2e8612bf6c65a8101a18eb5e9b7809e74ca63a45a65f17f4",
"056": "3658d7fa3c43456f3c9c87db0490e872039516e6375336254560167cc3db2ea2",
"057": "620c9c332101a5bae955c66ae72268fbcd3972766179522c8deede6a249addb7",
"058": "196f327021627b6a48db9c6e0a3388d110909d4bb957eb3fbc90ff1ecbda42cb",
"059": "0295239a9d71f7452b93e920b7e0e462f712af5444579d25e06b9614ed77de74",
"060": "ad7c26db722221bfb1bf7e3c36b501bedf8be857b1cfa8664fccb074b54354f9",
"061": "94e4fb283c1abcccae4b8b28e39a294a323cdc9732c3d3ce1133c518d0a286f6",
"062": "d25a595036aa8722157aca38f90084acb369b00df1070f49e203d5a3b7a0736d",
"063": "0e17daca5f3e175f448bacace3bc0da47d0655a74c8dd0dc497a3afbdad95f1f",
"064": "6d62aa4b52071e39f064a930d190b85ab327eb1a5045a8050ac538666ee765ca",
"065": "1c6c0bb2c7ecdc3be8e134f79b9de45155258c1f554ae7542dce48f5cc8d63f0",
"066": "316c0f93c7fe125865d85d6e7e7a31b79e9a46c414c45078b732080fa22ef2a3",
"067": "53f66b6783cb7552d83015df01b0d5229569fce1dd7d1856335c7244b9a3ded6",
"068": "4bf689d09a156621220881a2264dc031b2bfb181213b26d6ff2c338408cf94c3",
"069": "79555e4b891e2885525c136f8b834cc0b1e9416960b12e371111a5cb2da0479f",
"070": "08c6a7c8c06a01d2b17993ada398084b0707652bcfbd580f9173bcddf120ac2c",
"071": "63f032489227c969135c6a6571fe9b33d6970dc6eca32c2086c61a4a099c98fa",
"072": "9ef8a4249d4b8f24147ab6e9ad2536eb04f10fb886a8099e88e0e7c41cf7c616",
"073": "ae9f9c786cd0f24fe03196d5061545862d87a208580570d46e2cfb371319aa68",
"074": "b7c7470e59e2a2df1bfd0a4705488ee6fe0c5c125de15cccdfab0e00d6c03dc0",
"075": "8a426e100572b8e2ea7c1b404a1ee694699346632cf4942705c54f05162bc07a",
"076": "81c54809c3bdfc23f844fde21ae645525817b6e1bee1525270f49282888a5546",
"077": "7f2253d7e228b22a08bda1f09c516f6fead81df6536eb02fa991a34bb38d9be8",
"078": "71374036b661ac8ffe4b78c191050c3ccd1c956ca8a5f465ea1956f7ce571f63",
"079": "2df095aea1862ebfed8df7fb26e8c4a518ca1a8f604a31cfba9da991fc1d6422",
"080": "58bfe3a44f8ae452aaa6ef6267bafc3e841cfe7f9672bdfeb841d2e3a62c1587",
"081": "04bad90d08bdf11010267ec9d1c9bbb49a813194dace245868ea8140aec9a1f7",
"082": "52c42c55daea3131d5357498b8a0ddcf99d1babd16f6ccaee67cb3d0a665b772",
"083": "a825281bc5ce8fe70d66a04e96314e7de070f11fed0f78bc81e007ca7c92e8b0",
"084": "692a776beae0e92d1121fed36427c10d0860344614ead6b4760d1b7091a6ab1f",
"085": "7b2e7211fb4f4d8352c9215c591252344775c56d58b9a5ff88bda8358628ec4e",
"086": "8ffe8459134b46975acd31df13a50c51dbeacf1c19a764bf1602ba7c73ffc8fb",
"087": "cec1917df3b3ee1f43b3468596ed3042df700dc7a752fefc06c4142a2832995d",
"088": "c06356fdcaff01810e1f794263f3e44a75f28e8902a145a0d01a1fff77614722",
"089": "0df5486b7bca884d5f00c502e216f734b2865b202397f24bca25ac9b8a95ab4a",
"090": "cb69775effd93fc34ef38dfbfcdc4c593b1a3d8e7ab70c0f05d627dbc5cbd298",
"091": "327f057e054d1e6a9a1be4ac6acc4b1dedc63d8a88222396ffe98b3194067347",
"092": "538cd20a275b610698691d714b2adf4e4c321915def05667f4d25d97413ec076",
"093": "d8ed8ca27d83a63df6982905ea53b4613b9d7974edcee06f301cf43d63177f47",
"094": "d1b79281d95ce5bfa848060de4e0c80af2c3cae1ff7453cca31ff31e2d67ac14",
"095": "0a3ddcd71cf30a567070630f947ab79fc168865ba0bf112aed9b71fb4e76c32f",
"096": "9c527d233befbf357335e18e6dd5b14ef3a62e19ef34f90bd3fb9e5a2a0a0111",
"097": "f0e2911e303617c9648692ee8056beeb045d89e469315716abed47cd94a3cd56",
"098": "ededac5db280586f534cde4f69ce2c134d2360d6b5da3c3ebc400494cc016e78",
"099": "92c5fd0421c1d619cbf1bdba83a207261f2c5f764aed46db9b4d2de03b72b654",
"100": "993189cbf49fef4c913aa081f2ef44d360b84bf33d19df93fce4663ac34e9927",
"101": "e8539f8b271851cad65d551354874d3086fa9ff7b6f6a2ab9890d63f5ba16c68",
"102": "9d693eeee1d1899cbc50b6d45df953d3835acf28ee869879b45565fccc814765",
"103": "1f17277005b8d58ad32f2cbee4c482cb8c0f3687c3cfe764ec30ee99827c3b1d",
"104": "87dfcf5471e77980d098ff445701dbada0f6f7bac2fa5e43fa7685ec435040e1",
"105": "a76f4e7fa1357a955743d5c0acb2e641c50bcaf0eec27eb4aaffebb45fe12994",
"106": "197f5e68d1e83af7e40a7c7717acc6a99767bf8c53eece9253131a3790a02063",
"107": "bf13bc90121776d7de3c4c3ca4c400a4c12284c3da684b3d530113236813ce81",
"108": "3dea386e2c4a8a0633b667fdd4beacd8bb3fe27c282f886c828ad7d6b42c2d73",
"109": "735cc3e619b9a1e3ac503ba5195c43c02d968113fd3795373ca085ed7777b54d",
"110": "01b4e8163485356b46f612c9d40ed4b8602621d4d02289623e7dbb3dcbe03395",
"111": "97c1b054c094337ec1397cd5ccdf6c9efe1067ad16f531824a94eaadb3c0953b",
"112": "c99c843e0f6d6566132d97c829780332218e005efc14b633e25a5badb912d63a",
"113": "8dbc8319e5d8923ef7ab42108341ee2c32a34ffc0d19d5ae5677f1564813314a",
"114": "b3b9ebc9f9ddadb6b630eeef5d7ba724b3bb4d071b249318093eb7547949bbb9",
"115": "80c3cd40fa35f9088b8741bd8be6153de05f661cfeeb4625ffbf5f4a6c3c02c4",
"116": "a39208d7130682b772d6206acd746bc3779cc1bc0033f0a472e97993d0a32d5b",
"117": "54201fbc7a70d21c1b0acede7708f1658d8e87032ab666001e888e7887c67d50",
"118": "834e6235764ae632737ebf7cd0be66634c4fb70fe1e55e858efd260a66a0e3a9",
"119": "bcabd9609d7293a3a3f1640c2937e302fa52ff03a95c117f87f2c465817eba5e",
"120": "2bd8cabf5aecfcadde03beda142ac26c00b6ccfc59fdcb685672cd79a92f63a6",
"121": "5292478e83f6b244c7c7c5c1fe272121abdc2982f66ed11fcbc6ea7e73af124d",
"122": "6d78b19a042a64f08cc4df0d42fb91cd757829718e60e82a54e3498f03f3ef32",
"123": "057b9b6e49d03958b3f38e812d2cfdd0f500e35e537b4fa9afedd2f3444db8a2",
"124": "d251170c5287da10bffc1ac8af344e0c434ef5f649fd430fcf1049f90d45cf45",
"125": "e9b7a676dc359ffce7075886373af79e3348ddbf344502614d9940eecd0532c1",
"126": "38752ed2e711a3c001d5139cb3c945c0f780939db4ea80d68f31e6763b11cfba",
"127": "e707d9f315269a34d94d9d9fa4f8b29328e66b53447ef38419c6033e57d5d123",
"128": "5e15922fba7f61ddccb2ee579b5ec35034cc32def25ff156ae2b0a3e98c4414e",
"129": "3cc4ad1254491787f52a66e595dbb573e13ceb554c51d81e42d5490a575da070",
"130": "7a6e9899cccb6a01e05013c622422717f54853f7f2581bc3b88a78b25981da08",
"131": "4a8596a7790b5ca9e067da401c018b3206befbcf95c38121854d1a0158e7678a",
"132": "ed77e05f47f7f19f09cae9b272bfd6daa1682b426d39dcb7f473234c0c9381c5",
"133": "e456d3fec55d88653dd88c3f8bbde1f8240c8ceb7882016d19e6f329e412a4ae",
"134": "b144116982f4f0930038299efbdd56afc1528ef59047fb75bade8777309fde4b",
"135": "0709e1008834c2ca8648376ac62d74ac8df5457069cbfedf2b0776dab07a3c5b",
"136": "84692ebaa4fc17e9cfce27126b3fc5a92c1e33e1d94658de0544f8b35a597592",
"137": "6eca481578c967fb9373fe4ce7930b39d8eefe1c0c9c7cb5af241a766bd4dfbc",
"138": "1b5f0f504917592dea2e878497b0e12655366f2a7a163e0a081d785124982d2c",
"139": "0d2f26ec4004c99171fc415282ec714afa617208480b45aeb104c039dc653f5d",
"140": "78ceab5e434a86a6a6bb4f486707bffaf536ef6cb2cc5b45a90b3edd89a03283",
"141": "d74ae4b07f05779065fb038b35d85a21444ed3bed2373f51d9e22d85a16a704c",
"142": "f59af8b0b63a3d0eb580405092c1539261aec18890ea5b6d6e2d93697d67cd38",
"143": "66e9d1093f00eef9a32e704232219f462138f13c493cc0775c507cf51cb231ed",
"144": "09a1b036b82baba3177d83c27c1f7d0beacaac6de1c5fdcc9680c49f638c5fb9",
"145": "b910b9b7bf3c3f071e410e0474958931a022d20c717a298a568308250ed2b0da",
"146": "5292f0166523ea1a89c9f7f2d69064dee481a7d3c119841442cd36f03c42b657",
"147": "cdb162a8a376b1df385dac44ce7b10777c9fea049961cb303078ebbd08d70de8",
"148": "54f631973f7bc002a958b818a1e99e2fc1a91c41eafe19b9136fac9a4eb8d7b8",
"149": "c49382eb9fc41e750239ac7b209513a266e80a268c83cf4d0c79f571629bac48",
"150": "c89b0574a2e2f4a63845fe0fd9d51122d9d4149d887995051c3e53d2244bba41",
"151": "5d09e3b57ced9fd215acc96186743e422ce48900b8992c9b6c74d3e4117e4140",
"152": "c3ea99f86b2f8a74ef4145bb245155ff5f91cd856f287523481c15a1959d5fd1",
"153": "fb57f89f289ee59c36cede64d2d13828b8997766b49aa4530aabfc18ff4a4f17",
"154": "c877d90a178511a52ae2b2119e99e0b8b643cec44d5fd864bd3ef3e0d7f1f4bb",
"155": "58801bebc14c905b79c209affab74e176e2e971c1d9799a1a342ae6a3c2afbc1",
"156": "983d2222220ab7ffa243f48274f6eb82f92258445b93b23724770995575d77fe",
"157": "023344e94ad747fbc529e3e68b95e596badcc445c85c1c7c8fa590e3d492779a",
"158": "d1b58f4c07d1db5eb97785807b6d97a0d1ee1340e7dbcc7bb289f3547559f2fc",
"159": "cd3a3d2cf8973c5f2c97ebed2460784818513e7d0fee8f98f4fdcf510295e159",
"160": "3a926519b024ea9df5e7ad79d0b1c4400f78f58d07834f5ecd7be522112b676d",
"161": "2b3d09a4c76b282db1428342c82c5a55c0ab57c7a7640e0850d82021164477e9",
"162": "d50ce1ab3a25a5c5e020517092128ab3ec4a3bd5b58673b2e6cda86bcc0c48a0",
"163": "7e17ce0fca5d559f76015765c652d76b8468f9ddc91c2069d7799867b9d52769",
"164": "5c680d0b2c4dfac8aade87be60cb4d04a4c3d4db398f51e2cbf131d699b630a8",
"165": "304de2e63f91f8f74faaebae7a7ec2e0c6e0d8d322d8a747e4e3be88de2d3505",
"166": "14212843872dab188a86eb1f0f7012e1b72ea1097545f29377b3b9b52822af76",
"167": "18c18f8710f831a82eb74ae979bd36d609bee818c972ff88f8d8fa065270f951",
"168": "66640021d592f79b510f9d6101bd8eca89893187d23919c8edff4075e73ae390",
"169": "819b01e0394727fd089f84b9351243176920f03d0c6e33e5ff136016da5d8d4e",
"170": "e68fadd33a8c41d9a259577a278d8518aeb8b81c67b8cf03ccf43fc451ec8bd8",
"171": "33bf9ed4714b0e5da8828f8b3d9d3e9d0cf55c1d496324acb04a3f195252749c",
"172": "b9a27b513dc15448772cac5e914de61f02efe323f528892c0bff86d19913a6bd",
"173": "1b2a5e44fda5dfee3ce230f44fe73c672249f6620cdbaa343ba0ba808034958c",
"174": "98aabf085c6c8647f5e8a4775dc1d036513742d8e03b8c5c51e41bdfc9c3e7ae",
"175": "c03dcb22b7faf121d99542018dd10a07a778abee2678d35c03394a8d207b826b",
"176": "4fff1a7beda4291446d76e5ed5177c3f36e52a10481009fdaf2976da39e802ae",
"177": "614d3df0ba5fdffab2328eff8e9ca2d76b09bbc447c06bf1fab0419ae278fae9",
"178": "094a2ba3011118efdd9d4c1f839e6747dee8ba953b52e9012fe2977e32599375",
"179": "9f5563a5ea90ca7023f0304acba78005ee6b7351245a8a668a94dfef160f8d29",
"180": "dbef09115a57784ea4ea14c1fe35571301b0d6139bea29d1b9e0babf2c2aae05",
"181": "3920627e86db42beb1cdf61d026f9f7798082f1221db25a17fb7feb6c1d49027",
"182": "58096166bb8199abf4e07a9ef0f960065e5a635443c1937a1a3c527ade51d594",
"183": "bdf84a73b16a5dd5ece189dc970ab2c8f0cb5334c96bdd1d6ba2bad6e7f8a213",
"184": "c1e8c0f1b1eb3c258923e9daa46ef055bd79512b485c7dc73a9c5e395d5e6911",
"185": "0ea72907eb6c1120740cd80ee6b9a935cd754edcf69379328f24dfc3f09b9986",
"186": "3c0078aeae0362b6b7561518d3eb28400193fec73aab35980f138c28b6579433",
"187": "f2bc655b33e35669ee9adc841cbda98e0834083eb0657d10f7170e70081db7e0",
"188": "38e0291a3f5438779b157e5efcae6cef9db21cbac5607cd08882844cf981febd",
"189": "9b2a65ac4c35f6b392501dee2a28221a3975aac5f0866b476f5e6a5a59f3fcc2",
"190": "606fe2cb6525dabfcdab93afb594dbc8399cb967fc05f0ca93f6084d3f8fb591",
"191": "ea1977e7b22df383de61bded2a4bb3064cf17fcc0170be452330256f938b8d55",
"192": "91d614f139082168d730003f04b87135c64e13709ced2a96001ed60796867825",
"193": "65648f18a50a7f9195fe56bb8cb9e25421c6d777ad2447a3b566dc8c54f3399a",
"194": "cdd31847c6138853597261756d5e795884566220a9361217daa5ba7f87560404",
"195": "d12224510de6c98076f6289cbe342a7ec7ea3c5453f6e3cf8d37d9eea74bd07e",
"196": "1349b472d2821dff215e54d683dbfca49f0b490ade6a30b1db9667bc94e5312d",
"197": "e2aa8f7cb3ba893af8bddbffa6240e7eb71a4f4c4498f2a360f3db7b513094df",
"198": "a29d9edd0dceca9a72d2238a50dbb728846cd06594baec35a1b2c053faeab93d",
"199": "50a6b9725ef854537a554584ca74612a4d37d0ec35d85d04812c3ae730a4c8cc",
"200": "5b439098a3081d99496a7b1b2df6500ca5f66c2f170c709a57b27c6be717538a",
"201": "b4e86186652a11df0b9ec8f601c68b4823ae0bafd96357051371fde5d11a25ed",
"202": "057243f52fd25fa90a16219d945950ed5523ddb7eb6f2f361b33f8b85af25930",
"203": "2742f7af8ce9e20185e940bb4e27afc5fefe8cd7d01d7d8e16c7a5aaf3ad47aa",
"204": "15f5e9ae4636a6bf8bdd52f582774b9696b485671f4a64ab8c717166dc085205",
"205": "e03c2f4ceabf677ec502d235064a62271ce2ee91132b33f57382c4150c295784",
"206": "16bb96da8f20d738bbd735404ea148818ef5942d4d1bc4c707171f9e5e476b1e",
"207": "133fea765d0b055894d8aba573f47891c1f7f715f53edeefb200fbda758a1884",
"208": "90831cd89b4cceacaf099c9bae2452132cfa2f2b5553c651ef4948460e53d1f3",
"209": "570fab1574a3fd9aca6404083dec1c150e858e137692ee0c8011e702ec3e902f",
"210": "ae9a76ce3418c06d0eac3375a82744fb4250a2f380e723c404334d8572faead0",
"211": "aa4b2bc3a136b27bf10a858ac6a8d48f41e40f769182c444df89c5b9f0ed84e5",
"212": "81489bf56605b69cc48f0bce22615d4415b2eea882a11a33e1b317c4facba6eb",
"213": "a497e789f49b77d647de0e80cd2699acd3d384cc29c534d6609c700968124d14",
"214": "409520c6a94de382003db04a3dfee85a6dbb71324f8bd033e566e510ad47e747",
"215": "0eccb27846f417380a12dfd588a353e151b328425ecf3794c9cf7b7eec1a1110",
"216": "f735b4b441635ecded989bdc2862e35c75f5179d118d0533ae827a84ed29e81b",
"217": "9aa88ac109aefaa7ce79c7b085495863a70679058b106a5deb76b2772a531faa",
"218": "5feceb66ffc86f38d952786c6d696c79c2dbc239dd4e91b46729d73a27fb57e9",
"219": "9da1307fd12f4c9a21a34e612920cec286d937418a2d5040676408ba0c47f3d8",
"220": "a262318d02a14747ed2137c701f94248bf8651a23d1f82826952e66c25029588",
"221": "bfb4e53578fa42f83eda027da0467a351298dd65e3e8e84a987d69fc275e9f2d",
"222": "4308f4374b84e657aa7a21e5f5fe42ed16386b6dc7a74bff0d24d08ad62acd26",
"223": "3790f82f65ce7bc071b4096ca22544548b3413a755f58bfc401eff3ddf487a86",
"224": "96356c050fa66d919c75212d789544db81b012bbaf1f7915b053cb9ba2d67de7",
"225": "f37f3f2b0dc57a86dee4ba6ff855283bb4d2f0dea1c5bd1b708853444c2ffcec",
"226": "49bd28d97a79875007a6713aa9700df14464217c28a6e76bc93ded57b75a33f5",
"227": "b1f73471a3e6ea1dfb04610bd89ccb110994780084280fae872d42a2786f9131",
"228": "e38da154f6cccd06cd0001924ec2dad8de5bdcd0b78b68e8d8347768d99ac0bd",
"229": "098ffc6baaa32734053275ce38f4bbe58efe0ff946bf31e0e2df4c6a169e23d8",
"230": "2c72b887a8638941b85765645b45c5cdb73255427e14d5114f6830f847a6b861",
"231": "4aa0c92e77eeed08994650ac6129d77db9f026ae2aee78ad5c9fde132fac0505",
"232": "5f7905b71cb897bc7cc6db7e29cc38ee459e2fd8f5d58ba4746d3acd4e46d444",
"233": "8d986e287ad21475728b0dbd9e935623d69db4e5fdca0d42bc32d70eda48985b",
"234": "2d9d03b778af897e305caa8a1a14a117737bbdd549003c6d7477dd3be8331694",
"235": "7168cff545d365b09e8997bb9450343c7090913876c8f7eb9f0e9849c6fc7dd5",
"236": "ceb3002bad36c22c5da82fd422b36bad91b97a7d3f5409ed5d16aa9b42dc137a",
"237": "c857d8fa78c8fde91f29b3fbe332c2e781f7e8b54532f4c352693d6676fda2a8",
"238": "3e2edae8b8ddbcfaecd5aa6c69cb5960b84cc16f7b3232f3386aae9ecbd23f20",
"239": "49df3a63ca6509687cabb3d208e92b057630231e66b87fe8c781baabb12a55f8",
"240": "5034a21557b2de1c5c2d2aadfe8ffe62162c23f42d1aaabc705ed8519e91a3c1",
"241": "85abbe1913df41c57d1e6f00cecea416edb19c64681d1bb43fb5024e2f48d409",
"242": "4da30e6198a3d9ae6a538c2366e08ee218de6efe2c5b8f231493e21489c21a7e",
"243": "7404bb7881a010271831847e71162ee7a393843922ee93cf7cf3455a0074279c",
"244": "21aa3213adeb0a562ec7161e1cfcb5f1701303f1c9f03ed726c536283e080db6",
"245": "22b9cfa9ab97c84eb64e3478a43acd4d95b90cae8c3453c968457a89c6387a81",
"246": "729e3de7687fc597be2eb4c592d697ff29c78cff6945c9690cfb1ee67550eeed",
"247": "f49b98df95a1f826c24cf622ba4d80427a0e0767dffcc28a9206c58508863cca",
"248": "44b8116c29dafbdfa984b3751c1dfd057b3e93fc57c8cd18495f1c0f605496bc",
"249": "49e96b6ba41e88901dbd118139ef6f013b4fc59e2347058a7e726cf6a2f14067",
"250": "f0e0dc05fb555ae5ba9820586bef3bb8a3a82905ece3d8a998a3015fc91c1c3e",
"251": "8c1ece1b350c210380456da2bab70054f717731b0dfb34bc3cf4abfacf696f15",
"252": "ad20a49374f9176bd26460d35f96f30d734df3cf6fc63b4642380b4e866848de",
"253": "ba1a2bbccabbcddbf29ee0b56d0d56b4f026e8a7b97e97de2d48c133ccbdf2a1",
"254": "381a2eac64a984a81671722bd95ca5b8b6508a6f490af46780e9f393c6797223",
"255": "5e6ece13372bad4a6ea011c933389dfaefedad5860aefba2ab97fe5f59156c42",
"256": "068d4a3c845803bf66a9e5320261a0fd7b6292a8230b271a6a83f0dc8c73e907",
"257": "d80ac9215ffa7adacb22711cc88f5b580272d0d65c49e1ea48e69d17e264d91a",
"258": "256c4d399703b7f16dadef9201efc0ef9f6aa6ee05ddfa2d3e26ff6efe09704d",
"259": "275a4e84039a1596ac7e8bbe186163dcfb02bfa99c209653ff5d505a29b4cb10",
"260": "f461ff2df66653be1a2e579b1aea515d4a84f2ae5ebea3aa21fb2477a53699f4",
"261": "178ecd56cd79c7aaec1353093571ce89845130991d64c5a715a02da83a2705ab",
"262": "2e0cb5e8fc8ef04c50a5b9ab9a9eecad446598ebc2527b19c447143e5ae09736",
"263": "c870fd75ed0d5ed92ec35789c522d029f364328a16282a1c5eb9b3b7e121eff3",
"264": "da5d6bdd89eacf70a88810935f80e4725da4feaf2aa86adb13985d7d9e1c247f",
"265": "13f16351c3971c286fae5e9cfbaf6f0a128a6507804fd280971a600019e352e8",
"266": "4f39cdd293598de9259231592e99bfc5fde82a0bc1820a4c5faeb54f96037f00",
"267": "3e054d92034d3d32c3d4e7acadf1c09232e468fc2520d23d2c7d183ec0735aa3",
"268": "2d47c47a2b19178cef9e4eba1a53dd39b5f8657bbe011a71c8d402d294d50132",
"269": "4448f310ab9bff796ca70c7b7d0cd3b9c517f72744a8615112f65ba30a6d61f7",
"270": "ce71f5bd1db540762e4bc6c4798d8b7f3d2b7068e91c300fd271a46298aea2aa",
"271": "5a05e212b9b6ccf6092081f567aa73d27da399d45418f674628a8154f9182b6b",
"272": "a326c2d7121d80861aaf110826615e560aa4efdec0cc9fdfce051c6b9038e781",
"273": "d32b75411f407c5da6a9a6b4a3907b9a9ebbca6b651324c03432d549671bb837",
"274": "b5740ac928d58f53537b05ecc80b7463dc1fd5a53400f57aa55573ecbd5faa56",
"275": "e1c843ff0e97692a180e384c1a9c03c7de06ef92ccad5aa6157fabf0dbe5b804",
"276": "2edf523574e0a062cacf21f51ed6f81128537f27a3cd27b84a8b5d2478d0092d",
"277": "130c990ad499345b7638e57dce365442e2ab2d2571546aae60a9fa6ed3834b8d",
"278": "2204d89df74e664621dfe8892277d50e7774f60613561d70ae06ee0eb4c483d4",
"279": "4618456c7239784964b8fcd27155e01cf5417a16cdca7b163cc885d598ba93f4",
"280": "4b2d9501483d450371ec4413519b0b3461502aabb970fb2b07766d0a3d3a3f85",
"281": "b04a4a02fa0ae20b657dcfe3f80ef84fd446daa1521aabae006b61bb8fa5a7da",
"282": "6dab2ee10b0dc8db525aeaa2f000f3bd35580ba91e71fe92bcd120ad83cf82c5",
"283": "c964c01082a258f4c6bb66a983615686cb4ae363f4d25bd0bdad14cd628cfce8",
"284": "df960dabff27b2404555d6b83aed7a58ef9a887104d85b6d5296f1c379b28494",
"285": "087de77e5f379e7733505f196e483390596050c75dad56a999b1079ea89246ed",
"286": "8f3e5fda508a37403238471d09494dde8c206beadfa0a71381bd8c6ac93abaf4",
"287": "5d834d4c0ca68d0dca107ffe9dbaddac7fc038b0ad6ccc7ba3cfb53920236103",
"288": "20a3ef9e411065c7265deff5e9b4d398cab6f71faa134353ccea35d2b279af04",
"289": "9dda7eb623939f599551ad1d39dbf405211352ae4e65ddd87fe3e31899ca571b",
"290": "a629c35ad526f4a6c0bb42f35f3e3fa1077c34e1898eac658775072730c40d6b",
"291": "81b1e5196bec98afe72f4412cf907a2816515bad0680bd4062f2b2c715643088",
"292": "614950a1cff05f4cf403f55393ed9d7807febbae49522ef83b97e0390038ae03",
"293": "9e4067ac93c6febda554d724d453d78bf3e28a7742cdec57ee47c5c706fbe940",
"294": "9ac900bf0fbb4c3c7e26986ac33698c46c6c3e8102ab75b40b8df94fc3a0c7a1",
"295": "2fdcd631f3c68bef3c90f8679b7aef685fa33f20c2d6eb5965cd2a62267c2ffa",
"296": "dfc947e61ea2138ebe47234ba503cf5246ecec530b12e363acb240822ddf0b34",
"297": "4d5af88ba8a28b49a79773d3e6521e8731ff07d49765203b157481469e6ae6d0",
"298": "94aa77eadafaad7327acb8e653888f00e114cca2fbe04691dabdafa2a0c8cd64",
"299": "0f221ba58a8ea417e13847ef91af9ff029561ac18c74bbeeb3f5509af81a3b03",
"300": "50a79fb6e417fb4a34e155a9af683aa9a74ee922a6c156a58bfedd22cf3185c4",
"301": "eb09a0097a47e7a95b892ad7230475a1a28343b47db4faeb3e47f227aeb04738",
"302": "fcf9736fe8c20a6d02f00e9b1e719de06aff4afa99d2eba166592aeff1b8f3b7",
"303": "e6266f575c94d805a67fcd3f2391d0961b4b121b8a66afbfbae76dfc34e5c06b",
"304": "189bd2a8daf5b638ede7c50035fcf426d125de87a401382f66ab75f35b2ac1f7",
"305": "0ac58c6eb8513f4ffe911bf0f044e0153862ee89c04939fd9b294860a37ec9ce",
"306": "335998d7e2a3fae2da75a5192d62c37dd006be96831fd37e7438ec6d84451c44",
"307": "4f1f2695b1b6b1660f3ef6ac31a81630ca74da9368eafbfb214ec1980705c13c",
"308": "bc5ae127f8690ba7f6e9ddad98a49137acb45abf4e187eaf3648f197c72fbe90",
"309": "6b78ed4c4bfc942b9b5dc340961f19c690d56f9d721b6b764d1db31da53139db",
"310": "0d183ec2ff1cbc768a9eb7eb06c2a354f6c8bab00e64ca2aed2da00825f33c05",
"311": "3ae7fdad095eed78e0c63cfe4e28ab7ba2b977259590ed38722e9c44727e598b",
"312": "329d107b5743a96e3551084832587a6346e410aa89641d83f03f1242a7244473",
"313": "ecc63ee12cbe487e5390007271890b8aa5df2cf48b8e692404895d3c2af20758",
"314": "5fa65495795c52818aea910c24e4d3176c71817f5268c34e1cb259b256737352",
"315": "95bd03b9913be37d24809d30e7bfd31a1b7a127d03d65e52086857bb3a364b5d",
"316": "ca6ec6c9159e10719cd8d2cfcfaf2fe2d3637fb3d497e2c80866de6b593632e6",
"317": "5b0d72d34b406ce20714a59f1c4d5340c5559285e340497dbcad68729a9db786",
"318": "3e2b479fafb86b8ab097588b8fa12ae8a078f8b5801e15c7faa1ef23d87a631b",
"319": "e04b18947b36771937dea491f47b75fedf42a6db684035f5690e6c2bd7e6031e",
"320": "e546e4a4c9020669c78a095aa5c5038242dd78e0f98517c0e23c43aefeb58138",
"321": "3da0198df2f98a7306ee6d2e12b96ba9a6ad837a6c2d4f316d3cd8589b6af308",
"322": "07e511e9002147c33739c924c17a61126d12823d143069535a615a97f86d936f",
"323": "be514911dd6258f860c2773253f6df6c22ca975a10c4e34db5903269f2975faf",
"324": "53ed94369b59a84d003ff3155edbf481a0eef362325539d6ab1a7f370ce919c8",
"325": "43c8dc1907d3e1eb30deb565475ec1ad4f807baf6ef34178508ec85071722f0a",
"326": "b08d72606988ea5a82e0caf15e68d81b4f2e8dbb4af6a22437916f3fc53e3dea",
"327": "f70bb9cb351daf610a91a3c769d84bbb3f3b8f1169b10839196b65b8585e7c38",
"328": "6e26ed661a0add2e583229066d304f7e765a0ea337b6a93bf979e4027b70b94e",
"329": "89d8b56a1e05d90ccde0df482ff2fec3d44270739810f3c5d06856c38d801380",
"330": "2dfac8e04d08dc5eefcbba4e475164103d339f844896a75ef3af2229185118f9",
"331": "a20f9b06c126f4ee65e3f3a0bf345007b35ecb69d035dd0ad848e09300130fcb",
"332": "6593d40f4e3f53a73191c704d388c7cd1639403da6e679c8e4169b26ade19f3f",
"333": "7499bc84f6bd2211365fec34943d64f6be80a53ee2efb21c099c1c910ca29967",
"334": "f24dd99fe5b46bb7a7a30c5eff61e71cab21e05f1b03132d7da9c943f65713f6",
"335": "8e2111c24160d92b1b29dd010b8b3a0a4f9af55f1d30bd5892756c58ffaec201",
"336": "eed2e8d970c1c5031220476e6b700d16e5065d7893a2766a53600825b4ad3ae5",
"337": "44e298d1b55c51c9f127989da1149ccf6bda24c40041f777d35d5b8f192753d2",
"338": "b3a60e80296f79cfdfc02354acc674162faefcb3fb78b9672254c9cfc6eb113f",
"339": "2b55688ba27d72202632783186211ee24ea39c53915066578291fffd9db73128",
"340": "15765221271275022a6ef57634d836b052ffbab6d7d5a6899992972143841e3c",
"341": "f7340563f85e057709a2fcc71bd448fed8d6de6907d8ba5f91fefa2abffda6cf",
"342": "f252eec230c2e92ed1fa04834bc0738b79597c3b0d2a66c787fdd520e63cb3d3",
"343": "1d65c53a04f7eea94ebf76d797c0f79fe3d251bd33e5edc16c780715531b4345",
"344": "86d3fb095439bddbc0d6e6e8e433d54aff04350e2da2ad05f53d607113075c8b",
"345": "21db551743591f9cd20fffcedf3bda17f9f178bc9fbca528a56c2c61b9e7c731",
"346": "f326e2241b7e57320914aa279f9ba2e155ea77f809a188958e0b590bea9c3ada",
"347": "0fb6749b98280cc8c26950a2cb9c9dbecac18f8760e161e9bab887dcb0077653",
"348": "0cdb77330ae73fbbd0f287240f82b7547a0ef42d37004003a9c759f86b686d61",
"349": "690ad38e4357b34368966b9de08d89e0c095246bf55969842f373f1976f86062",
"350": "5b427d47f98e296cb78875619fe67d42f41868b78886d560d8fcac89043fe945",
"351": "93dcda27a0c12f0c32cc35f0de161e7f7792d11abe5d4c50d7fd5192ab8b11c0",
"352": "d01c0cd49e7649289a1f13162757de494bb9104b20ac8bdb30a4180df5225889",
"353": "3d856f38821d7b221aaaa9baa3d7927f6e360919e8f8505d7499f9bbd85c44b8",
"354": "36dd3030dec4a8050d2079678250c9c6c86c66c64fdbe7f5b82e79024bb8d5a7",
"355": "b0a915b700e415ba3acc3ef261128680b921b5df9bd6fb1d35c2d1180e7f61d7",
"356": "a309814f13708f2eb5ee8dd1a3114e8f8b15646b8c797bc7119ceaa3f6911f0e",
"357": "61c9c81a41fd294a8f07033c8373706694faab4df3652d310e84904356cf5e6c",
"358": "7d59500b8883d81040173b88462a73849e0d386a53830d599e6a042f4c1c165f",
"359": "0793805920db4896155cbce40fb58570a3cc952d0c15ee57393fa3c6ca7a8222",
"360": "ee8cacd40fb7515e510cbbe7deb6005369ce7d9800ecff897f3fd8721fd6ef71",
"361": "e96f225fa470174b4ac787b21579ad1556804de85c0c83da99a92ddc2c56c7ac",
"362": "9a4ce079c1a882a306e21e0c145dab75a2698cba3860152f03dafc802ad9006e",
"363": "258a6e6ea10385ca3c0cf08377d13ef31135bd9479d5a4983beadf158e19ccc6",
"364": "13aefde214541fab44d2a3013c532637a3da82199fb6c0a1a941c3108f76b9cf",
"365": "0cd978902035027c6898d6b5fc11fb5931f06f8e8ec9c24b4706143c91de9450",
"366": "47495a92574a6d7b150eb3f4338748ba03672ff93162139f98e03847f96551cb",
"367": "fad9203cd26fccb99f0f89fdc569c230eda46cd72ed3fb7e5f6fbcce78ced1a9",
"368": "a237e13fa6c32b66695b8c8de6472d5c93c7650989f047f62a17438c07036845",
"369": "da4c450ba0c4f76556fce54bc3f6b2a754a626cf1f87ba3280f545e023942640",
"370": "5000899cd3070e1937d42a68766c840bdb9629a49c6112bea5cff52fdb4e9f7a",
"371": "7afb55ee21c0447f7b961265abe7ccf87f59af6206949bb1da19fd36334b09df",
"372": "fcf734716ed1fa724e7228a489304e3c55e813734fb5792a83f806ab04e40485",
"373": "83c98f0431cf944440dfe0a9831275ed451b0d16856aba4100f53170c55c2e6c",
"374": "d998ea6616a5a7a9f7beb3ec02f8cbed4a9c5f17be978c31f32ac0f9f4e4460d",
"375": "6a72aba5c61e27e281235b1f001ab68b840f6e8bef0e6bbd7bfd8eec1abf844e",
"376": "980dce9435a9fc03250df4e809c2f68c48601b64c30d32c6e67bf1faa35fe274",
"377": "7b4a0b6958cf23951636b5e27af8041dd9901256c53de44c9be313ffd0a01ea0",
"378": "a1b13bda78da3ccab1af6c330d3e768fce62841f924933285e7b1f7a8b7dcd5f",
"379": "c957fcbb90e1afe9a342e95608ca035596a7dfd4cef398ada55e05a2462aba14",
"380": "b794fae83475a77832f46e69799419f9881bd774e1bfda56773b587c42591039",
"381": "e7208f3630a20b01a5e1bf5d0537be9dae9fd7529773cac12b96c4ac2b0f8dbf",
"382": "70480c0d26a6d76eba0faf3ee047d6214b2ca4d1442070ae5e79893192ffa699",
"383": "3c814d251089cb2a92a78ec3424b2a729cfbbfc6a996fd48148261312364a9a8",
"384": "f709015ae0f8ad20bd2efd94d01af3858234e381942b5b15391ff7f77211b116",
"385": "0bca6cad1f4ff336b93c9f86c4ac872bda67ee0cd41b1862a7a663852717535d",
"386": "3e1748647b60bbf292aacae65b3608ccce8e55e203a36ff062ee787cd8c14480",
"387": "cf592fa81780e727a56553df32410beba6de9c33543dd1ef1155b368ba9a9b9f",
"388": "911326fcfb0638154f69eabb87e4c0c141df09e56274c8522e9c13b7b577f00f",
"389": "cdd56fb06838a10149f2c7229bbc76f78b4a5a58945fb70a47261f1bf635c404",
"390": "07dde4848eb878808635fb7b366261b1e9cb158635e76577eecc48ccf941323f",
"391": "76cd3def1eea8e2631d333798f4d282bf40f6254b2d18c02c78cb56b33462093",
"392": "c4f7ecf21a8738c3ad0114a1ee6a2d16668e71b499741381f30827ed451dc817",
"393": "7bbc419f89fde57d2862bfb3678ddab96614693dfca109d0f444e4762a2b7a8f",
"394": "7781ca3332d6da18b1b9be5e2eff634b526ae9e8088f6e479b49d657f4f41525",
"395": "5b5de0def2c4a989a54ae3e748362c78cd018778d5adc4dec13c1bde6ffdc139",
"396": "d42c389d6abc7d8102b8cd1b906e4600da08394388d4dcd432ec955e6d8b311d",
"397": "629e23dc358ed2a8c202e1b870e270e401aecc5d726a679b542df8e6becb4200",
"398": "c30114e73097c3fa4efb203915f3b828b1b8c432ddeab2b7e1ba3fe63c50e190",
"399": "a681ef7bdb22145a3e051ecf7bfb694c18b255c80dae6fb8d49f187d28f3c58f",
"400": "c993a792804e09c9f60313f4144953eec072ca6a8a27f44d8718ce53d9429585",
"401": "074b576ae2054cd030ffcfa132b1465f8f49b836f505cd4bb01af4a98f4f5337",
"402": "d45f88fc3c00673ef7e628d867a54a4ea281b3b2620735cea85a8da3b06321df",
"403": "a09086d3cdab7d6ff8a9fba1746c5d236e0ad0abe088be99bb172e80c6f0f8f3",
"404": "55a774ac3423440dda50d73e472887195940d5e9df605b30deeb0f2528b001a4",
"405": "ee9fa61ae8153df7979be3afe6377e584fbdad624833424a5cff64f6ea94c9da",
"406": "584cba4abd5711b8f558fde97620b8ff0fe91586bad052ccff87c49c13f72555",
"407": "ac50b37409f7ea91f90856bbfa716731013deffb5f5b51540a99736e08e5378e",
"408": "2c12c3cf062c3d9cf2c53e6e4dafce70ca5c7a38c97479c3b013cd91076ecf4a",
"409": "5a55b5fb584c359f4b6ee2d21deb62923b0b25e1b4c3da0a6f351079ce657173",
"410": "9e224b6ab0b7f20759b63d1799b426a8652c9e637b1f38d3eaf8beff73c80c67",
"411": "66c0c1ab79e9887b5daf2c510f2c2c4097044b69fee6bd4ffcff73ad4816b8c7",
"412": "27f1768d99e22f8b55d010b8b7acd904e8b66751d5310d32c4d017a0ad34d650",
"413": "7c99634a1161e424a14d60b516291655096eb90ed055326325d7f5de7a44a3e7",
"414": "4e03e038e99870b1faf45a0a29d6124379d05a0a3553a11aaaa91b8ba56eac5f",
"415": "955e433ea745016af2a5df015f1cc223ddd84ddccaee60d5302b7ad61542d9e1",
"416": "8d07a87b9012a166f5bec4dcd646d5957c9b3633a1a37c40c584ede75cb7ad22",
"417": "3f738338cef45597e3b839536953104186f11d94d16877c77abd8a067c152dc3",
"418": "0c813356b30108f89fb37e8774a98af4f9eca3df49e963f985ecea82a88b1437",
"419": "8ea8d93a9e874f8c8ceeb240f1f1245a077a7c0a62287d3044feaf855b5dae78",
"420": "af7ac1e90e07f189afbb284ae24614d9e713e32098bc39bb81d6484d47351444",
"421": "f45e155846624f37cf2ee08be2a63cb1ca35bf795fb0f770b4c91ab549f22b25",
"422": "69d728f7e25055dbebd41684bc6de61be6b4db4119d7ecdcef5b5d8ead976537",
"423": "3e78c62395be704a59a3a6a65e457725105619e0a6f9f3aa6b311c4f7762b0a0",
"424": "fbd6edb36c3754a35e7de936839c4fd0564db873924ba97b35cd43e065835582",
"425": "ee5bb631b2a9edf8ed05781b192f42e24ae748f3aa4ba5e635374c094d28ddac",
"426": "3e913e088a689d2d33bc797040cea94512bf54a61f96501f60576ab22ed0304b",
"427": "415e6da4c7f92da36e2d8c43fa8056d0050ae127e648451e2fada49bf2c936d1",
"428": "389bded7b0c14212fb69b559fd1ade4f5b235b976c9655365c45481c3afda486",
"429": "3007beefa50c509b89b86c54f53757ff701f795dc5f7ed47a1520c2b092455f7",
"430": "59ec8ec2866ca502ad558ade9f8a06a9ff815a1ed649bd1cb513f417f1d4727c",
"431": "d3f28dffa4e22b3bed74c3c2c9ded1e4a8be49d3757368e4e3efaf7f79affb15",
"432": "59fd80dbc8eb4af9596e4ce8a87313d363da41313351a69ab3525faeb905c27e",
"433": "471a7ddde597fbaaaed1941f42ca1fc0f4f047e17f2197f8999dea98b38213f3",
"434": "319cb430c66d9f418aa90a3d6f9c2dfc8171383d6f4af5803a73684afcf18e15",
"435": "aa29c0119ca84133617c8bc7455afdfcf5b05a569393ff21ebcb10d32ffde2c8",
"436": "928f772ad7a9fc501f71cdef6dfe60e2d8cb5d5c5800b519d01afeae0681dd08",
"437": "ea70162a014b8294ede65af6fcdc11fb365ab2b126aef8d47983d58816fd6a54",
"438": "43633662392854b5d9f9f0fa564605212d016c9ea9377d2a6ab52137238d4191",
"439": "42f7e88fab5c9cb31d4bb34403d7958abd5023e9cf9ac05cd29626c5df763584",
"440": "cd08ef4f14b804e3106ee88f9d2b24864d5e2fec6c7cd7dddfa2713e1431375a",
"441": "daa69bac44ce5f57b4b43ab6ece3b2b3561292c0f4c6e82a506ce2973713f749",
"442": "910d2abf184cfd7b1964cec906a79f3e45f59e3d42ec20b22f56de59c9018927",
"443": "7a14ac86724d318e6d40464e710c17625d441d1e7adf83d3062305de2f85d445",
"444": "390877dded07897360921e8d0d126bf45d6a379d47292c90826d775bd1897f2f",
"445": "5ee5723341b0b81c9e0172fcb654f8b24322244bc2d1b55afcb78b180ada180b",
"446": "8b2dcb0168e8701dc9da286489a1e68e43e1b17638e5990edd882196d7fd5a29",
"447": "179af1c75faa5f42e89ce3b41496a64b2d2846361f76dd5d87f4ce97ec2bec07",
"448": "18173b14e0c0bf403b5f0d4aa23515ecf44622b3a860d80e866cd498f107123c",
"449": "22d7739bccf54ea1159ce6aca3e215482deba85a4db0676cf86d82a760c44a6c",
"450": "938bf7cdedab94bd7208b69047014e3d9ab7b54d1223bd649eb3de0bd61ab47e",
"451": "abd88e378f54b649e818d6e1d8e06c9f8cf225ac96b4085523acbb1f0c1df24b",
"452": "4119701c51dd8c457b74a19ed7ae3bdf069f5fd915c8085e9a15d909a39db036",
"453": "381ba093e8ece9e14efc965ee94bb8adbd1c0bf140875ef95f8f11050d5ed489",
"454": "b7613128b0401fdbc07a4015eb3935f6677b84dff936fc5e7e9f424d0ba1006e",
"455": "35ee11c9763f48a68f2b60b4b9c3919d3a895afc7071e8dcac5abd5845dfe79f",
"456": "8b129a3c7163dae86f1f43c18557296240a02bdac70ad29538eb5dce00e51f4d",
"457": "629c99f9af0e962f00b812057c0967861a9b6db9dd652233ac4b37f368d09206",
"458": "02df8a1d11130bde8af932dfc5cafe7d8e6c2fc12b82df5d222a91e4eed8e2f8",
"459": "062b225facc7a897e0e42e6b0f95deeb8b02de64267bf5cea4cb5280ccec1562",
"460": "a05f9a7cb049c40760ea2196eb41df1826ad492e6e5fc4696ce7bfcf7a842811",
"461": "95e5e99da04c0cd73e1818a62be3fc0de98c76d5cbdc81261672824ed5b8c1a7",
"462": "69eafed1b3d4022fc245a8416c1120bdcd039716db8cd43351a96e6c7d10691d",
"463": "018efbd353bb456112cf2c760b4d96aef02aa899ef74d4aadfb3dcf374a22987",
"464": "cd4447e836cdbed7f6a3998b50c4ab467aedaeb8e54c377da34245e90fddbe12",
"465": "da0612471988c89ea2fb190838f9f5e9029fd106330a801e66280c967ff1c52b",
"466": "8d16100c0148ed7bd41003b4a0612cbc5fa150ddabe5f9916ed6eac3fcfdefa4",
"467": "d6ea164cb91d14d6aba2d482926cb6cbd1a3644737a0530abac635083a97b8a4",
"468": "8d0e3f6bff322ff11d1267f1f8303a8ce1e2d796b7dc2d9eb3e3da939dd850b5",
"469": "35e2072f22c7cb980fbe797e30c25e9224328813eb81d07d3c88820492ce9a1f",
"470": "4993f275946ae0d444410821faa3ef4a448f10888c50ff59f7ae01d0b50328d9",
"471": "b9af9323a0237fbf88fdb14b8bce95c084351325249629ffd4fbb32fe9d6da5d",
"472": "5b278c08ab97d82c1779411fb1018b07feac7ddf38a69e4d398240a495c54271",
"473": "4448b03417a784f554c44eb15ad2d4cc022bd9cb5abe2547811eb8085355aaaa",
"474": "1c64fc4076d6b00aff86a180fd9af927b7c1c9ba87a2ca3c83dd80ba5e5ea973",
"475": "e571b4b8218a2961ed2b04f62f816eb18686d82b7f2693694b9c774acef4a0ff",
"476": "a6383ed918d7851ed7503921a64201a032a33c9e1cbd4e08d1233f543bd21be9",
"477": "c871da03e684e099190c4ce787a9588ae85841246ad7bcc9cb4c302d617f881d",
"478": "96d8bec6b787a7aea2da8dfa8a1226e00881afc218c211fc59da830775d55acb",
"479": "b35720df96afbd98c6a4f081ae1173fdce21d63f75f7b455f4c2b9fc0aa672c2",
"480": "2db876e9625c8638c66103ad0206c9a51b68d4c6a3222f403b195a81837856e3",
"481": "bac35824e79af403a2058b08cbc84f8e4df93a21d1766e4ea1de6414e2a8a926",
"482": "0f9797e2f3691bc7291d81d1ddd5d88cb4e10b0be555e2ebfbd3c5b12b7cd2b2",
"483": "8f3348df383ec9ee00e18d41c419370d42ca6ebf71c510690aa5435a679b7e4f",
"484": "3b3bac32669c5b66faaa42b89a2dcb4de0bb9aa0bd279d60061dbe9e7039f5dc",
"485": "25d0335a0576f974617351ef5aec889f311fc8d7cddb997862b10b2496842d4d",
"486": "93b9a59a937594d2196271416ea3b2221d32b3b40a04bbebbdf97e8bdc557e0a",
"487": "a643c75a8d062b87a1c8635fdf439c04d949ce01f75dde10ab6edba90cbaee77",
"488": "984593c12abbff5d009091cd3c1883c87efc535f760727ed12f06df0902bfa75",
"489": "926ac61244f94e10270a2d40169de025be6db342b3de7f0db33a50b07176c143",
"490": "e2c8142e501b0b0b808d2d36f5f38266f99cd3aaca7d2f70f4bba386ae1d2025",
"491": "1a1c8b472424f8057c94a9f5e0c0b673551fbe9ea4cde5ca2d90df1de76a5c76",
"492": "345a83966ead821efa2a9de93aeb0fd5bd60a8f50e162caae2447f1f4d9462bd",
"493": "ee7018d63b08bc7226d6f77c2345a87e09fc7cc87b0a003aaf3a4a3f622edffd",
"494": "3d69e540997d79f21f249d4d8f73cd75119d81bcfb8bd80782863249f0d7c62c",
"495": "b717f1088b0ce24851c30d54bc8dad9f3ae93402b91c874e385e5c699323a5e2",
"496": "fbe77ec1978ad86e73e5a3f494fa7c198fe334b511298f5a0f2d04d6a7f51d01",
"497": "a4a66d6c7c555a2997ca59a8dbab512388adf20902293a5617132a16df76d954",
"498": "d71813b8175fa2d70181d87ae8f839e79792516a1cfa99a7e6b29500c057617f",
"499": "477d5b817df8c0b6f0928d02a58fc39fde2224493cec89393bd6dc349e5235bf",
"500": "3ac8e26d4864c538936efa7c5920435107a50c01306adaee5a4aeaa2ef378f7d",
"501": "766448b05b248ac3d6e991baa3e4b2d53b02aac426bda312c2299b2b983e145e",
"502": "50218b55f5b7207438137f2b0c71e3f6d37afd76aa5b1f2106111f3432b4cef8",
"503": "1d7c24799a287d42e97dd4ccc5bbd3713ce139e6294896cc5fe2efb80a1be7ad",
"504": "9878db5eb2218b18568dc8cfa13bc8363a1c93e6a59a05cc76da0588fd54af46",
"505": "872fc20275833f09c8aaef277abfe77f67be6bd443b489e0cb8bdf9d4ca9fac7",
"506": "d66834cc7ebe58cce2ee1c02bb11ae69672d711ead6a0a58ab592339cddbf02e",
"507": "ae955394665befbbc89e2ba85b5e520cb293b8d03209b1f71d78ce2cc807a437",
"508": "3917ce4173af47bfaf8525f0917736bde3f4bee0ed5fae721c3e2fa957ab1675",
"509": "2f64571cd71f0e59006da84808abf3d3ccff9a38884321533d448b3e8e3cae05",
"510": "41ce72f4701e786427413b68fb70bd77d921c06648ca15033ce1926a9f1224cb",
"511": "c9fc787389265492e60d5503f279714d5b19760ea7b2e1a720e6fc0251fe087c",
"512": "a8af6acf3744af13cde63540e37bb9bc722ea19a012656e3a3c5bfff8292c423",
"513": "506b90816555d1083be7d211f02a5db364e5c2337fc85b1ba845c1a806689373",
"514": "e4e9536766181eda627721723bfbdbca85859a3ba92d439f58ac0009c102430c",
"515": "16daaa62fa87776bc4843d226988cc83ee846ceef7b885ab63e10789b30071ae",
"516": "44b6de4eb51dd8f762142f284b154d3153592549cdea3b94467fa95484a4f172",
"517": "bb72c6d437197a8c1f1132626b3b47adb9827f4f9b912d1069cfcc75575371b5",
"518": "ff57c1f518651af805bb4b258130c7c5b0726422c3390327217562088785b4ba",
"519": "159b59f1261b7a31d7172cdc28d9515d0731e5117cb30f34a497bc3bd0496da2",
"520": "bdfb7f17c8c841c0b61ee7f00e51f09e4c78c90f7977548b72050a7aa12dfa3f",
"521": "bd27ca9292c19160cbb0568f750b247fbb805b85f4a2316fcf2c3a35d3ae031d",
"522": "98e0ef155297aac8a4060d204614753f26f6ba5357deb78c683783dc7ae30191",
"523": "9bfc344c80d1200fe12bea3ba4cacf8d5ac9693258962f2f15f42b30ce8ef3ef",
"524": "8df22d8716d7ca6354ea42b8e522d286ff9362cfa5881f527efcf1a953ed1151",
"525": "13dc6d869fbe2c3d95f715e55f02bc3d5787874b4c88d7da1d05360afd2025fa",
"526": "dfbe442040ce9afba654773fb14f307d67ab614267d3feb6b18df03182b5b60f",
"527": "ba634833af68fcf0ca7bcb08fa699b2c5fab934eb81ecd85e7464b01bca131ca",
"528": "016f7b569dc1c3466c97754c7dcc0f76c2a32a76c22357cc521bcc330d86daf5",
"529": "4960ff863e3d21a58f9e81c3d94075cb7a4daea5fcf396812382111e462fc57f",
"530": "6a2e45fdfcad65e0ee84d206d59cbac998026d7415d16a5c0b8c55e4a7d6bb3f",
"531": "95ec72fa8c409255d43e7c8d4e957bcb9239534973187b3b4cc2557b09bdba98",
"532": "fee6802490757983c499a08831d9bdc75a9eff08700bd29e8e5c134583ee07b3",
"533": "8a056666bd75d853a12d22b8317042a3f5500cfb21f6698d90ab41e01edcf81d",
"534": "8f65c9feb935e09a04c87143d1b2c63e38f08738199ebcc2758f67ee914d8a48",
"535": "ed8970f8ef1e2374289fc735aedff90b010c311a3b80d16df6bca2d3c250fdeb",
"536": "f82635851b442ec0ee95c5c2b7377ba382aa364cc49ff4e981d509ef324bb356",
"537": "54fc97bb6f3d7c724d4e245df37111c20334972300297fe38b590354fb9dfe92",
"538": "650c7f5f382c295cf6e7fb092db6fdfff164c861bcfcfe1fb38a50268f53f50a",
"539": "0bfb3df290912d8a70dc5e1e2761151cdf2c4b75d4b37c8fdcbed7483ada85fd",
"540": "08f1b2bffa88a9d01eecb8c9da6636b5e668a5478d8876a63ec3a74d7f932205",
"541": "5e59cf440336e86b67c17ed61f7bee7e548c434f475c415294b3b652d1aec606",
"542": "1257b6a3ad900df97f5aabc1e18b9f7ddae8c7d7ad60216ae21b5b7310cbda84",
"543": "8a783bfbe11c7f7b24431a15a0eb582f6fe5f75d1d21a3d55f8d8d81ba6b411c",
"544": "ce93bedef94ffbf62ad449cb0c68e8103a0bd005563ab854daa5e470664b4d7b",
"545": "40c253003d601fd2c90908bffcd8133e77489fe247e74ec03901895318fe69de",
"546": "d40739115f18fee96817266232ff1b8845e7966778fdcc644028fe5c759469be",
"547": "fa32a8de8fdcfc551d808c5dd0ff5545a199027acd32e380959b91f3b3d04643",
"548": "72e66168068b6ffcd2988e24124c8b1dba9a5b52a383a937397575e3c1e3f031",
"549": "a23baaa745a976b4f212836beb81a0a7b42d9f2e923c2412e2c07c63ff660ceb",
"550": "f58ff320639b2c47c76ed8aba487e31da0fd4656c3be6e33807cd00f77456e5d",
"551": "0449ec4d6d5b2e88603e62f3ec0287ed711cff682bbdfe298a197144ab24e80b",
"552": "f125761e8a0d02b17b1dc4be40216f2791727fd4e4bc56f60ebea1925c2fbf36",
"553": "dbb93b2a6cbf972bb1f94d1f8656cd113a09a02cbc44f25737e7d75c986646e1",
"554": "dcfd1e7a4a32ff0fae296b8211b5c9e91ab81844a0308933f598c712c1bc313d",
"555": "cebbca914f917f990202f110e77285132d2a5a3ba9a7475c93e3561d8ba88ea0",
"556": "0d5518ef165979b758fcc8df9c8cf536861f376f8640541ba6112ee7610ed82e",
"557": "0547c86b57c7c8c590f6d7a5131778f5b6ab2eeccc5e819e5fd095a6d4e68b08",
"558": "e763aa1dd494e097251484381ddb057c7d79b739c3f8644b1759e786e12f5b40",
"559": "e48eea4c3b4c9d58fe02739accf31bb64dd9c31623ad4cc06c740463d664c098",
"560": "77a09dc1ea6f1ae669004b8c9429dd83ead1148c62e0d945173edac45d9000a4",
"561": "756d226727e611d4bd22aa33747da2f635eeec070906dbc3262ef29e341e2a6d",
"562": "29de450d6e440c528287b98bcb4b76fb5155ab573df4721467446114661936ed",
"563": "7703d943dbbfdccb90acad65ed7c0eb13a10034ad01809472a55eb3162b7e53b",
"564": "65712c105411e6fc0ed35b9347de8cbaea33b0c5e57cf162f48dc257dd4f05b5",
"565": "2945ef4779089c9e49a9a9f5e2a67ba7e393aa20a955ed9302da6677cb03a9cd",
"566": "95d936e1d454df2e1e7d486c43af387b39a50cb57e57c7712d967bc9ec556f41",
"567": "2abe8af9ee20c6b8ad5034bc31fc1f4f16769595d5b4fc2837db3e76a90ac405",
"568": "fdc104338866e50ae2bffc1ea19719136f639df6c25f38a8680a70e9375a9378",
"569": "25677266de2b900788dfa047cb53f5585c37b564b3a711243fad52e186ec184a",
"570": "9101edb48d98c3742ceb713de591d261b79e90481d28f83f2d2c74d7034f4b46",
"571": "c364dde8cce2080d073eb1f9666cca97ccdeba61b2bf19ca0c84987e6f8d3576",
"572": "9cc3049e9464376b95fb88d6fff4331e0e40196f92a0a9aa1c5d10dfe33079f7",
"573": "2ade73491e183608b340f312d08cfd39c10ecb581c87b873443590452580a43e",
"574": "96325b210d18a7a1d6873e00a859648c4754bd4c91c324aa812ed78bd047118b",
"575": "33942a261a9150e2b5ce2ffe5b934a81f3972cf5aa5a9414a9d5f63f6b55324b",
"576": "7fca01a835681914b5fe5014d5649b5170faf459375ccc2bf9ad71ebaa73940c",
"577": "2bdc7a0e8adacf885c6ea0f6534b935b8a9dd338c5dcff05a74c162c3e9dd531",
"578": "ce6b6de1d907c8839b84f5f3967f6af7e9a3644a0bd7dffe80cfe531de08f8ca",
"579": "03dbff2575902a3c56a64483c8e8ca38d9888f72c6a71a6236eb07b808fb24ab",
"580": "96892003c30358ed55a39e13e6159fad09ebc3916e34492b91d63832fa86f731",
"581": "4fc5533c52133e54f8b54dcbfc4555638ae809676dbfec9d1400ab032f30648d",
"582": "7ba9b154acf699c8a123df5471fd40ad556cf6fc630136c686c87b09c88ff546",
"583": "ec10ea6801eadac9ae8ead5f222e0580f419b67d2ad5cd5c8ac914dcf5cfd69f",
"584": "510001c4104c80517a13f967df6ee071f15fb7b65e97229bc91b2925cbe4e93e",
"585": "ced737da53940337c5dff81720024fbaf4cee38aed1d3514d2a75c7b1271acf8",
"586": "9ab074d1d480d718930c9abac8b616a0bc5c30846381d6d9bce1741e9bca1991",
"587": "ec3fdcd8136188e3b476270894351cdc05dc44a4df50d1c4ed727294fb89430f",
"588": "31400607f95129fcc531604b7b0478a748d2495746280dc07ff30e39cd6f4a97",
"589": "3051de9b2a7ced941140aa1074952029f532e133beb41c18bfd990f43bfbd9ae",
"590": "4af295f83800334d77a04d56be7524ff6241e3d8b2f23820c9c54580b7996086",
"591": "2ecf2c1ab8d9e5cef5224842732af17bd2259598e4363e1d46cb172dccc39022",
"592": "2e71a26370d45781f31ede0c7810c2705706ce63291a52d5cd6f060ae16aeb01",
"593": "423867f77b64f725f823204796301ae09b427190cdbb62d472bc1395507da9a2",
"594": "6c28830e35913c59000dfce4432db255f7dd34809285881f05a9e9749f5d8452",
"595": "53fc00ae32e0b0d701175ac17ac0b91e05859ae6d7f3e5bf0548dad36e3d68f9",
"596": "9ccbee33387383d458e7ffa2c9c0cb9e4f5bbe3d1b949463a98232ae67d29956",
"597": "921102754e24e8ba99480e77652d88764020202e6dcd67adddbb1660204e8e78",
"598": "430f975f490ce37df74bc346556cb2186f7a47a58d3b282ab42f35b33a812f7c",
"599": "b603988248769444a1566b058ef3660cac528086b8193efd6d0be4080b834780",
"600": "dd539cd38fade63aa0d14899c7c75ff459ab839148b15b4efacd4bdfa0408dae",
"601": "571c5ade4cd89b460b7d2568a44d1efb05e2927ec840d8ecf149dc9e0ff09734",
"602": "edef32d6c2c7193b4b30a0e2c7d3ab37e0ec21db62543f4bf78169b683792e41",
"603": "cce7491b7ddf0e3ebde191e0e57614e61602cfaa2b52be5c2d657d9ae5e1f1b1",
"604": "d08fe0e5c0fc10640043f9d645446e23fa8efbfdf29c93c87794e5b6405ff51e",
"605": "1bdd74af73e2434db6149fd8089bd294defe3cedfaaf92f532568ddc6c48e2ea",
"606": "30e44b49f18048323d1c1bf4631587df8f0dbd477ebc79b7ef860a792953d932",
"607": "2d9b6a1b4810a39471e5dae85eadf595fc108097eeda746c8925a7be057464de",
"608": "cd3fdc5ee5b6e606349b9e5775d6e632e0424d6190f632632bd7435d5622b20d",
"609": "8b86933e27e64e6840bedc8087fa31326d9527a424c63ecc61823894c81f867d",
"610": "a781fd7cb6970e8f6f679296be5bb0fe7ea62207caa7ce86635257186a5a70d9",
"611": "4a3a0b9877d68deb8d7db624ec2d7f4b1c467fe337f803a220292ac6131acc05",
"612": "6e95bb170c3a521fc7befa446cad879a36b7b3d0e0e8eab1df6ddbd753156ab7",
"613": "afe8c7002c5e15859be829b4b69f0da00c1298971d5afa469b050016fc021978",
"614": "f85495a58ad9d5c4d16167084bbc3581ea22e6dfc39423b70d7fe486e316d951",
"615": "8da9fc3356df220081c71ccfc9c67251e6dd7058fb11258ecfc88ea9b8c00c92",
"616": "0fadf4975e2c27aae12447e080505d604258102f61c8667a5c2594ee033567e8",
"617": "06d9e8723de7ffd20129f1d8b5993926a97cad1261dc0cf01a37d8fa728ee996",
"618": "04d0dc62694f26c61871d8129259540884ad2296a3cf455f6b92fc911f98c336",
"619": "a93d0ec83cbbd4ec0866c97b372e4374a9d6724cc3767f5230e8316734cbb0eb",
"620": "071da5dc1dd87c2558b45247c29a92092bc5a00ef3cd46d70d08e18b791d2926",
"621": "458ca388a6b74c57ae13d1233984d5b66abb1f18dbfa12aa14ba868a9b5a708d",
"622": "1ad0227dc5f8c259ada5120d9db05ac7a013bd1bd84cbbca2f0ae6b174dac129",
"623": "d82d0401e10767b022417dbb64d348bc6c03ed4bb7e4553493e8d9e65525d229",
"624": "1d25005c86a9635d3483ea63ce95fa097f95792ebab86319c12bc66ea1d2ac83",
"625": "3fc397ed884cabc16bf30bb7487c8211424a08279a166d4fa4da6dc151a02cd1",
"626": "7c42e09e504cb269512dae989ee7fffe1f3bfea499c990e8edea796761331ccb",
"627": "5062b75aa39c974a579b0a3360c4da32e481d2242de72106f651c7d7de631cf1",
"628": "dc656eef13928f18d14a9265be6a923bc7d76048b861cdf1523e397801a8ef52",
"629": "9eefedc5b5995658be337f48146e37020db4ed3bb61e2af1fc57f698bd398b0d",
"630": "6e17ecc4a4d07ffbd67c49a59d31b7efeabd3bfead49fdf1ec005836e6030ebf",
"631": "781372694518c122f62566aec8867772e492fefef32c00e24b5604297dc1d44c",
"632": "c978055ae1d71dfdfd8bb4e845bb82fc4211b14560bf6001edefa4367e1d4403",
"633": "af4ff4b546369974642b3f68d4d3e90f0a0496b3b5d1572b638378fb49c7b4fa",
"634": "f6af89331ee087a2fc03e0bddd738e2716b49ed616ceb3b47743cf3806c6d8c2",
"635": "e4251ea6989571d8b83993560b537b7a9d0777ba54e6941757580cbfc14aab5f",
"636": "dc15b5ccabd8fd3141c244b7dbc6fe95078299ea3ce3016cbb483893fcdd4236",
"637": "053571be83ed06ab23a96d4e8fa129a4ce7e740de17dc35b000fb56c35a5ab80",
"638": "df57e1f418a24e38b39011048084c6b5cc91a56c1deb643ab605e0350f329b4b",
"639": "56930902baea90d1a8e505a227e5d7ac4da6b60f6c370ab75a0011cb3746818f",
"640": "c105d171242fa8e35f26491ba2f932d1577dfab2a4a6e75034ae69f062e8aa71",
"641": "0f6c3873a87ce630accf7f3b19feb764aac3fa0c3933042a817a82e6a9963aea",
"642": "c20081830b70a00d1bcc6f4b6572d511d534986c10ea3c057db304a1f26df2da",
"643": "143de023a92c7c8ce5fc0b839644e897267c44c8ba4e715743dc99686415a8b5",
"644": "7a8c9f1db1b1bce9a3b8d91e5b1a39a92a478029d975f5e45d593b7ca81a7134",
"645": "932a51e4c0cc5e30041ca5db1fd0674820638563a9df1300bece7df12c23017e",
"646": "a49cbd2966ea8248816b0a53b6eedea4aef2525aac45272b862d7d52e604625a",
"647": "40ad98735f3b5417ea1916f6146b69b7659963263caed186abf0790de0d9dae9",
"648": "53b288529a83c376f2399e986e5ca25c5993a6640063386fdb2de491afba2e81",
"649": "c0bebda0473186148087feb9828a418ab8d50726a1ff5c39ec69c4a6232c6b67",
"650": "98ac68d2bc42f89dbe97b3392ac691ed6c2c4f36a44665555bf7f816ca97cd27",
"651": "81f8287532f504b4f4a21e6d6ed573845bff197c479fb52e4c5b6f2fc1cfc40f",
"652": "fa32d8e7c1c766a6126b0f1cdd9d752cad55f54d0c05839e89d4da238615d9ed",
"653": "311cec39a42837f803ce8cfa5e6df32cc27fe541de108e3e7cf7ba3242e414ee",
"654": "f2b3d205c2da66cdf9a596e2caa1098132b832758eea2b14da071b8dd9584ec9",
"655": "39517ea688972769cccd46ad15b4f06ac2a6175d053dc97f849fa11a63a163e8",
"656": "2a21e5e89d9b019c1195c50af7c6e1864cbab05068d10e11519fb6d4766ceae5",
"657": "bbf54db41dc18753a3caa5001aae99c0c998e8a07b6e7390932054d7882498e3",
"658": "ca8e7b53b095939e5fafefa56e9b45b40c396145acf2a767f9f2430fbba75a79",
"659": "3d6c8492fbfe1c76e3f9d66485a7447489b89763623127deb6ed327a0c2a011b",
"660": "23d754ebe35981ad5de850f66bb2294a22280a8ad0b4160b1c29dfb5487505d9",
"661": "fd7eaca9690ee0384770e855ed600c96080c5c23565bfdae01c6045a87d9550a",
"662": "b93bc0a52860ee0a1fdc28adeca7b39288b1119e0f318467f0a193236e00f99c",
"663": "f73e3335b21c11b78987deb5a6eace1cef327981322f53a070adfbe31b56e7d0",
"664": "f3f0de955603850bd411690d11a5391e63f515a29e31e9241c66c62d688bcf72",
"665": "f2650e75f39098e5a114077b6e07bc15325adce22e1ab4b20569a4eeda5c6ca6",
"666": "a01a34d1c29aff5618a96046605adb74fa49b834975051d4ac82672567727a21",
"667": "2db51646a4038b38c88512738f79bb21776d39c7bfa3086538cccba0b63024db",
"668": "2f3336b7f1211fcc180cd76dc6442fecb412771aa45ef1a7675aa437d04e582b",
"669": "28bf022d827392eff1ec8ec121767ec24778f1b69da8605b4ab059023b8ad28a",
"670": "38db5dbb2a3ce2d31d1958f5b3ca4c3555eb0ac4193ebffa3f42ffd6bb4806e3",
"671": "0580cf2ef8abd3afbf91fba2032c2d51e43306bebb7f979bb750c3d7bd14c961",
"672": "394f1b74ccfac5a4fa958d813b5932371c5f8c2f3dbd1eb7202af2223aa08afb",
"673": "61c90400cd197b8ea6d7de90fcd1af0959fc37625fe163363fdae0ac4a724bfd",
"674": "6037c38f696b10fb531c26396890cd3b48d5408c5b37e61d03a72ae2f7b64ed6",
"675": "39c8b1bd1d534381b811bd8050e54b753106c1bfaf5d3cc63d8fe92a94471915",
"676": "346d1e2de9915fa2f4ce3675ccebadccb8e9d14239f1e53b6d08d09f5c26297d",
"677": "36841bba8f77d669e9d8f4e09ec580ce2c7a36c37da719815e65cc641eb1fdeb",
"678": "09532ddbaffb710f02964e270f8658bd8a09149744726a82f618708b35a5fa26",
"679": "774f8d6f89a5875342b21e8337aa5e3ab0539960a5b42554bc8d8a0fffce7d65",
"680": "48d62baa62c2a9c561612192ec39a7dbcecc8badadc0ddc755191648597a42f9",
"681": "7adc09dd86f3e73979d9f8a4b3232102ca52bc41d043614fe989cd908ed88c76",
"682": "522f0ff3ae2f1761dca78207dec1c9b52556eba2db7503ca03441abf62f65c76",
"683": "376e3c3e4b88ee76cb0f02390751a7248fcf1562013b1390b1e276a3f3d7da63",
"684": "6363f306f081683781908acd4bedd92b3a75c796243cdacadc4b9896d8cfaaaa",
"685": "29f2c4c5325cf626b392a910e6e22b6d2a989bfbb38439c20162b7b786b5e2f8",
"686": "990ae3583a1f7a32b7581a8ace626511c812e0bd910b8064fefb31e625b9c22d",
"687": "7e78b4b91851b976f5cc2a1341b9389ae7bdd0248ae7f7c53e7ebb2d86bbc73c",
"688": "1ada92e769892b4bb540d75cbf40017a24b5b309b28a097ed62eb7f2727518e7",
"689": "17a0ba5b100d0a92f3f82e2e6f31c71a6ca53a0f043094a6419331e22036150b",
"690": "f9658a8f0687d69f420f655c500304c3c0888f298a68075ab6a2165a3bc47c53",
"691": "3ff8aa53eb2f7e700fdc7cb838ca7f7b495948bb997ef70d196c10592fa64680",
"692": "c01c3e579b2743866cd3d0c1d9039871356143a99c572593d2702f387e9f629f",
"693": "c08e2dd3686459c2989cd6a367d2cc64b2bc2af460417102e9856e91b5f78fa4",
"694": "063e59bfd9cbed08afa508954ac9c1c313b80331d6a917fd2202e15e1eeb00e9",
"695": "c3259eeed96a5837a6630fd9d1245de7c77e10d0733b6129a3dc99548bd92800",
"696": "9ab20a4d8c3c0de897a1c8afa95733d0f7f79870c6379064ef4cf1f5baae67e6",
"697": "62c07adf4da24a20a723f6c32e35a51f2b942e363dc9fa35070e34991a5a9c1d",
"698": "632f1a4eba12f5c80401d82c4bad7c5679f55ccc89bf2da3e3930ff3d6671ba1",
"699": "8c40c5c92fad7ed2774080ddd39f62cdc94ca05dde4273344497ab4206499484",
"700": "3dccee8e873d2c9c2f8359417e666b702f97b60b90b229e3c41190909ff9388b",
"701": "65a57fc7ebcdab77821276a1eba1c1a625bf2bae575b025359de492592ded205",
"702": "c1b0ade78aadbf0d5576489c2200439ef825fe74452115edbc908e9ff955efc0",
"703": "1e5ea7fffdcdbca5fc91694b200db8e2e3737e829b7694e4dcf3b937b41be330",
"704": "9ddf38880f294ac1a759c764c394cacd4635735880f326a0b5e4a896e4fdce8c",
"705": "2bb033d9eeb9157fc6ae835e99b9523bfb1d61173cfb34941cbfdc4c0d3ea67e",
"706": "51a0e8daacbd6537efd583c48c5815a9bd22fef0eb9b8e15dbe2ee87c76e2a6b",
"707": "9f50d3b52dc4ebae279c6f6021258ca8cd60b8cd13e358f29a2879caa390a774",
"708": "42e0a9be7737aaab1fd27543c0273f4c97dd3bd6471e6ec04b1fc7b79542db71",
"709": "ac2605c16873ea2b5f0ce5008089a55e37588f45313ad06ccc7dfd96f407eb8a",
"710": "09214942caed4184e7155b4016b1e0de37c0a142deaebee3879c770438a28276",
"711": "8d8ea19a78bcb10e502f91a057bac1b200ab17db66e11cdf42b63ec65a8e6c18",
"712": "001493340cc232a48125f958308be6d0567ff2684e0625e55af8b0a024c4ccca",
"713": "98a124df4ffa11cca86fbd959f4d091665fc871a4a86cc1024429d1c116b556e",
"714": "cd175b00873a9a3369c628861c1f20df57a4ca75074530ebf5b974d04b8b93c4",
"715": "cdb954d8620ad2d95915f94243cdcf71170cfc363334b2f831544f55f0d15746",
"716": "abb62293fb9df9bc7a6e80ea24f0da1049f894ade937367e24563a3277f953ef",
"717": "319369720bf1831be4c73600c26f5d08dcf6cf85fd32340c28263e39c1dda5e6",
"718": "412ce061b1ae228d2226fdb3bf2cb68421870465d6a8cf7ae58515c02fe54684",
"719": "c461587d4f3a41c375628e94fb9f971cc2829b8608d3c7aca840e62a6c8f1929",
"720": "3651d0d1f023c90e42be5c6ccf28ca71203d1c67d85249323d35db28f146786f",
"721": "8430fc43038ba44efb6e9ecbd5aa3dfeaeaf73f2d04a2d5596855c7de5de9c20",
"722": "9687101dfe209fd65f57a10603baa38ba83c9152e43a8b802b96f1e07f568e0e",
"723": "74832787e7d4e0cb7991256c8f6d02775dffec0684de234786f25f898003f2de",
"724": "fa05e2b497e7eafa64574017a4c45aadef6b163d907b03d63ba3f4021096d329",
"725": "005c873563f51bbebfdb1f8dbc383259e9a98e506bc87ae8d8c9044b81fc6418"
}
| {
"001": "c0b20f4665d0388d564f0b6ecf3edc9f9480cb15fff87198b95701d9f5fe1f7b",
"002": "1f5882e19314ac13acca52ad5503184b3cb1fd8dbeea82e0979d799af2361704",
"003": "5c09f0554518a413e58e6bc5964ba90655713483d0b2bbc94572ad6b0b4dda28",
"004": "aa74f52b4c428d89606b411bc165eb81a6266821ecc9b4f30cdb70c5c930f4d9",
"005": "1ba90ab11bfb2d2400545337212b0de2a5c7f399215175ade6396e91388912b1",
"006": "537942be3eb323c507623a6a73fa87bf5aeb97b7c7422993a82aa7c15f6d9cd6",
"007": "ecbe74e25cfa4763dbc304ccac2ffb9912e9625cd9993a84bd0dd6d7dc0ca021",
"008": "b9fb30b6553415e9150051ce5710a93d0f55b22557c0068d8e16619a388f145a",
"009": "d912d9d473ef86f12da1fb2011c5c0c155bd3a0ebdb4bbd7ea275cecdcb63731",
"010": "bed2d160e02f0540f19a64ca738aacb79cfcd08ba7e2421567b16cb6e7e3e90e",
"011": "9ded5bc849d33e477aa9c944138d34f0aacc485a372e84464e8a572712a5b7da",
"012": "3e7be445b6c19e6db58c2482005c1f78cb74011a4279249ca632011a9f1b61a2",
"013": "3cb265a96c5645a9ad11d47551f015c25f3f99792c951617656d84626fbc4868",
"014": "78a262dd40eba0f7195686ec7f3891a39437523456f8d16fa9065a34409eeac6",
"015": "7b8f812ca89e311e1b16b903de76fa7b0800a939b3028d9dc4d35f6fa4050281",
"016": "a6f988d30328bd706c66f8ac0d92aac21dd732149cdd69cb31f459dca20c5abe",
"017": "1a455b216c6e916943acf3fa4c7e57a7a5cac66d97cc51befca810c223ef9c23",
"018": "fde3f2e7127f6810eb4160bf7bb0563240d78c9d75a9a590b6d6244748a7f4ff",
"019": "284de502c9847342318c17d474733ef468fbdbe252cddf6e4b4be0676706d9d0",
"020": "c86a2932e1c79343a3c16fb218b9944791aaeedd3e30c87d1c7f505c0e588f7c",
"021": "e8c6ef4a1736a245b5682e0262c5c43862cfb233ca5e286be2f5bb4d8a974ecf",
"022": "85148c096c25e3ed3da55c7e9c89448018b0f5f53ad8d042129c33d9beac6736",
"023": "42e2552a2f589e021824339e2508629ffa00b3489ea467f47e77a1ea97e735c9",
"024": "4677b3d9daa3b30a9665e4558f826e04f7833dda886b8ef24f7176519a0db537",
"025": "7d398da8791745001b3d1c41030676d1c036687eb1ab32e0b5a1832e7579c073",
"026": "fbe10beedf9d29cf53137ba38859ffd1dbe7642cedb7ef0a102a3ab109b47842",
"027": "e4110e0852a2f70703f0081fc91c4a20f595919a038729cb37c564d68b875c6f",
"028": "261171a770d594f6a7fc76c1a839eda7f6dd4e9495e00e75048578fc86d8adf0",
"029": "a207c35d8417aeed4c9e78bcf83f936cd8191c702893be62aa690ce16bc909ca",
"030": "46e68e4199ab0a663ab306651528b06756556c9f0d8b819095af45e036dfbe6b",
"031": "8de34b4ba97b184c7a2096b9266776175242b87d67bc8d77d7289be6f70cd105",
"032": "0d246750daa7f1b367a21f55da454ddc8f62e0a95d163062e9b9273320d5130f",
"033": "ad57366865126e55649ecb23ae1d48887544976efea46a48eb5d85a6eeb4d306",
"034": "728b8d7d6d5d34cad9cbb7c3ea15f807ae57144594b1740b3c73b82314ccd1ed",
"035": "02d20bbd7e394ad5999a4cebabac9619732c343a4cac99470c03e23ba2bdc2bc",
"036": "9480c0160719234b57defc0681c0949a175ffb3ff4a3bf5e8163ac843f383f35",
"037": "e9800abda89919edac504e90dac91f95e0778e3ba0f21a0bac4e77a84766eaaf",
"038": "b2004522103364a6e842b9d042c0707d79af68dec7810078729d061fb7948912",
"039": "fd0f7e53c5b02b688a57ee37f3d52065cb168a7b9fd5a3abd93d37e1559fbd30",
"040": "d29d53701d3c859e29e1b90028eec1ca8e2f29439198b6e036c60951fb458aa1",
"041": "bf05020e70de94e26dba112bb6fb7b0755db5ca88c7225e99187c5a08c8a0428",
"042": "79d6eaa2676189eb927f2e16a70091474078e2117c3fc607d35cdc6b591ef355",
"043": "6512f20c244844b6130204379601855098826afa1b55ff91c293c853ddf67db5",
"044": "97e2524fd3796e83b06c0f89fdcb16e4c544e76e9c0496f57ac84834869f4cc3",
"045": "8b0300d71656b9cf0716318be9453c99a13bb8644d227fd683d06124e6a28b35",
"046": "8485ee802cc628b8cbd82476133d11b57af87e00711516a703525a9af0193b12",
"047": "c7274da71333bd93201fa1e05b1ed54e0074d83f259bd7148c70ddc43082bde1",
"048": "743d17cbff06ab458b99ecbb32e1d6bb9a7ff2ac804118f7743177dd969cfc61",
"049": "47c6094ff1ff6e37788def89190c8256619ef1511681c503fea02c171569d16e",
"050": "6ee74ef623df9fb69facd30b91ed78fe70370462bb267097f0dfeef9d9b057bb",
"051": "d17cec28356b4f9a7f1ec0f20cca4c89e270aeb0e75d70d485b05bb1f28e9f6d",
"052": "ebd72b510911af3e254a030cd891cb804e1902189eee7a0f6199472eb5e4dba2",
"053": "9705cc6128a60cc22581217b715750a6053b2ddda67cc3af7e14803b27cf0c1f",
"054": "12e2c8df501501b2bb531e941a737ffa7a2a491e849c5c5841e3b6132291bc35",
"055": "9f484139a27415ae2e8612bf6c65a8101a18eb5e9b7809e74ca63a45a65f17f4",
"056": "3658d7fa3c43456f3c9c87db0490e872039516e6375336254560167cc3db2ea2",
"057": "620c9c332101a5bae955c66ae72268fbcd3972766179522c8deede6a249addb7",
"058": "196f327021627b6a48db9c6e0a3388d110909d4bb957eb3fbc90ff1ecbda42cb",
"059": "0295239a9d71f7452b93e920b7e0e462f712af5444579d25e06b9614ed77de74",
"060": "ad7c26db722221bfb1bf7e3c36b501bedf8be857b1cfa8664fccb074b54354f9",
"061": "94e4fb283c1abcccae4b8b28e39a294a323cdc9732c3d3ce1133c518d0a286f6",
"062": "d25a595036aa8722157aca38f90084acb369b00df1070f49e203d5a3b7a0736d",
"063": "0e17daca5f3e175f448bacace3bc0da47d0655a74c8dd0dc497a3afbdad95f1f",
"064": "6d62aa4b52071e39f064a930d190b85ab327eb1a5045a8050ac538666ee765ca",
"065": "1c6c0bb2c7ecdc3be8e134f79b9de45155258c1f554ae7542dce48f5cc8d63f0",
"066": "316c0f93c7fe125865d85d6e7e7a31b79e9a46c414c45078b732080fa22ef2a3",
"067": "53f66b6783cb7552d83015df01b0d5229569fce1dd7d1856335c7244b9a3ded6",
"068": "4bf689d09a156621220881a2264dc031b2bfb181213b26d6ff2c338408cf94c3",
"069": "79555e4b891e2885525c136f8b834cc0b1e9416960b12e371111a5cb2da0479f",
"070": "08c6a7c8c06a01d2b17993ada398084b0707652bcfbd580f9173bcddf120ac2c",
"071": "63f032489227c969135c6a6571fe9b33d6970dc6eca32c2086c61a4a099c98fa",
"072": "9ef8a4249d4b8f24147ab6e9ad2536eb04f10fb886a8099e88e0e7c41cf7c616",
"073": "ae9f9c786cd0f24fe03196d5061545862d87a208580570d46e2cfb371319aa68",
"074": "b7c7470e59e2a2df1bfd0a4705488ee6fe0c5c125de15cccdfab0e00d6c03dc0",
"075": "8a426e100572b8e2ea7c1b404a1ee694699346632cf4942705c54f05162bc07a",
"076": "81c54809c3bdfc23f844fde21ae645525817b6e1bee1525270f49282888a5546",
"077": "7f2253d7e228b22a08bda1f09c516f6fead81df6536eb02fa991a34bb38d9be8",
"078": "71374036b661ac8ffe4b78c191050c3ccd1c956ca8a5f465ea1956f7ce571f63",
"079": "2df095aea1862ebfed8df7fb26e8c4a518ca1a8f604a31cfba9da991fc1d6422",
"080": "58bfe3a44f8ae452aaa6ef6267bafc3e841cfe7f9672bdfeb841d2e3a62c1587",
"081": "04bad90d08bdf11010267ec9d1c9bbb49a813194dace245868ea8140aec9a1f7",
"082": "52c42c55daea3131d5357498b8a0ddcf99d1babd16f6ccaee67cb3d0a665b772",
"083": "a825281bc5ce8fe70d66a04e96314e7de070f11fed0f78bc81e007ca7c92e8b0",
"084": "692a776beae0e92d1121fed36427c10d0860344614ead6b4760d1b7091a6ab1f",
"085": "7b2e7211fb4f4d8352c9215c591252344775c56d58b9a5ff88bda8358628ec4e",
"086": "8ffe8459134b46975acd31df13a50c51dbeacf1c19a764bf1602ba7c73ffc8fb",
"087": "cec1917df3b3ee1f43b3468596ed3042df700dc7a752fefc06c4142a2832995d",
"088": "c06356fdcaff01810e1f794263f3e44a75f28e8902a145a0d01a1fff77614722",
"089": "0df5486b7bca884d5f00c502e216f734b2865b202397f24bca25ac9b8a95ab4a",
"090": "cb69775effd93fc34ef38dfbfcdc4c593b1a3d8e7ab70c0f05d627dbc5cbd298",
"091": "327f057e054d1e6a9a1be4ac6acc4b1dedc63d8a88222396ffe98b3194067347",
"092": "538cd20a275b610698691d714b2adf4e4c321915def05667f4d25d97413ec076",
"093": "d8ed8ca27d83a63df6982905ea53b4613b9d7974edcee06f301cf43d63177f47",
"094": "d1b79281d95ce5bfa848060de4e0c80af2c3cae1ff7453cca31ff31e2d67ac14",
"095": "0a3ddcd71cf30a567070630f947ab79fc168865ba0bf112aed9b71fb4e76c32f",
"096": "9c527d233befbf357335e18e6dd5b14ef3a62e19ef34f90bd3fb9e5a2a0a0111",
"097": "f0e2911e303617c9648692ee8056beeb045d89e469315716abed47cd94a3cd56",
"098": "ededac5db280586f534cde4f69ce2c134d2360d6b5da3c3ebc400494cc016e78",
"099": "92c5fd0421c1d619cbf1bdba83a207261f2c5f764aed46db9b4d2de03b72b654",
"100": "993189cbf49fef4c913aa081f2ef44d360b84bf33d19df93fce4663ac34e9927",
"101": "e8539f8b271851cad65d551354874d3086fa9ff7b6f6a2ab9890d63f5ba16c68",
"102": "9d693eeee1d1899cbc50b6d45df953d3835acf28ee869879b45565fccc814765",
"103": "1f17277005b8d58ad32f2cbee4c482cb8c0f3687c3cfe764ec30ee99827c3b1d",
"104": "87dfcf5471e77980d098ff445701dbada0f6f7bac2fa5e43fa7685ec435040e1",
"105": "a76f4e7fa1357a955743d5c0acb2e641c50bcaf0eec27eb4aaffebb45fe12994",
"106": "197f5e68d1e83af7e40a7c7717acc6a99767bf8c53eece9253131a3790a02063",
"107": "bf13bc90121776d7de3c4c3ca4c400a4c12284c3da684b3d530113236813ce81",
"108": "3dea386e2c4a8a0633b667fdd4beacd8bb3fe27c282f886c828ad7d6b42c2d73",
"109": "735cc3e619b9a1e3ac503ba5195c43c02d968113fd3795373ca085ed7777b54d",
"110": "01b4e8163485356b46f612c9d40ed4b8602621d4d02289623e7dbb3dcbe03395",
"111": "97c1b054c094337ec1397cd5ccdf6c9efe1067ad16f531824a94eaadb3c0953b",
"112": "c99c843e0f6d6566132d97c829780332218e005efc14b633e25a5badb912d63a",
"113": "8dbc8319e5d8923ef7ab42108341ee2c32a34ffc0d19d5ae5677f1564813314a",
"114": "b3b9ebc9f9ddadb6b630eeef5d7ba724b3bb4d071b249318093eb7547949bbb9",
"115": "80c3cd40fa35f9088b8741bd8be6153de05f661cfeeb4625ffbf5f4a6c3c02c4",
"116": "a39208d7130682b772d6206acd746bc3779cc1bc0033f0a472e97993d0a32d5b",
"117": "54201fbc7a70d21c1b0acede7708f1658d8e87032ab666001e888e7887c67d50",
"118": "834e6235764ae632737ebf7cd0be66634c4fb70fe1e55e858efd260a66a0e3a9",
"119": "bcabd9609d7293a3a3f1640c2937e302fa52ff03a95c117f87f2c465817eba5e",
"120": "2bd8cabf5aecfcadde03beda142ac26c00b6ccfc59fdcb685672cd79a92f63a6",
"121": "5292478e83f6b244c7c7c5c1fe272121abdc2982f66ed11fcbc6ea7e73af124d",
"122": "6d78b19a042a64f08cc4df0d42fb91cd757829718e60e82a54e3498f03f3ef32",
"123": "057b9b6e49d03958b3f38e812d2cfdd0f500e35e537b4fa9afedd2f3444db8a2",
"124": "d251170c5287da10bffc1ac8af344e0c434ef5f649fd430fcf1049f90d45cf45",
"125": "e9b7a676dc359ffce7075886373af79e3348ddbf344502614d9940eecd0532c1",
"126": "38752ed2e711a3c001d5139cb3c945c0f780939db4ea80d68f31e6763b11cfba",
"127": "e707d9f315269a34d94d9d9fa4f8b29328e66b53447ef38419c6033e57d5d123",
"128": "5e15922fba7f61ddccb2ee579b5ec35034cc32def25ff156ae2b0a3e98c4414e",
"129": "3cc4ad1254491787f52a66e595dbb573e13ceb554c51d81e42d5490a575da070",
"130": "7a6e9899cccb6a01e05013c622422717f54853f7f2581bc3b88a78b25981da08",
"131": "4a8596a7790b5ca9e067da401c018b3206befbcf95c38121854d1a0158e7678a",
"132": "ed77e05f47f7f19f09cae9b272bfd6daa1682b426d39dcb7f473234c0c9381c5",
"133": "e456d3fec55d88653dd88c3f8bbde1f8240c8ceb7882016d19e6f329e412a4ae",
"134": "b144116982f4f0930038299efbdd56afc1528ef59047fb75bade8777309fde4b",
"135": "0709e1008834c2ca8648376ac62d74ac8df5457069cbfedf2b0776dab07a3c5b",
"136": "84692ebaa4fc17e9cfce27126b3fc5a92c1e33e1d94658de0544f8b35a597592",
"137": "6eca481578c967fb9373fe4ce7930b39d8eefe1c0c9c7cb5af241a766bd4dfbc",
"138": "1b5f0f504917592dea2e878497b0e12655366f2a7a163e0a081d785124982d2c",
"139": "0d2f26ec4004c99171fc415282ec714afa617208480b45aeb104c039dc653f5d",
"140": "78ceab5e434a86a6a6bb4f486707bffaf536ef6cb2cc5b45a90b3edd89a03283",
"141": "d74ae4b07f05779065fb038b35d85a21444ed3bed2373f51d9e22d85a16a704c",
"142": "f59af8b0b63a3d0eb580405092c1539261aec18890ea5b6d6e2d93697d67cd38",
"143": "66e9d1093f00eef9a32e704232219f462138f13c493cc0775c507cf51cb231ed",
"144": "09a1b036b82baba3177d83c27c1f7d0beacaac6de1c5fdcc9680c49f638c5fb9",
"145": "b910b9b7bf3c3f071e410e0474958931a022d20c717a298a568308250ed2b0da",
"146": "5292f0166523ea1a89c9f7f2d69064dee481a7d3c119841442cd36f03c42b657",
"147": "cdb162a8a376b1df385dac44ce7b10777c9fea049961cb303078ebbd08d70de8",
"148": "54f631973f7bc002a958b818a1e99e2fc1a91c41eafe19b9136fac9a4eb8d7b8",
"149": "c49382eb9fc41e750239ac7b209513a266e80a268c83cf4d0c79f571629bac48",
"150": "c89b0574a2e2f4a63845fe0fd9d51122d9d4149d887995051c3e53d2244bba41",
"151": "5d09e3b57ced9fd215acc96186743e422ce48900b8992c9b6c74d3e4117e4140",
"152": "c3ea99f86b2f8a74ef4145bb245155ff5f91cd856f287523481c15a1959d5fd1",
"153": "fb57f89f289ee59c36cede64d2d13828b8997766b49aa4530aabfc18ff4a4f17",
"154": "c877d90a178511a52ae2b2119e99e0b8b643cec44d5fd864bd3ef3e0d7f1f4bb",
"155": "58801bebc14c905b79c209affab74e176e2e971c1d9799a1a342ae6a3c2afbc1",
"156": "983d2222220ab7ffa243f48274f6eb82f92258445b93b23724770995575d77fe",
"157": "023344e94ad747fbc529e3e68b95e596badcc445c85c1c7c8fa590e3d492779a",
"158": "d1b58f4c07d1db5eb97785807b6d97a0d1ee1340e7dbcc7bb289f3547559f2fc",
"159": "cd3a3d2cf8973c5f2c97ebed2460784818513e7d0fee8f98f4fdcf510295e159",
"160": "3a926519b024ea9df5e7ad79d0b1c4400f78f58d07834f5ecd7be522112b676d",
"161": "2b3d09a4c76b282db1428342c82c5a55c0ab57c7a7640e0850d82021164477e9",
"162": "d50ce1ab3a25a5c5e020517092128ab3ec4a3bd5b58673b2e6cda86bcc0c48a0",
"163": "7e17ce0fca5d559f76015765c652d76b8468f9ddc91c2069d7799867b9d52769",
"164": "5c680d0b2c4dfac8aade87be60cb4d04a4c3d4db398f51e2cbf131d699b630a8",
"165": "304de2e63f91f8f74faaebae7a7ec2e0c6e0d8d322d8a747e4e3be88de2d3505",
"166": "14212843872dab188a86eb1f0f7012e1b72ea1097545f29377b3b9b52822af76",
"167": "18c18f8710f831a82eb74ae979bd36d609bee818c972ff88f8d8fa065270f951",
"168": "66640021d592f79b510f9d6101bd8eca89893187d23919c8edff4075e73ae390",
"169": "819b01e0394727fd089f84b9351243176920f03d0c6e33e5ff136016da5d8d4e",
"170": "e68fadd33a8c41d9a259577a278d8518aeb8b81c67b8cf03ccf43fc451ec8bd8",
"171": "33bf9ed4714b0e5da8828f8b3d9d3e9d0cf55c1d496324acb04a3f195252749c",
"172": "b9a27b513dc15448772cac5e914de61f02efe323f528892c0bff86d19913a6bd",
"173": "1b2a5e44fda5dfee3ce230f44fe73c672249f6620cdbaa343ba0ba808034958c",
"174": "98aabf085c6c8647f5e8a4775dc1d036513742d8e03b8c5c51e41bdfc9c3e7ae",
"175": "c03dcb22b7faf121d99542018dd10a07a778abee2678d35c03394a8d207b826b",
"176": "4fff1a7beda4291446d76e5ed5177c3f36e52a10481009fdaf2976da39e802ae",
"177": "614d3df0ba5fdffab2328eff8e9ca2d76b09bbc447c06bf1fab0419ae278fae9",
"178": "094a2ba3011118efdd9d4c1f839e6747dee8ba953b52e9012fe2977e32599375",
"179": "9f5563a5ea90ca7023f0304acba78005ee6b7351245a8a668a94dfef160f8d29",
"180": "dbef09115a57784ea4ea14c1fe35571301b0d6139bea29d1b9e0babf2c2aae05",
"181": "3920627e86db42beb1cdf61d026f9f7798082f1221db25a17fb7feb6c1d49027",
"182": "58096166bb8199abf4e07a9ef0f960065e5a635443c1937a1a3c527ade51d594",
"183": "bdf84a73b16a5dd5ece189dc970ab2c8f0cb5334c96bdd1d6ba2bad6e7f8a213",
"184": "c1e8c0f1b1eb3c258923e9daa46ef055bd79512b485c7dc73a9c5e395d5e6911",
"185": "0ea72907eb6c1120740cd80ee6b9a935cd754edcf69379328f24dfc3f09b9986",
"186": "3c0078aeae0362b6b7561518d3eb28400193fec73aab35980f138c28b6579433",
"187": "f2bc655b33e35669ee9adc841cbda98e0834083eb0657d10f7170e70081db7e0",
"188": "38e0291a3f5438779b157e5efcae6cef9db21cbac5607cd08882844cf981febd",
"189": "9b2a65ac4c35f6b392501dee2a28221a3975aac5f0866b476f5e6a5a59f3fcc2",
"190": "606fe2cb6525dabfcdab93afb594dbc8399cb967fc05f0ca93f6084d3f8fb591",
"191": "ea1977e7b22df383de61bded2a4bb3064cf17fcc0170be452330256f938b8d55",
"192": "91d614f139082168d730003f04b87135c64e13709ced2a96001ed60796867825",
"193": "65648f18a50a7f9195fe56bb8cb9e25421c6d777ad2447a3b566dc8c54f3399a",
"194": "cdd31847c6138853597261756d5e795884566220a9361217daa5ba7f87560404",
"195": "d12224510de6c98076f6289cbe342a7ec7ea3c5453f6e3cf8d37d9eea74bd07e",
"196": "1349b472d2821dff215e54d683dbfca49f0b490ade6a30b1db9667bc94e5312d",
"197": "e2aa8f7cb3ba893af8bddbffa6240e7eb71a4f4c4498f2a360f3db7b513094df",
"198": "a29d9edd0dceca9a72d2238a50dbb728846cd06594baec35a1b2c053faeab93d",
"199": "50a6b9725ef854537a554584ca74612a4d37d0ec35d85d04812c3ae730a4c8cc",
"200": "5b439098a3081d99496a7b1b2df6500ca5f66c2f170c709a57b27c6be717538a",
"201": "b4e86186652a11df0b9ec8f601c68b4823ae0bafd96357051371fde5d11a25ed",
"202": "057243f52fd25fa90a16219d945950ed5523ddb7eb6f2f361b33f8b85af25930",
"203": "2742f7af8ce9e20185e940bb4e27afc5fefe8cd7d01d7d8e16c7a5aaf3ad47aa",
"204": "15f5e9ae4636a6bf8bdd52f582774b9696b485671f4a64ab8c717166dc085205",
"205": "e03c2f4ceabf677ec502d235064a62271ce2ee91132b33f57382c4150c295784",
"206": "16bb96da8f20d738bbd735404ea148818ef5942d4d1bc4c707171f9e5e476b1e",
"207": "133fea765d0b055894d8aba573f47891c1f7f715f53edeefb200fbda758a1884",
"208": "90831cd89b4cceacaf099c9bae2452132cfa2f2b5553c651ef4948460e53d1f3",
"209": "570fab1574a3fd9aca6404083dec1c150e858e137692ee0c8011e702ec3e902f",
"210": "ae9a76ce3418c06d0eac3375a82744fb4250a2f380e723c404334d8572faead0",
"211": "aa4b2bc3a136b27bf10a858ac6a8d48f41e40f769182c444df89c5b9f0ed84e5",
"212": "81489bf56605b69cc48f0bce22615d4415b2eea882a11a33e1b317c4facba6eb",
"213": "a497e789f49b77d647de0e80cd2699acd3d384cc29c534d6609c700968124d14",
"214": "409520c6a94de382003db04a3dfee85a6dbb71324f8bd033e566e510ad47e747",
"215": "0eccb27846f417380a12dfd588a353e151b328425ecf3794c9cf7b7eec1a1110",
"216": "f735b4b441635ecded989bdc2862e35c75f5179d118d0533ae827a84ed29e81b",
"217": "9aa88ac109aefaa7ce79c7b085495863a70679058b106a5deb76b2772a531faa",
"218": "5feceb66ffc86f38d952786c6d696c79c2dbc239dd4e91b46729d73a27fb57e9",
"219": "9da1307fd12f4c9a21a34e612920cec286d937418a2d5040676408ba0c47f3d8",
"220": "a262318d02a14747ed2137c701f94248bf8651a23d1f82826952e66c25029588",
"221": "bfb4e53578fa42f83eda027da0467a351298dd65e3e8e84a987d69fc275e9f2d",
"222": "4308f4374b84e657aa7a21e5f5fe42ed16386b6dc7a74bff0d24d08ad62acd26",
"223": "3790f82f65ce7bc071b4096ca22544548b3413a755f58bfc401eff3ddf487a86",
"224": "96356c050fa66d919c75212d789544db81b012bbaf1f7915b053cb9ba2d67de7",
"225": "f37f3f2b0dc57a86dee4ba6ff855283bb4d2f0dea1c5bd1b708853444c2ffcec",
"226": "49bd28d97a79875007a6713aa9700df14464217c28a6e76bc93ded57b75a33f5",
"227": "b1f73471a3e6ea1dfb04610bd89ccb110994780084280fae872d42a2786f9131",
"228": "e38da154f6cccd06cd0001924ec2dad8de5bdcd0b78b68e8d8347768d99ac0bd",
"229": "098ffc6baaa32734053275ce38f4bbe58efe0ff946bf31e0e2df4c6a169e23d8",
"230": "2c72b887a8638941b85765645b45c5cdb73255427e14d5114f6830f847a6b861",
"231": "4aa0c92e77eeed08994650ac6129d77db9f026ae2aee78ad5c9fde132fac0505",
"232": "5f7905b71cb897bc7cc6db7e29cc38ee459e2fd8f5d58ba4746d3acd4e46d444",
"233": "8d986e287ad21475728b0dbd9e935623d69db4e5fdca0d42bc32d70eda48985b",
"234": "2d9d03b778af897e305caa8a1a14a117737bbdd549003c6d7477dd3be8331694",
"235": "7168cff545d365b09e8997bb9450343c7090913876c8f7eb9f0e9849c6fc7dd5",
"236": "ceb3002bad36c22c5da82fd422b36bad91b97a7d3f5409ed5d16aa9b42dc137a",
"237": "c857d8fa78c8fde91f29b3fbe332c2e781f7e8b54532f4c352693d6676fda2a8",
"238": "3e2edae8b8ddbcfaecd5aa6c69cb5960b84cc16f7b3232f3386aae9ecbd23f20",
"239": "49df3a63ca6509687cabb3d208e92b057630231e66b87fe8c781baabb12a55f8",
"240": "5034a21557b2de1c5c2d2aadfe8ffe62162c23f42d1aaabc705ed8519e91a3c1",
"241": "85abbe1913df41c57d1e6f00cecea416edb19c64681d1bb43fb5024e2f48d409",
"242": "4da30e6198a3d9ae6a538c2366e08ee218de6efe2c5b8f231493e21489c21a7e",
"243": "7404bb7881a010271831847e71162ee7a393843922ee93cf7cf3455a0074279c",
"244": "21aa3213adeb0a562ec7161e1cfcb5f1701303f1c9f03ed726c536283e080db6",
"245": "22b9cfa9ab97c84eb64e3478a43acd4d95b90cae8c3453c968457a89c6387a81",
"246": "729e3de7687fc597be2eb4c592d697ff29c78cff6945c9690cfb1ee67550eeed",
"247": "f49b98df95a1f826c24cf622ba4d80427a0e0767dffcc28a9206c58508863cca",
"248": "44b8116c29dafbdfa984b3751c1dfd057b3e93fc57c8cd18495f1c0f605496bc",
"249": "49e96b6ba41e88901dbd118139ef6f013b4fc59e2347058a7e726cf6a2f14067",
"250": "f0e0dc05fb555ae5ba9820586bef3bb8a3a82905ece3d8a998a3015fc91c1c3e",
"251": "8c1ece1b350c210380456da2bab70054f717731b0dfb34bc3cf4abfacf696f15",
"252": "ad20a49374f9176bd26460d35f96f30d734df3cf6fc63b4642380b4e866848de",
"253": "ba1a2bbccabbcddbf29ee0b56d0d56b4f026e8a7b97e97de2d48c133ccbdf2a1",
"254": "381a2eac64a984a81671722bd95ca5b8b6508a6f490af46780e9f393c6797223",
"255": "5e6ece13372bad4a6ea011c933389dfaefedad5860aefba2ab97fe5f59156c42",
"256": "068d4a3c845803bf66a9e5320261a0fd7b6292a8230b271a6a83f0dc8c73e907",
"257": "d80ac9215ffa7adacb22711cc88f5b580272d0d65c49e1ea48e69d17e264d91a",
"258": "256c4d399703b7f16dadef9201efc0ef9f6aa6ee05ddfa2d3e26ff6efe09704d",
"259": "275a4e84039a1596ac7e8bbe186163dcfb02bfa99c209653ff5d505a29b4cb10",
"260": "f461ff2df66653be1a2e579b1aea515d4a84f2ae5ebea3aa21fb2477a53699f4",
"261": "178ecd56cd79c7aaec1353093571ce89845130991d64c5a715a02da83a2705ab",
"262": "2e0cb5e8fc8ef04c50a5b9ab9a9eecad446598ebc2527b19c447143e5ae09736",
"263": "c870fd75ed0d5ed92ec35789c522d029f364328a16282a1c5eb9b3b7e121eff3",
"264": "da5d6bdd89eacf70a88810935f80e4725da4feaf2aa86adb13985d7d9e1c247f",
"265": "13f16351c3971c286fae5e9cfbaf6f0a128a6507804fd280971a600019e352e8",
"266": "4f39cdd293598de9259231592e99bfc5fde82a0bc1820a4c5faeb54f96037f00",
"267": "3e054d92034d3d32c3d4e7acadf1c09232e468fc2520d23d2c7d183ec0735aa3",
"268": "2d47c47a2b19178cef9e4eba1a53dd39b5f8657bbe011a71c8d402d294d50132",
"269": "4448f310ab9bff796ca70c7b7d0cd3b9c517f72744a8615112f65ba30a6d61f7",
"270": "ce71f5bd1db540762e4bc6c4798d8b7f3d2b7068e91c300fd271a46298aea2aa",
"271": "5a05e212b9b6ccf6092081f567aa73d27da399d45418f674628a8154f9182b6b",
"272": "a326c2d7121d80861aaf110826615e560aa4efdec0cc9fdfce051c6b9038e781",
"273": "d32b75411f407c5da6a9a6b4a3907b9a9ebbca6b651324c03432d549671bb837",
"274": "b5740ac928d58f53537b05ecc80b7463dc1fd5a53400f57aa55573ecbd5faa56",
"275": "e1c843ff0e97692a180e384c1a9c03c7de06ef92ccad5aa6157fabf0dbe5b804",
"276": "2edf523574e0a062cacf21f51ed6f81128537f27a3cd27b84a8b5d2478d0092d",
"277": "130c990ad499345b7638e57dce365442e2ab2d2571546aae60a9fa6ed3834b8d",
"278": "2204d89df74e664621dfe8892277d50e7774f60613561d70ae06ee0eb4c483d4",
"279": "4618456c7239784964b8fcd27155e01cf5417a16cdca7b163cc885d598ba93f4",
"280": "4b2d9501483d450371ec4413519b0b3461502aabb970fb2b07766d0a3d3a3f85",
"281": "b04a4a02fa0ae20b657dcfe3f80ef84fd446daa1521aabae006b61bb8fa5a7da",
"282": "6dab2ee10b0dc8db525aeaa2f000f3bd35580ba91e71fe92bcd120ad83cf82c5",
"283": "c964c01082a258f4c6bb66a983615686cb4ae363f4d25bd0bdad14cd628cfce8",
"284": "df960dabff27b2404555d6b83aed7a58ef9a887104d85b6d5296f1c379b28494",
"285": "087de77e5f379e7733505f196e483390596050c75dad56a999b1079ea89246ed",
"286": "8f3e5fda508a37403238471d09494dde8c206beadfa0a71381bd8c6ac93abaf4",
"287": "5d834d4c0ca68d0dca107ffe9dbaddac7fc038b0ad6ccc7ba3cfb53920236103",
"288": "20a3ef9e411065c7265deff5e9b4d398cab6f71faa134353ccea35d2b279af04",
"289": "9dda7eb623939f599551ad1d39dbf405211352ae4e65ddd87fe3e31899ca571b",
"290": "a629c35ad526f4a6c0bb42f35f3e3fa1077c34e1898eac658775072730c40d6b",
"291": "81b1e5196bec98afe72f4412cf907a2816515bad0680bd4062f2b2c715643088",
"292": "614950a1cff05f4cf403f55393ed9d7807febbae49522ef83b97e0390038ae03",
"293": "9e4067ac93c6febda554d724d453d78bf3e28a7742cdec57ee47c5c706fbe940",
"294": "9ac900bf0fbb4c3c7e26986ac33698c46c6c3e8102ab75b40b8df94fc3a0c7a1",
"295": "2fdcd631f3c68bef3c90f8679b7aef685fa33f20c2d6eb5965cd2a62267c2ffa",
"296": "dfc947e61ea2138ebe47234ba503cf5246ecec530b12e363acb240822ddf0b34",
"297": "4d5af88ba8a28b49a79773d3e6521e8731ff07d49765203b157481469e6ae6d0",
"298": "94aa77eadafaad7327acb8e653888f00e114cca2fbe04691dabdafa2a0c8cd64",
"299": "0f221ba58a8ea417e13847ef91af9ff029561ac18c74bbeeb3f5509af81a3b03",
"300": "50a79fb6e417fb4a34e155a9af683aa9a74ee922a6c156a58bfedd22cf3185c4",
"301": "eb09a0097a47e7a95b892ad7230475a1a28343b47db4faeb3e47f227aeb04738",
"302": "fcf9736fe8c20a6d02f00e9b1e719de06aff4afa99d2eba166592aeff1b8f3b7",
"303": "e6266f575c94d805a67fcd3f2391d0961b4b121b8a66afbfbae76dfc34e5c06b",
"304": "189bd2a8daf5b638ede7c50035fcf426d125de87a401382f66ab75f35b2ac1f7",
"305": "0ac58c6eb8513f4ffe911bf0f044e0153862ee89c04939fd9b294860a37ec9ce",
"306": "335998d7e2a3fae2da75a5192d62c37dd006be96831fd37e7438ec6d84451c44",
"307": "4f1f2695b1b6b1660f3ef6ac31a81630ca74da9368eafbfb214ec1980705c13c",
"308": "bc5ae127f8690ba7f6e9ddad98a49137acb45abf4e187eaf3648f197c72fbe90",
"309": "6b78ed4c4bfc942b9b5dc340961f19c690d56f9d721b6b764d1db31da53139db",
"310": "0d183ec2ff1cbc768a9eb7eb06c2a354f6c8bab00e64ca2aed2da00825f33c05",
"311": "3ae7fdad095eed78e0c63cfe4e28ab7ba2b977259590ed38722e9c44727e598b",
"312": "329d107b5743a96e3551084832587a6346e410aa89641d83f03f1242a7244473",
"313": "ecc63ee12cbe487e5390007271890b8aa5df2cf48b8e692404895d3c2af20758",
"314": "5fa65495795c52818aea910c24e4d3176c71817f5268c34e1cb259b256737352",
"315": "95bd03b9913be37d24809d30e7bfd31a1b7a127d03d65e52086857bb3a364b5d",
"316": "ca6ec6c9159e10719cd8d2cfcfaf2fe2d3637fb3d497e2c80866de6b593632e6",
"317": "5b0d72d34b406ce20714a59f1c4d5340c5559285e340497dbcad68729a9db786",
"318": "3e2b479fafb86b8ab097588b8fa12ae8a078f8b5801e15c7faa1ef23d87a631b",
"319": "e04b18947b36771937dea491f47b75fedf42a6db684035f5690e6c2bd7e6031e",
"320": "e546e4a4c9020669c78a095aa5c5038242dd78e0f98517c0e23c43aefeb58138",
"321": "3da0198df2f98a7306ee6d2e12b96ba9a6ad837a6c2d4f316d3cd8589b6af308",
"322": "07e511e9002147c33739c924c17a61126d12823d143069535a615a97f86d936f",
"323": "be514911dd6258f860c2773253f6df6c22ca975a10c4e34db5903269f2975faf",
"324": "53ed94369b59a84d003ff3155edbf481a0eef362325539d6ab1a7f370ce919c8",
"325": "43c8dc1907d3e1eb30deb565475ec1ad4f807baf6ef34178508ec85071722f0a",
"326": "b08d72606988ea5a82e0caf15e68d81b4f2e8dbb4af6a22437916f3fc53e3dea",
"327": "f70bb9cb351daf610a91a3c769d84bbb3f3b8f1169b10839196b65b8585e7c38",
"328": "6e26ed661a0add2e583229066d304f7e765a0ea337b6a93bf979e4027b70b94e",
"329": "89d8b56a1e05d90ccde0df482ff2fec3d44270739810f3c5d06856c38d801380",
"330": "2dfac8e04d08dc5eefcbba4e475164103d339f844896a75ef3af2229185118f9",
"331": "a20f9b06c126f4ee65e3f3a0bf345007b35ecb69d035dd0ad848e09300130fcb",
"332": "6593d40f4e3f53a73191c704d388c7cd1639403da6e679c8e4169b26ade19f3f",
"333": "7499bc84f6bd2211365fec34943d64f6be80a53ee2efb21c099c1c910ca29967",
"334": "f24dd99fe5b46bb7a7a30c5eff61e71cab21e05f1b03132d7da9c943f65713f6",
"335": "8e2111c24160d92b1b29dd010b8b3a0a4f9af55f1d30bd5892756c58ffaec201",
"336": "eed2e8d970c1c5031220476e6b700d16e5065d7893a2766a53600825b4ad3ae5",
"337": "44e298d1b55c51c9f127989da1149ccf6bda24c40041f777d35d5b8f192753d2",
"338": "b3a60e80296f79cfdfc02354acc674162faefcb3fb78b9672254c9cfc6eb113f",
"339": "2b55688ba27d72202632783186211ee24ea39c53915066578291fffd9db73128",
"340": "15765221271275022a6ef57634d836b052ffbab6d7d5a6899992972143841e3c",
"341": "f7340563f85e057709a2fcc71bd448fed8d6de6907d8ba5f91fefa2abffda6cf",
"342": "f252eec230c2e92ed1fa04834bc0738b79597c3b0d2a66c787fdd520e63cb3d3",
"343": "1d65c53a04f7eea94ebf76d797c0f79fe3d251bd33e5edc16c780715531b4345",
"344": "86d3fb095439bddbc0d6e6e8e433d54aff04350e2da2ad05f53d607113075c8b",
"345": "21db551743591f9cd20fffcedf3bda17f9f178bc9fbca528a56c2c61b9e7c731",
"346": "f326e2241b7e57320914aa279f9ba2e155ea77f809a188958e0b590bea9c3ada",
"347": "0fb6749b98280cc8c26950a2cb9c9dbecac18f8760e161e9bab887dcb0077653",
"348": "0cdb77330ae73fbbd0f287240f82b7547a0ef42d37004003a9c759f86b686d61",
"349": "690ad38e4357b34368966b9de08d89e0c095246bf55969842f373f1976f86062",
"350": "5b427d47f98e296cb78875619fe67d42f41868b78886d560d8fcac89043fe945",
"351": "93dcda27a0c12f0c32cc35f0de161e7f7792d11abe5d4c50d7fd5192ab8b11c0",
"352": "d01c0cd49e7649289a1f13162757de494bb9104b20ac8bdb30a4180df5225889",
"353": "3d856f38821d7b221aaaa9baa3d7927f6e360919e8f8505d7499f9bbd85c44b8",
"354": "36dd3030dec4a8050d2079678250c9c6c86c66c64fdbe7f5b82e79024bb8d5a7",
"355": "b0a915b700e415ba3acc3ef261128680b921b5df9bd6fb1d35c2d1180e7f61d7",
"356": "a309814f13708f2eb5ee8dd1a3114e8f8b15646b8c797bc7119ceaa3f6911f0e",
"357": "61c9c81a41fd294a8f07033c8373706694faab4df3652d310e84904356cf5e6c",
"358": "7d59500b8883d81040173b88462a73849e0d386a53830d599e6a042f4c1c165f",
"359": "0793805920db4896155cbce40fb58570a3cc952d0c15ee57393fa3c6ca7a8222",
"360": "ee8cacd40fb7515e510cbbe7deb6005369ce7d9800ecff897f3fd8721fd6ef71",
"361": "e96f225fa470174b4ac787b21579ad1556804de85c0c83da99a92ddc2c56c7ac",
"362": "9a4ce079c1a882a306e21e0c145dab75a2698cba3860152f03dafc802ad9006e",
"363": "258a6e6ea10385ca3c0cf08377d13ef31135bd9479d5a4983beadf158e19ccc6",
"364": "13aefde214541fab44d2a3013c532637a3da82199fb6c0a1a941c3108f76b9cf",
"365": "0cd978902035027c6898d6b5fc11fb5931f06f8e8ec9c24b4706143c91de9450",
"366": "47495a92574a6d7b150eb3f4338748ba03672ff93162139f98e03847f96551cb",
"367": "fad9203cd26fccb99f0f89fdc569c230eda46cd72ed3fb7e5f6fbcce78ced1a9",
"368": "a237e13fa6c32b66695b8c8de6472d5c93c7650989f047f62a17438c07036845",
"369": "da4c450ba0c4f76556fce54bc3f6b2a754a626cf1f87ba3280f545e023942640",
"370": "5000899cd3070e1937d42a68766c840bdb9629a49c6112bea5cff52fdb4e9f7a",
"371": "7afb55ee21c0447f7b961265abe7ccf87f59af6206949bb1da19fd36334b09df",
"372": "fcf734716ed1fa724e7228a489304e3c55e813734fb5792a83f806ab04e40485",
"373": "83c98f0431cf944440dfe0a9831275ed451b0d16856aba4100f53170c55c2e6c",
"374": "d998ea6616a5a7a9f7beb3ec02f8cbed4a9c5f17be978c31f32ac0f9f4e4460d",
"375": "6a72aba5c61e27e281235b1f001ab68b840f6e8bef0e6bbd7bfd8eec1abf844e",
"376": "980dce9435a9fc03250df4e809c2f68c48601b64c30d32c6e67bf1faa35fe274",
"377": "7b4a0b6958cf23951636b5e27af8041dd9901256c53de44c9be313ffd0a01ea0",
"378": "a1b13bda78da3ccab1af6c330d3e768fce62841f924933285e7b1f7a8b7dcd5f",
"379": "c957fcbb90e1afe9a342e95608ca035596a7dfd4cef398ada55e05a2462aba14",
"380": "b794fae83475a77832f46e69799419f9881bd774e1bfda56773b587c42591039",
"381": "e7208f3630a20b01a5e1bf5d0537be9dae9fd7529773cac12b96c4ac2b0f8dbf",
"382": "70480c0d26a6d76eba0faf3ee047d6214b2ca4d1442070ae5e79893192ffa699",
"383": "3c814d251089cb2a92a78ec3424b2a729cfbbfc6a996fd48148261312364a9a8",
"384": "f709015ae0f8ad20bd2efd94d01af3858234e381942b5b15391ff7f77211b116",
"385": "0bca6cad1f4ff336b93c9f86c4ac872bda67ee0cd41b1862a7a663852717535d",
"386": "3e1748647b60bbf292aacae65b3608ccce8e55e203a36ff062ee787cd8c14480",
"387": "cf592fa81780e727a56553df32410beba6de9c33543dd1ef1155b368ba9a9b9f",
"388": "911326fcfb0638154f69eabb87e4c0c141df09e56274c8522e9c13b7b577f00f",
"389": "cdd56fb06838a10149f2c7229bbc76f78b4a5a58945fb70a47261f1bf635c404",
"390": "07dde4848eb878808635fb7b366261b1e9cb158635e76577eecc48ccf941323f",
"391": "76cd3def1eea8e2631d333798f4d282bf40f6254b2d18c02c78cb56b33462093",
"392": "c4f7ecf21a8738c3ad0114a1ee6a2d16668e71b499741381f30827ed451dc817",
"393": "7bbc419f89fde57d2862bfb3678ddab96614693dfca109d0f444e4762a2b7a8f",
"394": "7781ca3332d6da18b1b9be5e2eff634b526ae9e8088f6e479b49d657f4f41525",
"395": "5b5de0def2c4a989a54ae3e748362c78cd018778d5adc4dec13c1bde6ffdc139",
"396": "d42c389d6abc7d8102b8cd1b906e4600da08394388d4dcd432ec955e6d8b311d",
"397": "629e23dc358ed2a8c202e1b870e270e401aecc5d726a679b542df8e6becb4200",
"398": "c30114e73097c3fa4efb203915f3b828b1b8c432ddeab2b7e1ba3fe63c50e190",
"399": "a681ef7bdb22145a3e051ecf7bfb694c18b255c80dae6fb8d49f187d28f3c58f",
"400": "c993a792804e09c9f60313f4144953eec072ca6a8a27f44d8718ce53d9429585",
"401": "074b576ae2054cd030ffcfa132b1465f8f49b836f505cd4bb01af4a98f4f5337",
"402": "d45f88fc3c00673ef7e628d867a54a4ea281b3b2620735cea85a8da3b06321df",
"403": "a09086d3cdab7d6ff8a9fba1746c5d236e0ad0abe088be99bb172e80c6f0f8f3",
"404": "55a774ac3423440dda50d73e472887195940d5e9df605b30deeb0f2528b001a4",
"405": "ee9fa61ae8153df7979be3afe6377e584fbdad624833424a5cff64f6ea94c9da",
"406": "584cba4abd5711b8f558fde97620b8ff0fe91586bad052ccff87c49c13f72555",
"407": "ac50b37409f7ea91f90856bbfa716731013deffb5f5b51540a99736e08e5378e",
"408": "2c12c3cf062c3d9cf2c53e6e4dafce70ca5c7a38c97479c3b013cd91076ecf4a",
"409": "5a55b5fb584c359f4b6ee2d21deb62923b0b25e1b4c3da0a6f351079ce657173",
"410": "9e224b6ab0b7f20759b63d1799b426a8652c9e637b1f38d3eaf8beff73c80c67",
"411": "66c0c1ab79e9887b5daf2c510f2c2c4097044b69fee6bd4ffcff73ad4816b8c7",
"412": "27f1768d99e22f8b55d010b8b7acd904e8b66751d5310d32c4d017a0ad34d650",
"413": "7c99634a1161e424a14d60b516291655096eb90ed055326325d7f5de7a44a3e7",
"414": "4e03e038e99870b1faf45a0a29d6124379d05a0a3553a11aaaa91b8ba56eac5f",
"415": "955e433ea745016af2a5df015f1cc223ddd84ddccaee60d5302b7ad61542d9e1",
"416": "8d07a87b9012a166f5bec4dcd646d5957c9b3633a1a37c40c584ede75cb7ad22",
"417": "3f738338cef45597e3b839536953104186f11d94d16877c77abd8a067c152dc3",
"418": "0c813356b30108f89fb37e8774a98af4f9eca3df49e963f985ecea82a88b1437",
"419": "8ea8d93a9e874f8c8ceeb240f1f1245a077a7c0a62287d3044feaf855b5dae78",
"420": "af7ac1e90e07f189afbb284ae24614d9e713e32098bc39bb81d6484d47351444",
"421": "f45e155846624f37cf2ee08be2a63cb1ca35bf795fb0f770b4c91ab549f22b25",
"422": "69d728f7e25055dbebd41684bc6de61be6b4db4119d7ecdcef5b5d8ead976537",
"423": "3e78c62395be704a59a3a6a65e457725105619e0a6f9f3aa6b311c4f7762b0a0",
"424": "fbd6edb36c3754a35e7de936839c4fd0564db873924ba97b35cd43e065835582",
"425": "ee5bb631b2a9edf8ed05781b192f42e24ae748f3aa4ba5e635374c094d28ddac",
"426": "3e913e088a689d2d33bc797040cea94512bf54a61f96501f60576ab22ed0304b",
"427": "415e6da4c7f92da36e2d8c43fa8056d0050ae127e648451e2fada49bf2c936d1",
"428": "389bded7b0c14212fb69b559fd1ade4f5b235b976c9655365c45481c3afda486",
"429": "3007beefa50c509b89b86c54f53757ff701f795dc5f7ed47a1520c2b092455f7",
"430": "59ec8ec2866ca502ad558ade9f8a06a9ff815a1ed649bd1cb513f417f1d4727c",
"431": "d3f28dffa4e22b3bed74c3c2c9ded1e4a8be49d3757368e4e3efaf7f79affb15",
"432": "59fd80dbc8eb4af9596e4ce8a87313d363da41313351a69ab3525faeb905c27e",
"433": "471a7ddde597fbaaaed1941f42ca1fc0f4f047e17f2197f8999dea98b38213f3",
"434": "319cb430c66d9f418aa90a3d6f9c2dfc8171383d6f4af5803a73684afcf18e15",
"435": "aa29c0119ca84133617c8bc7455afdfcf5b05a569393ff21ebcb10d32ffde2c8",
"436": "928f772ad7a9fc501f71cdef6dfe60e2d8cb5d5c5800b519d01afeae0681dd08",
"437": "ea70162a014b8294ede65af6fcdc11fb365ab2b126aef8d47983d58816fd6a54",
"438": "43633662392854b5d9f9f0fa564605212d016c9ea9377d2a6ab52137238d4191",
"439": "42f7e88fab5c9cb31d4bb34403d7958abd5023e9cf9ac05cd29626c5df763584",
"440": "cd08ef4f14b804e3106ee88f9d2b24864d5e2fec6c7cd7dddfa2713e1431375a",
"441": "daa69bac44ce5f57b4b43ab6ece3b2b3561292c0f4c6e82a506ce2973713f749",
"442": "910d2abf184cfd7b1964cec906a79f3e45f59e3d42ec20b22f56de59c9018927",
"443": "7a14ac86724d318e6d40464e710c17625d441d1e7adf83d3062305de2f85d445",
"444": "390877dded07897360921e8d0d126bf45d6a379d47292c90826d775bd1897f2f",
"445": "5ee5723341b0b81c9e0172fcb654f8b24322244bc2d1b55afcb78b180ada180b",
"446": "8b2dcb0168e8701dc9da286489a1e68e43e1b17638e5990edd882196d7fd5a29",
"447": "179af1c75faa5f42e89ce3b41496a64b2d2846361f76dd5d87f4ce97ec2bec07",
"448": "18173b14e0c0bf403b5f0d4aa23515ecf44622b3a860d80e866cd498f107123c",
"449": "22d7739bccf54ea1159ce6aca3e215482deba85a4db0676cf86d82a760c44a6c",
"450": "938bf7cdedab94bd7208b69047014e3d9ab7b54d1223bd649eb3de0bd61ab47e",
"451": "abd88e378f54b649e818d6e1d8e06c9f8cf225ac96b4085523acbb1f0c1df24b",
"452": "4119701c51dd8c457b74a19ed7ae3bdf069f5fd915c8085e9a15d909a39db036",
"453": "381ba093e8ece9e14efc965ee94bb8adbd1c0bf140875ef95f8f11050d5ed489",
"454": "b7613128b0401fdbc07a4015eb3935f6677b84dff936fc5e7e9f424d0ba1006e",
"455": "35ee11c9763f48a68f2b60b4b9c3919d3a895afc7071e8dcac5abd5845dfe79f",
"456": "8b129a3c7163dae86f1f43c18557296240a02bdac70ad29538eb5dce00e51f4d",
"457": "629c99f9af0e962f00b812057c0967861a9b6db9dd652233ac4b37f368d09206",
"458": "02df8a1d11130bde8af932dfc5cafe7d8e6c2fc12b82df5d222a91e4eed8e2f8",
"459": "062b225facc7a897e0e42e6b0f95deeb8b02de64267bf5cea4cb5280ccec1562",
"460": "a05f9a7cb049c40760ea2196eb41df1826ad492e6e5fc4696ce7bfcf7a842811",
"461": "95e5e99da04c0cd73e1818a62be3fc0de98c76d5cbdc81261672824ed5b8c1a7",
"462": "69eafed1b3d4022fc245a8416c1120bdcd039716db8cd43351a96e6c7d10691d",
"463": "018efbd353bb456112cf2c760b4d96aef02aa899ef74d4aadfb3dcf374a22987",
"464": "cd4447e836cdbed7f6a3998b50c4ab467aedaeb8e54c377da34245e90fddbe12",
"465": "da0612471988c89ea2fb190838f9f5e9029fd106330a801e66280c967ff1c52b",
"466": "8d16100c0148ed7bd41003b4a0612cbc5fa150ddabe5f9916ed6eac3fcfdefa4",
"467": "d6ea164cb91d14d6aba2d482926cb6cbd1a3644737a0530abac635083a97b8a4",
"468": "8d0e3f6bff322ff11d1267f1f8303a8ce1e2d796b7dc2d9eb3e3da939dd850b5",
"469": "35e2072f22c7cb980fbe797e30c25e9224328813eb81d07d3c88820492ce9a1f",
"470": "4993f275946ae0d444410821faa3ef4a448f10888c50ff59f7ae01d0b50328d9",
"471": "b9af9323a0237fbf88fdb14b8bce95c084351325249629ffd4fbb32fe9d6da5d",
"472": "5b278c08ab97d82c1779411fb1018b07feac7ddf38a69e4d398240a495c54271",
"473": "4448b03417a784f554c44eb15ad2d4cc022bd9cb5abe2547811eb8085355aaaa",
"474": "1c64fc4076d6b00aff86a180fd9af927b7c1c9ba87a2ca3c83dd80ba5e5ea973",
"475": "e571b4b8218a2961ed2b04f62f816eb18686d82b7f2693694b9c774acef4a0ff",
"476": "a6383ed918d7851ed7503921a64201a032a33c9e1cbd4e08d1233f543bd21be9",
"477": "c871da03e684e099190c4ce787a9588ae85841246ad7bcc9cb4c302d617f881d",
"478": "96d8bec6b787a7aea2da8dfa8a1226e00881afc218c211fc59da830775d55acb",
"479": "b35720df96afbd98c6a4f081ae1173fdce21d63f75f7b455f4c2b9fc0aa672c2",
"480": "2db876e9625c8638c66103ad0206c9a51b68d4c6a3222f403b195a81837856e3",
"481": "bac35824e79af403a2058b08cbc84f8e4df93a21d1766e4ea1de6414e2a8a926",
"482": "0f9797e2f3691bc7291d81d1ddd5d88cb4e10b0be555e2ebfbd3c5b12b7cd2b2",
"483": "8f3348df383ec9ee00e18d41c419370d42ca6ebf71c510690aa5435a679b7e4f",
"484": "3b3bac32669c5b66faaa42b89a2dcb4de0bb9aa0bd279d60061dbe9e7039f5dc",
"485": "25d0335a0576f974617351ef5aec889f311fc8d7cddb997862b10b2496842d4d",
"486": "93b9a59a937594d2196271416ea3b2221d32b3b40a04bbebbdf97e8bdc557e0a",
"487": "a643c75a8d062b87a1c8635fdf439c04d949ce01f75dde10ab6edba90cbaee77",
"488": "984593c12abbff5d009091cd3c1883c87efc535f760727ed12f06df0902bfa75",
"489": "926ac61244f94e10270a2d40169de025be6db342b3de7f0db33a50b07176c143",
"490": "e2c8142e501b0b0b808d2d36f5f38266f99cd3aaca7d2f70f4bba386ae1d2025",
"491": "1a1c8b472424f8057c94a9f5e0c0b673551fbe9ea4cde5ca2d90df1de76a5c76",
"492": "345a83966ead821efa2a9de93aeb0fd5bd60a8f50e162caae2447f1f4d9462bd",
"493": "ee7018d63b08bc7226d6f77c2345a87e09fc7cc87b0a003aaf3a4a3f622edffd",
"494": "3d69e540997d79f21f249d4d8f73cd75119d81bcfb8bd80782863249f0d7c62c",
"495": "b717f1088b0ce24851c30d54bc8dad9f3ae93402b91c874e385e5c699323a5e2",
"496": "fbe77ec1978ad86e73e5a3f494fa7c198fe334b511298f5a0f2d04d6a7f51d01",
"497": "a4a66d6c7c555a2997ca59a8dbab512388adf20902293a5617132a16df76d954",
"498": "d71813b8175fa2d70181d87ae8f839e79792516a1cfa99a7e6b29500c057617f",
"499": "477d5b817df8c0b6f0928d02a58fc39fde2224493cec89393bd6dc349e5235bf",
"500": "3ac8e26d4864c538936efa7c5920435107a50c01306adaee5a4aeaa2ef378f7d",
"501": "766448b05b248ac3d6e991baa3e4b2d53b02aac426bda312c2299b2b983e145e",
"502": "50218b55f5b7207438137f2b0c71e3f6d37afd76aa5b1f2106111f3432b4cef8",
"503": "1d7c24799a287d42e97dd4ccc5bbd3713ce139e6294896cc5fe2efb80a1be7ad",
"504": "9878db5eb2218b18568dc8cfa13bc8363a1c93e6a59a05cc76da0588fd54af46",
"505": "872fc20275833f09c8aaef277abfe77f67be6bd443b489e0cb8bdf9d4ca9fac7",
"506": "d66834cc7ebe58cce2ee1c02bb11ae69672d711ead6a0a58ab592339cddbf02e",
"507": "ae955394665befbbc89e2ba85b5e520cb293b8d03209b1f71d78ce2cc807a437",
"508": "3917ce4173af47bfaf8525f0917736bde3f4bee0ed5fae721c3e2fa957ab1675",
"509": "2f64571cd71f0e59006da84808abf3d3ccff9a38884321533d448b3e8e3cae05",
"510": "41ce72f4701e786427413b68fb70bd77d921c06648ca15033ce1926a9f1224cb",
"511": "c9fc787389265492e60d5503f279714d5b19760ea7b2e1a720e6fc0251fe087c",
"512": "a8af6acf3744af13cde63540e37bb9bc722ea19a012656e3a3c5bfff8292c423",
"513": "506b90816555d1083be7d211f02a5db364e5c2337fc85b1ba845c1a806689373",
"514": "e4e9536766181eda627721723bfbdbca85859a3ba92d439f58ac0009c102430c",
"515": "16daaa62fa87776bc4843d226988cc83ee846ceef7b885ab63e10789b30071ae",
"516": "44b6de4eb51dd8f762142f284b154d3153592549cdea3b94467fa95484a4f172",
"517": "bb72c6d437197a8c1f1132626b3b47adb9827f4f9b912d1069cfcc75575371b5",
"518": "ff57c1f518651af805bb4b258130c7c5b0726422c3390327217562088785b4ba",
"519": "159b59f1261b7a31d7172cdc28d9515d0731e5117cb30f34a497bc3bd0496da2",
"520": "bdfb7f17c8c841c0b61ee7f00e51f09e4c78c90f7977548b72050a7aa12dfa3f",
"521": "bd27ca9292c19160cbb0568f750b247fbb805b85f4a2316fcf2c3a35d3ae031d",
"522": "98e0ef155297aac8a4060d204614753f26f6ba5357deb78c683783dc7ae30191",
"523": "9bfc344c80d1200fe12bea3ba4cacf8d5ac9693258962f2f15f42b30ce8ef3ef",
"524": "8df22d8716d7ca6354ea42b8e522d286ff9362cfa5881f527efcf1a953ed1151",
"525": "13dc6d869fbe2c3d95f715e55f02bc3d5787874b4c88d7da1d05360afd2025fa",
"526": "dfbe442040ce9afba654773fb14f307d67ab614267d3feb6b18df03182b5b60f",
"527": "ba634833af68fcf0ca7bcb08fa699b2c5fab934eb81ecd85e7464b01bca131ca",
"528": "016f7b569dc1c3466c97754c7dcc0f76c2a32a76c22357cc521bcc330d86daf5",
"529": "4960ff863e3d21a58f9e81c3d94075cb7a4daea5fcf396812382111e462fc57f",
"530": "6a2e45fdfcad65e0ee84d206d59cbac998026d7415d16a5c0b8c55e4a7d6bb3f",
"531": "95ec72fa8c409255d43e7c8d4e957bcb9239534973187b3b4cc2557b09bdba98",
"532": "fee6802490757983c499a08831d9bdc75a9eff08700bd29e8e5c134583ee07b3",
"533": "8a056666bd75d853a12d22b8317042a3f5500cfb21f6698d90ab41e01edcf81d",
"534": "8f65c9feb935e09a04c87143d1b2c63e38f08738199ebcc2758f67ee914d8a48",
"535": "ed8970f8ef1e2374289fc735aedff90b010c311a3b80d16df6bca2d3c250fdeb",
"536": "f82635851b442ec0ee95c5c2b7377ba382aa364cc49ff4e981d509ef324bb356",
"537": "54fc97bb6f3d7c724d4e245df37111c20334972300297fe38b590354fb9dfe92",
"538": "650c7f5f382c295cf6e7fb092db6fdfff164c861bcfcfe1fb38a50268f53f50a",
"539": "0bfb3df290912d8a70dc5e1e2761151cdf2c4b75d4b37c8fdcbed7483ada85fd",
"540": "08f1b2bffa88a9d01eecb8c9da6636b5e668a5478d8876a63ec3a74d7f932205",
"541": "5e59cf440336e86b67c17ed61f7bee7e548c434f475c415294b3b652d1aec606",
"542": "1257b6a3ad900df97f5aabc1e18b9f7ddae8c7d7ad60216ae21b5b7310cbda84",
"543": "8a783bfbe11c7f7b24431a15a0eb582f6fe5f75d1d21a3d55f8d8d81ba6b411c",
"544": "ce93bedef94ffbf62ad449cb0c68e8103a0bd005563ab854daa5e470664b4d7b",
"545": "40c253003d601fd2c90908bffcd8133e77489fe247e74ec03901895318fe69de",
"546": "d40739115f18fee96817266232ff1b8845e7966778fdcc644028fe5c759469be",
"547": "fa32a8de8fdcfc551d808c5dd0ff5545a199027acd32e380959b91f3b3d04643",
"548": "72e66168068b6ffcd2988e24124c8b1dba9a5b52a383a937397575e3c1e3f031",
"549": "a23baaa745a976b4f212836beb81a0a7b42d9f2e923c2412e2c07c63ff660ceb",
"550": "f58ff320639b2c47c76ed8aba487e31da0fd4656c3be6e33807cd00f77456e5d",
"551": "0449ec4d6d5b2e88603e62f3ec0287ed711cff682bbdfe298a197144ab24e80b",
"552": "f125761e8a0d02b17b1dc4be40216f2791727fd4e4bc56f60ebea1925c2fbf36",
"553": "dbb93b2a6cbf972bb1f94d1f8656cd113a09a02cbc44f25737e7d75c986646e1",
"554": "dcfd1e7a4a32ff0fae296b8211b5c9e91ab81844a0308933f598c712c1bc313d",
"555": "cebbca914f917f990202f110e77285132d2a5a3ba9a7475c93e3561d8ba88ea0",
"556": "0d5518ef165979b758fcc8df9c8cf536861f376f8640541ba6112ee7610ed82e",
"557": "0547c86b57c7c8c590f6d7a5131778f5b6ab2eeccc5e819e5fd095a6d4e68b08",
"558": "e763aa1dd494e097251484381ddb057c7d79b739c3f8644b1759e786e12f5b40",
"559": "e48eea4c3b4c9d58fe02739accf31bb64dd9c31623ad4cc06c740463d664c098",
"560": "77a09dc1ea6f1ae669004b8c9429dd83ead1148c62e0d945173edac45d9000a4",
"561": "756d226727e611d4bd22aa33747da2f635eeec070906dbc3262ef29e341e2a6d",
"562": "29de450d6e440c528287b98bcb4b76fb5155ab573df4721467446114661936ed",
"563": "7703d943dbbfdccb90acad65ed7c0eb13a10034ad01809472a55eb3162b7e53b",
"564": "65712c105411e6fc0ed35b9347de8cbaea33b0c5e57cf162f48dc257dd4f05b5",
"565": "2945ef4779089c9e49a9a9f5e2a67ba7e393aa20a955ed9302da6677cb03a9cd",
"566": "95d936e1d454df2e1e7d486c43af387b39a50cb57e57c7712d967bc9ec556f41",
"567": "2abe8af9ee20c6b8ad5034bc31fc1f4f16769595d5b4fc2837db3e76a90ac405",
"568": "fdc104338866e50ae2bffc1ea19719136f639df6c25f38a8680a70e9375a9378",
"569": "25677266de2b900788dfa047cb53f5585c37b564b3a711243fad52e186ec184a",
"570": "9101edb48d98c3742ceb713de591d261b79e90481d28f83f2d2c74d7034f4b46",
"571": "c364dde8cce2080d073eb1f9666cca97ccdeba61b2bf19ca0c84987e6f8d3576",
"572": "9cc3049e9464376b95fb88d6fff4331e0e40196f92a0a9aa1c5d10dfe33079f7",
"573": "2ade73491e183608b340f312d08cfd39c10ecb581c87b873443590452580a43e",
"574": "96325b210d18a7a1d6873e00a859648c4754bd4c91c324aa812ed78bd047118b",
"575": "33942a261a9150e2b5ce2ffe5b934a81f3972cf5aa5a9414a9d5f63f6b55324b",
"576": "7fca01a835681914b5fe5014d5649b5170faf459375ccc2bf9ad71ebaa73940c",
"577": "2bdc7a0e8adacf885c6ea0f6534b935b8a9dd338c5dcff05a74c162c3e9dd531",
"578": "ce6b6de1d907c8839b84f5f3967f6af7e9a3644a0bd7dffe80cfe531de08f8ca",
"579": "03dbff2575902a3c56a64483c8e8ca38d9888f72c6a71a6236eb07b808fb24ab",
"580": "96892003c30358ed55a39e13e6159fad09ebc3916e34492b91d63832fa86f731",
"581": "4fc5533c52133e54f8b54dcbfc4555638ae809676dbfec9d1400ab032f30648d",
"582": "7ba9b154acf699c8a123df5471fd40ad556cf6fc630136c686c87b09c88ff546",
"583": "ec10ea6801eadac9ae8ead5f222e0580f419b67d2ad5cd5c8ac914dcf5cfd69f",
"584": "510001c4104c80517a13f967df6ee071f15fb7b65e97229bc91b2925cbe4e93e",
"585": "ced737da53940337c5dff81720024fbaf4cee38aed1d3514d2a75c7b1271acf8",
"586": "9ab074d1d480d718930c9abac8b616a0bc5c30846381d6d9bce1741e9bca1991",
"587": "ec3fdcd8136188e3b476270894351cdc05dc44a4df50d1c4ed727294fb89430f",
"588": "31400607f95129fcc531604b7b0478a748d2495746280dc07ff30e39cd6f4a97",
"589": "3051de9b2a7ced941140aa1074952029f532e133beb41c18bfd990f43bfbd9ae",
"590": "4af295f83800334d77a04d56be7524ff6241e3d8b2f23820c9c54580b7996086",
"591": "2ecf2c1ab8d9e5cef5224842732af17bd2259598e4363e1d46cb172dccc39022",
"592": "2e71a26370d45781f31ede0c7810c2705706ce63291a52d5cd6f060ae16aeb01",
"593": "423867f77b64f725f823204796301ae09b427190cdbb62d472bc1395507da9a2",
"594": "6c28830e35913c59000dfce4432db255f7dd34809285881f05a9e9749f5d8452",
"595": "53fc00ae32e0b0d701175ac17ac0b91e05859ae6d7f3e5bf0548dad36e3d68f9",
"596": "9ccbee33387383d458e7ffa2c9c0cb9e4f5bbe3d1b949463a98232ae67d29956",
"597": "921102754e24e8ba99480e77652d88764020202e6dcd67adddbb1660204e8e78",
"598": "430f975f490ce37df74bc346556cb2186f7a47a58d3b282ab42f35b33a812f7c",
"599": "b603988248769444a1566b058ef3660cac528086b8193efd6d0be4080b834780",
"600": "dd539cd38fade63aa0d14899c7c75ff459ab839148b15b4efacd4bdfa0408dae",
"601": "571c5ade4cd89b460b7d2568a44d1efb05e2927ec840d8ecf149dc9e0ff09734",
"602": "edef32d6c2c7193b4b30a0e2c7d3ab37e0ec21db62543f4bf78169b683792e41",
"603": "cce7491b7ddf0e3ebde191e0e57614e61602cfaa2b52be5c2d657d9ae5e1f1b1",
"604": "d08fe0e5c0fc10640043f9d645446e23fa8efbfdf29c93c87794e5b6405ff51e",
"605": "1bdd74af73e2434db6149fd8089bd294defe3cedfaaf92f532568ddc6c48e2ea",
"606": "30e44b49f18048323d1c1bf4631587df8f0dbd477ebc79b7ef860a792953d932",
"607": "2d9b6a1b4810a39471e5dae85eadf595fc108097eeda746c8925a7be057464de",
"608": "cd3fdc5ee5b6e606349b9e5775d6e632e0424d6190f632632bd7435d5622b20d",
"609": "8b86933e27e64e6840bedc8087fa31326d9527a424c63ecc61823894c81f867d",
"610": "a781fd7cb6970e8f6f679296be5bb0fe7ea62207caa7ce86635257186a5a70d9",
"611": "4a3a0b9877d68deb8d7db624ec2d7f4b1c467fe337f803a220292ac6131acc05",
"612": "6e95bb170c3a521fc7befa446cad879a36b7b3d0e0e8eab1df6ddbd753156ab7",
"613": "afe8c7002c5e15859be829b4b69f0da00c1298971d5afa469b050016fc021978",
"614": "f85495a58ad9d5c4d16167084bbc3581ea22e6dfc39423b70d7fe486e316d951",
"615": "8da9fc3356df220081c71ccfc9c67251e6dd7058fb11258ecfc88ea9b8c00c92",
"616": "0fadf4975e2c27aae12447e080505d604258102f61c8667a5c2594ee033567e8",
"617": "06d9e8723de7ffd20129f1d8b5993926a97cad1261dc0cf01a37d8fa728ee996",
"618": "04d0dc62694f26c61871d8129259540884ad2296a3cf455f6b92fc911f98c336",
"619": "a93d0ec83cbbd4ec0866c97b372e4374a9d6724cc3767f5230e8316734cbb0eb",
"620": "071da5dc1dd87c2558b45247c29a92092bc5a00ef3cd46d70d08e18b791d2926",
"621": "458ca388a6b74c57ae13d1233984d5b66abb1f18dbfa12aa14ba868a9b5a708d",
"622": "1ad0227dc5f8c259ada5120d9db05ac7a013bd1bd84cbbca2f0ae6b174dac129",
"623": "d82d0401e10767b022417dbb64d348bc6c03ed4bb7e4553493e8d9e65525d229",
"624": "1d25005c86a9635d3483ea63ce95fa097f95792ebab86319c12bc66ea1d2ac83",
"625": "3fc397ed884cabc16bf30bb7487c8211424a08279a166d4fa4da6dc151a02cd1",
"626": "7c42e09e504cb269512dae989ee7fffe1f3bfea499c990e8edea796761331ccb",
"627": "5062b75aa39c974a579b0a3360c4da32e481d2242de72106f651c7d7de631cf1",
"628": "dc656eef13928f18d14a9265be6a923bc7d76048b861cdf1523e397801a8ef52",
"629": "9eefedc5b5995658be337f48146e37020db4ed3bb61e2af1fc57f698bd398b0d",
"630": "6e17ecc4a4d07ffbd67c49a59d31b7efeabd3bfead49fdf1ec005836e6030ebf",
"631": "781372694518c122f62566aec8867772e492fefef32c00e24b5604297dc1d44c",
"632": "c978055ae1d71dfdfd8bb4e845bb82fc4211b14560bf6001edefa4367e1d4403",
"633": "af4ff4b546369974642b3f68d4d3e90f0a0496b3b5d1572b638378fb49c7b4fa",
"634": "f6af89331ee087a2fc03e0bddd738e2716b49ed616ceb3b47743cf3806c6d8c2",
"635": "e4251ea6989571d8b83993560b537b7a9d0777ba54e6941757580cbfc14aab5f",
"636": "dc15b5ccabd8fd3141c244b7dbc6fe95078299ea3ce3016cbb483893fcdd4236",
"637": "053571be83ed06ab23a96d4e8fa129a4ce7e740de17dc35b000fb56c35a5ab80",
"638": "df57e1f418a24e38b39011048084c6b5cc91a56c1deb643ab605e0350f329b4b",
"639": "56930902baea90d1a8e505a227e5d7ac4da6b60f6c370ab75a0011cb3746818f",
"640": "c105d171242fa8e35f26491ba2f932d1577dfab2a4a6e75034ae69f062e8aa71",
"641": "0f6c3873a87ce630accf7f3b19feb764aac3fa0c3933042a817a82e6a9963aea",
"642": "c20081830b70a00d1bcc6f4b6572d511d534986c10ea3c057db304a1f26df2da",
"643": "143de023a92c7c8ce5fc0b839644e897267c44c8ba4e715743dc99686415a8b5",
"644": "7a8c9f1db1b1bce9a3b8d91e5b1a39a92a478029d975f5e45d593b7ca81a7134",
"645": "932a51e4c0cc5e30041ca5db1fd0674820638563a9df1300bece7df12c23017e",
"646": "a49cbd2966ea8248816b0a53b6eedea4aef2525aac45272b862d7d52e604625a",
"647": "40ad98735f3b5417ea1916f6146b69b7659963263caed186abf0790de0d9dae9",
"648": "53b288529a83c376f2399e986e5ca25c5993a6640063386fdb2de491afba2e81",
"649": "c0bebda0473186148087feb9828a418ab8d50726a1ff5c39ec69c4a6232c6b67",
"650": "98ac68d2bc42f89dbe97b3392ac691ed6c2c4f36a44665555bf7f816ca97cd27",
"651": "81f8287532f504b4f4a21e6d6ed573845bff197c479fb52e4c5b6f2fc1cfc40f",
"652": "fa32d8e7c1c766a6126b0f1cdd9d752cad55f54d0c05839e89d4da238615d9ed",
"653": "311cec39a42837f803ce8cfa5e6df32cc27fe541de108e3e7cf7ba3242e414ee",
"654": "f2b3d205c2da66cdf9a596e2caa1098132b832758eea2b14da071b8dd9584ec9",
"655": "39517ea688972769cccd46ad15b4f06ac2a6175d053dc97f849fa11a63a163e8",
"656": "2a21e5e89d9b019c1195c50af7c6e1864cbab05068d10e11519fb6d4766ceae5",
"657": "bbf54db41dc18753a3caa5001aae99c0c998e8a07b6e7390932054d7882498e3",
"658": "ca8e7b53b095939e5fafefa56e9b45b40c396145acf2a767f9f2430fbba75a79",
"659": "3d6c8492fbfe1c76e3f9d66485a7447489b89763623127deb6ed327a0c2a011b",
"660": "23d754ebe35981ad5de850f66bb2294a22280a8ad0b4160b1c29dfb5487505d9",
"661": "fd7eaca9690ee0384770e855ed600c96080c5c23565bfdae01c6045a87d9550a",
"662": "b93bc0a52860ee0a1fdc28adeca7b39288b1119e0f318467f0a193236e00f99c",
"663": "f73e3335b21c11b78987deb5a6eace1cef327981322f53a070adfbe31b56e7d0",
"664": "f3f0de955603850bd411690d11a5391e63f515a29e31e9241c66c62d688bcf72",
"665": "f2650e75f39098e5a114077b6e07bc15325adce22e1ab4b20569a4eeda5c6ca6",
"666": "a01a34d1c29aff5618a96046605adb74fa49b834975051d4ac82672567727a21",
"667": "2db51646a4038b38c88512738f79bb21776d39c7bfa3086538cccba0b63024db",
"668": "2f3336b7f1211fcc180cd76dc6442fecb412771aa45ef1a7675aa437d04e582b",
"669": "28bf022d827392eff1ec8ec121767ec24778f1b69da8605b4ab059023b8ad28a",
"670": "38db5dbb2a3ce2d31d1958f5b3ca4c3555eb0ac4193ebffa3f42ffd6bb4806e3",
"671": "0580cf2ef8abd3afbf91fba2032c2d51e43306bebb7f979bb750c3d7bd14c961",
"672": "394f1b74ccfac5a4fa958d813b5932371c5f8c2f3dbd1eb7202af2223aa08afb",
"673": "61c90400cd197b8ea6d7de90fcd1af0959fc37625fe163363fdae0ac4a724bfd",
"674": "6037c38f696b10fb531c26396890cd3b48d5408c5b37e61d03a72ae2f7b64ed6",
"675": "39c8b1bd1d534381b811bd8050e54b753106c1bfaf5d3cc63d8fe92a94471915",
"676": "346d1e2de9915fa2f4ce3675ccebadccb8e9d14239f1e53b6d08d09f5c26297d",
"677": "36841bba8f77d669e9d8f4e09ec580ce2c7a36c37da719815e65cc641eb1fdeb",
"678": "09532ddbaffb710f02964e270f8658bd8a09149744726a82f618708b35a5fa26",
"679": "774f8d6f89a5875342b21e8337aa5e3ab0539960a5b42554bc8d8a0fffce7d65",
"680": "48d62baa62c2a9c561612192ec39a7dbcecc8badadc0ddc755191648597a42f9",
"681": "7adc09dd86f3e73979d9f8a4b3232102ca52bc41d043614fe989cd908ed88c76",
"682": "522f0ff3ae2f1761dca78207dec1c9b52556eba2db7503ca03441abf62f65c76",
"683": "376e3c3e4b88ee76cb0f02390751a7248fcf1562013b1390b1e276a3f3d7da63",
"684": "6363f306f081683781908acd4bedd92b3a75c796243cdacadc4b9896d8cfaaaa",
"685": "29f2c4c5325cf626b392a910e6e22b6d2a989bfbb38439c20162b7b786b5e2f8",
"686": "990ae3583a1f7a32b7581a8ace626511c812e0bd910b8064fefb31e625b9c22d",
"687": "7e78b4b91851b976f5cc2a1341b9389ae7bdd0248ae7f7c53e7ebb2d86bbc73c",
"688": "1ada92e769892b4bb540d75cbf40017a24b5b309b28a097ed62eb7f2727518e7",
"689": "17a0ba5b100d0a92f3f82e2e6f31c71a6ca53a0f043094a6419331e22036150b",
"690": "f9658a8f0687d69f420f655c500304c3c0888f298a68075ab6a2165a3bc47c53",
"691": "3ff8aa53eb2f7e700fdc7cb838ca7f7b495948bb997ef70d196c10592fa64680",
"692": "c01c3e579b2743866cd3d0c1d9039871356143a99c572593d2702f387e9f629f",
"693": "c08e2dd3686459c2989cd6a367d2cc64b2bc2af460417102e9856e91b5f78fa4",
"694": "063e59bfd9cbed08afa508954ac9c1c313b80331d6a917fd2202e15e1eeb00e9",
"695": "c3259eeed96a5837a6630fd9d1245de7c77e10d0733b6129a3dc99548bd92800",
"696": "9ab20a4d8c3c0de897a1c8afa95733d0f7f79870c6379064ef4cf1f5baae67e6",
"697": "62c07adf4da24a20a723f6c32e35a51f2b942e363dc9fa35070e34991a5a9c1d",
"698": "632f1a4eba12f5c80401d82c4bad7c5679f55ccc89bf2da3e3930ff3d6671ba1",
"699": "8c40c5c92fad7ed2774080ddd39f62cdc94ca05dde4273344497ab4206499484",
"700": "3dccee8e873d2c9c2f8359417e666b702f97b60b90b229e3c41190909ff9388b",
"701": "65a57fc7ebcdab77821276a1eba1c1a625bf2bae575b025359de492592ded205",
"702": "c1b0ade78aadbf0d5576489c2200439ef825fe74452115edbc908e9ff955efc0",
"703": "1e5ea7fffdcdbca5fc91694b200db8e2e3737e829b7694e4dcf3b937b41be330",
"704": "9ddf38880f294ac1a759c764c394cacd4635735880f326a0b5e4a896e4fdce8c",
"705": "2bb033d9eeb9157fc6ae835e99b9523bfb1d61173cfb34941cbfdc4c0d3ea67e",
"706": "51a0e8daacbd6537efd583c48c5815a9bd22fef0eb9b8e15dbe2ee87c76e2a6b",
"707": "9f50d3b52dc4ebae279c6f6021258ca8cd60b8cd13e358f29a2879caa390a774",
"708": "42e0a9be7737aaab1fd27543c0273f4c97dd3bd6471e6ec04b1fc7b79542db71",
"709": "ac2605c16873ea2b5f0ce5008089a55e37588f45313ad06ccc7dfd96f407eb8a",
"710": "09214942caed4184e7155b4016b1e0de37c0a142deaebee3879c770438a28276",
"711": "8d8ea19a78bcb10e502f91a057bac1b200ab17db66e11cdf42b63ec65a8e6c18",
"712": "001493340cc232a48125f958308be6d0567ff2684e0625e55af8b0a024c4ccca",
"713": "98a124df4ffa11cca86fbd959f4d091665fc871a4a86cc1024429d1c116b556e",
"714": "cd175b00873a9a3369c628861c1f20df57a4ca75074530ebf5b974d04b8b93c4",
"715": "cdb954d8620ad2d95915f94243cdcf71170cfc363334b2f831544f55f0d15746",
"716": "abb62293fb9df9bc7a6e80ea24f0da1049f894ade937367e24563a3277f953ef",
"717": "319369720bf1831be4c73600c26f5d08dcf6cf85fd32340c28263e39c1dda5e6",
"718": "412ce061b1ae228d2226fdb3bf2cb68421870465d6a8cf7ae58515c02fe54684",
"719": "c461587d4f3a41c375628e94fb9f971cc2829b8608d3c7aca840e62a6c8f1929",
"720": "3651d0d1f023c90e42be5c6ccf28ca71203d1c67d85249323d35db28f146786f",
"721": "8430fc43038ba44efb6e9ecbd5aa3dfeaeaf73f2d04a2d5596855c7de5de9c20",
"722": "9687101dfe209fd65f57a10603baa38ba83c9152e43a8b802b96f1e07f568e0e",
"723": "74832787e7d4e0cb7991256c8f6d02775dffec0684de234786f25f898003f2de",
"724": "fa05e2b497e7eafa64574017a4c45aadef6b163d907b03d63ba3f4021096d329",
"725": "005c873563f51bbebfdb1f8dbc383259e9a98e506bc87ae8d8c9044b81fc6418"
}
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| a b 20
a c 18
a d 22
a e 26
b c 10
b d 11
b e 12
c d 23
c e 24
d e 40
| a b 20
a c 18
a d 22
a e 26
b c 10
b d 11
b e 12
c d 23
c e 24
d e 40
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
Project Euler Problem 87: https://projecteuler.net/problem=87
The smallest number expressible as the sum of a prime square, prime cube, and prime
fourth power is 28. In fact, there are exactly four numbers below fifty that can be
expressed in such a way:
28 = 2^2 + 2^3 + 2^4
33 = 3^2 + 2^3 + 2^4
49 = 5^2 + 2^3 + 2^4
47 = 2^2 + 3^3 + 2^4
How many numbers below fifty million can be expressed as the sum of a prime square,
prime cube, and prime fourth power?
"""
def solution(limit: int = 50000000) -> int:
"""
Return the number of integers less than limit which can be expressed as the sum
of a prime square, prime cube, and prime fourth power.
>>> solution(50)
4
"""
ret = set()
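    # 2^3 + 2^4 = 24 is the smallest possible cube-plus-fourth-power part,
    # so only primes up to sqrt(limit - 24) can contribute the square term.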
prime_square_limit = int((limit - 24) ** (1 / 2))
primes = set(range(3, prime_square_limit + 1, 2))
primes.add(2)
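    # Sieve of Eratosthenes over the odd candidates: strike out composite
    # multiples so that `primes` ends up holding only primes.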
for p in range(3, prime_square_limit + 1, 2):
if p not in primes:
continue
primes.difference_update(set(range(p * p, prime_square_limit + 1, p)))
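    # Try every square + cube + fourth-power combination, breaking early once
    # the running sum can no longer stay below the limit
    # (2^4 = 16 is the smallest fourth power that could still be added).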
for prime1 in primes:
square = prime1 * prime1
for prime2 in primes:
cube = prime2 * prime2 * prime2
if square + cube >= limit - 16:
break
for prime3 in primes:
tetr = prime3 * prime3 * prime3 * prime3
total = square + cube + tetr
if total >= limit:
break
ret.add(total)
return len(ret)
if __name__ == "__main__":
print(f"{solution() = }")
| """
Project Euler Problem 87: https://projecteuler.net/problem=87
The smallest number expressible as the sum of a prime square, prime cube, and prime
fourth power is 28. In fact, there are exactly four numbers below fifty that can be
expressed in such a way:
28 = 2^2 + 2^3 + 2^4
33 = 3^2 + 2^3 + 2^4
49 = 5^2 + 2^3 + 2^4
47 = 2^2 + 3^3 + 2^4
How many numbers below fifty million can be expressed as the sum of a prime square,
prime cube, and prime fourth power?
"""
def solution(limit: int = 50000000) -> int:
"""
Return the number of integers less than limit which can be expressed as the sum
of a prime square, prime cube, and prime fourth power.
>>> solution(50)
4
"""
ret = set()
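    # 2^3 + 2^4 = 24 is the smallest possible cube-plus-fourth-power part,
    # so only primes up to sqrt(limit - 24) can contribute the square term.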
prime_square_limit = int((limit - 24) ** (1 / 2))
primes = set(range(3, prime_square_limit + 1, 2))
primes.add(2)
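    # Sieve of Eratosthenes over the odd candidates: strike out composite
    # multiples so that `primes` ends up holding only primes.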
for p in range(3, prime_square_limit + 1, 2):
if p not in primes:
continue
primes.difference_update(set(range(p * p, prime_square_limit + 1, p)))
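    # Try every square + cube + fourth-power combination, breaking early once
    # the running sum can no longer stay below the limit
    # (2^4 = 16 is the smallest fourth power that could still be added).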
for prime1 in primes:
square = prime1 * prime1
for prime2 in primes:
cube = prime2 * prime2 * prime2
if square + cube >= limit - 16:
break
for prime3 in primes:
tetr = prime3 * prime3 * prime3 * prime3
total = square + cube + tetr
if total >= limit:
break
ret.add(total)
return len(ret)
if __name__ == "__main__":
print(f"{solution() = }")
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
Project Euler Problem 5: https://projecteuler.net/problem=5
Smallest multiple
2520 is the smallest number that can be divided by each of the numbers
from 1 to 10 without any remainder.
What is the smallest positive number that is _evenly divisible_ by all
of the numbers from 1 to 20?
References:
- https://en.wiktionary.org/wiki/evenly_divisible
- https://en.wikipedia.org/wiki/Euclidean_algorithm
- https://en.wikipedia.org/wiki/Least_common_multiple
"""
def greatest_common_divisor(x: int, y: int) -> int:
"""
Euclidean Greatest Common Divisor algorithm
>>> greatest_common_divisor(0, 0)
0
>>> greatest_common_divisor(23, 42)
1
>>> greatest_common_divisor(15, 33)
3
>>> greatest_common_divisor(12345, 67890)
15
"""
return x if y == 0 else greatest_common_divisor(y, x % y)
def lcm(x: int, y: int) -> int:
"""
Least Common Multiple.
Using the property that lcm(a, b) * greatest_common_divisor(a, b) = a*b
>>> lcm(3, 15)
15
>>> lcm(1, 27)
27
>>> lcm(13, 27)
351
>>> lcm(64, 48)
192
"""
return (x * y) // greatest_common_divisor(x, y)
def solution(n: int = 20) -> int:
"""
Returns the smallest positive number that is evenly divisible (divisible
with no remainder) by all of the numbers from 1 to n.
>>> solution(10)
2520
>>> solution(15)
360360
>>> solution(22)
232792560
"""
g = 1
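    # Folding lcm over 1..n keeps the smallest number divisible by every value so far.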
for i in range(1, n + 1):
g = lcm(g, i)
return g
if __name__ == "__main__":
print(f"{solution() = }")
| """
Project Euler Problem 5: https://projecteuler.net/problem=5
Smallest multiple
2520 is the smallest number that can be divided by each of the numbers
from 1 to 10 without any remainder.
What is the smallest positive number that is _evenly divisible_ by all
of the numbers from 1 to 20?
References:
- https://en.wiktionary.org/wiki/evenly_divisible
- https://en.wikipedia.org/wiki/Euclidean_algorithm
- https://en.wikipedia.org/wiki/Least_common_multiple
"""
def greatest_common_divisor(x: int, y: int) -> int:
"""
Euclidean Greatest Common Divisor algorithm
>>> greatest_common_divisor(0, 0)
0
>>> greatest_common_divisor(23, 42)
1
>>> greatest_common_divisor(15, 33)
3
>>> greatest_common_divisor(12345, 67890)
15
"""
return x if y == 0 else greatest_common_divisor(y, x % y)
def lcm(x: int, y: int) -> int:
"""
Least Common Multiple.
Using the property that lcm(a, b) * greatest_common_divisor(a, b) = a*b
>>> lcm(3, 15)
15
>>> lcm(1, 27)
27
>>> lcm(13, 27)
351
>>> lcm(64, 48)
192
"""
return (x * y) // greatest_common_divisor(x, y)
def solution(n: int = 20) -> int:
"""
Returns the smallest positive number that is evenly divisible (divisible
with no remainder) by all of the numbers from 1 to n.
>>> solution(10)
2520
>>> solution(15)
360360
>>> solution(22)
232792560
"""
g = 1
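    # Folding lcm over 1..n keeps the smallest number divisible by every value so far.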
for i in range(1, n + 1):
g = lcm(g, i)
return g
if __name__ == "__main__":
print(f"{solution() = }")
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """ Convert between different units of temperature """
def celsius_to_fahrenheit(celsius: float, ndigits: int = 2) -> float:
"""
Convert a given value from Celsius to Fahrenheit and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Celsius
Wikipedia reference: https://en.wikipedia.org/wiki/Fahrenheit
>>> celsius_to_fahrenheit(273.354, 3)
524.037
>>> celsius_to_fahrenheit(273.354, 0)
524.0
>>> celsius_to_fahrenheit(-40.0)
-40.0
>>> celsius_to_fahrenheit(-20.0)
-4.0
>>> celsius_to_fahrenheit(0)
32.0
>>> celsius_to_fahrenheit(20)
68.0
>>> celsius_to_fahrenheit("40")
104.0
>>> celsius_to_fahrenheit("celsius")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'celsius'
"""
return round((float(celsius) * 9 / 5) + 32, ndigits)
def celsius_to_kelvin(celsius: float, ndigits: int = 2) -> float:
"""
Convert a given value from Celsius to Kelvin and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Celsius
Wikipedia reference: https://en.wikipedia.org/wiki/Kelvin
>>> celsius_to_kelvin(273.354, 3)
546.504
>>> celsius_to_kelvin(273.354, 0)
547.0
>>> celsius_to_kelvin(0)
273.15
>>> celsius_to_kelvin(20.0)
293.15
>>> celsius_to_kelvin("40")
313.15
>>> celsius_to_kelvin("celsius")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'celsius'
"""
return round(float(celsius) + 273.15, ndigits)
def celsius_to_rankine(celsius: float, ndigits: int = 2) -> float:
"""
Convert a given value from Celsius to Rankine and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Celsius
Wikipedia reference: https://en.wikipedia.org/wiki/Rankine_scale
>>> celsius_to_rankine(273.354, 3)
983.707
>>> celsius_to_rankine(273.354, 0)
984.0
>>> celsius_to_rankine(0)
491.67
>>> celsius_to_rankine(20.0)
527.67
>>> celsius_to_rankine("40")
563.67
>>> celsius_to_rankine("celsius")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'celsius'
"""
return round((float(celsius) * 9 / 5) + 491.67, ndigits)
def fahrenheit_to_celsius(fahrenheit: float, ndigits: int = 2) -> float:
"""
Convert a given value from Fahrenheit to Celsius and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Fahrenheit
Wikipedia reference: https://en.wikipedia.org/wiki/Celsius
>>> fahrenheit_to_celsius(273.354, 3)
134.086
>>> fahrenheit_to_celsius(273.354, 0)
134.0
>>> fahrenheit_to_celsius(0)
-17.78
>>> fahrenheit_to_celsius(20.0)
-6.67
>>> fahrenheit_to_celsius(40.0)
4.44
>>> fahrenheit_to_celsius(60)
15.56
>>> fahrenheit_to_celsius(80)
26.67
>>> fahrenheit_to_celsius("100")
37.78
>>> fahrenheit_to_celsius("fahrenheit")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'fahrenheit'
"""
return round((float(fahrenheit) - 32) * 5 / 9, ndigits)
def fahrenheit_to_kelvin(fahrenheit: float, ndigits: int = 2) -> float:
"""
Convert a given value from Fahrenheit to Kelvin and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Fahrenheit
Wikipedia reference: https://en.wikipedia.org/wiki/Kelvin
>>> fahrenheit_to_kelvin(273.354, 3)
407.236
>>> fahrenheit_to_kelvin(273.354, 0)
407.0
>>> fahrenheit_to_kelvin(0)
255.37
>>> fahrenheit_to_kelvin(20.0)
266.48
>>> fahrenheit_to_kelvin(40.0)
277.59
>>> fahrenheit_to_kelvin(60)
288.71
>>> fahrenheit_to_kelvin(80)
299.82
>>> fahrenheit_to_kelvin("100")
310.93
>>> fahrenheit_to_kelvin("fahrenheit")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'fahrenheit'
"""
return round(((float(fahrenheit) - 32) * 5 / 9) + 273.15, ndigits)
def fahrenheit_to_rankine(fahrenheit: float, ndigits: int = 2) -> float:
"""
Convert a given value from Fahrenheit to Rankine and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Fahrenheit
Wikipedia reference: https://en.wikipedia.org/wiki/Rankine_scale
>>> fahrenheit_to_rankine(273.354, 3)
733.024
>>> fahrenheit_to_rankine(273.354, 0)
733.0
>>> fahrenheit_to_rankine(0)
459.67
>>> fahrenheit_to_rankine(20.0)
479.67
>>> fahrenheit_to_rankine(40.0)
499.67
>>> fahrenheit_to_rankine(60)
519.67
>>> fahrenheit_to_rankine(80)
539.67
>>> fahrenheit_to_rankine("100")
559.67
>>> fahrenheit_to_rankine("fahrenheit")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'fahrenheit'
"""
return round(float(fahrenheit) + 459.67, ndigits)
def kelvin_to_celsius(kelvin: float, ndigits: int = 2) -> float:
"""
Convert a given value from Kelvin to Celsius and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Kelvin
Wikipedia reference: https://en.wikipedia.org/wiki/Celsius
>>> kelvin_to_celsius(273.354, 3)
0.204
>>> kelvin_to_celsius(273.354, 0)
0.0
>>> kelvin_to_celsius(273.15)
0.0
>>> kelvin_to_celsius(300)
26.85
>>> kelvin_to_celsius("315.5")
42.35
>>> kelvin_to_celsius("kelvin")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'kelvin'
"""
return round(float(kelvin) - 273.15, ndigits)
def kelvin_to_fahrenheit(kelvin: float, ndigits: int = 2) -> float:
"""
Convert a given value from Kelvin to Fahrenheit and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Kelvin
Wikipedia reference: https://en.wikipedia.org/wiki/Fahrenheit
>>> kelvin_to_fahrenheit(273.354, 3)
32.367
>>> kelvin_to_fahrenheit(273.354, 0)
32.0
>>> kelvin_to_fahrenheit(273.15)
32.0
>>> kelvin_to_fahrenheit(300)
80.33
>>> kelvin_to_fahrenheit("315.5")
108.23
>>> kelvin_to_fahrenheit("kelvin")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'kelvin'
"""
return round(((float(kelvin) - 273.15) * 9 / 5) + 32, ndigits)
def kelvin_to_rankine(kelvin: float, ndigits: int = 2) -> float:
"""
Convert a given value from Kelvin to Rankine and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Kelvin
Wikipedia reference: https://en.wikipedia.org/wiki/Rankine_scale
>>> kelvin_to_rankine(273.354, 3)
492.037
>>> kelvin_to_rankine(273.354, 0)
492.0
>>> kelvin_to_rankine(0)
0.0
>>> kelvin_to_rankine(20.0)
36.0
>>> kelvin_to_rankine("40")
72.0
>>> kelvin_to_rankine("kelvin")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'kelvin'
"""
return round((float(kelvin) * 9 / 5), ndigits)
def rankine_to_celsius(rankine: float, ndigits: int = 2) -> float:
"""
Convert a given value from Rankine to Celsius and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Rankine_scale
Wikipedia reference: https://en.wikipedia.org/wiki/Celsius
>>> rankine_to_celsius(273.354, 3)
-121.287
>>> rankine_to_celsius(273.354, 0)
-121.0
>>> rankine_to_celsius(273.15)
-121.4
>>> rankine_to_celsius(300)
-106.48
>>> rankine_to_celsius("315.5")
-97.87
>>> rankine_to_celsius("rankine")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'rankine'
"""
return round((float(rankine) - 491.67) * 5 / 9, ndigits)
def rankine_to_fahrenheit(rankine: float, ndigits: int = 2) -> float:
"""
Convert a given value from Rankine to Fahrenheit and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Rankine_scale
Wikipedia reference: https://en.wikipedia.org/wiki/Fahrenheit
>>> rankine_to_fahrenheit(273.15)
-186.52
>>> rankine_to_fahrenheit(300)
-159.67
>>> rankine_to_fahrenheit("315.5")
-144.17
>>> rankine_to_fahrenheit("rankine")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'rankine'
"""
return round(float(rankine) - 459.67, ndigits)
def rankine_to_kelvin(rankine: float, ndigits: int = 2) -> float:
"""
Convert a given value from Rankine to Kelvin and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Rankine_scale
Wikipedia reference: https://en.wikipedia.org/wiki/Kelvin
>>> rankine_to_kelvin(0)
0.0
>>> rankine_to_kelvin(20.0)
11.11
>>> rankine_to_kelvin("40")
22.22
>>> rankine_to_kelvin("rankine")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'rankine'
"""
return round((float(rankine) * 5 / 9), ndigits)
def reaumur_to_kelvin(reaumur: float, ndigits: int = 2) -> float:
"""
Convert a given value from reaumur to Kelvin and round it to 2 decimal places.
Reference:- http://www.csgnetwork.com/temp2conv.html
>>> reaumur_to_kelvin(0)
273.15
>>> reaumur_to_kelvin(20.0)
298.15
>>> reaumur_to_kelvin(40)
323.15
>>> reaumur_to_kelvin("reaumur")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'reaumur'
"""
return round((float(reaumur) * 1.25 + 273.15), ndigits)
def reaumur_to_fahrenheit(reaumur: float, ndigits: int = 2) -> float:
"""
Convert a given value from reaumur to fahrenheit and round it to 2 decimal places.
Reference:- http://www.csgnetwork.com/temp2conv.html
>>> reaumur_to_fahrenheit(0)
32.0
>>> reaumur_to_fahrenheit(20.0)
77.0
>>> reaumur_to_fahrenheit(40)
122.0
>>> reaumur_to_fahrenheit("reaumur")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'reaumur'
"""
return round((float(reaumur) * 2.25 + 32), ndigits)
def reaumur_to_celsius(reaumur: float, ndigits: int = 2) -> float:
"""
Convert a given value from reaumur to celsius and round it to 2 decimal places.
Reference:- http://www.csgnetwork.com/temp2conv.html
>>> reaumur_to_celsius(0)
0.0
>>> reaumur_to_celsius(20.0)
25.0
>>> reaumur_to_celsius(40)
50.0
>>> reaumur_to_celsius("reaumur")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'reaumur'
"""
return round((float(reaumur) * 1.25), ndigits)
def reaumur_to_rankine(reaumur: float, ndigits: int = 2) -> float:
"""
Convert a given value from reaumur to rankine and round it to 2 decimal places.
Reference:- http://www.csgnetwork.com/temp2conv.html
>>> reaumur_to_rankine(0)
491.67
>>> reaumur_to_rankine(20.0)
536.67
>>> reaumur_to_rankine(40)
581.67
>>> reaumur_to_rankine("reaumur")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'reaumur'
"""
return round((float(reaumur) * 2.25 + 32 + 459.67), ndigits)
if __name__ == "__main__":
import doctest
doctest.testmod()
| """ Convert between different units of temperature """
def celsius_to_fahrenheit(celsius: float, ndigits: int = 2) -> float:
"""
Convert a given value from Celsius to Fahrenheit and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Celsius
Wikipedia reference: https://en.wikipedia.org/wiki/Fahrenheit
>>> celsius_to_fahrenheit(273.354, 3)
524.037
>>> celsius_to_fahrenheit(273.354, 0)
524.0
>>> celsius_to_fahrenheit(-40.0)
-40.0
>>> celsius_to_fahrenheit(-20.0)
-4.0
>>> celsius_to_fahrenheit(0)
32.0
>>> celsius_to_fahrenheit(20)
68.0
>>> celsius_to_fahrenheit("40")
104.0
>>> celsius_to_fahrenheit("celsius")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'celsius'
"""
return round((float(celsius) * 9 / 5) + 32, ndigits)
def celsius_to_kelvin(celsius: float, ndigits: int = 2) -> float:
"""
Convert a given value from Celsius to Kelvin and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Celsius
Wikipedia reference: https://en.wikipedia.org/wiki/Kelvin
>>> celsius_to_kelvin(273.354, 3)
546.504
>>> celsius_to_kelvin(273.354, 0)
547.0
>>> celsius_to_kelvin(0)
273.15
>>> celsius_to_kelvin(20.0)
293.15
>>> celsius_to_kelvin("40")
313.15
>>> celsius_to_kelvin("celsius")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'celsius'
"""
return round(float(celsius) + 273.15, ndigits)
def celsius_to_rankine(celsius: float, ndigits: int = 2) -> float:
"""
Convert a given value from Celsius to Rankine and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Celsius
Wikipedia reference: https://en.wikipedia.org/wiki/Rankine_scale
>>> celsius_to_rankine(273.354, 3)
983.707
>>> celsius_to_rankine(273.354, 0)
984.0
>>> celsius_to_rankine(0)
491.67
>>> celsius_to_rankine(20.0)
527.67
>>> celsius_to_rankine("40")
563.67
>>> celsius_to_rankine("celsius")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'celsius'
"""
return round((float(celsius) * 9 / 5) + 491.67, ndigits)
def fahrenheit_to_celsius(fahrenheit: float, ndigits: int = 2) -> float:
"""
Convert a given value from Fahrenheit to Celsius and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Fahrenheit
Wikipedia reference: https://en.wikipedia.org/wiki/Celsius
>>> fahrenheit_to_celsius(273.354, 3)
134.086
>>> fahrenheit_to_celsius(273.354, 0)
134.0
>>> fahrenheit_to_celsius(0)
-17.78
>>> fahrenheit_to_celsius(20.0)
-6.67
>>> fahrenheit_to_celsius(40.0)
4.44
>>> fahrenheit_to_celsius(60)
15.56
>>> fahrenheit_to_celsius(80)
26.67
>>> fahrenheit_to_celsius("100")
37.78
>>> fahrenheit_to_celsius("fahrenheit")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'fahrenheit'
"""
return round((float(fahrenheit) - 32) * 5 / 9, ndigits)
def fahrenheit_to_kelvin(fahrenheit: float, ndigits: int = 2) -> float:
"""
Convert a given value from Fahrenheit to Kelvin and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Fahrenheit
Wikipedia reference: https://en.wikipedia.org/wiki/Kelvin
>>> fahrenheit_to_kelvin(273.354, 3)
407.236
>>> fahrenheit_to_kelvin(273.354, 0)
407.0
>>> fahrenheit_to_kelvin(0)
255.37
>>> fahrenheit_to_kelvin(20.0)
266.48
>>> fahrenheit_to_kelvin(40.0)
277.59
>>> fahrenheit_to_kelvin(60)
288.71
>>> fahrenheit_to_kelvin(80)
299.82
>>> fahrenheit_to_kelvin("100")
310.93
>>> fahrenheit_to_kelvin("fahrenheit")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'fahrenheit'
"""
return round(((float(fahrenheit) - 32) * 5 / 9) + 273.15, ndigits)
def fahrenheit_to_rankine(fahrenheit: float, ndigits: int = 2) -> float:
"""
Convert a given value from Fahrenheit to Rankine and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Fahrenheit
Wikipedia reference: https://en.wikipedia.org/wiki/Rankine_scale
>>> fahrenheit_to_rankine(273.354, 3)
733.024
>>> fahrenheit_to_rankine(273.354, 0)
733.0
>>> fahrenheit_to_rankine(0)
459.67
>>> fahrenheit_to_rankine(20.0)
479.67
>>> fahrenheit_to_rankine(40.0)
499.67
>>> fahrenheit_to_rankine(60)
519.67
>>> fahrenheit_to_rankine(80)
539.67
>>> fahrenheit_to_rankine("100")
559.67
>>> fahrenheit_to_rankine("fahrenheit")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'fahrenheit'
"""
return round(float(fahrenheit) + 459.67, ndigits)
def kelvin_to_celsius(kelvin: float, ndigits: int = 2) -> float:
"""
Convert a given value from Kelvin to Celsius and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Kelvin
Wikipedia reference: https://en.wikipedia.org/wiki/Celsius
>>> kelvin_to_celsius(273.354, 3)
0.204
>>> kelvin_to_celsius(273.354, 0)
0.0
>>> kelvin_to_celsius(273.15)
0.0
>>> kelvin_to_celsius(300)
26.85
>>> kelvin_to_celsius("315.5")
42.35
>>> kelvin_to_celsius("kelvin")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'kelvin'
"""
return round(float(kelvin) - 273.15, ndigits)
def kelvin_to_fahrenheit(kelvin: float, ndigits: int = 2) -> float:
"""
Convert a given value from Kelvin to Fahrenheit and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Kelvin
Wikipedia reference: https://en.wikipedia.org/wiki/Fahrenheit
>>> kelvin_to_fahrenheit(273.354, 3)
32.367
>>> kelvin_to_fahrenheit(273.354, 0)
32.0
>>> kelvin_to_fahrenheit(273.15)
32.0
>>> kelvin_to_fahrenheit(300)
80.33
>>> kelvin_to_fahrenheit("315.5")
108.23
>>> kelvin_to_fahrenheit("kelvin")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'kelvin'
"""
return round(((float(kelvin) - 273.15) * 9 / 5) + 32, ndigits)
def kelvin_to_rankine(kelvin: float, ndigits: int = 2) -> float:
"""
Convert a given value from Kelvin to Rankine and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Kelvin
Wikipedia reference: https://en.wikipedia.org/wiki/Rankine_scale
>>> kelvin_to_rankine(273.354, 3)
492.037
>>> kelvin_to_rankine(273.354, 0)
492.0
>>> kelvin_to_rankine(0)
0.0
>>> kelvin_to_rankine(20.0)
36.0
>>> kelvin_to_rankine("40")
72.0
>>> kelvin_to_rankine("kelvin")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'kelvin'
"""
return round((float(kelvin) * 9 / 5), ndigits)
def rankine_to_celsius(rankine: float, ndigits: int = 2) -> float:
"""
Convert a given value from Rankine to Celsius and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Rankine_scale
Wikipedia reference: https://en.wikipedia.org/wiki/Celsius
>>> rankine_to_celsius(273.354, 3)
-121.287
>>> rankine_to_celsius(273.354, 0)
-121.0
>>> rankine_to_celsius(273.15)
-121.4
>>> rankine_to_celsius(300)
-106.48
>>> rankine_to_celsius("315.5")
-97.87
>>> rankine_to_celsius("rankine")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'rankine'
"""
return round((float(rankine) - 491.67) * 5 / 9, ndigits)
def rankine_to_fahrenheit(rankine: float, ndigits: int = 2) -> float:
"""
Convert a given value from Rankine to Fahrenheit and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Rankine_scale
Wikipedia reference: https://en.wikipedia.org/wiki/Fahrenheit
>>> rankine_to_fahrenheit(273.15)
-186.52
>>> rankine_to_fahrenheit(300)
-159.67
>>> rankine_to_fahrenheit("315.5")
-144.17
>>> rankine_to_fahrenheit("rankine")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'rankine'
"""
return round(float(rankine) - 459.67, ndigits)
def rankine_to_kelvin(rankine: float, ndigits: int = 2) -> float:
"""
Convert a given value from Rankine to Kelvin and round it to 2 decimal places.
Wikipedia reference: https://en.wikipedia.org/wiki/Rankine_scale
Wikipedia reference: https://en.wikipedia.org/wiki/Kelvin
>>> rankine_to_kelvin(0)
0.0
>>> rankine_to_kelvin(20.0)
11.11
>>> rankine_to_kelvin("40")
22.22
>>> rankine_to_kelvin("rankine")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'rankine'
"""
return round((float(rankine) * 5 / 9), ndigits)
def reaumur_to_kelvin(reaumur: float, ndigits: int = 2) -> float:
"""
Convert a given value from reaumur to Kelvin and round it to 2 decimal places.
Reference:- http://www.csgnetwork.com/temp2conv.html
>>> reaumur_to_kelvin(0)
273.15
>>> reaumur_to_kelvin(20.0)
298.15
>>> reaumur_to_kelvin(40)
323.15
>>> reaumur_to_kelvin("reaumur")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'reaumur'
"""
return round((float(reaumur) * 1.25 + 273.15), ndigits)
def reaumur_to_fahrenheit(reaumur: float, ndigits: int = 2) -> float:
"""
Convert a given value from reaumur to fahrenheit and round it to 2 decimal places.
Reference:- http://www.csgnetwork.com/temp2conv.html
>>> reaumur_to_fahrenheit(0)
32.0
>>> reaumur_to_fahrenheit(20.0)
77.0
>>> reaumur_to_fahrenheit(40)
122.0
>>> reaumur_to_fahrenheit("reaumur")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'reaumur'
"""
return round((float(reaumur) * 2.25 + 32), ndigits)
def reaumur_to_celsius(reaumur: float, ndigits: int = 2) -> float:
"""
Convert a given value from reaumur to celsius and round it to 2 decimal places.
Reference:- http://www.csgnetwork.com/temp2conv.html
>>> reaumur_to_celsius(0)
0.0
>>> reaumur_to_celsius(20.0)
25.0
>>> reaumur_to_celsius(40)
50.0
>>> reaumur_to_celsius("reaumur")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'reaumur'
"""
return round((float(reaumur) * 1.25), ndigits)
def reaumur_to_rankine(reaumur: float, ndigits: int = 2) -> float:
"""
Convert a given value from reaumur to rankine and round it to 2 decimal places.
Reference:- http://www.csgnetwork.com/temp2conv.html
>>> reaumur_to_rankine(0)
491.67
>>> reaumur_to_rankine(20.0)
536.67
>>> reaumur_to_rankine(40)
581.67
>>> reaumur_to_rankine("reaumur")
Traceback (most recent call last):
...
ValueError: could not convert string to float: 'reaumur'
"""
return round((float(reaumur) * 2.25 + 32 + 459.67), ndigits)
if __name__ == "__main__":
import doctest
doctest.testmod()
| -1 |
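A minimal round-trip sanity check for the Celsius/Fahrenheit formulas used above; the two helper names below are local to this sketch and are not part of the conversion module:

```python
# Converting C -> F -> C should recover the input (up to float rounding).
def c_to_f(celsius: float) -> float:
    return celsius * 9 / 5 + 32

def f_to_c(fahrenheit: float) -> float:
    return (fahrenheit - 32) * 5 / 9

for celsius in (-40.0, 0.0, 37.0, 100.0):
    assert abs(f_to_c(c_to_f(celsius)) - celsius) < 1e-9
print("round-trip OK")
```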
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
A non-recursive Segment Tree implementation with range query and single element update,
works with virtually any list of elements of the same type, given a "commutative"
combiner.
Explanation:
https://www.geeksforgeeks.org/iterative-segment-tree-range-minimum-query/
https://www.geeksforgeeks.org/segment-tree-efficient-implementation/
>>> SegmentTree([1, 2, 3], lambda a, b: a + b).query(0, 2)
6
>>> SegmentTree([3, 1, 2], min).query(0, 2)
1
>>> SegmentTree([2, 3, 1], max).query(0, 2)
3
>>> st = SegmentTree([1, 5, 7, -1, 6], lambda a, b: a + b)
>>> st.update(1, -1)
>>> st.update(2, 3)
>>> st.query(1, 2)
2
>>> st.query(1, 1)
-1
>>> st.update(4, 1)
>>> st.query(3, 4)
0
>>> st = SegmentTree([[1, 2, 3], [3, 2, 1], [1, 1, 1]], lambda a, b: [a[i] + b[i] for i
... in range(len(a))])
>>> st.query(0, 1)
[4, 4, 4]
>>> st.query(1, 2)
[4, 3, 2]
>>> st.update(1, [-1, -1, -1])
>>> st.query(1, 2)
[0, 0, 0]
>>> st.query(0, 2)
[1, 2, 3]
"""
from __future__ import annotations
from collections.abc import Callable
from typing import Any, Generic, TypeVar
T = TypeVar("T")
class SegmentTree(Generic[T]):
def __init__(self, arr: list[T], fnc: Callable[[T, T], T]) -> None:
"""
        Segment Tree constructor; it works only with a commutative combiner.
:param arr: list of elements for the segment tree
        :param fnc: commutative function to combine two elements
>>> SegmentTree(['a', 'b', 'c'], lambda a, b: f'{a}{b}').query(0, 2)
'abc'
>>> SegmentTree([(1, 2), (2, 3), (3, 4)],
... lambda a, b: (a[0] + b[0], a[1] + b[1])).query(0, 2)
(6, 9)
"""
any_type: Any | T = None
self.N: int = len(arr)
self.st: list[T] = [any_type for _ in range(self.N)] + arr
self.fn = fnc
self.build()
def build(self) -> None:
for p in range(self.N - 1, 0, -1):
self.st[p] = self.fn(self.st[p * 2], self.st[p * 2 + 1])
def update(self, p: int, v: T) -> None:
"""
Update an element in log(N) time
        :param p: position to be updated
:param v: new value
>>> st = SegmentTree([3, 1, 2, 4], min)
>>> st.query(0, 3)
1
>>> st.update(2, -1)
>>> st.query(0, 3)
-1
"""
p += self.N
self.st[p] = v
while p > 1:
p = p // 2
self.st[p] = self.fn(self.st[p * 2], self.st[p * 2 + 1])
def query(self, l: int, r: int) -> T | None: # noqa: E741
"""
Get range query value in log(N) time
:param l: left element index
:param r: right element index
:return: element combined in the range [l, r]
>>> st = SegmentTree([1, 2, 3, 4], lambda a, b: a + b)
>>> st.query(0, 2)
6
>>> st.query(1, 2)
5
>>> st.query(0, 3)
10
>>> st.query(2, 3)
7
"""
l, r = l + self.N, r + self.N
res: T | None = None
while l <= r:
if l % 2 == 1:
res = self.st[l] if res is None else self.fn(res, self.st[l])
if r % 2 == 0:
res = self.st[r] if res is None else self.fn(res, self.st[r])
l, r = (l + 1) // 2, (r - 1) // 2
return res
if __name__ == "__main__":
from functools import reduce
test_array = [1, 10, -2, 9, -3, 8, 4, -7, 5, 6, 11, -12]
test_updates = {
0: 7,
1: 2,
2: 6,
3: -14,
4: 5,
5: 4,
6: 7,
7: -10,
8: 9,
9: 10,
10: 12,
11: 1,
}
min_segment_tree = SegmentTree(test_array, min)
max_segment_tree = SegmentTree(test_array, max)
sum_segment_tree = SegmentTree(test_array, lambda a, b: a + b)
def test_all_segments() -> None:
"""
Test all possible segments
"""
for i in range(len(test_array)):
for j in range(i, len(test_array)):
min_range = reduce(min, test_array[i : j + 1])
max_range = reduce(max, test_array[i : j + 1])
sum_range = reduce(lambda a, b: a + b, test_array[i : j + 1])
assert min_range == min_segment_tree.query(i, j)
assert max_range == max_segment_tree.query(i, j)
assert sum_range == sum_segment_tree.query(i, j)
test_all_segments()
for index, value in test_updates.items():
test_array[index] = value
min_segment_tree.update(index, value)
max_segment_tree.update(index, value)
sum_segment_tree.update(index, value)
test_all_segments()
| """
A non-recursive Segment Tree implementation with range query and single element update,
works with virtually any list of elements of the same type, given a "commutative"
combiner.
Explanation:
https://www.geeksforgeeks.org/iterative-segment-tree-range-minimum-query/
https://www.geeksforgeeks.org/segment-tree-efficient-implementation/
>>> SegmentTree([1, 2, 3], lambda a, b: a + b).query(0, 2)
6
>>> SegmentTree([3, 1, 2], min).query(0, 2)
1
>>> SegmentTree([2, 3, 1], max).query(0, 2)
3
>>> st = SegmentTree([1, 5, 7, -1, 6], lambda a, b: a + b)
>>> st.update(1, -1)
>>> st.update(2, 3)
>>> st.query(1, 2)
2
>>> st.query(1, 1)
-1
>>> st.update(4, 1)
>>> st.query(3, 4)
0
>>> st = SegmentTree([[1, 2, 3], [3, 2, 1], [1, 1, 1]], lambda a, b: [a[i] + b[i] for i
... in range(len(a))])
>>> st.query(0, 1)
[4, 4, 4]
>>> st.query(1, 2)
[4, 3, 2]
>>> st.update(1, [-1, -1, -1])
>>> st.query(1, 2)
[0, 0, 0]
>>> st.query(0, 2)
[1, 2, 3]
"""
from __future__ import annotations
from collections.abc import Callable
from typing import Any, Generic, TypeVar
T = TypeVar("T")
class SegmentTree(Generic[T]):
def __init__(self, arr: list[T], fnc: Callable[[T, T], T]) -> None:
"""
        Segment Tree constructor; it works only with a commutative combiner.
:param arr: list of elements for the segment tree
        :param fnc: commutative function to combine two elements
>>> SegmentTree(['a', 'b', 'c'], lambda a, b: f'{a}{b}').query(0, 2)
'abc'
>>> SegmentTree([(1, 2), (2, 3), (3, 4)],
... lambda a, b: (a[0] + b[0], a[1] + b[1])).query(0, 2)
(6, 9)
"""
any_type: Any | T = None
self.N: int = len(arr)
self.st: list[T] = [any_type for _ in range(self.N)] + arr
self.fn = fnc
self.build()
def build(self) -> None:
for p in range(self.N - 1, 0, -1):
self.st[p] = self.fn(self.st[p * 2], self.st[p * 2 + 1])
def update(self, p: int, v: T) -> None:
"""
Update an element in log(N) time
        :param p: position to be updated
:param v: new value
>>> st = SegmentTree([3, 1, 2, 4], min)
>>> st.query(0, 3)
1
>>> st.update(2, -1)
>>> st.query(0, 3)
-1
"""
p += self.N
self.st[p] = v
while p > 1:
p = p // 2
self.st[p] = self.fn(self.st[p * 2], self.st[p * 2 + 1])
def query(self, l: int, r: int) -> T | None: # noqa: E741
"""
Get range query value in log(N) time
:param l: left element index
:param r: right element index
:return: element combined in the range [l, r]
>>> st = SegmentTree([1, 2, 3, 4], lambda a, b: a + b)
>>> st.query(0, 2)
6
>>> st.query(1, 2)
5
>>> st.query(0, 3)
10
>>> st.query(2, 3)
7
"""
l, r = l + self.N, r + self.N
res: T | None = None
while l <= r:
if l % 2 == 1:
res = self.st[l] if res is None else self.fn(res, self.st[l])
if r % 2 == 0:
res = self.st[r] if res is None else self.fn(res, self.st[r])
l, r = (l + 1) // 2, (r - 1) // 2
return res
if __name__ == "__main__":
from functools import reduce
test_array = [1, 10, -2, 9, -3, 8, 4, -7, 5, 6, 11, -12]
test_updates = {
0: 7,
1: 2,
2: 6,
3: -14,
4: 5,
5: 4,
6: 7,
7: -10,
8: 9,
9: 10,
10: 12,
11: 1,
}
min_segment_tree = SegmentTree(test_array, min)
max_segment_tree = SegmentTree(test_array, max)
sum_segment_tree = SegmentTree(test_array, lambda a, b: a + b)
def test_all_segments() -> None:
"""
Test all possible segments
"""
for i in range(len(test_array)):
for j in range(i, len(test_array)):
min_range = reduce(min, test_array[i : j + 1])
max_range = reduce(max, test_array[i : j + 1])
sum_range = reduce(lambda a, b: a + b, test_array[i : j + 1])
assert min_range == min_segment_tree.query(i, j)
assert max_range == max_segment_tree.query(i, j)
assert sum_range == sum_segment_tree.query(i, j)
test_all_segments()
for index, value in test_updates.items():
test_array[index] = value
min_segment_tree.update(index, value)
max_segment_tree.update(index, value)
sum_segment_tree.update(index, value)
test_all_segments()
| -1 |
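Since the combiner only has to be commutative and associative, other reductions drop in directly; a small sketch (assuming the `SegmentTree` class above is in scope) using `math.gcd` as the combiner:

```python
from math import gcd

gcd_tree = SegmentTree([12, 18, 24, 30, 36], gcd)
print(gcd_tree.query(0, 4))  # gcd of the whole array -> 6
print(gcd_tree.query(1, 2))  # gcd(18, 24) -> 6
gcd_tree.update(2, 25)
print(gcd_tree.query(1, 2))  # gcd(18, 25) -> 1
```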
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| -1 |
||
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # Implementing Newton Raphson method in Python
# Author: Saksham Gupta
#
# The Newton-Raphson method (also known as Newton's method) is a way to
# quickly find a good approximation for the root of a real-valued function
# The method can also be extended to complex functions
#
# Newton's Method - https://en.wikipedia.org/wiki/Newton's_method
from sympy import diff, lambdify, symbols
from sympy.functions import * # noqa: F403
def newton_raphson(
function: str,
starting_point: complex,
variable: str = "x",
precision: float = 10**-10,
multiplicity: int = 1,
) -> complex:
"""Finds root from the 'starting_point' onwards by Newton-Raphson method
Refer to https://docs.sympy.org/latest/modules/functions/index.html
for usable mathematical functions
>>> newton_raphson("sin(x)", 2)
3.141592653589793
>>> newton_raphson("x**4 -5", 0.4 + 5j)
(-7.52316384526264e-37+1.4953487812212207j)
>>> newton_raphson('log(y) - 1', 2, variable='y')
2.7182818284590455
>>> newton_raphson('exp(x) - 1', 10, precision=0.005)
1.2186556186174883e-10
>>> newton_raphson('cos(x)', 0)
Traceback (most recent call last):
...
ZeroDivisionError: Could not find root
"""
x = symbols(variable)
func = lambdify(x, function)
diff_function = lambdify(x, diff(function, x))
prev_guess = starting_point
while True:
if diff_function(prev_guess) != 0:
next_guess = prev_guess - multiplicity * func(prev_guess) / diff_function(
prev_guess
)
else:
raise ZeroDivisionError("Could not find root") from None
# Precision is checked by comparing the difference of consecutive guesses
if abs(next_guess - prev_guess) < precision:
return next_guess
prev_guess = next_guess
# Let's Execute
if __name__ == "__main__":
# Find root of trigonometric function
# Find value of pi
print(f"The root of sin(x) = 0 is {newton_raphson('sin(x)', 2)}")
# Find root of polynomial
# Find fourth Root of 5
print(f"The root of x**4 - 5 = 0 is {newton_raphson('x**4 -5', 0.4 +5j)}")
# Find value of e
print(
"The root of log(y) - 1 = 0 is ",
f"{newton_raphson('log(y) - 1', 2, variable='y')}",
)
# Exponential Roots
print(
"The root of exp(x) - 1 = 0 is",
f"{newton_raphson('exp(x) - 1', 10, precision=0.005)}",
)
# Find root of cos(x)
print(f"The root of cos(x) = 0 is {newton_raphson('cos(x)', 0)}")
| # Implementing Newton Raphson method in Python
# Author: Saksham Gupta
#
# The Newton-Raphson method (also known as Newton's method) is a way to
# quickly find a good approximation for the root of a real-valued function
# The method can also be extended to complex functions
#
# Newton's Method - https://en.wikipedia.org/wiki/Newton's_method
from sympy import diff, lambdify, symbols
from sympy.functions import * # noqa: F403
def newton_raphson(
function: str,
starting_point: complex,
variable: str = "x",
precision: float = 10**-10,
multiplicity: int = 1,
) -> complex:
"""Finds root from the 'starting_point' onwards by Newton-Raphson method
Refer to https://docs.sympy.org/latest/modules/functions/index.html
for usable mathematical functions
>>> newton_raphson("sin(x)", 2)
3.141592653589793
>>> newton_raphson("x**4 -5", 0.4 + 5j)
(-7.52316384526264e-37+1.4953487812212207j)
>>> newton_raphson('log(y) - 1', 2, variable='y')
2.7182818284590455
>>> newton_raphson('exp(x) - 1', 10, precision=0.005)
1.2186556186174883e-10
>>> newton_raphson('cos(x)', 0)
Traceback (most recent call last):
...
ZeroDivisionError: Could not find root
"""
x = symbols(variable)
func = lambdify(x, function)
diff_function = lambdify(x, diff(function, x))
prev_guess = starting_point
while True:
if diff_function(prev_guess) != 0:
next_guess = prev_guess - multiplicity * func(prev_guess) / diff_function(
prev_guess
)
else:
raise ZeroDivisionError("Could not find root") from None
# Precision is checked by comparing the difference of consecutive guesses
if abs(next_guess - prev_guess) < precision:
return next_guess
prev_guess = next_guess
# Let's Execute
if __name__ == "__main__":
# Find root of trigonometric function
# Find value of pi
print(f"The root of sin(x) = 0 is {newton_raphson('sin(x)', 2)}")
# Find root of polynomial
# Find fourth Root of 5
print(f"The root of x**4 - 5 = 0 is {newton_raphson('x**4 -5', 0.4 +5j)}")
# Find value of e
print(
"The root of log(y) - 1 = 0 is ",
f"{newton_raphson('log(y) - 1', 2, variable='y')}",
)
# Exponential Roots
print(
"The root of exp(x) - 1 = 0 is",
f"{newton_raphson('exp(x) - 1', 10, precision=0.005)}",
)
# Find root of cos(x)
print(f"The root of cos(x) = 0 is {newton_raphson('cos(x)', 0)}")
| -1 |
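For readers who want the iteration without the sympy dependency, here is a minimal hand-rolled sketch of the same update rule x_next = x - f(x) / f'(x), with the derivative supplied explicitly; it is illustrative only and not part of the file above:

```python
def newton_raphson_plain(f, f_prime, x0: float, precision: float = 1e-10) -> float:
    """Iterate x <- x - f(x) / f'(x) until successive guesses converge."""
    x = x0
    while True:
        derivative = f_prime(x)
        if derivative == 0:
            raise ZeroDivisionError("Could not find root")
        x_next = x - f(x) / derivative
        if abs(x_next - x) < precision:
            return x_next
        x = x_next

# Example: the positive root of x**2 - 5, i.e. sqrt(5) ~ 2.23607
print(newton_raphson_plain(lambda x: x * x - 5, lambda x: 2 * x, 2.0))
```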
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # Random Forest Classifier Example
from matplotlib import pyplot as plt
from sklearn.datasets import load_iris
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import plot_confusion_matrix
from sklearn.model_selection import train_test_split
def main():
"""
    Random Forest Classifier example using the scikit-learn implementation.
    The Iris dataset is used to demonstrate the algorithm.
"""
# Load Iris dataset
iris = load_iris()
# Split dataset into train and test data
x = iris["data"] # features
y = iris["target"]
x_train, x_test, y_train, y_test = train_test_split(
x, y, test_size=0.3, random_state=1
)
# Random Forest Classifier
rand_for = RandomForestClassifier(random_state=42, n_estimators=100)
rand_for.fit(x_train, y_train)
# Display Confusion Matrix of Classifier
plot_confusion_matrix(
rand_for,
x_test,
y_test,
display_labels=iris["target_names"],
cmap="Blues",
normalize="true",
)
plt.title("Normalized Confusion Matrix - IRIS Dataset")
plt.show()
if __name__ == "__main__":
main()
| # Random Forest Classifier Example
from matplotlib import pyplot as plt
from sklearn.datasets import load_iris
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import plot_confusion_matrix
from sklearn.model_selection import train_test_split
def main():
"""
    Random Forest Classifier example using the scikit-learn implementation.
    The Iris dataset is used to demonstrate the algorithm.
"""
# Load Iris dataset
iris = load_iris()
# Split dataset into train and test data
x = iris["data"] # features
y = iris["target"]
x_train, x_test, y_train, y_test = train_test_split(
x, y, test_size=0.3, random_state=1
)
# Random Forest Classifier
rand_for = RandomForestClassifier(random_state=42, n_estimators=100)
rand_for.fit(x_train, y_train)
# Display Confusion Matrix of Classifier
plot_confusion_matrix(
rand_for,
x_test,
y_test,
display_labels=iris["target_names"],
cmap="Blues",
normalize="true",
)
plt.title("Normalized Confusion Matrix - IRIS Dataset")
plt.show()
if __name__ == "__main__":
main()
| -1 |
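One caveat worth flagging: `plot_confusion_matrix` has been deprecated and removed in newer scikit-learn releases (around 1.2, if memory serves). A hedged sketch of the usual replacement, `ConfusionMatrixDisplay.from_estimator`, reusing the names from the example above:

```python
# Assumes rand_for, x_test, y_test and iris are defined as in the example above.
from matplotlib import pyplot as plt
from sklearn.metrics import ConfusionMatrixDisplay

ConfusionMatrixDisplay.from_estimator(
    rand_for,
    x_test,
    y_test,
    display_labels=iris["target_names"],
    cmap="Blues",
    normalize="true",
)
plt.title("Normalized Confusion Matrix - IRIS Dataset")
plt.show()
```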
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| import cv2
import numpy as np
def get_neighbors_pixel(
image: np.ndarray, x_coordinate: int, y_coordinate: int, center: int
) -> int:
"""
Comparing local neighborhood pixel value with threshold value of centre pixel.
    An exception is raised when a neighborhood value of the center pixel is missing,
    i.e. for values at the image boundaries.
:param image: The image we're working with
:param x_coordinate: x-coordinate of the pixel
:param y_coordinate: The y coordinate of the pixel
:param center: center pixel value
:return: The value of the pixel is being returned.
"""
try:
return int(image[x_coordinate][y_coordinate] >= center)
except (IndexError, TypeError):
return 0
def local_binary_value(image: np.ndarray, x_coordinate: int, y_coordinate: int) -> int:
"""
It takes an image, an x and y coordinate, and returns the
    decimal value of the local binary pattern of the pixel
at that coordinate
:param image: the image to be processed
:param x_coordinate: x coordinate of the pixel
:param y_coordinate: the y coordinate of the pixel
:return: The decimal value of the binary value of the pixels
around the center pixel.
"""
center = image[x_coordinate][y_coordinate]
powers = [1, 2, 4, 8, 16, 32, 64, 128]
# skip get_neighbors_pixel if center is null
if center is None:
return 0
# Starting from the top right, assigning value to pixels clockwise
binary_values = [
get_neighbors_pixel(image, x_coordinate - 1, y_coordinate + 1, center),
get_neighbors_pixel(image, x_coordinate, y_coordinate + 1, center),
get_neighbors_pixel(image, x_coordinate - 1, y_coordinate, center),
get_neighbors_pixel(image, x_coordinate + 1, y_coordinate + 1, center),
get_neighbors_pixel(image, x_coordinate + 1, y_coordinate, center),
get_neighbors_pixel(image, x_coordinate + 1, y_coordinate - 1, center),
get_neighbors_pixel(image, x_coordinate, y_coordinate - 1, center),
get_neighbors_pixel(image, x_coordinate - 1, y_coordinate - 1, center),
]
# Converting the binary value to decimal.
return sum(
binary_value * power for binary_value, power in zip(binary_values, powers)
)
if __name__ == "__main__":
# Reading the image and converting it to grayscale.
image = cv2.imread(
"digital_image_processing/image_data/lena.jpg", cv2.IMREAD_GRAYSCALE
)
# Create a numpy array as the same height and width of read image
lbp_image = np.zeros((image.shape[0], image.shape[1]))
# Iterating through the image and calculating the
# local binary pattern value for each pixel.
for i in range(0, image.shape[0]):
for j in range(0, image.shape[1]):
lbp_image[i][j] = local_binary_value(image, i, j)
cv2.imshow("local binary pattern", lbp_image)
cv2.waitKey(0)
cv2.destroyAllWindows()
| import cv2
import numpy as np
def get_neighbors_pixel(
image: np.ndarray, x_coordinate: int, y_coordinate: int, center: int
) -> int:
"""
Comparing local neighborhood pixel value with threshold value of centre pixel.
    An exception is raised when a neighborhood value of the center pixel is missing,
    i.e. for values at the image boundaries.
:param image: The image we're working with
:param x_coordinate: x-coordinate of the pixel
:param y_coordinate: The y coordinate of the pixel
:param center: center pixel value
:return: The value of the pixel is being returned.
"""
try:
return int(image[x_coordinate][y_coordinate] >= center)
except (IndexError, TypeError):
return 0
def local_binary_value(image: np.ndarray, x_coordinate: int, y_coordinate: int) -> int:
"""
It takes an image, an x and y coordinate, and returns the
    decimal value of the local binary pattern of the pixel
at that coordinate
:param image: the image to be processed
:param x_coordinate: x coordinate of the pixel
:param y_coordinate: the y coordinate of the pixel
:return: The decimal value of the binary value of the pixels
around the center pixel.
"""
center = image[x_coordinate][y_coordinate]
powers = [1, 2, 4, 8, 16, 32, 64, 128]
# skip get_neighbors_pixel if center is null
if center is None:
return 0
# Starting from the top right, assigning value to pixels clockwise
binary_values = [
get_neighbors_pixel(image, x_coordinate - 1, y_coordinate + 1, center),
get_neighbors_pixel(image, x_coordinate, y_coordinate + 1, center),
get_neighbors_pixel(image, x_coordinate - 1, y_coordinate, center),
get_neighbors_pixel(image, x_coordinate + 1, y_coordinate + 1, center),
get_neighbors_pixel(image, x_coordinate + 1, y_coordinate, center),
get_neighbors_pixel(image, x_coordinate + 1, y_coordinate - 1, center),
get_neighbors_pixel(image, x_coordinate, y_coordinate - 1, center),
get_neighbors_pixel(image, x_coordinate - 1, y_coordinate - 1, center),
]
# Converting the binary value to decimal.
return sum(
binary_value * power for binary_value, power in zip(binary_values, powers)
)
if __name__ == "__main__":
# Reading the image and converting it to grayscale.
image = cv2.imread(
"digital_image_processing/image_data/lena.jpg", cv2.IMREAD_GRAYSCALE
)
# Create a numpy array as the same height and width of read image
lbp_image = np.zeros((image.shape[0], image.shape[1]))
# Iterating through the image and calculating the
# local binary pattern value for each pixel.
for i in range(0, image.shape[0]):
for j in range(0, image.shape[1]):
lbp_image[i][j] = local_binary_value(image, i, j)
cv2.imshow("local binary pattern", lbp_image)
cv2.waitKey(0)
cv2.destroyAllWindows()
| -1 |
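To make the bit-weighting idea concrete, here is a tiny self-contained sketch that computes one LBP code for the centre of a 3x3 patch; the clockwise neighbour ordering below is an illustrative choice and does not claim to match the exact ordering used in the file above:

```python
import numpy as np

patch = np.array(
    [
        [6, 2, 7],
        [1, 5, 9],
        [4, 5, 3],
    ]
)
center = patch[1, 1]  # threshold value
# Eight neighbours read clockwise from the top-left corner, mapped to 0/1 bits.
neighbours = [
    patch[0, 0], patch[0, 1], patch[0, 2],
    patch[1, 2], patch[2, 2], patch[2, 1],
    patch[2, 0], patch[1, 0],
]
bits = [int(value >= center) for value in neighbours]
weights = [1, 2, 4, 8, 16, 32, 64, 128]
print(bits, sum(bit * weight for bit, weight in zip(bits, weights)))
```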
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| -1 |
||
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| def average_absolute_deviation(nums: list[int]) -> float:
"""
Return the average absolute deviation of a list of numbers.
Wiki: https://en.wikipedia.org/wiki/Average_absolute_deviation
>>> average_absolute_deviation([0])
0.0
>>> average_absolute_deviation([4, 1, 3, 2])
1.0
>>> average_absolute_deviation([2, 70, 6, 50, 20, 8, 4, 0])
20.0
>>> average_absolute_deviation([-20, 0, 30, 15])
16.25
>>> average_absolute_deviation([])
Traceback (most recent call last):
...
ValueError: List is empty
"""
if not nums: # Makes sure that the list is not empty
raise ValueError("List is empty")
average = sum(nums) / len(nums) # Calculate the average
return sum(abs(x - average) for x in nums) / len(nums)
if __name__ == "__main__":
import doctest
doctest.testmod()
| def average_absolute_deviation(nums: list[int]) -> float:
"""
Return the average absolute deviation of a list of numbers.
Wiki: https://en.wikipedia.org/wiki/Average_absolute_deviation
>>> average_absolute_deviation([0])
0.0
>>> average_absolute_deviation([4, 1, 3, 2])
1.0
>>> average_absolute_deviation([2, 70, 6, 50, 20, 8, 4, 0])
20.0
>>> average_absolute_deviation([-20, 0, 30, 15])
16.25
>>> average_absolute_deviation([])
Traceback (most recent call last):
...
ValueError: List is empty
"""
if not nums: # Makes sure that the list is not empty
raise ValueError("List is empty")
average = sum(nums) / len(nums) # Calculate the average
return sum(abs(x - average) for x in nums) / len(nums)
if __name__ == "__main__":
import doctest
doctest.testmod()
| -1 |
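For array inputs the same statistic can be cross-checked with NumPy. This is only an illustrative sketch; NumPy is an extra dependency that the function above does not require.

import numpy as np

data = np.array([2, 70, 6, 50, 20, 8, 4, 0])
mad = np.abs(data - data.mean()).mean()
print(mad)  # 20.0, matching average_absolute_deviation([2, 70, 6, 50, 20, 8, 4, 0])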
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
Implementation of double ended queue.
"""
from __future__ import annotations
from collections.abc import Iterable
from dataclasses import dataclass
from typing import Any
class Deque:
"""
Deque data structure.
Operations
----------
append(val: Any) -> None
appendleft(val: Any) -> None
extend(iterable: Iterable) -> None
extendleft(iterable: Iterable) -> None
pop() -> Any
popleft() -> Any
Observers
---------
is_empty() -> bool
Attributes
----------
_front: _Node
front of the deque a.k.a. the first element
_back: _Node
back of the deque a.k.a. the last element
_len: int
the number of nodes
"""
__slots__ = ["_front", "_back", "_len"]
@dataclass
class _Node:
"""
Representation of a node.
Contains a value and a pointer to the next node as well as to the previous one.
"""
val: Any = None
next_node: Deque._Node | None = None
prev_node: Deque._Node | None = None
class _Iterator:
"""
Helper class for iteration. Will be used to implement iteration.
Attributes
----------
_cur: _Node
the current node of the iteration.
"""
__slots__ = ["_cur"]
def __init__(self, cur: Deque._Node | None) -> None:
self._cur = cur
def __iter__(self) -> Deque._Iterator:
"""
>>> our_deque = Deque([1, 2, 3])
>>> iterator = iter(our_deque)
"""
return self
def __next__(self) -> Any:
"""
>>> our_deque = Deque([1, 2, 3])
>>> iterator = iter(our_deque)
>>> next(iterator)
1
>>> next(iterator)
2
>>> next(iterator)
3
"""
if self._cur is None:
# finished iterating
raise StopIteration
val = self._cur.val
self._cur = self._cur.next_node
return val
def __init__(self, iterable: Iterable[Any] | None = None) -> None:
self._front: Any = None
self._back: Any = None
self._len: int = 0
if iterable is not None:
# append every value to the deque
for val in iterable:
self.append(val)
def append(self, val: Any) -> None:
"""
Adds val to the end of the deque.
Time complexity: O(1)
>>> our_deque_1 = Deque([1, 2, 3])
>>> our_deque_1.append(4)
>>> our_deque_1
[1, 2, 3, 4]
>>> our_deque_2 = Deque('ab')
>>> our_deque_2.append('c')
>>> our_deque_2
['a', 'b', 'c']
>>> from collections import deque
>>> deque_collections_1 = deque([1, 2, 3])
>>> deque_collections_1.append(4)
>>> deque_collections_1
deque([1, 2, 3, 4])
>>> deque_collections_2 = deque('ab')
>>> deque_collections_2.append('c')
>>> deque_collections_2
deque(['a', 'b', 'c'])
>>> list(our_deque_1) == list(deque_collections_1)
True
>>> list(our_deque_2) == list(deque_collections_2)
True
"""
node = self._Node(val, None, None)
if self.is_empty():
# front = back
self._front = self._back = node
self._len = 1
else:
# connect nodes
self._back.next_node = node
node.prev_node = self._back
self._back = node # assign new back to the new node
self._len += 1
# make sure there were no errors
assert not self.is_empty(), "Error on appending value."
def appendleft(self, val: Any) -> None:
"""
Adds val to the beginning of the deque.
Time complexity: O(1)
>>> our_deque_1 = Deque([2, 3])
>>> our_deque_1.appendleft(1)
>>> our_deque_1
[1, 2, 3]
>>> our_deque_2 = Deque('bc')
>>> our_deque_2.appendleft('a')
>>> our_deque_2
['a', 'b', 'c']
>>> from collections import deque
>>> deque_collections_1 = deque([2, 3])
>>> deque_collections_1.appendleft(1)
>>> deque_collections_1
deque([1, 2, 3])
>>> deque_collections_2 = deque('bc')
>>> deque_collections_2.appendleft('a')
>>> deque_collections_2
deque(['a', 'b', 'c'])
>>> list(our_deque_1) == list(deque_collections_1)
True
>>> list(our_deque_2) == list(deque_collections_2)
True
"""
node = self._Node(val, None, None)
if self.is_empty():
# front = back
self._front = self._back = node
self._len = 1
else:
# connect nodes
node.next_node = self._front
self._front.prev_node = node
self._front = node # assign new front to the new node
self._len += 1
# make sure there were no errors
assert not self.is_empty(), "Error on appending value."
def extend(self, iterable: Iterable[Any]) -> None:
"""
Appends every value of iterable to the end of the deque.
Time complexity: O(n)
>>> our_deque_1 = Deque([1, 2, 3])
>>> our_deque_1.extend([4, 5])
>>> our_deque_1
[1, 2, 3, 4, 5]
>>> our_deque_2 = Deque('ab')
>>> our_deque_2.extend('cd')
>>> our_deque_2
['a', 'b', 'c', 'd']
>>> from collections import deque
>>> deque_collections_1 = deque([1, 2, 3])
>>> deque_collections_1.extend([4, 5])
>>> deque_collections_1
deque([1, 2, 3, 4, 5])
>>> deque_collections_2 = deque('ab')
>>> deque_collections_2.extend('cd')
>>> deque_collections_2
deque(['a', 'b', 'c', 'd'])
>>> list(our_deque_1) == list(deque_collections_1)
True
>>> list(our_deque_2) == list(deque_collections_2)
True
"""
for val in iterable:
self.append(val)
def extendleft(self, iterable: Iterable[Any]) -> None:
"""
Appends every value of iterable to the beginning of the deque.
Time complexity: O(n)
>>> our_deque_1 = Deque([1, 2, 3])
>>> our_deque_1.extendleft([0, -1])
>>> our_deque_1
[-1, 0, 1, 2, 3]
>>> our_deque_2 = Deque('cd')
>>> our_deque_2.extendleft('ba')
>>> our_deque_2
['a', 'b', 'c', 'd']
>>> from collections import deque
>>> deque_collections_1 = deque([1, 2, 3])
>>> deque_collections_1.extendleft([0, -1])
>>> deque_collections_1
deque([-1, 0, 1, 2, 3])
>>> deque_collections_2 = deque('cd')
>>> deque_collections_2.extendleft('ba')
>>> deque_collections_2
deque(['a', 'b', 'c', 'd'])
>>> list(our_deque_1) == list(deque_collections_1)
True
>>> list(our_deque_2) == list(deque_collections_2)
True
"""
for val in iterable:
self.appendleft(val)
def pop(self) -> Any:
"""
Removes the last element of the deque and returns it.
Time complexity: O(1)
@returns topop.val: the value of the node to pop.
>>> our_deque = Deque([1, 2, 3, 15182])
>>> our_popped = our_deque.pop()
>>> our_popped
15182
>>> our_deque
[1, 2, 3]
>>> from collections import deque
>>> deque_collections = deque([1, 2, 3, 15182])
>>> collections_popped = deque_collections.pop()
>>> collections_popped
15182
>>> deque_collections
deque([1, 2, 3])
>>> list(our_deque) == list(deque_collections)
True
>>> our_popped == collections_popped
True
"""
# make sure the deque has elements to pop
assert not self.is_empty(), "Deque is empty."
topop = self._back
self._back = self._back.prev_node  # set new back
if self._back is None:
    # the deque is now empty, so clear the front pointer as well
    self._front = None
else:
    # drop the last node - python will deallocate memory automatically
    self._back.next_node = None
self._len -= 1
return topop.val
def popleft(self) -> Any:
"""
Removes the first element of the deque and returns it.
Time complexity: O(1)
@returns topop.val: the value of the node to pop.
>>> our_deque = Deque([15182, 1, 2, 3])
>>> our_popped = our_deque.popleft()
>>> our_popped
15182
>>> our_deque
[1, 2, 3]
>>> from collections import deque
>>> deque_collections = deque([15182, 1, 2, 3])
>>> collections_popped = deque_collections.popleft()
>>> collections_popped
15182
>>> deque_collections
deque([1, 2, 3])
>>> list(our_deque) == list(deque_collections)
True
>>> our_popped == collections_popped
True
"""
# make sure the deque has elements to pop
assert not self.is_empty(), "Deque is empty."
topop = self._front
self._front = self._front.next_node  # set new front and drop the first node
if self._front is None:
    # the deque is now empty, so clear the back pointer as well
    self._back = None
else:
    self._front.prev_node = None
self._len -= 1
return topop.val
def is_empty(self) -> bool:
"""
Checks if the deque is empty.
Time complexity: O(1)
>>> our_deque = Deque([1, 2, 3])
>>> our_deque.is_empty()
False
>>> our_empty_deque = Deque()
>>> our_empty_deque.is_empty()
True
>>> from collections import deque
>>> empty_deque_collections = deque()
>>> list(our_empty_deque) == list(empty_deque_collections)
True
"""
return self._front is None
def __len__(self) -> int:
"""
Implements len() function. Returns the length of the deque.
Time complexity: O(1)
>>> our_deque = Deque([1, 2, 3])
>>> len(our_deque)
3
>>> our_empty_deque = Deque()
>>> len(our_empty_deque)
0
>>> from collections import deque
>>> deque_collections = deque([1, 2, 3])
>>> len(deque_collections)
3
>>> empty_deque_collections = deque()
>>> len(empty_deque_collections)
0
>>> len(our_empty_deque) == len(empty_deque_collections)
True
"""
return self._len
def __eq__(self, other: object) -> bool:
"""
Implements "==" operator. Returns if *self* is equal to *other*.
Time complexity: O(n)
>>> our_deque_1 = Deque([1, 2, 3])
>>> our_deque_2 = Deque([1, 2, 3])
>>> our_deque_1 == our_deque_2
True
>>> our_deque_3 = Deque([1, 2])
>>> our_deque_1 == our_deque_3
False
>>> from collections import deque
>>> deque_collections_1 = deque([1, 2, 3])
>>> deque_collections_2 = deque([1, 2, 3])
>>> deque_collections_1 == deque_collections_2
True
>>> deque_collections_3 = deque([1, 2])
>>> deque_collections_1 == deque_collections_3
False
>>> (our_deque_1 == our_deque_2) == (deque_collections_1 == deque_collections_2)
True
>>> (our_deque_1 == our_deque_3) == (deque_collections_1 == deque_collections_3)
True
"""
if not isinstance(other, Deque):
return NotImplemented
me = self._front
oth = other._front
# if the lengths of the deques are not the same, they are not equal
if len(self) != len(other):
return False
while me is not None and oth is not None:
# compare every value
if me.val != oth.val:
return False
me = me.next_node
oth = oth.next_node
return True
def __iter__(self) -> Deque._Iterator:
"""
Implements iteration.
Time complexity: O(1)
>>> our_deque = Deque([1, 2, 3])
>>> for v in our_deque:
... print(v)
1
2
3
>>> from collections import deque
>>> deque_collections = deque([1, 2, 3])
>>> for v in deque_collections:
... print(v)
1
2
3
"""
return Deque._Iterator(self._front)
def __repr__(self) -> str:
"""
Implements representation of the deque.
Represents it as a list, with its values between '[' and ']'.
Time complexity: O(n)
>>> our_deque = Deque([1, 2, 3])
>>> our_deque
[1, 2, 3]
"""
values_list = []
aux = self._front
while aux is not None:
# append the values in a list to display
values_list.append(aux.val)
aux = aux.next_node
return f"[{', '.join(repr(val) for val in values_list)}]"
if __name__ == "__main__":
import doctest
doctest.testmod()
| """
Implementation of double ended queue.
"""
from __future__ import annotations
from collections.abc import Iterable
from dataclasses import dataclass
from typing import Any
class Deque:
"""
Deque data structure.
Operations
----------
append(val: Any) -> None
appendleft(val: Any) -> None
extend(iterable: Iterable) -> None
extendleft(iterable: Iterable) -> None
pop() -> Any
popleft() -> Any
Observers
---------
is_empty() -> bool
Attributes
----------
_front: _Node
front of the deque a.k.a. the first element
_back: _Node
back of the deque a.k.a. the last element
_len: int
the number of nodes
"""
__slots__ = ["_front", "_back", "_len"]
@dataclass
class _Node:
"""
Representation of a node.
Contains a value and a pointer to the next node as well as to the previous one.
"""
val: Any = None
next_node: Deque._Node | None = None
prev_node: Deque._Node | None = None
class _Iterator:
"""
Helper class for iteration. Will be used to implement iteration.
Attributes
----------
_cur: _Node
the current node of the iteration.
"""
__slots__ = ["_cur"]
def __init__(self, cur: Deque._Node | None) -> None:
self._cur = cur
def __iter__(self) -> Deque._Iterator:
"""
>>> our_deque = Deque([1, 2, 3])
>>> iterator = iter(our_deque)
"""
return self
def __next__(self) -> Any:
"""
>>> our_deque = Deque([1, 2, 3])
>>> iterator = iter(our_deque)
>>> next(iterator)
1
>>> next(iterator)
2
>>> next(iterator)
3
"""
if self._cur is None:
# finished iterating
raise StopIteration
val = self._cur.val
self._cur = self._cur.next_node
return val
def __init__(self, iterable: Iterable[Any] | None = None) -> None:
self._front: Any = None
self._back: Any = None
self._len: int = 0
if iterable is not None:
# append every value to the deque
for val in iterable:
self.append(val)
def append(self, val: Any) -> None:
"""
Adds val to the end of the deque.
Time complexity: O(1)
>>> our_deque_1 = Deque([1, 2, 3])
>>> our_deque_1.append(4)
>>> our_deque_1
[1, 2, 3, 4]
>>> our_deque_2 = Deque('ab')
>>> our_deque_2.append('c')
>>> our_deque_2
['a', 'b', 'c']
>>> from collections import deque
>>> deque_collections_1 = deque([1, 2, 3])
>>> deque_collections_1.append(4)
>>> deque_collections_1
deque([1, 2, 3, 4])
>>> deque_collections_2 = deque('ab')
>>> deque_collections_2.append('c')
>>> deque_collections_2
deque(['a', 'b', 'c'])
>>> list(our_deque_1) == list(deque_collections_1)
True
>>> list(our_deque_2) == list(deque_collections_2)
True
"""
node = self._Node(val, None, None)
if self.is_empty():
# front = back
self._front = self._back = node
self._len = 1
else:
# connect nodes
self._back.next_node = node
node.prev_node = self._back
self._back = node # assign new back to the new node
self._len += 1
# make sure there were no errors
assert not self.is_empty(), "Error on appending value."
def appendleft(self, val: Any) -> None:
"""
Adds val to the beginning of the deque.
Time complexity: O(1)
>>> our_deque_1 = Deque([2, 3])
>>> our_deque_1.appendleft(1)
>>> our_deque_1
[1, 2, 3]
>>> our_deque_2 = Deque('bc')
>>> our_deque_2.appendleft('a')
>>> our_deque_2
['a', 'b', 'c']
>>> from collections import deque
>>> deque_collections_1 = deque([2, 3])
>>> deque_collections_1.appendleft(1)
>>> deque_collections_1
deque([1, 2, 3])
>>> deque_collections_2 = deque('bc')
>>> deque_collections_2.appendleft('a')
>>> deque_collections_2
deque(['a', 'b', 'c'])
>>> list(our_deque_1) == list(deque_collections_1)
True
>>> list(our_deque_2) == list(deque_collections_2)
True
"""
node = self._Node(val, None, None)
if self.is_empty():
# front = back
self._front = self._back = node
self._len = 1
else:
# connect nodes
node.next_node = self._front
self._front.prev_node = node
self._front = node # assign new front to the new node
self._len += 1
# make sure there were no errors
assert not self.is_empty(), "Error on appending value."
def extend(self, iterable: Iterable[Any]) -> None:
"""
Appends every value of iterable to the end of the deque.
Time complexity: O(n)
>>> our_deque_1 = Deque([1, 2, 3])
>>> our_deque_1.extend([4, 5])
>>> our_deque_1
[1, 2, 3, 4, 5]
>>> our_deque_2 = Deque('ab')
>>> our_deque_2.extend('cd')
>>> our_deque_2
['a', 'b', 'c', 'd']
>>> from collections import deque
>>> deque_collections_1 = deque([1, 2, 3])
>>> deque_collections_1.extend([4, 5])
>>> deque_collections_1
deque([1, 2, 3, 4, 5])
>>> deque_collections_2 = deque('ab')
>>> deque_collections_2.extend('cd')
>>> deque_collections_2
deque(['a', 'b', 'c', 'd'])
>>> list(our_deque_1) == list(deque_collections_1)
True
>>> list(our_deque_2) == list(deque_collections_2)
True
"""
for val in iterable:
self.append(val)
def extendleft(self, iterable: Iterable[Any]) -> None:
"""
Appends every value of iterable to the beginning of the deque.
Time complexity: O(n)
>>> our_deque_1 = Deque([1, 2, 3])
>>> our_deque_1.extendleft([0, -1])
>>> our_deque_1
[-1, 0, 1, 2, 3]
>>> our_deque_2 = Deque('cd')
>>> our_deque_2.extendleft('ba')
>>> our_deque_2
['a', 'b', 'c', 'd']
>>> from collections import deque
>>> deque_collections_1 = deque([1, 2, 3])
>>> deque_collections_1.extendleft([0, -1])
>>> deque_collections_1
deque([-1, 0, 1, 2, 3])
>>> deque_collections_2 = deque('cd')
>>> deque_collections_2.extendleft('ba')
>>> deque_collections_2
deque(['a', 'b', 'c', 'd'])
>>> list(our_deque_1) == list(deque_collections_1)
True
>>> list(our_deque_2) == list(deque_collections_2)
True
"""
for val in iterable:
self.appendleft(val)
def pop(self) -> Any:
"""
Removes the last element of the deque and returns it.
Time complexity: O(1)
@returns topop.val: the value of the node to pop.
>>> our_deque = Deque([1, 2, 3, 15182])
>>> our_popped = our_deque.pop()
>>> our_popped
15182
>>> our_deque
[1, 2, 3]
>>> from collections import deque
>>> deque_collections = deque([1, 2, 3, 15182])
>>> collections_popped = deque_collections.pop()
>>> collections_popped
15182
>>> deque_collections
deque([1, 2, 3])
>>> list(our_deque) == list(deque_collections)
True
>>> our_popped == collections_popped
True
"""
# make sure the deque has elements to pop
assert not self.is_empty(), "Deque is empty."
topop = self._back
self._back = self._back.prev_node  # set new back
if self._back is None:
    # the deque is now empty, so clear the front pointer as well
    self._front = None
else:
    # drop the last node - python will deallocate memory automatically
    self._back.next_node = None
self._len -= 1
return topop.val
def popleft(self) -> Any:
"""
Removes the first element of the deque and returns it.
Time complexity: O(1)
@returns topop.val: the value of the node to pop.
>>> our_deque = Deque([15182, 1, 2, 3])
>>> our_popped = our_deque.popleft()
>>> our_popped
15182
>>> our_deque
[1, 2, 3]
>>> from collections import deque
>>> deque_collections = deque([15182, 1, 2, 3])
>>> collections_popped = deque_collections.popleft()
>>> collections_popped
15182
>>> deque_collections
deque([1, 2, 3])
>>> list(our_deque) == list(deque_collections)
True
>>> our_popped == collections_popped
True
"""
# make sure the deque has elements to pop
assert not self.is_empty(), "Deque is empty."
topop = self._front
self._front = self._front.next_node  # set new front and drop the first node
if self._front is None:
    # the deque is now empty, so clear the back pointer as well
    self._back = None
else:
    self._front.prev_node = None
self._len -= 1
return topop.val
def is_empty(self) -> bool:
"""
Checks if the deque is empty.
Time complexity: O(1)
>>> our_deque = Deque([1, 2, 3])
>>> our_deque.is_empty()
False
>>> our_empty_deque = Deque()
>>> our_empty_deque.is_empty()
True
>>> from collections import deque
>>> empty_deque_collections = deque()
>>> list(our_empty_deque) == list(empty_deque_collections)
True
"""
return self._front is None
def __len__(self) -> int:
"""
Implements len() function. Returns the length of the deque.
Time complexity: O(1)
>>> our_deque = Deque([1, 2, 3])
>>> len(our_deque)
3
>>> our_empty_deque = Deque()
>>> len(our_empty_deque)
0
>>> from collections import deque
>>> deque_collections = deque([1, 2, 3])
>>> len(deque_collections)
3
>>> empty_deque_collections = deque()
>>> len(empty_deque_collections)
0
>>> len(our_empty_deque) == len(empty_deque_collections)
True
"""
return self._len
def __eq__(self, other: object) -> bool:
"""
Implements "==" operator. Returns if *self* is equal to *other*.
Time complexity: O(n)
>>> our_deque_1 = Deque([1, 2, 3])
>>> our_deque_2 = Deque([1, 2, 3])
>>> our_deque_1 == our_deque_2
True
>>> our_deque_3 = Deque([1, 2])
>>> our_deque_1 == our_deque_3
False
>>> from collections import deque
>>> deque_collections_1 = deque([1, 2, 3])
>>> deque_collections_2 = deque([1, 2, 3])
>>> deque_collections_1 == deque_collections_2
True
>>> deque_collections_3 = deque([1, 2])
>>> deque_collections_1 == deque_collections_3
False
>>> (our_deque_1 == our_deque_2) == (deque_collections_1 == deque_collections_2)
True
>>> (our_deque_1 == our_deque_3) == (deque_collections_1 == deque_collections_3)
True
"""
if not isinstance(other, Deque):
return NotImplemented
me = self._front
oth = other._front
# if the lengths of the deques are not the same, they are not equal
if len(self) != len(other):
return False
while me is not None and oth is not None:
# compare every value
if me.val != oth.val:
return False
me = me.next_node
oth = oth.next_node
return True
def __iter__(self) -> Deque._Iterator:
"""
Implements iteration.
Time complexity: O(1)
>>> our_deque = Deque([1, 2, 3])
>>> for v in our_deque:
... print(v)
1
2
3
>>> from collections import deque
>>> deque_collections = deque([1, 2, 3])
>>> for v in deque_collections:
... print(v)
1
2
3
"""
return Deque._Iterator(self._front)
def __repr__(self) -> str:
"""
Implements representation of the deque.
Represents it as a list, with its values between '[' and ']'.
Time complexity: O(n)
>>> our_deque = Deque([1, 2, 3])
>>> our_deque
[1, 2, 3]
"""
values_list = []
aux = self._front
while aux is not None:
# append the values in a list to display
values_list.append(aux.val)
aux = aux.next_node
return f"[{', '.join(repr(val) for val in values_list)}]"
if __name__ == "__main__":
import doctest
doctest.testmod()
| -1 |
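A short usage sketch, assuming the Deque class above is in scope; it mirrors collections.deque for the basic operations.

from collections import deque

ours, theirs = Deque("bcd"), deque("bcd")
for container in (ours, theirs):
    container.appendleft("a")
    container.append("e")
print(list(ours) == list(theirs))        # True
print(ours.pop(), theirs.pop())          # e e
print(ours.popleft(), theirs.popleft())  # a a
print(len(ours), len(theirs))            # 3 3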
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| -1 |
||
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| #!/usr/bin/env python3
"""
This is a pure Python implementation of binary search algorithms.
For doctests run the following command:
python3 -m doctest -v binary_search.py
For manual testing run:
python3 binary_search.py
"""
from __future__ import annotations
import bisect
def bisect_left(
sorted_collection: list[int], item: int, lo: int = 0, hi: int = -1
) -> int:
"""
Locates the first element in a sorted array that is larger than or equal to a
given value.
It has the same interface as
https://docs.python.org/3/library/bisect.html#bisect.bisect_left .
:param sorted_collection: some ascending sorted collection with comparable items
:param item: item to bisect
:param lo: lowest index to consider (as in sorted_collection[lo:hi])
:param hi: past the highest index to consider (as in sorted_collection[lo:hi])
:return: index i such that all values in sorted_collection[lo:i] are < item and all
values in sorted_collection[i:hi] are >= item.
Examples:
>>> bisect_left([0, 5, 7, 10, 15], 0)
0
>>> bisect_left([0, 5, 7, 10, 15], 6)
2
>>> bisect_left([0, 5, 7, 10, 15], 20)
5
>>> bisect_left([0, 5, 7, 10, 15], 15, 1, 3)
3
>>> bisect_left([0, 5, 7, 10, 15], 6, 2)
2
"""
if hi < 0:
hi = len(sorted_collection)
while lo < hi:
mid = lo + (hi - lo) // 2
if sorted_collection[mid] < item:
lo = mid + 1
else:
hi = mid
return lo
def bisect_right(
sorted_collection: list[int], item: int, lo: int = 0, hi: int = -1
) -> int:
"""
Locates the first element in a sorted array that is larger than a given value.
It has the same interface as
https://docs.python.org/3/library/bisect.html#bisect.bisect_right .
:param sorted_collection: some ascending sorted collection with comparable items
:param item: item to bisect
:param lo: lowest index to consider (as in sorted_collection[lo:hi])
:param hi: past the highest index to consider (as in sorted_collection[lo:hi])
:return: index i such that all values in sorted_collection[lo:i] are <= item and
all values in sorted_collection[i:hi] are > item.
Examples:
>>> bisect_right([0, 5, 7, 10, 15], 0)
1
>>> bisect_right([0, 5, 7, 10, 15], 15)
5
>>> bisect_right([0, 5, 7, 10, 15], 6)
2
>>> bisect_right([0, 5, 7, 10, 15], 15, 1, 3)
3
>>> bisect_right([0, 5, 7, 10, 15], 6, 2)
2
"""
if hi < 0:
hi = len(sorted_collection)
while lo < hi:
mid = lo + (hi - lo) // 2
if sorted_collection[mid] <= item:
lo = mid + 1
else:
hi = mid
return lo
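# Note (illustrative): for any item, bisect_right(...) - bisect_left(...) equals the
# number of occurrences of that item in the collection, e.g. for [0, 5, 5, 7] and
# item 5 the two functions return 3 and 1, so the item occurs 3 - 1 = 2 times.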
def insort_left(
sorted_collection: list[int], item: int, lo: int = 0, hi: int = -1
) -> None:
"""
Inserts a given value into a sorted array before other values with the same value.
It has the same interface as
https://docs.python.org/3/library/bisect.html#bisect.insort_left .
:param sorted_collection: some ascending sorted collection with comparable items
:param item: item to insert
:param lo: lowest index to consider (as in sorted_collection[lo:hi])
:param hi: past the highest index to consider (as in sorted_collection[lo:hi])
Examples:
>>> sorted_collection = [0, 5, 7, 10, 15]
>>> insort_left(sorted_collection, 6)
>>> sorted_collection
[0, 5, 6, 7, 10, 15]
>>> sorted_collection = [(0, 0), (5, 5), (7, 7), (10, 10), (15, 15)]
>>> item = (5, 5)
>>> insort_left(sorted_collection, item)
>>> sorted_collection
[(0, 0), (5, 5), (5, 5), (7, 7), (10, 10), (15, 15)]
>>> item is sorted_collection[1]
True
>>> item is sorted_collection[2]
False
>>> sorted_collection = [0, 5, 7, 10, 15]
>>> insort_left(sorted_collection, 20)
>>> sorted_collection
[0, 5, 7, 10, 15, 20]
>>> sorted_collection = [0, 5, 7, 10, 15]
>>> insort_left(sorted_collection, 15, 1, 3)
>>> sorted_collection
[0, 5, 7, 15, 10, 15]
"""
sorted_collection.insert(bisect_left(sorted_collection, item, lo, hi), item)
def insort_right(
sorted_collection: list[int], item: int, lo: int = 0, hi: int = -1
) -> None:
"""
Inserts a given value into a sorted array after other values with the same value.
It has the same interface as
https://docs.python.org/3/library/bisect.html#bisect.insort_right .
:param sorted_collection: some ascending sorted collection with comparable items
:param item: item to insert
:param lo: lowest index to consider (as in sorted_collection[lo:hi])
:param hi: past the highest index to consider (as in sorted_collection[lo:hi])
Examples:
>>> sorted_collection = [0, 5, 7, 10, 15]
>>> insort_right(sorted_collection, 6)
>>> sorted_collection
[0, 5, 6, 7, 10, 15]
>>> sorted_collection = [(0, 0), (5, 5), (7, 7), (10, 10), (15, 15)]
>>> item = (5, 5)
>>> insort_right(sorted_collection, item)
>>> sorted_collection
[(0, 0), (5, 5), (5, 5), (7, 7), (10, 10), (15, 15)]
>>> item is sorted_collection[1]
False
>>> item is sorted_collection[2]
True
>>> sorted_collection = [0, 5, 7, 10, 15]
>>> insort_right(sorted_collection, 20)
>>> sorted_collection
[0, 5, 7, 10, 15, 20]
>>> sorted_collection = [0, 5, 7, 10, 15]
>>> insort_right(sorted_collection, 15, 1, 3)
>>> sorted_collection
[0, 5, 7, 15, 10, 15]
"""
sorted_collection.insert(bisect_right(sorted_collection, item, lo, hi), item)
def binary_search(sorted_collection: list[int], item: int) -> int | None:
"""Pure implementation of binary search algorithm in Python
Be careful: the collection must be sorted in ascending order, otherwise the result
will be unpredictable.
:param sorted_collection: some ascending sorted collection with comparable items
:param item: item value to search
:return: index of found item or None if item is not found
Examples:
>>> binary_search([0, 5, 7, 10, 15], 0)
0
>>> binary_search([0, 5, 7, 10, 15], 15)
4
>>> binary_search([0, 5, 7, 10, 15], 5)
1
>>> binary_search([0, 5, 7, 10, 15], 6)
"""
left = 0
right = len(sorted_collection) - 1
while left <= right:
midpoint = left + (right - left) // 2
current_item = sorted_collection[midpoint]
if current_item == item:
return midpoint
elif item < current_item:
right = midpoint - 1
else:
left = midpoint + 1
return None
def binary_search_std_lib(sorted_collection: list[int], item: int) -> int | None:
"""Pure implementation of binary search algorithm in Python using stdlib
Be careful: the collection must be sorted in ascending order, otherwise the result
will be unpredictable.
:param sorted_collection: some ascending sorted collection with comparable items
:param item: item value to search
:return: index of found item or None if item is not found
Examples:
>>> binary_search_std_lib([0, 5, 7, 10, 15], 0)
0
>>> binary_search_std_lib([0, 5, 7, 10, 15], 15)
4
>>> binary_search_std_lib([0, 5, 7, 10, 15], 5)
1
>>> binary_search_std_lib([0, 5, 7, 10, 15], 6)
"""
index = bisect.bisect_left(sorted_collection, item)
if index != len(sorted_collection) and sorted_collection[index] == item:
return index
return None
def binary_search_by_recursion(
sorted_collection: list[int], item: int, left: int, right: int
) -> int | None:
"""Pure implementation of binary search algorithm in Python by recursion
Be careful: the collection must be sorted in ascending order, otherwise the result
will be unpredictable.
First recursion should be started with left=0 and right=(len(sorted_collection)-1)
:param sorted_collection: some ascending sorted collection with comparable items
:param item: item value to search
:return: index of found item or None if item is not found
Examples:
>>> binary_search_by_recursion([0, 5, 7, 10, 15], 0, 0, 4)
0
>>> binary_search_by_recursion([0, 5, 7, 10, 15], 15, 0, 4)
4
>>> binary_search_by_recursion([0, 5, 7, 10, 15], 5, 0, 4)
1
>>> binary_search_by_recursion([0, 5, 7, 10, 15], 6, 0, 4)
"""
if right < left:
return None
midpoint = left + (right - left) // 2
if sorted_collection[midpoint] == item:
return midpoint
elif sorted_collection[midpoint] > item:
return binary_search_by_recursion(sorted_collection, item, left, midpoint - 1)
else:
return binary_search_by_recursion(sorted_collection, item, midpoint + 1, right)
if __name__ == "__main__":
user_input = input("Enter numbers separated by comma:\n").strip()
collection = sorted(int(item) for item in user_input.split(","))
target = int(input("Enter a single number to be found in the list:\n"))
result = binary_search(collection, target)
if result is None:
print(f"{target} was not found in {collection}.")
else:
print(f"{target} was found at position {result} in {collection}.")
| #!/usr/bin/env python3
"""
This is a pure Python implementation of binary search algorithms.
For doctests run the following command:
python3 -m doctest -v binary_search.py
For manual testing run:
python3 binary_search.py
"""
from __future__ import annotations
import bisect
def bisect_left(
sorted_collection: list[int], item: int, lo: int = 0, hi: int = -1
) -> int:
"""
Locates the first element in a sorted array that is larger than or equal to a
given value.
It has the same interface as
https://docs.python.org/3/library/bisect.html#bisect.bisect_left .
:param sorted_collection: some ascending sorted collection with comparable items
:param item: item to bisect
:param lo: lowest index to consider (as in sorted_collection[lo:hi])
:param hi: past the highest index to consider (as in sorted_collection[lo:hi])
:return: index i such that all values in sorted_collection[lo:i] are < item and all
values in sorted_collection[i:hi] are >= item.
Examples:
>>> bisect_left([0, 5, 7, 10, 15], 0)
0
>>> bisect_left([0, 5, 7, 10, 15], 6)
2
>>> bisect_left([0, 5, 7, 10, 15], 20)
5
>>> bisect_left([0, 5, 7, 10, 15], 15, 1, 3)
3
>>> bisect_left([0, 5, 7, 10, 15], 6, 2)
2
"""
if hi < 0:
hi = len(sorted_collection)
while lo < hi:
mid = lo + (hi - lo) // 2
if sorted_collection[mid] < item:
lo = mid + 1
else:
hi = mid
return lo
def bisect_right(
sorted_collection: list[int], item: int, lo: int = 0, hi: int = -1
) -> int:
"""
Locates the first element in a sorted array that is larger than a given value.
It has the same interface as
https://docs.python.org/3/library/bisect.html#bisect.bisect_right .
:param sorted_collection: some ascending sorted collection with comparable items
:param item: item to bisect
:param lo: lowest index to consider (as in sorted_collection[lo:hi])
:param hi: past the highest index to consider (as in sorted_collection[lo:hi])
:return: index i such that all values in sorted_collection[lo:i] are <= item and
all values in sorted_collection[i:hi] are > item.
Examples:
>>> bisect_right([0, 5, 7, 10, 15], 0)
1
>>> bisect_right([0, 5, 7, 10, 15], 15)
5
>>> bisect_right([0, 5, 7, 10, 15], 6)
2
>>> bisect_right([0, 5, 7, 10, 15], 15, 1, 3)
3
>>> bisect_right([0, 5, 7, 10, 15], 6, 2)
2
"""
if hi < 0:
hi = len(sorted_collection)
while lo < hi:
mid = lo + (hi - lo) // 2
if sorted_collection[mid] <= item:
lo = mid + 1
else:
hi = mid
return lo
def insort_left(
sorted_collection: list[int], item: int, lo: int = 0, hi: int = -1
) -> None:
"""
Inserts a given value into a sorted array before other values with the same value.
It has the same interface as
https://docs.python.org/3/library/bisect.html#bisect.insort_left .
:param sorted_collection: some ascending sorted collection with comparable items
:param item: item to insert
:param lo: lowest index to consider (as in sorted_collection[lo:hi])
:param hi: past the highest index to consider (as in sorted_collection[lo:hi])
Examples:
>>> sorted_collection = [0, 5, 7, 10, 15]
>>> insort_left(sorted_collection, 6)
>>> sorted_collection
[0, 5, 6, 7, 10, 15]
>>> sorted_collection = [(0, 0), (5, 5), (7, 7), (10, 10), (15, 15)]
>>> item = (5, 5)
>>> insort_left(sorted_collection, item)
>>> sorted_collection
[(0, 0), (5, 5), (5, 5), (7, 7), (10, 10), (15, 15)]
>>> item is sorted_collection[1]
True
>>> item is sorted_collection[2]
False
>>> sorted_collection = [0, 5, 7, 10, 15]
>>> insort_left(sorted_collection, 20)
>>> sorted_collection
[0, 5, 7, 10, 15, 20]
>>> sorted_collection = [0, 5, 7, 10, 15]
>>> insort_left(sorted_collection, 15, 1, 3)
>>> sorted_collection
[0, 5, 7, 15, 10, 15]
"""
sorted_collection.insert(bisect_left(sorted_collection, item, lo, hi), item)
def insort_right(
sorted_collection: list[int], item: int, lo: int = 0, hi: int = -1
) -> None:
"""
Inserts a given value into a sorted array after other values with the same value.
It has the same interface as
https://docs.python.org/3/library/bisect.html#bisect.insort_right .
:param sorted_collection: some ascending sorted collection with comparable items
:param item: item to insert
:param lo: lowest index to consider (as in sorted_collection[lo:hi])
:param hi: past the highest index to consider (as in sorted_collection[lo:hi])
Examples:
>>> sorted_collection = [0, 5, 7, 10, 15]
>>> insort_right(sorted_collection, 6)
>>> sorted_collection
[0, 5, 6, 7, 10, 15]
>>> sorted_collection = [(0, 0), (5, 5), (7, 7), (10, 10), (15, 15)]
>>> item = (5, 5)
>>> insort_right(sorted_collection, item)
>>> sorted_collection
[(0, 0), (5, 5), (5, 5), (7, 7), (10, 10), (15, 15)]
>>> item is sorted_collection[1]
False
>>> item is sorted_collection[2]
True
>>> sorted_collection = [0, 5, 7, 10, 15]
>>> insort_right(sorted_collection, 20)
>>> sorted_collection
[0, 5, 7, 10, 15, 20]
>>> sorted_collection = [0, 5, 7, 10, 15]
>>> insort_right(sorted_collection, 15, 1, 3)
>>> sorted_collection
[0, 5, 7, 15, 10, 15]
"""
sorted_collection.insert(bisect_right(sorted_collection, item, lo, hi), item)
def binary_search(sorted_collection: list[int], item: int) -> int | None:
"""Pure implementation of binary search algorithm in Python
Be careful: the collection must be sorted in ascending order, otherwise the result
will be unpredictable.
:param sorted_collection: some ascending sorted collection with comparable items
:param item: item value to search
:return: index of found item or None if item is not found
Examples:
>>> binary_search([0, 5, 7, 10, 15], 0)
0
>>> binary_search([0, 5, 7, 10, 15], 15)
4
>>> binary_search([0, 5, 7, 10, 15], 5)
1
>>> binary_search([0, 5, 7, 10, 15], 6)
"""
left = 0
right = len(sorted_collection) - 1
while left <= right:
midpoint = left + (right - left) // 2
current_item = sorted_collection[midpoint]
if current_item == item:
return midpoint
elif item < current_item:
right = midpoint - 1
else:
left = midpoint + 1
return None
def binary_search_std_lib(sorted_collection: list[int], item: int) -> int | None:
"""Pure implementation of binary search algorithm in Python using stdlib
Be careful: the collection must be sorted in ascending order, otherwise the result
will be unpredictable.
:param sorted_collection: some ascending sorted collection with comparable items
:param item: item value to search
:return: index of found item or None if item is not found
Examples:
>>> binary_search_std_lib([0, 5, 7, 10, 15], 0)
0
>>> binary_search_std_lib([0, 5, 7, 10, 15], 15)
4
>>> binary_search_std_lib([0, 5, 7, 10, 15], 5)
1
>>> binary_search_std_lib([0, 5, 7, 10, 15], 6)
"""
index = bisect.bisect_left(sorted_collection, item)
if index != len(sorted_collection) and sorted_collection[index] == item:
return index
return None
def binary_search_by_recursion(
sorted_collection: list[int], item: int, left: int, right: int
) -> int | None:
"""Pure implementation of binary search algorithm in Python by recursion
Be careful: the collection must be sorted in ascending order, otherwise the result
will be unpredictable.
First recursion should be started with left=0 and right=(len(sorted_collection)-1)
:param sorted_collection: some ascending sorted collection with comparable items
:param item: item value to search
:return: index of found item or None if item is not found
Examples:
>>> binary_search_by_recursion([0, 5, 7, 10, 15], 0, 0, 4)
0
>>> binary_search_by_recursion([0, 5, 7, 10, 15], 15, 0, 4)
4
>>> binary_search_by_recursion([0, 5, 7, 10, 15], 5, 0, 4)
1
>>> binary_search_by_recursion([0, 5, 7, 10, 15], 6, 0, 4)
"""
if right < left:
return None
midpoint = left + (right - left) // 2
if sorted_collection[midpoint] == item:
return midpoint
elif sorted_collection[midpoint] > item:
return binary_search_by_recursion(sorted_collection, item, left, midpoint - 1)
else:
return binary_search_by_recursion(sorted_collection, item, midpoint + 1, right)
if __name__ == "__main__":
user_input = input("Enter numbers separated by comma:\n").strip()
collection = sorted(int(item) for item in user_input.split(","))
target = int(input("Enter a single number to be found in the list:\n"))
result = binary_search(collection, target)
if result is None:
print(f"{target} was not found in {collection}.")
else:
print(f"{target} was found at position {result} in {collection}.")
| -1 |
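A small consistency check, assuming the three search functions above are in scope: they should agree with list.index on items that are present.

values = [0, 5, 7, 10, 15]
for target in values:
    assert binary_search(values, target) == values.index(target)
    assert binary_search_std_lib(values, target) == values.index(target)
    assert binary_search_by_recursion(values, target, 0, len(values) - 1) == values.index(target)
print(binary_search(values, 6))  # None, since 6 is absent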
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # https://www.tutorialspoint.com/python3/bitwise_operators_example.htm
def binary_and(a: int, b: int) -> str:
"""
Take in 2 integers, convert them to binary,
return a binary number that is the
result of a binary and operation on the integers provided.
>>> binary_and(25, 32)
'0b000000'
>>> binary_and(37, 50)
'0b100000'
>>> binary_and(21, 30)
'0b10100'
>>> binary_and(58, 73)
'0b0001000'
>>> binary_and(0, 255)
'0b00000000'
>>> binary_and(256, 256)
'0b100000000'
>>> binary_and(0, -1)
Traceback (most recent call last):
...
ValueError: the value of both inputs must be positive
>>> binary_and(0, 1.1)
Traceback (most recent call last):
...
TypeError: 'float' object cannot be interpreted as an integer
>>> binary_and("0", "1")
Traceback (most recent call last):
...
TypeError: '<' not supported between instances of 'str' and 'int'
"""
if a < 0 or b < 0:
raise ValueError("the value of both inputs must be positive")
a_binary = str(bin(a))[2:] # remove the leading "0b"
b_binary = str(bin(b))[2:] # remove the leading "0b"
max_len = max(len(a_binary), len(b_binary))
return "0b" + "".join(
str(int(char_a == "1" and char_b == "1"))
for char_a, char_b in zip(a_binary.zfill(max_len), b_binary.zfill(max_len))
)
if __name__ == "__main__":
import doctest
doctest.testmod()
| # https://www.tutorialspoint.com/python3/bitwise_operators_example.htm
def binary_and(a: int, b: int) -> str:
"""
Take in 2 integers, convert them to binary,
return a binary number that is the
result of a binary and operation on the integers provided.
>>> binary_and(25, 32)
'0b000000'
>>> binary_and(37, 50)
'0b100000'
>>> binary_and(21, 30)
'0b10100'
>>> binary_and(58, 73)
'0b0001000'
>>> binary_and(0, 255)
'0b00000000'
>>> binary_and(256, 256)
'0b100000000'
>>> binary_and(0, -1)
Traceback (most recent call last):
...
ValueError: the value of both inputs must be positive
>>> binary_and(0, 1.1)
Traceback (most recent call last):
...
TypeError: 'float' object cannot be interpreted as an integer
>>> binary_and("0", "1")
Traceback (most recent call last):
...
TypeError: '<' not supported between instances of 'str' and 'int'
"""
if a < 0 or b < 0:
raise ValueError("the value of both inputs must be positive")
a_binary = str(bin(a))[2:] # remove the leading "0b"
b_binary = str(bin(b))[2:] # remove the leading "0b"
max_len = max(len(a_binary), len(b_binary))
return "0b" + "".join(
str(int(char_a == "1" and char_b == "1"))
for char_a, char_b in zip(a_binary.zfill(max_len), b_binary.zfill(max_len))
)
if __name__ == "__main__":
import doctest
doctest.testmod()
| -1 |
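The built-in & operator yields the same bits as binary_and, just without the zero padding; a quick comparison, assuming binary_and from above is in scope.

a, b = 37, 50
print(binary_and(a, b))                     # 0b100000
print(bin(a & b))                           # 0b100000
print(int(binary_and(a, b), 2) == (a & b))  # True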
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| def text_justification(word: str, max_width: int) -> list:
"""
Will format the string such that each line has exactly
(max_width) characters and is fully (left and right) justified,
and return the list of justified text.
example 1:
string = "This is an example of text justification."
max_width = 16
output = ['This    is    an',
'example  of text',
'justification.  ']
>>> text_justification("This is an example of text justification.", 16)
['This    is    an', 'example  of text', 'justification.  ']
example 2:
string = "Two roads diverged in a yellow wood"
max_width = 16
output = ['Two        roads',
'diverged   in  a',
'yellow wood     ']
>>> text_justification("Two roads diverged in a yellow wood", 16)
['Two        roads', 'diverged   in  a', 'yellow wood     ']
Time complexity: O(m*n)
Space complexity: O(m*n)
"""
# Converting string into list of strings split by a space
words = word.split()
def justify(line: list, width: int, max_width: int) -> str:
overall_spaces_count = max_width - width
words_count = len(line)
if len(line) == 1:
            # if there is only one word in the line
# just insert overall_spaces_count for the remainder of line
return line[0] + " " * overall_spaces_count
else:
spaces_to_insert_between_words = words_count - 1
# num_spaces_between_words_list[i] : tells you to insert
# num_spaces_between_words_list[i] spaces
# after word on line[i]
num_spaces_between_words_list = spaces_to_insert_between_words * [
overall_spaces_count // spaces_to_insert_between_words
]
spaces_count_in_locations = (
overall_spaces_count % spaces_to_insert_between_words
)
# distribute spaces via round robin to the left words
for i in range(spaces_count_in_locations):
num_spaces_between_words_list[i] += 1
aligned_words_list = []
for i in range(spaces_to_insert_between_words):
# add the word
aligned_words_list.append(line[i])
# add the spaces to insert
aligned_words_list.append(num_spaces_between_words_list[i] * " ")
# just add the last word to the sentence
aligned_words_list.append(line[-1])
# join the aligned words list to form a justified line
return "".join(aligned_words_list)
answer = []
line: list[str] = []
width = 0
for word in words:
if width + len(word) + len(line) <= max_width:
# keep adding words until we can fill out max_width
# width = sum of length of all words (without overall_spaces_count)
# len(word) = length of current word
# len(line) = number of overall_spaces_count to insert between words
line.append(word)
width += len(word)
else:
# justify the line and add it to result
answer.append(justify(line, width, max_width))
# reset new line and new width
line, width = [word], len(word)
remaining_spaces = max_width - width - len(line)
answer.append(" ".join(line) + (remaining_spaces + 1) * " ")
return answer
if __name__ == "__main__":
from doctest import testmod
testmod()
| def text_justification(word: str, max_width: int) -> list:
"""
Will format the string such that each line has exactly
(max_width) characters and is fully (left and right) justified,
and return the list of justified text.
example 1:
string = "This is an example of text justification."
max_width = 16
output = ['This is an',
'example of text',
'justification. ']
>>> text_justification("This is an example of text justification.", 16)
['This is an', 'example of text', 'justification. ']
example 2:
string = "Two roads diverged in a yellow wood"
max_width = 16
output = ['Two roads',
'diverged in a',
'yellow wood ']
>>> text_justification("Two roads diverged in a yellow wood", 16)
['Two roads', 'diverged in a', 'yellow wood ']
Time complexity: O(m*n)
Space complexity: O(m*n)
"""
# Converting string into list of strings split by a space
words = word.split()
def justify(line: list, width: int, max_width: int) -> str:
overall_spaces_count = max_width - width
words_count = len(line)
if len(line) == 1:
            # if there is only one word in the line
# just insert overall_spaces_count for the remainder of line
return line[0] + " " * overall_spaces_count
else:
spaces_to_insert_between_words = words_count - 1
# num_spaces_between_words_list[i] : tells you to insert
# num_spaces_between_words_list[i] spaces
# after word on line[i]
num_spaces_between_words_list = spaces_to_insert_between_words * [
overall_spaces_count // spaces_to_insert_between_words
]
spaces_count_in_locations = (
overall_spaces_count % spaces_to_insert_between_words
)
# distribute spaces via round robin to the left words
for i in range(spaces_count_in_locations):
num_spaces_between_words_list[i] += 1
aligned_words_list = []
for i in range(spaces_to_insert_between_words):
# add the word
aligned_words_list.append(line[i])
# add the spaces to insert
aligned_words_list.append(num_spaces_between_words_list[i] * " ")
# just add the last word to the sentence
aligned_words_list.append(line[-1])
# join the aligned words list to form a justified line
return "".join(aligned_words_list)
answer = []
line: list[str] = []
width = 0
for word in words:
if width + len(word) + len(line) <= max_width:
# keep adding words until we can fill out max_width
# width = sum of length of all words (without overall_spaces_count)
# len(word) = length of current word
# len(line) = number of overall_spaces_count to insert between words
line.append(word)
width += len(word)
else:
# justify the line and add it to result
answer.append(justify(line, width, max_width))
# reset new line and new width
line, width = [word], len(word)
remaining_spaces = max_width - width - len(line)
answer.append(" ".join(line) + (remaining_spaces + 1) * " ")
return answer
if __name__ == "__main__":
from doctest import testmod
testmod()
| -1 |
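The key step in `justify()` above is distributing the leftover spaces round-robin so that the leftmost gaps absorb the remainder. Below is a small illustrative sketch of just that distribution step; the helper name `distribute_spaces` is made up for this example and is not part of the listing above.

```python
def distribute_spaces(total_spaces: int, gaps: int) -> list[int]:
    """
    Round-robin split of total_spaces across gaps, extras going to the leftmost gaps.

    >>> distribute_spaces(5, 2)   # the 'diverged in a' line at width 16: 5 spaces, 2 gaps
    [3, 2]
    >>> distribute_spaces(7, 3)
    [3, 2, 2]
    """
    base, extra = divmod(total_spaces, gaps)
    # the first `extra` gaps receive one additional space each
    return [base + 1 if i < extra else base for i in range(gaps)]
```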
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| from __future__ import annotations
class Node:
def __init__(self, data=None):
self.data = data
self.next = None
def __repr__(self):
"""Returns a visual representation of the node and all its following nodes."""
string_rep = []
temp = self
while temp:
string_rep.append(f"{temp.data}")
temp = temp.next
return "->".join(string_rep)
def make_linked_list(elements_list: list):
"""Creates a Linked List from the elements of the given sequence
(list/tuple) and returns the head of the Linked List.
>>> make_linked_list([])
Traceback (most recent call last):
...
Exception: The Elements List is empty
>>> make_linked_list([7])
7
>>> make_linked_list(['abc'])
abc
>>> make_linked_list([7, 25])
7->25
"""
if not elements_list:
raise Exception("The Elements List is empty")
current = head = Node(elements_list[0])
for i in range(1, len(elements_list)):
current.next = Node(elements_list[i])
current = current.next
return head
def print_reverse(head_node: Node) -> None:
"""Prints the elements of the given Linked List in reverse order
>>> print_reverse([])
>>> linked_list = make_linked_list([69, 88, 73])
>>> print_reverse(linked_list)
73
88
69
"""
if head_node is not None and isinstance(head_node, Node):
print_reverse(head_node.next)
print(head_node.data)
def main():
from doctest import testmod
testmod()
linked_list = make_linked_list([14, 52, 14, 12, 43])
print("Linked List:")
print(linked_list)
print("Elements in Reverse:")
print_reverse(linked_list)
if __name__ == "__main__":
main()
| from __future__ import annotations
class Node:
def __init__(self, data=None):
self.data = data
self.next = None
def __repr__(self):
"""Returns a visual representation of the node and all its following nodes."""
string_rep = []
temp = self
while temp:
string_rep.append(f"{temp.data}")
temp = temp.next
return "->".join(string_rep)
def make_linked_list(elements_list: list):
"""Creates a Linked List from the elements of the given sequence
(list/tuple) and returns the head of the Linked List.
>>> make_linked_list([])
Traceback (most recent call last):
...
Exception: The Elements List is empty
>>> make_linked_list([7])
7
>>> make_linked_list(['abc'])
abc
>>> make_linked_list([7, 25])
7->25
"""
if not elements_list:
raise Exception("The Elements List is empty")
current = head = Node(elements_list[0])
for i in range(1, len(elements_list)):
current.next = Node(elements_list[i])
current = current.next
return head
def print_reverse(head_node: Node) -> None:
"""Prints the elements of the given Linked List in reverse order
>>> print_reverse([])
>>> linked_list = make_linked_list([69, 88, 73])
>>> print_reverse(linked_list)
73
88
69
"""
if head_node is not None and isinstance(head_node, Node):
print_reverse(head_node.next)
print(head_node.data)
def main():
from doctest import testmod
testmod()
linked_list = make_linked_list([14, 52, 14, 12, 43])
print("Linked List:")
print(linked_list)
print("Elements in Reverse:")
print_reverse(linked_list)
if __name__ == "__main__":
main()
| -1 |
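The `print_reverse` above recurses all the way to the tail before printing, so a very long list could hit Python's recursion limit. An iterative variant is sketched below; it is illustrative only and reuses the `Node` and `make_linked_list` definitions from the listing above.

```python
def print_reverse_iterative(head_node: Node | None) -> None:
    """Walk the list once, then print the collected data in reverse order."""
    values = []
    while head_node is not None:
        values.append(head_node.data)
        head_node = head_node.next
    for value in reversed(values):
        print(value)


# Example (assuming the definitions above are in scope):
# print_reverse_iterative(make_linked_list([69, 88, 73]))  # prints 73, 88, 69
```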
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| -1 |
||
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| -1 |
||
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| blank_issues_enabled: false
contact_links:
- name: Discord community
url: https://discord.gg/c7MnfGFGa6
about: Have any questions or need any help? Please contact us via Discord
| blank_issues_enabled: false
contact_links:
- name: Discord community
url: https://discord.gg/c7MnfGFGa6
about: Have any questions or need any help? Please contact us via Discord
| -1 |
TheAlgorithms/Python | 8,551 | New gitter link added or replaced | ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| KaixLina | "2023-03-26T04:26:40Z" | "2023-03-26T15:19:19Z" | 3f9150c1b2dd15808a4962e03a1455f8d825512c | 7cdb011ba440a07768179bfaea190bddefc890d8 | New gitter link added or replaced. ### Describe your change:
Fixes #8546
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # To get an insight into Greedy Algorithm through the Knapsack problem
"""
A shopkeeper has bags of wheat that each have different weights and different profits.
eg.
profit 5 8 7 1 12 3 4
weight 2 7 1 6 4 2 5
max_weight 100
Constraints:
max_weight > 0
profit[i] >= 0
weight[i] >= 0
Calculate the maximum profit that the shopkeeper can make given the maximum weight that can
be carried.
"""
def calc_profit(profit: list, weight: list, max_weight: int) -> int:
"""
Function description is as follows-
:param profit: Take a list of profits
    :param weight: Take a list of weights of bags corresponding to the profits
:param max_weight: Maximum weight that could be carried
:return: Maximum expected gain
>>> calc_profit([1, 2, 3], [3, 4, 5], 15)
6
>>> calc_profit([10, 9 , 8], [3 ,4 , 5], 25)
27
"""
if len(profit) != len(weight):
raise ValueError("The length of profit and weight must be same.")
if max_weight <= 0:
raise ValueError("max_weight must greater than zero.")
if any(p < 0 for p in profit):
raise ValueError("Profit can not be negative.")
if any(w < 0 for w in weight):
raise ValueError("Weight can not be negative.")
    # List created to store the profit gained per 1 kg for each bag
    # respectively. Calculate and append profit/weight for each element.
profit_by_weight = [p / w for p, w in zip(profit, weight)]
# Creating a copy of the list and sorting profit/weight in ascending order
sorted_profit_by_weight = sorted(profit_by_weight)
# declaring useful variables
length = len(sorted_profit_by_weight)
limit = 0
gain = 0
i = 0
    # loop till the total weight does not reach the max limit (e.g. 15 kg) and till i < length
while limit <= max_weight and i < length:
# flag value for encountered greatest element in sorted_profit_by_weight
biggest_profit_by_weight = sorted_profit_by_weight[length - i - 1]
"""
Calculate the index of the biggest_profit_by_weight in profit_by_weight list.
        This will give the index of the first element encountered that is equal to
        biggest_profit_by_weight. There may be one or more values equal to
        biggest_profit_by_weight, but index() always returns the very first match.
        To avoid picking the same bag twice, the used values in profit_by_weight are
        set to -1, which is safe because neither profit nor weight can be negative.
"""
index = profit_by_weight.index(biggest_profit_by_weight)
profit_by_weight[index] = -1
        # check if the whole bag still fits within the remaining capacity
        # (max_weight - limit).
if max_weight - limit >= weight[index]:
limit += weight[index]
            # Add the full profit for this bag, since the whole bag is taken
            # (fraction = weight[index] / weight[index] = 1)
gain += 1 * profit[index]
else:
            # Since this bag is heavier than the remaining capacity, take only the
            # remaining kgs and add the proportional share of its profit.
# weight remaining / weight[index]
gain += (max_weight - limit) / weight[index] * profit[index]
break
i += 1
return gain
if __name__ == "__main__":
print(
"Input profits, weights, and then max_weight (all positive ints) separated by "
"spaces."
)
profit = [int(x) for x in input("Input profits separated by spaces: ").split()]
weight = [int(x) for x in input("Input weights separated by spaces: ").split()]
max_weight = int(input("Max weight allowed: "))
# Function Call
calc_profit(profit, weight, max_weight)
| # To get an insight into Greedy Algorithm through the Knapsack problem
"""
A shopkeeper has bags of wheat that each have different weights and different profits.
eg.
profit 5 8 7 1 12 3 4
weight 2 7 1 6 4 2 5
max_weight 100
Constraints:
max_weight > 0
profit[i] >= 0
weight[i] >= 0
Calculate the maximum profit that the shopkeeper can make given the maximum weight that can
be carried.
"""
def calc_profit(profit: list, weight: list, max_weight: int) -> int:
"""
Function description is as follows-
:param profit: Take a list of profits
    :param weight: Take a list of weights of bags corresponding to the profits
:param max_weight: Maximum weight that could be carried
:return: Maximum expected gain
>>> calc_profit([1, 2, 3], [3, 4, 5], 15)
6
>>> calc_profit([10, 9 , 8], [3 ,4 , 5], 25)
27
"""
if len(profit) != len(weight):
raise ValueError("The length of profit and weight must be same.")
if max_weight <= 0:
raise ValueError("max_weight must greater than zero.")
if any(p < 0 for p in profit):
raise ValueError("Profit can not be negative.")
if any(w < 0 for w in weight):
raise ValueError("Weight can not be negative.")
    # List created to store the profit gained per 1 kg for each bag
    # respectively. Calculate and append profit/weight for each element.
profit_by_weight = [p / w for p, w in zip(profit, weight)]
# Creating a copy of the list and sorting profit/weight in ascending order
sorted_profit_by_weight = sorted(profit_by_weight)
# declaring useful variables
length = len(sorted_profit_by_weight)
limit = 0
gain = 0
i = 0
    # loop till the total weight does not reach the max limit (e.g. 15 kg) and till i < length
while limit <= max_weight and i < length:
# flag value for encountered greatest element in sorted_profit_by_weight
biggest_profit_by_weight = sorted_profit_by_weight[length - i - 1]
"""
Calculate the index of the biggest_profit_by_weight in profit_by_weight list.
        This will give the index of the first element encountered that is equal to
        biggest_profit_by_weight. There may be one or more values equal to
        biggest_profit_by_weight, but index() always returns the very first match.
        To avoid picking the same bag twice, the used values in profit_by_weight are
        set to -1, which is safe because neither profit nor weight can be negative.
"""
index = profit_by_weight.index(biggest_profit_by_weight)
profit_by_weight[index] = -1
        # check if the whole bag still fits within the remaining capacity
        # (max_weight - limit).
if max_weight - limit >= weight[index]:
limit += weight[index]
            # Add the full profit for this bag, since the whole bag is taken
            # (fraction = weight[index] / weight[index] = 1)
gain += 1 * profit[index]
else:
            # Since this bag is heavier than the remaining capacity, take only the
            # remaining kgs and add the proportional share of its profit.
# weight remaining / weight[index]
gain += (max_weight - limit) / weight[index] * profit[index]
break
i += 1
return gain
if __name__ == "__main__":
print(
"Input profits, weights, and then max_weight (all positive ints) separated by "
"spaces."
)
profit = [int(x) for x in input("Input profits separated by spaces: ").split()]
weight = [int(x) for x in input("Input weights separated by spaces: ").split()]
max_weight = int(input("Max weight allowed: "))
# Function Call
calc_profit(profit, weight, max_weight)
| -1 |
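A quick usage sketch for the greedy routine above, using the numbers from its header comment. This is meant to be run in the same module as `calc_profit` (illustrative only); the expected values follow directly from the code shown.

```python
if __name__ == "__main__":
    profits = [5, 8, 7, 1, 12, 3, 4]
    weights = [2, 7, 1, 6, 4, 2, 5]

    # All bags fit (total weight 27 <= 100), so the gain is simply sum(profits) = 40
    print(calc_profit(profits, weights, 100))

    # With capacity 10 only 1 kg of the 7 kg bag fits, so its profit is taken
    # fractionally: 27 + 8/7, roughly 28.14
    print(calc_profit(profits, weights, 10))
```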
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # Configuration for probot-stale - https://github.com/probot/stale
# Number of days of inactivity before an Issue or Pull Request becomes stale
daysUntilStale: 30
# Number of days of inactivity before an Issue or Pull Request with the stale label is closed.
# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale.
daysUntilClose: 7
# Only issues or pull requests with all of these labels are checked if stale. Defaults to `[]` (disabled)
onlyLabels: []
# Issues or Pull Requests with these labels will never be considered stale. Set to `[]` to disable
exemptLabels:
- "Status: on hold"
# Set to true to ignore issues in a project (defaults to false)
exemptProjects: false
# Set to true to ignore issues in a milestone (defaults to false)
exemptMilestones: false
# Set to true to ignore issues with an assignee (defaults to false)
exemptAssignees: false
# Label to use when marking as stale
staleLabel: stale
# Limit the number of actions per hour, from 1-30. Default is 30
limitPerRun: 5
# Comment to post when removing the stale label.
# unmarkComment: >
# Your comment here.
# Optionally, specify configuration settings that are specific to just 'issues' or 'pulls':
pulls:
# Comment to post when marking as stale. Set to `false` to disable
markComment: >
This pull request has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
# Comment to post when closing a stale Pull Request.
closeComment: >
Please reopen this pull request once you commit the changes requested
or make improvements on the code. If this is not the case and you need
some help, feel free to seek help from our [Gitter](https://app.gitter.im/#/room/#TheAlgorithms_community:gitter.im)
or ping one of the reviewers. Thank you for your contributions!
issues:
# Comment to post when marking as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
# Comment to post when closing a stale Issue.
closeComment: >
Please reopen this issue once you add more information and updates here.
If this is not the case and you need some help, feel free to seek help
from our [Gitter](https://app.gitter.im/#/room/#TheAlgorithms_community:gitter.im) or ping one of the
reviewers. Thank you for your contributions!
| # Configuration for probot-stale - https://github.com/probot/stale
# Number of days of inactivity before an Issue or Pull Request becomes stale
daysUntilStale: 30
# Number of days of inactivity before an Issue or Pull Request with the stale label is closed.
# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale.
daysUntilClose: 7
# Only issues or pull requests with all of these labels are checked if stale. Defaults to `[]` (disabled)
onlyLabels: []
# Issues or Pull Requests with these labels will never be considered stale. Set to `[]` to disable
exemptLabels:
- "Status: on hold"
# Set to true to ignore issues in a project (defaults to false)
exemptProjects: false
# Set to true to ignore issues in a milestone (defaults to false)
exemptMilestones: false
# Set to true to ignore issues with an assignee (defaults to false)
exemptAssignees: false
# Label to use when marking as stale
staleLabel: stale
# Limit the number of actions per hour, from 1-30. Default is 30
limitPerRun: 5
# Comment to post when removing the stale label.
# unmarkComment: >
# Your comment here.
# Optionally, specify configuration settings that are specific to just 'issues' or 'pulls':
pulls:
# Comment to post when marking as stale. Set to `false` to disable
markComment: >
This pull request has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
# Comment to post when closing a stale Pull Request.
closeComment: >
Please reopen this pull request once you commit the changes requested
or make improvements on the code. If this is not the case and you need
some help, feel free to seek help from our [Gitter](https://gitter.im/TheAlgorithms/community)
or ping one of the reviewers. Thank you for your contributions!
issues:
# Comment to post when marking as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
# Comment to post when closing a stale Issue.
closeComment: >
Please reopen this issue once you add more information and updates here.
If this is not the case and you need some help, feel free to seek help
from our [Gitter](https://gitter.im/TheAlgorithms/community) or ping one of the
reviewers. Thank you for your contributions!
| 1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # Contributing guidelines
## Before contributing
Welcome to [TheAlgorithms/Python](https://github.com/TheAlgorithms/Python)! Before sending your pull requests, make sure that you __read the whole guidelines__. If you have any doubt on the contributing guide, please feel free to [state it clearly in an issue](https://github.com/TheAlgorithms/Python/issues/new) or ask the community in [Gitter](https://app.gitter.im/#/room/#TheAlgorithms_community:gitter.im).
## Contributing
### Contributor
We are very happy that you are considering implementing algorithms and data structures for others! This repository is referenced and used by learners from all over the globe. Being one of our contributors, you agree and confirm that:
- You did your work - no plagiarism allowed
- Any plagiarized work will not be merged.
- Your work will be distributed under [MIT License](LICENSE.md) once your pull request is merged
- Your submitted work fulfils or mostly fulfils our styles and standards
__New implementation__ is welcome! For example, new solutions for a problem, different representations for a graph data structure or algorithm designs with different complexity but __identical implementation__ of an existing implementation is not allowed. Please check whether the solution is already implemented or not before submitting your pull request.
__Improving comments__ and __writing proper tests__ are also highly welcome.
### Contribution
We appreciate any contribution, from fixing a grammar mistake in a comment to implementing complex algorithms. Please read this section if you are contributing your work.
Your contribution will be tested by our [automated testing on GitHub Actions](https://github.com/TheAlgorithms/Python/actions) to save time and mental energy. After you have submitted your pull request, you should see the GitHub Actions tests start to run at the bottom of your submission page. If those tests fail, then click on the ___details___ button and try to read through the GitHub Actions output to understand the failure. If you do not understand, please leave a comment on your submission page and a community member will try to help.
Please help us keep our issue list small by adding fixes: #{$ISSUE_NO} to the commit message of pull requests that resolve open issues. GitHub will use this tag to auto-close the issue when the PR is merged.
#### What is an Algorithm?
An Algorithm is one or more functions (or classes) that:
* take one or more inputs,
* perform some internal calculations or data manipulations,
* return one or more outputs,
* have minimal side effects (Ex. `print()`, `plot()`, `read()`, `write()`).
Algorithms should be packaged in a way that would make it easy for readers to put them into larger programs.
Algorithms should:
* have intuitive class and function names that make their purpose clear to readers
* use Python naming conventions and intuitive variable names to ease comprehension
* be flexible to take different input values
* have Python type hints for their input parameters and return values
* raise Python exceptions (`ValueError`, etc.) on erroneous input values
* have docstrings with clear explanations and/or URLs to source materials
* contain doctests that test both valid and erroneous input values
* return all calculation results instead of printing or plotting them
Algorithms in this repo should not be how-to examples for existing Python packages. Instead, they should perform internal calculations or manipulations to convert input values into different output values. Those calculations or manipulations can use data types, classes, or functions of existing Python packages but each algorithm in this repo should add unique value.
#### Pre-commit plugin
Use [pre-commit](https://pre-commit.com/#installation) to automatically format your code to match our coding style:
```bash
python3 -m pip install pre-commit # only required the first time
pre-commit install
```
That's it! The plugin will run every time you commit any changes. If there are any errors found during the run, fix them and commit those changes. You can even run the plugin manually on all files:
```bash
pre-commit run --all-files --show-diff-on-failure
```
#### Coding Style
We want your work to be readable by others; therefore, we encourage you to note the following:
- Please write in Python 3.11+. For instance: `print()` is a function in Python 3 so `print "Hello"` will *not* work but `print("Hello")` will.
- Please focus hard on the naming of functions, classes, and variables. Help your reader by using __descriptive names__ that can help you to remove redundant comments.
- Single letter variable names are *old school* so please avoid them unless their life only spans a few lines.
- Expand acronyms because `gcd()` is hard to understand but `greatest_common_divisor()` is not.
- Please follow the [Python Naming Conventions](https://pep8.org/#prescriptive-naming-conventions) so variable_names and function_names should be lower_case, CONSTANTS in UPPERCASE, ClassNames should be CamelCase, etc.
- We encourage the use of Python [f-strings](https://realpython.com/python-f-strings/#f-strings-a-new-and-improved-way-to-format-strings-in-python) where they make the code easier to read.
- Please consider running [__psf/black__](https://github.com/python/black) on your Python file(s) before submitting your pull request. This is not yet a requirement but it does make your code more readable and automatically aligns it with much of [PEP 8](https://www.python.org/dev/peps/pep-0008/). There are other code formatters (autopep8, yapf) but the __black__ formatter is now hosted by the Python Software Foundation. To use it,
```bash
python3 -m pip install black # only required the first time
black .
```
- All submissions will need to pass the test `ruff .` before they will be accepted so if possible, try this test locally on your Python file(s) before submitting your pull request.
```bash
python3 -m pip install ruff # only required the first time
ruff .
```
- Original code submissions require docstrings or comments to describe your work.
- More on docstrings and comments:
If you used a Wikipedia article or some other source material to create your algorithm, please add the URL in a docstring or comment to help your reader.
The following are considered to be bad and may be requested to be improved:
```python
x = x + 2 # increased by 2
```
This is too trivial. Comments are expected to be explanatory. For comments, you can write them above, on or below a line of code, as long as you are consistent within the same piece of code.
We encourage you to put docstrings inside your functions but please pay attention to the indentation of docstrings. The following is a good example:
```python
def sum_ab(a, b):
"""
Return the sum of two integers a and b.
"""
return a + b
```
- Write tests (especially [__doctests__](https://docs.python.org/3/library/doctest.html)) to illustrate and verify your work. We highly encourage the use of _doctests on all functions_.
```python
def sum_ab(a, b):
"""
Return the sum of two integers a and b
>>> sum_ab(2, 2)
4
>>> sum_ab(-2, 3)
1
>>> sum_ab(4.9, 5.1)
10.0
"""
return a + b
```
These doctests will be run by pytest as part of our automated testing so please try to run your doctests locally and make sure that they are found and pass:
```bash
python3 -m doctest -v my_submission.py
```
The use of the Python builtin `input()` function is __not__ encouraged:
```python
input('Enter your input:')
# Or even worse...
input = eval(input("Enter your input: "))
```
However, if your code uses `input()` then we encourage you to gracefully deal with leading and trailing whitespace in user input by adding `.strip()` as in:
```python
starting_value = int(input("Please enter a starting value: ").strip())
```
The use of [Python type hints](https://docs.python.org/3/library/typing.html) is encouraged for function parameters and return values. Our automated testing will run [mypy](http://mypy-lang.org) so run that locally before making your submission.
```python
def sum_ab(a: int, b: int) -> int:
return a + b
```
Instructions on how to install mypy can be found [here](https://github.com/python/mypy). Please use the command `mypy --ignore-missing-imports .` to test all files or `mypy --ignore-missing-imports path/to/file.py` to test a specific file.
- [__List comprehensions and generators__](https://docs.python.org/3/tutorial/datastructures.html#list-comprehensions) are preferred over the use of `lambda`, `map`, `filter`, `reduce` but the important thing is to demonstrate the power of Python in code that is easy to read and maintain.
- Avoid importing external libraries for basic algorithms. Only use those libraries for complicated algorithms.
- If you need a third-party module that is not in the file __requirements.txt__, please add it to that file as part of your submission.
#### Other Requirements for Submissions
- If you are submitting code in the `project_euler/` directory, please also read [the dedicated Guideline](https://github.com/TheAlgorithms/Python/blob/master/project_euler/README.md) before contributing to our Project Euler library.
- The file extension for code files should be `.py`. Jupyter Notebooks should be submitted to [TheAlgorithms/Jupyter](https://github.com/TheAlgorithms/Jupyter).
- Strictly use snake_case (underscore_separated) in your file_name, as it will be easy to parse in future using scripts.
- Please avoid creating new directories if at all possible. Try to fit your work into the existing directory structure.
- If possible, follow the standard *within* the folder you are submitting to.
- If you have modified/added code work, make sure the code compiles before submitting.
- If you have modified/added documentation work, ensure your language is concise and contains no grammar errors.
- Do not update the README.md or DIRECTORY.md file which will be periodically autogenerated by our GitHub Actions processes.
- Add a corresponding explanation to [Algorithms-Explanation](https://github.com/TheAlgorithms/Algorithms-Explanation) (Optional but recommended).
- All submissions will be tested with [__mypy__](http://www.mypy-lang.org) so we encourage you to add [__Python type hints__](https://docs.python.org/3/library/typing.html) where it makes sense to do so.
- Most importantly,
- __Be consistent in the use of these guidelines when submitting.__
- __Join__ us on [Discord](https://discord.com/invite/c7MnfGFGa6) and [Gitter](https://app.gitter.im/#/room/#TheAlgorithms_community:gitter.im) __now!__
- Happy coding!
Writer [@poyea](https://github.com/poyea), Jun 2019.
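Pulling the style points above together (a descriptive snake_case name, type hints, doctests covering valid and erroneous input, and a `ValueError` on bad input), a minimal submission might look like the sketch below. The function itself is only an illustration, not part of these guidelines.

```python
def greatest_common_divisor(num_1: int, num_2: int) -> int:
    """
    Return the greatest common divisor of two positive integers.

    >>> greatest_common_divisor(12, 18)
    6
    >>> greatest_common_divisor(7, 5)
    1
    >>> greatest_common_divisor(-4, 6)
    Traceback (most recent call last):
        ...
    ValueError: Both numbers must be positive
    """
    if num_1 <= 0 or num_2 <= 0:
        raise ValueError("Both numbers must be positive")
    while num_2:
        num_1, num_2 = num_2, num_1 % num_2
    return num_1
```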
| # Contributing guidelines
## Before contributing
Welcome to [TheAlgorithms/Python](https://github.com/TheAlgorithms/Python)! Before sending your pull requests, make sure that you __read the whole guidelines__. If you have any doubt on the contributing guide, please feel free to [state it clearly in an issue](https://github.com/TheAlgorithms/Python/issues/new) or ask the community in [Gitter](https://gitter.im/TheAlgorithms/community).
## Contributing
### Contributor
We are very happy that you are considering implementing algorithms and data structures for others! This repository is referenced and used by learners from all over the globe. Being one of our contributors, you agree and confirm that:
- You did your work - no plagiarism allowed
- Any plagiarized work will not be merged.
- Your work will be distributed under [MIT License](LICENSE.md) once your pull request is merged
- Your submitted work fulfils or mostly fulfils our styles and standards
__New implementation__ is welcome! For example, new solutions for a problem, different representations for a graph data structure or algorithm designs with different complexity but __identical implementation__ of an existing implementation is not allowed. Please check whether the solution is already implemented or not before submitting your pull request.
__Improving comments__ and __writing proper tests__ are also highly welcome.
### Contribution
We appreciate any contribution, from fixing a grammar mistake in a comment to implementing complex algorithms. Please read this section if you are contributing your work.
Your contribution will be tested by our [automated testing on GitHub Actions](https://github.com/TheAlgorithms/Python/actions) to save time and mental energy. After you have submitted your pull request, you should see the GitHub Actions tests start to run at the bottom of your submission page. If those tests fail, then click on the ___details___ button and try to read through the GitHub Actions output to understand the failure. If you do not understand, please leave a comment on your submission page and a community member will try to help.
Please help us keep our issue list small by adding fixes: #{$ISSUE_NO} to the commit message of pull requests that resolve open issues. GitHub will use this tag to auto-close the issue when the PR is merged.
#### What is an Algorithm?
An Algorithm is one or more functions (or classes) that:
* take one or more inputs,
* perform some internal calculations or data manipulations,
* return one or more outputs,
* have minimal side effects (Ex. `print()`, `plot()`, `read()`, `write()`).
Algorithms should be packaged in a way that would make it easy for readers to put them into larger programs.
Algorithms should:
* have intuitive class and function names that make their purpose clear to readers
* use Python naming conventions and intuitive variable names to ease comprehension
* be flexible to take different input values
* have Python type hints for their input parameters and return values
* raise Python exceptions (`ValueError`, etc.) on erroneous input values
* have docstrings with clear explanations and/or URLs to source materials
* contain doctests that test both valid and erroneous input values
* return all calculation results instead of printing or plotting them
Algorithms in this repo should not be how-to examples for existing Python packages. Instead, they should perform internal calculations or manipulations to convert input values into different output values. Those calculations or manipulations can use data types, classes, or functions of existing Python packages but each algorithm in this repo should add unique value.
#### Pre-commit plugin
Use [pre-commit](https://pre-commit.com/#installation) to automatically format your code to match our coding style:
```bash
python3 -m pip install pre-commit # only required the first time
pre-commit install
```
That's it! The plugin will run every time you commit any changes. If there are any errors found during the run, fix them and commit those changes. You can even run the plugin manually on all files:
```bash
pre-commit run --all-files --show-diff-on-failure
```
#### Coding Style
We want your work to be readable by others; therefore, we encourage you to note the following:
- Please write in Python 3.11+. For instance: `print()` is a function in Python 3 so `print "Hello"` will *not* work but `print("Hello")` will.
- Please focus hard on the naming of functions, classes, and variables. Help your reader by using __descriptive names__ that can help you to remove redundant comments.
- Single letter variable names are *old school* so please avoid them unless their life only spans a few lines.
- Expand acronyms because `gcd()` is hard to understand but `greatest_common_divisor()` is not.
- Please follow the [Python Naming Conventions](https://pep8.org/#prescriptive-naming-conventions) so variable_names and function_names should be lower_case, CONSTANTS in UPPERCASE, ClassNames should be CamelCase, etc.
- We encourage the use of Python [f-strings](https://realpython.com/python-f-strings/#f-strings-a-new-and-improved-way-to-format-strings-in-python) where they make the code easier to read.
- Please consider running [__psf/black__](https://github.com/python/black) on your Python file(s) before submitting your pull request. This is not yet a requirement but it does make your code more readable and automatically aligns it with much of [PEP 8](https://www.python.org/dev/peps/pep-0008/). There are other code formatters (autopep8, yapf) but the __black__ formatter is now hosted by the Python Software Foundation. To use it,
```bash
python3 -m pip install black # only required the first time
black .
```
- All submissions will need to pass the test `ruff .` before they will be accepted so if possible, try this test locally on your Python file(s) before submitting your pull request.
```bash
python3 -m pip install ruff # only required the first time
ruff .
```
- Original code submissions require docstrings or comments to describe your work.
- More on docstrings and comments:
If you used a Wikipedia article or some other source material to create your algorithm, please add the URL in a docstring or comment to help your reader.
The following are considered to be bad and may be requested to be improved:
```python
x = x + 2 # increased by 2
```
This is too trivial. Comments are expected to be explanatory. For comments, you can write them above, on or below a line of code, as long as you are consistent within the same piece of code.
We encourage you to put docstrings inside your functions but please pay attention to the indentation of docstrings. The following is a good example:
```python
def sum_ab(a, b):
"""
Return the sum of two integers a and b.
"""
return a + b
```
- Write tests (especially [__doctests__](https://docs.python.org/3/library/doctest.html)) to illustrate and verify your work. We highly encourage the use of _doctests on all functions_.
```python
def sum_ab(a, b):
"""
Return the sum of two integers a and b
>>> sum_ab(2, 2)
4
>>> sum_ab(-2, 3)
1
>>> sum_ab(4.9, 5.1)
10.0
"""
return a + b
```
These doctests will be run by pytest as part of our automated testing so please try to run your doctests locally and make sure that they are found and pass:
```bash
python3 -m doctest -v my_submission.py
```
The use of the Python builtin `input()` function is __not__ encouraged:
```python
input('Enter your input:')
# Or even worse...
input = eval(input("Enter your input: "))
```
However, if your code uses `input()` then we encourage you to gracefully deal with leading and trailing whitespace in user input by adding `.strip()` as in:
```python
starting_value = int(input("Please enter a starting value: ").strip())
```
The use of [Python type hints](https://docs.python.org/3/library/typing.html) is encouraged for function parameters and return values. Our automated testing will run [mypy](http://mypy-lang.org) so run that locally before making your submission.
```python
def sum_ab(a: int, b: int) -> int:
return a + b
```
Instructions on how to install mypy can be found [here](https://github.com/python/mypy). Please use the command `mypy --ignore-missing-imports .` to test all files or `mypy --ignore-missing-imports path/to/file.py` to test a specific file.
- [__List comprehensions and generators__](https://docs.python.org/3/tutorial/datastructures.html#list-comprehensions) are preferred over the use of `lambda`, `map`, `filter`, and `reduce`, but the important thing is to demonstrate the power of Python in code that is easy to read and maintain (see the short sketch after this list).
- Avoid importing external libraries for basic algorithms. Only use those libraries for complicated algorithms.
- If you need a third-party module that is not in the file __requirements.txt__, please add it to that file as part of your submission.
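As a minimal, illustrative sketch of several of the points above (descriptive names, an f-string, and a list comprehension in place of `map`/`filter`); the function and variable names here are made up for demonstration:
```python
def cube_odd_numbers(numbers: list[int]) -> list[int]:
    """Return the cubes of the odd values in numbers."""
    # A list comprehension replaces filter() + map() and stays easy to read.
    return [number**3 for number in numbers if number % 2 != 0]


if __name__ == "__main__":
    odd_cubes = cube_odd_numbers([1, 2, 3, 4, 5])
    # An f-string keeps the output formatting readable.
    print(f"{odd_cubes = }")
```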
#### Other Requirements for Submissions
- If you are submitting code in the `project_euler/` directory, please also read [the dedicated Guideline](https://github.com/TheAlgorithms/Python/blob/master/project_euler/README.md) before contributing to our Project Euler library.
- The file extension for code files should be `.py`. Jupyter Notebooks should be submitted to [TheAlgorithms/Jupyter](https://github.com/TheAlgorithms/Jupyter).
- Strictly use snake_case (underscore_separated) in your file_name, as it will be easy to parse in the future using scripts.
- Please avoid creating new directories if at all possible. Try to fit your work into the existing directory structure.
- If possible, follow the standard *within* the folder you are submitting to.
- If you have modified/added code work, make sure the code runs without errors before submitting.
- If you have modified/added documentation work, ensure your language is concise and contains no grammar errors.
- Do not update the README.md or DIRECTORY.md files, which are periodically autogenerated by our GitHub Actions processes.
- Add a corresponding explanation to [Algorithms-Explanation](https://github.com/TheAlgorithms/Algorithms-Explanation) (Optional but recommended).
- All submissions will be tested with [__mypy__](http://www.mypy-lang.org) so we encourage you to add [__Python type hints__](https://docs.python.org/3/library/typing.html) where it makes sense to do so.
- Most importantly,
- __Be consistent in the use of these guidelines when submitting.__
- __Join__ us on [Discord](https://discord.com/invite/c7MnfGFGa6) and [Gitter](https://gitter.im/TheAlgorithms/community) __now!__
- Happy coding!
Writer [@poyea](https://github.com/poyea), Jun 2019.
| 1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| <div align="center">
<!-- Title: -->
<a href="https://github.com/TheAlgorithms/">
<img src="https://raw.githubusercontent.com/TheAlgorithms/website/1cd824df116b27029f17c2d1b42d81731f28a920/public/logo.svg" height="100">
</a>
<h1><a href="https://github.com/TheAlgorithms/">The Algorithms</a> - Python</h1>
<!-- Labels: -->
<!-- First row: -->
<a href="https://gitpod.io/#https://github.com/TheAlgorithms/Python">
<img src="https://img.shields.io/badge/Gitpod-Ready--to--Code-blue?logo=gitpod&style=flat-square" height="20" alt="Gitpod Ready-to-Code">
</a>
<a href="https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md">
<img src="https://img.shields.io/static/v1.svg?label=Contributions&message=Welcome&color=0059b3&style=flat-square" height="20" alt="Contributions Welcome">
</a>
<img src="https://img.shields.io/github/repo-size/TheAlgorithms/Python.svg?label=Repo%20size&style=flat-square" height="20">
<a href="https://discord.gg/c7MnfGFGa6">
<img src="https://img.shields.io/discord/808045925556682782.svg?logo=discord&colorB=7289DA&style=flat-square" height="20" alt="Discord chat">
</a>
<a href="https://app.gitter.im/#/room/#TheAlgorithms_community:gitter.im">
<img src="https://img.shields.io/badge/Chat-Gitter-ff69b4.svg?label=Chat&logo=gitter&style=flat-square" height="20" alt="Gitter chat">
</a>
<!-- Second row: -->
<br>
<a href="https://github.com/TheAlgorithms/Python/actions">
<img src="https://img.shields.io/github/actions/workflow/status/TheAlgorithms/Python/build.yml?branch=master&label=CI&logo=github&style=flat-square" height="20" alt="GitHub Workflow Status">
</a>
<a href="https://github.com/pre-commit/pre-commit">
<img src="https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white&style=flat-square" height="20" alt="pre-commit">
</a>
<a href="https://github.com/psf/black">
<img src="https://img.shields.io/static/v1?label=code%20style&message=black&color=black&style=flat-square" height="20" alt="code style: black">
</a>
<!-- Short description: -->
<h3>All algorithms implemented in Python - for education</h3>
</div>
Implementations are for learning purposes only. They may be less efficient than the implementations in the Python standard library. Use them at your discretion.
## Getting Started
Read through our [Contribution Guidelines](CONTRIBUTING.md) before you contribute.
## Community Channels
We are on [Discord](https://discord.gg/c7MnfGFGa6) and [Gitter](https://app.gitter.im/#/room/#TheAlgorithms_community:gitter.im)! Community channels are a great way for you to ask questions and get help. Please join us!
## List of Algorithms
See our [directory](DIRECTORY.md) for easier navigation and a better overview of the project.
| <div align="center">
<!-- Title: -->
<a href="https://github.com/TheAlgorithms/">
<img src="https://raw.githubusercontent.com/TheAlgorithms/website/1cd824df116b27029f17c2d1b42d81731f28a920/public/logo.svg" height="100">
</a>
<h1><a href="https://github.com/TheAlgorithms/">The Algorithms</a> - Python</h1>
<!-- Labels: -->
<!-- First row: -->
<a href="https://gitpod.io/#https://github.com/TheAlgorithms/Python">
<img src="https://img.shields.io/badge/Gitpod-Ready--to--Code-blue?logo=gitpod&style=flat-square" height="20" alt="Gitpod Ready-to-Code">
</a>
<a href="https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md">
<img src="https://img.shields.io/static/v1.svg?label=Contributions&message=Welcome&color=0059b3&style=flat-square" height="20" alt="Contributions Welcome">
</a>
<img src="https://img.shields.io/github/repo-size/TheAlgorithms/Python.svg?label=Repo%20size&style=flat-square" height="20">
<a href="https://discord.gg/c7MnfGFGa6">
<img src="https://img.shields.io/discord/808045925556682782.svg?logo=discord&colorB=7289DA&style=flat-square" height="20" alt="Discord chat">
</a>
<a href="https://gitter.im/TheAlgorithms/community">
<img src="https://img.shields.io/badge/Chat-Gitter-ff69b4.svg?label=Chat&logo=gitter&style=flat-square" height="20" alt="Gitter chat">
</a>
<!-- Second row: -->
<br>
<a href="https://github.com/TheAlgorithms/Python/actions">
<img src="https://img.shields.io/github/actions/workflow/status/TheAlgorithms/Python/build.yml?branch=master&label=CI&logo=github&style=flat-square" height="20" alt="GitHub Workflow Status">
</a>
<a href="https://github.com/pre-commit/pre-commit">
<img src="https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white&style=flat-square" height="20" alt="pre-commit">
</a>
<a href="https://github.com/psf/black">
<img src="https://img.shields.io/static/v1?label=code%20style&message=black&color=black&style=flat-square" height="20" alt="code style: black">
</a>
<!-- Short description: -->
<h3>All algorithms implemented in Python - for education</h3>
</div>
Implementations are for learning purposes only. They may be less efficient than the implementations in the Python standard library. Use them at your discretion.
## Getting Started
Read through our [Contribution Guidelines](CONTRIBUTING.md) before you contribute.
## Community Channels
We are on [Discord](https://discord.gg/c7MnfGFGa6) and [Gitter](https://gitter.im/TheAlgorithms/community)! Community channels are a great way for you to ask questions and get help. Please join us!
## List of Algorithms
See our [directory](DIRECTORY.md) for easier navigation and a better overview of the project.
| 1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # Project Euler
Problems are taken from https://projecteuler.net/, the Project Euler website. [Problems are licensed under CC BY-NC-SA 4.0](https://projecteuler.net/copyright).
Project Euler is a series of challenging mathematical/computer programming problems that require more than just mathematical
insights to solve. Project Euler is ideal for mathematicians who are learning to code.
The solutions will be checked by our [automated testing on GitHub Actions](https://github.com/TheAlgorithms/Python/actions) with the help of [this script](https://github.com/TheAlgorithms/Python/blob/master/scripts/validate_solutions.py). The efficiency of your code is also checked. You can view the top 10 slowest solutions on GitHub Actions logs (under `slowest 10 durations`) and open a pull request to improve those solutions.
## Solution Guidelines
Welcome to [TheAlgorithms/Python](https://github.com/TheAlgorithms/Python)! Before reading the solution guidelines, make sure you read the whole [Contributing Guidelines](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md) as it won't be repeated here. If you have any doubts about the guidelines, please feel free to [state them clearly in an issue](https://github.com/TheAlgorithms/Python/issues/new) or ask the community in [Gitter](https://app.gitter.im/#/room/#TheAlgorithms_community:gitter.im). You can use the [template](https://github.com/TheAlgorithms/Python/blob/master/project_euler/README.md#solution-template) we have provided below as your starting point but be sure to read the [Coding Style](https://github.com/TheAlgorithms/Python/blob/master/project_euler/README.md#coding-style) part first.
### Coding Style
* Please maintain consistency in project directory and solution file names. Keep the following points in mind:
* Create a new directory only for the problems which do not exist yet.
* If you create a new directory, please create an empty `__init__.py` file inside it as well.
* Please name the project **directory** as `problem_<problem_number>` where `problem_number` should be filled with 0s so as to occupy 3 digits. Example: `problem_001`, `problem_002`, `problem_067`, `problem_145`, and so on.
* Please provide a link to the problem and other references, if used, in the **module-level docstring**.
* All imports should come ***after*** the module-level docstring.
* You can have as many helper functions as you want but there should be one main function called `solution` which should satisfy the conditions as stated below:
* It should contain positional argument(s) whose default value is the question input. Example: Please take a look at [Problem 1](https://projecteuler.net/problem=1) where the question is to *Find the sum of all the multiples of 3 or 5 below 1000.* In this case the main solution function will be `solution(limit: int = 1000)`.
* When the `solution` function is called without any arguments like so: `solution()`, it should return the answer to the problem.
* Every function, which includes all the helper functions, if any, and the main solution function, should have `doctest` in the function docstring along with a brief statement mentioning what the function is about.
* There should not be a `doctest` for testing the answer as that is done by our GitHub Actions build using this [script](https://github.com/TheAlgorithms/Python/blob/master/scripts/validate_solutions.py). Keeping in mind the above example of [Problem 1](https://projecteuler.net/problem=1):
```python
def solution(limit: int = 1000):
"""
A brief statement mentioning what the function is about.
You can have a detailed explanation about the solution method in the
module-level docstring.
>>> solution(1)
...
>>> solution(16)
...
>>> solution(100)
...
"""
```
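As a concrete illustration of the guidelines above, a minimal sketch of a solution for [Problem 1](https://projecteuler.net/problem=1) might look like the following (the docstring wording and the small doctest inputs are illustrative only, not the repository's actual solution):
```python
def solution(limit: int = 1000) -> int:
    """
    Return the sum of all the multiples of 3 or 5 below limit.
    >>> solution(10)
    23
    >>> solution(4)
    3
    """
    # Note: no doctest for solution() itself, since the final answer is
    # checked by the automated validation script instead.
    return sum(number for number in range(limit) if number % 3 == 0 or number % 5 == 0)
```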
### Solution Template
You can use the below template as your starting point but please read the [Coding Style](https://github.com/TheAlgorithms/Python/blob/master/project_euler/README.md#coding-style) first to understand how the template works.
Please change the names of the helper functions accordingly, replace the parameter names with descriptive ones, and replace the content within `[square brackets]` (including the brackets) with the appropriate content.
```python
"""
Project Euler Problem [problem number]: [link to the original problem]
... [Entire problem statement] ...
... [Solution explanation - Optional] ...
References [Optional]:
- [Wikipedia link to the topic]
- [Stackoverflow link]
...
"""
import module1
import module2
...
def helper1(arg1: [type hint], arg2: [type hint], ...) -> [Return type hint]:
"""
A brief statement explaining what the function is about.
... A more elaborate description ... [Optional]
...
[Doctest]
...
"""
...
# calculations
...
return
# You can have multiple helper functions but the solution function should be
# after all the helper functions ...
def solution(arg1: [type hint], arg2: [type hint], ...) -> [Return type hint]:
"""
A brief statement mentioning what the function is about.
You can have a detailed explanation about the solution in the
module-level docstring.
...
[Doctest as mentioned above]
...
"""
...
# calculations
...
return answer
if __name__ == "__main__":
print(f"{solution() = }")
```
| # Project Euler
Problems are taken from https://projecteuler.net/, the Project Euler website. [Problems are licensed under CC BY-NC-SA 4.0](https://projecteuler.net/copyright).
Project Euler is a series of challenging mathematical/computer programming problems that require more than just mathematical
insights to solve. Project Euler is ideal for mathematicians who are learning to code.
The solutions will be checked by our [automated testing on GitHub Actions](https://github.com/TheAlgorithms/Python/actions) with the help of [this script](https://github.com/TheAlgorithms/Python/blob/master/scripts/validate_solutions.py). The efficiency of your code is also checked. You can view the top 10 slowest solutions on GitHub Actions logs (under `slowest 10 durations`) and open a pull request to improve those solutions.
## Solution Guidelines
Welcome to [TheAlgorithms/Python](https://github.com/TheAlgorithms/Python)! Before reading the solution guidelines, make sure you read the whole [Contributing Guidelines](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md) as it won't be repeated here. If you have any doubts about the guidelines, please feel free to [state them clearly in an issue](https://github.com/TheAlgorithms/Python/issues/new) or ask the community in [Gitter](https://gitter.im/TheAlgorithms/community). You can use the [template](https://github.com/TheAlgorithms/Python/blob/master/project_euler/README.md#solution-template) we have provided below as your starting point but be sure to read the [Coding Style](https://github.com/TheAlgorithms/Python/blob/master/project_euler/README.md#coding-style) part first.
### Coding Style
* Please maintain consistency in project directory and solution file names. Keep the following points in mind:
* Create a new directory only for the problems which do not exist yet.
* If you create a new directory, please create an empty `__init__.py` file inside it as well.
* Please name the project **directory** as `problem_<problem_number>` where `problem_number` should be filled with 0s so as to occupy 3 digits. Example: `problem_001`, `problem_002`, `problem_067`, `problem_145`, and so on.
* Please provide a link to the problem and other references, if used, in the **module-level docstring**.
* All imports should come ***after*** the module-level docstring.
* You can have as many helper functions as you want but there should be one main function called `solution` which should satisfy the conditions as stated below:
* It should contain positional argument(s) whose default value is the question input. Example: Please take a look at [Problem 1](https://projecteuler.net/problem=1) where the question is to *Find the sum of all the multiples of 3 or 5 below 1000.* In this case the main solution function will be `solution(limit: int = 1000)`.
* When the `solution` function is called without any arguments like so: `solution()`, it should return the answer to the problem.
* Every function, which includes all the helper functions, if any, and the main solution function, should have `doctest` in the function docstring along with a brief statement mentioning what the function is about.
* There should not be a `doctest` for testing the answer as that is done by our GitHub Actions build using this [script](https://github.com/TheAlgorithms/Python/blob/master/scripts/validate_solutions.py). Keeping in mind the above example of [Problem 1](https://projecteuler.net/problem=1):
```python
def solution(limit: int = 1000):
"""
A brief statement mentioning what the function is about.
You can have a detailed explanation about the solution method in the
module-level docstring.
>>> solution(1)
...
>>> solution(16)
...
>>> solution(100)
...
"""
```
### Solution Template
You can use the below template as your starting point but please read the [Coding Style](https://github.com/TheAlgorithms/Python/blob/master/project_euler/README.md#coding-style) first to understand how the template works.
Please change the names of the helper functions accordingly, replace the parameter names with descriptive ones, and replace the content within `[square brackets]` (including the brackets) with the appropriate content.
```python
"""
Project Euler Problem [problem number]: [link to the original problem]
... [Entire problem statement] ...
... [Solution explanation - Optional] ...
References [Optional]:
- [Wikipedia link to the topic]
- [Stackoverflow link]
...
"""
import module1
import module2
...
def helper1(arg1: [type hint], arg2: [type hint], ...) -> [Return type hint]:
"""
A brief statement explaining what the function is about.
... A more elaborate description ... [Optional]
...
[Doctest]
...
"""
...
# calculations
...
return
# You can have multiple helper functions but the solution function should be
# after all the helper functions ...
def solution(arg1: [type hint], arg2: [type hint], ...) -> [Return type hint]:
"""
A brief statement mentioning what the function is about.
You can have a detailed explanation about the solution in the
module-level docstring.
...
[Doctest as mentioned above]
...
"""
...
# calculations
...
return answer
if __name__ == "__main__":
print(f"{solution() = }")
```
| 1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # Arithmetic analysis
Arithmetic analysis is a branch of mathematics that deals with numerical methods for solving equations, such as systems of linear equations and root-finding problems.
* <https://en.wikipedia.org/wiki/System_of_linear_equations>
* <https://en.wikipedia.org/wiki/Gaussian_elimination>
* <https://en.wikipedia.org/wiki/Root-finding_algorithms>
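To give a flavour of the root-finding methods referenced above, here is a minimal bisection sketch (an illustration only, not the implementation in `arithmetic_analysis/bisection.py`):
```python
from collections.abc import Callable


def bisection(
    function: Callable[[float], float], a: float, b: float, precision: float = 1e-7
) -> float:
    """Find a root of a continuous function on [a, b], assuming a < b and a sign change."""
    if function(a) * function(b) > 0:
        raise ValueError("function must have opposite signs at a and b")
    while b - a > precision:
        midpoint = (a + b) / 2
        if function(a) * function(midpoint) <= 0:
            b = midpoint  # the sign change (and hence the root) is in the left half
        else:
            a = midpoint  # the sign change is in the right half
    return (a + b) / 2


if __name__ == "__main__":
    # x**2 - 2 has a root at sqrt(2) ~= 1.41421356 on [0, 2]
    print(f"{bisection(lambda x: x * x - 2, 0, 2) = }")
```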
| # Arithmetic analysis
Arithmetic analysis is a branch of mathematics that deals with numerical methods for solving equations, such as systems of linear equations and root-finding problems.
* <https://en.wikipedia.org/wiki/System_of_linear_equations>
* <https://en.wikipedia.org/wiki/Gaussian_elimination>
* <https://en.wikipedia.org/wiki/Root-finding_algorithms>
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ### Describe your change:
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [ ] Documentation change?
### Checklist:
* [ ] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [ ] This pull request is all my own work -- I have not plagiarized.
* [ ] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [ ] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ### Describe your change:
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [ ] Documentation change?
### Checklist:
* [ ] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [ ] This pull request is all my own work -- I have not plagiarized.
* [ ] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [ ] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # Boolean Algebra
Boolean algebra is used to do arithmetic with binary values: True (1) or False (0).
There are three basic operations: 'and', 'or' and 'not'.
* <https://en.wikipedia.org/wiki/Boolean_algebra>
* <https://plato.stanford.edu/entries/boolalg-math/>
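As a tiny illustration of these operations in Python, here is a sketch in the spirit of the gate modules in this directory (not their actual code):
```python
def and_gate(input_1: int, input_2: int) -> int:
    """
    Return 1 only if both inputs are truthy, mirroring the boolean 'and' operation.
    >>> and_gate(0, 1)
    0
    >>> and_gate(1, 1)
    1
    """
    return int(bool(input_1) and bool(input_2))


if __name__ == "__main__":
    print(f"{and_gate(0, 1) = }")
    print(f"{and_gate(1, 1) = }")
```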
| # Boolean Algebra
Boolean algebra is used to do arithmetic with binary values: True (1) or False (0).
There are three basic operations: 'and', 'or' and 'not'.
* <https://en.wikipedia.org/wiki/Boolean_algebra>
* <https://plato.stanford.edu/entries/boolalg-math/>
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
|
## Arithmetic Analysis
* [Bisection](arithmetic_analysis/bisection.py)
* [Gaussian Elimination](arithmetic_analysis/gaussian_elimination.py)
* [In Static Equilibrium](arithmetic_analysis/in_static_equilibrium.py)
* [Intersection](arithmetic_analysis/intersection.py)
* [Jacobi Iteration Method](arithmetic_analysis/jacobi_iteration_method.py)
* [Lu Decomposition](arithmetic_analysis/lu_decomposition.py)
* [Newton Forward Interpolation](arithmetic_analysis/newton_forward_interpolation.py)
* [Newton Method](arithmetic_analysis/newton_method.py)
* [Newton Raphson](arithmetic_analysis/newton_raphson.py)
* [Newton Raphson New](arithmetic_analysis/newton_raphson_new.py)
* [Secant Method](arithmetic_analysis/secant_method.py)
## Audio Filters
* [Butterworth Filter](audio_filters/butterworth_filter.py)
* [Iir Filter](audio_filters/iir_filter.py)
* [Show Response](audio_filters/show_response.py)
## Backtracking
* [All Combinations](backtracking/all_combinations.py)
* [All Permutations](backtracking/all_permutations.py)
* [All Subsequences](backtracking/all_subsequences.py)
* [Coloring](backtracking/coloring.py)
* [Combination Sum](backtracking/combination_sum.py)
* [Hamiltonian Cycle](backtracking/hamiltonian_cycle.py)
* [Knight Tour](backtracking/knight_tour.py)
* [Minimax](backtracking/minimax.py)
* [Minmax](backtracking/minmax.py)
* [N Queens](backtracking/n_queens.py)
* [N Queens Math](backtracking/n_queens_math.py)
* [Rat In Maze](backtracking/rat_in_maze.py)
* [Sudoku](backtracking/sudoku.py)
* [Sum Of Subsets](backtracking/sum_of_subsets.py)
* [Word Search](backtracking/word_search.py)
## Bit Manipulation
* [Binary And Operator](bit_manipulation/binary_and_operator.py)
* [Binary Count Setbits](bit_manipulation/binary_count_setbits.py)
* [Binary Count Trailing Zeros](bit_manipulation/binary_count_trailing_zeros.py)
* [Binary Or Operator](bit_manipulation/binary_or_operator.py)
* [Binary Shifts](bit_manipulation/binary_shifts.py)
* [Binary Twos Complement](bit_manipulation/binary_twos_complement.py)
* [Binary Xor Operator](bit_manipulation/binary_xor_operator.py)
* [Count 1S Brian Kernighan Method](bit_manipulation/count_1s_brian_kernighan_method.py)
* [Count Number Of One Bits](bit_manipulation/count_number_of_one_bits.py)
* [Gray Code Sequence](bit_manipulation/gray_code_sequence.py)
* [Highest Set Bit](bit_manipulation/highest_set_bit.py)
* [Index Of Rightmost Set Bit](bit_manipulation/index_of_rightmost_set_bit.py)
* [Is Even](bit_manipulation/is_even.py)
* [Is Power Of Two](bit_manipulation/is_power_of_two.py)
* [Numbers Different Signs](bit_manipulation/numbers_different_signs.py)
* [Reverse Bits](bit_manipulation/reverse_bits.py)
* [Single Bit Manipulation Operations](bit_manipulation/single_bit_manipulation_operations.py)
## Blockchain
* [Chinese Remainder Theorem](blockchain/chinese_remainder_theorem.py)
* [Diophantine Equation](blockchain/diophantine_equation.py)
* [Modular Division](blockchain/modular_division.py)
## Boolean Algebra
* [And Gate](boolean_algebra/and_gate.py)
* [Nand Gate](boolean_algebra/nand_gate.py)
* [Norgate](boolean_algebra/norgate.py)
* [Not Gate](boolean_algebra/not_gate.py)
* [Or Gate](boolean_algebra/or_gate.py)
* [Quine Mc Cluskey](boolean_algebra/quine_mc_cluskey.py)
* [Xnor Gate](boolean_algebra/xnor_gate.py)
* [Xor Gate](boolean_algebra/xor_gate.py)
## Cellular Automata
* [Conways Game Of Life](cellular_automata/conways_game_of_life.py)
* [Game Of Life](cellular_automata/game_of_life.py)
* [Nagel Schrekenberg](cellular_automata/nagel_schrekenberg.py)
* [One Dimensional](cellular_automata/one_dimensional.py)
## Ciphers
* [A1Z26](ciphers/a1z26.py)
* [Affine Cipher](ciphers/affine_cipher.py)
* [Atbash](ciphers/atbash.py)
* [Autokey](ciphers/autokey.py)
* [Baconian Cipher](ciphers/baconian_cipher.py)
* [Base16](ciphers/base16.py)
* [Base32](ciphers/base32.py)
* [Base64](ciphers/base64.py)
* [Base85](ciphers/base85.py)
* [Beaufort Cipher](ciphers/beaufort_cipher.py)
* [Bifid](ciphers/bifid.py)
* [Brute Force Caesar Cipher](ciphers/brute_force_caesar_cipher.py)
* [Caesar Cipher](ciphers/caesar_cipher.py)
* [Cryptomath Module](ciphers/cryptomath_module.py)
* [Decrypt Caesar With Chi Squared](ciphers/decrypt_caesar_with_chi_squared.py)
* [Deterministic Miller Rabin](ciphers/deterministic_miller_rabin.py)
* [Diffie](ciphers/diffie.py)
* [Diffie Hellman](ciphers/diffie_hellman.py)
* [Elgamal Key Generator](ciphers/elgamal_key_generator.py)
* [Enigma Machine2](ciphers/enigma_machine2.py)
* [Hill Cipher](ciphers/hill_cipher.py)
* [Mixed Keyword Cypher](ciphers/mixed_keyword_cypher.py)
* [Mono Alphabetic Ciphers](ciphers/mono_alphabetic_ciphers.py)
* [Morse Code](ciphers/morse_code.py)
* [Onepad Cipher](ciphers/onepad_cipher.py)
* [Playfair Cipher](ciphers/playfair_cipher.py)
* [Polybius](ciphers/polybius.py)
* [Porta Cipher](ciphers/porta_cipher.py)
* [Rabin Miller](ciphers/rabin_miller.py)
* [Rail Fence Cipher](ciphers/rail_fence_cipher.py)
* [Rot13](ciphers/rot13.py)
* [Rsa Cipher](ciphers/rsa_cipher.py)
* [Rsa Factorization](ciphers/rsa_factorization.py)
* [Rsa Key Generator](ciphers/rsa_key_generator.py)
* [Shuffled Shift Cipher](ciphers/shuffled_shift_cipher.py)
* [Simple Keyword Cypher](ciphers/simple_keyword_cypher.py)
* [Simple Substitution Cipher](ciphers/simple_substitution_cipher.py)
* [Trafid Cipher](ciphers/trafid_cipher.py)
* [Transposition Cipher](ciphers/transposition_cipher.py)
* [Transposition Cipher Encrypt Decrypt File](ciphers/transposition_cipher_encrypt_decrypt_file.py)
* [Vigenere Cipher](ciphers/vigenere_cipher.py)
* [Xor Cipher](ciphers/xor_cipher.py)
## Compression
* [Burrows Wheeler](compression/burrows_wheeler.py)
* [Huffman](compression/huffman.py)
* [Lempel Ziv](compression/lempel_ziv.py)
* [Lempel Ziv Decompress](compression/lempel_ziv_decompress.py)
* [Lz77](compression/lz77.py)
* [Peak Signal To Noise Ratio](compression/peak_signal_to_noise_ratio.py)
* [Run Length Encoding](compression/run_length_encoding.py)
## Computer Vision
* [Cnn Classification](computer_vision/cnn_classification.py)
* [Flip Augmentation](computer_vision/flip_augmentation.py)
* [Harris Corner](computer_vision/harris_corner.py)
* [Horn Schunck](computer_vision/horn_schunck.py)
* [Mean Threshold](computer_vision/mean_threshold.py)
* [Mosaic Augmentation](computer_vision/mosaic_augmentation.py)
* [Pooling Functions](computer_vision/pooling_functions.py)
## Conversions
* [Astronomical Length Scale Conversion](conversions/astronomical_length_scale_conversion.py)
* [Binary To Decimal](conversions/binary_to_decimal.py)
* [Binary To Hexadecimal](conversions/binary_to_hexadecimal.py)
* [Binary To Octal](conversions/binary_to_octal.py)
* [Decimal To Any](conversions/decimal_to_any.py)
* [Decimal To Binary](conversions/decimal_to_binary.py)
* [Decimal To Binary Recursion](conversions/decimal_to_binary_recursion.py)
* [Decimal To Hexadecimal](conversions/decimal_to_hexadecimal.py)
* [Decimal To Octal](conversions/decimal_to_octal.py)
* [Excel Title To Column](conversions/excel_title_to_column.py)
* [Hex To Bin](conversions/hex_to_bin.py)
* [Hexadecimal To Decimal](conversions/hexadecimal_to_decimal.py)
* [Length Conversion](conversions/length_conversion.py)
* [Molecular Chemistry](conversions/molecular_chemistry.py)
* [Octal To Decimal](conversions/octal_to_decimal.py)
* [Prefix Conversions](conversions/prefix_conversions.py)
* [Prefix Conversions String](conversions/prefix_conversions_string.py)
* [Pressure Conversions](conversions/pressure_conversions.py)
* [Rgb Hsv Conversion](conversions/rgb_hsv_conversion.py)
* [Roman Numerals](conversions/roman_numerals.py)
* [Speed Conversions](conversions/speed_conversions.py)
* [Temperature Conversions](conversions/temperature_conversions.py)
* [Volume Conversions](conversions/volume_conversions.py)
* [Weight Conversion](conversions/weight_conversion.py)
## Data Structures
* Arrays
* [Permutations](data_structures/arrays/permutations.py)
* [Prefix Sum](data_structures/arrays/prefix_sum.py)
* Binary Tree
* [Avl Tree](data_structures/binary_tree/avl_tree.py)
* [Basic Binary Tree](data_structures/binary_tree/basic_binary_tree.py)
* [Binary Search Tree](data_structures/binary_tree/binary_search_tree.py)
* [Binary Search Tree Recursive](data_structures/binary_tree/binary_search_tree_recursive.py)
* [Binary Tree Mirror](data_structures/binary_tree/binary_tree_mirror.py)
* [Binary Tree Node Sum](data_structures/binary_tree/binary_tree_node_sum.py)
* [Binary Tree Path Sum](data_structures/binary_tree/binary_tree_path_sum.py)
* [Binary Tree Traversals](data_structures/binary_tree/binary_tree_traversals.py)
* [Diff Views Of Binary Tree](data_structures/binary_tree/diff_views_of_binary_tree.py)
* [Distribute Coins](data_structures/binary_tree/distribute_coins.py)
* [Fenwick Tree](data_structures/binary_tree/fenwick_tree.py)
* [Inorder Tree Traversal 2022](data_structures/binary_tree/inorder_tree_traversal_2022.py)
* [Is Bst](data_structures/binary_tree/is_bst.py)
* [Lazy Segment Tree](data_structures/binary_tree/lazy_segment_tree.py)
* [Lowest Common Ancestor](data_structures/binary_tree/lowest_common_ancestor.py)
* [Maximum Fenwick Tree](data_structures/binary_tree/maximum_fenwick_tree.py)
* [Merge Two Binary Trees](data_structures/binary_tree/merge_two_binary_trees.py)
* [Non Recursive Segment Tree](data_structures/binary_tree/non_recursive_segment_tree.py)
* [Number Of Possible Binary Trees](data_structures/binary_tree/number_of_possible_binary_trees.py)
* [Red Black Tree](data_structures/binary_tree/red_black_tree.py)
* [Segment Tree](data_structures/binary_tree/segment_tree.py)
* [Segment Tree Other](data_structures/binary_tree/segment_tree_other.py)
* [Treap](data_structures/binary_tree/treap.py)
* [Wavelet Tree](data_structures/binary_tree/wavelet_tree.py)
* Disjoint Set
* [Alternate Disjoint Set](data_structures/disjoint_set/alternate_disjoint_set.py)
* [Disjoint Set](data_structures/disjoint_set/disjoint_set.py)
* Hashing
* [Double Hash](data_structures/hashing/double_hash.py)
* [Hash Map](data_structures/hashing/hash_map.py)
* [Hash Table](data_structures/hashing/hash_table.py)
* [Hash Table With Linked List](data_structures/hashing/hash_table_with_linked_list.py)
* Number Theory
* [Prime Numbers](data_structures/hashing/number_theory/prime_numbers.py)
* [Quadratic Probing](data_structures/hashing/quadratic_probing.py)
* Tests
* [Test Hash Map](data_structures/hashing/tests/test_hash_map.py)
* Heap
* [Binomial Heap](data_structures/heap/binomial_heap.py)
* [Heap](data_structures/heap/heap.py)
* [Heap Generic](data_structures/heap/heap_generic.py)
* [Max Heap](data_structures/heap/max_heap.py)
* [Min Heap](data_structures/heap/min_heap.py)
* [Randomized Heap](data_structures/heap/randomized_heap.py)
* [Skew Heap](data_structures/heap/skew_heap.py)
* Linked List
* [Circular Linked List](data_structures/linked_list/circular_linked_list.py)
* [Deque Doubly](data_structures/linked_list/deque_doubly.py)
* [Doubly Linked List](data_structures/linked_list/doubly_linked_list.py)
* [Doubly Linked List Two](data_structures/linked_list/doubly_linked_list_two.py)
* [From Sequence](data_structures/linked_list/from_sequence.py)
* [Has Loop](data_structures/linked_list/has_loop.py)
* [Is Palindrome](data_structures/linked_list/is_palindrome.py)
* [Merge Two Lists](data_structures/linked_list/merge_two_lists.py)
* [Middle Element Of Linked List](data_structures/linked_list/middle_element_of_linked_list.py)
* [Print Reverse](data_structures/linked_list/print_reverse.py)
* [Singly Linked List](data_structures/linked_list/singly_linked_list.py)
* [Skip List](data_structures/linked_list/skip_list.py)
* [Swap Nodes](data_structures/linked_list/swap_nodes.py)
* Queue
* [Circular Queue](data_structures/queue/circular_queue.py)
* [Circular Queue Linked List](data_structures/queue/circular_queue_linked_list.py)
* [Double Ended Queue](data_structures/queue/double_ended_queue.py)
* [Linked Queue](data_structures/queue/linked_queue.py)
* [Priority Queue Using List](data_structures/queue/priority_queue_using_list.py)
* [Queue On List](data_structures/queue/queue_on_list.py)
* [Queue On Pseudo Stack](data_structures/queue/queue_on_pseudo_stack.py)
* Stacks
* [Balanced Parentheses](data_structures/stacks/balanced_parentheses.py)
* [Dijkstras Two Stack Algorithm](data_structures/stacks/dijkstras_two_stack_algorithm.py)
* [Evaluate Postfix Notations](data_structures/stacks/evaluate_postfix_notations.py)
* [Infix To Postfix Conversion](data_structures/stacks/infix_to_postfix_conversion.py)
* [Infix To Prefix Conversion](data_structures/stacks/infix_to_prefix_conversion.py)
* [Next Greater Element](data_structures/stacks/next_greater_element.py)
* [Postfix Evaluation](data_structures/stacks/postfix_evaluation.py)
* [Prefix Evaluation](data_structures/stacks/prefix_evaluation.py)
* [Stack](data_structures/stacks/stack.py)
* [Stack With Doubly Linked List](data_structures/stacks/stack_with_doubly_linked_list.py)
* [Stack With Singly Linked List](data_structures/stacks/stack_with_singly_linked_list.py)
* [Stock Span Problem](data_structures/stacks/stock_span_problem.py)
* Trie
* [Radix Tree](data_structures/trie/radix_tree.py)
* [Trie](data_structures/trie/trie.py)
## Digital Image Processing
* [Change Brightness](digital_image_processing/change_brightness.py)
* [Change Contrast](digital_image_processing/change_contrast.py)
* [Convert To Negative](digital_image_processing/convert_to_negative.py)
* Dithering
* [Burkes](digital_image_processing/dithering/burkes.py)
* Edge Detection
* [Canny](digital_image_processing/edge_detection/canny.py)
* Filters
* [Bilateral Filter](digital_image_processing/filters/bilateral_filter.py)
* [Convolve](digital_image_processing/filters/convolve.py)
* [Gabor Filter](digital_image_processing/filters/gabor_filter.py)
* [Gaussian Filter](digital_image_processing/filters/gaussian_filter.py)
* [Local Binary Pattern](digital_image_processing/filters/local_binary_pattern.py)
* [Median Filter](digital_image_processing/filters/median_filter.py)
* [Sobel Filter](digital_image_processing/filters/sobel_filter.py)
* Histogram Equalization
* [Histogram Stretch](digital_image_processing/histogram_equalization/histogram_stretch.py)
* [Index Calculation](digital_image_processing/index_calculation.py)
* Morphological Operations
* [Dilation Operation](digital_image_processing/morphological_operations/dilation_operation.py)
* [Erosion Operation](digital_image_processing/morphological_operations/erosion_operation.py)
* Resize
* [Resize](digital_image_processing/resize/resize.py)
* Rotation
* [Rotation](digital_image_processing/rotation/rotation.py)
* [Sepia](digital_image_processing/sepia.py)
* [Test Digital Image Processing](digital_image_processing/test_digital_image_processing.py)
## Divide And Conquer
* [Closest Pair Of Points](divide_and_conquer/closest_pair_of_points.py)
* [Convex Hull](divide_and_conquer/convex_hull.py)
* [Heaps Algorithm](divide_and_conquer/heaps_algorithm.py)
* [Heaps Algorithm Iterative](divide_and_conquer/heaps_algorithm_iterative.py)
* [Inversions](divide_and_conquer/inversions.py)
* [Kth Order Statistic](divide_and_conquer/kth_order_statistic.py)
* [Max Difference Pair](divide_and_conquer/max_difference_pair.py)
* [Max Subarray Sum](divide_and_conquer/max_subarray_sum.py)
* [Mergesort](divide_and_conquer/mergesort.py)
* [Peak](divide_and_conquer/peak.py)
* [Power](divide_and_conquer/power.py)
* [Strassen Matrix Multiplication](divide_and_conquer/strassen_matrix_multiplication.py)
## Dynamic Programming
* [Abbreviation](dynamic_programming/abbreviation.py)
* [All Construct](dynamic_programming/all_construct.py)
* [Bitmask](dynamic_programming/bitmask.py)
* [Catalan Numbers](dynamic_programming/catalan_numbers.py)
* [Climbing Stairs](dynamic_programming/climbing_stairs.py)
* [Combination Sum Iv](dynamic_programming/combination_sum_iv.py)
* [Edit Distance](dynamic_programming/edit_distance.py)
* [Factorial](dynamic_programming/factorial.py)
* [Fast Fibonacci](dynamic_programming/fast_fibonacci.py)
* [Fibonacci](dynamic_programming/fibonacci.py)
* [Fizz Buzz](dynamic_programming/fizz_buzz.py)
* [Floyd Warshall](dynamic_programming/floyd_warshall.py)
* [Integer Partition](dynamic_programming/integer_partition.py)
* [Iterating Through Submasks](dynamic_programming/iterating_through_submasks.py)
* [Knapsack](dynamic_programming/knapsack.py)
* [Longest Common Subsequence](dynamic_programming/longest_common_subsequence.py)
* [Longest Common Substring](dynamic_programming/longest_common_substring.py)
* [Longest Increasing Subsequence](dynamic_programming/longest_increasing_subsequence.py)
* [Longest Increasing Subsequence O(Nlogn)](dynamic_programming/longest_increasing_subsequence_o(nlogn).py)
* [Longest Sub Array](dynamic_programming/longest_sub_array.py)
* [Matrix Chain Order](dynamic_programming/matrix_chain_order.py)
* [Max Non Adjacent Sum](dynamic_programming/max_non_adjacent_sum.py)
* [Max Sub Array](dynamic_programming/max_sub_array.py)
* [Max Sum Contiguous Subsequence](dynamic_programming/max_sum_contiguous_subsequence.py)
* [Min Distance Up Bottom](dynamic_programming/min_distance_up_bottom.py)
* [Minimum Coin Change](dynamic_programming/minimum_coin_change.py)
* [Minimum Cost Path](dynamic_programming/minimum_cost_path.py)
* [Minimum Partition](dynamic_programming/minimum_partition.py)
* [Minimum Squares To Represent A Number](dynamic_programming/minimum_squares_to_represent_a_number.py)
* [Minimum Steps To One](dynamic_programming/minimum_steps_to_one.py)
* [Minimum Tickets Cost](dynamic_programming/minimum_tickets_cost.py)
* [Optimal Binary Search Tree](dynamic_programming/optimal_binary_search_tree.py)
* [Palindrome Partitioning](dynamic_programming/palindrome_partitioning.py)
* [Rod Cutting](dynamic_programming/rod_cutting.py)
* [Subset Generation](dynamic_programming/subset_generation.py)
* [Sum Of Subset](dynamic_programming/sum_of_subset.py)
* [Viterbi](dynamic_programming/viterbi.py)
* [Word Break](dynamic_programming/word_break.py)
## Electronics
* [Builtin Voltage](electronics/builtin_voltage.py)
* [Carrier Concentration](electronics/carrier_concentration.py)
* [Circular Convolution](electronics/circular_convolution.py)
* [Coulombs Law](electronics/coulombs_law.py)
* [Electric Conductivity](electronics/electric_conductivity.py)
* [Electric Power](electronics/electric_power.py)
* [Electrical Impedance](electronics/electrical_impedance.py)
* [Ind Reactance](electronics/ind_reactance.py)
* [Ohms Law](electronics/ohms_law.py)
* [Resistor Equivalence](electronics/resistor_equivalence.py)
* [Resonant Frequency](electronics/resonant_frequency.py)
## File Transfer
* [Receive File](file_transfer/receive_file.py)
* [Send File](file_transfer/send_file.py)
* Tests
* [Test Send File](file_transfer/tests/test_send_file.py)
## Financial
* [Equated Monthly Installments](financial/equated_monthly_installments.py)
* [Interest](financial/interest.py)
* [Price Plus Tax](financial/price_plus_tax.py)
## Fractals
* [Julia Sets](fractals/julia_sets.py)
* [Koch Snowflake](fractals/koch_snowflake.py)
* [Mandelbrot](fractals/mandelbrot.py)
* [Sierpinski Triangle](fractals/sierpinski_triangle.py)
## Fuzzy Logic
* [Fuzzy Operations](fuzzy_logic/fuzzy_operations.py)
## Genetic Algorithm
* [Basic String](genetic_algorithm/basic_string.py)
## Geodesy
* [Haversine Distance](geodesy/haversine_distance.py)
* [Lamberts Ellipsoidal Distance](geodesy/lamberts_ellipsoidal_distance.py)
## Graphics
* [Bezier Curve](graphics/bezier_curve.py)
* [Vector3 For 2D Rendering](graphics/vector3_for_2d_rendering.py)
## Graphs
* [A Star](graphs/a_star.py)
* [Articulation Points](graphs/articulation_points.py)
* [Basic Graphs](graphs/basic_graphs.py)
* [Bellman Ford](graphs/bellman_ford.py)
* [Bi Directional Dijkstra](graphs/bi_directional_dijkstra.py)
* [Bidirectional A Star](graphs/bidirectional_a_star.py)
* [Bidirectional Breadth First Search](graphs/bidirectional_breadth_first_search.py)
* [Boruvka](graphs/boruvka.py)
* [Breadth First Search](graphs/breadth_first_search.py)
* [Breadth First Search 2](graphs/breadth_first_search_2.py)
* [Breadth First Search Shortest Path](graphs/breadth_first_search_shortest_path.py)
* [Breadth First Search Shortest Path 2](graphs/breadth_first_search_shortest_path_2.py)
* [Breadth First Search Zero One Shortest Path](graphs/breadth_first_search_zero_one_shortest_path.py)
* [Check Bipartite Graph Bfs](graphs/check_bipartite_graph_bfs.py)
* [Check Bipartite Graph Dfs](graphs/check_bipartite_graph_dfs.py)
* [Check Cycle](graphs/check_cycle.py)
* [Connected Components](graphs/connected_components.py)
* [Depth First Search](graphs/depth_first_search.py)
* [Depth First Search 2](graphs/depth_first_search_2.py)
* [Dijkstra](graphs/dijkstra.py)
* [Dijkstra 2](graphs/dijkstra_2.py)
* [Dijkstra Algorithm](graphs/dijkstra_algorithm.py)
* [Dijkstra Alternate](graphs/dijkstra_alternate.py)
* [Dinic](graphs/dinic.py)
* [Directed And Undirected (Weighted) Graph](graphs/directed_and_undirected_(weighted)_graph.py)
* [Edmonds Karp Multiple Source And Sink](graphs/edmonds_karp_multiple_source_and_sink.py)
* [Eulerian Path And Circuit For Undirected Graph](graphs/eulerian_path_and_circuit_for_undirected_graph.py)
* [Even Tree](graphs/even_tree.py)
* [Finding Bridges](graphs/finding_bridges.py)
* [Frequent Pattern Graph Miner](graphs/frequent_pattern_graph_miner.py)
* [G Topological Sort](graphs/g_topological_sort.py)
* [Gale Shapley Bigraph](graphs/gale_shapley_bigraph.py)
* [Graph List](graphs/graph_list.py)
* [Graph Matrix](graphs/graph_matrix.py)
* [Graphs Floyd Warshall](graphs/graphs_floyd_warshall.py)
* [Greedy Best First](graphs/greedy_best_first.py)
* [Greedy Min Vertex Cover](graphs/greedy_min_vertex_cover.py)
* [Kahns Algorithm Long](graphs/kahns_algorithm_long.py)
* [Kahns Algorithm Topo](graphs/kahns_algorithm_topo.py)
* [Karger](graphs/karger.py)
* [Markov Chain](graphs/markov_chain.py)
* [Matching Min Vertex Cover](graphs/matching_min_vertex_cover.py)
* [Minimum Path Sum](graphs/minimum_path_sum.py)
* [Minimum Spanning Tree Boruvka](graphs/minimum_spanning_tree_boruvka.py)
* [Minimum Spanning Tree Kruskal](graphs/minimum_spanning_tree_kruskal.py)
* [Minimum Spanning Tree Kruskal2](graphs/minimum_spanning_tree_kruskal2.py)
* [Minimum Spanning Tree Prims](graphs/minimum_spanning_tree_prims.py)
* [Minimum Spanning Tree Prims2](graphs/minimum_spanning_tree_prims2.py)
* [Multi Heuristic Astar](graphs/multi_heuristic_astar.py)
* [Page Rank](graphs/page_rank.py)
* [Prim](graphs/prim.py)
* [Random Graph Generator](graphs/random_graph_generator.py)
* [Scc Kosaraju](graphs/scc_kosaraju.py)
* [Strongly Connected Components](graphs/strongly_connected_components.py)
* [Tarjans Scc](graphs/tarjans_scc.py)
* Tests
* [Test Min Spanning Tree Kruskal](graphs/tests/test_min_spanning_tree_kruskal.py)
* [Test Min Spanning Tree Prim](graphs/tests/test_min_spanning_tree_prim.py)
## Greedy Methods
* [Fractional Knapsack](greedy_methods/fractional_knapsack.py)
* [Fractional Knapsack 2](greedy_methods/fractional_knapsack_2.py)
* [Optimal Merge Pattern](greedy_methods/optimal_merge_pattern.py)
## Hashes
* [Adler32](hashes/adler32.py)
* [Chaos Machine](hashes/chaos_machine.py)
* [Djb2](hashes/djb2.py)
* [Elf](hashes/elf.py)
* [Enigma Machine](hashes/enigma_machine.py)
* [Hamming Code](hashes/hamming_code.py)
* [Luhn](hashes/luhn.py)
* [Md5](hashes/md5.py)
* [Sdbm](hashes/sdbm.py)
* [Sha1](hashes/sha1.py)
* [Sha256](hashes/sha256.py)
## Knapsack
* [Greedy Knapsack](knapsack/greedy_knapsack.py)
* [Knapsack](knapsack/knapsack.py)
* [Recursive Approach Knapsack](knapsack/recursive_approach_knapsack.py)
* Tests
* [Test Greedy Knapsack](knapsack/tests/test_greedy_knapsack.py)
* [Test Knapsack](knapsack/tests/test_knapsack.py)
## Linear Algebra
* Src
* [Conjugate Gradient](linear_algebra/src/conjugate_gradient.py)
* [Lib](linear_algebra/src/lib.py)
* [Polynom For Points](linear_algebra/src/polynom_for_points.py)
* [Power Iteration](linear_algebra/src/power_iteration.py)
* [Rayleigh Quotient](linear_algebra/src/rayleigh_quotient.py)
* [Schur Complement](linear_algebra/src/schur_complement.py)
* [Test Linear Algebra](linear_algebra/src/test_linear_algebra.py)
* [Transformations 2D](linear_algebra/src/transformations_2d.py)
## Machine Learning
* [Astar](machine_learning/astar.py)
* [Data Transformations](machine_learning/data_transformations.py)
* [Decision Tree](machine_learning/decision_tree.py)
* Forecasting
* [Run](machine_learning/forecasting/run.py)
* [Gradient Descent](machine_learning/gradient_descent.py)
* [K Means Clust](machine_learning/k_means_clust.py)
* [K Nearest Neighbours](machine_learning/k_nearest_neighbours.py)
* [Knn Sklearn](machine_learning/knn_sklearn.py)
* [Linear Discriminant Analysis](machine_learning/linear_discriminant_analysis.py)
* [Linear Regression](machine_learning/linear_regression.py)
* Local Weighted Learning
* [Local Weighted Learning](machine_learning/local_weighted_learning/local_weighted_learning.py)
* [Logistic Regression](machine_learning/logistic_regression.py)
* Lstm
* [Lstm Prediction](machine_learning/lstm/lstm_prediction.py)
* [Multilayer Perceptron Classifier](machine_learning/multilayer_perceptron_classifier.py)
* [Polymonial Regression](machine_learning/polymonial_regression.py)
* [Scoring Functions](machine_learning/scoring_functions.py)
* [Self Organizing Map](machine_learning/self_organizing_map.py)
* [Sequential Minimum Optimization](machine_learning/sequential_minimum_optimization.py)
* [Similarity Search](machine_learning/similarity_search.py)
* [Support Vector Machines](machine_learning/support_vector_machines.py)
* [Word Frequency Functions](machine_learning/word_frequency_functions.py)
* [Xgboost Classifier](machine_learning/xgboost_classifier.py)
* [Xgboost Regressor](machine_learning/xgboost_regressor.py)
## Maths
* [3N Plus 1](maths/3n_plus_1.py)
* [Abs](maths/abs.py)
* [Add](maths/add.py)
* [Addition Without Arithmetic](maths/addition_without_arithmetic.py)
* [Aliquot Sum](maths/aliquot_sum.py)
* [Allocation Number](maths/allocation_number.py)
* [Arc Length](maths/arc_length.py)
* [Area](maths/area.py)
* [Area Under Curve](maths/area_under_curve.py)
* [Armstrong Numbers](maths/armstrong_numbers.py)
* [Automorphic Number](maths/automorphic_number.py)
* [Average Absolute Deviation](maths/average_absolute_deviation.py)
* [Average Mean](maths/average_mean.py)
* [Average Median](maths/average_median.py)
* [Average Mode](maths/average_mode.py)
* [Bailey Borwein Plouffe](maths/bailey_borwein_plouffe.py)
* [Basic Maths](maths/basic_maths.py)
* [Binary Exp Mod](maths/binary_exp_mod.py)
* [Binary Exponentiation](maths/binary_exponentiation.py)
* [Binary Exponentiation 2](maths/binary_exponentiation_2.py)
* [Binary Exponentiation 3](maths/binary_exponentiation_3.py)
* [Binomial Coefficient](maths/binomial_coefficient.py)
* [Binomial Distribution](maths/binomial_distribution.py)
* [Bisection](maths/bisection.py)
* [Carmichael Number](maths/carmichael_number.py)
* [Catalan Number](maths/catalan_number.py)
* [Ceil](maths/ceil.py)
* [Check Polygon](maths/check_polygon.py)
* [Chudnovsky Algorithm](maths/chudnovsky_algorithm.py)
* [Collatz Sequence](maths/collatz_sequence.py)
* [Combinations](maths/combinations.py)
* [Decimal Isolate](maths/decimal_isolate.py)
* [Decimal To Fraction](maths/decimal_to_fraction.py)
* [Dodecahedron](maths/dodecahedron.py)
* [Double Factorial Iterative](maths/double_factorial_iterative.py)
* [Double Factorial Recursive](maths/double_factorial_recursive.py)
* [Entropy](maths/entropy.py)
* [Euclidean Distance](maths/euclidean_distance.py)
* [Euclidean Gcd](maths/euclidean_gcd.py)
* [Euler Method](maths/euler_method.py)
* [Euler Modified](maths/euler_modified.py)
* [Eulers Totient](maths/eulers_totient.py)
* [Extended Euclidean Algorithm](maths/extended_euclidean_algorithm.py)
* [Factorial](maths/factorial.py)
* [Factors](maths/factors.py)
* [Fermat Little Theorem](maths/fermat_little_theorem.py)
* [Fibonacci](maths/fibonacci.py)
* [Find Max](maths/find_max.py)
* [Find Max Recursion](maths/find_max_recursion.py)
* [Find Min](maths/find_min.py)
* [Find Min Recursion](maths/find_min_recursion.py)
* [Floor](maths/floor.py)
* [Gamma](maths/gamma.py)
* [Gamma Recursive](maths/gamma_recursive.py)
* [Gaussian](maths/gaussian.py)
* [Gaussian Error Linear Unit](maths/gaussian_error_linear_unit.py)
* [Gcd Of N Numbers](maths/gcd_of_n_numbers.py)
* [Greatest Common Divisor](maths/greatest_common_divisor.py)
* [Greedy Coin Change](maths/greedy_coin_change.py)
* [Hamming Numbers](maths/hamming_numbers.py)
* [Hardy Ramanujanalgo](maths/hardy_ramanujanalgo.py)
* [Hexagonal Number](maths/hexagonal_number.py)
* [Integration By Simpson Approx](maths/integration_by_simpson_approx.py)
* [Is Ip V4 Address Valid](maths/is_ip_v4_address_valid.py)
* [Is Square Free](maths/is_square_free.py)
* [Jaccard Similarity](maths/jaccard_similarity.py)
* [Juggler Sequence](maths/juggler_sequence.py)
* [Kadanes](maths/kadanes.py)
* [Karatsuba](maths/karatsuba.py)
* [Krishnamurthy Number](maths/krishnamurthy_number.py)
* [Kth Lexicographic Permutation](maths/kth_lexicographic_permutation.py)
* [Largest Of Very Large Numbers](maths/largest_of_very_large_numbers.py)
* [Largest Subarray Sum](maths/largest_subarray_sum.py)
* [Least Common Multiple](maths/least_common_multiple.py)
* [Line Length](maths/line_length.py)
* [Liouville Lambda](maths/liouville_lambda.py)
* [Lucas Lehmer Primality Test](maths/lucas_lehmer_primality_test.py)
* [Lucas Series](maths/lucas_series.py)
* [Maclaurin Series](maths/maclaurin_series.py)
* [Manhattan Distance](maths/manhattan_distance.py)
* [Matrix Exponentiation](maths/matrix_exponentiation.py)
* [Max Sum Sliding Window](maths/max_sum_sliding_window.py)
* [Median Of Two Arrays](maths/median_of_two_arrays.py)
* [Miller Rabin](maths/miller_rabin.py)
* [Mobius Function](maths/mobius_function.py)
* [Modular Exponential](maths/modular_exponential.py)
* [Monte Carlo](maths/monte_carlo.py)
* [Monte Carlo Dice](maths/monte_carlo_dice.py)
* [Nevilles Method](maths/nevilles_method.py)
* [Newton Raphson](maths/newton_raphson.py)
* [Number Of Digits](maths/number_of_digits.py)
* [Numerical Integration](maths/numerical_integration.py)
* [Perfect Cube](maths/perfect_cube.py)
* [Perfect Number](maths/perfect_number.py)
* [Perfect Square](maths/perfect_square.py)
* [Persistence](maths/persistence.py)
* [Pi Monte Carlo Estimation](maths/pi_monte_carlo_estimation.py)
* [Points Are Collinear 3D](maths/points_are_collinear_3d.py)
* [Pollard Rho](maths/pollard_rho.py)
* [Polynomial Evaluation](maths/polynomial_evaluation.py)
* Polynomials
* [Single Indeterminate Operations](maths/polynomials/single_indeterminate_operations.py)
* [Power Using Recursion](maths/power_using_recursion.py)
* [Prime Check](maths/prime_check.py)
* [Prime Factors](maths/prime_factors.py)
* [Prime Numbers](maths/prime_numbers.py)
* [Prime Sieve Eratosthenes](maths/prime_sieve_eratosthenes.py)
* [Primelib](maths/primelib.py)
* [Print Multiplication Table](maths/print_multiplication_table.py)
* [Pronic Number](maths/pronic_number.py)
* [Proth Number](maths/proth_number.py)
* [Pythagoras](maths/pythagoras.py)
* [Qr Decomposition](maths/qr_decomposition.py)
* [Quadratic Equations Complex Numbers](maths/quadratic_equations_complex_numbers.py)
* [Radians](maths/radians.py)
* [Radix2 Fft](maths/radix2_fft.py)
* [Relu](maths/relu.py)
* [Runge Kutta](maths/runge_kutta.py)
* [Segmented Sieve](maths/segmented_sieve.py)
* Series
* [Arithmetic](maths/series/arithmetic.py)
* [Geometric](maths/series/geometric.py)
* [Geometric Series](maths/series/geometric_series.py)
* [Harmonic](maths/series/harmonic.py)
* [Harmonic Series](maths/series/harmonic_series.py)
* [Hexagonal Numbers](maths/series/hexagonal_numbers.py)
* [P Series](maths/series/p_series.py)
* [Sieve Of Eratosthenes](maths/sieve_of_eratosthenes.py)
* [Sigmoid](maths/sigmoid.py)
* [Sigmoid Linear Unit](maths/sigmoid_linear_unit.py)
* [Signum](maths/signum.py)
* [Simpson Rule](maths/simpson_rule.py)
* [Sin](maths/sin.py)
* [Sock Merchant](maths/sock_merchant.py)
* [Softmax](maths/softmax.py)
* [Square Root](maths/square_root.py)
* [Sum Of Arithmetic Series](maths/sum_of_arithmetic_series.py)
* [Sum Of Digits](maths/sum_of_digits.py)
* [Sum Of Geometric Progression](maths/sum_of_geometric_progression.py)
* [Sum Of Harmonic Series](maths/sum_of_harmonic_series.py)
* [Sumset](maths/sumset.py)
* [Sylvester Sequence](maths/sylvester_sequence.py)
* [Test Prime Check](maths/test_prime_check.py)
* [Trapezoidal Rule](maths/trapezoidal_rule.py)
* [Triplet Sum](maths/triplet_sum.py)
* [Twin Prime](maths/twin_prime.py)
* [Two Pointer](maths/two_pointer.py)
* [Two Sum](maths/two_sum.py)
* [Ugly Numbers](maths/ugly_numbers.py)
* [Volume](maths/volume.py)
* [Weird Number](maths/weird_number.py)
* [Zellers Congruence](maths/zellers_congruence.py)
## Matrix
* [Binary Search Matrix](matrix/binary_search_matrix.py)
* [Count Islands In Matrix](matrix/count_islands_in_matrix.py)
* [Count Paths](matrix/count_paths.py)
* [Cramers Rule 2X2](matrix/cramers_rule_2x2.py)
* [Inverse Of Matrix](matrix/inverse_of_matrix.py)
* [Largest Square Area In Matrix](matrix/largest_square_area_in_matrix.py)
* [Matrix Class](matrix/matrix_class.py)
* [Matrix Operation](matrix/matrix_operation.py)
* [Max Area Of Island](matrix/max_area_of_island.py)
* [Nth Fibonacci Using Matrix Exponentiation](matrix/nth_fibonacci_using_matrix_exponentiation.py)
* [Pascal Triangle](matrix/pascal_triangle.py)
* [Rotate Matrix](matrix/rotate_matrix.py)
* [Searching In Sorted Matrix](matrix/searching_in_sorted_matrix.py)
* [Sherman Morrison](matrix/sherman_morrison.py)
* [Spiral Print](matrix/spiral_print.py)
* Tests
* [Test Matrix Operation](matrix/tests/test_matrix_operation.py)
## Networking Flow
* [Ford Fulkerson](networking_flow/ford_fulkerson.py)
* [Minimum Cut](networking_flow/minimum_cut.py)
## Neural Network
* [2 Hidden Layers Neural Network](neural_network/2_hidden_layers_neural_network.py)
* [Back Propagation Neural Network](neural_network/back_propagation_neural_network.py)
* [Convolution Neural Network](neural_network/convolution_neural_network.py)
* [Perceptron](neural_network/perceptron.py)
* [Simple Neural Network](neural_network/simple_neural_network.py)
## Other
* [Activity Selection](other/activity_selection.py)
* [Alternative List Arrange](other/alternative_list_arrange.py)
* [Davisb Putnamb Logemannb Loveland](other/davisb_putnamb_logemannb_loveland.py)
* [Dijkstra Bankers Algorithm](other/dijkstra_bankers_algorithm.py)
* [Doomsday](other/doomsday.py)
* [Fischer Yates Shuffle](other/fischer_yates_shuffle.py)
* [Gauss Easter](other/gauss_easter.py)
* [Graham Scan](other/graham_scan.py)
* [Greedy](other/greedy.py)
* [Least Recently Used](other/least_recently_used.py)
* [Lfu Cache](other/lfu_cache.py)
* [Linear Congruential Generator](other/linear_congruential_generator.py)
* [Lru Cache](other/lru_cache.py)
* [Magicdiamondpattern](other/magicdiamondpattern.py)
* [Maximum Subarray](other/maximum_subarray.py)
* [Nested Brackets](other/nested_brackets.py)
* [Password](other/password.py)
* [Quine](other/quine.py)
* [Scoring Algorithm](other/scoring_algorithm.py)
* [Sdes](other/sdes.py)
* [Tower Of Hanoi](other/tower_of_hanoi.py)
## Physics
* [Archimedes Principle](physics/archimedes_principle.py)
* [Casimir Effect](physics/casimir_effect.py)
* [Centripetal Force](physics/centripetal_force.py)
* [Horizontal Projectile Motion](physics/horizontal_projectile_motion.py)
* [Hubble Parameter](physics/hubble_parameter.py)
* [Ideal Gas Law](physics/ideal_gas_law.py)
* [Kinetic Energy](physics/kinetic_energy.py)
* [Lorentz Transformation Four Vector](physics/lorentz_transformation_four_vector.py)
* [Malus Law](physics/malus_law.py)
* [N Body Simulation](physics/n_body_simulation.py)
* [Newtons Law Of Gravitation](physics/newtons_law_of_gravitation.py)
* [Newtons Second Law Of Motion](physics/newtons_second_law_of_motion.py)
* [Potential Energy](physics/potential_energy.py)
* [Rms Speed Of Molecule](physics/rms_speed_of_molecule.py)
* [Shear Stress](physics/shear_stress.py)
## Project Euler
* Problem 001
* [Sol1](project_euler/problem_001/sol1.py)
* [Sol2](project_euler/problem_001/sol2.py)
* [Sol3](project_euler/problem_001/sol3.py)
* [Sol4](project_euler/problem_001/sol4.py)
* [Sol5](project_euler/problem_001/sol5.py)
* [Sol6](project_euler/problem_001/sol6.py)
* [Sol7](project_euler/problem_001/sol7.py)
* Problem 002
* [Sol1](project_euler/problem_002/sol1.py)
* [Sol2](project_euler/problem_002/sol2.py)
* [Sol3](project_euler/problem_002/sol3.py)
* [Sol4](project_euler/problem_002/sol4.py)
* [Sol5](project_euler/problem_002/sol5.py)
* Problem 003
* [Sol1](project_euler/problem_003/sol1.py)
* [Sol2](project_euler/problem_003/sol2.py)
* [Sol3](project_euler/problem_003/sol3.py)
* Problem 004
* [Sol1](project_euler/problem_004/sol1.py)
* [Sol2](project_euler/problem_004/sol2.py)
* Problem 005
* [Sol1](project_euler/problem_005/sol1.py)
* [Sol2](project_euler/problem_005/sol2.py)
* Problem 006
* [Sol1](project_euler/problem_006/sol1.py)
* [Sol2](project_euler/problem_006/sol2.py)
* [Sol3](project_euler/problem_006/sol3.py)
* [Sol4](project_euler/problem_006/sol4.py)
* Problem 007
* [Sol1](project_euler/problem_007/sol1.py)
* [Sol2](project_euler/problem_007/sol2.py)
* [Sol3](project_euler/problem_007/sol3.py)
* Problem 008
* [Sol1](project_euler/problem_008/sol1.py)
* [Sol2](project_euler/problem_008/sol2.py)
* [Sol3](project_euler/problem_008/sol3.py)
* Problem 009
* [Sol1](project_euler/problem_009/sol1.py)
* [Sol2](project_euler/problem_009/sol2.py)
* [Sol3](project_euler/problem_009/sol3.py)
* Problem 010
* [Sol1](project_euler/problem_010/sol1.py)
* [Sol2](project_euler/problem_010/sol2.py)
* [Sol3](project_euler/problem_010/sol3.py)
* Problem 011
* [Sol1](project_euler/problem_011/sol1.py)
* [Sol2](project_euler/problem_011/sol2.py)
* Problem 012
* [Sol1](project_euler/problem_012/sol1.py)
* [Sol2](project_euler/problem_012/sol2.py)
* Problem 013
* [Sol1](project_euler/problem_013/sol1.py)
* Problem 014
* [Sol1](project_euler/problem_014/sol1.py)
* [Sol2](project_euler/problem_014/sol2.py)
* Problem 015
* [Sol1](project_euler/problem_015/sol1.py)
* Problem 016
* [Sol1](project_euler/problem_016/sol1.py)
* [Sol2](project_euler/problem_016/sol2.py)
* Problem 017
* [Sol1](project_euler/problem_017/sol1.py)
* Problem 018
* [Solution](project_euler/problem_018/solution.py)
* Problem 019
* [Sol1](project_euler/problem_019/sol1.py)
* Problem 020
* [Sol1](project_euler/problem_020/sol1.py)
* [Sol2](project_euler/problem_020/sol2.py)
* [Sol3](project_euler/problem_020/sol3.py)
* [Sol4](project_euler/problem_020/sol4.py)
* Problem 021
* [Sol1](project_euler/problem_021/sol1.py)
* Problem 022
* [Sol1](project_euler/problem_022/sol1.py)
* [Sol2](project_euler/problem_022/sol2.py)
* Problem 023
* [Sol1](project_euler/problem_023/sol1.py)
* Problem 024
* [Sol1](project_euler/problem_024/sol1.py)
* Problem 025
* [Sol1](project_euler/problem_025/sol1.py)
* [Sol2](project_euler/problem_025/sol2.py)
* [Sol3](project_euler/problem_025/sol3.py)
* Problem 026
* [Sol1](project_euler/problem_026/sol1.py)
* Problem 027
* [Sol1](project_euler/problem_027/sol1.py)
* Problem 028
* [Sol1](project_euler/problem_028/sol1.py)
* Problem 029
* [Sol1](project_euler/problem_029/sol1.py)
* Problem 030
* [Sol1](project_euler/problem_030/sol1.py)
* Problem 031
* [Sol1](project_euler/problem_031/sol1.py)
* [Sol2](project_euler/problem_031/sol2.py)
* Problem 032
* [Sol32](project_euler/problem_032/sol32.py)
* Problem 033
* [Sol1](project_euler/problem_033/sol1.py)
* Problem 034
* [Sol1](project_euler/problem_034/sol1.py)
* Problem 035
* [Sol1](project_euler/problem_035/sol1.py)
* Problem 036
* [Sol1](project_euler/problem_036/sol1.py)
* Problem 037
* [Sol1](project_euler/problem_037/sol1.py)
* Problem 038
* [Sol1](project_euler/problem_038/sol1.py)
* Problem 039
* [Sol1](project_euler/problem_039/sol1.py)
* Problem 040
* [Sol1](project_euler/problem_040/sol1.py)
* Problem 041
* [Sol1](project_euler/problem_041/sol1.py)
* Problem 042
* [Solution42](project_euler/problem_042/solution42.py)
* Problem 043
* [Sol1](project_euler/problem_043/sol1.py)
* Problem 044
* [Sol1](project_euler/problem_044/sol1.py)
* Problem 045
* [Sol1](project_euler/problem_045/sol1.py)
* Problem 046
* [Sol1](project_euler/problem_046/sol1.py)
* Problem 047
* [Sol1](project_euler/problem_047/sol1.py)
* Problem 048
* [Sol1](project_euler/problem_048/sol1.py)
* Problem 049
* [Sol1](project_euler/problem_049/sol1.py)
* Problem 050
* [Sol1](project_euler/problem_050/sol1.py)
* Problem 051
* [Sol1](project_euler/problem_051/sol1.py)
* Problem 052
* [Sol1](project_euler/problem_052/sol1.py)
* Problem 053
* [Sol1](project_euler/problem_053/sol1.py)
* Problem 054
* [Sol1](project_euler/problem_054/sol1.py)
* [Test Poker Hand](project_euler/problem_054/test_poker_hand.py)
* Problem 055
* [Sol1](project_euler/problem_055/sol1.py)
* Problem 056
* [Sol1](project_euler/problem_056/sol1.py)
* Problem 057
* [Sol1](project_euler/problem_057/sol1.py)
* Problem 058
* [Sol1](project_euler/problem_058/sol1.py)
* Problem 059
* [Sol1](project_euler/problem_059/sol1.py)
* Problem 062
* [Sol1](project_euler/problem_062/sol1.py)
* Problem 063
* [Sol1](project_euler/problem_063/sol1.py)
* Problem 064
* [Sol1](project_euler/problem_064/sol1.py)
* Problem 065
* [Sol1](project_euler/problem_065/sol1.py)
* Problem 067
* [Sol1](project_euler/problem_067/sol1.py)
* [Sol2](project_euler/problem_067/sol2.py)
* Problem 068
* [Sol1](project_euler/problem_068/sol1.py)
* Problem 069
* [Sol1](project_euler/problem_069/sol1.py)
* Problem 070
* [Sol1](project_euler/problem_070/sol1.py)
* Problem 071
* [Sol1](project_euler/problem_071/sol1.py)
* Problem 072
* [Sol1](project_euler/problem_072/sol1.py)
* [Sol2](project_euler/problem_072/sol2.py)
* Problem 073
* [Sol1](project_euler/problem_073/sol1.py)
* Problem 074
* [Sol1](project_euler/problem_074/sol1.py)
* [Sol2](project_euler/problem_074/sol2.py)
* Problem 075
* [Sol1](project_euler/problem_075/sol1.py)
* Problem 076
* [Sol1](project_euler/problem_076/sol1.py)
* Problem 077
* [Sol1](project_euler/problem_077/sol1.py)
* Problem 078
* [Sol1](project_euler/problem_078/sol1.py)
* Problem 080
* [Sol1](project_euler/problem_080/sol1.py)
* Problem 081
* [Sol1](project_euler/problem_081/sol1.py)
* Problem 082
* [Sol1](project_euler/problem_082/sol1.py)
* Problem 085
* [Sol1](project_euler/problem_085/sol1.py)
* Problem 086
* [Sol1](project_euler/problem_086/sol1.py)
* Problem 087
* [Sol1](project_euler/problem_087/sol1.py)
* Problem 089
* [Sol1](project_euler/problem_089/sol1.py)
* Problem 091
* [Sol1](project_euler/problem_091/sol1.py)
* Problem 092
* [Sol1](project_euler/problem_092/sol1.py)
* Problem 097
* [Sol1](project_euler/problem_097/sol1.py)
* Problem 099
* [Sol1](project_euler/problem_099/sol1.py)
* Problem 100
* [Sol1](project_euler/problem_100/sol1.py)
* Problem 101
* [Sol1](project_euler/problem_101/sol1.py)
* Problem 102
* [Sol1](project_euler/problem_102/sol1.py)
* Problem 104
* [Sol1](project_euler/problem_104/sol1.py)
* Problem 107
* [Sol1](project_euler/problem_107/sol1.py)
* Problem 109
* [Sol1](project_euler/problem_109/sol1.py)
* Problem 112
* [Sol1](project_euler/problem_112/sol1.py)
* Problem 113
* [Sol1](project_euler/problem_113/sol1.py)
* Problem 114
* [Sol1](project_euler/problem_114/sol1.py)
* Problem 115
* [Sol1](project_euler/problem_115/sol1.py)
* Problem 116
* [Sol1](project_euler/problem_116/sol1.py)
* Problem 117
* [Sol1](project_euler/problem_117/sol1.py)
* Problem 119
* [Sol1](project_euler/problem_119/sol1.py)
* Problem 120
* [Sol1](project_euler/problem_120/sol1.py)
* Problem 121
* [Sol1](project_euler/problem_121/sol1.py)
* Problem 123
* [Sol1](project_euler/problem_123/sol1.py)
* Problem 125
* [Sol1](project_euler/problem_125/sol1.py)
* Problem 129
* [Sol1](project_euler/problem_129/sol1.py)
* Problem 131
* [Sol1](project_euler/problem_131/sol1.py)
* Problem 135
* [Sol1](project_euler/problem_135/sol1.py)
* Problem 144
* [Sol1](project_euler/problem_144/sol1.py)
* Problem 145
* [Sol1](project_euler/problem_145/sol1.py)
* Problem 173
* [Sol1](project_euler/problem_173/sol1.py)
* Problem 174
* [Sol1](project_euler/problem_174/sol1.py)
* Problem 180
* [Sol1](project_euler/problem_180/sol1.py)
* Problem 188
* [Sol1](project_euler/problem_188/sol1.py)
* Problem 191
* [Sol1](project_euler/problem_191/sol1.py)
* Problem 203
* [Sol1](project_euler/problem_203/sol1.py)
* Problem 205
* [Sol1](project_euler/problem_205/sol1.py)
* Problem 206
* [Sol1](project_euler/problem_206/sol1.py)
* Problem 207
* [Sol1](project_euler/problem_207/sol1.py)
* Problem 234
* [Sol1](project_euler/problem_234/sol1.py)
* Problem 301
* [Sol1](project_euler/problem_301/sol1.py)
* Problem 493
* [Sol1](project_euler/problem_493/sol1.py)
* Problem 551
* [Sol1](project_euler/problem_551/sol1.py)
* Problem 587
* [Sol1](project_euler/problem_587/sol1.py)
* Problem 686
* [Sol1](project_euler/problem_686/sol1.py)
## Quantum
* [Bb84](quantum/bb84.py)
* [Deutsch Jozsa](quantum/deutsch_jozsa.py)
* [Half Adder](quantum/half_adder.py)
* [Not Gate](quantum/not_gate.py)
* [Q Fourier Transform](quantum/q_fourier_transform.py)
* [Q Full Adder](quantum/q_full_adder.py)
* [Quantum Entanglement](quantum/quantum_entanglement.py)
* [Quantum Teleportation](quantum/quantum_teleportation.py)
* [Ripple Adder Classic](quantum/ripple_adder_classic.py)
* [Single Qubit Measure](quantum/single_qubit_measure.py)
* [Superdense Coding](quantum/superdense_coding.py)
## Scheduling
* [First Come First Served](scheduling/first_come_first_served.py)
* [Highest Response Ratio Next](scheduling/highest_response_ratio_next.py)
* [Job Sequencing With Deadline](scheduling/job_sequencing_with_deadline.py)
* [Multi Level Feedback Queue](scheduling/multi_level_feedback_queue.py)
* [Non Preemptive Shortest Job First](scheduling/non_preemptive_shortest_job_first.py)
* [Round Robin](scheduling/round_robin.py)
* [Shortest Job First](scheduling/shortest_job_first.py)
## Searches
* [Binary Search](searches/binary_search.py)
* [Binary Tree Traversal](searches/binary_tree_traversal.py)
* [Double Linear Search](searches/double_linear_search.py)
* [Double Linear Search Recursion](searches/double_linear_search_recursion.py)
* [Fibonacci Search](searches/fibonacci_search.py)
* [Hill Climbing](searches/hill_climbing.py)
* [Interpolation Search](searches/interpolation_search.py)
* [Jump Search](searches/jump_search.py)
* [Linear Search](searches/linear_search.py)
* [Quick Select](searches/quick_select.py)
* [Sentinel Linear Search](searches/sentinel_linear_search.py)
* [Simple Binary Search](searches/simple_binary_search.py)
* [Simulated Annealing](searches/simulated_annealing.py)
* [Tabu Search](searches/tabu_search.py)
* [Ternary Search](searches/ternary_search.py)
## Sorts
* [Bead Sort](sorts/bead_sort.py)
* [Bitonic Sort](sorts/bitonic_sort.py)
* [Bogo Sort](sorts/bogo_sort.py)
* [Bubble Sort](sorts/bubble_sort.py)
* [Bucket Sort](sorts/bucket_sort.py)
* [Circle Sort](sorts/circle_sort.py)
* [Cocktail Shaker Sort](sorts/cocktail_shaker_sort.py)
* [Comb Sort](sorts/comb_sort.py)
* [Counting Sort](sorts/counting_sort.py)
* [Cycle Sort](sorts/cycle_sort.py)
* [Double Sort](sorts/double_sort.py)
* [Dutch National Flag Sort](sorts/dutch_national_flag_sort.py)
* [Exchange Sort](sorts/exchange_sort.py)
* [External Sort](sorts/external_sort.py)
* [Gnome Sort](sorts/gnome_sort.py)
* [Heap Sort](sorts/heap_sort.py)
* [Insertion Sort](sorts/insertion_sort.py)
* [Intro Sort](sorts/intro_sort.py)
* [Iterative Merge Sort](sorts/iterative_merge_sort.py)
* [Merge Insertion Sort](sorts/merge_insertion_sort.py)
* [Merge Sort](sorts/merge_sort.py)
* [Msd Radix Sort](sorts/msd_radix_sort.py)
* [Natural Sort](sorts/natural_sort.py)
* [Odd Even Sort](sorts/odd_even_sort.py)
* [Odd Even Transposition Parallel](sorts/odd_even_transposition_parallel.py)
* [Odd Even Transposition Single Threaded](sorts/odd_even_transposition_single_threaded.py)
* [Pancake Sort](sorts/pancake_sort.py)
* [Patience Sort](sorts/patience_sort.py)
* [Pigeon Sort](sorts/pigeon_sort.py)
* [Pigeonhole Sort](sorts/pigeonhole_sort.py)
* [Quick Sort](sorts/quick_sort.py)
* [Quick Sort 3 Partition](sorts/quick_sort_3_partition.py)
* [Radix Sort](sorts/radix_sort.py)
* [Random Normal Distribution Quicksort](sorts/random_normal_distribution_quicksort.py)
* [Random Pivot Quick Sort](sorts/random_pivot_quick_sort.py)
* [Recursive Bubble Sort](sorts/recursive_bubble_sort.py)
* [Recursive Insertion Sort](sorts/recursive_insertion_sort.py)
* [Recursive Mergesort Array](sorts/recursive_mergesort_array.py)
* [Recursive Quick Sort](sorts/recursive_quick_sort.py)
* [Selection Sort](sorts/selection_sort.py)
* [Shell Sort](sorts/shell_sort.py)
* [Shrink Shell Sort](sorts/shrink_shell_sort.py)
* [Slowsort](sorts/slowsort.py)
* [Stooge Sort](sorts/stooge_sort.py)
* [Strand Sort](sorts/strand_sort.py)
* [Tim Sort](sorts/tim_sort.py)
* [Topological Sort](sorts/topological_sort.py)
* [Tree Sort](sorts/tree_sort.py)
* [Unknown Sort](sorts/unknown_sort.py)
* [Wiggle Sort](sorts/wiggle_sort.py)
## Strings
* [Aho Corasick](strings/aho_corasick.py)
* [Alternative String Arrange](strings/alternative_string_arrange.py)
* [Anagrams](strings/anagrams.py)
* [Autocomplete Using Trie](strings/autocomplete_using_trie.py)
* [Barcode Validator](strings/barcode_validator.py)
* [Boyer Moore Search](strings/boyer_moore_search.py)
* [Can String Be Rearranged As Palindrome](strings/can_string_be_rearranged_as_palindrome.py)
* [Capitalize](strings/capitalize.py)
* [Check Anagrams](strings/check_anagrams.py)
* [Credit Card Validator](strings/credit_card_validator.py)
* [Detecting English Programmatically](strings/detecting_english_programmatically.py)
* [Dna](strings/dna.py)
* [Frequency Finder](strings/frequency_finder.py)
* [Hamming Distance](strings/hamming_distance.py)
* [Indian Phone Validator](strings/indian_phone_validator.py)
* [Is Contains Unique Chars](strings/is_contains_unique_chars.py)
* [Is Isogram](strings/is_isogram.py)
* [Is Palindrome](strings/is_palindrome.py)
* [Is Pangram](strings/is_pangram.py)
* [Is Spain National Id](strings/is_spain_national_id.py)
* [Is Srilankan Phone Number](strings/is_srilankan_phone_number.py)
* [Jaro Winkler](strings/jaro_winkler.py)
* [Join](strings/join.py)
* [Knuth Morris Pratt](strings/knuth_morris_pratt.py)
* [Levenshtein Distance](strings/levenshtein_distance.py)
* [Lower](strings/lower.py)
* [Manacher](strings/manacher.py)
* [Min Cost String Conversion](strings/min_cost_string_conversion.py)
* [Naive String Search](strings/naive_string_search.py)
* [Ngram](strings/ngram.py)
* [Palindrome](strings/palindrome.py)
* [Prefix Function](strings/prefix_function.py)
* [Rabin Karp](strings/rabin_karp.py)
* [Remove Duplicate](strings/remove_duplicate.py)
* [Reverse Letters](strings/reverse_letters.py)
* [Reverse Long Words](strings/reverse_long_words.py)
* [Reverse Words](strings/reverse_words.py)
* [Snake Case To Camel Pascal Case](strings/snake_case_to_camel_pascal_case.py)
* [Split](strings/split.py)
* [Text Justification](strings/text_justification.py)
* [Upper](strings/upper.py)
* [Wave](strings/wave.py)
* [Wildcard Pattern Matching](strings/wildcard_pattern_matching.py)
* [Word Occurrence](strings/word_occurrence.py)
* [Word Patterns](strings/word_patterns.py)
* [Z Function](strings/z_function.py)
## Web Programming
* [Co2 Emission](web_programming/co2_emission.py)
* [Convert Number To Words](web_programming/convert_number_to_words.py)
* [Covid Stats Via Xpath](web_programming/covid_stats_via_xpath.py)
* [Crawl Google Results](web_programming/crawl_google_results.py)
* [Crawl Google Scholar Citation](web_programming/crawl_google_scholar_citation.py)
* [Currency Converter](web_programming/currency_converter.py)
* [Current Stock Price](web_programming/current_stock_price.py)
* [Current Weather](web_programming/current_weather.py)
* [Daily Horoscope](web_programming/daily_horoscope.py)
* [Download Images From Google Query](web_programming/download_images_from_google_query.py)
* [Emails From Url](web_programming/emails_from_url.py)
* [Fetch Anime And Play](web_programming/fetch_anime_and_play.py)
* [Fetch Bbc News](web_programming/fetch_bbc_news.py)
* [Fetch Github Info](web_programming/fetch_github_info.py)
* [Fetch Jobs](web_programming/fetch_jobs.py)
* [Fetch Quotes](web_programming/fetch_quotes.py)
* [Fetch Well Rx Price](web_programming/fetch_well_rx_price.py)
* [Get Amazon Product Data](web_programming/get_amazon_product_data.py)
* [Get Imdb Top 250 Movies Csv](web_programming/get_imdb_top_250_movies_csv.py)
* [Get Imdbtop](web_programming/get_imdbtop.py)
* [Get Top Hn Posts](web_programming/get_top_hn_posts.py)
* [Get User Tweets](web_programming/get_user_tweets.py)
* [Giphy](web_programming/giphy.py)
* [Instagram Crawler](web_programming/instagram_crawler.py)
* [Instagram Pic](web_programming/instagram_pic.py)
* [Instagram Video](web_programming/instagram_video.py)
* [Nasa Data](web_programming/nasa_data.py)
* [Open Google Results](web_programming/open_google_results.py)
* [Random Anime Character](web_programming/random_anime_character.py)
* [Recaptcha Verification](web_programming/recaptcha_verification.py)
* [Reddit](web_programming/reddit.py)
* [Search Books By Isbn](web_programming/search_books_by_isbn.py)
* [Slack Message](web_programming/slack_message.py)
* [Test Fetch Github Info](web_programming/test_fetch_github_info.py)
* [World Covid19 Stats](web_programming/world_covid19_stats.py)
* [Even Tree](graphs/even_tree.py)
* [Finding Bridges](graphs/finding_bridges.py)
* [Frequent Pattern Graph Miner](graphs/frequent_pattern_graph_miner.py)
* [G Topological Sort](graphs/g_topological_sort.py)
* [Gale Shapley Bigraph](graphs/gale_shapley_bigraph.py)
* [Graph List](graphs/graph_list.py)
* [Graph Matrix](graphs/graph_matrix.py)
* [Graphs Floyd Warshall](graphs/graphs_floyd_warshall.py)
* [Greedy Best First](graphs/greedy_best_first.py)
* [Greedy Min Vertex Cover](graphs/greedy_min_vertex_cover.py)
* [Kahns Algorithm Long](graphs/kahns_algorithm_long.py)
* [Kahns Algorithm Topo](graphs/kahns_algorithm_topo.py)
* [Karger](graphs/karger.py)
* [Markov Chain](graphs/markov_chain.py)
* [Matching Min Vertex Cover](graphs/matching_min_vertex_cover.py)
* [Minimum Path Sum](graphs/minimum_path_sum.py)
* [Minimum Spanning Tree Boruvka](graphs/minimum_spanning_tree_boruvka.py)
* [Minimum Spanning Tree Kruskal](graphs/minimum_spanning_tree_kruskal.py)
* [Minimum Spanning Tree Kruskal2](graphs/minimum_spanning_tree_kruskal2.py)
* [Minimum Spanning Tree Prims](graphs/minimum_spanning_tree_prims.py)
* [Minimum Spanning Tree Prims2](graphs/minimum_spanning_tree_prims2.py)
* [Multi Heuristic Astar](graphs/multi_heuristic_astar.py)
* [Page Rank](graphs/page_rank.py)
* [Prim](graphs/prim.py)
* [Random Graph Generator](graphs/random_graph_generator.py)
* [Scc Kosaraju](graphs/scc_kosaraju.py)
* [Strongly Connected Components](graphs/strongly_connected_components.py)
* [Tarjans Scc](graphs/tarjans_scc.py)
* Tests
* [Test Min Spanning Tree Kruskal](graphs/tests/test_min_spanning_tree_kruskal.py)
* [Test Min Spanning Tree Prim](graphs/tests/test_min_spanning_tree_prim.py)
## Greedy Methods
* [Fractional Knapsack](greedy_methods/fractional_knapsack.py)
* [Fractional Knapsack 2](greedy_methods/fractional_knapsack_2.py)
* [Optimal Merge Pattern](greedy_methods/optimal_merge_pattern.py)
## Hashes
* [Adler32](hashes/adler32.py)
* [Chaos Machine](hashes/chaos_machine.py)
* [Djb2](hashes/djb2.py)
* [Elf](hashes/elf.py)
* [Enigma Machine](hashes/enigma_machine.py)
* [Hamming Code](hashes/hamming_code.py)
* [Luhn](hashes/luhn.py)
* [Md5](hashes/md5.py)
* [Sdbm](hashes/sdbm.py)
* [Sha1](hashes/sha1.py)
* [Sha256](hashes/sha256.py)
## Knapsack
* [Greedy Knapsack](knapsack/greedy_knapsack.py)
* [Knapsack](knapsack/knapsack.py)
* [Recursive Approach Knapsack](knapsack/recursive_approach_knapsack.py)
* Tests
* [Test Greedy Knapsack](knapsack/tests/test_greedy_knapsack.py)
* [Test Knapsack](knapsack/tests/test_knapsack.py)
## Linear Algebra
* Src
* [Conjugate Gradient](linear_algebra/src/conjugate_gradient.py)
* [Lib](linear_algebra/src/lib.py)
* [Polynom For Points](linear_algebra/src/polynom_for_points.py)
* [Power Iteration](linear_algebra/src/power_iteration.py)
* [Rayleigh Quotient](linear_algebra/src/rayleigh_quotient.py)
* [Schur Complement](linear_algebra/src/schur_complement.py)
* [Test Linear Algebra](linear_algebra/src/test_linear_algebra.py)
* [Transformations 2D](linear_algebra/src/transformations_2d.py)
## Machine Learning
* [Astar](machine_learning/astar.py)
* [Data Transformations](machine_learning/data_transformations.py)
* [Decision Tree](machine_learning/decision_tree.py)
* Forecasting
* [Run](machine_learning/forecasting/run.py)
* [Gradient Descent](machine_learning/gradient_descent.py)
* [K Means Clust](machine_learning/k_means_clust.py)
* [K Nearest Neighbours](machine_learning/k_nearest_neighbours.py)
* [Knn Sklearn](machine_learning/knn_sklearn.py)
* [Linear Discriminant Analysis](machine_learning/linear_discriminant_analysis.py)
* [Linear Regression](machine_learning/linear_regression.py)
* Local Weighted Learning
* [Local Weighted Learning](machine_learning/local_weighted_learning/local_weighted_learning.py)
* [Logistic Regression](machine_learning/logistic_regression.py)
* Lstm
* [Lstm Prediction](machine_learning/lstm/lstm_prediction.py)
* [Multilayer Perceptron Classifier](machine_learning/multilayer_perceptron_classifier.py)
* [Polymonial Regression](machine_learning/polymonial_regression.py)
* [Scoring Functions](machine_learning/scoring_functions.py)
* [Self Organizing Map](machine_learning/self_organizing_map.py)
* [Sequential Minimum Optimization](machine_learning/sequential_minimum_optimization.py)
* [Similarity Search](machine_learning/similarity_search.py)
* [Support Vector Machines](machine_learning/support_vector_machines.py)
* [Word Frequency Functions](machine_learning/word_frequency_functions.py)
* [Xgboost Classifier](machine_learning/xgboost_classifier.py)
* [Xgboost Regressor](machine_learning/xgboost_regressor.py)
## Maths
* [3N Plus 1](maths/3n_plus_1.py)
* [Abs](maths/abs.py)
* [Add](maths/add.py)
* [Addition Without Arithmetic](maths/addition_without_arithmetic.py)
* [Aliquot Sum](maths/aliquot_sum.py)
* [Allocation Number](maths/allocation_number.py)
* [Arc Length](maths/arc_length.py)
* [Area](maths/area.py)
* [Area Under Curve](maths/area_under_curve.py)
* [Armstrong Numbers](maths/armstrong_numbers.py)
* [Automorphic Number](maths/automorphic_number.py)
* [Average Absolute Deviation](maths/average_absolute_deviation.py)
* [Average Mean](maths/average_mean.py)
* [Average Median](maths/average_median.py)
* [Average Mode](maths/average_mode.py)
* [Bailey Borwein Plouffe](maths/bailey_borwein_plouffe.py)
* [Basic Maths](maths/basic_maths.py)
* [Binary Exp Mod](maths/binary_exp_mod.py)
* [Binary Exponentiation](maths/binary_exponentiation.py)
* [Binary Exponentiation 2](maths/binary_exponentiation_2.py)
* [Binary Exponentiation 3](maths/binary_exponentiation_3.py)
* [Binomial Coefficient](maths/binomial_coefficient.py)
* [Binomial Distribution](maths/binomial_distribution.py)
* [Bisection](maths/bisection.py)
* [Carmichael Number](maths/carmichael_number.py)
* [Catalan Number](maths/catalan_number.py)
* [Ceil](maths/ceil.py)
* [Check Polygon](maths/check_polygon.py)
* [Chudnovsky Algorithm](maths/chudnovsky_algorithm.py)
* [Collatz Sequence](maths/collatz_sequence.py)
* [Combinations](maths/combinations.py)
* [Decimal Isolate](maths/decimal_isolate.py)
* [Decimal To Fraction](maths/decimal_to_fraction.py)
* [Dodecahedron](maths/dodecahedron.py)
* [Double Factorial Iterative](maths/double_factorial_iterative.py)
* [Double Factorial Recursive](maths/double_factorial_recursive.py)
* [Entropy](maths/entropy.py)
* [Euclidean Distance](maths/euclidean_distance.py)
* [Euclidean Gcd](maths/euclidean_gcd.py)
* [Euler Method](maths/euler_method.py)
* [Euler Modified](maths/euler_modified.py)
* [Eulers Totient](maths/eulers_totient.py)
* [Extended Euclidean Algorithm](maths/extended_euclidean_algorithm.py)
* [Factorial](maths/factorial.py)
* [Factors](maths/factors.py)
* [Fermat Little Theorem](maths/fermat_little_theorem.py)
* [Fibonacci](maths/fibonacci.py)
* [Find Max](maths/find_max.py)
* [Find Max Recursion](maths/find_max_recursion.py)
* [Find Min](maths/find_min.py)
* [Find Min Recursion](maths/find_min_recursion.py)
* [Floor](maths/floor.py)
* [Gamma](maths/gamma.py)
* [Gamma Recursive](maths/gamma_recursive.py)
* [Gaussian](maths/gaussian.py)
* [Gaussian Error Linear Unit](maths/gaussian_error_linear_unit.py)
* [Gcd Of N Numbers](maths/gcd_of_n_numbers.py)
* [Greatest Common Divisor](maths/greatest_common_divisor.py)
* [Greedy Coin Change](maths/greedy_coin_change.py)
* [Hamming Numbers](maths/hamming_numbers.py)
* [Hardy Ramanujanalgo](maths/hardy_ramanujanalgo.py)
* [Hexagonal Number](maths/hexagonal_number.py)
* [Integration By Simpson Approx](maths/integration_by_simpson_approx.py)
* [Is Ip V4 Address Valid](maths/is_ip_v4_address_valid.py)
* [Is Square Free](maths/is_square_free.py)
* [Jaccard Similarity](maths/jaccard_similarity.py)
* [Juggler Sequence](maths/juggler_sequence.py)
* [Kadanes](maths/kadanes.py)
* [Karatsuba](maths/karatsuba.py)
* [Krishnamurthy Number](maths/krishnamurthy_number.py)
* [Kth Lexicographic Permutation](maths/kth_lexicographic_permutation.py)
* [Largest Of Very Large Numbers](maths/largest_of_very_large_numbers.py)
* [Largest Subarray Sum](maths/largest_subarray_sum.py)
* [Least Common Multiple](maths/least_common_multiple.py)
* [Line Length](maths/line_length.py)
* [Liouville Lambda](maths/liouville_lambda.py)
* [Lucas Lehmer Primality Test](maths/lucas_lehmer_primality_test.py)
* [Lucas Series](maths/lucas_series.py)
* [Maclaurin Series](maths/maclaurin_series.py)
* [Manhattan Distance](maths/manhattan_distance.py)
* [Matrix Exponentiation](maths/matrix_exponentiation.py)
* [Max Sum Sliding Window](maths/max_sum_sliding_window.py)
* [Median Of Two Arrays](maths/median_of_two_arrays.py)
* [Miller Rabin](maths/miller_rabin.py)
* [Mobius Function](maths/mobius_function.py)
* [Modular Exponential](maths/modular_exponential.py)
* [Monte Carlo](maths/monte_carlo.py)
* [Monte Carlo Dice](maths/monte_carlo_dice.py)
* [Nevilles Method](maths/nevilles_method.py)
* [Newton Raphson](maths/newton_raphson.py)
* [Number Of Digits](maths/number_of_digits.py)
* [Numerical Integration](maths/numerical_integration.py)
* [Perfect Cube](maths/perfect_cube.py)
* [Perfect Number](maths/perfect_number.py)
* [Perfect Square](maths/perfect_square.py)
* [Persistence](maths/persistence.py)
* [Pi Monte Carlo Estimation](maths/pi_monte_carlo_estimation.py)
* [Points Are Collinear 3D](maths/points_are_collinear_3d.py)
* [Pollard Rho](maths/pollard_rho.py)
* [Polynomial Evaluation](maths/polynomial_evaluation.py)
* Polynomials
* [Single Indeterminate Operations](maths/polynomials/single_indeterminate_operations.py)
* [Power Using Recursion](maths/power_using_recursion.py)
* [Prime Check](maths/prime_check.py)
* [Prime Factors](maths/prime_factors.py)
* [Prime Numbers](maths/prime_numbers.py)
* [Prime Sieve Eratosthenes](maths/prime_sieve_eratosthenes.py)
* [Primelib](maths/primelib.py)
* [Print Multiplication Table](maths/print_multiplication_table.py)
* [Pronic Number](maths/pronic_number.py)
* [Proth Number](maths/proth_number.py)
* [Pythagoras](maths/pythagoras.py)
* [Qr Decomposition](maths/qr_decomposition.py)
* [Quadratic Equations Complex Numbers](maths/quadratic_equations_complex_numbers.py)
* [Radians](maths/radians.py)
* [Radix2 Fft](maths/radix2_fft.py)
* [Relu](maths/relu.py)
* [Runge Kutta](maths/runge_kutta.py)
* [Segmented Sieve](maths/segmented_sieve.py)
* Series
* [Arithmetic](maths/series/arithmetic.py)
* [Geometric](maths/series/geometric.py)
* [Geometric Series](maths/series/geometric_series.py)
* [Harmonic](maths/series/harmonic.py)
* [Harmonic Series](maths/series/harmonic_series.py)
* [Hexagonal Numbers](maths/series/hexagonal_numbers.py)
* [P Series](maths/series/p_series.py)
* [Sieve Of Eratosthenes](maths/sieve_of_eratosthenes.py)
* [Sigmoid](maths/sigmoid.py)
* [Sigmoid Linear Unit](maths/sigmoid_linear_unit.py)
* [Signum](maths/signum.py)
* [Simpson Rule](maths/simpson_rule.py)
* [Sin](maths/sin.py)
* [Sock Merchant](maths/sock_merchant.py)
* [Softmax](maths/softmax.py)
* [Square Root](maths/square_root.py)
* [Sum Of Arithmetic Series](maths/sum_of_arithmetic_series.py)
* [Sum Of Digits](maths/sum_of_digits.py)
* [Sum Of Geometric Progression](maths/sum_of_geometric_progression.py)
* [Sum Of Harmonic Series](maths/sum_of_harmonic_series.py)
* [Sumset](maths/sumset.py)
* [Sylvester Sequence](maths/sylvester_sequence.py)
* [Test Prime Check](maths/test_prime_check.py)
* [Trapezoidal Rule](maths/trapezoidal_rule.py)
* [Triplet Sum](maths/triplet_sum.py)
* [Twin Prime](maths/twin_prime.py)
* [Two Pointer](maths/two_pointer.py)
* [Two Sum](maths/two_sum.py)
* [Ugly Numbers](maths/ugly_numbers.py)
* [Volume](maths/volume.py)
* [Weird Number](maths/weird_number.py)
* [Zellers Congruence](maths/zellers_congruence.py)
## Matrix
* [Binary Search Matrix](matrix/binary_search_matrix.py)
* [Count Islands In Matrix](matrix/count_islands_in_matrix.py)
* [Count Paths](matrix/count_paths.py)
* [Cramers Rule 2X2](matrix/cramers_rule_2x2.py)
* [Inverse Of Matrix](matrix/inverse_of_matrix.py)
* [Largest Square Area In Matrix](matrix/largest_square_area_in_matrix.py)
* [Matrix Class](matrix/matrix_class.py)
* [Matrix Operation](matrix/matrix_operation.py)
* [Max Area Of Island](matrix/max_area_of_island.py)
* [Nth Fibonacci Using Matrix Exponentiation](matrix/nth_fibonacci_using_matrix_exponentiation.py)
* [Pascal Triangle](matrix/pascal_triangle.py)
* [Rotate Matrix](matrix/rotate_matrix.py)
* [Searching In Sorted Matrix](matrix/searching_in_sorted_matrix.py)
* [Sherman Morrison](matrix/sherman_morrison.py)
* [Spiral Print](matrix/spiral_print.py)
* Tests
* [Test Matrix Operation](matrix/tests/test_matrix_operation.py)
## Networking Flow
* [Ford Fulkerson](networking_flow/ford_fulkerson.py)
* [Minimum Cut](networking_flow/minimum_cut.py)
## Neural Network
* [2 Hidden Layers Neural Network](neural_network/2_hidden_layers_neural_network.py)
* [Back Propagation Neural Network](neural_network/back_propagation_neural_network.py)
* [Convolution Neural Network](neural_network/convolution_neural_network.py)
* [Perceptron](neural_network/perceptron.py)
* [Simple Neural Network](neural_network/simple_neural_network.py)
## Other
* [Activity Selection](other/activity_selection.py)
* [Alternative List Arrange](other/alternative_list_arrange.py)
* [Davisb Putnamb Logemannb Loveland](other/davisb_putnamb_logemannb_loveland.py)
* [Dijkstra Bankers Algorithm](other/dijkstra_bankers_algorithm.py)
* [Doomsday](other/doomsday.py)
* [Fischer Yates Shuffle](other/fischer_yates_shuffle.py)
* [Gauss Easter](other/gauss_easter.py)
* [Graham Scan](other/graham_scan.py)
* [Greedy](other/greedy.py)
* [Least Recently Used](other/least_recently_used.py)
* [Lfu Cache](other/lfu_cache.py)
* [Linear Congruential Generator](other/linear_congruential_generator.py)
* [Lru Cache](other/lru_cache.py)
* [Magicdiamondpattern](other/magicdiamondpattern.py)
* [Maximum Subarray](other/maximum_subarray.py)
* [Nested Brackets](other/nested_brackets.py)
* [Password](other/password.py)
* [Quine](other/quine.py)
* [Scoring Algorithm](other/scoring_algorithm.py)
* [Sdes](other/sdes.py)
* [Tower Of Hanoi](other/tower_of_hanoi.py)
## Physics
* [Archimedes Principle](physics/archimedes_principle.py)
* [Casimir Effect](physics/casimir_effect.py)
* [Centripetal Force](physics/centripetal_force.py)
* [Horizontal Projectile Motion](physics/horizontal_projectile_motion.py)
* [Hubble Parameter](physics/hubble_parameter.py)
* [Ideal Gas Law](physics/ideal_gas_law.py)
* [Kinetic Energy](physics/kinetic_energy.py)
* [Lorentz Transformation Four Vector](physics/lorentz_transformation_four_vector.py)
* [Malus Law](physics/malus_law.py)
* [N Body Simulation](physics/n_body_simulation.py)
* [Newtons Law Of Gravitation](physics/newtons_law_of_gravitation.py)
* [Newtons Second Law Of Motion](physics/newtons_second_law_of_motion.py)
* [Potential Energy](physics/potential_energy.py)
* [Rms Speed Of Molecule](physics/rms_speed_of_molecule.py)
* [Shear Stress](physics/shear_stress.py)
## Project Euler
* Problem 001
* [Sol1](project_euler/problem_001/sol1.py)
* [Sol2](project_euler/problem_001/sol2.py)
* [Sol3](project_euler/problem_001/sol3.py)
* [Sol4](project_euler/problem_001/sol4.py)
* [Sol5](project_euler/problem_001/sol5.py)
* [Sol6](project_euler/problem_001/sol6.py)
* [Sol7](project_euler/problem_001/sol7.py)
* Problem 002
* [Sol1](project_euler/problem_002/sol1.py)
* [Sol2](project_euler/problem_002/sol2.py)
* [Sol3](project_euler/problem_002/sol3.py)
* [Sol4](project_euler/problem_002/sol4.py)
* [Sol5](project_euler/problem_002/sol5.py)
* Problem 003
* [Sol1](project_euler/problem_003/sol1.py)
* [Sol2](project_euler/problem_003/sol2.py)
* [Sol3](project_euler/problem_003/sol3.py)
* Problem 004
* [Sol1](project_euler/problem_004/sol1.py)
* [Sol2](project_euler/problem_004/sol2.py)
* Problem 005
* [Sol1](project_euler/problem_005/sol1.py)
* [Sol2](project_euler/problem_005/sol2.py)
* Problem 006
* [Sol1](project_euler/problem_006/sol1.py)
* [Sol2](project_euler/problem_006/sol2.py)
* [Sol3](project_euler/problem_006/sol3.py)
* [Sol4](project_euler/problem_006/sol4.py)
* Problem 007
* [Sol1](project_euler/problem_007/sol1.py)
* [Sol2](project_euler/problem_007/sol2.py)
* [Sol3](project_euler/problem_007/sol3.py)
* Problem 008
* [Sol1](project_euler/problem_008/sol1.py)
* [Sol2](project_euler/problem_008/sol2.py)
* [Sol3](project_euler/problem_008/sol3.py)
* Problem 009
* [Sol1](project_euler/problem_009/sol1.py)
* [Sol2](project_euler/problem_009/sol2.py)
* [Sol3](project_euler/problem_009/sol3.py)
* Problem 010
* [Sol1](project_euler/problem_010/sol1.py)
* [Sol2](project_euler/problem_010/sol2.py)
* [Sol3](project_euler/problem_010/sol3.py)
* Problem 011
* [Sol1](project_euler/problem_011/sol1.py)
* [Sol2](project_euler/problem_011/sol2.py)
* Problem 012
* [Sol1](project_euler/problem_012/sol1.py)
* [Sol2](project_euler/problem_012/sol2.py)
* Problem 013
* [Sol1](project_euler/problem_013/sol1.py)
* Problem 014
* [Sol1](project_euler/problem_014/sol1.py)
* [Sol2](project_euler/problem_014/sol2.py)
* Problem 015
* [Sol1](project_euler/problem_015/sol1.py)
* Problem 016
* [Sol1](project_euler/problem_016/sol1.py)
* [Sol2](project_euler/problem_016/sol2.py)
* Problem 017
* [Sol1](project_euler/problem_017/sol1.py)
* Problem 018
* [Solution](project_euler/problem_018/solution.py)
* Problem 019
* [Sol1](project_euler/problem_019/sol1.py)
* Problem 020
* [Sol1](project_euler/problem_020/sol1.py)
* [Sol2](project_euler/problem_020/sol2.py)
* [Sol3](project_euler/problem_020/sol3.py)
* [Sol4](project_euler/problem_020/sol4.py)
* Problem 021
* [Sol1](project_euler/problem_021/sol1.py)
* Problem 022
* [Sol1](project_euler/problem_022/sol1.py)
* [Sol2](project_euler/problem_022/sol2.py)
* Problem 023
* [Sol1](project_euler/problem_023/sol1.py)
* Problem 024
* [Sol1](project_euler/problem_024/sol1.py)
* Problem 025
* [Sol1](project_euler/problem_025/sol1.py)
* [Sol2](project_euler/problem_025/sol2.py)
* [Sol3](project_euler/problem_025/sol3.py)
* Problem 026
* [Sol1](project_euler/problem_026/sol1.py)
* Problem 027
* [Sol1](project_euler/problem_027/sol1.py)
* Problem 028
* [Sol1](project_euler/problem_028/sol1.py)
* Problem 029
* [Sol1](project_euler/problem_029/sol1.py)
* Problem 030
* [Sol1](project_euler/problem_030/sol1.py)
* Problem 031
* [Sol1](project_euler/problem_031/sol1.py)
* [Sol2](project_euler/problem_031/sol2.py)
* Problem 032
* [Sol32](project_euler/problem_032/sol32.py)
* Problem 033
* [Sol1](project_euler/problem_033/sol1.py)
* Problem 034
* [Sol1](project_euler/problem_034/sol1.py)
* Problem 035
* [Sol1](project_euler/problem_035/sol1.py)
* Problem 036
* [Sol1](project_euler/problem_036/sol1.py)
* Problem 037
* [Sol1](project_euler/problem_037/sol1.py)
* Problem 038
* [Sol1](project_euler/problem_038/sol1.py)
* Problem 039
* [Sol1](project_euler/problem_039/sol1.py)
* Problem 040
* [Sol1](project_euler/problem_040/sol1.py)
* Problem 041
* [Sol1](project_euler/problem_041/sol1.py)
* Problem 042
* [Solution42](project_euler/problem_042/solution42.py)
* Problem 043
* [Sol1](project_euler/problem_043/sol1.py)
* Problem 044
* [Sol1](project_euler/problem_044/sol1.py)
* Problem 045
* [Sol1](project_euler/problem_045/sol1.py)
* Problem 046
* [Sol1](project_euler/problem_046/sol1.py)
* Problem 047
* [Sol1](project_euler/problem_047/sol1.py)
* Problem 048
* [Sol1](project_euler/problem_048/sol1.py)
* Problem 049
* [Sol1](project_euler/problem_049/sol1.py)
* Problem 050
* [Sol1](project_euler/problem_050/sol1.py)
* Problem 051
* [Sol1](project_euler/problem_051/sol1.py)
* Problem 052
* [Sol1](project_euler/problem_052/sol1.py)
* Problem 053
* [Sol1](project_euler/problem_053/sol1.py)
* Problem 054
* [Sol1](project_euler/problem_054/sol1.py)
* [Test Poker Hand](project_euler/problem_054/test_poker_hand.py)
* Problem 055
* [Sol1](project_euler/problem_055/sol1.py)
* Problem 056
* [Sol1](project_euler/problem_056/sol1.py)
* Problem 057
* [Sol1](project_euler/problem_057/sol1.py)
* Problem 058
* [Sol1](project_euler/problem_058/sol1.py)
* Problem 059
* [Sol1](project_euler/problem_059/sol1.py)
* Problem 062
* [Sol1](project_euler/problem_062/sol1.py)
* Problem 063
* [Sol1](project_euler/problem_063/sol1.py)
* Problem 064
* [Sol1](project_euler/problem_064/sol1.py)
* Problem 065
* [Sol1](project_euler/problem_065/sol1.py)
* Problem 067
* [Sol1](project_euler/problem_067/sol1.py)
* [Sol2](project_euler/problem_067/sol2.py)
* Problem 068
* [Sol1](project_euler/problem_068/sol1.py)
* Problem 069
* [Sol1](project_euler/problem_069/sol1.py)
* Problem 070
* [Sol1](project_euler/problem_070/sol1.py)
* Problem 071
* [Sol1](project_euler/problem_071/sol1.py)
* Problem 072
* [Sol1](project_euler/problem_072/sol1.py)
* [Sol2](project_euler/problem_072/sol2.py)
* Problem 073
* [Sol1](project_euler/problem_073/sol1.py)
* Problem 074
* [Sol1](project_euler/problem_074/sol1.py)
* [Sol2](project_euler/problem_074/sol2.py)
* Problem 075
* [Sol1](project_euler/problem_075/sol1.py)
* Problem 076
* [Sol1](project_euler/problem_076/sol1.py)
* Problem 077
* [Sol1](project_euler/problem_077/sol1.py)
* Problem 078
* [Sol1](project_euler/problem_078/sol1.py)
* Problem 080
* [Sol1](project_euler/problem_080/sol1.py)
* Problem 081
* [Sol1](project_euler/problem_081/sol1.py)
* Problem 082
* [Sol1](project_euler/problem_082/sol1.py)
* Problem 085
* [Sol1](project_euler/problem_085/sol1.py)
* Problem 086
* [Sol1](project_euler/problem_086/sol1.py)
* Problem 087
* [Sol1](project_euler/problem_087/sol1.py)
* Problem 089
* [Sol1](project_euler/problem_089/sol1.py)
* Problem 091
* [Sol1](project_euler/problem_091/sol1.py)
* Problem 092
* [Sol1](project_euler/problem_092/sol1.py)
* Problem 097
* [Sol1](project_euler/problem_097/sol1.py)
* Problem 099
* [Sol1](project_euler/problem_099/sol1.py)
* Problem 100
* [Sol1](project_euler/problem_100/sol1.py)
* Problem 101
* [Sol1](project_euler/problem_101/sol1.py)
* Problem 102
* [Sol1](project_euler/problem_102/sol1.py)
* Problem 104
* [Sol1](project_euler/problem_104/sol1.py)
* Problem 107
* [Sol1](project_euler/problem_107/sol1.py)
* Problem 109
* [Sol1](project_euler/problem_109/sol1.py)
* Problem 112
* [Sol1](project_euler/problem_112/sol1.py)
* Problem 113
* [Sol1](project_euler/problem_113/sol1.py)
* Problem 114
* [Sol1](project_euler/problem_114/sol1.py)
* Problem 115
* [Sol1](project_euler/problem_115/sol1.py)
* Problem 116
* [Sol1](project_euler/problem_116/sol1.py)
* Problem 117
* [Sol1](project_euler/problem_117/sol1.py)
* Problem 119
* [Sol1](project_euler/problem_119/sol1.py)
* Problem 120
* [Sol1](project_euler/problem_120/sol1.py)
* Problem 121
* [Sol1](project_euler/problem_121/sol1.py)
* Problem 123
* [Sol1](project_euler/problem_123/sol1.py)
* Problem 125
* [Sol1](project_euler/problem_125/sol1.py)
* Problem 129
* [Sol1](project_euler/problem_129/sol1.py)
* Problem 131
* [Sol1](project_euler/problem_131/sol1.py)
* Problem 135
* [Sol1](project_euler/problem_135/sol1.py)
* Problem 144
* [Sol1](project_euler/problem_144/sol1.py)
* Problem 145
* [Sol1](project_euler/problem_145/sol1.py)
* Problem 173
* [Sol1](project_euler/problem_173/sol1.py)
* Problem 174
* [Sol1](project_euler/problem_174/sol1.py)
* Problem 180
* [Sol1](project_euler/problem_180/sol1.py)
* Problem 188
* [Sol1](project_euler/problem_188/sol1.py)
* Problem 191
* [Sol1](project_euler/problem_191/sol1.py)
* Problem 203
* [Sol1](project_euler/problem_203/sol1.py)
* Problem 205
* [Sol1](project_euler/problem_205/sol1.py)
* Problem 206
* [Sol1](project_euler/problem_206/sol1.py)
* Problem 207
* [Sol1](project_euler/problem_207/sol1.py)
* Problem 234
* [Sol1](project_euler/problem_234/sol1.py)
* Problem 301
* [Sol1](project_euler/problem_301/sol1.py)
* Problem 493
* [Sol1](project_euler/problem_493/sol1.py)
* Problem 551
* [Sol1](project_euler/problem_551/sol1.py)
* Problem 587
* [Sol1](project_euler/problem_587/sol1.py)
* Problem 686
* [Sol1](project_euler/problem_686/sol1.py)
## Quantum
* [Bb84](quantum/bb84.py)
* [Deutsch Jozsa](quantum/deutsch_jozsa.py)
* [Half Adder](quantum/half_adder.py)
* [Not Gate](quantum/not_gate.py)
* [Q Fourier Transform](quantum/q_fourier_transform.py)
* [Q Full Adder](quantum/q_full_adder.py)
* [Quantum Entanglement](quantum/quantum_entanglement.py)
* [Quantum Teleportation](quantum/quantum_teleportation.py)
* [Ripple Adder Classic](quantum/ripple_adder_classic.py)
* [Single Qubit Measure](quantum/single_qubit_measure.py)
* [Superdense Coding](quantum/superdense_coding.py)
## Scheduling
* [First Come First Served](scheduling/first_come_first_served.py)
* [Highest Response Ratio Next](scheduling/highest_response_ratio_next.py)
* [Job Sequencing With Deadline](scheduling/job_sequencing_with_deadline.py)
* [Multi Level Feedback Queue](scheduling/multi_level_feedback_queue.py)
* [Non Preemptive Shortest Job First](scheduling/non_preemptive_shortest_job_first.py)
* [Round Robin](scheduling/round_robin.py)
* [Shortest Job First](scheduling/shortest_job_first.py)
## Searches
* [Binary Search](searches/binary_search.py)
* [Binary Tree Traversal](searches/binary_tree_traversal.py)
* [Double Linear Search](searches/double_linear_search.py)
* [Double Linear Search Recursion](searches/double_linear_search_recursion.py)
* [Fibonacci Search](searches/fibonacci_search.py)
* [Hill Climbing](searches/hill_climbing.py)
* [Interpolation Search](searches/interpolation_search.py)
* [Jump Search](searches/jump_search.py)
* [Linear Search](searches/linear_search.py)
* [Quick Select](searches/quick_select.py)
* [Sentinel Linear Search](searches/sentinel_linear_search.py)
* [Simple Binary Search](searches/simple_binary_search.py)
* [Simulated Annealing](searches/simulated_annealing.py)
* [Tabu Search](searches/tabu_search.py)
* [Ternary Search](searches/ternary_search.py)
## Sorts
* [Bead Sort](sorts/bead_sort.py)
* [Bitonic Sort](sorts/bitonic_sort.py)
* [Bogo Sort](sorts/bogo_sort.py)
* [Bubble Sort](sorts/bubble_sort.py)
* [Bucket Sort](sorts/bucket_sort.py)
* [Circle Sort](sorts/circle_sort.py)
* [Cocktail Shaker Sort](sorts/cocktail_shaker_sort.py)
* [Comb Sort](sorts/comb_sort.py)
* [Counting Sort](sorts/counting_sort.py)
* [Cycle Sort](sorts/cycle_sort.py)
* [Double Sort](sorts/double_sort.py)
* [Dutch National Flag Sort](sorts/dutch_national_flag_sort.py)
* [Exchange Sort](sorts/exchange_sort.py)
* [External Sort](sorts/external_sort.py)
* [Gnome Sort](sorts/gnome_sort.py)
* [Heap Sort](sorts/heap_sort.py)
* [Insertion Sort](sorts/insertion_sort.py)
* [Intro Sort](sorts/intro_sort.py)
* [Iterative Merge Sort](sorts/iterative_merge_sort.py)
* [Merge Insertion Sort](sorts/merge_insertion_sort.py)
* [Merge Sort](sorts/merge_sort.py)
* [Msd Radix Sort](sorts/msd_radix_sort.py)
* [Natural Sort](sorts/natural_sort.py)
* [Odd Even Sort](sorts/odd_even_sort.py)
* [Odd Even Transposition Parallel](sorts/odd_even_transposition_parallel.py)
* [Odd Even Transposition Single Threaded](sorts/odd_even_transposition_single_threaded.py)
* [Pancake Sort](sorts/pancake_sort.py)
* [Patience Sort](sorts/patience_sort.py)
* [Pigeon Sort](sorts/pigeon_sort.py)
* [Pigeonhole Sort](sorts/pigeonhole_sort.py)
* [Quick Sort](sorts/quick_sort.py)
* [Quick Sort 3 Partition](sorts/quick_sort_3_partition.py)
* [Radix Sort](sorts/radix_sort.py)
* [Random Normal Distribution Quicksort](sorts/random_normal_distribution_quicksort.py)
* [Random Pivot Quick Sort](sorts/random_pivot_quick_sort.py)
* [Recursive Bubble Sort](sorts/recursive_bubble_sort.py)
* [Recursive Insertion Sort](sorts/recursive_insertion_sort.py)
* [Recursive Mergesort Array](sorts/recursive_mergesort_array.py)
* [Recursive Quick Sort](sorts/recursive_quick_sort.py)
* [Selection Sort](sorts/selection_sort.py)
* [Shell Sort](sorts/shell_sort.py)
* [Shrink Shell Sort](sorts/shrink_shell_sort.py)
* [Slowsort](sorts/slowsort.py)
* [Stooge Sort](sorts/stooge_sort.py)
* [Strand Sort](sorts/strand_sort.py)
* [Tim Sort](sorts/tim_sort.py)
* [Topological Sort](sorts/topological_sort.py)
* [Tree Sort](sorts/tree_sort.py)
* [Unknown Sort](sorts/unknown_sort.py)
* [Wiggle Sort](sorts/wiggle_sort.py)
## Strings
* [Aho Corasick](strings/aho_corasick.py)
* [Alternative String Arrange](strings/alternative_string_arrange.py)
* [Anagrams](strings/anagrams.py)
* [Autocomplete Using Trie](strings/autocomplete_using_trie.py)
* [Barcode Validator](strings/barcode_validator.py)
* [Boyer Moore Search](strings/boyer_moore_search.py)
* [Can String Be Rearranged As Palindrome](strings/can_string_be_rearranged_as_palindrome.py)
* [Capitalize](strings/capitalize.py)
* [Check Anagrams](strings/check_anagrams.py)
* [Credit Card Validator](strings/credit_card_validator.py)
* [Detecting English Programmatically](strings/detecting_english_programmatically.py)
* [Dna](strings/dna.py)
* [Frequency Finder](strings/frequency_finder.py)
* [Hamming Distance](strings/hamming_distance.py)
* [Indian Phone Validator](strings/indian_phone_validator.py)
* [Is Contains Unique Chars](strings/is_contains_unique_chars.py)
* [Is Isogram](strings/is_isogram.py)
* [Is Palindrome](strings/is_palindrome.py)
* [Is Pangram](strings/is_pangram.py)
* [Is Spain National Id](strings/is_spain_national_id.py)
* [Is Srilankan Phone Number](strings/is_srilankan_phone_number.py)
* [Jaro Winkler](strings/jaro_winkler.py)
* [Join](strings/join.py)
* [Knuth Morris Pratt](strings/knuth_morris_pratt.py)
* [Levenshtein Distance](strings/levenshtein_distance.py)
* [Lower](strings/lower.py)
* [Manacher](strings/manacher.py)
* [Min Cost String Conversion](strings/min_cost_string_conversion.py)
* [Naive String Search](strings/naive_string_search.py)
* [Ngram](strings/ngram.py)
* [Palindrome](strings/palindrome.py)
* [Prefix Function](strings/prefix_function.py)
* [Rabin Karp](strings/rabin_karp.py)
* [Remove Duplicate](strings/remove_duplicate.py)
* [Reverse Letters](strings/reverse_letters.py)
* [Reverse Long Words](strings/reverse_long_words.py)
* [Reverse Words](strings/reverse_words.py)
* [Snake Case To Camel Pascal Case](strings/snake_case_to_camel_pascal_case.py)
* [Split](strings/split.py)
* [Text Justification](strings/text_justification.py)
* [Upper](strings/upper.py)
* [Wave](strings/wave.py)
* [Wildcard Pattern Matching](strings/wildcard_pattern_matching.py)
* [Word Occurrence](strings/word_occurrence.py)
* [Word Patterns](strings/word_patterns.py)
* [Z Function](strings/z_function.py)
## Web Programming
* [Co2 Emission](web_programming/co2_emission.py)
* [Convert Number To Words](web_programming/convert_number_to_words.py)
* [Covid Stats Via Xpath](web_programming/covid_stats_via_xpath.py)
* [Crawl Google Results](web_programming/crawl_google_results.py)
* [Crawl Google Scholar Citation](web_programming/crawl_google_scholar_citation.py)
* [Currency Converter](web_programming/currency_converter.py)
* [Current Stock Price](web_programming/current_stock_price.py)
* [Current Weather](web_programming/current_weather.py)
* [Daily Horoscope](web_programming/daily_horoscope.py)
* [Download Images From Google Query](web_programming/download_images_from_google_query.py)
* [Emails From Url](web_programming/emails_from_url.py)
* [Fetch Anime And Play](web_programming/fetch_anime_and_play.py)
* [Fetch Bbc News](web_programming/fetch_bbc_news.py)
* [Fetch Github Info](web_programming/fetch_github_info.py)
* [Fetch Jobs](web_programming/fetch_jobs.py)
* [Fetch Quotes](web_programming/fetch_quotes.py)
* [Fetch Well Rx Price](web_programming/fetch_well_rx_price.py)
* [Get Amazon Product Data](web_programming/get_amazon_product_data.py)
* [Get Imdb Top 250 Movies Csv](web_programming/get_imdb_top_250_movies_csv.py)
* [Get Imdbtop](web_programming/get_imdbtop.py)
* [Get Top Hn Posts](web_programming/get_top_hn_posts.py)
* [Get User Tweets](web_programming/get_user_tweets.py)
* [Giphy](web_programming/giphy.py)
* [Instagram Crawler](web_programming/instagram_crawler.py)
* [Instagram Pic](web_programming/instagram_pic.py)
* [Instagram Video](web_programming/instagram_video.py)
* [Nasa Data](web_programming/nasa_data.py)
* [Open Google Results](web_programming/open_google_results.py)
* [Random Anime Character](web_programming/random_anime_character.py)
* [Recaptcha Verification](web_programming/recaptcha_verification.py)
* [Reddit](web_programming/reddit.py)
* [Search Books By Isbn](web_programming/search_books_by_isbn.py)
* [Slack Message](web_programming/slack_message.py)
* [Test Fetch Github Info](web_programming/test_fetch_github_info.py)
* [World Covid19 Stats](web_programming/world_covid19_stats.py)
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # Locally Weighted Linear Regression
It is a non-parametric ML algorithm that does not learn on a fixed set of parameters, unlike **linear regression**. \
So, here comes the question: what is *linear regression*? \
**Linear regression** is a supervised learning algorithm used for computing linear relationships between input (X) and output (Y). \
### Terminology Involved
number_of_features(i) = Number of features involved. \
number_of_training_examples(m) = Number of training examples. \
output_sequence(y) = Output Sequence. \
$\theta$ $^T$ x = predicted point. \
J($\theta$) = Cost function of the point.
The steps involved in ordinary linear regression are:
Training phase: Compute $\theta$ to minimize the cost. \
J($\theta$) = $\sum_{i=1}^m$ (($\theta$)$^T$ $x^i$ - $y^i$)$^2$
Predict output: for a given query point x, \
return: ($\theta$)$^T$ x
<img src="https://miro.medium.com/max/700/1*FZsLp8yTULf77qrp0Qd91g.png" alt="Linear Regression">
This training phase is possible when the data points are linear, but there again comes the question: can we predict a non-linear relationship between x and y, as shown below?
<img src="https://miro.medium.com/max/700/1*DHYvJg55uN-Kj8jHaxDKvQ.png" alt="Non-linear Data">
<br />
<br />
So, here comes the role of a non-parametric algorithm, which doesn't compute predictions based on a fixed set of parameters. Rather, the parameters $\theta$ are computed individually for each query point/data point x.
<br />
<br />
While computing $\theta$, a higher preference is given to points in the vicinity of x than to points farther from x.
Cost Function J($\theta$) = $\sum_{i=1}^m$ $w^i$ (($\theta$)$^T$ $x^i$ - $y^i$)$^2$
$w^i$ is a non-negative weight associated with the training point $x^i$. \
$w^i$ is large for $x^i$'s lying closer to the query point x. \
$w^i$ is small for $x^i$'s lying farther from the query point x.
A typical weight can be computed using \
$w^i$ = $\exp$(-$\frac{(x^i-x)(x^i-x)^T}{2\tau^2}$)
where $\tau$ is the bandwidth parameter that controls how quickly $w^i$ falls off as $x^i$ moves away from x.
Let's look at an example:
Suppose we had a query point x=5.0 and training points $x^1$=4.9 and $x^2$=3.0; then we can calculate the weights as:
$w^i$ = $\exp$(-$\frac{(x^i-x)(x^i-x)^T}{2\tau^2}$) with $\tau$=0.5
$w^1$ = $\exp$(-$\frac{(4.9-5)^2}{2(0.5)^2}$) = 0.9802
$w^2$ = $\exp$(-$\frac{(3-5)^2}{2(0.5)^2}$) = 0.000335
So, J($\theta$) = 0.9802*($\theta$ $^T$ $x^1$ - $y^1$)$^2$ + 0.000335*($\theta$ $^T$ $x^2$ - $y^2$)$^2$
So, hereby we can conclude that the weights fall exponentially as the distance between x & $x^i$ increases, and so does the contribution of the error in the prediction for $x^i$ to the cost.
Steps involved in LWL are: \
Compute $\theta$ to minimize the cost.
J($\theta$) = $\sum_{i=1}^m$ $w^i$ (($\theta$)$^T$ $x^i$ - $y^i$)$^2$ \
Predict Output: for a given query point x, \
return : $\theta$ $^T$ x
<img src="https://miro.medium.com/max/700/1*H3QS05Q1GJtY-tiBL00iug.png" alt="LWL">
| # Locally Weighted Linear Regression
It is a non-parametric ML algorithm that does not learn on a fixed set of parameters, unlike **linear regression**. \
So, here comes the question: what is *linear regression*? \
**Linear regression** is a supervised learning algorithm used for computing linear relationships between input (X) and output (Y). \
### Terminology Involved
number_of_features(i) = Number of features involved. \
number_of_training_examples(m) = Number of training examples. \
output_sequence(y) = Output Sequence. \
$\theta$ $^T$ x = predicted point. \
J($\theta$) = Cost function of the point.
The steps involved in ordinary linear regression are:
Training phase: Compute $\theta$ to minimize the cost. \
J($\theta$) = $\sum_{i=1}^m$ (($\theta$)$^T$ $x^i$ - $y^i$)$^2$
Predict output: for a given query point x, \
return: ($\theta$)$^T$ x
<img src="https://miro.medium.com/max/700/1*FZsLp8yTULf77qrp0Qd91g.png" alt="Linear Regression">
This training phase is possible when the data points are linear, but there again comes the question: can we predict a non-linear relationship between x and y, as shown below?
<img src="https://miro.medium.com/max/700/1*DHYvJg55uN-Kj8jHaxDKvQ.png" alt="Non-linear Data">
<br />
<br />
So, here comes the role of a non-parametric algorithm, which doesn't compute predictions based on a fixed set of parameters. Rather, the parameters $\theta$ are computed individually for each query point/data point x.
<br />
<br />
While computing $\theta$, a higher preference is given to points in the vicinity of x than to points farther from x.
Cost Function J($\theta$) = $\sum_{i=1}^m$ $w^i$ (($\theta$)$^T$ $x^i$ - $y^i$)$^2$
$w^i$ is a non-negative weight associated with the training point $x^i$. \
$w^i$ is large for $x^i$'s lying closer to the query point x. \
$w^i$ is small for $x^i$'s lying farther from the query point x.
A typical weight can be computed using \
$w^i$ = $\exp$(-$\frac{(x^i-x)(x^i-x)^T}{2\tau^2}$)
where $\tau$ is the bandwidth parameter that controls how quickly $w^i$ falls off as $x^i$ moves away from x.
Let's look at an example:
Suppose we had a query point x=5.0 and training points $x^1$=4.9 and $x^2$=3.0; then we can calculate the weights as:
$w^i$ = $\exp$(-$\frac{(x^i-x)(x^i-x)^T}{2\tau^2}$) with $\tau$=0.5
$w^1$ = $\exp$(-$\frac{(4.9-5)^2}{2(0.5)^2}$) = 0.9802
$w^2$ = $\exp$(-$\frac{(3-5)^2}{2(0.5)^2}$) = 0.000335
So, J($\theta$) = 0.9802*($\theta$ $^T$ $x^1$ - $y^1$)$^2$ + 0.000335*($\theta$ $^T$ $x^2$ - $y^2$)$^2$
So, hereby we can conclude that the weights fall exponentially as the distance between x & $x^i$ increases, and so does the contribution of the error in the prediction for $x^i$ to the cost.
Steps involved in LWL are: \
Compute $\theta$ to minimize the cost.
J($\theta$) = $\sum_{i=1}^m$ $w^i$ (($\theta$)$^T$ $x^i$ - $y^i$)$^2$ \
Predict Output: for a given query point x, \
return : $\theta$ $^T$ x
<img src="https://miro.medium.com/max/700/1*H3QS05Q1GJtY-tiBL00iug.png" alt="LWL">
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # Blockchain
A Blockchain is a type of **distributed ledger** technology (DLT) that consists of a growing list of records, called **blocks**, that are securely linked together using **cryptography**.
Let's break down the terminology in the above definition. We find the below terminologies,
- Digital Ledger Technology (DLT)
- Blocks
- Cryptography
## Digital Ledger Technology
It is otherwise called distributed ledger technology. It is simply the opposite of a centralized database. Firstly, what is a **ledger**? A ledger is a book or collection of accounts that records account transactions.
*Why is Blockchain addressed as digital ledger if it can record more than account transactions? What other transaction details and information can it hold?*
Digital Ledger Technology is just a ledger which is shared among multiple nodes. This way there exists no need for a central authority to hold the info. Okay, how is it differentiated from a central database, and what are its benefits?
Say there is an organization which has 4 branches whose data are stored in a centralized database. Even if one branch needs any data from the ledger, it needs approval from the database in charge. And if someone hacks the central database, they get to tamper with and control all the data.
Now let's assume every branch has a copy of the ledger; then, once anything is added to the ledger by any branch, it is automatically reflected in all the other ledgers available in the other branches. This is done using a peer-to-peer network.
So this means that even if information is tampered with in one branch, we can find out. If one branch is hacked, we can be alerted, so we can safeguard the other branches. Now, think of these branches as computers or nodes and of the ledger as a transaction record or digital receipt. If one ledger is hacked in a node, we can detect it since there will be a mismatch in comparison with the other nodes' information. So this is the concept of Digital Ledger Technology.
*Is it required for all nodes to have access to all information in other nodes? Wouldn't this require enormous storage space in each node?*
## Blocks
In short, a block is nothing but a collection of records with a labelled header. These are connected cryptographically. Once a new block is added to the chain, the previous block is connected, or more precisely locked, and hence will remain unaltered. We can understand this concept once we get a clear understanding of the working mechanism of blockchain.
## Cryptography
It is the practice and study of secure communication techniques in the midst of adversarial behavior. More broadly, cryptography is the creation and analysis of protocols that prevent third parties or the general public from accessing private messages.
*Which cryptography technology is most widely used in blockchain and why?*
So, in general, blockchain technology is a distributed record holder which records the information about the ownership of an asset. To define it precisely,
> Blockchain is a distributed, immutable ledger that makes it easier to record transactions and track assets in a corporate network.
An asset could be tangible (such as a house, car, cash, or land) or intangible (such as intellectual property, patents, copyrights, or branding). A blockchain network can track and sell almost anything of value, lowering risk and costs for everyone involved.
So this is all about the introduction to blockchain technology. To learn more about the topic, refer to the links below.
* <https://en.wikipedia.org/wiki/Blockchain>
* <https://en.wikipedia.org/wiki/Chinese_remainder_theorem>
* <https://en.wikipedia.org/wiki/Diophantine_equation>
* <https://www.geeksforgeeks.org/modular-division/>
| # Blockchain
A Blockchain is a type of **distributed ledger** technology (DLT) that consists of a growing list of records, called **blocks**, that are securely linked together using **cryptography**.
Let's break down the terminology in the above definition. We find the below terminologies,
- Digital Ledger Technology (DLT)
- Blocks
- Cryptography
## Digital Ledger Technology
It is otherwise called distributed ledger technology. It is simply the opposite of a centralized database. Firstly, what is a **ledger**? A ledger is a book or collection of accounts that records account transactions.
*Why is Blockchain addressed as digital ledger if it can record more than account transactions? What other transaction details and information can it hold?*
Digital Ledger Technology is just a ledger which is shared among multiple nodes. This way there exists no need for a central authority to hold the info. Okay, how is it differentiated from a central database, and what are its benefits?
Say there is an organization which has 4 branches whose data are stored in a centralized database. Even if one branch needs any data from the ledger, it needs approval from the database in charge. And if someone hacks the central database, they get to tamper with and control all the data.
Now let's assume every branch has a copy of the ledger; then, once anything is added to the ledger by any branch, it is automatically reflected in all the other ledgers available in the other branches. This is done using a peer-to-peer network.
So this means that even if information is tampered with in one branch, we can find out. If one branch is hacked, we can be alerted, so we can safeguard the other branches. Now, think of these branches as computers or nodes and of the ledger as a transaction record or digital receipt. If one ledger is hacked in a node, we can detect it since there will be a mismatch in comparison with the other nodes' information. So this is the concept of Digital Ledger Technology.
*Is it required for all nodes to have access to all information in other nodes? Wouldn't this require enormous storage space in each node?*
## Blocks
In short, a block is nothing but a collection of records with a labelled header. These are connected cryptographically. Once a new block is added to the chain, the previous block is connected, or more precisely locked, and hence will remain unaltered. We can understand this concept once we get a clear understanding of the working mechanism of blockchain.
## Cryptography
It is the practice and study of secure communication techniques in the midst of adversarial behavior. More broadly, cryptography is the creation and analysis of protocols that prevent third parties or the general public from accessing private messages.
*Which cryptography technology is most widely used in blockchain and why?*
So, in general, blockchain technology is a distributed record holder which records the information about the ownership of an asset. To define it precisely,
> Blockchain is a distributed, immutable ledger that makes it easier to record transactions and track assets in a corporate network.
An asset could be tangible (such as a house, car, cash, or land) or intangible (such as intellectual property, patents, copyrights, or branding). A blockchain network can track and sell almost anything of value, lowering risk and costs for everyone involved.
So this is all about the introduction to blockchain technology. To learn more about the topic, refer to the links below.
* <https://en.wikipedia.org/wiki/Blockchain>
* <https://en.wikipedia.org/wiki/Chinese_remainder_theorem>
* <https://en.wikipedia.org/wiki/Diophantine_equation>
* <https://www.geeksforgeeks.org/modular-division/>
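To make the statement above that blocks are "connected cryptographically" a little more concrete, here is a minimal, illustrative Python sketch: each block stores the hash of the previous block, so tampering with any earlier block breaks every hash that follows it. The `Block` layout, the sample records and the choice of SHA-256 are assumptions for this illustration only and are not taken from any file in this repository.

```python
# Minimal sketch of hash-linked blocks (illustration only, standard library only).
import hashlib
import json
from dataclasses import dataclass, field


def sha256(data: str) -> str:
    """Return the hex SHA-256 digest of a string."""
    return hashlib.sha256(data.encode()).hexdigest()


@dataclass
class Block:
    index: int
    records: list[str]
    previous_hash: str
    hash: str = field(init=False)

    def __post_init__(self) -> None:
        # The hash covers the records and the previous block's hash, so altering
        # any earlier block changes every hash that comes after it.
        payload = json.dumps(
            {"index": self.index, "records": self.records, "previous_hash": self.previous_hash},
            sort_keys=True,
        )
        self.hash = sha256(payload)


if __name__ == "__main__":
    genesis = Block(0, ["genesis"], previous_hash="0" * 64)
    second = Block(1, ["Alice pays Bob 5"], previous_hash=genesis.hash)
    third = Block(2, ["Bob pays Carol 2"], previous_hash=second.hash)
    print(third.previous_hash == second.hash)  # True while the chain is intact
```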
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # Sorting Algorithms
Sorting is the process of putting data in a specific order. The way to arrange data in a specific order
is specified by the sorting algorithm. The most typical orders are lexical or numerical. The significance
of sorting lies in the fact that, if data is stored in a sorted manner, data searching can be highly optimised.
Another use for sorting is to represent data in a more readable manner.
This section contains many important algorithms that help us sort data in various scenarios.
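For instance, once data is sorted, lookups can use binary search instead of a linear scan; a small illustration with Python's standard `bisect` module (not one of the files in this directory):

```python
import bisect

data = [37, 5, 86, 12, 59, 5, 21]
data.sort()                      # [5, 5, 12, 21, 37, 59, 86]

# Binary search on the sorted list takes O(log n) comparisons.
index = bisect.bisect_left(data, 37)
print(index, data[index] == 37)  # 4 True
```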
## References
* <https://www.tutorialspoint.com/python_data_structure/python_sorting_algorithms.htm>
* <https://www.geeksforgeeks.org/sorting-algorithms-in-python>
* <https://realpython.com/sorting-algorithms-python>
| # Sorting Algorithms
Sorting is the process of putting data in a specific order. The way to arrange data in a specific order
is specified by the sorting algorithm. The most typical orders are lexical or numerical. The significance
of sorting lies in the fact that, if data is stored in a sorted manner, data searching can be highly optimised.
Another use for sorting is to represent data in a more readable manner.
This section contains many important algorithms that help us sort data in various scenarios.
## References
* <https://www.tutorialspoint.com/python_data_structure/python_sorting_algorithms.htm>
* <https://www.geeksforgeeks.org/sorting-algorithms-in-python>
* <https://realpython.com/sorting-algorithms-python>
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| name: Bug report
description: Create a bug report to help us address errors in the repository
labels: [bug]
body:
- type: markdown
attributes:
value: >
Before requesting please search [existing issues](https://github.com/TheAlgorithms/Python/labels/bug).
Usage questions such as "How do I...?" belong on the
[Discord](https://discord.gg/c7MnfGFGa6) and will be closed.
- type: input
attributes:
label: "Repository commit"
description: >
The commit hash for `TheAlgorithms/Python` repository. You can get this
by running the command `git rev-parse HEAD` locally.
placeholder: "a0b0f414ae134aa1772d33bb930e5a960f9979e8"
validations:
required: true
- type: input
attributes:
label: "Python version (python --version)"
placeholder: "Python 3.10.7"
validations:
required: true
- type: textarea
attributes:
label: "Dependencies version (pip freeze)"
description: >
This is the output of the command `pip freeze --all`. Note that the
actual output might be different as compared to the placeholder text.
placeholder: |
appnope==0.1.3
asttokens==2.0.8
backcall==0.2.0
...
validations:
required: true
- type: textarea
attributes:
label: "Expected behavior"
description: "Describe the behavior you expect. May include images or videos."
validations:
required: true
- type: textarea
attributes:
label: "Actual behavior"
validations:
required: true
| name: Bug report
description: Create a bug report to help us address errors in the repository
labels: [bug]
body:
- type: markdown
attributes:
value: >
Before requesting please search [existing issues](https://github.com/TheAlgorithms/Python/labels/bug).
Usage questions such as "How do I...?" belong on the
[Discord](https://discord.gg/c7MnfGFGa6) and will be closed.
- type: input
attributes:
label: "Repository commit"
description: >
The commit hash for `TheAlgorithms/Python` repository. You can get this
by running the command `git rev-parse HEAD` locally.
placeholder: "a0b0f414ae134aa1772d33bb930e5a960f9979e8"
validations:
required: true
- type: input
attributes:
label: "Python version (python --version)"
placeholder: "Python 3.10.7"
validations:
required: true
- type: textarea
attributes:
label: "Dependencies version (pip freeze)"
description: >
This is the output of the command `pip freeze --all`. Note that the
actual output might be different as compared to the placeholder text.
placeholder: |
appnope==0.1.3
asttokens==2.0.8
backcall==0.2.0
...
validations:
required: true
- type: textarea
attributes:
label: "Expected behavior"
description: "Describe the behavior you expect. May include images or videos."
validations:
required: true
- type: textarea
attributes:
label: "Actual behavior"
validations:
required: true
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # Audio Filter
Audio filters work on the frequency of an audio signal to attenuate unwanted frequencies and amplify wanted ones.
They are used within anything related to sound, whether it is radio communication or a hi-fi system.
* <https://www.masteringbox.com/filter-types/>
* <http://ethanwiner.com/filters.html>
* <https://en.wikipedia.org/wiki/Audio_filter>
* <https://en.wikipedia.org/wiki/Electronic_filter>
| # Audio Filter
Audio filters work on the frequency of an audio signal to attenuate unwanted frequencies and amplify wanted ones.
They are used within anything related to sound, whether it is radio communication or a hi-fi system.
* <https://www.masteringbox.com/filter-types/>
* <http://ethanwiner.com/filters.html>
* <https://en.wikipedia.org/wiki/Audio_filter>
* <https://en.wikipedia.org/wiki/Electronic_filter>
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # Binary Tree Traversal
## Overview
Binary trees are a classic data structure and traversal is a classic algorithm; together they relate to many classic problems, either directly or indirectly.
> If you can grasp the traversal of binary trees, the traversal of other complicated trees will be easy for you.
The following are some common ways to traverse trees.
- Depth First Traversals (DFS): In-order, Pre-order, Post-order
- Level Order Traversal, also known as Breadth First Traversal (BFS)
There are applications for both DFS and BFS.
A stack can be used to simplify the process of DFS traversal. Besides, since a tree is a recursive data structure, recursion and stacks are the two key points for DFS.
Graph for DFS:

The key point of BFS is how to determine whether the traversal of each level has been completed. The answer is to use a variable as a flag to represent the end of the traversal of the current level.
## Pre-order Traversal
The traversal order of pre-order traversal is `root-left-right`.
Algorithm Pre-order
1. Visit the root node and push it into a stack.
2. Pop a node from the stack, and push its right and left child node into the stack respectively.
3. Repeat step 2.
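The steps above can be sketched as follows (an illustration only; the small `Node` class is a stand-in, not this repository's implementation):

```python
from __future__ import annotations

from dataclasses import dataclass


@dataclass
class Node:
    value: int
    left: Node | None = None
    right: Node | None = None


def preorder(root: Node | None) -> list[int]:
    """Root-left-right traversal using an explicit stack."""
    if root is None:
        return []
    stack, output = [root], []
    while stack:
        node = stack.pop()
        output.append(node.value)
        if node.right:            # push right first so the left child is popped first
            stack.append(node.right)
        if node.left:
            stack.append(node.left)
    return output


tree = Node(1, Node(2, Node(4), Node(5)), Node(3))
print(preorder(tree))  # [1, 2, 4, 5, 3]
```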
Conclusion: This problem involves the classic recursive data structure (i.e. a binary tree), and the algorithm above demonstrates how a simplified solution can be reached by using a stack.
If you look at the bigger picture, you'll find that the process of traversal is as follows: `Visit the left subtrees respectively from top to bottom, and visit the right subtrees respectively from bottom to top`. If we are to implement it from this perspective, things will be somewhat different. For the `top to bottom` part we can simply use recursion, and for the `bottom to top` part we can turn to a stack.
## In-order Traversal
The traversal order of in-order traversal is `left-root-right`.
So the root node is not printed first. Things are getting a bit complicated here.
Algorithm In-order
1. Visit the root and push it into a stack.
2. If there is a left child node, push it into the stack. Repeat this process until a leaf node is reached.
> At this point the root node and all the left nodes are in the stack.
3. Start popping nodes from the stack. If a node has a right child node, push the child node into the stack. Repeat step 2.
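A compact sketch of these steps (again with a toy `Node` class, purely for illustration):

```python
from __future__ import annotations

from dataclasses import dataclass


@dataclass
class Node:  # same toy node as in the pre-order sketch
    value: int
    left: Node | None = None
    right: Node | None = None


def inorder(root: Node | None) -> list[int]:
    """Left-root-right traversal: push left children, then pop and go right."""
    stack: list[Node] = []
    output: list[int] = []
    current = root
    while current or stack:
        while current:            # steps 1-2: push the root and all left children
            stack.append(current)
            current = current.left
        current = stack.pop()     # step 3: pop, visit, then explore the right child
        output.append(current.value)
        current = current.right
    return output


tree = Node(4, Node(2, Node(1), Node(3)), Node(5))
print(inorder(tree))  # [1, 2, 3, 4, 5] -- sorted, because this tree is a BST
```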
It's worth pointing out that the in-order traversal of a binary search tree (BST) is a sorted array, which is helpful for coming up with simplified solutions to some problems.
## Post-order Traversal
The traversal order of post-order traversal is `left-right-root`.
This one is a bit of a challenge. It deserves the `hard` tag of LeetCode.
In this case, the root node is printed not as the first but the last one. A cunning way to do it is to:
Record whether the current node has been visited. If 1) it's a leaf node or 2) both its left and right subtrees have been traversed, then it can be popped from the stack.
As for `1) it's a leaf node`, you can easily tell whether a node is a leaf if both its left and right are `null`.
As for `2) both its left and right subtrees have been traversed`, we only need a variable to record whether a node has been visited or not. In the worst case, we need to record the status for every single node and the space complexity is `O(n)`. But if you come to think about it, as we are using a stack and start printing the result from the leaf nodes, it makes sense that we only record the status for the current node popping from the stack, reducing the space complexity to `O(1)`.
## Level Order Traversal
The key point of level order traversal is how to know whether the traversal of each level is done. The answer is that we use a variable as a flag representing the end of the traversal of the current level.

Algorithm Level-order
1. Visit the root node, put it in a FIFO queue, put in the queue a special flag (we are using `null` here).
2. Dequeue a node.
3. If the node equals `null`, it means that all nodes of the current level have been visited. If the queue is empty, we do nothing. Or else we put in another `null`.
4. If the node is not `null`, meaning the traversal of current level has not finished yet, we enqueue its left subtree and right subtree respectively.
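A sketch of this flag-based BFS (illustrative toy code, not the repository's implementation):

```python
from __future__ import annotations

from collections import deque
from dataclasses import dataclass


@dataclass
class Node:  # same toy node as in the earlier sketches
    value: int
    left: Node | None = None
    right: Node | None = None


def level_order(root: Node | None) -> list[list[int]]:
    """BFS that uses None in the queue as the end-of-level flag."""
    if root is None:
        return []
    queue: deque[Node | None] = deque([root, None])
    levels: list[list[int]] = [[]]
    while queue:
        node = queue.popleft()
        if node is None:              # the current level is finished
            if queue:                 # more nodes remain, so start a new level
                levels.append([])
                queue.append(None)
        else:
            levels[-1].append(node.value)
            if node.left:
                queue.append(node.left)
            if node.right:
                queue.append(node.right)
    return levels


tree = Node(1, Node(2, Node(4)), Node(3))
print(level_order(tree))  # [[1], [2, 3], [4]]
```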
## Bi-color marking
We know that there is a tri-color marking scheme in garbage collection algorithms, which works as described below.
- The white color represents "not visited".
- The gray color represents "not all child nodes visited".
- The black color represents "all child nodes visited".
Inspired by tri-color marking, a bi-color marking method can be devised to solve all three traversal problems with one solution.
The core idea is as follows.
- Use a color to mark whether a node has been visited or not. Nodes yet to be visited are marked as white and visited nodes are marked as gray.
- If we are visiting a white node, turn it into gray, and push its right child node, itself, and its left child node into the stack respectively.
- If we are visiting a gray node, print it.
Implementation of pre-order and post-order traversal algorithms can be easily done by changing the order of pushing the child nodes into the stack.
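A sketch of the bi-color idea for in-order traversal (a hypothetical helper, not taken from this repository; reordering the three pushes yields pre-order or post-order):

```python
from __future__ import annotations

from dataclasses import dataclass

WHITE, GRAY = 0, 1  # WHITE = not visited yet, GRAY = already visited


@dataclass
class Node:  # same toy node as in the earlier sketches
    value: int
    left: Node | None = None
    right: Node | None = None


def inorder_bicolor(root: Node | None) -> list[int]:
    output: list[int] = []
    stack: list[tuple[int, Node | None]] = [(WHITE, root)]
    while stack:
        color, node = stack.pop()
        if node is None:
            continue
        if color == WHITE:
            # For in-order, push right, root, left (they pop in reverse order).
            stack.append((WHITE, node.right))
            stack.append((GRAY, node))
            stack.append((WHITE, node.left))
        else:
            output.append(node.value)
    return output


tree = Node(2, Node(1), Node(3))
print(inorder_bicolor(tree))  # [1, 2, 3]
```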
Reference: [LeetCode](https://github.com/azl397985856/leetcode/blob/master/thinkings/binary-tree-traversal.en.md)
| # Binary Tree Traversal
## Overview
Binary trees are a classic data structure and traversal is a classic algorithm; together they relate to many classic problems, either directly or indirectly.
> If you can grasp the traversal of binary trees, the traversal of other complicated trees will be easy for you.
The following are some common ways to traverse trees.
- Depth First Traversals (DFS): In-order, Pre-order, Post-order
- Level Order Traversal, also known as Breadth First Traversal (BFS)
There are applications for both DFS and BFS.
A stack can be used to simplify the process of DFS traversal. Besides, since a tree is a recursive data structure, recursion and stacks are the two key points for DFS.
Graph for DFS:

The key point of BFS is how to determine whether the traversal of each level has been completed. The answer is to use a variable as a flag to represent the end of the traversal of the current level.
## Pre-order Traversal
The traversal order of pre-order traversal is `root-left-right`.
Algorithm Pre-order
1. Visit the root node and push it into a stack.
2. Pop a node from the stack, and push its right and left child node into the stack respectively.
3. Repeat step 2.
Conclusion: This problem involves the classic recursive data structure (i.e. a binary tree), and the algorithm above demonstrates how a simplified solution can be reached by using a stack.
If you look at the bigger picture, you'll find that the process of traversal is as follows: `Visit the left subtrees respectively from top to bottom, and visit the right subtrees respectively from bottom to top`. If we are to implement it from this perspective, things will be somewhat different. For the `top to bottom` part we can simply use recursion, and for the `bottom to top` part we can turn to a stack.
## In-order Traversal
The traversal order of in-order traversal is `left-root-right`.
So the root node is not printed first. Things are getting a bit complicated here.
Algorithm In-order
1. Visit the root and push it into a stack.
2. If there is a left child node, push it into the stack. Repeat this process until a leaf node is reached.
> At this point the root node and all the left nodes are in the stack.
3. Start popping nodes from the stack. If a node has a right child node, push the child node into the stack. Repeat step 2.
It's worth pointing out that the in-order traversal of a binary search tree (BST) is a sorted array, which is helpful for coming up with simplified solutions to some problems.
## Post-order Traversal
The traversal order of post-order traversal is `left-right-root`.
This one is a bit of a challenge. It deserves the `hard` tag of LeetCode.
In this case, the root node is printed not as the first but the last one. A cunning way to do it is to:
Record whether the current node has been visited. If 1) it's a leaf node or 2) both its left and right subtrees have been traversed, then it can be popped from the stack.
As for `1) it's a leaf node`, you can easily tell whether a node is a leaf if both its left and right are `null`.
As for `2) both its left and right subtrees have been traversed`, we only need a variable to record whether a node has been visited or not. In the worst case, we need to record the status for every single node and the space complexity is `O(n)`. But if you come to think about it, as we are using a stack and start printing the result from the leaf nodes, it makes sense that we only record the status for the current node popping from the stack, reducing the space complexity to `O(1)`.
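One way to sketch this (with a toy `Node` class for illustration, not the repository's code) is to keep a `visited` flag next to each node on the stack and emit a node only on its second visit:

```python
from __future__ import annotations

from dataclasses import dataclass


@dataclass
class Node:  # toy node for illustration
    value: int
    left: Node | None = None
    right: Node | None = None


def postorder(root: Node | None) -> list[int]:
    """Left-right-root traversal; a node is emitted only once both subtrees are done."""
    output: list[int] = []
    stack: list[tuple[Node, bool]] = [(root, False)] if root else []
    while stack:
        node, visited = stack.pop()
        if visited:                       # both subtrees have been traversed
            output.append(node.value)
        else:                             # re-push the node, then its children
            stack.append((node, True))
            if node.right:
                stack.append((node.right, False))
            if node.left:
                stack.append((node.left, False))
    return output


tree = Node(1, Node(2, Node(4), Node(5)), Node(3))
print(postorder(tree))  # [4, 5, 2, 3, 1]
```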
## Level Order Traversal
The key point of level order traversal is how to know whether the traversal of each level is done. The answer is that we use a variable as a flag representing the end of the traversal of the current level.

Algorithm Level-order
1. Visit the root node, put it in a FIFO queue, put in the queue a special flag (we are using `null` here).
2. Dequeue a node.
3. If the node equals `null`, it means that all nodes of the current level have been visited. If the queue is empty, we do nothing. Or else we put in another `null`.
4. If the node is not `null`, meaning the traversal of current level has not finished yet, we enqueue its left subtree and right subtree respectively.
## Bi-color marking
We know that there is a tri-color marking scheme in garbage collection algorithms, which works as described below.
- The white color represents "not visited".
- The gray color represents "not all child nodes visited".
- The black color represents "all child nodes visited".
Inspired by tri-color marking, a bi-color marking method can be devised to solve all three traversal problems with one solution.
The core idea is as follows.
- Use a color to mark whether a node has been visited or not. Nodes yet to be visited are marked as white and visited nodes are marked as gray.
- If we are visiting a white node, turn it into gray, and push its right child node, itself, and its left child node into the stack respectively.
- If we are visiting a gray node, print it.
Implementation of pre-order and post-order traversal algorithms can be easily done by changing the order of pushing the child nodes into the stack.
Reference: [LeetCode](https://github.com/azl397985856/leetcode/blob/master/thinkings/binary-tree-traversal.en.md)
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # Cellular Automata
Cellular automata are a way to simulate the behavior of "life", whether it is a robot or a cell.
They usually follow simple rules but can lead to the creation of complex forms.
The most popular cellular automaton is Conway's [Game of Life](https://en.wikipedia.org/wiki/Conway%27s_Game_of_Life).
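As a tiny illustration of simple rules producing complex patterns, here is a sketch of one update rule for an elementary cellular automaton (Rule 30); it is only an example, not one of the implementations in this directory:

```python
RULE_30 = {
    (1, 1, 1): 0, (1, 1, 0): 0, (1, 0, 1): 0, (1, 0, 0): 1,
    (0, 1, 1): 1, (0, 1, 0): 1, (0, 0, 1): 1, (0, 0, 0): 0,
}


def step(cells: list[int]) -> list[int]:
    """Apply Rule 30 to every cell, treating the row as wrapping around."""
    n = len(cells)
    return [
        RULE_30[(cells[(i - 1) % n], cells[i], cells[(i + 1) % n])] for i in range(n)
    ]


row = [0, 0, 0, 1, 0, 0, 0]  # a single live cell in the middle
for _ in range(3):
    print("".join("#" if cell else "." for cell in row))
    row = step(row)
```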
* <https://en.wikipedia.org/wiki/Cellular_automaton>
* <https://mathworld.wolfram.com/ElementaryCellularAutomaton.html>
| # Cellular Automata
Cellular automata are a way to simulate the behavior of "life", whether it is a robot or a cell.
They usually follow simple rules but can lead to the creation of complex forms.
The most popular cellular automaton is Conway's [Game of Life](https://en.wikipedia.org/wiki/Conway%27s_Game_of_Life).
* <https://en.wikipedia.org/wiki/Cellular_automaton>
* <https://mathworld.wolfram.com/ElementaryCellularAutomaton.html>
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| blank_issues_enabled: false
contact_links:
- name: Discord community
url: https://discord.gg/c7MnfGFGa6
about: Have any questions or need any help? Please contact us via Discord
| blank_issues_enabled: false
contact_links:
- name: Discord community
url: https://discord.gg/c7MnfGFGa6
about: Have any questions or need any help? Please contact us via Discord
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| MIT License
Copyright (c) 2016-2022 TheAlgorithms and contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
| MIT License
Copyright (c) 2016-2022 TheAlgorithms and contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # Compression
Data compression is everywhere; you need it to store data without taking up too much space.
Either the compression loses some data (then we talk about lossy compression, such as .jpg) or it does not (and then it is lossless compression, such as .png).
Lossless compression is mainly used for archival purposes, as it allows storing data without losing any information about the archived file. On the other hand, lossy compression is used for transferring files where full quality isn't strictly required (e.g. images on Twitter).
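To make the lossless idea concrete, here is a toy run-length encoding sketch (an illustration only, not one of the algorithms in this directory); the original data is reconstructed exactly from the encoded form:

```python
def rle_encode(text: str) -> list[tuple[str, int]]:
    """Collapse runs of repeated characters into (character, count) pairs."""
    encoded: list[tuple[str, int]] = []
    for char in text:
        if encoded and encoded[-1][0] == char:
            encoded[-1] = (char, encoded[-1][1] + 1)
        else:
            encoded.append((char, 1))
    return encoded


def rle_decode(encoded: list[tuple[str, int]]) -> str:
    return "".join(char * count for char, count in encoded)


data = "aaaabbbcca"
packed = rle_encode(data)
print(packed)                      # [('a', 4), ('b', 3), ('c', 2), ('a', 1)]
assert rle_decode(packed) == data  # lossless: nothing was thrown away
```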
* <https://www.sciencedirect.com/topics/computer-science/compression-algorithm>
* <https://en.wikipedia.org/wiki/Data_compression>
* <https://en.wikipedia.org/wiki/Pigeonhole_principle>
| # Compression
Data compression is everywhere; you need it to store data without taking up too much space.
Either the compression loses some data (then we talk about lossy compression, such as .jpg) or it does not (and then it is lossless compression, such as .png).
Lossless compression is mainly used for archival purposes, as it allows storing data without losing any information about the archived file. On the other hand, lossy compression is used for transferring files where full quality isn't strictly required (e.g. images on Twitter).
* <https://www.sciencedirect.com/topics/computer-science/compression-algorithm>
* <https://en.wikipedia.org/wiki/Data_compression>
* <https://en.wikipedia.org/wiki/Pigeonhole_principle>
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # Hashes
Hashing is the process of mapping any amount of data to a specified size using an algorithm. This is known as a hash value (or, if you're feeling fancy, a hash code, hash sums, or even a hash digest). Hashing is a one-way function, whereas encryption is a two-way function. While it is functionally conceivable to reverse-hash stuff, the required computing power makes it impractical. Hashing is a one-way street.
Unlike encryption, which is intended to protect data in transit, hashing is intended to authenticate that a file or piece of data has not been altered—that it is authentic. In other words, it functions as a checksum.
## Common hashing algorithms
### MD5
This is one of the first algorithms to have gained widespread acceptance. MD5 is a hashing algorithm made by Ron Rivest that is known to suffer from vulnerabilities. It was created in 1992 as the successor to MD4. MD6 was later proposed as a successor, but as of 2009 Rivest had withdrawn it from NIST consideration for SHA-3.
### SHA
SHA stands for Secure Hash Algorithm and it’s probably best known as the hashing algorithm used in most SSL/TLS cipher suites. A cipher suite is a collection of ciphers and algorithms that are used for SSL/TLS connections. SHA handles the hashing aspects. SHA-1, as we mentioned earlier, is now deprecated. SHA-2 is now mandatory. SHA-2 is sometimes known as SHA-256, though variants with longer bit lengths are also available.
### SHA256
SHA 256 is a member of the SHA 2 algorithm family, under which SHA stands for Secure Hash Algorithm. It was a collaborative effort between both the NSA and NIST to implement a successor to the SHA 1 family, which was beginning to lose potency against brute force attacks. It was published in 2001.
The importance of the 256 in the name refers to the final hash digest value, i.e. the hash value will remain 256 bits regardless of the size of the plaintext/cleartext. Other algorithms in the SHA family are similar to SHA 256 in some ways.
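For example, with Python's built-in `hashlib` (a quick illustration, not code from this repository), the digest is the same 256 bits (32 bytes) no matter how large the input is:

```python
import hashlib

for message in (b"", b"abc", b"a" * 1_000_000):
    digest = hashlib.sha256(message).digest()
    print(len(message), len(digest) * 8)  # input size varies; output is always 256 bits
```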
### Luhn
The Luhn algorithm, also known as the modulus 10 or mod 10 algorithm, is a straightforward checksum formula used to validate a wide range of identification numbers, including credit card numbers, IMEI numbers, and Canadian Social Insurance Numbers. The formula was developed by IBM scientist Hans Peter Luhn, and companies offering credit cards adopted it widely in the late 1960s. Since the algorithm is in the public domain, anyone can use it. The algorithm is used by most credit cards and many government identification numbers as a simple method of distinguishing valid numbers from mistyped or otherwise incorrect ones. It was created to guard against unintentional errors, not malicious attacks. | # Hashes
Hashing is the process of mapping any amount of data to a specified size using an algorithm. This is known as a hash value (or, if you're feeling fancy, a hash code, hash sums, or even a hash digest). Hashing is a one-way function, whereas encryption is a two-way function. While it is functionally conceivable to reverse-hash stuff, the required computing power makes it impractical. Hashing is a one-way street.
Unlike encryption, which is intended to protect data in transit, hashing is intended to authenticate that a file or piece of data has not been altered—that it is authentic. In other words, it functions as a checksum.
## Common hashing algorithms
### MD5
This is one of the first algorithms to have gained widespread acceptance. MD5 is a hashing algorithm made by Ron Rivest that is known to suffer from vulnerabilities. It was created in 1992 as the successor to MD4. MD6 was later proposed as a successor, but as of 2009 Rivest had withdrawn it from NIST consideration for SHA-3.
### SHA
SHA stands for Secure Hash Algorithm and it’s probably best known as the hashing algorithm used in most SSL/TLS cipher suites. A cipher suite is a collection of ciphers and algorithms that are used for SSL/TLS connections. SHA handles the hashing aspects. SHA-1, as we mentioned earlier, is now deprecated. SHA-2 is now mandatory. SHA-2 is sometimes known as SHA-256, though variants with longer bit lengths are also available.
### SHA256
SHA 256 is a member of the SHA 2 algorithm family, under which SHA stands for Secure Hash Algorithm. It was a collaborative effort between both the NSA and NIST to implement a successor to the SHA 1 family, which was beginning to lose potency against brute force attacks. It was published in 2001.
The importance of the 256 in the name refers to the final hash digest value, i.e. the hash value will remain 256 bits regardless of the size of the plaintext/cleartext. Other algorithms in the SHA family are similar to SHA 256 in some ways.
### Luhn
The Luhn algorithm, also known as the modulus 10 or mod 10 algorithm, is a straightforward checksum formula used to validate a wide range of identification numbers, including credit card numbers, IMEI numbers, and Canadian Social Insurance Numbers. The formula was developed by IBM scientist Hans Peter Luhn, and companies offering credit cards adopted it widely in the late 1960s. Since the algorithm is in the public domain, anyone can use it. The algorithm is used by most credit cards and many government identification numbers as a simple method of distinguishing valid numbers from mistyped or otherwise incorrect ones. It was created to guard against unintentional errors, not malicious attacks. | -1 |
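A minimal sketch of the Luhn check described above (illustrative only, independent of this repository's own implementation):

```python
def luhn_is_valid(number: str) -> bool:
    """Return True if the digit string passes the Luhn (mod 10) check."""
    total = 0
    # Walk the digits from right to left, doubling every second one.
    for position, digit_char in enumerate(reversed(number)):
        digit = int(digit_char)
        if position % 2 == 1:
            digit *= 2
            if digit > 9:
                digit -= 9
        total += digit
    return total % 10 == 0


print(luhn_is_valid("79927398713"))  # True  (a standard Luhn test number)
print(luhn_is_valid("79927398714"))  # False
```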
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| name: Other
description: Use this for any other issues. PLEASE do not create blank issues
labels: ["awaiting triage"]
body:
- type: textarea
id: issuedescription
attributes:
label: What would you like to share?
description: Provide a clear and concise explanation of your issue.
validations:
required: true
- type: textarea
id: extrainfo
attributes:
label: Additional information
description: Is there anything else we should know about this issue?
validations:
required: false
| name: Other
description: Use this for any other issues. PLEASE do not create blank issues
labels: ["awaiting triage"]
body:
- type: textarea
id: issuedescription
attributes:
label: What would you like to share?
description: Provide a clear and concise explanation of your issue.
validations:
required: true
- type: textarea
id: extrainfo
attributes:
label: Additional information
description: Is there anything else we should know about this issue?
validations:
required: false
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| on:
pull_request:
# Run only if a file is changed within the project_euler directory and related files
paths:
- "project_euler/**"
- ".github/workflows/project_euler.yml"
- "scripts/validate_solutions.py"
schedule:
- cron: "0 0 * * *" # Run everyday
name: "Project Euler"
jobs:
project-euler:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: 3.x
- name: Install pytest and pytest-cov
run: |
python -m pip install --upgrade pip
python -m pip install --upgrade pytest pytest-cov
- run: pytest --doctest-modules --cov-report=term-missing:skip-covered --cov=project_euler/ project_euler/
validate-solutions:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: 3.x
- name: Install pytest and requests
run: |
python -m pip install --upgrade pip
python -m pip install --upgrade pytest requests
- run: pytest scripts/validate_solutions.py
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
| on:
pull_request:
# Run only if a file is changed within the project_euler directory and related files
paths:
- "project_euler/**"
- ".github/workflows/project_euler.yml"
- "scripts/validate_solutions.py"
schedule:
- cron: "0 0 * * *" # Run everyday
name: "Project Euler"
jobs:
project-euler:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: 3.x
- name: Install pytest and pytest-cov
run: |
python -m pip install --upgrade pip
python -m pip install --upgrade pytest pytest-cov
- run: pytest --doctest-modules --cov-report=term-missing:skip-covered --cov=project_euler/ project_euler/
validate-solutions:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: 3.x
- name: Install pytest and requests
run: |
python -m pip install --upgrade pip
python -m pip install --upgrade pytest requests
- run: pytest scripts/validate_solutions.py
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| name: "build"
on:
pull_request:
schedule:
- cron: "0 0 * * *" # Run everyday
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: 3.11
- uses: actions/cache@v3
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip setuptools six wheel
python -m pip install pytest-cov -r requirements.txt
- name: Run tests
# See: #6591 for re-enabling tests on Python v3.11
run: pytest
--ignore=computer_vision/cnn_classification.py
--ignore=machine_learning/lstm/lstm_prediction.py
--ignore=quantum/
--ignore=project_euler/
--ignore=scripts/validate_solutions.py
--cov-report=term-missing:skip-covered
--cov=. .
- if: ${{ success() }}
run: scripts/build_directory_md.py 2>&1 | tee DIRECTORY.md
| name: "build"
on:
pull_request:
schedule:
- cron: "0 0 * * *" # Run everyday
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: 3.11
- uses: actions/cache@v3
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip setuptools six wheel
python -m pip install pytest-cov -r requirements.txt
- name: Run tests
# See: #6591 for re-enabling tests on Python v3.11
run: pytest
--ignore=computer_vision/cnn_classification.py
--ignore=machine_learning/lstm/lstm_prediction.py
--ignore=quantum/
--ignore=project_euler/
--ignore=scripts/validate_solutions.py
--cov-report=term-missing:skip-covered
--cov=. .
- if: ${{ success() }}
run: scripts/build_directory_md.py 2>&1 | tee DIRECTORY.md
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # Normal Distribution QuickSort
A QuickSort algorithm where the pivot element is chosen at a random position between the first and last elements of the array, and the array elements are taken from the standard normal distribution.
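For illustration only (a minimal sketch, not the repository's implementation), QuickSort with a pivot picked at a random position between the first and last elements might look like this:
```python
>>> import random
>>> def quick_sort(collection):
...     if len(collection) <= 1:
...         return collection
...     # pick the pivot at a random position between the first and last elements
...     pivot = collection[random.randint(0, len(collection) - 1)]
...     smaller = [x for x in collection if x < pivot]
...     equal = [x for x in collection if x == pivot]
...     larger = [x for x in collection if x > pivot]
...     return quick_sort(smaller) + equal + quick_sort(larger)
...
>>> quick_sort([0.3, -1.2, 0.0, 2.5])
[-1.2, 0.0, 0.3, 2.5]
```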
## Array elements
The array elements are taken from a Standard Normal Distribution, having mean = 0 and standard deviation = 1.
### The code
```python
>>> import numpy as np
>>> from tempfile import TemporaryFile
>>> outfile = TemporaryFile()
>>> p = 100 # 100 elements are to be sorted
>>> mu, sigma = 0, 1 # mean and standard deviation
>>> X = np.random.normal(mu, sigma, p)
>>> np.save(outfile, X)
>>> 'The array is'
>>> X
```
------
#### The distribution of the array elements
```python
>>> import matplotlib.pyplot as plt
>>> mu, sigma = 0, 1 # mean and standard deviation
>>> s = np.random.normal(mu, sigma, p)
>>> count, bins, ignored = plt.hist(s, 30, density=True)
>>> plt.plot(bins , 1/(sigma * np.sqrt(2 * np.pi)) *np.exp( - (bins - mu)**2 / (2 * sigma**2) ),linewidth=2, color='r')
>>> plt.show()
```
------

------
## Comparing the numbers of comparisons
We can plot the number of comparisons made by Normal Distribution QuickSort against Ordinary QuickSort:
```python
>>> import matplotlib.pyplot as plt
# Normal Distribution QuickSort is red
>>> plt.plot([1,2,4,16,32,64,128,256,512,1024,2048],[1,1,6,15,43,136,340,800,2156,6821,16325],linewidth=2, color='r')
# Ordinary QuickSort is green
>>> plt.plot([1,2,4,16,32,64,128,256,512,1024,2048],[1,1,4,16,67,122,362,949,2131,5086,12866],linewidth=2, color='g')
>>> plt.show()
```
| # Normal Distribution QuickSort
A QuickSort algorithm where the pivot element is chosen at a random position between the first and last elements of the array, and the array elements are taken from the standard normal distribution.
## Array elements
The array elements are taken from a Standard Normal Distribution, having mean = 0 and standard deviation = 1.
### The code
```python
>>> import numpy as np
>>> from tempfile import TemporaryFile
>>> outfile = TemporaryFile()
>>> p = 100 # 100 elements are to be sorted
>>> mu, sigma = 0, 1 # mean and standard deviation
>>> X = np.random.normal(mu, sigma, p)
>>> np.save(outfile, X)
>>> 'The array is'
>>> X
```
------
#### The distribution of the array elements
```python
>>> import matplotlib.pyplot as plt
>>> mu, sigma = 0, 1 # mean and standard deviation
>>> s = np.random.normal(mu, sigma, p)
>>> count, bins, ignored = plt.hist(s, 30, density=True)
>>> plt.plot(bins , 1/(sigma * np.sqrt(2 * np.pi)) *np.exp( - (bins - mu)**2 / (2 * sigma**2) ),linewidth=2, color='r')
>>> plt.show()
```
------

------
## Comparing the numbers of comparisons
We can plot the number of comparisons made by Normal Distribution QuickSort against Ordinary QuickSort:
```python
>>> import matplotlib.pyplot as plt
# Normal Distribution QuickSort is red
>>> plt.plot([1,2,4,16,32,64,128,256,512,1024,2048],[1,1,6,15,43,136,340,800,2156,6821,16325],linewidth=2, color='r')
# Ordinary QuickSort is green
>>> plt.plot([1,2,4,16,32,64,128,256,512,1024,2048],[1,1,4,16,67,122,362,949,2131,5086,12866],linewidth=2, color='g')
>>> plt.show()
```
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # Conversion
Conversion programs convert data from one type, numerical base, or unit into another, e.g. binary to decimal, integer to string, or feet to meters.
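As a quick, illustrative sketch (not code from this directory), Python's built-ins already cover several such conversions:
```python
>>> int("1010", 2)  # binary string to decimal integer
10
>>> bin(10)  # decimal integer back to a binary string
'0b1010'
>>> str(42)  # integer to string
'42'
>>> round(5 * 0.3048, 3)  # feet to meters, assuming 1 ft = 0.3048 m
1.524
```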
* <https://en.wikipedia.org/wiki/Data_conversion>
* <https://en.wikipedia.org/wiki/Transcoding>
| # Conversion
Conversion programs convert data from one type, numerical base, or unit into another, e.g. binary to decimal, integer to string, or feet to meters.
* <https://en.wikipedia.org/wiki/Data_conversion>
* <https://en.wikipedia.org/wiki/Transcoding>
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # Computer Vision
Computer vision is a field of computer science that works on enabling computers to see, identify and process images in the same way that a human does, and to provide appropriate output.
It is like imparting human intelligence and instincts to a computer.
Image processing and computer vision are slightly different from each other: image processing means applying algorithms that transform an image from one form to another, e.g. smoothing, contrast adjustment or stretching.
Computer vision, in contrast, builds on image processing with machine learning techniques: it applies machine learning to recognize patterns and interpret images (much like the visual reasoning of human vision).
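As a rough sketch of the image-processing side (using the Pillow library as an assumption here; `photo.jpg` is a placeholder path, not a file from this repository):
```python
from PIL import Image, ImageFilter, ImageOps  # Pillow

img = Image.open("photo.jpg")  # placeholder input image
smoothed = img.filter(ImageFilter.SMOOTH)  # smoothing
contrasted = ImageOps.autocontrast(img)  # contrast adjustment
stretched = img.resize((img.width * 2, img.height))  # horizontal stretching
stretched.save("photo_stretched.jpg")
```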
* <https://en.wikipedia.org/wiki/Computer_vision>
* <https://www.algorithmia.com/blog/introduction-to-computer-vision>
| # Computer Vision
Computer vision is a field of computer science that works on enabling computers to see, identify and process images in the same way that a human does, and to provide appropriate output.
It is like imparting human intelligence and instincts to a computer.
Image processing and computer vision are slightly different from each other: image processing means applying algorithms that transform an image from one form to another, e.g. smoothing, contrast adjustment or stretching.
Computer vision, in contrast, builds on image processing with machine learning techniques: it applies machine learning to recognize patterns and interpret images (much like the visual reasoning of human vision).
* <https://en.wikipedia.org/wiki/Computer_vision>
* <https://www.algorithmia.com/blog/introduction-to-computer-vision>
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # A naive recursive implementation of 0-1 Knapsack Problem
This overview is taken from:
https://en.wikipedia.org/wiki/Knapsack_problem
---
## Overview
The knapsack problem is a problem in combinatorial optimization: Given a set of items, each with a weight and a value, determine the number of each item to include in a collection so that the total weight is less than or equal to a given limit and the total value is as large as possible. It derives its name from the problem faced by someone who is constrained by a fixed-size knapsack and must fill it with the most valuable items. The problem often arises in resource allocation where the decision makers have to choose from a set of non-divisible projects or tasks under a fixed budget or time constraint, respectively.
The knapsack problem has been studied for more than a century, with early works dating as far back as 1897. The name "knapsack problem" dates back to the early works of the mathematician Tobias Dantzig (1884–1956), and refers to the commonplace problem of packing the most valuable or useful items without overloading the luggage.
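As a rough, self-contained sketch of the naive recursive idea (independent of the `knapsack.py` module in this directory):
```python
def knapsack_naive(capacity: int, weights: list[int], values: list[int], n: int) -> int:
    """Maximum value achievable from the first n items within the given capacity."""
    if n == 0 or capacity == 0:
        return 0
    if weights[n - 1] > capacity:
        # The n-th item cannot fit, so skip it.
        return knapsack_naive(capacity, weights, values, n - 1)
    # Either skip the n-th item, or take it and solve for the reduced capacity.
    return max(
        knapsack_naive(capacity, weights, values, n - 1),
        values[n - 1] + knapsack_naive(capacity - weights[n - 1], weights, values, n - 1),
    )


print(knapsack_naive(50, [10, 20, 30], [60, 100, 120], 3))  # prints 220
```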
---
## Documentation
This module uses docstrings to enable the use of Python's built-in `help(...)` function.
For instance, after importing the module, try `help(knapsack)` or `help(CLASSNAME.METHODNAME)`.
---
## Usage
Import the module `knapsack.py` from the **.** directory into your project.
---
## Tests
`.` contains Python unit tests which can be run with `python3 -m unittest -v`.
| # A naive recursive implementation of 0-1 Knapsack Problem
This overview is taken from:
https://en.wikipedia.org/wiki/Knapsack_problem
---
## Overview
The knapsack problem is a problem in combinatorial optimization: Given a set of items, each with a weight and a value, determine the number of each item to include in a collection so that the total weight is less than or equal to a given limit and the total value is as large as possible. It derives its name from the problem faced by someone who is constrained by a fixed-size knapsack and must fill it with the most valuable items. The problem often arises in resource allocation where the decision makers have to choose from a set of non-divisible projects or tasks under a fixed budget or time constraint, respectively.
The knapsack problem has been studied for more than a century, with early works dating as far back as 1897. The name "knapsack problem" dates back to the early works of the mathematician Tobias Dantzig (1884–1956), and refers to the commonplace problem of packing the most valuable or useful items without overloading the luggage.
---
## Documentation
This module uses docstrings to enable the use of Python's built-in `help(...)` function.
For instance, after importing the module, try `help(knapsack)` or `help(CLASSNAME.METHODNAME)`.
---
## Usage
Import the module `knapsack.py` from the **.** directory into your project.
---
## Tests
`.` contains Python unit tests which can be run with `python3 -m unittest -v`.
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # https://beta.ruff.rs
name: ruff
on:
push:
branches:
- master
pull_request:
branches:
- master
jobs:
ruff:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- run: pip install --user ruff
- run: ruff --format=github .
| # https://beta.ruff.rs
name: ruff
on:
push:
branches:
- master
pull_request:
branches:
- master
jobs:
ruff:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- run: pip install --user ruff
- run: ruff --format=github .
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # Bit manipulation
Bit manipulation is the act of manipulating bits to detect errors (Hamming code), encrypt and decrypt messages (more on that in the 'ciphers' folder), or do just about anything else at the lowest level of your computer.
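A few illustrative one-liners (just a sketch of the basic operations, not code from this directory):
```python
>>> x = 0b1010          # 10
>>> x | (1 << 0)        # set bit 0
11
>>> x & ~(1 << 1)       # clear bit 1
8
>>> x ^ (1 << 2)        # toggle bit 2
14
>>> bool(x & (1 << 3))  # test bit 3
True
>>> x >> 1              # shift right (floor-divide by 2)
5
```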
* <https://en.wikipedia.org/wiki/Bit_manipulation>
* <https://docs.python.org/3/reference/expressions.html#binary-bitwise-operations>
* <https://docs.python.org/3/reference/expressions.html#unary-arithmetic-and-bitwise-operations>
* <https://docs.python.org/3/library/stdtypes.html#bitwise-operations-on-integer-types>
* <https://wiki.python.org/moin/BitManipulation>
* <https://wiki.python.org/moin/BitwiseOperators>
* <https://www.tutorialspoint.com/python3/bitwise_operators_example.htm>
| # Bit manipulation
Bit manipulation is the act of manipulating bits to detect errors (Hamming code), encrypt and decrypt messages (more on that in the 'ciphers' folder), or do just about anything else at the lowest level of your computer.
* <https://en.wikipedia.org/wiki/Bit_manipulation>
* <https://docs.python.org/3/reference/expressions.html#binary-bitwise-operations>
* <https://docs.python.org/3/reference/expressions.html#unary-arithmetic-and-bitwise-operations>
* <https://docs.python.org/3/library/stdtypes.html#bitwise-operations-on-integer-types>
* <https://wiki.python.org/moin/BitManipulation>
* <https://wiki.python.org/moin/BitwiseOperators>
* <https://www.tutorialspoint.com/python3/bitwise_operators_example.htm>
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| LETTERS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
def main() -> None:
message = input("Enter message: ")
key = input("Enter key [alphanumeric]: ")
mode = input("Encrypt/Decrypt [e/d]: ")
if mode.lower().startswith("e"):
mode = "encrypt"
translated = encrypt_message(key, message)
elif mode.lower().startswith("d"):
mode = "decrypt"
translated = decrypt_message(key, message)
print(f"\n{mode.title()}ed message:")
print(translated)
def encrypt_message(key: str, message: str) -> str:
"""
>>> encrypt_message('HDarji', 'This is Harshil Darji from Dharmaj.')
'Akij ra Odrjqqs Gaisq muod Mphumrs.'
"""
return translate_message(key, message, "encrypt")
def decrypt_message(key: str, message: str) -> str:
"""
>>> decrypt_message('HDarji', 'Akij ra Odrjqqs Gaisq muod Mphumrs.')
'This is Harshil Darji from Dharmaj.'
"""
return translate_message(key, message, "decrypt")
def translate_message(key: str, message: str, mode: str) -> str:
    """Shift each letter of the message by the matching key letter (Vigenère-style)."""
translated = []
key_index = 0
key = key.upper()
for symbol in message:
num = LETTERS.find(symbol.upper())
if num != -1:
if mode == "encrypt":
num += LETTERS.find(key[key_index])
elif mode == "decrypt":
num -= LETTERS.find(key[key_index])
num %= len(LETTERS)
if symbol.isupper():
translated.append(LETTERS[num])
elif symbol.islower():
translated.append(LETTERS[num].lower())
key_index += 1
if key_index == len(key):
key_index = 0
else:
translated.append(symbol)
return "".join(translated)
if __name__ == "__main__":
main()
| LETTERS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
def main() -> None:
message = input("Enter message: ")
key = input("Enter key [alphanumeric]: ")
mode = input("Encrypt/Decrypt [e/d]: ")
if mode.lower().startswith("e"):
mode = "encrypt"
translated = encrypt_message(key, message)
elif mode.lower().startswith("d"):
mode = "decrypt"
translated = decrypt_message(key, message)
print(f"\n{mode.title()}ed message:")
print(translated)
def encrypt_message(key: str, message: str) -> str:
"""
>>> encrypt_message('HDarji', 'This is Harshil Darji from Dharmaj.')
'Akij ra Odrjqqs Gaisq muod Mphumrs.'
"""
return translate_message(key, message, "encrypt")
def decrypt_message(key: str, message: str) -> str:
"""
>>> decrypt_message('HDarji', 'Akij ra Odrjqqs Gaisq muod Mphumrs.')
'This is Harshil Darji from Dharmaj.'
"""
return translate_message(key, message, "decrypt")
def translate_message(key: str, message: str, mode: str) -> str:
    """Shift each letter of the message by the matching key letter (Vigenère-style)."""
translated = []
key_index = 0
key = key.upper()
for symbol in message:
num = LETTERS.find(symbol.upper())
if num != -1:
if mode == "encrypt":
num += LETTERS.find(key[key_index])
elif mode == "decrypt":
num -= LETTERS.find(key[key_index])
num %= len(LETTERS)
if symbol.isupper():
translated.append(LETTERS[num])
elif symbol.islower():
translated.append(LETTERS[num].lower())
key_index += 1
if key_index == len(key):
key_index = 0
else:
translated.append(symbol)
return "".join(translated)
if __name__ == "__main__":
main()
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| Hello
This is sample data
«küßî»
“ЌύБЇ”
😀😉
😋
| Hello
This is sample data
«küßî»
“ЌύБЇ”
😀😉
😋
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
https://en.wikipedia.org/wiki/Weird_number
Fun fact: The set of weird numbers has positive asymptotic density.
"""
from math import sqrt
def factors(number: int) -> list[int]:
"""
>>> factors(12)
[1, 2, 3, 4, 6]
>>> factors(1)
[1]
>>> factors(100)
[1, 2, 4, 5, 10, 20, 25, 50]
# >>> factors(-12)
# [1, 2, 3, 4, 6]
"""
values = [1]
for i in range(2, int(sqrt(number)) + 1, 1):
if number % i == 0:
values.append(i)
if int(number // i) != i:
values.append(int(number // i))
return sorted(values)
def abundant(n: int) -> bool:
"""
>>> abundant(0)
True
>>> abundant(1)
False
>>> abundant(12)
True
>>> abundant(13)
False
>>> abundant(20)
True
# >>> abundant(-12)
# True
"""
return sum(factors(n)) > n
def semi_perfect(number: int) -> bool:
"""
>>> semi_perfect(0)
True
>>> semi_perfect(1)
True
>>> semi_perfect(12)
True
>>> semi_perfect(13)
False
# >>> semi_perfect(-12)
# True
"""
values = factors(number)
r = len(values)
    # Subset-sum dynamic programming: subset[i][j] is True when some subset
    # of the first i divisors sums exactly to j.
    subset = [[0 for i in range(number + 1)] for j in range(r + 1)]
for i in range(r + 1):
subset[i][0] = True
for i in range(1, number + 1):
subset[0][i] = False
for i in range(1, r + 1):
for j in range(1, number + 1):
if j < values[i - 1]:
subset[i][j] = subset[i - 1][j]
else:
subset[i][j] = subset[i - 1][j] or subset[i - 1][j - values[i - 1]]
return subset[r][number] != 0
def weird(number: int) -> bool:
"""
>>> weird(0)
False
>>> weird(70)
True
>>> weird(77)
False
"""
return abundant(number) and not semi_perfect(number)
if __name__ == "__main__":
import doctest
doctest.testmod(verbose=True)
for number in (69, 70, 71):
print(f"{number} is {'' if weird(number) else 'not '}weird.")
| """
https://en.wikipedia.org/wiki/Weird_number
Fun fact: The set of weird numbers has positive asymptotic density.
"""
from math import sqrt
def factors(number: int) -> list[int]:
"""
>>> factors(12)
[1, 2, 3, 4, 6]
>>> factors(1)
[1]
>>> factors(100)
[1, 2, 4, 5, 10, 20, 25, 50]
# >>> factors(-12)
# [1, 2, 3, 4, 6]
"""
values = [1]
for i in range(2, int(sqrt(number)) + 1, 1):
if number % i == 0:
values.append(i)
if int(number // i) != i:
values.append(int(number // i))
return sorted(values)
def abundant(n: int) -> bool:
"""
>>> abundant(0)
True
>>> abundant(1)
False
>>> abundant(12)
True
>>> abundant(13)
False
>>> abundant(20)
True
# >>> abundant(-12)
# True
"""
return sum(factors(n)) > n
def semi_perfect(number: int) -> bool:
"""
>>> semi_perfect(0)
True
>>> semi_perfect(1)
True
>>> semi_perfect(12)
True
>>> semi_perfect(13)
False
# >>> semi_perfect(-12)
# True
"""
values = factors(number)
r = len(values)
    # Subset-sum dynamic programming: subset[i][j] is True when some subset
    # of the first i divisors sums exactly to j.
    subset = [[0 for i in range(number + 1)] for j in range(r + 1)]
for i in range(r + 1):
subset[i][0] = True
for i in range(1, number + 1):
subset[0][i] = False
for i in range(1, r + 1):
for j in range(1, number + 1):
if j < values[i - 1]:
subset[i][j] = subset[i - 1][j]
else:
subset[i][j] = subset[i - 1][j] or subset[i - 1][j - values[i - 1]]
return subset[r][number] != 0
def weird(number: int) -> bool:
"""
>>> weird(0)
False
>>> weird(70)
True
>>> weird(77)
False
"""
return abundant(number) and not semi_perfect(number)
if __name__ == "__main__":
import doctest
doctest.testmod(verbose=True)
for number in (69, 70, 71):
print(f"{number} is {'' if weird(number) else 'not '}weird.")
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| # Python program to implement Pigeonhole Sorting in python
# Algorithm for the pigeonhole sorting
def pigeonhole_sort(a):
"""
>>> a = [8, 3, 2, 7, 4, 6, 8]
>>> b = sorted(a) # a nondestructive sort
>>> pigeonhole_sort(a) # a destructive sort
>>> a == b
True
"""
# size of range of values in the list (ie, number of pigeonholes we need)
min_val = min(a) # min() finds the minimum value
max_val = max(a) # max() finds the maximum value
size = max_val - min_val + 1 # size is difference of max and min values plus one
# list of pigeonholes of size equal to the variable size
holes = [0] * size
# Populate the pigeonholes.
for x in a:
assert isinstance(x, int), "integers only please"
holes[x - min_val] += 1
# Putting the elements back into the array in an order.
i = 0
for count in range(size):
while holes[count] > 0:
holes[count] -= 1
a[i] = count + min_val
i += 1
def main():
a = [8, 3, 2, 7, 4, 6, 8]
pigeonhole_sort(a)
print("Sorted order is:", " ".join(a))
if __name__ == "__main__":
main()
| # Python program to implement Pigeonhole Sorting in python
# Algorithm for the pigeonhole sorting
def pigeonhole_sort(a):
"""
>>> a = [8, 3, 2, 7, 4, 6, 8]
>>> b = sorted(a) # a nondestructive sort
>>> pigeonhole_sort(a) # a destructive sort
>>> a == b
True
"""
# size of range of values in the list (ie, number of pigeonholes we need)
min_val = min(a) # min() finds the minimum value
max_val = max(a) # max() finds the maximum value
size = max_val - min_val + 1 # size is difference of max and min values plus one
# list of pigeonholes of size equal to the variable size
holes = [0] * size
# Populate the pigeonholes.
for x in a:
assert isinstance(x, int), "integers only please"
holes[x - min_val] += 1
# Putting the elements back into the array in an order.
i = 0
for count in range(size):
while holes[count] > 0:
holes[count] -= 1
a[i] = count + min_val
i += 1
def main():
a = [8, 3, 2, 7, 4, 6, 8]
pigeonhole_sort(a)
print("Sorted order is:", " ".join(a))
if __name__ == "__main__":
main()
| -1 |
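An editorial illustration of the pigeonhole counting step described in the comments above (a sketch, independent of the dataset row): for the sample input there is one hole per value between the minimum and the maximum.
a = [8, 3, 2, 7, 4, 6, 8]
min_val, size = min(a), max(a) - min(a) + 1  # 7 holes, one for each value 2..8
holes = [0] * size
for x in a:
    holes[x - min_val] += 1
print(holes)  # [1, 1, 1, 0, 1, 1, 2] -> one 2, one 3, one 4, no 5, one 6, one 7, two 8s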
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
Project Euler Problem 135: https://projecteuler.net/problem=135
Given the positive integers, x, y, and z,
are consecutive terms of an arithmetic progression,
the least value of the positive integer, n,
for which the equation,
x^2 − y^2 − z^2 = n, has exactly two solutions is n = 27:
34^2 − 27^2 − 20^2 = 12^2 − 9^2 − 6^2 = 27
It turns out that n = 1155 is the least value
which has exactly ten solutions.
How many values of n less than one million
have exactly ten distinct solutions?
Taking x, y, z to be of the form a+d, a, a-d respectively,
the given equation reduces to a*(4d - a) = n.
We count the number of solutions for every n up to one million by fixing a;
n must then be a multiple of a.
The total number of steps is n*(1/1 + 1/2 + 1/3 + ... + 1/n),
so the time complexity is roughly O(n log n).
"""
def solution(limit: int = 1000000) -> int:
"""
returns the values of n less than or equal to the limit
have exactly ten distinct solutions.
>>> solution(100)
0
>>> solution(10000)
45
>>> solution(50050)
292
"""
limit = limit + 1
frequency = [0] * limit
for first_term in range(1, limit):
for n in range(first_term, limit, first_term):
common_difference = first_term + n / first_term
            if common_difference % 4:  # a + n/a (= 4*d) must be divisible by 4
continue
else:
common_difference /= 4
if (
first_term > common_difference
and first_term < 4 * common_difference
): # since x,y,z are positive integers
                    frequency[n] += 1  # z > 0 gives a > d, and n > 0 gives a < 4*d
count = sum(1 for x in frequency[1:limit] if x == 10)
return count
if __name__ == "__main__":
print(f"{solution() = }")
| """
Project Euler Problem 135: https://projecteuler.net/problem=135
Given the positive integers, x, y, and z,
are consecutive terms of an arithmetic progression,
the least value of the positive integer, n,
for which the equation,
x^2 − y^2 − z^2 = n, has exactly two solutions is n = 27:
34^2 − 27^2 − 20^2 = 12^2 − 9^2 − 6^2 = 27
It turns out that n = 1155 is the least value
which has exactly ten solutions.
How many values of n less than one million
have exactly ten distinct solutions?
Taking x, y, z to be of the form a+d, a, a-d respectively,
the given equation reduces to a*(4d - a) = n.
We count the number of solutions for every n up to one million by fixing a;
n must then be a multiple of a.
The total number of steps is n*(1/1 + 1/2 + 1/3 + ... + 1/n),
so the time complexity is roughly O(n log n).
"""
def solution(limit: int = 1000000) -> int:
"""
returns the values of n less than or equal to the limit
have exactly ten distinct solutions.
>>> solution(100)
0
>>> solution(10000)
45
>>> solution(50050)
292
"""
limit = limit + 1
frequency = [0] * limit
for first_term in range(1, limit):
for n in range(first_term, limit, first_term):
common_difference = first_term + n / first_term
            if common_difference % 4:  # a + n/a (= 4*d) must be divisible by 4
continue
else:
common_difference /= 4
if (
first_term > common_difference
and first_term < 4 * common_difference
): # since x,y,z are positive integers
                    frequency[n] += 1  # z > 0 gives a > d, and n > 0 gives a < 4*d
count = sum(1 for x in frequency[1:limit] if x == 10)
return count
if __name__ == "__main__":
print(f"{solution() = }")
| -1 |
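A quick numeric check of the algebraic reduction described in the docstring above (an editorial sketch using the problem's own example): with x, y, z = 34, 27, 20 we have a = 27 and d = 7, and a*(4d - a) reproduces x^2 − y^2 − z^2 = 27.
x, y, z = 34, 27, 20
a, d = y, x - y  # the three terms are a + d, a, a - d
assert (x, y, z) == (a + d, a, a - d)
assert x**2 - y**2 - z**2 == a * (4 * d - a) == 27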
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| import os
import sys
from . import rsa_key_generator as rkg
DEFAULT_BLOCK_SIZE = 128
BYTE_SIZE = 256
def get_blocks_from_text(
message: str, block_size: int = DEFAULT_BLOCK_SIZE
) -> list[int]:
message_bytes = message.encode("ascii")
block_ints = []
for block_start in range(0, len(message_bytes), block_size):
block_int = 0
for i in range(block_start, min(block_start + block_size, len(message_bytes))):
block_int += message_bytes[i] * (BYTE_SIZE ** (i % block_size))
block_ints.append(block_int)
return block_ints
def get_text_from_blocks(
block_ints: list[int], message_length: int, block_size: int = DEFAULT_BLOCK_SIZE
) -> str:
message: list[str] = []
for block_int in block_ints:
block_message: list[str] = []
for i in range(block_size - 1, -1, -1):
if len(message) + i < message_length:
ascii_number = block_int // (BYTE_SIZE**i)
block_int = block_int % (BYTE_SIZE**i)
block_message.insert(0, chr(ascii_number))
message.extend(block_message)
return "".join(message)
def encrypt_message(
message: str, key: tuple[int, int], block_size: int = DEFAULT_BLOCK_SIZE
) -> list[int]:
encrypted_blocks = []
n, e = key
for block in get_blocks_from_text(message, block_size):
encrypted_blocks.append(pow(block, e, n))
return encrypted_blocks
def decrypt_message(
encrypted_blocks: list[int],
message_length: int,
key: tuple[int, int],
block_size: int = DEFAULT_BLOCK_SIZE,
) -> str:
decrypted_blocks = []
n, d = key
for block in encrypted_blocks:
decrypted_blocks.append(pow(block, d, n))
return get_text_from_blocks(decrypted_blocks, message_length, block_size)
def read_key_file(key_filename: str) -> tuple[int, int, int]:
with open(key_filename) as fo:
content = fo.read()
key_size, n, eor_d = content.split(",")
return (int(key_size), int(n), int(eor_d))
def encrypt_and_write_to_file(
message_filename: str,
key_filename: str,
message: str,
block_size: int = DEFAULT_BLOCK_SIZE,
) -> str:
key_size, n, e = read_key_file(key_filename)
if key_size < block_size * 8:
sys.exit(
"ERROR: Block size is %s bits and key size is %s bits. The RSA cipher "
"requires the block size to be equal to or greater than the key size. "
"Either decrease the block size or use different keys."
% (block_size * 8, key_size)
)
encrypted_blocks = [str(i) for i in encrypt_message(message, (n, e), block_size)]
encrypted_content = ",".join(encrypted_blocks)
encrypted_content = f"{len(message)}_{block_size}_{encrypted_content}"
with open(message_filename, "w") as fo:
fo.write(encrypted_content)
return encrypted_content
def read_from_file_and_decrypt(message_filename: str, key_filename: str) -> str:
key_size, n, d = read_key_file(key_filename)
with open(message_filename) as fo:
content = fo.read()
message_length_str, block_size_str, encrypted_message = content.split("_")
message_length = int(message_length_str)
block_size = int(block_size_str)
if key_size < block_size * 8:
sys.exit(
"ERROR: Block size is %s bits and key size is %s bits. The RSA cipher "
"requires the block size to be equal to or greater than the key size. "
"Did you specify the correct key file and encrypted file?"
% (block_size * 8, key_size)
)
encrypted_blocks = []
for block in encrypted_message.split(","):
encrypted_blocks.append(int(block))
return decrypt_message(encrypted_blocks, message_length, (n, d), block_size)
def main() -> None:
filename = "encrypted_file.txt"
response = input(r"Encrypt\Decrypt [e\d]: ")
if response.lower().startswith("e"):
mode = "encrypt"
elif response.lower().startswith("d"):
mode = "decrypt"
if mode == "encrypt":
if not os.path.exists("rsa_pubkey.txt"):
rkg.make_key_files("rsa", 1024)
message = input("\nEnter message: ")
pubkey_filename = "rsa_pubkey.txt"
print(f"Encrypting and writing to {filename}...")
encrypted_text = encrypt_and_write_to_file(filename, pubkey_filename, message)
print("\nEncrypted text:")
print(encrypted_text)
elif mode == "decrypt":
privkey_filename = "rsa_privkey.txt"
print(f"Reading from {filename} and decrypting...")
decrypted_text = read_from_file_and_decrypt(filename, privkey_filename)
print("writing decryption to rsa_decryption.txt...")
with open("rsa_decryption.txt", "w") as dec:
dec.write(decrypted_text)
print("\nDecryption:")
print(decrypted_text)
if __name__ == "__main__":
main()
| import os
import sys
from . import rsa_key_generator as rkg
DEFAULT_BLOCK_SIZE = 128
BYTE_SIZE = 256
def get_blocks_from_text(
message: str, block_size: int = DEFAULT_BLOCK_SIZE
) -> list[int]:
message_bytes = message.encode("ascii")
block_ints = []
for block_start in range(0, len(message_bytes), block_size):
block_int = 0
for i in range(block_start, min(block_start + block_size, len(message_bytes))):
block_int += message_bytes[i] * (BYTE_SIZE ** (i % block_size))
block_ints.append(block_int)
return block_ints
def get_text_from_blocks(
block_ints: list[int], message_length: int, block_size: int = DEFAULT_BLOCK_SIZE
) -> str:
message: list[str] = []
for block_int in block_ints:
block_message: list[str] = []
for i in range(block_size - 1, -1, -1):
if len(message) + i < message_length:
ascii_number = block_int // (BYTE_SIZE**i)
block_int = block_int % (BYTE_SIZE**i)
block_message.insert(0, chr(ascii_number))
message.extend(block_message)
return "".join(message)
def encrypt_message(
message: str, key: tuple[int, int], block_size: int = DEFAULT_BLOCK_SIZE
) -> list[int]:
encrypted_blocks = []
n, e = key
for block in get_blocks_from_text(message, block_size):
encrypted_blocks.append(pow(block, e, n))
return encrypted_blocks
def decrypt_message(
encrypted_blocks: list[int],
message_length: int,
key: tuple[int, int],
block_size: int = DEFAULT_BLOCK_SIZE,
) -> str:
decrypted_blocks = []
n, d = key
for block in encrypted_blocks:
decrypted_blocks.append(pow(block, d, n))
return get_text_from_blocks(decrypted_blocks, message_length, block_size)
def read_key_file(key_filename: str) -> tuple[int, int, int]:
with open(key_filename) as fo:
content = fo.read()
key_size, n, eor_d = content.split(",")
return (int(key_size), int(n), int(eor_d))
def encrypt_and_write_to_file(
message_filename: str,
key_filename: str,
message: str,
block_size: int = DEFAULT_BLOCK_SIZE,
) -> str:
key_size, n, e = read_key_file(key_filename)
if key_size < block_size * 8:
sys.exit(
"ERROR: Block size is %s bits and key size is %s bits. The RSA cipher "
"requires the block size to be equal to or greater than the key size. "
"Either decrease the block size or use different keys."
% (block_size * 8, key_size)
)
encrypted_blocks = [str(i) for i in encrypt_message(message, (n, e), block_size)]
encrypted_content = ",".join(encrypted_blocks)
encrypted_content = f"{len(message)}_{block_size}_{encrypted_content}"
with open(message_filename, "w") as fo:
fo.write(encrypted_content)
return encrypted_content
def read_from_file_and_decrypt(message_filename: str, key_filename: str) -> str:
key_size, n, d = read_key_file(key_filename)
with open(message_filename) as fo:
content = fo.read()
message_length_str, block_size_str, encrypted_message = content.split("_")
message_length = int(message_length_str)
block_size = int(block_size_str)
if key_size < block_size * 8:
sys.exit(
"ERROR: Block size is %s bits and key size is %s bits. The RSA cipher "
"requires the block size to be equal to or greater than the key size. "
"Did you specify the correct key file and encrypted file?"
% (block_size * 8, key_size)
)
encrypted_blocks = []
for block in encrypted_message.split(","):
encrypted_blocks.append(int(block))
return decrypt_message(encrypted_blocks, message_length, (n, d), block_size)
def main() -> None:
filename = "encrypted_file.txt"
response = input(r"Encrypt\Decrypt [e\d]: ")
if response.lower().startswith("e"):
mode = "encrypt"
elif response.lower().startswith("d"):
mode = "decrypt"
if mode == "encrypt":
if not os.path.exists("rsa_pubkey.txt"):
rkg.make_key_files("rsa", 1024)
message = input("\nEnter message: ")
pubkey_filename = "rsa_pubkey.txt"
print(f"Encrypting and writing to {filename}...")
encrypted_text = encrypt_and_write_to_file(filename, pubkey_filename, message)
print("\nEncrypted text:")
print(encrypted_text)
elif mode == "decrypt":
privkey_filename = "rsa_privkey.txt"
print(f"Reading from {filename} and decrypting...")
decrypted_text = read_from_file_and_decrypt(filename, privkey_filename)
print("writing decryption to rsa_decryption.txt...")
with open("rsa_decryption.txt", "w") as dec:
dec.write(decrypted_text)
print("\nDecryption:")
print(decrypted_text)
if __name__ == "__main__":
main()
| -1 |
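An editorial sketch of the base-256 block packing used by the RSA file above (the helper names here are chosen for illustration; the file performs the same packing inside get_blocks_from_text and get_text_from_blocks): the message bytes become the little-endian digits of one large integer per block.
BYTE_SIZE = 256
def pack_block(message: bytes) -> int:
    # Interpret the bytes as little-endian base-256 digits.
    return sum(byte * BYTE_SIZE**i for i, byte in enumerate(message))
def unpack_block(block: int, length: int) -> bytes:
    out = bytearray()
    for i in range(length - 1, -1, -1):
        digit, block = divmod(block, BYTE_SIZE**i)
        out.insert(0, digit)
    return bytes(out)
block = pack_block(b"Hi")
assert block == 72 + 105 * 256 == 26952  # ord("H") + ord("i") * 256
assert unpack_block(block, 2) == b"Hi"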
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| -1 |
||
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
Project Euler Problem 129: https://projecteuler.net/problem=129
A number consisting entirely of ones is called a repunit. We shall define R(k) to be
a repunit of length k; for example, R(6) = 111111.
Given that n is a positive integer and GCD(n, 10) = 1, it can be shown that there
always exists a value, k, for which R(k) is divisible by n, and let A(n) be the least
such value of k; for example, A(7) = 6 and A(41) = 5.
The least value of n for which A(n) first exceeds ten is 17.
Find the least value of n for which A(n) first exceeds one-million.
"""
def least_divisible_repunit(divisor: int) -> int:
"""
Return the least value k such that the Repunit of length k is divisible by divisor.
>>> least_divisible_repunit(7)
6
>>> least_divisible_repunit(41)
5
>>> least_divisible_repunit(1234567)
34020
"""
if divisor % 5 == 0 or divisor % 2 == 0:
return 0
repunit = 1
repunit_index = 1
while repunit:
repunit = (10 * repunit + 1) % divisor
repunit_index += 1
return repunit_index
def solution(limit: int = 1000000) -> int:
"""
Return the least value of n for which least_divisible_repunit(n)
first exceeds limit.
>>> solution(10)
17
>>> solution(100)
109
>>> solution(1000)
1017
"""
divisor = limit - 1
if divisor % 2 == 0:
divisor += 1
while least_divisible_repunit(divisor) <= limit:
divisor += 2
return divisor
if __name__ == "__main__":
print(f"{solution() = }")
| """
Project Euler Problem 129: https://projecteuler.net/problem=129
A number consisting entirely of ones is called a repunit. We shall define R(k) to be
a repunit of length k; for example, R(6) = 111111.
Given that n is a positive integer and GCD(n, 10) = 1, it can be shown that there
always exists a value, k, for which R(k) is divisible by n, and let A(n) be the least
such value of k; for example, A(7) = 6 and A(41) = 5.
The least value of n for which A(n) first exceeds ten is 17.
Find the least value of n for which A(n) first exceeds one-million.
"""
def least_divisible_repunit(divisor: int) -> int:
"""
Return the least value k such that the Repunit of length k is divisible by divisor.
>>> least_divisible_repunit(7)
6
>>> least_divisible_repunit(41)
5
>>> least_divisible_repunit(1234567)
34020
"""
if divisor % 5 == 0 or divisor % 2 == 0:
return 0
repunit = 1
repunit_index = 1
while repunit:
repunit = (10 * repunit + 1) % divisor
repunit_index += 1
return repunit_index
def solution(limit: int = 1000000) -> int:
"""
Return the least value of n for which least_divisible_repunit(n)
first exceeds limit.
>>> solution(10)
17
>>> solution(100)
109
>>> solution(1000)
1017
"""
divisor = limit - 1
if divisor % 2 == 0:
divisor += 1
while least_divisible_repunit(divisor) <= limit:
divisor += 2
return divisor
if __name__ == "__main__":
print(f"{solution() = }")
| -1 |
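An editorial check of the repunit example quoted above (a sketch, not part of the dataset row): R(6) = 111111 is divisible by 7, and the same modular recurrence the solution uses recovers A(7) = 6 without building large repunits.
assert 111111 % 7 == 0  # R(6) is divisible by 7
k, repunit = 1, 1 % 7  # R(1) mod 7
while repunit:
    repunit = (10 * repunit + 1) % 7  # R(k+1) = 10*R(k) + 1
    k += 1
assert k == 6  # so A(7) = 6, matching the problem statement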
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
Project Euler Problem 10: https://projecteuler.net/problem=10
Summation of primes
The sum of the primes below 10 is 2 + 3 + 5 + 7 = 17.
Find the sum of all the primes below two million.
References:
- https://en.wikipedia.org/wiki/Prime_number
"""
import math
def is_prime(number: int) -> bool:
"""Checks to see if a number is a prime in O(sqrt(n)).
A number is prime if it has exactly two factors: 1 and itself.
    Returns a boolean representing the primality of the given number
    (True if the number is prime, False otherwise).
>>> is_prime(2)
True
>>> is_prime(3)
True
>>> is_prime(27)
False
>>> is_prime(2999)
True
>>> is_prime(0)
False
>>> is_prime(1)
False
"""
if 1 < number < 4:
# 2 and 3 are primes
return True
elif number < 2 or number % 2 == 0 or number % 3 == 0:
# Negatives, 0, 1, all even numbers, all multiples of 3 are not primes
return False
    # All primes greater than 3 are of the form 6k +/- 1
for i in range(5, int(math.sqrt(number) + 1), 6):
if number % i == 0 or number % (i + 2) == 0:
return False
return True
def solution(n: int = 2000000) -> int:
"""
Returns the sum of all the primes below n.
>>> solution(1000)
76127
>>> solution(5000)
1548136
>>> solution(10000)
5736396
>>> solution(7)
10
"""
return sum(num for num in range(3, n, 2) if is_prime(num)) + 2 if n > 2 else 0
if __name__ == "__main__":
print(f"{solution() = }")
| """
Project Euler Problem 10: https://projecteuler.net/problem=10
Summation of primes
The sum of the primes below 10 is 2 + 3 + 5 + 7 = 17.
Find the sum of all the primes below two million.
References:
- https://en.wikipedia.org/wiki/Prime_number
"""
import math
def is_prime(number: int) -> bool:
"""Checks to see if a number is a prime in O(sqrt(n)).
A number is prime if it has exactly two factors: 1 and itself.
    Returns a boolean representing the primality of the given number
    (True if the number is prime, False otherwise).
>>> is_prime(2)
True
>>> is_prime(3)
True
>>> is_prime(27)
False
>>> is_prime(2999)
True
>>> is_prime(0)
False
>>> is_prime(1)
False
"""
if 1 < number < 4:
# 2 and 3 are primes
return True
elif number < 2 or number % 2 == 0 or number % 3 == 0:
# Negatives, 0, 1, all even numbers, all multiples of 3 are not primes
return False
    # All primes greater than 3 are of the form 6k +/- 1
for i in range(5, int(math.sqrt(number) + 1), 6):
if number % i == 0 or number % (i + 2) == 0:
return False
return True
def solution(n: int = 2000000) -> int:
"""
Returns the sum of all the primes below n.
>>> solution(1000)
76127
>>> solution(5000)
1548136
>>> solution(10000)
5736396
>>> solution(7)
10
"""
return sum(num for num in range(3, n, 2) if is_prime(num)) + 2 if n > 2 else 0
if __name__ == "__main__":
print(f"{solution() = }")
| -1 |
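A small editorial check of the 6k +/- 1 trial division noted in the comment above (a sketch): once multiples of 2 and 3 are rejected, every remaining candidate divisor is congruent to 1 or 5 modulo 6, which is why the loop starts at 5, steps by 6, and also tests i + 2.
candidates = [i for i in range(5, 40) if i % 2 and i % 3]
assert candidates == [5, 7, 11, 13, 17, 19, 23, 25, 29, 31, 35, 37]
assert all(i % 6 in (1, 5) for i in candidates)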
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| #
| #
| -1 |
TheAlgorithms/Python | 8,546 | Fix broken links to Gitter community | ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| ChVeen | "2023-03-25T21:52:30Z" | "2023-03-26T16:20:47Z" | 7cdb011ba440a07768179bfaea190bddefc890d8 | 86b2ab09aab359ef1b4bea58ed3c1fdf5b989500 | Fix broken links to Gitter community. ### Describe your change:
The link <https://gitter.im/TheAlgorithms> as used in the `README.md` and `CONTRIBUTING.md` files is invalid and returns the error `404 Not Found`.
I fixed the link to the working version <https://gitter.im/TheAlgorithms/community>.
This PR fixes issue #8197
* [ ] Add an algorithm?
* [ ] Fix a bug or typo in an existing algorithm?
* [x] Documentation change?
### Checklist:
* [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md).
* [x] This pull request is all my own work -- I have not plagiarized.
* [x] I know that pull requests will not be merged if they fail the automated tests.
* [ ] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms.
* [ ] All new Python files are placed inside an existing directory.
* [ ] All filenames are in all lowercase characters with no spaces or dashes.
* [ ] All functions and variable names follow Python naming conventions.
* [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html).
* [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing.
* [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation.
* [x] If this pull request resolves one or more open issues then the commit message contains `Fixes: #{$ISSUE_NO}`.
| """
Prime permutations
Problem 49
The arithmetic sequence, 1487, 4817, 8147, in which each of
the terms increases by 3330, is unusual in two ways:
(i) each of the three terms are prime,
(ii) each of the 4-digit numbers are permutations of one another.
There are no arithmetic sequences made up of three 1-, 2-, or 3-digit primes,
exhibiting this property, but there is one other 4-digit increasing sequence.
What 12-digit number do you form by concatenating the three terms in this sequence?
Solution:
First, we need to generate all 4-digit prime numbers. Then, for each of them,
we form new numbers from every permutation of its digits and use binary search
to check whether each permuted number is also in our prime list, collecting
the matches in a candidate list.
After that, we brute-force all passed candidate sequences using
3 nested loops, since we know the answer will be 12 digits.
The brute force in this solution takes about 1 second.
"""
import math
from itertools import permutations
def is_prime(number: int) -> bool:
"""Checks to see if a number is a prime in O(sqrt(n)).
A number is prime if it has exactly two factors: 1 and itself.
>>> is_prime(0)
False
>>> is_prime(1)
False
>>> is_prime(2)
True
>>> is_prime(3)
True
>>> is_prime(27)
False
>>> is_prime(87)
False
>>> is_prime(563)
True
>>> is_prime(2999)
True
>>> is_prime(67483)
False
"""
if 1 < number < 4:
# 2 and 3 are primes
return True
elif number < 2 or number % 2 == 0 or number % 3 == 0:
# Negatives, 0, 1, all even numbers, all multiples of 3 are not primes
return False
    # All primes greater than 3 are of the form 6k +/- 1
for i in range(5, int(math.sqrt(number) + 1), 6):
if number % i == 0 or number % (i + 2) == 0:
return False
return True
def search(target: int, prime_list: list) -> bool:
"""
function to search a number in a list using Binary Search.
>>> search(3, [1, 2, 3])
True
>>> search(4, [1, 2, 3])
False
>>> search(101, list(range(-100, 100)))
False
"""
left, right = 0, len(prime_list) - 1
while left <= right:
middle = (left + right) // 2
if prime_list[middle] == target:
return True
elif prime_list[middle] < target:
left = middle + 1
else:
right = middle - 1
return False
def solution():
"""
Return the solution of the problem.
>>> solution()
296962999629
"""
prime_list = [n for n in range(1001, 10000, 2) if is_prime(n)]
candidates = []
for number in prime_list:
tmp_numbers = []
for prime_member in permutations(list(str(number))):
prime = int("".join(prime_member))
if prime % 2 == 0:
continue
if search(prime, prime_list):
tmp_numbers.append(prime)
tmp_numbers.sort()
if len(tmp_numbers) >= 3:
candidates.append(tmp_numbers)
passed = []
for candidate in candidates:
length = len(candidate)
found = False
for i in range(length):
for j in range(i + 1, length):
for k in range(j + 1, length):
if (
abs(candidate[i] - candidate[j])
== abs(candidate[j] - candidate[k])
and len({candidate[i], candidate[j], candidate[k]}) == 3
):
passed.append(
sorted([candidate[i], candidate[j], candidate[k]])
)
found = True
if found:
break
if found:
break
if found:
break
answer = set()
for seq in passed:
answer.add("".join([str(i) for i in seq]))
return max(int(x) for x in answer)
if __name__ == "__main__":
print(solution())
| """
Prime permutations
Problem 49
The arithmetic sequence, 1487, 4817, 8147, in which each of
the terms increases by 3330, is unusual in two ways:
(i) each of the three terms are prime,
(ii) each of the 4-digit numbers are permutations of one another.
There are no arithmetic sequences made up of three 1-, 2-, or 3-digit primes,
exhibiting this property, but there is one other 4-digit increasing sequence.
What 12-digit number do you form by concatenating the three terms in this sequence?
Solution:
First, we need to generate all 4-digit prime numbers. Then, for each of them,
we form new numbers from every permutation of its digits and use binary search
to check whether each permuted number is also in our prime list, collecting
the matches in a candidate list.
After that, we brute-force all passed candidate sequences using
3 nested loops, since we know the answer will be 12 digits.
The brute force in this solution takes about 1 second.
"""
import math
from itertools import permutations
def is_prime(number: int) -> bool:
"""Checks to see if a number is a prime in O(sqrt(n)).
A number is prime if it has exactly two factors: 1 and itself.
>>> is_prime(0)
False
>>> is_prime(1)
False
>>> is_prime(2)
True
>>> is_prime(3)
True
>>> is_prime(27)
False
>>> is_prime(87)
False
>>> is_prime(563)
True
>>> is_prime(2999)
True
>>> is_prime(67483)
False
"""
if 1 < number < 4:
# 2 and 3 are primes
return True
elif number < 2 or number % 2 == 0 or number % 3 == 0:
# Negatives, 0, 1, all even numbers, all multiples of 3 are not primes
return False
    # All primes greater than 3 are of the form 6k +/- 1
for i in range(5, int(math.sqrt(number) + 1), 6):
if number % i == 0 or number % (i + 2) == 0:
return False
return True
def search(target: int, prime_list: list) -> bool:
"""
function to search a number in a list using Binary Search.
>>> search(3, [1, 2, 3])
True
>>> search(4, [1, 2, 3])
False
>>> search(101, list(range(-100, 100)))
False
"""
left, right = 0, len(prime_list) - 1
while left <= right:
middle = (left + right) // 2
if prime_list[middle] == target:
return True
elif prime_list[middle] < target:
left = middle + 1
else:
right = middle - 1
return False
def solution():
"""
Return the solution of the problem.
>>> solution()
296962999629
"""
prime_list = [n for n in range(1001, 10000, 2) if is_prime(n)]
candidates = []
for number in prime_list:
tmp_numbers = []
for prime_member in permutations(list(str(number))):
prime = int("".join(prime_member))
if prime % 2 == 0:
continue
if search(prime, prime_list):
tmp_numbers.append(prime)
tmp_numbers.sort()
if len(tmp_numbers) >= 3:
candidates.append(tmp_numbers)
passed = []
for candidate in candidates:
length = len(candidate)
found = False
for i in range(length):
for j in range(i + 1, length):
for k in range(j + 1, length):
if (
abs(candidate[i] - candidate[j])
== abs(candidate[j] - candidate[k])
and len({candidate[i], candidate[j], candidate[k]}) == 3
):
passed.append(
sorted([candidate[i], candidate[j], candidate[k]])
)
found = True
if found:
break
if found:
break
if found:
break
answer = set()
for seq in passed:
answer.add("".join([str(i) for i in seq]))
return max(int(x) for x in answer)
if __name__ == "__main__":
print(solution())
| -1 |
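An editorial check of the example sequence quoted in the Problem 49 statement above (a sketch): 1487, 4817, 8147 are digit permutations of one another and form an arithmetic progression with common difference 3330.
seq = (1487, 4817, 8147)
assert sorted(str(seq[0])) == sorted(str(seq[1])) == sorted(str(seq[2]))
assert seq[1] - seq[0] == seq[2] - seq[1] == 3330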