repo_name: stringclasses (1 value)
pr_number: int64 (4.12k to 11.2k)
pr_title: stringlengths (9 to 107)
pr_description: stringlengths (107 to 5.48k)
author: stringlengths (4 to 18)
date_created: unknown
date_merged: unknown
previous_commit: stringlengths (40 to 40)
pr_commit: stringlengths (40 to 40)
query: stringlengths (118 to 5.52k)
before_content: stringlengths (0 to 7.93M)
after_content: stringlengths (0 to 7.93M)
label: int64 (-1 to 1)
TheAlgorithms/Python
9,358
Removed redundant greatest_common_divisor code
### Describe your change: Previously, all the files that needed greatest_common_divisor (aka gcd) used to define the method instead of just importing it from the Maths directory's greatest_common_divisor.py file. I removed those definitions and imported the gcd method from the Maths folder. Fixes #8098 * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [x] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
Siddikpatel
"2023-10-02T05:03:50Z"
"2023-10-09T12:19:12Z"
876087be998d5b366d68cbb9394b6b92b7f619f6
583a614fefaa9c932e6d650abfea2eaa75a93b05
Removed redundant greatest_common_divisor code. ### Describe your change: Previously, all the files that needed greatest_common_divisor (aka gcd) used to define the method instead of just importing it from the Maths directory's greatest_common_divisor.py file. I removed those definitions and imported the gcd method from the Maths folder. Fixes #8098 * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [x] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
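The refactor this PR describes replaces per-file gcd definitions with a single shared import. A minimal sketch of what such a change looks like, assuming the shared helper lives at maths/greatest_common_divisor.py and is named greatest_common_divisor (both assumed from the description above, not taken from the PR's actual diff):

```python
# Before the PR, individual files defined their own helper (removed by this change):
#
#     def greatest_common_divisor(a: int, b: int) -> int:
#         while b:
#             a, b = b, a % b
#         return a
#
# After the PR, the shared implementation is imported instead
# (module path and function name are assumptions, not read from the diff):
from maths.greatest_common_divisor import greatest_common_divisor

print(greatest_common_divisor(12, 18))  # 6
```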
""" Reference: https://www.investopedia.com/terms/p/presentvalue.asp An algorithm that calculates the present value of a stream of yearly cash flows given... 1. The discount rate (as a decimal, not a percent) 2. An array of cash flows, with the index of the cash flow being the associated year Note: This algorithm assumes that cash flows are paid at the end of the specified year """ def present_value(discount_rate: float, cash_flows: list[float]) -> float: """ >>> present_value(0.13, [10, 20.70, -293, 297]) 4.69 >>> present_value(0.07, [-109129.39, 30923.23, 15098.93, 29734,39]) -42739.63 >>> present_value(0.07, [109129.39, 30923.23, 15098.93, 29734,39]) 175519.15 >>> present_value(-1, [109129.39, 30923.23, 15098.93, 29734,39]) Traceback (most recent call last): ... ValueError: Discount rate cannot be negative >>> present_value(0.03, []) Traceback (most recent call last): ... ValueError: Cash flows list cannot be empty """ if discount_rate < 0: raise ValueError("Discount rate cannot be negative") if not cash_flows: raise ValueError("Cash flows list cannot be empty") present_value = sum( cash_flow / ((1 + discount_rate) ** i) for i, cash_flow in enumerate(cash_flows) ) return round(present_value, ndigits=2) if __name__ == "__main__": import doctest doctest.testmod()
""" Reference: https://www.investopedia.com/terms/p/presentvalue.asp An algorithm that calculates the present value of a stream of yearly cash flows given... 1. The discount rate (as a decimal, not a percent) 2. An array of cash flows, with the index of the cash flow being the associated year Note: This algorithm assumes that cash flows are paid at the end of the specified year """ def present_value(discount_rate: float, cash_flows: list[float]) -> float: """ >>> present_value(0.13, [10, 20.70, -293, 297]) 4.69 >>> present_value(0.07, [-109129.39, 30923.23, 15098.93, 29734,39]) -42739.63 >>> present_value(0.07, [109129.39, 30923.23, 15098.93, 29734,39]) 175519.15 >>> present_value(-1, [109129.39, 30923.23, 15098.93, 29734,39]) Traceback (most recent call last): ... ValueError: Discount rate cannot be negative >>> present_value(0.03, []) Traceback (most recent call last): ... ValueError: Cash flows list cannot be empty """ if discount_rate < 0: raise ValueError("Discount rate cannot be negative") if not cash_flows: raise ValueError("Cash flows list cannot be empty") present_value = sum( cash_flow / ((1 + discount_rate) ** i) for i, cash_flow in enumerate(cash_flows) ) return round(present_value, ndigits=2) if __name__ == "__main__": import doctest doctest.testmod()
-1
TheAlgorithms/Python
9,358
Removed redundant greatest_common_divisor code
### Describe your change: Previously, all the files that needed greatest_common_divisor (aka gcd) used to define the method instead of just importing it from the Maths directory's greatest_common_divisor.py file. I removed those definitions and imported the gcd method from the Maths folder. Fixes #8098 * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [x] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
Siddikpatel
"2023-10-02T05:03:50Z"
"2023-10-09T12:19:12Z"
876087be998d5b366d68cbb9394b6b92b7f619f6
583a614fefaa9c932e6d650abfea2eaa75a93b05
Removed redundant greatest_common_divisor code. ### Describe your change: Previously, all the files that needed greatest_common_divisor (aka gcd) used to define the method instead of just importing it from the Maths directory's greatest_common_divisor.py file. I removed those definitions and imported the gcd method from the Maths folder. Fixes #8098 * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [x] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
"""Prime Check.""" import math import unittest def is_prime(number: int) -> bool: """Checks to see if a number is a prime in O(sqrt(n)). A number is prime if it has exactly two factors: 1 and itself. >>> is_prime(0) False >>> is_prime(1) False >>> is_prime(2) True >>> is_prime(3) True >>> is_prime(27) False >>> is_prime(87) False >>> is_prime(563) True >>> is_prime(2999) True >>> is_prime(67483) False """ # precondition assert isinstance(number, int) and ( number >= 0 ), "'number' must been an int and positive" if 1 < number < 4: # 2 and 3 are primes return True elif number < 2 or number % 2 == 0 or number % 3 == 0: # Negatives, 0, 1, all even numbers, all multiples of 3 are not primes return False # All primes number are in format of 6k +/- 1 for i in range(5, int(math.sqrt(number) + 1), 6): if number % i == 0 or number % (i + 2) == 0: return False return True class Test(unittest.TestCase): def test_primes(self): self.assertTrue(is_prime(2)) self.assertTrue(is_prime(3)) self.assertTrue(is_prime(5)) self.assertTrue(is_prime(7)) self.assertTrue(is_prime(11)) self.assertTrue(is_prime(13)) self.assertTrue(is_prime(17)) self.assertTrue(is_prime(19)) self.assertTrue(is_prime(23)) self.assertTrue(is_prime(29)) def test_not_primes(self): with self.assertRaises(AssertionError): is_prime(-19) self.assertFalse( is_prime(0), "Zero doesn't have any positive factors, primes must have exactly two.", ) self.assertFalse( is_prime(1), "One only has 1 positive factor, primes must have exactly two.", ) self.assertFalse(is_prime(2 * 2)) self.assertFalse(is_prime(2 * 3)) self.assertFalse(is_prime(3 * 3)) self.assertFalse(is_prime(3 * 5)) self.assertFalse(is_prime(3 * 5 * 7)) if __name__ == "__main__": unittest.main()
"""Prime Check.""" import math import unittest def is_prime(number: int) -> bool: """Checks to see if a number is a prime in O(sqrt(n)). A number is prime if it has exactly two factors: 1 and itself. >>> is_prime(0) False >>> is_prime(1) False >>> is_prime(2) True >>> is_prime(3) True >>> is_prime(27) False >>> is_prime(87) False >>> is_prime(563) True >>> is_prime(2999) True >>> is_prime(67483) False """ # precondition assert isinstance(number, int) and ( number >= 0 ), "'number' must been an int and positive" if 1 < number < 4: # 2 and 3 are primes return True elif number < 2 or number % 2 == 0 or number % 3 == 0: # Negatives, 0, 1, all even numbers, all multiples of 3 are not primes return False # All primes number are in format of 6k +/- 1 for i in range(5, int(math.sqrt(number) + 1), 6): if number % i == 0 or number % (i + 2) == 0: return False return True class Test(unittest.TestCase): def test_primes(self): self.assertTrue(is_prime(2)) self.assertTrue(is_prime(3)) self.assertTrue(is_prime(5)) self.assertTrue(is_prime(7)) self.assertTrue(is_prime(11)) self.assertTrue(is_prime(13)) self.assertTrue(is_prime(17)) self.assertTrue(is_prime(19)) self.assertTrue(is_prime(23)) self.assertTrue(is_prime(29)) def test_not_primes(self): with self.assertRaises(AssertionError): is_prime(-19) self.assertFalse( is_prime(0), "Zero doesn't have any positive factors, primes must have exactly two.", ) self.assertFalse( is_prime(1), "One only has 1 positive factor, primes must have exactly two.", ) self.assertFalse(is_prime(2 * 2)) self.assertFalse(is_prime(2 * 3)) self.assertFalse(is_prime(3 * 3)) self.assertFalse(is_prime(3 * 5)) self.assertFalse(is_prime(3 * 5 * 7)) if __name__ == "__main__": unittest.main()
-1
TheAlgorithms/Python
9,358
Removed redundant greatest_common_divisor code
### Describe your change: Previously, all the files that needed greatest_common_divisor (aka gcd) used to define the method instead of just importing it from the Maths directory's greatest_common_divisor.py file. I removed those definitions and imported the gcd method from the Maths folder. Fixes #8098 * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [x] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
Siddikpatel
"2023-10-02T05:03:50Z"
"2023-10-09T12:19:12Z"
876087be998d5b366d68cbb9394b6b92b7f619f6
583a614fefaa9c932e6d650abfea2eaa75a93b05
Removed redundant greatest_common_divisor code. ### Describe your change: Previously, all the files that needed greatest_common_divisor (aka gcd) used to define the method instead of just importing it from the Maths directory's greatest_common_divisor.py file. I removed those definitions and imported the gcd method from the Maths folder. Fixes #8098 * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [x] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
JFIFC     C  "  L !1AQ"aq2 #BR$3bCr4DS %&5cs<!1AQ"aq2#B3R$br4 ?N49 Ԑz?{!yV4J&9gPZs)o9D`'>vO.~l-yl6؃&vg*Z;R>UTD|~dZB> sը);Iȍb˭%4I`~ֶMvNxNNa6Cָ!K4K;rkg=VtIB4ʂ]5&<Zŕ.ց9ԓ=3Í/u Bܙ4-, \&T^#SNϣҐ<CRb>lKt2B,"B΅C9؂-ʅI$ӕ5Dh ay 2j2Nץ5Zʛ%2|@7NRu K'Q(ZBBBwsnm_mP :Nn$,AܒOϺ4*93"<WXo9]В%|^zDGwB  &4jm%OPV6vAXUQH)IJs-KH$e^sKvZ%H$@ bu$D|c :PiH 78g!ФƇk@>Gcu4TAK O+CAKpĥ<B,D sk~eD:T{Cϗם{/ n%!˩Iנ AHXBe–͐i_,)Ƴ8S6q+rT-*Y1[~zR$"H}U YOC?K}RPgM<!6NKmLP 'Y*7I2O!d>L\@1oƲ 饦麁s^PA9_+Xb>?'JJJ\ԡ IuuSJLV|[n9\yhԲx\/!ne^CMm+w*RtN6J#0L>KV]{ Au *IHRڢ{K{M* ꖣ07rYP KnF4X)XKn&EЍto-\e%CCoJSa.xAyoΊ^">iisEC hHMLkt^R oiF Ioa $,x`'Cî)GA s?{$2][*[fS*Iͩ)T2J |:ya^ (j(n g]'elғ*x<d:(jL ":u}2t ̥﯐?bsR1ܢӀӤ)9Zu 6Kf ˖m- }9|pD:ہ@ A>ZͻV}BIDh:mjg+kD{I/ u0|ڋ,HdO[g܆XK2mmp8R`9ءS.%Nwrp01VIJ@3ݑw:RN-@coo!>ebDq}K&D}ej cADM㍩PJO0ck+^7MM^.ռD[JN*(CJ'VRըTNv3ķ5=c/6 ,.c/ 6A3v?dWx7C qJap \z~eM<h#Q^ݰZKrx-}xm2*!tI'sc#4vϒڦ[;Um+TUg)v!é9I;sm!=@([mm>)A˷>{-Gw0d\m-ތ̋ymԤ&4H2v%hPs)*W(rz}OpxVFOCLy(}5U.b6 y $APk*S%&R%Z֥u.6@$3VKU ʒA iem6ܪ}k"i]er4g+b5tHFPvոlc(vR+ ZFҵg >!{Jv08*lV{x[; |B/kޯWt-KI<a;IRKY۔!~FjiBe~&?ChTDr}-ޡf"'arSnc!&6RIrRcc{*_B'R E.BT F}ֵTٶ)tL@Fa(}*FuY uOhG g2aT2$#NB3;lIZ*EP?F!ًn&?kS~`R5Iצlu<iSK Ԙ'pʑpӴ"Ώ4پ<Y Oݭ: 4;~{h-R~!HvOxH4>(KJH:ӗQ6ZqNM<t+eu*qM+myH1;YqIM1[$c@::h,XekeYcI<u@o17XH-4Z@Vzo8%ܶ@B[Ȳ@5KuDj& uiZLi%KQ.X8j^ˆII)Ωg}֒i!A[ JIFr-LVh!* "MƤҩ sBLw$OnV>JѦA)뾞vU C--9Ak4YS' <OГeEBd€OtoKUt u =4.;24nːI HeBR@k/wN`Y ͘e">[ovnQ $O;ӭ $(ײgCRc<5YVb::V—;+DZrD+IJtBb5鵾 .:.! !@$z=&5u LOW'ĐHO+BԼ9QRοMن5}ԒRxWZP=_x\ZwOvvgЍ-(g^Ӻۣ:+ {I$_o C!4TtIˑzgNs3;< nR[^K̰H[[xZvUD4 (%)N]t u7t)iC4'Q>bv̚5-$(|\l|HBJIPЍ /;X Eq_{ʂ I\(fԧM x ZyV\R$'Ym'8RTtߡ em,6 ԡ( $[ x0}mC`CI gy(%A$ z.VVkd- Z|[$[xCWx m, G‘FPwHJVIu ܾ)iBx :Z i4V))x[54JҜs&A+ L-{JqTị P.a ^%4d46E-9;̦yڻʰnxAz! ( nN_ ()ʣ+6WRb硓6 :h~̓}X-6TmȎ T@-5BHrkxt66OQdD"{)*k? ,|e_r J/lڥ,aiL2Np:uDLa Q! FUNs6;!6`A:`ݐTIHO_vod%YS1̉oQAIYe@G+pPv<Sr 1E^T]|)]TF~A&TJ1ftExz9zЏgZwCTS6;t2弑XV6sړweYSD Ċ<cP+$+kZ=7yI|셆FG73k/qSa;⪼/z!=KTPp 7AiT I" [T~%ݚjhK.S 'DL"XBRᵐ JծmnE^BKǏ8u[j鄌∩iʀi`ϧ캝l@-!5ЍdiX) ӄHrLs>VQ!j3 tm41ilnbFauw5-MItg HKAH!S}h%H!Y-5t]Ԟ^vr"9zy!n]W4x'RRSS* 4Jh5_aѶV klo%)'çnʟAxHv As;eo:N($NR@;͵:$"is)65+KuwOW^S$$g42*+Ҳ3oޖŸqYr/1mZh1swi&[lZJ8bv}2 $#_i  Roޜ.xوLx@;VeOJ@')vXQ׺^J5QJUaVi .li;OD@4:M+ؠ|e5xեKI0Tg4Sy9Ao.׺5iNe7Ћoc+e7y`Q~.yXDG_A4J3 Ig?ѩ4/,4AtrX5ni! fM케_JO3MH=BOפּjVt$x:ibۉ4eWKX;HNn0a )EU<PSI:;$꽣U@;y͔jkVpRIBI:ɄPRX$)!BNF`x\H}b H~+]~$"$n4u},9J a"9?[,ڋ*z"Jɝcکu+z̍IIהK=}qa!+>?m[4e*Sm"PHM6tiVe@'iKKIVNv%&F|f_mֵBb?fJK 19ƻG_M,%IIG y3Z+T6 RO(x6 *h$z,51qI}>[},L!E$'h>οh jž&orUO/<ӥ:s#b&iiK 2(@='MuX|tAAeBDx+X2uC!E2T[moN2Z SRt/)@9L(fIۂ)zNΐLm)uJu=Ak*΅37<>Fj!-Z\vmmKao%)$γz[>ð BOx`gRs&G=t1xskffS/ r#S Z͸]$y(V IJ@ח7Z\x!fRe yf.qISH ^( ^V{0q(*u4&P*ݞg'^ xǰbSou=adIYNΒ_HpJ(h'h˯WK8穣S%“LHAJRi?G+sG1Mթ I h*tY#x DfIE[O[}Jp $V),YIʐ7 q.wnNRkFXOzṭk#qn Ɂ.- !TBUTQQe RQH&D;|%&H)"sk-2`lN>ՒsHYMՔ(0u;i.m%0PI1b@(JTe)[B>J]HBLISkh 5l6eVΠϟOp e`ul1zTwPGq:Z{>RTmo7US[pɅ^-d;~-h(+6[VDăa!weiP930L^[HH$ JD&zٍ%XZ}Lur>#! 
h#g` J0P9uw4c _;55TzoN(4*|塽{)I Hēӝqg6uMӶ[j @W^j뒔ȃߔs|2]%}},+2H9LL}w-{wu-ۇ0"$G?QoFcLuQfu/:Vt)0k s)6=Qf" ^ U5i AqC4F.=Nw/ ̅ml#mwmb.OMC!giJhR*2 t&ڬXi<~vk*E+ʢ i U]ߚGZ]Y$L<cMC#*[(~#0ErQVФT@gbR!Fg5Oe&` ī*]qE@j<䏱]iPU@P&)5,w3xQ4l }~) j*aZ"H{^Z5+v"zVi)iM\vK<L󏖖a*<}M<?[j4tZD²@1[&ֹ][Z*RyP 'fpm}LS='E}Jv}*^䨨nXQ.+R)KX'Քt 6ʑ)ԝdH<6?2\:oԼԴ!J % T^}E9ZH'q: ^ a jHNsNv`^.<~ͨ  ػ;@@vRA3[ %>&\h}Jh:/oniP #~{Tu7[ܢGjǘ6 |GM ,OqD K \RPsNsB׈s̭Md6Rbe8hA4gJRuO+a*t{DܵxQB#~+H-*J`4W_~<D^{*J@$Ln80%Dz7ziT9By;h)p$kFH4T0V]UO{}9}Lt?qb{I[P YR`wKyȉ)ԍ`+먤Ҳۨ@ZBHW]tv쀠㑍yhr[!(T:=`=5-yw2Vmz|&!]!|F51YsK#-XRJij3=$!]M7zpH ,5KRrpʑSNsi۵% ͩL5#t$"X !DGM0,?ԏ3hUd\%iP3:ǦoR[FnJP@;yGc^K Eġ.:@@-d&T_l8N-*>Ӹ[S#2U<`w_hVІ!Fsy4e@:6)e#BLh4?;)Lֱr2 Iimʞv=hT;N[q@ NlG-6ʻh7QϮ 5q^Ub T ꒋʊ ((锁;? L;Ë.V>$-~`Ok+t~\r\P%eBOMw x/)4%Dnrf8WnwL0ٕ)jpO$s7r.FYI ruIica3u_UT "hi<!|-e/Ku22ΚΣb猪Cf@SKVQq5H#Q"؅)%)Rbtϙ ~QZޤ:E& (v##,ppERSW_Ǯ,=i[ C2AQ2tTuĪW*UOU mE!BD!"@T0Jjjh6N3g 6 f$'*Sv"j  Qj7xI\q,іEmܘ[ #*`g.eɽ+\C3:A^S'$_YBin7J(d) s0vLヲh  (TeI21jN]E.> jt믲KWMt3Z[Z\S c4l;7#]5*HBDH>Ǘm `&3-uo*i $:2)I-<RuW>#Btt뚌5UTg/k/0ewپn U#rNkVTYmjRVC1on"?JO]NɹC~+eМԯ)<ž7y}x?8y (Rީ}!@ HyʶJ y}͙clx9x;qΪJ[yI*;[YuM&Oq4o9tZ*BAi5R!)^ %WL= Y qH!p d)PɕAITߟaL`lkyu{$}+D^O& m’rj7|b++S*P HdzZvMweD:;|m?quKNRR:ykaνԸhQr&cŸ^JKRR` be9bC׽í%-%(mi֒H*ghfx|ZB"g ^@^3*gyHQ8D<;I! ;vqEX>锨gTUedRU ߵ{_̳N0BITПMjX$$kC"AMϜmQXhBj2Mr(>ZT^* IHlٚpT2m㖕 3 <ZS0mk :x&4vneυ $ 9G.J՗)Jyڽ h+ۥE %]I?[[k+bwݽ:$|[4ӋAJ#m:Y:hJD'~_bսhUShoH$baD"ؓ P⋑뾡AZcgC WS.,xi󶳸5̔G6#&qjl*nr۶fWGbJ*0帕CBا_ida1E`i&$; |m?ݐGTދ@nmJ!nT+Dӭ+Uy ԲI"w1/n-j[H-LXܧ a~މY/ZַV㲵g^}l(@ e,%V;] w1J!$F"}9θhBd@~= %lsW:RPJBr:_/!%Ax^ Qt0 Dv7jR Os6g$H@vmN4iQu ]KCC̍ ?[yS(j"4e骪^ (RTƠf5S>IV<K$#d!#$ގ񫲁i)9utaĬX!$$]TAPH$;u>ORJ)H &co(JGRӰv^?9.躝ZXJ7(3uS-ƕ zͳ [|yչ)ǁNX:AБڋRryquV:l9`ꯂgLZs /ݨ(aGY>TV7sf4L$ DzשS.R}6:6 Y B7nH.n7N[;qT]j*BbR[@ /l)7eB[9JI, ќ9dzCVt%Iϩ ii)i.rA<`fLԡ 'c|>hBRXP uPIÝe3OvT 9,N쪐RV|YA* #oRiʵ)M(s y|)md)h2J3{}s':Ql-e(VR3)&>͠f TH6zŦ:) $ r<絅XЧT2KHR)~#[%YdftiyV$@$~Rm R (]EԺZ.I2N^z2[i$/A|ͣsw) $,(2$>C[Rj4m' ZJUm#@H8ꏅ7UUR՛[XZ*^g(<vw%j^BuZ J4h0bz뢺Jei)SG=Iᬙ'lF\~EV}5(<$MF5xOʓ9I_+K)>n6)yFB "H/fr"z ;iJZdа%c1Ik:t|Ei7KեST! &'_Kw&4C;`59'[A>[SirTT&QtyDW:,4CMvsf*:moi}=T4*y%Y`vSkJ=x&4:2R7 R~ZMݵZP)t6g$A9F`/kR5JC/8Y'z2<3yxL9EOݗz]C'8C {:Ƒ lleqWL"պq-$3:ZibAMBB2 AMQ:@hXJ@.(ND@='_tfj즤q;JLT߃)m*v-9r׳v2ms%IIR-}MBOv&[[Ԃt׭pQJhh/RlTt@nz뗒UF'4. oAA dlvvit>)ЧY%P&d ,ґ*lT-Y.[H%bIP[1){Ր ].E-v3>YȲYSwKI!ʊJ3ʐ{Nk7ҊZB Xdh5s#ڣ..#²5Sm]}GP\zq 秭mrԅҵ b+PŠt5.SsF].ʫ-3e&C5ܝNkP螻jIP!C3cYX9=x_x4u}[?-\gPmhWvI Eh?@AQmʦ<;]5qj (x۞q&iK_vTw.+dGKJu|?&8In*! H'L4zZ6MeQhdfeo%AZLbtk1fiRTXA Bg_/6evFdy^T25X2**p %sM}uuZ])L[p)(.۪@o-)"TsLԭ@BU}rD+!M2[Aw8Sn$(l-77CI]3enGlw %Ts @B $fQ$k61,;Մ%Dk:ם-j͹ꀧ'ƻo\:̤k'զ ǔuaTDA}#kJ^t"RR)>KSQ :1G6Q̴7jBR"f9筿6Pj++xdsPN5'pL 1VmXФI_@~P#EJLthJe)V[ZqP-#eC3D&&\71L'20Uvgt,<U$+]*i|"v1QݺˡRkK#\_5);ҕ+N]7$#7fqRilkYA#[T.8cn:\toegʹ4|oEZxrY)hdl8%#ݼ8&k"dhn#ٽ$s2PAHZA'XZ`WTBI)%) '[v*\&>ĝ|%QLR/,g<1IsS@rMUս]MNݭ`ƞ@g)hjRm$&t`|ine_5Um4*tF甃oUv(RA*$AGy_V4mҷ@ԄA6Ij-epNs[d9yw =})J2tYւx/$a&sIs=,ik v](ZҨ3DiWb8ĺ<;F*O% gUOFÙSK;Q?5s'RL|[c'u\E;SZfLm'h(.)%:0<S4kR 'M anu !on ԶQ`@"d=<B_BAN&O(;[7U&8r f 1:i3CeIXX)ӟ=F6IUҀ| T猱kK; ISˤZ >E*@YT'AA;NIA*o6> oM*Saa AٻʚGY!*mAkH|mqbK( :clrRfP]>ad_pe](VG* :O/}4R$hO +T+]nv,iq҄ӡҔr=u>`r4% {DTmiBBHQvt:k! 
24~kJT% ZP7Ui,stfwDSRcOz@e*Fְ@xeR6X` v2W ЧcVx+RS?a +.B Z)@lAzL}şк֐\H>Mu;kftHwyRr;vJgQYe=I'ŬuuE <COP*I OL≍ eӠdNYN]vtK}.h6Gvb,%uq*0,PGIcqMAPLDy5WB%PdǐA6uKL RbxrO; !je{wQ!uJVVtLD󝷎ZZ@Q$TЧ٧@{ NvmH4ldh<Ö5hͿRST܂9 6./R˩5(R+ ;Qy,]uZ#AȰqɘQt.HRĤk'xJXXE-+BHC3L{[=Y !+I[)yeīœ'?d zYi^jųVR(B?ϮwR]aU- \ڹPaEą!nj:CîK =uQB]h$|^Ѝy|pD,RA)6Ӊ>%* 1vu]L2kT &DBy:Ꞽ񣦦-RΒ{|_}z1d3HuV^x488BIT0 *RR-ʩhU1K<3jEKRQ*u̦bt,oyk ˦|QZ̺PW;)F` m:ZO]udq `e߮ڊOt]B_wGLa|:YF5/#u*uULIHlƃMWӟFd,R\;ql06X$N <:T&z +3lyMg* -'3aTMŃeڪvnEL},]t\-r/%kąu6[J]_x@SS0ifh≔<7X5[RY%cNCACRBr c"~lL_Եw,?'Tu9[F)j w03tW'6rBtm.ɄO8uF1-ȔV ?$|mڠeJvd9F>_;].P@`ϕeCY{K]>Xژ.#_9vVʐ,AIH<槛KNRA@\{Wpg(.TR"#I: Llt֢S ;X%wl/h<|w:F7HKkh:(’I *Tud!S0wZ>݉QTt`me^}9HgVw)uFC F0Uwbu4OKm&| G8Sl%*t֟|%'(0<[ Pip7̊|@y(ک.hAI1'[D0UT?kQ]ET:də_Mk{vC9:2p5v*MY2;bm8k]BڛtɅ)#Bcˮ jnn(=) W2<_KlDzR*s$ ؑ妶cXl-B>=иZm+.S2*"<voV &еFfFaÝ:ҩFan|rv@Stjq'1Lzk,.( ^D(. Q2|E!$YF>rF3) F"y˟HTs/vYx`khkcwWݲ8wKU#PZt?jflQ꼐e ς֞p:2Es(#$ ڥ>!iKr` <ϗ+Oc O|W_rp/[崶$̭4"b,\jFG'!="z' }y+띉rFsݎu82S זn3 })iiX:r@vRSF</KjKL([>]tT;6I*rJ[eAJA;sQpЦc|1h} jF÷:aDZ(CG*ԅ< $3yͤ2lDR6/\E\àaa+a% DJ 1`u#F-Q[Szf )]ۘx`.^tLfI628Lѵϓ*n6;gbgn7NIuy~.MOs]6XBP!0h> uw2[)@Ƅ5sbDsjRh[QSbA~#!k=6S81Ve{)(6 A9DR~mM4[K *BmxZṳfvuʔ%FH#1L]ufεݩLZKhRbRv6>wD  tHi𴰢p0fҞsexoFyG8?ZVIH$$BAۯ0BiZ Z2γ?ӊ[J9i+aCZ{lf'闕(3lE<;jAmIB t#m#0]tNh:gM(I1B2#a?)i(^&>.KSuR=R O^Z~H:;s)2Ӟ7MT ]J3,+p69Fik[iq戒KB !Pr3'Uej=9dBlNX%:諾*IZD5/]BD+ChJQ@5V@2&9smsJ@oi^m!$e$ YA ["qR!*Y>j>Vp}(VHS>cfTeeCC<hA|?K|ï5\DKrQ" %en@cyy=1 zv+ё:i:9p2oOoJ&yþa&\,ems/N{g:4&u/3|yVڪ7qFzuSedљ D! 6oqAuP{HYaTV3Ā:ck/N}-@ Tl4O;B )1xƱz3IJVVـ%J9r#s"]SFjD=d|1GDH|\ gi5 Vj:;(K׊Q,|q-3w׍c)^ ZFrL: L6˦r0Z;ZX .O&<siiPnBTv8:q<2]xEe4g9TAQFVvY[ fkVB:I@N\%е!QPPTNVXͶ{^<~g(r 9K#-\ uח-)(ZPIuʂFhlq#ZR<-5}[! 2>61Xo׭ H$@0'1>P[|8=;}ބ"Rt|$;O;.-^Gy=y0>n ҽz_URp%;:Nw {:֗8FZ.̱@PuN_poa:B*Ng:S&ٜy'pTDt|7ZB>@p%ı *æ?ǀ)[4Ӗ=*V[ H2实QgW'+!c*j`d8SKCo&y 9[KaU Gj)iPtuq'C^^h3;SuE-T=ݗ RPz4i8?1u/wCI!)1P平[QCWpydxR,ELjN;UmZˮؔ;fu*뻶M+m Fm $⥪HJ #AqV5zޚUE"V=GO)<<d⊕dVLO]3R3]煴$KF?WB{PHq\֭ԄG Н 婴y!J..UqsR^\h-$6<_ē+9vwmTe;^53aE0 *G#n\ka!0VL}-;EAOyQAҐ<Ã2U:-6K4mM/ǏE[COU 6 8w ø;&tLFhʲKMɁT Dm>5ݕx}q>Mu,6hQ#T@M%v %Djbb-c3DZ]a`dCk Lc1Yӭ sO 5Ry #"TO{ 7I\akqIJ`~F5AR@Nt]&-}UN+j';3!@Jgم|X;+FCξ@T(fNi P<:o6n҆X,KfFÐNQ>/Q][^`_<:VD4h:Omם j,3_lUҁu;7*Yq*) J:yk=Shu[W R_[ *̞GS=tmxUkqi.HSHʗ@rܟ悤)[Ȫu?l\Rd&gH+H$oXcl6ȼ#]GJRJI:A1en*۬~a8BU籲b9^<ŕSO|oyi+|wv u7iZSۯ[=H_tuEMS+EPB@*P^v߉?jAMLx9% % SSl>NĺIR4%[F//d5?˟|G8$aY #)Qcc&UMD#)2–t܍`0~hU3.ԩ@-+J6tr<Nk#a˱24L8JJ%G__a%wJ|Ko^:m:%jP4Ԧvn oI{};N) =}>C 5ׯNzZIIjIď.z54tUڗCiy c@u %8Mo-lе! `lq;']ʳx »ZwKmq Jd hV|=7vvBBJ_{=pNa%Ow"eJs~$}u%Oz bfʚng6q5,^q2#X뢺Jk*S3+GዖHJT͘om)MR qw9hdMBʚ C-hmyT |;i<c2pDNsZV69U^ˤ'0(Q|;8VDdNU$$6ԕJJH%)jg#K]=LDA+09UaLepf%*"@p#iVR2βcKC`v!#qIk676)ͥ!C_Dq{g.=;N.(z?f7 HNa"'٫2W)h $<Y@<aiP;,Y^ۘ2%2<ӭnKmJSϘ>bUu:}O4wJ`,TD)H I#nQ#gCF\k;$*&#o,Wd'8Nqopu%AhFG筗4l紃h'7U[[ié̤Z ^V J$F~Ve;( u]-+Dõ :I;U,xSdEU-M\SNmBI#}&#[Mp?<eNˉuĄ5q_h} @0dI3 Wq_:R"`)"v;/aOI$ʜNQy~P/aYwl?L %$i#ܪRh#mi<}!8B<&w=9g.̂Az0&a,'ssq2ת!+Jj q($g7l}ǺR@4#CS<o]{KE%kPBG-cMIʺЇZd%0"w>(M R?6p)EQd zX,DB'P2>g.Lch a 6@:[dd*$^x΁2=Y֗Au抪5T<K9JD7n꺤\4y+:+y+"NKwB2V%7cKIȩ)(&v 3o:s鵼wKǿ 6ï.BtvTUwafP27S٫T1AKNTA{ǭx8w]q28s@>=51;n| @Ӊ~u [!7H:ůvl<y@WCD˓/χۘB캻N bRJ.(J@WȐ@1m5i. (_aDehAHHl?W.3q*t3XD9'Fɼh*Yxi mR\}[%y$IȶuWౕj])![򋓊oFE;IRQ .3\j'].P(p?\ 8-Z+t9.=#\g8apȣa Zu:( Ôk\4*&?/SOK*{/1U⊵BaW;Izغ({=/E=#^Ηy yF*fCBgQ6 dfanMyOn3CjVbjw=zY̰%īB9=vWe;~z+-Al(*.  
oeUJq02mxn('f"YihuN*GNV^wڔƙrTOKGݞw3xjJevqtTկ-hl_;[u^(GB|̓]FS&.&:g}jVP FRUNۅD yJFU7|Dž4[u3LL뎇eIy^彑L]P `hR z IL*\Amt,fΩD9M!^ۋ+p }qg騒jQt, )Hoƈ$"U[UsXugu TĪ3j>ˈ)޻xJ:Coē3]{IRqdSlCDjLrP$’Hy>?{ {Qhҏ)'Xf }RrBR$urMsFS!=jnޜUy >B4O ׽0ÐdAu״O4R&cA6;vi[ely,89Gxrƻ|X&R*U$Hi|LK ×{iئc2%P@;ۨw\}hNMiK8n~ǞMy[ࢡl=Oؾ/-Cu7P-Ӹ@b EK߼$Ci>tgN)\x2!;RL5M݊o 6 T]vOSzASiRȈlSk/ 7۩4WU2s*q|ԢuRSlV!_-|qd++Oy}6H7/yDƴJ檤)K0/f|?8oUr7Nq7m]JO eb㯄A'I jWZShu$eﱌ4S[L,oA[{ܼ`cF:ʃ8N59YՏ5JQL&!Gi:rڗÏc=v2/JKmKAB2KdV;˼FJG- rF, GqCS5kDjM lkUJ۪m*dI?ռT ]n:A"5 VF 2RT3 k# ~K}ʴR=yNKRdKL2;!& u6qѲEMB%rAdc'-IP*熐( gfծH+RPQ"yZv7ĪKB4)*3Yt eqmuJV'qj;_g'F*(U+J(*A1}9}NnK`FTG{ą;?M/9ޙ_q7u }`%MsJFbONţp}`5if<t!(LT;6dzq)G@9QZS*$k{uPFY3"U'A`hJcR4sǙiPX %BrEQX[i)ˬxSeJ@>!aI[7rJ:Ѕ H[<R  (3?X_RÈ$)hJƟZ3-GRL0c\IZ7:P!Hb~,]>$/9 ƀG+N{@#Rv,TJ4P))+1 <=i,(7XnJZPR.v̝枞%Bt5[ԏ4u Xmw-?fgHV,dSB^Nc iaWWSܕKJ@ !@J'HۥJ\X0<c2R" ;Ni Vrt 8ϕm҇;%?K{3а$d,OO><RNVr3$ijrdRDbFXvԇۅMyi)LpIQ0[%բNJڹRV<ACB'SQm`a!1,gV7} <evؚ̭I`BRg^ tw~n:EU}֗U>^.K' BG;=`kR0o:䩪%COsFRbe:N׈%(n ԝ~kjx⇉`u宇Y[RPqF)P M&'i6i6@O\ N 8EzCGSϔ؞QxFW-ôd<Z)j\UEJԅjFBL_i˪ۺJ|NNxw"Jכ_!ȓ"~[)VJ1OgW`MZkJO|ΐFZ|u 6*9R1^/QJ>@پuwP{ZNgo^ qApuQVTiLuĜJ(IeQWx@-Y|;/~ WỊidMEBr'@:y-Ê&ؠi)qp((顷,fp>4iZ\"54*};IO:XTc7>%G؅Lz~µ8qujśuߕQR\Vwl)k!\)I~ˆ#5"Lc4y-^7KIr ԬBA lKOC\TU]^Y䞉6,1jOwd:?}=y]%wzT:- ymîMj`5dS-+%?򙐻40 j19Β>ȝ=]x-MR<QmNJzduoi_ryy$:{ FXYh߮~sBᣵï '*O-')sP GV'm}F@)҃_Ȓ5Z /,xÔGteAA''I($bbGܔ)ixVr4yiz~zRnp?!i+EOCF;SXkB\ӷ .R2@V%?µ="u$4t)뱲_T"*S5@Fż:}xuߊZwHI#jmk&NE»)ߘ)n"`6~S;7Uh^sMB_PUK-Eż??AELZ݅bbv_ \0￯.}4{qג#o ]{5HB)KcT#e NCjX"=7:b!ii*6ȍ t<q22 -/?>jE|^\m "w~"dk(FhZ{_t6PB=N ebKNT<Q6"%yO~U :NI}x9]# \Hҹ-'Jjx |oK7ŏR)hxҤd-5ٛ͜R]&]]]&GĘ Q4T9NI:mxšzJ.Hl4'O;f1J.7w :~^cS2Hvs)O*3q<qi"F-xvOkRp%@nm;Hd@we[8UԹ3krt##qj{ØBEJH]R@Jm~G8?IYav>zGO80vbE$yfHWrT$Ask=8R*1̘B)Kysɀ/LFx9JVodv7ڄB@K=y$&96,W4w9'ɹpsѡ!fW٩Qu~veT=rTLzYDnH&JIQ< ٝP %[fmV}Uzѧ 37vNҠSH) Q量਑:~?w&$H/R; eYB B諧!N?֗?oWqWqQYa34‹W76i:JU23 g#.kC!k|eת* wIrAbf50tY:**ZRT#m5;+zW&Lڎktw]%B}i I$dv ӐNK"i)JRPni;N姥&|]u6R[<2u#g~m4<R%#Ayׯm|vdp1ڞ44ղL pڎ颧Hq,$zr]w9}^)}$ Iړ\EJL +ԉ)RBJ@<UK]!4sKniw{R2ߗ|,RdyzYg6R $ɑLnHe'}UC8 7PĔJ@|$0׬Y*Ԑ3D>;beR)וgL'-}e >gs6@JY!(IקoKmJ*!03B@ ytZ.890])  |npY%Ye $Lsۮ֦ʽB &|$Lh [e>ҳeVYTv -=thHiЖHSl%h֊d4BT9bTڒYjFl;_U&PH!˛lAh[. %*HJj`?z ʒNp %Z}bȐFE\̰TN&q75U p6>]:zic Ǫ_q6̾\Bա}B\u}}3e">ۥNً*X[RCb:e2@~߳+B$:!$τ(·&P ɘ΄sYss PBYu>6+rod@1)yY $Y5B)2S*b3aww-K*N{ۯ]n,rZma! 6:l){it@TsNghuM%%4n,j`ptʞfmŐS?~#Ny갧p뷪SLVqҶ33Y.RζKMi$kڋ3n1UCOX qTmC2RdO4q}o$ UR28y̕dɞkƆ-4Aŵf> I#d o--+F K^euRhW  ;Ś?N`:oi..o aF'-O9*_@.[ꛩC`BDH'o[(|_~' \ -sWWZ Xh+c\X=RXmaunKh$okb I&)9<Y- &O!%ˮe;W ]l-7{e/毫x]ԩ *$TN[ڊwQbӡ" G7 Aa롛ӊWɣM Ua{(buZ0w\zSns)ffi(tXgGaĈ]mIa?t(T,- 2)0e'gNb2:gT->E%0}񰃩[ -˰ytQJ1#"Puժ)d g`P:X*nvC*-4k1ڶ0ڪí-SX rڑkbw4".UDdzk~`k5㯇\eT6-׆ T5Fw{nuffjrƬTU~k4pq ܳ}^ι}^wxR'AMUDIF7*:m[>K&܎xx]I=PN4eP$tռ*C:ȟwHוiݹiM^r{$cCm"s{$6©CMJHڠ@=4yZ-$hz>Awѧ][:MU8@ >{ZbB_LR5j* Io`oKẲd&r3;ɪ"۫5Ai Πy@tkZeח *Z~]~<V c"Z)-4ĿzwiniL2}LgM/h8QZh$Nȴ|Pҩ4%Ii!r ԐuX kNJiשPB󶣮R:Y3Ө9P\O'VjG͙7Ӝ JMׂXi,)Z 9 Z|:6Պ})Rdr#\-ayȨZD<zzĔz0[I敏')Vc~b,yնMkKQxd-|}- xc Ҕ BA?Qأ,TaRN*A#]X aZb7ݽ*@N]7ju:D+`?O25KJ/H$2vuSnL C qj[ך鲶+Sנ}oUbkRRM?>ZDϔX7uTjHmk!|6~Zo}ievao/RP*pKtzG+K;vZH*:>^s)k-([ Pu+/Gc!=u㪹jK6aky[U-!AI$r*trrmu*ZIB[L//]AO|7p5RRyM>VĬkOג }[ 2I6pm_ꛦJO*[@I<j}-LHL?LNVjM+IiLs$)-ܼv*SOqa[*g߆,]K E[֠ωz lw.\wz*} \)}2ǾG~+*[seg%kjUJ*$VSm'@R`Q.2dH""$lخEswue)̬*7I%x]In:q]€>_[-t!,tuʕ6 ( zm"%3t^5KʉcM ymo7QNIY'_8i&JǬFNy; S=ih\6k2.|7k8KTu-(XJ2469}Nv+<)LW~s(@:aAHR&":[W~aRGŝCA$ lF Ih$)dmB @vRㅶ!CLQ9t閊|'eƖI-)QD@:/+8u]J ( 6FrfvP cQ?. 
DQM ҐuF9T• AͶh\RI ;d–P(Sjɶs,PUJ\̗ Wh`nv9H*aEzm~tCjTf>xB;аN}4>- fO9yKS4b$ZZ]DLﯤok^fiP`ufI 4C.b ]&Nؗ G-ϭ( @ oV)mj#Jt`ÒYa}FrBd$flWZ  W+])($)}WTPRRI :Nc KI nc:V#yN`0B2$F`k tˊ*QgYjΐP0JV$hu>źt¤JRv<B)ĥ)_u>_wjJ{ږP|'Gv]NeN<,F6+NU]&`%>r -)Fm dYRd蔤CʥK<@hFY?O?3j 6D}U`62o1[|^jꜥCIK)4WD)N55Çm87 (H7۫EUZ"*gnG*nhtK(dqX??]~xUk*J!.XV!$m6/Ƣ켨aJqn•RQ$hk(;[T]Bj)]l|ɀ &M]UBjY4XJ[? I3)Ic]e%KX6N]l4t3ݚ{z|ga-.͸-p\9s})qjvjkɉm r0F' {t\+.4S: p鐦SN Ts(zͷ4Zfrv?wVuuDUq< Jdԓ̛1)p7tePҡqŁt-ْ;\^FZD$wSf3it# kx \N`M%965W[Z؁Ւ<XR\MZ֯갡4sRx<ZU!ҦBt@@jvrTJQ> wGR Y"ŏU7wЩŠ2QNFD/ȃ#ץs N %FFؼ/5Z^^9hl-%r")}u eo5"u+,+G{g{quR:e6 r/wa #\A# a^6Ah i7e0iNn:,)uBMU;PP z4=I|ݜ2~uJj= Iz!<nkbmHKwyC`o"nNQ1ok+7vme ;;hz-SRSmݺ/꿏cU$'Z{J(R;YI 鮇^zآPu, D ))" μ,#3"X3nf<h:%U4Nf?W{Y9i/֑Wwf#bw];ZBuq$($f<-׽MwmK*li!0#XlKwRZO¥3-aӭ4>*7; 2:-_<ai 쥩r(aZu!YI$%Dxa[?7-\*ՐLu؈ʹ*'Dn.aIT^[/П^KDv?FT7>q)ai|6K)W^MgS3ݖ H#I  750ͥu eJ@'°uJP-ԗ/UoTPT ³eAuoT ap˭=UܼoX83+hGȎGpwIMZ>:`n(`ľYM-HLgBu >v-/h˕oJ( 9t$҃mϯ@a~wm/e\Z2RaMFRyA0sn +;>] mQ7NHY]'t}`>q8wf9A32duLG>>rJgln.>;x bwsi#a, :m5/ێ(}h>]P+jvUBA}Rgjdw4:Jl 1=(JF):FfM]Ve-)%9=:u2H$f9BDƿ5h/56$IoiVi@LZjx@S:LZ֎j<=9Q $ @lS"_T%u4𷋮:1 nl ?^]uEmkRӰ}H̛|}2ǫ8;) I?nyJHn+O ŗN<ՆQp:Uګ>'dCkZGk4i_NrZ4 }mv@ur^WѸJ( $ D؁3)0 !Ǻ֎<<|xn.e;tXsj<ԧn(>Q\ͮrJ%mi5\,ÿalOKTo+q7{n$Vew$U{ OUޏ)몝yk\qDJ'ʹ 5rT23fğ|>$3ݖS SizQ!ž[zն V 3IG9k,sS%fI En4, UiU_xڀi @mby[\KES;<FMP̰Ju$Șӝ2+M-G;@; R^s WT&8 Bv FXL96Gd~pۇtT5Je:?,~HL(f+"KbJ+K᫼4_[IέseNZ 0IOmpD=-̥}M'xpJZBAAFOfTՐD$trd 0ßdT$IdArip$)H ):JY[qhl :m8YH孭ҥ}RP B?o*@$1zz@D$Hz)i^e$ēzrD4x<V$e()Aw>Q-8 36bt*4I $ u*DDyR[) 2DA!gY[g)+Jʂ$ yZaY@HnT|Q&D>Zm,<R tNv>K+K`J&9Zy?<֋wlg ! h~'Bh3`B #fmfJsr39겕VG fw]A ¢F][H^QPgR$o>njV [|c~ బPRf'&eUWxB:u<1M+{O tRe gXƨNx2R Fh2<ԧm! jEK%HTv Mg )% g]gf7DAH)rTB!%@qnQYyB-"Gq&w+sP '1H;C[KP[+B)VgoNQJIPY_?Ը\K]pTf,CAþ8 WԊ|rgg+vCRka(qԬeC>i#i3q]Z\}* $̃"-¼LP]5}EJ%t(LiFC Õ0|L̵0?o}uf۱ڐ\Sr@Y,#_1hظTTm4E<¿.k: $g'Ϧ^Yp*(!SKE0-jV,_qO^N;NV=̈PJv m9'ˏ_=ܗ8u/,NOp\iД*TT1 +nbzל mFFi1XQ4 /^(P̂DzZ5xe֚q25 e*;1Lֳ@ѿ\lgHwN y[yZIE;DFuGN]-cS*MuAJu$i;2÷s~dT b7"Dr~ vXhJiDןԜelZS.⸪"$2$=v{%uP)++'YvS"ImBޓsޯ"}ܨsuMր F}utNi RˏgI$6etrbp4U@Y2d2i /[email protected][m`1)KT4-0{9ao!eA-$m ZsU-jJlǘrŋԭT ;J4u*W>1x!c;\5i r"'d0 T^lS<SHZLlQ8<h# `,*QPµԢR"FmghÝVothv< ʆ9Ǽw: 9t\xt/U)JAØN8t8v^(pRp'@@,15c17sOv9( hDf+X++֞ +Ij(hLs$sR >iۢgsy[a@H+4"u{k;B7E=+,'"7VVat\jUOvq5UİȆ6_xfxxBZ\L:rV? 
6mDX(i)nmy$bGqkF*Q#+ jL+Ь}2J)d :d_J 9.*E*Uucu`jVuH%*RŔƕ^!Hh!W!uEt(Dj U&k<{>[T&AwJ@Tm3 ۘׯ`>H$ 4ko!ighƥjq) qN+2\J *>)Φu<| 8Jp MMTj &]tHehn!Rp \^ܦeJSIe)tQ_5\r2Rn*0LJwh<5 Sπ'se_=xe[{G(3*ک xX|?ZjJmҐMw#(m3ioh.pz[JaRMd蜻]˻MCn;NĄ˚ICE!;t^N潞gS4܏?Fw*b \j}%H*X WTv)@ ofKIjomS;FWkIK!Rnw-)TF6nr24}}:?oZ a=m鐥 Dt+ŔBQxm>SH9SfLrh 62i}]-jMSBN w'9Zks8k&:Lӕ^zUlgCu2R,ԫ.Nϔ7QH c*PbC6uPAT 6?{c__87 }u5*qaYN:A=+G|8Ann4~/&.<߯$ {ډ)a<e?^.խw#N\bj (P^|5|&4v iŌq Jw[@p#U%Сo;AҜһ_ð}IT x31EEq.=P t(ܒO.Gofnĉi5 J+* i I-#ÄQ󥥡e;[xU;iGۗߊOW{P17U=T|?0% (g)ELJ\UV[ď+vQtgɚ;J/KHXqR6$.F*SQ.J%|lc-y[qXI Z$a/bPeJAX(YOImЩ)am<|7<d  #UHCm!|ʚI3 cםrm2-j) ˘;QyTT*IKRiYzm0S3H;v2DeQ e X$4 osXW!, $3#c륺򛢢ni!-*29@jo j->C+IP>}mxW0v'Gsj{NizpNo ]wQH(r#[H:K2RY,<#s'BmmsuQْ%Y:-O{5-QF B'Pgc3 hSw$ghTH Jy):r;+ M"ck"$/7{g Mu~-RP<,8Ra:}򷼅Id"5}JAZU'i彠[/Xus-V@Rr䍈[1i$ '(#~~VU;)T rL&LNDsur)]J \1:+34tI^Kp$39ZI( TP ~|GVK<)'|-mp:CɂDk%{+ij -Mu"NɃ'MO&h 1%CkR]He>Z曒V^^iBhVR`IOh<)AF]NFqKi iEhҡrՄVd ,VocwTTrlI^plYp҆S+eyf wZs){A4l[p8ӭTR=!@ͺvM$,f6D P!y3l&AŤ@$_MiHotHT[9)] R 0>?/ ն*]mT/'Uʔ`zk¥/93OY@`sNAzZB[Y) Ʀtsj_-;<eubtPZtC%iruu/ -|/)n.fl/_G)SAJ $'?[vQ|vLĆוw|L8a[ G;]O.}#O%p7p_u QLR^gܡB?&ٛp…ӎB|RSNn*I6X9}O'ۺKWs\Ȭ59YxnY^aiUHV*BGl5 u4 t{raFOӟ2oĽRexb?\52U#yYô 4 "$Ahk/-4I}ԳuR +GUF6oM}Zg!Kk/?[S4L7&mÞ$)]j-"DP7*6l;w5rPX2dh)vOHEEZWj TdjvD{>.z-*%I*BCEPd.zY4H;8<UC卦#]5WlRxP IIibBЏgWv+ڹko:z246 ݡ~XT=n* ;fbZ. QXfA &dkkIQr7%0yER^uܫH V'3 N4ˠVYj<{IodU{- >.Jʰ 3mT*^5׽ŧM^6K s m{rOtRŨ %GT&c^SiHufVxٗ.9e/<DċaFuBs9QJ:5l5+OT(.).Y1xIh.ONmPy>"Dk;\xػ䚚n^P!$ˤ;]M:Ys!yU?roTMR* k۳pG_r5&~.*McnLBQp;gݯ߸ʆ[S]٨{1P Q5Ԟ8_B7k;F&3m"a#*78 qzZF33%%K.:ksP1'Ar`^#u$ ꕢfigmIU|8ک"#[U.A IJR$IzXg/^~z:P+p',˕QZ,xn%קx`laUxJ2e2 :m=\C^a_z]ۚg3M6\}Ԁk qcۋ j Þ=%AVT%*mspNazZMnSI\Q C4Tt_k#KS}s:0xr7v]q;^OE툀̦ʽv!zGXLbw<<*SUz-qW\u cKȰ̪d'j#\uqCYǛj^U*ZΜ\]y͒sVWeF< y#zRۧAF$?%\T|BԀHֵK3,楷rHF|Zyeh9u W @\mU^5N8% OӗIq( 2v-b<BSmjpI)HY2ZOxFO}m oT%44Ӕ A7XۅG+KT -u:#o_VSIXA*5X2WtTՀ@)2\PTmt>h4 F/u A̧#DŽ%Brj QW޼-?u V+AS!Fa5lme5c\^M-윔 <JHspXcl6߁;l ϟU¢Iq^em G=cA&&?^S5iÔTK[rLA gEA[M@ S(SEu*m#+h~ _WKQHK4@|:_%v-H{[Aq T{=@ҔKh/,suB-k7}#CΨ! >&h"d xmiʛ.'"NT'rI'|'\}~ a1iԱEqR:ARP:xfM-pzbb#{#[#a<G?r5vtWWIn^#B P}螖ŗ%UK>(HzO(u7e]k4AP2 N~r4-ҭm8ڈm_)<#'J+z]dz!eD)c:o빴mq%OgBRO/WXr X)^e2N^F$<shIoijY eԪgB ؖ!tzK+~N"uo?e.\HCt{ER6\0o ]ym™Q if=JiR%Q<UU@|vRk{Y;)-4#HGvfp'H1Gj?14YK$Ò$[],S&rhR#[G;JgQ3rH"I%Kvd3Jc(N{[ڟN$`VH8t*[57H%]RuS},i%`D)Gbti')HZ&77l0O1q^*J)no m]IgaۤB9=,dP$[&76 K7A` >[B RF|**אRa+u4LJuHuD6&aBb?^g!Z^SMV4:٭s̸ɧb m? ~4ekQiVd2vZJ V`"ztӞu!k s&|:`)*jcM6Ԅd-qꈤCM'ذ)OJu?{X?vCc:u)+ @$#K]GW{.M@LF-E2-Cݓc[E^:hNْFtzrRFTIZu'=T s l V Km %zaQ;a [ V@:%ců2Rx$ewߟlWVN!@H=|ե$0rFSTxAS|`v>6JXHz2-$ LLJE Ʀ4#Hr[1 ($XsA&8'n{NȄey +Uz= %m 29uMZZe Aj$ E:![JBs8;|1NHPA$'`@X΄ N`뛑- .JI͗s$̏8?$>*OauD? گ0q+TZ PgFhgS_•Ҋ ћơIڶ1-W\E="UPARTۨH)Y˂8Ot^m)W'B)Зx 5.vi_<a7u|bwRtt(M=ZW! <r=I6y%VVSҺaA#?93ozn܊FW,@XgZXTnjU:RmAIlNh-c"{|Vpo);v]7 y)H@dאRزx:ҖZYf'nvz1KMwξA [.XCTqڅ(d2!b*=m$ɯ18r+ _:Iii\&:jg6;Ecۺ+-PCA : 7Wk^" 5jV a R; >)VۉpAӨz_|3TwSSS4 *g6mORd}@d1g66h2ϟ]N+vM6ZR vL(slok)S2X9%SH]_tap8DM~asz0+:q%$)*ƇIؓm2{W YM%@;JSjML^hvPZ^}iʕ\2fqƊ 74*pe ;NGxcm{ eyCհĤmOKkŊĎ;K("ѹDU𪒘4k}ʪ!IYKn(LԒO1az+޾1ԣGLcR6#KiNԘcCx+WW2~)!j ,@!GC/ f15H&:I)&>z,{_;_m苫8_a54n8ҷ N-p}u{Ж[aUjof|sڷ ".j7 S4bD$[n٘ s"U>KK(8>5g]*]N1g" @2?FvyNxXV."֤ftŶ?g< xૹRVUֽ 'E mR#Np,1Kp'E݄WUtT%/mjqGq#T.] 
@rٲ?OGݐ^#C]"-R:6m֯+ک-(%))D1[հðuN:iQyXΩEJ*LlmYUWU^|>L'ath}X ]=MQH ILh^^vSy.T%9 $-翤·"oZ.J `|cOh[奄/E^*za;tm숥3s{+}u՜Ψjfmf 1KwbCTB"R$ئJߤZVuwlkDN^>:in䴵8ZJHlwbl<ȏi[:/B5HY^q() )Q鵩ygj\{Dž4W-]sw-rt$u[x|8?/zkYL쎝 TD?ի[LPդLF/ ғu[RɄ(k:뿔km/sy믢1GWV?{3q8ERi«nwHV4z:κE w#{:^G(|&09kʦZJISj'D-ׁ׼+iu +'2Tܠzx\jR7+-lmq3+c1W⪫ƚ }諒D6ujF7L+Xodxk\@`o-:ʃ]mPTR^{m8m@ ][[ 2'Y I{ x!8 g]4|zi(Úe B<"hinh-'*|*s]"N\77Uĵi4f%=ԂAw""zr<QT%MroO5*Htyʗ mƻ|9d#[aĶ%2`k?75$=1v)j%JUCGv <č6<ŷXY9iSLgn<$v^'f[BJuZO! hE[M@5Yl۔];ؒoJ2$񮶴{0VH"fGsQ5CHR);t4FU%<չ\6_=Z !JLew .d=-/P>ޜ|o!.2@MQm^wn-ĝcoq-12Q<O@Nd@[B5ѯ=cAS K^Q1EYkIB K1ZְzЛT΀e?̪Kp #i $$ LOҌ^@ޱcAc3*>%؈kQVTtO9e}6V`5̟Pa*yU%Q∀}M+}aɅm,TdyZ󚞥}E'ImӯjeZ.%SyjO~Z1Wfp3UyVāQ=,iN)\ zX/J%-9 1)|-kRJZcdy>DU<XB#.c"CYN]wiqJ^YzƧၦ#pC]oM.vdXFf'}=lAU6T} izkh+ӊ!ye & {]3R,Az۫֘G 8iˡ {M3mwm 4GЉݔdJc DmN~xt A%$$(FcY3lK3*d8@&`&5Kpշw^U#julq $$h$ *K#Q Veք<m ȏ|<ݝDvϯDEvq&%(nf..dm":4$3j.]֤%@)9mg4.SG,fNW}Pv gLo^AϺA :GtwêEӉ"JIR QIHo;G?r0E'PFGğIO{%@_UrwY"P5<{Q5 aҭ%n{ݕ52f O]sA0\<Ϙ&}n6ʶnb=쫚];j˙KD%D͉Ha]@WyZP! "0fn瘭}U8"OE%18W$|.uן+h%<=umM-$=ڞKm~+)-AHBsH%g['qSӻ0oV,!ʄ4G9]+N8.۶ʛ -N@O i1OzJT]d-a%; Βr.4p^Sab[Ea!LQ*ž⼩hnS RH#hӘ?jwQiM!6 CwԽTYx!ʖ3-, 7R`yOM8D8Ԡv)$ b h` tϮe*t[a&TGāe(R<(xu#Xz*],=VˣE1 c mp}'5τB[DztS5kNZP%:Ƥk.! XvrT܂'BڨmK$hBHu H_ˮ.1_U4,w 1]bwmxq6mES'K*JYSh'qi@VPyGnyCt4c|pxA}m?-JSJ}uN9lc.D8Fr⦢B[i3=`G;8ê1qRjn|kvRU#VЂJgYmٳs jYm5yҙ!H+AIcěA1w٣{q–E%UC&?;Np*X^BRB 5jIH3ړ5XK$7F[H*ԓY#[L#wpu5&iS޸fPBgafx$"r3knTKn#ɮ+8Xs8WiM;X"MֳeuSW_ݥJBBSc0[p{a$–R*d8I]]+iM}XQA~Ҹaбfzd%q[23j`'w(A#YUzWÕ-4LgL%CvչXEFۤdL5kpuFn'-uS^wxJ-u#a\ wC.Rfז;Am0؅ T͈ XJVR@<x\G]SzHP6\- WQ9D˅=Q$(k2MK0Y!CAҴ6TsL1k]d'RC <@뭥iۄ0hk $fgZfNH: HX/Y~ nՅHUS!HuH@J`Οw׏}CiNg;Jbtmڼ?7F%!ZQ{i3}b~i]/Kݿ8%TV_ U3J H (Xe\_xcTwzRט\ͬw%Z~v+뾐 gp~w~S_7KU&i *YNvgAch97t\i=8p4.)}()!GQ@מZuO @%$h ˵]K+7QID )ʉ2 彅Kaԕ$_29tQIN[RY3)-C[9e[8:iPzlAR=@v QER*ү p[-/9EPFᮇӧS&K€wv%^ |[2$?*i-wdd! 
4sq.#zk7 k8/ں}Z$]$P~Т [S{|&fӪi}[\mԾ1HfAEMrR P"w֖nҖ@R$+rH_'{9<4Q gh)H)3Y[Jo㔳t)jGGDT},@J.,2>sl$F1ɴmUڕܝ>-itOЩg.NiqE֔̑yZATucy.[H9J`ngK}/HE;@Z[ :i񵿦nabڂmd`lfC_Y(h~_/ӉI72l7#{QʮB_h7EtdUǁI nymfu +6ot)Z}n'.o ~J(ZY V6SIJHTft3G͝enS* +<IC!t[}JAPptMfyt[P M?p㊉$Lΰvtp+A)2|ͫXl $*ԙXbal6̐gqחgtB]UNP Fu+%)+pjY T4:ocƊ @H)~:HiMWFQfk?~V}l>FRÉ Dνq˒ |FNJX w-9Vl'Vk.q(!%AD(II'H۝/F'}BʏvNCOyM#Ʌ6J_C;n.^žAVB4MLl5m'yUh&#n?HtJJ2HI򷒧Q :D|l#k FuG "BB&4`eNDӟ˥XylT , :5k0eaHU'}sX\`YF݂'N{pw8up:NDS ׫7MߣQ%DBd6w <K/%`R{9dDw<}QP8c>Jv`X54Sf='m$(i vkrqڑ|J\)S^!HfID4祜.-S_+>&)r$w c m_v'9qwScٚXCtt;Qm^a5X7m3˞;>ހkiWgrŀ7[[l=8;p^f]UUt먻ݫHOz%i>o:va|7ɿrzBBfZ[71o"<*)^UJKM&6d.tq{’}*:TG| *PDQ:ASF5 n<2:bgly5:R=¾_W<-X 0y:ȂtR46{p{|KRĮS.ƃhAL彚\=8ċ@(^mrBיꁗu,uQ q80\׏F%)Udd $̩V#$ht.?i8\|=u^WaUV%(I 9 /!kZ-uU\SBu"F aU[u5ɩClw)%'Q$ykiw7CazPR4":LD@SH-njNT4<)9B% iRP\<St еϟ*Q<۴sQ^*[c'veO.wq;7j*0[&A"tgihvW+H/Ti*[email protected]\`zאqqq'7pR A]\@LKlUO X÷M*vjcQU>4 §B=K| {oͰơmCi@:rk++i)3u!cR;/;p <=WK o] l(ӥ1Ӡ([wvhp0/taNָe{6'YoYtblM!Wu3Bu 9fИм<x^hZ-ůrf~[}-<Nss~G߫6n='[o!ǙAX— NBf9bn S?p)r4hN#3y u :ijA?\~e0zUWTrH=$GY 3=(}MlFd7SgQX(`JfqVd1lg{o7⇪.ܘ} Wr[sSP#B 4׎ԼCq3(~]*Kt*Oי߯\Ņ/cU=J{Sjf9xHP}xۭ USD|>홂[email protected]&O[[<<Gg2%%bH9voKxnuv䶠u0_cL6ioU*$2Q%D<:N؈⦞<|FfF }Wn(8)R='oK.{C%IH%0Obnن<JuPKO*6" }8jtW=6YS ~S0ꈎCl1zwIxAV $FaE0暵($'ŹX@_@ |~1Zf+)*TyLTL1fpHjckej+V&^VʼrA8bnLR ZlGi`GقET8,s+eqx[ [kNPn8 ]+1[S"}*]q$#s3b]#q * Ķ#OIP)Q<bwUV?Tޏ0^`iGKVf9&uv=@'Vs=`[&$Z8y7UN ՗$K`UPӬ5Np6u7&G(X**+tI l~'n_|->7H;zaa~:^#ХGZVtB,2m!Iu:QISF1[xLP8u+eJIZǾf٦HT.V ڂ#oKu9}x"츽CH!^ȏBީZD Ӭyih U=Bp0bNش3l)RuA*ZWrHǣs*ob߲LB3̘ܐtc7i1y!JL V=b* @ZO3mKtkXy8'Wxe-NuAc/[yz%Bo\T5)ŭKiG:XBUSS2tβΧ6I!Gs3:l#\*.)C1M-ۉ T@fiJԙ[+8+@~%:ۈ *B˸YBߤI1a+BUЩ[((f25xd;d*ZZAR_1h˝‚ ]:~pr""#yE3Ir ؘpU Y3)"'ϭհi] B'}*f{t;2 J:H6 jhTs@Ы/Q)xLU6yݏS0AIPQ 'Iqb~v[K,aiqH %(*J՘'AouTKJb $'Q,ƾiԵ儓2@ߜ}-0$M|lNg2DsZ{ʂH\(eP"6&t)źTk5Kfx(wju7<1Ri*mEJ HF&JIHXe˭8PPS Nda)De[ȓ׺Ӕ4.eLDlrU6$dRGuNhl,d fӖ[6vNJ$cilE"Vۚ7!+궑 _D&'kb RiKD{ܾn.AB$XQtAղB"y >DsP@"oVS5tAJRa!Cdh rB|6tn.ARΞMQuCn\u$eL>&I7 /VyH9ɾ4sw\@o &{FnBsM_=zッ6sL . 6?m ؾ컉iol-Yg)ET۰B7CbOY7ML^j ccI8ca" $<vUj%I94eZJ ޗSlS0w_ K>8W ^׍}֎7(. keݼz|gIfʶ l,3H T؅ek!{tq~<UY7nu>;7#_lg'+ N @)HRH؏kI,oOziKoRR>Ȍy]xݏ ?-kyDNgRG A$p& $L|aw R*7-$ ;[;,HirO?byd.Oq8oALP-%^i`)AAJ  j66-V$x/ /uIķ Yu ݍM:rL7:TW^ Uwxnjz 廸U R(,$P JcB;E5h [^U\x [@;I̧X#Zf^;s<x^B.ʺaНgV {JR֫˻)JmL:[cb[glղ6h蝂PNJM~jiAPұh$n'j^CnPy\Z0K4x)ƐEK_&Iʣ -N^4|;Q 著[JIsT)*z:i}xw>d;wU0Z2skAlYrÇיlҠIL%=}RJb:4kشGmڣky ŗn*xh`L(4$ -Pc>b by ԧx"! 
H:-vtMfIJ3"f<1o]”5emwSi1筩{PreטSqp7o$]8blE{T{ $hkX3;۪ݘ/\wUqSǻ)Z+zJ<y d~xl>Âì"`%` b,~w5VkVWb^*̬d5RD1{m<f`zp!YLP^5{=N`% #C OWuaRS6)M'.=uu"-Is8=F=m cFU)+"Act7.L-\zೕr'ß[ؽc2躩s{0MufK 5/ #1$۔\P3KkqF,߯<V>I:>@L".׉<Z~YT)$Bt"NʸׄCEQ-ԸB* TBju=yYOH{SwrӣPa2`^Wn^)a{ҡuU݇$Q:H L-K8hjM# βA?.K8iۮW3 ~m H4:Ye8nvbjq&BjG>ybڊ6SM1k9ߒhGZےvBvqڃ7m?1*^”A`IԘ>Qg}[Ap%JodjJV$@laqNtݢ>VdyTv'gp \7\WV$fIΜ|F8-Em@8i Wbč?p}U.˥SR֗TB'_)Mf;krI)Z7(QxS ~uZwUjP+.u y˨|6viԥ&deNYq1At[x8-nڶ͇[Zۈi.)J~VVx07 ZDSmd-[YlXi:'c>߿xs7ۭzpt>f4;w}R]WU^Ja:,'I2u=e8Z KSUs^o6Ko%DsNdv6>EW[OQ{]%nR:r5'6a4>׹ߐ j) 1?3h8 bOaǴѺNWQ |['wb7ZOx o`I^}m?L)+)UDoDir=h+tK$+*|DNy YM,Uq66FRG)x]yƗ^+% D6<6g8ΠU^eNfBuG0TT-g^RN2mWu9ީ }Hi=lv@'|O#sWawSc䭡 KnIJJܝ ;cu:H觽KnH)Fm$iͷG=EIKūʒTv:ĦU=ty{>űf7vў*pd'zvAn: *57I!!J+~M:֥'*H !y%!o;TL9ˬNA[~E-eD)R}lـi{qޝmUJӝuE1x ~eD݉of8;73Rʑ#XuÔ^8>u֠oJfe+ȳѝws,ҥRJ4%m}% ?Ŕ}PRyiJZZ {|bU#B B[RKzJ{*.HO}zٯ|V`oͩAa9RNHNqn*gRoξ$%N?;5YJ)HMK Fޛz@&VY[++%@},Ebބ(kSPAQV32PRJKhRʁ%CA3h{iŗ_K!_zΰsOKAeJL4+ ZT?_[E^U t(^" dkbn,FT]~͉p 1F$w->$򏍌PusY}thq*mT3S=m% ҆H &Vsx(qgNUG>An|\WOsYuR.6d[]ZDPH~>/IErbMT'"ʥE >QiW*tC8^!I26ס@ *WBr m4Vn .5t1iX^ZR(^ ϻ7v t, Uܾ/İ&$4&-AeUAq*FM*W.Q)&Uo6ZeJdvسM@> "  Txռ'BwZMl]VHʜ`CU#$6B%3M:O}mUӼ1wU#T*@DJISIFu`l4k/r즺1E ;$/+* 8Ns kh}\U& }<UO_ oݠkT=OT^nwIxQVw [V pm% FY-d@WHctڸhib^Q!] ulWE _{Tq .&Puz< oMEUFZkpa\F@8_73~Z{S~3XÁXQ v6ҥVބ  ).3L۞-6#{պM+o'˯ՠ璒i`1c  tCAr"F׬'y6"y$+|y駥XE2;|<+=8i/st.Ԟv<z Cĸ&bM/Ӆ<JQ* k1`S'L'DiPoEPA+*9`&rFzO߸#a_8˵ 5|Q%nԥkG=t"``oj;AF3Ri TA+$k r i&|{m«ۣ `꛽+?Eԅ4KZtfY:WHE\RnK&@> A$+wOZlKzp˂޺m;Q!)(΄i:$S^&hE7U8r.gIv"VP(u9NkbzuK8t7UsX7j(bm5 )\JDQ Y*C*^Ъ)Mz9ҡԴOx#0NXHkMtۘӌ@=qʴPSqeAFW݅m' uc*CoPw$PPPmE R f6)%UeVKHo'Uo%޵Ea}%̘ӜS6J~ Ff0\}ոS~wT69 ܭ2ɍ4WjlҴW/7.z'iZ*y-0<0 DMF tk$S04Rt t$k:^/ALi [m!=8 GUwX'e-菆G0L=,8]F~RAR؛CZk N)IU7cg͆ٹiC@ yʔ'&gSmMӗY}մP`J | & фh*/R=J ?cN=Çx3Yltr򾨉C(I&WQP4'(l^:s0u9 x}~ݧpgf j.cB@9[O5NvŔϊN=תJ)=*Ѧ%#`:ڵN-ݡ1GxZ**BA=#{H?fNmfJ @6 58k'zgnyxK a]unӤ @0A UX i>-ml]LT#+ <Z4m:r Iqj)&:6$tG3 .wa{ijQQ:K,)?KC].o?f􄸧iBJR);/x[yih 8ˏ2Pcaκ}~v1$mds0O2.nq=0]xPΕHPIoj<7J*KmhE[ $TI#7c#mR J@H馣kBcJ 渞m{CF6%0u>ntUh$h6>w-y&)* {Oa[MN)ʣ@ZP\bu($)%m%%6Y.2=>Kt3yM҄$_+bZt߸4LRNI;sy(;LP~fq`Z-Į vq7#qFsCoh [}^ fwM7.eL<]xql ^&T&NO|}vRI!Z2{UX -4 9w}ls1Zc.9y [vaX~$:TIǬwnj7iZʅX%j0|mJv[’;-{ }uխ2X e ><*ᥠirИƺo}elq4抪ӛE 5Hi}BBT& < >ڣӴ2&C'mI{**p'*dO}kC׫8PQ[,/|BOYE|]9\*6*'@r(Du wK6WJq)QD(PYzۣ]_fmPinҵ-uIU`%_)T P-Jy>#pӶ/UDc$|h}zrZpȑ i˷cC\Zu@R'P bH7[Cj{W(…3yaj te;QBMp'(H6y#4[ó o<*:»J K]<8bW(%Zq#OFbtDI֕Ҩni;!S3>SngN_0 5}Z C=F7l ĒrYg)k.ʂ& ٢. 
{JlһBYUAP w P l9T<9V"'-!HprtD$z) pTN}uz*KPJN_USh#2TLřTխ$nŪG=nvWwy|%9Td4]mFG;œf?kJPLy ESPi>[X"hQ$fߨ+tNmtsb '{װZgGNY7$(<߄ˑHHYF#T O?l5U]u4$xc2r뷟7N.BSL>ҟbl8dulEFrS㚯*7i.-8)%zU;ۡ9TV\tyمe蒠BS:;JiPB>ZAn{lnTn1[=%2HB&gq-fv.p}Imfc6qE銖उ*]}&֟bҬQPsF]qeޮ9fҚFдB%JТP Κi?v8Ta$+ @<ΖMpy; R%3*cm{Af{dyxVt+aS8rJ P2tL F pp{CuSRRhka(oK4{J.)@!.ƧM,cTNuFiW!6X[Ēہ0]&,&@78Kzt5bzۻS֦)y!GTӐcjKJC*ANuDf7O)䢻ZM%rbdnyȣ 4p?1Ae/Nܷ qU&.څB/j24TyZX7F"a4 YKLGXJͤĨ,onZ& ܅ j6|a W#~p^ƘwRo{ƹc"Ъ5DmنHos q:y~zFk.7Kw* 6O#OAm Ê|K{}b\?p&w<Ej ##V} L >M^3xh1zZwn糍-낞z=r/Р~^.0`r˄wM;2>(~3ߘC=.0Ue3V6P&W C@.8_{yC@#MtN$ҽp|G]oLdIm$BFu@m]S<Rx 0AV[Q+Ovyh/I~w-t@u+JC燱'*jqykUL<1)딘(V9/i)qJ^g0T@#=F}lEAQ\uw>G.k-)!]9VPФ č1%d6XǷNV*_^#ŭ[u, X$RfAwLeQOƌm\Iy_uIR .سbf* uITBs>#ktX a7@մU݂#&c)@NgqFJX}lKuaACM4hNiuq5sc+š/ie $C-2Ԥ<Dy'r'xUkn{]f4 4T #^+ii*uCƝ@ y˭QML 1whZ8^˞]BjT[Ԓ@b<(녗</RrnD"jNcٿ g& N"ƨ5Elcti uQk`~x}WiMu`My CG#6ؓk\k;z-0' /Ox,s p߬P75+}C(J@+Rz̝?{qlSӴ~ ]Knlfq̣v8Sz_,GEPiUI1UWdn)B hREg6n^(%}DX~iY(?3 ?PO5xgsPA)k7륭 &ڲJ@) )S_,.Qϝ6+ * qdۗ.vɹ9+eõ+," v>n KDj㩫 +RKRtJe<nzb7r/S4cHmDuҶGr\NVtި~ƕ$;sDYkޥ6rC7؃[HIf#dʙ=IKEDVPTR|j2U\sYESsA%K ^VGeT49tsGgY~h˝b3qBNa ?ń*@BJ_ A+Ԃ|,ay_paR-$lgO>q`{E.Rɤ2Ily ֠OyUBQЪ?L^!RI)Rع/Gtk94vX e bımVOni~%ĒJ+4 {TX\lJ!<Td&@<֮qcTOJ! x1#`e4o}R4.8JH"?[e~H)(ҦRC9p::E`8驐A*iK\;yY D-SV166L*79VXRmq⩼l#ZNӧikܹ@a* Jk.~YPPdk~Wt:ɞ~.SWqU)py4 A@Z9߅Lb.v{;^8Ae׮ evoUh^B)rI;Œ7vb˓ ^o>]]k"0i'"kFq×~ ʈ ő`G1>Z+KXŽaKs;R򔥸FP4GmAlV+ _PT^Hu2\U4;ŇAKg($#ccm=GP%U7rI~0x)cq#aܫ$Cc>~ j݅xB@:?;Ej쩿VHtdk'>RdA z<ytrSXpMmjikq 3 'H,^$5Q:g@N|(n}50tQaJ%ֲ9H3|< qk|[4`/; b%AE%! ƳϞ%P祵8I*u3Im, }͠6P7lTl(] ꫨZ)J@;/~p ,]q!KQn_WxoUK)sJڲi-%olo!'n)5 2ZeK|&$MJxYe`Ը2"}D-nTkL7g;(dH.-/Gp OSV~jv4BBtJjpm-XT*!ȍ@__^ mREIyk#$ }5׋lSjCKR'15]HN+UU#!bGSڍ> V1S}4;:nO)R&(fhmjrHH..\<I<jš[^y.p3JK>ꐩ}XRm*FQ5+W\!JܻY$% ee(Ȁ kKquIvXC9s hz>-BnD&D;h7{ryyڅP)˨PwS'%`]U)OtZ.mweq_rW)r=Zu)"N&[爎]5A[!,6MyP jyIuB$M?}鄂ojFSgHmY%o j.\/AK^Đ{aeljQlooӋWCsNl҇=+ZRi#ՇqUkEe$R5Y5Zw8 B ZT%H}X\VOxoCEm]t'EVS}ލ7־by:k*s$ZMm mo='Y A"ɥ?7 U bgdCz& jvlLU}IMU{ԥZHBBXe`$* ڈ!oi:믢w>؟~<(p=TÕJaxbxW6]P#"U͕l: .ݜn.oT^ u+lȀ7ckQ\|7 o$)gq!DIH 3GU vw WUwsQ=D ̐ zوx8nHj'ap38zXೇj:*L)zцo_haI+ԐeIȨ;kYv]҃Nұ sNזZ6 jR̪EwYRaƕ6n|:hZoׅ  )Χ3O Bw?3Ա:WZ*7/+MY NׅJA [A>y[_.8W{T;[S̐mR*o*IPFIڑsP޴MMZ6eB Ww#AJp ޸ V=WULJ\mVNmBR#<(+&*Ku N3pB%<HUwPǒE8PP. Ƈ(F ǝo\-|&rjie*')h9^#ppB-.-;P,$#ȶtŝn`{ ▯ZSuwR 0A'x[9)!qAǞɥA?K-5`q`Mî'MWxUURT'H΀lxrjV5}*0-D;a=v_؎mVZRH:l-Ѿ v]0]i/ ^/@p$F A+aҒZ/'M5x%̬ns`5'{s[}yM~7}5-nP V3hcbMě~ wr E r!U N #6 9Wl~ԬPi32[_/<votsL(C(ZeVsfQZUf0?֋]O^~v(<eīs@T~_ӉXr𽯯h*Y*y] RQ$ӟ[]Zm*CbI" 2}mNp?.BPo)* &ALHs7j*ESU:R:ϯHr1L{7Ñs:6PBЈ>+XwkppNui^VdBI2jN߯KXcJTIZj41UVqk䓘 Q@:V1ktRk}qJX!‹qЈ~v'jy=ڥ* IAQh,B nB7MP"3q?[H7{ r<WuSk] Rbc zbJhMȞ<ū F2HajHY `IVi? UHVZQ$B@jEwR,(u3o aPY$y(B%COT%H'*R AyM nxTĥ}6~ODjw3`JIO֛- e)Vg;ݶo2%:[yH6%@)Sibu;{R~2 pB+]L8RUT mE!GqY7R"ӽ2fA 59Z ,)@A$$6H\BN_=A:^.i|NAER@uGC.2SS<m~?[êamvF:uNԹ@n|wq&T%ՠC.].Վw R׬|9`6k{ߎu-jyE¢$Ϥ0<N撥l+F7ruGX&Ox-"ǔ,3)g S0RI$RtTܭR*-eHORkqaUczgRM%̢'Nyh)F"yXΑL o҂zUgh6o wOjݍSRRY&TӬ7[o~؍w&SEVJ')m՘]?<#wM7s@gԎ[fBB JyK#bnz8k+?[ 躛*eј ytӜ[BE(.̚dNPO'"R{B^ s$';MuIHbT塓`>+ <QۖxSd+mwĥ%R yYc|(lF0 ecAU*- sV ^B;IfN&?#VNU _yRA6#rOXɵLT5qzfCұ:2FglH*m̟J 3$IukeUEP LH̬7ѴWmV$RKBtוn)A 93k'}eP\!#oSRJGJx}Weuol<Bƻ T⻴#1ea:42BJD@^N_+ruLlj@ܸTU>%Q'XM\s4NUT+!Hllƚ$UFWX(lhaThP"R'%E ^pVQ@͢BFhГwZ]2@BSgo pA֋IMOf8FSm7 5xvq*A Z7BuD"6ŪJi$P̧ [XvQ٤ZIDDS=)22r+W։v$k fJ,42 4v)BRe ge[ו deaB@%@U>6s%@JŤ[_,`):džiT%$S4f4ǘ.^tit/8ڂV铹4'pTWyP֙STI聾1uө:v|?{Kd 2BHBW%;6ԛ#/+WRCtn"ds6˙\wK~URnu:J%I1?;i\9ԢitrzolgZF^6EJRI$RNSy[isж9q}p mڧbOvTBJ4'b K/>뙵{5CO\娫HQC j'hmsp+q]Kw}ſzW?)AZRUā:!N3&' xN) J2w?!Eajdom),Wۖ=bD\K@tvEYoXtļq!TRr:-I^. 
Eڨך mc`7VpN Vpt̾JY#@>2̼uCkpw4DCT=Xn[\T-<Zާ&'Js器.t :h*j*QmIQN`$kl$Aۡ/Ÿ 徱)nf"HL #xbˎ*yNPPsI PӟMdNm}$_X!pb]OxiװȤ; R4pc:SlnjCmѵ&Ln`3R'[mnN8B/-N:.SФ I 8cLkr`˟b8[Up}7c$.3 :*ylQ,A3,0?{okC^/ -RUIz=wq6u7}cn?PO0:ͷ1 sζĩkm[؝wCpÄ8eSCj(!4BRgSb/lEF,bIHttFXwѫsum<G`kiɑ cu+t~mר@IRKH "݄<>)MNmD%)=<털0^0x7Lݗ]5Rn|ujϕڲ̤LI4?dںZ(zԸ5D(t꼣65lsE`<5N:fI;nt<=~!73R\m׭BuZ=@ (U{^uJZRfu:[L*oJWKq- $RT`b߷j5.6˙K!2LUt3 <:*F;eFrQ}oTW.-?iҔ@iV8S7fCHq`IA\;r0SqV)T8\D~R 4)@-@F#ZV>#o+U1y<M%3 -"L'p"'[]q}pdi aZbq a0lO@()- ynk puіeΨ@ Db{%qq?:禨[GĒ rgc؆HJP2:2 s#EHRL@2?k2+24wk}] {N[- TʲC[LN%&R+-G*ay]%nw. @;zZpHV 7c_JZ* C`C#C6~i i U:sygnV5C{,~K6 W9<VK0ҝ؂y Jrf% O;_uYi!`L#;4\UkʖRNC5?ߒgUP0T8pT 1v;AP y'B-/.=KWTZr`sQÝx=;U^ʚrr:AJɝlAEvIBOh/[\k.Is(~6|@(+^ZI${ĒL;{UR5ߖbGZ]Ĭ ,4kS{ / ybvRL:!12uZءt hPHx3?@? #vCUeT 1wSӯ[R-8f)g2 "uHԍM׽WSTjU'}5gx:zRr[%JNMO3֨6>+V;2] ¨[mC,MH(=S`{D]\!m:jP$Dk]ѱp#JQ]RKg4I>=M}EZ]p6J.{.ٿ:4&mN\A}r/XejՔO<j<{w'ru5A.j2,u|GGcU"ք'2Ir,Mۦ/[noN#2*[P? !.ɝţC~(×%~-ʓXe*iIINuM`3ߋ $6˕c~V;)`=s8KN([\xe%EB4jM[Y$&p4ar ൫ly>}-R3}ӿǔ !%)4}lȔ)(R`fd/!O8|iI%;}ś8V'[}9e<OoUO-}"4Q:v bH'k4T[RR䄁~VtP }㿯O\Kk!))KIp{hT-ڪjE3z9SϦ SuNR-0*\UKLTBNv; hp-aD5F:V8ך u u[sMJ\PJK>3fwe:u$# {;;#l}A3Z7 Iu STATxec:H%VsNB D2$f+) lAA_vyqKO3)@-@ \Rv>,ukvg@+A) .A$SD^/%!#4-+Q$93Q|R!EIL-@+Ǯ;JQ8UwwHwAwWu>]c`ܴ_(̵Xm*^4  '^![%$=wmuMez)RAl$b|ry{\R5zѴ\pTTƖl!Ж @=,_GDU)ä +NTۍR<~d0$$dG>V\<D{C>@Q3FuE=xPGmЈ*|GKX 8WejO{xU&Nצ X^iF #pt1:Dj|hc惇v8Z:×K 葩Hמk!{)+A/OFS⋊7k~r\44K)9>[y+p ]~kZfB KJqcØɃ+i<gk'g;M:}/)$0zWbtSwUt!NT$Fӽ ~X:}ʰb>󣿮pf⠹(!lH66Lt^ƮUmR:S26 ՘Ool5勰i[t6Zٌ!DgK3ko{fJ}j [iB6ID&>0[)۹ٻER]>!+yklgkB1n%,^ԸfSjL(F| Q]}9So!Lsdq_ QWyBEHBn@91>؜a#ʗKs8 g1$˧o6 ers3N2L[O2ļrw_mFJ]lV^mX+qAXe]Ilns%ʡw۠/b+o kR%jJH3-Vhm &F.GkQ}t`7_|8eOQIqˡB€HꠥAOKFDzn($b|d+÷TaE(<P4iO {(sݠÎ鮋U7\ae qGMa z^׋XC;@K$B"')6־L=<닓9쵎7K\[_k߲xgt[E5wU%t!)+Q]ǝxtpGam'zs{}H)DOQP-pvƼ+_º mjjƉ `<#kIxwplɮm mؙ5s'<AA& 3pR<ׯj^5L TRT}Q#d̥+sTb&ώnwl9_`5K~A=2@ Hs2u&֧ON<Fcosx|u_Th)R>#jw0.*i]w8{ )Pzii?IHk.;tGɣvw>:pC4mՖQIb|w˦% uB(O i:@_j;i։ NXX ]Bӕ()H$Z66d|M 7v*PlhϔHŷU"j`%̰AǘޝW*(VA' ~fv&nʥ(EJRXZQ C VGZ p?p\s[/ $&'˧[RVS7~( \=ߵ۵Į5YU TۙB֡sL N]fJcRyLer/L>v+lw`L%D giS3#^qKR@3lnSWT* sN4VqYUd"Y.”OOMSnaNOL]{ #5:y}-c^ ()"d_7(u DϮs^ӏ%ʵ(>}tSwi~^ C7J`%u6 m.xVª*CR=ЭˮiZ (z}-<\Ma uc"2 _k;Š򧻢>q ]*VWZPR:#Zmٲؤ+Jal[I}%JVHux=l)ܦ%m#"]#l痢M{%k`Ӛrםq7i>+c|+MnV vDc}|Xi$|']OTڙ׹Ezj!.Ψ6?{>]lTW6 {TVpU$Z58#:DԨ6,מ<5Xw][-CDP 6<fT6? ӰHƕ5,fA..yz؃)e)qږPLF$DmkBԾ^KKJI Ğ_+a檼"3d3}mv8ɯqyZj槧R( ))N1*s\d):Ym<UԔU*AN9 $R~̀_ƮfZإC7D@ 6@@JK9+🽛J(QVϕghۙNJ7Ux,%-)#y?߇0i=SR%T.HFI[Hշ/*bA(8҈8[S3p%PQRFxqxPGcjD H)j;#{"2҃MAI)!zfdlR(hD'r4KoZ JI=#YlM L6S$X̪%jVHH{2SQ v0xVBNQ>Y ys ?, (xBcO߭: *9c.i"?_p2NShB]W=?K{kMe}xcց@A?m)r]ufE:#?_ݡD}3VUU<G%KܗzXm+qDϞv& MiYp#r,Ֆ|˒N4'1:!Ǟ̍@yio@,NvZBy[X#%y ^hB jGߒ@<<meԅ-LgAo=fw6Ma ,Y0!Ёiz@R{ ;.Ow魆}ˡA[y zEL6r P}۲TFt]RVZgYKh є[kӲaI]~ȼݼ*t{ICmEٞyT $('N)Xeٸt1@AczTJ9JEBl- I םT #$9[\HT*lj6JcMu.v8s^U7+o8YBV HԓO׭B.@G#ԇv5m{6ăyg[HZ+,q7mmuJ۫$E+Titf\U<r/juu{EUK<FyiǕ^+:/<H;G?ZnpC<&R&o¢&z>j6!lkh9/$sX b~`{Hq2Ζ(Z)ZHNGy[6Sp<2H]-l)U@&byXǵV4(kԈQ[zdB4sgFXJcJ_|̫[i@;%$$r-Q;45kOЬtk͇θXuSS {6lZ Zs lazն}֋RB RMm_B|Vk/RQ6j*` :|h]IZ(^M_H[4weӺҁkY@?O;h^} Goa,4vB؊kQf.& fꨑ>~qW]}يrM-d%@.a4rV؃ bn%VoAUGOʎP<T<.@7onMSb0c >R\|Qsc[r;WH) quޣ^HgQQ,hKQPOuh9&ACYJkND`)4upX?*\;Y}Rd\̢bQ3Z3 \4jޮKFBi[-R@juŽzO/ݩ.㠷.-;=pK p 5%ʄ*VSQ6$?q"üP/SXZu7[wls?zZ<~qg^I/z|/6BTұ=mop?Ԏ~ulUx 2eu)7%!IZnQ%՝\F/Fn!9G6/n04L*EI rfvOk4`^Ý؁}hC7GJR{ҕȓPU^v' ΋T&U{6HSG+T6yw>/*[^*}S&<M9"|5G./)S>>P 12?s~JR\ZSV#MsB@^Y+At=w"!C) CiS<Vgj+!T*=H1:\s"蒽[7мVKw2 -2 >lC@PӔa(VZRJL! m|M%cYP |އMNУ]Fr;KNIBO.S`ak@GPM^&җU%NZ[mKALo#ӟ+[4;ҽghu&Y齪d {T]%I $*L{륎. ҩSj 6䁮ҕ(hwU-/g4Do)'Y\x4ah@Te`o=ɏ[]Uӭ9@a|oO^#vHBTJ $ zFZZ*)})TV̀ݰJlNZfmB<ȟb+ie*Fd4^\w*QKz-fQ?Y6.jЪvY$ωz|Tܿ8Xh}KMS E^PJV0~?;:\2Z%! 
{^૧+o_1B'v.U' Rre+ yS?i+Y 7+eD;z~V.Fgt_+04bP4ח[H:V"ɕYXXV)dKel49lwOk^)rxtYPHW2S= |BblGuD3( u:iX48q!IL-@Hfc͢9E[$ՔQM)psZy,;\\wv ZRzc`[MCϦ(_HoTRHw6{U<.uFz_mS&;BRgԁiJrzI#ilmysZOyے77_늵>vx)rKs ̞luwd %%HI F]~V^ܗUZUsǧv"&C%9;?e #(0_>+Ժ@y$._!o,,bJ@ cI]vչ[hU:g!m$ON`|mWtu [ (:L ~vVVZ%3*RLyjj.ktE#WD.&Hi+ ʀHO`PQyiZlT!@J#`@OST^-ָ70&Ѻat̾BHܭ#~P5e 3\#VC $κ}-֫Bo׫Bbv*BҬ^ܜ3Wg %Bqr)!BF=~xbx U\QPVzh'a>.p4p>aFK+Z9]`&0 ԁiw=7e7QxJesgj yDi*lM!Um|+}eӸ($UNNYYK2F^t !FV>vUJ%$A9|2j5ƪ%WӋ9T`iu.) #O8#kITSӺjw*-T  2y1ų2;oEЄ:QQ$|)dT&R:A?YB u)LmaW\KZPl:tůcl1%ڬNQ2h7XcP[vy+} C@m7ڊ7w' eşfXw""$6b) =~r1]~Yt4lVmϰLČ&ۧKHlʄunfI"ѕ׋9ݡ;sK^[۪"xZPKi^dxrllV=*Q;{s95ЭdQrVd$96ۖ{P;>"3'h= ,'+!N㌗;DIs#: LfY t$bm|UK%ΜŊvnS!y4t$zFm#wuCEk"Nf2F i>R0?Zuw -I mJ D -+߲'8M }{Q{x ))sӥ$ Fyr-I.AE9-U;p-[\3Cyn:`̃Gen&#*uh#Y4Z\KqŢ2T)Ra>.㕋>.{iJEcdΐDs:zv7ece/VH>\]C}!ZFP+ZIZ yVz{Ow]8IڕwJIqͬn9k_zw}:NUKI7o^/3zzٙBAʀi3[de3ɞSanK"֔g*B 6WJMەTRHXsF-_~el8כHH$(+S^* fI)fp:a+8ZWJ} wjל-@twpM{jj6鮻kf)k O|:!Ā`񗆃v~0Ӯe|!ͦiڎ݌1voo *-gĂ96;5R=WVyL}33׿1ˌ.WvaE1QX`(T-cIrlVq F[>ΛqCTH @&5q}J=,^teC d,1 Al>_ A}QauߗwU8BxXPRAz[ژ;ZgR=Uuplv:e_]7a+MMI*l0JfyMg>acl+X8I^IJ\KIYRSȉ'x_/wW5u8sw֡3QDNb3xv:GKPU ]:1M+(Zՙ4*"IwK`jiJVSLH7㥍:l4>*YK#!xswCmav}SŜiQbwwUjiQH $DV+:<*#A_^{{3- K  C,j~&lʵª*-xP0|)㤦d7+Z4<UBA+OnF]]Jx4<$FYTWr2!* {a %.Sa0'NH-؎Nө*RTKzh(k|,}d<g0Y)|]u S)#0`u˦v!ʋQ[6J\V2-E2NX'fu6[X(A>-cH-^1̸(U\.['Q~| Um^QJJPf><%Ais!kYt#]6],>]@\*Lh i9KJ3; qSw1E-jVb`Iq^7EVˁVhtNdڲiSޥ@<L'u|D;8}-g+шX'z]]jk$i6҃mxOT4TR;λYrne3S;Yu  OT`m]ś%Ѵ-"uP*)]>mêi_-!Lw]_ R I-#KJJd)| ^r!$ t0+aKv֠%`>fR_)RNEj* 6сwժϯy/[,ڈ?t %nςoCn6)eT)!!n$jLH,£' I 13bHMo$@@˷]nvm ::ÝtTN6꿿)}xi`Ϻvh/qWxӴf@ m-z[|Ua^wcjeҗȢ ?+r .Hijz+Y*^eyχ1<Fm @N6t/P֚a0Leȓ>v"Z!`<.i4ϙc ѮT$:4~o*o%J^4~0{ST-lOvkxqqE:RuB̑aV5n8z%m8&c&'ejH}jRAOie9LyuO`[(9D* 2@<7p]UxLJb PNok B+{V֖Jh{͇XV_M3c7%jQt"6"qlRLaI}$Ɩ9J_EMN?p + RwK nr:hJ=:~b868u2;|~&MgC~d{V-^ԥRn #_1UXmBJʯ+U A)?$'S~PT}Xdc h{;TR+$jtq%(:vnӃJ\)@`G+yyA0﬍O@>V; @;PIY 6Ih)ԩSH6 O ϯh§6O6&sMjL2@$Γ<XnViĞQbJJQ'}K bڥH˪3qWtƿ* 9'(-0)$x$;U|8׆#p>JCL$z[ -᧵RG>v3G8l135NttTR9DeIXsJn:תtE4& {{Jb5a2הF*zr@<U o@o^ @4$o`U<3[(H$nμsW;;𩗘8MHfxB Z|&Q7NgA}MsմxP\&u&f4孢^fR-qf M],S%j*ZRB`iD-Nx]YB\+]yٷkb'T^[E/:R>/? m<c%޸*wVP|J<(Tܕ8NAsRbQR}>^U%,,]#AP$FaZ@2w9I+4-TBkK\KCD(\]lRHKAH$*:@jCJs);'O)} AR| vS`1յFzJzzgiP5I9.eF'wphԞ8+V ;XC eWeHԒ9Z>=qmR:Kё@yKtḦ́Sj/pKu=r P9CHVu`n-ݕqό+-]. 
B+Tnч6g<彚ƍl9nmn N&=_N m$x ^צ=U l [email protected]_wuB0])aEQՆ 8PE+Zk6qo,M 멣s7~cD&H&`iטa@|rLd5yKԘf.nuR D eh'2Nzs>N1.)]כ9] AY Lg-aOyQРJRYsBgCAke 'xf6|n c ɻucVqkBI"6O?9ۆ-zeQNj*)*ہ49-#k gٟ JRt5DSYQ:Ho\1{a*Ǻsq-:'/" y,yo9]G;gB>;|eˁW\P݄Sީ*+'@Vh<cпpʛZE+3Pڲ'YPO/odN,0.ժX0dd(zY(S|7LzT(~Uփ$k{Xn~@!"z̓K7ֵEޗĜb+\ 4.$J{D(vLNvEog]f%@eP'A<-Ûf]zWVa{yq||F ./iD vsД62#VӹW0271}>z#};';^ m]r.ZKm`7I"t՝5ߵS Ԉ|_{+=T꿅Vނ8 \; O#n{qۂg~3pMsm/]=ۉ"s5vc[sƎI{fcR+4-7PJ}B5Q!#myo9z^u ӭh+J՛(D68Yx0Kͤ!K) I7>ü n%Ҕ.xdp8UOC(R\mFC #r>V׈tG@zSD$ )L { uֵ@FyǬovI]E0ZRkLe<>[&Ep^%`G=Fa֜eN3*$&zr[u8>;%$=D͸̮kN/-m-9Ddث⻑LVS-qPΛ_+Uw2\IZ$xRtA:XTɽ*AWxsn5)&Ymdd\BgF#l ٽKV+kj҆R&bDץ;غT*W)SRu|YJfX%jQ0socV*0 5#[ST_SeP쯂u{!~`c]4_n-i9㚙好ݕ Lk?ݧiE:BHmzXUT;[M\ 4<8s.ޏs61jZkx<VA* muWPuäe'Zӻ:G;*R`Կr^ ujxFqbN<#4u뭃_JCc6yB{TQ @ً}Z5$7k;vxk[t`ECURj)io9BR<N0ڵPUYFʖ̦tnaKJ%: m1Yu ꦭpÌԲ#76؎!]V@{ӟN"3!JJđ"GN[+t^4m&LѨL$i/)(FVI'6W{'U?yx/ƕ/{R}vFew#IR"ҋuP8ڵP%e>˙)J6;<#Qkk.I)q@9>F I']i+SwkfPɐFk7+t؛ZmҔP\Hib164RH{ɶܼT3VJ2$NjXeh=--qȐO8}i<AX~ _(JJ3 1'{@5lV rU=#N $ɷ%O@o ?vԠK^B gq_.r;jcۚ=rq ]PZH'*.aui 2'N+PkCꋏ~Q75yVڀP'?܅-6<R`f'8& JatV$yO-w<E<4whd/nȢiK)RS'hj Lf}ziP Io3v)!(.JublsPrPh_m{ [5@d?6gW\۩ۀAPqF֭d B$]vT6X:~i=̽]o@JSh,*B܁'ucAf/7)6;C ]XPT} QV;}lTL lwؿ pmmT&$MPYB 4JTR EC5eDuǪ%K~|9EBT;7 =E%H tW;DczS&H6ΰu)34~oSlmOcshR@Xmo˹w]}bjk]Ahʇ-~VAcoj)vZltyo|.1"%vRh.;E~k׎,RE%Ow1DaN'ĔjS^%ufX*o!Bd'skJU;wu0-hdmOKtڇ `g{vZzXf|36x٭~TJmkkAz3v%ҭ]AGkq FԺ26Xq'"7%̄(%%"fN7/һ6:C J{y׵7WEp*R əCl7iO R AVu | Td;pmC{v'g4tPX@))OHN̐t>c[TfH+RDfLFmxc a h -PW 뼸+ ;,nhzݕ QZp%G;\ӭWr2tnGʖ{7"@*|JJ{L{~ݭSC΁F|j>gO1Ŋ؛4suӖޫV`2s7;Av\Av]MHaf 'mRzͺG^87].w;r9p[7I \iZIU-Hm052:[1w/(g*u@>vxD߻Zn=,d6{{(Gj\ - !zrQ ({\5OwZť"2tfU4z-ťJJvߤX]5NS?w8\؟!dIzu X@i@>7e9Q{"st+oJm +UM>xu||Q)u7%M[ҹUwE"zIS~Nômtnʥ-7t;ٜQp4:jx P:"v܌#su.zE]1AT# jMMRiRiP@ 0 p/z<b*"N@Vi!yzĝDLbwQ8s*OQS>2 ']zm+[nq.b|gZdw5*A؂>*Ӹ h?rcqþݗ&zQUw(,$ I N #(X;qWH UPҌ -/<zJ[ZO2:5mEVm˞{%;˙aAH9yXI$| <]qZCվ!]ij4|y2 L86*0#8i.Ja\`?sT e. i.@fկqf&Øb*.)1:r ]X*JBǩV*hFhH%5˝:H6ШDK&tN״\Cf^7+> ,<kvNiRA;`#p),7KX6K㫥II%%Jy:w//vԻ;Gpƥ+x\gU;%%*i;IqYVj[b:GUBy])q7WŽ fCQCwgXh 3u2u7bǞPu vEêg]W\-Q?$=F5 a0M{)笟'02TT曋cB一(',N[̒uIA 0:$hNYJ<UWZɉBd0}śъ/b]!iu*9t$>!:2#dK %I)[)SPs"[** {{*yFZC!to7U6 P'XD}֝l0T(1"#L3a@1난@T<*mWk҂uuќ3HII$gQA &;TV] $^53/T4[%  D`yeN&|q) #ltZ燙ߺ Gv$uIOXUB`9\$CU-pV=u W^-QMd9j*A>ZT4o(MpSR䌣QbS zU 鵣؄.q*߮iHsr"b{I;JS1R%@+Yk;{PRI#Ryy !T%,)+O$FRQ*I ZT $ڥdq&y(ךֶ U57vj G7+hn<@>*kN+P@jUjikYB(9ZZq2w)w`*+MI\\7fCYI#cXw?*FQTPPc;?n|Ctr_k'+T4l1u-n!i>,Kul:oӊg W<#).p~%߂Ë.2vWM%nlxԨno`[ XѾiy%ĂڂFJV Q ?W;xN!7c G65RU AqPKZo}xꨠuIs_W5jdH3RI =t6ji)TT ?ĸ隫 =SFRV FY_Mzгq]Ť4\89S'ӝMX qLHuejvj%n9B`TU|3)h#aɂI- j}c?MH5mB![HĉM߈(uk/Wu#||YS!9I@NOZ6}L"(賂+ $(trIJme\9eLfy.cbtqKszGKXuM$3mm-:[Z]ARNƱpٞ7p32'A`M:^`JI@7ZZ!)C~V.*jC(d2BNk;5W*x*~Ihb.ZH"-Y?xWq~.*gա Lg1)B-ڶZaH@II;?yYB)\R"|8as:C6 vX_D!BzX҅nbG=,ܬS4Ȝ@usc.s\y:JVvN3Ej-uSy:?-nm@q1*QZ3})i@ rbo0ql@st4#|8.7XͥҲNT|'k]WkHw47McSa6WI*Lc=uJj5qJ-4eq`eIBc7"<Oߟbɧ R k[X8릺hU+q)͔jI~{nUƒI -Zf2a]cd[vj۾=Z~_ӭq|6+=|$4x*ue2ZГUL+Cٮʗ7RA]*`!!#Mqqrik#tTJ9>OajnRPBNnv:L'V(HUzk87uW(o(&nxf=bk P!@oR9EgFr`Sl{*[ w奸kk8&m5F{Zץ=™NG\ 9J~#XRXRs&GM5ߙE[]RԠ;Njw0ϯY!a_q=|\=\>/-;_N[Opx3m{KzyZZW)<'pGAmwpXs7UMv 4ʁnJe9@r ]UR qGA:o8wP]n?AK[Q(z1 hT.J,k%D>*n7mSj}]VV'@@)6M YJ`!"} (J^E+P|1$HH!9s ӻꥼqmWN{I( &"R#{2\K}`@M\BmŅnj JTNc#mkX?:9v4[ e]:oX.aw*N<Nb[Iګm zy:vcn6`6-Zj9ڢJy.fq'kq!l6 r9<w~GJMukYZm@H6 nY4lԝix]Y7)5 擯/Rc~\ U޴u%9)%ZZo\֖]ɧKo @)Ӿ/ ]<j_XMk+]y.ARC;$"p"Ϯflu)L]xUb˒^#kCha.wNmp. 
b+_qau>q(CM9ډ;oDžjf':Acrz^=o c*)}UӸ%mv=Ep, m9uѽ+8tv4ok8V*&y[vJcüHoծ] uܩC];jmQ:.R(UMUWxl$ReTY9d]8h)71;뙪Yw h(1ҹ[^\A˝B8 Zjf]jgN-*m57 h)-th?5JnA%2@I Ngi>\Aj.o ThMbab<;Dc[prRp^(03 #+PL _5ko؆i*XmԜ:BT+QyjaOxQVc Paf)QP3ILʥ$mu2%Բ^#}J׆'u"0G0B_KyrIHGw]h&.{M3iZDkou-=HU*Y^(*g xbKI}! >&ѩQ`Tk).F_UUp?^QVڊRgRu~VΝIt򎩓9QN¤.6^vUIKuaf/vA:E롥xݶ` ON_9)0,Ej\$%]ImP>yFzihnawmw0ng~}mTBkܶn!)qrB@mwۑ9ClݸNP9 ĥ&}DB望tbH@$忘W 3I͗of(mn Tg?=GNieΐz"df`;iaԌ rEneVLēyyD,|ZDm.sʦ$2Jj-~fꂘTXd)"c7cmEDB|d6\BZuvZRQ$m"Uqܛ#j BNb3b;RHyu9[Y*PTc}%CHqj OSh^ej.wԖH?.]RNW QlvݔA2;\̩-GY&q+lyPO_Nj!m.zF[rNM]<6Y!+?_afE'0SUSvuCƴ#2k rJoV]raPR$I2C[]vF1U*x5U\(jSc;d#z҅l=KR!ĐBҬO@j}G4> V2qն|_Bo L_8`禺m "]XHO屰~蕜\*u6u7x j[l9jQLG2WacSA+ ]r?d ?~C.9i)W)}©_],ظڐꖠIuiMjFRQ;T@ <o{+Z-vf0tnmP4zlKf=MbhB!D ƿ+2@ꑼm4%D~5;N~ #<a; 먉hCN!S $hė=HԨ$;u5XMDI^Sf%4uW83<G^< 0>޷bB҈u>bvmk VTR@Ԟv TN: J|ʾN͜C)V:XJ-a/Hp}yZI֒<^tB਼*] ȓNL5H8I?HpQ[!?BET^S QsUg#Mj. ?>+VU+3 :筂(zB[@RLbC[Xv)K]3&l:Hmb6/vzGZC_A 'eJq$6HVbtԂ#kd5dnQ9M6*-zmHU /9QIvӄ#*t1}l -S6k:뿦ZJ[6ނyI@ƹFU(+\!yd:eW|RL d ?V *RFkvq@R`RJU!zD I>vbgG;CQ%%RuuNVeKSrvakRL4nqz G2$]ZiBEpRTO8LyuQ ;]M켦RLk:{Uy<[gN8#@tF>6\K+xL h5.WKP|&hmZa\7Eƴ8'Uk-kjͫ.)1Zv<R jzijingqP¦,;Q ~v!O93G@GYΗM6z,Q" ?=4o0~]WEXjuu H$ lPrEi 30|ȝ7ߤ0٣a+,ڧ[T-@9wԈm⭚su]A3 <$4[zBCTgbL9k;Iu,[ɞ11A<ōQ!@Wf+.k *ih*[*Q?ʸ$6dbj]NNPAn _jLE[y^KM[2Ӂ=G @^:T]ׅmnvj @Q1"Ld9Ҵ2w^PTGS ڜmJnRDrԟ=9Xj5Bx*<IPķuuFf6Aŕ/2@0Bt"ߤT *7A:<ūmĈYk]<EׄmudH<657v*ixW^ %Rdp*4DNV*TTw" YG$Bloƫkl(=xwjmYT#+]I!$8T<Wi1-v:aJnapO0쉅!RnEiС R@Q:tl؏8M}W *:\*% :H4>1x|ܕiL<R|E6<ѮNE}9@Uv~f|JqEʒǔwa[,R76^͗eNj2mu]Xy*,8wzj̩]ڊv9]yۯR@{6 #6?7)xGOq/ ;ڻkKu)M<s͵& EI3ass۟-yx+G0SRvaC$>vtޕ"VB jI&-p9K]e H 'kkvS#|b!QMwqUNNCRrْ= M[']1}pp{M}ө %jALuʧxݽuOB@8x Xr̢6U '"TD H=t:ur T9s6=')[b\b ;\ iJVP\t|)'AojN> 2i.$@#_[ ,8{'%iЦT<*V`Иc^+]]x\wCQ(:tG۝blTg/ϧg +98HRFO)[- =h(j %!11m-,EM((Ru jmj)q R` kd|bx:YJuyl+!֟n7$(E\Kz( 7_JRIIXRgYV-in!J(B؟Y=eZ< h?j]TsMJ6>KDX rym \ʴ&$ =[=R[aYPYHFO._W8Z0BA` ``kMڽӶA@}فMyuH aJi9rm'FZp: ''yioCCnR C.h"}>"V3a1RrT26F*ZC=Ok%ThG(K*2unCe H%nJj@&FDi0,lWV̼KMuKK6`kr0L;bەCj-8.XʰC4!#Uq} J@L:?{vRAFm3DrG#eʑhmeSBR [j8 g߸)_WbrīoʍZlM=p)7Tux<O':']?Yó|e;Hdtu#5ιs6*I*7-ϗAiO1S ZOp=!1uSu-Y)N|7~<Mt9U-Q%'^[Ol޲%IAI~y~9"`ę<3Y ZY>"ot1ܥ&i G1:>:D2MIG;Kyϝ/T[C[@[ظlJ7U0-Nt=-ev4'񕊚RzrWϬT$3ضtj+ ,͂'ESY$mJ )K`cڋ͉4o7廽I8©Z+rf$ )g6wN:iԥͱo0CsT\UD<mE\ÕU;@W t4oũmO I+YE1?wXoh@w֡Yιɟΰ] EEBXy)2>[qJ\ScMDypҼJ` y-҄SwAw`eI|+D8AJv݄* 2!iiT}jLwm ] Gyšoq==(!#AGCgW*u iy)V\rir 2Xߧ{;eqq:,d+uBU@>cO? LBSS8j1"A? u]Fv݊"B v.$0u }';Oυ*GpAケ-Ǽ8iʚT6#(zg1n˾5S?O=̎t<yĕ )+LH͹??+ei"Pޯ)&> ,%YSeU1!A2QO!/d*Ni~Qy^vڜzr @^euU'yJB0_OSzR2ZX]j{:HWݫ*F9I5SVҤW*m泌-mSyFO;jCuӠSxO+A+JJg@u}IHχO_omb..\8e3S)>'q,f4J-L-*H$NzwN #{,K31M!u@(eYLO+7f?ܿo+ȥ]irh~-bjK-wm')p"fgS 'k֚ ΕɘE2^͆,q몗զ7:CYBcm$}|xQ^ H V#SktRT%ߵVPIPOU0&鐠<s# Q:%SsIJBˆqyl-W+؆O a83p6 <ګ`<1)!'PC ė.v{44ד$ N]|[|$i5ىo aj+m*nmIRKe25:8={9P_wKهaYi)I g2 -[mJˮ; a#]19;S5xuCM~w fs~1+/x2IL $n'U$h :m"1%MC|g~H))=QRa*mkӄFy=wU^%Uu}+ѭ`H}nFag;UbWy7"F DF% ۍ_t dkom|EUxpw4';pER!Y³fO)A&@9L4a^Wqws׍ʼnjgҍ`HD$mysn8 7J:w** DJE ;IUl~]Suxa;W{)s.HVPwܷD=S5$m0.#b\L-e8UiSHijT+mXtN _AS.7kbqjT&`NG5SRk)T> Lw<lߕS>q;pqxjsGxĬ)*I+$Qv(=AuL}U2+uk!)#`FJa=[|hCoB„u9Yw<y=8be$-DFzWpUL Dd=#_MA,(KLL׶Al-$5 AZkns~SzkOq%ɇ̥NeNF(.}TQ^(Rgm$~;]8C)k]z!$~Z?:* w>xT/5g O^[5h##)ͧ3KtH Z@ J :i,wLG%h ;OO2%!I*US=m.sZ#hh/R &$rzY*Z|+ T yw(֔0܇ 8/XA߂C% GG>Vr-܍#Dml[ ~\x%$m2XC%[iA2tk)-0 &dVVxA-DAWם鲇@lbslgPznmh6QR]-[F4}b*ܹ MfԮ2RN]m?v'"ѝr(ogmCJȲRDNl2*B@Tեx6fS\FoK{BwG]Gv{9zZrnL 4T۪J ΢=z؞Q)$o0 3(Uv&dESK7tfݢ5u z#m+BHou[W ec!m>(H "GC#en ,okW+>'HM5JِDHAx鯻Y~ "BUkjﵦ'y^\j4--C_+Qxk;PPХ:Zs ^i-wK1~A$f [\ fH :_+x~$քm?%ӵ))Gf XWnkery@IƟ}m{i+FT4P14{6ڏxNcg㯧K'$FLמaL\IIX$:kkns Kt*R)q$u>9V#k3 ,%BFzlLJT]M0RT$/c 벙eLUټ^fT6{FE7 ^z::? 
BA&Hb }&샅*liORԶY%JTy@;n'Vejzmѽ5Ynil*>)Uӂ*JPS)nt K`Kk>ޜHΥܴ$$v8WV\VXL$~|-L8 Z]T'QZf\^h4az ǟYKTwP)HFdg7M/;xU%))I0}Ɏn0qn`.G7;׹W{S i5:kbTa7YH'wvK@XJ`iM⪪RVHLmXGK(kz~+b!Q nw/ m"rJ8Unꖠdߥ8Eu& % )I ?[2$,QGZ˩]3 !o^ĕm)D'c5n WT7!)*8{TR2$AK5hH#mMNjˋq-one 8]7Onz/: &-ԻJ7hBMZqw[J[JSVSAI>ĀVc@AJL /tYPVZ \S! "&rk:$1@ܿ3]y.i< qŀiAF8w}YxW0'O{ZW(:}ks_JJV0Q0[r&ӴMVq$*0rα*<gux鵥U׼R'JrQ4 tx~wIM=={ZrTiq7vE]@%'F!24~VȔ4).ֺ$-Q$A*TT+M-7 hm/w`,ؓw?2Wʯ*x[mD@頏nvqUK)iH|?˞nԦ:#"5INE|֮_5+q o F\-"J\7}߮>6B T  [ SJuݣ&WrĐ@<W;>5uAu@Ncƚ6[N\&hV&Hn=ju܊k+_HsQLju;m((f*PCͭ6JOc;p&RP9'o:]umn O?j#Eip jXn׷3w0[9|2'+ֻ S4omW-UBr)>8)9r:*wۮVZH iD裤ztACzxB˕SbLNۯ -8&8 j0<\mYr_ o*(* ycP  3vVwF;O]5iHUS:BP0m-)؟p_fQB^kjL!= ʹ9U%Ƙ5Ji?Ak ~=mnArˇUd~'Om1r}wqŴԊUC RPDg]vOLm_oZ/fϵ2ҕ,:PHH>B-n=]7} lݭf M|$<bR) LaBms7QRIqj!$5Hs\Iӆ%1\pȯWXJ.:Ĩ,;2bRL t7Ud2lFBc&7Y[HgYM߭]Uj䧽L+#b hFf)V5Ρ(%':f;zkEꤺQJBVp0O1"Nj5Bj"u uj.`u4*Hmk%:6 @뎂XʒV%@!'4|-z^r騿+j-4<Duym; S2VP| e}% xhi +UMAS N EGQl'[zʨ0l: yc+ϴ[i[R;u (΄O.{p -sYqANU3;iY0:͋~iיQsW?w_ 0ˀ[9?ť)^uR1-fO]ҙ<=W<d'9(+%GQlwr񳍓54lJTO-(񄐡=c u%>㕜!',-YxNSeT~)*)G+AT\E!>ZN(eJDƓٓ ꈑ:AZ:. Rhʵoo-I%#Sn@ im,P*ii4ꃕiE45Ke D;6ko#Duߥ(íB9@Zv)BSs$$5Qi3q,w Nw:-HJe'hdyS6FPeQ66+)Av0S$kb;DZ$:X RFm}}Cjc4xvfWSLhIGLN#A  <+GP@&"uZ R¹uXo(TLEt4@$)WP<?BA 2{Q]źpwsnU8Vn(F`ɔ8<(ZhO0@,ŽwhW@Ds/{|;{]4tldgX筙Vq\N.!HJҨʝ@mok~<JT:$5k`<SVԯШhL  jkPT=knBRRQ po+_8m{PAt*]bwןʋ}.:"!g¤GmjkJ!Ɓ`}v,2Ĥu/.p.2M=tjѶ&I"V'#/Е&Dg/q]-eWULC rHH~6߽SOu]Xzʥqa:0]ԑ8 \͙쨊66|br@uc*T$;O]mlq5p 9|@ a&RR╦ev.ɷkl$klZJЋ8w~ѿYG/hSS~մ+<3\x^JyeB^!磊1V5U>ӊ9 cQ1ŧڪ?Nh2kcvB؀-Sh Dcia 1Ȑu<?Ma˕WKT:37|œxVU&v+cZԓsN)Hm$+2Du76+r7=I6 MuaHduBT H徖q++ xBD.sֽx!S#~N0yA5c[EH-ru<`!A@ 7~\qJK% n*Bę0<^q'ݩJiR (9 ko.^mҴ@!"NC>A4r*ҵPL(&L:zD*O|^D$絰8ya5uXgZVV9=Nm-F.euUՑAD|tb׹E]bcŠUƿ"/%m2 i`3 q ΢$LOkq5{Rpbfu\Ei.ȫJhR|f 0O=N՗+E;i\I3 lMjbt:+wHV4PtnmN#a<Rԃ (m{z=UO+Q-ieq$miY:NڗeqS>bWBgB,R's}`^uWC0K :㮶TMX)P 㡵d yZ뱆 휎ħCIɗI'jn5crON4m$HF=o41v4Μt-T(VVݕL9sumTˤMD<IBLM  St/Q$Fv˩55IRe֕!&3iثU\Wš[˜%82zΛ s<0uw7 gxj #]:y\fݻ8*()ꩳ$-BT|S+:c5\)0~24 LkHkCt Lr'=a[5^JJIZ'q5*4&SfgHMޚMV<⩖Q!.eRv9GNѼ& #U ]W6)B]L +_np@#]R<63'gJ[b@eԜ*3PJu+r\ץ6E@϶ѶӲ: O((ue:W75hoDԂYs('Üe`s zbnϼrk TQ߷=BSVJH|MHa'<[- ԩ2xzj/ePIWO%Djv$xsqw]vޔWABZJ^BUjXBXKEzV.t,)),G d=¸۫÷㘉s<% JlHkLe=UN=":⩎|Q|o7qẻ5N{ J"OFf5R[ BG9F-v6\;WÉuL4)9J%H)1=AC^_~uĄ%Ĉtn+t_~+Υ խ$m$܇ R;wyξ5s"6_JۨhXqTF<-!ko+;7t)}%*0#a 0kM{jW7qNA2$NY8c7!Ĕ)TYeF뵳qq0ʡdBHtzX#Tjq2oE4E6Є<;I'̝Onfc)>nmtÉݤ/p VP]MK)u S{)Sa'}ߍ wRI 2 kkpca4OE͘bt"m`]w5& )Me58P@iN>$aOaQVᥚ<Nn &n)B[% y^b+((P@4뭢)f%)Ve*lG!-%wT'2DA)H]nm+%*@)Lflmy>#]4ƙT2I@* "UTj'BRu~,q):\ac\左tt`NY7|e HFrD@}~6sLd3@>޸tUrE%•&$j58Ҟ +0'4 6v$ kP;ed&|Jd2[lIF޳o!T;+XM:ik)ʧBPRu;O4CL'zZ<B/)gA˝ڒS4ɍ,=H2#+2`맥zHGI7 ]PnPs6* Az90ml#v@|=%+;[*PD Z5UǡV[. &H@faV0 {N, bwU%iNx@:it i$&=rHЀ$}GByepL fm M-{\)%Vuʁ>G.zJ/'U(rzԀOe'8]h66Ggb4Q&J/*BJ/I=+CNDx}>s^BT뭡/J [$*o1wI+/Ւ=]}46jjMg@4cU*ˆrh $DϞk`ӃQ 8R)0v?{K^ *J !S }L XLl2 ×Y33e@ /QECst4Ҝ_w\m«}Pܿߎv*2 RPT[v@Up+pd]AH?Ks\ b|Q5tF4tgڃiGS ٵhw9yۘv+v8UzUέ#!2`Dt;XcRNg**)m|R5٩w QPVAINL;ozNg3$M~gX}8RPA2F6:okp^wjo+ʜ:Nnm \nb[)!@ƿ<w߃.%x#1'H:8A,vJYe}PaZ7!ldL|ڬ:!K 9JD: gA:Sδ y:G[4VWj!DtO2~yZPje uX|JP:367U.kFL(/hxWPݴP{)'ü@fa\5`rӖIAK(s$ߍ+u֪rIHPLs4ZʦzcRjgYxU}jRJU1 6!]*R+-fvqд9*ZjFL3m`RdɏBzmk2kvݩZWjm*CxJw{}j%B$jDd-evtxuM`lyD v1ɘ;ȥ;.pT`¹P^J$MۼM. DNeGTAp` ț 7Z['xETL+)F;:摽eiZmk̬:(1xp  剘[)vjiEV O/+j.QuP}++nkUi>R8 J${fXaT]MC N:}̂GO}2@Y ĉO/Gsi IP-opؼMsn/{y% +={]=Be@1;F5rEEmۮ+@ I $nm-j?x?^pog,$*whKEuxuYhDDhAdi}m%0 pRX= X;_IR@;IQRTbd.IP^̟ګcU.=?NpY@tf#m&gKLᖮfYG! 
P`ǖZ캯 F0E./qMy Q[4HLy2a9łu-,i9 @lA'Pm vT ݴHVdD{!xmvI/>\㗐HKJ2*tVxל;ۯ s,-Gp^ nK .9Iӭ\8ɋ8)YnjG~tQ 6pB了8w$U3!C`iP踎V>8(/تamYKe6ޟvy<ߒcXh8"VTZUL<锏+vظqsq Ԗ'0Kmrf,jQ{vpc}pmv)4!pU*1>v>GX_qOyv:m֞vR؃}juT\4sE8` Oy﬽Q{=BuB|%ifAg.$7N 3St)DT VC I'N⻣} w{ַ٥Vd`gCe'C"NY>q%ыъGQT뭢XYJ$)(9O,-,7抒?S֨n//$L/vӿ}NdQ- yOb&-)*q! /9IБt:T<po륁wrWӆ*RT9s;FoW}W8KT.m5-Z56|ښ _ס^"p*0*IE}EEm-&I;OH#xMºRA-r4mÎe*3d+)BH]x^G2g% :ۛc8%:.a1D]SxC] ]J\Z͸b }h)BL'Al9Y Q[w ϕCm%嬍~6K4~gd1,yJ!EtH}Yji so-Z=44WH"B:K+}!ED QeR3jAH.hRvY6\+m0bYYW{tIo֨Սh="д! 4Qoe:SO ,ڋyYQOFj$[[u-3{_Z[! qI Hf&i*B1B:}4Rl X@KDePT:(CcB@Y6(o[eBTe@3pB U,]I@PQgc[%?Q-#a?K/ Ϩ_!qgKL=LA" 'alUD@-[&?1OJW =-Te igt[JIJd"v;O#*"bÖMsC6 ~ t^&IZvqŵ*ƀ}ZdۄSqIx!n?bw5[;($BwRP%E^(Q:N>6,ur2)J g,\dub:}pXCžy:X( 7Xq)m*s O^[uu%aJ$ ٜNB3T:@ O3<Ե A)EJ"O۠tR#]ڊNX"_z ]5WJ%hX<lT}ky{\U:޶B9mFmƙ.5a'Cq_AVԩQ L:mb9|=UVШFHdͩmgScYl5 XUL"H!GPvl{ [tcנI ;>o-58{[*HY Nkl#Sؔs ޔNBF#H? Un}-KLhZV%I:$DL͈h0%-:!DRӯM[Nۭ!l8\Zs6 D=뎦P(ȒRֿoi ) r+q%R Q)T ׈ە>ik̕:x_gP "G?o OV%ЄbAp|ﵽwo=%IWhЌOX;m1vz,jsKS~>;R9HTD zo8fw6eWu%?OO?h jJRZM=|ͯƩ( er%#Y. #1uIrk$!)KZ2fTy xUJyiV#qωmuwJA^:-G.5׵r\%N(d#񮟭ĠϊyQx?T\^BaܲA} <SfMIw>][Ғt2tux۝kh4@BҠ9xkZ ۭT/D:R|C}m*ʡG* WVL
-1
TheAlgorithms/Python
9,358
Removed redundant greatest_common_divisor code
### Describe your change: Previously all the files that needed greatest_common_divisor (aka gcd), used to define the method instead of just importing from Maths directory's greatest_common_divisor.py file. I removed thos definitions and imported gcd method from Maths folder. Fixes #8098 * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [x] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
Siddikpatel
"2023-10-02T05:03:50Z"
"2023-10-09T12:19:12Z"
876087be998d5b366d68cbb9394b6b92b7f619f6
583a614fefaa9c932e6d650abfea2eaa75a93b05
Removed redundant greatest_common_divisor code. ### Describe your change: Previously all the files that needed greatest_common_divisor (aka gcd), used to define the method instead of just importing from Maths directory's greatest_common_divisor.py file. I removed thos definitions and imported gcd method from Maths folder. Fixes #8098 * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [x] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
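A minimal sketch of the kind of change the description above refers to. The import path is assumed from the description (maths/greatest_common_divisor.py inside the TheAlgorithms/Python repo), and least_common_multiple is only an illustrative caller, not a file touched by this PR:

# Previously, individual algorithm files carried their own local copy of
# Euclid's algorithm, roughly:
#
#     def greatest_common_divisor(a: int, b: int) -> int:
#         while b:
#             a, b = b, a % b
#         return a
#
# After the refactor they import the shared helper instead (path assumed):
from maths.greatest_common_divisor import greatest_common_divisor


def least_common_multiple(a: int, b: int) -> int:
    """Illustrative caller that relies on the imported gcd helper."""
    return abs(a * b) // greatest_common_divisor(a, b)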
import itertools
import string
from collections.abc import Generator, Iterable


def chunker(seq: Iterable[str], size: int) -> Generator[tuple[str, ...], None, None]:
    it = iter(seq)
    while True:
        chunk = tuple(itertools.islice(it, size))
        if not chunk:
            return
        yield chunk


def prepare_input(dirty: str) -> str:
    """
    Prepare the plaintext by up-casing it
    and separating repeated letters with X's
    """
    dirty = "".join([c.upper() for c in dirty if c in string.ascii_letters])
    clean = ""

    if len(dirty) < 2:
        return dirty

    for i in range(len(dirty) - 1):
        clean += dirty[i]

        if dirty[i] == dirty[i + 1]:
            clean += "X"

    clean += dirty[-1]

    if len(clean) & 1:
        clean += "X"

    return clean


def generate_table(key: str) -> list[str]:
    # I and J are used interchangeably to allow
    # us to use a 5x5 table (25 letters)
    alphabet = "ABCDEFGHIKLMNOPQRSTUVWXYZ"
    # we're using a list instead of a '2d' array because it makes the math
    # for setting up the table and doing the actual encoding/decoding simpler
    table = []

    # copy key chars into the table if they are in `alphabet` ignoring duplicates
    for char in key.upper():
        if char not in table and char in alphabet:
            table.append(char)

    # fill the rest of the table in with the remaining alphabet chars
    for char in alphabet:
        if char not in table:
            table.append(char)

    return table


def encode(plaintext: str, key: str) -> str:
    table = generate_table(key)
    plaintext = prepare_input(plaintext)
    ciphertext = ""

    # https://en.wikipedia.org/wiki/Playfair_cipher#Description
    for char1, char2 in chunker(plaintext, 2):
        row1, col1 = divmod(table.index(char1), 5)
        row2, col2 = divmod(table.index(char2), 5)

        if row1 == row2:
            ciphertext += table[row1 * 5 + (col1 + 1) % 5]
            ciphertext += table[row2 * 5 + (col2 + 1) % 5]
        elif col1 == col2:
            ciphertext += table[((row1 + 1) % 5) * 5 + col1]
            ciphertext += table[((row2 + 1) % 5) * 5 + col2]
        else:  # rectangle
            ciphertext += table[row1 * 5 + col2]
            ciphertext += table[row2 * 5 + col1]

    return ciphertext


def decode(ciphertext: str, key: str) -> str:
    table = generate_table(key)
    plaintext = ""

    # https://en.wikipedia.org/wiki/Playfair_cipher#Description
    for char1, char2 in chunker(ciphertext, 2):
        row1, col1 = divmod(table.index(char1), 5)
        row2, col2 = divmod(table.index(char2), 5)

        if row1 == row2:
            plaintext += table[row1 * 5 + (col1 - 1) % 5]
            plaintext += table[row2 * 5 + (col2 - 1) % 5]
        elif col1 == col2:
            plaintext += table[((row1 - 1) % 5) * 5 + col1]
            plaintext += table[((row2 - 1) % 5) * 5 + col2]
        else:  # rectangle
            plaintext += table[row1 * 5 + col2]
            plaintext += table[row2 * 5 + col1]

    return plaintext
import itertools
import string
from collections.abc import Generator, Iterable


def chunker(seq: Iterable[str], size: int) -> Generator[tuple[str, ...], None, None]:
    it = iter(seq)
    while True:
        chunk = tuple(itertools.islice(it, size))
        if not chunk:
            return
        yield chunk


def prepare_input(dirty: str) -> str:
    """
    Prepare the plaintext by up-casing it
    and separating repeated letters with X's
    """
    dirty = "".join([c.upper() for c in dirty if c in string.ascii_letters])
    clean = ""

    if len(dirty) < 2:
        return dirty

    for i in range(len(dirty) - 1):
        clean += dirty[i]

        if dirty[i] == dirty[i + 1]:
            clean += "X"

    clean += dirty[-1]

    if len(clean) & 1:
        clean += "X"

    return clean


def generate_table(key: str) -> list[str]:
    # I and J are used interchangeably to allow
    # us to use a 5x5 table (25 letters)
    alphabet = "ABCDEFGHIKLMNOPQRSTUVWXYZ"
    # we're using a list instead of a '2d' array because it makes the math
    # for setting up the table and doing the actual encoding/decoding simpler
    table = []

    # copy key chars into the table if they are in `alphabet` ignoring duplicates
    for char in key.upper():
        if char not in table and char in alphabet:
            table.append(char)

    # fill the rest of the table in with the remaining alphabet chars
    for char in alphabet:
        if char not in table:
            table.append(char)

    return table


def encode(plaintext: str, key: str) -> str:
    table = generate_table(key)
    plaintext = prepare_input(plaintext)
    ciphertext = ""

    # https://en.wikipedia.org/wiki/Playfair_cipher#Description
    for char1, char2 in chunker(plaintext, 2):
        row1, col1 = divmod(table.index(char1), 5)
        row2, col2 = divmod(table.index(char2), 5)

        if row1 == row2:
            ciphertext += table[row1 * 5 + (col1 + 1) % 5]
            ciphertext += table[row2 * 5 + (col2 + 1) % 5]
        elif col1 == col2:
            ciphertext += table[((row1 + 1) % 5) * 5 + col1]
            ciphertext += table[((row2 + 1) % 5) * 5 + col2]
        else:  # rectangle
            ciphertext += table[row1 * 5 + col2]
            ciphertext += table[row2 * 5 + col1]

    return ciphertext


def decode(ciphertext: str, key: str) -> str:
    table = generate_table(key)
    plaintext = ""

    # https://en.wikipedia.org/wiki/Playfair_cipher#Description
    for char1, char2 in chunker(ciphertext, 2):
        row1, col1 = divmod(table.index(char1), 5)
        row2, col2 = divmod(table.index(char2), 5)

        if row1 == row2:
            plaintext += table[row1 * 5 + (col1 - 1) % 5]
            plaintext += table[row2 * 5 + (col2 - 1) % 5]
        elif col1 == col2:
            plaintext += table[((row1 - 1) % 5) * 5 + col1]
            plaintext += table[((row2 - 1) % 5) * 5 + col2]
        else:  # rectangle
            plaintext += table[row1 * 5 + col2]
            plaintext += table[row2 * 5 + col1]

    return plaintext
-1
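The Playfair record above ships encode/decode without doctests, so here is a minimal round-trip sketch. It assumes those functions from the record's file are already in scope (for example, pasted into the same module); the key and message are invented for illustration.

```python
# Round-trip sketch for the Playfair functions in the record above.
# Assumption: encode() and decode() from that file are importable / in scope.
key = "playfair example"                      # hypothetical key
message = "Hide the gold in the tree stump"   # hypothetical message (no J!)

ciphertext = encode(message, key)
recovered = decode(ciphertext, key)

# decode() returns the *prepared* plaintext: upper-cased, letters only,
# with an X inserted between doubled letters (the "EE" in "TREE").
print(ciphertext)
print(recovered)  # should print HIDETHEGOLDINTHETREXESTUMP
```

One caveat worth noting when trying the sketch: the 5x5 table omits J, and prepare_input does not fold J into I, so a message containing J would make table.index raise ValueError.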
TheAlgorithms/Python
9,358
Removed redundant greatest_common_divisor code
### Describe your change: Previously, all the files that needed greatest_common_divisor (aka gcd) used to define the method instead of just importing it from the Maths directory's greatest_common_divisor.py file. I removed those definitions and imported the gcd method from the Maths folder. Fixes #8098 * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [x] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
Siddikpatel
"2023-10-02T05:03:50Z"
"2023-10-09T12:19:12Z"
876087be998d5b366d68cbb9394b6b92b7f619f6
583a614fefaa9c932e6d650abfea2eaa75a93b05
Removed redundant greatest_common_divisor code. ### Describe your change: Previously, all the files that needed greatest_common_divisor (aka gcd) used to define the method instead of just importing it from the Maths directory's greatest_common_divisor.py file. I removed those definitions and imported the gcd method from the Maths folder. Fixes #8098 * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [x] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
def quick_sort(data: list) -> list: """ >>> for data in ([2, 1, 0], [2.2, 1.1, 0], "quick_sort"): ... quick_sort(data) == sorted(data) True True True """ if len(data) <= 1: return data else: return [ *quick_sort([e for e in data[1:] if e <= data[0]]), data[0], *quick_sort([e for e in data[1:] if e > data[0]]), ] if __name__ == "__main__": import doctest doctest.testmod()
def quick_sort(data: list) -> list: """ >>> for data in ([2, 1, 0], [2.2, 1.1, 0], "quick_sort"): ... quick_sort(data) == sorted(data) True True True """ if len(data) <= 1: return data else: return [ *quick_sort([e for e in data[1:] if e <= data[0]]), data[0], *quick_sort([e for e in data[1:] if e > data[0]]), ] if __name__ == "__main__": import doctest doctest.testmod()
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
""" Langton's ant @ https://en.wikipedia.org/wiki/Langton%27s_ant @ https://upload.wikimedia.org/wikipedia/commons/0/09/LangtonsAntAnimated.gif """ from functools import partial from matplotlib import pyplot as plt from matplotlib.animation import FuncAnimation WIDTH = 80 HEIGHT = 80 class LangtonsAnt: """ Represents the main LangonsAnt algorithm. >>> la = LangtonsAnt(2, 2) >>> la.board [[True, True], [True, True]] >>> la.ant_position (1, 1) """ def __init__(self, width: int, height: int) -> None: # Each square is either True or False where True is white and False is black self.board = [[True] * width for _ in range(height)] self.ant_position: tuple[int, int] = (width // 2, height // 2) # Initially pointing left (similar to the the wikipedia image) # (0 = 0° | 1 = 90° | 2 = 180 ° | 3 = 270°) self.ant_direction: int = 3 def move_ant(self, axes: plt.Axes | None, display: bool, _frame: int) -> None: """ Performs three tasks: 1. The ant turns either clockwise or anti-clockwise according to the colour of the square that it is currently on. If the square is white, the ant turns clockwise, and if the square is black the ant turns anti-clockwise 2. The ant moves one square in the direction that it is currently facing 3. The square the ant was previously on is inverted (White -> Black and Black -> White) If display is True, the board will also be displayed on the axes >>> la = LangtonsAnt(2, 2) >>> la.move_ant(None, True, 0) >>> la.board [[True, True], [True, False]] >>> la.move_ant(None, True, 0) >>> la.board [[True, False], [True, False]] """ directions = { 0: (-1, 0), # 0° 1: (0, 1), # 90° 2: (1, 0), # 180° 3: (0, -1), # 270° } x, y = self.ant_position # Turn clockwise or anti-clockwise according to colour of square if self.board[x][y] is True: # The square is white so turn 90° clockwise self.ant_direction = (self.ant_direction + 1) % 4 else: # The square is black so turn 90° anti-clockwise self.ant_direction = (self.ant_direction - 1) % 4 # Move ant move_x, move_y = directions[self.ant_direction] self.ant_position = (x + move_x, y + move_y) # Flip colour of square self.board[x][y] = not self.board[x][y] if display and axes: # Display the board on the axes axes.get_xaxis().set_ticks([]) axes.get_yaxis().set_ticks([]) axes.imshow(self.board, cmap="gray", interpolation="nearest") def display(self, frames: int = 100_000) -> None: """ Displays the board without delay in a matplotlib plot to visually understand and track the ant. >>> _ = LangtonsAnt(WIDTH, HEIGHT) """ fig, ax = plt.subplots() # Assign animation to a variable to prevent it from getting garbage collected self.animation = FuncAnimation( fig, partial(self.move_ant, ax, True), frames=frames, interval=1 ) plt.show() if __name__ == "__main__": import doctest doctest.testmod() LangtonsAnt(WIDTH, HEIGHT).display()
""" Langton's ant @ https://en.wikipedia.org/wiki/Langton%27s_ant @ https://upload.wikimedia.org/wikipedia/commons/0/09/LangtonsAntAnimated.gif """ from functools import partial from matplotlib import pyplot as plt from matplotlib.animation import FuncAnimation WIDTH = 80 HEIGHT = 80 class LangtonsAnt: """ Represents the main LangonsAnt algorithm. >>> la = LangtonsAnt(2, 2) >>> la.board [[True, True], [True, True]] >>> la.ant_position (1, 1) """ def __init__(self, width: int, height: int) -> None: # Each square is either True or False where True is white and False is black self.board = [[True] * width for _ in range(height)] self.ant_position: tuple[int, int] = (width // 2, height // 2) # Initially pointing left (similar to the wikipedia image) # (0 = 0° | 1 = 90° | 2 = 180 ° | 3 = 270°) self.ant_direction: int = 3 def move_ant(self, axes: plt.Axes | None, display: bool, _frame: int) -> None: """ Performs three tasks: 1. The ant turns either clockwise or anti-clockwise according to the colour of the square that it is currently on. If the square is white, the ant turns clockwise, and if the square is black the ant turns anti-clockwise 2. The ant moves one square in the direction that it is currently facing 3. The square the ant was previously on is inverted (White -> Black and Black -> White) If display is True, the board will also be displayed on the axes >>> la = LangtonsAnt(2, 2) >>> la.move_ant(None, True, 0) >>> la.board [[True, True], [True, False]] >>> la.move_ant(None, True, 0) >>> la.board [[True, False], [True, False]] """ directions = { 0: (-1, 0), # 0° 1: (0, 1), # 90° 2: (1, 0), # 180° 3: (0, -1), # 270° } x, y = self.ant_position # Turn clockwise or anti-clockwise according to colour of square if self.board[x][y] is True: # The square is white so turn 90° clockwise self.ant_direction = (self.ant_direction + 1) % 4 else: # The square is black so turn 90° anti-clockwise self.ant_direction = (self.ant_direction - 1) % 4 # Move ant move_x, move_y = directions[self.ant_direction] self.ant_position = (x + move_x, y + move_y) # Flip colour of square self.board[x][y] = not self.board[x][y] if display and axes: # Display the board on the axes axes.get_xaxis().set_ticks([]) axes.get_yaxis().set_ticks([]) axes.imshow(self.board, cmap="gray", interpolation="nearest") def display(self, frames: int = 100_000) -> None: """ Displays the board without delay in a matplotlib plot to visually understand and track the ant. >>> _ = LangtonsAnt(WIDTH, HEIGHT) """ fig, ax = plt.subplots() # Assign animation to a variable to prevent it from getting garbage collected self.animation = FuncAnimation( fig, partial(self.move_ant, ax, True), frames=frames, interval=1 ) plt.show() if __name__ == "__main__": import doctest doctest.testmod() LangtonsAnt(WIDTH, HEIGHT).display()
1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
# Compression Data compression is everywhere, you need it to store data without taking too much space. Either the compression lose some data (then we talk about lossy compression, such as .jpg) or it does not (and then it is lossless compression, such as .png) Lossless compression is mainly used for archive purpose as it allow storing data without losing information about the file archived. On the other hand, lossy compression is used for transfer of file where quality isn't necessarily what is required (i.e: images on Twitter). * <https://www.sciencedirect.com/topics/computer-science/compression-algorithm> * <https://en.wikipedia.org/wiki/Data_compression> * <https://en.wikipedia.org/wiki/Pigeonhole_principle>
# Compression Data compression is everywhere, you need it to store data without taking too much space. Either the compression loses some data (then we talk about lossy compression, such as .jpg) or it does not (and then it is lossless compression, such as .png) Lossless compression is mainly used for archive purpose as it allows storing data without losing information about the file archived. On the other hand, lossy compression is used for transfer of file where quality isn't necessarily what is required (i.e: images on Twitter). * <https://www.sciencedirect.com/topics/computer-science/compression-algorithm> * <https://en.wikipedia.org/wiki/Data_compression> * <https://en.wikipedia.org/wiki/Pigeonhole_principle>
1
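Since the compression README in the record above leans on the lossless/lossy distinction, a toy run-length encoder may make it concrete. This is an illustration only, not code from the repository: the round trip reproduces the input exactly, which is precisely what "lossless" means, whereas a lossy scheme is allowed to discard detail.

```python
def rle_encode(text: str) -> list[tuple[str, int]]:
    """Toy lossless compression: collapse runs of repeated characters."""
    runs: list[tuple[str, int]] = []
    for ch in text:
        if runs and runs[-1][0] == ch:
            runs[-1] = (ch, runs[-1][1] + 1)  # extend the current run
        else:
            runs.append((ch, 1))              # start a new run
    return runs


def rle_decode(runs: list[tuple[str, int]]) -> str:
    return "".join(ch * count for ch, count in runs)


data = "aaaabbbcca"
assert rle_decode(rle_encode(data)) == data  # lossless: exact round trip
print(rle_encode(data))  # [('a', 4), ('b', 3), ('c', 2), ('a', 1)]
```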
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
# Hashes Hashing is the process of mapping any amount of data to a specified size using an algorithm. This is known as a hash value (or, if you're feeling fancy, a hash code, hash sums, or even a hash digest). Hashing is a one-way function, whereas encryption is a two-way function. While it is functionally conceivable to reverse-hash stuff, the required computing power makes it impractical. Hashing is a one-way street. Unlike encryption, which is intended to protect data in transit, hashing is intended to authenticate that a file or piece of data has not been altered—that it is authentic. In other words, it functions as a checksum. ## Common hashing algorithms ### MD5 This is one of the first algorithms that has gained widespread acceptance. MD5 is hashing algorithm made by Ray Rivest that is known to suffer vulnerabilities. It was created in 1992 as the successor to MD4. Currently MD6 is in the works, but as of 2009 Rivest had removed it from NIST consideration for SHA-3. ### SHA SHA stands for Security Hashing Algorithm and it’s probably best known as the hashing algorithm used in most SSL/TLS cipher suites. A cipher suite is a collection of ciphers and algorithms that are used for SSL/TLS connections. SHA handles the hashing aspects. SHA-1, as we mentioned earlier, is now deprecated. SHA-2 is now mandatory. SHA-2 is sometimes known has SHA-256, though variants with longer bit lengths are also available. ### SHA256 SHA 256 is a member of the SHA 2 algorithm family, under which SHA stands for Secure Hash Algorithm. It was a collaborative effort between both the NSA and NIST to implement a successor to the SHA 1 family, which was beginning to lose potency against brute force attacks. It was published in 2001. The importance of the 256 in the name refers to the final hash digest value, i.e. the hash value will remain 256 bits regardless of the size of the plaintext/cleartext. Other algorithms in the SHA family are similar to SHA 256 in some ways. ### Luhn The Luhn algorithm, also renowned as the modulus 10 or mod 10 algorithm, is a straightforward checksum formula used to validate a wide range of identification numbers, including credit card numbers, IMEI numbers, and Canadian Social Insurance Numbers. A community of mathematicians developed the LUHN formula in the late 1960s. Companies offering credit cards quickly followed suit. Since the algorithm is in the public interest, anyone can use it. The algorithm is used by most credit cards and many government identification numbers as a simple method of differentiating valid figures from mistyped or otherwise incorrect numbers. It was created to guard against unintentional errors, not malicious attacks.
# Hashes Hashing is the process of mapping any amount of data to a specified size using an algorithm. This is known as a hash value (or, if you're feeling fancy, a hash code, hash sums, or even a hash digest). Hashing is a one-way function, whereas encryption is a two-way function. While it is functionally conceivable to reverse-hash stuff, the required computing power makes it impractical. Hashing is a one-way street. Unlike encryption, which is intended to protect data in transit, hashing is intended to authenticate that a file or piece of data has not been altered—that it is authentic. In other words, it functions as a checksum. ## Common hashing algorithms ### MD5 This is one of the first algorithms that has gained widespread acceptance. MD5 is hashing algorithm made by Ray Rivest that is known to suffer vulnerabilities. It was created in 1992 as the successor to MD4. Currently MD6 is in the works, but as of 2009 Rivest had removed it from NIST consideration for SHA-3. ### SHA SHA stands for Security Hashing Algorithm and it’s probably best known as the hashing algorithm used in most SSL/TLS cipher suites. A cipher suite is a collection of ciphers and algorithms that are used for SSL/TLS connections. SHA handles the hashing aspects. SHA-1, as we mentioned earlier, is now deprecated. SHA-2 is now mandatory. SHA-2 is sometimes known as SHA-256, though variants with longer bit lengths are also available. ### SHA256 SHA 256 is a member of the SHA 2 algorithm family, under which SHA stands for Secure Hash Algorithm. It was a collaborative effort between both the NSA and NIST to implement a successor to the SHA 1 family, which was beginning to lose potency against brute force attacks. It was published in 2001. The importance of the 256 in the name refers to the final hash digest value, i.e. the hash value will remain 256 bits regardless of the size of the plaintext/cleartext. Other algorithms in the SHA family are similar to SHA 256 in some ways. ### Luhn The Luhn algorithm, also renowned as the modulus 10 or mod 10 algorithm, is a straightforward checksum formula used to validate a wide range of identification numbers, including credit card numbers, IMEI numbers, and Canadian Social Insurance Numbers. A community of mathematicians developed the LUHN formula in the late 1960s. Companies offering credit cards quickly followed suit. Since the algorithm is in the public interest, anyone can use it. The algorithm is used by most credit cards and many government identification numbers as a simple method of differentiating valid figures from mistyped or otherwise incorrect numbers. It was created to guard against unintentional errors, not malicious attacks.
1
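The hashes README in the record above describes the Luhn mod-10 check only in words; a small self-contained sketch of the usual formulation may help (illustration only, not code from the repository).

```python
def is_luhn_valid(number: str) -> bool:
    """Return True if the digit string passes the Luhn (mod 10) check."""
    digits = [int(ch) for ch in number if ch.isdigit()]
    total = 0
    # Walk from the rightmost (check) digit; double every second digit and
    # subtract 9 when the doubled value exceeds 9 (same as summing its digits).
    for i, digit in enumerate(reversed(digits)):
        if i % 2 == 1:
            digit *= 2
            if digit > 9:
                digit -= 9
        total += digit
    return total % 10 == 0


print(is_luhn_valid("79927398713"))  # well-known valid example -> True
print(is_luhn_valid("79927398710"))  # wrong check digit -> False
```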
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
# Sorting Algorithms Sorting is the process of putting data in a specific order. The way to arrange data in a specific order is specified by the sorting algorithm. The most typical orders are lexical or numerical. The significance of sorting lies in the fact that, if data is stored in a sorted manner, data searching can be highly optimised. Another use for sorting is to represent data in a more readable manner. This section contains a lot of important algorithms that helps us to use sorting algorithms in various scenarios. ## References * <https://www.tutorialspoint.com/python_data_structure/python_sorting_algorithms.htm> * <https://www.geeksforgeeks.org/sorting-algorithms-in-python> * <https://realpython.com/sorting-algorithms-python>
# Sorting Algorithms Sorting is the process of putting data in a specific order. The way to arrange data in a specific order is specified by the sorting algorithm. The most typical orders are lexical or numerical. The significance of sorting lies in the fact that, if data is stored in a sorted manner, data searching can be highly optimised. Another use for sorting is to represent data in a more readable manner. This section contains a lot of important algorithms that help us to use sorting algorithms in various scenarios. ## References * <https://www.tutorialspoint.com/python_data_structure/python_sorting_algorithms.htm> * <https://www.geeksforgeeks.org/sorting-algorithms-in-python> * <https://realpython.com/sorting-algorithms-python>
1
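The sorting README in the record above notes that searching sorted data "can be highly optimised"; a short illustration with the standard-library bisect module shows what that buys (binary search, O(log n), instead of a linear scan).

```python
from bisect import bisect_left

data = sorted([42, 7, 19, 3, 25])  # searching below requires sorted input
target = 19
i = bisect_left(data, target)      # binary search for the insertion point
print(i < len(data) and data[i] == target)  # True -> target is present
```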
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
def remove_duplicates(key: str) -> str: """ Removes duplicate alphabetic characters in a keyword (letter is ignored after its first appearance). :param key: Keyword to use :return: String with duplicates removed >>> remove_duplicates('Hello World!!') 'Helo Wrd' """ key_no_dups = "" for ch in key: if ch == " " or ch not in key_no_dups and ch.isalpha(): key_no_dups += ch return key_no_dups def create_cipher_map(key: str) -> dict[str, str]: """ Returns a cipher map given a keyword. :param key: keyword to use :return: dictionary cipher map """ # Create a list of the letters in the alphabet alphabet = [chr(i + 65) for i in range(26)] # Remove duplicate characters from key key = remove_duplicates(key.upper()) offset = len(key) # First fill cipher with key characters cipher_alphabet = {alphabet[i]: char for i, char in enumerate(key)} # Then map remaining characters in alphabet to # the alphabet from the beginning for i in range(len(cipher_alphabet), 26): char = alphabet[i - offset] # Ensure we are not mapping letters to letters previously mapped while char in key: offset -= 1 char = alphabet[i - offset] cipher_alphabet[alphabet[i]] = char return cipher_alphabet def encipher(message: str, cipher_map: dict[str, str]) -> str: """ Enciphers a message given a cipher map. :param message: Message to encipher :param cipher_map: Cipher map :return: enciphered string >>> encipher('Hello World!!', create_cipher_map('Goodbye!!')) 'CYJJM VMQJB!!' """ return "".join(cipher_map.get(ch, ch) for ch in message.upper()) def decipher(message: str, cipher_map: dict[str, str]) -> str: """ Deciphers a message given a cipher map :param message: Message to decipher :param cipher_map: Dictionary mapping to use :return: Deciphered string >>> cipher_map = create_cipher_map('Goodbye!!') >>> decipher(encipher('Hello World!!', cipher_map), cipher_map) 'HELLO WORLD!!' """ # Reverse our cipher mappings rev_cipher_map = {v: k for k, v in cipher_map.items()} return "".join(rev_cipher_map.get(ch, ch) for ch in message.upper()) def main() -> None: """ Handles I/O :return: void """ message = input("Enter message to encode or decode: ").strip() key = input("Enter keyword: ").strip() option = input("Encipher or decipher? E/D:").strip()[0].lower() try: func = {"e": encipher, "d": decipher}[option] except KeyError: raise KeyError("invalid input option") cipher_map = create_cipher_map(key) print(func(message, cipher_map)) if __name__ == "__main__": import doctest doctest.testmod() main()
def remove_duplicates(key: str) -> str: """ Removes duplicate alphabetic characters in a keyword (letter is ignored after its first appearance). :param key: Keyword to use :return: String with duplicates removed >>> remove_duplicates('Hello World!!') 'Helo Wrd' """ key_no_dups = "" for ch in key: if ch == " " or ch not in key_no_dups and ch.isalpha(): key_no_dups += ch return key_no_dups def create_cipher_map(key: str) -> dict[str, str]: """ Returns a cipher map given a keyword. :param key: keyword to use :return: dictionary cipher map """ # Create a list of the letters in the alphabet alphabet = [chr(i + 65) for i in range(26)] # Remove duplicate characters from key key = remove_duplicates(key.upper()) offset = len(key) # First fill cipher with key characters cipher_alphabet = {alphabet[i]: char for i, char in enumerate(key)} # Then map remaining characters in alphabet to # the alphabet from the beginning for i in range(len(cipher_alphabet), 26): char = alphabet[i - offset] # Ensure we are not mapping letters to letters previously mapped while char in key: offset -= 1 char = alphabet[i - offset] cipher_alphabet[alphabet[i]] = char return cipher_alphabet def encipher(message: str, cipher_map: dict[str, str]) -> str: """ Enciphers a message given a cipher map. :param message: Message to encipher :param cipher_map: Cipher map :return: enciphered string >>> encipher('Hello World!!', create_cipher_map('Goodbye!!')) 'CYJJM VMQJB!!' """ return "".join(cipher_map.get(ch, ch) for ch in message.upper()) def decipher(message: str, cipher_map: dict[str, str]) -> str: """ Deciphers a message given a cipher map :param message: Message to decipher :param cipher_map: Dictionary mapping to use :return: Deciphered string >>> cipher_map = create_cipher_map('Goodbye!!') >>> decipher(encipher('Hello World!!', cipher_map), cipher_map) 'HELLO WORLD!!' """ # Reverse our cipher mappings rev_cipher_map = {v: k for k, v in cipher_map.items()} return "".join(rev_cipher_map.get(ch, ch) for ch in message.upper()) def main() -> None: """ Handles I/O :return: void """ message = input("Enter message to encode or decode: ").strip() key = input("Enter keyword: ").strip() option = input("Encipher or decipher? E/D:").strip()[0].lower() try: func = {"e": encipher, "d": decipher}[option] except KeyError: raise KeyError("invalid input option") cipher_map = create_cipher_map(key) print(func(message, cipher_map)) if __name__ == "__main__": import doctest doctest.testmod() main()
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
""" Implemented an algorithm using opencv to convert a colored image into its negative """ from cv2 import destroyAllWindows, imread, imshow, waitKey def convert_to_negative(img): # getting number of pixels in the image pixel_h, pixel_v = img.shape[0], img.shape[1] # converting each pixel's color to its negative for i in range(pixel_h): for j in range(pixel_v): img[i][j] = [255, 255, 255] - img[i][j] return img if __name__ == "__main__": # read original image img = imread("image_data/lena.jpg", 1) # convert to its negative neg = convert_to_negative(img) # show result image imshow("negative of original image", img) waitKey(0) destroyAllWindows()
""" Implemented an algorithm using opencv to convert a colored image into its negative """ from cv2 import destroyAllWindows, imread, imshow, waitKey def convert_to_negative(img): # getting number of pixels in the image pixel_h, pixel_v = img.shape[0], img.shape[1] # converting each pixel's color to its negative for i in range(pixel_h): for j in range(pixel_v): img[i][j] = [255, 255, 255] - img[i][j] return img if __name__ == "__main__": # read original image img = imread("image_data/lena.jpg", 1) # convert to its negative neg = convert_to_negative(img) # show result image imshow("negative of original image", img) waitKey(0) destroyAllWindows()
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
""" This function implements the shell sort algorithm which is slightly faster than its pure implementation. This shell sort is implemented using a gap, which shrinks by a certain factor each iteration. In this implementation, the gap is initially set to the length of the collection. The gap is then reduced by a certain factor (1.3) each iteration. For each iteration, the algorithm compares elements that are a certain number of positions apart (determined by the gap). If the element at the higher position is greater than the element at the lower position, the two elements are swapped. The process is repeated until the gap is equal to 1. The reason this is more efficient is that it reduces the number of comparisons that need to be made. By using a smaller gap, the list is sorted more quickly. """ def shell_sort(collection: list) -> list: """Implementation of shell sort algorithm in Python :param collection: Some mutable ordered collection with heterogeneous comparable items inside :return: the same collection ordered by ascending >>> shell_sort([3, 2, 1]) [1, 2, 3] >>> shell_sort([]) [] >>> shell_sort([1]) [1] """ # Choose an initial gap value gap = len(collection) # Set the gap value to be decreased by a factor of 1.3 # after each iteration shrink = 1.3 # Continue sorting until the gap is 1 while gap > 1: # Decrease the gap value gap = int(gap / shrink) # Sort the elements using insertion sort for i in range(gap, len(collection)): temp = collection[i] j = i while j >= gap and collection[j - gap] > temp: collection[j] = collection[j - gap] j -= gap collection[j] = temp return collection if __name__ == "__main__": import doctest doctest.testmod()
""" This function implements the shell sort algorithm which is slightly faster than its pure implementation. This shell sort is implemented using a gap, which shrinks by a certain factor each iteration. In this implementation, the gap is initially set to the length of the collection. The gap is then reduced by a certain factor (1.3) each iteration. For each iteration, the algorithm compares elements that are a certain number of positions apart (determined by the gap). If the element at the higher position is greater than the element at the lower position, the two elements are swapped. The process is repeated until the gap is equal to 1. The reason this is more efficient is that it reduces the number of comparisons that need to be made. By using a smaller gap, the list is sorted more quickly. """ def shell_sort(collection: list) -> list: """Implementation of shell sort algorithm in Python :param collection: Some mutable ordered collection with heterogeneous comparable items inside :return: the same collection ordered by ascending >>> shell_sort([3, 2, 1]) [1, 2, 3] >>> shell_sort([]) [] >>> shell_sort([1]) [1] """ # Choose an initial gap value gap = len(collection) # Set the gap value to be decreased by a factor of 1.3 # after each iteration shrink = 1.3 # Continue sorting until the gap is 1 while gap > 1: # Decrease the gap value gap = int(gap / shrink) # Sort the elements using insertion sort for i in range(gap, len(collection)): temp = collection[i] j = i while j >= gap and collection[j - gap] > temp: collection[j] = collection[j - gap] j -= gap collection[j] = temp return collection if __name__ == "__main__": import doctest doctest.testmod()
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
""" Problem 78 Url: https://projecteuler.net/problem=78 Statement: Let p(n) represent the number of different ways in which n coins can be separated into piles. For example, five coins can be separated into piles in exactly seven different ways, so p(5)=7. OOOOO OOOO O OOO OO OOO O O OO OO O OO O O O O O O O O Find the least value of n for which p(n) is divisible by one million. """ import itertools def solution(number: int = 1000000) -> int: """ >>> solution(1) 1 >>> solution(9) 14 >>> solution() 55374 """ partitions = [1] for i in itertools.count(len(partitions)): item = 0 for j in itertools.count(1): sign = -1 if j % 2 == 0 else +1 index = (j * j * 3 - j) // 2 if index > i: break item += partitions[i - index] * sign item %= number index += j if index > i: break item += partitions[i - index] * sign item %= number if item == 0: return i partitions.append(item) return 0 if __name__ == "__main__": import doctest doctest.testmod() print(f"{solution() = }")
""" Problem 78 Url: https://projecteuler.net/problem=78 Statement: Let p(n) represent the number of different ways in which n coins can be separated into piles. For example, five coins can be separated into piles in exactly seven different ways, so p(5)=7. OOOOO OOOO O OOO OO OOO O O OO OO O OO O O O O O O O O Find the least value of n for which p(n) is divisible by one million. """ import itertools def solution(number: int = 1000000) -> int: """ >>> solution(1) 1 >>> solution(9) 14 >>> solution() 55374 """ partitions = [1] for i in itertools.count(len(partitions)): item = 0 for j in itertools.count(1): sign = -1 if j % 2 == 0 else +1 index = (j * j * 3 - j) // 2 if index > i: break item += partitions[i - index] * sign item %= number index += j if index > i: break item += partitions[i - index] * sign item %= number if item == 0: return i partitions.append(item) return 0 if __name__ == "__main__": import doctest doctest.testmod() print(f"{solution() = }")
-1
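A note on the Project Euler 78 record above: the nested loops appear to implement Euler's pentagonal-number recurrence for the partition function (this is my reading of the code, not something the source states), with every value kept modulo one million:

$$p(n) = \sum_{k \ge 1} (-1)^{k+1} \Big[ p\big(n - \tfrac{k(3k-1)}{2}\big) + p\big(n - \tfrac{k(3k+1)}{2}\big) \Big], \qquad p(0) = 1, \quad p(m) = 0 \text{ for } m < 0.$$

In the code, `sign = -1 if j % 2 == 0 else +1` supplies the $(-1)^{k+1}$ factor, and the two `index` values computed per iteration are the generalised pentagonal numbers $k(3k-1)/2$ and $k(3k+1)/2$.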
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
""" Project Euler Problem 3: https://projecteuler.net/problem=3 Largest prime factor The prime factors of 13195 are 5, 7, 13 and 29. What is the largest prime factor of the number 600851475143? References: - https://en.wikipedia.org/wiki/Prime_number#Unique_factorization """ def solution(n: int = 600851475143) -> int: """ Returns the largest prime factor of a given number n. >>> solution(13195) 29 >>> solution(10) 5 >>> solution(17) 17 >>> solution(3.4) 3 >>> solution(0) Traceback (most recent call last): ... ValueError: Parameter n must be greater than or equal to one. >>> solution(-17) Traceback (most recent call last): ... ValueError: Parameter n must be greater than or equal to one. >>> solution([]) Traceback (most recent call last): ... TypeError: Parameter n must be int or castable to int. >>> solution("asd") Traceback (most recent call last): ... TypeError: Parameter n must be int or castable to int. """ try: n = int(n) except (TypeError, ValueError): raise TypeError("Parameter n must be int or castable to int.") if n <= 0: raise ValueError("Parameter n must be greater than or equal to one.") i = 2 ans = 0 if n == 2: return 2 while n > 2: while n % i != 0: i += 1 ans = i while n % i == 0: n = n // i i += 1 return int(ans) if __name__ == "__main__": print(f"{solution() = }")
""" Project Euler Problem 3: https://projecteuler.net/problem=3 Largest prime factor The prime factors of 13195 are 5, 7, 13 and 29. What is the largest prime factor of the number 600851475143? References: - https://en.wikipedia.org/wiki/Prime_number#Unique_factorization """ def solution(n: int = 600851475143) -> int: """ Returns the largest prime factor of a given number n. >>> solution(13195) 29 >>> solution(10) 5 >>> solution(17) 17 >>> solution(3.4) 3 >>> solution(0) Traceback (most recent call last): ... ValueError: Parameter n must be greater than or equal to one. >>> solution(-17) Traceback (most recent call last): ... ValueError: Parameter n must be greater than or equal to one. >>> solution([]) Traceback (most recent call last): ... TypeError: Parameter n must be int or castable to int. >>> solution("asd") Traceback (most recent call last): ... TypeError: Parameter n must be int or castable to int. """ try: n = int(n) except (TypeError, ValueError): raise TypeError("Parameter n must be int or castable to int.") if n <= 0: raise ValueError("Parameter n must be greater than or equal to one.") i = 2 ans = 0 if n == 2: return 2 while n > 2: while n % i != 0: i += 1 ans = i while n % i == 0: n = n // i i += 1 return int(ans) if __name__ == "__main__": print(f"{solution() = }")
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
from bisect import bisect from itertools import accumulate def frac_knapsack(vl, wt, w, n): """ >>> frac_knapsack([60, 100, 120], [10, 20, 30], 50, 3) 240.0 """ r = sorted(zip(vl, wt), key=lambda x: x[0] / x[1], reverse=True) vl, wt = [i[0] for i in r], [i[1] for i in r] acc = list(accumulate(wt)) k = bisect(acc, w) return ( 0 if k == 0 else sum(vl[:k]) + (w - acc[k - 1]) * (vl[k]) / (wt[k]) if k != n else sum(vl[:k]) ) if __name__ == "__main__": import doctest doctest.testmod()
from bisect import bisect from itertools import accumulate def frac_knapsack(vl, wt, w, n): """ >>> frac_knapsack([60, 100, 120], [10, 20, 30], 50, 3) 240.0 """ r = sorted(zip(vl, wt), key=lambda x: x[0] / x[1], reverse=True) vl, wt = [i[0] for i in r], [i[1] for i in r] acc = list(accumulate(wt)) k = bisect(acc, w) return ( 0 if k == 0 else sum(vl[:k]) + (w - acc[k - 1]) * (vl[k]) / (wt[k]) if k != n else sum(vl[:k]) ) if __name__ == "__main__": import doctest doctest.testmod()
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
def alternative_string_arrange(first_str: str, second_str: str) -> str: """ Return the alternative arrangements of the two strings. :param first_str: :param second_str: :return: String >>> alternative_string_arrange("ABCD", "XY") 'AXBYCD' >>> alternative_string_arrange("XY", "ABCD") 'XAYBCD' >>> alternative_string_arrange("AB", "XYZ") 'AXBYZ' >>> alternative_string_arrange("ABC", "") 'ABC' """ first_str_length: int = len(first_str) second_str_length: int = len(second_str) abs_length: int = ( first_str_length if first_str_length > second_str_length else second_str_length ) output_list: list = [] for char_count in range(abs_length): if char_count < first_str_length: output_list.append(first_str[char_count]) if char_count < second_str_length: output_list.append(second_str[char_count]) return "".join(output_list) if __name__ == "__main__": print(alternative_string_arrange("AB", "XYZ"), end=" ")
def alternative_string_arrange(first_str: str, second_str: str) -> str: """ Return the alternative arrangements of the two strings. :param first_str: :param second_str: :return: String >>> alternative_string_arrange("ABCD", "XY") 'AXBYCD' >>> alternative_string_arrange("XY", "ABCD") 'XAYBCD' >>> alternative_string_arrange("AB", "XYZ") 'AXBYZ' >>> alternative_string_arrange("ABC", "") 'ABC' """ first_str_length: int = len(first_str) second_str_length: int = len(second_str) abs_length: int = ( first_str_length if first_str_length > second_str_length else second_str_length ) output_list: list = [] for char_count in range(abs_length): if char_count < first_str_length: output_list.append(first_str[char_count]) if char_count < second_str_length: output_list.append(second_str[char_count]) return "".join(output_list) if __name__ == "__main__": print(alternative_string_arrange("AB", "XYZ"), end=" ")
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
""" The sum-of-subsetsproblem states that a set of non-negative integers, and a value M, determine all possible subsets of the given set whose summation sum equal to given M. Summation of the chosen numbers must be equal to given number M and one number can be used only once. """ from __future__ import annotations def generate_sum_of_subsets_soln(nums: list[int], max_sum: int) -> list[list[int]]: result: list[list[int]] = [] path: list[int] = [] num_index = 0 remaining_nums_sum = sum(nums) create_state_space_tree(nums, max_sum, num_index, path, result, remaining_nums_sum) return result def create_state_space_tree( nums: list[int], max_sum: int, num_index: int, path: list[int], result: list[list[int]], remaining_nums_sum: int, ) -> None: """ Creates a state space tree to iterate through each branch using DFS. It terminates the branching of a node when any of the two conditions given below satisfy. This algorithm follows depth-fist-search and backtracks when the node is not branchable. """ if sum(path) > max_sum or (remaining_nums_sum + sum(path)) < max_sum: return if sum(path) == max_sum: result.append(path) return for index in range(num_index, len(nums)): create_state_space_tree( nums, max_sum, index + 1, [*path, nums[index]], result, remaining_nums_sum - nums[index], ) """ remove the comment to take an input from the user print("Enter the elements") nums = list(map(int, input().split())) print("Enter max_sum sum") max_sum = int(input()) """ nums = [3, 34, 4, 12, 5, 2] max_sum = 9 result = generate_sum_of_subsets_soln(nums, max_sum) print(*result)
""" The sum-of-subsetsproblem states that a set of non-negative integers, and a value M, determine all possible subsets of the given set whose summation sum equal to given M. Summation of the chosen numbers must be equal to given number M and one number can be used only once. """ from __future__ import annotations def generate_sum_of_subsets_soln(nums: list[int], max_sum: int) -> list[list[int]]: result: list[list[int]] = [] path: list[int] = [] num_index = 0 remaining_nums_sum = sum(nums) create_state_space_tree(nums, max_sum, num_index, path, result, remaining_nums_sum) return result def create_state_space_tree( nums: list[int], max_sum: int, num_index: int, path: list[int], result: list[list[int]], remaining_nums_sum: int, ) -> None: """ Creates a state space tree to iterate through each branch using DFS. It terminates the branching of a node when any of the two conditions given below satisfy. This algorithm follows depth-fist-search and backtracks when the node is not branchable. """ if sum(path) > max_sum or (remaining_nums_sum + sum(path)) < max_sum: return if sum(path) == max_sum: result.append(path) return for index in range(num_index, len(nums)): create_state_space_tree( nums, max_sum, index + 1, [*path, nums[index]], result, remaining_nums_sum - nums[index], ) """ remove the comment to take an input from the user print("Enter the elements") nums = list(map(int, input().split())) print("Enter max_sum sum") max_sum = int(input()) """ nums = [3, 34, 4, 12, 5, 2] max_sum = 9 result = generate_sum_of_subsets_soln(nums, max_sum) print(*result)
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
""" Pure Python implementation of a binary search algorithm. For doctests run following command: python3 -m doctest -v simple_binary_search.py For manual testing run: python3 simple_binary_search.py """ from __future__ import annotations def binary_search(a_list: list[int], item: int) -> bool: """ >>> test_list = [0, 1, 2, 8, 13, 17, 19, 32, 42] >>> binary_search(test_list, 3) False >>> binary_search(test_list, 13) True >>> binary_search([4, 4, 5, 6, 7], 4) True >>> binary_search([4, 4, 5, 6, 7], -10) False >>> binary_search([-18, 2], -18) True >>> binary_search([5], 5) True >>> binary_search(['a', 'c', 'd'], 'c') True >>> binary_search(['a', 'c', 'd'], 'f') False >>> binary_search([], 1) False >>> binary_search([-.1, .1 , .8], .1) True >>> binary_search(range(-5000, 5000, 10), 80) True >>> binary_search(range(-5000, 5000, 10), 1255) False >>> binary_search(range(0, 10000, 5), 2) False """ if len(a_list) == 0: return False midpoint = len(a_list) // 2 if a_list[midpoint] == item: return True if item < a_list[midpoint]: return binary_search(a_list[:midpoint], item) else: return binary_search(a_list[midpoint + 1 :], item) if __name__ == "__main__": user_input = input("Enter numbers separated by comma:\n").strip() sequence = [int(item.strip()) for item in user_input.split(",")] target = int(input("Enter the number to be found in the list:\n").strip()) not_str = "" if binary_search(sequence, target) else "not " print(f"{target} was {not_str}found in {sequence}")
""" Pure Python implementation of a binary search algorithm. For doctests run following command: python3 -m doctest -v simple_binary_search.py For manual testing run: python3 simple_binary_search.py """ from __future__ import annotations def binary_search(a_list: list[int], item: int) -> bool: """ >>> test_list = [0, 1, 2, 8, 13, 17, 19, 32, 42] >>> binary_search(test_list, 3) False >>> binary_search(test_list, 13) True >>> binary_search([4, 4, 5, 6, 7], 4) True >>> binary_search([4, 4, 5, 6, 7], -10) False >>> binary_search([-18, 2], -18) True >>> binary_search([5], 5) True >>> binary_search(['a', 'c', 'd'], 'c') True >>> binary_search(['a', 'c', 'd'], 'f') False >>> binary_search([], 1) False >>> binary_search([-.1, .1 , .8], .1) True >>> binary_search(range(-5000, 5000, 10), 80) True >>> binary_search(range(-5000, 5000, 10), 1255) False >>> binary_search(range(0, 10000, 5), 2) False """ if len(a_list) == 0: return False midpoint = len(a_list) // 2 if a_list[midpoint] == item: return True if item < a_list[midpoint]: return binary_search(a_list[:midpoint], item) else: return binary_search(a_list[midpoint + 1 :], item) if __name__ == "__main__": user_input = input("Enter numbers separated by comma:\n").strip() sequence = [int(item.strip()) for item in user_input.split(",")] target = int(input("Enter the number to be found in the list:\n").strip()) not_str = "" if binary_search(sequence, target) else "not " print(f"{target} was {not_str}found in {sequence}")
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
# https://en.wikipedia.org/wiki/Simulated_annealing import math import random from typing import Any from .hill_climbing import SearchProblem def simulated_annealing( search_prob, find_max: bool = True, max_x: float = math.inf, min_x: float = -math.inf, max_y: float = math.inf, min_y: float = -math.inf, visualization: bool = False, start_temperate: float = 100, rate_of_decrease: float = 0.01, threshold_temp: float = 1, ) -> Any: """ Implementation of the simulated annealing algorithm. We start with a given state, find all its neighbors. Pick a random neighbor, if that neighbor improves the solution, we move in that direction, if that neighbor does not improve the solution, we generate a random real number between 0 and 1, if the number is within a certain range (calculated using temperature) we move in that direction, else we pick another neighbor randomly and repeat the process. Args: search_prob: The search state at the start. find_max: If True, the algorithm should find the minimum else the minimum. max_x, min_x, max_y, min_y: the maximum and minimum bounds of x and y. visualization: If True, a matplotlib graph is displayed. start_temperate: the initial temperate of the system when the program starts. rate_of_decrease: the rate at which the temperate decreases in each iteration. threshold_temp: the threshold temperature below which we end the search Returns a search state having the maximum (or minimum) score. """ search_end = False current_state = search_prob current_temp = start_temperate scores = [] iterations = 0 best_state = None while not search_end: current_score = current_state.score() if best_state is None or current_score > best_state.score(): best_state = current_state scores.append(current_score) iterations += 1 next_state = None neighbors = current_state.get_neighbors() while ( next_state is None and neighbors ): # till we do not find a neighbor that we can move to index = random.randint(0, len(neighbors) - 1) # picking a random neighbor picked_neighbor = neighbors.pop(index) change = picked_neighbor.score() - current_score if ( picked_neighbor.x > max_x or picked_neighbor.x < min_x or picked_neighbor.y > max_y or picked_neighbor.y < min_y ): continue # neighbor outside our bounds if not find_max: change = change * -1 # in case we are finding minimum if change > 0: # improves the solution next_state = picked_neighbor else: probability = (math.e) ** ( change / current_temp ) # probability generation function if random.random() < probability: # random number within probability next_state = picked_neighbor current_temp = current_temp - (current_temp * rate_of_decrease) if current_temp < threshold_temp or next_state is None: # temperature below threshold, or could not find a suitable neighbor search_end = True else: current_state = next_state if visualization: from matplotlib import pyplot as plt plt.plot(range(iterations), scores) plt.xlabel("Iterations") plt.ylabel("Function values") plt.show() return best_state if __name__ == "__main__": def test_f1(x, y): return (x**2) + (y**2) # starting the problem with initial coordinates (12, 47) prob = SearchProblem(x=12, y=47, step_size=1, function_to_optimize=test_f1) local_min = simulated_annealing( prob, find_max=False, max_x=100, min_x=5, max_y=50, min_y=-5, visualization=True ) print( "The minimum score for f(x, y) = x^2 + y^2 with the domain 100 > x > 5 " f"and 50 > y > - 5 found via hill climbing: {local_min.score()}" ) # starting the problem with initial coordinates (12, 47) prob = SearchProblem(x=12, y=47, step_size=1, 
function_to_optimize=test_f1) local_min = simulated_annealing( prob, find_max=True, max_x=100, min_x=5, max_y=50, min_y=-5, visualization=True ) print( "The maximum score for f(x, y) = x^2 + y^2 with the domain 100 > x > 5 " f"and 50 > y > - 5 found via hill climbing: {local_min.score()}" ) def test_f2(x, y): return (3 * x**2) - (6 * y) prob = SearchProblem(x=3, y=4, step_size=1, function_to_optimize=test_f1) local_min = simulated_annealing(prob, find_max=False, visualization=True) print( "The minimum score for f(x, y) = 3*x^2 - 6*y found via hill climbing: " f"{local_min.score()}" ) prob = SearchProblem(x=3, y=4, step_size=1, function_to_optimize=test_f1) local_min = simulated_annealing(prob, find_max=True, visualization=True) print( "The maximum score for f(x, y) = 3*x^2 - 6*y found via hill climbing: " f"{local_min.score()}" )
# https://en.wikipedia.org/wiki/Simulated_annealing import math import random from typing import Any from .hill_climbing import SearchProblem def simulated_annealing( search_prob, find_max: bool = True, max_x: float = math.inf, min_x: float = -math.inf, max_y: float = math.inf, min_y: float = -math.inf, visualization: bool = False, start_temperate: float = 100, rate_of_decrease: float = 0.01, threshold_temp: float = 1, ) -> Any: """ Implementation of the simulated annealing algorithm. We start with a given state, find all its neighbors. Pick a random neighbor, if that neighbor improves the solution, we move in that direction, if that neighbor does not improve the solution, we generate a random real number between 0 and 1, if the number is within a certain range (calculated using temperature) we move in that direction, else we pick another neighbor randomly and repeat the process. Args: search_prob: The search state at the start. find_max: If True, the algorithm should find the minimum else the minimum. max_x, min_x, max_y, min_y: the maximum and minimum bounds of x and y. visualization: If True, a matplotlib graph is displayed. start_temperate: the initial temperate of the system when the program starts. rate_of_decrease: the rate at which the temperate decreases in each iteration. threshold_temp: the threshold temperature below which we end the search Returns a search state having the maximum (or minimum) score. """ search_end = False current_state = search_prob current_temp = start_temperate scores = [] iterations = 0 best_state = None while not search_end: current_score = current_state.score() if best_state is None or current_score > best_state.score(): best_state = current_state scores.append(current_score) iterations += 1 next_state = None neighbors = current_state.get_neighbors() while ( next_state is None and neighbors ): # till we do not find a neighbor that we can move to index = random.randint(0, len(neighbors) - 1) # picking a random neighbor picked_neighbor = neighbors.pop(index) change = picked_neighbor.score() - current_score if ( picked_neighbor.x > max_x or picked_neighbor.x < min_x or picked_neighbor.y > max_y or picked_neighbor.y < min_y ): continue # neighbor outside our bounds if not find_max: change = change * -1 # in case we are finding minimum if change > 0: # improves the solution next_state = picked_neighbor else: probability = (math.e) ** ( change / current_temp ) # probability generation function if random.random() < probability: # random number within probability next_state = picked_neighbor current_temp = current_temp - (current_temp * rate_of_decrease) if current_temp < threshold_temp or next_state is None: # temperature below threshold, or could not find a suitable neighbor search_end = True else: current_state = next_state if visualization: from matplotlib import pyplot as plt plt.plot(range(iterations), scores) plt.xlabel("Iterations") plt.ylabel("Function values") plt.show() return best_state if __name__ == "__main__": def test_f1(x, y): return (x**2) + (y**2) # starting the problem with initial coordinates (12, 47) prob = SearchProblem(x=12, y=47, step_size=1, function_to_optimize=test_f1) local_min = simulated_annealing( prob, find_max=False, max_x=100, min_x=5, max_y=50, min_y=-5, visualization=True ) print( "The minimum score for f(x, y) = x^2 + y^2 with the domain 100 > x > 5 " f"and 50 > y > - 5 found via hill climbing: {local_min.score()}" ) # starting the problem with initial coordinates (12, 47) prob = SearchProblem(x=12, y=47, step_size=1, 
function_to_optimize=test_f1) local_min = simulated_annealing( prob, find_max=True, max_x=100, min_x=5, max_y=50, min_y=-5, visualization=True ) print( "The maximum score for f(x, y) = x^2 + y^2 with the domain 100 > x > 5 " f"and 50 > y > - 5 found via hill climbing: {local_min.score()}" ) def test_f2(x, y): return (3 * x**2) - (6 * y) prob = SearchProblem(x=3, y=4, step_size=1, function_to_optimize=test_f1) local_min = simulated_annealing(prob, find_max=False, visualization=True) print( "The minimum score for f(x, y) = 3*x^2 - 6*y found via hill climbing: " f"{local_min.score()}" ) prob = SearchProblem(x=3, y=4, step_size=1, function_to_optimize=test_f1) local_min = simulated_annealing(prob, find_max=True, visualization=True) print( "The maximum score for f(x, y) = 3*x^2 - 6*y found via hill climbing: " f"{local_min.score()}" )
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
#!/usr/bin/env python3 """ Simulation of the Quantum Key Distribution (QKD) protocol called BB84, created by Charles Bennett and Gilles Brassard in 1984. BB84 is a key-distribution protocol that ensures secure key distribution using qubits instead of classical bits. The generated key is the result of simulating a quantum circuit. Our algorithm to construct the circuit is as follows: Alice generates two binary strings. One encodes the basis for each qubit: - 0 -> {0,1} basis. - 1 -> {+,-} basis. The other encodes the state: - 0 -> |0> or |+>. - 1 -> |1> or |->. Bob also generates a binary string and uses the same convention to choose a basis for measurement. Based on the following results, we follow the algorithm below: X|0> = |1> H|0> = |+> HX|0> = |-> 1. Whenever Alice wants to encode 1 in a qubit, she applies an X (NOT) gate to the qubit. To encode 0, no action is needed. 2. Wherever she wants to encode it in the {+,-} basis, she applies an H (Hadamard) gate. No action is necessary to encode a qubit in the {0,1} basis. 3. She then sends the qubits to Bob (symbolically represented in this circuit using wires). 4. Bob measures the qubits according to his binary string for measurement. To measure a qubit in the {+,-} basis, he applies an H gate to the corresponding qubit and then performs a measurement. References: https://en.wikipedia.org/wiki/BB84 https://qiskit.org/textbook/ch-algorithms/quantum-key-distribution.html """ import numpy as np import qiskit def bb84(key_len: int = 8, seed: int | None = None) -> str: """ Performs the BB84 protocol using a key made of `key_len` bits. The two parties in the key distribution are called Alice and Bob. Args: key_len: The length of the generated key in bits. The default is 8. seed: Seed for the random number generator. Mostly used for testing. Default is None. Returns: key: The key generated using BB84 protocol. >>> bb84(16, seed=0) '0111110111010010' >>> bb84(8, seed=0) '10110001' """ # Set up the random number generator. rng = np.random.default_rng(seed=seed) # Roughly 25% of the qubits will contribute to the key. # So we take more than we need. num_qubits = 6 * key_len # Measurement basis for Alice's qubits. alice_basis = rng.integers(2, size=num_qubits) # The set of states Alice will prepare. alice_state = rng.integers(2, size=num_qubits) # Measurement basis for Bob's qubits. bob_basis = rng.integers(2, size=num_qubits) # Quantum Circuit to simulate BB84 bb84_circ = qiskit.QuantumCircuit(num_qubits, name="BB84") # Alice prepares her qubits according to rules above. for index, _ in enumerate(alice_basis): if alice_state[index] == 1: bb84_circ.x(index) if alice_basis[index] == 1: bb84_circ.h(index) bb84_circ.barrier() # Bob measures the received qubits according to rules above. for index, _ in enumerate(bob_basis): if bob_basis[index] == 1: bb84_circ.h(index) bb84_circ.barrier() bb84_circ.measure_all() # Simulate the quantum circuit. sim = qiskit.Aer.get_backend("aer_simulator") # We only need to run one shot because the key is unique. # Multiple shots will produce the same key. job = qiskit.execute(bb84_circ, sim, shots=1, seed_simulator=seed) # Returns the result of measurement. result = job.result().get_counts(bb84_circ).most_frequent() # Extracting the generated key from the simulation results. # Only keep measurement results where Alice and Bob chose the same basis. gen_key = "".join( [ result_bit for alice_basis_bit, bob_basis_bit, result_bit in zip( alice_basis, bob_basis, result ) if alice_basis_bit == bob_basis_bit ] ) # Get final key. 
Pad with 0 if too short, otherwise truncate. key = gen_key[:key_len] if len(gen_key) >= key_len else gen_key.ljust(key_len, "0") return key if __name__ == "__main__": print(f"The generated key is : {bb84(8, seed=0)}") from doctest import testmod testmod()
#!/usr/bin/env python3 """ Simulation of the Quantum Key Distribution (QKD) protocol called BB84, created by Charles Bennett and Gilles Brassard in 1984. BB84 is a key-distribution protocol that ensures secure key distribution using qubits instead of classical bits. The generated key is the result of simulating a quantum circuit. Our algorithm to construct the circuit is as follows: Alice generates two binary strings. One encodes the basis for each qubit: - 0 -> {0,1} basis. - 1 -> {+,-} basis. The other encodes the state: - 0 -> |0> or |+>. - 1 -> |1> or |->. Bob also generates a binary string and uses the same convention to choose a basis for measurement. Based on the following results, we follow the algorithm below: X|0> = |1> H|0> = |+> HX|0> = |-> 1. Whenever Alice wants to encode 1 in a qubit, she applies an X (NOT) gate to the qubit. To encode 0, no action is needed. 2. Wherever she wants to encode it in the {+,-} basis, she applies an H (Hadamard) gate. No action is necessary to encode a qubit in the {0,1} basis. 3. She then sends the qubits to Bob (symbolically represented in this circuit using wires). 4. Bob measures the qubits according to his binary string for measurement. To measure a qubit in the {+,-} basis, he applies an H gate to the corresponding qubit and then performs a measurement. References: https://en.wikipedia.org/wiki/BB84 https://qiskit.org/textbook/ch-algorithms/quantum-key-distribution.html """ import numpy as np import qiskit def bb84(key_len: int = 8, seed: int | None = None) -> str: """ Performs the BB84 protocol using a key made of `key_len` bits. The two parties in the key distribution are called Alice and Bob. Args: key_len: The length of the generated key in bits. The default is 8. seed: Seed for the random number generator. Mostly used for testing. Default is None. Returns: key: The key generated using BB84 protocol. >>> bb84(16, seed=0) '0111110111010010' >>> bb84(8, seed=0) '10110001' """ # Set up the random number generator. rng = np.random.default_rng(seed=seed) # Roughly 25% of the qubits will contribute to the key. # So we take more than we need. num_qubits = 6 * key_len # Measurement basis for Alice's qubits. alice_basis = rng.integers(2, size=num_qubits) # The set of states Alice will prepare. alice_state = rng.integers(2, size=num_qubits) # Measurement basis for Bob's qubits. bob_basis = rng.integers(2, size=num_qubits) # Quantum Circuit to simulate BB84 bb84_circ = qiskit.QuantumCircuit(num_qubits, name="BB84") # Alice prepares her qubits according to rules above. for index, _ in enumerate(alice_basis): if alice_state[index] == 1: bb84_circ.x(index) if alice_basis[index] == 1: bb84_circ.h(index) bb84_circ.barrier() # Bob measures the received qubits according to rules above. for index, _ in enumerate(bob_basis): if bob_basis[index] == 1: bb84_circ.h(index) bb84_circ.barrier() bb84_circ.measure_all() # Simulate the quantum circuit. sim = qiskit.Aer.get_backend("aer_simulator") # We only need to run one shot because the key is unique. # Multiple shots will produce the same key. job = qiskit.execute(bb84_circ, sim, shots=1, seed_simulator=seed) # Returns the result of measurement. result = job.result().get_counts(bb84_circ).most_frequent() # Extracting the generated key from the simulation results. # Only keep measurement results where Alice and Bob chose the same basis. gen_key = "".join( [ result_bit for alice_basis_bit, bob_basis_bit, result_bit in zip( alice_basis, bob_basis, result ) if alice_basis_bit == bob_basis_bit ] ) # Get final key. 
Pad with 0 if too short, otherwise truncate. key = gen_key[:key_len] if len(gen_key) >= key_len else gen_key.ljust(key_len, "0") return key if __name__ == "__main__": print(f"The generated key is : {bb84(8, seed=0)}") from doctest import testmod testmod()
-1
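As a rough, purely classical illustration of the sifting step in the BB84 record above (no qubits, no Qiskit), the sketch below assumes Bob reads Alice's bit correctly whenever their basis choices agree and simply discards the rest; on average about half of the raw bits survive.

import numpy as np

rng = np.random.default_rng(seed=0)
num_bits = 16
alice_basis = rng.integers(2, size=num_bits)  # 0 -> {0,1} basis, 1 -> {+,-} basis
alice_bits = rng.integers(2, size=num_bits)   # the states Alice prepares
bob_basis = rng.integers(2, size=num_bits)    # Bob's measurement bases

# Keep only the positions where Alice and Bob happened to pick the same basis.
sifted_key = "".join(
    str(bit)
    for a_basis, b_basis, bit in zip(alice_basis, bob_basis, alice_bits)
    if a_basis == b_basis
)
print(f"Raw bits: {num_bits}, sifted key: {sifted_key}")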
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
""" Signum function -- https://en.wikipedia.org/wiki/Sign_function """ def signum(num: float) -> int: """ Applies signum function on the number >>> signum(-10) -1 >>> signum(10) 1 >>> signum(0) 0 """ if num < 0: return -1 return 1 if num else 0 def test_signum() -> None: """ Tests the signum function """ assert signum(5) == 1 assert signum(-5) == -1 assert signum(0) == 0 if __name__ == "__main__": print(signum(12)) print(signum(-12)) print(signum(0))
""" Signum function -- https://en.wikipedia.org/wiki/Sign_function """ def signum(num: float) -> int: """ Applies signum function on the number >>> signum(-10) -1 >>> signum(10) 1 >>> signum(0) 0 """ if num < 0: return -1 return 1 if num else 0 def test_signum() -> None: """ Tests the signum function """ assert signum(5) == 1 assert signum(-5) == -1 assert signum(0) == 0 if __name__ == "__main__": print(signum(12)) print(signum(-12)) print(signum(0))
-1
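A small sanity check, shown only as an aside: for non-zero inputs the piecewise definition above agrees with the identity sign(x) = x / |x|.

def signum(num: float) -> int:
    if num < 0:
        return -1
    return 1 if num else 0


for value in (-7.5, -1, 0, 2, 42.0):
    expected = 0 if value == 0 else int(value / abs(value))
    assert signum(value) == expected
print("signum agrees with x / |x| on the sample values")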
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
def is_int_palindrome(num: int) -> bool: """ Returns whether `num` is a palindrome or not (see for reference https://en.wikipedia.org/wiki/Palindromic_number). >>> is_int_palindrome(-121) False >>> is_int_palindrome(0) True >>> is_int_palindrome(10) False >>> is_int_palindrome(11) True >>> is_int_palindrome(101) True >>> is_int_palindrome(120) False """ if num < 0: return False num_copy: int = num rev_num: int = 0 while num > 0: rev_num = rev_num * 10 + (num % 10) num //= 10 return num_copy == rev_num if __name__ == "__main__": import doctest doctest.testmod()
def is_int_palindrome(num: int) -> bool: """ Returns whether `num` is a palindrome or not (see for reference https://en.wikipedia.org/wiki/Palindromic_number). >>> is_int_palindrome(-121) False >>> is_int_palindrome(0) True >>> is_int_palindrome(10) False >>> is_int_palindrome(11) True >>> is_int_palindrome(101) True >>> is_int_palindrome(120) False """ if num < 0: return False num_copy: int = num rev_num: int = 0 while num > 0: rev_num = rev_num * 10 + (num % 10) num //= 10 return num_copy == rev_num if __name__ == "__main__": import doctest doctest.testmod()
-1
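For comparison only, the same check can be written with string reversal instead of arithmetic digit reversal; the helper name below is made up for this sketch.

def is_int_palindrome_str(num: int) -> bool:
    # Negative numbers are never palindromes because of the leading sign.
    return num >= 0 and str(num) == str(num)[::-1]


for n in (-121, 0, 10, 11, 101, 120):
    print(n, is_int_palindrome_str(n))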
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
#!/usr/bin/env python3 """ Davis–Putnam–Logemann–Loveland (DPLL) algorithm is a complete, backtracking-based search algorithm for deciding the satisfiability of propositional logic formulae in conjunctive normal form, i.e, for solving the Conjunctive Normal Form SATisfiability (CNF-SAT) problem. For more information about the algorithm: https://en.wikipedia.org/wiki/DPLL_algorithm """ from __future__ import annotations import random from collections.abc import Iterable class Clause: """ A clause represented in Conjunctive Normal Form. A clause is a set of literals, either complemented or otherwise. For example: {A1, A2, A3'} is the clause (A1 v A2 v A3') {A5', A2', A1} is the clause (A5' v A2' v A1) Create model >>> clause = Clause(["A1", "A2'", "A3"]) >>> clause.evaluate({"A1": True}) True """ def __init__(self, literals: list[str]) -> None: """ Represent the literals and an assignment in a clause." """ # Assign all literals to None initially self.literals: dict[str, bool | None] = {literal: None for literal in literals} def __str__(self) -> str: """ To print a clause as in Conjunctive Normal Form. >>> str(Clause(["A1", "A2'", "A3"])) "{A1 , A2' , A3}" """ return "{" + " , ".join(self.literals) + "}" def __len__(self) -> int: """ To print a clause as in Conjunctive Normal Form. >>> len(Clause([])) 0 >>> len(Clause(["A1", "A2'", "A3"])) 3 """ return len(self.literals) def assign(self, model: dict[str, bool | None]) -> None: """ Assign values to literals of the clause as given by model. """ for literal in self.literals: symbol = literal[:2] if symbol in model: value = model[symbol] else: continue if value is not None: # Complement assignment if literal is in complemented form if literal.endswith("'"): value = not value self.literals[literal] = value def evaluate(self, model: dict[str, bool | None]) -> bool | None: """ Evaluates the clause with the assignments in model. This has the following steps: 1. Return True if both a literal and its complement exist in the clause. 2. Return True if a single literal has the assignment True. 3. Return None(unable to complete evaluation) if a literal has no assignment. 4. Compute disjunction of all values assigned in clause. """ for literal in self.literals: symbol = literal.rstrip("'") if literal.endswith("'") else literal + "'" if symbol in self.literals: return True self.assign(model) for value in self.literals.values(): if value in (True, None): return value return any(self.literals.values()) class Formula: """ A formula represented in Conjunctive Normal Form. A formula is a set of clauses. For example, {{A1, A2, A3'}, {A5', A2', A1}} is ((A1 v A2 v A3') and (A5' v A2' v A1)) """ def __init__(self, clauses: Iterable[Clause]) -> None: """ Represent the number of clauses and the clauses themselves. """ self.clauses = list(clauses) def __str__(self) -> str: """ To print a formula as in Conjunctive Normal Form. str(Formula([Clause(["A1", "A2'", "A3"]), Clause(["A5'", "A2'", "A1"])])) "{{A1 , A2' , A3} , {A5' , A2' , A1}}" """ return "{" + " , ".join(str(clause) for clause in self.clauses) + "}" def generate_clause() -> Clause: """ Randomly generate a clause. All literals have the name Ax, where x is an integer from 1 to 5. 
""" literals = [] no_of_literals = random.randint(1, 5) base_var = "A" i = 0 while i < no_of_literals: var_no = random.randint(1, 5) var_name = base_var + str(var_no) var_complement = random.randint(0, 1) if var_complement == 1: var_name += "'" if var_name in literals: i -= 1 else: literals.append(var_name) i += 1 return Clause(literals) def generate_formula() -> Formula: """ Randomly generate a formula. """ clauses: set[Clause] = set() no_of_clauses = random.randint(1, 10) while len(clauses) < no_of_clauses: clauses.add(generate_clause()) return Formula(clauses) def generate_parameters(formula: Formula) -> tuple[list[Clause], list[str]]: """ Return the clauses and symbols from a formula. A symbol is the uncomplemented form of a literal. For example, Symbol of A3 is A3. Symbol of A5' is A5. >>> formula = Formula([Clause(["A1", "A2'", "A3"]), Clause(["A5'", "A2'", "A1"])]) >>> clauses, symbols = generate_parameters(formula) >>> clauses_list = [str(i) for i in clauses] >>> clauses_list ["{A1 , A2' , A3}", "{A5' , A2' , A1}"] >>> symbols ['A1', 'A2', 'A3', 'A5'] """ clauses = formula.clauses symbols_set = [] for clause in formula.clauses: for literal in clause.literals: symbol = literal[:2] if symbol not in symbols_set: symbols_set.append(symbol) return clauses, symbols_set def find_pure_symbols( clauses: list[Clause], symbols: list[str], model: dict[str, bool | None] ) -> tuple[list[str], dict[str, bool | None]]: """ Return pure symbols and their values to satisfy clause. Pure symbols are symbols in a formula that exist only in one form, either complemented or otherwise. For example, { { A4 , A3 , A5' , A1 , A3' } , { A4 } , { A3 } } has pure symbols A4, A5' and A1. This has the following steps: 1. Ignore clauses that have already evaluated to be True. 2. Find symbols that occur only in one form in the rest of the clauses. 3. Assign value True or False depending on whether the symbols occurs in normal or complemented form respectively. >>> formula = Formula([Clause(["A1", "A2'", "A3"]), Clause(["A5'", "A2'", "A1"])]) >>> clauses, symbols = generate_parameters(formula) >>> pure_symbols, values = find_pure_symbols(clauses, symbols, {}) >>> pure_symbols ['A1', 'A2', 'A3', 'A5'] >>> values {'A1': True, 'A2': False, 'A3': True, 'A5': False} """ pure_symbols = [] assignment: dict[str, bool | None] = {} literals = [] for clause in clauses: if clause.evaluate(model): continue for literal in clause.literals: literals.append(literal) for s in symbols: sym = s + "'" if (s in literals and sym not in literals) or ( s not in literals and sym in literals ): pure_symbols.append(s) for p in pure_symbols: assignment[p] = None for s in pure_symbols: sym = s + "'" if s in literals: assignment[s] = True elif sym in literals: assignment[s] = False return pure_symbols, assignment def find_unit_clauses( clauses: list[Clause], model: dict[str, bool | None] ) -> tuple[list[str], dict[str, bool | None]]: """ Returns the unit symbols and their values to satisfy clause. Unit symbols are symbols in a formula that are: - Either the only symbol in a clause - Or all other literals in that clause have been assigned False This has the following steps: 1. Find symbols that are the only occurrences in a clause. 2. Find symbols in a clause where all other literals are assigned False. 3. Assign True or False depending on whether the symbols occurs in normal or complemented form respectively. 
>>> clause1 = Clause(["A4", "A3", "A5'", "A1", "A3'"]) >>> clause2 = Clause(["A4"]) >>> clause3 = Clause(["A3"]) >>> clauses, symbols = generate_parameters(Formula([clause1, clause2, clause3])) >>> unit_clauses, values = find_unit_clauses(clauses, {}) >>> unit_clauses ['A4', 'A3'] >>> values {'A4': True, 'A3': True} """ unit_symbols = [] for clause in clauses: if len(clause) == 1: unit_symbols.append(next(iter(clause.literals.keys()))) else: f_count, n_count = 0, 0 for literal, value in clause.literals.items(): if value is False: f_count += 1 elif value is None: sym = literal n_count += 1 if f_count == len(clause) - 1 and n_count == 1: unit_symbols.append(sym) assignment: dict[str, bool | None] = {} for i in unit_symbols: symbol = i[:2] assignment[symbol] = len(i) == 2 unit_symbols = [i[:2] for i in unit_symbols] return unit_symbols, assignment def dpll_algorithm( clauses: list[Clause], symbols: list[str], model: dict[str, bool | None] ) -> tuple[bool | None, dict[str, bool | None] | None]: """ Returns the model if the formula is satisfiable, else None This has the following steps: 1. If every clause in clauses is True, return True. 2. If some clause in clauses is False, return False. 3. Find pure symbols. 4. Find unit symbols. >>> formula = Formula([Clause(["A4", "A3", "A5'", "A1", "A3'"]), Clause(["A4"])]) >>> clauses, symbols = generate_parameters(formula) >>> soln, model = dpll_algorithm(clauses, symbols, {}) >>> soln True >>> model {'A4': True} """ check_clause_all_true = True for clause in clauses: clause_check = clause.evaluate(model) if clause_check is False: return False, None elif clause_check is None: check_clause_all_true = False continue if check_clause_all_true: return True, model try: pure_symbols, assignment = find_pure_symbols(clauses, symbols, model) except RecursionError: print("raises a RecursionError and is") return None, {} p = None if len(pure_symbols) > 0: p, value = pure_symbols[0], assignment[pure_symbols[0]] if p: tmp_model = model tmp_model[p] = value tmp_symbols = list(symbols) if p in tmp_symbols: tmp_symbols.remove(p) return dpll_algorithm(clauses, tmp_symbols, tmp_model) unit_symbols, assignment = find_unit_clauses(clauses, model) p = None if len(unit_symbols) > 0: p, value = unit_symbols[0], assignment[unit_symbols[0]] if p: tmp_model = model tmp_model[p] = value tmp_symbols = list(symbols) if p in tmp_symbols: tmp_symbols.remove(p) return dpll_algorithm(clauses, tmp_symbols, tmp_model) p = symbols[0] rest = symbols[1:] tmp1, tmp2 = model, model tmp1[p], tmp2[p] = True, False return dpll_algorithm(clauses, rest, tmp1) or dpll_algorithm(clauses, rest, tmp2) if __name__ == "__main__": import doctest doctest.testmod() formula = generate_formula() print(f"The formula {formula} is", end=" ") clauses, symbols = generate_parameters(formula) solution, model = dpll_algorithm(clauses, symbols, {}) if solution: print(f"satisfiable with the assignment {model}.") else: print("not satisfiable.")
#!/usr/bin/env python3 """ Davis–Putnam–Logemann–Loveland (DPLL) algorithm is a complete, backtracking-based search algorithm for deciding the satisfiability of propositional logic formulae in conjunctive normal form, i.e, for solving the Conjunctive Normal Form SATisfiability (CNF-SAT) problem. For more information about the algorithm: https://en.wikipedia.org/wiki/DPLL_algorithm """ from __future__ import annotations import random from collections.abc import Iterable class Clause: """ A clause represented in Conjunctive Normal Form. A clause is a set of literals, either complemented or otherwise. For example: {A1, A2, A3'} is the clause (A1 v A2 v A3') {A5', A2', A1} is the clause (A5' v A2' v A1) Create model >>> clause = Clause(["A1", "A2'", "A3"]) >>> clause.evaluate({"A1": True}) True """ def __init__(self, literals: list[str]) -> None: """ Represent the literals and an assignment in a clause." """ # Assign all literals to None initially self.literals: dict[str, bool | None] = {literal: None for literal in literals} def __str__(self) -> str: """ To print a clause as in Conjunctive Normal Form. >>> str(Clause(["A1", "A2'", "A3"])) "{A1 , A2' , A3}" """ return "{" + " , ".join(self.literals) + "}" def __len__(self) -> int: """ To print a clause as in Conjunctive Normal Form. >>> len(Clause([])) 0 >>> len(Clause(["A1", "A2'", "A3"])) 3 """ return len(self.literals) def assign(self, model: dict[str, bool | None]) -> None: """ Assign values to literals of the clause as given by model. """ for literal in self.literals: symbol = literal[:2] if symbol in model: value = model[symbol] else: continue if value is not None: # Complement assignment if literal is in complemented form if literal.endswith("'"): value = not value self.literals[literal] = value def evaluate(self, model: dict[str, bool | None]) -> bool | None: """ Evaluates the clause with the assignments in model. This has the following steps: 1. Return True if both a literal and its complement exist in the clause. 2. Return True if a single literal has the assignment True. 3. Return None(unable to complete evaluation) if a literal has no assignment. 4. Compute disjunction of all values assigned in clause. """ for literal in self.literals: symbol = literal.rstrip("'") if literal.endswith("'") else literal + "'" if symbol in self.literals: return True self.assign(model) for value in self.literals.values(): if value in (True, None): return value return any(self.literals.values()) class Formula: """ A formula represented in Conjunctive Normal Form. A formula is a set of clauses. For example, {{A1, A2, A3'}, {A5', A2', A1}} is ((A1 v A2 v A3') and (A5' v A2' v A1)) """ def __init__(self, clauses: Iterable[Clause]) -> None: """ Represent the number of clauses and the clauses themselves. """ self.clauses = list(clauses) def __str__(self) -> str: """ To print a formula as in Conjunctive Normal Form. str(Formula([Clause(["A1", "A2'", "A3"]), Clause(["A5'", "A2'", "A1"])])) "{{A1 , A2' , A3} , {A5' , A2' , A1}}" """ return "{" + " , ".join(str(clause) for clause in self.clauses) + "}" def generate_clause() -> Clause: """ Randomly generate a clause. All literals have the name Ax, where x is an integer from 1 to 5. 
""" literals = [] no_of_literals = random.randint(1, 5) base_var = "A" i = 0 while i < no_of_literals: var_no = random.randint(1, 5) var_name = base_var + str(var_no) var_complement = random.randint(0, 1) if var_complement == 1: var_name += "'" if var_name in literals: i -= 1 else: literals.append(var_name) i += 1 return Clause(literals) def generate_formula() -> Formula: """ Randomly generate a formula. """ clauses: set[Clause] = set() no_of_clauses = random.randint(1, 10) while len(clauses) < no_of_clauses: clauses.add(generate_clause()) return Formula(clauses) def generate_parameters(formula: Formula) -> tuple[list[Clause], list[str]]: """ Return the clauses and symbols from a formula. A symbol is the uncomplemented form of a literal. For example, Symbol of A3 is A3. Symbol of A5' is A5. >>> formula = Formula([Clause(["A1", "A2'", "A3"]), Clause(["A5'", "A2'", "A1"])]) >>> clauses, symbols = generate_parameters(formula) >>> clauses_list = [str(i) for i in clauses] >>> clauses_list ["{A1 , A2' , A3}", "{A5' , A2' , A1}"] >>> symbols ['A1', 'A2', 'A3', 'A5'] """ clauses = formula.clauses symbols_set = [] for clause in formula.clauses: for literal in clause.literals: symbol = literal[:2] if symbol not in symbols_set: symbols_set.append(symbol) return clauses, symbols_set def find_pure_symbols( clauses: list[Clause], symbols: list[str], model: dict[str, bool | None] ) -> tuple[list[str], dict[str, bool | None]]: """ Return pure symbols and their values to satisfy clause. Pure symbols are symbols in a formula that exist only in one form, either complemented or otherwise. For example, { { A4 , A3 , A5' , A1 , A3' } , { A4 } , { A3 } } has pure symbols A4, A5' and A1. This has the following steps: 1. Ignore clauses that have already evaluated to be True. 2. Find symbols that occur only in one form in the rest of the clauses. 3. Assign value True or False depending on whether the symbols occurs in normal or complemented form respectively. >>> formula = Formula([Clause(["A1", "A2'", "A3"]), Clause(["A5'", "A2'", "A1"])]) >>> clauses, symbols = generate_parameters(formula) >>> pure_symbols, values = find_pure_symbols(clauses, symbols, {}) >>> pure_symbols ['A1', 'A2', 'A3', 'A5'] >>> values {'A1': True, 'A2': False, 'A3': True, 'A5': False} """ pure_symbols = [] assignment: dict[str, bool | None] = {} literals = [] for clause in clauses: if clause.evaluate(model): continue for literal in clause.literals: literals.append(literal) for s in symbols: sym = s + "'" if (s in literals and sym not in literals) or ( s not in literals and sym in literals ): pure_symbols.append(s) for p in pure_symbols: assignment[p] = None for s in pure_symbols: sym = s + "'" if s in literals: assignment[s] = True elif sym in literals: assignment[s] = False return pure_symbols, assignment def find_unit_clauses( clauses: list[Clause], model: dict[str, bool | None] ) -> tuple[list[str], dict[str, bool | None]]: """ Returns the unit symbols and their values to satisfy clause. Unit symbols are symbols in a formula that are: - Either the only symbol in a clause - Or all other literals in that clause have been assigned False This has the following steps: 1. Find symbols that are the only occurrences in a clause. 2. Find symbols in a clause where all other literals are assigned False. 3. Assign True or False depending on whether the symbols occurs in normal or complemented form respectively. 
>>> clause1 = Clause(["A4", "A3", "A5'", "A1", "A3'"]) >>> clause2 = Clause(["A4"]) >>> clause3 = Clause(["A3"]) >>> clauses, symbols = generate_parameters(Formula([clause1, clause2, clause3])) >>> unit_clauses, values = find_unit_clauses(clauses, {}) >>> unit_clauses ['A4', 'A3'] >>> values {'A4': True, 'A3': True} """ unit_symbols = [] for clause in clauses: if len(clause) == 1: unit_symbols.append(next(iter(clause.literals.keys()))) else: f_count, n_count = 0, 0 for literal, value in clause.literals.items(): if value is False: f_count += 1 elif value is None: sym = literal n_count += 1 if f_count == len(clause) - 1 and n_count == 1: unit_symbols.append(sym) assignment: dict[str, bool | None] = {} for i in unit_symbols: symbol = i[:2] assignment[symbol] = len(i) == 2 unit_symbols = [i[:2] for i in unit_symbols] return unit_symbols, assignment def dpll_algorithm( clauses: list[Clause], symbols: list[str], model: dict[str, bool | None] ) -> tuple[bool | None, dict[str, bool | None] | None]: """ Returns the model if the formula is satisfiable, else None This has the following steps: 1. If every clause in clauses is True, return True. 2. If some clause in clauses is False, return False. 3. Find pure symbols. 4. Find unit symbols. >>> formula = Formula([Clause(["A4", "A3", "A5'", "A1", "A3'"]), Clause(["A4"])]) >>> clauses, symbols = generate_parameters(formula) >>> soln, model = dpll_algorithm(clauses, symbols, {}) >>> soln True >>> model {'A4': True} """ check_clause_all_true = True for clause in clauses: clause_check = clause.evaluate(model) if clause_check is False: return False, None elif clause_check is None: check_clause_all_true = False continue if check_clause_all_true: return True, model try: pure_symbols, assignment = find_pure_symbols(clauses, symbols, model) except RecursionError: print("raises a RecursionError and is") return None, {} p = None if len(pure_symbols) > 0: p, value = pure_symbols[0], assignment[pure_symbols[0]] if p: tmp_model = model tmp_model[p] = value tmp_symbols = list(symbols) if p in tmp_symbols: tmp_symbols.remove(p) return dpll_algorithm(clauses, tmp_symbols, tmp_model) unit_symbols, assignment = find_unit_clauses(clauses, model) p = None if len(unit_symbols) > 0: p, value = unit_symbols[0], assignment[unit_symbols[0]] if p: tmp_model = model tmp_model[p] = value tmp_symbols = list(symbols) if p in tmp_symbols: tmp_symbols.remove(p) return dpll_algorithm(clauses, tmp_symbols, tmp_model) p = symbols[0] rest = symbols[1:] tmp1, tmp2 = model, model tmp1[p], tmp2[p] = True, False return dpll_algorithm(clauses, rest, tmp1) or dpll_algorithm(clauses, rest, tmp2) if __name__ == "__main__": import doctest doctest.testmod() formula = generate_formula() print(f"The formula {formula} is", end=" ") clauses, symbols = generate_parameters(formula) solution, model = dpll_algorithm(clauses, symbols, {}) if solution: print(f"satisfiable with the assignment {model}.") else: print("not satisfiable.")
-1
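To make the decision problem itself concrete, here is a naive brute-force satisfiability check over a CNF encoded as sets of signed integers (a positive integer is a plain variable, a negative one its complement). The encoding and function name are assumptions for this sketch and differ from the Clause/Formula classes above; DPLL reaches the same answers without enumerating every assignment.

from itertools import product


def brute_force_sat(clauses):
    """Return a satisfying assignment as a dict, or None if unsatisfiable."""
    variables = sorted({abs(lit) for clause in clauses for lit in clause})
    for values in product([False, True], repeat=len(variables)):
        model = dict(zip(variables, values))
        # A clause is satisfied if at least one of its literals is True.
        if all(
            any(model[abs(lit)] == (lit > 0) for lit in clause)
            for clause in clauses
        ):
            return model
    return None


# (A1 v A2') and (A1' v A2) is satisfiable exactly when A1 == A2.
print(brute_force_sat([{1, -2}, {-1, 2}]))
# (A1) and (A1') has no satisfying assignment.
print(brute_force_sat([{1}, {-1}]))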
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
from collections import defaultdict from graphs.minimum_spanning_tree_prims import prisms_algorithm as mst def test_prim_successful_result(): num_nodes, num_edges = 9, 14 # noqa: F841 edges = [ [0, 1, 4], [0, 7, 8], [1, 2, 8], [7, 8, 7], [7, 6, 1], [2, 8, 2], [8, 6, 6], [2, 3, 7], [2, 5, 4], [6, 5, 2], [3, 5, 14], [3, 4, 9], [5, 4, 10], [1, 7, 11], ] adjancency = defaultdict(list) for node1, node2, cost in edges: adjancency[node1].append([node2, cost]) adjancency[node2].append([node1, cost]) result = mst(adjancency) expected = [ [7, 6, 1], [2, 8, 2], [6, 5, 2], [0, 1, 4], [2, 5, 4], [2, 3, 7], [0, 7, 8], [3, 4, 9], ] for answer in expected: edge = tuple(answer[:2]) reverse = tuple(edge[::-1]) assert edge in result or reverse in result
from collections import defaultdict from graphs.minimum_spanning_tree_prims import prisms_algorithm as mst def test_prim_successful_result(): num_nodes, num_edges = 9, 14 # noqa: F841 edges = [ [0, 1, 4], [0, 7, 8], [1, 2, 8], [7, 8, 7], [7, 6, 1], [2, 8, 2], [8, 6, 6], [2, 3, 7], [2, 5, 4], [6, 5, 2], [3, 5, 14], [3, 4, 9], [5, 4, 10], [1, 7, 11], ] adjancency = defaultdict(list) for node1, node2, cost in edges: adjancency[node1].append([node2, cost]) adjancency[node2].append([node1, cost]) result = mst(adjancency) expected = [ [7, 6, 1], [2, 8, 2], [6, 5, 2], [0, 1, 4], [2, 5, 4], [2, 3, 7], [0, 7, 8], [3, 4, 9], ] for answer in expected: edge = tuple(answer[:2]) reverse = tuple(edge[::-1]) assert edge in result or reverse in result
-1
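An independent sketch of Prim's algorithm on the same adjacency-list format used in the test above (node -> [[neighbour, cost], ...]); it returns only the total MST weight rather than the edge list, and it is not the repository's prisms_algorithm.

import heapq
from collections import defaultdict


def mst_weight(adjacency, start=0):
    """Total weight of a minimum spanning tree of a connected graph."""
    visited = set()
    heap = [(0, start)]
    total = 0
    while heap and len(visited) < len(adjacency):
        cost, node = heapq.heappop(heap)
        if node in visited:
            continue
        visited.add(node)
        total += cost
        for neighbour, edge_cost in adjacency[node]:
            if neighbour not in visited:
                heapq.heappush(heap, (edge_cost, neighbour))
    return total


graph = defaultdict(list)
for u, v, w in [[0, 1, 4], [0, 7, 8], [1, 2, 8], [7, 6, 1], [6, 5, 2], [2, 5, 4]]:
    graph[u].append([v, w])
    graph[v].append([u, w])
print(mst_weight(graph))  # 19 for this small sample graph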
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
from __future__ import annotations def find_max_iterative(nums: list[int | float]) -> int | float: """ >>> for nums in ([3, 2, 1], [-3, -2, -1], [3, -3, 0], [3.0, 3.1, 2.9]): ... find_max_iterative(nums) == max(nums) True True True True >>> find_max_iterative([2, 4, 9, 7, 19, 94, 5]) 94 >>> find_max_iterative([]) Traceback (most recent call last): ... ValueError: find_max_iterative() arg is an empty sequence """ if len(nums) == 0: raise ValueError("find_max_iterative() arg is an empty sequence") max_num = nums[0] for x in nums: if x > max_num: max_num = x return max_num # Divide and Conquer algorithm def find_max_recursive(nums: list[int | float], left: int, right: int) -> int | float: """ find max value in list :param nums: contains elements :param left: index of first element :param right: index of last element :return: max in nums >>> for nums in ([3, 2, 1], [-3, -2, -1], [3, -3, 0], [3.0, 3.1, 2.9]): ... find_max_recursive(nums, 0, len(nums) - 1) == max(nums) True True True True >>> nums = [1, 3, 5, 7, 9, 2, 4, 6, 8, 10] >>> find_max_recursive(nums, 0, len(nums) - 1) == max(nums) True >>> find_max_recursive([], 0, 0) Traceback (most recent call last): ... ValueError: find_max_recursive() arg is an empty sequence >>> find_max_recursive(nums, 0, len(nums)) == max(nums) Traceback (most recent call last): ... IndexError: list index out of range >>> find_max_recursive(nums, -len(nums), -1) == max(nums) True >>> find_max_recursive(nums, -len(nums) - 1, -1) == max(nums) Traceback (most recent call last): ... IndexError: list index out of range """ if len(nums) == 0: raise ValueError("find_max_recursive() arg is an empty sequence") if ( left >= len(nums) or left < -len(nums) or right >= len(nums) or right < -len(nums) ): raise IndexError("list index out of range") if left == right: return nums[left] mid = (left + right) >> 1 # the middle left_max = find_max_recursive(nums, left, mid) # find max in range[left, mid] right_max = find_max_recursive( nums, mid + 1, right ) # find max in range[mid + 1, right] return left_max if left_max >= right_max else right_max if __name__ == "__main__": import doctest doctest.testmod(verbose=True)
from __future__ import annotations def find_max_iterative(nums: list[int | float]) -> int | float: """ >>> for nums in ([3, 2, 1], [-3, -2, -1], [3, -3, 0], [3.0, 3.1, 2.9]): ... find_max_iterative(nums) == max(nums) True True True True >>> find_max_iterative([2, 4, 9, 7, 19, 94, 5]) 94 >>> find_max_iterative([]) Traceback (most recent call last): ... ValueError: find_max_iterative() arg is an empty sequence """ if len(nums) == 0: raise ValueError("find_max_iterative() arg is an empty sequence") max_num = nums[0] for x in nums: if x > max_num: max_num = x return max_num # Divide and Conquer algorithm def find_max_recursive(nums: list[int | float], left: int, right: int) -> int | float: """ find max value in list :param nums: contains elements :param left: index of first element :param right: index of last element :return: max in nums >>> for nums in ([3, 2, 1], [-3, -2, -1], [3, -3, 0], [3.0, 3.1, 2.9]): ... find_max_recursive(nums, 0, len(nums) - 1) == max(nums) True True True True >>> nums = [1, 3, 5, 7, 9, 2, 4, 6, 8, 10] >>> find_max_recursive(nums, 0, len(nums) - 1) == max(nums) True >>> find_max_recursive([], 0, 0) Traceback (most recent call last): ... ValueError: find_max_recursive() arg is an empty sequence >>> find_max_recursive(nums, 0, len(nums)) == max(nums) Traceback (most recent call last): ... IndexError: list index out of range >>> find_max_recursive(nums, -len(nums), -1) == max(nums) True >>> find_max_recursive(nums, -len(nums) - 1, -1) == max(nums) Traceback (most recent call last): ... IndexError: list index out of range """ if len(nums) == 0: raise ValueError("find_max_recursive() arg is an empty sequence") if ( left >= len(nums) or left < -len(nums) or right >= len(nums) or right < -len(nums) ): raise IndexError("list index out of range") if left == right: return nums[left] mid = (left + right) >> 1 # the middle left_max = find_max_recursive(nums, left, mid) # find max in range[left, mid] right_max = find_max_recursive( nums, mid + 1, right ) # find max in range[mid + 1, right] return left_max if left_max >= right_max else right_max if __name__ == "__main__": import doctest doctest.testmod(verbose=True)
-1
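The same divide-and-conquer recursion works verbatim for the minimum; the sketch below is added only for illustration and is not part of the module above (bounds checking is omitted for brevity).

from __future__ import annotations


def find_min_recursive(nums: list[int | float], left: int, right: int) -> int | float:
    if left == right:
        return nums[left]
    mid = (left + right) >> 1  # split the range in half
    left_min = find_min_recursive(nums, left, mid)
    right_min = find_min_recursive(nums, mid + 1, right)
    return left_min if left_min <= right_min else right_min


nums = [1, 3, 5, 7, 9, 2, 4, 6, 8, 10]
assert find_min_recursive(nums, 0, len(nums) - 1) == min(nums)
print("find_min_recursive agrees with the built-in min")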
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
""" Project Euler Problem 10: https://projecteuler.net/problem=10 Summation of primes The sum of the primes below 10 is 2 + 3 + 5 + 7 = 17. Find the sum of all the primes below two million. References: - https://en.wikipedia.org/wiki/Prime_number """ import math from collections.abc import Iterator from itertools import takewhile def is_prime(number: int) -> bool: """Checks to see if a number is a prime in O(sqrt(n)). A number is prime if it has exactly two factors: 1 and itself. Returns boolean representing primality of given number num (i.e., if the result is true, then the number is indeed prime else it is not). >>> is_prime(2) True >>> is_prime(3) True >>> is_prime(27) False >>> is_prime(2999) True >>> is_prime(0) False >>> is_prime(1) False """ if 1 < number < 4: # 2 and 3 are primes return True elif number < 2 or number % 2 == 0 or number % 3 == 0: # Negatives, 0, 1, all even numbers, all multiples of 3 are not primes return False # All primes number are in format of 6k +/- 1 for i in range(5, int(math.sqrt(number) + 1), 6): if number % i == 0 or number % (i + 2) == 0: return False return True def prime_generator() -> Iterator[int]: """ Generate a list sequence of prime numbers """ num = 2 while True: if is_prime(num): yield num num += 1 def solution(n: int = 2000000) -> int: """ Returns the sum of all the primes below n. >>> solution(1000) 76127 >>> solution(5000) 1548136 >>> solution(10000) 5736396 >>> solution(7) 10 """ return sum(takewhile(lambda x: x < n, prime_generator())) if __name__ == "__main__": print(f"{solution() = }")
""" Project Euler Problem 10: https://projecteuler.net/problem=10 Summation of primes The sum of the primes below 10 is 2 + 3 + 5 + 7 = 17. Find the sum of all the primes below two million. References: - https://en.wikipedia.org/wiki/Prime_number """ import math from collections.abc import Iterator from itertools import takewhile def is_prime(number: int) -> bool: """Checks to see if a number is a prime in O(sqrt(n)). A number is prime if it has exactly two factors: 1 and itself. Returns boolean representing primality of given number num (i.e., if the result is true, then the number is indeed prime else it is not). >>> is_prime(2) True >>> is_prime(3) True >>> is_prime(27) False >>> is_prime(2999) True >>> is_prime(0) False >>> is_prime(1) False """ if 1 < number < 4: # 2 and 3 are primes return True elif number < 2 or number % 2 == 0 or number % 3 == 0: # Negatives, 0, 1, all even numbers, all multiples of 3 are not primes return False # All primes number are in format of 6k +/- 1 for i in range(5, int(math.sqrt(number) + 1), 6): if number % i == 0 or number % (i + 2) == 0: return False return True def prime_generator() -> Iterator[int]: """ Generate a list sequence of prime numbers """ num = 2 while True: if is_prime(num): yield num num += 1 def solution(n: int = 2000000) -> int: """ Returns the sum of all the primes below n. >>> solution(1000) 76127 >>> solution(5000) 1548136 >>> solution(10000) 5736396 >>> solution(7) 10 """ return sum(takewhile(lambda x: x < n, prime_generator())) if __name__ == "__main__": print(f"{solution() = }")
-1
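An alternative approach, sketched here only for comparison with the trial-division generator above, is a sieve of Eratosthenes: it finds all primes below the bound in one pass and then sums them.

def sum_primes_below(n: int) -> int:
    """Sum of all primes strictly below n, via a sieve of Eratosthenes."""
    if n < 3:
        return 0
    sieve = bytearray([1]) * n
    sieve[0] = sieve[1] = 0
    for i in range(2, int(n**0.5) + 1):
        if sieve[i]:
            # Mark every multiple of i starting from i*i as composite.
            sieve[i * i :: i] = bytearray(len(sieve[i * i :: i]))
    return sum(i for i, is_prime in enumerate(sieve) if is_prime)


print(sum_primes_below(10))  # 17, matching the example in the problem statement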
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
# Conversion Conversion programs convert data from one type, numerical base, or unit into another, e.g. binary to decimal, integer to string, or feet to meters. * <https://en.wikipedia.org/wiki/Data_conversion> * <https://en.wikipedia.org/wiki/Transcoding>
# Conversion Conversion programs convert data from one type, numerical base, or unit into another, e.g. binary to decimal, integer to string, or feet to meters. * <https://en.wikipedia.org/wiki/Data_conversion> * <https://en.wikipedia.org/wiki/Transcoding>
-1
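As a tiny illustration of the kind of conversion these programs perform, Python's built-ins already handle the binary/decimal case:

binary = "101101"
decimal = int(binary, 2)  # 45
print(decimal)
print(bin(decimal)[2:])   # back to "101101"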
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
# https://en.wikipedia.org/wiki/Charge_carrier_density # https://www.pveducation.org/pvcdrom/pn-junctions/equilibrium-carrier-concentration # http://www.ece.utep.edu/courses/ee3329/ee3329/Studyguide/ToC/Fundamentals/Carriers/concentrations.html from __future__ import annotations def carrier_concentration( electron_conc: float, hole_conc: float, intrinsic_conc: float, ) -> tuple: """ This function can calculate any one of the three - 1. Electron Concentration 2, Hole Concentration 3. Intrinsic Concentration given the other two. Examples - >>> carrier_concentration(electron_conc=25, hole_conc=100, intrinsic_conc=0) ('intrinsic_conc', 50.0) >>> carrier_concentration(electron_conc=0, hole_conc=1600, intrinsic_conc=200) ('electron_conc', 25.0) >>> carrier_concentration(electron_conc=1000, hole_conc=0, intrinsic_conc=1200) ('hole_conc', 1440.0) >>> carrier_concentration(electron_conc=1000, hole_conc=400, intrinsic_conc=1200) Traceback (most recent call last): ... ValueError: You cannot supply more or less than 2 values >>> carrier_concentration(electron_conc=-1000, hole_conc=0, intrinsic_conc=1200) Traceback (most recent call last): ... ValueError: Electron concentration cannot be negative in a semiconductor >>> carrier_concentration(electron_conc=0, hole_conc=-400, intrinsic_conc=1200) Traceback (most recent call last): ... ValueError: Hole concentration cannot be negative in a semiconductor >>> carrier_concentration(electron_conc=0, hole_conc=400, intrinsic_conc=-1200) Traceback (most recent call last): ... ValueError: Intrinsic concentration cannot be negative in a semiconductor """ if (electron_conc, hole_conc, intrinsic_conc).count(0) != 1: raise ValueError("You cannot supply more or less than 2 values") elif electron_conc < 0: raise ValueError("Electron concentration cannot be negative in a semiconductor") elif hole_conc < 0: raise ValueError("Hole concentration cannot be negative in a semiconductor") elif intrinsic_conc < 0: raise ValueError( "Intrinsic concentration cannot be negative in a semiconductor" ) elif electron_conc == 0: return ( "electron_conc", intrinsic_conc**2 / hole_conc, ) elif hole_conc == 0: return ( "hole_conc", intrinsic_conc**2 / electron_conc, ) elif intrinsic_conc == 0: return ( "intrinsic_conc", (electron_conc * hole_conc) ** 0.5, ) else: return (-1, -1) if __name__ == "__main__": import doctest doctest.testmod()
# https://en.wikipedia.org/wiki/Charge_carrier_density # https://www.pveducation.org/pvcdrom/pn-junctions/equilibrium-carrier-concentration # http://www.ece.utep.edu/courses/ee3329/ee3329/Studyguide/ToC/Fundamentals/Carriers/concentrations.html from __future__ import annotations def carrier_concentration( electron_conc: float, hole_conc: float, intrinsic_conc: float, ) -> tuple: """ This function can calculate any one of the three - 1. Electron Concentration 2, Hole Concentration 3. Intrinsic Concentration given the other two. Examples - >>> carrier_concentration(electron_conc=25, hole_conc=100, intrinsic_conc=0) ('intrinsic_conc', 50.0) >>> carrier_concentration(electron_conc=0, hole_conc=1600, intrinsic_conc=200) ('electron_conc', 25.0) >>> carrier_concentration(electron_conc=1000, hole_conc=0, intrinsic_conc=1200) ('hole_conc', 1440.0) >>> carrier_concentration(electron_conc=1000, hole_conc=400, intrinsic_conc=1200) Traceback (most recent call last): ... ValueError: You cannot supply more or less than 2 values >>> carrier_concentration(electron_conc=-1000, hole_conc=0, intrinsic_conc=1200) Traceback (most recent call last): ... ValueError: Electron concentration cannot be negative in a semiconductor >>> carrier_concentration(electron_conc=0, hole_conc=-400, intrinsic_conc=1200) Traceback (most recent call last): ... ValueError: Hole concentration cannot be negative in a semiconductor >>> carrier_concentration(electron_conc=0, hole_conc=400, intrinsic_conc=-1200) Traceback (most recent call last): ... ValueError: Intrinsic concentration cannot be negative in a semiconductor """ if (electron_conc, hole_conc, intrinsic_conc).count(0) != 1: raise ValueError("You cannot supply more or less than 2 values") elif electron_conc < 0: raise ValueError("Electron concentration cannot be negative in a semiconductor") elif hole_conc < 0: raise ValueError("Hole concentration cannot be negative in a semiconductor") elif intrinsic_conc < 0: raise ValueError( "Intrinsic concentration cannot be negative in a semiconductor" ) elif electron_conc == 0: return ( "electron_conc", intrinsic_conc**2 / hole_conc, ) elif hole_conc == 0: return ( "hole_conc", intrinsic_conc**2 / electron_conc, ) elif intrinsic_conc == 0: return ( "intrinsic_conc", (electron_conc * hole_conc) ** 0.5, ) else: return (-1, -1) if __name__ == "__main__": import doctest doctest.testmod()
-1
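The function above rearranges the mass-action relation n * p = n_i**2; a quick numeric check with made-up values:

electron_conc = 25.0
hole_conc = 100.0
intrinsic_conc = (electron_conc * hole_conc) ** 0.5  # 50.0
print(intrinsic_conc)
# Recover one carrier concentration from the intrinsic value and the other.
print(intrinsic_conc**2 / hole_conc)                 # 25.0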
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
# Binary Tree Traversal ## Overview The combination of binary trees being data structures and traversal being an algorithm relates to classic problems, either directly or indirectly. > If you can grasp the traversal of binary trees, the traversal of other complicated trees will be easy for you. The following are some common ways to traverse trees. - Depth First Traversals (DFS): In-order, Pre-order, Post-order - Level Order Traversal or Breadth First Traversal (BFS) There are applications for both DFS and BFS. A stack can be used to simplify the process of DFS traversal. Besides, since a tree is a recursive data structure, recursion and stack are two key points for DFS. Graph for DFS: ![binary-tree-traversal-dfs](https://tva1.sinaimg.cn/large/007S8ZIlly1ghluhzhynsg30dw0dw3yl.gif) The key point of BFS is how to determine whether the traversal of each level has been completed. The answer is to use a variable as a flag to represent the end of the traversal of the current level. ## Pre-order Traversal The traversal order of pre-order traversal is `root-left-right`. Algorithm Pre-order 1. Visit the root node and push it into a stack. 2. Pop a node from the stack, and push its right and left child nodes into the stack respectively. 3. Repeat step 2. Conclusion: This problem involves the classic recursive data structure (i.e. a binary tree), and the algorithm above demonstrates how a simplified solution can be reached by using a stack. If you look at the bigger picture, you'll find that the process of traversal is as follows. `Visit the left subtrees respectively from top to bottom, and visit the right subtrees respectively from bottom to top`. If we are to implement it from this perspective, things will be somewhat different. For the `top to bottom` part we can simply use recursion, and for the `bottom to top` part we can turn to a stack. ## In-order Traversal The traversal order of in-order traversal is `left-root-right`. So the root node is not printed first. Things are getting a bit complicated here. Algorithm In-order 1. Visit the root and push it into a stack. 2. If there is a left child node, push it into the stack. Repeat this process until a leaf node is reached. > At this point the root node and all the left nodes are in the stack. 3. Start popping nodes from the stack. If a node has a right child node, push the child node into the stack. Repeat step 2. It's worth pointing out that the in-order traversal of a binary search tree (BST) is a sorted array, which is helpful for coming up with simplified solutions for some problems. ## Post-order Traversal The traversal order of post-order traversal is `left-right-root`. This one is a bit of a challenge. It deserves the `hard` tag of LeetCode. In this case, the root node is printed not as the first but the last one. A cunning way to do it is to: Record whether the current node has been visited. If 1) it's a leaf node or 2) both its left and right subtrees have been traversed, then it can be popped from the stack. As for `1) it's a leaf node`, you can easily tell whether a node is a leaf if both its left and right are `null`. As for `2) both its left and right subtrees have been traversed`, we only need a variable to record whether a node has been visited or not. In the worst case, we need to record the status for every single node and the space complexity is `O(n)`. 
But if you come to think about it, as we are using a stack and start printing the result from the leaf nodes, it makes sense that we only record the status for the current node popping from the stack, reducing the space complexity to `O(1)`. ## Level Order Traversal The key point of level order traversal is how we know whether the traversal of each level is done. The answer is that we use a variable as a flag representing the end of the traversal of the current level. ![binary-tree-traversal-bfs](https://tva1.sinaimg.cn/large/007S8ZIlly1ghlui1tpoug30dw0dw3yl.gif) Algorithm Level-order 1. Visit the root node, put it in a FIFO queue, put in the queue a special flag (we are using `null` here). 2. Dequeue a node. 3. If the node equals `null`, it means that all nodes of the current level have been visited. If the queue is empty, we do nothing. Or else we put in another `null`. 4. If the node is not `null`, meaning the traversal of the current level has not finished yet, we enqueue its left subtree and right subtree respectively. ## Bi-color marking We know that there is a tri-color marking in the garbage collection algorithm, which works as described below. - The white color represents "not visited". - The gray color represents "not all child nodes visited". - The black color represents "all child nodes visited". Enlightened by tri-color marking, a bi-color marking method can be invented to solve all three traversal problems with one solution. The core idea is as follows. - Use a color to mark whether a node has been visited or not. Nodes yet to be visited are marked as white and visited nodes are marked as gray. - If we are visiting a white node, turn it into gray, and push its right child node, itself, and its left child node into the stack respectively. - If we are visiting a gray node, print it. Implementation of pre-order and post-order traversal algorithms can be easily done by changing the order of pushing the child nodes into the stack. Reference: [LeetCode](https://github.com/azl397985856/leetcode/blob/master/thinkings/binary-tree-traversal.en.md)
# Binary Tree Traversal ## Overview The combination of binary trees being data structures and traversal being an algorithm relates to classic problems, either directly or indirectly. > If you can grasp the traversal of binary trees, the traversal of other complicated trees will be easy for you. The following are some common ways to traverse trees. - Depth First Traversals (DFS): In-order, Pre-order, Post-order - Level Order Traversal or Breadth First Traversal (BFS) There are applications for both DFS and BFS. A stack can be used to simplify the process of DFS traversal. Besides, since a tree is a recursive data structure, recursion and stack are two key points for DFS. Graph for DFS: ![binary-tree-traversal-dfs](https://tva1.sinaimg.cn/large/007S8ZIlly1ghluhzhynsg30dw0dw3yl.gif) The key point of BFS is how to determine whether the traversal of each level has been completed. The answer is to use a variable as a flag to represent the end of the traversal of the current level. ## Pre-order Traversal The traversal order of pre-order traversal is `root-left-right`. Algorithm Pre-order 1. Visit the root node and push it into a stack. 2. Pop a node from the stack, and push its right and left child nodes into the stack respectively. 3. Repeat step 2. Conclusion: This problem involves the classic recursive data structure (i.e. a binary tree), and the algorithm above demonstrates how a simplified solution can be reached by using a stack. If you look at the bigger picture, you'll find that the process of traversal is as follows. `Visit the left subtrees respectively from top to bottom, and visit the right subtrees respectively from bottom to top`. If we are to implement it from this perspective, things will be somewhat different. For the `top to bottom` part we can simply use recursion, and for the `bottom to top` part we can turn to a stack. ## In-order Traversal The traversal order of in-order traversal is `left-root-right`. So the root node is not printed first. Things are getting a bit complicated here. Algorithm In-order 1. Visit the root and push it into a stack. 2. If there is a left child node, push it into the stack. Repeat this process until a leaf node is reached. > At this point the root node and all the left nodes are in the stack. 3. Start popping nodes from the stack. If a node has a right child node, push the child node into the stack. Repeat step 2. It's worth pointing out that the in-order traversal of a binary search tree (BST) is a sorted array, which is helpful for coming up with simplified solutions for some problems. ## Post-order Traversal The traversal order of post-order traversal is `left-right-root`. This one is a bit of a challenge. It deserves the `hard` tag of LeetCode. In this case, the root node is printed not as the first but the last one. A cunning way to do it is to: Record whether the current node has been visited. If 1) it's a leaf node or 2) both its left and right subtrees have been traversed, then it can be popped from the stack. As for `1) it's a leaf node`, you can easily tell whether a node is a leaf if both its left and right are `null`. As for `2) both its left and right subtrees have been traversed`, we only need a variable to record whether a node has been visited or not. In the worst case, we need to record the status for every single node and the space complexity is `O(n)`. 
But if you come to think about it, as we are using a stack and start printing the result from the leaf nodes, it makes sense that we only record the status for the current node popping from the stack, reducing the space complexity to `O(1)`. ## Level Order Traversal The key point of level order traversal is how we know whether the traversal of each level is done. The answer is that we use a variable as a flag representing the end of the traversal of the current level. ![binary-tree-traversal-bfs](https://tva1.sinaimg.cn/large/007S8ZIlly1ghlui1tpoug30dw0dw3yl.gif) Algorithm Level-order 1. Visit the root node, put it in a FIFO queue, put in the queue a special flag (we are using `null` here). 2. Dequeue a node. 3. If the node equals `null`, it means that all nodes of the current level have been visited. If the queue is empty, we do nothing. Or else we put in another `null`. 4. If the node is not `null`, meaning the traversal of the current level has not finished yet, we enqueue its left subtree and right subtree respectively. ## Bi-color marking We know that there is a tri-color marking in the garbage collection algorithm, which works as described below. - The white color represents "not visited". - The gray color represents "not all child nodes visited". - The black color represents "all child nodes visited". Enlightened by tri-color marking, a bi-color marking method can be invented to solve all three traversal problems with one solution. The core idea is as follows. - Use a color to mark whether a node has been visited or not. Nodes yet to be visited are marked as white and visited nodes are marked as gray. - If we are visiting a white node, turn it into gray, and push its right child node, itself, and its left child node into the stack respectively. - If we are visiting a gray node, print it. Implementation of pre-order and post-order traversal algorithms can be easily done by changing the order of pushing the child nodes into the stack. Reference: [LeetCode](https://github.com/azl397985856/leetcode/blob/master/thinkings/binary-tree-traversal.en.md)
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
from typing import Literal LETTERS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" def translate_message( key: str, message: str, mode: Literal["encrypt", "decrypt"] ) -> str: """ >>> translate_message("QWERTYUIOPASDFGHJKLZXCVBNM","Hello World","encrypt") 'Pcssi Bidsm' """ chars_a = LETTERS if mode == "decrypt" else key chars_b = key if mode == "decrypt" else LETTERS translated = "" # loop through each symbol in the message for symbol in message: if symbol.upper() in chars_a: # encrypt/decrypt the symbol sym_index = chars_a.find(symbol.upper()) if symbol.isupper(): translated += chars_b[sym_index].upper() else: translated += chars_b[sym_index].lower() else: # symbol is not in LETTERS, just add it translated += symbol return translated def encrypt_message(key: str, message: str) -> str: """ >>> encrypt_message("QWERTYUIOPASDFGHJKLZXCVBNM", "Hello World") 'Pcssi Bidsm' """ return translate_message(key, message, "encrypt") def decrypt_message(key: str, message: str) -> str: """ >>> decrypt_message("QWERTYUIOPASDFGHJKLZXCVBNM", "Hello World") 'Itssg Vgksr' """ return translate_message(key, message, "decrypt") def main() -> None: message = "Hello World" key = "QWERTYUIOPASDFGHJKLZXCVBNM" mode = "decrypt" # set to 'encrypt' or 'decrypt' if mode == "encrypt": translated = encrypt_message(key, message) elif mode == "decrypt": translated = decrypt_message(key, message) print(f"Using the key {key}, the {mode}ed message is: {translated}") if __name__ == "__main__": import doctest doctest.testmod() main()
from typing import Literal LETTERS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" def translate_message( key: str, message: str, mode: Literal["encrypt", "decrypt"] ) -> str: """ >>> translate_message("QWERTYUIOPASDFGHJKLZXCVBNM","Hello World","encrypt") 'Pcssi Bidsm' """ chars_a = LETTERS if mode == "decrypt" else key chars_b = key if mode == "decrypt" else LETTERS translated = "" # loop through each symbol in the message for symbol in message: if symbol.upper() in chars_a: # encrypt/decrypt the symbol sym_index = chars_a.find(symbol.upper()) if symbol.isupper(): translated += chars_b[sym_index].upper() else: translated += chars_b[sym_index].lower() else: # symbol is not in LETTERS, just add it translated += symbol return translated def encrypt_message(key: str, message: str) -> str: """ >>> encrypt_message("QWERTYUIOPASDFGHJKLZXCVBNM", "Hello World") 'Pcssi Bidsm' """ return translate_message(key, message, "encrypt") def decrypt_message(key: str, message: str) -> str: """ >>> decrypt_message("QWERTYUIOPASDFGHJKLZXCVBNM", "Hello World") 'Itssg Vgksr' """ return translate_message(key, message, "decrypt") def main() -> None: message = "Hello World" key = "QWERTYUIOPASDFGHJKLZXCVBNM" mode = "decrypt" # set to 'encrypt' or 'decrypt' if mode == "encrypt": translated = encrypt_message(key, message) elif mode == "decrypt": translated = decrypt_message(key, message) print(f"Using the key {key}, the {mode}ed message is: {translated}") if __name__ == "__main__": import doctest doctest.testmod() main()
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
""" Minimax helps to achieve maximum score in a game by checking all possible moves. """ from __future__ import annotations import math def minimax( depth: int, node_index: int, is_max: bool, scores: list[int], height: float ) -> int: """ depth is current depth in game tree. node_index is index of current node in scores[]. scores[] contains the leaves of game tree. height is maximum height of game tree. >>> scores = [90, 23, 6, 33, 21, 65, 123, 34423] >>> height = math.log(len(scores), 2) >>> minimax(0, 0, True, scores, height) 65 >>> minimax(-1, 0, True, scores, height) Traceback (most recent call last): ... ValueError: Depth cannot be less than 0 >>> minimax(0, 0, True, [], 2) Traceback (most recent call last): ... ValueError: Scores cannot be empty >>> scores = [3, 5, 2, 9, 12, 5, 23, 23] >>> height = math.log(len(scores), 2) >>> minimax(0, 0, True, scores, height) 12 """ if depth < 0: raise ValueError("Depth cannot be less than 0") if not scores: raise ValueError("Scores cannot be empty") if depth == height: return scores[node_index] return ( max( minimax(depth + 1, node_index * 2, False, scores, height), minimax(depth + 1, node_index * 2 + 1, False, scores, height), ) if is_max else min( minimax(depth + 1, node_index * 2, True, scores, height), minimax(depth + 1, node_index * 2 + 1, True, scores, height), ) ) def main() -> None: scores = [90, 23, 6, 33, 21, 65, 123, 34423] height = math.log(len(scores), 2) print(f"Optimal value : {minimax(0, 0, True, scores, height)}") if __name__ == "__main__": import doctest doctest.testmod() main()
""" Minimax helps to achieve maximum score in a game by checking all possible moves. """ from __future__ import annotations import math def minimax( depth: int, node_index: int, is_max: bool, scores: list[int], height: float ) -> int: """ depth is current depth in game tree. node_index is index of current node in scores[]. scores[] contains the leaves of game tree. height is maximum height of game tree. >>> scores = [90, 23, 6, 33, 21, 65, 123, 34423] >>> height = math.log(len(scores), 2) >>> minimax(0, 0, True, scores, height) 65 >>> minimax(-1, 0, True, scores, height) Traceback (most recent call last): ... ValueError: Depth cannot be less than 0 >>> minimax(0, 0, True, [], 2) Traceback (most recent call last): ... ValueError: Scores cannot be empty >>> scores = [3, 5, 2, 9, 12, 5, 23, 23] >>> height = math.log(len(scores), 2) >>> minimax(0, 0, True, scores, height) 12 """ if depth < 0: raise ValueError("Depth cannot be less than 0") if not scores: raise ValueError("Scores cannot be empty") if depth == height: return scores[node_index] return ( max( minimax(depth + 1, node_index * 2, False, scores, height), minimax(depth + 1, node_index * 2 + 1, False, scores, height), ) if is_max else min( minimax(depth + 1, node_index * 2, True, scores, height), minimax(depth + 1, node_index * 2 + 1, True, scores, height), ) ) def main() -> None: scores = [90, 23, 6, 33, 21, 65, 123, 34423] height = math.log(len(scores), 2) print(f"Optimal value : {minimax(0, 0, True, scores, height)}") if __name__ == "__main__": import doctest doctest.testmod() main()
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
import argparse import datetime def zeller(date_input: str) -> str: """ Zellers Congruence Algorithm Find the day of the week for nearly any Gregorian or Julian calendar date >>> zeller('01-31-2010') 'Your date 01-31-2010, is a Sunday!' Validate out of range month >>> zeller('13-31-2010') Traceback (most recent call last): ... ValueError: Month must be between 1 - 12 >>> zeller('.2-31-2010') Traceback (most recent call last): ... ValueError: invalid literal for int() with base 10: '.2' Validate out of range date: >>> zeller('01-33-2010') Traceback (most recent call last): ... ValueError: Date must be between 1 - 31 >>> zeller('01-.4-2010') Traceback (most recent call last): ... ValueError: invalid literal for int() with base 10: '.4' Validate second separator: >>> zeller('01-31*2010') Traceback (most recent call last): ... ValueError: Date separator must be '-' or '/' Validate first separator: >>> zeller('01^31-2010') Traceback (most recent call last): ... ValueError: Date separator must be '-' or '/' Validate out of range year: >>> zeller('01-31-8999') Traceback (most recent call last): ... ValueError: Year out of range. There has to be some sort of limit...right? Test null input: >>> zeller() Traceback (most recent call last): ... TypeError: zeller() missing 1 required positional argument: 'date_input' Test length of date_input: >>> zeller('') Traceback (most recent call last): ... ValueError: Must be 10 characters long >>> zeller('01-31-19082939') Traceback (most recent call last): ... ValueError: Must be 10 characters long""" # Days of the week for response days = { "0": "Sunday", "1": "Monday", "2": "Tuesday", "3": "Wednesday", "4": "Thursday", "5": "Friday", "6": "Saturday", } convert_datetime_days = {0: 1, 1: 2, 2: 3, 3: 4, 4: 5, 5: 6, 6: 0} # Validate if not 0 < len(date_input) < 11: raise ValueError("Must be 10 characters long") # Get month m: int = int(date_input[0] + date_input[1]) # Validate if not 0 < m < 13: raise ValueError("Month must be between 1 - 12") sep_1: str = date_input[2] # Validate if sep_1 not in ["-", "/"]: raise ValueError("Date separator must be '-' or '/'") # Get day d: int = int(date_input[3] + date_input[4]) # Validate if not 0 < d < 32: raise ValueError("Date must be between 1 - 31") # Get second separator sep_2: str = date_input[5] # Validate if sep_2 not in ["-", "/"]: raise ValueError("Date separator must be '-' or '/'") # Get year y: int = int(date_input[6] + date_input[7] + date_input[8] + date_input[9]) # Arbitrary year range if not 45 < y < 8500: raise ValueError( "Year out of range. There has to be some sort of limit...right?" ) # Get datetime obj for validation dt_ck = datetime.date(int(y), int(m), int(d)) # Start math if m <= 2: y = y - 1 m = m + 12 # maths var c: int = int(str(y)[:2]) k: int = int(str(y)[2:]) t: int = int(2.6 * m - 5.39) u: int = int(c / 4) v: int = int(k / 4) x: int = int(d + k) z: int = int(t + u + v + x) w: int = int(z - (2 * c)) f: int = round(w % 7) # End math # Validate math if f != convert_datetime_days[dt_ck.weekday()]: raise AssertionError("The date was evaluated incorrectly. Contact developer.") # Response response: str = f"Your date {date_input}, is a {days[str(f)]}!" return response if __name__ == "__main__": import doctest doctest.testmod() parser = argparse.ArgumentParser( description=( "Find out what day of the week nearly any date is or was. 
Enter " "date as a string in the mm-dd-yyyy or mm/dd/yyyy format" ) ) parser.add_argument( "date_input", type=str, help="Date as a string (mm-dd-yyyy or mm/dd/yyyy)" ) args = parser.parse_args() zeller(args.date_input)
import argparse import datetime def zeller(date_input: str) -> str: """ Zellers Congruence Algorithm Find the day of the week for nearly any Gregorian or Julian calendar date >>> zeller('01-31-2010') 'Your date 01-31-2010, is a Sunday!' Validate out of range month >>> zeller('13-31-2010') Traceback (most recent call last): ... ValueError: Month must be between 1 - 12 >>> zeller('.2-31-2010') Traceback (most recent call last): ... ValueError: invalid literal for int() with base 10: '.2' Validate out of range date: >>> zeller('01-33-2010') Traceback (most recent call last): ... ValueError: Date must be between 1 - 31 >>> zeller('01-.4-2010') Traceback (most recent call last): ... ValueError: invalid literal for int() with base 10: '.4' Validate second separator: >>> zeller('01-31*2010') Traceback (most recent call last): ... ValueError: Date separator must be '-' or '/' Validate first separator: >>> zeller('01^31-2010') Traceback (most recent call last): ... ValueError: Date separator must be '-' or '/' Validate out of range year: >>> zeller('01-31-8999') Traceback (most recent call last): ... ValueError: Year out of range. There has to be some sort of limit...right? Test null input: >>> zeller() Traceback (most recent call last): ... TypeError: zeller() missing 1 required positional argument: 'date_input' Test length of date_input: >>> zeller('') Traceback (most recent call last): ... ValueError: Must be 10 characters long >>> zeller('01-31-19082939') Traceback (most recent call last): ... ValueError: Must be 10 characters long""" # Days of the week for response days = { "0": "Sunday", "1": "Monday", "2": "Tuesday", "3": "Wednesday", "4": "Thursday", "5": "Friday", "6": "Saturday", } convert_datetime_days = {0: 1, 1: 2, 2: 3, 3: 4, 4: 5, 5: 6, 6: 0} # Validate if not 0 < len(date_input) < 11: raise ValueError("Must be 10 characters long") # Get month m: int = int(date_input[0] + date_input[1]) # Validate if not 0 < m < 13: raise ValueError("Month must be between 1 - 12") sep_1: str = date_input[2] # Validate if sep_1 not in ["-", "/"]: raise ValueError("Date separator must be '-' or '/'") # Get day d: int = int(date_input[3] + date_input[4]) # Validate if not 0 < d < 32: raise ValueError("Date must be between 1 - 31") # Get second separator sep_2: str = date_input[5] # Validate if sep_2 not in ["-", "/"]: raise ValueError("Date separator must be '-' or '/'") # Get year y: int = int(date_input[6] + date_input[7] + date_input[8] + date_input[9]) # Arbitrary year range if not 45 < y < 8500: raise ValueError( "Year out of range. There has to be some sort of limit...right?" ) # Get datetime obj for validation dt_ck = datetime.date(int(y), int(m), int(d)) # Start math if m <= 2: y = y - 1 m = m + 12 # maths var c: int = int(str(y)[:2]) k: int = int(str(y)[2:]) t: int = int(2.6 * m - 5.39) u: int = int(c / 4) v: int = int(k / 4) x: int = int(d + k) z: int = int(t + u + v + x) w: int = int(z - (2 * c)) f: int = round(w % 7) # End math # Validate math if f != convert_datetime_days[dt_ck.weekday()]: raise AssertionError("The date was evaluated incorrectly. Contact developer.") # Response response: str = f"Your date {date_input}, is a {days[str(f)]}!" return response if __name__ == "__main__": import doctest doctest.testmod() parser = argparse.ArgumentParser( description=( "Find out what day of the week nearly any date is or was. 
Enter " "date as a string in the mm-dd-yyyy or mm/dd/yyyy format" ) ) parser.add_argument( "date_input", type=str, help="Date as a string (mm-dd-yyyy or mm/dd/yyyy)" ) args = parser.parse_args() zeller(args.date_input)
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
""" Project Euler Problem 100: https://projecteuler.net/problem=100 If a box contains twenty-one coloured discs, composed of fifteen blue discs and six red discs, and two discs were taken at random, it can be seen that the probability of taking two blue discs, P(BB) = (15/21) x (14/20) = 1/2. The next such arrangement, for which there is exactly 50% chance of taking two blue discs at random, is a box containing eighty-five blue discs and thirty-five red discs. By finding the first arrangement to contain over 10^12 = 1,000,000,000,000 discs in total, determine the number of blue discs that the box would contain. """ def solution(min_total: int = 10**12) -> int: """ Returns the number of blue discs for the first arrangement to contain over min_total discs in total >>> solution(2) 3 >>> solution(4) 15 >>> solution(21) 85 """ prev_numerator = 1 prev_denominator = 0 numerator = 1 denominator = 1 while numerator <= 2 * min_total - 1: prev_numerator += 2 * numerator numerator += 2 * prev_numerator prev_denominator += 2 * denominator denominator += 2 * prev_denominator return (denominator + 1) // 2 if __name__ == "__main__": print(f"{solution() = }")
""" Project Euler Problem 100: https://projecteuler.net/problem=100 If a box contains twenty-one coloured discs, composed of fifteen blue discs and six red discs, and two discs were taken at random, it can be seen that the probability of taking two blue discs, P(BB) = (15/21) x (14/20) = 1/2. The next such arrangement, for which there is exactly 50% chance of taking two blue discs at random, is a box containing eighty-five blue discs and thirty-five red discs. By finding the first arrangement to contain over 10^12 = 1,000,000,000,000 discs in total, determine the number of blue discs that the box would contain. """ def solution(min_total: int = 10**12) -> int: """ Returns the number of blue discs for the first arrangement to contain over min_total discs in total >>> solution(2) 3 >>> solution(4) 15 >>> solution(21) 85 """ prev_numerator = 1 prev_denominator = 0 numerator = 1 denominator = 1 while numerator <= 2 * min_total - 1: prev_numerator += 2 * numerator numerator += 2 * prev_numerator prev_denominator += 2 * denominator denominator += 2 * prev_denominator return (denominator + 1) // 2 if __name__ == "__main__": print(f"{solution() = }")
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
""" Given the root of a binary tree and an integer target, find the number of paths where the sum of the values along the path equals target. Leetcode reference: https://leetcode.com/problems/path-sum-iii/ """ from __future__ import annotations class Node: """ A Node has value variable and pointers to Nodes to its left and right. """ def __init__(self, value: int) -> None: self.value = value self.left: Node | None = None self.right: Node | None = None class BinaryTreePathSum: r""" The below tree looks like this 10 / \ 5 -3 / \ \ 3 2 11 / \ \ 3 -2 1 >>> tree = Node(10) >>> tree.left = Node(5) >>> tree.right = Node(-3) >>> tree.left.left = Node(3) >>> tree.left.right = Node(2) >>> tree.right.right = Node(11) >>> tree.left.left.left = Node(3) >>> tree.left.left.right = Node(-2) >>> tree.left.right.right = Node(1) >>> BinaryTreePathSum().path_sum(tree, 8) 3 >>> BinaryTreePathSum().path_sum(tree, 7) 2 >>> tree.right.right = Node(10) >>> BinaryTreePathSum().path_sum(tree, 8) 2 """ target: int def __init__(self) -> None: self.paths = 0 def depth_first_search(self, node: Node | None, path_sum: int) -> None: if node is None: return if path_sum == self.target: self.paths += 1 if node.left: self.depth_first_search(node.left, path_sum + node.left.value) if node.right: self.depth_first_search(node.right, path_sum + node.right.value) def path_sum(self, node: Node | None, target: int | None = None) -> int: if node is None: return 0 if target is not None: self.target = target self.depth_first_search(node, node.value) self.path_sum(node.left) self.path_sum(node.right) return self.paths if __name__ == "__main__": import doctest doctest.testmod()
""" Given the root of a binary tree and an integer target, find the number of paths where the sum of the values along the path equals target. Leetcode reference: https://leetcode.com/problems/path-sum-iii/ """ from __future__ import annotations class Node: """ A Node has value variable and pointers to Nodes to its left and right. """ def __init__(self, value: int) -> None: self.value = value self.left: Node | None = None self.right: Node | None = None class BinaryTreePathSum: r""" The below tree looks like this 10 / \ 5 -3 / \ \ 3 2 11 / \ \ 3 -2 1 >>> tree = Node(10) >>> tree.left = Node(5) >>> tree.right = Node(-3) >>> tree.left.left = Node(3) >>> tree.left.right = Node(2) >>> tree.right.right = Node(11) >>> tree.left.left.left = Node(3) >>> tree.left.left.right = Node(-2) >>> tree.left.right.right = Node(1) >>> BinaryTreePathSum().path_sum(tree, 8) 3 >>> BinaryTreePathSum().path_sum(tree, 7) 2 >>> tree.right.right = Node(10) >>> BinaryTreePathSum().path_sum(tree, 8) 2 """ target: int def __init__(self) -> None: self.paths = 0 def depth_first_search(self, node: Node | None, path_sum: int) -> None: if node is None: return if path_sum == self.target: self.paths += 1 if node.left: self.depth_first_search(node.left, path_sum + node.left.value) if node.right: self.depth_first_search(node.right, path_sum + node.right.value) def path_sum(self, node: Node | None, target: int | None = None) -> int: if node is None: return 0 if target is not None: self.target = target self.depth_first_search(node, node.value) self.path_sum(node.left) self.path_sum(node.right) return self.paths if __name__ == "__main__": import doctest doctest.testmod()
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
""" Shortest job remaining first Please note arrival time and burst Please use spaces to separate times entered. """ from __future__ import annotations import pandas as pd def calculate_waitingtime( arrival_time: list[int], burst_time: list[int], no_of_processes: int ) -> list[int]: """ Calculate the waiting time of each processes Return: List of waiting times. >>> calculate_waitingtime([1,2,3,4],[3,3,5,1],4) [0, 3, 5, 0] >>> calculate_waitingtime([1,2,3],[2,5,1],3) [0, 2, 0] >>> calculate_waitingtime([2,3],[5,1],2) [1, 0] """ remaining_time = [0] * no_of_processes waiting_time = [0] * no_of_processes # Copy the burst time into remaining_time[] for i in range(no_of_processes): remaining_time[i] = burst_time[i] complete = 0 increment_time = 0 minm = 999999999 short = 0 check = False # Process until all processes are completed while complete != no_of_processes: for j in range(no_of_processes): if arrival_time[j] <= increment_time and remaining_time[j] > 0: if remaining_time[j] < minm: minm = remaining_time[j] short = j check = True if not check: increment_time += 1 continue remaining_time[short] -= 1 minm = remaining_time[short] if minm == 0: minm = 999999999 if remaining_time[short] == 0: complete += 1 check = False # Find finish time of current process finish_time = increment_time + 1 # Calculate waiting time finar = finish_time - arrival_time[short] waiting_time[short] = finar - burst_time[short] if waiting_time[short] < 0: waiting_time[short] = 0 # Increment time increment_time += 1 return waiting_time def calculate_turnaroundtime( burst_time: list[int], no_of_processes: int, waiting_time: list[int] ) -> list[int]: """ Calculate the turn around time of each Processes Return: list of turn around times. >>> calculate_turnaroundtime([3,3,5,1], 4, [0,3,5,0]) [3, 6, 10, 1] >>> calculate_turnaroundtime([3,3], 2, [0,3]) [3, 6] >>> calculate_turnaroundtime([8,10,1], 3, [1,0,3]) [9, 10, 4] """ turn_around_time = [0] * no_of_processes for i in range(no_of_processes): turn_around_time[i] = burst_time[i] + waiting_time[i] return turn_around_time def calculate_average_times( waiting_time: list[int], turn_around_time: list[int], no_of_processes: int ) -> None: """ This function calculates the average of the waiting & turnaround times Prints: Average Waiting time & Average Turn Around Time >>> calculate_average_times([0,3,5,0],[3,6,10,1],4) Average waiting time = 2.00000 Average turn around time = 5.0 >>> calculate_average_times([2,3],[3,6],2) Average waiting time = 2.50000 Average turn around time = 4.5 >>> calculate_average_times([10,4,3],[2,7,6],3) Average waiting time = 5.66667 Average turn around time = 5.0 """ total_waiting_time = 0 total_turn_around_time = 0 for i in range(no_of_processes): total_waiting_time = total_waiting_time + waiting_time[i] total_turn_around_time = total_turn_around_time + turn_around_time[i] print(f"Average waiting time = {total_waiting_time / no_of_processes:.5f}") print("Average turn around time =", total_turn_around_time / no_of_processes) if __name__ == "__main__": print("Enter how many process you want to analyze") no_of_processes = int(input()) burst_time = [0] * no_of_processes arrival_time = [0] * no_of_processes processes = list(range(1, no_of_processes + 1)) for i in range(no_of_processes): print("Enter the arrival time and burst time for process:--" + str(i + 1)) arrival_time[i], burst_time[i] = map(int, input().split()) waiting_time = calculate_waitingtime(arrival_time, burst_time, no_of_processes) bt = burst_time n = no_of_processes wt = waiting_time 
turn_around_time = calculate_turnaroundtime(bt, n, wt) calculate_average_times(waiting_time, turn_around_time, no_of_processes) fcfs = pd.DataFrame( list(zip(processes, burst_time, arrival_time, waiting_time, turn_around_time)), columns=[ "Process", "BurstTime", "ArrivalTime", "WaitingTime", "TurnAroundTime", ], ) # Printing the dataFrame pd.set_option("display.max_rows", fcfs.shape[0] + 1) print(fcfs)
""" Shortest job remaining first Please note arrival time and burst Please use spaces to separate times entered. """ from __future__ import annotations import pandas as pd def calculate_waitingtime( arrival_time: list[int], burst_time: list[int], no_of_processes: int ) -> list[int]: """ Calculate the waiting time of each processes Return: List of waiting times. >>> calculate_waitingtime([1,2,3,4],[3,3,5,1],4) [0, 3, 5, 0] >>> calculate_waitingtime([1,2,3],[2,5,1],3) [0, 2, 0] >>> calculate_waitingtime([2,3],[5,1],2) [1, 0] """ remaining_time = [0] * no_of_processes waiting_time = [0] * no_of_processes # Copy the burst time into remaining_time[] for i in range(no_of_processes): remaining_time[i] = burst_time[i] complete = 0 increment_time = 0 minm = 999999999 short = 0 check = False # Process until all processes are completed while complete != no_of_processes: for j in range(no_of_processes): if arrival_time[j] <= increment_time and remaining_time[j] > 0: if remaining_time[j] < minm: minm = remaining_time[j] short = j check = True if not check: increment_time += 1 continue remaining_time[short] -= 1 minm = remaining_time[short] if minm == 0: minm = 999999999 if remaining_time[short] == 0: complete += 1 check = False # Find finish time of current process finish_time = increment_time + 1 # Calculate waiting time finar = finish_time - arrival_time[short] waiting_time[short] = finar - burst_time[short] if waiting_time[short] < 0: waiting_time[short] = 0 # Increment time increment_time += 1 return waiting_time def calculate_turnaroundtime( burst_time: list[int], no_of_processes: int, waiting_time: list[int] ) -> list[int]: """ Calculate the turn around time of each Processes Return: list of turn around times. >>> calculate_turnaroundtime([3,3,5,1], 4, [0,3,5,0]) [3, 6, 10, 1] >>> calculate_turnaroundtime([3,3], 2, [0,3]) [3, 6] >>> calculate_turnaroundtime([8,10,1], 3, [1,0,3]) [9, 10, 4] """ turn_around_time = [0] * no_of_processes for i in range(no_of_processes): turn_around_time[i] = burst_time[i] + waiting_time[i] return turn_around_time def calculate_average_times( waiting_time: list[int], turn_around_time: list[int], no_of_processes: int ) -> None: """ This function calculates the average of the waiting & turnaround times Prints: Average Waiting time & Average Turn Around Time >>> calculate_average_times([0,3,5,0],[3,6,10,1],4) Average waiting time = 2.00000 Average turn around time = 5.0 >>> calculate_average_times([2,3],[3,6],2) Average waiting time = 2.50000 Average turn around time = 4.5 >>> calculate_average_times([10,4,3],[2,7,6],3) Average waiting time = 5.66667 Average turn around time = 5.0 """ total_waiting_time = 0 total_turn_around_time = 0 for i in range(no_of_processes): total_waiting_time = total_waiting_time + waiting_time[i] total_turn_around_time = total_turn_around_time + turn_around_time[i] print(f"Average waiting time = {total_waiting_time / no_of_processes:.5f}") print("Average turn around time =", total_turn_around_time / no_of_processes) if __name__ == "__main__": print("Enter how many process you want to analyze") no_of_processes = int(input()) burst_time = [0] * no_of_processes arrival_time = [0] * no_of_processes processes = list(range(1, no_of_processes + 1)) for i in range(no_of_processes): print("Enter the arrival time and burst time for process:--" + str(i + 1)) arrival_time[i], burst_time[i] = map(int, input().split()) waiting_time = calculate_waitingtime(arrival_time, burst_time, no_of_processes) bt = burst_time n = no_of_processes wt = waiting_time 
turn_around_time = calculate_turnaroundtime(bt, n, wt) calculate_average_times(waiting_time, turn_around_time, no_of_processes) fcfs = pd.DataFrame( list(zip(processes, burst_time, arrival_time, waiting_time, turn_around_time)), columns=[ "Process", "BurstTime", "ArrivalTime", "WaitingTime", "TurnAroundTime", ], ) # Printing the dataFrame pd.set_option("display.max_rows", fcfs.shape[0] + 1) print(fcfs)
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
def reverse_letters(input_str: str) -> str: """ Reverses letters in a given string without adjusting the position of the words >>> reverse_letters('The cat in the hat') 'ehT tac ni eht tah' >>> reverse_letters('The quick brown fox jumped over the lazy dog.') 'ehT kciuq nworb xof depmuj revo eht yzal .god' >>> reverse_letters('Is this true?') 'sI siht ?eurt' >>> reverse_letters("I love Python") 'I evol nohtyP' """ return " ".join([word[::-1] for word in input_str.split()]) if __name__ == "__main__": import doctest doctest.testmod()
def reverse_letters(input_str: str) -> str: """ Reverses letters in a given string without adjusting the position of the words >>> reverse_letters('The cat in the hat') 'ehT tac ni eht tah' >>> reverse_letters('The quick brown fox jumped over the lazy dog.') 'ehT kciuq nworb xof depmuj revo eht yzal .god' >>> reverse_letters('Is this true?') 'sI siht ?eurt' >>> reverse_letters("I love Python") 'I evol nohtyP' """ return " ".join([word[::-1] for word in input_str.split()]) if __name__ == "__main__": import doctest doctest.testmod()
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
def perfect_cube(n: int) -> bool: """ Check if a number is a perfect cube or not. >>> perfect_cube(27) True >>> perfect_cube(4) False """ val = n ** (1 / 3) return (val * val * val) == n if __name__ == "__main__": print(perfect_cube(27)) print(perfect_cube(4))
def perfect_cube(n: int) -> bool: """ Check if a number is a perfect cube or not. >>> perfect_cube(27) True >>> perfect_cube(4) False """ val = n ** (1 / 3) return (val * val * val) == n if __name__ == "__main__": print(perfect_cube(27)) print(perfect_cube(4))
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
""" Project Euler Problem 493: https://projecteuler.net/problem=493 70 coloured balls are placed in an urn, 10 for each of the seven rainbow colours. What is the expected number of distinct colours in 20 randomly picked balls? Give your answer with nine digits after the decimal point (a.bcdefghij). ----- This combinatorial problem can be solved by decomposing the problem into the following steps: 1. Calculate the total number of possible picking cominations [combinations := binom_coeff(70, 20)] 2. Calculate the number of combinations with one colour missing [missing := binom_coeff(60, 20)] 3. Calculate the probability of one colour missing [missing_prob := missing / combinations] 4. Calculate the probability of no colour missing [no_missing_prob := 1 - missing_prob] 5. Calculate the expected number of distinct colours [expected = 7 * no_missing_prob] References: - https://en.wikipedia.org/wiki/Binomial_coefficient """ import math BALLS_PER_COLOUR = 10 NUM_COLOURS = 7 NUM_BALLS = BALLS_PER_COLOUR * NUM_COLOURS def solution(num_picks: int = 20) -> str: """ Calculates the expected number of distinct colours >>> solution(10) '5.669644129' >>> solution(30) '6.985042712' """ total = math.comb(NUM_BALLS, num_picks) missing_colour = math.comb(NUM_BALLS - BALLS_PER_COLOUR, num_picks) result = NUM_COLOURS * (1 - missing_colour / total) return f"{result:.9f}" if __name__ == "__main__": print(solution(20))
""" Project Euler Problem 493: https://projecteuler.net/problem=493 70 coloured balls are placed in an urn, 10 for each of the seven rainbow colours. What is the expected number of distinct colours in 20 randomly picked balls? Give your answer with nine digits after the decimal point (a.bcdefghij). ----- This combinatorial problem can be solved by decomposing the problem into the following steps: 1. Calculate the total number of possible picking cominations [combinations := binom_coeff(70, 20)] 2. Calculate the number of combinations with one colour missing [missing := binom_coeff(60, 20)] 3. Calculate the probability of one colour missing [missing_prob := missing / combinations] 4. Calculate the probability of no colour missing [no_missing_prob := 1 - missing_prob] 5. Calculate the expected number of distinct colours [expected = 7 * no_missing_prob] References: - https://en.wikipedia.org/wiki/Binomial_coefficient """ import math BALLS_PER_COLOUR = 10 NUM_COLOURS = 7 NUM_BALLS = BALLS_PER_COLOUR * NUM_COLOURS def solution(num_picks: int = 20) -> str: """ Calculates the expected number of distinct colours >>> solution(10) '5.669644129' >>> solution(30) '6.985042712' """ total = math.comb(NUM_BALLS, num_picks) missing_colour = math.comb(NUM_BALLS - BALLS_PER_COLOUR, num_picks) result = NUM_COLOURS * (1 - missing_colour / total) return f"{result:.9f}" if __name__ == "__main__": print(solution(20))
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
total_users,total_events,days 18231,0.0,1 22621,1.0,2 15675,0.0,3 23583,1.0,4 68351,5.0,5 34338,3.0,6 19238,0.0,0 24192,0.0,1 70349,0.0,2 103510,0.0,3 128355,1.0,4 148484,6.0,5 153489,3.0,6 162667,1.0,0 311430,3.0,1 435663,7.0,2 273526,0.0,3 628588,2.0,4 454989,13.0,5 539040,3.0,6 52974,1.0,0 103451,2.0,1 810020,5.0,2 580982,3.0,3 216515,0.0,4 134694,10.0,5 93563,1.0,6 55432,1.0,0 169634,1.0,1 254908,4.0,2 315285,3.0,3 191764,0.0,4 514284,7.0,5 181214,4.0,6 78459,2.0,0 161620,3.0,1 245610,4.0,2 326722,5.0,3 214578,0.0,4 312365,5.0,5 232454,4.0,6 178368,1.0,0 97152,1.0,1 222813,4.0,2 285852,4.0,3 192149,1.0,4 142241,1.0,5 173011,2.0,6 56488,3.0,0 89572,2.0,1 356082,2.0,2 172799,0.0,3 142300,1.0,4 78432,2.0,5 539023,9.0,6 62389,1.0,0 70247,1.0,1 89229,0.0,2 94583,1.0,3 102455,0.0,4 129270,0.0,5 311409,1.0,6 1837026,0.0,0 361824,0.0,1 111379,2.0,2 76337,2.0,3 96747,0.0,4 92058,0.0,5 81929,2.0,6 143423,0.0,0 82939,0.0,1 74403,1.0,2 68234,0.0,3 94556,1.0,4 80311,0.0,5 75283,3.0,6 77724,0.0,0 49229,2.0,1 65708,2.0,2 273864,1.0,3 1711281,0.0,4 1900253,5.0,5 343071,1.0,6 1551326,0.0,0 56636,1.0,1 272782,2.0,2 1785678,0.0,3 241866,0.0,4 461904,0.0,5 2191901,2.0,6 102925,0.0,0 242778,1.0,1 298608,0.0,2 322458,10.0,3 216027,9.0,4 916052,12.0,5 193278,12.0,6 263207,8.0,0 672948,10.0,1 281909,1.0,2 384562,1.0,3 1027375,2.0,4 828905,9.0,5 624188,22.0,6 392218,8.0,0 292581,10.0,1 299869,12.0,2 769455,20.0,3 316443,8.0,4 1212864,24.0,5 1397338,28.0,6 223249,8.0,0 191264,14.0,1
total_users,total_events,days 18231,0.0,1 22621,1.0,2 15675,0.0,3 23583,1.0,4 68351,5.0,5 34338,3.0,6 19238,0.0,0 24192,0.0,1 70349,0.0,2 103510,0.0,3 128355,1.0,4 148484,6.0,5 153489,3.0,6 162667,1.0,0 311430,3.0,1 435663,7.0,2 273526,0.0,3 628588,2.0,4 454989,13.0,5 539040,3.0,6 52974,1.0,0 103451,2.0,1 810020,5.0,2 580982,3.0,3 216515,0.0,4 134694,10.0,5 93563,1.0,6 55432,1.0,0 169634,1.0,1 254908,4.0,2 315285,3.0,3 191764,0.0,4 514284,7.0,5 181214,4.0,6 78459,2.0,0 161620,3.0,1 245610,4.0,2 326722,5.0,3 214578,0.0,4 312365,5.0,5 232454,4.0,6 178368,1.0,0 97152,1.0,1 222813,4.0,2 285852,4.0,3 192149,1.0,4 142241,1.0,5 173011,2.0,6 56488,3.0,0 89572,2.0,1 356082,2.0,2 172799,0.0,3 142300,1.0,4 78432,2.0,5 539023,9.0,6 62389,1.0,0 70247,1.0,1 89229,0.0,2 94583,1.0,3 102455,0.0,4 129270,0.0,5 311409,1.0,6 1837026,0.0,0 361824,0.0,1 111379,2.0,2 76337,2.0,3 96747,0.0,4 92058,0.0,5 81929,2.0,6 143423,0.0,0 82939,0.0,1 74403,1.0,2 68234,0.0,3 94556,1.0,4 80311,0.0,5 75283,3.0,6 77724,0.0,0 49229,2.0,1 65708,2.0,2 273864,1.0,3 1711281,0.0,4 1900253,5.0,5 343071,1.0,6 1551326,0.0,0 56636,1.0,1 272782,2.0,2 1785678,0.0,3 241866,0.0,4 461904,0.0,5 2191901,2.0,6 102925,0.0,0 242778,1.0,1 298608,0.0,2 322458,10.0,3 216027,9.0,4 916052,12.0,5 193278,12.0,6 263207,8.0,0 672948,10.0,1 281909,1.0,2 384562,1.0,3 1027375,2.0,4 828905,9.0,5 624188,22.0,6 392218,8.0,0 292581,10.0,1 299869,12.0,2 769455,20.0,3 316443,8.0,4 1212864,24.0,5 1397338,28.0,6 223249,8.0,0 191264,14.0,1
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
from __future__ import annotations class Node: """ A Node has data variable and pointers to Nodes to its left and right. """ def __init__(self, data: int) -> None: self.data = data self.left: Node | None = None self.right: Node | None = None def display(tree: Node | None) -> None: # In Order traversal of the tree """ >>> root = Node(1) >>> root.left = Node(0) >>> root.right = Node(2) >>> display(root) 0 1 2 >>> display(root.right) 2 """ if tree: display(tree.left) print(tree.data) display(tree.right) def depth_of_tree(tree: Node | None) -> int: """ Recursive function that returns the depth of a binary tree. >>> root = Node(0) >>> depth_of_tree(root) 1 >>> root.left = Node(0) >>> depth_of_tree(root) 2 >>> root.right = Node(0) >>> depth_of_tree(root) 2 >>> root.left.right = Node(0) >>> depth_of_tree(root) 3 >>> depth_of_tree(root.left) 2 """ return 1 + max(depth_of_tree(tree.left), depth_of_tree(tree.right)) if tree else 0 def is_full_binary_tree(tree: Node) -> bool: """ Returns True if this is a full binary tree >>> root = Node(0) >>> is_full_binary_tree(root) True >>> root.left = Node(0) >>> is_full_binary_tree(root) False >>> root.right = Node(0) >>> is_full_binary_tree(root) True >>> root.left.left = Node(0) >>> is_full_binary_tree(root) False >>> root.right.right = Node(0) >>> is_full_binary_tree(root) False """ if not tree: return True if tree.left and tree.right: return is_full_binary_tree(tree.left) and is_full_binary_tree(tree.right) else: return not tree.left and not tree.right def main() -> None: # Main function for testing. tree = Node(1) tree.left = Node(2) tree.right = Node(3) tree.left.left = Node(4) tree.left.right = Node(5) tree.left.right.left = Node(6) tree.right.left = Node(7) tree.right.left.left = Node(8) tree.right.left.left.right = Node(9) print(is_full_binary_tree(tree)) print(depth_of_tree(tree)) print("Tree is: ") display(tree) if __name__ == "__main__": main()
from __future__ import annotations class Node: """ A Node has data variable and pointers to Nodes to its left and right. """ def __init__(self, data: int) -> None: self.data = data self.left: Node | None = None self.right: Node | None = None def display(tree: Node | None) -> None: # In Order traversal of the tree """ >>> root = Node(1) >>> root.left = Node(0) >>> root.right = Node(2) >>> display(root) 0 1 2 >>> display(root.right) 2 """ if tree: display(tree.left) print(tree.data) display(tree.right) def depth_of_tree(tree: Node | None) -> int: """ Recursive function that returns the depth of a binary tree. >>> root = Node(0) >>> depth_of_tree(root) 1 >>> root.left = Node(0) >>> depth_of_tree(root) 2 >>> root.right = Node(0) >>> depth_of_tree(root) 2 >>> root.left.right = Node(0) >>> depth_of_tree(root) 3 >>> depth_of_tree(root.left) 2 """ return 1 + max(depth_of_tree(tree.left), depth_of_tree(tree.right)) if tree else 0 def is_full_binary_tree(tree: Node) -> bool: """ Returns True if this is a full binary tree >>> root = Node(0) >>> is_full_binary_tree(root) True >>> root.left = Node(0) >>> is_full_binary_tree(root) False >>> root.right = Node(0) >>> is_full_binary_tree(root) True >>> root.left.left = Node(0) >>> is_full_binary_tree(root) False >>> root.right.right = Node(0) >>> is_full_binary_tree(root) False """ if not tree: return True if tree.left and tree.right: return is_full_binary_tree(tree.left) and is_full_binary_tree(tree.right) else: return not tree.left and not tree.right def main() -> None: # Main function for testing. tree = Node(1) tree.left = Node(2) tree.right = Node(3) tree.left.left = Node(4) tree.left.right = Node(5) tree.left.right.left = Node(6) tree.right.left = Node(7) tree.right.left.left = Node(8) tree.right.left.left.right = Node(9) print(is_full_binary_tree(tree)) print(depth_of_tree(tree)) print("Tree is: ") display(tree) if __name__ == "__main__": main()
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
def bin_exp_mod(a, n, b): """ >>> bin_exp_mod(3, 4, 5) 1 >>> bin_exp_mod(7, 13, 10) 7 """ # mod b assert b != 0, "This cannot accept modulo that is == 0" if n == 0: return 1 if n % 2 == 1: return (bin_exp_mod(a, n - 1, b) * a) % b r = bin_exp_mod(a, n / 2, b) return (r * r) % b if __name__ == "__main__": try: BASE = int(input("Enter Base : ").strip()) POWER = int(input("Enter Power : ").strip()) MODULO = int(input("Enter Modulo : ").strip()) except ValueError: print("Invalid literal for integer") print(bin_exp_mod(BASE, POWER, MODULO))
def bin_exp_mod(a, n, b): """ >>> bin_exp_mod(3, 4, 5) 1 >>> bin_exp_mod(7, 13, 10) 7 """ # mod b assert b != 0, "This cannot accept modulo that is == 0" if n == 0: return 1 if n % 2 == 1: return (bin_exp_mod(a, n - 1, b) * a) % b r = bin_exp_mod(a, n / 2, b) return (r * r) % b if __name__ == "__main__": try: BASE = int(input("Enter Base : ").strip()) POWER = int(input("Enter Power : ").strip()) MODULO = int(input("Enter Modulo : ").strip()) except ValueError: print("Invalid literal for integer") print(bin_exp_mod(BASE, POWER, MODULO))
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
""" A perfect number is a number for which the sum of its proper divisors is exactly equal to the number. For example, the sum of the proper divisors of 28 would be 1 + 2 + 4 + 7 + 14 = 28, which means that 28 is a perfect number. A number n is called deficient if the sum of its proper divisors is less than n and it is called abundant if this sum exceeds n. As 12 is the smallest abundant number, 1 + 2 + 3 + 4 + 6 = 16, the smallest number that can be written as the sum of two abundant numbers is 24. By mathematical analysis, it can be shown that all integers greater than 28123 can be written as the sum of two abundant numbers. However, this upper limit cannot be reduced any further by analysis even though it is known that the greatest number that cannot be expressed as the sum of two abundant numbers is less than this limit. Find the sum of all the positive integers which cannot be written as the sum of two abundant numbers. """ def solution(limit=28123): """ Finds the sum of all the positive integers which cannot be written as the sum of two abundant numbers as described by the statement above. >>> solution() 4179871 """ sum_divs = [1] * (limit + 1) for i in range(2, int(limit**0.5) + 1): sum_divs[i * i] += i for k in range(i + 1, limit // i + 1): sum_divs[k * i] += k + i abundants = set() res = 0 for n in range(1, limit + 1): if sum_divs[n] > n: abundants.add(n) if not any((n - a in abundants) for a in abundants): res += n return res if __name__ == "__main__": print(solution())
""" A perfect number is a number for which the sum of its proper divisors is exactly equal to the number. For example, the sum of the proper divisors of 28 would be 1 + 2 + 4 + 7 + 14 = 28, which means that 28 is a perfect number. A number n is called deficient if the sum of its proper divisors is less than n and it is called abundant if this sum exceeds n. As 12 is the smallest abundant number, 1 + 2 + 3 + 4 + 6 = 16, the smallest number that can be written as the sum of two abundant numbers is 24. By mathematical analysis, it can be shown that all integers greater than 28123 can be written as the sum of two abundant numbers. However, this upper limit cannot be reduced any further by analysis even though it is known that the greatest number that cannot be expressed as the sum of two abundant numbers is less than this limit. Find the sum of all the positive integers which cannot be written as the sum of two abundant numbers. """ def solution(limit=28123): """ Finds the sum of all the positive integers which cannot be written as the sum of two abundant numbers as described by the statement above. >>> solution() 4179871 """ sum_divs = [1] * (limit + 1) for i in range(2, int(limit**0.5) + 1): sum_divs[i * i] += i for k in range(i + 1, limit // i + 1): sum_divs[k * i] += k + i abundants = set() res = 0 for n in range(1, limit + 1): if sum_divs[n] > n: abundants.add(n) if not any((n - a in abundants) for a in abundants): res += n return res if __name__ == "__main__": print(solution())
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
from math import factorial """ https://en.wikipedia.org/wiki/Automatic_differentiation#Automatic_differentiation_using_dual_numbers https://blog.jliszka.org/2013/10/24/exact-numeric-nth-derivatives.html Note this only works for basic functions, f(x) where the power of x is positive. """ class Dual: def __init__(self, real, rank): self.real = real if isinstance(rank, int): self.duals = [1] * rank else: self.duals = rank def __repr__(self): return ( f"{self.real}+" f"{'+'.join(str(dual)+'E'+str(n+1)for n,dual in enumerate(self.duals))}" ) def reduce(self): cur = self.duals.copy() while cur[-1] == 0: cur.pop(-1) return Dual(self.real, cur) def __add__(self, other): if not isinstance(other, Dual): return Dual(self.real + other, self.duals) s_dual = self.duals.copy() o_dual = other.duals.copy() if len(s_dual) > len(o_dual): o_dual.extend([1] * (len(s_dual) - len(o_dual))) elif len(s_dual) < len(o_dual): s_dual.extend([1] * (len(o_dual) - len(s_dual))) new_duals = [] for i in range(len(s_dual)): new_duals.append(s_dual[i] + o_dual[i]) return Dual(self.real + other.real, new_duals) __radd__ = __add__ def __sub__(self, other): return self + other * -1 def __mul__(self, other): if not isinstance(other, Dual): new_duals = [] for i in self.duals: new_duals.append(i * other) return Dual(self.real * other, new_duals) new_duals = [0] * (len(self.duals) + len(other.duals) + 1) for i, item in enumerate(self.duals): for j, jtem in enumerate(other.duals): new_duals[i + j + 1] += item * jtem for k in range(len(self.duals)): new_duals[k] += self.duals[k] * other.real for index in range(len(other.duals)): new_duals[index] += other.duals[index] * self.real return Dual(self.real * other.real, new_duals) __rmul__ = __mul__ def __truediv__(self, other): if not isinstance(other, Dual): new_duals = [] for i in self.duals: new_duals.append(i / other) return Dual(self.real / other, new_duals) raise ValueError def __floordiv__(self, other): if not isinstance(other, Dual): new_duals = [] for i in self.duals: new_duals.append(i // other) return Dual(self.real // other, new_duals) raise ValueError def __pow__(self, n): if n < 0 or isinstance(n, float): raise ValueError("power must be a positive integer") if n == 0: return 1 if n == 1: return self x = self for _ in range(n - 1): x *= self return x def differentiate(func, position, order): """ >>> differentiate(lambda x: x**2, 2, 2) 2 >>> differentiate(lambda x: x**2 * x**4, 9, 2) 196830 >>> differentiate(lambda y: 0.5 * (y + 3) ** 6, 3.5, 4) 7605.0 >>> differentiate(lambda y: y ** 2, 4, 3) 0 >>> differentiate(8, 8, 8) Traceback (most recent call last): ... ValueError: differentiate() requires a function as input for func >>> differentiate(lambda x: x **2, "", 1) Traceback (most recent call last): ... ValueError: differentiate() requires a float as input for position >>> differentiate(lambda x: x**2, 3, "") Traceback (most recent call last): ... ValueError: differentiate() requires an int as input for order """ if not callable(func): raise ValueError("differentiate() requires a function as input for func") if not isinstance(position, (float, int)): raise ValueError("differentiate() requires a float as input for position") if not isinstance(order, int): raise ValueError("differentiate() requires an int as input for order") d = Dual(position, 1) result = func(d) if order == 0: return result.real return result.duals[order - 1] * factorial(order) if __name__ == "__main__": import doctest doctest.testmod() def f(y): return y**2 * y**4 print(differentiate(f, 9, 2))
from math import factorial """ https://en.wikipedia.org/wiki/Automatic_differentiation#Automatic_differentiation_using_dual_numbers https://blog.jliszka.org/2013/10/24/exact-numeric-nth-derivatives.html Note this only works for basic functions, f(x) where the power of x is positive. """ class Dual: def __init__(self, real, rank): self.real = real if isinstance(rank, int): self.duals = [1] * rank else: self.duals = rank def __repr__(self): return ( f"{self.real}+" f"{'+'.join(str(dual)+'E'+str(n+1)for n,dual in enumerate(self.duals))}" ) def reduce(self): cur = self.duals.copy() while cur[-1] == 0: cur.pop(-1) return Dual(self.real, cur) def __add__(self, other): if not isinstance(other, Dual): return Dual(self.real + other, self.duals) s_dual = self.duals.copy() o_dual = other.duals.copy() if len(s_dual) > len(o_dual): o_dual.extend([1] * (len(s_dual) - len(o_dual))) elif len(s_dual) < len(o_dual): s_dual.extend([1] * (len(o_dual) - len(s_dual))) new_duals = [] for i in range(len(s_dual)): new_duals.append(s_dual[i] + o_dual[i]) return Dual(self.real + other.real, new_duals) __radd__ = __add__ def __sub__(self, other): return self + other * -1 def __mul__(self, other): if not isinstance(other, Dual): new_duals = [] for i in self.duals: new_duals.append(i * other) return Dual(self.real * other, new_duals) new_duals = [0] * (len(self.duals) + len(other.duals) + 1) for i, item in enumerate(self.duals): for j, jtem in enumerate(other.duals): new_duals[i + j + 1] += item * jtem for k in range(len(self.duals)): new_duals[k] += self.duals[k] * other.real for index in range(len(other.duals)): new_duals[index] += other.duals[index] * self.real return Dual(self.real * other.real, new_duals) __rmul__ = __mul__ def __truediv__(self, other): if not isinstance(other, Dual): new_duals = [] for i in self.duals: new_duals.append(i / other) return Dual(self.real / other, new_duals) raise ValueError def __floordiv__(self, other): if not isinstance(other, Dual): new_duals = [] for i in self.duals: new_duals.append(i // other) return Dual(self.real // other, new_duals) raise ValueError def __pow__(self, n): if n < 0 or isinstance(n, float): raise ValueError("power must be a positive integer") if n == 0: return 1 if n == 1: return self x = self for _ in range(n - 1): x *= self return x def differentiate(func, position, order): """ >>> differentiate(lambda x: x**2, 2, 2) 2 >>> differentiate(lambda x: x**2 * x**4, 9, 2) 196830 >>> differentiate(lambda y: 0.5 * (y + 3) ** 6, 3.5, 4) 7605.0 >>> differentiate(lambda y: y ** 2, 4, 3) 0 >>> differentiate(8, 8, 8) Traceback (most recent call last): ... ValueError: differentiate() requires a function as input for func >>> differentiate(lambda x: x **2, "", 1) Traceback (most recent call last): ... ValueError: differentiate() requires a float as input for position >>> differentiate(lambda x: x**2, 3, "") Traceback (most recent call last): ... ValueError: differentiate() requires an int as input for order """ if not callable(func): raise ValueError("differentiate() requires a function as input for func") if not isinstance(position, (float, int)): raise ValueError("differentiate() requires a float as input for position") if not isinstance(order, int): raise ValueError("differentiate() requires an int as input for order") d = Dual(position, 1) result = func(d) if order == 0: return result.real return result.duals[order - 1] * factorial(order) if __name__ == "__main__": import doctest doctest.testmod() def f(y): return y**2 * y**4 print(differentiate(f, 9, 2))
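A brief note on why `differentiate` returns `result.duals[order - 1] * factorial(order)`: for the polynomial-like functions this module supports, evaluating f at a dual number with a nilpotent part reproduces a truncated Taylor expansion, so each dual coefficient is a scaled derivative. A sketch in standard notation (the symbol ε here is just the dual unit, not a name taken from the code):

```latex
% Taylor expansion at a with nilpotent dual unit \varepsilon (\varepsilon^{n+1} = 0):
f(a + \varepsilon) = \sum_{k=0}^{n} \frac{f^{(k)}(a)}{k!}\,\varepsilon^{k}
```

Reading off the coefficient of the k-th dual term and multiplying by k! therefore recovers the k-th derivative, which is what the final line of `differentiate` computes.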
-1
TheAlgorithms/Python
9,076
Fix typos
* [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
omahs
"2023-09-22T13:22:15Z"
"2023-09-23T08:53:09Z"
dc50add8a78ebf34bc7bb050c1a0e61d207b9544
b203150ac481743a6d8c1ef01091712a54dfbf6c
Fix typos. * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change?
""" Project Euler Problem 234: https://projecteuler.net/problem=234 For any integer n, consider the three functions f1,n(x,y,z) = x^(n+1) + y^(n+1) - z^(n+1) f2,n(x,y,z) = (xy + yz + zx)*(x^(n-1) + y^(n-1) - z^(n-1)) f3,n(x,y,z) = xyz*(xn-2 + yn-2 - zn-2) and their combination fn(x,y,z) = f1,n(x,y,z) + f2,n(x,y,z) - f3,n(x,y,z) We call (x,y,z) a golden triple of order k if x, y, and z are all rational numbers of the form a / b with 0 < a < b ≤ k and there is (at least) one integer n, so that fn(x,y,z) = 0. Let s(x,y,z) = x + y + z. Let t = u / v be the sum of all distinct s(x,y,z) for all golden triples (x,y,z) of order 35. All the s(x,y,z) and t must be in reduced form. Find u + v. Solution: By expanding the brackets it is easy to show that fn(x, y, z) = (x + y + z) * (x^n + y^n - z^n). Since x,y,z are positive, the requirement fn(x, y, z) = 0 is fulfilled if and only if x^n + y^n = z^n. By Fermat's Last Theorem, this means that the absolute value of n can not exceed 2, i.e. n is in {-2, -1, 0, 1, 2}. We can eliminate n = 0 since then the equation would reduce to 1 + 1 = 1, for which there are no solutions. So all we have to do is iterate through the possible numerators and denominators of x and y, calculate the corresponding z, and check if the corresponding numerator and denominator are integer and satisfy 0 < z_num < z_den <= 0. We use a set "uniquq_s" to make sure there are no duplicates, and the fractions.Fraction class to make sure we get the right numerator and denominator. Reference: https://en.wikipedia.org/wiki/Fermat%27s_Last_Theorem """ from __future__ import annotations from fractions import Fraction from math import gcd, sqrt def is_sq(number: int) -> bool: """ Check if number is a perfect square. >>> is_sq(1) True >>> is_sq(1000001) False >>> is_sq(1000000) True """ sq: int = int(number**0.5) return number == sq * sq def add_three( x_num: int, x_den: int, y_num: int, y_den: int, z_num: int, z_den: int ) -> tuple[int, int]: """ Given the numerators and denominators of three fractions, return the numerator and denominator of their sum in lowest form. >>> add_three(1, 3, 1, 3, 1, 3) (1, 1) >>> add_three(2, 5, 4, 11, 12, 3) (262, 55) """ top: int = x_num * y_den * z_den + y_num * x_den * z_den + z_num * x_den * y_den bottom: int = x_den * y_den * z_den hcf: int = gcd(top, bottom) top //= hcf bottom //= hcf return top, bottom def solution(order: int = 35) -> int: """ Find the sum of the numerator and denominator of the sum of all s(x,y,z) for golden triples (x,y,z) of the given order. 
>>> solution(5) 296 >>> solution(10) 12519 >>> solution(20) 19408891927 """ unique_s: set = set() hcf: int total: Fraction = Fraction(0) fraction_sum: tuple[int, int] for x_num in range(1, order + 1): for x_den in range(x_num + 1, order + 1): for y_num in range(1, order + 1): for y_den in range(y_num + 1, order + 1): # n=1 z_num = x_num * y_den + x_den * y_num z_den = x_den * y_den hcf = gcd(z_num, z_den) z_num //= hcf z_den //= hcf if 0 < z_num < z_den <= order: fraction_sum = add_three( x_num, x_den, y_num, y_den, z_num, z_den ) unique_s.add(fraction_sum) # n=2 z_num = ( x_num * x_num * y_den * y_den + x_den * x_den * y_num * y_num ) z_den = x_den * x_den * y_den * y_den if is_sq(z_num) and is_sq(z_den): z_num = int(sqrt(z_num)) z_den = int(sqrt(z_den)) hcf = gcd(z_num, z_den) z_num //= hcf z_den //= hcf if 0 < z_num < z_den <= order: fraction_sum = add_three( x_num, x_den, y_num, y_den, z_num, z_den ) unique_s.add(fraction_sum) # n=-1 z_num = x_num * y_num z_den = x_den * y_num + x_num * y_den hcf = gcd(z_num, z_den) z_num //= hcf z_den //= hcf if 0 < z_num < z_den <= order: fraction_sum = add_three( x_num, x_den, y_num, y_den, z_num, z_den ) unique_s.add(fraction_sum) # n=-2 z_num = x_num * x_num * y_num * y_num z_den = ( x_den * x_den * y_num * y_num + x_num * x_num * y_den * y_den ) if is_sq(z_num) and is_sq(z_den): z_num = int(sqrt(z_num)) z_den = int(sqrt(z_den)) hcf = gcd(z_num, z_den) z_num //= hcf z_den //= hcf if 0 < z_num < z_den <= order: fraction_sum = add_three( x_num, x_den, y_num, y_den, z_num, z_den ) unique_s.add(fraction_sum) for num, den in unique_s: total += Fraction(num, den) return total.denominator + total.numerator if __name__ == "__main__": print(f"{solution() = }")
""" Project Euler Problem 234: https://projecteuler.net/problem=234 For any integer n, consider the three functions f1,n(x,y,z) = x^(n+1) + y^(n+1) - z^(n+1) f2,n(x,y,z) = (xy + yz + zx)*(x^(n-1) + y^(n-1) - z^(n-1)) f3,n(x,y,z) = xyz*(xn-2 + yn-2 - zn-2) and their combination fn(x,y,z) = f1,n(x,y,z) + f2,n(x,y,z) - f3,n(x,y,z) We call (x,y,z) a golden triple of order k if x, y, and z are all rational numbers of the form a / b with 0 < a < b ≤ k and there is (at least) one integer n, so that fn(x,y,z) = 0. Let s(x,y,z) = x + y + z. Let t = u / v be the sum of all distinct s(x,y,z) for all golden triples (x,y,z) of order 35. All the s(x,y,z) and t must be in reduced form. Find u + v. Solution: By expanding the brackets it is easy to show that fn(x, y, z) = (x + y + z) * (x^n + y^n - z^n). Since x,y,z are positive, the requirement fn(x, y, z) = 0 is fulfilled if and only if x^n + y^n = z^n. By Fermat's Last Theorem, this means that the absolute value of n can not exceed 2, i.e. n is in {-2, -1, 0, 1, 2}. We can eliminate n = 0 since then the equation would reduce to 1 + 1 = 1, for which there are no solutions. So all we have to do is iterate through the possible numerators and denominators of x and y, calculate the corresponding z, and check if the corresponding numerator and denominator are integer and satisfy 0 < z_num < z_den <= 0. We use a set "uniquq_s" to make sure there are no duplicates, and the fractions.Fraction class to make sure we get the right numerator and denominator. Reference: https://en.wikipedia.org/wiki/Fermat%27s_Last_Theorem """ from __future__ import annotations from fractions import Fraction from math import gcd, sqrt def is_sq(number: int) -> bool: """ Check if number is a perfect square. >>> is_sq(1) True >>> is_sq(1000001) False >>> is_sq(1000000) True """ sq: int = int(number**0.5) return number == sq * sq def add_three( x_num: int, x_den: int, y_num: int, y_den: int, z_num: int, z_den: int ) -> tuple[int, int]: """ Given the numerators and denominators of three fractions, return the numerator and denominator of their sum in lowest form. >>> add_three(1, 3, 1, 3, 1, 3) (1, 1) >>> add_three(2, 5, 4, 11, 12, 3) (262, 55) """ top: int = x_num * y_den * z_den + y_num * x_den * z_den + z_num * x_den * y_den bottom: int = x_den * y_den * z_den hcf: int = gcd(top, bottom) top //= hcf bottom //= hcf return top, bottom def solution(order: int = 35) -> int: """ Find the sum of the numerator and denominator of the sum of all s(x,y,z) for golden triples (x,y,z) of the given order. 
>>> solution(5) 296 >>> solution(10) 12519 >>> solution(20) 19408891927 """ unique_s: set = set() hcf: int total: Fraction = Fraction(0) fraction_sum: tuple[int, int] for x_num in range(1, order + 1): for x_den in range(x_num + 1, order + 1): for y_num in range(1, order + 1): for y_den in range(y_num + 1, order + 1): # n=1 z_num = x_num * y_den + x_den * y_num z_den = x_den * y_den hcf = gcd(z_num, z_den) z_num //= hcf z_den //= hcf if 0 < z_num < z_den <= order: fraction_sum = add_three( x_num, x_den, y_num, y_den, z_num, z_den ) unique_s.add(fraction_sum) # n=2 z_num = ( x_num * x_num * y_den * y_den + x_den * x_den * y_num * y_num ) z_den = x_den * x_den * y_den * y_den if is_sq(z_num) and is_sq(z_den): z_num = int(sqrt(z_num)) z_den = int(sqrt(z_den)) hcf = gcd(z_num, z_den) z_num //= hcf z_den //= hcf if 0 < z_num < z_den <= order: fraction_sum = add_three( x_num, x_den, y_num, y_den, z_num, z_den ) unique_s.add(fraction_sum) # n=-1 z_num = x_num * y_num z_den = x_den * y_num + x_num * y_den hcf = gcd(z_num, z_den) z_num //= hcf z_den //= hcf if 0 < z_num < z_den <= order: fraction_sum = add_three( x_num, x_den, y_num, y_den, z_num, z_den ) unique_s.add(fraction_sum) # n=-2 z_num = x_num * x_num * y_num * y_num z_den = ( x_den * x_den * y_num * y_num + x_num * x_num * y_den * y_den ) if is_sq(z_num) and is_sq(z_den): z_num = int(sqrt(z_num)) z_den = int(sqrt(z_den)) hcf = gcd(z_num, z_den) z_num //= hcf z_den //= hcf if 0 < z_num < z_den <= order: fraction_sum = add_three( x_num, x_den, y_num, y_den, z_num, z_den ) unique_s.add(fraction_sum) for num, den in unique_s: total += Fraction(num, den) return total.denominator + total.numerator if __name__ == "__main__": print(f"{solution() = }")
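The factorisation used in the solution above, f1,n + f2,n - f3,n = (x + y + z) * (x^n + y^n - z^n), can be spot-checked with exact rational arithmetic. A minimal standalone sketch, assuming sample values and a hypothetical helper name f_n (neither taken from the repository file above):

from fractions import Fraction


def f_n(x: Fraction, y: Fraction, z: Fraction, n: int) -> Fraction:
    # f1,n + f2,n - f3,n exactly as defined in the problem statement above
    f1 = x ** (n + 1) + y ** (n + 1) - z ** (n + 1)
    f2 = (x * y + y * z + z * x) * (x ** (n - 1) + y ** (n - 1) - z ** (n - 1))
    f3 = x * y * z * (x ** (n - 2) + y ** (n - 2) - z ** (n - 2))
    return f1 + f2 - f3


# sample rationals; the identity is algebraic, so any nonzero values work
x, y, z = Fraction(2, 5), Fraction(4, 11), Fraction(3, 7)
for n in (-2, -1, 1, 2):
    assert f_n(x, y, z, n) == (x + y + z) * (x**n + y**n - z**n)
print("factorisation holds for the sampled values")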
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
## Arithmetic Analysis * [Bisection](arithmetic_analysis/bisection.py) * [Gaussian Elimination](arithmetic_analysis/gaussian_elimination.py) * [In Static Equilibrium](arithmetic_analysis/in_static_equilibrium.py) * [Intersection](arithmetic_analysis/intersection.py) * [Jacobi Iteration Method](arithmetic_analysis/jacobi_iteration_method.py) * [Junk](arithmetic_analysis/junk.py) * [Lu Decomposition](arithmetic_analysis/lu_decomposition.py) * [Newton Forward Interpolation](arithmetic_analysis/newton_forward_interpolation.py) * [Newton Method](arithmetic_analysis/newton_method.py) * [Newton Raphson](arithmetic_analysis/newton_raphson.py) * [Newton Raphson New](arithmetic_analysis/newton_raphson_new.py) * [Secant Method](arithmetic_analysis/secant_method.py) ## Audio Filters * [Butterworth Filter](audio_filters/butterworth_filter.py) * [Iir Filter](audio_filters/iir_filter.py) * [Show Response](audio_filters/show_response.py) ## Backtracking * [All Combinations](backtracking/all_combinations.py) * [All Permutations](backtracking/all_permutations.py) * [All Subsequences](backtracking/all_subsequences.py) * [Coloring](backtracking/coloring.py) * [Combination Sum](backtracking/combination_sum.py) * [Hamiltonian Cycle](backtracking/hamiltonian_cycle.py) * [Knight Tour](backtracking/knight_tour.py) * [Minimax](backtracking/minimax.py) * [Minmax](backtracking/minmax.py) * [N Queens](backtracking/n_queens.py) * [N Queens Math](backtracking/n_queens_math.py) * [Power Sum](backtracking/power_sum.py) * [Rat In Maze](backtracking/rat_in_maze.py) * [Sudoku](backtracking/sudoku.py) * [Sum Of Subsets](backtracking/sum_of_subsets.py) * [Word Search](backtracking/word_search.py) ## Bit Manipulation * [Binary And Operator](bit_manipulation/binary_and_operator.py) * [Binary Count Setbits](bit_manipulation/binary_count_setbits.py) * [Binary Count Trailing Zeros](bit_manipulation/binary_count_trailing_zeros.py) * [Binary Or Operator](bit_manipulation/binary_or_operator.py) * [Binary Shifts](bit_manipulation/binary_shifts.py) * [Binary Twos Complement](bit_manipulation/binary_twos_complement.py) * [Binary Xor Operator](bit_manipulation/binary_xor_operator.py) * [Count 1S Brian Kernighan Method](bit_manipulation/count_1s_brian_kernighan_method.py) * [Count Number Of One Bits](bit_manipulation/count_number_of_one_bits.py) * [Gray Code Sequence](bit_manipulation/gray_code_sequence.py) * [Highest Set Bit](bit_manipulation/highest_set_bit.py) * [Index Of Rightmost Set Bit](bit_manipulation/index_of_rightmost_set_bit.py) * [Is Even](bit_manipulation/is_even.py) * [Is Power Of Two](bit_manipulation/is_power_of_two.py) * [Numbers Different Signs](bit_manipulation/numbers_different_signs.py) * [Reverse Bits](bit_manipulation/reverse_bits.py) * [Single Bit Manipulation Operations](bit_manipulation/single_bit_manipulation_operations.py) ## Blockchain * [Chinese Remainder Theorem](blockchain/chinese_remainder_theorem.py) * [Diophantine Equation](blockchain/diophantine_equation.py) * [Modular Division](blockchain/modular_division.py) ## Boolean Algebra * [And Gate](boolean_algebra/and_gate.py) * [Nand Gate](boolean_algebra/nand_gate.py) * [Nor Gate](boolean_algebra/nor_gate.py) * [Not Gate](boolean_algebra/not_gate.py) * [Or Gate](boolean_algebra/or_gate.py) * [Quine Mc Cluskey](boolean_algebra/quine_mc_cluskey.py) * [Xnor Gate](boolean_algebra/xnor_gate.py) * [Xor Gate](boolean_algebra/xor_gate.py) ## Cellular Automata * [Conways Game Of Life](cellular_automata/conways_game_of_life.py) * [Game Of 
Life](cellular_automata/game_of_life.py) * [Langtons Ant](cellular_automata/langtons_ant.py) * [Nagel Schrekenberg](cellular_automata/nagel_schrekenberg.py) * [One Dimensional](cellular_automata/one_dimensional.py) * [Wa Tor](cellular_automata/wa_tor.py) ## Ciphers * [A1Z26](ciphers/a1z26.py) * [Affine Cipher](ciphers/affine_cipher.py) * [Atbash](ciphers/atbash.py) * [Autokey](ciphers/autokey.py) * [Baconian Cipher](ciphers/baconian_cipher.py) * [Base16](ciphers/base16.py) * [Base32](ciphers/base32.py) * [Base64](ciphers/base64.py) * [Base85](ciphers/base85.py) * [Beaufort Cipher](ciphers/beaufort_cipher.py) * [Bifid](ciphers/bifid.py) * [Brute Force Caesar Cipher](ciphers/brute_force_caesar_cipher.py) * [Caesar Cipher](ciphers/caesar_cipher.py) * [Cryptomath Module](ciphers/cryptomath_module.py) * [Decrypt Caesar With Chi Squared](ciphers/decrypt_caesar_with_chi_squared.py) * [Deterministic Miller Rabin](ciphers/deterministic_miller_rabin.py) * [Diffie](ciphers/diffie.py) * [Diffie Hellman](ciphers/diffie_hellman.py) * [Elgamal Key Generator](ciphers/elgamal_key_generator.py) * [Enigma Machine2](ciphers/enigma_machine2.py) * [Hill Cipher](ciphers/hill_cipher.py) * [Mixed Keyword Cypher](ciphers/mixed_keyword_cypher.py) * [Mono Alphabetic Ciphers](ciphers/mono_alphabetic_ciphers.py) * [Morse Code](ciphers/morse_code.py) * [Onepad Cipher](ciphers/onepad_cipher.py) * [Playfair Cipher](ciphers/playfair_cipher.py) * [Polybius](ciphers/polybius.py) * [Porta Cipher](ciphers/porta_cipher.py) * [Rabin Miller](ciphers/rabin_miller.py) * [Rail Fence Cipher](ciphers/rail_fence_cipher.py) * [Rot13](ciphers/rot13.py) * [Rsa Cipher](ciphers/rsa_cipher.py) * [Rsa Factorization](ciphers/rsa_factorization.py) * [Rsa Key Generator](ciphers/rsa_key_generator.py) * [Shuffled Shift Cipher](ciphers/shuffled_shift_cipher.py) * [Simple Keyword Cypher](ciphers/simple_keyword_cypher.py) * [Simple Substitution Cipher](ciphers/simple_substitution_cipher.py) * [Trafid Cipher](ciphers/trafid_cipher.py) * [Transposition Cipher](ciphers/transposition_cipher.py) * [Transposition Cipher Encrypt Decrypt File](ciphers/transposition_cipher_encrypt_decrypt_file.py) * [Vigenere Cipher](ciphers/vigenere_cipher.py) * [Xor Cipher](ciphers/xor_cipher.py) ## Compression * [Burrows Wheeler](compression/burrows_wheeler.py) * [Huffman](compression/huffman.py) * [Lempel Ziv](compression/lempel_ziv.py) * [Lempel Ziv Decompress](compression/lempel_ziv_decompress.py) * [Lz77](compression/lz77.py) * [Peak Signal To Noise Ratio](compression/peak_signal_to_noise_ratio.py) * [Run Length Encoding](compression/run_length_encoding.py) ## Computer Vision * [Cnn Classification](computer_vision/cnn_classification.py) * [Flip Augmentation](computer_vision/flip_augmentation.py) * [Haralick Descriptors](computer_vision/haralick_descriptors.py) * [Harris Corner](computer_vision/harris_corner.py) * [Horn Schunck](computer_vision/horn_schunck.py) * [Mean Threshold](computer_vision/mean_threshold.py) * [Mosaic Augmentation](computer_vision/mosaic_augmentation.py) * [Pooling Functions](computer_vision/pooling_functions.py) ## Conversions * [Astronomical Length Scale Conversion](conversions/astronomical_length_scale_conversion.py) * [Binary To Decimal](conversions/binary_to_decimal.py) * [Binary To Hexadecimal](conversions/binary_to_hexadecimal.py) * [Binary To Octal](conversions/binary_to_octal.py) * [Convert Number To Words](conversions/convert_number_to_words.py) * [Decimal To Any](conversions/decimal_to_any.py) * [Decimal To 
Binary](conversions/decimal_to_binary.py) * [Decimal To Hexadecimal](conversions/decimal_to_hexadecimal.py) * [Decimal To Octal](conversions/decimal_to_octal.py) * [Energy Conversions](conversions/energy_conversions.py) * [Excel Title To Column](conversions/excel_title_to_column.py) * [Hex To Bin](conversions/hex_to_bin.py) * [Hexadecimal To Decimal](conversions/hexadecimal_to_decimal.py) * [Length Conversion](conversions/length_conversion.py) * [Molecular Chemistry](conversions/molecular_chemistry.py) * [Octal To Binary](conversions/octal_to_binary.py) * [Octal To Decimal](conversions/octal_to_decimal.py) * [Prefix Conversions](conversions/prefix_conversions.py) * [Prefix Conversions String](conversions/prefix_conversions_string.py) * [Pressure Conversions](conversions/pressure_conversions.py) * [Rgb Hsv Conversion](conversions/rgb_hsv_conversion.py) * [Roman Numerals](conversions/roman_numerals.py) * [Speed Conversions](conversions/speed_conversions.py) * [Temperature Conversions](conversions/temperature_conversions.py) * [Volume Conversions](conversions/volume_conversions.py) * [Weight Conversion](conversions/weight_conversion.py) ## Data Structures * Arrays * [Permutations](data_structures/arrays/permutations.py) * [Prefix Sum](data_structures/arrays/prefix_sum.py) * [Product Sum](data_structures/arrays/product_sum.py) * Binary Tree * [Avl Tree](data_structures/binary_tree/avl_tree.py) * [Basic Binary Tree](data_structures/binary_tree/basic_binary_tree.py) * [Binary Search Tree](data_structures/binary_tree/binary_search_tree.py) * [Binary Search Tree Recursive](data_structures/binary_tree/binary_search_tree_recursive.py) * [Binary Tree Mirror](data_structures/binary_tree/binary_tree_mirror.py) * [Binary Tree Node Sum](data_structures/binary_tree/binary_tree_node_sum.py) * [Binary Tree Path Sum](data_structures/binary_tree/binary_tree_path_sum.py) * [Binary Tree Traversals](data_structures/binary_tree/binary_tree_traversals.py) * [Diff Views Of Binary Tree](data_structures/binary_tree/diff_views_of_binary_tree.py) * [Distribute Coins](data_structures/binary_tree/distribute_coins.py) * [Fenwick Tree](data_structures/binary_tree/fenwick_tree.py) * [Inorder Tree Traversal 2022](data_structures/binary_tree/inorder_tree_traversal_2022.py) * [Is Bst](data_structures/binary_tree/is_bst.py) * [Lazy Segment Tree](data_structures/binary_tree/lazy_segment_tree.py) * [Lowest Common Ancestor](data_structures/binary_tree/lowest_common_ancestor.py) * [Maximum Fenwick Tree](data_structures/binary_tree/maximum_fenwick_tree.py) * [Merge Two Binary Trees](data_structures/binary_tree/merge_two_binary_trees.py) * [Non Recursive Segment Tree](data_structures/binary_tree/non_recursive_segment_tree.py) * [Number Of Possible Binary Trees](data_structures/binary_tree/number_of_possible_binary_trees.py) * [Red Black Tree](data_structures/binary_tree/red_black_tree.py) * [Segment Tree](data_structures/binary_tree/segment_tree.py) * [Segment Tree Other](data_structures/binary_tree/segment_tree_other.py) * [Treap](data_structures/binary_tree/treap.py) * [Wavelet Tree](data_structures/binary_tree/wavelet_tree.py) * Disjoint Set * [Alternate Disjoint Set](data_structures/disjoint_set/alternate_disjoint_set.py) * [Disjoint Set](data_structures/disjoint_set/disjoint_set.py) * Hashing * [Bloom Filter](data_structures/hashing/bloom_filter.py) * [Double Hash](data_structures/hashing/double_hash.py) * [Hash Map](data_structures/hashing/hash_map.py) * [Hash Table](data_structures/hashing/hash_table.py) * [Hash Table With 
Linked List](data_structures/hashing/hash_table_with_linked_list.py) * Number Theory * [Prime Numbers](data_structures/hashing/number_theory/prime_numbers.py) * [Quadratic Probing](data_structures/hashing/quadratic_probing.py) * Tests * [Test Hash Map](data_structures/hashing/tests/test_hash_map.py) * Heap * [Binomial Heap](data_structures/heap/binomial_heap.py) * [Heap](data_structures/heap/heap.py) * [Heap Generic](data_structures/heap/heap_generic.py) * [Max Heap](data_structures/heap/max_heap.py) * [Min Heap](data_structures/heap/min_heap.py) * [Randomized Heap](data_structures/heap/randomized_heap.py) * [Skew Heap](data_structures/heap/skew_heap.py) * Linked List * [Circular Linked List](data_structures/linked_list/circular_linked_list.py) * [Deque Doubly](data_structures/linked_list/deque_doubly.py) * [Doubly Linked List](data_structures/linked_list/doubly_linked_list.py) * [Doubly Linked List Two](data_structures/linked_list/doubly_linked_list_two.py) * [From Sequence](data_structures/linked_list/from_sequence.py) * [Has Loop](data_structures/linked_list/has_loop.py) * [Is Palindrome](data_structures/linked_list/is_palindrome.py) * [Merge Two Lists](data_structures/linked_list/merge_two_lists.py) * [Middle Element Of Linked List](data_structures/linked_list/middle_element_of_linked_list.py) * [Print Reverse](data_structures/linked_list/print_reverse.py) * [Singly Linked List](data_structures/linked_list/singly_linked_list.py) * [Skip List](data_structures/linked_list/skip_list.py) * [Swap Nodes](data_structures/linked_list/swap_nodes.py) * Queue * [Circular Queue](data_structures/queue/circular_queue.py) * [Circular Queue Linked List](data_structures/queue/circular_queue_linked_list.py) * [Double Ended Queue](data_structures/queue/double_ended_queue.py) * [Linked Queue](data_structures/queue/linked_queue.py) * [Priority Queue Using List](data_structures/queue/priority_queue_using_list.py) * [Queue By List](data_structures/queue/queue_by_list.py) * [Queue By Two Stacks](data_structures/queue/queue_by_two_stacks.py) * [Queue On Pseudo Stack](data_structures/queue/queue_on_pseudo_stack.py) * Stacks * [Balanced Parentheses](data_structures/stacks/balanced_parentheses.py) * [Dijkstras Two Stack Algorithm](data_structures/stacks/dijkstras_two_stack_algorithm.py) * [Infix To Postfix Conversion](data_structures/stacks/infix_to_postfix_conversion.py) * [Infix To Prefix Conversion](data_structures/stacks/infix_to_prefix_conversion.py) * [Next Greater Element](data_structures/stacks/next_greater_element.py) * [Postfix Evaluation](data_structures/stacks/postfix_evaluation.py) * [Prefix Evaluation](data_structures/stacks/prefix_evaluation.py) * [Stack](data_structures/stacks/stack.py) * [Stack With Doubly Linked List](data_structures/stacks/stack_with_doubly_linked_list.py) * [Stack With Singly Linked List](data_structures/stacks/stack_with_singly_linked_list.py) * [Stock Span Problem](data_structures/stacks/stock_span_problem.py) * Trie * [Radix Tree](data_structures/trie/radix_tree.py) * [Trie](data_structures/trie/trie.py) ## Digital Image Processing * [Change Brightness](digital_image_processing/change_brightness.py) * [Change Contrast](digital_image_processing/change_contrast.py) * [Convert To Negative](digital_image_processing/convert_to_negative.py) * Dithering * [Burkes](digital_image_processing/dithering/burkes.py) * Edge Detection * [Canny](digital_image_processing/edge_detection/canny.py) * Filters * [Bilateral Filter](digital_image_processing/filters/bilateral_filter.py) * 
[Convolve](digital_image_processing/filters/convolve.py) * [Gabor Filter](digital_image_processing/filters/gabor_filter.py) * [Gaussian Filter](digital_image_processing/filters/gaussian_filter.py) * [Local Binary Pattern](digital_image_processing/filters/local_binary_pattern.py) * [Median Filter](digital_image_processing/filters/median_filter.py) * [Sobel Filter](digital_image_processing/filters/sobel_filter.py) * Histogram Equalization * [Histogram Stretch](digital_image_processing/histogram_equalization/histogram_stretch.py) * [Index Calculation](digital_image_processing/index_calculation.py) * Morphological Operations * [Dilation Operation](digital_image_processing/morphological_operations/dilation_operation.py) * [Erosion Operation](digital_image_processing/morphological_operations/erosion_operation.py) * Resize * [Resize](digital_image_processing/resize/resize.py) * Rotation * [Rotation](digital_image_processing/rotation/rotation.py) * [Sepia](digital_image_processing/sepia.py) * [Test Digital Image Processing](digital_image_processing/test_digital_image_processing.py) ## Divide And Conquer * [Closest Pair Of Points](divide_and_conquer/closest_pair_of_points.py) * [Convex Hull](divide_and_conquer/convex_hull.py) * [Heaps Algorithm](divide_and_conquer/heaps_algorithm.py) * [Heaps Algorithm Iterative](divide_and_conquer/heaps_algorithm_iterative.py) * [Inversions](divide_and_conquer/inversions.py) * [Kth Order Statistic](divide_and_conquer/kth_order_statistic.py) * [Max Difference Pair](divide_and_conquer/max_difference_pair.py) * [Max Subarray](divide_and_conquer/max_subarray.py) * [Mergesort](divide_and_conquer/mergesort.py) * [Peak](divide_and_conquer/peak.py) * [Power](divide_and_conquer/power.py) * [Strassen Matrix Multiplication](divide_and_conquer/strassen_matrix_multiplication.py) ## Dynamic Programming * [Abbreviation](dynamic_programming/abbreviation.py) * [All Construct](dynamic_programming/all_construct.py) * [Bitmask](dynamic_programming/bitmask.py) * [Catalan Numbers](dynamic_programming/catalan_numbers.py) * [Climbing Stairs](dynamic_programming/climbing_stairs.py) * [Combination Sum Iv](dynamic_programming/combination_sum_iv.py) * [Edit Distance](dynamic_programming/edit_distance.py) * [Factorial](dynamic_programming/factorial.py) * [Fast Fibonacci](dynamic_programming/fast_fibonacci.py) * [Fibonacci](dynamic_programming/fibonacci.py) * [Fizz Buzz](dynamic_programming/fizz_buzz.py) * [Floyd Warshall](dynamic_programming/floyd_warshall.py) * [Integer Partition](dynamic_programming/integer_partition.py) * [Iterating Through Submasks](dynamic_programming/iterating_through_submasks.py) * [K Means Clustering Tensorflow](dynamic_programming/k_means_clustering_tensorflow.py) * [Knapsack](dynamic_programming/knapsack.py) * [Longest Common Subsequence](dynamic_programming/longest_common_subsequence.py) * [Longest Common Substring](dynamic_programming/longest_common_substring.py) * [Longest Increasing Subsequence](dynamic_programming/longest_increasing_subsequence.py) * [Longest Increasing Subsequence O(Nlogn)](dynamic_programming/longest_increasing_subsequence_o(nlogn).py) * [Longest Sub Array](dynamic_programming/longest_sub_array.py) * [Matrix Chain Order](dynamic_programming/matrix_chain_order.py) * [Max Non Adjacent Sum](dynamic_programming/max_non_adjacent_sum.py) * [Max Product Subarray](dynamic_programming/max_product_subarray.py) * [Max Subarray Sum](dynamic_programming/max_subarray_sum.py) * [Min Distance Up Bottom](dynamic_programming/min_distance_up_bottom.py) * 
[Minimum Coin Change](dynamic_programming/minimum_coin_change.py) * [Minimum Cost Path](dynamic_programming/minimum_cost_path.py) * [Minimum Partition](dynamic_programming/minimum_partition.py) * [Minimum Size Subarray Sum](dynamic_programming/minimum_size_subarray_sum.py) * [Minimum Squares To Represent A Number](dynamic_programming/minimum_squares_to_represent_a_number.py) * [Minimum Steps To One](dynamic_programming/minimum_steps_to_one.py) * [Minimum Tickets Cost](dynamic_programming/minimum_tickets_cost.py) * [Optimal Binary Search Tree](dynamic_programming/optimal_binary_search_tree.py) * [Palindrome Partitioning](dynamic_programming/palindrome_partitioning.py) * [Regex Match](dynamic_programming/regex_match.py) * [Rod Cutting](dynamic_programming/rod_cutting.py) * [Subset Generation](dynamic_programming/subset_generation.py) * [Sum Of Subset](dynamic_programming/sum_of_subset.py) * [Tribonacci](dynamic_programming/tribonacci.py) * [Viterbi](dynamic_programming/viterbi.py) * [Word Break](dynamic_programming/word_break.py) ## Electronics * [Apparent Power](electronics/apparent_power.py) * [Builtin Voltage](electronics/builtin_voltage.py) * [Carrier Concentration](electronics/carrier_concentration.py) * [Circular Convolution](electronics/circular_convolution.py) * [Coulombs Law](electronics/coulombs_law.py) * [Electric Conductivity](electronics/electric_conductivity.py) * [Electric Power](electronics/electric_power.py) * [Electrical Impedance](electronics/electrical_impedance.py) * [Ind Reactance](electronics/ind_reactance.py) * [Ohms Law](electronics/ohms_law.py) * [Real And Reactive Power](electronics/real_and_reactive_power.py) * [Resistor Equivalence](electronics/resistor_equivalence.py) * [Resonant Frequency](electronics/resonant_frequency.py) ## File Transfer * [Receive File](file_transfer/receive_file.py) * [Send File](file_transfer/send_file.py) * Tests * [Test Send File](file_transfer/tests/test_send_file.py) ## Financial * [Equated Monthly Installments](financial/equated_monthly_installments.py) * [Interest](financial/interest.py) * [Present Value](financial/present_value.py) * [Price Plus Tax](financial/price_plus_tax.py) ## Fractals * [Julia Sets](fractals/julia_sets.py) * [Koch Snowflake](fractals/koch_snowflake.py) * [Mandelbrot](fractals/mandelbrot.py) * [Sierpinski Triangle](fractals/sierpinski_triangle.py) ## Fuzzy Logic * [Fuzzy Operations](fuzzy_logic/fuzzy_operations.py) ## Genetic Algorithm * [Basic String](genetic_algorithm/basic_string.py) ## Geodesy * [Haversine Distance](geodesy/haversine_distance.py) * [Lamberts Ellipsoidal Distance](geodesy/lamberts_ellipsoidal_distance.py) ## Graphics * [Bezier Curve](graphics/bezier_curve.py) * [Vector3 For 2D Rendering](graphics/vector3_for_2d_rendering.py) ## Graphs * [A Star](graphs/a_star.py) * [Articulation Points](graphs/articulation_points.py) * [Basic Graphs](graphs/basic_graphs.py) * [Bellman Ford](graphs/bellman_ford.py) * [Bi Directional Dijkstra](graphs/bi_directional_dijkstra.py) * [Bidirectional A Star](graphs/bidirectional_a_star.py) * [Bidirectional Breadth First Search](graphs/bidirectional_breadth_first_search.py) * [Boruvka](graphs/boruvka.py) * [Breadth First Search](graphs/breadth_first_search.py) * [Breadth First Search 2](graphs/breadth_first_search_2.py) * [Breadth First Search Shortest Path](graphs/breadth_first_search_shortest_path.py) * [Breadth First Search Shortest Path 2](graphs/breadth_first_search_shortest_path_2.py) * [Breadth First Search Zero One Shortest 
Path](graphs/breadth_first_search_zero_one_shortest_path.py) * [Check Bipartite Graph Bfs](graphs/check_bipartite_graph_bfs.py) * [Check Bipartite Graph Dfs](graphs/check_bipartite_graph_dfs.py) * [Check Cycle](graphs/check_cycle.py) * [Connected Components](graphs/connected_components.py) * [Depth First Search](graphs/depth_first_search.py) * [Depth First Search 2](graphs/depth_first_search_2.py) * [Dijkstra](graphs/dijkstra.py) * [Dijkstra 2](graphs/dijkstra_2.py) * [Dijkstra Algorithm](graphs/dijkstra_algorithm.py) * [Dijkstra Alternate](graphs/dijkstra_alternate.py) * [Dijkstra Binary Grid](graphs/dijkstra_binary_grid.py) * [Dinic](graphs/dinic.py) * [Directed And Undirected (Weighted) Graph](graphs/directed_and_undirected_(weighted)_graph.py) * [Edmonds Karp Multiple Source And Sink](graphs/edmonds_karp_multiple_source_and_sink.py) * [Eulerian Path And Circuit For Undirected Graph](graphs/eulerian_path_and_circuit_for_undirected_graph.py) * [Even Tree](graphs/even_tree.py) * [Finding Bridges](graphs/finding_bridges.py) * [Frequent Pattern Graph Miner](graphs/frequent_pattern_graph_miner.py) * [G Topological Sort](graphs/g_topological_sort.py) * [Gale Shapley Bigraph](graphs/gale_shapley_bigraph.py) * [Graph Adjacency List](graphs/graph_adjacency_list.py) * [Graph Adjacency Matrix](graphs/graph_adjacency_matrix.py) * [Graph List](graphs/graph_list.py) * [Graphs Floyd Warshall](graphs/graphs_floyd_warshall.py) * [Greedy Best First](graphs/greedy_best_first.py) * [Greedy Min Vertex Cover](graphs/greedy_min_vertex_cover.py) * [Kahns Algorithm Long](graphs/kahns_algorithm_long.py) * [Kahns Algorithm Topo](graphs/kahns_algorithm_topo.py) * [Karger](graphs/karger.py) * [Markov Chain](graphs/markov_chain.py) * [Matching Min Vertex Cover](graphs/matching_min_vertex_cover.py) * [Minimum Path Sum](graphs/minimum_path_sum.py) * [Minimum Spanning Tree Boruvka](graphs/minimum_spanning_tree_boruvka.py) * [Minimum Spanning Tree Kruskal](graphs/minimum_spanning_tree_kruskal.py) * [Minimum Spanning Tree Kruskal2](graphs/minimum_spanning_tree_kruskal2.py) * [Minimum Spanning Tree Prims](graphs/minimum_spanning_tree_prims.py) * [Minimum Spanning Tree Prims2](graphs/minimum_spanning_tree_prims2.py) * [Multi Heuristic Astar](graphs/multi_heuristic_astar.py) * [Page Rank](graphs/page_rank.py) * [Prim](graphs/prim.py) * [Random Graph Generator](graphs/random_graph_generator.py) * [Scc Kosaraju](graphs/scc_kosaraju.py) * [Strongly Connected Components](graphs/strongly_connected_components.py) * [Tarjans Scc](graphs/tarjans_scc.py) * Tests * [Test Min Spanning Tree Kruskal](graphs/tests/test_min_spanning_tree_kruskal.py) * [Test Min Spanning Tree Prim](graphs/tests/test_min_spanning_tree_prim.py) ## Greedy Methods * [Fractional Knapsack](greedy_methods/fractional_knapsack.py) * [Fractional Knapsack 2](greedy_methods/fractional_knapsack_2.py) * [Minimum Waiting Time](greedy_methods/minimum_waiting_time.py) * [Optimal Merge Pattern](greedy_methods/optimal_merge_pattern.py) ## Hashes * [Adler32](hashes/adler32.py) * [Chaos Machine](hashes/chaos_machine.py) * [Djb2](hashes/djb2.py) * [Elf](hashes/elf.py) * [Enigma Machine](hashes/enigma_machine.py) * [Hamming Code](hashes/hamming_code.py) * [Luhn](hashes/luhn.py) * [Md5](hashes/md5.py) * [Sdbm](hashes/sdbm.py) * [Sha1](hashes/sha1.py) * [Sha256](hashes/sha256.py) ## Knapsack * [Greedy Knapsack](knapsack/greedy_knapsack.py) * [Knapsack](knapsack/knapsack.py) * [Recursive Approach Knapsack](knapsack/recursive_approach_knapsack.py) * Tests * [Test Greedy 
Knapsack](knapsack/tests/test_greedy_knapsack.py) * [Test Knapsack](knapsack/tests/test_knapsack.py) ## Linear Algebra * Src * [Conjugate Gradient](linear_algebra/src/conjugate_gradient.py) * [Lib](linear_algebra/src/lib.py) * [Polynom For Points](linear_algebra/src/polynom_for_points.py) * [Power Iteration](linear_algebra/src/power_iteration.py) * [Rank Of Matrix](linear_algebra/src/rank_of_matrix.py) * [Rayleigh Quotient](linear_algebra/src/rayleigh_quotient.py) * [Schur Complement](linear_algebra/src/schur_complement.py) * [Test Linear Algebra](linear_algebra/src/test_linear_algebra.py) * [Transformations 2D](linear_algebra/src/transformations_2d.py) ## Linear Programming * [Simplex](linear_programming/simplex.py) ## Machine Learning * [Astar](machine_learning/astar.py) * [Data Transformations](machine_learning/data_transformations.py) * [Decision Tree](machine_learning/decision_tree.py) * [Dimensionality Reduction](machine_learning/dimensionality_reduction.py) * Forecasting * [Run](machine_learning/forecasting/run.py) * [Gradient Descent](machine_learning/gradient_descent.py) * [K Means Clust](machine_learning/k_means_clust.py) * [K Nearest Neighbours](machine_learning/k_nearest_neighbours.py) * [Knn Sklearn](machine_learning/knn_sklearn.py) * [Linear Discriminant Analysis](machine_learning/linear_discriminant_analysis.py) * [Linear Regression](machine_learning/linear_regression.py) * Local Weighted Learning * [Local Weighted Learning](machine_learning/local_weighted_learning/local_weighted_learning.py) * [Logistic Regression](machine_learning/logistic_regression.py) * Lstm * [Lstm Prediction](machine_learning/lstm/lstm_prediction.py) * [Multilayer Perceptron Classifier](machine_learning/multilayer_perceptron_classifier.py) * [Polynomial Regression](machine_learning/polynomial_regression.py) * [Scoring Functions](machine_learning/scoring_functions.py) * [Self Organizing Map](machine_learning/self_organizing_map.py) * [Sequential Minimum Optimization](machine_learning/sequential_minimum_optimization.py) * [Similarity Search](machine_learning/similarity_search.py) * [Support Vector Machines](machine_learning/support_vector_machines.py) * [Word Frequency Functions](machine_learning/word_frequency_functions.py) * [Xgboost Classifier](machine_learning/xgboost_classifier.py) * [Xgboost Regressor](machine_learning/xgboost_regressor.py) ## Maths * [Abs](maths/abs.py) * [Add](maths/add.py) * [Addition Without Arithmetic](maths/addition_without_arithmetic.py) * [Aliquot Sum](maths/aliquot_sum.py) * [Allocation Number](maths/allocation_number.py) * [Arc Length](maths/arc_length.py) * [Area](maths/area.py) * [Area Under Curve](maths/area_under_curve.py) * [Armstrong Numbers](maths/armstrong_numbers.py) * [Automorphic Number](maths/automorphic_number.py) * [Average Absolute Deviation](maths/average_absolute_deviation.py) * [Average Mean](maths/average_mean.py) * [Average Median](maths/average_median.py) * [Average Mode](maths/average_mode.py) * [Bailey Borwein Plouffe](maths/bailey_borwein_plouffe.py) * [Basic Maths](maths/basic_maths.py) * [Binary Exp Mod](maths/binary_exp_mod.py) * [Binary Exponentiation](maths/binary_exponentiation.py) * [Binary Exponentiation 2](maths/binary_exponentiation_2.py) * [Binary Exponentiation 3](maths/binary_exponentiation_3.py) * [Binomial Coefficient](maths/binomial_coefficient.py) * [Binomial Distribution](maths/binomial_distribution.py) * [Bisection](maths/bisection.py) * [Carmichael Number](maths/carmichael_number.py) * [Catalan Number](maths/catalan_number.py) 
* [Ceil](maths/ceil.py) * [Check Polygon](maths/check_polygon.py) * [Chudnovsky Algorithm](maths/chudnovsky_algorithm.py) * [Collatz Sequence](maths/collatz_sequence.py) * [Combinations](maths/combinations.py) * [Continued Fraction](maths/continued_fraction.py) * [Decimal Isolate](maths/decimal_isolate.py) * [Decimal To Fraction](maths/decimal_to_fraction.py) * [Dodecahedron](maths/dodecahedron.py) * [Double Factorial Iterative](maths/double_factorial_iterative.py) * [Double Factorial Recursive](maths/double_factorial_recursive.py) * [Dual Number Automatic Differentiation](maths/dual_number_automatic_differentiation.py) * [Entropy](maths/entropy.py) * [Euclidean Distance](maths/euclidean_distance.py) * [Euclidean Gcd](maths/euclidean_gcd.py) * [Euler Method](maths/euler_method.py) * [Euler Modified](maths/euler_modified.py) * [Eulers Totient](maths/eulers_totient.py) * [Extended Euclidean Algorithm](maths/extended_euclidean_algorithm.py) * [Factorial](maths/factorial.py) * [Factors](maths/factors.py) * [Fermat Little Theorem](maths/fermat_little_theorem.py) * [Fibonacci](maths/fibonacci.py) * [Find Max](maths/find_max.py) * [Find Min](maths/find_min.py) * [Floor](maths/floor.py) * [Gamma](maths/gamma.py) * [Gamma Recursive](maths/gamma_recursive.py) * [Gaussian](maths/gaussian.py) * [Gaussian Error Linear Unit](maths/gaussian_error_linear_unit.py) * [Gcd Of N Numbers](maths/gcd_of_n_numbers.py) * [Greatest Common Divisor](maths/greatest_common_divisor.py) * [Greedy Coin Change](maths/greedy_coin_change.py) * [Hamming Numbers](maths/hamming_numbers.py) * [Hardy Ramanujanalgo](maths/hardy_ramanujanalgo.py) * [Harshad Numbers](maths/harshad_numbers.py) * [Hexagonal Number](maths/hexagonal_number.py) * [Integration By Simpson Approx](maths/integration_by_simpson_approx.py) * [Interquartile Range](maths/interquartile_range.py) * [Is Int Palindrome](maths/is_int_palindrome.py) * [Is Ip V4 Address Valid](maths/is_ip_v4_address_valid.py) * [Is Square Free](maths/is_square_free.py) * [Jaccard Similarity](maths/jaccard_similarity.py) * [Juggler Sequence](maths/juggler_sequence.py) * [Karatsuba](maths/karatsuba.py) * [Krishnamurthy Number](maths/krishnamurthy_number.py) * [Kth Lexicographic Permutation](maths/kth_lexicographic_permutation.py) * [Largest Of Very Large Numbers](maths/largest_of_very_large_numbers.py) * [Least Common Multiple](maths/least_common_multiple.py) * [Line Length](maths/line_length.py) * [Liouville Lambda](maths/liouville_lambda.py) * [Lucas Lehmer Primality Test](maths/lucas_lehmer_primality_test.py) * [Lucas Series](maths/lucas_series.py) * [Maclaurin Series](maths/maclaurin_series.py) * [Manhattan Distance](maths/manhattan_distance.py) * [Matrix Exponentiation](maths/matrix_exponentiation.py) * [Max Sum Sliding Window](maths/max_sum_sliding_window.py) * [Median Of Two Arrays](maths/median_of_two_arrays.py) * [Miller Rabin](maths/miller_rabin.py) * [Mobius Function](maths/mobius_function.py) * [Modular Exponential](maths/modular_exponential.py) * [Monte Carlo](maths/monte_carlo.py) * [Monte Carlo Dice](maths/monte_carlo_dice.py) * [Nevilles Method](maths/nevilles_method.py) * [Newton Raphson](maths/newton_raphson.py) * [Number Of Digits](maths/number_of_digits.py) * [Numerical Integration](maths/numerical_integration.py) * [Odd Sieve](maths/odd_sieve.py) * [Perfect Cube](maths/perfect_cube.py) * [Perfect Number](maths/perfect_number.py) * [Perfect Square](maths/perfect_square.py) * [Persistence](maths/persistence.py) * [Pi Generator](maths/pi_generator.py) * [Pi Monte Carlo 
Estimation](maths/pi_monte_carlo_estimation.py) * [Points Are Collinear 3D](maths/points_are_collinear_3d.py) * [Pollard Rho](maths/pollard_rho.py) * [Polygonal Numbers](maths/polygonal_numbers.py) * [Polynomial Evaluation](maths/polynomial_evaluation.py) * Polynomials * [Single Indeterminate Operations](maths/polynomials/single_indeterminate_operations.py) * [Power Using Recursion](maths/power_using_recursion.py) * [Prime Check](maths/prime_check.py) * [Prime Factors](maths/prime_factors.py) * [Prime Numbers](maths/prime_numbers.py) * [Prime Sieve Eratosthenes](maths/prime_sieve_eratosthenes.py) * [Primelib](maths/primelib.py) * [Print Multiplication Table](maths/print_multiplication_table.py) * [Pronic Number](maths/pronic_number.py) * [Proth Number](maths/proth_number.py) * [Pythagoras](maths/pythagoras.py) * [Qr Decomposition](maths/qr_decomposition.py) * [Quadratic Equations Complex Numbers](maths/quadratic_equations_complex_numbers.py) * [Radians](maths/radians.py) * [Radix2 Fft](maths/radix2_fft.py) * [Relu](maths/relu.py) * [Remove Digit](maths/remove_digit.py) * [Runge Kutta](maths/runge_kutta.py) * [Segmented Sieve](maths/segmented_sieve.py) * Series * [Arithmetic](maths/series/arithmetic.py) * [Geometric](maths/series/geometric.py) * [Geometric Series](maths/series/geometric_series.py) * [Harmonic](maths/series/harmonic.py) * [Harmonic Series](maths/series/harmonic_series.py) * [Hexagonal Numbers](maths/series/hexagonal_numbers.py) * [P Series](maths/series/p_series.py) * [Sieve Of Eratosthenes](maths/sieve_of_eratosthenes.py) * [Sigmoid](maths/sigmoid.py) * [Sigmoid Linear Unit](maths/sigmoid_linear_unit.py) * [Signum](maths/signum.py) * [Simpson Rule](maths/simpson_rule.py) * [Simultaneous Linear Equation Solver](maths/simultaneous_linear_equation_solver.py) * [Sin](maths/sin.py) * [Sock Merchant](maths/sock_merchant.py) * [Softmax](maths/softmax.py) * [Square Root](maths/square_root.py) * [Sum Of Arithmetic Series](maths/sum_of_arithmetic_series.py) * [Sum Of Digits](maths/sum_of_digits.py) * [Sum Of Geometric Progression](maths/sum_of_geometric_progression.py) * [Sum Of Harmonic Series](maths/sum_of_harmonic_series.py) * [Sumset](maths/sumset.py) * [Sylvester Sequence](maths/sylvester_sequence.py) * [Tanh](maths/tanh.py) * [Test Prime Check](maths/test_prime_check.py) * [Trapezoidal Rule](maths/trapezoidal_rule.py) * [Triplet Sum](maths/triplet_sum.py) * [Twin Prime](maths/twin_prime.py) * [Two Pointer](maths/two_pointer.py) * [Two Sum](maths/two_sum.py) * [Ugly Numbers](maths/ugly_numbers.py) * [Volume](maths/volume.py) * [Weird Number](maths/weird_number.py) * [Zellers Congruence](maths/zellers_congruence.py) ## Matrix * [Binary Search Matrix](matrix/binary_search_matrix.py) * [Count Islands In Matrix](matrix/count_islands_in_matrix.py) * [Count Negative Numbers In Sorted Matrix](matrix/count_negative_numbers_in_sorted_matrix.py) * [Count Paths](matrix/count_paths.py) * [Cramers Rule 2X2](matrix/cramers_rule_2x2.py) * [Inverse Of Matrix](matrix/inverse_of_matrix.py) * [Largest Square Area In Matrix](matrix/largest_square_area_in_matrix.py) * [Matrix Class](matrix/matrix_class.py) * [Matrix Operation](matrix/matrix_operation.py) * [Max Area Of Island](matrix/max_area_of_island.py) * [Nth Fibonacci Using Matrix Exponentiation](matrix/nth_fibonacci_using_matrix_exponentiation.py) * [Pascal Triangle](matrix/pascal_triangle.py) * [Rotate Matrix](matrix/rotate_matrix.py) * [Searching In Sorted Matrix](matrix/searching_in_sorted_matrix.py) * [Sherman 
Morrison](matrix/sherman_morrison.py) * [Spiral Print](matrix/spiral_print.py) * Tests * [Test Matrix Operation](matrix/tests/test_matrix_operation.py) ## Networking Flow * [Ford Fulkerson](networking_flow/ford_fulkerson.py) * [Minimum Cut](networking_flow/minimum_cut.py) ## Neural Network * [2 Hidden Layers Neural Network](neural_network/2_hidden_layers_neural_network.py) * Activation Functions * [Exponential Linear Unit](neural_network/activation_functions/exponential_linear_unit.py) * [Leaky Rectified Linear Unit](neural_network/activation_functions/leaky_rectified_linear_unit.py) * [Scaled Exponential Linear Unit](neural_network/activation_functions/scaled_exponential_linear_unit.py) * [Back Propagation Neural Network](neural_network/back_propagation_neural_network.py) * [Convolution Neural Network](neural_network/convolution_neural_network.py) * [Perceptron](neural_network/perceptron.py) * [Simple Neural Network](neural_network/simple_neural_network.py) ## Other * [Activity Selection](other/activity_selection.py) * [Alternative List Arrange](other/alternative_list_arrange.py) * [Davisb Putnamb Logemannb Loveland](other/davisb_putnamb_logemannb_loveland.py) * [Dijkstra Bankers Algorithm](other/dijkstra_bankers_algorithm.py) * [Doomsday](other/doomsday.py) * [Fischer Yates Shuffle](other/fischer_yates_shuffle.py) * [Gauss Easter](other/gauss_easter.py) * [Graham Scan](other/graham_scan.py) * [Greedy](other/greedy.py) * [Guess The Number Search](other/guess_the_number_search.py) * [H Index](other/h_index.py) * [Least Recently Used](other/least_recently_used.py) * [Lfu Cache](other/lfu_cache.py) * [Linear Congruential Generator](other/linear_congruential_generator.py) * [Lru Cache](other/lru_cache.py) * [Magicdiamondpattern](other/magicdiamondpattern.py) * [Maximum Subsequence](other/maximum_subsequence.py) * [Nested Brackets](other/nested_brackets.py) * [Number Container System](other/number_container_system.py) * [Password](other/password.py) * [Quine](other/quine.py) * [Scoring Algorithm](other/scoring_algorithm.py) * [Sdes](other/sdes.py) * [Tower Of Hanoi](other/tower_of_hanoi.py) ## Physics * [Altitude Pressure](physics/altitude_pressure.py) * [Archimedes Principle](physics/archimedes_principle.py) * [Basic Orbital Capture](physics/basic_orbital_capture.py) * [Casimir Effect](physics/casimir_effect.py) * [Centripetal Force](physics/centripetal_force.py) * [Grahams Law](physics/grahams_law.py) * [Horizontal Projectile Motion](physics/horizontal_projectile_motion.py) * [Hubble Parameter](physics/hubble_parameter.py) * [Ideal Gas Law](physics/ideal_gas_law.py) * [Kinetic Energy](physics/kinetic_energy.py) * [Lorentz Transformation Four Vector](physics/lorentz_transformation_four_vector.py) * [Malus Law](physics/malus_law.py) * [N Body Simulation](physics/n_body_simulation.py) * [Newtons Law Of Gravitation](physics/newtons_law_of_gravitation.py) * [Newtons Second Law Of Motion](physics/newtons_second_law_of_motion.py) * [Potential Energy](physics/potential_energy.py) * [Rms Speed Of Molecule](physics/rms_speed_of_molecule.py) * [Shear Stress](physics/shear_stress.py) * [Speed Of Sound](physics/speed_of_sound.py) ## Project Euler * Problem 001 * [Sol1](project_euler/problem_001/sol1.py) * [Sol2](project_euler/problem_001/sol2.py) * [Sol3](project_euler/problem_001/sol3.py) * [Sol4](project_euler/problem_001/sol4.py) * [Sol5](project_euler/problem_001/sol5.py) * [Sol6](project_euler/problem_001/sol6.py) * [Sol7](project_euler/problem_001/sol7.py) * Problem 002 * 
[Sol1](project_euler/problem_002/sol1.py) * [Sol2](project_euler/problem_002/sol2.py) * [Sol3](project_euler/problem_002/sol3.py) * [Sol4](project_euler/problem_002/sol4.py) * [Sol5](project_euler/problem_002/sol5.py) * Problem 003 * [Sol1](project_euler/problem_003/sol1.py) * [Sol2](project_euler/problem_003/sol2.py) * [Sol3](project_euler/problem_003/sol3.py) * Problem 004 * [Sol1](project_euler/problem_004/sol1.py) * [Sol2](project_euler/problem_004/sol2.py) * Problem 005 * [Sol1](project_euler/problem_005/sol1.py) * [Sol2](project_euler/problem_005/sol2.py) * Problem 006 * [Sol1](project_euler/problem_006/sol1.py) * [Sol2](project_euler/problem_006/sol2.py) * [Sol3](project_euler/problem_006/sol3.py) * [Sol4](project_euler/problem_006/sol4.py) * Problem 007 * [Sol1](project_euler/problem_007/sol1.py) * [Sol2](project_euler/problem_007/sol2.py) * [Sol3](project_euler/problem_007/sol3.py) * Problem 008 * [Sol1](project_euler/problem_008/sol1.py) * [Sol2](project_euler/problem_008/sol2.py) * [Sol3](project_euler/problem_008/sol3.py) * Problem 009 * [Sol1](project_euler/problem_009/sol1.py) * [Sol2](project_euler/problem_009/sol2.py) * [Sol3](project_euler/problem_009/sol3.py) * Problem 010 * [Sol1](project_euler/problem_010/sol1.py) * [Sol2](project_euler/problem_010/sol2.py) * [Sol3](project_euler/problem_010/sol3.py) * Problem 011 * [Sol1](project_euler/problem_011/sol1.py) * [Sol2](project_euler/problem_011/sol2.py) * Problem 012 * [Sol1](project_euler/problem_012/sol1.py) * [Sol2](project_euler/problem_012/sol2.py) * Problem 013 * [Sol1](project_euler/problem_013/sol1.py) * Problem 014 * [Sol1](project_euler/problem_014/sol1.py) * [Sol2](project_euler/problem_014/sol2.py) * Problem 015 * [Sol1](project_euler/problem_015/sol1.py) * Problem 016 * [Sol1](project_euler/problem_016/sol1.py) * [Sol2](project_euler/problem_016/sol2.py) * Problem 017 * [Sol1](project_euler/problem_017/sol1.py) * Problem 018 * [Solution](project_euler/problem_018/solution.py) * Problem 019 * [Sol1](project_euler/problem_019/sol1.py) * Problem 020 * [Sol1](project_euler/problem_020/sol1.py) * [Sol2](project_euler/problem_020/sol2.py) * [Sol3](project_euler/problem_020/sol3.py) * [Sol4](project_euler/problem_020/sol4.py) * Problem 021 * [Sol1](project_euler/problem_021/sol1.py) * Problem 022 * [Sol1](project_euler/problem_022/sol1.py) * [Sol2](project_euler/problem_022/sol2.py) * Problem 023 * [Sol1](project_euler/problem_023/sol1.py) * Problem 024 * [Sol1](project_euler/problem_024/sol1.py) * Problem 025 * [Sol1](project_euler/problem_025/sol1.py) * [Sol2](project_euler/problem_025/sol2.py) * [Sol3](project_euler/problem_025/sol3.py) * Problem 026 * [Sol1](project_euler/problem_026/sol1.py) * Problem 027 * [Sol1](project_euler/problem_027/sol1.py) * Problem 028 * [Sol1](project_euler/problem_028/sol1.py) * Problem 029 * [Sol1](project_euler/problem_029/sol1.py) * Problem 030 * [Sol1](project_euler/problem_030/sol1.py) * Problem 031 * [Sol1](project_euler/problem_031/sol1.py) * [Sol2](project_euler/problem_031/sol2.py) * Problem 032 * [Sol32](project_euler/problem_032/sol32.py) * Problem 033 * [Sol1](project_euler/problem_033/sol1.py) * Problem 034 * [Sol1](project_euler/problem_034/sol1.py) * Problem 035 * [Sol1](project_euler/problem_035/sol1.py) * Problem 036 * [Sol1](project_euler/problem_036/sol1.py) * Problem 037 * [Sol1](project_euler/problem_037/sol1.py) * Problem 038 * [Sol1](project_euler/problem_038/sol1.py) * Problem 039 * [Sol1](project_euler/problem_039/sol1.py) * Problem 040 * 
[Sol1](project_euler/problem_040/sol1.py) * Problem 041 * [Sol1](project_euler/problem_041/sol1.py) * Problem 042 * [Solution42](project_euler/problem_042/solution42.py) * Problem 043 * [Sol1](project_euler/problem_043/sol1.py) * Problem 044 * [Sol1](project_euler/problem_044/sol1.py) * Problem 045 * [Sol1](project_euler/problem_045/sol1.py) * Problem 046 * [Sol1](project_euler/problem_046/sol1.py) * Problem 047 * [Sol1](project_euler/problem_047/sol1.py) * Problem 048 * [Sol1](project_euler/problem_048/sol1.py) * Problem 049 * [Sol1](project_euler/problem_049/sol1.py) * Problem 050 * [Sol1](project_euler/problem_050/sol1.py) * Problem 051 * [Sol1](project_euler/problem_051/sol1.py) * Problem 052 * [Sol1](project_euler/problem_052/sol1.py) * Problem 053 * [Sol1](project_euler/problem_053/sol1.py) * Problem 054 * [Sol1](project_euler/problem_054/sol1.py) * [Test Poker Hand](project_euler/problem_054/test_poker_hand.py) * Problem 055 * [Sol1](project_euler/problem_055/sol1.py) * Problem 056 * [Sol1](project_euler/problem_056/sol1.py) * Problem 057 * [Sol1](project_euler/problem_057/sol1.py) * Problem 058 * [Sol1](project_euler/problem_058/sol1.py) * Problem 059 * [Sol1](project_euler/problem_059/sol1.py) * Problem 062 * [Sol1](project_euler/problem_062/sol1.py) * Problem 063 * [Sol1](project_euler/problem_063/sol1.py) * Problem 064 * [Sol1](project_euler/problem_064/sol1.py) * Problem 065 * [Sol1](project_euler/problem_065/sol1.py) * Problem 067 * [Sol1](project_euler/problem_067/sol1.py) * [Sol2](project_euler/problem_067/sol2.py) * Problem 068 * [Sol1](project_euler/problem_068/sol1.py) * Problem 069 * [Sol1](project_euler/problem_069/sol1.py) * Problem 070 * [Sol1](project_euler/problem_070/sol1.py) * Problem 071 * [Sol1](project_euler/problem_071/sol1.py) * Problem 072 * [Sol1](project_euler/problem_072/sol1.py) * [Sol2](project_euler/problem_072/sol2.py) * Problem 073 * [Sol1](project_euler/problem_073/sol1.py) * Problem 074 * [Sol1](project_euler/problem_074/sol1.py) * [Sol2](project_euler/problem_074/sol2.py) * Problem 075 * [Sol1](project_euler/problem_075/sol1.py) * Problem 076 * [Sol1](project_euler/problem_076/sol1.py) * Problem 077 * [Sol1](project_euler/problem_077/sol1.py) * Problem 078 * [Sol1](project_euler/problem_078/sol1.py) * Problem 079 * [Sol1](project_euler/problem_079/sol1.py) * Problem 080 * [Sol1](project_euler/problem_080/sol1.py) * Problem 081 * [Sol1](project_euler/problem_081/sol1.py) * Problem 082 * [Sol1](project_euler/problem_082/sol1.py) * Problem 085 * [Sol1](project_euler/problem_085/sol1.py) * Problem 086 * [Sol1](project_euler/problem_086/sol1.py) * Problem 087 * [Sol1](project_euler/problem_087/sol1.py) * Problem 089 * [Sol1](project_euler/problem_089/sol1.py) * Problem 091 * [Sol1](project_euler/problem_091/sol1.py) * Problem 092 * [Sol1](project_euler/problem_092/sol1.py) * Problem 094 * [Sol1](project_euler/problem_094/sol1.py) * Problem 097 * [Sol1](project_euler/problem_097/sol1.py) * Problem 099 * [Sol1](project_euler/problem_099/sol1.py) * Problem 100 * [Sol1](project_euler/problem_100/sol1.py) * Problem 101 * [Sol1](project_euler/problem_101/sol1.py) * Problem 102 * [Sol1](project_euler/problem_102/sol1.py) * Problem 104 * [Sol1](project_euler/problem_104/sol1.py) * Problem 107 * [Sol1](project_euler/problem_107/sol1.py) * Problem 109 * [Sol1](project_euler/problem_109/sol1.py) * Problem 112 * [Sol1](project_euler/problem_112/sol1.py) * Problem 113 * [Sol1](project_euler/problem_113/sol1.py) * Problem 114 * 
[Sol1](project_euler/problem_114/sol1.py) * Problem 115 * [Sol1](project_euler/problem_115/sol1.py) * Problem 116 * [Sol1](project_euler/problem_116/sol1.py) * Problem 117 * [Sol1](project_euler/problem_117/sol1.py) * Problem 119 * [Sol1](project_euler/problem_119/sol1.py) * Problem 120 * [Sol1](project_euler/problem_120/sol1.py) * Problem 121 * [Sol1](project_euler/problem_121/sol1.py) * Problem 123 * [Sol1](project_euler/problem_123/sol1.py) * Problem 125 * [Sol1](project_euler/problem_125/sol1.py) * Problem 129 * [Sol1](project_euler/problem_129/sol1.py) * Problem 131 * [Sol1](project_euler/problem_131/sol1.py) * Problem 135 * [Sol1](project_euler/problem_135/sol1.py) * Problem 144 * [Sol1](project_euler/problem_144/sol1.py) * Problem 145 * [Sol1](project_euler/problem_145/sol1.py) * Problem 173 * [Sol1](project_euler/problem_173/sol1.py) * Problem 174 * [Sol1](project_euler/problem_174/sol1.py) * Problem 180 * [Sol1](project_euler/problem_180/sol1.py) * Problem 187 * [Sol1](project_euler/problem_187/sol1.py) * Problem 188 * [Sol1](project_euler/problem_188/sol1.py) * Problem 191 * [Sol1](project_euler/problem_191/sol1.py) * Problem 203 * [Sol1](project_euler/problem_203/sol1.py) * Problem 205 * [Sol1](project_euler/problem_205/sol1.py) * Problem 206 * [Sol1](project_euler/problem_206/sol1.py) * Problem 207 * [Sol1](project_euler/problem_207/sol1.py) * Problem 234 * [Sol1](project_euler/problem_234/sol1.py) * Problem 301 * [Sol1](project_euler/problem_301/sol1.py) * Problem 493 * [Sol1](project_euler/problem_493/sol1.py) * Problem 551 * [Sol1](project_euler/problem_551/sol1.py) * Problem 587 * [Sol1](project_euler/problem_587/sol1.py) * Problem 686 * [Sol1](project_euler/problem_686/sol1.py) * Problem 800 * [Sol1](project_euler/problem_800/sol1.py) ## Quantum * [Bb84](quantum/bb84.py) * [Deutsch Jozsa](quantum/deutsch_jozsa.py) * [Half Adder](quantum/half_adder.py) * [Not Gate](quantum/not_gate.py) * [Q Fourier Transform](quantum/q_fourier_transform.py) * [Q Full Adder](quantum/q_full_adder.py) * [Quantum Entanglement](quantum/quantum_entanglement.py) * [Quantum Teleportation](quantum/quantum_teleportation.py) * [Ripple Adder Classic](quantum/ripple_adder_classic.py) * [Single Qubit Measure](quantum/single_qubit_measure.py) * [Superdense Coding](quantum/superdense_coding.py) ## Scheduling * [First Come First Served](scheduling/first_come_first_served.py) * [Highest Response Ratio Next](scheduling/highest_response_ratio_next.py) * [Job Sequencing With Deadline](scheduling/job_sequencing_with_deadline.py) * [Multi Level Feedback Queue](scheduling/multi_level_feedback_queue.py) * [Non Preemptive Shortest Job First](scheduling/non_preemptive_shortest_job_first.py) * [Round Robin](scheduling/round_robin.py) * [Shortest Job First](scheduling/shortest_job_first.py) ## Searches * [Binary Search](searches/binary_search.py) * [Binary Tree Traversal](searches/binary_tree_traversal.py) * [Double Linear Search](searches/double_linear_search.py) * [Double Linear Search Recursion](searches/double_linear_search_recursion.py) * [Fibonacci Search](searches/fibonacci_search.py) * [Hill Climbing](searches/hill_climbing.py) * [Interpolation Search](searches/interpolation_search.py) * [Jump Search](searches/jump_search.py) * [Linear Search](searches/linear_search.py) * [Quick Select](searches/quick_select.py) * [Sentinel Linear Search](searches/sentinel_linear_search.py) * [Simple Binary Search](searches/simple_binary_search.py) * [Simulated Annealing](searches/simulated_annealing.py) * [Tabu 
Search](searches/tabu_search.py) * [Ternary Search](searches/ternary_search.py) ## Sorts * [Bead Sort](sorts/bead_sort.py) * [Binary Insertion Sort](sorts/binary_insertion_sort.py) * [Bitonic Sort](sorts/bitonic_sort.py) * [Bogo Sort](sorts/bogo_sort.py) * [Bubble Sort](sorts/bubble_sort.py) * [Bucket Sort](sorts/bucket_sort.py) * [Circle Sort](sorts/circle_sort.py) * [Cocktail Shaker Sort](sorts/cocktail_shaker_sort.py) * [Comb Sort](sorts/comb_sort.py) * [Counting Sort](sorts/counting_sort.py) * [Cycle Sort](sorts/cycle_sort.py) * [Double Sort](sorts/double_sort.py) * [Dutch National Flag Sort](sorts/dutch_national_flag_sort.py) * [Exchange Sort](sorts/exchange_sort.py) * [External Sort](sorts/external_sort.py) * [Gnome Sort](sorts/gnome_sort.py) * [Heap Sort](sorts/heap_sort.py) * [Insertion Sort](sorts/insertion_sort.py) * [Intro Sort](sorts/intro_sort.py) * [Iterative Merge Sort](sorts/iterative_merge_sort.py) * [Merge Insertion Sort](sorts/merge_insertion_sort.py) * [Merge Sort](sorts/merge_sort.py) * [Msd Radix Sort](sorts/msd_radix_sort.py) * [Natural Sort](sorts/natural_sort.py) * [Odd Even Sort](sorts/odd_even_sort.py) * [Odd Even Transposition Parallel](sorts/odd_even_transposition_parallel.py) * [Odd Even Transposition Single Threaded](sorts/odd_even_transposition_single_threaded.py) * [Pancake Sort](sorts/pancake_sort.py) * [Patience Sort](sorts/patience_sort.py) * [Pigeon Sort](sorts/pigeon_sort.py) * [Pigeonhole Sort](sorts/pigeonhole_sort.py) * [Quick Sort](sorts/quick_sort.py) * [Quick Sort 3 Partition](sorts/quick_sort_3_partition.py) * [Radix Sort](sorts/radix_sort.py) * [Random Normal Distribution Quicksort](sorts/random_normal_distribution_quicksort.py) * [Random Pivot Quick Sort](sorts/random_pivot_quick_sort.py) * [Recursive Bubble Sort](sorts/recursive_bubble_sort.py) * [Recursive Insertion Sort](sorts/recursive_insertion_sort.py) * [Recursive Mergesort Array](sorts/recursive_mergesort_array.py) * [Recursive Quick Sort](sorts/recursive_quick_sort.py) * [Selection Sort](sorts/selection_sort.py) * [Shell Sort](sorts/shell_sort.py) * [Shrink Shell Sort](sorts/shrink_shell_sort.py) * [Slowsort](sorts/slowsort.py) * [Stooge Sort](sorts/stooge_sort.py) * [Strand Sort](sorts/strand_sort.py) * [Tim Sort](sorts/tim_sort.py) * [Topological Sort](sorts/topological_sort.py) * [Tree Sort](sorts/tree_sort.py) * [Unknown Sort](sorts/unknown_sort.py) * [Wiggle Sort](sorts/wiggle_sort.py) ## Strings * [Aho Corasick](strings/aho_corasick.py) * [Alternative String Arrange](strings/alternative_string_arrange.py) * [Anagrams](strings/anagrams.py) * [Autocomplete Using Trie](strings/autocomplete_using_trie.py) * [Barcode Validator](strings/barcode_validator.py) * [Boyer Moore Search](strings/boyer_moore_search.py) * [Can String Be Rearranged As Palindrome](strings/can_string_be_rearranged_as_palindrome.py) * [Capitalize](strings/capitalize.py) * [Check Anagrams](strings/check_anagrams.py) * [Credit Card Validator](strings/credit_card_validator.py) * [Detecting English Programmatically](strings/detecting_english_programmatically.py) * [Dna](strings/dna.py) * [Frequency Finder](strings/frequency_finder.py) * [Hamming Distance](strings/hamming_distance.py) * [Indian Phone Validator](strings/indian_phone_validator.py) * [Is Contains Unique Chars](strings/is_contains_unique_chars.py) * [Is Isogram](strings/is_isogram.py) * [Is Pangram](strings/is_pangram.py) * [Is Spain National Id](strings/is_spain_national_id.py) * [Is Srilankan Phone Number](strings/is_srilankan_phone_number.py) * [Is 
Valid Email Address](strings/is_valid_email_address.py) * [Jaro Winkler](strings/jaro_winkler.py) * [Join](strings/join.py) * [Knuth Morris Pratt](strings/knuth_morris_pratt.py) * [Levenshtein Distance](strings/levenshtein_distance.py) * [Lower](strings/lower.py) * [Manacher](strings/manacher.py) * [Min Cost String Conversion](strings/min_cost_string_conversion.py) * [Naive String Search](strings/naive_string_search.py) * [Ngram](strings/ngram.py) * [Palindrome](strings/palindrome.py) * [Prefix Function](strings/prefix_function.py) * [Rabin Karp](strings/rabin_karp.py) * [Remove Duplicate](strings/remove_duplicate.py) * [Reverse Letters](strings/reverse_letters.py) * [Reverse Long Words](strings/reverse_long_words.py) * [Reverse Words](strings/reverse_words.py) * [Snake Case To Camel Pascal Case](strings/snake_case_to_camel_pascal_case.py) * [Split](strings/split.py) * [String Switch Case](strings/string_switch_case.py) * [Text Justification](strings/text_justification.py) * [Top K Frequent Words](strings/top_k_frequent_words.py) * [Upper](strings/upper.py) * [Wave](strings/wave.py) * [Wildcard Pattern Matching](strings/wildcard_pattern_matching.py) * [Word Occurrence](strings/word_occurrence.py) * [Word Patterns](strings/word_patterns.py) * [Z Function](strings/z_function.py) ## Web Programming * [Co2 Emission](web_programming/co2_emission.py) * [Covid Stats Via Xpath](web_programming/covid_stats_via_xpath.py) * [Crawl Google Results](web_programming/crawl_google_results.py) * [Crawl Google Scholar Citation](web_programming/crawl_google_scholar_citation.py) * [Currency Converter](web_programming/currency_converter.py) * [Current Stock Price](web_programming/current_stock_price.py) * [Current Weather](web_programming/current_weather.py) * [Daily Horoscope](web_programming/daily_horoscope.py) * [Download Images From Google Query](web_programming/download_images_from_google_query.py) * [Emails From Url](web_programming/emails_from_url.py) * [Fetch Anime And Play](web_programming/fetch_anime_and_play.py) * [Fetch Bbc News](web_programming/fetch_bbc_news.py) * [Fetch Github Info](web_programming/fetch_github_info.py) * [Fetch Jobs](web_programming/fetch_jobs.py) * [Fetch Quotes](web_programming/fetch_quotes.py) * [Fetch Well Rx Price](web_programming/fetch_well_rx_price.py) * [Get Amazon Product Data](web_programming/get_amazon_product_data.py) * [Get Imdb Top 250 Movies Csv](web_programming/get_imdb_top_250_movies_csv.py) * [Get Imdbtop](web_programming/get_imdbtop.py) * [Get Top Billionaires](web_programming/get_top_billionaires.py) * [Get Top Hn Posts](web_programming/get_top_hn_posts.py) * [Get User Tweets](web_programming/get_user_tweets.py) * [Giphy](web_programming/giphy.py) * [Instagram Crawler](web_programming/instagram_crawler.py) * [Instagram Pic](web_programming/instagram_pic.py) * [Instagram Video](web_programming/instagram_video.py) * [Nasa Data](web_programming/nasa_data.py) * [Open Google Results](web_programming/open_google_results.py) * [Random Anime Character](web_programming/random_anime_character.py) * [Recaptcha Verification](web_programming/recaptcha_verification.py) * [Reddit](web_programming/reddit.py) * [Search Books By Isbn](web_programming/search_books_by_isbn.py) * [Slack Message](web_programming/slack_message.py) * [Test Fetch Github Info](web_programming/test_fetch_github_info.py) * [World Covid19 Stats](web_programming/world_covid19_stats.py)
## Arithmetic Analysis * [Bisection](arithmetic_analysis/bisection.py) * [Gaussian Elimination](arithmetic_analysis/gaussian_elimination.py) * [In Static Equilibrium](arithmetic_analysis/in_static_equilibrium.py) * [Intersection](arithmetic_analysis/intersection.py) * [Jacobi Iteration Method](arithmetic_analysis/jacobi_iteration_method.py) * [Lu Decomposition](arithmetic_analysis/lu_decomposition.py) * [Newton Forward Interpolation](arithmetic_analysis/newton_forward_interpolation.py) * [Newton Method](arithmetic_analysis/newton_method.py) * [Newton Raphson](arithmetic_analysis/newton_raphson.py) * [Newton Raphson New](arithmetic_analysis/newton_raphson_new.py) * [Secant Method](arithmetic_analysis/secant_method.py) ## Audio Filters * [Butterworth Filter](audio_filters/butterworth_filter.py) * [Iir Filter](audio_filters/iir_filter.py) * [Show Response](audio_filters/show_response.py) ## Backtracking * [All Combinations](backtracking/all_combinations.py) * [All Permutations](backtracking/all_permutations.py) * [All Subsequences](backtracking/all_subsequences.py) * [Coloring](backtracking/coloring.py) * [Combination Sum](backtracking/combination_sum.py) * [Hamiltonian Cycle](backtracking/hamiltonian_cycle.py) * [Knight Tour](backtracking/knight_tour.py) * [Minimax](backtracking/minimax.py) * [Minmax](backtracking/minmax.py) * [N Queens](backtracking/n_queens.py) * [N Queens Math](backtracking/n_queens_math.py) * [Power Sum](backtracking/power_sum.py) * [Rat In Maze](backtracking/rat_in_maze.py) * [Sudoku](backtracking/sudoku.py) * [Sum Of Subsets](backtracking/sum_of_subsets.py) * [Word Search](backtracking/word_search.py) ## Bit Manipulation * [Binary And Operator](bit_manipulation/binary_and_operator.py) * [Binary Count Setbits](bit_manipulation/binary_count_setbits.py) * [Binary Count Trailing Zeros](bit_manipulation/binary_count_trailing_zeros.py) * [Binary Or Operator](bit_manipulation/binary_or_operator.py) * [Binary Shifts](bit_manipulation/binary_shifts.py) * [Binary Twos Complement](bit_manipulation/binary_twos_complement.py) * [Binary Xor Operator](bit_manipulation/binary_xor_operator.py) * [Count 1S Brian Kernighan Method](bit_manipulation/count_1s_brian_kernighan_method.py) * [Count Number Of One Bits](bit_manipulation/count_number_of_one_bits.py) * [Gray Code Sequence](bit_manipulation/gray_code_sequence.py) * [Highest Set Bit](bit_manipulation/highest_set_bit.py) * [Index Of Rightmost Set Bit](bit_manipulation/index_of_rightmost_set_bit.py) * [Is Even](bit_manipulation/is_even.py) * [Is Power Of Two](bit_manipulation/is_power_of_two.py) * [Numbers Different Signs](bit_manipulation/numbers_different_signs.py) * [Reverse Bits](bit_manipulation/reverse_bits.py) * [Single Bit Manipulation Operations](bit_manipulation/single_bit_manipulation_operations.py) ## Blockchain * [Chinese Remainder Theorem](blockchain/chinese_remainder_theorem.py) * [Diophantine Equation](blockchain/diophantine_equation.py) * [Modular Division](blockchain/modular_division.py) ## Boolean Algebra * [And Gate](boolean_algebra/and_gate.py) * [Nand Gate](boolean_algebra/nand_gate.py) * [Nor Gate](boolean_algebra/nor_gate.py) * [Not Gate](boolean_algebra/not_gate.py) * [Or Gate](boolean_algebra/or_gate.py) * [Quine Mc Cluskey](boolean_algebra/quine_mc_cluskey.py) * [Xnor Gate](boolean_algebra/xnor_gate.py) * [Xor Gate](boolean_algebra/xor_gate.py) ## Cellular Automata * [Conways Game Of Life](cellular_automata/conways_game_of_life.py) * [Game Of Life](cellular_automata/game_of_life.py) * [Langtons 
Ant](cellular_automata/langtons_ant.py) * [Nagel Schrekenberg](cellular_automata/nagel_schrekenberg.py) * [One Dimensional](cellular_automata/one_dimensional.py) * [Wa Tor](cellular_automata/wa_tor.py) ## Ciphers * [A1Z26](ciphers/a1z26.py) * [Affine Cipher](ciphers/affine_cipher.py) * [Atbash](ciphers/atbash.py) * [Autokey](ciphers/autokey.py) * [Baconian Cipher](ciphers/baconian_cipher.py) * [Base16](ciphers/base16.py) * [Base32](ciphers/base32.py) * [Base64](ciphers/base64.py) * [Base85](ciphers/base85.py) * [Beaufort Cipher](ciphers/beaufort_cipher.py) * [Bifid](ciphers/bifid.py) * [Brute Force Caesar Cipher](ciphers/brute_force_caesar_cipher.py) * [Caesar Cipher](ciphers/caesar_cipher.py) * [Cryptomath Module](ciphers/cryptomath_module.py) * [Decrypt Caesar With Chi Squared](ciphers/decrypt_caesar_with_chi_squared.py) * [Deterministic Miller Rabin](ciphers/deterministic_miller_rabin.py) * [Diffie](ciphers/diffie.py) * [Diffie Hellman](ciphers/diffie_hellman.py) * [Elgamal Key Generator](ciphers/elgamal_key_generator.py) * [Enigma Machine2](ciphers/enigma_machine2.py) * [Hill Cipher](ciphers/hill_cipher.py) * [Mixed Keyword Cypher](ciphers/mixed_keyword_cypher.py) * [Mono Alphabetic Ciphers](ciphers/mono_alphabetic_ciphers.py) * [Morse Code](ciphers/morse_code.py) * [Onepad Cipher](ciphers/onepad_cipher.py) * [Playfair Cipher](ciphers/playfair_cipher.py) * [Polybius](ciphers/polybius.py) * [Porta Cipher](ciphers/porta_cipher.py) * [Rabin Miller](ciphers/rabin_miller.py) * [Rail Fence Cipher](ciphers/rail_fence_cipher.py) * [Rot13](ciphers/rot13.py) * [Rsa Cipher](ciphers/rsa_cipher.py) * [Rsa Factorization](ciphers/rsa_factorization.py) * [Rsa Key Generator](ciphers/rsa_key_generator.py) * [Shuffled Shift Cipher](ciphers/shuffled_shift_cipher.py) * [Simple Keyword Cypher](ciphers/simple_keyword_cypher.py) * [Simple Substitution Cipher](ciphers/simple_substitution_cipher.py) * [Trafid Cipher](ciphers/trafid_cipher.py) * [Transposition Cipher](ciphers/transposition_cipher.py) * [Transposition Cipher Encrypt Decrypt File](ciphers/transposition_cipher_encrypt_decrypt_file.py) * [Vigenere Cipher](ciphers/vigenere_cipher.py) * [Xor Cipher](ciphers/xor_cipher.py) ## Compression * [Burrows Wheeler](compression/burrows_wheeler.py) * [Huffman](compression/huffman.py) * [Lempel Ziv](compression/lempel_ziv.py) * [Lempel Ziv Decompress](compression/lempel_ziv_decompress.py) * [Lz77](compression/lz77.py) * [Peak Signal To Noise Ratio](compression/peak_signal_to_noise_ratio.py) * [Run Length Encoding](compression/run_length_encoding.py) ## Computer Vision * [Cnn Classification](computer_vision/cnn_classification.py) * [Flip Augmentation](computer_vision/flip_augmentation.py) * [Haralick Descriptors](computer_vision/haralick_descriptors.py) * [Harris Corner](computer_vision/harris_corner.py) * [Horn Schunck](computer_vision/horn_schunck.py) * [Mean Threshold](computer_vision/mean_threshold.py) * [Mosaic Augmentation](computer_vision/mosaic_augmentation.py) * [Pooling Functions](computer_vision/pooling_functions.py) ## Conversions * [Astronomical Length Scale Conversion](conversions/astronomical_length_scale_conversion.py) * [Binary To Decimal](conversions/binary_to_decimal.py) * [Binary To Hexadecimal](conversions/binary_to_hexadecimal.py) * [Binary To Octal](conversions/binary_to_octal.py) * [Convert Number To Words](conversions/convert_number_to_words.py) * [Decimal To Any](conversions/decimal_to_any.py) * [Decimal To Binary](conversions/decimal_to_binary.py) * [Decimal To 
Hexadecimal](conversions/decimal_to_hexadecimal.py) * [Decimal To Octal](conversions/decimal_to_octal.py) * [Energy Conversions](conversions/energy_conversions.py) * [Excel Title To Column](conversions/excel_title_to_column.py) * [Hex To Bin](conversions/hex_to_bin.py) * [Hexadecimal To Decimal](conversions/hexadecimal_to_decimal.py) * [Length Conversion](conversions/length_conversion.py) * [Molecular Chemistry](conversions/molecular_chemistry.py) * [Octal To Binary](conversions/octal_to_binary.py) * [Octal To Decimal](conversions/octal_to_decimal.py) * [Prefix Conversions](conversions/prefix_conversions.py) * [Prefix Conversions String](conversions/prefix_conversions_string.py) * [Pressure Conversions](conversions/pressure_conversions.py) * [Rgb Hsv Conversion](conversions/rgb_hsv_conversion.py) * [Roman Numerals](conversions/roman_numerals.py) * [Speed Conversions](conversions/speed_conversions.py) * [Temperature Conversions](conversions/temperature_conversions.py) * [Volume Conversions](conversions/volume_conversions.py) * [Weight Conversion](conversions/weight_conversion.py) ## Data Structures * Arrays * [Permutations](data_structures/arrays/permutations.py) * [Prefix Sum](data_structures/arrays/prefix_sum.py) * [Product Sum](data_structures/arrays/product_sum.py) * Binary Tree * [Avl Tree](data_structures/binary_tree/avl_tree.py) * [Basic Binary Tree](data_structures/binary_tree/basic_binary_tree.py) * [Binary Search Tree](data_structures/binary_tree/binary_search_tree.py) * [Binary Search Tree Recursive](data_structures/binary_tree/binary_search_tree_recursive.py) * [Binary Tree Mirror](data_structures/binary_tree/binary_tree_mirror.py) * [Binary Tree Node Sum](data_structures/binary_tree/binary_tree_node_sum.py) * [Binary Tree Path Sum](data_structures/binary_tree/binary_tree_path_sum.py) * [Binary Tree Traversals](data_structures/binary_tree/binary_tree_traversals.py) * [Diff Views Of Binary Tree](data_structures/binary_tree/diff_views_of_binary_tree.py) * [Distribute Coins](data_structures/binary_tree/distribute_coins.py) * [Fenwick Tree](data_structures/binary_tree/fenwick_tree.py) * [Inorder Tree Traversal 2022](data_structures/binary_tree/inorder_tree_traversal_2022.py) * [Is Bst](data_structures/binary_tree/is_bst.py) * [Lazy Segment Tree](data_structures/binary_tree/lazy_segment_tree.py) * [Lowest Common Ancestor](data_structures/binary_tree/lowest_common_ancestor.py) * [Maximum Fenwick Tree](data_structures/binary_tree/maximum_fenwick_tree.py) * [Merge Two Binary Trees](data_structures/binary_tree/merge_two_binary_trees.py) * [Non Recursive Segment Tree](data_structures/binary_tree/non_recursive_segment_tree.py) * [Number Of Possible Binary Trees](data_structures/binary_tree/number_of_possible_binary_trees.py) * [Red Black Tree](data_structures/binary_tree/red_black_tree.py) * [Segment Tree](data_structures/binary_tree/segment_tree.py) * [Segment Tree Other](data_structures/binary_tree/segment_tree_other.py) * [Treap](data_structures/binary_tree/treap.py) * [Wavelet Tree](data_structures/binary_tree/wavelet_tree.py) * Disjoint Set * [Alternate Disjoint Set](data_structures/disjoint_set/alternate_disjoint_set.py) * [Disjoint Set](data_structures/disjoint_set/disjoint_set.py) * Hashing * [Bloom Filter](data_structures/hashing/bloom_filter.py) * [Double Hash](data_structures/hashing/double_hash.py) * [Hash Map](data_structures/hashing/hash_map.py) * [Hash Table](data_structures/hashing/hash_table.py) * [Hash Table With Linked 
List](data_structures/hashing/hash_table_with_linked_list.py) * Number Theory * [Prime Numbers](data_structures/hashing/number_theory/prime_numbers.py) * [Quadratic Probing](data_structures/hashing/quadratic_probing.py) * Tests * [Test Hash Map](data_structures/hashing/tests/test_hash_map.py) * Heap * [Binomial Heap](data_structures/heap/binomial_heap.py) * [Heap](data_structures/heap/heap.py) * [Heap Generic](data_structures/heap/heap_generic.py) * [Max Heap](data_structures/heap/max_heap.py) * [Min Heap](data_structures/heap/min_heap.py) * [Randomized Heap](data_structures/heap/randomized_heap.py) * [Skew Heap](data_structures/heap/skew_heap.py) * Linked List * [Circular Linked List](data_structures/linked_list/circular_linked_list.py) * [Deque Doubly](data_structures/linked_list/deque_doubly.py) * [Doubly Linked List](data_structures/linked_list/doubly_linked_list.py) * [Doubly Linked List Two](data_structures/linked_list/doubly_linked_list_two.py) * [From Sequence](data_structures/linked_list/from_sequence.py) * [Has Loop](data_structures/linked_list/has_loop.py) * [Is Palindrome](data_structures/linked_list/is_palindrome.py) * [Merge Two Lists](data_structures/linked_list/merge_two_lists.py) * [Middle Element Of Linked List](data_structures/linked_list/middle_element_of_linked_list.py) * [Print Reverse](data_structures/linked_list/print_reverse.py) * [Singly Linked List](data_structures/linked_list/singly_linked_list.py) * [Skip List](data_structures/linked_list/skip_list.py) * [Swap Nodes](data_structures/linked_list/swap_nodes.py) * Queue * [Circular Queue](data_structures/queue/circular_queue.py) * [Circular Queue Linked List](data_structures/queue/circular_queue_linked_list.py) * [Double Ended Queue](data_structures/queue/double_ended_queue.py) * [Linked Queue](data_structures/queue/linked_queue.py) * [Priority Queue Using List](data_structures/queue/priority_queue_using_list.py) * [Queue By List](data_structures/queue/queue_by_list.py) * [Queue By Two Stacks](data_structures/queue/queue_by_two_stacks.py) * [Queue On Pseudo Stack](data_structures/queue/queue_on_pseudo_stack.py) * Stacks * [Balanced Parentheses](data_structures/stacks/balanced_parentheses.py) * [Dijkstras Two Stack Algorithm](data_structures/stacks/dijkstras_two_stack_algorithm.py) * [Infix To Postfix Conversion](data_structures/stacks/infix_to_postfix_conversion.py) * [Infix To Prefix Conversion](data_structures/stacks/infix_to_prefix_conversion.py) * [Next Greater Element](data_structures/stacks/next_greater_element.py) * [Postfix Evaluation](data_structures/stacks/postfix_evaluation.py) * [Prefix Evaluation](data_structures/stacks/prefix_evaluation.py) * [Stack](data_structures/stacks/stack.py) * [Stack With Doubly Linked List](data_structures/stacks/stack_with_doubly_linked_list.py) * [Stack With Singly Linked List](data_structures/stacks/stack_with_singly_linked_list.py) * [Stock Span Problem](data_structures/stacks/stock_span_problem.py) * Trie * [Radix Tree](data_structures/trie/radix_tree.py) * [Trie](data_structures/trie/trie.py) ## Digital Image Processing * [Change Brightness](digital_image_processing/change_brightness.py) * [Change Contrast](digital_image_processing/change_contrast.py) * [Convert To Negative](digital_image_processing/convert_to_negative.py) * Dithering * [Burkes](digital_image_processing/dithering/burkes.py) * Edge Detection * [Canny](digital_image_processing/edge_detection/canny.py) * Filters * [Bilateral Filter](digital_image_processing/filters/bilateral_filter.py) * 
[Convolve](digital_image_processing/filters/convolve.py) * [Gabor Filter](digital_image_processing/filters/gabor_filter.py) * [Gaussian Filter](digital_image_processing/filters/gaussian_filter.py) * [Local Binary Pattern](digital_image_processing/filters/local_binary_pattern.py) * [Median Filter](digital_image_processing/filters/median_filter.py) * [Sobel Filter](digital_image_processing/filters/sobel_filter.py) * Histogram Equalization * [Histogram Stretch](digital_image_processing/histogram_equalization/histogram_stretch.py) * [Index Calculation](digital_image_processing/index_calculation.py) * Morphological Operations * [Dilation Operation](digital_image_processing/morphological_operations/dilation_operation.py) * [Erosion Operation](digital_image_processing/morphological_operations/erosion_operation.py) * Resize * [Resize](digital_image_processing/resize/resize.py) * Rotation * [Rotation](digital_image_processing/rotation/rotation.py) * [Sepia](digital_image_processing/sepia.py) * [Test Digital Image Processing](digital_image_processing/test_digital_image_processing.py) ## Divide And Conquer * [Closest Pair Of Points](divide_and_conquer/closest_pair_of_points.py) * [Convex Hull](divide_and_conquer/convex_hull.py) * [Heaps Algorithm](divide_and_conquer/heaps_algorithm.py) * [Heaps Algorithm Iterative](divide_and_conquer/heaps_algorithm_iterative.py) * [Inversions](divide_and_conquer/inversions.py) * [Kth Order Statistic](divide_and_conquer/kth_order_statistic.py) * [Max Difference Pair](divide_and_conquer/max_difference_pair.py) * [Max Subarray](divide_and_conquer/max_subarray.py) * [Mergesort](divide_and_conquer/mergesort.py) * [Peak](divide_and_conquer/peak.py) * [Power](divide_and_conquer/power.py) * [Strassen Matrix Multiplication](divide_and_conquer/strassen_matrix_multiplication.py) ## Dynamic Programming * [Abbreviation](dynamic_programming/abbreviation.py) * [All Construct](dynamic_programming/all_construct.py) * [Bitmask](dynamic_programming/bitmask.py) * [Catalan Numbers](dynamic_programming/catalan_numbers.py) * [Climbing Stairs](dynamic_programming/climbing_stairs.py) * [Combination Sum Iv](dynamic_programming/combination_sum_iv.py) * [Edit Distance](dynamic_programming/edit_distance.py) * [Factorial](dynamic_programming/factorial.py) * [Fast Fibonacci](dynamic_programming/fast_fibonacci.py) * [Fibonacci](dynamic_programming/fibonacci.py) * [Fizz Buzz](dynamic_programming/fizz_buzz.py) * [Floyd Warshall](dynamic_programming/floyd_warshall.py) * [Integer Partition](dynamic_programming/integer_partition.py) * [Iterating Through Submasks](dynamic_programming/iterating_through_submasks.py) * [K Means Clustering Tensorflow](dynamic_programming/k_means_clustering_tensorflow.py) * [Knapsack](dynamic_programming/knapsack.py) * [Longest Common Subsequence](dynamic_programming/longest_common_subsequence.py) * [Longest Common Substring](dynamic_programming/longest_common_substring.py) * [Longest Increasing Subsequence](dynamic_programming/longest_increasing_subsequence.py) * [Longest Increasing Subsequence O(Nlogn)](dynamic_programming/longest_increasing_subsequence_o(nlogn).py) * [Longest Sub Array](dynamic_programming/longest_sub_array.py) * [Matrix Chain Order](dynamic_programming/matrix_chain_order.py) * [Max Non Adjacent Sum](dynamic_programming/max_non_adjacent_sum.py) * [Max Product Subarray](dynamic_programming/max_product_subarray.py) * [Max Subarray Sum](dynamic_programming/max_subarray_sum.py) * [Min Distance Up Bottom](dynamic_programming/min_distance_up_bottom.py) * 
[Minimum Coin Change](dynamic_programming/minimum_coin_change.py) * [Minimum Cost Path](dynamic_programming/minimum_cost_path.py) * [Minimum Partition](dynamic_programming/minimum_partition.py) * [Minimum Size Subarray Sum](dynamic_programming/minimum_size_subarray_sum.py) * [Minimum Squares To Represent A Number](dynamic_programming/minimum_squares_to_represent_a_number.py) * [Minimum Steps To One](dynamic_programming/minimum_steps_to_one.py) * [Minimum Tickets Cost](dynamic_programming/minimum_tickets_cost.py) * [Optimal Binary Search Tree](dynamic_programming/optimal_binary_search_tree.py) * [Palindrome Partitioning](dynamic_programming/palindrome_partitioning.py) * [Regex Match](dynamic_programming/regex_match.py) * [Rod Cutting](dynamic_programming/rod_cutting.py) * [Subset Generation](dynamic_programming/subset_generation.py) * [Sum Of Subset](dynamic_programming/sum_of_subset.py) * [Tribonacci](dynamic_programming/tribonacci.py) * [Viterbi](dynamic_programming/viterbi.py) * [Word Break](dynamic_programming/word_break.py) ## Electronics * [Apparent Power](electronics/apparent_power.py) * [Builtin Voltage](electronics/builtin_voltage.py) * [Carrier Concentration](electronics/carrier_concentration.py) * [Circular Convolution](electronics/circular_convolution.py) * [Coulombs Law](electronics/coulombs_law.py) * [Electric Conductivity](electronics/electric_conductivity.py) * [Electric Power](electronics/electric_power.py) * [Electrical Impedance](electronics/electrical_impedance.py) * [Ind Reactance](electronics/ind_reactance.py) * [Ohms Law](electronics/ohms_law.py) * [Real And Reactive Power](electronics/real_and_reactive_power.py) * [Resistor Equivalence](electronics/resistor_equivalence.py) * [Resonant Frequency](electronics/resonant_frequency.py) ## File Transfer * [Receive File](file_transfer/receive_file.py) * [Send File](file_transfer/send_file.py) * Tests * [Test Send File](file_transfer/tests/test_send_file.py) ## Financial * [Equated Monthly Installments](financial/equated_monthly_installments.py) * [Interest](financial/interest.py) * [Present Value](financial/present_value.py) * [Price Plus Tax](financial/price_plus_tax.py) ## Fractals * [Julia Sets](fractals/julia_sets.py) * [Koch Snowflake](fractals/koch_snowflake.py) * [Mandelbrot](fractals/mandelbrot.py) * [Sierpinski Triangle](fractals/sierpinski_triangle.py) ## Fuzzy Logic * [Fuzzy Operations](fuzzy_logic/fuzzy_operations.py) ## Genetic Algorithm * [Basic String](genetic_algorithm/basic_string.py) ## Geodesy * [Haversine Distance](geodesy/haversine_distance.py) * [Lamberts Ellipsoidal Distance](geodesy/lamberts_ellipsoidal_distance.py) ## Graphics * [Bezier Curve](graphics/bezier_curve.py) * [Vector3 For 2D Rendering](graphics/vector3_for_2d_rendering.py) ## Graphs * [A Star](graphs/a_star.py) * [Articulation Points](graphs/articulation_points.py) * [Basic Graphs](graphs/basic_graphs.py) * [Bellman Ford](graphs/bellman_ford.py) * [Bi Directional Dijkstra](graphs/bi_directional_dijkstra.py) * [Bidirectional A Star](graphs/bidirectional_a_star.py) * [Bidirectional Breadth First Search](graphs/bidirectional_breadth_first_search.py) * [Boruvka](graphs/boruvka.py) * [Breadth First Search](graphs/breadth_first_search.py) * [Breadth First Search 2](graphs/breadth_first_search_2.py) * [Breadth First Search Shortest Path](graphs/breadth_first_search_shortest_path.py) * [Breadth First Search Shortest Path 2](graphs/breadth_first_search_shortest_path_2.py) * [Breadth First Search Zero One Shortest 
Path](graphs/breadth_first_search_zero_one_shortest_path.py) * [Check Bipartite Graph Bfs](graphs/check_bipartite_graph_bfs.py) * [Check Bipartite Graph Dfs](graphs/check_bipartite_graph_dfs.py) * [Check Cycle](graphs/check_cycle.py) * [Connected Components](graphs/connected_components.py) * [Depth First Search](graphs/depth_first_search.py) * [Depth First Search 2](graphs/depth_first_search_2.py) * [Dijkstra](graphs/dijkstra.py) * [Dijkstra 2](graphs/dijkstra_2.py) * [Dijkstra Algorithm](graphs/dijkstra_algorithm.py) * [Dijkstra Alternate](graphs/dijkstra_alternate.py) * [Dijkstra Binary Grid](graphs/dijkstra_binary_grid.py) * [Dinic](graphs/dinic.py) * [Directed And Undirected (Weighted) Graph](graphs/directed_and_undirected_(weighted)_graph.py) * [Edmonds Karp Multiple Source And Sink](graphs/edmonds_karp_multiple_source_and_sink.py) * [Eulerian Path And Circuit For Undirected Graph](graphs/eulerian_path_and_circuit_for_undirected_graph.py) * [Even Tree](graphs/even_tree.py) * [Finding Bridges](graphs/finding_bridges.py) * [Frequent Pattern Graph Miner](graphs/frequent_pattern_graph_miner.py) * [G Topological Sort](graphs/g_topological_sort.py) * [Gale Shapley Bigraph](graphs/gale_shapley_bigraph.py) * [Graph Adjacency List](graphs/graph_adjacency_list.py) * [Graph Adjacency Matrix](graphs/graph_adjacency_matrix.py) * [Graph List](graphs/graph_list.py) * [Graphs Floyd Warshall](graphs/graphs_floyd_warshall.py) * [Greedy Best First](graphs/greedy_best_first.py) * [Greedy Min Vertex Cover](graphs/greedy_min_vertex_cover.py) * [Kahns Algorithm Long](graphs/kahns_algorithm_long.py) * [Kahns Algorithm Topo](graphs/kahns_algorithm_topo.py) * [Karger](graphs/karger.py) * [Markov Chain](graphs/markov_chain.py) * [Matching Min Vertex Cover](graphs/matching_min_vertex_cover.py) * [Minimum Path Sum](graphs/minimum_path_sum.py) * [Minimum Spanning Tree Boruvka](graphs/minimum_spanning_tree_boruvka.py) * [Minimum Spanning Tree Kruskal](graphs/minimum_spanning_tree_kruskal.py) * [Minimum Spanning Tree Kruskal2](graphs/minimum_spanning_tree_kruskal2.py) * [Minimum Spanning Tree Prims](graphs/minimum_spanning_tree_prims.py) * [Minimum Spanning Tree Prims2](graphs/minimum_spanning_tree_prims2.py) * [Multi Heuristic Astar](graphs/multi_heuristic_astar.py) * [Page Rank](graphs/page_rank.py) * [Prim](graphs/prim.py) * [Random Graph Generator](graphs/random_graph_generator.py) * [Scc Kosaraju](graphs/scc_kosaraju.py) * [Strongly Connected Components](graphs/strongly_connected_components.py) * [Tarjans Scc](graphs/tarjans_scc.py) * Tests * [Test Min Spanning Tree Kruskal](graphs/tests/test_min_spanning_tree_kruskal.py) * [Test Min Spanning Tree Prim](graphs/tests/test_min_spanning_tree_prim.py) ## Greedy Methods * [Fractional Knapsack](greedy_methods/fractional_knapsack.py) * [Fractional Knapsack 2](greedy_methods/fractional_knapsack_2.py) * [Minimum Waiting Time](greedy_methods/minimum_waiting_time.py) * [Optimal Merge Pattern](greedy_methods/optimal_merge_pattern.py) ## Hashes * [Adler32](hashes/adler32.py) * [Chaos Machine](hashes/chaos_machine.py) * [Djb2](hashes/djb2.py) * [Elf](hashes/elf.py) * [Enigma Machine](hashes/enigma_machine.py) * [Hamming Code](hashes/hamming_code.py) * [Luhn](hashes/luhn.py) * [Md5](hashes/md5.py) * [Sdbm](hashes/sdbm.py) * [Sha1](hashes/sha1.py) * [Sha256](hashes/sha256.py) ## Knapsack * [Greedy Knapsack](knapsack/greedy_knapsack.py) * [Knapsack](knapsack/knapsack.py) * [Recursive Approach Knapsack](knapsack/recursive_approach_knapsack.py) * Tests * [Test Greedy 
Knapsack](knapsack/tests/test_greedy_knapsack.py) * [Test Knapsack](knapsack/tests/test_knapsack.py) ## Linear Algebra * Src * [Conjugate Gradient](linear_algebra/src/conjugate_gradient.py) * [Lib](linear_algebra/src/lib.py) * [Polynom For Points](linear_algebra/src/polynom_for_points.py) * [Power Iteration](linear_algebra/src/power_iteration.py) * [Rank Of Matrix](linear_algebra/src/rank_of_matrix.py) * [Rayleigh Quotient](linear_algebra/src/rayleigh_quotient.py) * [Schur Complement](linear_algebra/src/schur_complement.py) * [Test Linear Algebra](linear_algebra/src/test_linear_algebra.py) * [Transformations 2D](linear_algebra/src/transformations_2d.py) ## Linear Programming * [Simplex](linear_programming/simplex.py) ## Machine Learning * [Astar](machine_learning/astar.py) * [Data Transformations](machine_learning/data_transformations.py) * [Decision Tree](machine_learning/decision_tree.py) * [Dimensionality Reduction](machine_learning/dimensionality_reduction.py) * Forecasting * [Run](machine_learning/forecasting/run.py) * [Gradient Descent](machine_learning/gradient_descent.py) * [K Means Clust](machine_learning/k_means_clust.py) * [K Nearest Neighbours](machine_learning/k_nearest_neighbours.py) * [Knn Sklearn](machine_learning/knn_sklearn.py) * [Linear Discriminant Analysis](machine_learning/linear_discriminant_analysis.py) * [Linear Regression](machine_learning/linear_regression.py) * Local Weighted Learning * [Local Weighted Learning](machine_learning/local_weighted_learning/local_weighted_learning.py) * [Logistic Regression](machine_learning/logistic_regression.py) * Lstm * [Lstm Prediction](machine_learning/lstm/lstm_prediction.py) * [Multilayer Perceptron Classifier](machine_learning/multilayer_perceptron_classifier.py) * [Polynomial Regression](machine_learning/polynomial_regression.py) * [Scoring Functions](machine_learning/scoring_functions.py) * [Self Organizing Map](machine_learning/self_organizing_map.py) * [Sequential Minimum Optimization](machine_learning/sequential_minimum_optimization.py) * [Similarity Search](machine_learning/similarity_search.py) * [Support Vector Machines](machine_learning/support_vector_machines.py) * [Word Frequency Functions](machine_learning/word_frequency_functions.py) * [Xgboost Classifier](machine_learning/xgboost_classifier.py) * [Xgboost Regressor](machine_learning/xgboost_regressor.py) ## Maths * [Abs](maths/abs.py) * [Add](maths/add.py) * [Addition Without Arithmetic](maths/addition_without_arithmetic.py) * [Aliquot Sum](maths/aliquot_sum.py) * [Allocation Number](maths/allocation_number.py) * [Arc Length](maths/arc_length.py) * [Area](maths/area.py) * [Area Under Curve](maths/area_under_curve.py) * [Armstrong Numbers](maths/armstrong_numbers.py) * [Automorphic Number](maths/automorphic_number.py) * [Average Absolute Deviation](maths/average_absolute_deviation.py) * [Average Mean](maths/average_mean.py) * [Average Median](maths/average_median.py) * [Average Mode](maths/average_mode.py) * [Bailey Borwein Plouffe](maths/bailey_borwein_plouffe.py) * [Basic Maths](maths/basic_maths.py) * [Binary Exp Mod](maths/binary_exp_mod.py) * [Binary Exponentiation](maths/binary_exponentiation.py) * [Binary Exponentiation 2](maths/binary_exponentiation_2.py) * [Binary Exponentiation 3](maths/binary_exponentiation_3.py) * [Binomial Coefficient](maths/binomial_coefficient.py) * [Binomial Distribution](maths/binomial_distribution.py) * [Bisection](maths/bisection.py) * [Carmichael Number](maths/carmichael_number.py) * [Catalan Number](maths/catalan_number.py) 
* [Ceil](maths/ceil.py) * [Check Polygon](maths/check_polygon.py) * [Chudnovsky Algorithm](maths/chudnovsky_algorithm.py) * [Collatz Sequence](maths/collatz_sequence.py) * [Combinations](maths/combinations.py) * [Continued Fraction](maths/continued_fraction.py) * [Decimal Isolate](maths/decimal_isolate.py) * [Decimal To Fraction](maths/decimal_to_fraction.py) * [Dodecahedron](maths/dodecahedron.py) * [Double Factorial Iterative](maths/double_factorial_iterative.py) * [Double Factorial Recursive](maths/double_factorial_recursive.py) * [Dual Number Automatic Differentiation](maths/dual_number_automatic_differentiation.py) * [Entropy](maths/entropy.py) * [Euclidean Distance](maths/euclidean_distance.py) * [Euclidean Gcd](maths/euclidean_gcd.py) * [Euler Method](maths/euler_method.py) * [Euler Modified](maths/euler_modified.py) * [Eulers Totient](maths/eulers_totient.py) * [Extended Euclidean Algorithm](maths/extended_euclidean_algorithm.py) * [Factorial](maths/factorial.py) * [Factors](maths/factors.py) * [Fermat Little Theorem](maths/fermat_little_theorem.py) * [Fibonacci](maths/fibonacci.py) * [Find Max](maths/find_max.py) * [Find Min](maths/find_min.py) * [Floor](maths/floor.py) * [Gamma](maths/gamma.py) * [Gamma Recursive](maths/gamma_recursive.py) * [Gaussian](maths/gaussian.py) * [Gaussian Error Linear Unit](maths/gaussian_error_linear_unit.py) * [Gcd Of N Numbers](maths/gcd_of_n_numbers.py) * [Greatest Common Divisor](maths/greatest_common_divisor.py) * [Greedy Coin Change](maths/greedy_coin_change.py) * [Hamming Numbers](maths/hamming_numbers.py) * [Hardy Ramanujanalgo](maths/hardy_ramanujanalgo.py) * [Harshad Numbers](maths/harshad_numbers.py) * [Hexagonal Number](maths/hexagonal_number.py) * [Integration By Simpson Approx](maths/integration_by_simpson_approx.py) * [Interquartile Range](maths/interquartile_range.py) * [Is Int Palindrome](maths/is_int_palindrome.py) * [Is Ip V4 Address Valid](maths/is_ip_v4_address_valid.py) * [Is Square Free](maths/is_square_free.py) * [Jaccard Similarity](maths/jaccard_similarity.py) * [Juggler Sequence](maths/juggler_sequence.py) * [Karatsuba](maths/karatsuba.py) * [Krishnamurthy Number](maths/krishnamurthy_number.py) * [Kth Lexicographic Permutation](maths/kth_lexicographic_permutation.py) * [Largest Of Very Large Numbers](maths/largest_of_very_large_numbers.py) * [Least Common Multiple](maths/least_common_multiple.py) * [Line Length](maths/line_length.py) * [Liouville Lambda](maths/liouville_lambda.py) * [Lucas Lehmer Primality Test](maths/lucas_lehmer_primality_test.py) * [Lucas Series](maths/lucas_series.py) * [Maclaurin Series](maths/maclaurin_series.py) * [Manhattan Distance](maths/manhattan_distance.py) * [Matrix Exponentiation](maths/matrix_exponentiation.py) * [Max Sum Sliding Window](maths/max_sum_sliding_window.py) * [Median Of Two Arrays](maths/median_of_two_arrays.py) * [Miller Rabin](maths/miller_rabin.py) * [Mobius Function](maths/mobius_function.py) * [Modular Exponential](maths/modular_exponential.py) * [Monte Carlo](maths/monte_carlo.py) * [Monte Carlo Dice](maths/monte_carlo_dice.py) * [Nevilles Method](maths/nevilles_method.py) * [Newton Raphson](maths/newton_raphson.py) * [Number Of Digits](maths/number_of_digits.py) * [Numerical Integration](maths/numerical_integration.py) * [Odd Sieve](maths/odd_sieve.py) * [Perfect Cube](maths/perfect_cube.py) * [Perfect Number](maths/perfect_number.py) * [Perfect Square](maths/perfect_square.py) * [Persistence](maths/persistence.py) * [Pi Generator](maths/pi_generator.py) * [Pi Monte Carlo 
Estimation](maths/pi_monte_carlo_estimation.py) * [Points Are Collinear 3D](maths/points_are_collinear_3d.py) * [Pollard Rho](maths/pollard_rho.py) * [Polygonal Numbers](maths/polygonal_numbers.py) * [Polynomial Evaluation](maths/polynomial_evaluation.py) * Polynomials * [Single Indeterminate Operations](maths/polynomials/single_indeterminate_operations.py) * [Power Using Recursion](maths/power_using_recursion.py) * [Prime Check](maths/prime_check.py) * [Prime Factors](maths/prime_factors.py) * [Prime Numbers](maths/prime_numbers.py) * [Prime Sieve Eratosthenes](maths/prime_sieve_eratosthenes.py) * [Primelib](maths/primelib.py) * [Print Multiplication Table](maths/print_multiplication_table.py) * [Pronic Number](maths/pronic_number.py) * [Proth Number](maths/proth_number.py) * [Pythagoras](maths/pythagoras.py) * [Qr Decomposition](maths/qr_decomposition.py) * [Quadratic Equations Complex Numbers](maths/quadratic_equations_complex_numbers.py) * [Radians](maths/radians.py) * [Radix2 Fft](maths/radix2_fft.py) * [Relu](maths/relu.py) * [Remove Digit](maths/remove_digit.py) * [Runge Kutta](maths/runge_kutta.py) * [Segmented Sieve](maths/segmented_sieve.py) * Series * [Arithmetic](maths/series/arithmetic.py) * [Geometric](maths/series/geometric.py) * [Geometric Series](maths/series/geometric_series.py) * [Harmonic](maths/series/harmonic.py) * [Harmonic Series](maths/series/harmonic_series.py) * [Hexagonal Numbers](maths/series/hexagonal_numbers.py) * [P Series](maths/series/p_series.py) * [Sieve Of Eratosthenes](maths/sieve_of_eratosthenes.py) * [Sigmoid](maths/sigmoid.py) * [Sigmoid Linear Unit](maths/sigmoid_linear_unit.py) * [Signum](maths/signum.py) * [Simpson Rule](maths/simpson_rule.py) * [Simultaneous Linear Equation Solver](maths/simultaneous_linear_equation_solver.py) * [Sin](maths/sin.py) * [Sock Merchant](maths/sock_merchant.py) * [Softmax](maths/softmax.py) * [Square Root](maths/square_root.py) * [Sum Of Arithmetic Series](maths/sum_of_arithmetic_series.py) * [Sum Of Digits](maths/sum_of_digits.py) * [Sum Of Geometric Progression](maths/sum_of_geometric_progression.py) * [Sum Of Harmonic Series](maths/sum_of_harmonic_series.py) * [Sumset](maths/sumset.py) * [Sylvester Sequence](maths/sylvester_sequence.py) * [Tanh](maths/tanh.py) * [Test Prime Check](maths/test_prime_check.py) * [Trapezoidal Rule](maths/trapezoidal_rule.py) * [Triplet Sum](maths/triplet_sum.py) * [Twin Prime](maths/twin_prime.py) * [Two Pointer](maths/two_pointer.py) * [Two Sum](maths/two_sum.py) * [Ugly Numbers](maths/ugly_numbers.py) * [Volume](maths/volume.py) * [Weird Number](maths/weird_number.py) * [Zellers Congruence](maths/zellers_congruence.py) ## Matrix * [Binary Search Matrix](matrix/binary_search_matrix.py) * [Count Islands In Matrix](matrix/count_islands_in_matrix.py) * [Count Negative Numbers In Sorted Matrix](matrix/count_negative_numbers_in_sorted_matrix.py) * [Count Paths](matrix/count_paths.py) * [Cramers Rule 2X2](matrix/cramers_rule_2x2.py) * [Inverse Of Matrix](matrix/inverse_of_matrix.py) * [Largest Square Area In Matrix](matrix/largest_square_area_in_matrix.py) * [Matrix Class](matrix/matrix_class.py) * [Matrix Operation](matrix/matrix_operation.py) * [Max Area Of Island](matrix/max_area_of_island.py) * [Nth Fibonacci Using Matrix Exponentiation](matrix/nth_fibonacci_using_matrix_exponentiation.py) * [Pascal Triangle](matrix/pascal_triangle.py) * [Rotate Matrix](matrix/rotate_matrix.py) * [Searching In Sorted Matrix](matrix/searching_in_sorted_matrix.py) * [Sherman 
Morrison](matrix/sherman_morrison.py) * [Spiral Print](matrix/spiral_print.py) * Tests * [Test Matrix Operation](matrix/tests/test_matrix_operation.py) ## Networking Flow * [Ford Fulkerson](networking_flow/ford_fulkerson.py) * [Minimum Cut](networking_flow/minimum_cut.py) ## Neural Network * [2 Hidden Layers Neural Network](neural_network/2_hidden_layers_neural_network.py) * Activation Functions * [Exponential Linear Unit](neural_network/activation_functions/exponential_linear_unit.py) * [Leaky Rectified Linear Unit](neural_network/activation_functions/leaky_rectified_linear_unit.py) * [Scaled Exponential Linear Unit](neural_network/activation_functions/scaled_exponential_linear_unit.py) * [Back Propagation Neural Network](neural_network/back_propagation_neural_network.py) * [Convolution Neural Network](neural_network/convolution_neural_network.py) * [Perceptron](neural_network/perceptron.py) * [Simple Neural Network](neural_network/simple_neural_network.py) ## Other * [Activity Selection](other/activity_selection.py) * [Alternative List Arrange](other/alternative_list_arrange.py) * [Davisb Putnamb Logemannb Loveland](other/davisb_putnamb_logemannb_loveland.py) * [Dijkstra Bankers Algorithm](other/dijkstra_bankers_algorithm.py) * [Doomsday](other/doomsday.py) * [Fischer Yates Shuffle](other/fischer_yates_shuffle.py) * [Gauss Easter](other/gauss_easter.py) * [Graham Scan](other/graham_scan.py) * [Greedy](other/greedy.py) * [Guess The Number Search](other/guess_the_number_search.py) * [H Index](other/h_index.py) * [Least Recently Used](other/least_recently_used.py) * [Lfu Cache](other/lfu_cache.py) * [Linear Congruential Generator](other/linear_congruential_generator.py) * [Lru Cache](other/lru_cache.py) * [Magicdiamondpattern](other/magicdiamondpattern.py) * [Maximum Subsequence](other/maximum_subsequence.py) * [Nested Brackets](other/nested_brackets.py) * [Number Container System](other/number_container_system.py) * [Password](other/password.py) * [Quine](other/quine.py) * [Scoring Algorithm](other/scoring_algorithm.py) * [Sdes](other/sdes.py) * [Tower Of Hanoi](other/tower_of_hanoi.py) ## Physics * [Altitude Pressure](physics/altitude_pressure.py) * [Archimedes Principle](physics/archimedes_principle.py) * [Basic Orbital Capture](physics/basic_orbital_capture.py) * [Casimir Effect](physics/casimir_effect.py) * [Centripetal Force](physics/centripetal_force.py) * [Grahams Law](physics/grahams_law.py) * [Horizontal Projectile Motion](physics/horizontal_projectile_motion.py) * [Hubble Parameter](physics/hubble_parameter.py) * [Ideal Gas Law](physics/ideal_gas_law.py) * [Kinetic Energy](physics/kinetic_energy.py) * [Lorentz Transformation Four Vector](physics/lorentz_transformation_four_vector.py) * [Malus Law](physics/malus_law.py) * [N Body Simulation](physics/n_body_simulation.py) * [Newtons Law Of Gravitation](physics/newtons_law_of_gravitation.py) * [Newtons Second Law Of Motion](physics/newtons_second_law_of_motion.py) * [Potential Energy](physics/potential_energy.py) * [Rms Speed Of Molecule](physics/rms_speed_of_molecule.py) * [Shear Stress](physics/shear_stress.py) * [Speed Of Sound](physics/speed_of_sound.py) ## Project Euler * Problem 001 * [Sol1](project_euler/problem_001/sol1.py) * [Sol2](project_euler/problem_001/sol2.py) * [Sol3](project_euler/problem_001/sol3.py) * [Sol4](project_euler/problem_001/sol4.py) * [Sol5](project_euler/problem_001/sol5.py) * [Sol6](project_euler/problem_001/sol6.py) * [Sol7](project_euler/problem_001/sol7.py) * Problem 002 * 
[Sol1](project_euler/problem_002/sol1.py) * [Sol2](project_euler/problem_002/sol2.py) * [Sol3](project_euler/problem_002/sol3.py) * [Sol4](project_euler/problem_002/sol4.py) * [Sol5](project_euler/problem_002/sol5.py) * Problem 003 * [Sol1](project_euler/problem_003/sol1.py) * [Sol2](project_euler/problem_003/sol2.py) * [Sol3](project_euler/problem_003/sol3.py) * Problem 004 * [Sol1](project_euler/problem_004/sol1.py) * [Sol2](project_euler/problem_004/sol2.py) * Problem 005 * [Sol1](project_euler/problem_005/sol1.py) * [Sol2](project_euler/problem_005/sol2.py) * Problem 006 * [Sol1](project_euler/problem_006/sol1.py) * [Sol2](project_euler/problem_006/sol2.py) * [Sol3](project_euler/problem_006/sol3.py) * [Sol4](project_euler/problem_006/sol4.py) * Problem 007 * [Sol1](project_euler/problem_007/sol1.py) * [Sol2](project_euler/problem_007/sol2.py) * [Sol3](project_euler/problem_007/sol3.py) * Problem 008 * [Sol1](project_euler/problem_008/sol1.py) * [Sol2](project_euler/problem_008/sol2.py) * [Sol3](project_euler/problem_008/sol3.py) * Problem 009 * [Sol1](project_euler/problem_009/sol1.py) * [Sol2](project_euler/problem_009/sol2.py) * [Sol3](project_euler/problem_009/sol3.py) * Problem 010 * [Sol1](project_euler/problem_010/sol1.py) * [Sol2](project_euler/problem_010/sol2.py) * [Sol3](project_euler/problem_010/sol3.py) * Problem 011 * [Sol1](project_euler/problem_011/sol1.py) * [Sol2](project_euler/problem_011/sol2.py) * Problem 012 * [Sol1](project_euler/problem_012/sol1.py) * [Sol2](project_euler/problem_012/sol2.py) * Problem 013 * [Sol1](project_euler/problem_013/sol1.py) * Problem 014 * [Sol1](project_euler/problem_014/sol1.py) * [Sol2](project_euler/problem_014/sol2.py) * Problem 015 * [Sol1](project_euler/problem_015/sol1.py) * Problem 016 * [Sol1](project_euler/problem_016/sol1.py) * [Sol2](project_euler/problem_016/sol2.py) * Problem 017 * [Sol1](project_euler/problem_017/sol1.py) * Problem 018 * [Solution](project_euler/problem_018/solution.py) * Problem 019 * [Sol1](project_euler/problem_019/sol1.py) * Problem 020 * [Sol1](project_euler/problem_020/sol1.py) * [Sol2](project_euler/problem_020/sol2.py) * [Sol3](project_euler/problem_020/sol3.py) * [Sol4](project_euler/problem_020/sol4.py) * Problem 021 * [Sol1](project_euler/problem_021/sol1.py) * Problem 022 * [Sol1](project_euler/problem_022/sol1.py) * [Sol2](project_euler/problem_022/sol2.py) * Problem 023 * [Sol1](project_euler/problem_023/sol1.py) * Problem 024 * [Sol1](project_euler/problem_024/sol1.py) * Problem 025 * [Sol1](project_euler/problem_025/sol1.py) * [Sol2](project_euler/problem_025/sol2.py) * [Sol3](project_euler/problem_025/sol3.py) * Problem 026 * [Sol1](project_euler/problem_026/sol1.py) * Problem 027 * [Sol1](project_euler/problem_027/sol1.py) * Problem 028 * [Sol1](project_euler/problem_028/sol1.py) * Problem 029 * [Sol1](project_euler/problem_029/sol1.py) * Problem 030 * [Sol1](project_euler/problem_030/sol1.py) * Problem 031 * [Sol1](project_euler/problem_031/sol1.py) * [Sol2](project_euler/problem_031/sol2.py) * Problem 032 * [Sol32](project_euler/problem_032/sol32.py) * Problem 033 * [Sol1](project_euler/problem_033/sol1.py) * Problem 034 * [Sol1](project_euler/problem_034/sol1.py) * Problem 035 * [Sol1](project_euler/problem_035/sol1.py) * Problem 036 * [Sol1](project_euler/problem_036/sol1.py) * Problem 037 * [Sol1](project_euler/problem_037/sol1.py) * Problem 038 * [Sol1](project_euler/problem_038/sol1.py) * Problem 039 * [Sol1](project_euler/problem_039/sol1.py) * Problem 040 * 
[Sol1](project_euler/problem_040/sol1.py) * Problem 041 * [Sol1](project_euler/problem_041/sol1.py) * Problem 042 * [Solution42](project_euler/problem_042/solution42.py) * Problem 043 * [Sol1](project_euler/problem_043/sol1.py) * Problem 044 * [Sol1](project_euler/problem_044/sol1.py) * Problem 045 * [Sol1](project_euler/problem_045/sol1.py) * Problem 046 * [Sol1](project_euler/problem_046/sol1.py) * Problem 047 * [Sol1](project_euler/problem_047/sol1.py) * Problem 048 * [Sol1](project_euler/problem_048/sol1.py) * Problem 049 * [Sol1](project_euler/problem_049/sol1.py) * Problem 050 * [Sol1](project_euler/problem_050/sol1.py) * Problem 051 * [Sol1](project_euler/problem_051/sol1.py) * Problem 052 * [Sol1](project_euler/problem_052/sol1.py) * Problem 053 * [Sol1](project_euler/problem_053/sol1.py) * Problem 054 * [Sol1](project_euler/problem_054/sol1.py) * [Test Poker Hand](project_euler/problem_054/test_poker_hand.py) * Problem 055 * [Sol1](project_euler/problem_055/sol1.py) * Problem 056 * [Sol1](project_euler/problem_056/sol1.py) * Problem 057 * [Sol1](project_euler/problem_057/sol1.py) * Problem 058 * [Sol1](project_euler/problem_058/sol1.py) * Problem 059 * [Sol1](project_euler/problem_059/sol1.py) * Problem 062 * [Sol1](project_euler/problem_062/sol1.py) * Problem 063 * [Sol1](project_euler/problem_063/sol1.py) * Problem 064 * [Sol1](project_euler/problem_064/sol1.py) * Problem 065 * [Sol1](project_euler/problem_065/sol1.py) * Problem 067 * [Sol1](project_euler/problem_067/sol1.py) * [Sol2](project_euler/problem_067/sol2.py) * Problem 068 * [Sol1](project_euler/problem_068/sol1.py) * Problem 069 * [Sol1](project_euler/problem_069/sol1.py) * Problem 070 * [Sol1](project_euler/problem_070/sol1.py) * Problem 071 * [Sol1](project_euler/problem_071/sol1.py) * Problem 072 * [Sol1](project_euler/problem_072/sol1.py) * [Sol2](project_euler/problem_072/sol2.py) * Problem 073 * [Sol1](project_euler/problem_073/sol1.py) * Problem 074 * [Sol1](project_euler/problem_074/sol1.py) * [Sol2](project_euler/problem_074/sol2.py) * Problem 075 * [Sol1](project_euler/problem_075/sol1.py) * Problem 076 * [Sol1](project_euler/problem_076/sol1.py) * Problem 077 * [Sol1](project_euler/problem_077/sol1.py) * Problem 078 * [Sol1](project_euler/problem_078/sol1.py) * Problem 079 * [Sol1](project_euler/problem_079/sol1.py) * Problem 080 * [Sol1](project_euler/problem_080/sol1.py) * Problem 081 * [Sol1](project_euler/problem_081/sol1.py) * Problem 082 * [Sol1](project_euler/problem_082/sol1.py) * Problem 085 * [Sol1](project_euler/problem_085/sol1.py) * Problem 086 * [Sol1](project_euler/problem_086/sol1.py) * Problem 087 * [Sol1](project_euler/problem_087/sol1.py) * Problem 089 * [Sol1](project_euler/problem_089/sol1.py) * Problem 091 * [Sol1](project_euler/problem_091/sol1.py) * Problem 092 * [Sol1](project_euler/problem_092/sol1.py) * Problem 094 * [Sol1](project_euler/problem_094/sol1.py) * Problem 097 * [Sol1](project_euler/problem_097/sol1.py) * Problem 099 * [Sol1](project_euler/problem_099/sol1.py) * Problem 100 * [Sol1](project_euler/problem_100/sol1.py) * Problem 101 * [Sol1](project_euler/problem_101/sol1.py) * Problem 102 * [Sol1](project_euler/problem_102/sol1.py) * Problem 104 * [Sol1](project_euler/problem_104/sol1.py) * Problem 107 * [Sol1](project_euler/problem_107/sol1.py) * Problem 109 * [Sol1](project_euler/problem_109/sol1.py) * Problem 112 * [Sol1](project_euler/problem_112/sol1.py) * Problem 113 * [Sol1](project_euler/problem_113/sol1.py) * Problem 114 * 
[Sol1](project_euler/problem_114/sol1.py) * Problem 115 * [Sol1](project_euler/problem_115/sol1.py) * Problem 116 * [Sol1](project_euler/problem_116/sol1.py) * Problem 117 * [Sol1](project_euler/problem_117/sol1.py) * Problem 119 * [Sol1](project_euler/problem_119/sol1.py) * Problem 120 * [Sol1](project_euler/problem_120/sol1.py) * Problem 121 * [Sol1](project_euler/problem_121/sol1.py) * Problem 123 * [Sol1](project_euler/problem_123/sol1.py) * Problem 125 * [Sol1](project_euler/problem_125/sol1.py) * Problem 129 * [Sol1](project_euler/problem_129/sol1.py) * Problem 131 * [Sol1](project_euler/problem_131/sol1.py) * Problem 135 * [Sol1](project_euler/problem_135/sol1.py) * Problem 144 * [Sol1](project_euler/problem_144/sol1.py) * Problem 145 * [Sol1](project_euler/problem_145/sol1.py) * Problem 173 * [Sol1](project_euler/problem_173/sol1.py) * Problem 174 * [Sol1](project_euler/problem_174/sol1.py) * Problem 180 * [Sol1](project_euler/problem_180/sol1.py) * Problem 187 * [Sol1](project_euler/problem_187/sol1.py) * Problem 188 * [Sol1](project_euler/problem_188/sol1.py) * Problem 191 * [Sol1](project_euler/problem_191/sol1.py) * Problem 203 * [Sol1](project_euler/problem_203/sol1.py) * Problem 205 * [Sol1](project_euler/problem_205/sol1.py) * Problem 206 * [Sol1](project_euler/problem_206/sol1.py) * Problem 207 * [Sol1](project_euler/problem_207/sol1.py) * Problem 234 * [Sol1](project_euler/problem_234/sol1.py) * Problem 301 * [Sol1](project_euler/problem_301/sol1.py) * Problem 493 * [Sol1](project_euler/problem_493/sol1.py) * Problem 551 * [Sol1](project_euler/problem_551/sol1.py) * Problem 587 * [Sol1](project_euler/problem_587/sol1.py) * Problem 686 * [Sol1](project_euler/problem_686/sol1.py) * Problem 800 * [Sol1](project_euler/problem_800/sol1.py) ## Quantum * [Bb84](quantum/bb84.py) * [Deutsch Jozsa](quantum/deutsch_jozsa.py) * [Half Adder](quantum/half_adder.py) * [Not Gate](quantum/not_gate.py) * [Q Fourier Transform](quantum/q_fourier_transform.py) * [Q Full Adder](quantum/q_full_adder.py) * [Quantum Entanglement](quantum/quantum_entanglement.py) * [Quantum Teleportation](quantum/quantum_teleportation.py) * [Ripple Adder Classic](quantum/ripple_adder_classic.py) * [Single Qubit Measure](quantum/single_qubit_measure.py) * [Superdense Coding](quantum/superdense_coding.py) ## Scheduling * [First Come First Served](scheduling/first_come_first_served.py) * [Highest Response Ratio Next](scheduling/highest_response_ratio_next.py) * [Job Sequencing With Deadline](scheduling/job_sequencing_with_deadline.py) * [Multi Level Feedback Queue](scheduling/multi_level_feedback_queue.py) * [Non Preemptive Shortest Job First](scheduling/non_preemptive_shortest_job_first.py) * [Round Robin](scheduling/round_robin.py) * [Shortest Job First](scheduling/shortest_job_first.py) ## Searches * [Binary Search](searches/binary_search.py) * [Binary Tree Traversal](searches/binary_tree_traversal.py) * [Double Linear Search](searches/double_linear_search.py) * [Double Linear Search Recursion](searches/double_linear_search_recursion.py) * [Fibonacci Search](searches/fibonacci_search.py) * [Hill Climbing](searches/hill_climbing.py) * [Interpolation Search](searches/interpolation_search.py) * [Jump Search](searches/jump_search.py) * [Linear Search](searches/linear_search.py) * [Quick Select](searches/quick_select.py) * [Sentinel Linear Search](searches/sentinel_linear_search.py) * [Simple Binary Search](searches/simple_binary_search.py) * [Simulated Annealing](searches/simulated_annealing.py) * [Tabu 
Search](searches/tabu_search.py) * [Ternary Search](searches/ternary_search.py) ## Sorts * [Bead Sort](sorts/bead_sort.py) * [Binary Insertion Sort](sorts/binary_insertion_sort.py) * [Bitonic Sort](sorts/bitonic_sort.py) * [Bogo Sort](sorts/bogo_sort.py) * [Bubble Sort](sorts/bubble_sort.py) * [Bucket Sort](sorts/bucket_sort.py) * [Circle Sort](sorts/circle_sort.py) * [Cocktail Shaker Sort](sorts/cocktail_shaker_sort.py) * [Comb Sort](sorts/comb_sort.py) * [Counting Sort](sorts/counting_sort.py) * [Cycle Sort](sorts/cycle_sort.py) * [Double Sort](sorts/double_sort.py) * [Dutch National Flag Sort](sorts/dutch_national_flag_sort.py) * [Exchange Sort](sorts/exchange_sort.py) * [External Sort](sorts/external_sort.py) * [Gnome Sort](sorts/gnome_sort.py) * [Heap Sort](sorts/heap_sort.py) * [Insertion Sort](sorts/insertion_sort.py) * [Intro Sort](sorts/intro_sort.py) * [Iterative Merge Sort](sorts/iterative_merge_sort.py) * [Merge Insertion Sort](sorts/merge_insertion_sort.py) * [Merge Sort](sorts/merge_sort.py) * [Msd Radix Sort](sorts/msd_radix_sort.py) * [Natural Sort](sorts/natural_sort.py) * [Odd Even Sort](sorts/odd_even_sort.py) * [Odd Even Transposition Parallel](sorts/odd_even_transposition_parallel.py) * [Odd Even Transposition Single Threaded](sorts/odd_even_transposition_single_threaded.py) * [Pancake Sort](sorts/pancake_sort.py) * [Patience Sort](sorts/patience_sort.py) * [Pigeon Sort](sorts/pigeon_sort.py) * [Pigeonhole Sort](sorts/pigeonhole_sort.py) * [Quick Sort](sorts/quick_sort.py) * [Quick Sort 3 Partition](sorts/quick_sort_3_partition.py) * [Radix Sort](sorts/radix_sort.py) * [Random Normal Distribution Quicksort](sorts/random_normal_distribution_quicksort.py) * [Random Pivot Quick Sort](sorts/random_pivot_quick_sort.py) * [Recursive Bubble Sort](sorts/recursive_bubble_sort.py) * [Recursive Insertion Sort](sorts/recursive_insertion_sort.py) * [Recursive Mergesort Array](sorts/recursive_mergesort_array.py) * [Recursive Quick Sort](sorts/recursive_quick_sort.py) * [Selection Sort](sorts/selection_sort.py) * [Shell Sort](sorts/shell_sort.py) * [Shrink Shell Sort](sorts/shrink_shell_sort.py) * [Slowsort](sorts/slowsort.py) * [Stooge Sort](sorts/stooge_sort.py) * [Strand Sort](sorts/strand_sort.py) * [Tim Sort](sorts/tim_sort.py) * [Topological Sort](sorts/topological_sort.py) * [Tree Sort](sorts/tree_sort.py) * [Unknown Sort](sorts/unknown_sort.py) * [Wiggle Sort](sorts/wiggle_sort.py) ## Strings * [Aho Corasick](strings/aho_corasick.py) * [Alternative String Arrange](strings/alternative_string_arrange.py) * [Anagrams](strings/anagrams.py) * [Autocomplete Using Trie](strings/autocomplete_using_trie.py) * [Barcode Validator](strings/barcode_validator.py) * [Boyer Moore Search](strings/boyer_moore_search.py) * [Can String Be Rearranged As Palindrome](strings/can_string_be_rearranged_as_palindrome.py) * [Capitalize](strings/capitalize.py) * [Check Anagrams](strings/check_anagrams.py) * [Credit Card Validator](strings/credit_card_validator.py) * [Detecting English Programmatically](strings/detecting_english_programmatically.py) * [Dna](strings/dna.py) * [Frequency Finder](strings/frequency_finder.py) * [Hamming Distance](strings/hamming_distance.py) * [Indian Phone Validator](strings/indian_phone_validator.py) * [Is Contains Unique Chars](strings/is_contains_unique_chars.py) * [Is Isogram](strings/is_isogram.py) * [Is Pangram](strings/is_pangram.py) * [Is Spain National Id](strings/is_spain_national_id.py) * [Is Srilankan Phone Number](strings/is_srilankan_phone_number.py) * [Is 
Valid Email Address](strings/is_valid_email_address.py) * [Jaro Winkler](strings/jaro_winkler.py) * [Join](strings/join.py) * [Knuth Morris Pratt](strings/knuth_morris_pratt.py) * [Levenshtein Distance](strings/levenshtein_distance.py) * [Lower](strings/lower.py) * [Manacher](strings/manacher.py) * [Min Cost String Conversion](strings/min_cost_string_conversion.py) * [Naive String Search](strings/naive_string_search.py) * [Ngram](strings/ngram.py) * [Palindrome](strings/palindrome.py) * [Prefix Function](strings/prefix_function.py) * [Rabin Karp](strings/rabin_karp.py) * [Remove Duplicate](strings/remove_duplicate.py) * [Reverse Letters](strings/reverse_letters.py) * [Reverse Long Words](strings/reverse_long_words.py) * [Reverse Words](strings/reverse_words.py) * [Snake Case To Camel Pascal Case](strings/snake_case_to_camel_pascal_case.py) * [Split](strings/split.py) * [String Switch Case](strings/string_switch_case.py) * [Text Justification](strings/text_justification.py) * [Top K Frequent Words](strings/top_k_frequent_words.py) * [Upper](strings/upper.py) * [Wave](strings/wave.py) * [Wildcard Pattern Matching](strings/wildcard_pattern_matching.py) * [Word Occurrence](strings/word_occurrence.py) * [Word Patterns](strings/word_patterns.py) * [Z Function](strings/z_function.py) ## Web Programming * [Co2 Emission](web_programming/co2_emission.py) * [Covid Stats Via Xpath](web_programming/covid_stats_via_xpath.py) * [Crawl Google Results](web_programming/crawl_google_results.py) * [Crawl Google Scholar Citation](web_programming/crawl_google_scholar_citation.py) * [Currency Converter](web_programming/currency_converter.py) * [Current Stock Price](web_programming/current_stock_price.py) * [Current Weather](web_programming/current_weather.py) * [Daily Horoscope](web_programming/daily_horoscope.py) * [Download Images From Google Query](web_programming/download_images_from_google_query.py) * [Emails From Url](web_programming/emails_from_url.py) * [Fetch Anime And Play](web_programming/fetch_anime_and_play.py) * [Fetch Bbc News](web_programming/fetch_bbc_news.py) * [Fetch Github Info](web_programming/fetch_github_info.py) * [Fetch Jobs](web_programming/fetch_jobs.py) * [Fetch Quotes](web_programming/fetch_quotes.py) * [Fetch Well Rx Price](web_programming/fetch_well_rx_price.py) * [Get Amazon Product Data](web_programming/get_amazon_product_data.py) * [Get Imdb Top 250 Movies Csv](web_programming/get_imdb_top_250_movies_csv.py) * [Get Imdbtop](web_programming/get_imdbtop.py) * [Get Top Billionaires](web_programming/get_top_billionaires.py) * [Get Top Hn Posts](web_programming/get_top_hn_posts.py) * [Get User Tweets](web_programming/get_user_tweets.py) * [Giphy](web_programming/giphy.py) * [Instagram Crawler](web_programming/instagram_crawler.py) * [Instagram Pic](web_programming/instagram_pic.py) * [Instagram Video](web_programming/instagram_video.py) * [Nasa Data](web_programming/nasa_data.py) * [Open Google Results](web_programming/open_google_results.py) * [Random Anime Character](web_programming/random_anime_character.py) * [Recaptcha Verification](web_programming/recaptcha_verification.py) * [Reddit](web_programming/reddit.py) * [Search Books By Isbn](web_programming/search_books_by_isbn.py) * [Slack Message](web_programming/slack_message.py) * [Test Fetch Github Info](web_programming/test_fetch_github_info.py) * [World Covid19 Stats](web_programming/world_covid19_stats.py)
1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" https://en.wikipedia.org/wiki/Image_texture https://en.wikipedia.org/wiki/Co-occurrence_matrix#Application_to_image_analysis """ import imageio.v2 as imageio import numpy as np def root_mean_square_error(original: np.ndarray, reference: np.ndarray) -> float: """Simple implementation of Root Mean Squared Error for two N dimensional numpy arrays. Examples: >>> root_mean_square_error(np.array([1, 2, 3]), np.array([1, 2, 3])) 0.0 >>> root_mean_square_error(np.array([1, 2, 3]), np.array([2, 2, 2])) 0.816496580927726 >>> root_mean_square_error(np.array([1, 2, 3]), np.array([6, 4, 2])) 3.1622776601683795 """ return np.sqrt(((original - reference) ** 2).mean()) def normalize_image( image: np.ndarray, cap: float = 255.0, data_type: np.dtype = np.uint8 ) -> np.ndarray: """ Normalizes image in Numpy 2D array format, between ranges 0-cap, as to fit uint8 type. Args: image: 2D numpy array representing image as matrix, with values in any range cap: Maximum cap amount for normalization data_type: numpy data type to set output variable to Returns: return 2D numpy array of type uint8, corresponding to limited range matrix Examples: >>> normalize_image(np.array([[1, 2, 3], [4, 5, 10]]), ... cap=1.0, data_type=np.float64) array([[0. , 0.11111111, 0.22222222], [0.33333333, 0.44444444, 1. ]]) >>> normalize_image(np.array([[4, 4, 3], [1, 7, 2]])) array([[127, 127, 85], [ 0, 255, 42]], dtype=uint8) """ normalized = (image - np.min(image)) / (np.max(image) - np.min(image)) * cap return normalized.astype(data_type) def normalize_array(array: np.ndarray, cap: float = 1) -> np.ndarray: """Normalizes a 1D array, between ranges 0-cap. Args: array: List containing values to be normalized between cap range. cap: Maximum cap amount for normalization. Returns: return 1D numpy array, corresponding to limited range array Examples: >>> normalize_array(np.array([2, 3, 5, 7])) array([0. , 0.2, 0.6, 1. ]) >>> normalize_array(np.array([[5], [7], [11], [13]])) array([[0. ], [0.25], [0.75], [1. ]]) """ diff = np.max(array) - np.min(array) return (array - np.min(array)) / (1 if diff == 0 else diff) * cap def grayscale(image: np.ndarray) -> np.ndarray: """ Uses luminance weights to transform RGB channel to greyscale, by taking the dot product between the channel and the weights. Example: >>> grayscale(np.array([[[108, 201, 72], [255, 11, 127]], ... [[56, 56, 56], [128, 255, 107]]])) array([[158, 97], [ 56, 200]], dtype=uint8) """ return np.dot(image[:, :, 0:3], [0.299, 0.587, 0.114]).astype(np.uint8) def binarize(image: np.ndarray, threshold: float = 127.0) -> np.ndarray: """ Binarizes a grayscale image based on a given threshold value, setting values to 1 or 0 accordingly. Examples: >>> binarize(np.array([[128, 255], [101, 156]])) array([[1, 1], [0, 1]]) >>> binarize(np.array([[0.07, 1], [0.51, 0.3]]), threshold=0.5) array([[0, 1], [1, 0]]) """ return np.where(image > threshold, 1, 0) def transform(image: np.ndarray, kind: str, kernel: np.ndarray = None) -> np.ndarray: """ Simple image transformation using one of two available filter functions: Erosion and Dilation. Args: image: binarized input image, onto which to apply transformation kind: Can be either 'erosion', in which case the :func:np.max function is called, or 'dilation', when :func:np.min is used instead. kernel: n x n kernel with shape < :attr:image.shape, to be used when applying convolution to original image Returns: returns a numpy array with same shape as input image, corresponding to applied binary transformation. 
Examples: >>> img = np.array([[1, 0.5], [0.2, 0.7]]) >>> img = binarize(img, threshold=0.5) >>> transform(img, 'erosion') array([[1, 1], [1, 1]], dtype=uint8) >>> transform(img, 'dilation') array([[0, 0], [0, 0]], dtype=uint8) """ if kernel is None: kernel = np.ones((3, 3)) if kind == "erosion": constant = 1 apply = np.max else: constant = 0 apply = np.min center_x, center_y = (x // 2 for x in kernel.shape) # Use padded image when applying convolotion # to not go out of bounds of the original the image transformed = np.zeros(image.shape, dtype=np.uint8) padded = np.pad(image, 1, "constant", constant_values=constant) for x in range(center_x, padded.shape[0] - center_x): for y in range(center_y, padded.shape[1] - center_y): center = padded[ x - center_x : x + center_x + 1, y - center_y : y + center_y + 1 ] # Apply transformation method to the centered section of the image transformed[x - center_x, y - center_y] = apply(center[kernel == 1]) return transformed def opening_filter(image: np.ndarray, kernel: np.ndarray = None) -> np.ndarray: """ Opening filter, defined as the sequence of erosion and then a dilation filter on the same image. Examples: >>> img = np.array([[1, 0.5], [0.2, 0.7]]) >>> img = binarize(img, threshold=0.5) >>> opening_filter(img) array([[1, 1], [1, 1]], dtype=uint8) """ if kernel is None: np.ones((3, 3)) return transform(transform(image, "dilation", kernel), "erosion", kernel) def closing_filter(image: np.ndarray, kernel: np.ndarray = None) -> np.ndarray: """ Opening filter, defined as the sequence of dilation and then erosion filter on the same image. Examples: >>> img = np.array([[1, 0.5], [0.2, 0.7]]) >>> img = binarize(img, threshold=0.5) >>> closing_filter(img) array([[0, 0], [0, 0]], dtype=uint8) """ if kernel is None: kernel = np.ones((3, 3)) return transform(transform(image, "erosion", kernel), "dilation", kernel) def binary_mask( image_gray: np.ndarray, image_map: np.ndarray ) -> tuple[np.ndarray, np.ndarray]: """ Apply binary mask, or thresholding based on bit mask value (mapping mask is binary). Returns the mapped true value mask and its complementary false value mask. Example: >>> img = np.array([[[108, 201, 72], [255, 11, 127]], ... [[56, 56, 56], [128, 255, 107]]]) >>> gray = grayscale(img) >>> binary = binarize(gray) >>> morphological = opening_filter(binary) >>> binary_mask(gray, morphological) (array([[1, 1], [1, 1]], dtype=uint8), array([[158, 97], [ 56, 200]], dtype=uint8)) """ true_mask, false_mask = image_gray.copy(), image_gray.copy() true_mask[image_map == 1] = 1 false_mask[image_map == 0] = 0 return true_mask, false_mask def matrix_concurrency(image: np.ndarray, coordinate: tuple[int, int]) -> np.ndarray: """ Calculate sample co-occurrence matrix based on input image as well as selected coordinates on image. Implementation is made using basic iteration, as function to be performed (np.max) is non-linear and therefore not callable on the frequency domain. Example: >>> img = np.array([[[108, 201, 72], [255, 11, 127]], ... 
[[56, 56, 56], [128, 255, 107]]]) >>> gray = grayscale(img) >>> binary = binarize(gray) >>> morphological = opening_filter(binary) >>> mask_1 = binary_mask(gray, morphological)[0] >>> matrix_concurrency(mask_1, (0, 1)) array([[0., 0.], [0., 0.]]) """ matrix = np.zeros([np.max(image) + 1, np.max(image) + 1]) offset_x, offset_y = coordinate for x in range(1, image.shape[0] - 1): for y in range(1, image.shape[1] - 1): base_pixel = image[x, y] offset_pixel = image[x + offset_x, y + offset_y] matrix[base_pixel, offset_pixel] += 1 matrix_sum = np.sum(matrix) return matrix / (1 if matrix_sum == 0 else matrix_sum) def haralick_descriptors(matrix: np.ndarray) -> list[float]: """Calculates all 8 Haralick descriptors based on co-occurence input matrix. All descriptors are as follows: Maximum probability, Inverse Difference, Homogeneity, Entropy, Energy, Dissimilarity, Contrast and Correlation Args: matrix: Co-occurence matrix to use as base for calculating descriptors. Returns: Reverse ordered list of resulting descriptors Example: >>> img = np.array([[[108, 201, 72], [255, 11, 127]], ... [[56, 56, 56], [128, 255, 107]]]) >>> gray = grayscale(img) >>> binary = binarize(gray) >>> morphological = opening_filter(binary) >>> mask_1 = binary_mask(gray, morphological)[0] >>> concurrency = matrix_concurrency(mask_1, (0, 1)) >>> haralick_descriptors(concurrency) [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] """ # Function np.indices could be used for bigger input types, # but np.ogrid works just fine i, j = np.ogrid[0 : matrix.shape[0], 0 : matrix.shape[1]] # np.indices() # Pre-calculate frequent multiplication and subtraction prod = np.multiply(i, j) sub = np.subtract(i, j) # Calculate numerical value of Maximum Probability maximum_prob = np.max(matrix) # Using the definition for each descriptor individually to calculate its matrix correlation = prod * matrix energy = np.power(matrix, 2) contrast = matrix * np.power(sub, 2) dissimilarity = matrix * np.abs(sub) inverse_difference = matrix / (1 + np.abs(sub)) homogeneity = matrix / (1 + np.power(sub, 2)) entropy = -(matrix[matrix > 0] * np.log(matrix[matrix > 0])) # Sum values for descriptors ranging from the first one to the last, # as all are their respective origin matrix and not the resulting value yet. return [ maximum_prob, correlation.sum(), energy.sum(), contrast.sum(), dissimilarity.sum(), inverse_difference.sum(), homogeneity.sum(), entropy.sum(), ] def get_descriptors( masks: tuple[np.ndarray, np.ndarray], coordinate: tuple[int, int] ) -> np.ndarray: """ Calculate all Haralick descriptors for a sequence of different co-occurrence matrices, given input masks and coordinates. Example: >>> img = np.array([[[108, 201, 72], [255, 11, 127]], ... [[56, 56, 56], [128, 255, 107]]]) >>> gray = grayscale(img) >>> binary = binarize(gray) >>> morphological = opening_filter(binary) >>> get_descriptors(binary_mask(gray, morphological), (0, 1)) array([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.]) """ descriptors = np.array( [haralick_descriptors(matrix_concurrency(mask, coordinate)) for mask in masks] ) # Concatenate each individual descriptor into # one single list containing sequence of descriptors return np.concatenate(descriptors, axis=None) def euclidean(point_1: np.ndarray, point_2: np.ndarray) -> np.float32: """ Simple method for calculating the euclidean distance between two points, with type np.ndarray. 
Example: >>> a = np.array([1, 0, -2]) >>> b = np.array([2, -1, 1]) >>> euclidean(a, b) 3.3166247903554 """ return np.sqrt(np.sum(np.square(point_1 - point_2))) def get_distances(descriptors: np.ndarray, base: int) -> list[tuple[int, float]]: """ Calculate all Euclidean distances between a selected base descriptor and all other Haralick descriptors The resulting comparison is return in decreasing order, showing which descriptor is the most similar to the selected base. Args: descriptors: Haralick descriptors to compare with base index base: Haralick descriptor index to use as base when calculating respective euclidean distance to other descriptors. Returns: Ordered distances between descriptors Example: >>> index = 1 >>> img = np.array([[[108, 201, 72], [255, 11, 127]], ... [[56, 56, 56], [128, 255, 107]]]) >>> gray = grayscale(img) >>> binary = binarize(gray) >>> morphological = opening_filter(binary) >>> get_distances(get_descriptors( ... binary_mask(gray, morphological), (0, 1)), ... index) [(0, 0.0), (1, 0.0), (2, 0.0), (3, 0.0), (4, 0.0), (5, 0.0), \ (6, 0.0), (7, 0.0), (8, 0.0), (9, 0.0), (10, 0.0), (11, 0.0), (12, 0.0), \ (13, 0.0), (14, 0.0), (15, 0.0)] """ distances = np.array( [euclidean(descriptor, descriptors[base]) for descriptor in descriptors] ) # Normalize distances between range [0, 1] normalized_distances: list[float] = normalize_array(distances, 1).tolist() enum_distances = list(enumerate(normalized_distances)) enum_distances.sort(key=lambda tup: tup[1], reverse=True) return enum_distances if __name__ == "__main__": # Index to compare haralick descriptors to index = int(input()) q_value_list = [int(value) for value in input().split()] q_value = (q_value_list[0], q_value_list[1]) # Format is the respective filter to apply, # can be either 1 for the opening filter or else for the closing parameters = {"format": int(input()), "threshold": int(input())} # Number of images to perform methods on b_number = int(input()) files, descriptors = [], [] for _ in range(b_number): file = input().rstrip() files.append(file) # Open given image and calculate morphological filter, # respective masks and correspondent Harralick Descriptors. image = imageio.imread(file).astype(np.float32) gray = grayscale(image) threshold = binarize(gray, parameters["threshold"]) morphological = ( opening_filter(threshold) if parameters["format"] == 1 else closing_filter(threshold) ) masks = binary_mask(gray, morphological) descriptors.append(get_descriptors(masks, q_value)) # Transform ordered distances array into a sequence of indexes # corresponding to original file position distances = get_distances(np.array(descriptors), index) indexed_distances = np.array(distances).astype(np.uint8)[:, 0] # Finally, print distances considering the Haralick descriptions from the base # file to all other images using the morphology method of choice. print(f"Query: {files[index]}") print("Ranking:") for idx, file_idx in enumerate(indexed_distances): print(f"({idx}) {files[file_idx]}", end="\n")
""" https://en.wikipedia.org/wiki/Image_texture https://en.wikipedia.org/wiki/Co-occurrence_matrix#Application_to_image_analysis """ import imageio.v2 as imageio import numpy as np def root_mean_square_error(original: np.ndarray, reference: np.ndarray) -> float: """Simple implementation of Root Mean Squared Error for two N dimensional numpy arrays. Examples: >>> root_mean_square_error(np.array([1, 2, 3]), np.array([1, 2, 3])) 0.0 >>> root_mean_square_error(np.array([1, 2, 3]), np.array([2, 2, 2])) 0.816496580927726 >>> root_mean_square_error(np.array([1, 2, 3]), np.array([6, 4, 2])) 3.1622776601683795 """ return np.sqrt(((original - reference) ** 2).mean()) def normalize_image( image: np.ndarray, cap: float = 255.0, data_type: np.dtype = np.uint8 ) -> np.ndarray: """ Normalizes image in Numpy 2D array format, between ranges 0-cap, as to fit uint8 type. Args: image: 2D numpy array representing image as matrix, with values in any range cap: Maximum cap amount for normalization data_type: numpy data type to set output variable to Returns: return 2D numpy array of type uint8, corresponding to limited range matrix Examples: >>> normalize_image(np.array([[1, 2, 3], [4, 5, 10]]), ... cap=1.0, data_type=np.float64) array([[0. , 0.11111111, 0.22222222], [0.33333333, 0.44444444, 1. ]]) >>> normalize_image(np.array([[4, 4, 3], [1, 7, 2]])) array([[127, 127, 85], [ 0, 255, 42]], dtype=uint8) """ normalized = (image - np.min(image)) / (np.max(image) - np.min(image)) * cap return normalized.astype(data_type) def normalize_array(array: np.ndarray, cap: float = 1) -> np.ndarray: """Normalizes a 1D array, between ranges 0-cap. Args: array: List containing values to be normalized between cap range. cap: Maximum cap amount for normalization. Returns: return 1D numpy array, corresponding to limited range array Examples: >>> normalize_array(np.array([2, 3, 5, 7])) array([0. , 0.2, 0.6, 1. ]) >>> normalize_array(np.array([[5], [7], [11], [13]])) array([[0. ], [0.25], [0.75], [1. ]]) """ diff = np.max(array) - np.min(array) return (array - np.min(array)) / (1 if diff == 0 else diff) * cap def grayscale(image: np.ndarray) -> np.ndarray: """ Uses luminance weights to transform RGB channel to greyscale, by taking the dot product between the channel and the weights. Example: >>> grayscale(np.array([[[108, 201, 72], [255, 11, 127]], ... [[56, 56, 56], [128, 255, 107]]])) array([[158, 97], [ 56, 200]], dtype=uint8) """ return np.dot(image[:, :, 0:3], [0.299, 0.587, 0.114]).astype(np.uint8) def binarize(image: np.ndarray, threshold: float = 127.0) -> np.ndarray: """ Binarizes a grayscale image based on a given threshold value, setting values to 1 or 0 accordingly. Examples: >>> binarize(np.array([[128, 255], [101, 156]])) array([[1, 1], [0, 1]]) >>> binarize(np.array([[0.07, 1], [0.51, 0.3]]), threshold=0.5) array([[0, 1], [1, 0]]) """ return np.where(image > threshold, 1, 0) def transform( image: np.ndarray, kind: str, kernel: np.ndarray | None = None ) -> np.ndarray: """ Simple image transformation using one of two available filter functions: Erosion and Dilation. Args: image: binarized input image, onto which to apply transformation kind: Can be either 'erosion', in which case the :func:np.max function is called, or 'dilation', when :func:np.min is used instead. kernel: n x n kernel with shape < :attr:image.shape, to be used when applying convolution to original image Returns: returns a numpy array with same shape as input image, corresponding to applied binary transformation. 
Examples: >>> img = np.array([[1, 0.5], [0.2, 0.7]]) >>> img = binarize(img, threshold=0.5) >>> transform(img, 'erosion') array([[1, 1], [1, 1]], dtype=uint8) >>> transform(img, 'dilation') array([[0, 0], [0, 0]], dtype=uint8) """ if kernel is None: kernel = np.ones((3, 3)) if kind == "erosion": constant = 1 apply = np.max else: constant = 0 apply = np.min center_x, center_y = (x // 2 for x in kernel.shape) # Use padded image when applying convolotion # to not go out of bounds of the original the image transformed = np.zeros(image.shape, dtype=np.uint8) padded = np.pad(image, 1, "constant", constant_values=constant) for x in range(center_x, padded.shape[0] - center_x): for y in range(center_y, padded.shape[1] - center_y): center = padded[ x - center_x : x + center_x + 1, y - center_y : y + center_y + 1 ] # Apply transformation method to the centered section of the image transformed[x - center_x, y - center_y] = apply(center[kernel == 1]) return transformed def opening_filter(image: np.ndarray, kernel: np.ndarray | None = None) -> np.ndarray: """ Opening filter, defined as the sequence of erosion and then a dilation filter on the same image. Examples: >>> img = np.array([[1, 0.5], [0.2, 0.7]]) >>> img = binarize(img, threshold=0.5) >>> opening_filter(img) array([[1, 1], [1, 1]], dtype=uint8) """ if kernel is None: np.ones((3, 3)) return transform(transform(image, "dilation", kernel), "erosion", kernel) def closing_filter(image: np.ndarray, kernel: np.ndarray | None = None) -> np.ndarray: """ Opening filter, defined as the sequence of dilation and then erosion filter on the same image. Examples: >>> img = np.array([[1, 0.5], [0.2, 0.7]]) >>> img = binarize(img, threshold=0.5) >>> closing_filter(img) array([[0, 0], [0, 0]], dtype=uint8) """ if kernel is None: kernel = np.ones((3, 3)) return transform(transform(image, "erosion", kernel), "dilation", kernel) def binary_mask( image_gray: np.ndarray, image_map: np.ndarray ) -> tuple[np.ndarray, np.ndarray]: """ Apply binary mask, or thresholding based on bit mask value (mapping mask is binary). Returns the mapped true value mask and its complementary false value mask. Example: >>> img = np.array([[[108, 201, 72], [255, 11, 127]], ... [[56, 56, 56], [128, 255, 107]]]) >>> gray = grayscale(img) >>> binary = binarize(gray) >>> morphological = opening_filter(binary) >>> binary_mask(gray, morphological) (array([[1, 1], [1, 1]], dtype=uint8), array([[158, 97], [ 56, 200]], dtype=uint8)) """ true_mask, false_mask = image_gray.copy(), image_gray.copy() true_mask[image_map == 1] = 1 false_mask[image_map == 0] = 0 return true_mask, false_mask def matrix_concurrency(image: np.ndarray, coordinate: tuple[int, int]) -> np.ndarray: """ Calculate sample co-occurrence matrix based on input image as well as selected coordinates on image. Implementation is made using basic iteration, as function to be performed (np.max) is non-linear and therefore not callable on the frequency domain. Example: >>> img = np.array([[[108, 201, 72], [255, 11, 127]], ... 
[[56, 56, 56], [128, 255, 107]]]) >>> gray = grayscale(img) >>> binary = binarize(gray) >>> morphological = opening_filter(binary) >>> mask_1 = binary_mask(gray, morphological)[0] >>> matrix_concurrency(mask_1, (0, 1)) array([[0., 0.], [0., 0.]]) """ matrix = np.zeros([np.max(image) + 1, np.max(image) + 1]) offset_x, offset_y = coordinate for x in range(1, image.shape[0] - 1): for y in range(1, image.shape[1] - 1): base_pixel = image[x, y] offset_pixel = image[x + offset_x, y + offset_y] matrix[base_pixel, offset_pixel] += 1 matrix_sum = np.sum(matrix) return matrix / (1 if matrix_sum == 0 else matrix_sum) def haralick_descriptors(matrix: np.ndarray) -> list[float]: """Calculates all 8 Haralick descriptors based on co-occurence input matrix. All descriptors are as follows: Maximum probability, Inverse Difference, Homogeneity, Entropy, Energy, Dissimilarity, Contrast and Correlation Args: matrix: Co-occurence matrix to use as base for calculating descriptors. Returns: Reverse ordered list of resulting descriptors Example: >>> img = np.array([[[108, 201, 72], [255, 11, 127]], ... [[56, 56, 56], [128, 255, 107]]]) >>> gray = grayscale(img) >>> binary = binarize(gray) >>> morphological = opening_filter(binary) >>> mask_1 = binary_mask(gray, morphological)[0] >>> concurrency = matrix_concurrency(mask_1, (0, 1)) >>> haralick_descriptors(concurrency) [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] """ # Function np.indices could be used for bigger input types, # but np.ogrid works just fine i, j = np.ogrid[0 : matrix.shape[0], 0 : matrix.shape[1]] # np.indices() # Pre-calculate frequent multiplication and subtraction prod = np.multiply(i, j) sub = np.subtract(i, j) # Calculate numerical value of Maximum Probability maximum_prob = np.max(matrix) # Using the definition for each descriptor individually to calculate its matrix correlation = prod * matrix energy = np.power(matrix, 2) contrast = matrix * np.power(sub, 2) dissimilarity = matrix * np.abs(sub) inverse_difference = matrix / (1 + np.abs(sub)) homogeneity = matrix / (1 + np.power(sub, 2)) entropy = -(matrix[matrix > 0] * np.log(matrix[matrix > 0])) # Sum values for descriptors ranging from the first one to the last, # as all are their respective origin matrix and not the resulting value yet. return [ maximum_prob, correlation.sum(), energy.sum(), contrast.sum(), dissimilarity.sum(), inverse_difference.sum(), homogeneity.sum(), entropy.sum(), ] def get_descriptors( masks: tuple[np.ndarray, np.ndarray], coordinate: tuple[int, int] ) -> np.ndarray: """ Calculate all Haralick descriptors for a sequence of different co-occurrence matrices, given input masks and coordinates. Example: >>> img = np.array([[[108, 201, 72], [255, 11, 127]], ... [[56, 56, 56], [128, 255, 107]]]) >>> gray = grayscale(img) >>> binary = binarize(gray) >>> morphological = opening_filter(binary) >>> get_descriptors(binary_mask(gray, morphological), (0, 1)) array([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.]) """ descriptors = np.array( [haralick_descriptors(matrix_concurrency(mask, coordinate)) for mask in masks] ) # Concatenate each individual descriptor into # one single list containing sequence of descriptors return np.concatenate(descriptors, axis=None) def euclidean(point_1: np.ndarray, point_2: np.ndarray) -> np.float32: """ Simple method for calculating the euclidean distance between two points, with type np.ndarray. 
Example: >>> a = np.array([1, 0, -2]) >>> b = np.array([2, -1, 1]) >>> euclidean(a, b) 3.3166247903554 """ return np.sqrt(np.sum(np.square(point_1 - point_2))) def get_distances(descriptors: np.ndarray, base: int) -> list[tuple[int, float]]: """ Calculate all Euclidean distances between a selected base descriptor and all other Haralick descriptors The resulting comparison is return in decreasing order, showing which descriptor is the most similar to the selected base. Args: descriptors: Haralick descriptors to compare with base index base: Haralick descriptor index to use as base when calculating respective euclidean distance to other descriptors. Returns: Ordered distances between descriptors Example: >>> index = 1 >>> img = np.array([[[108, 201, 72], [255, 11, 127]], ... [[56, 56, 56], [128, 255, 107]]]) >>> gray = grayscale(img) >>> binary = binarize(gray) >>> morphological = opening_filter(binary) >>> get_distances(get_descriptors( ... binary_mask(gray, morphological), (0, 1)), ... index) [(0, 0.0), (1, 0.0), (2, 0.0), (3, 0.0), (4, 0.0), (5, 0.0), \ (6, 0.0), (7, 0.0), (8, 0.0), (9, 0.0), (10, 0.0), (11, 0.0), (12, 0.0), \ (13, 0.0), (14, 0.0), (15, 0.0)] """ distances = np.array( [euclidean(descriptor, descriptors[base]) for descriptor in descriptors] ) # Normalize distances between range [0, 1] normalized_distances: list[float] = normalize_array(distances, 1).tolist() enum_distances = list(enumerate(normalized_distances)) enum_distances.sort(key=lambda tup: tup[1], reverse=True) return enum_distances if __name__ == "__main__": # Index to compare haralick descriptors to index = int(input()) q_value_list = [int(value) for value in input().split()] q_value = (q_value_list[0], q_value_list[1]) # Format is the respective filter to apply, # can be either 1 for the opening filter or else for the closing parameters = {"format": int(input()), "threshold": int(input())} # Number of images to perform methods on b_number = int(input()) files, descriptors = [], [] for _ in range(b_number): file = input().rstrip() files.append(file) # Open given image and calculate morphological filter, # respective masks and correspondent Harralick Descriptors. image = imageio.imread(file).astype(np.float32) gray = grayscale(image) threshold = binarize(gray, parameters["threshold"]) morphological = ( opening_filter(threshold) if parameters["format"] == 1 else closing_filter(threshold) ) masks = binary_mask(gray, morphological) descriptors.append(get_descriptors(masks, q_value)) # Transform ordered distances array into a sequence of indexes # corresponding to original file position distances = get_distances(np.array(descriptors), index) indexed_distances = np.array(distances).astype(np.uint8)[:, 0] # Finally, print distances considering the Haralick descriptions from the base # file to all other images using the morphology method of choice. print(f"Query: {files[index]}") print("Ranking:") for idx, file_idx in enumerate(indexed_distances): print(f"({idx}) {files[file_idx]}", end="\n")
1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
from enum import Enum from typing import ClassVar, Literal class NumberingSystem(Enum): SHORT = ( (15, "quadrillion"), (12, "trillion"), (9, "billion"), (6, "million"), (3, "thousand"), (2, "hundred"), ) LONG = ( (15, "billiard"), (9, "milliard"), (6, "million"), (3, "thousand"), (2, "hundred"), ) INDIAN = ( (14, "crore crore"), (12, "lakh crore"), (7, "crore"), (5, "lakh"), (3, "thousand"), (2, "hundred"), ) @classmethod def max_value(cls, system: str) -> int: """ Gets the max value supported by the given number system. >>> NumberingSystem.max_value("short") == 10**18 - 1 True >>> NumberingSystem.max_value("long") == 10**21 - 1 True >>> NumberingSystem.max_value("indian") == 10**19 - 1 True """ match (system_enum := cls[system.upper()]): case cls.SHORT: max_exp = system_enum.value[0][0] + 3 case cls.LONG: max_exp = system_enum.value[0][0] + 6 case cls.INDIAN: max_exp = 19 case _: raise ValueError("Invalid numbering system") return 10**max_exp - 1 class NumberWords(Enum): ONES: ClassVar = { 0: "", 1: "one", 2: "two", 3: "three", 4: "four", 5: "five", 6: "six", 7: "seven", 8: "eight", 9: "nine", } TEENS: ClassVar = { 0: "ten", 1: "eleven", 2: "twelve", 3: "thirteen", 4: "fourteen", 5: "fifteen", 6: "sixteen", 7: "seventeen", 8: "eighteen", 9: "nineteen", } TENS: ClassVar = { 2: "twenty", 3: "thirty", 4: "forty", 5: "fifty", 6: "sixty", 7: "seventy", 8: "eighty", 9: "ninety", } def convert_small_number(num: int) -> str: """ Converts small, non-negative integers with irregular constructions in English (i.e., numbers under 100) into words. >>> convert_small_number(0) 'zero' >>> convert_small_number(5) 'five' >>> convert_small_number(10) 'ten' >>> convert_small_number(15) 'fifteen' >>> convert_small_number(20) 'twenty' >>> convert_small_number(25) 'twenty-five' >>> convert_small_number(-1) Traceback (most recent call last): ... ValueError: This function only accepts non-negative integers >>> convert_small_number(123) Traceback (most recent call last): ... ValueError: This function only converts numbers less than 100 """ if num < 0: raise ValueError("This function only accepts non-negative integers") if num >= 100: raise ValueError("This function only converts numbers less than 100") tens, ones = divmod(num, 10) if tens == 0: return NumberWords.ONES.value[ones] or "zero" if tens == 1: return NumberWords.TEENS.value[ones] return ( NumberWords.TENS.value[tens] + ("-" if NumberWords.ONES.value[ones] else "") + NumberWords.ONES.value[ones] ) def convert_number( num: int, system: Literal["short", "long", "indian"] = "short" ) -> str: """ Converts an integer to English words. 
:param num: The integer to be converted :param system: The numbering system (short, long, or Indian) >>> convert_number(0) 'zero' >>> convert_number(1) 'one' >>> convert_number(100) 'one hundred' >>> convert_number(-100) 'negative one hundred' >>> convert_number(123_456_789_012_345) # doctest: +NORMALIZE_WHITESPACE 'one hundred twenty-three trillion four hundred fifty-six billion seven hundred eighty-nine million twelve thousand three hundred forty-five' >>> convert_number(123_456_789_012_345, "long") # doctest: +NORMALIZE_WHITESPACE 'one hundred twenty-three thousand four hundred fifty-six milliard seven hundred eighty-nine million twelve thousand three hundred forty-five' >>> convert_number(12_34_56_78_90_12_345, "indian") # doctest: +NORMALIZE_WHITESPACE 'one crore crore twenty-three lakh crore forty-five thousand six hundred seventy-eight crore ninety lakh twelve thousand three hundred forty-five' >>> convert_number(10**18) Traceback (most recent call last): ... ValueError: Input number is too large >>> convert_number(10**21, "long") Traceback (most recent call last): ... ValueError: Input number is too large >>> convert_number(10**19, "indian") Traceback (most recent call last): ... ValueError: Input number is too large """ word_groups = [] if num < 0: word_groups.append("negative") num *= -1 if num > NumberingSystem.max_value(system): raise ValueError("Input number is too large") for power, unit in NumberingSystem[system.upper()].value: digit_group, num = divmod(num, 10**power) if digit_group > 0: word_group = ( convert_number(digit_group, system) if digit_group >= 100 else convert_small_number(digit_group) ) word_groups.append(f"{word_group} {unit}") if num > 0 or not word_groups: # word_groups is only empty if input num was 0 word_groups.append(convert_small_number(num)) return " ".join(word_groups) if __name__ == "__main__": import doctest doctest.testmod() print(f"{convert_number(123456789) = }")
from enum import Enum from typing import ClassVar, Literal class NumberingSystem(Enum): SHORT = ( (15, "quadrillion"), (12, "trillion"), (9, "billion"), (6, "million"), (3, "thousand"), (2, "hundred"), ) LONG = ( (15, "billiard"), (9, "milliard"), (6, "million"), (3, "thousand"), (2, "hundred"), ) INDIAN = ( (14, "crore crore"), (12, "lakh crore"), (7, "crore"), (5, "lakh"), (3, "thousand"), (2, "hundred"), ) @classmethod def max_value(cls, system: str) -> int: """ Gets the max value supported by the given number system. >>> NumberingSystem.max_value("short") == 10**18 - 1 True >>> NumberingSystem.max_value("long") == 10**21 - 1 True >>> NumberingSystem.max_value("indian") == 10**19 - 1 True """ match (system_enum := cls[system.upper()]): case cls.SHORT: max_exp = system_enum.value[0][0] + 3 case cls.LONG: max_exp = system_enum.value[0][0] + 6 case cls.INDIAN: max_exp = 19 case _: raise ValueError("Invalid numbering system") return 10**max_exp - 1 class NumberWords(Enum): ONES: ClassVar[dict[int, str]] = { 0: "", 1: "one", 2: "two", 3: "three", 4: "four", 5: "five", 6: "six", 7: "seven", 8: "eight", 9: "nine", } TEENS: ClassVar[dict[int, str]] = { 0: "ten", 1: "eleven", 2: "twelve", 3: "thirteen", 4: "fourteen", 5: "fifteen", 6: "sixteen", 7: "seventeen", 8: "eighteen", 9: "nineteen", } TENS: ClassVar[dict[int, str]] = { 2: "twenty", 3: "thirty", 4: "forty", 5: "fifty", 6: "sixty", 7: "seventy", 8: "eighty", 9: "ninety", } def convert_small_number(num: int) -> str: """ Converts small, non-negative integers with irregular constructions in English (i.e., numbers under 100) into words. >>> convert_small_number(0) 'zero' >>> convert_small_number(5) 'five' >>> convert_small_number(10) 'ten' >>> convert_small_number(15) 'fifteen' >>> convert_small_number(20) 'twenty' >>> convert_small_number(25) 'twenty-five' >>> convert_small_number(-1) Traceback (most recent call last): ... ValueError: This function only accepts non-negative integers >>> convert_small_number(123) Traceback (most recent call last): ... ValueError: This function only converts numbers less than 100 """ if num < 0: raise ValueError("This function only accepts non-negative integers") if num >= 100: raise ValueError("This function only converts numbers less than 100") tens, ones = divmod(num, 10) if tens == 0: return NumberWords.ONES.value[ones] or "zero" if tens == 1: return NumberWords.TEENS.value[ones] return ( NumberWords.TENS.value[tens] + ("-" if NumberWords.ONES.value[ones] else "") + NumberWords.ONES.value[ones] ) def convert_number( num: int, system: Literal["short", "long", "indian"] = "short" ) -> str: """ Converts an integer to English words. 
:param num: The integer to be converted :param system: The numbering system (short, long, or Indian) >>> convert_number(0) 'zero' >>> convert_number(1) 'one' >>> convert_number(100) 'one hundred' >>> convert_number(-100) 'negative one hundred' >>> convert_number(123_456_789_012_345) # doctest: +NORMALIZE_WHITESPACE 'one hundred twenty-three trillion four hundred fifty-six billion seven hundred eighty-nine million twelve thousand three hundred forty-five' >>> convert_number(123_456_789_012_345, "long") # doctest: +NORMALIZE_WHITESPACE 'one hundred twenty-three thousand four hundred fifty-six milliard seven hundred eighty-nine million twelve thousand three hundred forty-five' >>> convert_number(12_34_56_78_90_12_345, "indian") # doctest: +NORMALIZE_WHITESPACE 'one crore crore twenty-three lakh crore forty-five thousand six hundred seventy-eight crore ninety lakh twelve thousand three hundred forty-five' >>> convert_number(10**18) Traceback (most recent call last): ... ValueError: Input number is too large >>> convert_number(10**21, "long") Traceback (most recent call last): ... ValueError: Input number is too large >>> convert_number(10**19, "indian") Traceback (most recent call last): ... ValueError: Input number is too large """ word_groups = [] if num < 0: word_groups.append("negative") num *= -1 if num > NumberingSystem.max_value(system): raise ValueError("Input number is too large") for power, unit in NumberingSystem[system.upper()].value: digit_group, num = divmod(num, 10**power) if digit_group > 0: word_group = ( convert_number(digit_group, system) if digit_group >= 100 else convert_small_number(digit_group) ) word_groups.append(f"{word_group} {unit}") if num > 0 or not word_groups: # word_groups is only empty if input num was 0 word_groups.append(convert_small_number(num)) return " ".join(word_groups) if __name__ == "__main__": import doctest doctest.testmod() print(f"{convert_number(123456789) = }")
1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
from collections import deque def tarjan(g): """ Tarjan's algo for finding strongly connected components in a directed graph Uses two main attributes of each node to track reachability, the index of that node within a component(index), and the lowest index reachable from that node(lowlink). We then perform a dfs of each component making sure to update these parameters for each node and saving the nodes we visit on the way. If ever we find that the lowest reachable node from a current node is equal to the index of the current node then it must be the root of a strongly connected component and so we save it and its equireachable vertices as a strongly connected component. Complexity: strong_connect() is called at most once for each node and has a complexity of O(|E|) as it is DFS. Therefore this has complexity O(|V| + |E|) for a graph G = (V, E) """ n = len(g) stack = deque() on_stack = [False for _ in range(n)] index_of = [-1 for _ in range(n)] lowlink_of = index_of[:] def strong_connect(v, index, components): index_of[v] = index # the number when this node is seen lowlink_of[v] = index # lowest rank node reachable from here index += 1 stack.append(v) on_stack[v] = True for w in g[v]: if index_of[w] == -1: index = strong_connect(w, index, components) lowlink_of[v] = ( lowlink_of[w] if lowlink_of[w] < lowlink_of[v] else lowlink_of[v] ) elif on_stack[w]: lowlink_of[v] = ( lowlink_of[w] if lowlink_of[w] < lowlink_of[v] else lowlink_of[v] ) if lowlink_of[v] == index_of[v]: component = [] w = stack.pop() on_stack[w] = False component.append(w) while w != v: w = stack.pop() on_stack[w] = False component.append(w) components.append(component) return index components = [] for v in range(n): if index_of[v] == -1: strong_connect(v, 0, components) return components def create_graph(n, edges): g = [[] for _ in range(n)] for u, v in edges: g[u].append(v) return g if __name__ == "__main__": # Test n_vertices = 7 source = [0, 0, 1, 2, 3, 3, 4, 4, 6] target = [1, 3, 2, 0, 1, 4, 5, 6, 5] edges = [(u, v) for u, v in zip(source, target)] g = create_graph(n_vertices, edges) assert [[5], [6], [4], [3, 2, 1, 0]] == tarjan(g)
from collections import deque def tarjan(g): """ Tarjan's algo for finding strongly connected components in a directed graph Uses two main attributes of each node to track reachability, the index of that node within a component(index), and the lowest index reachable from that node(lowlink). We then perform a dfs of each component making sure to update these parameters for each node and saving the nodes we visit on the way. If ever we find that the lowest reachable node from a current node is equal to the index of the current node then it must be the root of a strongly connected component and so we save it and its equireachable vertices as a strongly connected component. Complexity: strong_connect() is called at most once for each node and has a complexity of O(|E|) as it is DFS. Therefore this has complexity O(|V| + |E|) for a graph G = (V, E) """ n = len(g) stack = deque() on_stack = [False for _ in range(n)] index_of = [-1 for _ in range(n)] lowlink_of = index_of[:] def strong_connect(v, index, components): index_of[v] = index # the number when this node is seen lowlink_of[v] = index # lowest rank node reachable from here index += 1 stack.append(v) on_stack[v] = True for w in g[v]: if index_of[w] == -1: index = strong_connect(w, index, components) lowlink_of[v] = ( lowlink_of[w] if lowlink_of[w] < lowlink_of[v] else lowlink_of[v] ) elif on_stack[w]: lowlink_of[v] = ( lowlink_of[w] if lowlink_of[w] < lowlink_of[v] else lowlink_of[v] ) if lowlink_of[v] == index_of[v]: component = [] w = stack.pop() on_stack[w] = False component.append(w) while w != v: w = stack.pop() on_stack[w] = False component.append(w) components.append(component) return index components = [] for v in range(n): if index_of[v] == -1: strong_connect(v, 0, components) return components def create_graph(n, edges): g = [[] for _ in range(n)] for u, v in edges: g[u].append(v) return g if __name__ == "__main__": # Test n_vertices = 7 source = [0, 0, 1, 2, 3, 3, 4, 4, 6] target = [1, 3, 2, 0, 1, 4, 5, 6, 5] edges = list(zip(source, target)) g = create_graph(n_vertices, edges) assert [[5], [6], [4], [3, 2, 1, 0]] == tarjan(g)
1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" Ugly numbers are numbers whose only prime factors are 2, 3 or 5. The sequence 1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, … shows the first 11 ugly numbers. By convention, 1 is included. Given an integer n, we have to find the nth ugly number. For more details, refer this article https://www.geeksforgeeks.org/ugly-numbers/ """ def ugly_numbers(n: int) -> int: """ Returns the nth ugly number. >>> ugly_numbers(100) 1536 >>> ugly_numbers(0) 1 >>> ugly_numbers(20) 36 >>> ugly_numbers(-5) 1 >>> ugly_numbers(-5.5) Traceback (most recent call last): ... TypeError: 'float' object cannot be interpreted as an integer """ ugly_nums = [1] i2, i3, i5 = 0, 0, 0 next_2 = ugly_nums[i2] * 2 next_3 = ugly_nums[i3] * 3 next_5 = ugly_nums[i5] * 5 for _ in range(1, n): next_num = min(next_2, next_3, next_5) ugly_nums.append(next_num) if next_num == next_2: i2 += 1 next_2 = ugly_nums[i2] * 2 if next_num == next_3: i3 += 1 next_3 = ugly_nums[i3] * 3 if next_num == next_5: i5 += 1 next_5 = ugly_nums[i5] * 5 return ugly_nums[-1] if __name__ == "__main__": from doctest import testmod testmod(verbose=True) print(f"{ugly_numbers(200) = }")
""" Ugly numbers are numbers whose only prime factors are 2, 3 or 5. The sequence 1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, … shows the first 11 ugly numbers. By convention, 1 is included. Given an integer n, we have to find the nth ugly number. For more details, refer this article https://www.geeksforgeeks.org/ugly-numbers/ """ def ugly_numbers(n: int) -> int: """ Returns the nth ugly number. >>> ugly_numbers(100) 1536 >>> ugly_numbers(0) 1 >>> ugly_numbers(20) 36 >>> ugly_numbers(-5) 1 >>> ugly_numbers(-5.5) Traceback (most recent call last): ... TypeError: 'float' object cannot be interpreted as an integer """ ugly_nums = [1] i2, i3, i5 = 0, 0, 0 next_2 = ugly_nums[i2] * 2 next_3 = ugly_nums[i3] * 3 next_5 = ugly_nums[i5] * 5 for _ in range(1, n): next_num = min(next_2, next_3, next_5) ugly_nums.append(next_num) if next_num == next_2: i2 += 1 next_2 = ugly_nums[i2] * 2 if next_num == next_3: i3 += 1 next_3 = ugly_nums[i3] * 3 if next_num == next_5: i5 += 1 next_5 = ugly_nums[i5] * 5 return ugly_nums[-1] if __name__ == "__main__": from doctest import testmod testmod(verbose=True) print(f"{ugly_numbers(200) = }")
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" Peak signal-to-noise ratio - PSNR https://en.wikipedia.org/wiki/Peak_signal-to-noise_ratio Source: https://tutorials.techonical.com/how-to-calculate-psnr-value-of-two-images-using-python """ import math import os import cv2 import numpy as np PIXEL_MAX = 255.0 def peak_signal_to_noise_ratio(original: float, contrast: float) -> float: mse = np.mean((original - contrast) ** 2) if mse == 0: return 100 return 20 * math.log10(PIXEL_MAX / math.sqrt(mse)) def main() -> None: dir_path = os.path.dirname(os.path.realpath(__file__)) # Loading images (original image and compressed image) original = cv2.imread(os.path.join(dir_path, "image_data/original_image.png")) contrast = cv2.imread(os.path.join(dir_path, "image_data/compressed_image.png"), 1) original2 = cv2.imread(os.path.join(dir_path, "image_data/PSNR-example-base.png")) contrast2 = cv2.imread( os.path.join(dir_path, "image_data/PSNR-example-comp-10.jpg"), 1 ) # Value expected: 29.73dB print("-- First Test --") print(f"PSNR value is {peak_signal_to_noise_ratio(original, contrast)} dB") # # Value expected: 31.53dB (Wikipedia Example) print("\n-- Second Test --") print(f"PSNR value is {peak_signal_to_noise_ratio(original2, contrast2)} dB") if __name__ == "__main__": main()
""" Peak signal-to-noise ratio - PSNR https://en.wikipedia.org/wiki/Peak_signal-to-noise_ratio Source: https://tutorials.techonical.com/how-to-calculate-psnr-value-of-two-images-using-python """ import math import os import cv2 import numpy as np PIXEL_MAX = 255.0 def peak_signal_to_noise_ratio(original: float, contrast: float) -> float: mse = np.mean((original - contrast) ** 2) if mse == 0: return 100 return 20 * math.log10(PIXEL_MAX / math.sqrt(mse)) def main() -> None: dir_path = os.path.dirname(os.path.realpath(__file__)) # Loading images (original image and compressed image) original = cv2.imread(os.path.join(dir_path, "image_data/original_image.png")) contrast = cv2.imread(os.path.join(dir_path, "image_data/compressed_image.png"), 1) original2 = cv2.imread(os.path.join(dir_path, "image_data/PSNR-example-base.png")) contrast2 = cv2.imread( os.path.join(dir_path, "image_data/PSNR-example-comp-10.jpg"), 1 ) # Value expected: 29.73dB print("-- First Test --") print(f"PSNR value is {peak_signal_to_noise_ratio(original, contrast)} dB") # # Value expected: 31.53dB (Wikipedia Example) print("\n-- Second Test --") print(f"PSNR value is {peak_signal_to_noise_ratio(original2, contrast2)} dB") if __name__ == "__main__": main()
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" This script demonstrates the implementation of the Sigmoid Linear Unit (SiLU) or swish function. * https://en.wikipedia.org/wiki/Rectifier_(neural_networks) * https://en.wikipedia.org/wiki/Swish_function The function takes a vector x of K real numbers as input and returns x * sigmoid(x). Swish is a smooth, non-monotonic function defined as f(x) = x * sigmoid(x). Extensive experiments show that Swish consistently matches or outperforms ReLU on deep networks applied to a variety of challenging domains such as image classification and machine translation. This script is inspired by a corresponding research paper. * https://arxiv.org/abs/1710.05941 """ import numpy as np def sigmoid(vector: np.ndarray) -> np.ndarray: """ Mathematical function sigmoid takes a vector x of K real numbers as input and returns 1/ (1 + e^-x). https://en.wikipedia.org/wiki/Sigmoid_function >>> sigmoid(np.array([-1.0, 1.0, 2.0])) array([0.26894142, 0.73105858, 0.88079708]) """ return 1 / (1 + np.exp(-vector)) def sigmoid_linear_unit(vector: np.ndarray) -> np.ndarray: """ Implements the Sigmoid Linear Unit (SiLU) or swish function Parameters: vector (np.ndarray): A numpy array consisting of real values Returns: swish_vec (np.ndarray): The input numpy array, after applying swish Examples: >>> sigmoid_linear_unit(np.array([-1.0, 1.0, 2.0])) array([-0.26894142, 0.73105858, 1.76159416]) >>> sigmoid_linear_unit(np.array([-2])) array([-0.23840584]) """ return vector * sigmoid(vector) if __name__ == "__main__": import doctest doctest.testmod()
""" This script demonstrates the implementation of the Sigmoid Linear Unit (SiLU) or swish function. * https://en.wikipedia.org/wiki/Rectifier_(neural_networks) * https://en.wikipedia.org/wiki/Swish_function The function takes a vector x of K real numbers as input and returns x * sigmoid(x). Swish is a smooth, non-monotonic function defined as f(x) = x * sigmoid(x). Extensive experiments show that Swish consistently matches or outperforms ReLU on deep networks applied to a variety of challenging domains such as image classification and machine translation. This script is inspired by a corresponding research paper. * https://arxiv.org/abs/1710.05941 """ import numpy as np def sigmoid(vector: np.ndarray) -> np.ndarray: """ Mathematical function sigmoid takes a vector x of K real numbers as input and returns 1/ (1 + e^-x). https://en.wikipedia.org/wiki/Sigmoid_function >>> sigmoid(np.array([-1.0, 1.0, 2.0])) array([0.26894142, 0.73105858, 0.88079708]) """ return 1 / (1 + np.exp(-vector)) def sigmoid_linear_unit(vector: np.ndarray) -> np.ndarray: """ Implements the Sigmoid Linear Unit (SiLU) or swish function Parameters: vector (np.ndarray): A numpy array consisting of real values Returns: swish_vec (np.ndarray): The input numpy array, after applying swish Examples: >>> sigmoid_linear_unit(np.array([-1.0, 1.0, 2.0])) array([-0.26894142, 0.73105858, 1.76159416]) >>> sigmoid_linear_unit(np.array([-2])) array([-0.23840584]) """ return vector * sigmoid(vector) if __name__ == "__main__": import doctest doctest.testmod()
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
import itertools import string from collections.abc import Generator, Iterable def chunker(seq: Iterable[str], size: int) -> Generator[tuple[str, ...], None, None]: it = iter(seq) while True: chunk = tuple(itertools.islice(it, size)) if not chunk: return yield chunk def prepare_input(dirty: str) -> str: """ Prepare the plaintext by up-casing it and separating repeated letters with X's """ dirty = "".join([c.upper() for c in dirty if c in string.ascii_letters]) clean = "" if len(dirty) < 2: return dirty for i in range(len(dirty) - 1): clean += dirty[i] if dirty[i] == dirty[i + 1]: clean += "X" clean += dirty[-1] if len(clean) & 1: clean += "X" return clean def generate_table(key: str) -> list[str]: # I and J are used interchangeably to allow # us to use a 5x5 table (25 letters) alphabet = "ABCDEFGHIKLMNOPQRSTUVWXYZ" # we're using a list instead of a '2d' array because it makes the math # for setting up the table and doing the actual encoding/decoding simpler table = [] # copy key chars into the table if they are in `alphabet` ignoring duplicates for char in key.upper(): if char not in table and char in alphabet: table.append(char) # fill the rest of the table in with the remaining alphabet chars for char in alphabet: if char not in table: table.append(char) return table def encode(plaintext: str, key: str) -> str: table = generate_table(key) plaintext = prepare_input(plaintext) ciphertext = "" # https://en.wikipedia.org/wiki/Playfair_cipher#Description for char1, char2 in chunker(plaintext, 2): row1, col1 = divmod(table.index(char1), 5) row2, col2 = divmod(table.index(char2), 5) if row1 == row2: ciphertext += table[row1 * 5 + (col1 + 1) % 5] ciphertext += table[row2 * 5 + (col2 + 1) % 5] elif col1 == col2: ciphertext += table[((row1 + 1) % 5) * 5 + col1] ciphertext += table[((row2 + 1) % 5) * 5 + col2] else: # rectangle ciphertext += table[row1 * 5 + col2] ciphertext += table[row2 * 5 + col1] return ciphertext def decode(ciphertext: str, key: str) -> str: table = generate_table(key) plaintext = "" # https://en.wikipedia.org/wiki/Playfair_cipher#Description for char1, char2 in chunker(ciphertext, 2): row1, col1 = divmod(table.index(char1), 5) row2, col2 = divmod(table.index(char2), 5) if row1 == row2: plaintext += table[row1 * 5 + (col1 - 1) % 5] plaintext += table[row2 * 5 + (col2 - 1) % 5] elif col1 == col2: plaintext += table[((row1 - 1) % 5) * 5 + col1] plaintext += table[((row2 - 1) % 5) * 5 + col2] else: # rectangle plaintext += table[row1 * 5 + col2] plaintext += table[row2 * 5 + col1] return plaintext
import itertools import string from collections.abc import Generator, Iterable def chunker(seq: Iterable[str], size: int) -> Generator[tuple[str, ...], None, None]: it = iter(seq) while True: chunk = tuple(itertools.islice(it, size)) if not chunk: return yield chunk def prepare_input(dirty: str) -> str: """ Prepare the plaintext by up-casing it and separating repeated letters with X's """ dirty = "".join([c.upper() for c in dirty if c in string.ascii_letters]) clean = "" if len(dirty) < 2: return dirty for i in range(len(dirty) - 1): clean += dirty[i] if dirty[i] == dirty[i + 1]: clean += "X" clean += dirty[-1] if len(clean) & 1: clean += "X" return clean def generate_table(key: str) -> list[str]: # I and J are used interchangeably to allow # us to use a 5x5 table (25 letters) alphabet = "ABCDEFGHIKLMNOPQRSTUVWXYZ" # we're using a list instead of a '2d' array because it makes the math # for setting up the table and doing the actual encoding/decoding simpler table = [] # copy key chars into the table if they are in `alphabet` ignoring duplicates for char in key.upper(): if char not in table and char in alphabet: table.append(char) # fill the rest of the table in with the remaining alphabet chars for char in alphabet: if char not in table: table.append(char) return table def encode(plaintext: str, key: str) -> str: table = generate_table(key) plaintext = prepare_input(plaintext) ciphertext = "" # https://en.wikipedia.org/wiki/Playfair_cipher#Description for char1, char2 in chunker(plaintext, 2): row1, col1 = divmod(table.index(char1), 5) row2, col2 = divmod(table.index(char2), 5) if row1 == row2: ciphertext += table[row1 * 5 + (col1 + 1) % 5] ciphertext += table[row2 * 5 + (col2 + 1) % 5] elif col1 == col2: ciphertext += table[((row1 + 1) % 5) * 5 + col1] ciphertext += table[((row2 + 1) % 5) * 5 + col2] else: # rectangle ciphertext += table[row1 * 5 + col2] ciphertext += table[row2 * 5 + col1] return ciphertext def decode(ciphertext: str, key: str) -> str: table = generate_table(key) plaintext = "" # https://en.wikipedia.org/wiki/Playfair_cipher#Description for char1, char2 in chunker(ciphertext, 2): row1, col1 = divmod(table.index(char1), 5) row2, col2 = divmod(table.index(char2), 5) if row1 == row2: plaintext += table[row1 * 5 + (col1 - 1) % 5] plaintext += table[row2 * 5 + (col2 - 1) % 5] elif col1 == col2: plaintext += table[((row1 - 1) % 5) * 5 + col1] plaintext += table[((row2 - 1) % 5) * 5 + col2] else: # rectangle plaintext += table[row1 * 5 + col2] plaintext += table[row2 * 5 + col1] return plaintext
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
import sys def minimum_subarray_sum(target: int, numbers: list[int]) -> int: """ Return the length of the shortest contiguous subarray in a list of numbers whose sum is at least target. Reference: https://stackoverflow.com/questions/8269916 >>> minimum_subarray_sum(7, [2, 3, 1, 2, 4, 3]) 2 >>> minimum_subarray_sum(7, [2, 3, -1, 2, 4, -3]) 4 >>> minimum_subarray_sum(11, [1, 1, 1, 1, 1, 1, 1, 1]) 0 >>> minimum_subarray_sum(10, [1, 2, 3, 4, 5, 6, 7]) 2 >>> minimum_subarray_sum(5, [1, 1, 1, 1, 1, 5]) 1 >>> minimum_subarray_sum(0, []) 0 >>> minimum_subarray_sum(0, [1, 2, 3]) 1 >>> minimum_subarray_sum(10, [10, 20, 30]) 1 >>> minimum_subarray_sum(7, [1, 1, 1, 1, 1, 1, 10]) 1 >>> minimum_subarray_sum(6, []) 0 >>> minimum_subarray_sum(2, [1, 2, 3]) 1 >>> minimum_subarray_sum(-6, []) 0 >>> minimum_subarray_sum(-6, [3, 4, 5]) 1 >>> minimum_subarray_sum(8, None) 0 >>> minimum_subarray_sum(2, "ABC") Traceback (most recent call last): ... ValueError: numbers must be an iterable of integers """ if not numbers: return 0 if target == 0 and target in numbers: return 0 if not isinstance(numbers, (list, tuple)) or not all( isinstance(number, int) for number in numbers ): raise ValueError("numbers must be an iterable of integers") left = right = curr_sum = 0 min_len = sys.maxsize while right < len(numbers): curr_sum += numbers[right] while curr_sum >= target and left <= right: min_len = min(min_len, right - left + 1) curr_sum -= numbers[left] left += 1 right += 1 return 0 if min_len == sys.maxsize else min_len
import sys def minimum_subarray_sum(target: int, numbers: list[int]) -> int: """ Return the length of the shortest contiguous subarray in a list of numbers whose sum is at least target. Reference: https://stackoverflow.com/questions/8269916 >>> minimum_subarray_sum(7, [2, 3, 1, 2, 4, 3]) 2 >>> minimum_subarray_sum(7, [2, 3, -1, 2, 4, -3]) 4 >>> minimum_subarray_sum(11, [1, 1, 1, 1, 1, 1, 1, 1]) 0 >>> minimum_subarray_sum(10, [1, 2, 3, 4, 5, 6, 7]) 2 >>> minimum_subarray_sum(5, [1, 1, 1, 1, 1, 5]) 1 >>> minimum_subarray_sum(0, []) 0 >>> minimum_subarray_sum(0, [1, 2, 3]) 1 >>> minimum_subarray_sum(10, [10, 20, 30]) 1 >>> minimum_subarray_sum(7, [1, 1, 1, 1, 1, 1, 10]) 1 >>> minimum_subarray_sum(6, []) 0 >>> minimum_subarray_sum(2, [1, 2, 3]) 1 >>> minimum_subarray_sum(-6, []) 0 >>> minimum_subarray_sum(-6, [3, 4, 5]) 1 >>> minimum_subarray_sum(8, None) 0 >>> minimum_subarray_sum(2, "ABC") Traceback (most recent call last): ... ValueError: numbers must be an iterable of integers """ if not numbers: return 0 if target == 0 and target in numbers: return 0 if not isinstance(numbers, (list, tuple)) or not all( isinstance(number, int) for number in numbers ): raise ValueError("numbers must be an iterable of integers") left = right = curr_sum = 0 min_len = sys.maxsize while right < len(numbers): curr_sum += numbers[right] while curr_sum >= target and left <= right: min_len = min(min_len, right - left + 1) curr_sum -= numbers[left] left += 1 right += 1 return 0 if min_len == sys.maxsize else min_len
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" Project Euler Problem 73: https://projecteuler.net/problem=73 Consider the fraction, n/d, where n and d are positive integers. If n<d and HCF(n,d)=1, it is called a reduced proper fraction. If we list the set of reduced proper fractions for d ≤ 8 in ascending order of size, we get: 1/8, 1/7, 1/6, 1/5, 1/4, 2/7, 1/3, 3/8, 2/5, 3/7, 1/2, 4/7, 3/5, 5/8, 2/3, 5/7, 3/4, 4/5, 5/6, 6/7, 7/8 It can be seen that there are 3 fractions between 1/3 and 1/2. How many fractions lie between 1/3 and 1/2 in the sorted set of reduced proper fractions for d ≤ 12,000? """ from math import gcd def solution(max_d: int = 12_000) -> int: """ Returns the number of fractions that lie between 1/3 and 1/2 in the sorted set of reduced proper fractions for d ≤ max_d >>> solution(4) 0 >>> solution(5) 1 >>> solution(8) 3 """ fractions_number = 0 for d in range(max_d + 1): for n in range(d // 3 + 1, (d + 1) // 2): if gcd(n, d) == 1: fractions_number += 1 return fractions_number if __name__ == "__main__": print(f"{solution() = }")
""" Project Euler Problem 73: https://projecteuler.net/problem=73 Consider the fraction, n/d, where n and d are positive integers. If n<d and HCF(n,d)=1, it is called a reduced proper fraction. If we list the set of reduced proper fractions for d ≤ 8 in ascending order of size, we get: 1/8, 1/7, 1/6, 1/5, 1/4, 2/7, 1/3, 3/8, 2/5, 3/7, 1/2, 4/7, 3/5, 5/8, 2/3, 5/7, 3/4, 4/5, 5/6, 6/7, 7/8 It can be seen that there are 3 fractions between 1/3 and 1/2. How many fractions lie between 1/3 and 1/2 in the sorted set of reduced proper fractions for d ≤ 12,000? """ from math import gcd def solution(max_d: int = 12_000) -> int: """ Returns the number of fractions that lie between 1/3 and 1/2 in the sorted set of reduced proper fractions for d ≤ max_d >>> solution(4) 0 >>> solution(5) 1 >>> solution(8) 3 """ fractions_number = 0 for d in range(max_d + 1): for n in range(d // 3 + 1, (d + 1) // 2): if gcd(n, d) == 1: fractions_number += 1 return fractions_number if __name__ == "__main__": print(f"{solution() = }")
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" What is the greatest product of four adjacent numbers (horizontally, vertically, or diagonally) in this 20x20 array? 08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08 49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00 81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65 52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91 22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80 24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50 32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70 67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21 24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72 21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95 78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92 16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57 86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58 19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40 04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66 88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69 04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36 20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16 20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54 01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48 """ import os def solution(): """Returns the greatest product of four adjacent numbers (horizontally, vertically, or diagonally). >>> solution() 70600674 """ with open(os.path.dirname(__file__) + "/grid.txt") as f: l = [] # noqa: E741 for _ in range(20): l.append([int(x) for x in f.readline().split()]) maximum = 0 # right for i in range(20): for j in range(17): temp = l[i][j] * l[i][j + 1] * l[i][j + 2] * l[i][j + 3] if temp > maximum: maximum = temp # down for i in range(17): for j in range(20): temp = l[i][j] * l[i + 1][j] * l[i + 2][j] * l[i + 3][j] if temp > maximum: maximum = temp # diagonal 1 for i in range(17): for j in range(17): temp = l[i][j] * l[i + 1][j + 1] * l[i + 2][j + 2] * l[i + 3][j + 3] if temp > maximum: maximum = temp # diagonal 2 for i in range(17): for j in range(3, 20): temp = l[i][j] * l[i + 1][j - 1] * l[i + 2][j - 2] * l[i + 3][j - 3] if temp > maximum: maximum = temp return maximum if __name__ == "__main__": print(solution())
""" What is the greatest product of four adjacent numbers (horizontally, vertically, or diagonally) in this 20x20 array? 08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08 49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00 81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65 52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91 22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80 24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50 32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70 67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21 24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72 21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95 78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92 16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57 86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58 19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40 04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66 88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69 04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36 20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16 20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54 01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48 """ import os def solution(): """Returns the greatest product of four adjacent numbers (horizontally, vertically, or diagonally). >>> solution() 70600674 """ with open(os.path.dirname(__file__) + "/grid.txt") as f: l = [] # noqa: E741 for _ in range(20): l.append([int(x) for x in f.readline().split()]) maximum = 0 # right for i in range(20): for j in range(17): temp = l[i][j] * l[i][j + 1] * l[i][j + 2] * l[i][j + 3] if temp > maximum: maximum = temp # down for i in range(17): for j in range(20): temp = l[i][j] * l[i + 1][j] * l[i + 2][j] * l[i + 3][j] if temp > maximum: maximum = temp # diagonal 1 for i in range(17): for j in range(17): temp = l[i][j] * l[i + 1][j + 1] * l[i + 2][j + 2] * l[i + 3][j + 3] if temp > maximum: maximum = temp # diagonal 2 for i in range(17): for j in range(3, 20): temp = l[i][j] * l[i + 1][j - 1] * l[i + 2][j - 2] * l[i + 3][j - 3] if temp > maximum: maximum = temp return maximum if __name__ == "__main__": print(solution())
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
from graphs.minimum_spanning_tree_kruskal import kruskal def test_kruskal_successful_result(): num_nodes = 9 edges = [ [0, 1, 4], [0, 7, 8], [1, 2, 8], [7, 8, 7], [7, 6, 1], [2, 8, 2], [8, 6, 6], [2, 3, 7], [2, 5, 4], [6, 5, 2], [3, 5, 14], [3, 4, 9], [5, 4, 10], [1, 7, 11], ] result = kruskal(num_nodes, edges) expected = [ [7, 6, 1], [2, 8, 2], [6, 5, 2], [0, 1, 4], [2, 5, 4], [2, 3, 7], [0, 7, 8], [3, 4, 9], ] assert sorted(expected) == sorted(result)
from graphs.minimum_spanning_tree_kruskal import kruskal def test_kruskal_successful_result(): num_nodes = 9 edges = [ [0, 1, 4], [0, 7, 8], [1, 2, 8], [7, 8, 7], [7, 6, 1], [2, 8, 2], [8, 6, 6], [2, 3, 7], [2, 5, 4], [6, 5, 2], [3, 5, 14], [3, 4, 9], [5, 4, 10], [1, 7, 11], ] result = kruskal(num_nodes, edges) expected = [ [7, 6, 1], [2, 8, 2], [6, 5, 2], [0, 1, 4], [2, 5, 4], [2, 3, 7], [0, 7, 8], [3, 4, 9], ] assert sorted(expected) == sorted(result)
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" Project Euler Problem 82: https://projecteuler.net/problem=82 The minimal path sum in the 5 by 5 matrix below, by starting in any cell in the left column and finishing in any cell in the right column, and only moving up, down, and right, is indicated in red and bold; the sum is equal to 994. 131 673 [234] [103] [18] [201] [96] [342] 965 150 630 803 746 422 111 537 699 497 121 956 805 732 524 37 331 Find the minimal path sum from the left column to the right column in matrix.txt (https://projecteuler.net/project/resources/p082_matrix.txt) (right click and "Save Link/Target As..."), a 31K text file containing an 80 by 80 matrix. """ import os def solution(filename: str = "input.txt") -> int: """ Returns the minimal path sum in the matrix from the file, by starting in any cell in the left column and finishing in any cell in the right column, and only moving up, down, and right >>> solution("test_matrix.txt") 994 """ with open(os.path.join(os.path.dirname(__file__), filename)) as input_file: matrix = [ [int(element) for element in line.split(",")] for line in input_file.readlines() ] rows = len(matrix) cols = len(matrix[0]) minimal_path_sums = [[-1 for _ in range(cols)] for _ in range(rows)] for i in range(rows): minimal_path_sums[i][0] = matrix[i][0] for j in range(1, cols): for i in range(rows): minimal_path_sums[i][j] = minimal_path_sums[i][j - 1] + matrix[i][j] for i in range(1, rows): minimal_path_sums[i][j] = min( minimal_path_sums[i][j], minimal_path_sums[i - 1][j] + matrix[i][j] ) for i in range(rows - 2, -1, -1): minimal_path_sums[i][j] = min( minimal_path_sums[i][j], minimal_path_sums[i + 1][j] + matrix[i][j] ) return min(minimal_path_sums_row[-1] for minimal_path_sums_row in minimal_path_sums) if __name__ == "__main__": print(f"{solution() = }")
""" Project Euler Problem 82: https://projecteuler.net/problem=82 The minimal path sum in the 5 by 5 matrix below, by starting in any cell in the left column and finishing in any cell in the right column, and only moving up, down, and right, is indicated in red and bold; the sum is equal to 994. 131 673 [234] [103] [18] [201] [96] [342] 965 150 630 803 746 422 111 537 699 497 121 956 805 732 524 37 331 Find the minimal path sum from the left column to the right column in matrix.txt (https://projecteuler.net/project/resources/p082_matrix.txt) (right click and "Save Link/Target As..."), a 31K text file containing an 80 by 80 matrix. """ import os def solution(filename: str = "input.txt") -> int: """ Returns the minimal path sum in the matrix from the file, by starting in any cell in the left column and finishing in any cell in the right column, and only moving up, down, and right >>> solution("test_matrix.txt") 994 """ with open(os.path.join(os.path.dirname(__file__), filename)) as input_file: matrix = [ [int(element) for element in line.split(",")] for line in input_file.readlines() ] rows = len(matrix) cols = len(matrix[0]) minimal_path_sums = [[-1 for _ in range(cols)] for _ in range(rows)] for i in range(rows): minimal_path_sums[i][0] = matrix[i][0] for j in range(1, cols): for i in range(rows): minimal_path_sums[i][j] = minimal_path_sums[i][j - 1] + matrix[i][j] for i in range(1, rows): minimal_path_sums[i][j] = min( minimal_path_sums[i][j], minimal_path_sums[i - 1][j] + matrix[i][j] ) for i in range(rows - 2, -1, -1): minimal_path_sums[i][j] = min( minimal_path_sums[i][j], minimal_path_sums[i + 1][j] + matrix[i][j] ) return min(minimal_path_sums_row[-1] for minimal_path_sums_row in minimal_path_sums) if __name__ == "__main__": print(f"{solution() = }")
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" The first known prime found to exceed one million digits was discovered in 1999, and is a Mersenne prime of the form 2**6972593 − 1; it contains exactly 2,098,960 digits. Subsequently other Mersenne primes, of the form 2**p − 1, have been found which contain more digits. However, in 2004 there was found a massive non-Mersenne prime which contains 2,357,207 digits: 28433 * 2**7830457 + 1. Find the last ten digits of this prime number. """ def solution(n: int = 10) -> str: """ Returns the last n digits of NUMBER. >>> solution() '8739992577' >>> solution(8) '39992577' >>> solution(1) '7' >>> solution(-1) Traceback (most recent call last): ... ValueError: Invalid input >>> solution(8.3) Traceback (most recent call last): ... ValueError: Invalid input >>> solution("a") Traceback (most recent call last): ... ValueError: Invalid input """ if not isinstance(n, int) or n < 0: raise ValueError("Invalid input") modulus = 10**n number = 28433 * (pow(2, 7830457, modulus)) + 1 return str(number % modulus) if __name__ == "__main__": from doctest import testmod testmod() print(f"{solution(10) = }")
""" The first known prime found to exceed one million digits was discovered in 1999, and is a Mersenne prime of the form 2**6972593 − 1; it contains exactly 2,098,960 digits. Subsequently other Mersenne primes, of the form 2**p − 1, have been found which contain more digits. However, in 2004 there was found a massive non-Mersenne prime which contains 2,357,207 digits: 28433 * 2**7830457 + 1. Find the last ten digits of this prime number. """ def solution(n: int = 10) -> str: """ Returns the last n digits of NUMBER. >>> solution() '8739992577' >>> solution(8) '39992577' >>> solution(1) '7' >>> solution(-1) Traceback (most recent call last): ... ValueError: Invalid input >>> solution(8.3) Traceback (most recent call last): ... ValueError: Invalid input >>> solution("a") Traceback (most recent call last): ... ValueError: Invalid input """ if not isinstance(n, int) or n < 0: raise ValueError("Invalid input") modulus = 10**n number = 28433 * (pow(2, 7830457, modulus)) + 1 return str(number % modulus) if __name__ == "__main__": from doctest import testmod testmod() print(f"{solution(10) = }")
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" A NOT Gate is a logic gate in boolean algebra which results to 0 (False) if the input is high, and 1 (True) if the input is low. Following is the truth table of a XOR Gate: ------------------------------ | Input | Output | ------------------------------ | 0 | 1 | | 1 | 0 | ------------------------------ Refer - https://www.geeksforgeeks.org/logic-gates-in-python/ """ def not_gate(input_1: int) -> int: """ Calculate NOT of the input values >>> not_gate(0) 1 >>> not_gate(1) 0 """ return 1 if input_1 == 0 else 0 def test_not_gate() -> None: """ Tests the not_gate function """ assert not_gate(0) == 1 assert not_gate(1) == 0 if __name__ == "__main__": print(not_gate(0)) print(not_gate(1))
""" A NOT Gate is a logic gate in boolean algebra which results to 0 (False) if the input is high, and 1 (True) if the input is low. Following is the truth table of a XOR Gate: ------------------------------ | Input | Output | ------------------------------ | 0 | 1 | | 1 | 0 | ------------------------------ Refer - https://www.geeksforgeeks.org/logic-gates-in-python/ """ def not_gate(input_1: int) -> int: """ Calculate NOT of the input values >>> not_gate(0) 1 >>> not_gate(1) 0 """ return 1 if input_1 == 0 else 0 def test_not_gate() -> None: """ Tests the not_gate function """ assert not_gate(0) == 1 assert not_gate(1) == 0 if __name__ == "__main__": print(not_gate(0)) print(not_gate(1))
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" Project Euler Problem 75: https://projecteuler.net/problem=75 It turns out that 12 cm is the smallest length of wire that can be bent to form an integer sided right angle triangle in exactly one way, but there are many more examples. 12 cm: (3,4,5) 24 cm: (6,8,10) 30 cm: (5,12,13) 36 cm: (9,12,15) 40 cm: (8,15,17) 48 cm: (12,16,20) In contrast, some lengths of wire, like 20 cm, cannot be bent to form an integer sided right angle triangle, and other lengths allow more than one solution to be found; for example, using 120 cm it is possible to form exactly three different integer sided right angle triangles. 120 cm: (30,40,50), (20,48,52), (24,45,51) Given that L is the length of the wire, for how many values of L ≤ 1,500,000 can exactly one integer sided right angle triangle be formed? Solution: we generate all pythagorean triples using Euclid's formula and keep track of the frequencies of the perimeters. Reference: https://en.wikipedia.org/wiki/Pythagorean_triple#Generating_a_triple """ from collections import defaultdict from math import gcd def solution(limit: int = 1500000) -> int: """ Return the number of values of L <= limit such that a wire of length L can be formmed into an integer sided right angle triangle in exactly one way. >>> solution(50) 6 >>> solution(1000) 112 >>> solution(50000) 5502 """ frequencies: defaultdict = defaultdict(int) euclid_m = 2 while 2 * euclid_m * (euclid_m + 1) <= limit: for euclid_n in range((euclid_m % 2) + 1, euclid_m, 2): if gcd(euclid_m, euclid_n) > 1: continue primitive_perimeter = 2 * euclid_m * (euclid_m + euclid_n) for perimeter in range(primitive_perimeter, limit + 1, primitive_perimeter): frequencies[perimeter] += 1 euclid_m += 1 return sum(1 for frequency in frequencies.values() if frequency == 1) if __name__ == "__main__": print(f"{solution() = }")
""" Project Euler Problem 75: https://projecteuler.net/problem=75 It turns out that 12 cm is the smallest length of wire that can be bent to form an integer sided right angle triangle in exactly one way, but there are many more examples. 12 cm: (3,4,5) 24 cm: (6,8,10) 30 cm: (5,12,13) 36 cm: (9,12,15) 40 cm: (8,15,17) 48 cm: (12,16,20) In contrast, some lengths of wire, like 20 cm, cannot be bent to form an integer sided right angle triangle, and other lengths allow more than one solution to be found; for example, using 120 cm it is possible to form exactly three different integer sided right angle triangles. 120 cm: (30,40,50), (20,48,52), (24,45,51) Given that L is the length of the wire, for how many values of L ≤ 1,500,000 can exactly one integer sided right angle triangle be formed? Solution: we generate all pythagorean triples using Euclid's formula and keep track of the frequencies of the perimeters. Reference: https://en.wikipedia.org/wiki/Pythagorean_triple#Generating_a_triple """ from collections import defaultdict from math import gcd def solution(limit: int = 1500000) -> int: """ Return the number of values of L <= limit such that a wire of length L can be formmed into an integer sided right angle triangle in exactly one way. >>> solution(50) 6 >>> solution(1000) 112 >>> solution(50000) 5502 """ frequencies: defaultdict = defaultdict(int) euclid_m = 2 while 2 * euclid_m * (euclid_m + 1) <= limit: for euclid_n in range((euclid_m % 2) + 1, euclid_m, 2): if gcd(euclid_m, euclid_n) > 1: continue primitive_perimeter = 2 * euclid_m * (euclid_m + euclid_n) for perimeter in range(primitive_perimeter, limit + 1, primitive_perimeter): frequencies[perimeter] += 1 euclid_m += 1 return sum(1 for frequency in frequencies.values() if frequency == 1) if __name__ == "__main__": print(f"{solution() = }")
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
# Author: Phyllipe Bezerra (https://github.com/pmba) clothes = { 0: "underwear", 1: "pants", 2: "belt", 3: "suit", 4: "shoe", 5: "socks", 6: "shirt", 7: "tie", 8: "watch", } graph = [[1, 4], [2, 4], [3], [], [], [4], [2, 7], [3], []] visited = [0 for x in range(len(graph))] stack = [] def print_stack(stack, clothes): order = 1 while stack: current_clothing = stack.pop() print(order, clothes[current_clothing]) order += 1 def depth_first_search(u, visited, graph): visited[u] = 1 for v in graph[u]: if not visited[v]: depth_first_search(v, visited, graph) stack.append(u) def topological_sort(graph, visited): for v in range(len(graph)): if not visited[v]: depth_first_search(v, visited, graph) if __name__ == "__main__": topological_sort(graph, visited) print(stack) print_stack(stack, clothes)
# Author: Phyllipe Bezerra (https://github.com/pmba) clothes = { 0: "underwear", 1: "pants", 2: "belt", 3: "suit", 4: "shoe", 5: "socks", 6: "shirt", 7: "tie", 8: "watch", } graph = [[1, 4], [2, 4], [3], [], [], [4], [2, 7], [3], []] visited = [0 for x in range(len(graph))] stack = [] def print_stack(stack, clothes): order = 1 while stack: current_clothing = stack.pop() print(order, clothes[current_clothing]) order += 1 def depth_first_search(u, visited, graph): visited[u] = 1 for v in graph[u]: if not visited[v]: depth_first_search(v, visited, graph) stack.append(u) def topological_sort(graph, visited): for v in range(len(graph)): if not visited[v]: depth_first_search(v, visited, graph) if __name__ == "__main__": topological_sort(graph, visited) print(stack) print_stack(stack, clothes)
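The file above sorts the dressing order with a recursive depth-first search and a stack. An equivalent ordering can be reached with Kahn's algorithm, which repeatedly removes zero in-degree nodes; the sketch below is independent of the file and uses a small illustrative graph rather than the clothes graph.

# Sketch: Kahn's algorithm (in-degree based topological sort), an alternative
# to the recursive DFS above. The mini-graph here is illustrative.
from collections import deque

def kahn_topological_sort(graph: dict[int, list[int]]) -> list[int]:
    in_degree = {node: 0 for node in graph}
    for neighbours in graph.values():
        for neighbour in neighbours:
            in_degree[neighbour] += 1
    queue = deque(node for node, degree in in_degree.items() if degree == 0)
    order = []
    while queue:
        node = queue.popleft()
        order.append(node)
        for neighbour in graph[node]:
            in_degree[neighbour] -= 1
            if in_degree[neighbour] == 0:
                queue.append(neighbour)
    if len(order) != len(graph):
        raise ValueError("graph contains a cycle; no topological order exists")
    return order

print(kahn_topological_sort({0: [1], 1: [2], 2: [3], 6: [7], 7: [3], 3: []}))  # [0, 6, 1, 7, 2, 3]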
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" An implementation of Karger's Algorithm for partitioning a graph. """ from __future__ import annotations import random # Adjacency list representation of this graph: # https://en.wikipedia.org/wiki/File:Single_run_of_Karger%E2%80%99s_Mincut_algorithm.svg TEST_GRAPH = { "1": ["2", "3", "4", "5"], "2": ["1", "3", "4", "5"], "3": ["1", "2", "4", "5", "10"], "4": ["1", "2", "3", "5", "6"], "5": ["1", "2", "3", "4", "7"], "6": ["7", "8", "9", "10", "4"], "7": ["6", "8", "9", "10", "5"], "8": ["6", "7", "9", "10"], "9": ["6", "7", "8", "10"], "10": ["6", "7", "8", "9", "3"], } def partition_graph(graph: dict[str, list[str]]) -> set[tuple[str, str]]: """ Partitions a graph using Karger's Algorithm. Implemented from pseudocode found here: https://en.wikipedia.org/wiki/Karger%27s_algorithm. This function involves random choices, meaning it will not give consistent outputs. Args: graph: A dictionary containing adacency lists for the graph. Nodes must be strings. Returns: The cutset of the cut found by Karger's Algorithm. >>> graph = {'0':['1'], '1':['0']} >>> partition_graph(graph) {('0', '1')} """ # Dict that maps contracted nodes to a list of all the nodes it "contains." contracted_nodes = {node: {node} for node in graph} graph_copy = {node: graph[node][:] for node in graph} while len(graph_copy) > 2: # Choose a random edge. u = random.choice(list(graph_copy.keys())) v = random.choice(graph_copy[u]) # Contract edge (u, v) to new node uv uv = u + v uv_neighbors = list(set(graph_copy[u] + graph_copy[v])) uv_neighbors.remove(u) uv_neighbors.remove(v) graph_copy[uv] = uv_neighbors for neighbor in uv_neighbors: graph_copy[neighbor].append(uv) contracted_nodes[uv] = set(contracted_nodes[u].union(contracted_nodes[v])) # Remove nodes u and v. del graph_copy[u] del graph_copy[v] for neighbor in uv_neighbors: if u in graph_copy[neighbor]: graph_copy[neighbor].remove(u) if v in graph_copy[neighbor]: graph_copy[neighbor].remove(v) # Find cutset. groups = [contracted_nodes[node] for node in graph_copy] return { (node, neighbor) for node in groups[0] for neighbor in graph[node] if neighbor in groups[1] } if __name__ == "__main__": print(partition_graph(TEST_GRAPH))
""" An implementation of Karger's Algorithm for partitioning a graph. """ from __future__ import annotations import random # Adjacency list representation of this graph: # https://en.wikipedia.org/wiki/File:Single_run_of_Karger%E2%80%99s_Mincut_algorithm.svg TEST_GRAPH = { "1": ["2", "3", "4", "5"], "2": ["1", "3", "4", "5"], "3": ["1", "2", "4", "5", "10"], "4": ["1", "2", "3", "5", "6"], "5": ["1", "2", "3", "4", "7"], "6": ["7", "8", "9", "10", "4"], "7": ["6", "8", "9", "10", "5"], "8": ["6", "7", "9", "10"], "9": ["6", "7", "8", "10"], "10": ["6", "7", "8", "9", "3"], } def partition_graph(graph: dict[str, list[str]]) -> set[tuple[str, str]]: """ Partitions a graph using Karger's Algorithm. Implemented from pseudocode found here: https://en.wikipedia.org/wiki/Karger%27s_algorithm. This function involves random choices, meaning it will not give consistent outputs. Args: graph: A dictionary containing adacency lists for the graph. Nodes must be strings. Returns: The cutset of the cut found by Karger's Algorithm. >>> graph = {'0':['1'], '1':['0']} >>> partition_graph(graph) {('0', '1')} """ # Dict that maps contracted nodes to a list of all the nodes it "contains." contracted_nodes = {node: {node} for node in graph} graph_copy = {node: graph[node][:] for node in graph} while len(graph_copy) > 2: # Choose a random edge. u = random.choice(list(graph_copy.keys())) v = random.choice(graph_copy[u]) # Contract edge (u, v) to new node uv uv = u + v uv_neighbors = list(set(graph_copy[u] + graph_copy[v])) uv_neighbors.remove(u) uv_neighbors.remove(v) graph_copy[uv] = uv_neighbors for neighbor in uv_neighbors: graph_copy[neighbor].append(uv) contracted_nodes[uv] = set(contracted_nodes[u].union(contracted_nodes[v])) # Remove nodes u and v. del graph_copy[u] del graph_copy[v] for neighbor in uv_neighbors: if u in graph_copy[neighbor]: graph_copy[neighbor].remove(u) if v in graph_copy[neighbor]: graph_copy[neighbor].remove(v) # Find cutset. groups = [contracted_nodes[node] for node in graph_copy] return { (node, neighbor) for node in groups[0] for neighbor in graph[node] if neighbor in groups[1] } if __name__ == "__main__": print(partition_graph(TEST_GRAPH))
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" Convert Base 10 (Decimal) Values to Hexadecimal Representations """ # set decimal value for each hexadecimal digit values = { 0: "0", 1: "1", 2: "2", 3: "3", 4: "4", 5: "5", 6: "6", 7: "7", 8: "8", 9: "9", 10: "a", 11: "b", 12: "c", 13: "d", 14: "e", 15: "f", } def decimal_to_hexadecimal(decimal: float) -> str: """ take integer decimal value, return hexadecimal representation as str beginning with 0x >>> decimal_to_hexadecimal(5) '0x5' >>> decimal_to_hexadecimal(15) '0xf' >>> decimal_to_hexadecimal(37) '0x25' >>> decimal_to_hexadecimal(255) '0xff' >>> decimal_to_hexadecimal(4096) '0x1000' >>> decimal_to_hexadecimal(999098) '0xf3eba' >>> # negatives work too >>> decimal_to_hexadecimal(-256) '-0x100' >>> # floats are acceptable if equivalent to an int >>> decimal_to_hexadecimal(17.0) '0x11' >>> # other floats will error >>> decimal_to_hexadecimal(16.16) # doctest: +ELLIPSIS Traceback (most recent call last): ... AssertionError >>> # strings will error as well >>> decimal_to_hexadecimal('0xfffff') # doctest: +ELLIPSIS Traceback (most recent call last): ... AssertionError >>> # results are the same when compared to Python's default hex function >>> decimal_to_hexadecimal(-256) == hex(-256) True """ assert type(decimal) in (int, float) and decimal == int(decimal) decimal = int(decimal) hexadecimal = "" negative = False if decimal < 0: negative = True decimal *= -1 while decimal > 0: decimal, remainder = divmod(decimal, 16) hexadecimal = values[remainder] + hexadecimal hexadecimal = "0x" + hexadecimal if negative: hexadecimal = "-" + hexadecimal return hexadecimal if __name__ == "__main__": import doctest doctest.testmod()
""" Convert Base 10 (Decimal) Values to Hexadecimal Representations """ # set decimal value for each hexadecimal digit values = { 0: "0", 1: "1", 2: "2", 3: "3", 4: "4", 5: "5", 6: "6", 7: "7", 8: "8", 9: "9", 10: "a", 11: "b", 12: "c", 13: "d", 14: "e", 15: "f", } def decimal_to_hexadecimal(decimal: float) -> str: """ take integer decimal value, return hexadecimal representation as str beginning with 0x >>> decimal_to_hexadecimal(5) '0x5' >>> decimal_to_hexadecimal(15) '0xf' >>> decimal_to_hexadecimal(37) '0x25' >>> decimal_to_hexadecimal(255) '0xff' >>> decimal_to_hexadecimal(4096) '0x1000' >>> decimal_to_hexadecimal(999098) '0xf3eba' >>> # negatives work too >>> decimal_to_hexadecimal(-256) '-0x100' >>> # floats are acceptable if equivalent to an int >>> decimal_to_hexadecimal(17.0) '0x11' >>> # other floats will error >>> decimal_to_hexadecimal(16.16) # doctest: +ELLIPSIS Traceback (most recent call last): ... AssertionError >>> # strings will error as well >>> decimal_to_hexadecimal('0xfffff') # doctest: +ELLIPSIS Traceback (most recent call last): ... AssertionError >>> # results are the same when compared to Python's default hex function >>> decimal_to_hexadecimal(-256) == hex(-256) True """ assert type(decimal) in (int, float) and decimal == int(decimal) decimal = int(decimal) hexadecimal = "" negative = False if decimal < 0: negative = True decimal *= -1 while decimal > 0: decimal, remainder = divmod(decimal, 16) hexadecimal = values[remainder] + hexadecimal hexadecimal = "0x" + hexadecimal if negative: hexadecimal = "-" + hexadecimal return hexadecimal if __name__ == "__main__": import doctest doctest.testmod()
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" Implements an is valid email address algorithm @ https://en.wikipedia.org/wiki/Email_address """ import string email_tests: tuple[tuple[str, bool], ...] = ( ("[email protected]", True), ("[email protected]", True), ("[email protected]", True), ("[email protected]", True), ("[email protected]", True), ("[email protected]", True), ("[email protected]", True), ("[email protected]", True), ("test/[email protected]", True), ( "123456789012345678901234567890123456789012345678901234567890123@example.com", True, ), ("admin@mailserver1", True), ("[email protected]", True), ("Abc.example.com", False), ("A@b@[email protected]", False), ("[email protected]", False), ("a(c)d,e:f;g<h>i[j\\k][email protected]", False), ( "12345678901234567890123456789012345678901234567890123456789012345@example.com", False, ), ("i.like.underscores@but_its_not_allowed_in_this_part", False), ("", False), ) # The maximum octets (one character as a standard unicode character is one byte) # that the local part and the domain part can have MAX_LOCAL_PART_OCTETS = 64 MAX_DOMAIN_OCTETS = 255 def is_valid_email_address(email: str) -> bool: """ Returns True if the passed email address is valid. The local part of the email precedes the singular @ symbol and is associated with a display-name. For example, "john.smith" The domain is stricter than the local part and follows the @ symbol. Global email checks: 1. There can only be one @ symbol in the email address. Technically if the @ symbol is quoted in the local-part, then it is valid, however this implementation ignores "" for now. (See https://en.wikipedia.org/wiki/Email_address#:~:text=If%20quoted,) 2. The local-part and the domain are limited to a certain number of octets. With unicode storing a single character in one byte, each octet is equivalent to a character. Hence, we can just check the length of the string. Checks for the local-part: 3. The local-part may contain: upper and lowercase latin letters, digits 0 to 9, and printable characters (!#$%&'*+-/=?^_`{|}~) 4. The local-part may also contain a "." in any place that is not the first or last character, and may not have more than one "." consecutively. Checks for the domain: 5. The domain may contain: upper and lowercase latin letters and digits 0 to 9 6. Hyphen "-", provided that it is not the first or last character 7. The domain may also contain a "." in any place that is not the first or last character, and may not have more than one "." consecutively. >>> for email, valid in email_tests: ... assert is_valid_email_address(email) == valid """ # (1.) Make sure that there is only one @ symbol in the email address if email.count("@") != 1: return False local_part, domain = email.split("@") # (2.) Check octet length of the local part and domain if len(local_part) > MAX_LOCAL_PART_OCTETS or len(domain) > MAX_DOMAIN_OCTETS: return False # (3.) Validate the characters in the local-part if any( char not in string.ascii_letters + string.digits + ".(!#$%&'*+-/=?^_`{|}~)" for char in local_part ): return False # (4.) Validate the placement of "." characters in the local-part if local_part.startswith(".") or local_part.endswith(".") or ".." in local_part: return False # (5.) Validate the characters in the domain if any(char not in string.ascii_letters + string.digits + ".-" for char in domain): return False # (6.) Validate the placement of "-" characters if domain.startswith("-") or domain.endswith("."): return False # (7.) Validate the placement of "." characters if domain.startswith(".") or domain.endswith(".") or ".." 
in domain: return False return True if __name__ == "__main__": import doctest doctest.testmod() for email, valid in email_tests: is_valid = is_valid_email_address(email) assert is_valid == valid, f"{email} is {is_valid}" print(f"Email address {email} is {'not ' if not is_valid else ''}valid")
""" Implements an is valid email address algorithm @ https://en.wikipedia.org/wiki/Email_address """ import string email_tests: tuple[tuple[str, bool], ...] = ( ("[email protected]", True), ("[email protected]", True), ("[email protected]", True), ("[email protected]", True), ("[email protected]", True), ("[email protected]", True), ("[email protected]", True), ("[email protected]", True), ("test/[email protected]", True), ( "123456789012345678901234567890123456789012345678901234567890123@example.com", True, ), ("admin@mailserver1", True), ("[email protected]", True), ("Abc.example.com", False), ("A@b@[email protected]", False), ("[email protected]", False), ("a(c)d,e:f;g<h>i[j\\k][email protected]", False), ( "12345678901234567890123456789012345678901234567890123456789012345@example.com", False, ), ("i.like.underscores@but_its_not_allowed_in_this_part", False), ("", False), ) # The maximum octets (one character as a standard unicode character is one byte) # that the local part and the domain part can have MAX_LOCAL_PART_OCTETS = 64 MAX_DOMAIN_OCTETS = 255 def is_valid_email_address(email: str) -> bool: """ Returns True if the passed email address is valid. The local part of the email precedes the singular @ symbol and is associated with a display-name. For example, "john.smith" The domain is stricter than the local part and follows the @ symbol. Global email checks: 1. There can only be one @ symbol in the email address. Technically if the @ symbol is quoted in the local-part, then it is valid, however this implementation ignores "" for now. (See https://en.wikipedia.org/wiki/Email_address#:~:text=If%20quoted,) 2. The local-part and the domain are limited to a certain number of octets. With unicode storing a single character in one byte, each octet is equivalent to a character. Hence, we can just check the length of the string. Checks for the local-part: 3. The local-part may contain: upper and lowercase latin letters, digits 0 to 9, and printable characters (!#$%&'*+-/=?^_`{|}~) 4. The local-part may also contain a "." in any place that is not the first or last character, and may not have more than one "." consecutively. Checks for the domain: 5. The domain may contain: upper and lowercase latin letters and digits 0 to 9 6. Hyphen "-", provided that it is not the first or last character 7. The domain may also contain a "." in any place that is not the first or last character, and may not have more than one "." consecutively. >>> for email, valid in email_tests: ... assert is_valid_email_address(email) == valid """ # (1.) Make sure that there is only one @ symbol in the email address if email.count("@") != 1: return False local_part, domain = email.split("@") # (2.) Check octet length of the local part and domain if len(local_part) > MAX_LOCAL_PART_OCTETS or len(domain) > MAX_DOMAIN_OCTETS: return False # (3.) Validate the characters in the local-part if any( char not in string.ascii_letters + string.digits + ".(!#$%&'*+-/=?^_`{|}~)" for char in local_part ): return False # (4.) Validate the placement of "." characters in the local-part if local_part.startswith(".") or local_part.endswith(".") or ".." in local_part: return False # (5.) Validate the characters in the domain if any(char not in string.ascii_letters + string.digits + ".-" for char in domain): return False # (6.) Validate the placement of "-" characters if domain.startswith("-") or domain.endswith("."): return False # (7.) Validate the placement of "." characters if domain.startswith(".") or domain.endswith(".") or ".." 
in domain: return False return True if __name__ == "__main__": import doctest doctest.testmod() for email, valid in email_tests: is_valid = is_valid_email_address(email) assert is_valid == valid, f"{email} is {is_valid}" print(f"Email address {email} is {'not ' if not is_valid else ''}valid")
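One part of the docstring that is easy to gloss over is rule 2, the octet limits of 64 for the local part and 255 for the domain. The sketch below exercises exactly those boundaries; it assumes is_valid_email_address from the file above is in scope, and the constructed addresses are purely illustrative.

# Sketch: boundary cases for the octet limits (rule 2 in the docstring above).
# Assumes `is_valid_email_address` from the file above is in scope; the
# addresses below are constructed purely for illustration.
local_64 = "a" * 64                      # exactly MAX_LOCAL_PART_OCTETS
local_65 = "a" * 65                      # one octet too many
assert is_valid_email_address(f"{local_64}@example.com") is True
assert is_valid_email_address(f"{local_65}@example.com") is False

domain_255 = "d" * 251 + ".com"          # exactly MAX_DOMAIN_OCTETS
domain_256 = "d" * 252 + ".com"          # one octet too many
assert is_valid_email_address(f"user@{domain_255}") is True
assert is_valid_email_address(f"user@{domain_256}") is False
print("octet-limit boundaries behave as documented")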
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" guess the number using lower,higher and the value to find or guess solution works by dividing lower and higher of number guessed suppose lower is 0, higher is 1000 and the number to guess is 355 >>> guess_the_number(10, 1000, 17) started... guess the number : 17 details : [505, 257, 133, 71, 40, 25, 17] """ def temp_input_value( min_val: int = 10, max_val: int = 1000, option: bool = True ) -> int: """ Temporary input values for tests >>> temp_input_value(option=True) 10 >>> temp_input_value(option=False) 1000 >>> temp_input_value(min_val=100, option=True) 100 >>> temp_input_value(min_val=100, max_val=50) Traceback (most recent call last): ... ValueError: Invalid value for min_val or max_val (min_value < max_value) >>> temp_input_value("ten","fifty",1) Traceback (most recent call last): ... AssertionError: Invalid type of value(s) specified to function! >>> temp_input_value(min_val=-100, max_val=500) -100 >>> temp_input_value(min_val=-5100, max_val=-100) -5100 """ assert ( isinstance(min_val, int) and isinstance(max_val, int) and isinstance(option, bool) ), "Invalid type of value(s) specified to function!" if min_val > max_val: raise ValueError("Invalid value for min_val or max_val (min_value < max_value)") return min_val if option else max_val def get_avg(number_1: int, number_2: int) -> int: """ Return the mid-number(whole) of two integers a and b >>> get_avg(10, 15) 12 >>> get_avg(20, 300) 160 >>> get_avg("abcd", 300) Traceback (most recent call last): ... TypeError: can only concatenate str (not "int") to str >>> get_avg(10.5,50.25) 30 """ return int((number_1 + number_2) / 2) def guess_the_number(lower: int, higher: int, to_guess: int) -> None: """ The `guess_the_number` function that guess the number by some operations and using inner functions >>> guess_the_number(10, 1000, 17) started... guess the number : 17 details : [505, 257, 133, 71, 40, 25, 17] >>> guess_the_number(-10000, 10000, 7) started... guess the number : 7 details : [0, 5000, 2500, 1250, 625, 312, 156, 78, 39, 19, 9, 4, 6, 7] >>> guess_the_number(10, 1000, "a") Traceback (most recent call last): ... AssertionError: argument values must be type of "int" >>> guess_the_number(10, 1000, 5) Traceback (most recent call last): ... ValueError: guess value must be within the range of lower and higher value >>> guess_the_number(10000, 100, 5) Traceback (most recent call last): ... 
ValueError: argument value for lower and higher must be(lower > higher) """ assert ( isinstance(lower, int) and isinstance(higher, int) and isinstance(to_guess, int) ), 'argument values must be type of "int"' if lower > higher: raise ValueError("argument value for lower and higher must be(lower > higher)") if not lower < to_guess < higher: raise ValueError( "guess value must be within the range of lower and higher value" ) def answer(number: int) -> str: """ Returns value by comparing with entered `to_guess` number """ if number > to_guess: return "high" elif number < to_guess: return "low" else: return "same" print("started...") last_lowest = lower last_highest = higher last_numbers = [] while True: number = get_avg(last_lowest, last_highest) last_numbers.append(number) if answer(number) == "low": last_lowest = number elif answer(number) == "high": last_highest = number else: break print(f"guess the number : {last_numbers[-1]}") print(f"details : {last_numbers!s}") def main() -> None: """ starting point or function of script """ lower = int(input("Enter lower value : ").strip()) higher = int(input("Enter high value : ").strip()) guess = int(input("Enter value to guess : ").strip()) guess_the_number(lower, higher, guess) if __name__ == "__main__": main()
""" guess the number using lower,higher and the value to find or guess solution works by dividing lower and higher of number guessed suppose lower is 0, higher is 1000 and the number to guess is 355 >>> guess_the_number(10, 1000, 17) started... guess the number : 17 details : [505, 257, 133, 71, 40, 25, 17] """ def temp_input_value( min_val: int = 10, max_val: int = 1000, option: bool = True ) -> int: """ Temporary input values for tests >>> temp_input_value(option=True) 10 >>> temp_input_value(option=False) 1000 >>> temp_input_value(min_val=100, option=True) 100 >>> temp_input_value(min_val=100, max_val=50) Traceback (most recent call last): ... ValueError: Invalid value for min_val or max_val (min_value < max_value) >>> temp_input_value("ten","fifty",1) Traceback (most recent call last): ... AssertionError: Invalid type of value(s) specified to function! >>> temp_input_value(min_val=-100, max_val=500) -100 >>> temp_input_value(min_val=-5100, max_val=-100) -5100 """ assert ( isinstance(min_val, int) and isinstance(max_val, int) and isinstance(option, bool) ), "Invalid type of value(s) specified to function!" if min_val > max_val: raise ValueError("Invalid value for min_val or max_val (min_value < max_value)") return min_val if option else max_val def get_avg(number_1: int, number_2: int) -> int: """ Return the mid-number(whole) of two integers a and b >>> get_avg(10, 15) 12 >>> get_avg(20, 300) 160 >>> get_avg("abcd", 300) Traceback (most recent call last): ... TypeError: can only concatenate str (not "int") to str >>> get_avg(10.5,50.25) 30 """ return int((number_1 + number_2) / 2) def guess_the_number(lower: int, higher: int, to_guess: int) -> None: """ The `guess_the_number` function that guess the number by some operations and using inner functions >>> guess_the_number(10, 1000, 17) started... guess the number : 17 details : [505, 257, 133, 71, 40, 25, 17] >>> guess_the_number(-10000, 10000, 7) started... guess the number : 7 details : [0, 5000, 2500, 1250, 625, 312, 156, 78, 39, 19, 9, 4, 6, 7] >>> guess_the_number(10, 1000, "a") Traceback (most recent call last): ... AssertionError: argument values must be type of "int" >>> guess_the_number(10, 1000, 5) Traceback (most recent call last): ... ValueError: guess value must be within the range of lower and higher value >>> guess_the_number(10000, 100, 5) Traceback (most recent call last): ... 
ValueError: argument value for lower and higher must be(lower > higher) """ assert ( isinstance(lower, int) and isinstance(higher, int) and isinstance(to_guess, int) ), 'argument values must be type of "int"' if lower > higher: raise ValueError("argument value for lower and higher must be(lower > higher)") if not lower < to_guess < higher: raise ValueError( "guess value must be within the range of lower and higher value" ) def answer(number: int) -> str: """ Returns value by comparing with entered `to_guess` number """ if number > to_guess: return "high" elif number < to_guess: return "low" else: return "same" print("started...") last_lowest = lower last_highest = higher last_numbers = [] while True: number = get_avg(last_lowest, last_highest) last_numbers.append(number) if answer(number) == "low": last_lowest = number elif answer(number) == "high": last_highest = number else: break print(f"guess the number : {last_numbers[-1]}") print(f"details : {last_numbers!s}") def main() -> None: """ starting point or function of script """ lower = int(input("Enter lower value : ").strip()) higher = int(input("Enter high value : ").strip()) guess = int(input("Enter value to guess : ").strip()) guess_the_number(lower, higher, guess) if __name__ == "__main__": main()
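Stripped of the validation and I/O, the guessing loop above is a binary search over an integer range, with get_avg supplying the midpoint. A bare sketch of that core follows; the bounds and target are illustrative, and the printed trace matches the first doctest.

# Sketch: the core of the guessing loop as a bare binary search over integers.
# Bounds and target are illustrative; the file above adds validation and I/O.
def binary_guess(lower: int, higher: int, target: int) -> list[int]:
    guesses = []
    while True:
        midpoint = (lower + higher) // 2   # same midpoint as get_avg for positive bounds
        guesses.append(midpoint)
        if midpoint < target:
            lower = midpoint               # guessed too low: raise the lower bound
        elif midpoint > target:
            higher = midpoint              # guessed too high: lower the upper bound
        else:
            return guesses

print(binary_guess(10, 1000, 17))  # [505, 257, 133, 71, 40, 25, 17], as in the doctest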
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" This is pure Python implementation of fibonacci search. Resources used: https://en.wikipedia.org/wiki/Fibonacci_search_technique For doctests run following command: python3 -m doctest -v fibonacci_search.py For manual testing run: python3 fibonacci_search.py """ from functools import lru_cache @lru_cache def fibonacci(k: int) -> int: """Finds fibonacci number in index k. Parameters ---------- k : Index of fibonacci. Returns ------- int Fibonacci number in position k. >>> fibonacci(0) 0 >>> fibonacci(2) 1 >>> fibonacci(5) 5 >>> fibonacci(15) 610 >>> fibonacci('a') Traceback (most recent call last): TypeError: k must be an integer. >>> fibonacci(-5) Traceback (most recent call last): ValueError: k integer must be greater or equal to zero. """ if not isinstance(k, int): raise TypeError("k must be an integer.") if k < 0: raise ValueError("k integer must be greater or equal to zero.") if k == 0: return 0 elif k == 1: return 1 else: return fibonacci(k - 1) + fibonacci(k - 2) def fibonacci_search(arr: list, val: int) -> int: """A pure Python implementation of a fibonacci search algorithm. Parameters ---------- arr List of sorted elements. val Element to search in list. Returns ------- int The index of the element in the array. -1 if the element is not found. >>> fibonacci_search([4, 5, 6, 7], 4) 0 >>> fibonacci_search([4, 5, 6, 7], -10) -1 >>> fibonacci_search([-18, 2], -18) 0 >>> fibonacci_search([5], 5) 0 >>> fibonacci_search(['a', 'c', 'd'], 'c') 1 >>> fibonacci_search(['a', 'c', 'd'], 'f') -1 >>> fibonacci_search([], 1) -1 >>> fibonacci_search([.1, .4 , 7], .4) 1 >>> fibonacci_search([], 9) -1 >>> fibonacci_search(list(range(100)), 63) 63 >>> fibonacci_search(list(range(100)), 99) 99 >>> fibonacci_search(list(range(-100, 100, 3)), -97) 1 >>> fibonacci_search(list(range(-100, 100, 3)), 0) -1 >>> fibonacci_search(list(range(-100, 100, 5)), 0) 20 >>> fibonacci_search(list(range(-100, 100, 5)), 95) 39 """ len_list = len(arr) # Find m such that F_m >= n where F_i is the i_th fibonacci number. i = 0 while True: if fibonacci(i) >= len_list: fibb_k = i break i += 1 offset = 0 while fibb_k > 0: index_k = min( offset + fibonacci(fibb_k - 1), len_list - 1 ) # Prevent out of range item_k_1 = arr[index_k] if item_k_1 == val: return index_k elif val < item_k_1: fibb_k -= 1 elif val > item_k_1: offset += fibonacci(fibb_k - 1) fibb_k -= 2 else: return -1 if __name__ == "__main__": import doctest doctest.testmod()
""" This is pure Python implementation of fibonacci search. Resources used: https://en.wikipedia.org/wiki/Fibonacci_search_technique For doctests run following command: python3 -m doctest -v fibonacci_search.py For manual testing run: python3 fibonacci_search.py """ from functools import lru_cache @lru_cache def fibonacci(k: int) -> int: """Finds fibonacci number in index k. Parameters ---------- k : Index of fibonacci. Returns ------- int Fibonacci number in position k. >>> fibonacci(0) 0 >>> fibonacci(2) 1 >>> fibonacci(5) 5 >>> fibonacci(15) 610 >>> fibonacci('a') Traceback (most recent call last): TypeError: k must be an integer. >>> fibonacci(-5) Traceback (most recent call last): ValueError: k integer must be greater or equal to zero. """ if not isinstance(k, int): raise TypeError("k must be an integer.") if k < 0: raise ValueError("k integer must be greater or equal to zero.") if k == 0: return 0 elif k == 1: return 1 else: return fibonacci(k - 1) + fibonacci(k - 2) def fibonacci_search(arr: list, val: int) -> int: """A pure Python implementation of a fibonacci search algorithm. Parameters ---------- arr List of sorted elements. val Element to search in list. Returns ------- int The index of the element in the array. -1 if the element is not found. >>> fibonacci_search([4, 5, 6, 7], 4) 0 >>> fibonacci_search([4, 5, 6, 7], -10) -1 >>> fibonacci_search([-18, 2], -18) 0 >>> fibonacci_search([5], 5) 0 >>> fibonacci_search(['a', 'c', 'd'], 'c') 1 >>> fibonacci_search(['a', 'c', 'd'], 'f') -1 >>> fibonacci_search([], 1) -1 >>> fibonacci_search([.1, .4 , 7], .4) 1 >>> fibonacci_search([], 9) -1 >>> fibonacci_search(list(range(100)), 63) 63 >>> fibonacci_search(list(range(100)), 99) 99 >>> fibonacci_search(list(range(-100, 100, 3)), -97) 1 >>> fibonacci_search(list(range(-100, 100, 3)), 0) -1 >>> fibonacci_search(list(range(-100, 100, 5)), 0) 20 >>> fibonacci_search(list(range(-100, 100, 5)), 95) 39 """ len_list = len(arr) # Find m such that F_m >= n where F_i is the i_th fibonacci number. i = 0 while True: if fibonacci(i) >= len_list: fibb_k = i break i += 1 offset = 0 while fibb_k > 0: index_k = min( offset + fibonacci(fibb_k - 1), len_list - 1 ) # Prevent out of range item_k_1 = arr[index_k] if item_k_1 == val: return index_k elif val < item_k_1: fibb_k -= 1 elif val > item_k_1: offset += fibonacci(fibb_k - 1) fibb_k -= 2 else: return -1 if __name__ == "__main__": import doctest doctest.testmod()
-1
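A minimal standalone sketch of the Fibonacci-search idea stored in the before/after fields above (illustrative only, not part of the recorded content; fib_search and fibs are hypothetical names): precompute Fibonacci numbers up to the list length, then shrink the search window by Fibonacci-sized steps instead of halving it as in binary search.

def fib_search(arr: list, val) -> int:
    if not arr:
        return -1
    # Precompute Fibonacci numbers until one covers len(arr).
    fibs = [0, 1]
    while fibs[-1] < len(arr):
        fibs.append(fibs[-1] + fibs[-2])
    offset, k = 0, len(fibs) - 1
    while k > 0:
        i = min(offset + fibs[k - 1], len(arr) - 1)  # probe index, clamped in range
        if arr[i] == val:
            return i
        if val < arr[i]:
            k -= 1                                   # discard the right part of the window
        else:
            offset, k = offset + fibs[k - 1], k - 2  # discard the left part of the window
    return -1

print(fib_search(list(range(0, 100, 3)), 51))  # 17
print(fib_search(list(range(0, 100, 3)), 50))  # -1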
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
# https://en.wikipedia.org/wiki/Lowest_common_ancestor # https://en.wikipedia.org/wiki/Breadth-first_search from __future__ import annotations from queue import Queue def swap(a: int, b: int) -> tuple[int, int]: """ Return a tuple (b, a) when given two integers a and b >>> swap(2,3) (3, 2) >>> swap(3,4) (4, 3) >>> swap(67, 12) (12, 67) """ a ^= b b ^= a a ^= b return a, b def create_sparse(max_node: int, parent: list[list[int]]) -> list[list[int]]: """ creating sparse table which saves each nodes 2^i-th parent """ j = 1 while (1 << j) < max_node: for i in range(1, max_node + 1): parent[j][i] = parent[j - 1][parent[j - 1][i]] j += 1 return parent # returns lca of node u,v def lowest_common_ancestor( u: int, v: int, level: list[int], parent: list[list[int]] ) -> int: # u must be deeper in the tree than v if level[u] < level[v]: u, v = swap(u, v) # making depth of u same as depth of v for i in range(18, -1, -1): if level[u] - (1 << i) >= level[v]: u = parent[i][u] # at the same depth if u==v that mean lca is found if u == v: return u # moving both nodes upwards till lca in found for i in range(18, -1, -1): if parent[i][u] not in [0, parent[i][v]]: u, v = parent[i][u], parent[i][v] # returning longest common ancestor of u,v return parent[0][u] # runs a breadth first search from root node of the tree def breadth_first_search( level: list[int], parent: list[list[int]], max_node: int, graph: dict[int, list[int]], root: int = 1, ) -> tuple[list[int], list[list[int]]]: """ sets every nodes direct parent parent of root node is set to 0 calculates depth of each node from root node """ level[root] = 0 q: Queue[int] = Queue(maxsize=max_node) q.put(root) while q.qsize() != 0: u = q.get() for v in graph[u]: if level[v] == -1: level[v] = level[u] + 1 q.put(v) parent[0][v] = u return level, parent def main() -> None: max_node = 13 # initializing with 0 parent = [[0 for _ in range(max_node + 10)] for _ in range(20)] # initializing with -1 which means every node is unvisited level = [-1 for _ in range(max_node + 10)] graph: dict[int, list[int]] = { 1: [2, 3, 4], 2: [5], 3: [6, 7], 4: [8], 5: [9, 10], 6: [11], 7: [], 8: [12, 13], 9: [], 10: [], 11: [], 12: [], 13: [], } level, parent = breadth_first_search(level, parent, max_node, graph, 1) parent = create_sparse(max_node, parent) print("LCA of node 1 and 3 is: ", lowest_common_ancestor(1, 3, level, parent)) print("LCA of node 5 and 6 is: ", lowest_common_ancestor(5, 6, level, parent)) print("LCA of node 7 and 11 is: ", lowest_common_ancestor(7, 11, level, parent)) print("LCA of node 6 and 7 is: ", lowest_common_ancestor(6, 7, level, parent)) print("LCA of node 4 and 12 is: ", lowest_common_ancestor(4, 12, level, parent)) print("LCA of node 8 and 8 is: ", lowest_common_ancestor(8, 8, level, parent)) if __name__ == "__main__": main()
# https://en.wikipedia.org/wiki/Lowest_common_ancestor # https://en.wikipedia.org/wiki/Breadth-first_search from __future__ import annotations from queue import Queue def swap(a: int, b: int) -> tuple[int, int]: """ Return a tuple (b, a) when given two integers a and b >>> swap(2,3) (3, 2) >>> swap(3,4) (4, 3) >>> swap(67, 12) (12, 67) """ a ^= b b ^= a a ^= b return a, b def create_sparse(max_node: int, parent: list[list[int]]) -> list[list[int]]: """ creating sparse table which saves each nodes 2^i-th parent """ j = 1 while (1 << j) < max_node: for i in range(1, max_node + 1): parent[j][i] = parent[j - 1][parent[j - 1][i]] j += 1 return parent # returns lca of node u,v def lowest_common_ancestor( u: int, v: int, level: list[int], parent: list[list[int]] ) -> int: # u must be deeper in the tree than v if level[u] < level[v]: u, v = swap(u, v) # making depth of u same as depth of v for i in range(18, -1, -1): if level[u] - (1 << i) >= level[v]: u = parent[i][u] # at the same depth if u==v that mean lca is found if u == v: return u # moving both nodes upwards till lca in found for i in range(18, -1, -1): if parent[i][u] not in [0, parent[i][v]]: u, v = parent[i][u], parent[i][v] # returning longest common ancestor of u,v return parent[0][u] # runs a breadth first search from root node of the tree def breadth_first_search( level: list[int], parent: list[list[int]], max_node: int, graph: dict[int, list[int]], root: int = 1, ) -> tuple[list[int], list[list[int]]]: """ sets every nodes direct parent parent of root node is set to 0 calculates depth of each node from root node """ level[root] = 0 q: Queue[int] = Queue(maxsize=max_node) q.put(root) while q.qsize() != 0: u = q.get() for v in graph[u]: if level[v] == -1: level[v] = level[u] + 1 q.put(v) parent[0][v] = u return level, parent def main() -> None: max_node = 13 # initializing with 0 parent = [[0 for _ in range(max_node + 10)] for _ in range(20)] # initializing with -1 which means every node is unvisited level = [-1 for _ in range(max_node + 10)] graph: dict[int, list[int]] = { 1: [2, 3, 4], 2: [5], 3: [6, 7], 4: [8], 5: [9, 10], 6: [11], 7: [], 8: [12, 13], 9: [], 10: [], 11: [], 12: [], 13: [], } level, parent = breadth_first_search(level, parent, max_node, graph, 1) parent = create_sparse(max_node, parent) print("LCA of node 1 and 3 is: ", lowest_common_ancestor(1, 3, level, parent)) print("LCA of node 5 and 6 is: ", lowest_common_ancestor(5, 6, level, parent)) print("LCA of node 7 and 11 is: ", lowest_common_ancestor(7, 11, level, parent)) print("LCA of node 6 and 7 is: ", lowest_common_ancestor(6, 7, level, parent)) print("LCA of node 4 and 12 is: ", lowest_common_ancestor(4, 12, level, parent)) print("LCA of node 8 and 8 is: ", lowest_common_ancestor(8, 8, level, parent)) if __name__ == "__main__": main()
-1
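An illustrative sketch of the binary-lifting LCA idea held in the two content fields above (not part of the recorded content; build_table, lca and the fixed table depth of 5 are hypothetical choices), run on the same example tree as that file's main():

from collections import deque

def build_table(graph: dict, root: int, log: int = 5):
    # BFS assigns a depth and a direct parent; up[v][i] is v's 2**i-th ancestor.
    depth = {root: 0}
    up = {root: [root] * log}
    queue = deque([root])
    while queue:
        u = queue.popleft()
        for v in graph[u]:
            if v not in depth:
                depth[v] = depth[u] + 1
                up[v] = [u] * log
                for i in range(1, log):
                    up[v][i] = up[up[v][i - 1]][i - 1]
                queue.append(v)
    return depth, up

def lca(u: int, v: int, depth: dict, up: dict) -> int:
    if depth[u] < depth[v]:            # make u the deeper node
        u, v = v, u
    log = len(up[u])
    for i in reversed(range(log)):     # lift u to v's depth
        if depth[u] - (1 << i) >= depth[v]:
            u = up[u][i]
    if u == v:
        return u
    for i in reversed(range(log)):     # lift both while their ancestors differ
        if up[u][i] != up[v][i]:
            u, v = up[u][i], up[v][i]
    return up[u][0]

graph = {1: [2, 3, 4], 2: [5], 3: [6, 7], 4: [8], 5: [9, 10],
         6: [11], 7: [], 8: [12, 13], 9: [], 10: [], 11: [], 12: [], 13: []}
depth, up = build_table(graph, 1)
print(lca(7, 11, depth, up))  # 3
print(lca(4, 12, depth, up))  # 4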
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
def alternative_list_arrange(first_input_list: list, second_input_list: list) -> list: """ The method arranges two lists as one list in alternative forms of the list elements. :param first_input_list: :param second_input_list: :return: List >>> alternative_list_arrange([1, 2, 3, 4, 5], ["A", "B", "C"]) [1, 'A', 2, 'B', 3, 'C', 4, 5] >>> alternative_list_arrange(["A", "B", "C"], [1, 2, 3, 4, 5]) ['A', 1, 'B', 2, 'C', 3, 4, 5] >>> alternative_list_arrange(["X", "Y", "Z"], [9, 8, 7, 6]) ['X', 9, 'Y', 8, 'Z', 7, 6] >>> alternative_list_arrange([1, 2, 3, 4, 5], []) [1, 2, 3, 4, 5] """ first_input_list_length: int = len(first_input_list) second_input_list_length: int = len(second_input_list) abs_length: int = ( first_input_list_length if first_input_list_length > second_input_list_length else second_input_list_length ) output_result_list: list = [] for char_count in range(abs_length): if char_count < first_input_list_length: output_result_list.append(first_input_list[char_count]) if char_count < second_input_list_length: output_result_list.append(second_input_list[char_count]) return output_result_list if __name__ == "__main__": print(alternative_list_arrange(["A", "B", "C"], [1, 2, 3, 4, 5]), end=" ")
def alternative_list_arrange(first_input_list: list, second_input_list: list) -> list: """ The method arranges two lists as one list in alternative forms of the list elements. :param first_input_list: :param second_input_list: :return: List >>> alternative_list_arrange([1, 2, 3, 4, 5], ["A", "B", "C"]) [1, 'A', 2, 'B', 3, 'C', 4, 5] >>> alternative_list_arrange(["A", "B", "C"], [1, 2, 3, 4, 5]) ['A', 1, 'B', 2, 'C', 3, 4, 5] >>> alternative_list_arrange(["X", "Y", "Z"], [9, 8, 7, 6]) ['X', 9, 'Y', 8, 'Z', 7, 6] >>> alternative_list_arrange([1, 2, 3, 4, 5], []) [1, 2, 3, 4, 5] """ first_input_list_length: int = len(first_input_list) second_input_list_length: int = len(second_input_list) abs_length: int = ( first_input_list_length if first_input_list_length > second_input_list_length else second_input_list_length ) output_result_list: list = [] for char_count in range(abs_length): if char_count < first_input_list_length: output_result_list.append(first_input_list[char_count]) if char_count < second_input_list_length: output_result_list.append(second_input_list[char_count]) return output_result_list if __name__ == "__main__": print(alternative_list_arrange(["A", "B", "C"], [1, 2, 3, 4, 5]), end=" ")
-1
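An equivalent interleaving sketch built on the standard library (illustrative only, not part of the recorded content; interleave is a hypothetical name): zip_longest pairs the two lists and a sentinel object filters out the padding added for the shorter one.

from itertools import chain, zip_longest

def interleave(first: list, second: list) -> list:
    sentinel = object()  # unique padding value that cannot collide with real items
    paired = zip_longest(first, second, fillvalue=sentinel)
    return [item for item in chain.from_iterable(paired) if item is not sentinel]

print(interleave([1, 2, 3, 4, 5], ["A", "B", "C"]))  # [1, 'A', 2, 'B', 3, 'C', 4, 5]
print(interleave(["X", "Y", "Z"], [9, 8, 7, 6]))     # ['X', 9, 'Y', 8, 'Z', 7, 6]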
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" This script demonstrates an implementation of the Gaussian Error Linear Unit function. * https://en.wikipedia.org/wiki/Activation_function#Comparison_of_activation_functions The function takes a vector of K real numbers as input and returns x * sigmoid(1.702*x). Gaussian Error Linear Unit (GELU) is a high-performing neural network activation function. This script is inspired by a corresponding research paper. * https://arxiv.org/abs/1606.08415 """ import numpy as np def sigmoid(vector: np.ndarray) -> np.ndarray: """ Mathematical function sigmoid takes a vector x of K real numbers as input and returns 1/ (1 + e^-x). https://en.wikipedia.org/wiki/Sigmoid_function >>> sigmoid(np.array([-1.0, 1.0, 2.0])) array([0.26894142, 0.73105858, 0.88079708]) """ return 1 / (1 + np.exp(-vector)) def gaussian_error_linear_unit(vector: np.ndarray) -> np.ndarray: """ Implements the Gaussian Error Linear Unit (GELU) function Parameters: vector (np.ndarray): A numpy array of shape (1, n) consisting of real values Returns: gelu_vec (np.ndarray): The input numpy array, after applying gelu Examples: >>> gaussian_error_linear_unit(np.array([-1.0, 1.0, 2.0])) array([-0.15420423, 0.84579577, 1.93565862]) >>> gaussian_error_linear_unit(np.array([-3])) array([-0.01807131]) """ return vector * sigmoid(1.702 * vector) if __name__ == "__main__": import doctest doctest.testmod()
""" This script demonstrates an implementation of the Gaussian Error Linear Unit function. * https://en.wikipedia.org/wiki/Activation_function#Comparison_of_activation_functions The function takes a vector of K real numbers as input and returns x * sigmoid(1.702*x). Gaussian Error Linear Unit (GELU) is a high-performing neural network activation function. This script is inspired by a corresponding research paper. * https://arxiv.org/abs/1606.08415 """ import numpy as np def sigmoid(vector: np.ndarray) -> np.ndarray: """ Mathematical function sigmoid takes a vector x of K real numbers as input and returns 1/ (1 + e^-x). https://en.wikipedia.org/wiki/Sigmoid_function >>> sigmoid(np.array([-1.0, 1.0, 2.0])) array([0.26894142, 0.73105858, 0.88079708]) """ return 1 / (1 + np.exp(-vector)) def gaussian_error_linear_unit(vector: np.ndarray) -> np.ndarray: """ Implements the Gaussian Error Linear Unit (GELU) function Parameters: vector (np.ndarray): A numpy array of shape (1, n) consisting of real values Returns: gelu_vec (np.ndarray): The input numpy array, after applying gelu Examples: >>> gaussian_error_linear_unit(np.array([-1.0, 1.0, 2.0])) array([-0.15420423, 0.84579577, 1.93565862]) >>> gaussian_error_linear_unit(np.array([-3])) array([-0.01807131]) """ return vector * sigmoid(1.702 * vector) if __name__ == "__main__": import doctest doctest.testmod()
-1
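A quick cross-check sketch for the stored GELU approximation (illustrative only, not part of the recorded content): the exact erf-based form 0.5 * x * (1 + erf(x / sqrt(2))) from the referenced paper next to the x * sigmoid(1.702 * x) approximation, using only the standard library; the two columns should stay close.

import math

def gelu_exact(x: float) -> float:
    # 0.5 * x * (1 + erf(x / sqrt(2))), the exact Gaussian-CDF form
    return 0.5 * x * (1.0 + math.erf(x / math.sqrt(2.0)))

def gelu_sigmoid(x: float) -> float:
    # the approximation used in the stored file, x * sigmoid(1.702 * x)
    return x / (1.0 + math.exp(-1.702 * x))

for x in (-3.0, -1.0, 0.5, 2.0):
    print(f"x={x:+.1f}  exact={gelu_exact(x):+.6f}  approx={gelu_sigmoid(x):+.6f}")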
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" author: Christian Bender date: 21.12.2017 class: XORCipher This class implements the XOR-cipher algorithm and provides some useful methods for encrypting and decrypting strings and files. Overview about methods - encrypt : list of char - decrypt : list of char - encrypt_string : str - decrypt_string : str - encrypt_file : boolean - decrypt_file : boolean """ from __future__ import annotations class XORCipher: def __init__(self, key: int = 0): """ simple constructor that receives a key or uses default key = 0 """ # private field self.__key = key def encrypt(self, content: str, key: int) -> list[str]: """ input: 'content' of type string and 'key' of type int output: encrypted string 'content' as a list of chars if key not passed the method uses the key by the constructor. otherwise key = 1 """ # precondition assert isinstance(key, int) and isinstance(content, str) key = key or self.__key or 1 # make sure key is an appropriate size key %= 255 return [chr(ord(ch) ^ key) for ch in content] def decrypt(self, content: str, key: int) -> list[str]: """ input: 'content' of type list and 'key' of type int output: decrypted string 'content' as a list of chars if key not passed the method uses the key by the constructor. otherwise key = 1 """ # precondition assert isinstance(key, int) and isinstance(content, list) key = key or self.__key or 1 # make sure key is an appropriate size key %= 255 return [chr(ord(ch) ^ key) for ch in content] def encrypt_string(self, content: str, key: int = 0) -> str: """ input: 'content' of type string and 'key' of type int output: encrypted string 'content' if key not passed the method uses the key by the constructor. otherwise key = 1 """ # precondition assert isinstance(key, int) and isinstance(content, str) key = key or self.__key or 1 # make sure key can be any size while key > 255: key -= 255 # This will be returned ans = "" for ch in content: ans += chr(ord(ch) ^ key) return ans def decrypt_string(self, content: str, key: int = 0) -> str: """ input: 'content' of type string and 'key' of type int output: decrypted string 'content' if key not passed the method uses the key by the constructor. otherwise key = 1 """ # precondition assert isinstance(key, int) and isinstance(content, str) key = key or self.__key or 1 # make sure key can be any size while key > 255: key -= 255 # This will be returned ans = "" for ch in content: ans += chr(ord(ch) ^ key) return ans def encrypt_file(self, file: str, key: int = 0) -> bool: """ input: filename (str) and a key (int) output: returns true if encrypt process was successful otherwise false if key not passed the method uses the key by the constructor. otherwise key = 1 """ # precondition assert isinstance(file, str) and isinstance(key, int) try: with open(file) as fin, open("encrypt.out", "w+") as fout: # actual encrypt-process for line in fin: fout.write(self.encrypt_string(line, key)) except OSError: return False return True def decrypt_file(self, file: str, key: int) -> bool: """ input: filename (str) and a key (int) output: returns true if decrypt process was successful otherwise false if key not passed the method uses the key by the constructor. 
otherwise key = 1 """ # precondition assert isinstance(file, str) and isinstance(key, int) try: with open(file) as fin, open("decrypt.out", "w+") as fout: # actual encrypt-process for line in fin: fout.write(self.decrypt_string(line, key)) except OSError: return False return True # Tests # crypt = XORCipher() # key = 67 # # test encrypt # print(crypt.encrypt("hallo welt",key)) # # test decrypt # print(crypt.decrypt(crypt.encrypt("hallo welt",key), key)) # # test encrypt_string # print(crypt.encrypt_string("hallo welt",key)) # # test decrypt_string # print(crypt.decrypt_string(crypt.encrypt_string("hallo welt",key),key)) # if (crypt.encrypt_file("test.txt",key)): # print("encrypt successful") # else: # print("encrypt unsuccessful") # if (crypt.decrypt_file("encrypt.out",key)): # print("decrypt successful") # else: # print("decrypt unsuccessful")
""" author: Christian Bender date: 21.12.2017 class: XORCipher This class implements the XOR-cipher algorithm and provides some useful methods for encrypting and decrypting strings and files. Overview about methods - encrypt : list of char - decrypt : list of char - encrypt_string : str - decrypt_string : str - encrypt_file : boolean - decrypt_file : boolean """ from __future__ import annotations class XORCipher: def __init__(self, key: int = 0): """ simple constructor that receives a key or uses default key = 0 """ # private field self.__key = key def encrypt(self, content: str, key: int) -> list[str]: """ input: 'content' of type string and 'key' of type int output: encrypted string 'content' as a list of chars if key not passed the method uses the key by the constructor. otherwise key = 1 """ # precondition assert isinstance(key, int) and isinstance(content, str) key = key or self.__key or 1 # make sure key is an appropriate size key %= 255 return [chr(ord(ch) ^ key) for ch in content] def decrypt(self, content: str, key: int) -> list[str]: """ input: 'content' of type list and 'key' of type int output: decrypted string 'content' as a list of chars if key not passed the method uses the key by the constructor. otherwise key = 1 """ # precondition assert isinstance(key, int) and isinstance(content, list) key = key or self.__key or 1 # make sure key is an appropriate size key %= 255 return [chr(ord(ch) ^ key) for ch in content] def encrypt_string(self, content: str, key: int = 0) -> str: """ input: 'content' of type string and 'key' of type int output: encrypted string 'content' if key not passed the method uses the key by the constructor. otherwise key = 1 """ # precondition assert isinstance(key, int) and isinstance(content, str) key = key or self.__key or 1 # make sure key can be any size while key > 255: key -= 255 # This will be returned ans = "" for ch in content: ans += chr(ord(ch) ^ key) return ans def decrypt_string(self, content: str, key: int = 0) -> str: """ input: 'content' of type string and 'key' of type int output: decrypted string 'content' if key not passed the method uses the key by the constructor. otherwise key = 1 """ # precondition assert isinstance(key, int) and isinstance(content, str) key = key or self.__key or 1 # make sure key can be any size while key > 255: key -= 255 # This will be returned ans = "" for ch in content: ans += chr(ord(ch) ^ key) return ans def encrypt_file(self, file: str, key: int = 0) -> bool: """ input: filename (str) and a key (int) output: returns true if encrypt process was successful otherwise false if key not passed the method uses the key by the constructor. otherwise key = 1 """ # precondition assert isinstance(file, str) and isinstance(key, int) try: with open(file) as fin, open("encrypt.out", "w+") as fout: # actual encrypt-process for line in fin: fout.write(self.encrypt_string(line, key)) except OSError: return False return True def decrypt_file(self, file: str, key: int) -> bool: """ input: filename (str) and a key (int) output: returns true if decrypt process was successful otherwise false if key not passed the method uses the key by the constructor. 
otherwise key = 1 """ # precondition assert isinstance(file, str) and isinstance(key, int) try: with open(file) as fin, open("decrypt.out", "w+") as fout: # actual encrypt-process for line in fin: fout.write(self.decrypt_string(line, key)) except OSError: return False return True # Tests # crypt = XORCipher() # key = 67 # # test encrypt # print(crypt.encrypt("hallo welt",key)) # # test decrypt # print(crypt.decrypt(crypt.encrypt("hallo welt",key), key)) # # test encrypt_string # print(crypt.encrypt_string("hallo welt",key)) # # test decrypt_string # print(crypt.decrypt_string(crypt.encrypt_string("hallo welt",key),key)) # if (crypt.encrypt_file("test.txt",key)): # print("encrypt successful") # else: # print("encrypt unsuccessful") # if (crypt.decrypt_file("encrypt.out",key)): # print("decrypt successful") # else: # print("decrypt unsuccessful")
-1
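A minimal standalone sketch of the property the stored XORCipher class relies on (illustrative only, not part of the recorded content; xor_string is a hypothetical helper): XOR with the same single-byte key is its own inverse, so applying it twice restores the original text.

def xor_string(text: str, key: int) -> str:
    key %= 255  # keep the key in range, mirroring the stored class
    return "".join(chr(ord(ch) ^ key) for ch in text)

message = "hallo welt"
cipher = xor_string(message, 67)
assert xor_string(cipher, 67) == message  # encrypting twice with the same key restores the text
print(repr(cipher))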
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" Problem 20: https://projecteuler.net/problem=20 n! means n × (n − 1) × ... × 3 × 2 × 1 For example, 10! = 10 × 9 × ... × 3 × 2 × 1 = 3628800, and the sum of the digits in the number 10! is 3 + 6 + 2 + 8 + 8 + 0 + 0 = 27. Find the sum of the digits in the number 100! """ def factorial(num: int) -> int: """Find the factorial of a given number n""" fact = 1 for i in range(1, num + 1): fact *= i return fact def split_and_add(number: int) -> int: """Split number digits and add them.""" sum_of_digits = 0 while number > 0: last_digit = number % 10 sum_of_digits += last_digit number = number // 10 # Removing the last_digit from the given number return sum_of_digits def solution(num: int = 100) -> int: """Returns the sum of the digits in the factorial of num >>> solution(100) 648 >>> solution(50) 216 >>> solution(10) 27 >>> solution(5) 3 >>> solution(3) 6 >>> solution(2) 2 >>> solution(1) 1 """ nfact = factorial(num) result = split_and_add(nfact) return result if __name__ == "__main__": print(solution(int(input("Enter the Number: ").strip())))
""" Problem 20: https://projecteuler.net/problem=20 n! means n × (n − 1) × ... × 3 × 2 × 1 For example, 10! = 10 × 9 × ... × 3 × 2 × 1 = 3628800, and the sum of the digits in the number 10! is 3 + 6 + 2 + 8 + 8 + 0 + 0 = 27. Find the sum of the digits in the number 100! """ def factorial(num: int) -> int: """Find the factorial of a given number n""" fact = 1 for i in range(1, num + 1): fact *= i return fact def split_and_add(number: int) -> int: """Split number digits and add them.""" sum_of_digits = 0 while number > 0: last_digit = number % 10 sum_of_digits += last_digit number = number // 10 # Removing the last_digit from the given number return sum_of_digits def solution(num: int = 100) -> int: """Returns the sum of the digits in the factorial of num >>> solution(100) 648 >>> solution(50) 216 >>> solution(10) 27 >>> solution(5) 3 >>> solution(3) 6 >>> solution(2) 2 >>> solution(1) 1 """ nfact = factorial(num) result = split_and_add(nfact) return result if __name__ == "__main__": print(solution(int(input("Enter the Number: ").strip())))
-1
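A one-line cross-check sketch for the stored Project Euler 20 solution (illustrative only, not part of the recorded content), using math.factorial from the standard library:

from math import factorial

print(sum(int(digit) for digit in str(factorial(100))))  # 648, matching the stored doctest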
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
# @Author : lightXu # @File : convolve.py # @Time : 2019/7/8 0008 下午 16:13 from cv2 import COLOR_BGR2GRAY, cvtColor, imread, imshow, waitKey from numpy import array, dot, pad, ravel, uint8, zeros def im2col(image, block_size): rows, cols = image.shape dst_height = cols - block_size[1] + 1 dst_width = rows - block_size[0] + 1 image_array = zeros((dst_height * dst_width, block_size[1] * block_size[0])) row = 0 for i in range(dst_height): for j in range(dst_width): window = ravel(image[i : i + block_size[0], j : j + block_size[1]]) image_array[row, :] = window row += 1 return image_array def img_convolve(image, filter_kernel): height, width = image.shape[0], image.shape[1] k_size = filter_kernel.shape[0] pad_size = k_size // 2 # Pads image with the edge values of array. image_tmp = pad(image, pad_size, mode="edge") # im2col, turn the k_size*k_size pixels into a row and np.vstack all rows image_array = im2col(image_tmp, (k_size, k_size)) # turn the kernel into shape(k*k, 1) kernel_array = ravel(filter_kernel) # reshape and get the dst image dst = dot(image_array, kernel_array).reshape(height, width) return dst if __name__ == "__main__": # read original image img = imread(r"../image_data/lena.jpg") # turn image in gray scale value gray = cvtColor(img, COLOR_BGR2GRAY) # Laplace operator Laplace_kernel = array([[0, 1, 0], [1, -4, 1], [0, 1, 0]]) out = img_convolve(gray, Laplace_kernel).astype(uint8) imshow("Laplacian", out) waitKey(0)
# @Author : lightXu # @File : convolve.py # @Time : 2019/7/8 0008 下午 16:13 from cv2 import COLOR_BGR2GRAY, cvtColor, imread, imshow, waitKey from numpy import array, dot, pad, ravel, uint8, zeros def im2col(image, block_size): rows, cols = image.shape dst_height = cols - block_size[1] + 1 dst_width = rows - block_size[0] + 1 image_array = zeros((dst_height * dst_width, block_size[1] * block_size[0])) row = 0 for i in range(dst_height): for j in range(dst_width): window = ravel(image[i : i + block_size[0], j : j + block_size[1]]) image_array[row, :] = window row += 1 return image_array def img_convolve(image, filter_kernel): height, width = image.shape[0], image.shape[1] k_size = filter_kernel.shape[0] pad_size = k_size // 2 # Pads image with the edge values of array. image_tmp = pad(image, pad_size, mode="edge") # im2col, turn the k_size*k_size pixels into a row and np.vstack all rows image_array = im2col(image_tmp, (k_size, k_size)) # turn the kernel into shape(k*k, 1) kernel_array = ravel(filter_kernel) # reshape and get the dst image dst = dot(image_array, kernel_array).reshape(height, width) return dst if __name__ == "__main__": # read original image img = imread(r"../image_data/lena.jpg") # turn image in gray scale value gray = cvtColor(img, COLOR_BGR2GRAY) # Laplace operator Laplace_kernel = array([[0, 1, 0], [1, -4, 1], [0, 1, 0]]) out = img_convolve(gray, Laplace_kernel).astype(uint8) imshow("Laplacian", out) waitKey(0)
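A minimal, self-contained sketch of the im2col trick used in the convolve.py record above: each k x k window becomes one flattened row, so the whole convolution reduces to a single matrix-vector product. It assumes only NumPy; the tiny image, the helper name im2col_sketch, and the single-bright-pixel input are illustrative and not taken from the repository file.

import numpy as np

def im2col_sketch(image: np.ndarray, k: int) -> np.ndarray:
    # Collect every k x k window of `image` as one flattened row.
    h, w = image.shape
    rows = []
    for i in range(h - k + 1):
        for j in range(w - k + 1):
            rows.append(image[i : i + k, j : j + k].ravel())
    return np.array(rows)

img = np.zeros((4, 4))
img[1, 2] = 1.0                      # a single bright pixel
laplace = np.array([[0, 1, 0], [1, -4, 1], [0, 1, 0]], dtype=float)
cols = im2col_sketch(img, 3)         # shape (4, 9): the four valid 3x3 windows
out = (cols @ laplace.ravel()).reshape(2, 2)
print(out)                           # the bright pixel yields the -4 / +1 Laplacian pattern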
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
def is_palindrome(head): if not head: return True # split the list to two parts fast, slow = head.next, head while fast and fast.next: fast = fast.next.next slow = slow.next second = slow.next slow.next = None # Don't forget here! But forget still works! # reverse the second part node = None while second: nxt = second.next second.next = node node = second second = nxt # compare two parts # second part has the same or one less node while node: if node.val != head.val: return False node = node.next head = head.next return True def is_palindrome_stack(head): if not head or not head.next: return True # 1. Get the midpoint (slow) slow = fast = cur = head while fast and fast.next: fast, slow = fast.next.next, slow.next # 2. Push the second half into the stack stack = [slow.val] while slow.next: slow = slow.next stack.append(slow.val) # 3. Comparison while stack: if stack.pop() != cur.val: return False cur = cur.next return True def is_palindrome_dict(head): if not head or not head.next: return True d = {} pos = 0 while head: if head.val in d: d[head.val].append(pos) else: d[head.val] = [pos] head = head.next pos += 1 checksum = pos - 1 middle = 0 for v in d.values(): if len(v) % 2 != 0: middle += 1 else: step = 0 for i in range(len(v)): if v[i] + v[len(v) - 1 - step] != checksum: return False step += 1 if middle > 1: return False return True
def is_palindrome(head): if not head: return True # split the list to two parts fast, slow = head.next, head while fast and fast.next: fast = fast.next.next slow = slow.next second = slow.next slow.next = None # Don't forget here! But forget still works! # reverse the second part node = None while second: nxt = second.next second.next = node node = second second = nxt # compare two parts # second part has the same or one less node while node: if node.val != head.val: return False node = node.next head = head.next return True def is_palindrome_stack(head): if not head or not head.next: return True # 1. Get the midpoint (slow) slow = fast = cur = head while fast and fast.next: fast, slow = fast.next.next, slow.next # 2. Push the second half into the stack stack = [slow.val] while slow.next: slow = slow.next stack.append(slow.val) # 3. Comparison while stack: if stack.pop() != cur.val: return False cur = cur.next return True def is_palindrome_dict(head): if not head or not head.next: return True d = {} pos = 0 while head: if head.val in d: d[head.val].append(pos) else: d[head.val] = [pos] head = head.next pos += 1 checksum = pos - 1 middle = 0 for v in d.values(): if len(v) % 2 != 0: middle += 1 else: step = 0 for i in range(len(v)): if v[i] + v[len(v) - 1 - step] != checksum: return False step += 1 if middle > 1: return False return True
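A hedged usage sketch for the linked-list palindrome checkers above. The record never defines a node type, so the Node class and build helper below are assumptions; they expose the same .val / .next attributes those functions rely on, and the reversed-copy check only states the behaviour that the O(1)-extra-space versions are meant to reproduce.

class Node:
    def __init__(self, val):
        self.val = val
        self.next = None

def build(values):
    # Build a singly linked list and return its head (None for an empty input).
    head = tail = None
    for v in values:
        node = Node(v)
        if head is None:
            head = tail = node
        else:
            tail.next = node
            tail = node
    return head

def is_palindrome_naive(head):
    # Copy the values out and compare against the reversed copy: O(n) extra
    # space, but it shows exactly what the pointer-based versions optimise.
    vals = []
    while head:
        vals.append(head.val)
        head = head.next
    return vals == vals[::-1]

print(is_palindrome_naive(build([1, 2, 2, 1])))  # True
print(is_palindrome_naive(build([1, 2, 3])))     # False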
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" Wikipedia: https://en.wikipedia.org/wiki/Enigma_machine Video explanation: https://youtu.be/QwQVMqfoB2E Also check out Numberphile's and Computerphile's videos on this topic This module contains function 'enigma' which emulates the famous Enigma machine from WWII. Module includes: - enigma function - showcase of function usage - 9 randomly generated rotors - reflector (aka static rotor) - original alphabet Created by TrapinchO """ from __future__ import annotations RotorPositionT = tuple[int, int, int] RotorSelectionT = tuple[str, str, str] # used alphabet -------------------------- # from string.ascii_uppercase abc = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" # -------------------------- default selection -------------------------- # rotors -------------------------- rotor1 = "EGZWVONAHDCLFQMSIPJBYUKXTR" rotor2 = "FOBHMDKEXQNRAULPGSJVTYICZW" rotor3 = "ZJXESIUQLHAVRMDOYGTNFWPBKC" # reflector -------------------------- reflector = { "A": "N", "N": "A", "B": "O", "O": "B", "C": "P", "P": "C", "D": "Q", "Q": "D", "E": "R", "R": "E", "F": "S", "S": "F", "G": "T", "T": "G", "H": "U", "U": "H", "I": "V", "V": "I", "J": "W", "W": "J", "K": "X", "X": "K", "L": "Y", "Y": "L", "M": "Z", "Z": "M", } # -------------------------- extra rotors -------------------------- rotor4 = "RMDJXFUWGISLHVTCQNKYPBEZOA" rotor5 = "SGLCPQWZHKXAREONTFBVIYJUDM" rotor6 = "HVSICLTYKQUBXDWAJZOMFGPREN" rotor7 = "RZWQHFMVDBKICJLNTUXAGYPSOE" rotor8 = "LFKIJODBEGAMQPXVUHYSTCZRWN" rotor9 = "KOAEGVDHXPQZMLFTYWJNBRCIUS" def _validator( rotpos: RotorPositionT, rotsel: RotorSelectionT, pb: str ) -> tuple[RotorPositionT, RotorSelectionT, dict[str, str]]: """ Checks if the values can be used for the 'enigma' function >>> _validator((1,1,1), (rotor1, rotor2, rotor3), 'POLAND') ((1, 1, 1), ('EGZWVONAHDCLFQMSIPJBYUKXTR', 'FOBHMDKEXQNRAULPGSJVTYICZW', \ 'ZJXESIUQLHAVRMDOYGTNFWPBKC'), \ {'P': 'O', 'O': 'P', 'L': 'A', 'A': 'L', 'N': 'D', 'D': 'N'}) :param rotpos: rotor_positon :param rotsel: rotor_selection :param pb: plugb -> validated and transformed :return: (rotpos, rotsel, pb) """ # Checks if there are 3 unique rotors if (unique_rotsel := len(set(rotsel))) < 3: msg = f"Please use 3 unique rotors (not {unique_rotsel})" raise Exception(msg) # Checks if rotor positions are valid rotorpos1, rotorpos2, rotorpos3 = rotpos if not 0 < rotorpos1 <= len(abc): msg = f"First rotor position is not within range of 1..26 ({rotorpos1}" raise ValueError(msg) if not 0 < rotorpos2 <= len(abc): msg = f"Second rotor position is not within range of 1..26 ({rotorpos2})" raise ValueError(msg) if not 0 < rotorpos3 <= len(abc): msg = f"Third rotor position is not within range of 1..26 ({rotorpos3})" raise ValueError(msg) # Validates string and returns dict pbdict = _plugboard(pb) return rotpos, rotsel, pbdict def _plugboard(pbstring: str) -> dict[str, str]: """ https://en.wikipedia.org/wiki/Enigma_machine#Plugboard >>> _plugboard('PICTURES') {'P': 'I', 'I': 'P', 'C': 'T', 'T': 'C', 'U': 'R', 'R': 'U', 'E': 'S', 'S': 'E'} >>> _plugboard('POLAND') {'P': 'O', 'O': 'P', 'L': 'A', 'A': 'L', 'N': 'D', 'D': 'N'} In the code, 'pb' stands for 'plugboard' Pairs can be separated by spaces :param pbstring: string containing plugboard setting for the Enigma machine :return: dictionary containing converted pairs """ # tests the input string if it # a) is type string # b) has even length (so pairs can be made) if not isinstance(pbstring, str): msg = f"Plugboard setting isn't type string ({type(pbstring)})" raise TypeError(msg) elif len(pbstring) % 2 != 0: msg = f"Odd number of symbols 
({len(pbstring)})" raise Exception(msg) elif pbstring == "": return {} pbstring.replace(" ", "") # Checks if all characters are unique tmppbl = set() for i in pbstring: if i not in abc: msg = f"'{i}' not in list of symbols" raise Exception(msg) elif i in tmppbl: msg = f"Duplicate symbol ({i})" raise Exception(msg) else: tmppbl.add(i) del tmppbl # Created the dictionary pb = {} for j in range(0, len(pbstring) - 1, 2): pb[pbstring[j]] = pbstring[j + 1] pb[pbstring[j + 1]] = pbstring[j] return pb def enigma( text: str, rotor_position: RotorPositionT, rotor_selection: RotorSelectionT = (rotor1, rotor2, rotor3), plugb: str = "", ) -> str: """ The only difference with real-world enigma is that I allowed string input. All characters are converted to uppercase. (non-letter symbol are ignored) How it works: (for every letter in the message) - Input letter goes into the plugboard. If it is connected to another one, switch it. - Letter goes through 3 rotors. Each rotor can be represented as 2 sets of symbol, where one is shuffled. Each symbol from the first set has corresponding symbol in the second set and vice versa. example: | ABCDEFGHIJKLMNOPQRSTUVWXYZ | e.g. F=D and D=F | VKLEPDBGRNWTFCJOHQAMUZYIXS | - Symbol then goes through reflector (static rotor). There it is switched with paired symbol The reflector can be represented as2 sets, each with half of the alphanet. There are usually 10 pairs of letters. Example: | ABCDEFGHIJKLM | e.g. E is paired to X | ZYXWVUTSRQPON | so when E goes in X goes out and vice versa - Letter then goes through the rotors again - If the letter is connected to plugboard, it is switched. - Return the letter >>> enigma('Hello World!', (1, 2, 1), plugb='pictures') 'KORYH JUHHI!' >>> enigma('KORYH, juhhi!', (1, 2, 1), plugb='pictures') 'HELLO, WORLD!' >>> enigma('hello world!', (1, 1, 1), plugb='pictures') 'FPNCZ QWOBU!' 
>>> enigma('FPNCZ QWOBU', (1, 1, 1), plugb='pictures') 'HELLO WORLD' :param text: input message :param rotor_position: tuple with 3 values in range 1..26 :param rotor_selection: tuple with 3 rotors () :param plugb: string containing plugboard configuration (default '') :return: en/decrypted string """ text = text.upper() rotor_position, rotor_selection, plugboard = _validator( rotor_position, rotor_selection, plugb.upper() ) rotorpos1, rotorpos2, rotorpos3 = rotor_position rotor1, rotor2, rotor3 = rotor_selection rotorpos1 -= 1 rotorpos2 -= 1 rotorpos3 -= 1 result = [] # encryption/decryption process -------------------------- for symbol in text: if symbol in abc: # 1st plugboard -------------------------- if symbol in plugboard: symbol = plugboard[symbol] # rotor ra -------------------------- index = abc.index(symbol) + rotorpos1 symbol = rotor1[index % len(abc)] # rotor rb -------------------------- index = abc.index(symbol) + rotorpos2 symbol = rotor2[index % len(abc)] # rotor rc -------------------------- index = abc.index(symbol) + rotorpos3 symbol = rotor3[index % len(abc)] # reflector -------------------------- # this is the reason you don't need another machine to decipher symbol = reflector[symbol] # 2nd rotors symbol = abc[rotor3.index(symbol) - rotorpos3] symbol = abc[rotor2.index(symbol) - rotorpos2] symbol = abc[rotor1.index(symbol) - rotorpos1] # 2nd plugboard if symbol in plugboard: symbol = plugboard[symbol] # moves/resets rotor positions rotorpos1 += 1 if rotorpos1 >= len(abc): rotorpos1 = 0 rotorpos2 += 1 if rotorpos2 >= len(abc): rotorpos2 = 0 rotorpos3 += 1 if rotorpos3 >= len(abc): rotorpos3 = 0 # else: # pass # Error could be also raised # raise ValueError( # 'Invalid symbol('+repr(symbol)+')') result.append(symbol) return "".join(result) if __name__ == "__main__": message = "This is my Python script that emulates the Enigma machine from WWII." rotor_pos = (1, 1, 1) pb = "pictures" rotor_sel = (rotor2, rotor4, rotor8) en = enigma(message, rotor_pos, rotor_sel, pb) print("Encrypted message:", en) print("Decrypted message:", enigma(en, rotor_pos, rotor_sel, pb))
""" Wikipedia: https://en.wikipedia.org/wiki/Enigma_machine Video explanation: https://youtu.be/QwQVMqfoB2E Also check out Numberphile's and Computerphile's videos on this topic This module contains function 'enigma' which emulates the famous Enigma machine from WWII. Module includes: - enigma function - showcase of function usage - 9 randomly generated rotors - reflector (aka static rotor) - original alphabet Created by TrapinchO """ from __future__ import annotations RotorPositionT = tuple[int, int, int] RotorSelectionT = tuple[str, str, str] # used alphabet -------------------------- # from string.ascii_uppercase abc = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" # -------------------------- default selection -------------------------- # rotors -------------------------- rotor1 = "EGZWVONAHDCLFQMSIPJBYUKXTR" rotor2 = "FOBHMDKEXQNRAULPGSJVTYICZW" rotor3 = "ZJXESIUQLHAVRMDOYGTNFWPBKC" # reflector -------------------------- reflector = { "A": "N", "N": "A", "B": "O", "O": "B", "C": "P", "P": "C", "D": "Q", "Q": "D", "E": "R", "R": "E", "F": "S", "S": "F", "G": "T", "T": "G", "H": "U", "U": "H", "I": "V", "V": "I", "J": "W", "W": "J", "K": "X", "X": "K", "L": "Y", "Y": "L", "M": "Z", "Z": "M", } # -------------------------- extra rotors -------------------------- rotor4 = "RMDJXFUWGISLHVTCQNKYPBEZOA" rotor5 = "SGLCPQWZHKXAREONTFBVIYJUDM" rotor6 = "HVSICLTYKQUBXDWAJZOMFGPREN" rotor7 = "RZWQHFMVDBKICJLNTUXAGYPSOE" rotor8 = "LFKIJODBEGAMQPXVUHYSTCZRWN" rotor9 = "KOAEGVDHXPQZMLFTYWJNBRCIUS" def _validator( rotpos: RotorPositionT, rotsel: RotorSelectionT, pb: str ) -> tuple[RotorPositionT, RotorSelectionT, dict[str, str]]: """ Checks if the values can be used for the 'enigma' function >>> _validator((1,1,1), (rotor1, rotor2, rotor3), 'POLAND') ((1, 1, 1), ('EGZWVONAHDCLFQMSIPJBYUKXTR', 'FOBHMDKEXQNRAULPGSJVTYICZW', \ 'ZJXESIUQLHAVRMDOYGTNFWPBKC'), \ {'P': 'O', 'O': 'P', 'L': 'A', 'A': 'L', 'N': 'D', 'D': 'N'}) :param rotpos: rotor_positon :param rotsel: rotor_selection :param pb: plugb -> validated and transformed :return: (rotpos, rotsel, pb) """ # Checks if there are 3 unique rotors if (unique_rotsel := len(set(rotsel))) < 3: msg = f"Please use 3 unique rotors (not {unique_rotsel})" raise Exception(msg) # Checks if rotor positions are valid rotorpos1, rotorpos2, rotorpos3 = rotpos if not 0 < rotorpos1 <= len(abc): msg = f"First rotor position is not within range of 1..26 ({rotorpos1}" raise ValueError(msg) if not 0 < rotorpos2 <= len(abc): msg = f"Second rotor position is not within range of 1..26 ({rotorpos2})" raise ValueError(msg) if not 0 < rotorpos3 <= len(abc): msg = f"Third rotor position is not within range of 1..26 ({rotorpos3})" raise ValueError(msg) # Validates string and returns dict pbdict = _plugboard(pb) return rotpos, rotsel, pbdict def _plugboard(pbstring: str) -> dict[str, str]: """ https://en.wikipedia.org/wiki/Enigma_machine#Plugboard >>> _plugboard('PICTURES') {'P': 'I', 'I': 'P', 'C': 'T', 'T': 'C', 'U': 'R', 'R': 'U', 'E': 'S', 'S': 'E'} >>> _plugboard('POLAND') {'P': 'O', 'O': 'P', 'L': 'A', 'A': 'L', 'N': 'D', 'D': 'N'} In the code, 'pb' stands for 'plugboard' Pairs can be separated by spaces :param pbstring: string containing plugboard setting for the Enigma machine :return: dictionary containing converted pairs """ # tests the input string if it # a) is type string # b) has even length (so pairs can be made) if not isinstance(pbstring, str): msg = f"Plugboard setting isn't type string ({type(pbstring)})" raise TypeError(msg) elif len(pbstring) % 2 != 0: msg = f"Odd number of symbols 
({len(pbstring)})" raise Exception(msg) elif pbstring == "": return {} pbstring.replace(" ", "") # Checks if all characters are unique tmppbl = set() for i in pbstring: if i not in abc: msg = f"'{i}' not in list of symbols" raise Exception(msg) elif i in tmppbl: msg = f"Duplicate symbol ({i})" raise Exception(msg) else: tmppbl.add(i) del tmppbl # Created the dictionary pb = {} for j in range(0, len(pbstring) - 1, 2): pb[pbstring[j]] = pbstring[j + 1] pb[pbstring[j + 1]] = pbstring[j] return pb def enigma( text: str, rotor_position: RotorPositionT, rotor_selection: RotorSelectionT = (rotor1, rotor2, rotor3), plugb: str = "", ) -> str: """ The only difference with real-world enigma is that I allowed string input. All characters are converted to uppercase. (non-letter symbol are ignored) How it works: (for every letter in the message) - Input letter goes into the plugboard. If it is connected to another one, switch it. - Letter goes through 3 rotors. Each rotor can be represented as 2 sets of symbol, where one is shuffled. Each symbol from the first set has corresponding symbol in the second set and vice versa. example: | ABCDEFGHIJKLMNOPQRSTUVWXYZ | e.g. F=D and D=F | VKLEPDBGRNWTFCJOHQAMUZYIXS | - Symbol then goes through reflector (static rotor). There it is switched with paired symbol The reflector can be represented as2 sets, each with half of the alphanet. There are usually 10 pairs of letters. Example: | ABCDEFGHIJKLM | e.g. E is paired to X | ZYXWVUTSRQPON | so when E goes in X goes out and vice versa - Letter then goes through the rotors again - If the letter is connected to plugboard, it is switched. - Return the letter >>> enigma('Hello World!', (1, 2, 1), plugb='pictures') 'KORYH JUHHI!' >>> enigma('KORYH, juhhi!', (1, 2, 1), plugb='pictures') 'HELLO, WORLD!' >>> enigma('hello world!', (1, 1, 1), plugb='pictures') 'FPNCZ QWOBU!' 
>>> enigma('FPNCZ QWOBU', (1, 1, 1), plugb='pictures') 'HELLO WORLD' :param text: input message :param rotor_position: tuple with 3 values in range 1..26 :param rotor_selection: tuple with 3 rotors () :param plugb: string containing plugboard configuration (default '') :return: en/decrypted string """ text = text.upper() rotor_position, rotor_selection, plugboard = _validator( rotor_position, rotor_selection, plugb.upper() ) rotorpos1, rotorpos2, rotorpos3 = rotor_position rotor1, rotor2, rotor3 = rotor_selection rotorpos1 -= 1 rotorpos2 -= 1 rotorpos3 -= 1 result = [] # encryption/decryption process -------------------------- for symbol in text: if symbol in abc: # 1st plugboard -------------------------- if symbol in plugboard: symbol = plugboard[symbol] # rotor ra -------------------------- index = abc.index(symbol) + rotorpos1 symbol = rotor1[index % len(abc)] # rotor rb -------------------------- index = abc.index(symbol) + rotorpos2 symbol = rotor2[index % len(abc)] # rotor rc -------------------------- index = abc.index(symbol) + rotorpos3 symbol = rotor3[index % len(abc)] # reflector -------------------------- # this is the reason you don't need another machine to decipher symbol = reflector[symbol] # 2nd rotors symbol = abc[rotor3.index(symbol) - rotorpos3] symbol = abc[rotor2.index(symbol) - rotorpos2] symbol = abc[rotor1.index(symbol) - rotorpos1] # 2nd plugboard if symbol in plugboard: symbol = plugboard[symbol] # moves/resets rotor positions rotorpos1 += 1 if rotorpos1 >= len(abc): rotorpos1 = 0 rotorpos2 += 1 if rotorpos2 >= len(abc): rotorpos2 = 0 rotorpos3 += 1 if rotorpos3 >= len(abc): rotorpos3 = 0 # else: # pass # Error could be also raised # raise ValueError( # 'Invalid symbol('+repr(symbol)+')') result.append(symbol) return "".join(result) if __name__ == "__main__": message = "This is my Python script that emulates the Enigma machine from WWII." rotor_pos = (1, 1, 1) pb = "pictures" rotor_sel = (rotor2, rotor4, rotor8) en = enigma(message, rotor_pos, rotor_sel, pb) print("Encrypted message:", en) print("Decrypted message:", enigma(en, rotor_pos, rotor_sel, pb))
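A short, self-contained sketch of the property behind the comment "this is the reason you don't need another machine to decipher" in the enigma code above: the reflector pairs letters symmetrically (A<->N, B<->O, ..., M<->Z), so applying it twice returns the original letter and no letter maps to itself. The snippet rebuilds such a pairing independently; it is an illustration, not code from the record.

abc = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"

# Pair each letter with the one 13 places later: A<->N, B<->O, ..., M<->Z.
reflector = {}
for a, b in zip(abc[:13], abc[13:]):
    reflector[a] = b
    reflector[b] = a

# Applying the pairing twice is the identity, so the same settings
# both encipher and decipher.
assert all(reflector[reflector[c]] == c for c in abc)

# No letter is paired with itself (the reflector has no fixed points).
assert all(reflector[c] != c for c in abc)
print("reflector is a self-inverse pairing of all 26 letters")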
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" Implementation of double ended queue. """ from __future__ import annotations from collections.abc import Iterable from dataclasses import dataclass from typing import Any class Deque: """ Deque data structure. Operations ---------- append(val: Any) -> None appendleft(val: Any) -> None extend(iterable: Iterable) -> None extendleft(iterable: Iterable) -> None pop() -> Any popleft() -> Any Observers --------- is_empty() -> bool Attributes ---------- _front: _Node front of the deque a.k.a. the first element _back: _Node back of the element a.k.a. the last element _len: int the number of nodes """ __slots__ = ("_front", "_back", "_len") @dataclass class _Node: """ Representation of a node. Contains a value and a pointer to the next node as well as to the previous one. """ val: Any = None next_node: Deque._Node | None = None prev_node: Deque._Node | None = None class _Iterator: """ Helper class for iteration. Will be used to implement iteration. Attributes ---------- _cur: _Node the current node of the iteration. """ __slots__ = ("_cur",) def __init__(self, cur: Deque._Node | None) -> None: self._cur = cur def __iter__(self) -> Deque._Iterator: """ >>> our_deque = Deque([1, 2, 3]) >>> iterator = iter(our_deque) """ return self def __next__(self) -> Any: """ >>> our_deque = Deque([1, 2, 3]) >>> iterator = iter(our_deque) >>> next(iterator) 1 >>> next(iterator) 2 >>> next(iterator) 3 """ if self._cur is None: # finished iterating raise StopIteration val = self._cur.val self._cur = self._cur.next_node return val def __init__(self, iterable: Iterable[Any] | None = None) -> None: self._front: Any = None self._back: Any = None self._len: int = 0 if iterable is not None: # append every value to the deque for val in iterable: self.append(val) def append(self, val: Any) -> None: """ Adds val to the end of the deque. Time complexity: O(1) >>> our_deque_1 = Deque([1, 2, 3]) >>> our_deque_1.append(4) >>> our_deque_1 [1, 2, 3, 4] >>> our_deque_2 = Deque('ab') >>> our_deque_2.append('c') >>> our_deque_2 ['a', 'b', 'c'] >>> from collections import deque >>> deque_collections_1 = deque([1, 2, 3]) >>> deque_collections_1.append(4) >>> deque_collections_1 deque([1, 2, 3, 4]) >>> deque_collections_2 = deque('ab') >>> deque_collections_2.append('c') >>> deque_collections_2 deque(['a', 'b', 'c']) >>> list(our_deque_1) == list(deque_collections_1) True >>> list(our_deque_2) == list(deque_collections_2) True """ node = self._Node(val, None, None) if self.is_empty(): # front = back self._front = self._back = node self._len = 1 else: # connect nodes self._back.next_node = node node.prev_node = self._back self._back = node # assign new back to the new node self._len += 1 # make sure there were no errors assert not self.is_empty(), "Error on appending value." def appendleft(self, val: Any) -> None: """ Adds val to the beginning of the deque. 
Time complexity: O(1) >>> our_deque_1 = Deque([2, 3]) >>> our_deque_1.appendleft(1) >>> our_deque_1 [1, 2, 3] >>> our_deque_2 = Deque('bc') >>> our_deque_2.appendleft('a') >>> our_deque_2 ['a', 'b', 'c'] >>> from collections import deque >>> deque_collections_1 = deque([2, 3]) >>> deque_collections_1.appendleft(1) >>> deque_collections_1 deque([1, 2, 3]) >>> deque_collections_2 = deque('bc') >>> deque_collections_2.appendleft('a') >>> deque_collections_2 deque(['a', 'b', 'c']) >>> list(our_deque_1) == list(deque_collections_1) True >>> list(our_deque_2) == list(deque_collections_2) True """ node = self._Node(val, None, None) if self.is_empty(): # front = back self._front = self._back = node self._len = 1 else: # connect nodes node.next_node = self._front self._front.prev_node = node self._front = node # assign new front to the new node self._len += 1 # make sure there were no errors assert not self.is_empty(), "Error on appending value." def extend(self, iterable: Iterable[Any]) -> None: """ Appends every value of iterable to the end of the deque. Time complexity: O(n) >>> our_deque_1 = Deque([1, 2, 3]) >>> our_deque_1.extend([4, 5]) >>> our_deque_1 [1, 2, 3, 4, 5] >>> our_deque_2 = Deque('ab') >>> our_deque_2.extend('cd') >>> our_deque_2 ['a', 'b', 'c', 'd'] >>> from collections import deque >>> deque_collections_1 = deque([1, 2, 3]) >>> deque_collections_1.extend([4, 5]) >>> deque_collections_1 deque([1, 2, 3, 4, 5]) >>> deque_collections_2 = deque('ab') >>> deque_collections_2.extend('cd') >>> deque_collections_2 deque(['a', 'b', 'c', 'd']) >>> list(our_deque_1) == list(deque_collections_1) True >>> list(our_deque_2) == list(deque_collections_2) True """ for val in iterable: self.append(val) def extendleft(self, iterable: Iterable[Any]) -> None: """ Appends every value of iterable to the beginning of the deque. Time complexity: O(n) >>> our_deque_1 = Deque([1, 2, 3]) >>> our_deque_1.extendleft([0, -1]) >>> our_deque_1 [-1, 0, 1, 2, 3] >>> our_deque_2 = Deque('cd') >>> our_deque_2.extendleft('ba') >>> our_deque_2 ['a', 'b', 'c', 'd'] >>> from collections import deque >>> deque_collections_1 = deque([1, 2, 3]) >>> deque_collections_1.extendleft([0, -1]) >>> deque_collections_1 deque([-1, 0, 1, 2, 3]) >>> deque_collections_2 = deque('cd') >>> deque_collections_2.extendleft('ba') >>> deque_collections_2 deque(['a', 'b', 'c', 'd']) >>> list(our_deque_1) == list(deque_collections_1) True >>> list(our_deque_2) == list(deque_collections_2) True """ for val in iterable: self.appendleft(val) def pop(self) -> Any: """ Removes the last element of the deque and returns it. Time complexity: O(1) @returns topop.val: the value of the node to pop. >>> our_deque = Deque([1, 2, 3, 15182]) >>> our_popped = our_deque.pop() >>> our_popped 15182 >>> our_deque [1, 2, 3] >>> from collections import deque >>> deque_collections = deque([1, 2, 3, 15182]) >>> collections_popped = deque_collections.pop() >>> collections_popped 15182 >>> deque_collections deque([1, 2, 3]) >>> list(our_deque) == list(deque_collections) True >>> our_popped == collections_popped True """ # make sure the deque has elements to pop assert not self.is_empty(), "Deque is empty." topop = self._back self._back = self._back.prev_node # set new back # drop the last node - python will deallocate memory automatically self._back.next_node = None self._len -= 1 return topop.val def popleft(self) -> Any: """ Removes the first element of the deque and returns it. Time complexity: O(1) @returns topop.val: the value of the node to pop. 
>>> our_deque = Deque([15182, 1, 2, 3]) >>> our_popped = our_deque.popleft() >>> our_popped 15182 >>> our_deque [1, 2, 3] >>> from collections import deque >>> deque_collections = deque([15182, 1, 2, 3]) >>> collections_popped = deque_collections.popleft() >>> collections_popped 15182 >>> deque_collections deque([1, 2, 3]) >>> list(our_deque) == list(deque_collections) True >>> our_popped == collections_popped True """ # make sure the deque has elements to pop assert not self.is_empty(), "Deque is empty." topop = self._front self._front = self._front.next_node # set new front and drop the first node self._front.prev_node = None self._len -= 1 return topop.val def is_empty(self) -> bool: """ Checks if the deque is empty. Time complexity: O(1) >>> our_deque = Deque([1, 2, 3]) >>> our_deque.is_empty() False >>> our_empty_deque = Deque() >>> our_empty_deque.is_empty() True >>> from collections import deque >>> empty_deque_collections = deque() >>> list(our_empty_deque) == list(empty_deque_collections) True """ return self._front is None def __len__(self) -> int: """ Implements len() function. Returns the length of the deque. Time complexity: O(1) >>> our_deque = Deque([1, 2, 3]) >>> len(our_deque) 3 >>> our_empty_deque = Deque() >>> len(our_empty_deque) 0 >>> from collections import deque >>> deque_collections = deque([1, 2, 3]) >>> len(deque_collections) 3 >>> empty_deque_collections = deque() >>> len(empty_deque_collections) 0 >>> len(our_empty_deque) == len(empty_deque_collections) True """ return self._len def __eq__(self, other: object) -> bool: """ Implements "==" operator. Returns if *self* is equal to *other*. Time complexity: O(n) >>> our_deque_1 = Deque([1, 2, 3]) >>> our_deque_2 = Deque([1, 2, 3]) >>> our_deque_1 == our_deque_2 True >>> our_deque_3 = Deque([1, 2]) >>> our_deque_1 == our_deque_3 False >>> from collections import deque >>> deque_collections_1 = deque([1, 2, 3]) >>> deque_collections_2 = deque([1, 2, 3]) >>> deque_collections_1 == deque_collections_2 True >>> deque_collections_3 = deque([1, 2]) >>> deque_collections_1 == deque_collections_3 False >>> (our_deque_1 == our_deque_2) == (deque_collections_1 == deque_collections_2) True >>> (our_deque_1 == our_deque_3) == (deque_collections_1 == deque_collections_3) True """ if not isinstance(other, Deque): return NotImplemented me = self._front oth = other._front # if the length of the dequeues are not the same, they are not equal if len(self) != len(other): return False while me is not None and oth is not None: # compare every value if me.val != oth.val: return False me = me.next_node oth = oth.next_node return True def __iter__(self) -> Deque._Iterator: """ Implements iteration. Time complexity: O(1) >>> our_deque = Deque([1, 2, 3]) >>> for v in our_deque: ... print(v) 1 2 3 >>> from collections import deque >>> deque_collections = deque([1, 2, 3]) >>> for v in deque_collections: ... print(v) 1 2 3 """ return Deque._Iterator(self._front) def __repr__(self) -> str: """ Implements representation of the deque. Represents it as a list, with its values between '[' and ']'. Time complexity: O(n) >>> our_deque = Deque([1, 2, 3]) >>> our_deque [1, 2, 3] """ values_list = [] aux = self._front while aux is not None: # append the values in a list to display values_list.append(aux.val) aux = aux.next_node return f"[{', '.join(repr(val) for val in values_list)}]" if __name__ == "__main__": import doctest doctest.testmod()
""" Implementation of double ended queue. """ from __future__ import annotations from collections.abc import Iterable from dataclasses import dataclass from typing import Any class Deque: """ Deque data structure. Operations ---------- append(val: Any) -> None appendleft(val: Any) -> None extend(iterable: Iterable) -> None extendleft(iterable: Iterable) -> None pop() -> Any popleft() -> Any Observers --------- is_empty() -> bool Attributes ---------- _front: _Node front of the deque a.k.a. the first element _back: _Node back of the element a.k.a. the last element _len: int the number of nodes """ __slots__ = ("_front", "_back", "_len") @dataclass class _Node: """ Representation of a node. Contains a value and a pointer to the next node as well as to the previous one. """ val: Any = None next_node: Deque._Node | None = None prev_node: Deque._Node | None = None class _Iterator: """ Helper class for iteration. Will be used to implement iteration. Attributes ---------- _cur: _Node the current node of the iteration. """ __slots__ = ("_cur",) def __init__(self, cur: Deque._Node | None) -> None: self._cur = cur def __iter__(self) -> Deque._Iterator: """ >>> our_deque = Deque([1, 2, 3]) >>> iterator = iter(our_deque) """ return self def __next__(self) -> Any: """ >>> our_deque = Deque([1, 2, 3]) >>> iterator = iter(our_deque) >>> next(iterator) 1 >>> next(iterator) 2 >>> next(iterator) 3 """ if self._cur is None: # finished iterating raise StopIteration val = self._cur.val self._cur = self._cur.next_node return val def __init__(self, iterable: Iterable[Any] | None = None) -> None: self._front: Any = None self._back: Any = None self._len: int = 0 if iterable is not None: # append every value to the deque for val in iterable: self.append(val) def append(self, val: Any) -> None: """ Adds val to the end of the deque. Time complexity: O(1) >>> our_deque_1 = Deque([1, 2, 3]) >>> our_deque_1.append(4) >>> our_deque_1 [1, 2, 3, 4] >>> our_deque_2 = Deque('ab') >>> our_deque_2.append('c') >>> our_deque_2 ['a', 'b', 'c'] >>> from collections import deque >>> deque_collections_1 = deque([1, 2, 3]) >>> deque_collections_1.append(4) >>> deque_collections_1 deque([1, 2, 3, 4]) >>> deque_collections_2 = deque('ab') >>> deque_collections_2.append('c') >>> deque_collections_2 deque(['a', 'b', 'c']) >>> list(our_deque_1) == list(deque_collections_1) True >>> list(our_deque_2) == list(deque_collections_2) True """ node = self._Node(val, None, None) if self.is_empty(): # front = back self._front = self._back = node self._len = 1 else: # connect nodes self._back.next_node = node node.prev_node = self._back self._back = node # assign new back to the new node self._len += 1 # make sure there were no errors assert not self.is_empty(), "Error on appending value." def appendleft(self, val: Any) -> None: """ Adds val to the beginning of the deque. 
Time complexity: O(1) >>> our_deque_1 = Deque([2, 3]) >>> our_deque_1.appendleft(1) >>> our_deque_1 [1, 2, 3] >>> our_deque_2 = Deque('bc') >>> our_deque_2.appendleft('a') >>> our_deque_2 ['a', 'b', 'c'] >>> from collections import deque >>> deque_collections_1 = deque([2, 3]) >>> deque_collections_1.appendleft(1) >>> deque_collections_1 deque([1, 2, 3]) >>> deque_collections_2 = deque('bc') >>> deque_collections_2.appendleft('a') >>> deque_collections_2 deque(['a', 'b', 'c']) >>> list(our_deque_1) == list(deque_collections_1) True >>> list(our_deque_2) == list(deque_collections_2) True """ node = self._Node(val, None, None) if self.is_empty(): # front = back self._front = self._back = node self._len = 1 else: # connect nodes node.next_node = self._front self._front.prev_node = node self._front = node # assign new front to the new node self._len += 1 # make sure there were no errors assert not self.is_empty(), "Error on appending value." def extend(self, iterable: Iterable[Any]) -> None: """ Appends every value of iterable to the end of the deque. Time complexity: O(n) >>> our_deque_1 = Deque([1, 2, 3]) >>> our_deque_1.extend([4, 5]) >>> our_deque_1 [1, 2, 3, 4, 5] >>> our_deque_2 = Deque('ab') >>> our_deque_2.extend('cd') >>> our_deque_2 ['a', 'b', 'c', 'd'] >>> from collections import deque >>> deque_collections_1 = deque([1, 2, 3]) >>> deque_collections_1.extend([4, 5]) >>> deque_collections_1 deque([1, 2, 3, 4, 5]) >>> deque_collections_2 = deque('ab') >>> deque_collections_2.extend('cd') >>> deque_collections_2 deque(['a', 'b', 'c', 'd']) >>> list(our_deque_1) == list(deque_collections_1) True >>> list(our_deque_2) == list(deque_collections_2) True """ for val in iterable: self.append(val) def extendleft(self, iterable: Iterable[Any]) -> None: """ Appends every value of iterable to the beginning of the deque. Time complexity: O(n) >>> our_deque_1 = Deque([1, 2, 3]) >>> our_deque_1.extendleft([0, -1]) >>> our_deque_1 [-1, 0, 1, 2, 3] >>> our_deque_2 = Deque('cd') >>> our_deque_2.extendleft('ba') >>> our_deque_2 ['a', 'b', 'c', 'd'] >>> from collections import deque >>> deque_collections_1 = deque([1, 2, 3]) >>> deque_collections_1.extendleft([0, -1]) >>> deque_collections_1 deque([-1, 0, 1, 2, 3]) >>> deque_collections_2 = deque('cd') >>> deque_collections_2.extendleft('ba') >>> deque_collections_2 deque(['a', 'b', 'c', 'd']) >>> list(our_deque_1) == list(deque_collections_1) True >>> list(our_deque_2) == list(deque_collections_2) True """ for val in iterable: self.appendleft(val) def pop(self) -> Any: """ Removes the last element of the deque and returns it. Time complexity: O(1) @returns topop.val: the value of the node to pop. >>> our_deque = Deque([1, 2, 3, 15182]) >>> our_popped = our_deque.pop() >>> our_popped 15182 >>> our_deque [1, 2, 3] >>> from collections import deque >>> deque_collections = deque([1, 2, 3, 15182]) >>> collections_popped = deque_collections.pop() >>> collections_popped 15182 >>> deque_collections deque([1, 2, 3]) >>> list(our_deque) == list(deque_collections) True >>> our_popped == collections_popped True """ # make sure the deque has elements to pop assert not self.is_empty(), "Deque is empty." topop = self._back self._back = self._back.prev_node # set new back # drop the last node - python will deallocate memory automatically self._back.next_node = None self._len -= 1 return topop.val def popleft(self) -> Any: """ Removes the first element of the deque and returns it. Time complexity: O(1) @returns topop.val: the value of the node to pop. 
>>> our_deque = Deque([15182, 1, 2, 3]) >>> our_popped = our_deque.popleft() >>> our_popped 15182 >>> our_deque [1, 2, 3] >>> from collections import deque >>> deque_collections = deque([15182, 1, 2, 3]) >>> collections_popped = deque_collections.popleft() >>> collections_popped 15182 >>> deque_collections deque([1, 2, 3]) >>> list(our_deque) == list(deque_collections) True >>> our_popped == collections_popped True """ # make sure the deque has elements to pop assert not self.is_empty(), "Deque is empty." topop = self._front self._front = self._front.next_node # set new front and drop the first node self._front.prev_node = None self._len -= 1 return topop.val def is_empty(self) -> bool: """ Checks if the deque is empty. Time complexity: O(1) >>> our_deque = Deque([1, 2, 3]) >>> our_deque.is_empty() False >>> our_empty_deque = Deque() >>> our_empty_deque.is_empty() True >>> from collections import deque >>> empty_deque_collections = deque() >>> list(our_empty_deque) == list(empty_deque_collections) True """ return self._front is None def __len__(self) -> int: """ Implements len() function. Returns the length of the deque. Time complexity: O(1) >>> our_deque = Deque([1, 2, 3]) >>> len(our_deque) 3 >>> our_empty_deque = Deque() >>> len(our_empty_deque) 0 >>> from collections import deque >>> deque_collections = deque([1, 2, 3]) >>> len(deque_collections) 3 >>> empty_deque_collections = deque() >>> len(empty_deque_collections) 0 >>> len(our_empty_deque) == len(empty_deque_collections) True """ return self._len def __eq__(self, other: object) -> bool: """ Implements "==" operator. Returns if *self* is equal to *other*. Time complexity: O(n) >>> our_deque_1 = Deque([1, 2, 3]) >>> our_deque_2 = Deque([1, 2, 3]) >>> our_deque_1 == our_deque_2 True >>> our_deque_3 = Deque([1, 2]) >>> our_deque_1 == our_deque_3 False >>> from collections import deque >>> deque_collections_1 = deque([1, 2, 3]) >>> deque_collections_2 = deque([1, 2, 3]) >>> deque_collections_1 == deque_collections_2 True >>> deque_collections_3 = deque([1, 2]) >>> deque_collections_1 == deque_collections_3 False >>> (our_deque_1 == our_deque_2) == (deque_collections_1 == deque_collections_2) True >>> (our_deque_1 == our_deque_3) == (deque_collections_1 == deque_collections_3) True """ if not isinstance(other, Deque): return NotImplemented me = self._front oth = other._front # if the length of the dequeues are not the same, they are not equal if len(self) != len(other): return False while me is not None and oth is not None: # compare every value if me.val != oth.val: return False me = me.next_node oth = oth.next_node return True def __iter__(self) -> Deque._Iterator: """ Implements iteration. Time complexity: O(1) >>> our_deque = Deque([1, 2, 3]) >>> for v in our_deque: ... print(v) 1 2 3 >>> from collections import deque >>> deque_collections = deque([1, 2, 3]) >>> for v in deque_collections: ... print(v) 1 2 3 """ return Deque._Iterator(self._front) def __repr__(self) -> str: """ Implements representation of the deque. Represents it as a list, with its values between '[' and ']'. Time complexity: O(n) >>> our_deque = Deque([1, 2, 3]) >>> our_deque [1, 2, 3] """ values_list = [] aux = self._front while aux is not None: # append the values in a list to display values_list.append(aux.val) aux = aux.next_node return f"[{', '.join(repr(val) for val in values_list)}]" if __name__ == "__main__": import doctest doctest.testmod()
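A minimal sketch, under stated assumptions, of the doubly linked pointer updates that Deque.append and Deque.popleft perform above. DNode, push_back, and pop_front are illustrative names only; the point is the prev/next surgery at the back and front of the chain.

from __future__ import annotations

from dataclasses import dataclass
from typing import Any

@dataclass
class DNode:
    val: Any
    prev: DNode | None = None
    next: DNode | None = None

def push_back(back: DNode | None, val: Any) -> DNode:
    # Link a new node after the current back and return it as the new back.
    node = DNode(val, prev=back)
    if back is not None:
        back.next = node
    return node

def pop_front(front: DNode) -> tuple[Any, DNode | None]:
    # Detach the front node and return (its value, the new front).
    new_front = front.next
    if new_front is not None:
        new_front.prev = None
    return front.val, new_front

front = back = DNode(1)
for v in (2, 3):
    back = push_back(back, v)        # now 1 <-> 2 <-> 3
value, front = pop_front(front)
print(value, front.val)              # 1 2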
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
#
#
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" By starting at the top of the triangle below and moving to adjacent numbers on the row below, the maximum total from top to bottom is 23. 3 7 4 2 4 6 8 5 9 3 That is, 3 + 7 + 4 + 9 = 23. Find the maximum total from top to bottom of the triangle below: 75 95 64 17 47 82 18 35 87 10 20 04 82 47 65 19 01 23 75 03 34 88 02 77 73 07 63 67 99 65 04 28 06 16 70 92 41 41 26 56 83 40 80 70 33 41 48 72 33 47 32 37 16 94 29 53 71 44 65 25 43 91 52 97 51 14 70 11 33 28 77 73 17 78 39 68 17 57 91 71 52 38 17 14 91 43 58 50 27 29 48 63 66 04 68 89 53 67 30 73 16 69 87 40 31 04 62 98 27 23 09 70 98 73 93 38 53 60 04 23 """ import os def solution(): """ Finds the maximum total in a triangle as described by the problem statement above. >>> solution() 1074 """ script_dir = os.path.dirname(os.path.realpath(__file__)) triangle = os.path.join(script_dir, "triangle.txt") with open(triangle) as f: triangle = f.readlines() a = [[int(y) for y in x.rstrip("\r\n").split(" ")] for x in triangle] for i in range(1, len(a)): for j in range(len(a[i])): number1 = a[i - 1][j] if j != len(a[i - 1]) else 0 number2 = a[i - 1][j - 1] if j > 0 else 0 a[i][j] += max(number1, number2) return max(a[-1]) if __name__ == "__main__": print(solution())
""" By starting at the top of the triangle below and moving to adjacent numbers on the row below, the maximum total from top to bottom is 23. 3 7 4 2 4 6 8 5 9 3 That is, 3 + 7 + 4 + 9 = 23. Find the maximum total from top to bottom of the triangle below: 75 95 64 17 47 82 18 35 87 10 20 04 82 47 65 19 01 23 75 03 34 88 02 77 73 07 63 67 99 65 04 28 06 16 70 92 41 41 26 56 83 40 80 70 33 41 48 72 33 47 32 37 16 94 29 53 71 44 65 25 43 91 52 97 51 14 70 11 33 28 77 73 17 78 39 68 17 57 91 71 52 38 17 14 91 43 58 50 27 29 48 63 66 04 68 89 53 67 30 73 16 69 87 40 31 04 62 98 27 23 09 70 98 73 93 38 53 60 04 23 """ import os def solution(): """ Finds the maximum total in a triangle as described by the problem statement above. >>> solution() 1074 """ script_dir = os.path.dirname(os.path.realpath(__file__)) triangle = os.path.join(script_dir, "triangle.txt") with open(triangle) as f: triangle = f.readlines() a = [[int(y) for y in x.rstrip("\r\n").split(" ")] for x in triangle] for i in range(1, len(a)): for j in range(len(a[i])): number1 = a[i - 1][j] if j != len(a[i - 1]) else 0 number2 = a[i - 1][j - 1] if j > 0 else 0 a[i][j] += max(number1, number2) return max(a[-1]) if __name__ == "__main__": print(solution())
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
hex_table = {hex(i)[2:]: i for i in range(16)} # Use [2:] to strip off the leading '0x' def hex_to_decimal(hex_string: str) -> int: """ Convert a hexadecimal value to its decimal equivalent #https://www.programiz.com/python-programming/methods/built-in/hex >>> hex_to_decimal("a") 10 >>> hex_to_decimal("12f") 303 >>> hex_to_decimal(" 12f ") 303 >>> hex_to_decimal("FfFf") 65535 >>> hex_to_decimal("-Ff") -255 >>> hex_to_decimal("F-f") Traceback (most recent call last): ... ValueError: Non-hexadecimal value was passed to the function >>> hex_to_decimal("") Traceback (most recent call last): ... ValueError: Empty string was passed to the function >>> hex_to_decimal("12m") Traceback (most recent call last): ... ValueError: Non-hexadecimal value was passed to the function """ hex_string = hex_string.strip().lower() if not hex_string: raise ValueError("Empty string was passed to the function") is_negative = hex_string[0] == "-" if is_negative: hex_string = hex_string[1:] if not all(char in hex_table for char in hex_string): raise ValueError("Non-hexadecimal value was passed to the function") decimal_number = 0 for char in hex_string: decimal_number = 16 * decimal_number + hex_table[char] return -decimal_number if is_negative else decimal_number if __name__ == "__main__": from doctest import testmod testmod()
hex_table = {hex(i)[2:]: i for i in range(16)} # Use [2:] to strip off the leading '0x' def hex_to_decimal(hex_string: str) -> int: """ Convert a hexadecimal value to its decimal equivalent #https://www.programiz.com/python-programming/methods/built-in/hex >>> hex_to_decimal("a") 10 >>> hex_to_decimal("12f") 303 >>> hex_to_decimal(" 12f ") 303 >>> hex_to_decimal("FfFf") 65535 >>> hex_to_decimal("-Ff") -255 >>> hex_to_decimal("F-f") Traceback (most recent call last): ... ValueError: Non-hexadecimal value was passed to the function >>> hex_to_decimal("") Traceback (most recent call last): ... ValueError: Empty string was passed to the function >>> hex_to_decimal("12m") Traceback (most recent call last): ... ValueError: Non-hexadecimal value was passed to the function """ hex_string = hex_string.strip().lower() if not hex_string: raise ValueError("Empty string was passed to the function") is_negative = hex_string[0] == "-" if is_negative: hex_string = hex_string[1:] if not all(char in hex_table for char in hex_string): raise ValueError("Non-hexadecimal value was passed to the function") decimal_number = 0 for char in hex_string: decimal_number = 16 * decimal_number + hex_table[char] return -decimal_number if is_negative else decimal_number if __name__ == "__main__": from doctest import testmod testmod()
-1
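The hex_to_decimal file above converts by Horner-style accumulation: each hexadecimal digit multiplies the running total by 16 and adds that digit's value. A stripped-down sketch of just that loop, cross-checked against Python's built-in int(..., 16); the sign handling and validation of the full function are deliberately omitted, and the helper name hex_value is hypothetical:

```python
# Hypothetical minimal sketch of the accumulation loop only.
hex_digits = {hex(i)[2:]: i for i in range(16)}  # hex(i)[2:] drops the '0x' prefix


def hex_value(hex_string: str) -> int:
    total = 0
    for char in hex_string.strip().lower():
        total = 16 * total + hex_digits[char]  # shift left one hex place, add digit
    return total


assert hex_value("12f") == int("12f", 16) == 303
assert hex_value("FfFf") == int("ffff", 16) == 65535
```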
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
-1
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
#!/usr/bin/env python3 """ Illustrate how to implement bucket sort algorithm. Author: OMKAR PATHAK This program will illustrate how to implement bucket sort algorithm Wikipedia says: Bucket sort, or bin sort, is a sorting algorithm that works by distributing the elements of an array into a number of buckets. Each bucket is then sorted individually, either using a different sorting algorithm, or by recursively applying the bucket sorting algorithm. It is a distribution sort, and is a cousin of radix sort in the most to least significant digit flavour. Bucket sort is a generalization of pigeonhole sort. Bucket sort can be implemented with comparisons and therefore can also be considered a comparison sort algorithm. The computational complexity estimates involve the number of buckets. Time Complexity of Solution: Worst case scenario occurs when all the elements are placed in a single bucket. The overall performance would then be dominated by the algorithm used to sort each bucket. In this case, O(n log n), because of TimSort Average Case O(n + (n^2)/k + k), where k is the number of buckets If k = O(n), time complexity is O(n) Source: https://en.wikipedia.org/wiki/Bucket_sort """ from __future__ import annotations def bucket_sort(my_list: list, bucket_count: int = 10) -> list: """ >>> data = [-1, 2, -5, 0] >>> bucket_sort(data) == sorted(data) True >>> data = [9, 8, 7, 6, -12] >>> bucket_sort(data) == sorted(data) True >>> data = [.4, 1.2, .1, .2, -.9] >>> bucket_sort(data) == sorted(data) True >>> bucket_sort([]) == sorted([]) True >>> data = [-1e10, 1e10] >>> bucket_sort(data) == sorted(data) True >>> import random >>> collection = random.sample(range(-50, 50), 50) >>> bucket_sort(collection) == sorted(collection) True """ if len(my_list) == 0 or bucket_count <= 0: return [] min_value, max_value = min(my_list), max(my_list) bucket_size = (max_value - min_value) / bucket_count buckets: list[list] = [[] for _ in range(bucket_count)] for val in my_list: index = min(int((val - min_value) / bucket_size), bucket_count - 1) buckets[index].append(val) return [val for bucket in buckets for val in sorted(bucket)] if __name__ == "__main__": from doctest import testmod testmod() assert bucket_sort([4, 5, 3, 2, 1]) == [1, 2, 3, 4, 5] assert bucket_sort([0, 1, -10, 15, 2, -2]) == [-10, -2, 0, 1, 2, 15]
#!/usr/bin/env python3 """ Illustrate how to implement bucket sort algorithm. Author: OMKAR PATHAK This program will illustrate how to implement bucket sort algorithm Wikipedia says: Bucket sort, or bin sort, is a sorting algorithm that works by distributing the elements of an array into a number of buckets. Each bucket is then sorted individually, either using a different sorting algorithm, or by recursively applying the bucket sorting algorithm. It is a distribution sort, and is a cousin of radix sort in the most to least significant digit flavour. Bucket sort is a generalization of pigeonhole sort. Bucket sort can be implemented with comparisons and therefore can also be considered a comparison sort algorithm. The computational complexity estimates involve the number of buckets. Time Complexity of Solution: Worst case scenario occurs when all the elements are placed in a single bucket. The overall performance would then be dominated by the algorithm used to sort each bucket. In this case, O(n log n), because of TimSort Average Case O(n + (n^2)/k + k), where k is the number of buckets If k = O(n), time complexity is O(n) Source: https://en.wikipedia.org/wiki/Bucket_sort """ from __future__ import annotations def bucket_sort(my_list: list, bucket_count: int = 10) -> list: """ >>> data = [-1, 2, -5, 0] >>> bucket_sort(data) == sorted(data) True >>> data = [9, 8, 7, 6, -12] >>> bucket_sort(data) == sorted(data) True >>> data = [.4, 1.2, .1, .2, -.9] >>> bucket_sort(data) == sorted(data) True >>> bucket_sort([]) == sorted([]) True >>> data = [-1e10, 1e10] >>> bucket_sort(data) == sorted(data) True >>> import random >>> collection = random.sample(range(-50, 50), 50) >>> bucket_sort(collection) == sorted(collection) True """ if len(my_list) == 0 or bucket_count <= 0: return [] min_value, max_value = min(my_list), max(my_list) bucket_size = (max_value - min_value) / bucket_count buckets: list[list] = [[] for _ in range(bucket_count)] for val in my_list: index = min(int((val - min_value) / bucket_size), bucket_count - 1) buckets[index].append(val) return [val for bucket in buckets for val in sorted(bucket)] if __name__ == "__main__": from doctest import testmod testmod() assert bucket_sort([4, 5, 3, 2, 1]) == [1, 2, 3, 4, 5] assert bucket_sort([0, 1, -10, 15, 2, -2]) == [-10, -2, 0, 1, 2, 15]
-1
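The bucket sort above distributes each value into one of bucket_count equal-width buckets via (val - min_value) / bucket_size, clamping the top edge with min(..., bucket_count - 1), and then sorts each bucket individually. A small usage sketch of that distribution step; the sample data and bucket count are arbitrary, and a nonzero spread between min and max is assumed since the bucket width would otherwise be zero:

```python
data = [0.42, 4.21, 0.33, 2.12, 0.52]
bucket_count = 5
low, high = min(data), max(data)
width = (high - low) / bucket_count
buckets = [[] for _ in range(bucket_count)]
for val in data:
    # Clamp so the maximum value lands in the last bucket instead of index bucket_count.
    buckets[min(int((val - low) / width), bucket_count - 1)].append(val)
print(buckets)                                  # per-bucket contents
print([v for b in buckets for v in sorted(b)])  # sorted output after concatenation
```

Sorting each bucket and concatenating, as the final line does, mirrors the list comprehension at the end of bucket_sort.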
TheAlgorithms/Python
9,062
Delete empty junk file
### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
tianyizheng02
"2023-09-16T18:59:42Z"
"2023-09-16T22:12:31Z"
1488cdea708485eb1d81c73126eab13cb9b04a47
fbad85d3ecbbb826a5891807c823149d38bbaed3
Delete empty junk file. ### Describe your change: Delete empty file introduced in #9046 * [ ] Add an algorithm? * [x] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [ ] All new Python files are placed inside an existing directory. * [ ] All filenames are in all lowercase characters with no spaces or dashes. * [ ] All functions and variable names follow Python naming conventions. * [ ] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [ ] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [ ] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [ ] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
import random from .binary_exp_mod import bin_exp_mod # This is a probabilistic check to test primality, useful for big numbers! # if it's a prime, it will return true # if it's not a prime, the chance of it returning true is at most 1/4**prec def is_prime_big(n, prec=1000): """ >>> from maths.prime_check import is_prime >>> # all(is_prime_big(i) == is_prime(i) for i in range(1000)) # 3.45s >>> all(is_prime_big(i) == is_prime(i) for i in range(256)) True """ if n < 2: return False if n % 2 == 0: return n == 2 # this means n is odd d = n - 1 exp = 0 while d % 2 == 0: d //= 2 exp += 1 # n - 1=d*(2**exp) count = 0 while count < prec: a = random.randint(2, n - 1) b = bin_exp_mod(a, d, n) if b != 1: flag = True for _ in range(exp): if b == n - 1: flag = False break b = b * b b %= n if flag: return False count += 1 return True if __name__ == "__main__": n = abs(int(input("Enter bound : ").strip())) print("Here's the list of primes:") print(", ".join(str(i) for i in range(n + 1) if is_prime_big(i)))
import random from .binary_exp_mod import bin_exp_mod # This is a probabilistic check to test primality, useful for big numbers! # if it's a prime, it will return true # if it's not a prime, the chance of it returning true is at most 1/4**prec def is_prime_big(n, prec=1000): """ >>> from maths.prime_check import is_prime >>> # all(is_prime_big(i) == is_prime(i) for i in range(1000)) # 3.45s >>> all(is_prime_big(i) == is_prime(i) for i in range(256)) True """ if n < 2: return False if n % 2 == 0: return n == 2 # this means n is odd d = n - 1 exp = 0 while d % 2 == 0: d //= 2 exp += 1 # n - 1=d*(2**exp) count = 0 while count < prec: a = random.randint(2, n - 1) b = bin_exp_mod(a, d, n) if b != 1: flag = True for _ in range(exp): if b == n - 1: flag = False break b = b * b b %= n if flag: return False count += 1 return True if __name__ == "__main__": n = abs(int(input("Enter bound : ").strip())) print("Here's the list of primes:") print(", ".join(str(i) for i in range(n + 1) if is_prime_big(i)))
-1
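The is_prime_big file above is a Miller-Rabin-style probabilistic test: write n - 1 = d * 2**exp with d odd, raise a random base to the d-th power mod n, and declare n composite unless that value (or one of its repeated squarings) reaches n - 1. A self-contained sketch of one such witness round, using Python's built-in pow for modular exponentiation and taking the base as an explicit parameter instead of drawing it at random; the function name is hypothetical:

```python
def witness_says_composite(n: int, a: int) -> bool:
    """Return True if base a proves n composite (one Miller-Rabin round)."""
    d, exp = n - 1, 0
    while d % 2 == 0:      # factor out powers of two: n - 1 = d * 2**exp, d odd
        d //= 2
        exp += 1
    b = pow(a, d, n)       # a**d mod n via built-in modular exponentiation
    if b in (1, n - 1):
        return False
    for _ in range(exp - 1):
        b = b * b % n
        if b == n - 1:
            return False
    return True            # a witnesses that n is composite


assert not witness_says_composite(97, 2)  # 97 is prime: no base can prove it composite
assert witness_says_composite(91, 2)      # 91 = 7 * 13, and base 2 exposes it
```

Repeating the round with independently chosen bases, as the prec loop in the file does, shrinks the chance of a composite passing to at most 1/4 per round, matching the comment at the top of that file.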
TheAlgorithms/Python
8,960
Consolidate find_min and find_min recursive and find_max and find_max_recursive
### Describe your change: Merges `find_min` and `find_min_recursive` into just `find_min`. Merges `find_max` and `find_max_recursive` into just `find_max`. * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? * [x] Merge two different implementations into one file ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
CaedenPH
"2023-08-14T10:30:16Z"
"2023-08-14T11:17:27Z"
2ab3bf2689d21e7375539c79ecee358e9d7c3359
fb1b939a89fb08370297cbb455846f61f66847bc
Consolidate find_min and find_min recursive and find_max and find_max_recursive. ### Describe your change: Merges `find_min` and `find_min_recursive` into just `find_min`. Merges `find_max` and `find_max_recursive` into just `find_max`. * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? * [x] Merge two different implementations into one file ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
## Arithmetic Analysis * [Bisection](arithmetic_analysis/bisection.py) * [Gaussian Elimination](arithmetic_analysis/gaussian_elimination.py) * [In Static Equilibrium](arithmetic_analysis/in_static_equilibrium.py) * [Intersection](arithmetic_analysis/intersection.py) * [Jacobi Iteration Method](arithmetic_analysis/jacobi_iteration_method.py) * [Lu Decomposition](arithmetic_analysis/lu_decomposition.py) * [Newton Forward Interpolation](arithmetic_analysis/newton_forward_interpolation.py) * [Newton Method](arithmetic_analysis/newton_method.py) * [Newton Raphson](arithmetic_analysis/newton_raphson.py) * [Newton Raphson New](arithmetic_analysis/newton_raphson_new.py) * [Secant Method](arithmetic_analysis/secant_method.py) ## Audio Filters * [Butterworth Filter](audio_filters/butterworth_filter.py) * [Iir Filter](audio_filters/iir_filter.py) * [Show Response](audio_filters/show_response.py) ## Backtracking * [All Combinations](backtracking/all_combinations.py) * [All Permutations](backtracking/all_permutations.py) * [All Subsequences](backtracking/all_subsequences.py) * [Coloring](backtracking/coloring.py) * [Combination Sum](backtracking/combination_sum.py) * [Hamiltonian Cycle](backtracking/hamiltonian_cycle.py) * [Knight Tour](backtracking/knight_tour.py) * [Minimax](backtracking/minimax.py) * [Minmax](backtracking/minmax.py) * [N Queens](backtracking/n_queens.py) * [N Queens Math](backtracking/n_queens_math.py) * [Power Sum](backtracking/power_sum.py) * [Rat In Maze](backtracking/rat_in_maze.py) * [Sudoku](backtracking/sudoku.py) * [Sum Of Subsets](backtracking/sum_of_subsets.py) * [Word Search](backtracking/word_search.py) ## Bit Manipulation * [Binary And Operator](bit_manipulation/binary_and_operator.py) * [Binary Count Setbits](bit_manipulation/binary_count_setbits.py) * [Binary Count Trailing Zeros](bit_manipulation/binary_count_trailing_zeros.py) * [Binary Or Operator](bit_manipulation/binary_or_operator.py) * [Binary Shifts](bit_manipulation/binary_shifts.py) * [Binary Twos Complement](bit_manipulation/binary_twos_complement.py) * [Binary Xor Operator](bit_manipulation/binary_xor_operator.py) * [Count 1S Brian Kernighan Method](bit_manipulation/count_1s_brian_kernighan_method.py) * [Count Number Of One Bits](bit_manipulation/count_number_of_one_bits.py) * [Gray Code Sequence](bit_manipulation/gray_code_sequence.py) * [Highest Set Bit](bit_manipulation/highest_set_bit.py) * [Index Of Rightmost Set Bit](bit_manipulation/index_of_rightmost_set_bit.py) * [Is Even](bit_manipulation/is_even.py) * [Is Power Of Two](bit_manipulation/is_power_of_two.py) * [Numbers Different Signs](bit_manipulation/numbers_different_signs.py) * [Reverse Bits](bit_manipulation/reverse_bits.py) * [Single Bit Manipulation Operations](bit_manipulation/single_bit_manipulation_operations.py) ## Blockchain * [Chinese Remainder Theorem](blockchain/chinese_remainder_theorem.py) * [Diophantine Equation](blockchain/diophantine_equation.py) * [Modular Division](blockchain/modular_division.py) ## Boolean Algebra * [And Gate](boolean_algebra/and_gate.py) * [Nand Gate](boolean_algebra/nand_gate.py) * [Norgate](boolean_algebra/norgate.py) * [Not Gate](boolean_algebra/not_gate.py) * [Or Gate](boolean_algebra/or_gate.py) * [Quine Mc Cluskey](boolean_algebra/quine_mc_cluskey.py) * [Xnor Gate](boolean_algebra/xnor_gate.py) * [Xor Gate](boolean_algebra/xor_gate.py) ## Cellular Automata * [Conways Game Of Life](cellular_automata/conways_game_of_life.py) * [Game Of Life](cellular_automata/game_of_life.py) * [Nagel 
Schrekenberg](cellular_automata/nagel_schrekenberg.py) * [One Dimensional](cellular_automata/one_dimensional.py) * [Wa Tor](cellular_automata/wa_tor.py) ## Ciphers * [A1Z26](ciphers/a1z26.py) * [Affine Cipher](ciphers/affine_cipher.py) * [Atbash](ciphers/atbash.py) * [Autokey](ciphers/autokey.py) * [Baconian Cipher](ciphers/baconian_cipher.py) * [Base16](ciphers/base16.py) * [Base32](ciphers/base32.py) * [Base64](ciphers/base64.py) * [Base85](ciphers/base85.py) * [Beaufort Cipher](ciphers/beaufort_cipher.py) * [Bifid](ciphers/bifid.py) * [Brute Force Caesar Cipher](ciphers/brute_force_caesar_cipher.py) * [Caesar Cipher](ciphers/caesar_cipher.py) * [Cryptomath Module](ciphers/cryptomath_module.py) * [Decrypt Caesar With Chi Squared](ciphers/decrypt_caesar_with_chi_squared.py) * [Deterministic Miller Rabin](ciphers/deterministic_miller_rabin.py) * [Diffie](ciphers/diffie.py) * [Diffie Hellman](ciphers/diffie_hellman.py) * [Elgamal Key Generator](ciphers/elgamal_key_generator.py) * [Enigma Machine2](ciphers/enigma_machine2.py) * [Hill Cipher](ciphers/hill_cipher.py) * [Mixed Keyword Cypher](ciphers/mixed_keyword_cypher.py) * [Mono Alphabetic Ciphers](ciphers/mono_alphabetic_ciphers.py) * [Morse Code](ciphers/morse_code.py) * [Onepad Cipher](ciphers/onepad_cipher.py) * [Playfair Cipher](ciphers/playfair_cipher.py) * [Polybius](ciphers/polybius.py) * [Porta Cipher](ciphers/porta_cipher.py) * [Rabin Miller](ciphers/rabin_miller.py) * [Rail Fence Cipher](ciphers/rail_fence_cipher.py) * [Rot13](ciphers/rot13.py) * [Rsa Cipher](ciphers/rsa_cipher.py) * [Rsa Factorization](ciphers/rsa_factorization.py) * [Rsa Key Generator](ciphers/rsa_key_generator.py) * [Shuffled Shift Cipher](ciphers/shuffled_shift_cipher.py) * [Simple Keyword Cypher](ciphers/simple_keyword_cypher.py) * [Simple Substitution Cipher](ciphers/simple_substitution_cipher.py) * [Trafid Cipher](ciphers/trafid_cipher.py) * [Transposition Cipher](ciphers/transposition_cipher.py) * [Transposition Cipher Encrypt Decrypt File](ciphers/transposition_cipher_encrypt_decrypt_file.py) * [Vigenere Cipher](ciphers/vigenere_cipher.py) * [Xor Cipher](ciphers/xor_cipher.py) ## Compression * [Burrows Wheeler](compression/burrows_wheeler.py) * [Huffman](compression/huffman.py) * [Lempel Ziv](compression/lempel_ziv.py) * [Lempel Ziv Decompress](compression/lempel_ziv_decompress.py) * [Lz77](compression/lz77.py) * [Peak Signal To Noise Ratio](compression/peak_signal_to_noise_ratio.py) * [Run Length Encoding](compression/run_length_encoding.py) ## Computer Vision * [Cnn Classification](computer_vision/cnn_classification.py) * [Flip Augmentation](computer_vision/flip_augmentation.py) * [Harris Corner](computer_vision/harris_corner.py) * [Horn Schunck](computer_vision/horn_schunck.py) * [Mean Threshold](computer_vision/mean_threshold.py) * [Mosaic Augmentation](computer_vision/mosaic_augmentation.py) * [Pooling Functions](computer_vision/pooling_functions.py) ## Conversions * [Astronomical Length Scale Conversion](conversions/astronomical_length_scale_conversion.py) * [Binary To Decimal](conversions/binary_to_decimal.py) * [Binary To Hexadecimal](conversions/binary_to_hexadecimal.py) * [Binary To Octal](conversions/binary_to_octal.py) * [Decimal To Any](conversions/decimal_to_any.py) * [Decimal To Binary](conversions/decimal_to_binary.py) * [Decimal To Binary Recursion](conversions/decimal_to_binary_recursion.py) * [Decimal To Hexadecimal](conversions/decimal_to_hexadecimal.py) * [Decimal To Octal](conversions/decimal_to_octal.py) * [Energy 
Conversions](conversions/energy_conversions.py) * [Excel Title To Column](conversions/excel_title_to_column.py) * [Hex To Bin](conversions/hex_to_bin.py) * [Hexadecimal To Decimal](conversions/hexadecimal_to_decimal.py) * [Length Conversion](conversions/length_conversion.py) * [Molecular Chemistry](conversions/molecular_chemistry.py) * [Octal To Decimal](conversions/octal_to_decimal.py) * [Prefix Conversions](conversions/prefix_conversions.py) * [Prefix Conversions String](conversions/prefix_conversions_string.py) * [Pressure Conversions](conversions/pressure_conversions.py) * [Rgb Hsv Conversion](conversions/rgb_hsv_conversion.py) * [Roman Numerals](conversions/roman_numerals.py) * [Speed Conversions](conversions/speed_conversions.py) * [Temperature Conversions](conversions/temperature_conversions.py) * [Volume Conversions](conversions/volume_conversions.py) * [Weight Conversion](conversions/weight_conversion.py) ## Data Structures * Arrays * [Permutations](data_structures/arrays/permutations.py) * [Prefix Sum](data_structures/arrays/prefix_sum.py) * [Product Sum](data_structures/arrays/product_sum.py) * Binary Tree * [Avl Tree](data_structures/binary_tree/avl_tree.py) * [Basic Binary Tree](data_structures/binary_tree/basic_binary_tree.py) * [Binary Search Tree](data_structures/binary_tree/binary_search_tree.py) * [Binary Search Tree Recursive](data_structures/binary_tree/binary_search_tree_recursive.py) * [Binary Tree Mirror](data_structures/binary_tree/binary_tree_mirror.py) * [Binary Tree Node Sum](data_structures/binary_tree/binary_tree_node_sum.py) * [Binary Tree Path Sum](data_structures/binary_tree/binary_tree_path_sum.py) * [Binary Tree Traversals](data_structures/binary_tree/binary_tree_traversals.py) * [Diff Views Of Binary Tree](data_structures/binary_tree/diff_views_of_binary_tree.py) * [Distribute Coins](data_structures/binary_tree/distribute_coins.py) * [Fenwick Tree](data_structures/binary_tree/fenwick_tree.py) * [Inorder Tree Traversal 2022](data_structures/binary_tree/inorder_tree_traversal_2022.py) * [Is Bst](data_structures/binary_tree/is_bst.py) * [Lazy Segment Tree](data_structures/binary_tree/lazy_segment_tree.py) * [Lowest Common Ancestor](data_structures/binary_tree/lowest_common_ancestor.py) * [Maximum Fenwick Tree](data_structures/binary_tree/maximum_fenwick_tree.py) * [Merge Two Binary Trees](data_structures/binary_tree/merge_two_binary_trees.py) * [Non Recursive Segment Tree](data_structures/binary_tree/non_recursive_segment_tree.py) * [Number Of Possible Binary Trees](data_structures/binary_tree/number_of_possible_binary_trees.py) * [Red Black Tree](data_structures/binary_tree/red_black_tree.py) * [Segment Tree](data_structures/binary_tree/segment_tree.py) * [Segment Tree Other](data_structures/binary_tree/segment_tree_other.py) * [Treap](data_structures/binary_tree/treap.py) * [Wavelet Tree](data_structures/binary_tree/wavelet_tree.py) * Disjoint Set * [Alternate Disjoint Set](data_structures/disjoint_set/alternate_disjoint_set.py) * [Disjoint Set](data_structures/disjoint_set/disjoint_set.py) * Hashing * [Bloom Filter](data_structures/hashing/bloom_filter.py) * [Double Hash](data_structures/hashing/double_hash.py) * [Hash Map](data_structures/hashing/hash_map.py) * [Hash Table](data_structures/hashing/hash_table.py) * [Hash Table With Linked List](data_structures/hashing/hash_table_with_linked_list.py) * Number Theory * [Prime Numbers](data_structures/hashing/number_theory/prime_numbers.py) * [Quadratic Probing](data_structures/hashing/quadratic_probing.py) 
* Tests * [Test Hash Map](data_structures/hashing/tests/test_hash_map.py) * Heap * [Binomial Heap](data_structures/heap/binomial_heap.py) * [Heap](data_structures/heap/heap.py) * [Heap Generic](data_structures/heap/heap_generic.py) * [Max Heap](data_structures/heap/max_heap.py) * [Min Heap](data_structures/heap/min_heap.py) * [Randomized Heap](data_structures/heap/randomized_heap.py) * [Skew Heap](data_structures/heap/skew_heap.py) * Linked List * [Circular Linked List](data_structures/linked_list/circular_linked_list.py) * [Deque Doubly](data_structures/linked_list/deque_doubly.py) * [Doubly Linked List](data_structures/linked_list/doubly_linked_list.py) * [Doubly Linked List Two](data_structures/linked_list/doubly_linked_list_two.py) * [From Sequence](data_structures/linked_list/from_sequence.py) * [Has Loop](data_structures/linked_list/has_loop.py) * [Is Palindrome](data_structures/linked_list/is_palindrome.py) * [Merge Two Lists](data_structures/linked_list/merge_two_lists.py) * [Middle Element Of Linked List](data_structures/linked_list/middle_element_of_linked_list.py) * [Print Reverse](data_structures/linked_list/print_reverse.py) * [Singly Linked List](data_structures/linked_list/singly_linked_list.py) * [Skip List](data_structures/linked_list/skip_list.py) * [Swap Nodes](data_structures/linked_list/swap_nodes.py) * Queue * [Circular Queue](data_structures/queue/circular_queue.py) * [Circular Queue Linked List](data_structures/queue/circular_queue_linked_list.py) * [Double Ended Queue](data_structures/queue/double_ended_queue.py) * [Linked Queue](data_structures/queue/linked_queue.py) * [Priority Queue Using List](data_structures/queue/priority_queue_using_list.py) * [Queue By List](data_structures/queue/queue_by_list.py) * [Queue By Two Stacks](data_structures/queue/queue_by_two_stacks.py) * [Queue On Pseudo Stack](data_structures/queue/queue_on_pseudo_stack.py) * Stacks * [Balanced Parentheses](data_structures/stacks/balanced_parentheses.py) * [Dijkstras Two Stack Algorithm](data_structures/stacks/dijkstras_two_stack_algorithm.py) * [Evaluate Postfix Notations](data_structures/stacks/evaluate_postfix_notations.py) * [Infix To Postfix Conversion](data_structures/stacks/infix_to_postfix_conversion.py) * [Infix To Prefix Conversion](data_structures/stacks/infix_to_prefix_conversion.py) * [Next Greater Element](data_structures/stacks/next_greater_element.py) * [Postfix Evaluation](data_structures/stacks/postfix_evaluation.py) * [Prefix Evaluation](data_structures/stacks/prefix_evaluation.py) * [Stack](data_structures/stacks/stack.py) * [Stack With Doubly Linked List](data_structures/stacks/stack_with_doubly_linked_list.py) * [Stack With Singly Linked List](data_structures/stacks/stack_with_singly_linked_list.py) * [Stock Span Problem](data_structures/stacks/stock_span_problem.py) * Trie * [Radix Tree](data_structures/trie/radix_tree.py) * [Trie](data_structures/trie/trie.py) ## Digital Image Processing * [Change Brightness](digital_image_processing/change_brightness.py) * [Change Contrast](digital_image_processing/change_contrast.py) * [Convert To Negative](digital_image_processing/convert_to_negative.py) * Dithering * [Burkes](digital_image_processing/dithering/burkes.py) * Edge Detection * [Canny](digital_image_processing/edge_detection/canny.py) * Filters * [Bilateral Filter](digital_image_processing/filters/bilateral_filter.py) * [Convolve](digital_image_processing/filters/convolve.py) * [Gabor Filter](digital_image_processing/filters/gabor_filter.py) * [Gaussian 
Filter](digital_image_processing/filters/gaussian_filter.py) * [Local Binary Pattern](digital_image_processing/filters/local_binary_pattern.py) * [Median Filter](digital_image_processing/filters/median_filter.py) * [Sobel Filter](digital_image_processing/filters/sobel_filter.py) * Histogram Equalization * [Histogram Stretch](digital_image_processing/histogram_equalization/histogram_stretch.py) * [Index Calculation](digital_image_processing/index_calculation.py) * Morphological Operations * [Dilation Operation](digital_image_processing/morphological_operations/dilation_operation.py) * [Erosion Operation](digital_image_processing/morphological_operations/erosion_operation.py) * Resize * [Resize](digital_image_processing/resize/resize.py) * Rotation * [Rotation](digital_image_processing/rotation/rotation.py) * [Sepia](digital_image_processing/sepia.py) * [Test Digital Image Processing](digital_image_processing/test_digital_image_processing.py) ## Divide And Conquer * [Closest Pair Of Points](divide_and_conquer/closest_pair_of_points.py) * [Convex Hull](divide_and_conquer/convex_hull.py) * [Heaps Algorithm](divide_and_conquer/heaps_algorithm.py) * [Heaps Algorithm Iterative](divide_and_conquer/heaps_algorithm_iterative.py) * [Inversions](divide_and_conquer/inversions.py) * [Kth Order Statistic](divide_and_conquer/kth_order_statistic.py) * [Max Difference Pair](divide_and_conquer/max_difference_pair.py) * [Max Subarray](divide_and_conquer/max_subarray.py) * [Mergesort](divide_and_conquer/mergesort.py) * [Peak](divide_and_conquer/peak.py) * [Power](divide_and_conquer/power.py) * [Strassen Matrix Multiplication](divide_and_conquer/strassen_matrix_multiplication.py) ## Dynamic Programming * [Abbreviation](dynamic_programming/abbreviation.py) * [All Construct](dynamic_programming/all_construct.py) * [Bitmask](dynamic_programming/bitmask.py) * [Catalan Numbers](dynamic_programming/catalan_numbers.py) * [Climbing Stairs](dynamic_programming/climbing_stairs.py) * [Combination Sum Iv](dynamic_programming/combination_sum_iv.py) * [Edit Distance](dynamic_programming/edit_distance.py) * [Factorial](dynamic_programming/factorial.py) * [Fast Fibonacci](dynamic_programming/fast_fibonacci.py) * [Fibonacci](dynamic_programming/fibonacci.py) * [Fizz Buzz](dynamic_programming/fizz_buzz.py) * [Floyd Warshall](dynamic_programming/floyd_warshall.py) * [Integer Partition](dynamic_programming/integer_partition.py) * [Iterating Through Submasks](dynamic_programming/iterating_through_submasks.py) * [K Means Clustering Tensorflow](dynamic_programming/k_means_clustering_tensorflow.py) * [Knapsack](dynamic_programming/knapsack.py) * [Longest Common Subsequence](dynamic_programming/longest_common_subsequence.py) * [Longest Common Substring](dynamic_programming/longest_common_substring.py) * [Longest Increasing Subsequence](dynamic_programming/longest_increasing_subsequence.py) * [Longest Increasing Subsequence O(Nlogn)](dynamic_programming/longest_increasing_subsequence_o(nlogn).py) * [Longest Sub Array](dynamic_programming/longest_sub_array.py) * [Matrix Chain Order](dynamic_programming/matrix_chain_order.py) * [Max Non Adjacent Sum](dynamic_programming/max_non_adjacent_sum.py) * [Max Product Subarray](dynamic_programming/max_product_subarray.py) * [Max Subarray Sum](dynamic_programming/max_subarray_sum.py) * [Min Distance Up Bottom](dynamic_programming/min_distance_up_bottom.py) * [Minimum Coin Change](dynamic_programming/minimum_coin_change.py) * [Minimum Cost Path](dynamic_programming/minimum_cost_path.py) * [Minimum 
Partition](dynamic_programming/minimum_partition.py) * [Minimum Size Subarray Sum](dynamic_programming/minimum_size_subarray_sum.py) * [Minimum Squares To Represent A Number](dynamic_programming/minimum_squares_to_represent_a_number.py) * [Minimum Steps To One](dynamic_programming/minimum_steps_to_one.py) * [Minimum Tickets Cost](dynamic_programming/minimum_tickets_cost.py) * [Optimal Binary Search Tree](dynamic_programming/optimal_binary_search_tree.py) * [Palindrome Partitioning](dynamic_programming/palindrome_partitioning.py) * [Regex Match](dynamic_programming/regex_match.py) * [Rod Cutting](dynamic_programming/rod_cutting.py) * [Subset Generation](dynamic_programming/subset_generation.py) * [Sum Of Subset](dynamic_programming/sum_of_subset.py) * [Tribonacci](dynamic_programming/tribonacci.py) * [Viterbi](dynamic_programming/viterbi.py) * [Word Break](dynamic_programming/word_break.py) ## Electronics * [Apparent Power](electronics/apparent_power.py) * [Builtin Voltage](electronics/builtin_voltage.py) * [Carrier Concentration](electronics/carrier_concentration.py) * [Circular Convolution](electronics/circular_convolution.py) * [Coulombs Law](electronics/coulombs_law.py) * [Electric Conductivity](electronics/electric_conductivity.py) * [Electric Power](electronics/electric_power.py) * [Electrical Impedance](electronics/electrical_impedance.py) * [Ind Reactance](electronics/ind_reactance.py) * [Ohms Law](electronics/ohms_law.py) * [Real And Reactive Power](electronics/real_and_reactive_power.py) * [Resistor Equivalence](electronics/resistor_equivalence.py) * [Resonant Frequency](electronics/resonant_frequency.py) ## File Transfer * [Receive File](file_transfer/receive_file.py) * [Send File](file_transfer/send_file.py) * Tests * [Test Send File](file_transfer/tests/test_send_file.py) ## Financial * [Equated Monthly Installments](financial/equated_monthly_installments.py) * [Interest](financial/interest.py) * [Present Value](financial/present_value.py) * [Price Plus Tax](financial/price_plus_tax.py) ## Fractals * [Julia Sets](fractals/julia_sets.py) * [Koch Snowflake](fractals/koch_snowflake.py) * [Mandelbrot](fractals/mandelbrot.py) * [Sierpinski Triangle](fractals/sierpinski_triangle.py) ## Fuzzy Logic * [Fuzzy Operations](fuzzy_logic/fuzzy_operations.py) ## Genetic Algorithm * [Basic String](genetic_algorithm/basic_string.py) ## Geodesy * [Haversine Distance](geodesy/haversine_distance.py) * [Lamberts Ellipsoidal Distance](geodesy/lamberts_ellipsoidal_distance.py) ## Graphics * [Bezier Curve](graphics/bezier_curve.py) * [Vector3 For 2D Rendering](graphics/vector3_for_2d_rendering.py) ## Graphs * [A Star](graphs/a_star.py) * [Articulation Points](graphs/articulation_points.py) * [Basic Graphs](graphs/basic_graphs.py) * [Bellman Ford](graphs/bellman_ford.py) * [Bi Directional Dijkstra](graphs/bi_directional_dijkstra.py) * [Bidirectional A Star](graphs/bidirectional_a_star.py) * [Bidirectional Breadth First Search](graphs/bidirectional_breadth_first_search.py) * [Boruvka](graphs/boruvka.py) * [Breadth First Search](graphs/breadth_first_search.py) * [Breadth First Search 2](graphs/breadth_first_search_2.py) * [Breadth First Search Shortest Path](graphs/breadth_first_search_shortest_path.py) * [Breadth First Search Shortest Path 2](graphs/breadth_first_search_shortest_path_2.py) * [Breadth First Search Zero One Shortest Path](graphs/breadth_first_search_zero_one_shortest_path.py) * [Check Bipartite Graph Bfs](graphs/check_bipartite_graph_bfs.py) * [Check Bipartite Graph 
Dfs](graphs/check_bipartite_graph_dfs.py) * [Check Cycle](graphs/check_cycle.py) * [Connected Components](graphs/connected_components.py) * [Depth First Search](graphs/depth_first_search.py) * [Depth First Search 2](graphs/depth_first_search_2.py) * [Dijkstra](graphs/dijkstra.py) * [Dijkstra 2](graphs/dijkstra_2.py) * [Dijkstra Algorithm](graphs/dijkstra_algorithm.py) * [Dijkstra Alternate](graphs/dijkstra_alternate.py) * [Dijkstra Binary Grid](graphs/dijkstra_binary_grid.py) * [Dinic](graphs/dinic.py) * [Directed And Undirected (Weighted) Graph](graphs/directed_and_undirected_(weighted)_graph.py) * [Edmonds Karp Multiple Source And Sink](graphs/edmonds_karp_multiple_source_and_sink.py) * [Eulerian Path And Circuit For Undirected Graph](graphs/eulerian_path_and_circuit_for_undirected_graph.py) * [Even Tree](graphs/even_tree.py) * [Finding Bridges](graphs/finding_bridges.py) * [Frequent Pattern Graph Miner](graphs/frequent_pattern_graph_miner.py) * [G Topological Sort](graphs/g_topological_sort.py) * [Gale Shapley Bigraph](graphs/gale_shapley_bigraph.py) * [Graph Adjacency List](graphs/graph_adjacency_list.py) * [Graph Adjacency Matrix](graphs/graph_adjacency_matrix.py) * [Graph List](graphs/graph_list.py) * [Graphs Floyd Warshall](graphs/graphs_floyd_warshall.py) * [Greedy Best First](graphs/greedy_best_first.py) * [Greedy Min Vertex Cover](graphs/greedy_min_vertex_cover.py) * [Kahns Algorithm Long](graphs/kahns_algorithm_long.py) * [Kahns Algorithm Topo](graphs/kahns_algorithm_topo.py) * [Karger](graphs/karger.py) * [Markov Chain](graphs/markov_chain.py) * [Matching Min Vertex Cover](graphs/matching_min_vertex_cover.py) * [Minimum Path Sum](graphs/minimum_path_sum.py) * [Minimum Spanning Tree Boruvka](graphs/minimum_spanning_tree_boruvka.py) * [Minimum Spanning Tree Kruskal](graphs/minimum_spanning_tree_kruskal.py) * [Minimum Spanning Tree Kruskal2](graphs/minimum_spanning_tree_kruskal2.py) * [Minimum Spanning Tree Prims](graphs/minimum_spanning_tree_prims.py) * [Minimum Spanning Tree Prims2](graphs/minimum_spanning_tree_prims2.py) * [Multi Heuristic Astar](graphs/multi_heuristic_astar.py) * [Page Rank](graphs/page_rank.py) * [Prim](graphs/prim.py) * [Random Graph Generator](graphs/random_graph_generator.py) * [Scc Kosaraju](graphs/scc_kosaraju.py) * [Strongly Connected Components](graphs/strongly_connected_components.py) * [Tarjans Scc](graphs/tarjans_scc.py) * Tests * [Test Min Spanning Tree Kruskal](graphs/tests/test_min_spanning_tree_kruskal.py) * [Test Min Spanning Tree Prim](graphs/tests/test_min_spanning_tree_prim.py) ## Greedy Methods * [Fractional Knapsack](greedy_methods/fractional_knapsack.py) * [Fractional Knapsack 2](greedy_methods/fractional_knapsack_2.py) * [Minimum Waiting Time](greedy_methods/minimum_waiting_time.py) * [Optimal Merge Pattern](greedy_methods/optimal_merge_pattern.py) ## Hashes * [Adler32](hashes/adler32.py) * [Chaos Machine](hashes/chaos_machine.py) * [Djb2](hashes/djb2.py) * [Elf](hashes/elf.py) * [Enigma Machine](hashes/enigma_machine.py) * [Hamming Code](hashes/hamming_code.py) * [Luhn](hashes/luhn.py) * [Md5](hashes/md5.py) * [Sdbm](hashes/sdbm.py) * [Sha1](hashes/sha1.py) * [Sha256](hashes/sha256.py) ## Knapsack * [Greedy Knapsack](knapsack/greedy_knapsack.py) * [Knapsack](knapsack/knapsack.py) * [Recursive Approach Knapsack](knapsack/recursive_approach_knapsack.py) * Tests * [Test Greedy Knapsack](knapsack/tests/test_greedy_knapsack.py) * [Test Knapsack](knapsack/tests/test_knapsack.py) ## Linear Algebra * Src * [Conjugate 
Gradient](linear_algebra/src/conjugate_gradient.py) * [Lib](linear_algebra/src/lib.py) * [Polynom For Points](linear_algebra/src/polynom_for_points.py) * [Power Iteration](linear_algebra/src/power_iteration.py) * [Rank Of Matrix](linear_algebra/src/rank_of_matrix.py) * [Rayleigh Quotient](linear_algebra/src/rayleigh_quotient.py) * [Schur Complement](linear_algebra/src/schur_complement.py) * [Test Linear Algebra](linear_algebra/src/test_linear_algebra.py) * [Transformations 2D](linear_algebra/src/transformations_2d.py) ## Linear Programming * [Simplex](linear_programming/simplex.py) ## Machine Learning * [Astar](machine_learning/astar.py) * [Data Transformations](machine_learning/data_transformations.py) * [Decision Tree](machine_learning/decision_tree.py) * [Dimensionality Reduction](machine_learning/dimensionality_reduction.py) * Forecasting * [Run](machine_learning/forecasting/run.py) * [Gradient Descent](machine_learning/gradient_descent.py) * [K Means Clust](machine_learning/k_means_clust.py) * [K Nearest Neighbours](machine_learning/k_nearest_neighbours.py) * [Knn Sklearn](machine_learning/knn_sklearn.py) * [Linear Discriminant Analysis](machine_learning/linear_discriminant_analysis.py) * [Linear Regression](machine_learning/linear_regression.py) * Local Weighted Learning * [Local Weighted Learning](machine_learning/local_weighted_learning/local_weighted_learning.py) * [Logistic Regression](machine_learning/logistic_regression.py) * Lstm * [Lstm Prediction](machine_learning/lstm/lstm_prediction.py) * [Multilayer Perceptron Classifier](machine_learning/multilayer_perceptron_classifier.py) * [Polynomial Regression](machine_learning/polynomial_regression.py) * [Scoring Functions](machine_learning/scoring_functions.py) * [Self Organizing Map](machine_learning/self_organizing_map.py) * [Sequential Minimum Optimization](machine_learning/sequential_minimum_optimization.py) * [Similarity Search](machine_learning/similarity_search.py) * [Support Vector Machines](machine_learning/support_vector_machines.py) * [Word Frequency Functions](machine_learning/word_frequency_functions.py) * [Xgboost Classifier](machine_learning/xgboost_classifier.py) * [Xgboost Regressor](machine_learning/xgboost_regressor.py) ## Maths * [Abs](maths/abs.py) * [Add](maths/add.py) * [Addition Without Arithmetic](maths/addition_without_arithmetic.py) * [Aliquot Sum](maths/aliquot_sum.py) * [Allocation Number](maths/allocation_number.py) * [Arc Length](maths/arc_length.py) * [Area](maths/area.py) * [Area Under Curve](maths/area_under_curve.py) * [Armstrong Numbers](maths/armstrong_numbers.py) * [Automorphic Number](maths/automorphic_number.py) * [Average Absolute Deviation](maths/average_absolute_deviation.py) * [Average Mean](maths/average_mean.py) * [Average Median](maths/average_median.py) * [Average Mode](maths/average_mode.py) * [Bailey Borwein Plouffe](maths/bailey_borwein_plouffe.py) * [Basic Maths](maths/basic_maths.py) * [Binary Exp Mod](maths/binary_exp_mod.py) * [Binary Exponentiation](maths/binary_exponentiation.py) * [Binary Exponentiation 2](maths/binary_exponentiation_2.py) * [Binary Exponentiation 3](maths/binary_exponentiation_3.py) * [Binomial Coefficient](maths/binomial_coefficient.py) * [Binomial Distribution](maths/binomial_distribution.py) * [Bisection](maths/bisection.py) * [Carmichael Number](maths/carmichael_number.py) * [Catalan Number](maths/catalan_number.py) * [Ceil](maths/ceil.py) * [Check Polygon](maths/check_polygon.py) * [Chudnovsky Algorithm](maths/chudnovsky_algorithm.py) * [Collatz 
Sequence](maths/collatz_sequence.py) * [Combinations](maths/combinations.py) * [Decimal Isolate](maths/decimal_isolate.py) * [Decimal To Fraction](maths/decimal_to_fraction.py) * [Dodecahedron](maths/dodecahedron.py) * [Double Factorial Iterative](maths/double_factorial_iterative.py) * [Double Factorial Recursive](maths/double_factorial_recursive.py) * [Dual Number Automatic Differentiation](maths/dual_number_automatic_differentiation.py) * [Entropy](maths/entropy.py) * [Euclidean Distance](maths/euclidean_distance.py) * [Euclidean Gcd](maths/euclidean_gcd.py) * [Euler Method](maths/euler_method.py) * [Euler Modified](maths/euler_modified.py) * [Eulers Totient](maths/eulers_totient.py) * [Extended Euclidean Algorithm](maths/extended_euclidean_algorithm.py) * [Factorial](maths/factorial.py) * [Factors](maths/factors.py) * [Fermat Little Theorem](maths/fermat_little_theorem.py) * [Fibonacci](maths/fibonacci.py) * [Find Max](maths/find_max.py) * [Find Max Recursion](maths/find_max_recursion.py) * [Find Min](maths/find_min.py) * [Find Min Recursion](maths/find_min_recursion.py) * [Floor](maths/floor.py) * [Gamma](maths/gamma.py) * [Gamma Recursive](maths/gamma_recursive.py) * [Gaussian](maths/gaussian.py) * [Gaussian Error Linear Unit](maths/gaussian_error_linear_unit.py) * [Gcd Of N Numbers](maths/gcd_of_n_numbers.py) * [Greatest Common Divisor](maths/greatest_common_divisor.py) * [Greedy Coin Change](maths/greedy_coin_change.py) * [Hamming Numbers](maths/hamming_numbers.py) * [Hardy Ramanujanalgo](maths/hardy_ramanujanalgo.py) * [Hexagonal Number](maths/hexagonal_number.py) * [Integration By Simpson Approx](maths/integration_by_simpson_approx.py) * [Interquartile Range](maths/interquartile_range.py) * [Is Int Palindrome](maths/is_int_palindrome.py) * [Is Ip V4 Address Valid](maths/is_ip_v4_address_valid.py) * [Is Square Free](maths/is_square_free.py) * [Jaccard Similarity](maths/jaccard_similarity.py) * [Juggler Sequence](maths/juggler_sequence.py) * [Karatsuba](maths/karatsuba.py) * [Krishnamurthy Number](maths/krishnamurthy_number.py) * [Kth Lexicographic Permutation](maths/kth_lexicographic_permutation.py) * [Largest Of Very Large Numbers](maths/largest_of_very_large_numbers.py) * [Least Common Multiple](maths/least_common_multiple.py) * [Line Length](maths/line_length.py) * [Liouville Lambda](maths/liouville_lambda.py) * [Lucas Lehmer Primality Test](maths/lucas_lehmer_primality_test.py) * [Lucas Series](maths/lucas_series.py) * [Maclaurin Series](maths/maclaurin_series.py) * [Manhattan Distance](maths/manhattan_distance.py) * [Matrix Exponentiation](maths/matrix_exponentiation.py) * [Max Sum Sliding Window](maths/max_sum_sliding_window.py) * [Median Of Two Arrays](maths/median_of_two_arrays.py) * [Miller Rabin](maths/miller_rabin.py) * [Mobius Function](maths/mobius_function.py) * [Modular Exponential](maths/modular_exponential.py) * [Monte Carlo](maths/monte_carlo.py) * [Monte Carlo Dice](maths/monte_carlo_dice.py) * [Nevilles Method](maths/nevilles_method.py) * [Newton Raphson](maths/newton_raphson.py) * [Number Of Digits](maths/number_of_digits.py) * [Numerical Integration](maths/numerical_integration.py) * [Odd Sieve](maths/odd_sieve.py) * [Perfect Cube](maths/perfect_cube.py) * [Perfect Number](maths/perfect_number.py) * [Perfect Square](maths/perfect_square.py) * [Persistence](maths/persistence.py) * [Pi Generator](maths/pi_generator.py) * [Pi Monte Carlo Estimation](maths/pi_monte_carlo_estimation.py) * [Points Are Collinear 3D](maths/points_are_collinear_3d.py) * [Pollard 
Rho](maths/pollard_rho.py) * [Polynomial Evaluation](maths/polynomial_evaluation.py) * Polynomials * [Single Indeterminate Operations](maths/polynomials/single_indeterminate_operations.py) * [Power Using Recursion](maths/power_using_recursion.py) * [Prime Check](maths/prime_check.py) * [Prime Factors](maths/prime_factors.py) * [Prime Numbers](maths/prime_numbers.py) * [Prime Sieve Eratosthenes](maths/prime_sieve_eratosthenes.py) * [Primelib](maths/primelib.py) * [Print Multiplication Table](maths/print_multiplication_table.py) * [Pronic Number](maths/pronic_number.py) * [Proth Number](maths/proth_number.py) * [Pythagoras](maths/pythagoras.py) * [Qr Decomposition](maths/qr_decomposition.py) * [Quadratic Equations Complex Numbers](maths/quadratic_equations_complex_numbers.py) * [Radians](maths/radians.py) * [Radix2 Fft](maths/radix2_fft.py) * [Relu](maths/relu.py) * [Remove Digit](maths/remove_digit.py) * [Runge Kutta](maths/runge_kutta.py) * [Segmented Sieve](maths/segmented_sieve.py) * Series * [Arithmetic](maths/series/arithmetic.py) * [Geometric](maths/series/geometric.py) * [Geometric Series](maths/series/geometric_series.py) * [Harmonic](maths/series/harmonic.py) * [Harmonic Series](maths/series/harmonic_series.py) * [Hexagonal Numbers](maths/series/hexagonal_numbers.py) * [P Series](maths/series/p_series.py) * [Sieve Of Eratosthenes](maths/sieve_of_eratosthenes.py) * [Sigmoid](maths/sigmoid.py) * [Sigmoid Linear Unit](maths/sigmoid_linear_unit.py) * [Signum](maths/signum.py) * [Simpson Rule](maths/simpson_rule.py) * [Simultaneous Linear Equation Solver](maths/simultaneous_linear_equation_solver.py) * [Sin](maths/sin.py) * [Sock Merchant](maths/sock_merchant.py) * [Softmax](maths/softmax.py) * [Square Root](maths/square_root.py) * [Sum Of Arithmetic Series](maths/sum_of_arithmetic_series.py) * [Sum Of Digits](maths/sum_of_digits.py) * [Sum Of Geometric Progression](maths/sum_of_geometric_progression.py) * [Sum Of Harmonic Series](maths/sum_of_harmonic_series.py) * [Sumset](maths/sumset.py) * [Sylvester Sequence](maths/sylvester_sequence.py) * [Tanh](maths/tanh.py) * [Test Prime Check](maths/test_prime_check.py) * [Trapezoidal Rule](maths/trapezoidal_rule.py) * [Triplet Sum](maths/triplet_sum.py) * [Twin Prime](maths/twin_prime.py) * [Two Pointer](maths/two_pointer.py) * [Two Sum](maths/two_sum.py) * [Ugly Numbers](maths/ugly_numbers.py) * [Volume](maths/volume.py) * [Weird Number](maths/weird_number.py) * [Zellers Congruence](maths/zellers_congruence.py) ## Matrix * [Binary Search Matrix](matrix/binary_search_matrix.py) * [Count Islands In Matrix](matrix/count_islands_in_matrix.py) * [Count Negative Numbers In Sorted Matrix](matrix/count_negative_numbers_in_sorted_matrix.py) * [Count Paths](matrix/count_paths.py) * [Cramers Rule 2X2](matrix/cramers_rule_2x2.py) * [Inverse Of Matrix](matrix/inverse_of_matrix.py) * [Largest Square Area In Matrix](matrix/largest_square_area_in_matrix.py) * [Matrix Class](matrix/matrix_class.py) * [Matrix Operation](matrix/matrix_operation.py) * [Max Area Of Island](matrix/max_area_of_island.py) * [Nth Fibonacci Using Matrix Exponentiation](matrix/nth_fibonacci_using_matrix_exponentiation.py) * [Pascal Triangle](matrix/pascal_triangle.py) * [Rotate Matrix](matrix/rotate_matrix.py) * [Searching In Sorted Matrix](matrix/searching_in_sorted_matrix.py) * [Sherman Morrison](matrix/sherman_morrison.py) * [Spiral Print](matrix/spiral_print.py) * Tests * [Test Matrix Operation](matrix/tests/test_matrix_operation.py) ## Networking Flow * [Ford 
Fulkerson](networking_flow/ford_fulkerson.py) * [Minimum Cut](networking_flow/minimum_cut.py) ## Neural Network * [2 Hidden Layers Neural Network](neural_network/2_hidden_layers_neural_network.py) * Activation Functions * [Exponential Linear Unit](neural_network/activation_functions/exponential_linear_unit.py) * [Back Propagation Neural Network](neural_network/back_propagation_neural_network.py) * [Convolution Neural Network](neural_network/convolution_neural_network.py) * [Perceptron](neural_network/perceptron.py) * [Simple Neural Network](neural_network/simple_neural_network.py) ## Other * [Activity Selection](other/activity_selection.py) * [Alternative List Arrange](other/alternative_list_arrange.py) * [Davisb Putnamb Logemannb Loveland](other/davisb_putnamb_logemannb_loveland.py) * [Dijkstra Bankers Algorithm](other/dijkstra_bankers_algorithm.py) * [Doomsday](other/doomsday.py) * [Fischer Yates Shuffle](other/fischer_yates_shuffle.py) * [Gauss Easter](other/gauss_easter.py) * [Graham Scan](other/graham_scan.py) * [Greedy](other/greedy.py) * [Guess The Number Search](other/guess_the_number_search.py) * [H Index](other/h_index.py) * [Least Recently Used](other/least_recently_used.py) * [Lfu Cache](other/lfu_cache.py) * [Linear Congruential Generator](other/linear_congruential_generator.py) * [Lru Cache](other/lru_cache.py) * [Magicdiamondpattern](other/magicdiamondpattern.py) * [Maximum Subsequence](other/maximum_subsequence.py) * [Nested Brackets](other/nested_brackets.py) * [Number Container System](other/number_container_system.py) * [Password](other/password.py) * [Quine](other/quine.py) * [Scoring Algorithm](other/scoring_algorithm.py) * [Sdes](other/sdes.py) * [Tower Of Hanoi](other/tower_of_hanoi.py) ## Physics * [Altitude Pressure](physics/altitude_pressure.py) * [Archimedes Principle](physics/archimedes_principle.py) * [Basic Orbital Capture](physics/basic_orbital_capture.py) * [Casimir Effect](physics/casimir_effect.py) * [Centripetal Force](physics/centripetal_force.py) * [Grahams Law](physics/grahams_law.py) * [Horizontal Projectile Motion](physics/horizontal_projectile_motion.py) * [Hubble Parameter](physics/hubble_parameter.py) * [Ideal Gas Law](physics/ideal_gas_law.py) * [Kinetic Energy](physics/kinetic_energy.py) * [Lorentz Transformation Four Vector](physics/lorentz_transformation_four_vector.py) * [Malus Law](physics/malus_law.py) * [N Body Simulation](physics/n_body_simulation.py) * [Newtons Law Of Gravitation](physics/newtons_law_of_gravitation.py) * [Newtons Second Law Of Motion](physics/newtons_second_law_of_motion.py) * [Potential Energy](physics/potential_energy.py) * [Rms Speed Of Molecule](physics/rms_speed_of_molecule.py) * [Shear Stress](physics/shear_stress.py) * [Speed Of Sound](physics/speed_of_sound.py) ## Project Euler * Problem 001 * [Sol1](project_euler/problem_001/sol1.py) * [Sol2](project_euler/problem_001/sol2.py) * [Sol3](project_euler/problem_001/sol3.py) * [Sol4](project_euler/problem_001/sol4.py) * [Sol5](project_euler/problem_001/sol5.py) * [Sol6](project_euler/problem_001/sol6.py) * [Sol7](project_euler/problem_001/sol7.py) * Problem 002 * [Sol1](project_euler/problem_002/sol1.py) * [Sol2](project_euler/problem_002/sol2.py) * [Sol3](project_euler/problem_002/sol3.py) * [Sol4](project_euler/problem_002/sol4.py) * [Sol5](project_euler/problem_002/sol5.py) * Problem 003 * [Sol1](project_euler/problem_003/sol1.py) * [Sol2](project_euler/problem_003/sol2.py) * [Sol3](project_euler/problem_003/sol3.py) * Problem 004 * 
[Sol1](project_euler/problem_004/sol1.py) * [Sol2](project_euler/problem_004/sol2.py) * Problem 005 * [Sol1](project_euler/problem_005/sol1.py) * [Sol2](project_euler/problem_005/sol2.py) * Problem 006 * [Sol1](project_euler/problem_006/sol1.py) * [Sol2](project_euler/problem_006/sol2.py) * [Sol3](project_euler/problem_006/sol3.py) * [Sol4](project_euler/problem_006/sol4.py) * Problem 007 * [Sol1](project_euler/problem_007/sol1.py) * [Sol2](project_euler/problem_007/sol2.py) * [Sol3](project_euler/problem_007/sol3.py) * Problem 008 * [Sol1](project_euler/problem_008/sol1.py) * [Sol2](project_euler/problem_008/sol2.py) * [Sol3](project_euler/problem_008/sol3.py) * Problem 009 * [Sol1](project_euler/problem_009/sol1.py) * [Sol2](project_euler/problem_009/sol2.py) * [Sol3](project_euler/problem_009/sol3.py) * Problem 010 * [Sol1](project_euler/problem_010/sol1.py) * [Sol2](project_euler/problem_010/sol2.py) * [Sol3](project_euler/problem_010/sol3.py) * Problem 011 * [Sol1](project_euler/problem_011/sol1.py) * [Sol2](project_euler/problem_011/sol2.py) * Problem 012 * [Sol1](project_euler/problem_012/sol1.py) * [Sol2](project_euler/problem_012/sol2.py) * Problem 013 * [Sol1](project_euler/problem_013/sol1.py) * Problem 014 * [Sol1](project_euler/problem_014/sol1.py) * [Sol2](project_euler/problem_014/sol2.py) * Problem 015 * [Sol1](project_euler/problem_015/sol1.py) * Problem 016 * [Sol1](project_euler/problem_016/sol1.py) * [Sol2](project_euler/problem_016/sol2.py) * Problem 017 * [Sol1](project_euler/problem_017/sol1.py) * Problem 018 * [Solution](project_euler/problem_018/solution.py) * Problem 019 * [Sol1](project_euler/problem_019/sol1.py) * Problem 020 * [Sol1](project_euler/problem_020/sol1.py) * [Sol2](project_euler/problem_020/sol2.py) * [Sol3](project_euler/problem_020/sol3.py) * [Sol4](project_euler/problem_020/sol4.py) * Problem 021 * [Sol1](project_euler/problem_021/sol1.py) * Problem 022 * [Sol1](project_euler/problem_022/sol1.py) * [Sol2](project_euler/problem_022/sol2.py) * Problem 023 * [Sol1](project_euler/problem_023/sol1.py) * Problem 024 * [Sol1](project_euler/problem_024/sol1.py) * Problem 025 * [Sol1](project_euler/problem_025/sol1.py) * [Sol2](project_euler/problem_025/sol2.py) * [Sol3](project_euler/problem_025/sol3.py) * Problem 026 * [Sol1](project_euler/problem_026/sol1.py) * Problem 027 * [Sol1](project_euler/problem_027/sol1.py) * Problem 028 * [Sol1](project_euler/problem_028/sol1.py) * Problem 029 * [Sol1](project_euler/problem_029/sol1.py) * Problem 030 * [Sol1](project_euler/problem_030/sol1.py) * Problem 031 * [Sol1](project_euler/problem_031/sol1.py) * [Sol2](project_euler/problem_031/sol2.py) * Problem 032 * [Sol32](project_euler/problem_032/sol32.py) * Problem 033 * [Sol1](project_euler/problem_033/sol1.py) * Problem 034 * [Sol1](project_euler/problem_034/sol1.py) * Problem 035 * [Sol1](project_euler/problem_035/sol1.py) * Problem 036 * [Sol1](project_euler/problem_036/sol1.py) * Problem 037 * [Sol1](project_euler/problem_037/sol1.py) * Problem 038 * [Sol1](project_euler/problem_038/sol1.py) * Problem 039 * [Sol1](project_euler/problem_039/sol1.py) * Problem 040 * [Sol1](project_euler/problem_040/sol1.py) * Problem 041 * [Sol1](project_euler/problem_041/sol1.py) * Problem 042 * [Solution42](project_euler/problem_042/solution42.py) * Problem 043 * [Sol1](project_euler/problem_043/sol1.py) * Problem 044 * [Sol1](project_euler/problem_044/sol1.py) * Problem 045 * [Sol1](project_euler/problem_045/sol1.py) * Problem 046 * 
[Sol1](project_euler/problem_046/sol1.py) * Problem 047 * [Sol1](project_euler/problem_047/sol1.py) * Problem 048 * [Sol1](project_euler/problem_048/sol1.py) * Problem 049 * [Sol1](project_euler/problem_049/sol1.py) * Problem 050 * [Sol1](project_euler/problem_050/sol1.py) * Problem 051 * [Sol1](project_euler/problem_051/sol1.py) * Problem 052 * [Sol1](project_euler/problem_052/sol1.py) * Problem 053 * [Sol1](project_euler/problem_053/sol1.py) * Problem 054 * [Sol1](project_euler/problem_054/sol1.py) * [Test Poker Hand](project_euler/problem_054/test_poker_hand.py) * Problem 055 * [Sol1](project_euler/problem_055/sol1.py) * Problem 056 * [Sol1](project_euler/problem_056/sol1.py) * Problem 057 * [Sol1](project_euler/problem_057/sol1.py) * Problem 058 * [Sol1](project_euler/problem_058/sol1.py) * Problem 059 * [Sol1](project_euler/problem_059/sol1.py) * Problem 062 * [Sol1](project_euler/problem_062/sol1.py) * Problem 063 * [Sol1](project_euler/problem_063/sol1.py) * Problem 064 * [Sol1](project_euler/problem_064/sol1.py) * Problem 065 * [Sol1](project_euler/problem_065/sol1.py) * Problem 067 * [Sol1](project_euler/problem_067/sol1.py) * [Sol2](project_euler/problem_067/sol2.py) * Problem 068 * [Sol1](project_euler/problem_068/sol1.py) * Problem 069 * [Sol1](project_euler/problem_069/sol1.py) * Problem 070 * [Sol1](project_euler/problem_070/sol1.py) * Problem 071 * [Sol1](project_euler/problem_071/sol1.py) * Problem 072 * [Sol1](project_euler/problem_072/sol1.py) * [Sol2](project_euler/problem_072/sol2.py) * Problem 073 * [Sol1](project_euler/problem_073/sol1.py) * Problem 074 * [Sol1](project_euler/problem_074/sol1.py) * [Sol2](project_euler/problem_074/sol2.py) * Problem 075 * [Sol1](project_euler/problem_075/sol1.py) * Problem 076 * [Sol1](project_euler/problem_076/sol1.py) * Problem 077 * [Sol1](project_euler/problem_077/sol1.py) * Problem 078 * [Sol1](project_euler/problem_078/sol1.py) * Problem 079 * [Sol1](project_euler/problem_079/sol1.py) * Problem 080 * [Sol1](project_euler/problem_080/sol1.py) * Problem 081 * [Sol1](project_euler/problem_081/sol1.py) * Problem 082 * [Sol1](project_euler/problem_082/sol1.py) * Problem 085 * [Sol1](project_euler/problem_085/sol1.py) * Problem 086 * [Sol1](project_euler/problem_086/sol1.py) * Problem 087 * [Sol1](project_euler/problem_087/sol1.py) * Problem 089 * [Sol1](project_euler/problem_089/sol1.py) * Problem 091 * [Sol1](project_euler/problem_091/sol1.py) * Problem 092 * [Sol1](project_euler/problem_092/sol1.py) * Problem 094 * [Sol1](project_euler/problem_094/sol1.py) * Problem 097 * [Sol1](project_euler/problem_097/sol1.py) * Problem 099 * [Sol1](project_euler/problem_099/sol1.py) * Problem 100 * [Sol1](project_euler/problem_100/sol1.py) * Problem 101 * [Sol1](project_euler/problem_101/sol1.py) * Problem 102 * [Sol1](project_euler/problem_102/sol1.py) * Problem 104 * [Sol1](project_euler/problem_104/sol1.py) * Problem 107 * [Sol1](project_euler/problem_107/sol1.py) * Problem 109 * [Sol1](project_euler/problem_109/sol1.py) * Problem 112 * [Sol1](project_euler/problem_112/sol1.py) * Problem 113 * [Sol1](project_euler/problem_113/sol1.py) * Problem 114 * [Sol1](project_euler/problem_114/sol1.py) * Problem 115 * [Sol1](project_euler/problem_115/sol1.py) * Problem 116 * [Sol1](project_euler/problem_116/sol1.py) * Problem 117 * [Sol1](project_euler/problem_117/sol1.py) * Problem 119 * [Sol1](project_euler/problem_119/sol1.py) * Problem 120 * [Sol1](project_euler/problem_120/sol1.py) * Problem 121 * [Sol1](project_euler/problem_121/sol1.py) * 
Problem 123 * [Sol1](project_euler/problem_123/sol1.py) * Problem 125 * [Sol1](project_euler/problem_125/sol1.py) * Problem 129 * [Sol1](project_euler/problem_129/sol1.py) * Problem 131 * [Sol1](project_euler/problem_131/sol1.py) * Problem 135 * [Sol1](project_euler/problem_135/sol1.py) * Problem 144 * [Sol1](project_euler/problem_144/sol1.py) * Problem 145 * [Sol1](project_euler/problem_145/sol1.py) * Problem 173 * [Sol1](project_euler/problem_173/sol1.py) * Problem 174 * [Sol1](project_euler/problem_174/sol1.py) * Problem 180 * [Sol1](project_euler/problem_180/sol1.py) * Problem 187 * [Sol1](project_euler/problem_187/sol1.py) * Problem 188 * [Sol1](project_euler/problem_188/sol1.py) * Problem 191 * [Sol1](project_euler/problem_191/sol1.py) * Problem 203 * [Sol1](project_euler/problem_203/sol1.py) * Problem 205 * [Sol1](project_euler/problem_205/sol1.py) * Problem 206 * [Sol1](project_euler/problem_206/sol1.py) * Problem 207 * [Sol1](project_euler/problem_207/sol1.py) * Problem 234 * [Sol1](project_euler/problem_234/sol1.py) * Problem 301 * [Sol1](project_euler/problem_301/sol1.py) * Problem 493 * [Sol1](project_euler/problem_493/sol1.py) * Problem 551 * [Sol1](project_euler/problem_551/sol1.py) * Problem 587 * [Sol1](project_euler/problem_587/sol1.py) * Problem 686 * [Sol1](project_euler/problem_686/sol1.py) * Problem 800 * [Sol1](project_euler/problem_800/sol1.py) ## Quantum * [Bb84](quantum/bb84.py) * [Deutsch Jozsa](quantum/deutsch_jozsa.py) * [Half Adder](quantum/half_adder.py) * [Not Gate](quantum/not_gate.py) * [Q Fourier Transform](quantum/q_fourier_transform.py) * [Q Full Adder](quantum/q_full_adder.py) * [Quantum Entanglement](quantum/quantum_entanglement.py) * [Quantum Teleportation](quantum/quantum_teleportation.py) * [Ripple Adder Classic](quantum/ripple_adder_classic.py) * [Single Qubit Measure](quantum/single_qubit_measure.py) * [Superdense Coding](quantum/superdense_coding.py) ## Scheduling * [First Come First Served](scheduling/first_come_first_served.py) * [Highest Response Ratio Next](scheduling/highest_response_ratio_next.py) * [Job Sequencing With Deadline](scheduling/job_sequencing_with_deadline.py) * [Multi Level Feedback Queue](scheduling/multi_level_feedback_queue.py) * [Non Preemptive Shortest Job First](scheduling/non_preemptive_shortest_job_first.py) * [Round Robin](scheduling/round_robin.py) * [Shortest Job First](scheduling/shortest_job_first.py) ## Searches * [Binary Search](searches/binary_search.py) * [Binary Tree Traversal](searches/binary_tree_traversal.py) * [Double Linear Search](searches/double_linear_search.py) * [Double Linear Search Recursion](searches/double_linear_search_recursion.py) * [Fibonacci Search](searches/fibonacci_search.py) * [Hill Climbing](searches/hill_climbing.py) * [Interpolation Search](searches/interpolation_search.py) * [Jump Search](searches/jump_search.py) * [Linear Search](searches/linear_search.py) * [Quick Select](searches/quick_select.py) * [Sentinel Linear Search](searches/sentinel_linear_search.py) * [Simple Binary Search](searches/simple_binary_search.py) * [Simulated Annealing](searches/simulated_annealing.py) * [Tabu Search](searches/tabu_search.py) * [Ternary Search](searches/ternary_search.py) ## Sorts * [Bead Sort](sorts/bead_sort.py) * [Binary Insertion Sort](sorts/binary_insertion_sort.py) * [Bitonic Sort](sorts/bitonic_sort.py) * [Bogo Sort](sorts/bogo_sort.py) * [Bubble Sort](sorts/bubble_sort.py) * [Bucket Sort](sorts/bucket_sort.py) * [Circle Sort](sorts/circle_sort.py) * [Cocktail Shaker 
Sort](sorts/cocktail_shaker_sort.py) * [Comb Sort](sorts/comb_sort.py) * [Counting Sort](sorts/counting_sort.py) * [Cycle Sort](sorts/cycle_sort.py) * [Double Sort](sorts/double_sort.py) * [Dutch National Flag Sort](sorts/dutch_national_flag_sort.py) * [Exchange Sort](sorts/exchange_sort.py) * [External Sort](sorts/external_sort.py) * [Gnome Sort](sorts/gnome_sort.py) * [Heap Sort](sorts/heap_sort.py) * [Insertion Sort](sorts/insertion_sort.py) * [Intro Sort](sorts/intro_sort.py) * [Iterative Merge Sort](sorts/iterative_merge_sort.py) * [Merge Insertion Sort](sorts/merge_insertion_sort.py) * [Merge Sort](sorts/merge_sort.py) * [Msd Radix Sort](sorts/msd_radix_sort.py) * [Natural Sort](sorts/natural_sort.py) * [Odd Even Sort](sorts/odd_even_sort.py) * [Odd Even Transposition Parallel](sorts/odd_even_transposition_parallel.py) * [Odd Even Transposition Single Threaded](sorts/odd_even_transposition_single_threaded.py) * [Pancake Sort](sorts/pancake_sort.py) * [Patience Sort](sorts/patience_sort.py) * [Pigeon Sort](sorts/pigeon_sort.py) * [Pigeonhole Sort](sorts/pigeonhole_sort.py) * [Quick Sort](sorts/quick_sort.py) * [Quick Sort 3 Partition](sorts/quick_sort_3_partition.py) * [Radix Sort](sorts/radix_sort.py) * [Random Normal Distribution Quicksort](sorts/random_normal_distribution_quicksort.py) * [Random Pivot Quick Sort](sorts/random_pivot_quick_sort.py) * [Recursive Bubble Sort](sorts/recursive_bubble_sort.py) * [Recursive Insertion Sort](sorts/recursive_insertion_sort.py) * [Recursive Mergesort Array](sorts/recursive_mergesort_array.py) * [Recursive Quick Sort](sorts/recursive_quick_sort.py) * [Selection Sort](sorts/selection_sort.py) * [Shell Sort](sorts/shell_sort.py) * [Shrink Shell Sort](sorts/shrink_shell_sort.py) * [Slowsort](sorts/slowsort.py) * [Stooge Sort](sorts/stooge_sort.py) * [Strand Sort](sorts/strand_sort.py) * [Tim Sort](sorts/tim_sort.py) * [Topological Sort](sorts/topological_sort.py) * [Tree Sort](sorts/tree_sort.py) * [Unknown Sort](sorts/unknown_sort.py) * [Wiggle Sort](sorts/wiggle_sort.py) ## Strings * [Aho Corasick](strings/aho_corasick.py) * [Alternative String Arrange](strings/alternative_string_arrange.py) * [Anagrams](strings/anagrams.py) * [Autocomplete Using Trie](strings/autocomplete_using_trie.py) * [Barcode Validator](strings/barcode_validator.py) * [Boyer Moore Search](strings/boyer_moore_search.py) * [Can String Be Rearranged As Palindrome](strings/can_string_be_rearranged_as_palindrome.py) * [Capitalize](strings/capitalize.py) * [Check Anagrams](strings/check_anagrams.py) * [Credit Card Validator](strings/credit_card_validator.py) * [Detecting English Programmatically](strings/detecting_english_programmatically.py) * [Dna](strings/dna.py) * [Frequency Finder](strings/frequency_finder.py) * [Hamming Distance](strings/hamming_distance.py) * [Indian Phone Validator](strings/indian_phone_validator.py) * [Is Contains Unique Chars](strings/is_contains_unique_chars.py) * [Is Isogram](strings/is_isogram.py) * [Is Pangram](strings/is_pangram.py) * [Is Spain National Id](strings/is_spain_national_id.py) * [Is Srilankan Phone Number](strings/is_srilankan_phone_number.py) * [Is Valid Email Address](strings/is_valid_email_address.py) * [Jaro Winkler](strings/jaro_winkler.py) * [Join](strings/join.py) * [Knuth Morris Pratt](strings/knuth_morris_pratt.py) * [Levenshtein Distance](strings/levenshtein_distance.py) * [Lower](strings/lower.py) * [Manacher](strings/manacher.py) * [Min Cost String Conversion](strings/min_cost_string_conversion.py) * [Naive String 
Search](strings/naive_string_search.py) * [Ngram](strings/ngram.py) * [Palindrome](strings/palindrome.py) * [Prefix Function](strings/prefix_function.py) * [Rabin Karp](strings/rabin_karp.py) * [Remove Duplicate](strings/remove_duplicate.py) * [Reverse Letters](strings/reverse_letters.py) * [Reverse Long Words](strings/reverse_long_words.py) * [Reverse Words](strings/reverse_words.py) * [Snake Case To Camel Pascal Case](strings/snake_case_to_camel_pascal_case.py) * [Split](strings/split.py) * [String Switch Case](strings/string_switch_case.py) * [Text Justification](strings/text_justification.py) * [Top K Frequent Words](strings/top_k_frequent_words.py) * [Upper](strings/upper.py) * [Wave](strings/wave.py) * [Wildcard Pattern Matching](strings/wildcard_pattern_matching.py) * [Word Occurrence](strings/word_occurrence.py) * [Word Patterns](strings/word_patterns.py) * [Z Function](strings/z_function.py) ## Web Programming * [Co2 Emission](web_programming/co2_emission.py) * [Convert Number To Words](web_programming/convert_number_to_words.py) * [Covid Stats Via Xpath](web_programming/covid_stats_via_xpath.py) * [Crawl Google Results](web_programming/crawl_google_results.py) * [Crawl Google Scholar Citation](web_programming/crawl_google_scholar_citation.py) * [Currency Converter](web_programming/currency_converter.py) * [Current Stock Price](web_programming/current_stock_price.py) * [Current Weather](web_programming/current_weather.py) * [Daily Horoscope](web_programming/daily_horoscope.py) * [Download Images From Google Query](web_programming/download_images_from_google_query.py) * [Emails From Url](web_programming/emails_from_url.py) * [Fetch Bbc News](web_programming/fetch_bbc_news.py) * [Fetch Github Info](web_programming/fetch_github_info.py) * [Fetch Jobs](web_programming/fetch_jobs.py) * [Fetch Quotes](web_programming/fetch_quotes.py) * [Fetch Well Rx Price](web_programming/fetch_well_rx_price.py) * [Get Amazon Product Data](web_programming/get_amazon_product_data.py) * [Get Imdb Top 250 Movies Csv](web_programming/get_imdb_top_250_movies_csv.py) * [Get Imdbtop](web_programming/get_imdbtop.py) * [Get Top Hn Posts](web_programming/get_top_hn_posts.py) * [Get User Tweets](web_programming/get_user_tweets.py) * [Giphy](web_programming/giphy.py) * [Instagram Crawler](web_programming/instagram_crawler.py) * [Instagram Pic](web_programming/instagram_pic.py) * [Instagram Video](web_programming/instagram_video.py) * [Nasa Data](web_programming/nasa_data.py) * [Open Google Results](web_programming/open_google_results.py) * [Random Anime Character](web_programming/random_anime_character.py) * [Recaptcha Verification](web_programming/recaptcha_verification.py) * [Reddit](web_programming/reddit.py) * [Search Books By Isbn](web_programming/search_books_by_isbn.py) * [Slack Message](web_programming/slack_message.py) * [Test Fetch Github Info](web_programming/test_fetch_github_info.py) * [World Covid19 Stats](web_programming/world_covid19_stats.py)
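
The Maths listing above includes [Greatest Common Divisor](maths/greatest_common_divisor.py) and [Least Common Multiple](maths/least_common_multiple.py). As a minimal, illustrative sketch only (the function name, its signature, and the `least_common_multiple` helper below are assumptions not shown in this index), a script run from the repository root could import and reuse the shared gcd helper like this:

```python
# Illustrative sketch only -- assumes maths/greatest_common_divisor.py exposes a
# greatest_common_divisor(a, b) function and that this is run from the repository
# root so the "maths" package directory is importable.
from maths.greatest_common_divisor import greatest_common_divisor


def least_common_multiple(a: int, b: int) -> int:
    """Hypothetical helper: lcm(a, b) computed via the shared gcd function."""
    return abs(a * b) // greatest_common_divisor(a, b)


if __name__ == "__main__":
    print(greatest_common_divisor(24, 40))  # expected: 8
    print(least_common_multiple(24, 40))    # expected: 120
```

Reusing one shared implementation in this way keeps behaviour consistent across the modules listed in the index.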
## Arithmetic Analysis * [Bisection](arithmetic_analysis/bisection.py) * [Gaussian Elimination](arithmetic_analysis/gaussian_elimination.py) * [In Static Equilibrium](arithmetic_analysis/in_static_equilibrium.py) * [Intersection](arithmetic_analysis/intersection.py) * [Jacobi Iteration Method](arithmetic_analysis/jacobi_iteration_method.py) * [Lu Decomposition](arithmetic_analysis/lu_decomposition.py) * [Newton Forward Interpolation](arithmetic_analysis/newton_forward_interpolation.py) * [Newton Method](arithmetic_analysis/newton_method.py) * [Newton Raphson](arithmetic_analysis/newton_raphson.py) * [Newton Raphson New](arithmetic_analysis/newton_raphson_new.py) * [Secant Method](arithmetic_analysis/secant_method.py) ## Audio Filters * [Butterworth Filter](audio_filters/butterworth_filter.py) * [Iir Filter](audio_filters/iir_filter.py) * [Show Response](audio_filters/show_response.py) ## Backtracking * [All Combinations](backtracking/all_combinations.py) * [All Permutations](backtracking/all_permutations.py) * [All Subsequences](backtracking/all_subsequences.py) * [Coloring](backtracking/coloring.py) * [Combination Sum](backtracking/combination_sum.py) * [Hamiltonian Cycle](backtracking/hamiltonian_cycle.py) * [Knight Tour](backtracking/knight_tour.py) * [Minimax](backtracking/minimax.py) * [Minmax](backtracking/minmax.py) * [N Queens](backtracking/n_queens.py) * [N Queens Math](backtracking/n_queens_math.py) * [Power Sum](backtracking/power_sum.py) * [Rat In Maze](backtracking/rat_in_maze.py) * [Sudoku](backtracking/sudoku.py) * [Sum Of Subsets](backtracking/sum_of_subsets.py) * [Word Search](backtracking/word_search.py) ## Bit Manipulation * [Binary And Operator](bit_manipulation/binary_and_operator.py) * [Binary Count Setbits](bit_manipulation/binary_count_setbits.py) * [Binary Count Trailing Zeros](bit_manipulation/binary_count_trailing_zeros.py) * [Binary Or Operator](bit_manipulation/binary_or_operator.py) * [Binary Shifts](bit_manipulation/binary_shifts.py) * [Binary Twos Complement](bit_manipulation/binary_twos_complement.py) * [Binary Xor Operator](bit_manipulation/binary_xor_operator.py) * [Count 1S Brian Kernighan Method](bit_manipulation/count_1s_brian_kernighan_method.py) * [Count Number Of One Bits](bit_manipulation/count_number_of_one_bits.py) * [Gray Code Sequence](bit_manipulation/gray_code_sequence.py) * [Highest Set Bit](bit_manipulation/highest_set_bit.py) * [Index Of Rightmost Set Bit](bit_manipulation/index_of_rightmost_set_bit.py) * [Is Even](bit_manipulation/is_even.py) * [Is Power Of Two](bit_manipulation/is_power_of_two.py) * [Numbers Different Signs](bit_manipulation/numbers_different_signs.py) * [Reverse Bits](bit_manipulation/reverse_bits.py) * [Single Bit Manipulation Operations](bit_manipulation/single_bit_manipulation_operations.py) ## Blockchain * [Chinese Remainder Theorem](blockchain/chinese_remainder_theorem.py) * [Diophantine Equation](blockchain/diophantine_equation.py) * [Modular Division](blockchain/modular_division.py) ## Boolean Algebra * [And Gate](boolean_algebra/and_gate.py) * [Nand Gate](boolean_algebra/nand_gate.py) * [Norgate](boolean_algebra/norgate.py) * [Not Gate](boolean_algebra/not_gate.py) * [Or Gate](boolean_algebra/or_gate.py) * [Quine Mc Cluskey](boolean_algebra/quine_mc_cluskey.py) * [Xnor Gate](boolean_algebra/xnor_gate.py) * [Xor Gate](boolean_algebra/xor_gate.py) ## Cellular Automata * [Conways Game Of Life](cellular_automata/conways_game_of_life.py) * [Game Of Life](cellular_automata/game_of_life.py) * [Nagel 
Schrekenberg](cellular_automata/nagel_schrekenberg.py) * [One Dimensional](cellular_automata/one_dimensional.py) * [Wa Tor](cellular_automata/wa_tor.py) ## Ciphers * [A1Z26](ciphers/a1z26.py) * [Affine Cipher](ciphers/affine_cipher.py) * [Atbash](ciphers/atbash.py) * [Autokey](ciphers/autokey.py) * [Baconian Cipher](ciphers/baconian_cipher.py) * [Base16](ciphers/base16.py) * [Base32](ciphers/base32.py) * [Base64](ciphers/base64.py) * [Base85](ciphers/base85.py) * [Beaufort Cipher](ciphers/beaufort_cipher.py) * [Bifid](ciphers/bifid.py) * [Brute Force Caesar Cipher](ciphers/brute_force_caesar_cipher.py) * [Caesar Cipher](ciphers/caesar_cipher.py) * [Cryptomath Module](ciphers/cryptomath_module.py) * [Decrypt Caesar With Chi Squared](ciphers/decrypt_caesar_with_chi_squared.py) * [Deterministic Miller Rabin](ciphers/deterministic_miller_rabin.py) * [Diffie](ciphers/diffie.py) * [Diffie Hellman](ciphers/diffie_hellman.py) * [Elgamal Key Generator](ciphers/elgamal_key_generator.py) * [Enigma Machine2](ciphers/enigma_machine2.py) * [Hill Cipher](ciphers/hill_cipher.py) * [Mixed Keyword Cypher](ciphers/mixed_keyword_cypher.py) * [Mono Alphabetic Ciphers](ciphers/mono_alphabetic_ciphers.py) * [Morse Code](ciphers/morse_code.py) * [Onepad Cipher](ciphers/onepad_cipher.py) * [Playfair Cipher](ciphers/playfair_cipher.py) * [Polybius](ciphers/polybius.py) * [Porta Cipher](ciphers/porta_cipher.py) * [Rabin Miller](ciphers/rabin_miller.py) * [Rail Fence Cipher](ciphers/rail_fence_cipher.py) * [Rot13](ciphers/rot13.py) * [Rsa Cipher](ciphers/rsa_cipher.py) * [Rsa Factorization](ciphers/rsa_factorization.py) * [Rsa Key Generator](ciphers/rsa_key_generator.py) * [Shuffled Shift Cipher](ciphers/shuffled_shift_cipher.py) * [Simple Keyword Cypher](ciphers/simple_keyword_cypher.py) * [Simple Substitution Cipher](ciphers/simple_substitution_cipher.py) * [Trafid Cipher](ciphers/trafid_cipher.py) * [Transposition Cipher](ciphers/transposition_cipher.py) * [Transposition Cipher Encrypt Decrypt File](ciphers/transposition_cipher_encrypt_decrypt_file.py) * [Vigenere Cipher](ciphers/vigenere_cipher.py) * [Xor Cipher](ciphers/xor_cipher.py) ## Compression * [Burrows Wheeler](compression/burrows_wheeler.py) * [Huffman](compression/huffman.py) * [Lempel Ziv](compression/lempel_ziv.py) * [Lempel Ziv Decompress](compression/lempel_ziv_decompress.py) * [Lz77](compression/lz77.py) * [Peak Signal To Noise Ratio](compression/peak_signal_to_noise_ratio.py) * [Run Length Encoding](compression/run_length_encoding.py) ## Computer Vision * [Cnn Classification](computer_vision/cnn_classification.py) * [Flip Augmentation](computer_vision/flip_augmentation.py) * [Harris Corner](computer_vision/harris_corner.py) * [Horn Schunck](computer_vision/horn_schunck.py) * [Mean Threshold](computer_vision/mean_threshold.py) * [Mosaic Augmentation](computer_vision/mosaic_augmentation.py) * [Pooling Functions](computer_vision/pooling_functions.py) ## Conversions * [Astronomical Length Scale Conversion](conversions/astronomical_length_scale_conversion.py) * [Binary To Decimal](conversions/binary_to_decimal.py) * [Binary To Hexadecimal](conversions/binary_to_hexadecimal.py) * [Binary To Octal](conversions/binary_to_octal.py) * [Decimal To Any](conversions/decimal_to_any.py) * [Decimal To Binary](conversions/decimal_to_binary.py) * [Decimal To Binary Recursion](conversions/decimal_to_binary_recursion.py) * [Decimal To Hexadecimal](conversions/decimal_to_hexadecimal.py) * [Decimal To Octal](conversions/decimal_to_octal.py) * [Energy 
Conversions](conversions/energy_conversions.py) * [Excel Title To Column](conversions/excel_title_to_column.py) * [Hex To Bin](conversions/hex_to_bin.py) * [Hexadecimal To Decimal](conversions/hexadecimal_to_decimal.py) * [Length Conversion](conversions/length_conversion.py) * [Molecular Chemistry](conversions/molecular_chemistry.py) * [Octal To Decimal](conversions/octal_to_decimal.py) * [Prefix Conversions](conversions/prefix_conversions.py) * [Prefix Conversions String](conversions/prefix_conversions_string.py) * [Pressure Conversions](conversions/pressure_conversions.py) * [Rgb Hsv Conversion](conversions/rgb_hsv_conversion.py) * [Roman Numerals](conversions/roman_numerals.py) * [Speed Conversions](conversions/speed_conversions.py) * [Temperature Conversions](conversions/temperature_conversions.py) * [Volume Conversions](conversions/volume_conversions.py) * [Weight Conversion](conversions/weight_conversion.py) ## Data Structures * Arrays * [Permutations](data_structures/arrays/permutations.py) * [Prefix Sum](data_structures/arrays/prefix_sum.py) * [Product Sum](data_structures/arrays/product_sum.py) * Binary Tree * [Avl Tree](data_structures/binary_tree/avl_tree.py) * [Basic Binary Tree](data_structures/binary_tree/basic_binary_tree.py) * [Binary Search Tree](data_structures/binary_tree/binary_search_tree.py) * [Binary Search Tree Recursive](data_structures/binary_tree/binary_search_tree_recursive.py) * [Binary Tree Mirror](data_structures/binary_tree/binary_tree_mirror.py) * [Binary Tree Node Sum](data_structures/binary_tree/binary_tree_node_sum.py) * [Binary Tree Path Sum](data_structures/binary_tree/binary_tree_path_sum.py) * [Binary Tree Traversals](data_structures/binary_tree/binary_tree_traversals.py) * [Diff Views Of Binary Tree](data_structures/binary_tree/diff_views_of_binary_tree.py) * [Distribute Coins](data_structures/binary_tree/distribute_coins.py) * [Fenwick Tree](data_structures/binary_tree/fenwick_tree.py) * [Inorder Tree Traversal 2022](data_structures/binary_tree/inorder_tree_traversal_2022.py) * [Is Bst](data_structures/binary_tree/is_bst.py) * [Lazy Segment Tree](data_structures/binary_tree/lazy_segment_tree.py) * [Lowest Common Ancestor](data_structures/binary_tree/lowest_common_ancestor.py) * [Maximum Fenwick Tree](data_structures/binary_tree/maximum_fenwick_tree.py) * [Merge Two Binary Trees](data_structures/binary_tree/merge_two_binary_trees.py) * [Non Recursive Segment Tree](data_structures/binary_tree/non_recursive_segment_tree.py) * [Number Of Possible Binary Trees](data_structures/binary_tree/number_of_possible_binary_trees.py) * [Red Black Tree](data_structures/binary_tree/red_black_tree.py) * [Segment Tree](data_structures/binary_tree/segment_tree.py) * [Segment Tree Other](data_structures/binary_tree/segment_tree_other.py) * [Treap](data_structures/binary_tree/treap.py) * [Wavelet Tree](data_structures/binary_tree/wavelet_tree.py) * Disjoint Set * [Alternate Disjoint Set](data_structures/disjoint_set/alternate_disjoint_set.py) * [Disjoint Set](data_structures/disjoint_set/disjoint_set.py) * Hashing * [Bloom Filter](data_structures/hashing/bloom_filter.py) * [Double Hash](data_structures/hashing/double_hash.py) * [Hash Map](data_structures/hashing/hash_map.py) * [Hash Table](data_structures/hashing/hash_table.py) * [Hash Table With Linked List](data_structures/hashing/hash_table_with_linked_list.py) * Number Theory * [Prime Numbers](data_structures/hashing/number_theory/prime_numbers.py) * [Quadratic Probing](data_structures/hashing/quadratic_probing.py) 
* Tests * [Test Hash Map](data_structures/hashing/tests/test_hash_map.py) * Heap * [Binomial Heap](data_structures/heap/binomial_heap.py) * [Heap](data_structures/heap/heap.py) * [Heap Generic](data_structures/heap/heap_generic.py) * [Max Heap](data_structures/heap/max_heap.py) * [Min Heap](data_structures/heap/min_heap.py) * [Randomized Heap](data_structures/heap/randomized_heap.py) * [Skew Heap](data_structures/heap/skew_heap.py) * Linked List * [Circular Linked List](data_structures/linked_list/circular_linked_list.py) * [Deque Doubly](data_structures/linked_list/deque_doubly.py) * [Doubly Linked List](data_structures/linked_list/doubly_linked_list.py) * [Doubly Linked List Two](data_structures/linked_list/doubly_linked_list_two.py) * [From Sequence](data_structures/linked_list/from_sequence.py) * [Has Loop](data_structures/linked_list/has_loop.py) * [Is Palindrome](data_structures/linked_list/is_palindrome.py) * [Merge Two Lists](data_structures/linked_list/merge_two_lists.py) * [Middle Element Of Linked List](data_structures/linked_list/middle_element_of_linked_list.py) * [Print Reverse](data_structures/linked_list/print_reverse.py) * [Singly Linked List](data_structures/linked_list/singly_linked_list.py) * [Skip List](data_structures/linked_list/skip_list.py) * [Swap Nodes](data_structures/linked_list/swap_nodes.py) * Queue * [Circular Queue](data_structures/queue/circular_queue.py) * [Circular Queue Linked List](data_structures/queue/circular_queue_linked_list.py) * [Double Ended Queue](data_structures/queue/double_ended_queue.py) * [Linked Queue](data_structures/queue/linked_queue.py) * [Priority Queue Using List](data_structures/queue/priority_queue_using_list.py) * [Queue By List](data_structures/queue/queue_by_list.py) * [Queue By Two Stacks](data_structures/queue/queue_by_two_stacks.py) * [Queue On Pseudo Stack](data_structures/queue/queue_on_pseudo_stack.py) * Stacks * [Balanced Parentheses](data_structures/stacks/balanced_parentheses.py) * [Dijkstras Two Stack Algorithm](data_structures/stacks/dijkstras_two_stack_algorithm.py) * [Evaluate Postfix Notations](data_structures/stacks/evaluate_postfix_notations.py) * [Infix To Postfix Conversion](data_structures/stacks/infix_to_postfix_conversion.py) * [Infix To Prefix Conversion](data_structures/stacks/infix_to_prefix_conversion.py) * [Next Greater Element](data_structures/stacks/next_greater_element.py) * [Postfix Evaluation](data_structures/stacks/postfix_evaluation.py) * [Prefix Evaluation](data_structures/stacks/prefix_evaluation.py) * [Stack](data_structures/stacks/stack.py) * [Stack With Doubly Linked List](data_structures/stacks/stack_with_doubly_linked_list.py) * [Stack With Singly Linked List](data_structures/stacks/stack_with_singly_linked_list.py) * [Stock Span Problem](data_structures/stacks/stock_span_problem.py) * Trie * [Radix Tree](data_structures/trie/radix_tree.py) * [Trie](data_structures/trie/trie.py) ## Digital Image Processing * [Change Brightness](digital_image_processing/change_brightness.py) * [Change Contrast](digital_image_processing/change_contrast.py) * [Convert To Negative](digital_image_processing/convert_to_negative.py) * Dithering * [Burkes](digital_image_processing/dithering/burkes.py) * Edge Detection * [Canny](digital_image_processing/edge_detection/canny.py) * Filters * [Bilateral Filter](digital_image_processing/filters/bilateral_filter.py) * [Convolve](digital_image_processing/filters/convolve.py) * [Gabor Filter](digital_image_processing/filters/gabor_filter.py) * [Gaussian 
Filter](digital_image_processing/filters/gaussian_filter.py) * [Local Binary Pattern](digital_image_processing/filters/local_binary_pattern.py) * [Median Filter](digital_image_processing/filters/median_filter.py) * [Sobel Filter](digital_image_processing/filters/sobel_filter.py) * Histogram Equalization * [Histogram Stretch](digital_image_processing/histogram_equalization/histogram_stretch.py) * [Index Calculation](digital_image_processing/index_calculation.py) * Morphological Operations * [Dilation Operation](digital_image_processing/morphological_operations/dilation_operation.py) * [Erosion Operation](digital_image_processing/morphological_operations/erosion_operation.py) * Resize * [Resize](digital_image_processing/resize/resize.py) * Rotation * [Rotation](digital_image_processing/rotation/rotation.py) * [Sepia](digital_image_processing/sepia.py) * [Test Digital Image Processing](digital_image_processing/test_digital_image_processing.py) ## Divide And Conquer * [Closest Pair Of Points](divide_and_conquer/closest_pair_of_points.py) * [Convex Hull](divide_and_conquer/convex_hull.py) * [Heaps Algorithm](divide_and_conquer/heaps_algorithm.py) * [Heaps Algorithm Iterative](divide_and_conquer/heaps_algorithm_iterative.py) * [Inversions](divide_and_conquer/inversions.py) * [Kth Order Statistic](divide_and_conquer/kth_order_statistic.py) * [Max Difference Pair](divide_and_conquer/max_difference_pair.py) * [Max Subarray](divide_and_conquer/max_subarray.py) * [Mergesort](divide_and_conquer/mergesort.py) * [Peak](divide_and_conquer/peak.py) * [Power](divide_and_conquer/power.py) * [Strassen Matrix Multiplication](divide_and_conquer/strassen_matrix_multiplication.py) ## Dynamic Programming * [Abbreviation](dynamic_programming/abbreviation.py) * [All Construct](dynamic_programming/all_construct.py) * [Bitmask](dynamic_programming/bitmask.py) * [Catalan Numbers](dynamic_programming/catalan_numbers.py) * [Climbing Stairs](dynamic_programming/climbing_stairs.py) * [Combination Sum Iv](dynamic_programming/combination_sum_iv.py) * [Edit Distance](dynamic_programming/edit_distance.py) * [Factorial](dynamic_programming/factorial.py) * [Fast Fibonacci](dynamic_programming/fast_fibonacci.py) * [Fibonacci](dynamic_programming/fibonacci.py) * [Fizz Buzz](dynamic_programming/fizz_buzz.py) * [Floyd Warshall](dynamic_programming/floyd_warshall.py) * [Integer Partition](dynamic_programming/integer_partition.py) * [Iterating Through Submasks](dynamic_programming/iterating_through_submasks.py) * [K Means Clustering Tensorflow](dynamic_programming/k_means_clustering_tensorflow.py) * [Knapsack](dynamic_programming/knapsack.py) * [Longest Common Subsequence](dynamic_programming/longest_common_subsequence.py) * [Longest Common Substring](dynamic_programming/longest_common_substring.py) * [Longest Increasing Subsequence](dynamic_programming/longest_increasing_subsequence.py) * [Longest Increasing Subsequence O(Nlogn)](dynamic_programming/longest_increasing_subsequence_o(nlogn).py) * [Longest Sub Array](dynamic_programming/longest_sub_array.py) * [Matrix Chain Order](dynamic_programming/matrix_chain_order.py) * [Max Non Adjacent Sum](dynamic_programming/max_non_adjacent_sum.py) * [Max Product Subarray](dynamic_programming/max_product_subarray.py) * [Max Subarray Sum](dynamic_programming/max_subarray_sum.py) * [Min Distance Up Bottom](dynamic_programming/min_distance_up_bottom.py) * [Minimum Coin Change](dynamic_programming/minimum_coin_change.py) * [Minimum Cost Path](dynamic_programming/minimum_cost_path.py) * [Minimum 
Partition](dynamic_programming/minimum_partition.py) * [Minimum Size Subarray Sum](dynamic_programming/minimum_size_subarray_sum.py) * [Minimum Squares To Represent A Number](dynamic_programming/minimum_squares_to_represent_a_number.py) * [Minimum Steps To One](dynamic_programming/minimum_steps_to_one.py) * [Minimum Tickets Cost](dynamic_programming/minimum_tickets_cost.py) * [Optimal Binary Search Tree](dynamic_programming/optimal_binary_search_tree.py) * [Palindrome Partitioning](dynamic_programming/palindrome_partitioning.py) * [Regex Match](dynamic_programming/regex_match.py) * [Rod Cutting](dynamic_programming/rod_cutting.py) * [Subset Generation](dynamic_programming/subset_generation.py) * [Sum Of Subset](dynamic_programming/sum_of_subset.py) * [Tribonacci](dynamic_programming/tribonacci.py) * [Viterbi](dynamic_programming/viterbi.py) * [Word Break](dynamic_programming/word_break.py) ## Electronics * [Apparent Power](electronics/apparent_power.py) * [Builtin Voltage](electronics/builtin_voltage.py) * [Carrier Concentration](electronics/carrier_concentration.py) * [Circular Convolution](electronics/circular_convolution.py) * [Coulombs Law](electronics/coulombs_law.py) * [Electric Conductivity](electronics/electric_conductivity.py) * [Electric Power](electronics/electric_power.py) * [Electrical Impedance](electronics/electrical_impedance.py) * [Ind Reactance](electronics/ind_reactance.py) * [Ohms Law](electronics/ohms_law.py) * [Real And Reactive Power](electronics/real_and_reactive_power.py) * [Resistor Equivalence](electronics/resistor_equivalence.py) * [Resonant Frequency](electronics/resonant_frequency.py) ## File Transfer * [Receive File](file_transfer/receive_file.py) * [Send File](file_transfer/send_file.py) * Tests * [Test Send File](file_transfer/tests/test_send_file.py) ## Financial * [Equated Monthly Installments](financial/equated_monthly_installments.py) * [Interest](financial/interest.py) * [Present Value](financial/present_value.py) * [Price Plus Tax](financial/price_plus_tax.py) ## Fractals * [Julia Sets](fractals/julia_sets.py) * [Koch Snowflake](fractals/koch_snowflake.py) * [Mandelbrot](fractals/mandelbrot.py) * [Sierpinski Triangle](fractals/sierpinski_triangle.py) ## Fuzzy Logic * [Fuzzy Operations](fuzzy_logic/fuzzy_operations.py) ## Genetic Algorithm * [Basic String](genetic_algorithm/basic_string.py) ## Geodesy * [Haversine Distance](geodesy/haversine_distance.py) * [Lamberts Ellipsoidal Distance](geodesy/lamberts_ellipsoidal_distance.py) ## Graphics * [Bezier Curve](graphics/bezier_curve.py) * [Vector3 For 2D Rendering](graphics/vector3_for_2d_rendering.py) ## Graphs * [A Star](graphs/a_star.py) * [Articulation Points](graphs/articulation_points.py) * [Basic Graphs](graphs/basic_graphs.py) * [Bellman Ford](graphs/bellman_ford.py) * [Bi Directional Dijkstra](graphs/bi_directional_dijkstra.py) * [Bidirectional A Star](graphs/bidirectional_a_star.py) * [Bidirectional Breadth First Search](graphs/bidirectional_breadth_first_search.py) * [Boruvka](graphs/boruvka.py) * [Breadth First Search](graphs/breadth_first_search.py) * [Breadth First Search 2](graphs/breadth_first_search_2.py) * [Breadth First Search Shortest Path](graphs/breadth_first_search_shortest_path.py) * [Breadth First Search Shortest Path 2](graphs/breadth_first_search_shortest_path_2.py) * [Breadth First Search Zero One Shortest Path](graphs/breadth_first_search_zero_one_shortest_path.py) * [Check Bipartite Graph Bfs](graphs/check_bipartite_graph_bfs.py) * [Check Bipartite Graph 
Dfs](graphs/check_bipartite_graph_dfs.py) * [Check Cycle](graphs/check_cycle.py) * [Connected Components](graphs/connected_components.py) * [Depth First Search](graphs/depth_first_search.py) * [Depth First Search 2](graphs/depth_first_search_2.py) * [Dijkstra](graphs/dijkstra.py) * [Dijkstra 2](graphs/dijkstra_2.py) * [Dijkstra Algorithm](graphs/dijkstra_algorithm.py) * [Dijkstra Alternate](graphs/dijkstra_alternate.py) * [Dijkstra Binary Grid](graphs/dijkstra_binary_grid.py) * [Dinic](graphs/dinic.py) * [Directed And Undirected (Weighted) Graph](graphs/directed_and_undirected_(weighted)_graph.py) * [Edmonds Karp Multiple Source And Sink](graphs/edmonds_karp_multiple_source_and_sink.py) * [Eulerian Path And Circuit For Undirected Graph](graphs/eulerian_path_and_circuit_for_undirected_graph.py) * [Even Tree](graphs/even_tree.py) * [Finding Bridges](graphs/finding_bridges.py) * [Frequent Pattern Graph Miner](graphs/frequent_pattern_graph_miner.py) * [G Topological Sort](graphs/g_topological_sort.py) * [Gale Shapley Bigraph](graphs/gale_shapley_bigraph.py) * [Graph Adjacency List](graphs/graph_adjacency_list.py) * [Graph Adjacency Matrix](graphs/graph_adjacency_matrix.py) * [Graph List](graphs/graph_list.py) * [Graphs Floyd Warshall](graphs/graphs_floyd_warshall.py) * [Greedy Best First](graphs/greedy_best_first.py) * [Greedy Min Vertex Cover](graphs/greedy_min_vertex_cover.py) * [Kahns Algorithm Long](graphs/kahns_algorithm_long.py) * [Kahns Algorithm Topo](graphs/kahns_algorithm_topo.py) * [Karger](graphs/karger.py) * [Markov Chain](graphs/markov_chain.py) * [Matching Min Vertex Cover](graphs/matching_min_vertex_cover.py) * [Minimum Path Sum](graphs/minimum_path_sum.py) * [Minimum Spanning Tree Boruvka](graphs/minimum_spanning_tree_boruvka.py) * [Minimum Spanning Tree Kruskal](graphs/minimum_spanning_tree_kruskal.py) * [Minimum Spanning Tree Kruskal2](graphs/minimum_spanning_tree_kruskal2.py) * [Minimum Spanning Tree Prims](graphs/minimum_spanning_tree_prims.py) * [Minimum Spanning Tree Prims2](graphs/minimum_spanning_tree_prims2.py) * [Multi Heuristic Astar](graphs/multi_heuristic_astar.py) * [Page Rank](graphs/page_rank.py) * [Prim](graphs/prim.py) * [Random Graph Generator](graphs/random_graph_generator.py) * [Scc Kosaraju](graphs/scc_kosaraju.py) * [Strongly Connected Components](graphs/strongly_connected_components.py) * [Tarjans Scc](graphs/tarjans_scc.py) * Tests * [Test Min Spanning Tree Kruskal](graphs/tests/test_min_spanning_tree_kruskal.py) * [Test Min Spanning Tree Prim](graphs/tests/test_min_spanning_tree_prim.py) ## Greedy Methods * [Fractional Knapsack](greedy_methods/fractional_knapsack.py) * [Fractional Knapsack 2](greedy_methods/fractional_knapsack_2.py) * [Minimum Waiting Time](greedy_methods/minimum_waiting_time.py) * [Optimal Merge Pattern](greedy_methods/optimal_merge_pattern.py) ## Hashes * [Adler32](hashes/adler32.py) * [Chaos Machine](hashes/chaos_machine.py) * [Djb2](hashes/djb2.py) * [Elf](hashes/elf.py) * [Enigma Machine](hashes/enigma_machine.py) * [Hamming Code](hashes/hamming_code.py) * [Luhn](hashes/luhn.py) * [Md5](hashes/md5.py) * [Sdbm](hashes/sdbm.py) * [Sha1](hashes/sha1.py) * [Sha256](hashes/sha256.py) ## Knapsack * [Greedy Knapsack](knapsack/greedy_knapsack.py) * [Knapsack](knapsack/knapsack.py) * [Recursive Approach Knapsack](knapsack/recursive_approach_knapsack.py) * Tests * [Test Greedy Knapsack](knapsack/tests/test_greedy_knapsack.py) * [Test Knapsack](knapsack/tests/test_knapsack.py) ## Linear Algebra * Src * [Conjugate 
Gradient](linear_algebra/src/conjugate_gradient.py) * [Lib](linear_algebra/src/lib.py) * [Polynom For Points](linear_algebra/src/polynom_for_points.py) * [Power Iteration](linear_algebra/src/power_iteration.py) * [Rank Of Matrix](linear_algebra/src/rank_of_matrix.py) * [Rayleigh Quotient](linear_algebra/src/rayleigh_quotient.py) * [Schur Complement](linear_algebra/src/schur_complement.py) * [Test Linear Algebra](linear_algebra/src/test_linear_algebra.py) * [Transformations 2D](linear_algebra/src/transformations_2d.py) ## Linear Programming * [Simplex](linear_programming/simplex.py) ## Machine Learning * [Astar](machine_learning/astar.py) * [Data Transformations](machine_learning/data_transformations.py) * [Decision Tree](machine_learning/decision_tree.py) * [Dimensionality Reduction](machine_learning/dimensionality_reduction.py) * Forecasting * [Run](machine_learning/forecasting/run.py) * [Gradient Descent](machine_learning/gradient_descent.py) * [K Means Clust](machine_learning/k_means_clust.py) * [K Nearest Neighbours](machine_learning/k_nearest_neighbours.py) * [Knn Sklearn](machine_learning/knn_sklearn.py) * [Linear Discriminant Analysis](machine_learning/linear_discriminant_analysis.py) * [Linear Regression](machine_learning/linear_regression.py) * Local Weighted Learning * [Local Weighted Learning](machine_learning/local_weighted_learning/local_weighted_learning.py) * [Logistic Regression](machine_learning/logistic_regression.py) * Lstm * [Lstm Prediction](machine_learning/lstm/lstm_prediction.py) * [Multilayer Perceptron Classifier](machine_learning/multilayer_perceptron_classifier.py) * [Polynomial Regression](machine_learning/polynomial_regression.py) * [Scoring Functions](machine_learning/scoring_functions.py) * [Self Organizing Map](machine_learning/self_organizing_map.py) * [Sequential Minimum Optimization](machine_learning/sequential_minimum_optimization.py) * [Similarity Search](machine_learning/similarity_search.py) * [Support Vector Machines](machine_learning/support_vector_machines.py) * [Word Frequency Functions](machine_learning/word_frequency_functions.py) * [Xgboost Classifier](machine_learning/xgboost_classifier.py) * [Xgboost Regressor](machine_learning/xgboost_regressor.py) ## Maths * [Abs](maths/abs.py) * [Add](maths/add.py) * [Addition Without Arithmetic](maths/addition_without_arithmetic.py) * [Aliquot Sum](maths/aliquot_sum.py) * [Allocation Number](maths/allocation_number.py) * [Arc Length](maths/arc_length.py) * [Area](maths/area.py) * [Area Under Curve](maths/area_under_curve.py) * [Armstrong Numbers](maths/armstrong_numbers.py) * [Automorphic Number](maths/automorphic_number.py) * [Average Absolute Deviation](maths/average_absolute_deviation.py) * [Average Mean](maths/average_mean.py) * [Average Median](maths/average_median.py) * [Average Mode](maths/average_mode.py) * [Bailey Borwein Plouffe](maths/bailey_borwein_plouffe.py) * [Basic Maths](maths/basic_maths.py) * [Binary Exp Mod](maths/binary_exp_mod.py) * [Binary Exponentiation](maths/binary_exponentiation.py) * [Binary Exponentiation 2](maths/binary_exponentiation_2.py) * [Binary Exponentiation 3](maths/binary_exponentiation_3.py) * [Binomial Coefficient](maths/binomial_coefficient.py) * [Binomial Distribution](maths/binomial_distribution.py) * [Bisection](maths/bisection.py) * [Carmichael Number](maths/carmichael_number.py) * [Catalan Number](maths/catalan_number.py) * [Ceil](maths/ceil.py) * [Check Polygon](maths/check_polygon.py) * [Chudnovsky Algorithm](maths/chudnovsky_algorithm.py) * [Collatz 
Sequence](maths/collatz_sequence.py) * [Combinations](maths/combinations.py) * [Decimal Isolate](maths/decimal_isolate.py) * [Decimal To Fraction](maths/decimal_to_fraction.py) * [Dodecahedron](maths/dodecahedron.py) * [Double Factorial Iterative](maths/double_factorial_iterative.py) * [Double Factorial Recursive](maths/double_factorial_recursive.py) * [Dual Number Automatic Differentiation](maths/dual_number_automatic_differentiation.py) * [Entropy](maths/entropy.py) * [Euclidean Distance](maths/euclidean_distance.py) * [Euclidean Gcd](maths/euclidean_gcd.py) * [Euler Method](maths/euler_method.py) * [Euler Modified](maths/euler_modified.py) * [Eulers Totient](maths/eulers_totient.py) * [Extended Euclidean Algorithm](maths/extended_euclidean_algorithm.py) * [Factorial](maths/factorial.py) * [Factors](maths/factors.py) * [Fermat Little Theorem](maths/fermat_little_theorem.py) * [Fibonacci](maths/fibonacci.py) * [Find Max](maths/find_max.py) * [Find Min](maths/find_min.py) * [Floor](maths/floor.py) * [Gamma](maths/gamma.py) * [Gamma Recursive](maths/gamma_recursive.py) * [Gaussian](maths/gaussian.py) * [Gaussian Error Linear Unit](maths/gaussian_error_linear_unit.py) * [Gcd Of N Numbers](maths/gcd_of_n_numbers.py) * [Greatest Common Divisor](maths/greatest_common_divisor.py) * [Greedy Coin Change](maths/greedy_coin_change.py) * [Hamming Numbers](maths/hamming_numbers.py) * [Hardy Ramanujanalgo](maths/hardy_ramanujanalgo.py) * [Hexagonal Number](maths/hexagonal_number.py) * [Integration By Simpson Approx](maths/integration_by_simpson_approx.py) * [Interquartile Range](maths/interquartile_range.py) * [Is Int Palindrome](maths/is_int_palindrome.py) * [Is Ip V4 Address Valid](maths/is_ip_v4_address_valid.py) * [Is Square Free](maths/is_square_free.py) * [Jaccard Similarity](maths/jaccard_similarity.py) * [Juggler Sequence](maths/juggler_sequence.py) * [Karatsuba](maths/karatsuba.py) * [Krishnamurthy Number](maths/krishnamurthy_number.py) * [Kth Lexicographic Permutation](maths/kth_lexicographic_permutation.py) * [Largest Of Very Large Numbers](maths/largest_of_very_large_numbers.py) * [Least Common Multiple](maths/least_common_multiple.py) * [Line Length](maths/line_length.py) * [Liouville Lambda](maths/liouville_lambda.py) * [Lucas Lehmer Primality Test](maths/lucas_lehmer_primality_test.py) * [Lucas Series](maths/lucas_series.py) * [Maclaurin Series](maths/maclaurin_series.py) * [Manhattan Distance](maths/manhattan_distance.py) * [Matrix Exponentiation](maths/matrix_exponentiation.py) * [Max Sum Sliding Window](maths/max_sum_sliding_window.py) * [Median Of Two Arrays](maths/median_of_two_arrays.py) * [Miller Rabin](maths/miller_rabin.py) * [Mobius Function](maths/mobius_function.py) * [Modular Exponential](maths/modular_exponential.py) * [Monte Carlo](maths/monte_carlo.py) * [Monte Carlo Dice](maths/monte_carlo_dice.py) * [Nevilles Method](maths/nevilles_method.py) * [Newton Raphson](maths/newton_raphson.py) * [Number Of Digits](maths/number_of_digits.py) * [Numerical Integration](maths/numerical_integration.py) * [Odd Sieve](maths/odd_sieve.py) * [Perfect Cube](maths/perfect_cube.py) * [Perfect Number](maths/perfect_number.py) * [Perfect Square](maths/perfect_square.py) * [Persistence](maths/persistence.py) * [Pi Generator](maths/pi_generator.py) * [Pi Monte Carlo Estimation](maths/pi_monte_carlo_estimation.py) * [Points Are Collinear 3D](maths/points_are_collinear_3d.py) * [Pollard Rho](maths/pollard_rho.py) * [Polynomial Evaluation](maths/polynomial_evaluation.py) * Polynomials * [Single 
Indeterminate Operations](maths/polynomials/single_indeterminate_operations.py) * [Power Using Recursion](maths/power_using_recursion.py) * [Prime Check](maths/prime_check.py) * [Prime Factors](maths/prime_factors.py) * [Prime Numbers](maths/prime_numbers.py) * [Prime Sieve Eratosthenes](maths/prime_sieve_eratosthenes.py) * [Primelib](maths/primelib.py) * [Print Multiplication Table](maths/print_multiplication_table.py) * [Pronic Number](maths/pronic_number.py) * [Proth Number](maths/proth_number.py) * [Pythagoras](maths/pythagoras.py) * [Qr Decomposition](maths/qr_decomposition.py) * [Quadratic Equations Complex Numbers](maths/quadratic_equations_complex_numbers.py) * [Radians](maths/radians.py) * [Radix2 Fft](maths/radix2_fft.py) * [Relu](maths/relu.py) * [Remove Digit](maths/remove_digit.py) * [Runge Kutta](maths/runge_kutta.py) * [Segmented Sieve](maths/segmented_sieve.py) * Series * [Arithmetic](maths/series/arithmetic.py) * [Geometric](maths/series/geometric.py) * [Geometric Series](maths/series/geometric_series.py) * [Harmonic](maths/series/harmonic.py) * [Harmonic Series](maths/series/harmonic_series.py) * [Hexagonal Numbers](maths/series/hexagonal_numbers.py) * [P Series](maths/series/p_series.py) * [Sieve Of Eratosthenes](maths/sieve_of_eratosthenes.py) * [Sigmoid](maths/sigmoid.py) * [Sigmoid Linear Unit](maths/sigmoid_linear_unit.py) * [Signum](maths/signum.py) * [Simpson Rule](maths/simpson_rule.py) * [Simultaneous Linear Equation Solver](maths/simultaneous_linear_equation_solver.py) * [Sin](maths/sin.py) * [Sock Merchant](maths/sock_merchant.py) * [Softmax](maths/softmax.py) * [Square Root](maths/square_root.py) * [Sum Of Arithmetic Series](maths/sum_of_arithmetic_series.py) * [Sum Of Digits](maths/sum_of_digits.py) * [Sum Of Geometric Progression](maths/sum_of_geometric_progression.py) * [Sum Of Harmonic Series](maths/sum_of_harmonic_series.py) * [Sumset](maths/sumset.py) * [Sylvester Sequence](maths/sylvester_sequence.py) * [Tanh](maths/tanh.py) * [Test Prime Check](maths/test_prime_check.py) * [Trapezoidal Rule](maths/trapezoidal_rule.py) * [Triplet Sum](maths/triplet_sum.py) * [Twin Prime](maths/twin_prime.py) * [Two Pointer](maths/two_pointer.py) * [Two Sum](maths/two_sum.py) * [Ugly Numbers](maths/ugly_numbers.py) * [Volume](maths/volume.py) * [Weird Number](maths/weird_number.py) * [Zellers Congruence](maths/zellers_congruence.py) ## Matrix * [Binary Search Matrix](matrix/binary_search_matrix.py) * [Count Islands In Matrix](matrix/count_islands_in_matrix.py) * [Count Negative Numbers In Sorted Matrix](matrix/count_negative_numbers_in_sorted_matrix.py) * [Count Paths](matrix/count_paths.py) * [Cramers Rule 2X2](matrix/cramers_rule_2x2.py) * [Inverse Of Matrix](matrix/inverse_of_matrix.py) * [Largest Square Area In Matrix](matrix/largest_square_area_in_matrix.py) * [Matrix Class](matrix/matrix_class.py) * [Matrix Operation](matrix/matrix_operation.py) * [Max Area Of Island](matrix/max_area_of_island.py) * [Nth Fibonacci Using Matrix Exponentiation](matrix/nth_fibonacci_using_matrix_exponentiation.py) * [Pascal Triangle](matrix/pascal_triangle.py) * [Rotate Matrix](matrix/rotate_matrix.py) * [Searching In Sorted Matrix](matrix/searching_in_sorted_matrix.py) * [Sherman Morrison](matrix/sherman_morrison.py) * [Spiral Print](matrix/spiral_print.py) * Tests * [Test Matrix Operation](matrix/tests/test_matrix_operation.py) ## Networking Flow * [Ford Fulkerson](networking_flow/ford_fulkerson.py) * [Minimum Cut](networking_flow/minimum_cut.py) ## Neural Network * [2 Hidden Layers 
Neural Network](neural_network/2_hidden_layers_neural_network.py) * Activation Functions * [Exponential Linear Unit](neural_network/activation_functions/exponential_linear_unit.py) * [Back Propagation Neural Network](neural_network/back_propagation_neural_network.py) * [Convolution Neural Network](neural_network/convolution_neural_network.py) * [Perceptron](neural_network/perceptron.py) * [Simple Neural Network](neural_network/simple_neural_network.py) ## Other * [Activity Selection](other/activity_selection.py) * [Alternative List Arrange](other/alternative_list_arrange.py) * [Davisb Putnamb Logemannb Loveland](other/davisb_putnamb_logemannb_loveland.py) * [Dijkstra Bankers Algorithm](other/dijkstra_bankers_algorithm.py) * [Doomsday](other/doomsday.py) * [Fischer Yates Shuffle](other/fischer_yates_shuffle.py) * [Gauss Easter](other/gauss_easter.py) * [Graham Scan](other/graham_scan.py) * [Greedy](other/greedy.py) * [Guess The Number Search](other/guess_the_number_search.py) * [H Index](other/h_index.py) * [Least Recently Used](other/least_recently_used.py) * [Lfu Cache](other/lfu_cache.py) * [Linear Congruential Generator](other/linear_congruential_generator.py) * [Lru Cache](other/lru_cache.py) * [Magicdiamondpattern](other/magicdiamondpattern.py) * [Maximum Subsequence](other/maximum_subsequence.py) * [Nested Brackets](other/nested_brackets.py) * [Number Container System](other/number_container_system.py) * [Password](other/password.py) * [Quine](other/quine.py) * [Scoring Algorithm](other/scoring_algorithm.py) * [Sdes](other/sdes.py) * [Tower Of Hanoi](other/tower_of_hanoi.py) ## Physics * [Altitude Pressure](physics/altitude_pressure.py) * [Archimedes Principle](physics/archimedes_principle.py) * [Basic Orbital Capture](physics/basic_orbital_capture.py) * [Casimir Effect](physics/casimir_effect.py) * [Centripetal Force](physics/centripetal_force.py) * [Grahams Law](physics/grahams_law.py) * [Horizontal Projectile Motion](physics/horizontal_projectile_motion.py) * [Hubble Parameter](physics/hubble_parameter.py) * [Ideal Gas Law](physics/ideal_gas_law.py) * [Kinetic Energy](physics/kinetic_energy.py) * [Lorentz Transformation Four Vector](physics/lorentz_transformation_four_vector.py) * [Malus Law](physics/malus_law.py) * [N Body Simulation](physics/n_body_simulation.py) * [Newtons Law Of Gravitation](physics/newtons_law_of_gravitation.py) * [Newtons Second Law Of Motion](physics/newtons_second_law_of_motion.py) * [Potential Energy](physics/potential_energy.py) * [Rms Speed Of Molecule](physics/rms_speed_of_molecule.py) * [Shear Stress](physics/shear_stress.py) * [Speed Of Sound](physics/speed_of_sound.py) ## Project Euler * Problem 001 * [Sol1](project_euler/problem_001/sol1.py) * [Sol2](project_euler/problem_001/sol2.py) * [Sol3](project_euler/problem_001/sol3.py) * [Sol4](project_euler/problem_001/sol4.py) * [Sol5](project_euler/problem_001/sol5.py) * [Sol6](project_euler/problem_001/sol6.py) * [Sol7](project_euler/problem_001/sol7.py) * Problem 002 * [Sol1](project_euler/problem_002/sol1.py) * [Sol2](project_euler/problem_002/sol2.py) * [Sol3](project_euler/problem_002/sol3.py) * [Sol4](project_euler/problem_002/sol4.py) * [Sol5](project_euler/problem_002/sol5.py) * Problem 003 * [Sol1](project_euler/problem_003/sol1.py) * [Sol2](project_euler/problem_003/sol2.py) * [Sol3](project_euler/problem_003/sol3.py) * Problem 004 * [Sol1](project_euler/problem_004/sol1.py) * [Sol2](project_euler/problem_004/sol2.py) * Problem 005 * [Sol1](project_euler/problem_005/sol1.py) * 
[Sol2](project_euler/problem_005/sol2.py) * Problem 006 * [Sol1](project_euler/problem_006/sol1.py) * [Sol2](project_euler/problem_006/sol2.py) * [Sol3](project_euler/problem_006/sol3.py) * [Sol4](project_euler/problem_006/sol4.py) * Problem 007 * [Sol1](project_euler/problem_007/sol1.py) * [Sol2](project_euler/problem_007/sol2.py) * [Sol3](project_euler/problem_007/sol3.py) * Problem 008 * [Sol1](project_euler/problem_008/sol1.py) * [Sol2](project_euler/problem_008/sol2.py) * [Sol3](project_euler/problem_008/sol3.py) * Problem 009 * [Sol1](project_euler/problem_009/sol1.py) * [Sol2](project_euler/problem_009/sol2.py) * [Sol3](project_euler/problem_009/sol3.py) * Problem 010 * [Sol1](project_euler/problem_010/sol1.py) * [Sol2](project_euler/problem_010/sol2.py) * [Sol3](project_euler/problem_010/sol3.py) * Problem 011 * [Sol1](project_euler/problem_011/sol1.py) * [Sol2](project_euler/problem_011/sol2.py) * Problem 012 * [Sol1](project_euler/problem_012/sol1.py) * [Sol2](project_euler/problem_012/sol2.py) * Problem 013 * [Sol1](project_euler/problem_013/sol1.py) * Problem 014 * [Sol1](project_euler/problem_014/sol1.py) * [Sol2](project_euler/problem_014/sol2.py) * Problem 015 * [Sol1](project_euler/problem_015/sol1.py) * Problem 016 * [Sol1](project_euler/problem_016/sol1.py) * [Sol2](project_euler/problem_016/sol2.py) * Problem 017 * [Sol1](project_euler/problem_017/sol1.py) * Problem 018 * [Solution](project_euler/problem_018/solution.py) * Problem 019 * [Sol1](project_euler/problem_019/sol1.py) * Problem 020 * [Sol1](project_euler/problem_020/sol1.py) * [Sol2](project_euler/problem_020/sol2.py) * [Sol3](project_euler/problem_020/sol3.py) * [Sol4](project_euler/problem_020/sol4.py) * Problem 021 * [Sol1](project_euler/problem_021/sol1.py) * Problem 022 * [Sol1](project_euler/problem_022/sol1.py) * [Sol2](project_euler/problem_022/sol2.py) * Problem 023 * [Sol1](project_euler/problem_023/sol1.py) * Problem 024 * [Sol1](project_euler/problem_024/sol1.py) * Problem 025 * [Sol1](project_euler/problem_025/sol1.py) * [Sol2](project_euler/problem_025/sol2.py) * [Sol3](project_euler/problem_025/sol3.py) * Problem 026 * [Sol1](project_euler/problem_026/sol1.py) * Problem 027 * [Sol1](project_euler/problem_027/sol1.py) * Problem 028 * [Sol1](project_euler/problem_028/sol1.py) * Problem 029 * [Sol1](project_euler/problem_029/sol1.py) * Problem 030 * [Sol1](project_euler/problem_030/sol1.py) * Problem 031 * [Sol1](project_euler/problem_031/sol1.py) * [Sol2](project_euler/problem_031/sol2.py) * Problem 032 * [Sol32](project_euler/problem_032/sol32.py) * Problem 033 * [Sol1](project_euler/problem_033/sol1.py) * Problem 034 * [Sol1](project_euler/problem_034/sol1.py) * Problem 035 * [Sol1](project_euler/problem_035/sol1.py) * Problem 036 * [Sol1](project_euler/problem_036/sol1.py) * Problem 037 * [Sol1](project_euler/problem_037/sol1.py) * Problem 038 * [Sol1](project_euler/problem_038/sol1.py) * Problem 039 * [Sol1](project_euler/problem_039/sol1.py) * Problem 040 * [Sol1](project_euler/problem_040/sol1.py) * Problem 041 * [Sol1](project_euler/problem_041/sol1.py) * Problem 042 * [Solution42](project_euler/problem_042/solution42.py) * Problem 043 * [Sol1](project_euler/problem_043/sol1.py) * Problem 044 * [Sol1](project_euler/problem_044/sol1.py) * Problem 045 * [Sol1](project_euler/problem_045/sol1.py) * Problem 046 * [Sol1](project_euler/problem_046/sol1.py) * Problem 047 * [Sol1](project_euler/problem_047/sol1.py) * Problem 048 * [Sol1](project_euler/problem_048/sol1.py) * Problem 049 * 
[Sol1](project_euler/problem_049/sol1.py) * Problem 050 * [Sol1](project_euler/problem_050/sol1.py) * Problem 051 * [Sol1](project_euler/problem_051/sol1.py) * Problem 052 * [Sol1](project_euler/problem_052/sol1.py) * Problem 053 * [Sol1](project_euler/problem_053/sol1.py) * Problem 054 * [Sol1](project_euler/problem_054/sol1.py) * [Test Poker Hand](project_euler/problem_054/test_poker_hand.py) * Problem 055 * [Sol1](project_euler/problem_055/sol1.py) * Problem 056 * [Sol1](project_euler/problem_056/sol1.py) * Problem 057 * [Sol1](project_euler/problem_057/sol1.py) * Problem 058 * [Sol1](project_euler/problem_058/sol1.py) * Problem 059 * [Sol1](project_euler/problem_059/sol1.py) * Problem 062 * [Sol1](project_euler/problem_062/sol1.py) * Problem 063 * [Sol1](project_euler/problem_063/sol1.py) * Problem 064 * [Sol1](project_euler/problem_064/sol1.py) * Problem 065 * [Sol1](project_euler/problem_065/sol1.py) * Problem 067 * [Sol1](project_euler/problem_067/sol1.py) * [Sol2](project_euler/problem_067/sol2.py) * Problem 068 * [Sol1](project_euler/problem_068/sol1.py) * Problem 069 * [Sol1](project_euler/problem_069/sol1.py) * Problem 070 * [Sol1](project_euler/problem_070/sol1.py) * Problem 071 * [Sol1](project_euler/problem_071/sol1.py) * Problem 072 * [Sol1](project_euler/problem_072/sol1.py) * [Sol2](project_euler/problem_072/sol2.py) * Problem 073 * [Sol1](project_euler/problem_073/sol1.py) * Problem 074 * [Sol1](project_euler/problem_074/sol1.py) * [Sol2](project_euler/problem_074/sol2.py) * Problem 075 * [Sol1](project_euler/problem_075/sol1.py) * Problem 076 * [Sol1](project_euler/problem_076/sol1.py) * Problem 077 * [Sol1](project_euler/problem_077/sol1.py) * Problem 078 * [Sol1](project_euler/problem_078/sol1.py) * Problem 079 * [Sol1](project_euler/problem_079/sol1.py) * Problem 080 * [Sol1](project_euler/problem_080/sol1.py) * Problem 081 * [Sol1](project_euler/problem_081/sol1.py) * Problem 082 * [Sol1](project_euler/problem_082/sol1.py) * Problem 085 * [Sol1](project_euler/problem_085/sol1.py) * Problem 086 * [Sol1](project_euler/problem_086/sol1.py) * Problem 087 * [Sol1](project_euler/problem_087/sol1.py) * Problem 089 * [Sol1](project_euler/problem_089/sol1.py) * Problem 091 * [Sol1](project_euler/problem_091/sol1.py) * Problem 092 * [Sol1](project_euler/problem_092/sol1.py) * Problem 094 * [Sol1](project_euler/problem_094/sol1.py) * Problem 097 * [Sol1](project_euler/problem_097/sol1.py) * Problem 099 * [Sol1](project_euler/problem_099/sol1.py) * Problem 100 * [Sol1](project_euler/problem_100/sol1.py) * Problem 101 * [Sol1](project_euler/problem_101/sol1.py) * Problem 102 * [Sol1](project_euler/problem_102/sol1.py) * Problem 104 * [Sol1](project_euler/problem_104/sol1.py) * Problem 107 * [Sol1](project_euler/problem_107/sol1.py) * Problem 109 * [Sol1](project_euler/problem_109/sol1.py) * Problem 112 * [Sol1](project_euler/problem_112/sol1.py) * Problem 113 * [Sol1](project_euler/problem_113/sol1.py) * Problem 114 * [Sol1](project_euler/problem_114/sol1.py) * Problem 115 * [Sol1](project_euler/problem_115/sol1.py) * Problem 116 * [Sol1](project_euler/problem_116/sol1.py) * Problem 117 * [Sol1](project_euler/problem_117/sol1.py) * Problem 119 * [Sol1](project_euler/problem_119/sol1.py) * Problem 120 * [Sol1](project_euler/problem_120/sol1.py) * Problem 121 * [Sol1](project_euler/problem_121/sol1.py) * Problem 123 * [Sol1](project_euler/problem_123/sol1.py) * Problem 125 * [Sol1](project_euler/problem_125/sol1.py) * Problem 129 * [Sol1](project_euler/problem_129/sol1.py) * 
Problem 131 * [Sol1](project_euler/problem_131/sol1.py) * Problem 135 * [Sol1](project_euler/problem_135/sol1.py) * Problem 144 * [Sol1](project_euler/problem_144/sol1.py) * Problem 145 * [Sol1](project_euler/problem_145/sol1.py) * Problem 173 * [Sol1](project_euler/problem_173/sol1.py) * Problem 174 * [Sol1](project_euler/problem_174/sol1.py) * Problem 180 * [Sol1](project_euler/problem_180/sol1.py) * Problem 187 * [Sol1](project_euler/problem_187/sol1.py) * Problem 188 * [Sol1](project_euler/problem_188/sol1.py) * Problem 191 * [Sol1](project_euler/problem_191/sol1.py) * Problem 203 * [Sol1](project_euler/problem_203/sol1.py) * Problem 205 * [Sol1](project_euler/problem_205/sol1.py) * Problem 206 * [Sol1](project_euler/problem_206/sol1.py) * Problem 207 * [Sol1](project_euler/problem_207/sol1.py) * Problem 234 * [Sol1](project_euler/problem_234/sol1.py) * Problem 301 * [Sol1](project_euler/problem_301/sol1.py) * Problem 493 * [Sol1](project_euler/problem_493/sol1.py) * Problem 551 * [Sol1](project_euler/problem_551/sol1.py) * Problem 587 * [Sol1](project_euler/problem_587/sol1.py) * Problem 686 * [Sol1](project_euler/problem_686/sol1.py) * Problem 800 * [Sol1](project_euler/problem_800/sol1.py) ## Quantum * [Bb84](quantum/bb84.py) * [Deutsch Jozsa](quantum/deutsch_jozsa.py) * [Half Adder](quantum/half_adder.py) * [Not Gate](quantum/not_gate.py) * [Q Fourier Transform](quantum/q_fourier_transform.py) * [Q Full Adder](quantum/q_full_adder.py) * [Quantum Entanglement](quantum/quantum_entanglement.py) * [Quantum Teleportation](quantum/quantum_teleportation.py) * [Ripple Adder Classic](quantum/ripple_adder_classic.py) * [Single Qubit Measure](quantum/single_qubit_measure.py) * [Superdense Coding](quantum/superdense_coding.py) ## Scheduling * [First Come First Served](scheduling/first_come_first_served.py) * [Highest Response Ratio Next](scheduling/highest_response_ratio_next.py) * [Job Sequencing With Deadline](scheduling/job_sequencing_with_deadline.py) * [Multi Level Feedback Queue](scheduling/multi_level_feedback_queue.py) * [Non Preemptive Shortest Job First](scheduling/non_preemptive_shortest_job_first.py) * [Round Robin](scheduling/round_robin.py) * [Shortest Job First](scheduling/shortest_job_first.py) ## Searches * [Binary Search](searches/binary_search.py) * [Binary Tree Traversal](searches/binary_tree_traversal.py) * [Double Linear Search](searches/double_linear_search.py) * [Double Linear Search Recursion](searches/double_linear_search_recursion.py) * [Fibonacci Search](searches/fibonacci_search.py) * [Hill Climbing](searches/hill_climbing.py) * [Interpolation Search](searches/interpolation_search.py) * [Jump Search](searches/jump_search.py) * [Linear Search](searches/linear_search.py) * [Quick Select](searches/quick_select.py) * [Sentinel Linear Search](searches/sentinel_linear_search.py) * [Simple Binary Search](searches/simple_binary_search.py) * [Simulated Annealing](searches/simulated_annealing.py) * [Tabu Search](searches/tabu_search.py) * [Ternary Search](searches/ternary_search.py) ## Sorts * [Bead Sort](sorts/bead_sort.py) * [Binary Insertion Sort](sorts/binary_insertion_sort.py) * [Bitonic Sort](sorts/bitonic_sort.py) * [Bogo Sort](sorts/bogo_sort.py) * [Bubble Sort](sorts/bubble_sort.py) * [Bucket Sort](sorts/bucket_sort.py) * [Circle Sort](sorts/circle_sort.py) * [Cocktail Shaker Sort](sorts/cocktail_shaker_sort.py) * [Comb Sort](sorts/comb_sort.py) * [Counting Sort](sorts/counting_sort.py) * [Cycle Sort](sorts/cycle_sort.py) * [Double Sort](sorts/double_sort.py) * 
[Dutch National Flag Sort](sorts/dutch_national_flag_sort.py) * [Exchange Sort](sorts/exchange_sort.py) * [External Sort](sorts/external_sort.py) * [Gnome Sort](sorts/gnome_sort.py) * [Heap Sort](sorts/heap_sort.py) * [Insertion Sort](sorts/insertion_sort.py) * [Intro Sort](sorts/intro_sort.py) * [Iterative Merge Sort](sorts/iterative_merge_sort.py) * [Merge Insertion Sort](sorts/merge_insertion_sort.py) * [Merge Sort](sorts/merge_sort.py) * [Msd Radix Sort](sorts/msd_radix_sort.py) * [Natural Sort](sorts/natural_sort.py) * [Odd Even Sort](sorts/odd_even_sort.py) * [Odd Even Transposition Parallel](sorts/odd_even_transposition_parallel.py) * [Odd Even Transposition Single Threaded](sorts/odd_even_transposition_single_threaded.py) * [Pancake Sort](sorts/pancake_sort.py) * [Patience Sort](sorts/patience_sort.py) * [Pigeon Sort](sorts/pigeon_sort.py) * [Pigeonhole Sort](sorts/pigeonhole_sort.py) * [Quick Sort](sorts/quick_sort.py) * [Quick Sort 3 Partition](sorts/quick_sort_3_partition.py) * [Radix Sort](sorts/radix_sort.py) * [Random Normal Distribution Quicksort](sorts/random_normal_distribution_quicksort.py) * [Random Pivot Quick Sort](sorts/random_pivot_quick_sort.py) * [Recursive Bubble Sort](sorts/recursive_bubble_sort.py) * [Recursive Insertion Sort](sorts/recursive_insertion_sort.py) * [Recursive Mergesort Array](sorts/recursive_mergesort_array.py) * [Recursive Quick Sort](sorts/recursive_quick_sort.py) * [Selection Sort](sorts/selection_sort.py) * [Shell Sort](sorts/shell_sort.py) * [Shrink Shell Sort](sorts/shrink_shell_sort.py) * [Slowsort](sorts/slowsort.py) * [Stooge Sort](sorts/stooge_sort.py) * [Strand Sort](sorts/strand_sort.py) * [Tim Sort](sorts/tim_sort.py) * [Topological Sort](sorts/topological_sort.py) * [Tree Sort](sorts/tree_sort.py) * [Unknown Sort](sorts/unknown_sort.py) * [Wiggle Sort](sorts/wiggle_sort.py) ## Strings * [Aho Corasick](strings/aho_corasick.py) * [Alternative String Arrange](strings/alternative_string_arrange.py) * [Anagrams](strings/anagrams.py) * [Autocomplete Using Trie](strings/autocomplete_using_trie.py) * [Barcode Validator](strings/barcode_validator.py) * [Boyer Moore Search](strings/boyer_moore_search.py) * [Can String Be Rearranged As Palindrome](strings/can_string_be_rearranged_as_palindrome.py) * [Capitalize](strings/capitalize.py) * [Check Anagrams](strings/check_anagrams.py) * [Credit Card Validator](strings/credit_card_validator.py) * [Detecting English Programmatically](strings/detecting_english_programmatically.py) * [Dna](strings/dna.py) * [Frequency Finder](strings/frequency_finder.py) * [Hamming Distance](strings/hamming_distance.py) * [Indian Phone Validator](strings/indian_phone_validator.py) * [Is Contains Unique Chars](strings/is_contains_unique_chars.py) * [Is Isogram](strings/is_isogram.py) * [Is Pangram](strings/is_pangram.py) * [Is Spain National Id](strings/is_spain_national_id.py) * [Is Srilankan Phone Number](strings/is_srilankan_phone_number.py) * [Is Valid Email Address](strings/is_valid_email_address.py) * [Jaro Winkler](strings/jaro_winkler.py) * [Join](strings/join.py) * [Knuth Morris Pratt](strings/knuth_morris_pratt.py) * [Levenshtein Distance](strings/levenshtein_distance.py) * [Lower](strings/lower.py) * [Manacher](strings/manacher.py) * [Min Cost String Conversion](strings/min_cost_string_conversion.py) * [Naive String Search](strings/naive_string_search.py) * [Ngram](strings/ngram.py) * [Palindrome](strings/palindrome.py) * [Prefix Function](strings/prefix_function.py) * [Rabin Karp](strings/rabin_karp.py) * 
[Remove Duplicate](strings/remove_duplicate.py) * [Reverse Letters](strings/reverse_letters.py) * [Reverse Long Words](strings/reverse_long_words.py) * [Reverse Words](strings/reverse_words.py) * [Snake Case To Camel Pascal Case](strings/snake_case_to_camel_pascal_case.py) * [Split](strings/split.py) * [String Switch Case](strings/string_switch_case.py) * [Text Justification](strings/text_justification.py) * [Top K Frequent Words](strings/top_k_frequent_words.py) * [Upper](strings/upper.py) * [Wave](strings/wave.py) * [Wildcard Pattern Matching](strings/wildcard_pattern_matching.py) * [Word Occurrence](strings/word_occurrence.py) * [Word Patterns](strings/word_patterns.py) * [Z Function](strings/z_function.py) ## Web Programming * [Co2 Emission](web_programming/co2_emission.py) * [Convert Number To Words](web_programming/convert_number_to_words.py) * [Covid Stats Via Xpath](web_programming/covid_stats_via_xpath.py) * [Crawl Google Results](web_programming/crawl_google_results.py) * [Crawl Google Scholar Citation](web_programming/crawl_google_scholar_citation.py) * [Currency Converter](web_programming/currency_converter.py) * [Current Stock Price](web_programming/current_stock_price.py) * [Current Weather](web_programming/current_weather.py) * [Daily Horoscope](web_programming/daily_horoscope.py) * [Download Images From Google Query](web_programming/download_images_from_google_query.py) * [Emails From Url](web_programming/emails_from_url.py) * [Fetch Bbc News](web_programming/fetch_bbc_news.py) * [Fetch Github Info](web_programming/fetch_github_info.py) * [Fetch Jobs](web_programming/fetch_jobs.py) * [Fetch Quotes](web_programming/fetch_quotes.py) * [Fetch Well Rx Price](web_programming/fetch_well_rx_price.py) * [Get Amazon Product Data](web_programming/get_amazon_product_data.py) * [Get Imdb Top 250 Movies Csv](web_programming/get_imdb_top_250_movies_csv.py) * [Get Imdbtop](web_programming/get_imdbtop.py) * [Get Top Hn Posts](web_programming/get_top_hn_posts.py) * [Get User Tweets](web_programming/get_user_tweets.py) * [Giphy](web_programming/giphy.py) * [Instagram Crawler](web_programming/instagram_crawler.py) * [Instagram Pic](web_programming/instagram_pic.py) * [Instagram Video](web_programming/instagram_video.py) * [Nasa Data](web_programming/nasa_data.py) * [Open Google Results](web_programming/open_google_results.py) * [Random Anime Character](web_programming/random_anime_character.py) * [Recaptcha Verification](web_programming/recaptcha_verification.py) * [Reddit](web_programming/reddit.py) * [Search Books By Isbn](web_programming/search_books_by_isbn.py) * [Slack Message](web_programming/slack_message.py) * [Test Fetch Github Info](web_programming/test_fetch_github_info.py) * [World Covid19 Stats](web_programming/world_covid19_stats.py)
1
TheAlgorithms/Python
8,960
Consolidate find_min and find_min recursive and find_max and find_max_recursive
### Describe your change: Merges `find_min` and `find_min_recursive` into just `find_min`. Merges `find_min` and `find_max_recursive` into just `find_max`. * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? * [x] Merge two different implementations into one file ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
CaedenPH
"2023-08-14T10:30:16Z"
"2023-08-14T11:17:27Z"
2ab3bf2689d21e7375539c79ecee358e9d7c3359
fb1b939a89fb08370297cbb455846f61f66847bc
Consolidate find_min and find_min recursive and find_max and find_max_recursive. ### Describe your change: Merges `find_min` and `find_min_recursive` into just `find_min`. Merges `find_min` and `find_max_recursive` into just `find_max`. * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? * [x] Merge two different implementations into one file ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
from __future__ import annotations def find_max(nums: list[int | float]) -> int | float: """ >>> for nums in ([3, 2, 1], [-3, -2, -1], [3, -3, 0], [3.0, 3.1, 2.9]): ... find_max(nums) == max(nums) True True True True >>> find_max([2, 4, 9, 7, 19, 94, 5]) 94 >>> find_max([]) Traceback (most recent call last): ... ValueError: find_max() arg is an empty sequence """ if len(nums) == 0: raise ValueError("find_max() arg is an empty sequence") max_num = nums[0] for x in nums: if x > max_num: max_num = x return max_num if __name__ == "__main__": import doctest doctest.testmod(verbose=True)
from __future__ import annotations def find_max_iterative(nums: list[int | float]) -> int | float: """ >>> for nums in ([3, 2, 1], [-3, -2, -1], [3, -3, 0], [3.0, 3.1, 2.9]): ... find_max_iterative(nums) == max(nums) True True True True >>> find_max_iterative([2, 4, 9, 7, 19, 94, 5]) 94 >>> find_max_iterative([]) Traceback (most recent call last): ... ValueError: find_max_iterative() arg is an empty sequence """ if len(nums) == 0: raise ValueError("find_max_iterative() arg is an empty sequence") max_num = nums[0] for x in nums: if x > max_num: max_num = x return max_num # Divide and Conquer algorithm def find_max_recursive(nums: list[int | float], left: int, right: int) -> int | float: """ find max value in list :param nums: contains elements :param left: index of first element :param right: index of last element :return: max in nums >>> for nums in ([3, 2, 1], [-3, -2, -1], [3, -3, 0], [3.0, 3.1, 2.9]): ... find_max_recursive(nums, 0, len(nums) - 1) == max(nums) True True True True >>> nums = [1, 3, 5, 7, 9, 2, 4, 6, 8, 10] >>> find_max_recursive(nums, 0, len(nums) - 1) == max(nums) True >>> find_max_recursive([], 0, 0) Traceback (most recent call last): ... ValueError: find_max_recursive() arg is an empty sequence >>> find_max_recursive(nums, 0, len(nums)) == max(nums) Traceback (most recent call last): ... IndexError: list index out of range >>> find_max_recursive(nums, -len(nums), -1) == max(nums) True >>> find_max_recursive(nums, -len(nums) - 1, -1) == max(nums) Traceback (most recent call last): ... IndexError: list index out of range """ if len(nums) == 0: raise ValueError("find_max_recursive() arg is an empty sequence") if ( left >= len(nums) or left < -len(nums) or right >= len(nums) or right < -len(nums) ): raise IndexError("list index out of range") if left == right: return nums[left] mid = (left + right) >> 1 # the middle left_max = find_max_recursive(nums, left, mid) # find max in range[left, mid] right_max = find_max_recursive( nums, mid + 1, right ) # find max in range[mid + 1, right] return left_max if left_max >= right_max else right_max if __name__ == "__main__": import doctest doctest.testmod(verbose=True)
1
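For illustration only, here is a minimal, self-contained sketch of the divide-and-conquer pattern that find_max_recursive in the file above relies on (the helper name max_divide_and_conquer is an assumption for this sketch, not a name from the file): split the index range at its midpoint, solve each half recursively, and keep the larger of the two partial results.

from __future__ import annotations


def max_divide_and_conquer(nums: list[int | float], left: int, right: int) -> int | float:
    # Base case: a single element is its own maximum.
    if left == right:
        return nums[left]
    # Midpoint of the current index range, as in the file above.
    mid = (left + right) >> 1
    return max(
        max_divide_and_conquer(nums, left, mid),
        max_divide_and_conquer(nums, mid + 1, right),
    )


if __name__ == "__main__":
    data = [3, -7, 12, 5, 9]
    assert max_divide_and_conquer(data, 0, len(data) - 1) == max(data)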
TheAlgorithms/Python
8,960
Consolidate find_min and find_min recursive and find_max and find_max_recursive
### Describe your change: Merges `find_min` and `find_min_recursive` into just `find_min`. Merges `find_min` and `find_max_recursive` into just `find_max`. * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? * [x] Merge two different implementations into one file ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
CaedenPH
"2023-08-14T10:30:16Z"
"2023-08-14T11:17:27Z"
2ab3bf2689d21e7375539c79ecee358e9d7c3359
fb1b939a89fb08370297cbb455846f61f66847bc
Consolidate find_min and find_min recursive and find_max and find_max_recursive. ### Describe your change: Merges `find_min` and `find_min_recursive` into just `find_min`. Merges `find_min` and `find_max_recursive` into just `find_max`. * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? * [x] Merge two different implementations into one file ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
from __future__ import annotations def find_min(nums: list[int | float]) -> int | float: """ Find Minimum Number in a List :param nums: contains elements :return: min number in list >>> for nums in ([3, 2, 1], [-3, -2, -1], [3, -3, 0], [3.0, 3.1, 2.9]): ... find_min(nums) == min(nums) True True True True >>> find_min([0, 1, 2, 3, 4, 5, -3, 24, -56]) -56 >>> find_min([]) Traceback (most recent call last): ... ValueError: find_min() arg is an empty sequence """ if len(nums) == 0: raise ValueError("find_min() arg is an empty sequence") min_num = nums[0] for num in nums: min_num = min(min_num, num) return min_num if __name__ == "__main__": import doctest doctest.testmod(verbose=True)
from __future__ import annotations def find_min_iterative(nums: list[int | float]) -> int | float: """ Find Minimum Number in a List :param nums: contains elements :return: min number in list >>> for nums in ([3, 2, 1], [-3, -2, -1], [3, -3, 0], [3.0, 3.1, 2.9]): ... find_min_iterative(nums) == min(nums) True True True True >>> find_min_iterative([0, 1, 2, 3, 4, 5, -3, 24, -56]) -56 >>> find_min_iterative([]) Traceback (most recent call last): ... ValueError: find_min_iterative() arg is an empty sequence """ if len(nums) == 0: raise ValueError("find_min_iterative() arg is an empty sequence") min_num = nums[0] for num in nums: min_num = min(min_num, num) return min_num # Divide and Conquer algorithm def find_min_recursive(nums: list[int | float], left: int, right: int) -> int | float: """ find min value in list :param nums: contains elements :param left: index of first element :param right: index of last element :return: min in nums >>> for nums in ([3, 2, 1], [-3, -2, -1], [3, -3, 0], [3.0, 3.1, 2.9]): ... find_min_recursive(nums, 0, len(nums) - 1) == min(nums) True True True True >>> nums = [1, 3, 5, 7, 9, 2, 4, 6, 8, 10] >>> find_min_recursive(nums, 0, len(nums) - 1) == min(nums) True >>> find_min_recursive([], 0, 0) Traceback (most recent call last): ... ValueError: find_min_recursive() arg is an empty sequence >>> find_min_recursive(nums, 0, len(nums)) == min(nums) Traceback (most recent call last): ... IndexError: list index out of range >>> find_min_recursive(nums, -len(nums), -1) == min(nums) True >>> find_min_recursive(nums, -len(nums) - 1, -1) == min(nums) Traceback (most recent call last): ... IndexError: list index out of range """ if len(nums) == 0: raise ValueError("find_min_recursive() arg is an empty sequence") if ( left >= len(nums) or left < -len(nums) or right >= len(nums) or right < -len(nums) ): raise IndexError("list index out of range") if left == right: return nums[left] mid = (left + right) >> 1 # the middle left_min = find_min_recursive(nums, left, mid) # find min in range[left, mid] right_min = find_min_recursive( nums, mid + 1, right ) # find min in range[mid + 1, right] return left_min if left_min <= right_min else right_min if __name__ == "__main__": import doctest doctest.testmod(verbose=True)
1
TheAlgorithms/Python
8,960
Consolidate find_min and find_min recursive and find_max and find_max_recursive
### Describe your change: Merges `find_min` and `find_min_recursive` into just `find_min`. Merges `find_min` and `find_max_recursive` into just `find_max`. * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? * [x] Merge two different implementations into one file ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
CaedenPH
"2023-08-14T10:30:16Z"
"2023-08-14T11:17:27Z"
2ab3bf2689d21e7375539c79ecee358e9d7c3359
fb1b939a89fb08370297cbb455846f61f66847bc
Consolidate find_min and find_min recursive and find_max and find_max_recursive. ### Describe your change: Merges `find_min` and `find_min_recursive` into just `find_min`. Merges `find_min` and `find_max_recursive` into just `find_max`. * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? * [x] Merge two different implementations into one file ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" Project Euler Problem 104 : https://projecteuler.net/problem=104 The Fibonacci sequence is defined by the recurrence relation: Fn = Fn−1 + Fn−2, where F1 = 1 and F2 = 1. It turns out that F541, which contains 113 digits, is the first Fibonacci number for which the last nine digits are 1-9 pandigital (contain all the digits 1 to 9, but not necessarily in order). And F2749, which contains 575 digits, is the first Fibonacci number for which the first nine digits are 1-9 pandigital. Given that Fk is the first Fibonacci number for which the first nine digits AND the last nine digits are 1-9 pandigital, find k. """ import sys sys.set_int_max_str_digits(0) # type: ignore def check(number: int) -> bool: """ Takes a number and checks if it is pandigital both from start and end >>> check(123456789987654321) True >>> check(120000987654321) False >>> check(1234567895765677987654321) True """ check_last = [0] * 11 check_front = [0] * 11 # mark last 9 numbers for _ in range(9): check_last[int(number % 10)] = 1 number = number // 10 # flag f = True # check last 9 numbers for pandigitality for x in range(9): if not check_last[x + 1]: f = False if not f: return f # mark first 9 numbers number = int(str(number)[:9]) for _ in range(9): check_front[int(number % 10)] = 1 number = number // 10 # check first 9 numbers for pandigitality for x in range(9): if not check_front[x + 1]: f = False return f def check1(number: int) -> bool: """ Takes a number and checks if it is pandigital from END >>> check1(123456789987654321) True >>> check1(120000987654321) True >>> check1(12345678957656779870004321) False """ check_last = [0] * 11 # mark last 9 numbers for _ in range(9): check_last[int(number % 10)] = 1 number = number // 10 # flag f = True # check last 9 numbers for pandigitality for x in range(9): if not check_last[x + 1]: f = False return f def solution() -> int: """ Outputs the answer is the least Fibonacci number pandigital from both sides. >>> solution() 329468 """ a = 1 b = 1 c = 2 # temporary Fibonacci numbers a1 = 1 b1 = 1 c1 = 2 # temporary Fibonacci numbers mod 1e9 # mod m=1e9, done for fast optimisation tocheck = [0] * 1000000 m = 1000000000 for x in range(1000000): c1 = (a1 + b1) % m a1 = b1 % m b1 = c1 % m if check1(b1): tocheck[x + 3] = 1 for x in range(1000000): c = a + b a = b b = c # perform check only if in tocheck if tocheck[x + 3] and check(b): return x + 3 # first 2 already done return -1 if __name__ == "__main__": print(f"{solution() = }")
""" Project Euler Problem 104 : https://projecteuler.net/problem=104 The Fibonacci sequence is defined by the recurrence relation: Fn = Fn−1 + Fn−2, where F1 = 1 and F2 = 1. It turns out that F541, which contains 113 digits, is the first Fibonacci number for which the last nine digits are 1-9 pandigital (contain all the digits 1 to 9, but not necessarily in order). And F2749, which contains 575 digits, is the first Fibonacci number for which the first nine digits are 1-9 pandigital. Given that Fk is the first Fibonacci number for which the first nine digits AND the last nine digits are 1-9 pandigital, find k. """ import sys sys.set_int_max_str_digits(0) # type: ignore def check(number: int) -> bool: """ Takes a number and checks if it is pandigital both from start and end >>> check(123456789987654321) True >>> check(120000987654321) False >>> check(1234567895765677987654321) True """ check_last = [0] * 11 check_front = [0] * 11 # mark last 9 numbers for _ in range(9): check_last[int(number % 10)] = 1 number = number // 10 # flag f = True # check last 9 numbers for pandigitality for x in range(9): if not check_last[x + 1]: f = False if not f: return f # mark first 9 numbers number = int(str(number)[:9]) for _ in range(9): check_front[int(number % 10)] = 1 number = number // 10 # check first 9 numbers for pandigitality for x in range(9): if not check_front[x + 1]: f = False return f def check1(number: int) -> bool: """ Takes a number and checks if it is pandigital from END >>> check1(123456789987654321) True >>> check1(120000987654321) True >>> check1(12345678957656779870004321) False """ check_last = [0] * 11 # mark last 9 numbers for _ in range(9): check_last[int(number % 10)] = 1 number = number // 10 # flag f = True # check last 9 numbers for pandigitality for x in range(9): if not check_last[x + 1]: f = False return f def solution() -> int: """ Outputs the answer is the least Fibonacci number pandigital from both sides. >>> solution() 329468 """ a = 1 b = 1 c = 2 # temporary Fibonacci numbers a1 = 1 b1 = 1 c1 = 2 # temporary Fibonacci numbers mod 1e9 # mod m=1e9, done for fast optimisation tocheck = [0] * 1000000 m = 1000000000 for x in range(1000000): c1 = (a1 + b1) % m a1 = b1 % m b1 = c1 % m if check1(b1): tocheck[x + 3] = 1 for x in range(1000000): c = a + b a = b b = c # perform check only if in tocheck if tocheck[x + 3] and check(b): return x + 3 # first 2 already done return -1 if __name__ == "__main__": print(f"{solution() = }")
-1
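As a cross-check on the digit-marking arrays used in the Problem 104 file above, the "last nine digits are 1-9 pandigital" test can also be phrased as a set comparison. A minimal sketch under that reading (the helper name last_nine_pandigital is an assumption, not from the file):

def last_nine_pandigital(number: int) -> bool:
    # The last nine digits are 1-9 pandigital exactly when, taken as a set,
    # they equal {'1', ..., '9'}: no zeros, no repeats, nothing missing.
    return set(str(number)[-9:]) == set("123456789")


if __name__ == "__main__":
    assert last_nine_pandigital(123456789987654321)  # same value the doctests above use
    assert not last_nine_pandigital(123456788)       # duplicate 8, missing 9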
TheAlgorithms/Python
8,960
Consolidate find_min and find_min recursive and find_max and find_max_recursive
### Describe your change: Merges `find_min` and `find_min_recursive` into just `find_min`. Merges `find_min` and `find_max_recursive` into just `find_max`. * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? * [x] Merge two different implementations into one file ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
CaedenPH
"2023-08-14T10:30:16Z"
"2023-08-14T11:17:27Z"
2ab3bf2689d21e7375539c79ecee358e9d7c3359
fb1b939a89fb08370297cbb455846f61f66847bc
Consolidate find_min and find_min recursive and find_max and find_max_recursive. ### Describe your change: Merges `find_min` and `find_min_recursive` into just `find_min`. Merges `find_min` and `find_max_recursive` into just `find_max`. * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? * [x] Merge two different implementations into one file ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" This script demonstrates the implementation of the Sigmoid Linear Unit (SiLU) or swish function. * https://en.wikipedia.org/wiki/Rectifier_(neural_networks) * https://en.wikipedia.org/wiki/Swish_function The function takes a vector x of K real numbers as input and returns x * sigmoid(x). Swish is a smooth, non-monotonic function defined as f(x) = x * sigmoid(x). Extensive experiments shows that Swish consistently matches or outperforms ReLU on deep networks applied to a variety of challenging domains such as image classification and machine translation. This script is inspired by a corresponding research paper. * https://arxiv.org/abs/1710.05941 """ import numpy as np def sigmoid(vector: np.ndarray) -> np.ndarray: """ Mathematical function sigmoid takes a vector x of K real numbers as input and returns 1/ (1 + e^-x). https://en.wikipedia.org/wiki/Sigmoid_function >>> sigmoid(np.array([-1.0, 1.0, 2.0])) array([0.26894142, 0.73105858, 0.88079708]) """ return 1 / (1 + np.exp(-vector)) def sigmoid_linear_unit(vector: np.ndarray) -> np.ndarray: """ Implements the Sigmoid Linear Unit (SiLU) or swish function Parameters: vector (np.ndarray): A numpy array consisting of real values Returns: swish_vec (np.ndarray): The input numpy array, after applying swish Examples: >>> sigmoid_linear_unit(np.array([-1.0, 1.0, 2.0])) array([-0.26894142, 0.73105858, 1.76159416]) >>> sigmoid_linear_unit(np.array([-2])) array([-0.23840584]) """ return vector * sigmoid(vector) if __name__ == "__main__": import doctest doctest.testmod()
""" This script demonstrates the implementation of the Sigmoid Linear Unit (SiLU) or swish function. * https://en.wikipedia.org/wiki/Rectifier_(neural_networks) * https://en.wikipedia.org/wiki/Swish_function The function takes a vector x of K real numbers as input and returns x * sigmoid(x). Swish is a smooth, non-monotonic function defined as f(x) = x * sigmoid(x). Extensive experiments shows that Swish consistently matches or outperforms ReLU on deep networks applied to a variety of challenging domains such as image classification and machine translation. This script is inspired by a corresponding research paper. * https://arxiv.org/abs/1710.05941 """ import numpy as np def sigmoid(vector: np.ndarray) -> np.ndarray: """ Mathematical function sigmoid takes a vector x of K real numbers as input and returns 1/ (1 + e^-x). https://en.wikipedia.org/wiki/Sigmoid_function >>> sigmoid(np.array([-1.0, 1.0, 2.0])) array([0.26894142, 0.73105858, 0.88079708]) """ return 1 / (1 + np.exp(-vector)) def sigmoid_linear_unit(vector: np.ndarray) -> np.ndarray: """ Implements the Sigmoid Linear Unit (SiLU) or swish function Parameters: vector (np.ndarray): A numpy array consisting of real values Returns: swish_vec (np.ndarray): The input numpy array, after applying swish Examples: >>> sigmoid_linear_unit(np.array([-1.0, 1.0, 2.0])) array([-0.26894142, 0.73105858, 1.76159416]) >>> sigmoid_linear_unit(np.array([-2])) array([-0.23840584]) """ return vector * sigmoid(vector) if __name__ == "__main__": import doctest doctest.testmod()
-1
TheAlgorithms/Python
8,960
Consolidate find_min and find_min recursive and find_max and find_max_recursive
### Describe your change: Merges `find_min` and `find_min_recursive` into just `find_min`. Merges `find_min` and `find_max_recursive` into just `find_max`. * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? * [x] Merge two different implementations into one file ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
CaedenPH
"2023-08-14T10:30:16Z"
"2023-08-14T11:17:27Z"
2ab3bf2689d21e7375539c79ecee358e9d7c3359
fb1b939a89fb08370297cbb455846f61f66847bc
Consolidate find_min and find_min recursive and find_max and find_max_recursive. ### Describe your change: Merges `find_min` and `find_min_recursive` into just `find_min`. Merges `find_min` and `find_max_recursive` into just `find_max`. * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? * [x] Merge two different implementations into one file ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
""" Project Euler Problem 5: https://projecteuler.net/problem=5 Smallest multiple 2520 is the smallest number that can be divided by each of the numbers from 1 to 10 without any remainder. What is the smallest positive number that is _evenly divisible_ by all of the numbers from 1 to 20? References: - https://en.wiktionary.org/wiki/evenly_divisible """ def solution(n: int = 20) -> int: """ Returns the smallest positive number that is evenly divisible (divisible with no remainder) by all of the numbers from 1 to n. >>> solution(10) 2520 >>> solution(15) 360360 >>> solution(22) 232792560 >>> solution(3.4) 6 >>> solution(0) Traceback (most recent call last): ... ValueError: Parameter n must be greater than or equal to one. >>> solution(-17) Traceback (most recent call last): ... ValueError: Parameter n must be greater than or equal to one. >>> solution([]) Traceback (most recent call last): ... TypeError: Parameter n must be int or castable to int. >>> solution("asd") Traceback (most recent call last): ... TypeError: Parameter n must be int or castable to int. """ try: n = int(n) except (TypeError, ValueError): raise TypeError("Parameter n must be int or castable to int.") if n <= 0: raise ValueError("Parameter n must be greater than or equal to one.") i = 0 while 1: i += n * (n - 1) nfound = 0 for j in range(2, n): if i % j != 0: nfound = 1 break if nfound == 0: if i == 0: i = 1 return i return None if __name__ == "__main__": print(f"{solution() = }")
""" Project Euler Problem 5: https://projecteuler.net/problem=5 Smallest multiple 2520 is the smallest number that can be divided by each of the numbers from 1 to 10 without any remainder. What is the smallest positive number that is _evenly divisible_ by all of the numbers from 1 to 20? References: - https://en.wiktionary.org/wiki/evenly_divisible """ def solution(n: int = 20) -> int: """ Returns the smallest positive number that is evenly divisible (divisible with no remainder) by all of the numbers from 1 to n. >>> solution(10) 2520 >>> solution(15) 360360 >>> solution(22) 232792560 >>> solution(3.4) 6 >>> solution(0) Traceback (most recent call last): ... ValueError: Parameter n must be greater than or equal to one. >>> solution(-17) Traceback (most recent call last): ... ValueError: Parameter n must be greater than or equal to one. >>> solution([]) Traceback (most recent call last): ... TypeError: Parameter n must be int or castable to int. >>> solution("asd") Traceback (most recent call last): ... TypeError: Parameter n must be int or castable to int. """ try: n = int(n) except (TypeError, ValueError): raise TypeError("Parameter n must be int or castable to int.") if n <= 0: raise ValueError("Parameter n must be greater than or equal to one.") i = 0 while 1: i += n * (n - 1) nfound = 0 for j in range(2, n): if i % j != 0: nfound = 1 break if nfound == 0: if i == 0: i = 1 return i return None if __name__ == "__main__": print(f"{solution() = }")
-1
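The brute-force search in the Problem 5 file above can be contrasted with the standard least-common-multiple formulation: fold lcm over 1..n. A minimal sketch using only the standard library (the function name smallest_multiple is an assumption for this sketch, not taken from the file):

from functools import reduce
from math import gcd


def smallest_multiple(n: int = 20) -> int:
    # lcm(a, b) == a * b // gcd(a, b); folding it over 1..n yields the
    # smallest number evenly divisible by every integer in that range.
    return reduce(lambda a, b: a * b // gcd(a, b), range(1, n + 1), 1)


if __name__ == "__main__":
    assert smallest_multiple(10) == 2520        # matches the doctest above
    assert smallest_multiple(20) == 232792560   # matches solution() above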
TheAlgorithms/Python
8,960
Consolidate find_min and find_min recursive and find_max and find_max_recursive
### Describe your change: Merges `find_min` and `find_min_recursive` into just `find_min`. Merges `find_min` and `find_max_recursive` into just `find_max`. * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? * [x] Merge two different implementations into one file ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
CaedenPH
"2023-08-14T10:30:16Z"
"2023-08-14T11:17:27Z"
2ab3bf2689d21e7375539c79ecee358e9d7c3359
fb1b939a89fb08370297cbb455846f61f66847bc
Consolidate find_min and find_min recursive and find_max and find_max_recursive. ### Describe your change: Merges `find_min` and `find_min_recursive` into just `find_min`. Merges `find_min` and `find_max_recursive` into just `find_max`. * [ ] Add an algorithm? * [ ] Fix a bug or typo in an existing algorithm? * [ ] Documentation change? * [x] Merge two different implementations into one file ### Checklist: * [x] I have read [CONTRIBUTING.md](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md). * [x] This pull request is all my own work -- I have not plagiarized. * [x] I know that pull requests will not be merged if they fail the automated tests. * [x] This PR only changes one algorithm file. To ease review, please open separate PRs for separate algorithms. * [x] All new Python files are placed inside an existing directory. * [x] All filenames are in all lowercase characters with no spaces or dashes. * [x] All functions and variable names follow Python naming conventions. * [x] All function parameters and return values are annotated with Python [type hints](https://docs.python.org/3/library/typing.html). * [x] All functions have [doctests](https://docs.python.org/3/library/doctest.html) that pass the automated testing. * [x] All new algorithms include at least one URL that points to Wikipedia or another similar explanation. * [x] If this pull request resolves one or more open issues then the description above includes the issue number(s) with a [closing keyword](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue): "Fixes #ISSUE-NUMBER".
# XGBoost Regressor Example import numpy as np from sklearn.datasets import fetch_california_housing from sklearn.metrics import mean_absolute_error, mean_squared_error from sklearn.model_selection import train_test_split from xgboost import XGBRegressor def data_handling(data: dict) -> tuple: # Split dataset into features and target. Data is features. """ >>> data_handling(( ... {'data':'[ 8.3252 41. 6.9841269 1.02380952 322. 2.55555556 37.88 -122.23 ]' ... ,'target':([4.526])})) ('[ 8.3252 41. 6.9841269 1.02380952 322. 2.55555556 37.88 -122.23 ]', [4.526]) """ return (data["data"], data["target"]) def xgboost( features: np.ndarray, target: np.ndarray, test_features: np.ndarray ) -> np.ndarray: """ >>> xgboost(np.array([[ 2.3571 , 52. , 6.00813008, 1.06775068, ... 907. , 2.45799458, 40.58 , -124.26]]),np.array([1.114]), ... np.array([[1.97840000e+00, 3.70000000e+01, 4.98858447e+00, 1.03881279e+00, ... 1.14300000e+03, 2.60958904e+00, 3.67800000e+01, -1.19780000e+02]])) array([[1.1139996]], dtype=float32) """ xgb = XGBRegressor(verbosity=0, random_state=42) xgb.fit(features, target) # Predict target for test data predictions = xgb.predict(test_features) predictions = predictions.reshape(len(predictions), 1) return predictions def main() -> None: """ >>> main() Mean Absolute Error : 0.30957163379906033 Mean Square Error : 0.22611560196662744 The URL for this algorithm https://xgboost.readthedocs.io/en/stable/ California house price dataset is used to demonstrate the algorithm. """ # Load California house price dataset california = fetch_california_housing() data, target = data_handling(california) x_train, x_test, y_train, y_test = train_test_split( data, target, test_size=0.25, random_state=1 ) predictions = xgboost(x_train, y_train, x_test) # Error printing print(f"Mean Absolute Error : {mean_absolute_error(y_test, predictions)}") print(f"Mean Square Error : {mean_squared_error(y_test, predictions)}") if __name__ == "__main__": import doctest doctest.testmod(verbose=True) main()
# XGBoost Regressor Example import numpy as np from sklearn.datasets import fetch_california_housing from sklearn.metrics import mean_absolute_error, mean_squared_error from sklearn.model_selection import train_test_split from xgboost import XGBRegressor def data_handling(data: dict) -> tuple: # Split dataset into features and target. Data is features. """ >>> data_handling(( ... {'data':'[ 8.3252 41. 6.9841269 1.02380952 322. 2.55555556 37.88 -122.23 ]' ... ,'target':([4.526])})) ('[ 8.3252 41. 6.9841269 1.02380952 322. 2.55555556 37.88 -122.23 ]', [4.526]) """ return (data["data"], data["target"]) def xgboost( features: np.ndarray, target: np.ndarray, test_features: np.ndarray ) -> np.ndarray: """ >>> xgboost(np.array([[ 2.3571 , 52. , 6.00813008, 1.06775068, ... 907. , 2.45799458, 40.58 , -124.26]]),np.array([1.114]), ... np.array([[1.97840000e+00, 3.70000000e+01, 4.98858447e+00, 1.03881279e+00, ... 1.14300000e+03, 2.60958904e+00, 3.67800000e+01, -1.19780000e+02]])) array([[1.1139996]], dtype=float32) """ xgb = XGBRegressor(verbosity=0, random_state=42) xgb.fit(features, target) # Predict target for test data predictions = xgb.predict(test_features) predictions = predictions.reshape(len(predictions), 1) return predictions def main() -> None: """ >>> main() Mean Absolute Error : 0.30957163379906033 Mean Square Error : 0.22611560196662744 The URL for this algorithm https://xgboost.readthedocs.io/en/stable/ California house price dataset is used to demonstrate the algorithm. """ # Load California house price dataset california = fetch_california_housing() data, target = data_handling(california) x_train, x_test, y_train, y_test = train_test_split( data, target, test_size=0.25, random_state=1 ) predictions = xgboost(x_train, y_train, x_test) # Error printing print(f"Mean Absolute Error : {mean_absolute_error(y_test, predictions)}") print(f"Mean Square Error : {mean_squared_error(y_test, predictions)}") if __name__ == "__main__": import doctest doctest.testmod(verbose=True) main()
-1