blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5946b71e2ab0dea1477088813ea136f3eec56027 | 649bd422025e421d86025743eac324c9b882a2e8 | /exam/1_three-dimensional_atomic_system/dump/phasetrans/temp204_3500.py | 2257cb33602a12bda3d813cf203fd13c700b9869 | []
| no_license | scheuclu/atom_class | 36ddee1f6a5995872e858add151c5942c109847c | 0c9a8c63d9b38898c1869fe8983126cef17662cd | refs/heads/master | 2021-01-21T10:52:28.448221 | 2017-03-07T23:04:41 | 2017-03-07T23:04:41 | 83,489,471 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 68,767 | py | ITEM: TIMESTEP
3500
ITEM: NUMBER OF ATOMS
2048
ITEM: BOX BOUNDS pp pp pp
-1.6878559649266461e+02 2.1598559649270894e+02
-1.6878559649266461e+02 2.1598559649270894e+02
-1.6878559649266461e+02 2.1598559649270894e+02
ITEM: ATOMS id type xs ys zs
782 1 0.287021 0.0038045 0.0236388
558 1 0.674384 0.0753822 0.0141806
571 1 0.736726 0.027479 0.0146
226 1 0.184719 0.0834659 0.0136485
1090 1 0.788186 0.0986098 0.0349484
1676 1 0.227413 0.0790193 0.00367851
623 1 0.108919 0.164679 0.0241208
743 1 0.271672 0.202921 0.0259242
799 1 0.203033 0.0074178 0.0604077
1835 1 0.259457 0.228718 0.00165648
1563 1 0.49129 0.205837 0.0154109
1223 1 0.585254 0.201326 0.0179075
1018 1 0.400539 0.411626 0.492716
357 1 0.909863 0.493517 0.0903839
1797 1 0.266311 0.48873 0.332012
426 1 0.784501 0.268655 0.00500636
791 1 0.734288 0.293108 0.0269897
1728 1 0.0389256 0.384458 0.458482
1456 1 0.150157 0.349357 0.00241034
309 1 0.54609 0.317467 0.0291573
144 1 0.692452 0.368829 0.00669871
360 1 0.916866 0.384515 0.0110187
14 1 0.254698 0.428594 0.00105429
189 1 0.376469 0.405675 0.0129368
846 1 0.736811 0.435988 0.0153343
549 1 0.715147 0.270148 0.486979
750 1 0.64092 0.430304 0.0394032
1163 1 0.747024 0.0175343 0.164214
1054 1 0.844282 0.480745 0.0171216
685 1 0.895716 0.030892 0.00829416
829 1 0.941723 0.0389907 0.0372865
192 1 0.445185 0.0627151 0.0575359
1946 1 0.418913 0.0480866 0.0209015
1846 1 0.507345 0.0755584 0.0195348
1333 1 0.862303 0.107428 0.040977
1929 1 0.142456 0.148418 0.0228166
38 1 0.767703 0.128092 0.0522613
1008 1 0.0300546 0.110813 0.0168059
1437 1 0.101936 0.180725 0.0408652
1150 1 0.739122 0.169522 0.026076
983 1 0.456741 0.201169 0.0617608
524 1 0.125748 0.214096 0.00833705
1354 1 0.931753 0.199453 0.00600315
706 1 0.220997 0.00933749 0.277754
1302 1 0.186194 0.294557 0.0354958
2036 1 0.370107 0.0028572 0.367635
1392 1 0.683561 0.275423 0.0230088
452 1 0.0695915 0.324991 0.00186946
861 1 0.0379344 0.345984 0.0397851
1365 1 0.266219 0.00690948 0.0550473
1965 1 0.913325 0.431044 0.0299338
2045 1 0.447454 0.450174 0.027246
807 1 0.335688 0.482712 0.0408451
1103 1 0.0397625 0.499707 0.314927
1180 1 0.555527 0.0320188 0.0253623
1882 1 0.762806 0.00528769 0.0638813
1397 1 0.0151058 0.06914 0.0535124
518 1 0.056648 0.0684046 0.0438589
612 1 0.492857 0.0805633 0.0895032
1136 1 0.241526 0.0864265 0.0693561
1252 1 0.344863 0.137799 0.0324573
638 1 0.150795 0.122494 0.0276713
1259 1 0.818492 0.115782 0.0593689
1695 1 0.0340027 0.11645 0.0294622
574 1 0.978962 0.15037 0.0447155
41 1 0.398178 0.158336 0.0265926
1131 1 0.00954327 0.177606 0.0477161
1884 1 0.312085 0.22046 0.0706057
154 1 0.804338 0.205391 0.0445314
512 1 0.284232 0.261926 0.0368946
691 1 0.205863 0.26872 0.0507788
484 1 0.57605 0.374686 0.0365296
1524 1 0.975459 0.390695 0.0469859
1496 1 0.35629 0.404183 0.0620713
908 1 0.917559 0.425821 0.0765571
1682 1 0.305079 0.471399 0.0938703
1481 1 0.241518 0.0253495 0.0674157
2003 1 0.743742 0.0770412 0.091003
1417 1 0.184156 0.0791361 0.093799
796 1 0.0264026 0.147969 0.063121
1485 1 0.247229 0.110653 0.0393468
1139 1 0.782689 0.13866 0.0501052
860 1 0.517866 0.201737 0.0555555
506 1 0.788743 0.243297 0.050376
371 1 0.0523194 0.197881 0.0700528
493 1 0.0817839 0.254058 0.0664452
1698 1 0.136787 0.320523 0.0666443
362 1 0.320837 0.283392 0.0796508
83 1 0.059732 0.332074 0.0823323
1381 1 0.732452 0.334714 0.0570731
225 1 0.0660459 0.381996 0.025837
1928 1 0.637737 0.365677 0.0433966
1820 1 0.942778 0.479258 0.0559571
1810 1 0.316674 0.0120909 0.116246
379 1 0.632373 0.018177 0.00658732
952 1 0.582316 0.0642909 0.0732702
1115 1 0.955171 0.107175 0.0681753
204 1 0.763829 0.171949 0.102948
1155 1 0.860355 0.1633 0.0991157
1382 1 0.484458 0.218385 0.0566054
1967 1 0.702706 0.215017 0.0757936
299 1 0.208754 0.215118 0.0720838
1227 1 0.558165 0.203348 0.102146
903 1 0.817001 0.232079 0.0710232
1785 1 0.585239 0.261607 0.0771155
370 1 0.281007 0.347485 0.0857349
898 1 0.1668 0.365741 0.0749074
1343 1 0.554586 0.317254 0.097288
1553 1 0.722887 0.372426 0.0627371
1710 1 0.769106 0.333698 0.0454849
682 1 0.185682 0.373574 0.100956
610 1 0.650017 0.376832 0.0758598
1592 1 0.635991 0.441533 0.0891263
1439 1 0.189981 0.483363 0.0617224
1560 1 0.865861 0.0680837 0.118445
994 1 0.933144 0.0839245 0.102352
20 1 0.379122 0.0857993 0.0863376
400 1 0.770886 0.0768227 0.130855
1803 1 0.0364397 0.0299154 0.0914506
835 1 0.184129 0.0816248 0.14002
1387 1 0.194635 0.106546 0.125463
103 1 0.313094 0.0907517 0.0953701
406 1 0.294427 0.135443 0.103403
272 1 0.683818 0.138294 0.0546698
1923 1 0.346061 0.185645 0.120381
1957 1 0.393076 0.153293 0.122174
274 1 0.940577 0.157594 0.111689
1187 1 0.876283 0.174591 0.117858
10 1 0.277295 0.179551 0.100658
606 1 0.445259 0.190294 0.105975
1953 1 0.556248 0.195546 0.119983
1837 1 0.884819 0.213701 0.0931976
18 1 0.782106 0.205365 0.125225
1949 1 0.31512 0.270929 0.105841
1222 1 0.845735 0.30081 0.0825042
1869 1 0.36766 0.297017 0.0737174
1198 1 0.371852 0.305747 0.191936
212 1 0.47927 0.363911 0.0973239
1095 1 0.108164 0.409563 0.115188
517 1 0.942084 0.00103717 0.111065
1419 1 0.154218 0.485555 0.113931
1248 1 0.40247 0.499375 0.0736089
1415 1 0.812539 0.478354 0.0745627
1320 1 0.396397 0.0192811 0.131363
444 1 0.0340229 0.0854257 0.11452
1568 1 0.240147 0.0351204 0.119827
876 1 0.822207 0.0744837 0.115729
208 1 0.228765 0.103578 0.134322
655 1 0.329301 0.0565481 0.114001
547 1 0.504865 0.138093 0.108393
505 1 0.275342 0.171592 0.125223
439 1 0.539916 0.230441 0.125406
1236 1 0.343082 0.264884 0.109209
569 1 0.298228 0.219651 0.11447
884 1 0.646685 0.311682 0.138117
416 1 0.77075 0.270784 0.117851
679 1 0.933636 0.403993 0.140574
1659 1 0.0554636 0.394279 0.144775
1215 1 0.0313014 0.417493 0.0901817
954 1 0.704088 0.451981 0.105422
1121 1 0.925275 0.451364 0.143669
986 1 0.35526 0.498855 0.142014
2025 1 0.0225357 0.00991743 0.139801
29 1 0.924769 0.00818089 0.155675
1838 1 0.993478 0.0808383 0.105172
700 1 0.562136 0.0652294 0.132365
372 1 0.815862 0.111629 0.146973
6 1 0.0513336 0.174651 0.126019
247 1 0.0794059 0.174611 0.127082
1114 1 0.346339 0.172607 0.132497
1213 1 0.279739 0.130578 0.162471
1996 1 0.777592 0.150087 0.109404
911 1 0.812831 0.191335 0.133773
1516 1 0.0966649 0.226508 0.115289
431 1 0.443988 0.244612 0.124546
902 1 0.494392 0.289867 0.103576
1263 1 0.55772 0.246615 0.120425
1811 1 0.34424 0.27363 0.184441
1451 1 0.268059 0.31253 0.132988
602 1 0.276811 0.307212 0.127508
1658 1 0.505746 0.31167 0.173571
109 1 0.465051 0.40734 0.134749
611 1 0.255951 0.448195 0.117327
1452 1 0.30859 0.374717 0.489829
1375 1 0.62733 0.0644468 0.170383
813 1 0.481391 0.0386995 0.160522
1630 1 0.614435 0.0938597 0.159706
314 1 0.84983 0.100004 0.168491
445 1 0.116059 0.13583 0.131465
1806 1 0.463468 0.160078 0.174104
96 1 0.359304 0.192862 0.149896
1120 1 0.0455706 0.218417 0.162012
160 1 0.440835 0.223693 0.144434
1966 1 0.711456 0.262053 0.164611
906 1 0.767894 0.301634 0.145577
390 1 0.966523 0.301803 0.170161
261 1 0.260034 0.322886 0.150767
818 1 0.138347 0.328246 0.190559
1005 1 0.00786844 0.370661 0.163789
648 1 0.188977 0.374197 0.118522
730 1 0.918444 0.370141 0.140809
1323 1 0.0946509 0.39509 0.155176
1663 1 0.643506 0.425798 0.162036
1478 1 0.677944 0.478068 0.150859
1905 1 0.96585 0.487115 0.14732
1943 1 0.460357 0.0358476 0.216611
460 1 0.687364 0.0815561 0.192595
1246 1 0.498706 0.0842038 0.187557
389 1 0.898429 0.0921039 0.207849
1674 1 0.39597 0.0866072 0.181113
1690 1 0.014211 0.164167 0.228486
740 1 0.988072 0.174986 0.203798
1226 1 0.216575 0.176206 0.172758
59 1 0.749203 0.205823 0.223798
221 1 0.178494 0.251901 0.17481
1854 1 0.104188 0.257518 0.215181
1470 1 0.375579 0.273413 0.148331
1736 1 0.326288 0.293427 0.193692
339 1 0.328988 0.282675 0.194964
1608 1 0.570826 0.305706 0.18694
888 1 0.574872 0.407162 0.196129
1898 1 0.793768 0.421244 0.162396
137 1 0.703599 0.428438 0.180937
1857 1 0.948483 0.456169 0.205836
837 1 0.410388 0.473917 0.218327
1108 1 0.970037 0.476539 0.187242
1498 1 0.197741 0.0707624 0.206948
57 1 0.238926 0.0713623 0.17198
182 1 0.195274 0.0610779 0.195963
1772 1 0.8587 0.256787 0.20077
1316 1 0.151923 0.319025 0.163632
106 1 0.0251128 0.342088 0.188473
1986 1 0.486553 0.420399 0.188997
847 1 0.651748 0.461015 0.201596
1995 1 0.11033 0.0247968 0.24455
1078 1 0.494955 0.0490563 0.235647
1547 1 0.27156 0.068689 0.207544
1400 1 0.668226 0.107029 0.224442
1324 1 0.0190366 0.122983 0.223131
823 1 0.543764 0.198151 0.207125
1620 1 0.856197 0.209006 0.201825
1802 1 0.663523 0.252277 0.194891
722 1 0.358293 0.276734 0.220728
765 1 0.515333 0.293687 0.271072
508 1 0.678851 0.29078 0.219947
1948 1 0.340551 0.349 0.211628
1300 1 0.892862 0.379746 0.213061
987 1 0.636802 0.415098 0.228032
559 1 0.691901 0.392026 0.22023
1126 1 0.847048 0.477177 0.233831
2023 1 0.555699 0.475628 0.229519
276 1 0.277666 0.112673 0.240469
1162 1 0.31835 0.133797 0.246262
857 1 0.928946 0.177933 0.248519
1256 1 0.978084 0.198923 0.251933
1816 1 0.358884 0.178161 0.217232
892 1 0.240483 0.277515 0.213401
408 1 0.984556 0.261478 0.267916
229 1 0.238511 0.341219 0.240283
1856 1 0.781813 0.418284 0.231375
235 1 0.838496 0.357054 0.216898
1959 1 0.413702 0.420862 0.192702
1576 1 0.734829 0.407058 0.228112
1822 1 0.787922 0.499527 0.254798
1228 1 0.326362 0.488209 0.225576
1398 1 0.521024 0.353481 0.497498
1421 1 0.534309 0.409643 0.491832
1412 1 0.308551 0.0687717 0.229939
1578 1 0.702477 0.0662001 0.233131
978 1 0.0841401 0.0815897 0.259851
241 1 0.450492 0.0775986 0.214843
858 1 0.689748 0.124058 0.232964
1318 1 0.50253 0.165648 0.222932
1754 1 0.0944126 0.215875 0.277983
1683 1 0.452338 0.195767 0.232613
1141 1 0.796611 0.20832 0.261098
1509 1 0.551539 0.237224 0.231596
1638 1 0.736071 0.255735 0.254045
1705 1 0.751376 0.304689 0.239469
1294 1 0.12236 0.317179 0.266831
1769 1 0.200455 0.338329 0.270811
553 1 0.295005 0.362777 0.249539
710 1 0.797775 0.362794 0.251569
117 1 0.211709 0.404274 0.24278
1548 1 0.606468 0.37435 0.26184
284 1 0.475236 0.433494 0.267683
1801 1 0.555165 0.0379703 0.22895
288 1 0.803134 0.0950364 0.285214
1963 1 0.423788 0.150294 0.238629
66 1 0.798844 0.172214 0.261436
542 1 0.270345 0.171205 0.198883
863 1 0.553072 0.141081 0.254113
1505 1 0.0464571 0.256475 0.291669
2040 1 0.396648 0.279204 0.25701
794 1 0.752894 0.268025 0.275044
1487 1 0.888209 0.240675 0.268233
1186 1 0.654744 0.261238 0.275026
1352 1 0.609966 0.295256 0.27602
1821 1 0.614782 0.323984 0.24435
696 1 0.915113 0.329737 0.281597
53 1 0.452197 0.418388 0.265289
1083 1 0.0280404 0.461744 0.256597
1475 1 0.713725 0.441303 0.248046
1947 1 0.246247 0.0259137 0.240857
1650 1 0.547576 0.0577069 0.303853
318 1 0.343418 0.0176758 0.296418
1703 1 0.445073 0.0757473 0.274881
1249 1 0.145708 0.133022 0.283109
1188 1 0.0500057 0.20591 0.310057
201 1 0.0728814 0.212699 0.269752
658 1 0.271869 0.251742 0.301778
840 1 0.70106 0.254783 0.287213
1533 1 0.971237 0.224845 0.288412
291 1 0.652353 0.264088 0.286596
561 1 0.814403 0.291752 0.317662
186 1 0.0790896 0.324947 0.268963
1685 1 0.256189 0.294996 0.299052
1247 1 0.968938 0.339582 0.305929
1628 1 0.994933 0.378932 0.302897
1661 1 0.0124213 0.427168 0.291994
1612 1 0.591416 0.435456 0.285315
1858 1 0.423912 0.490147 0.315012
1887 1 0.0438806 0.0410799 0.278355
1269 1 0.21455 0.0300814 0.313529
115 1 0.529123 0.0569256 0.295645
1238 1 0.221902 0.0353297 0.314785
170 1 0.0250052 0.217475 0.310911
1961 1 0.0728521 0.19768 0.304513
1212 1 0.81151 0.23216 0.312328
2009 1 0.953153 0.248426 0.28384
831 1 0.113947 0.273819 0.328559
441 1 0.796152 0.336244 0.301957
367 1 0.735634 0.365427 0.318074
727 1 0.53717 0.46366 0.296483
486 1 0.505076 0.443258 0.298219
399 1 0.0821369 0.492126 0.309344
1991 1 0.444446 0.468655 0.319458
296 1 0.174119 0.0224153 0.348384
1532 1 0.0457431 0.0406417 0.336697
236 1 0.225127 0.0362999 0.325069
1377 1 0.941291 0.102037 0.307438
899 1 0.20317 0.152226 0.319841
560 1 0.771624 0.321878 0.336565
1718 1 0.68494 0.302933 0.325431
392 1 0.463601 0.397695 0.358336
1886 1 0.185032 0.44337 0.343085
343 1 0.422451 0.405435 0.322827
1760 1 0.0258456 0.496146 0.320436
1193 1 0.856153 0.00180593 0.380313
1626 1 0.579272 0.49928 0.265929
1881 1 0.671428 0.055723 0.380669
1539 1 0.560223 0.062079 0.376208
714 1 0.931089 0.0681895 0.337925
804 1 0.778193 0.137082 0.364371
1448 1 0.831474 0.129916 0.335897
1285 1 0.730081 0.218121 0.340728
482 1 0.615786 0.184294 0.354219
1692 1 0.85892 0.180864 0.348877
23 1 0.349971 0.223878 0.366658
1216 1 0.800015 0.255763 0.356328
1270 1 0.989228 0.266158 0.337086
39 1 0.986182 0.284446 0.330014
1015 1 0.592559 0.328506 0.343371
1 1 0.336917 0.343918 0.330277
534 1 0.290976 0.354941 0.343464
1918 1 0.882332 0.360418 0.346037
661 1 0.505604 0.404924 0.324458
520 1 0.279569 0.438328 0.368414
810 1 0.358073 0.454786 0.364444
1160 1 0.0526779 0.436698 0.351694
1242 1 0.362746 0.465226 0.311463
1137 1 0.635559 0.45138 0.360241
124 1 0.822895 0.462298 0.35478
1156 1 0.0270468 0.195759 0.00199635
1622 1 0.524868 0.0379691 0.483405
1488 1 0.643119 0.0474684 0.358187
1956 1 0.285987 0.0809693 0.34685
1250 1 0.721586 0.0941998 0.331001
1894 1 0.0132839 0.13172 0.348165
1264 1 0.868886 0.0997435 0.368574
465 1 0.540127 0.162985 0.373735
122 1 0.78225 0.169154 0.37692
662 1 0.757856 0.185322 0.356046
896 1 0.843989 0.167609 0.355294
1235 1 0.587114 0.225625 0.362527
91 1 0.624572 0.207145 0.375233
779 1 0.117892 0.269025 0.350574
1721 1 0.122434 0.249622 0.344743
864 1 0.886497 0.29905 0.332914
1771 1 0.756642 0.323842 0.347088
1409 1 0.907551 0.340512 0.325246
1038 1 0.145232 0.380788 0.384223
998 1 0.51625 0.396599 0.365561
1459 1 0.948144 0.467644 0.383181
361 1 0.75539 0.212207 0.491213
425 1 0.619327 0.0122358 0.377223
135 1 0.525987 0.0817393 0.406893
1361 1 0.234746 0.0920852 0.343643
583 1 0.545807 0.0926217 0.364153
1777 1 0.582036 0.0906657 0.371461
1268 1 0.401881 0.107297 0.37138
1388 1 0.326358 0.174043 0.375603
107 1 0.456928 0.151889 0.350119
258 1 0.480123 0.193483 0.399006
82 1 0.090125 0.200369 0.362312
529 1 0.616649 0.222432 0.375574
1584 1 0.764849 0.208665 0.392434
1507 1 0.290242 0.261465 0.371658
196 1 0.715533 0.250348 0.388641
856 1 0.548398 0.294838 0.376015
375 1 0.693763 0.384925 0.390559
1406 1 0.111841 0.454581 0.381033
1453 1 0.337273 0.457363 0.362314
618 1 0.44073 0.42383 0.469701
900 1 0.385169 0.433493 0.486223
1556 1 0.49073 0.0223658 0.401047
428 1 0.995963 0.102505 0.39111
1925 1 0.719585 0.146022 0.383518
607 1 0.457128 0.196331 0.413859
676 1 0.477095 0.216893 0.36285
377 1 0.426222 0.191486 0.391231
937 1 0.50777 0.225467 0.393405
474 1 0.371922 0.213393 0.369032
760 1 0.184705 0.272781 0.411696
1344 1 0.767182 0.30747 0.412412
77 1 0.534929 0.31076 0.382512
1725 1 0.578261 0.316222 0.408774
1355 1 0.253704 0.429014 0.373645
345 1 0.84067 0.439187 0.355244
1486 1 0.746567 0.418679 0.490251
88 1 0.789905 0.00556533 0.40853
759 1 0.315685 0.0527014 0.362694
1019 1 0.730343 0.0798761 0.418776
1434 1 0.763392 0.10101 0.393826
68 1 0.596228 0.160154 0.375682
253 1 0.838126 0.138188 0.427555
1684 1 0.704326 0.17877 0.403888
784 1 0.0143734 0.182911 0.405866
788 1 0.282563 0.227309 0.430184
1260 1 0.682353 0.198681 0.415667
1279 1 0.10433 0.263483 0.438516
302 1 0.747379 0.267767 0.426519
1732 1 0.81039 0.288208 0.402947
338 1 0.136092 0.294961 0.398821
1976 1 0.680719 0.292766 0.420413
1404 1 0.0255216 0.409394 0.395626
27 1 0.444529 0.389522 0.428598
1165 1 0.49676 0.38191 0.437552
1189 1 0.101845 0.434802 0.448267
1203 1 0.849724 0.442159 0.418119
756 1 0.806445 0.439196 0.403928
1753 1 0.860754 0.0125691 0.420119
292 1 0.0909711 0.0252099 0.447193
1689 1 0.181655 0.0597906 0.449293
128 1 0.711913 0.118135 0.446991
1916 1 0.674768 0.109647 0.407756
803 1 0.834813 0.141905 0.415223
1853 1 0.895042 0.156583 0.448735
643 1 0.0358701 0.195556 0.420332
146 1 0.693031 0.168052 0.416467
401 1 0.0628459 0.199132 0.423884
414 1 0.335749 0.198927 0.420963
639 1 0.0371165 0.272978 0.444642
502 1 0.289348 0.349798 0.458248
1907 1 0.0994801 0.389712 0.439294
981 1 0.0686337 0.415485 0.405044
917 1 0.663433 0.468461 0.447093
1084 1 0.418525 0.356809 0.49532
341 1 0.533574 0.0338698 0.440056
1411 1 0.788748 0.00158473 0.461653
1783 1 0.147257 0.0230664 0.422727
33 1 0.86136 0.0809469 0.443666
1286 1 0.627894 0.166337 0.478271
1221 1 0.65239 0.255664 0.450809
1130 1 0.394428 0.274992 0.441402
1651 1 0.499174 0.292625 0.450502
1233 1 0.0100392 0.341547 0.453452
1720 1 0.945419 0.32748 0.432308
582 1 0.187894 0.369754 0.462046
494 1 0.551054 0.358438 0.448098
832 1 0.554252 0.379637 0.479293
1145 1 0.535622 0.474499 0.453567
1776 1 0.28534 0.467664 0.468879
310 1 0.963172 0.479431 0.422433
1314 1 0.375748 0.0546405 0.489646
1370 1 0.567781 0.106652 0.487936
746 1 0.774423 0.0536732 0.461273
440 1 0.831124 0.496907 0.0949259
1604 1 0.377698 0.11421 0.480019
1154 1 0.59634 0.212863 0.492101
1146 1 0.869285 0.15288 0.49057
464 1 0.818671 0.235471 0.485891
209 1 0.115246 0.268583 0.45678
1071 1 0.213329 0.241344 0.472431
1621 1 0.394268 0.274262 0.485658
745 1 0.0474636 0.426004 0.468347
278 1 0.0483162 0.420693 0.48118
1052 1 0.396362 0.426116 0.472728
808 1 0.131074 0.458852 0.468514
1847 1 0.661678 0.474692 0.477153
1520 1 0.837855 0.00828726 0.0258176
594 1 0.296855 0.491244 0.373236
1161 1 0.895908 0.0709569 0.49103
75 1 0.446474 0.0608641 0.482866
1403 1 0.815577 0.120261 0.467543
325 1 0.788151 0.202072 0.484591
404 1 0.573102 0.225222 0.483272
1081 1 0.199558 0.220818 0.486099
692 1 0.532103 0.0129023 0.487769
262 1 0.361127 0.350217 0.494042
328 1 0.527625 0.000112712 0.164085
1617 1 0.416459 0.371375 0.499917
964 1 0.73022 0.490457 0.919875
1298 1 0.582904 0.119772 0.505028
1878 1 0.662422 0.217862 0.532066
1586 1 0.360781 0.242387 0.527513
105 1 0.360046 0.399291 0.971451
1172 1 0.175121 0.298798 0.528579
2016 1 0.946668 0.287683 0.520432
104 1 0.0272397 0.373187 0.50358
254 1 0.118563 0.305939 0.978201
490 1 0.668024 0.491538 0.892173
1257 1 0.631704 0.00561503 0.600715
915 1 0.850059 0.436971 0.505228
1631 1 0.0331321 0.475854 0.532385
575 1 0.926325 0.323784 0.967176
290 1 0.926366 0.0260043 0.506924
600 1 0.943787 0.0589805 0.556954
1673 1 0.42725 0.0999055 0.551706
514 1 0.92849 0.176703 0.559413
231 1 0.567807 0.177163 0.539362
1616 1 0.779609 0.247031 0.99329
982 1 0.94536 0.240394 0.544844
1372 1 0.362737 0.244867 0.515314
1951 1 0.330316 0.240496 0.502828
1043 1 0.297308 0.285617 0.549298
1606 1 0.818054 0.310277 0.510049
1401 1 0.322145 0.353889 0.563437
1587 1 0.379781 0.351226 0.547945
798 1 0.735564 0.371184 0.558735
364 1 0.23391 0.35497 0.988118
912 1 0.911083 0.41342 0.528173
1077 1 0.570843 0.431056 0.546676
893 1 0.74517 0.358495 0.994343
1061 1 0.235482 0.476236 0.514688
224 1 0.70234 0.489804 0.657611
1906 1 0.591297 0.0205021 0.521669
701 1 0.336567 0.0698936 0.557315
1855 1 0.861791 0.118101 0.527799
614 1 0.306217 0.162431 0.542651
1717 1 0.748358 0.204821 0.521999
1818 1 0.145494 0.220645 0.562947
2000 1 0.178517 0.249655 0.524903
2015 1 0.304345 0.246219 0.545028
540 1 0.768321 0.29176 0.545139
1152 1 0.158069 0.343558 0.536497
81 1 0.397735 0.289617 0.572238
1904 1 0.55479 0.334816 0.511544
1525 1 0.53969 0.335903 0.527806
1551 1 0.699783 0.393074 0.555091
1613 1 0.555618 0.420141 0.531842
417 1 0.0991278 0.468122 0.513423
1202 1 0.0510199 0.0243206 0.53533
366 1 0.0376196 0.0368147 0.567227
2017 1 0.0732399 0.0569219 0.550168
1706 1 0.824505 0.160723 0.563049
1535 1 0.255907 0.208488 0.572736
1755 1 0.530929 0.225417 0.584551
653 1 0.421539 0.294364 0.575499
227 1 0.932227 0.32638 0.575082
2039 1 0.506212 0.354608 0.552183
550 1 0.988578 0.3383 0.571861
1208 1 0.691837 0.348036 0.586687
1480 1 0.952118 0.36565 0.564494
820 1 0.383984 0.395062 0.581296
838 1 0.71379 0.408457 0.59001
1624 1 0.764431 0.44812 0.590617
47 1 0.529988 0.0297539 0.583357
992 1 0.628363 0.00398045 0.550857
673 1 0.213361 0.138302 0.568729
628 1 0.70472 0.21745 0.588985
1518 1 0.59158 0.247221 0.614243
1534 1 0.92149 0.177972 0.606142
507 1 0.347924 0.259008 0.607003
1903 1 0.920954 0.332538 0.574847
1102 1 0.540526 0.344859 0.573125
279 1 0.402616 0.367754 0.558643
769 1 0.497891 0.395542 0.59798
914 1 0.690168 0.403646 0.597909
327 1 0.703724 0.332351 0.596984
1671 1 0.609352 0.432315 0.545147
1644 1 0.0515875 0.414638 0.58415
74 1 0.131429 0.447974 0.595776
1053 1 0.785528 0.45265 0.585377
855 1 0.802033 0.473979 0.57552
150 1 0.478299 0.282629 0.957465
1070 1 0.258719 0.0488222 0.586913
1666 1 0.366955 0.0410076 0.594306
997 1 0.581057 0.107548 0.60207
1288 1 0.76229 0.0969283 0.596677
1069 1 0.88909 0.147456 0.576562
1080 1 0.17574 0.172268 0.566207
1088 1 0.746879 0.227535 0.604002
849 1 0.379677 0.253677 0.61484
1749 1 0.442625 0.289866 0.593689
1983 1 0.487916 0.419448 0.60506
1380 1 0.515471 0.419566 0.614737
19 1 0.971143 0.396059 0.606251
1140 1 0.373096 0.43797 0.623996
466 1 0.92478 0.0492755 0.609844
1123 1 0.689482 0.12292 0.605277
1825 1 0.580369 0.207048 0.604867
430 1 0.734565 0.268263 0.603556
852 1 0.767219 0.275712 0.632256
956 1 0.0535888 0.292997 0.629904
1331 1 0.273934 0.352688 0.62274
123 1 0.70694 0.400537 0.629262
1945 1 0.303223 0.373601 0.624088
1640 1 0.192185 0.472149 0.635764
635 1 0.465904 0.414875 0.628018
92 1 0.126056 0.455377 0.602623
1767 1 0.573781 0.0538133 0.6623
501 1 0.901564 0.0154419 0.63367
1420 1 0.415907 0.0126712 0.668376
1762 1 0.576077 0.176092 0.989918
1750 1 0.254758 0.039171 0.644545
216 1 0.436513 0.0683403 0.655556
1506 1 0.879715 0.0817324 0.653988
1852 1 0.270692 0.146711 0.656414
1338 1 0.652292 0.19998 0.649215
214 1 0.101819 0.153439 0.608926
1581 1 0.378079 0.203758 0.647352
1339 1 0.7801 0.18322 0.646603
515 1 0.378202 0.228676 0.632865
1092 1 0.250355 0.299848 0.648509
1775 1 0.603976 0.265233 0.629903
358 1 0.495648 0.333323 0.652866
1594 1 0.175614 0.349808 0.661074
263 1 0.472473 0.339236 0.643947
65 1 0.254312 0.393949 0.669057
156 1 0.30868 0.399352 0.653356
1933 1 0.526464 0.396593 0.669791
642 1 0.427944 0.0546573 0.684634
709 1 0.953779 0.0457077 0.658862
604 1 0.970388 0.126815 0.678254
1823 1 0.264434 0.100856 0.650408
1491 1 0.707045 0.149157 0.696113
114 1 0.652392 0.186452 0.633939
457 1 0.447694 0.214944 0.655916
1647 1 0.201559 0.250922 0.644819
1536 1 0.0186238 0.240358 0.675808
190 1 0.212095 0.299865 0.653066
12 1 0.996448 0.314341 0.671299
1807 1 0.126959 0.329445 0.688549
1993 1 0.573759 0.00950251 0.678146
34 1 0.830473 0.292889 0.513806
2013 1 0.0748056 0.417643 0.998313
1693 1 0.251249 0.0070153 0.672264
172 1 0.118464 0.0816917 0.683877
1306 1 0.50946 0.0868991 0.653466
35 1 0.888712 0.0829084 0.724269
1056 1 0.194222 0.144237 0.694165
1149 1 0.159962 0.175261 0.687466
54 1 0.479735 0.135298 0.661569
654 1 0.220617 0.214581 0.675425
753 1 0.368596 0.23785 0.650285
1476 1 0.425816 0.265709 0.662784
973 1 0.636365 0.230828 0.687643
1181 1 0.870398 0.255679 0.672697
2024 1 0.63418 0.294373 0.677809
1433 1 0.899713 0.321926 0.646837
268 1 0.832866 0.334329 0.683497
312 1 0.22985 0.369298 0.686365
492 1 0.549968 0.350456 0.684166
2006 1 0.525482 0.353204 0.677697
1815 1 0.617863 0.375288 0.682023
909 1 0.839381 0.452698 0.678263
1885 1 0.13844 0.494451 0.67735
677 1 0.435625 0.489559 0.679062
768 1 0.815394 0.0224688 0.718322
584 1 0.953208 0.458094 0.709656
601 1 0.855757 0.00840657 0.583911
151 1 0.99919 0.0682137 0.66708
504 1 0.32245 0.0950389 0.722688
951 1 0.835482 0.114627 0.675499
2034 1 0.880034 0.142751 0.714164
2001 1 0.0487009 0.16742 0.701314
499 1 0.367008 0.153039 0.71966
184 1 0.810976 0.234334 0.71248
1148 1 0.214927 0.276504 0.712659
1746 1 0.461988 0.315404 0.696468
1429 1 0.915972 0.409554 0.682088
624 1 0.98379 0.423538 0.69611
977 1 0.3379 0.476338 0.675508
1479 1 0.640236 0.0161934 0.711244
1499 1 0.887856 0.0293217 0.738289
1751 1 0.601023 0.0807438 0.721209
1921 1 0.863061 0.065942 0.709609
1573 1 0.927106 0.137713 0.715382
519 1 0.931493 0.0878086 0.729828
176 1 0.366776 0.168748 0.706132
528 1 0.0915105 0.173799 0.732698
935 1 0.210894 0.168959 0.718052
476 1 0.962404 0.193685 0.711454
1512 1 0.435659 0.18289 0.732706
942 1 0.825056 0.210895 0.708283
859 1 0.206856 0.235921 0.735462
1308 1 0.0924278 0.320491 0.711361
851 1 0.474362 0.260944 0.690713
1557 1 0.57559 0.255671 0.701945
173 1 0.58336 0.316909 0.715748
1697 1 0.707035 0.312614 0.706012
510 1 0.147383 0.306889 0.708263
1704 1 0.282205 0.362824 0.70359
1281 1 0.527311 0.34387 0.715477
1350 1 0.645207 0.357439 0.709172
1058 1 0.874512 0.361126 0.69266
629 1 0.225675 0.374501 0.723888
223 1 0.288193 0.424666 0.708298
1201 1 0.567429 0.419924 0.699942
1274 1 0.120712 0.489574 0.715407
1523 1 0.728271 0.464019 0.734703
1301 1 0.958402 0.236049 0.995682
25 1 0.62578 0.361808 0.520728
1554 1 0.545641 0.0202748 0.716959
669 1 0.377352 0.126192 0.695038
84 1 0.540546 0.0927469 0.734436
870 1 0.759088 0.120425 0.718048
1883 1 0.894704 0.140441 0.74434
119 1 0.394854 0.151017 0.726774
605 1 0.186525 0.173621 0.728956
281 1 0.720401 0.191303 0.716038
28 1 0.409832 0.201461 0.758901
1980 1 0.721767 0.283521 0.723199
755 1 0.0873705 0.284014 0.72541
926 1 0.611945 0.298229 0.727642
2012 1 0.669087 0.318169 0.719298
2014 1 0.755147 0.359508 0.688016
1230 1 0.825672 0.405882 0.715957
1833 1 0.476315 0.423901 0.733603
1122 1 0.638764 0.454632 0.695904
1011 1 0.0105888 0.467908 0.709236
64 1 0.371627 0.0227725 0.721749
732 1 0.465458 0.0292212 0.741081
161 1 0.0335532 0.0288754 0.741088
1789 1 0.122404 0.00134563 0.731007
1225 1 0.56821 0.0111627 0.735071
1021 1 0.70589 0.0403612 0.749972
259 1 0.225718 0.068121 0.73743
726 1 0.403525 0.085495 0.698182
1984 1 0.960937 0.0965519 0.777884
976 1 0.562587 0.148959 0.75474
1515 1 0.123995 0.126947 0.763026
1199 1 0.78391 0.192921 0.736129
1107 1 0.918309 0.150232 0.751358
924 1 0.0210087 0.198814 0.733112
1414 1 0.544561 0.298354 0.757622
1307 1 0.699678 0.39561 0.775137
1073 1 0.22017 0.352724 0.747519
1657 1 0.148428 0.448314 0.766276
1752 1 0.757266 0.463236 0.72148
89 1 0.257765 0.466827 0.753754
1917 1 0.0253227 0.0650755 0.791084
1784 1 0.903548 0.0592609 0.745661
704 1 0.933175 0.0903903 0.78152
16 1 0.532451 0.122552 0.812052
116 1 0.875308 0.149948 0.759673
90 1 0.417349 0.273538 0.778142
1542 1 0.696775 0.268347 0.767224
795 1 0.0107044 0.322811 0.792253
1266 1 0.257842 0.367517 0.741965
287 1 0.986714 0.370922 0.769302
958 1 0.410585 0.36173 0.773939
757 1 0.48553 0.391145 0.733472
2018 1 0.484017 0.445403 0.775336
1730 1 0.454232 0.472102 0.78686
708 1 0.978489 0.437493 0.779886
1575 1 0.259773 0.449071 0.738793
953 1 0.47154 0.482385 0.754471
2032 1 0.548911 0.49486 0.761757
1646 1 0.773763 0.0256588 0.764124
1002 1 0.697156 0.107536 0.835323
1258 1 0.448905 0.166316 0.758932
1128 1 0.349687 0.199302 0.810604
634 1 0.819275 0.147206 0.791081
171 1 0.654647 0.200045 0.832729
121 1 0.692293 0.262813 0.78102
155 1 0.731807 0.244822 0.747757
349 1 0.486782 0.303957 0.772123
1607 1 0.65509 0.259347 0.769143
738 1 0.702776 0.302763 0.760481
1418 1 0.0956467 0.318708 0.807341
1735 1 0.618232 0.329447 0.77381
1466 1 0.578773 0.331328 0.785252
323 1 0.78255 0.392389 0.798376
1010 1 0.230019 0.416554 0.798173
1063 1 0.719645 0.451747 0.794353
1234 1 0.0266218 0.479011 0.782115
1677 1 0.623146 0.0186364 0.78396
1840 1 0.476128 0.0452321 0.804379
60 1 0.853293 0.0513665 0.840812
1910 1 0.224371 0.0625416 0.78355
197 1 0.681846 0.0662863 0.780711
801 1 0.0253767 0.140331 0.786072
1940 1 0.936039 0.161005 0.821098
1990 1 0.0901564 0.155629 0.825127
1383 1 0.623499 0.181776 0.80875
427 1 0.155452 0.175164 0.831652
719 1 0.506171 0.221996 0.825354
609 1 0.844347 0.254039 0.825522
616 1 0.200973 0.271588 0.840842
1873 1 0.515736 0.27708 0.826648
1065 1 0.449643 0.330609 0.800042
1911 1 0.822163 0.354649 0.810706
1572 1 0.741021 0.399042 0.814608
1042 1 0.935095 0.417292 0.807649
163 1 0.225515 0.49389 0.832029
1662 1 0.422327 0.0391092 0.829485
868 1 0.562733 0.113183 0.83362
147 1 0.419438 0.0873146 0.839382
420 1 0.902449 0.140938 0.806468
1786 1 0.9776 0.124835 0.807663
1376 1 0.641739 0.151185 0.834989
283 1 0.473224 0.213871 0.818154
930 1 0.611941 0.193974 0.802047
467 1 0.565081 0.190882 0.82873
1834 1 0.302684 0.236014 0.817625
1670 1 0.388502 0.215936 0.790831
1467 1 0.817512 0.220431 0.804562
544 1 0.894139 0.253129 0.829902
944 1 0.403258 0.290467 0.824513
554 1 0.716611 0.317854 0.816924
758 1 0.764917 0.318099 0.800574
174 1 0.462037 0.332695 0.824159
1830 1 0.707793 0.346202 0.845976
697 1 0.236542 0.355708 0.820896
396 1 0.723787 0.367541 0.828894
1432 1 0.818797 0.367545 0.83255
516 1 0.249498 0.351706 0.821777
436 1 0.462607 0.398717 0.827302
1129 1 0.736799 0.402795 0.822567
694 1 0.737591 0.419854 0.84857
1654 1 0.421638 0.408569 0.806822
1455 1 0.408507 0.474341 0.998647
1048 1 0.60919 0.0404814 0.831799
424 1 0.0824512 0.116381 0.50428
1734 1 0.639429 0.129678 0.860579
398 1 0.769926 0.207314 0.837901
1590 1 0.563026 0.238905 0.826748
1842 1 0.962418 0.202261 0.850016
454 1 0.112033 0.246306 0.849655
218 1 0.78364 0.276583 0.830324
228 1 0.720818 0.248892 0.86624
848 1 0.103493 0.32994 0.852708
1390 1 0.435039 0.309685 0.856691
1428 1 0.560592 0.310304 0.871543
725 1 0.408964 0.322707 0.816441
1893 1 0.569776 0.344629 0.830698
1915 1 0.255094 0.380565 0.842652
177 1 0.0827558 0.445849 0.836606
2037 1 0.518626 0.056939 0.850461
1922 1 0.529236 0.0669921 0.864972
256 1 0.256333 0.184402 0.836737
2030 1 0.49532 0.181231 0.874822
1076 1 0.118264 0.244173 0.872561
180 1 0.748782 0.269981 0.841088
965 1 0.0970003 0.342328 0.857906
141 1 0.138094 0.301279 0.889963
1641 1 0.270479 0.379806 0.862927
277 1 0.912589 0.405926 0.859259
1632 1 0.0819379 0.480524 0.837595
925 1 0.143448 0.436917 0.883739
1850 1 0.582647 0.459206 0.842853
491 1 0.214367 0.481353 0.828525
1544 1 0.876591 0.0804047 0.840635
1239 1 0.514443 0.112801 0.912219
1358 1 0.637773 0.12599 0.879667
1601 1 0.537313 0.13349 0.904618
1691 1 0.815412 0.134084 0.870753
240 1 0.110993 0.150374 0.898083
894 1 0.745146 0.234784 0.868689
1277 1 0.370295 0.38213 0.846568
1200 1 0.657084 0.329189 0.910287
1618 1 0.149337 0.372203 0.906745
603 1 0.505356 0.355785 0.85964
368 1 0.854073 0.416471 0.863671
841 1 0.00596333 0.462861 0.871238
1585 1 0.126241 0.460774 0.900652
754 1 0.655823 0.434461 0.882438
1340 1 0.910424 0.464007 0.882313
1119 1 0.30782 0.016849 0.901249
830 1 0.560605 0.0321367 0.889887
1564 1 0.962354 0.090104 0.910182
1066 1 0.230279 0.158061 0.917389
657 1 0.432765 0.139332 0.891098
31 1 0.653381 0.139483 0.917126
1913 1 0.517323 0.199074 0.9274
557 1 0.296008 0.18222 0.903419
1341 1 0.850483 0.240842 0.89467
619 1 0.381549 0.227464 0.913496
686 1 0.0444319 0.272356 0.892561
736 1 0.0656131 0.317391 0.931622
1679 1 0.451045 0.333355 0.896207
631 1 0.605448 0.328762 0.921375
1153 1 0.308542 0.377982 0.898971
1696 1 0.548604 0.407692 0.868092
205 1 0.888455 0.346536 0.875486
1633 1 0.131294 0.447776 0.88866
827 1 0.686399 0.411935 0.905842
382 1 0.441622 0.479333 0.908247
1367 1 0.828512 0.449061 0.911861
828 1 0.149459 0.445992 0.888736
1109 1 0.898973 0.456284 0.894299
885 1 0.0531736 0.479556 0.900577
165 1 0.826347 0.469963 0.880444
1954 1 0.851141 0.0567104 0.918904
878 1 0.37168 0.0199746 0.90354
40 1 0.505149 0.0145775 0.921553
447 1 0.12487 0.0631104 0.910068
1589 1 0.8165 0.0730937 0.937543
1819 1 0.237333 0.0789417 0.933444
1190 1 0.257101 0.128793 0.894669
969 1 0.782301 0.12571 0.926615
1405 1 0.448782 0.163836 0.920903
1758 1 0.751478 0.173921 0.899094
761 1 0.45764 0.160419 0.936874
1110 1 0.232525 0.278624 0.911473
351 1 0.27317 0.212949 0.911974
945 1 0.212054 0.274777 0.868871
1224 1 0.353148 0.225853 0.927543
2046 1 0.673274 0.286801 0.902576
990 1 0.530034 0.331385 0.909595
1067 1 0.698749 0.327214 0.931469
1652 1 0.838312 0.336298 0.930835
473 1 0.0142967 0.374174 0.953782
617 1 0.530004 0.400896 0.92614
62 1 0.995101 0.41509 0.899015
764 1 0.52524 0.484791 0.914263
580 1 0.142596 0.483315 0.918242
1368 1 0.964532 0.0509231 0.945436
1143 1 0.55049 0.090546 0.928434
1521 1 0.269385 0.21782 0.900599
273 1 0.890006 0.200257 0.916805
1969 1 0.690316 0.264818 0.915082
1724 1 0.830521 0.275142 0.96563
1214 1 0.296032 0.313785 0.997161
1778 1 0.261544 0.316534 0.979813
1567 1 0.316723 0.32492 0.927947
1207 1 0.838294 0.279929 0.927964
1862 1 0.940988 0.287543 0.952817
1790 1 0.467381 0.352377 0.950256
1040 1 0.360846 0.413677 0.945823
1438 1 0.0178233 0.435983 0.941539
49 1 0.89338 0.467895 0.91822
2028 1 0.420341 0.487402 0.986902
1824 1 0.090772 0.00794693 0.941364
1363 1 0.496242 0.485188 0.606259
1265 1 0.573685 0.0633404 0.969533
1442 1 0.780976 0.0251285 0.92705
1634 1 0.811822 0.215244 0.966065
1743 1 0.986956 0.225293 0.939639
1538 1 0.30146 0.216091 0.936961
1867 1 0.0612599 0.292044 0.913597
949 1 0.73873 0.28629 0.950819
637 1 0.091324 0.344129 0.960692
1337 1 0.486837 0.365542 0.947138
1169 1 0.776399 0.376642 0.995756
659 1 0.794111 0.407926 0.954529
238 1 0.543957 0.411704 0.946952
586 1 0.754326 0.491896 0.940742
433 1 0.530477 0.0591302 0.961219
1787 1 0.00311161 0.092879 0.967867
1579 1 0.715289 0.0997793 0.991203
1468 1 0.887285 0.158467 0.970459
907 1 0.458462 0.174668 0.966745
168 1 0.658148 0.222292 0.980936
1555 1 0.553974 0.242155 0.960518
570 1 0.616841 0.206835 0.974297
993 1 0.719339 0.235958 0.997158
405 1 0.438701 0.320255 0.956041
871 1 0.587388 0.323192 0.952647
275 1 0.102706 0.35313 0.965992
1588 1 0.650676 0.364959 0.988127
1493 1 0.151463 0.409802 0.988267
777 1 0.417672 0.427313 0.99176
1118 1 0.562245 0.0272527 0.996307
1796 1 0.469513 0.2371 0.990032
959 1 0.479172 0.485157 0.705694
715 1 0.265043 0.0149926 0.988845
303 1 0.637922 0.165995 0.949713
80 1 0.803536 0.00193309 0.538638
380 1 0.0437351 0.176635 0.95489
1293 1 0.767973 0.228501 0.96872
101 1 0.253543 0.482771 0.927391
940 1 0.670085 0.492279 0.746663
1895 1 0.0289263 0.490292 0.574892
1832 1 0.296664 0.259179 0.981814
1197 1 0.415263 0.482772 0.780393
1667 1 0.402331 0.486163 0.983848
1593 1 0.887223 0.2229 0.994988
1461 1 0.968114 0.487763 0.547262
1014 1 0.938312 0.00881305 0.502805
2047 1 0.0828692 0.0386461 0.998983
772 1 0.0752792 0.513877 0.0213971
418 1 0.9482 0.560886 0.0108098
1500 1 0.778773 0.575518 0.0269932
1329 1 0.0252943 0.970977 0.491872
703 1 0.407684 0.678702 0.0351445
656 1 0.850662 0.66747 0.0025251
330 1 0.597007 0.764218 0.019734
1848 1 0.732521 0.771296 0.0279334
1311 1 0.188304 0.786607 0.0384927
1101 1 0.201169 0.793088 0.0154974
70 1 0.394278 0.502665 0.319637
1463 1 0.886782 0.924579 0.0253018
1364 1 0.581935 0.995339 0.367174
63 1 0.0306285 0.611842 0.495031
1327 1 0.0431763 0.970367 0.0199548
555 1 0.10893 0.889916 0.474937
410 1 0.706568 0.562273 0.0553027
215 1 0.366561 0.629249 0.0512999
50 1 0.091208 0.645724 0.00715384
1569 1 0.238722 0.606676 0.0427671
1168 1 0.11318 0.663815 0.031433
1034 1 0.222403 0.668822 0.0236541
409 1 0.0933844 0.709901 0.0314872
889 1 0.159506 0.729444 0.0219521
1446 1 0.0141348 0.774625 0.0314806
456 1 0.20064 0.782626 0.0275959
1808 1 0.7079 0.796242 0.0227626
412 1 0.885918 0.815874 0.0161959
698 1 0.427656 0.811248 0.0450677
1702 1 0.479833 0.502075 0.354001
282 1 0.00295493 0.870772 0.0187739
2 1 0.0781953 0.941666 0.0532013
1026 1 0.614185 0.943249 0.0321607
1615 1 0.366171 0.602066 0.0148741
1968 1 0.1781 0.941905 0.0397781
1719 1 0.341653 0.997525 0.0344691
1501 1 0.114637 0.90053 0.487596
865 1 0.987001 0.51093 0.0746526
1349 1 0.682389 0.575119 0.0331692
762 1 0.415449 0.641835 0.0724541
1402 1 0.880605 0.692877 0.058749
1639 1 0.830849 0.759832 0.0502466
895 1 0.0562982 0.77687 0.0326674
3 1 0.594551 0.763938 0.0359731
1988 1 0.434842 0.892068 0.0439938
646 1 0.513754 0.88846 0.0481102
267 1 0.598568 0.937289 0.0705873
1378 1 0.797383 0.962246 0.0348173
1582 1 0.756651 0.506858 0.0825716
592 1 0.99039 0.505096 0.0637143
1112 1 0.689929 0.566592 0.047548
543 1 0.794861 0.579946 0.0776518
347 1 0.615974 0.632435 0.076353
1958 1 0.29424 0.682127 0.0557248
129 1 0.30047 0.710793 0.0558506
8 1 0.0340366 0.698086 0.0740978
1595 1 0.647636 0.774523 0.0724869
563 1 0.172109 0.871645 0.0645296
1196 1 0.585118 0.948587 0.0957947
2026 1 0.760118 0.933615 0.0685736
1737 1 0.732427 0.982842 0.0431901
1030 1 0.384622 0.522624 0.497023
939 1 0.80438 0.992063 0.0737539
269 1 0.579628 0.959209 0.0774752
422 1 0.793301 0.950957 0.0909549
1489 1 0.0334066 0.505475 0.378543
468 1 0.939822 0.506172 0.396996
359 1 0.806933 0.569918 0.494103
1004 1 0.320011 0.527013 0.0917723
118 1 0.643056 0.506708 0.0686414
921 1 0.858328 0.529367 0.0813995
497 1 0.538702 0.634225 0.0877807
1714 1 0.656758 0.626498 0.0606294
1410 1 0.858211 0.652718 0.0714503
301 1 0.076282 0.646947 0.0979122
1985 1 0.366826 0.690067 0.0820531
432 1 0.329703 0.729981 0.0874351
699 1 0.0839002 0.767128 0.061692
533 1 0.638099 0.797118 0.100089
1484 1 0.770349 0.873975 0.0500014
1936 1 0.0169081 0.959294 0.0992516
625 1 0.213597 0.978314 0.0848643
46 1 0.743868 0.978016 0.0963044
752 1 0.0195494 0.51933 0.110037
668 1 0.595578 0.565313 0.102012
56 1 0.758623 0.652706 0.0835331
207 1 0.442954 0.649439 0.12373
52 1 0.596874 0.668629 0.100291
933 1 0.0459791 0.729496 0.103722
1167 1 0.101591 0.691053 0.0862224
1211 1 0.647699 0.737041 0.12643
946 1 0.601087 0.76408 0.115567
450 1 0.77595 0.758898 0.0677331
1490 1 0.349835 0.790559 0.0942614
316 1 0.685424 0.797351 0.10166
797 1 0.0673445 0.812241 0.122108
874 1 0.244933 0.794305 0.121407
588 1 0.308486 0.903887 0.136602
1296 1 0.00404012 0.958173 0.109192
1068 1 0.173016 0.946668 0.0858699
2044 1 0.201218 0.508017 0.154244
630 1 0.471688 0.524791 0.127923
1386 1 0.407683 0.590835 0.0942658
308 1 0.589842 0.593934 0.0920629
1648 1 0.147723 0.632412 0.131771
331 1 0.693137 0.616964 0.102011
1079 1 0.527729 0.616627 0.146813
564 1 0.854686 0.623791 0.125938
511 1 0.841374 0.640494 0.103564
737 1 0.133378 0.695818 0.128122
1044 1 0.768294 0.673659 0.132326
1813 1 0.369249 0.698339 0.113288
999 1 0.556231 0.733646 0.120268
213 1 0.133406 0.76893 0.102568
1231 1 0.876485 0.757766 0.116458
711 1 0.942092 0.782141 0.104866
429 1 0.525828 0.777451 0.0985845
353 1 0.475168 0.83111 0.137843
386 1 0.275436 0.840303 0.0943638
1097 1 0.074306 0.891124 0.133089
1637 1 0.526482 0.892657 0.13467
315 1 0.540909 0.89833 0.126568
97 1 0.706722 0.913907 0.129243
311 1 0.127024 0.957141 0.12347
381 1 0.185142 0.907534 0.10544
1843 1 0.407525 0.96032 0.118049
1756 1 0.536379 0.9999 0.116047
1864 1 0.531912 0.526832 0.158754
748 1 0.721864 0.516644 0.122921
683 1 0.0372862 0.551002 0.145791
260 1 0.0717432 0.566214 0.166022
565 1 0.794848 0.616568 0.140474
800 1 0.921648 0.660863 0.13604
442 1 0.494371 0.642687 0.126232
943 1 0.279395 0.708777 0.145523
1272 1 0.389719 0.734016 0.148931
1773 1 0.180098 0.704032 0.119743
904 1 0.388638 0.738482 0.094124
1055 1 0.557653 0.796635 0.150472
1085 1 0.380009 0.85742 0.162226
1449 1 0.895053 0.865303 0.155099
252 1 0.522617 0.868795 0.153203
1793 1 0.696345 0.874985 0.1334
1292 1 0.98927 0.936317 0.119755
1037 1 0.979689 0.934709 0.127822
157 1 0.5547 0.952233 0.123369
1871 1 0.247117 0.930328 0.492339
751 1 0.367816 0.554908 0.173915
1799 1 0.81465 0.530134 0.186108
1430 1 0.816381 0.610269 0.154527
250 1 0.012976 0.602598 0.158499
1462 1 0.493124 0.662614 0.142766
434 1 0.941376 0.648515 0.160925
485 1 0.562622 0.63033 0.132239
613 1 0.270379 0.660174 0.147304
938 1 0.229327 0.70391 0.190304
248 1 0.333208 0.76646 0.141207
1591 1 0.324007 0.788384 0.159963
802 1 0.553381 0.775421 0.134991
1669 1 0.846714 0.785677 0.161284
667 1 0.861137 0.876556 0.16698
1526 1 0.446672 0.879921 0.17723
478 1 0.542302 0.889885 0.134843
158 1 0.396303 0.975234 0.136896
1558 1 0.59649 0.501821 0.139293
1267 1 0.804872 0.515046 0.198776
1707 1 0.206342 0.515362 0.17233
1347 1 0.631787 0.502913 0.170736
2043 1 0.378073 0.582403 0.130414
1142 1 0.838855 0.618057 0.174955
495 1 0.63375 0.617677 0.140511
689 1 0.0556632 0.676357 0.175717
111 1 0.591222 0.65943 0.167687
1700 1 0.639675 0.711095 0.202926
763 1 0.387465 0.728316 0.15903
365 1 0.611414 0.750547 0.178126
1543 1 0.763998 0.753979 0.187866
1805 1 0.443996 0.806456 0.141742
1399 1 0.632922 0.806661 0.164219
632 1 0.614293 0.79533 0.171955
1082 1 0.607533 0.90541 0.165566
626 1 0.029972 0.911914 0.163024
393 1 0.26123 0.916213 0.138657
1635 1 0.860255 0.934917 0.15802
1416 1 0.159977 0.946512 0.192283
1982 1 0.431537 0.930066 0.151649
879 1 0.885893 0.957362 0.157014
731 1 0.591332 0.942175 0.167224
1981 1 0.960162 0.556968 0.174925
1549 1 0.876844 0.530442 0.194799
1701 1 0.841528 0.58395 0.210042
979 1 0.0286132 0.566513 0.202916
733 1 0.152932 0.58634 0.191663
2033 1 0.485814 0.583072 0.193126
179 1 0.778506 0.632606 0.205968
71 1 0.91297 0.617404 0.189143
1919 1 0.0194329 0.640323 0.218875
251 1 0.354315 0.663838 0.200006
26 1 0.440882 0.66744 0.196555
1699 1 0.360538 0.656394 0.209728
1863 1 0.597088 0.703329 0.199012
9 1 0.608036 0.726141 0.192642
621 1 0.160734 0.773776 0.200442
1175 1 0.512676 0.836563 0.22707
93 1 0.25463 0.897618 0.200687
739 1 0.945369 0.850383 0.207381
1091 1 0.115894 0.913492 0.189099
1360 1 0.603433 0.96503 0.199578
961 1 0.114197 0.977598 0.143911
1060 1 0.613543 0.541854 0.169852
1330 1 0.345126 0.534757 0.232047
183 1 0.487353 0.545695 0.219677
1889 1 0.912661 0.569496 0.197617
1182 1 0.876333 0.577391 0.183523
1039 1 0.59549 0.611632 0.225036
455 1 0.943308 0.586942 0.182409
1874 1 0.537021 0.609845 0.223327
1127 1 0.0643101 0.679809 0.205193
1745 1 0.462371 0.65783 0.216474
970 1 0.13145 0.768202 0.215648
1829 1 0.407413 0.759176 0.215547
376 1 0.377099 0.880192 0.234532
1764 1 0.582298 0.87439 0.227104
1000 1 0.333042 0.898222 0.1916
211 1 0.274925 0.914093 0.225422
688 1 0.37726 0.930244 0.187713
1774 1 0.0339908 0.921733 0.212665
95 1 0.0442283 0.953336 0.199408
1977 1 0.593847 0.981329 0.199693
233 1 0.602495 0.536658 0.224425
246 1 0.582932 0.568833 0.212354
1779 1 0.702369 0.5581 0.229166
1733 1 0.72116 0.654401 0.265882
289 1 0.852463 0.644323 0.198065
590 1 0.934962 0.72806 0.235418
1087 1 0.173613 0.818152 0.229138
1255 1 0.481213 0.792085 0.248173
1389 1 0.387697 0.849043 0.252193
1178 1 0.538192 0.860278 0.245938
674 1 0.755426 0.86226 0.23297
1113 1 0.351697 0.933323 0.221538
199 1 0.104746 0.936963 0.254412
488 1 0.997769 0.965337 0.225079
1422 1 0.837681 0.521233 0.253372
2027 1 0.0270904 0.543516 0.256715
1800 1 0.389453 0.537592 0.267951
48 1 0.375614 0.578577 0.26357
821 1 0.80366 0.556825 0.235088
475 1 0.639165 0.586783 0.243947
1900 1 0.115161 0.611992 0.236874
1033 1 0.484912 0.612945 0.24472
2042 1 0.382696 0.672265 0.27777
1642 1 0.214167 0.674127 0.250739
1336 1 0.24002 0.798414 0.26528
562 1 0.922777 0.813588 0.252158
1276 1 0.310953 0.950506 0.259724
812 1 0.491837 0.911417 0.238922
388 1 0.982225 0.888504 0.27475
1678 1 0.116654 0.982469 0.261208
556 1 0.450477 0.981572 0.268677
644 1 0.234077 0.997217 0.261863
500 1 0.759468 0.516625 0.265441
1570 1 0.790974 0.503105 0.330708
1027 1 0.944822 0.595445 0.259565
742 1 0.343355 0.673594 0.254034
304 1 0.859304 0.687634 0.256664
1185 1 0.0275497 0.672995 0.262791
1812 1 0.557596 0.657724 0.285433
1681 1 0.91332 0.655985 0.247753
131 1 0.19379 0.719201 0.287791
152 1 0.948487 0.739032 0.276334
1891 1 0.107267 0.753436 0.259004
545 1 0.441791 0.771728 0.287289
816 1 0.703456 0.759267 0.277592
237 1 0.672084 0.805251 0.272341
1075 1 0.506628 0.809379 0.245895
587 1 0.576228 0.857403 0.24403
962 1 0.126101 0.890495 0.251177
1529 1 0.171993 0.864006 0.260951
1431 1 0.247998 0.826614 0.281258
166 1 0.229212 0.906575 0.281239
1220 1 0.47443 0.912674 0.28005
222 1 0.901233 0.985923 0.272913
579 1 0.396435 0.98088 0.27566
374 1 0.462537 0.986496 0.297372
244 1 0.739075 0.996235 0.256848
397 1 0.0264958 0.583687 0.297776
1469 1 0.00313119 0.537366 0.269728
1287 1 0.0273782 0.588607 0.281802
348 1 0.833589 0.529029 0.262452
717 1 0.762744 0.60435 0.299694
886 1 0.867871 0.603872 0.260729
1454 1 0.956276 0.662618 0.264022
191 1 0.383384 0.735564 0.274446
724 1 0.112204 0.809731 0.292139
113 1 0.0978364 0.885337 0.295263
1901 1 0.267665 0.961152 0.273796
671 1 0.0399453 0.958927 0.274406
1059 1 0.462833 0.945765 0.293955
1600 1 0.921363 0.506029 0.322588
665 1 0.923961 0.512723 0.28983
1395 1 0.290661 0.547984 0.320218
1781 1 0.384856 0.555877 0.296086
167 1 0.262119 0.607152 0.267136
509 1 0.56177 0.580855 0.286716
890 1 0.65321 0.674967 0.324931
332 1 0.0675678 0.700986 0.288893
1147 1 0.0442338 0.719175 0.280259
322 1 0.340053 0.738134 0.314197
459 1 0.625642 0.759689 0.272544
783 1 0.622471 0.801308 0.299863
1788 1 0.071 0.852977 0.324255
1441 1 0.263199 0.816607 0.310343
1192 1 0.311982 0.8569 0.348847
1559 1 0.710701 0.87255 0.34784
1766 1 0.893742 0.888089 0.30197
1495 1 0.019519 0.872633 0.300227
1602 1 0.353208 0.874433 0.338252
792 1 0.00362256 0.903666 0.307591
294 1 0.401655 0.906993 0.289618
567 1 0.66226 0.906971 0.319498
1373 1 0.933946 0.921279 0.289546
1804 1 0.607577 0.964622 0.311286
1866 1 0.66442 0.959055 0.294702
305 1 0.861383 0.965689 0.297566
589 1 0.427226 0.998441 0.337795
776 1 0.889661 0.775262 0.489131
1232 1 0.926949 0.556492 0.322414
234 1 0.717168 0.55202 0.348158
1261 1 0.565623 0.580797 0.354667
1920 1 0.393715 0.642852 0.326937
1194 1 0.565125 0.705077 0.324743
1106 1 0.869628 0.708573 0.298586
718 1 0.0308166 0.742943 0.318214
636 1 0.67067 0.801587 0.325424
1473 1 0.998926 0.852987 0.291648
1851 1 0.188717 0.869167 0.329257
1275 1 0.0205024 0.892945 0.336084
824 1 0.26205 0.878074 0.323588
1472 1 0.772917 0.949855 0.329781
1757 1 0.44625 0.514686 0.350387
578 1 0.130682 0.548709 0.346631
210 1 0.0886434 0.589057 0.329185
793 1 0.931304 0.579122 0.330254
243 1 0.517347 0.620522 0.334883
1748 1 0.941797 0.623332 0.346981
72 1 0.890841 0.714602 0.307172
957 1 0.242452 0.757625 0.359474
2020 1 0.512413 0.776934 0.360839
1440 1 0.878534 0.749363 0.33744
1939 1 0.153372 0.767284 0.347403
1494 1 0.442009 0.803712 0.320499
2029 1 0.730489 0.813521 0.335667
1174 1 0.716692 0.842041 0.336362
741 1 0.65997 0.876649 0.354083
42 1 0.541623 0.871231 0.324006
1876 1 0.698501 0.865376 0.335706
1675 1 0.517735 0.894975 0.349655
1912 1 0.105163 0.982032 0.35252
143 1 0.310969 0.986963 0.310107
872 1 0.890263 0.969435 0.341966
1899 1 0.426078 0.513441 0.36508
1649 1 0.393475 0.506704 0.352441
1357 1 0.97713 0.5117 0.329203
880 1 0.710372 0.516817 0.35554
1171 1 0.766008 0.546719 0.389195
620 1 0.00569007 0.577866 0.360422
86 1 0.551531 0.54968 0.331056
198 1 0.629236 0.561507 0.380253
1356 1 0.295695 0.617255 0.383963
489 1 0.386845 0.613291 0.339395
577 1 0.172388 0.651284 0.366571
1665 1 0.321877 0.673139 0.350343
169 1 0.63626 0.688886 0.33485
1794 1 0.269563 0.751949 0.366637
1999 1 0.675925 0.8111 0.374164
919 1 0.700806 0.834945 0.367638
1740 1 0.163868 0.895178 0.373261
712 1 0.585978 0.953936 0.393137
1124 1 0.075192 0.568333 0.341141
936 1 0.0723366 0.621548 0.400025
1335 1 0.751837 0.642955 0.342227
1210 1 0.344465 0.643021 0.411845
695 1 0.787102 0.684679 0.395864
1282 1 0.469446 0.725387 0.378123
805 1 0.272675 0.763026 0.365784
127 1 0.490647 0.776808 0.353744
1546 1 0.495102 0.776564 0.402562
1708 1 0.925671 0.789559 0.382568
867 1 0.873518 0.805573 0.418606
1348 1 0.771032 0.817814 0.398149
446 1 0.59102 0.880651 0.370757
1950 1 0.121122 0.985982 0.36202
1868 1 0.629184 0.96547 0.370251
333 1 0.64292 0.987976 0.370188
449 1 0.409551 0.899242 0.0106176
1117 1 0.074601 0.517165 0.351491
1371 1 0.544813 0.508126 0.374009
206 1 0.340518 0.568781 0.406082
5 1 0.415928 0.574851 0.382357
1519 1 0.280933 0.635631 0.373255
1062 1 0.134312 0.725137 0.425183
1423 1 0.267904 0.72678 0.373587
148 1 0.232157 0.764859 0.391319
1366 1 0.991255 0.753384 0.405978
1064 1 0.2416 0.804488 0.374524
242 1 0.182142 0.762006 0.42247
1280 1 0.219943 0.793531 0.407963
995 1 0.31845 0.852142 0.41535
363 1 0.640596 0.806107 0.406485
1394 1 0.843091 0.84045 0.39645
1761 1 0.676072 0.909016 0.381985
470 1 0.571287 0.921838 0.402115
1791 1 0.960804 0.949208 0.436181
1974 1 0.299602 0.927253 0.387206
2005 1 0.282742 0.507483 0.469948
1596 1 0.272688 0.50961 0.413476
61 1 0.295156 0.558022 0.426232
1159 1 0.462822 0.567574 0.408006
98 1 0.190268 0.636519 0.418516
280 1 0.354483 0.659474 0.401011
1872 1 0.110258 0.653911 0.431436
913 1 0.607865 0.700933 0.411021
1545 1 0.582734 0.704702 0.422142
1577 1 0.374795 0.740315 0.433814
448 1 0.837832 0.700156 0.401699
1844 1 0.0285435 0.75975 0.421181
1998 1 0.786442 0.782653 0.436321
1009 1 0.851465 0.783812 0.451929
120 1 0.115063 0.815079 0.381816
1890 1 0.393726 0.879297 0.415373
461 1 0.400514 0.804553 0.41563
1183 1 0.897604 0.828408 0.419488
1537 1 0.184043 0.837243 0.427916
181 1 0.542121 0.875203 0.445814
87 1 0.0576575 0.912223 0.401137
713 1 0.363868 0.903447 0.418219
790 1 0.152434 0.521388 0.429405
300 1 0.0931834 0.604463 0.458885
897 1 0.630725 0.556485 0.401231
1195 1 0.301868 0.672497 0.386759
407 1 0.619869 0.688557 0.421293
1111 1 0.96627 0.741229 0.451946
354 1 0.0974675 0.730177 0.429838
1471 1 0.143777 0.755564 0.429595
1134 1 0.290495 0.75627 0.430418
834 1 0.87538 0.785194 0.451452
806 1 0.257368 0.797647 0.445055
17 1 0.327063 0.770651 0.442247
833 1 0.872857 0.845194 0.423676
1875 1 0.514541 0.863937 0.440614
922 1 0.514776 0.842944 0.413831
1938 1 0.893053 0.870051 0.456174
394 1 0.557724 0.854635 0.42868
537 1 0.691701 0.858388 0.448446
1253 1 0.763106 0.952905 0.4456
239 1 0.369746 0.975972 0.422988
1897 1 0.0814029 0.531267 0.464501
1511 1 0.916149 0.513274 0.448908
1271 1 0.01336 0.589074 0.439569
963 1 0.063222 0.644938 0.465502
513 1 0.965082 0.643152 0.450403
1426 1 0.499288 0.651194 0.472679
972 1 0.978996 0.700562 0.422495
1297 1 0.350045 0.746983 0.477678
1351 1 0.00527385 0.742038 0.472584
1425 1 0.257877 0.779303 0.429799
1450 1 0.383729 0.857609 0.417271
195 1 0.260793 0.906949 0.461202
342 1 0.00603183 0.931402 0.445814
483 1 0.134869 0.901524 0.459861
1312 1 0.52485 0.919464 0.415769
2035 1 0.61224 0.945077 0.461797
1739 1 0.146036 0.992652 0.454279
1047 1 0.604285 0.506079 0.035413
1934 1 0.611288 0.514921 0.450531
1715 1 0.811385 0.576865 0.458779
640 1 0.0279007 0.603593 0.461057
576 1 0.745893 0.560308 0.452344
1391 1 0.944821 0.63053 0.448374
1859 1 0.020558 0.610934 0.464451
1474 1 0.650565 0.628842 0.440896
1989 1 0.108161 0.669899 0.453861
1627 1 0.154393 0.668032 0.46924
1814 1 0.218869 0.64472 0.470085
1012 1 0.684822 0.661489 0.473912
522 1 0.107162 0.681225 0.468996
1477 1 0.535899 0.674242 0.480478
1656 1 0.903433 0.728669 0.466167
1550 1 0.986173 0.73983 0.444041
1687 1 0.473728 0.791272 0.437769
1396 1 0.86021 0.747772 0.456472
175 1 0.0703021 0.846168 0.440136
789 1 0.905705 0.867047 0.480604
1597 1 0.501486 0.871116 0.474395
1664 1 0.549341 0.868445 0.48515
1583 1 0.288333 0.90582 0.490834
1636 1 0.0446155 0.502262 0.220835
395 1 0.387808 0.963727 0.436439
1057 1 0.396454 0.932048 0.490374
905 1 0.805433 0.896615 0.458902
1007 1 0.868536 0.956304 0.488509
1888 1 0.292827 0.976001 0.466979
1325 1 0.543943 0.98692 0.474783
1503 1 0.0557255 0.503139 0.492917
1738 1 0.365454 0.516631 0.469942
384 1 0.998289 0.995158 0.0537933
134 1 0.220524 0.594482 0.453501
1315 1 0.269911 0.69623 0.480459
985 1 0.395618 0.563643 0.0132871
266 1 0.816683 0.779179 0.479463
355 1 0.661254 0.765134 0.473253
203 1 0.109152 0.799931 0.491917
1295 1 0.484874 0.917712 0.486967
1464 1 0.306195 0.828369 0.495682
1407 1 0.755258 0.827836 0.497953
193 1 0.0469059 0.748542 0.48453
37 1 0.733875 0.856254 0.486295
1017 1 0.998018 0.509852 0.220657
1541 1 0.419523 0.975838 0.487786
1831 1 0.740247 0.746922 0.00757658
1003 1 0.33139 0.500704 0.0740307
546 1 0.504341 0.513925 0.928028
2038 1 0.482206 0.536058 0.512858
1006 1 0.921356 0.524998 0.51924
680 1 0.182121 0.993975 0.941974
297 1 0.741078 0.658172 0.546599
1942 1 0.231739 0.688534 0.5163
1177 1 0.0639559 0.71327 0.505248
153 1 0.904242 0.520083 0.730631
539 1 0.795861 0.514854 0.518631
551 1 0.123604 0.803074 0.502381
770 1 0.544499 0.982998 0.635264
1709 1 0.632929 0.86254 0.515966
1861 1 0.0149029 0.867733 0.540317
670 1 0.179397 0.933993 0.509646
43 1 0.746916 0.962818 0.530678
79 1 0.71328 0.947086 0.989243
306 1 0.00434076 0.527333 0.542131
1932 1 0.751099 0.595854 0.53842
1970 1 0.650215 0.995283 0.570661
1964 1 0.712594 0.646142 0.535791
1413 1 0.509099 0.67946 0.540032
36 1 0.800314 0.636182 0.532227
1780 1 0.861862 0.665434 0.538955
526 1 0.863114 0.663153 0.55237
1619 1 0.512915 0.75673 0.526163
568 1 0.486275 0.797979 0.560221
1726 1 0.395803 0.887284 0.538717
991 1 0.717431 0.918227 0.547062
975 1 0.513614 0.936312 0.508913
2031 1 0.277994 0.967287 0.539605
321 1 0.327061 0.919454 0.537089
1514 1 0.150659 0.592559 0.503521
1747 1 0.921814 0.536202 0.57574
1093 1 0.493464 0.519757 0.564595
663 1 0.00102706 0.548126 0.561891
1839 1 0.50239 0.546742 0.516212
1944 1 0.012681 0.632443 0.571605
2048 1 0.252548 0.624099 0.582823
525 1 0.897563 0.625147 0.54482
264 1 0.381704 0.626962 0.536088
1326 1 0.376768 0.668892 0.5303
1179 1 0.898284 0.671937 0.55259
15 1 0.78429 0.722172 0.537231
1865 1 0.38556 0.772775 0.541692
854 1 0.95638 0.774848 0.574535
1098 1 0.250384 0.808586 0.518754
159 1 0.459623 0.796638 0.56621
597 1 0.318539 0.839196 0.549899
1655 1 0.651521 0.808286 0.554928
1157 1 0.406755 0.84704 0.564348
809 1 0.370683 0.870191 0.532297
1879 1 0.970848 0.900805 0.534871
1792 1 0.0326662 0.902416 0.548245
1668 1 0.3602 0.935603 0.579857
1508 1 0.91135 0.92281 0.565752
1860 1 0.100691 0.962042 0.875162
1158 1 0.556866 0.531637 0.554099
99 1 0.870158 0.511376 0.570261
293 1 0.672717 0.529448 0.575603
1941 1 0.863733 0.545419 0.542668
1892 1 0.571208 0.989623 0.621211
1798 1 0.0249427 0.551565 0.571229
496 1 0.178448 0.582072 0.558293
1283 1 0.33443 0.677641 0.579304
1173 1 0.0307581 0.733852 0.591415
1045 1 0.669752 0.704295 0.565955
1765 1 0.690751 0.749376 0.586596
786 1 0.367263 0.778727 0.579561
1908 1 0.719979 0.802924 0.524509
335 1 0.751805 0.80785 0.582111
298 1 0.853165 0.814574 0.534397
1973 1 0.928658 0.878321 0.555206
1809 1 0.642339 0.93287 0.588271
334 1 0.170567 0.91866 0.533719
1643 1 0.361871 0.844513 0.993405
1427 1 0.200655 0.735646 0.512903
1979 1 0.52631 0.526376 0.570838
419 1 0.453835 0.534289 0.551389
967 1 0.735496 0.516935 0.550848
1817 1 0.591637 0.541928 0.584057
125 1 0.770963 0.54738 0.595512
1510 1 0.996042 0.56903 0.562687
413 1 0.645025 0.612555 0.56115
530 1 0.768616 0.600023 0.597242
1540 1 0.734529 0.614318 0.569801
593 1 0.853967 0.637664 0.608846
202 1 0.228466 0.711707 0.581306
178 1 0.327034 0.793483 0.604057
1094 1 0.317772 0.804188 0.607595
1424 1 0.66045 0.797496 0.577258
2004 1 0.82615 0.827655 0.586792
1457 1 0.0820394 0.825463 0.596794
112 1 0.82877 0.837974 0.577484
1304 1 0.0565984 0.910654 0.592828
140 1 0.139791 0.905342 0.572994
785 1 0.49208 0.954054 0.582187
660 1 0.549796 0.974268 0.574508
108 1 0.29213 0.512879 0.60846
1566 1 0.338276 0.607983 0.629253
1321 1 0.159311 0.663928 0.598997
627 1 0.12124 0.666403 0.572419
598 1 0.907114 0.754378 0.602767
781 1 0.48106 0.759249 0.602677
220 1 0.107103 0.7555 0.626888
1826 1 0.348283 0.754357 0.618765
599 1 0.776207 0.815897 0.578744
664 1 0.977656 0.897258 0.591883
1013 1 0.583278 0.933318 0.62855
1711 1 0.486364 0.956208 0.597086
1562 1 0.36976 0.839145 0.995532
1284 1 0.782292 0.521631 0.621849
324 1 0.111249 0.562624 0.64005
1218 1 0.200245 0.541638 0.610203
1741 1 0.432408 0.914901 0.952834
819 1 0.122956 0.535527 0.60362
1035 1 0.442914 0.554082 0.636839
778 1 0.421838 0.571489 0.599547
916 1 0.624479 0.552885 0.602733
971 1 0.823834 0.588384 0.610954
693 1 0.852048 0.5607 0.589091
1016 1 0.864107 0.641168 0.636298
931 1 0.980284 0.636934 0.607532
356 1 0.963086 0.621263 0.612581
13 1 0.63064 0.617555 0.644518
1688 1 0.763621 0.658744 0.586127
1574 1 0.690156 0.715297 0.590164
1971 1 0.0723076 0.780806 0.631024
1513 1 0.14191 0.810055 0.623062
1614 1 0.201759 0.792042 0.61682
1992 1 0.74555 0.811852 0.656744
766 1 0.0106424 0.812858 0.628166
194 1 0.410839 0.879379 0.622045
1924 1 0.65191 0.951203 0.604313
232 1 0.129122 0.955648 0.614879
1552 1 0.546162 0.958632 0.638023
130 1 0.207097 0.549081 0.614867
735 1 0.327815 0.533389 0.639055
1251 1 0.32576 0.514356 0.65585
1828 1 0.122724 0.611138 0.641536
411 1 0.276809 0.585267 0.633237
1176 1 0.49302 0.621996 0.626138
1291 1 0.700937 0.612743 0.680321
1138 1 0.452636 0.656779 0.644544
1880 1 0.787212 0.666543 0.655259
24 1 0.545275 0.694485 0.659114
1565 1 0.668129 0.691939 0.622644
1716 1 0.878754 0.762023 0.612042
1353 1 0.305118 0.77689 0.665172
469 1 0.650574 0.725226 0.650529
4 1 0.443398 0.802374 0.618095
1309 1 0.388638 0.761106 0.672686
1184 1 0.414443 0.79751 0.644929
675 1 0.313025 0.844235 0.635411
608 1 0.24045 0.831714 0.631282
1319 1 0.165885 0.894042 0.602765
2002 1 0.75113 0.949498 0.64197
1955 1 0.0631833 0.983372 0.644304
385 1 0.16485 0.981498 0.628305
271 1 0.219692 0.990621 0.607983
1914 1 0.327529 0.952578 0.671834
1727 1 0.169628 0.636882 0.987014
1571 1 0.552472 0.560526 0.657938
723 1 0.699513 0.582714 0.650864
649 1 0.298069 0.587056 0.680678
307 1 0.301879 0.614251 0.646293
317 1 0.74213 0.630446 0.668903
850 1 0.428376 0.664105 0.632554
780 1 0.53269 0.653291 0.62788
966 1 0.883734 0.715802 0.654792
1952 1 0.637178 0.775799 0.671837
1987 1 0.926948 0.793781 0.634582
1385 1 0.173032 0.818731 0.648581
472 1 0.836531 0.847424 0.640174
774 1 0.207234 0.851848 0.681801
728 1 0.321469 0.897891 0.662914
705 1 0.343865 0.900485 0.692929
1629 1 0.0112315 0.919941 0.665394
255 1 0.769353 0.941693 0.628728
2021 1 0.332792 0.579126 0.656341
523 1 0.34752 0.619782 0.660512
1997 1 0.905262 0.637663 0.698255
1680 1 0.24966 0.659986 0.706835
230 1 0.347457 0.73627 0.690947
421 1 0.88204 0.764871 0.677489
477 1 0.668176 0.821116 0.657321
853 1 0.556467 0.899006 0.67076
947 1 0.794426 0.852087 0.669821
1024 1 0.37974 0.918092 0.673471
58 1 0.661459 0.931324 0.678219
996 1 0.801598 0.877193 0.674864
771 1 0.0349701 0.947897 0.676717
78 1 0.210602 0.500021 0.654555
1623 1 0.311819 0.532061 0.693584
811 1 0.578361 0.546592 0.692498
1713 1 0.285219 0.541789 0.701434
1132 1 0.730035 0.595395 0.677795
249 1 0.823838 0.586582 0.679679
471 1 0.034178 0.577449 0.699586
1962 1 0.399551 0.586165 0.697556
1447 1 0.954391 0.618621 0.677824
1244 1 0.530964 0.690786 0.680445
1217 1 0.800095 0.669215 0.753087
1166 1 0.38092 0.742672 0.709353
817 1 0.409555 0.739125 0.682555
453 1 0.643119 0.748802 0.68812
775 1 0.900463 0.724817 0.719605
572 1 0.113881 0.747222 0.702249
350 1 0.0965288 0.758842 0.669132
566 1 0.845027 0.87523 0.696406
1334 1 0.137027 0.880194 0.67429
1841 1 0.0579242 0.931453 0.676478
94 1 0.436155 0.908723 0.689177
462 1 0.684613 0.985796 0.704752
1125 1 0.208098 0.527043 0.702265
2008 1 0.536561 0.513777 0.699399
678 1 0.856263 0.513144 0.71714
383 1 0.0674977 0.523823 0.717019
910 1 0.978262 0.544088 0.722175
286 1 0.0916021 0.59184 0.703943
581 1 0.267816 0.588851 0.734909
1240 1 0.312987 0.583982 0.714201
845 1 0.511143 0.600374 0.730723
149 1 0.916548 0.616927 0.700155
1379 1 0.481228 0.659521 0.722516
1050 1 0.0773423 0.717769 0.695999
1046 1 0.620614 0.746406 0.716627
1164 1 0.179035 0.760615 0.716221
532 1 0.221647 0.84907 0.700878
320 1 0.377568 0.845092 0.678141
1028 1 0.627257 0.841026 0.670215
1686 1 0.817294 0.81505 0.701194
887 1 0.252533 0.951935 0.700579
1435 1 0.217854 0.532697 0.737704
536 1 0.410874 0.523397 0.742681
1322 1 0.27532 0.993596 0.671142
1135 1 0.640303 0.618697 0.747115
585 1 0.699571 0.603206 0.745755
633 1 0.170681 0.6522 0.719566
984 1 0.535944 0.654993 0.743662
22 1 0.349241 0.7017 0.692555
481 1 0.821938 0.673959 0.749638
873 1 0.272844 0.762789 0.734248
346 1 0.498186 0.789701 0.697501
1935 1 0.0521975 0.789629 0.722771
707 1 0.443892 0.812056 0.710415
548 1 0.354649 0.861314 0.715364
535 1 0.133181 0.885437 0.731769
1460 1 0.768934 0.88251 0.739012
51 1 0.301258 0.904089 0.724153
844 1 0.783058 0.905611 0.744223
326 1 0.767522 0.937508 0.715611
2022 1 0.91848 0.941893 0.718664
1930 1 0.322728 0.978803 0.727915
1937 1 0.8705 0.972701 0.993849
1290 1 0.609921 0.846617 0.51888
702 1 0.0268451 0.529694 0.747989
1262 1 0.477317 0.568519 0.736842
1445 1 0.30784 0.562056 0.76399
1909 1 0.371041 0.576665 0.774545
2041 1 0.391117 0.557525 0.795917
928 1 0.447977 0.586455 0.762334
645 1 0.903701 0.61961 0.772106
369 1 0.179551 0.677443 0.775831
76 1 0.855707 0.634934 0.747127
1759 1 0.971272 0.734534 0.747592
1072 1 0.181392 0.745609 0.745874
1870 1 0.534418 0.771502 0.764802
463 1 0.977623 0.785203 0.747248
1345 1 0.0233871 0.845321 0.758648
1116 1 0.370282 0.827064 0.724562
437 1 0.0849227 0.846531 0.736273
1517 1 0.167604 0.89227 0.746746
968 1 0.317842 0.910767 0.726476
1151 1 0.328092 0.912619 0.734855
1653 1 0.328008 0.886284 0.728967
923 1 0.267348 0.913145 0.729881
387 1 0.284099 0.952662 0.75293
403 1 0.586808 0.908524 0.748198
866 1 0.685338 0.512001 0.78399
11 1 0.963855 0.504536 0.751368
1527 1 0.65956 0.503466 0.776285
1342 1 0.483452 0.597166 0.751005
869 1 0.537389 0.555529 0.770335
1133 1 0.828555 0.548524 0.7654
270 1 0.904533 0.589272 0.765653
1204 1 0.615116 0.604494 0.762953
1022 1 0.173003 0.68168 0.79006
1729 1 0.212849 0.708053 0.765192
487 1 0.648267 0.656961 0.74394
1023 1 0.231967 0.720562 0.754091
842 1 0.479802 0.747048 0.784338
960 1 0.285384 0.759659 0.779618
1374 1 0.275339 0.706584 0.752292
815 1 0.968554 0.708678 0.759446
1031 1 0.9967 0.819783 0.751416
1049 1 0.01912 0.851134 0.802861
1209 1 0.0796145 0.846324 0.756848
1104 1 0.14451 0.904733 0.77059
883 1 0.487572 0.9843 0.749201
1444 1 0.534924 0.955686 0.703597
1303 1 0.55589 0.983964 0.76153
1975 1 0.0878542 0.59966 0.804235
373 1 0.548929 0.645271 0.800474
1492 1 0.581424 0.624399 0.782417
1205 1 0.790186 0.601649 0.803727
2010 1 0.109088 0.676683 0.767431
55 1 0.14576 0.668393 0.766527
1443 1 0.0221236 0.669478 0.799297
1241 1 0.248716 0.687019 0.809144
1931 1 0.46543 0.667342 0.783729
1346 1 0.135685 0.769858 0.792377
622 1 0.41077 0.760379 0.772578
1827 1 0.504612 0.741272 0.742355
1972 1 0.0913593 0.770294 0.773826
438 1 0.0753413 0.771069 0.779538
1369 1 0.6407 0.882032 0.776283
1191 1 0.916762 0.820203 0.768113
102 1 0.133232 0.895338 0.773321
1845 1 0.26803 0.873009 0.789319
1994 1 0.569091 0.930258 0.769079
1254 1 0.679154 0.915257 0.821313
187 1 0.261515 0.944243 0.806683
1465 1 0.644024 0.518255 0.800631
934 1 0.901561 0.559805 0.799306
67 1 0.416437 0.553479 0.804302
1609 1 0.458636 0.557214 0.785549
950 1 0.835484 0.508272 0.833038
1522 1 0.737057 0.579276 0.811413
443 1 0.779021 0.567153 0.800329
132 1 0.648424 0.616368 0.83966
929 1 0.166718 0.589921 0.780943
920 1 0.4866 0.601631 0.801496
1660 1 0.392125 0.691761 0.775126
136 1 0.913651 0.721445 0.806296
1927 1 0.333677 0.76352 0.786893
162 1 0.650308 0.807381 0.826948
1836 1 0.998264 0.819487 0.806569
684 1 0.674838 0.836976 0.810411
265 1 0.795888 0.873689 0.823073
932 1 0.542967 0.890327 0.787112
1305 1 0.637014 0.919301 0.80155
1317 1 0.980844 0.884915 0.783686
1099 1 0.215189 0.944064 0.810562
1497 1 0.399117 0.943226 0.816959
344 1 0.112634 0.97868 0.801897
647 1 0.840334 0.543596 0.864314
1483 1 0.840823 0.559932 0.830801
2011 1 0.790001 0.640563 0.867021
826 1 0.18661 0.634716 0.849175
1731 1 0.569015 0.661565 0.825195
219 1 0.218527 0.707429 0.810648
458 1 0.783256 0.728555 0.830137
32 1 0.801258 0.710406 0.8241
1528 1 0.309017 0.797246 0.859203
44 1 0.76467 0.816976 0.815874
843 1 0.133438 0.919161 0.817614
1782 1 0.864525 0.937206 0.83343
1219 1 0.955438 0.935276 0.80568
541 1 0.0877132 0.970025 0.827151
435 1 0.479513 0.540415 0.83558
787 1 0.0198408 0.501274 0.85721
672 1 0.575878 0.610271 0.822257
955 1 0.573245 0.58875 0.863532
1599 1 0.608217 0.659366 0.861473
1100 1 0.54949 0.681755 0.855626
1237 1 0.260714 0.695482 0.815784
1877 1 0.244114 0.761157 0.842722
1502 1 0.72477 0.802365 0.849395
591 1 0.818645 0.81909 0.857862
988 1 0.869008 0.843141 0.843377
1645 1 0.501445 0.944779 0.850315
1610 1 0.763057 0.919357 0.791768
217 1 0.64184 0.964602 0.82539
1393 1 0.570385 0.960989 0.832666
1313 1 0.928632 0.499978 0.879356
73 1 0.919806 0.936218 0.988713
666 1 0.432324 0.53801 0.852014
1086 1 0.628028 0.563188 0.873006
480 1 0.983935 0.61312 0.870187
402 1 0.728889 0.60899 0.808608
503 1 0.11222 0.618096 0.833313
337 1 0.728125 0.652604 0.856214
901 1 0.619636 0.693207 0.860256
1978 1 0.0934271 0.700554 0.890837
1744 1 0.349288 0.746914 0.845147
716 1 0.068897 0.833535 0.858021
875 1 0.368065 0.847153 0.849161
531 1 0.433318 0.82532 0.866762
2007 1 0.158867 0.841019 0.870433
552 1 0.421429 0.78414 0.85234
749 1 0.0841482 0.881846 0.872615
1332 1 0.526723 0.909992 0.863707
744 1 0.0758245 0.57187 0.865109
948 1 0.251757 0.589847 0.887394
340 1 0.404247 0.624913 0.842457
126 1 0.223257 0.675064 0.894907
1625 1 0.531996 0.679223 0.879582
1384 1 0.306108 0.733183 0.871418
989 1 0.961988 0.707225 0.866133
1926 1 0.486709 0.745506 0.924267
1763 1 0.175458 0.796254 0.891792
1598 1 0.584433 0.841532 0.888359
822 1 0.301593 0.821159 0.861427
329 1 0.646672 0.918267 0.888107
142 1 0.600633 0.971302 0.890362
378 1 0.887257 0.514071 0.918398
974 1 0.0180301 0.520355 0.898102
596 1 0.644232 0.547506 0.912442
1770 1 0.228144 0.532244 0.882328
980 1 0.148452 0.563415 0.873122
729 1 0.529233 0.627307 0.858847
164 1 0.681329 0.622996 0.907745
479 1 0.234879 0.650414 0.896625
145 1 0.256791 0.649853 0.914094
721 1 0.775796 0.680112 0.905938
1029 1 0.506041 0.789616 0.871058
1245 1 0.101697 0.837325 0.881155
690 1 0.282301 0.880024 0.887099
734 1 0.722689 0.87249 0.909747
881 1 0.57693 0.939754 0.86122
825 1 0.60801 0.935789 0.929258
773 1 0.740947 0.985475 0.921336
1170 1 0.91095 0.964535 0.676989
1359 1 0.641686 0.537097 0.916588
1960 1 0.0814382 0.567587 0.909614
1074 1 0.322863 0.595798 0.952781
1694 1 0.56617 0.603318 0.893584
720 1 0.835276 0.638004 0.89652
336 1 0.364883 0.627522 0.905311
1206 1 0.898451 0.628881 0.910842
245 1 0.390757 0.669912 0.900287
1672 1 0.118602 0.740902 0.924209
1768 1 0.728238 0.735002 0.921932
918 1 0.817748 0.735608 0.926645
285 1 0.166564 0.758429 0.923336
1795 1 0.278227 0.802008 0.897451
100 1 0.884105 0.810223 0.926812
1041 1 0.357931 0.933539 0.907536
1408 1 0.621043 0.96704 0.944017
7 1 0.161835 0.930606 0.933675
1089 1 0.326965 0.94507 0.890902
1096 1 0.953524 0.960035 0.916764
1025 1 0.27012 0.510935 0.928849
133 1 0.49838 0.55555 0.977057
595 1 0.823478 0.575305 0.895762
814 1 0.684933 0.654092 0.926412
69 1 0.212005 0.713858 0.944317
188 1 0.225167 0.706319 0.896862
1902 1 0.970325 0.779336 0.91815
615 1 0.286536 0.764702 0.893204
687 1 0.0906343 0.818303 0.940197
527 1 0.765916 0.842425 0.985624
423 1 0.220515 0.823607 0.959483
681 1 0.0929535 0.888424 0.920504
941 1 0.0524954 0.898004 0.942422
200 1 0.67878 0.901452 0.944412
1849 1 0.560423 0.519227 0.993296
1603 1 0.702786 0.529019 0.96284
882 1 0.170598 0.527266 0.939078
1722 1 0.657959 0.518967 0.941318
1001 1 0.482824 0.593828 0.936752
295 1 0.546079 0.581255 0.901375
1482 1 0.816697 0.55565 0.970353
1723 1 0.389849 0.589294 0.935593
1561 1 0.622028 0.6175 0.944601
747 1 0.779384 0.660823 0.951058
1051 1 0.355569 0.649504 0.993801
1289 1 0.380433 0.631369 0.974507
391 1 0.578333 0.687285 0.95346
1742 1 0.744519 0.663476 0.936712
767 1 0.462691 0.714991 0.951671
1299 1 0.570708 0.779619 0.960726
1229 1 0.571998 0.736889 0.975634
652 1 0.397643 0.796745 0.934994
836 1 0.676871 0.781725 0.924489
538 1 0.544397 0.788531 0.93492
2019 1 0.745868 0.797565 0.953876
1020 1 0.185441 0.848745 0.979148
1105 1 0.630938 0.878187 0.967597
1504 1 0.129128 0.880978 0.941097
1458 1 0.721076 0.904723 0.989605
1605 1 0.0330654 0.979026 0.947656
641 1 0.432372 0.978884 0.985898
45 1 0.662961 0.983574 0.94978
1362 1 0.316746 0.510397 0.993059
1243 1 0.788418 0.996046 0.510919
573 1 0.233794 0.518248 0.988871
451 1 0.989599 0.538382 0.952836
21 1 0.195857 0.580866 0.994477
1144 1 0.34501 0.598413 0.938913
313 1 0.991087 0.621493 0.958942
891 1 0.341448 0.630791 0.987061
498 1 0.464667 0.629937 0.977048
1896 1 0.689779 0.663785 0.970682
651 1 0.08802 0.715013 0.988179
862 1 0.493878 0.699723 0.971182
927 1 0.197882 0.993357 0.519974
1531 1 0.301508 0.802434 0.988497
85 1 0.676503 0.861589 0.973554
521 1 0.872189 0.87756 0.995958
1328 1 0.29896 0.937846 0.966053
139 1 0.00137202 0.971068 0.951874
1273 1 0.198053 0.969404 0.942229
1436 1 0.676514 0.750281 0.514779
110 1 0.337024 0.542341 0.980558
1278 1 0.181193 0.851979 0.512403
319 1 0.293507 0.906418 0.979239
415 1 0.450144 0.597705 0.997901
1530 1 0.704846 0.626954 0.963863
650 1 0.0254381 0.612481 0.97883
257 1 0.387648 0.662138 0.995758
1036 1 0.585148 0.714271 0.957247
352 1 0.451197 0.751816 0.994225
30 1 0.917463 0.727078 0.984348
185 1 0.600823 0.75718 0.974968
1580 1 0.981281 0.803065 1.00002
1032 1 0.730677 0.813996 0.985196
877 1 0.616099 0.827001 0.976164
138 1 0.0388605 0.504475 0.657166
1712 1 0.858115 0.995446 0.839534
1611 1 0.810034 0.957961 0.998224
839 1 0.983772 0.998373 0.687525
1310 1 0.167439 0.949439 0.501039
| [
"[email protected]"
]
| |
cffe0a59e77bcf26414b072c0e182159fac32345 | 82948269c85dd0cce4f1337f75b28b5edf6d81ef | /eval.py | 6b58ef22874feb9bea385f8a78b0cae4bdc90d21 | []
| no_license | zhangwenhao123/Underwater-Color-Correction | 291c5f7131855832d58ddd6a02fd0a334c3f13d3 | b25bd697bfd206afb9b3d1ad58d4ff4dfc850b2e | refs/heads/master | 2021-05-11T03:43:07.330895 | 2018-01-17T22:48:46 | 2018-01-17T22:48:46 | 117,921,125 | 0 | 0 | null | 2018-01-18T02:33:36 | 2018-01-18T02:33:36 | null | UTF-8 | Python | false | false | 4,072 | py | '''
Evaluation File
'''
import cPickle as pickle
import tensorflow as tf
from scipy import misc
from tqdm import tqdm
import numpy as np
import argparse
import random
import ntpath
import sys
import os
import time
import time
import glob
import cPickle as pickle
from tqdm import tqdm
sys.path.insert(0, 'ops/')
sys.path.insert(0, 'nets/')
from tf_ops import *
import data_ops
if __name__ == '__main__':
if len(sys.argv) < 2:
print 'You must provide an info.pkl file'
exit()
pkl_file = open(sys.argv[1], 'rb')
a = pickle.load(pkl_file)
LEARNING_RATE = a['LEARNING_RATE']
LOSS_METHOD = a['LOSS_METHOD']
BATCH_SIZE = a['BATCH_SIZE']
L1_WEIGHT = a['L1_WEIGHT']
IG_WEIGHT = a['IG_WEIGHT']
NETWORK = a['NETWORK']
EPOCHS = a['EPOCHS']
DATA = a['DATA']
EXPERIMENT_DIR = 'checkpoints/LOSS_METHOD_'+LOSS_METHOD\
+'/NETWORK_'+NETWORK\
+'/L1_WEIGHT_'+str(L1_WEIGHT)\
+'/IG_WEIGHT_'+str(IG_WEIGHT)\
+'/DATA_'+DATA+'/'\
IMAGES_DIR = EXPERIMENT_DIR+'test_images/'
print
print 'Creating',IMAGES_DIR
try: os.makedirs(IMAGES_DIR)
except: pass
print
print 'LEARNING_RATE: ',LEARNING_RATE
print 'LOSS_METHOD: ',LOSS_METHOD
print 'BATCH_SIZE: ',BATCH_SIZE
print 'L1_WEIGHT: ',L1_WEIGHT
print 'IG_WEIGHT: ',IG_WEIGHT
print 'NETWORK: ',NETWORK
print 'EPOCHS: ',EPOCHS
print 'DATA: ',DATA
print
if NETWORK == 'pix2pix': from pix2pix import *
if NETWORK == 'resnet': from resnet import *
# global step that is saved with a model to keep track of how many steps/epochs
global_step = tf.Variable(0, name='global_step', trainable=False)
# underwater image
image_u = tf.placeholder(tf.float32, shape=(1, 256, 256, 3), name='image_u')
# generated corrected colors
gen_image = netG(image_u, LOSS_METHOD)
saver = tf.train.Saver(max_to_keep=1)
init = tf.group(tf.local_variables_initializer(), tf.global_variables_initializer())
sess = tf.Session()
sess.run(init)
ckpt = tf.train.get_checkpoint_state(EXPERIMENT_DIR)
if ckpt and ckpt.model_checkpoint_path:
print "Restoring previous model..."
try:
saver.restore(sess, ckpt.model_checkpoint_path)
print "Model restored"
except:
print "Could not restore model"
pass
step = int(sess.run(global_step))
# testing paths
'''
exts = ['*.jpg', '*.jpeg', '*.JPEG', '*.png']
test_paths = []
for ex in exts:
test_paths.extend(glob.glob('datasets/'+DATA+'/test/'+ex))
test_paths = np.asarray(test_paths)
'''
#test_paths = sorted(np.asarray(glob.glob('/mnt/data2/images/underwater/youtube/diving1/*.jpg')))
test_paths = sorted(np.asarray(glob.glob('/mnt/data1/videos/barbados/2018/images/*.png')))
IMAGES_DIR = '/mnt/data1/videos/barbados/2018/out_images/'
#random.shuffle(test_paths)
num_test = len(test_paths)
print 'num test:',num_test
print 'IMAGES_DIR:',IMAGES_DIR
c = 0
times = []
for img_path in tqdm(test_paths):
img_name = ntpath.basename(img_path)
img_name = img_name.split('.')[0]
batch_images = np.empty((1, 256, 256, 3), dtype=np.float32)
a_img = misc.imread(img_path).astype('float32')
a_img = misc.imresize(a_img, (256, 256, 3))
a_img = data_ops.preprocess(a_img)
batch_images[0, ...] = a_img
s = time.time()
gen_images = np.asarray(sess.run(gen_image, feed_dict={image_u:batch_images}))
tot = time.time()-s
times.append(tot)
for gen, real in zip(gen_images, batch_images):
#misc.imsave(IMAGES_DIR+str(step)+'_'+str(c)+'_real.png', real)
#misc.imsave(IMAGES_DIR+str(step)+'_'+str(c)+'_gen.png', gen)
misc.imsave(IMAGES_DIR+img_name+'_real.png', real)
misc.imsave(IMAGES_DIR+img_name+'_gen.png', gen)
c += 1
print
print 'average time:',np.mean(np.asarray(times))
print
| [
"[email protected]"
]
| |
327694995f36b39033eb39821a28e85f2af31c2a | e7b7cc34f77c71e61aa0fa05bcc62f54fc2fc0e1 | /BitManipulation/test_q136_single_number.py | 42836c696458ba313f72ead4768f330aefb9e302 | []
| no_license | sevenhe716/LeetCode | 41d2ef18f5cb317858c9b69d00bcccb743cbdf48 | 4a1747b6497305f3821612d9c358a6795b1690da | refs/heads/master | 2020-03-16T16:12:27.461172 | 2019-04-22T13:27:54 | 2019-04-22T13:27:54 | 130,221,784 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 379 | py | import unittest
from BitManipulation.q136_single_number import Solution
class TestSingleNumber(unittest.TestCase):
"""Test q136_single_number.py"""
def test_single_number(self):
s = Solution()
self.assertEqual(1, s.singleNumber([2, 2, 1]))
self.assertEqual(4, s.singleNumber([4, 1, 2, 1, 2]))
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
22844e0fe9f2ac64b52c240fca902ba6fa38396f | a2830d10e5bb5d559ea0b3c209cee46d25820ea6 | /tests/integration/network/connect_nodes_test.py | 3829da79ad9dea71a59cac4c52138105caceff9f | [
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0"
]
| permissive | vishalbelsare/PySyft | 623e41c472a1e66bf3918fdb11399a6f20112fc0 | fb04404fcfbef82fad1fb47407b35a24e9afb599 | refs/heads/dev | 2023-08-21T21:56:37.319567 | 2021-11-23T07:20:09 | 2021-11-23T07:20:09 | 214,707,883 | 0 | 0 | Apache-2.0 | 2021-11-25T04:54:37 | 2019-10-12T20:03:01 | Jupyter Notebook | UTF-8 | Python | false | false | 2,321 | py | # third party
import pytest
import requests
# syft absolute
import syft as sy
# Host ports of the three nodes in the docker-compose test cluster.
NETWORK_PORT = 9081
# Public host:port other nodes use to join the network node.
NETWORK_PUBLIC_HOST = f"docker-host:{NETWORK_PORT}"
DOMAIN1_PORT = 9082
DOMAIN2_PORT = 9083
# VPN addresses each node is expected to report after joining
# (asserted in run_network_tests below).
NETWORK_VPN_IP = "100.64.0.1"
DOMAIN1_VPN_IP = "100.64.0.2"
DOMAIN2_VPN_IP = "100.64.0.3"
# Default root credentials of a freshly deployed test node.
TEST_ROOT_EMAIL = "[email protected]"
TEST_ROOT_PASS = "changethis"
def join_to_network_python(
    email: str, password: str, port: int, network_host: str
) -> dict:
    """Join a node to the network through the Syft Python API.

    Logs in as *email*/*password* on the node at localhost:*port*, asks it to
    join *network_host*, and returns the node's ``vpn_status()`` payload.
    NOTE(review): the original annotation said ``-> None`` although a value is
    returned; callers index it with "status"/"host" keys, so it is presumably
    a dict -- confirm against the Syft client.
    """
    root_client = sy.login(email=email, password=password, port=port)
    # test Syft API
    root_client.join_network(host_or_ip=network_host)
    response = root_client.vpn_status()
    return response
def join_to_network_rest(
    email: str, password: str, port: int, network_host: str
) -> dict:
    """Join a node to the network through the raw HTTP API.

    Authenticates against the node's /api/v1/login endpoint, then POSTs to
    the VPN join endpoint with the bearer token, returning the decoded JSON
    response. (Original annotation said ``-> None`` although the parsed JSON
    is returned.)
    """
    url = f"http://localhost:{port}/api/v1/login"
    auth_response = requests.post(url, json={"email": email, "password": password})
    auth = auth_response.json()
    # test HTTP API
    url = f"http://localhost:{port}/api/v1/vpn/join/{network_host}"
    # Bearer token issued by the login call above.
    headers = {"Authorization": f"Bearer {auth['access_token']}"}
    response = requests.post(url, headers=headers)
    result = response.json()
    return result
def run_network_tests(port: int, hostname: str, vpn_ip: str) -> None:
    """Join the node at localhost:*port* to the test network and verify it.

    Joins via the Syft Python API first and asserts the node reports VPN
    status "ok" with the expected *vpn_ip*, *hostname*, and OS, then repeats
    the join over the plain HTTP API and asserts it also succeeds.
    """
    response = join_to_network_python(
        email=TEST_ROOT_EMAIL,
        password=TEST_ROOT_PASS,
        port=port,
        network_host=NETWORK_PUBLIC_HOST,
    )
    assert response["status"] == "ok"
    host = response["host"]
    assert host["ip"] == vpn_ip
    assert host["hostname"] == hostname
    assert host["os"] == "linux"
    # Joining again through the HTTP endpoint must succeed as well.
    response = join_to_network_rest(
        email=TEST_ROOT_EMAIL,
        password=TEST_ROOT_PASS,
        port=port,
        network_host=NETWORK_PUBLIC_HOST,
    )
    assert response["status"] == "ok"
@pytest.mark.network
def test_connect_network_to_network() -> None:
    # The network node joins itself; it is expected to hold the first VPN IP.
    run_network_tests(
        port=NETWORK_PORT, hostname="test_network_1", vpn_ip=NETWORK_VPN_IP
    )


@pytest.mark.network
def test_connect_domain1_to_network() -> None:
    # First domain node joins the network node.
    run_network_tests(
        port=DOMAIN1_PORT, hostname="test_domain_1", vpn_ip=DOMAIN1_VPN_IP
    )


@pytest.mark.network
def test_connect_domain2_to_network() -> None:
    # Second domain node joins the network node.
    run_network_tests(
        port=DOMAIN2_PORT, hostname="test_domain_2", vpn_ip=DOMAIN2_VPN_IP
    )
| [
"[email protected]"
]
| |
35a789b942ec3a960db601b55fe92fd6f20c6e1b | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_triptych.py | 326fed97b6911ff86e26d493fa295989fe50f35a | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 430 | py |
#calss header
class _TRIPTYCH():
def __init__(self,):
self.name = "TRIPTYCH"
self.definitions = [u'a piece of art made of three paintings connected to each other in a way that allows the two outer ones to fold in towards the larger central one: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"[email protected]"
]
| |
0a8424d606394ea8ae293cf99d4325fb99aa7704 | 9edaf93c833ba90ae9a903aa3c44c407a7e55198 | /travelport/models/universal_record_import_service_port_type_service_output.py | 785ca9abe0e122f6bfffeea1000df21f8bfa7744 | []
| no_license | tefra/xsdata-samples | c50aab4828b8c7c4448dbdab9c67d1ebc519e292 | ef027fe02e6a075d8ed676c86a80e9647d944571 | refs/heads/main | 2023-08-14T10:31:12.152696 | 2023-07-25T18:01:22 | 2023-07-25T18:01:22 | 222,543,692 | 6 | 1 | null | 2023-06-25T07:21:04 | 2019-11-18T21:00:37 | Python | UTF-8 | Python | false | false | 2,555 | py | from __future__ import annotations
from dataclasses import dataclass, field
from travelport.models.error_info_1 import ErrorInfo1
from travelport.models.universal_record_import_rsp import UniversalRecordImportRsp
__NAMESPACE__ = "http://www.travelport.com/service/air_v52_0"


# SOAP envelope binding for the UniversalRecordImport service response.
# xsdata-generated style: each field's metadata maps the Python attribute
# onto its XML element name and namespace.
@dataclass
class UniversalRecordImportServicePortTypeServiceOutput:
    class Meta:
        # Root XML element and the SOAP 1.1 envelope namespace.
        name = "Envelope"
        namespace = "http://schemas.xmlsoap.org/soap/envelope/"

    # <Body> child of the envelope; None until populated by the parser.
    body: None | UniversalRecordImportServicePortTypeServiceOutput.Body = field(
        default=None,
        metadata={
            "name": "Body",
            "type": "Element",
        }
    )

    @dataclass
    class Body:
        # Successful response payload.
        universal_record_import_rsp: None | UniversalRecordImportRsp = field(
            default=None,
            metadata={
                "name": "UniversalRecordImportRsp",
                "type": "Element",
                "namespace": "http://www.travelport.com/schema/universal_v52_0",
            }
        )
        # SOAP <Fault> element, populated on errors instead of the payload.
        fault: None | UniversalRecordImportServicePortTypeServiceOutput.Body.Fault = field(
            default=None,
            metadata={
                "name": "Fault",
                "type": "Element",
            }
        )

        @dataclass
        class Fault:
            # Standard (un-namespaced) SOAP 1.1 fault fields.
            faultcode: None | str = field(
                default=None,
                metadata={
                    "type": "Element",
                    "namespace": "",
                }
            )
            faultstring: None | str = field(
                default=None,
                metadata={
                    "type": "Element",
                    "namespace": "",
                }
            )
            faultactor: None | str = field(
                default=None,
                metadata={
                    "type": "Element",
                    "namespace": "",
                }
            )
            detail: None | UniversalRecordImportServicePortTypeServiceOutput.Body.Fault.Detail = field(
                default=None,
                metadata={
                    "type": "Element",
                    "namespace": "",
                }
            )

            @dataclass
            class Detail:
                # Travelport-specific error payload carried inside the fault.
                error_info: None | ErrorInfo1 = field(
                    default=None,
                    metadata={
                        "name": "ErrorInfo",
                        "type": "Element",
                        "namespace": "http://www.travelport.com/schema/common_v52_0",
                    }
                )
| [
"[email protected]"
]
| |
dcf147953063c4991c197cbe6efdbcb60532bb65 | 7b3871759d61004217100ce1a858b1acd20c6166 | /study/day3/使用list()函数将range()函数输出的值输出为列表.py | c996564c86e1de7062102d16ba6cff55f3bd5ef7 | []
| no_license | yidaiweiren/Python | 74bcecfe32cef25e3f5692b3a3ebf1309cbe8e00 | 986a51cc59f0ffa90c967b62a3d729bb034c273d | refs/heads/master | 2021-07-25T00:27:52.970745 | 2020-06-05T07:36:35 | 2020-06-05T07:36:35 | 183,752,758 | 3 | 1 | null | 2019-04-28T03:35:01 | 2019-04-27T09:20:56 | Python | UTF-8 | Python | false | false | 235 | py | #使用list()函数将range()函数输出的值输出为列表
# Approach: range() produces a sequence of numbers; passing that sequence to
# list() materializes it as a list.
num = list(range(1, 9))
print(num)
# Result:
'''
[1, 2, 3, 4, 5, 6, 7, 8]
''' | [
"[email protected]"
]
| |
6e18658df53e9888090775e690d5b05a05167636 | 16aba0619caf0ad5ffd4817ac0575943e8b4659e | /venv/lib/python3.6/_weakrefset.py | 6d85e9259b1aac3160ee5b876455feefb22817af | []
| no_license | Rin94/seleniumtests | 23cd0b00609fa4b00216dc4fa27a96637ee0aedd | 3ae6a87b2410ed5a6f66f1b69915071c3517b5f8 | refs/heads/main | 2023-05-23T11:02:33.443620 | 2021-06-13T04:43:53 | 2021-06-13T04:43:53 | 376,441,010 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 51 | py | /Users/jared/anaconda3/lib/python3.6/_weakrefset.py | [
"[email protected]"
]
| |
1376040d7ea44397eb2be58d3c4c49bbb5b2748a | 425b5719ecf6b40bf3de94ddf6e0cc9cf72717b7 | /app/events/regions.py | 176df6e4c2fdb4eb0f39440397ba757453882183 | [
"MIT"
]
| permissive | zerorock1312/lt-maker-master | 3b9b2e7245215936018601432a98915c40f3937d | 82f733683f9dba763a5de8567c41fd7cbcfb0173 | refs/heads/main | 2023-06-04T10:28:43.931841 | 2021-06-18T06:03:40 | 2021-06-18T06:03:40 | 378,050,560 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,731 | py | from app.utilities.data import Prefab
region_types = ['normal', 'status', 'event', 'formation']


class Region(Prefab):
    """An axis-aligned rectangular region on the map.

    A region has a type, an optional top-left ``position``, and a ``size``
    of (width, height). While no position is assigned, every containment
    query answers negatively.
    """

    def __init__(self, nid):
        self.nid = nid
        self.region_type = 'normal'
        self.position = None
        self.size = [1, 1]
        self.sub_nid = None
        self.condition = 'True'
        self.only_once = False

    @property
    def area(self):
        """Number of tiles covered by the region."""
        width, height = self.size
        return width * height

    @property
    def center(self) -> tuple:
        """Central tile of the region, or None when it has no position."""
        if not self.position:
            return None
        left, top = self.position
        return int(left + self.size[0] // 2), int(top + self.size[1] // 2)

    def contains(self, pos: tuple) -> bool:
        """Whether tile ``pos`` lies inside the region."""
        x, y = pos
        if not self.position:
            return False
        left, top = self.position
        return left <= x < left + self.size[0] and top <= y < top + self.size[1]

    def fuzzy_contains(self, pos: tuple) -> bool:
        """Like ``contains``, but with a 0.4-tile tolerance on every edge."""
        x, y = pos
        if not self.position:
            return False
        fuzz = 0.4
        left, top = self.position
        return (left - fuzz <= x < left + self.size[0] + fuzz
                and top - fuzz <= y < top + self.size[1] + fuzz)

    def get_all_positions(self):
        """All tile coordinates covered by the region ([] when unplaced)."""
        if not self.position:
            return []
        left, top = self.position
        return [(col, row)
                for col in range(left, left + self.size[0])
                for row in range(top, top + self.size[1])]

    @classmethod
    def default(cls):
        """Placeholder region used when none is specified."""
        return cls('None')
| [
"[email protected]"
]
| |
d1a3507e16a5cbea29be05f612b36e42c86cbd03 | 5e6bb81b207f3306bca3a2412dcc86525ff09b51 | /Django_test01/mysql_test2000.py | 126dffb3de527794b524ceaf3e3241e6463f044c | []
| no_license | ssk1987/FullStackExerciseLibrary | f16ad4a0ab2ce6864d00905738db0832a8e916a1 | e050bffce3d8497b47980aab30ea99409f61856b | refs/heads/master | 2021-06-03T02:40:06.840380 | 2021-03-27T11:37:22 | 2021-03-27T11:37:22 | 254,331,226 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 754 | py | import pymysql
# Connect to the local MySQL server.
# NOTE(review): credentials are hard-coded; move them to config/env vars
# before using this outside of a local exercise.
db = pymysql.connect(host='127.0.0.1',
                     port=3306,
                     user='root',
                     password='12345678',
                     database='country1',
                     charset='utf8')
# Obtain a cursor.
cur = db.cursor()
# Build the rows to insert: 200,000 generated student names.
data_list = []
for x in range(200000):
    name = 'Py87_%s' % x
    data_list.append(name)
# Parameterized INSERT statement.
# Server-side limits noted by the original author: one packet is capped by
# max_allowed_packet, and statements are grouped up to
# max_stmt_length = 1024000 characters of SQL text.
ins = 'insert into students(name) values(%s)'
# Bulk insert: a single executemany() instead of 200k individual inserts
# keeps the number of disk/network round trips low.
cur.executemany(ins, data_list)
# Commit the transaction.
db.commit()
# Close the cursor and the connection.
cur.close()
db.close()
| [
"[email protected]"
]
| |
c220c97fed107071513fbfb21f2e2b337a46b158 | d03a874a5ba8303cdcedf88350bb3cae2c98244a | /cifar10_cnn.py | 6a0aebaf8ea16da8087269b1e30966773f223d2c | []
| no_license | yaroslavvb/whitening | f8b24624d751c7b8b6245eb37859ba527b850814 | 7d071c25c8a9ff9cc624b608013f097646ca3b5e | refs/heads/master | 2021-07-09T16:13:44.707126 | 2017-10-10T22:07:49 | 2017-10-10T22:07:49 | 90,409,782 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,886 | py | '''
CIFAR-10 example from https://github.com/fchollet/keras/blob/master/examples/cifar10_cnn.py
Now with weight normalization. Lines 64 and 69 contain the changes w.r.t. original.
'''
from __future__ import print_function
from keras.datasets import cifar10
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Convolution2D, MaxPooling2D
from keras.utils import np_utils
from keras import optimizers
import util as u
import sys
import numpy as np
# Usage: cifar10_cnn.py {sgd|sgd_wn|adam|adam_wn}
if len(sys.argv)<2:
  assert False
# The CLI argument selects the optimizer and the output file prefix.
if sys.argv[1]=='sgd':
  prefix='keras_sgd'
  optimizer='sgd'
elif sys.argv[1]=='sgd_wn':
  prefix='keras_sgd_wn'
  optimizer='sgd_wn'
elif sys.argv[1]=='adam':
  prefix='keras_adam'
  optimizer='adam'
elif sys.argv[1]=='adam_wn':
  prefix='keras_adam_wn'
  optimizer='adam_wn'
else:
  assert False
batch_size = 5000
nb_classes = 10
nb_epoch = 10
data_augmentation = True
# input image dimensions
img_rows, img_cols = 32, 32
# the CIFAR10 images are RGB
img_channels = 3
# the data, shuffled and split between train and test sets
(X_train, y_train), (X_test, y_test) = cifar10.load_data()
print('X_train shape:', X_train.shape)
print(X_train.shape[0], 'train samples')
print(X_test.shape[0], 'test samples')
# Scale pixel values into [0, 1].
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
X_train /= 255
X_test /= 255
# convert class vectors to binary class matrices
Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_test = np_utils.to_categorical(y_test, nb_classes)
# Small convnet: two conv/pool blocks followed by a dense classifier.
model = Sequential()
model.add(Convolution2D(32, 3, 3, border_mode='same',
                        input_shape=X_train.shape[1:]))
model.add(Activation('relu'))
model.add(Convolution2D(32, 3, 3))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Convolution2D(64, 3, 3, border_mode='same'))
model.add(Activation('relu'))
model.add(Convolution2D(64, 3, 3))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(512))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(nb_classes))
model.add(Activation('softmax'))
# let's train the model using SGD + momentum (how original). EDIT: now with weight normalization, so slightly more original ;-)
from weightnorm import SGDWithWeightnorm
from weightnorm import AdamWithWeightnorm
sgd_wn = SGDWithWeightnorm(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
adam = optimizers.Adam()
adam_wn = AdamWithWeightnorm()
# Map the optimizer name chosen on the command line to the actual object.
if optimizer == 'sgd':
  optimizer=sgd
elif optimizer == 'sgd_wn':
  optimizer=sgd_wn
elif optimizer == 'adam':
  optimizer=adam
elif optimizer == 'adam_wn':
  optimizer=adam_wn
else:
  assert False
model.compile(loss='categorical_crossentropy',optimizer=optimizer,
              metrics=['accuracy'])
# data based initialization of parameters
from weightnorm import data_based_init
data_based_init(model, X_train[:100])
if not data_augmentation:
    print('Not using data augmentation.')
    # BUG FIX: capture the returned History object -- it is consumed below
    # when the accuracy curve is dumped; previously `result` was undefined
    # on this branch and the final lines raised NameError.
    result = model.fit(X_train, Y_train,
                       batch_size=batch_size,
                       nb_epoch=nb_epoch,
                       validation_data=(X_test, Y_test),
                       shuffle=True)
else:
    print('Using real-time data augmentation.')
    # this will do preprocessing and realtime data augmentation
    datagen = ImageDataGenerator(
        featurewise_center=False,  # set input mean to 0 over the dataset
        samplewise_center=False,  # set each sample mean to 0
        featurewise_std_normalization=False,  # divide inputs by std of the dataset
        samplewise_std_normalization=False,  # divide each input by its std
        zca_whitening=False,  # apply ZCA whitening
        rotation_range=0,  # randomly rotate images in the range (degrees, 0 to 180)
        width_shift_range=0.1,  # randomly shift images horizontally (fraction of total width)
        height_shift_range=0.1,  # randomly shift images vertically (fraction of total height)
        horizontal_flip=True,  # randomly flip images
        vertical_flip=False)  # randomly flip images
    # compute quantities required for featurewise normalization
    # (std, mean, and principal components if ZCA whitening is applied)
    datagen.fit(X_train)
    # fit the model on the batches generated by datagen.flow()
    result = model.fit_generator(datagen.flow(X_train, Y_train,
                                              batch_size=batch_size),
                                 samples_per_epoch=X_train.shape[0],
                                 nb_epoch=nb_epoch,
                                 validation_data=(X_test, Y_test))
# Save the per-epoch training accuracy history to "<prefix>_losses.csv".
acc_hist = np.asarray(result.history['acc'])
u.dump(acc_hist, "%s_losses.csv"%(prefix,))
| [
"[email protected]"
]
| |
550f6cd08f39601e6c9a1a69dd90d5f9e88c3746 | c73beb04d101ca8d98c9126b1c47b4f19cc35066 | /week1/single_process.py | 36f42dc91f0d31ea01767b8f1165e0fd3deaa8a5 | []
| no_license | fywest/python | a5ecf62e1f8cdf59c936da81b478c371f169aec4 | cd97438679d8e129b3cb75d76226b16e7e7850ac | refs/heads/master | 2022-12-13T06:15:04.021492 | 2019-05-28T19:21:18 | 2019-05-28T19:21:18 | 130,403,136 | 0 | 0 | null | 2022-12-08T05:08:55 | 2018-04-20T19:02:57 | Python | UTF-8 | Python | false | false | 249 | py | import time
def io_task():
    # Simulated blocking I/O: just sleep for one second.
    time.sleep(1)
def main():
    """Run io_task() five times back to back and print the elapsed time."""
    started = time.time()
    for _ in range(5):
        io_task()
    finished = time.time()
    print('it takes: {:.2f}s'.format(finished - started))


if __name__ == '__main__':
    main()
| [
"[email protected]"
]
| |
6b489da90ccb08d993e668c166da4cc7b52338d9 | 747974d83629a8ba28fcb3f4a33a17319002f169 | /tensorflow/python/distribute/client/client.py | 6eabbfa219a84297d95249d99b22c293878f4614 | [
"Apache-2.0"
]
| permissive | ayushmankumar7/tensorflow | 72f60290e4187644b4b254d25ec3033c9fda0c55 | 69a7d3abbbf5be791d1db397fcfea5d8e6efc4b9 | refs/heads/master | 2022-06-19T01:14:46.563563 | 2020-10-19T10:13:23 | 2020-10-19T10:13:23 | 244,302,166 | 2 | 0 | Apache-2.0 | 2022-05-21T12:44:00 | 2020-03-02T06:58:50 | C++ | UTF-8 | Python | false | false | 49,377 | py | # Lint as: python3
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Module for `Client` and relevant cluster-worker related library.
This is currently under development and the API is subject to change.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import enum
import functools
import os
import re
import sys
import threading
import weakref
from six.moves import queue
from tensorflow.python.data.ops import iterator_ops
from tensorflow.python.distribute import input_lib
from tensorflow.python.distribute import parameter_server_strategy_v2
from tensorflow.python.distribute.client import metric_utils
from tensorflow.python.eager import cancellation
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.eager import executor
from tensorflow.python.eager import function as tf_function
from tensorflow.python.framework import errors
from tensorflow.python.framework import func_graph
from tensorflow.python.framework import ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import nest
# Maximum time (in seconds) allowed for a failed worker to come back: 1 hour.
_WORKER_MAXIMUM_RECOVERY_SEC = 3600

# Maximum size for queued closures, "infinite" if set to 0.
# When the maximum queue size is reached, further schedule calls will become
# blocking until some previously queued closures are executed on workers.
# Note that using an "infinite" queue size can take a non-trivial portion of
# memory, and even lead to client OOM. Modify the size to a smaller value for
# client with constrained memory resource (only recommended for advanced users).
# Also used in unit tests to ensure the correctness when the queue is full.
_CLOSURE_QUEUE_MAX_SIZE = 256 * 1024

# Substring identifying an RPC error message that originated from a PS task.
_RPC_ERROR_FROM_PS = "GRPC error information from remote target /job:ps"

# Substring identifying a worker device in error messages; needed because
# InvalidArgumentError (unknown device) will not have the "GRPC error..."
# string above.
_JOB_WORKER_STRING_IDENTIFIER = "/job:worker"
class _RemoteValueStatus(enum.Enum):
"""The status of a `RemoteValue` object.
A `RemoteValue` object can have three states:
1) not ready: no value, no non-retryable error and not aborted;
2) aborted: i.e. the execution of function was aborted because of task
failure, but can be retried;
3) ready: i.e. has value or has non-tryable error;
The initial state of a `RemoteValue` is "not ready". When its corresponding
closure has
been executed at least once, it will become aborted or ready. The state
transitions are:
1) not ready -> 2) aborted:
when the corresponding closure is aborted due to worker failure, and the
worker failure is not immediately handled.
1) not ready -> 3) ready:
when the corresponding closure has been executed successfully.
2) aborted -> 3) ready:
when the `RemoteValue` is rebuilt by rerunning the corresponding closure
and the closure has been executed successfully.
3) ready -> 2) aborted:
when the corresponding closure had been executed successfully but later
the corresponding remote worker failed. This is currently only implemented
for resource `RemoteValue` like iterators.
"""
NOT_READY = "NOT_READY"
ABORTED = "ABORTED"
READY = "READY"
class RemoteValue(object):
  """An asynchronously available value of a remotely executed function.

  Instances are returned by `Client.schedule()`: the underlying concrete
  value only becomes available at a later time, once the function has been
  executed remotely. A `RemoteValue` can also be used as an input to a
  subsequent function scheduled with `Client.schedule()`.

  Note: this class is not thread-safe.
  """

  def __init__(self, closure, type_spec):
    self._closure = closure
    # Type spec of this `RemoteValue`, used when tracing functions that take
    # this `RemoteValue` as an input.
    self._type_spec = func_graph.convert_structure_to_signature(type_spec)
    self._status = _RemoteValueStatus.NOT_READY
    self._status_available_event = threading.Event()
    self._value = None
    self._error = None

  def _set_aborted(self):
    self._status = _RemoteValueStatus.ABORTED
    self._error = None
    self._value = None
    # Wake up any waiting thread and clear the event.
    self._status_available_event.set()

  def _rebuild_on(self, worker):
    self._status_available_event.clear()
    # TODO(yuefengz): we may need to rebuild its inputs as well.
    self._closure.execute_on(worker)

  def _set_value(self, value):
    self._status = _RemoteValueStatus.READY
    self._error = None
    self._value = value
    self._status_available_event.set()

  def _set_error(self, exception):
    self._status = _RemoteValueStatus.READY
    self._error = exception
    self._value = None
    self._status_available_event.set()

  def _get_value(self):
    self._status_available_event.wait()
    return self._value

  def _get_error(self):
    self._status_available_event.wait()
    return self._error

  def _set_type_spec(self, type_spec):
    self._type_spec = func_graph.convert_structure_to_signature(type_spec)

  def fetch(self):
    """Waits until the result is ready and returns it.

    Returns:
      The remote value, as a numpy data type (if scalar) or ndarray.

    Raises:
      tf.errors.CancelledError: If the function that produces this
        `RemoteValue` is aborted or cancelled due to failure, and the user
        should handle and reschedule.
    """
    self._status_available_event.wait()
    if self._status is _RemoteValueStatus.ABORTED:
      raise errors.CancelledError(
          None, None,
          "The corresponding function is aborted. Please reschedule the "
          "function.")
    if self._error is not None:
      raise self._error  # pylint: disable=raising-bad-type
    # Tensors and resource variables are converted to numpy on fetch.
    if isinstance(self._value,
                  (ops.Tensor, resource_variable_ops.BaseResourceVariable)):
      return self._value.numpy()
    return self._value
class InputError(Exception):
  """Error raised when an input `RemoteValue` of a closure is in error.

  Wraps the exception that poisoned the input so callers get a descriptive
  message while the underlying cause remains available via the
  `original_exception` attribute.
  """

  def __init__(self, original_exception):
    # Keep a reference to the root cause for programmatic inspection;
    # previously only the formatted message survived.
    self.original_exception = original_exception
    message = ("Input has an error, the original exception is %r, "
               "error message is %s." %
               (original_exception, str(original_exception)))
    super().__init__(message)
def _maybe_rebuild_remote_values(worker, structure):
  """Attempts to return errors from `RemoteValue`s. Rebuilds them if needed."""
  collected_errors = []

  def _collect_error(val):
    if not isinstance(val, RemoteValue):
      return
    if val._status is _RemoteValueStatus.ABORTED:  # pylint: disable=protected-access
      # Re-run the closure that produces this value, retrying through
      # worker-failure handling if the worker goes down again.
      try:
        with worker.failure_handler.wait_on_failure(
            on_recovery_fn=functools.partial(val._rebuild_on, worker),  # pylint: disable=protected-access
            worker_device_name=worker.device_name):
          val._rebuild_on(worker)  # pylint: disable=protected-access
      except Exception as e:  # pylint: disable=broad-except
        val._set_error(e)  # pylint: disable=protected-access

    error = val._get_error()  # pylint: disable=protected-access
    if error:
      collected_errors.append(error)

  nest.map_structure(_collect_error, structure)
  # Only the first error encountered is surfaced.
  return collected_errors[0] if collected_errors else None
def _maybe_get_remote_value(val):
  """Gets the value of `val` if it is a `RemoteValue`."""
  if not isinstance(val, RemoteValue):
    return val
  error = val._get_error()  # pylint: disable=protected-access
  if error:
    raise AssertionError(
        "RemoteValue doesn't have a value because it has errors.")
  return val._get_value()  # pylint: disable=protected-access
def _maybe_as_type_spec(val):
  """Returns the type spec of `val` if it is a `RemoteValue`, else `val`."""
  if not isinstance(val, RemoteValue):
    return val
  if val._type_spec is None:  # pylint: disable=protected-access
    raise ValueError("Output of a scheduled function that is not "
                     "tf.function cannot be the input of another function.")
  return val._type_spec  # pylint: disable=protected-access
class PerWorkerValues(object):
  """Holds a list of per worker values."""

  def __init__(self, values):
    # Stored as an immutable tuple, one entry per worker.
    self._values = tuple(values)
def _select_worker_slice(worker_id, structured):
  """Selects the worker slice of each of the items in `structured`."""

  def _pick(element):
    # Only `PerWorkerValues` is sliced; everything else passes through.
    if isinstance(element, PerWorkerValues):
      return element._values[worker_id]  # pylint: disable=protected-access
    return element

  return nest.map_structure(_pick, structured)
def _disallow_remote_value_as_input(structured):
  """Raises if any element of `structured` is a RemoteValue."""

  def _check(element):
    if isinstance(element, RemoteValue):
      raise ValueError("RemoteValue cannot be used as an input to scheduled "
                       "function. Please file a feature request if you need "
                       "this feature.")

  nest.map_structure(_check, structured)
class Closure(object):
  """Hold a function to be scheduled and its arguments."""

  def __init__(self, function, cancellation_mgr, args=None, kwargs=None):
    """Initializes the closure.

    Args:
      function: the function to execute remotely; a `def_function.Function`,
        a `ConcreteFunction`, or a plain Python callable.
      cancellation_mgr: the cancellation manager used to wrap traced
        functions so in-flight executions can be cancelled.
      args: positional arguments for `function`. May contain
        `PerWorkerValues`, but not `RemoteValue`s.
      kwargs: keyword arguments for `function`. Same restrictions as `args`.

    Raises:
      ValueError: if `function` is not callable, or if any element of
        `args`/`kwargs` is a `RemoteValue`.
    """
    if not callable(function):
      raise ValueError("Function passed to `Client.schedule` must be a "
                       "callable object.")
    self._args = args or ()
    self._kwargs = kwargs or {}
    # `RemoteValue` inputs are not supported yet; fail fast.
    _disallow_remote_value_as_input(self._args)
    _disallow_remote_value_as_input(self._kwargs)

    if isinstance(function, def_function.Function):
      # Trace with worker 0's slice of the arguments; the resulting concrete
      # function is shared by all workers.
      replica_args = _select_worker_slice(0, self._args)
      replica_kwargs = _select_worker_slice(0, self._kwargs)

      # Note: no need to handle function registration failure since this kind of
      # failure will not raise exceptions as designed in the runtime. The client
      # has to rely on subsequent operations that raise to catch function
      # registration failure.

      # Record the function tracing overhead. Note that we pass in the tracing
      # count of the def_function.Function as a state tracker, so that metrics
      # will only record the time for actual function tracing (i.e., excluding
      # function cache lookups).
      with metric_utils.monitored_timer(
          "function_tracing", state_tracker=function._get_tracing_count):  # pylint: disable=protected-access
        concrete_function = function.get_concrete_function(
            *nest.map_structure(_maybe_as_type_spec, replica_args),
            **nest.map_structure(_maybe_as_type_spec, replica_kwargs))
      self._function = cancellation_mgr.get_cancelable_function(
          concrete_function)
      # One `RemoteValue` per structured output of the concrete function.
      self._output_remote_values = nest.map_structure(
          lambda x: RemoteValue(self, x), concrete_function.structured_outputs)
    elif isinstance(function, tf_function.ConcreteFunction):
      self._function = cancellation_mgr.get_cancelable_function(function)
      self._output_remote_values = nest.map_structure(
          lambda x: RemoteValue(self, x), function.structured_outputs)
    else:
      # Regular python functions.
      self._function = function
      # TODO(yuefengz): maybe we should trace python functions if their inputs
      # are Python primitives, tensors and composite tensors.
      # Plain Python callables get a single untyped `RemoteValue`.
      self._output_remote_values = RemoteValue(self, None)

  def _fetch_output_remote_values(self):
    """Temporary method used to sync the scheduler."""
    # It will do nothing if there is no return value.
    nest.map_structure(lambda x: x.fetch(), self._output_remote_values)  # pylint: disable=protected-access

  def _set_output_remote_values_cancelled(self):
    """Marks every output `RemoteValue` with a `CancelledError`."""
    nest.map_structure(
        lambda x: x._set_error(  # pylint: disable=protected-access,g-long-lambda
            errors.CancelledError(
                None, None, "The corresponding function is "
                "cancelled. Please reschedule the function.")),
        self._output_remote_values)  # pylint: disable=protected-access

  def execute_on(self, worker):
    """Executes the closure on the given worker.

    Args:
      worker: a `Worker` object.
    """
    replica_args = _select_worker_slice(worker.worker_index, self._args)
    replica_kwargs = _select_worker_slice(worker.worker_index, self._kwargs)

    # If an input `RemoteValue` is in error (and cannot be rebuilt), propagate
    # an `InputError` to the outputs instead of executing the function.
    e = (
        _maybe_rebuild_remote_values(worker, replica_args) or
        _maybe_rebuild_remote_values(worker, replica_kwargs))
    if e:
      if not isinstance(e, InputError):
        e = InputError(e)
      for remote_value in nest.flatten(self._output_remote_values):
        remote_value._set_error(e)  # pylint: disable=protected-access
      return

    with ops.device(worker.device_name):
      with context.executor_scope(worker.executor):
        with metric_utils.monitored_timer("closure_execution"):
          output_value = self._function(
              *nest.map_structure(_maybe_get_remote_value, replica_args),
              **nest.map_structure(_maybe_get_remote_value, replica_kwargs))
    # Fan the (flattened) outputs back into their `RemoteValue`s.
    for remote_value, value in zip(
        nest.flatten(self._output_remote_values), nest.flatten(output_value)):
      remote_value._set_value(value)  # pylint: disable=protected-access
class _CoordinatedClosureQueue(object):
  """Manage a queue of closures, inflight count and errors from execution.

  This class is thread-safe.
  """

  def __init__(self):
    # `self._inflight_closure_count` only tracks the number of inflight closures
    # that are "in generation". Once an error occurs, error generation is
    # incremented and all subsequent arriving closures (from inflight) are
    # considered "out of generation".
    self._inflight_closure_count = 0

    self._queue_lock = threading.Lock()

    # Condition indicating that all pending closures (either queued or inflight)
    # have been processed, failed, or cancelled.
    self._stop_waiting_condition = threading.Condition(self._queue_lock)

    # Condition indicating that an item becomes available in queue (not empty).
    self._closures_queued_condition = threading.Condition(self._queue_lock)

    # Condition indicating that a queue slot becomes available (not full).
    # Note that even with "infinite" queue size, there is still a "practical"
    # size limit for the queue depending on host memory capacity, and thus the
    # queue will eventually become full with a lot of enqueued closures.
    self._queue_free_slot_condition = threading.Condition(self._queue_lock)

    # Condition indicating there is no inflight closures.
    self._no_inflight_closure_condition = threading.Condition(self._queue_lock)

    # Use to cancel in-flight closures.
    self._cancellation_mgr = cancellation.CancellationManager()

    if _CLOSURE_QUEUE_MAX_SIZE <= 0:
      logging.warning(
          "In a `Client`, creating an infinite closure queue can "
          "consume a significant amount of memory and even lead to OOM.")
    self._queue = queue.Queue(maxsize=_CLOSURE_QUEUE_MAX_SIZE)
    # The first error seen since the last time an error was raised and cleared.
    self._error = None

    # The following is a lock to make sure when `wait` is called and before it
    # returns no `put` can be executed during this period. It is because `wait`
    # won't know what to do with newly put closures. This lock adds an cutoff
    # for `wait` so that closures put into the queue while waiting would not be
    # taken responsible by this `wait`.
    #
    # We cannot reuse the `self._queue_lock` since when `wait` waits for a
    # condition, the `self._queue_lock` will be released.
    #
    # We don't use a reader/writer's lock on purpose to reduce the complexity
    # of the code.
    self._put_wait_lock = threading.Lock()

  def _cancel_all_closures(self):
    """Clears the queue and sets remaining closures cancelled error.

    This method expects self._queue_lock to be held prior to entry.
    """
    self._cancellation_mgr.start_cancel()
    # Wait until no closure is executing before draining the queue.
    while self._inflight_closure_count > 0:
      self._no_inflight_closure_condition.wait()
    while True:
      try:
        closure = self._queue.get(block=False)
        self._queue_free_slot_condition.notify()
        closure._set_output_remote_values_cancelled()  # pylint: disable=protected-access
      except queue.Empty:
        break
    # The cancellation manager cannot be reused once cancelled. After all
    # closures (queued or inflight) are cleaned up, recreate the cancellation
    # manager with clean state.
    # Note on thread-safety: this is triggered when one of theses client APIs
    # are called: `schedule`, `wait`, and `done`. At the same time, no new
    # closures can be constructed (which reads the _cancellation_mgr to get
    # cancellable functions).
    self._cancellation_mgr = cancellation.CancellationManager()

  def _raise_if_error(self):
    """Raises the error if one exists.

    If an error exists, cancel the closures in queue, raises it, and clear
    the error.

    This method expects self._queue_lock to be held prior to entry.
    """
    if self._error:
      logging.error("Start cancelling closures due to error %r: %s",
                    self._error, self._error)
      self._cancel_all_closures()
      try:
        raise self._error  # pylint: disable=raising-bad-type
      finally:
        self._error = None

  def put(self, closure):
    """Put a closure into the queue for later execution.

    If `mark_failed` was called before `put`, the error from the first
    invocation of `mark_failed` will be raised.

    Args:
      closure: The `Closure` to put into the queue.
    """
    with self._put_wait_lock, self._queue_lock:
      self._queue_free_slot_condition.wait_for(lambda: not self._queue.full())
      self._queue.put(closure, block=False)
      self._raise_if_error()
      self._closures_queued_condition.notify()

  def get(self, timeout=None):
    """Return a closure from the queue to be executed.

    Args:
      timeout: Optional number of seconds to wait for a closure to arrive.

    Returns:
      A `Closure`, or None if the wait timed out.
    """
    with self._queue_lock:
      while self._queue.empty():
        if not self._closures_queued_condition.wait(timeout=timeout):
          return None
      closure = self._queue.get(block=False)
      self._queue_free_slot_condition.notify()
      self._inflight_closure_count += 1
      return closure

  def mark_finished(self):
    """Let the queue know that a closure has been successfully executed."""
    with self._queue_lock:
      if self._inflight_closure_count < 1:
        raise AssertionError("There is no inflight closures to mark_finished.")
      self._inflight_closure_count -= 1
      # `notify_all` (not the deprecated `notifyAll` alias) wakes all waiters.
      if self._inflight_closure_count == 0:
        self._no_inflight_closure_condition.notify_all()
      if self._queue.empty() and self._inflight_closure_count == 0:
        self._stop_waiting_condition.notify_all()

  def put_back(self, closure):
    """Put the closure back into the queue as it was not properly executed."""
    with self._queue_lock:
      if self._inflight_closure_count < 1:
        raise AssertionError("There is no inflight closures to put_back.")
      if self._error:
        # An error has been reported: the closure will not be retried; mark
        # its outputs cancelled instead of re-enqueueing it.
        closure._set_output_remote_values_cancelled()  # pylint: disable=protected-access
      else:
        self._queue_free_slot_condition.wait_for(lambda: not self._queue.full())
        self._queue.put(closure, block=False)
        self._closures_queued_condition.notify()
      self._inflight_closure_count -= 1
      if self._inflight_closure_count == 0:
        self._no_inflight_closure_condition.notify_all()

  def wait(self, timeout=None):
    """Wait for all closures to be finished before returning.

    If `mark_failed` was called before or during `wait`, the error from the
    first invocation of `mark_failed` will be raised.

    Args:
      timeout: A float specifying a timeout for the wait in seconds.

    Returns:
      True unless the given timeout expired, in which case it returns False.
    """
    with self._put_wait_lock, self._queue_lock:
      while (not self._error and
             (not self._queue.empty() or self._inflight_closure_count > 0)):
        if not self._stop_waiting_condition.wait(timeout=timeout):
          return False
      self._raise_if_error()
      return True

  def mark_failed(self, e):
    """Sets error and unblocks any wait() call.

    Args:
      e: The error resulting from a closure execution.
    """
    with self._queue_lock:
      # TODO(yuefengz): maybe record all failure and give users more
      # information?
      if self._inflight_closure_count < 1:
        raise AssertionError("There is no inflight closures to mark_failed.")
      # Only the first error is kept; later ones are dropped.
      if self._error is None:
        self._error = e
      self._inflight_closure_count -= 1
      if self._inflight_closure_count == 0:
        self._no_inflight_closure_condition.notify_all()
      self._stop_waiting_condition.notify_all()

  def done(self):
    """Returns true if the queue is empty and there is no inflight closure.

    If `mark_failed` was called before `done`, the error from the first
    invocation of `mark_failed` will be raised.
    """
    with self._queue_lock:
      self._raise_if_error()
      return self._queue.empty() and self._inflight_closure_count == 0
class WorkerPreemptionHandler(object):
  """Handles worker preemptions."""

  def __init__(self, server_def, cluster):
    """Initializes the handler and starts the background recovery thread.

    Args:
      server_def: the server def used to re-establish the cluster via
        `update_server_def` when failed workers come back.
      cluster: the `Cluster` this handler serves.
    """
    self._server_def = server_def
    self._cluster = cluster
    self._cluster_update_lock = threading.Lock()
    # Set when a worker failure is detected; cleared by `_preemption_handler`
    # once the cluster has been successfully updated.
    self._cluster_due_for_update = threading.Event()
    # Notified (under `_cluster_update_lock`) when workers are back up.
    self._worker_up_cond = threading.Condition(self._cluster_update_lock)
    # Daemon thread that performs the actual cluster recovery.
    threading.Thread(target=self._preemption_handler,
                     name="WorkerPreemptionHandler",
                     daemon=True).start()

  def _validate_preemption_failure(self, e):
    """Validates that the given exception represents worker preemption."""
    # Re-raise anything that does not look like a retryable worker failure so
    # real errors are not silently treated as preemptions.
    if _is_worker_failure(e):
      return
    raise e

  @contextlib.contextmanager
  def wait_on_failure(self,
                      on_failure_fn=None,
                      on_recovery_fn=None,
                      worker_device_name="(unknown)"):
    """Catches worker preemption error and wait until failed workers are back.

    Args:
      on_failure_fn: an optional function to run if preemption happens.
      on_recovery_fn: an optional function to run when a worker is recovered
        from preemption.
      worker_device_name: the device name of the worker instance that is passing
        through the failure.

    Yields:
      None.
    """
    try:
      yield
    except errors.OpError as e:
      # If the error is due to temporary connectivity issues between worker and
      # ps, put back closure, ignore error and do not mark worker as failure.
      if self._cluster._record_and_ignore_transient_ps_failure(e):  # pylint: disable=protected-access
        if on_failure_fn:
          on_failure_fn()
        return

      self._validate_preemption_failure(e)
      logging.error("Worker %s failed with error: %s", worker_device_name, e)
      if on_failure_fn:
        on_failure_fn()

      # Signal the recovery thread, then wait (bounded by
      # _WORKER_MAXIMUM_RECOVERY_SEC) for the cluster update to complete.
      with self._cluster_update_lock:
        self._cluster_due_for_update.set()
        self._worker_up_cond.wait(_WORKER_MAXIMUM_RECOVERY_SEC)
        logging.info("Worker %s has been recovered.", worker_device_name)

      if on_recovery_fn:
        # The recovery function itself may hit another preemption; guard it
        # with a nested `wait_on_failure` so it is retried as well.
        with self.wait_on_failure(
            on_recovery_fn=on_recovery_fn,
            worker_device_name=worker_device_name):
          on_recovery_fn()

  def _preemption_handler(self):
    """A loop that handles preemption.

    This loop waits for signal of worker preemption and upon worker preemption,
    it waits until all workers are back and updates the cluster about the
    restarted workers.
    """
    while True:
      self._cluster_due_for_update.wait()
      with self._cluster_update_lock:
        try:
          # TODO(haoyuzhang): support partial cluster recovery
          logging.info("Cluster now being recovered.")
          context.context().update_server_def(self._server_def)

          # Cluster updated successfully, clear the update signal, and notify
          # all workers that they are recovered from failure.
          logging.info("Cluster successfully recovered.")
          self._worker_up_cond.notify_all()
          self._cluster_due_for_update.clear()
        except Exception as e:  # pylint: disable=broad-except
          self._validate_preemption_failure(e)
          # NOTE: Since the first RPC (GetStatus) of update_server_def is
          # currently blocking by default, error should only happen if:
          # (1) More workers failed while waiting for the previous workers to
          # come back;
          # (2) Worker failed when exchanging subsequent RPCs after the first
          # RPC returns.
          # Consider adding backoff retry logic if we see the error logged
          # too frequently.
          logging.error("Cluster update failed with error: %s. Retrying...", e)
class Worker(object):
  """A worker in a cluster.

  Attributes:
    worker_index: The index of the worker in the cluster.
    device_name: The device string of the worker, e.g. "/job:worker/task:1".
    executor: The worker's executor for remote function execution.
    failure_handler: The failure handler used to handler worker preemption
      failure.
  """

  def __init__(self, worker_index, device_name, cluster):
    """Initializes the worker and starts its closure-processing thread.

    Args:
      worker_index: integer index of this worker within `cluster`.
      device_name: device string of this worker.
      cluster: the `Cluster` that owns this worker.
    """
    self.worker_index = worker_index
    self.device_name = device_name
    self.executor = executor.new_executor(enable_async=False)
    self.failure_handler = cluster.failure_handler
    self._cluster = cluster
    # Weak references to resource `RemoteValue`s created on this worker, so
    # they can be marked aborted when the worker is restarted.
    self._resource_remote_value_refs = []

    # Worker threads need to start after `Worker`'s initialization.
    threading.Thread(target=self._process_queue,
                     name="WorkerClosureProcessingLoop-%d" % self.worker_index,
                     daemon=True).start()

  def _set_resources_aborted(self):
    """Marks all still-live resources created on this worker as aborted."""
    # TODO(yuefengz): maybe we can query whether a tensor is valid or not
    # instead of marking a tensor aborted?
    for weakref_resource in self._resource_remote_value_refs:
      resource = weakref_resource()
      # Skip resources that have been garbage collected.
      if resource:
        resource._set_aborted()  # pylint: disable=protected-access

  def _set_dead(self):
    raise NotImplementedError("_set_dead is not implemented.")

  def _process_closure(self, closure):
    """Runs a closure with preemption handling."""
    try:
      with self._cluster.failure_handler.wait_on_failure(
          on_failure_fn=lambda: self._cluster._closure_queue.put_back(closure),  # pylint: disable=protected-access
          on_recovery_fn=self._set_resources_aborted,
          worker_device_name=self.device_name):
        closure.execute_on(self)
        # TODO(yuefengz): we don't have to materialize results every step.
        with metric_utils.monitored_timer("remote_value_fetch"):
          closure._fetch_output_remote_values()  # pylint: disable=protected-access
        self._cluster._closure_queue.mark_finished()  # pylint: disable=protected-access
    except Exception as e:  # pylint: disable=broad-except
      # Avoid logging the derived cancellation error
      if not isinstance(e, errors.CancelledError):
        logging.error(
            "/job:worker/task:%d encountered the following error when "
            "processing closure: %r:%s", self.worker_index, e, e)
      # Propagate the failure to the closure's outputs and to the queue so
      # `schedule`/`join` can surface it.
      nest.map_structure(
          lambda x: x._set_error(e),  # pylint: disable=protected-access
          closure._output_remote_values)  # pylint: disable=protected-access
      self._cluster._closure_queue.mark_failed(e)  # pylint: disable=protected-access

  def _process_queue(self):
    """Infinite loop pulling closures off the queue and executing them."""
    while True:
      closure = self._cluster._closure_queue.get()  # pylint: disable=protected-access
      self._process_closure(closure)

  def _create_resource(self, function, args=None, kwargs=None):
    """Synchronously creates a per-worker resource represented by a `RemoteValue`.

    Args:
      function: the resource function to be run remotely. It should be a
        `tf.function`, a concrete function or a Python function.
      args: positional arguments to be passed to the function.
      kwargs: keyword arguments to be passed to the function.

    Returns:
      one or several RemoteValue objects depending on the function return
      values.
    """
    # Some notes about the concurrency: currently all the activities related to
    # the same worker such as creating resources, setting resources' aborted
    # status, and executing closures happen on the same thread. This allows us
    # to have simpler logic of concurrency.

    closure = Closure(
        function,
        self._cluster._closure_queue._cancellation_mgr,  # pylint: disable=protected-access
        args=args,
        kwargs=kwargs)
    resource_remote_value = closure._output_remote_values  # pylint: disable=protected-access
    self._register_resource(resource_remote_value)

    # The following is a short-term solution to lazily create resources in
    # parallel.
    # TODO(b/160343165): we should create resources eagerly, i.e. schedule the
    # resource creation function as soon as users call this method.
    resource_remote_value._set_aborted()  # pylint: disable=protected-access
    return resource_remote_value

  def _register_resource(self, resource_remote_value):
    """Tracks `resource_remote_value` weakly so it can be aborted on failure."""
    if not isinstance(resource_remote_value, RemoteValue):
      raise ValueError(
          "Resource being registered is not of type `RemoteValue`.")
    self._resource_remote_value_refs.append(weakref.ref(resource_remote_value))
class Cluster(object):
  """A cluster with workers.

  All function errors are assumed fatal; based on that assumption, the
  error-reporting contract is:

  1) `schedule` and `join` may both raise a non-retryable error, namely the
     first error seen by the client from any previously scheduled function.
  2) When an error is raised, there is no guarantee on how many previously
     scheduled functions have been executed; those that have not been
     executed are thrown away and marked as cancelled.
  3) After an error is raised, the internal error state is cleared, so
     functions can continue to be scheduled and subsequent `schedule`/`join`
     calls will not raise the same error again.

  Attributes:
    failure_handler: The failure handler used to handler worker preemption
      failure.
    workers: a list of `Worker` objects in the cluster.
  """

  def __init__(self, strategy):
    """Initializes the cluster instance."""
    self._num_workers = strategy._num_workers
    self._num_ps = strategy._num_ps

    # Ignore PS failures reported by workers due to transient connection
    # errors. Transient connectivity issues between workers and PS are relayed
    # by the workers to the client, which can lead the client to believe there
    # are PS failures. Transient and permanent PS failures are distinguished
    # by the number of reports from the workers: when this env var is set to a
    # positive integer K, up to K reports of a failed PS task are ignored;
    # only when more than K closure executions fail with errors from the same
    # PS instance is that PS considered failed.
    # TODO(b/164279603): Remove this workaround when the underlying
    # connectivity issue in gRPC server is resolved.
    self._transient_ps_failures_threshold = int(os.environ.get(
        "TF_CLIENT_IGNORE_TRANSIENT_PS_FAILURES", 3))
    self._potential_ps_failures_lock = threading.Lock()
    self._potential_ps_failures_count = [0] * self._num_ps

    self._closure_queue = _CoordinatedClosureQueue()
    self.failure_handler = WorkerPreemptionHandler(context.get_server_def(),
                                                   self)
    self.workers = [
        Worker(task_id, "/job:worker/replica:0/task:%d" % task_id, self)
        for task_id in range(self._num_workers)
    ]

  def _record_and_ignore_transient_ps_failure(self, e):
    """Records potential PS failures and return if failure should be ignored."""
    if self._transient_ps_failures_threshold <= 0 or not _is_ps_failure(e):
      return False

    failed_ps_tasks = _extract_failed_ps_instances(str(e))
    with self._potential_ps_failures_lock:
      for task_id in failed_ps_tasks:
        self._potential_ps_failures_count[task_id] += 1
        # Too many reports against the same PS task: no longer transient.
        if (self._potential_ps_failures_count[task_id] >=
            self._transient_ps_failures_threshold):
          return False
    return True

  def schedule(self, function, args, kwargs):
    """Schedules `function` to be dispatched to a worker for execution.

    Args:
      function: The function to be dispatched to a worker for execution
        asynchronously.
      args: Positional arguments for `fn`.
      kwargs: Keyword arguments for `fn`.

    Returns:
      A structure of `RemoteValue` object.
    """
    closure = Closure(
        function,
        self._closure_queue._cancellation_mgr,  # pylint: disable=protected-access
        args=args,
        kwargs=kwargs)
    self._closure_queue.put(closure)
    return closure._output_remote_values  # pylint: disable=protected-access

  def join(self):
    """Blocks until all scheduled functions are executed."""
    self._closure_queue.wait()

  def done(self):
    """Returns true if all scheduled functions are executed."""
    return self._closure_queue.done()
class ParameterServerFailureError(Exception):
  """An error representing at least one parameter server is interrupted."""
class Client(object):
  """An object to schedule and orchestrate remote function execution.

  A `Client` object represents a program that creates datasets, schedules
  functions for execution, and fetches the results of those functions.

  `Client` is currently not supported in a standalone manner; it should be
  used in conjunction with `ParameterServerStrategyV2`.

  This is under active development; both the API and the implementation are
  subject to change.
  """

  def __init__(self, strategy):
    """Initialization of a `Client` instance.

    This connects the client to remote workers and parameter servers, through
    a `tf.config.experimental_connect_to_cluster` call.

    Args:
      strategy: a `tf.distribute.Strategy` object. Currently, only
        `ParameterServerStrategyV2` is supported.

    Raises:
      ValueError: if the strategy being used is not supported.
    """
    supported = isinstance(
        strategy, parameter_server_strategy_v2.ParameterServerStrategyV2)
    if not supported:
      raise ValueError("Only `ParameterServerStrategyV2` is supported in "
                       "`Client` currently.")
    self._strategy = strategy
    self.cluster = Cluster(strategy)
@property
def strategy(self):
return self._strategy
def schedule(self, fn, args=None, kwargs=None):
"""Schedules `fn` to be dispatched to a worker for execution asynchronously.
When calling `schedule` with a function `fn`, `fn` will be executed on a
remote worker at some later time. The process is asynchronous, meaning
`schedule` returns immediately, possibly without having the result ready
yet. `schedule` returns a structure of `RemoteValue` object, which wraps the
output of the function. Call `fetch()` on `RemoteValue` to wait for the
function execution to finish and retrieve its output from the remote worker.
`schedule` guarantees that `fn` will be executed on a worker at least once;
it could be more than once if its corresponding worker fails in the middle
of its execution. Note that since worker can fail at any point when
executing the function, it is possible that the function is partially
executed, but `Client` guarantees that in those events, the function will
eventually be fully executed, possibly on a different worker that is
available.
If any previously scheduled function raises an error, `schedule` will fail
by raising any one of those errors, and clear the errors collected so far.
There are two implications when this happens: 1) user should call `schedule`
with `fn` again to re-schedule, and 2) some of the previously scheduled
functions may have not been executed. User can call `fetch` on the returned
`RemoteValue` to inspect if they have executed, failed, or cancelled, and
reschedule the corresponding function if needed.
When `schedule` raises, it guarantees that there is no function that is
still being executed.
At this time, there is no support of worker assignment for function
execution, or priority of the workers.
`args` and `kwargs` are the arguments passed into `fn`, when `fn` is
executed on a worker. They can be `PerWorkerValues`, which is a collection
of values, each of which represents a component specific to a worker; in
this case, the argument will be substituted with the corresponding component
on the target worker. Arguments that are not `PerWorkerValues` will be
passed into `fn` as-is. Currently, `RemoteValue` is not supported to be
input `args` or `kwargs`.
Args:
fn: A `tf.function`; the function to be dispatched to a worker for
execution asynchronously.
args: Positional arguments for `fn`.
kwargs: Keyword arguments for `fn`.
Returns:
A structure of `RemoteValue` object.
Raises:
Exception: one of the exceptions caught by the client by any previously
scheduled function since the last time an error was thrown or since
the beginning of the program.
"""
# Slot variables are usually created during function tracing time; thus
# `schedule` needs to be called within the `strategy.scope()`.
with self.strategy.scope(), _translate_parameter_server_failure():
return self.cluster.schedule(fn, args=args, kwargs=kwargs)
def join(self):
"""Blocks until all the scheduled functions have finished execution.
If any previously scheduled function raises an error, `join` will fail by
raising any one of those errors, and clear the errors collected so far. If
this happens, some of the previously scheduled functions may have not been
executed. Users can call `fetch` on the returned `RemoteValue` to inspect if
they have executed, failed, or cancelled. If some that have been cancelled
need to be rescheduled, users should call `schedule` with the function
again.
When `join` returns or raises, it guarantees that there is no function that
is still being executed.
Raises:
Exception: one of the exceptions caught by the client by any previously
scheduled function since the last time an error was thrown or since
the beginning of the program.
"""
with _translate_parameter_server_failure():
self.cluster.join()
def done(self):
"""Returns whether all the scheduled functions have finished execution.
If any previously scheduled function raises an error, `done` will fail by
raising any one of those errors.
When `done` returns True or raises, it guarantees that there is no function
that is still being executed.
"""
return self.cluster.done()
def create_per_worker_dataset(self, dataset_fn):
"""Create dataset on workers by calling `dataset_fn` on worker devices.
This creates the given dataset generated by dataset_fn on the workers
and returns an object that represents the collection of those individual
datasets. Calling `iter` on such collection of dataset returns a
`PerWorkerValues`, which is a collection of iterators, where the iterators
have been placed on respective workers.
Calling `next` on this `PerWorkerValues` of iterators is currently
unsupported; it is meant to be passed as an argument into `Client.schedule`.
When the scheduled function is picked up and being executed by a worker, the
function will receive the individual iterator that corresponds to the
worker, and now `next` can be called on iterator to get the next (batch or
example) of data.
Dataset shuffling and repeating are usually needed in `dataset_fn`; however,
sharding is not recommended: some worker may not be available and those
examples may be skipped and not covered by other workers, if the dataset is
sharded.
Args:
dataset_fn: The dataset function that returns a dataset. This is to be
executed on the workers.
Returns:
An object that represents the collection of those individual
datasets. `iter` is expected to be called on this object that returns
a `PerWorkerValues` of the iterators (that are on the workers).
"""
input_workers = input_lib.InputWorkers([
(w.device_name, [w.device_name]) for w in self.cluster.workers
])
return _PerWorkerDistributedDataset(dataset_fn, input_workers, self)
def _create_per_worker_resources(self, fn, args=None, kwargs=None):
"""Synchronously create resources on the workers.
The resources are represented by `RemoteValue`s.
Args:
fn: The function to be dispatched to all workers for execution
asynchronously.
args: Positional arguments for `fn`.
kwargs: Keyword arguments for `fn`.
Returns:
A `PerWorkerValues` object, which wraps a tuple of `RemoteValue` objects.
"""
results = []
for w in self.cluster.workers:
results.append(w._create_resource(fn, args=args, kwargs=kwargs)) # pylint: disable=protected-access
return PerWorkerValues(tuple(results))
def fetch(self, val):
"""Blocking call to fetch results from `RemoteValue`s.
This returns the execution result of `RemoteValue`s; if not ready,
waiting for it while blocking the caller.
Args:
val: The value to fetch the results from. If this is structure of
`RemoteValue`, `fetch()` will be called on the individual `RemoteValue`
to get the result.
Returns:
If `val` is a `RemoteValue` or a structure of `RemoteValue`s, returns
the fetched `RemoteValue` value immediately if it's available, or blocks
the call until it's available, and returns the fetched `RemoteValue`
values with the same structure. If `val` is other types, return (`val`,).
"""
def _maybe_fetch(val):
if isinstance(val, RemoteValue):
return val.fetch()
else:
return val
# TODO(yuefengz): we should fetch values in a batch.
result = nest.map_structure(_maybe_fetch, val)
if not isinstance(result, tuple):
return (result,)
return result
# pylint: disable=missing-function-docstring
@contextlib.contextmanager
def _translate_parameter_server_failure():
  """Re-raises parameter-server failures as `ParameterServerFailureError`."""
  try:
    yield
  except Exception as e:  # pylint: disable=broad-except
    if not _is_ps_failure(e):
      raise
    raise ParameterServerFailureError(e)
# pylint: disable=missing-function-docstring
@contextlib.contextmanager
def handle_parameter_server_failure():
  """Exits with a configured code on PS failure; otherwise re-raises."""
  try:
    with _translate_parameter_server_failure():
      yield
  except ParameterServerFailureError as e:  # pylint: disable=broad-except
    restart_exit_code = os.environ.get(
        "TF_CLIENT_NON_FATAL_RESTART_EXIT_CODE", None)
    if restart_exit_code is None:
      raise
    sys.exit(int(restart_exit_code))
class _PerWorkerDistributedDataset(object):
  """Represents worker-distributed datasets created from dataset function."""
  def __init__(self, dataset_fn, input_workers, client):
    """Makes an iterable from datasets created by the given function.
    Args:
      dataset_fn: A function that returns a `Dataset`.
      input_workers: an `InputWorkers` object.
      client: a `Client` object, used to create dataset resources.
    """
    def disallow_variable_creation(next_creator, **kwargs):
      raise ValueError("Creating variables in `dataset_fn` is not allowed.")
    # Trace `dataset_fn` into a ConcreteFunction up front. Variable creation
    # inside it is forbidden because the function will be executed remotely
    # on each worker.
    if isinstance(dataset_fn, def_function.Function):
      with variable_scope.variable_creator_scope(disallow_variable_creation):
        dataset_fn = dataset_fn.get_concrete_function()
    elif not isinstance(dataset_fn, tf_function.ConcreteFunction):
      with variable_scope.variable_creator_scope(disallow_variable_creation):
        dataset_fn = def_function.function(dataset_fn).get_concrete_function()
    self._dataset_fn = dataset_fn
    self._input_workers = input_workers
    self._client = client
    self._element_spec = None
  def __iter__(self):
    # Returns a `_PerWorkerDistributedIterator` backed by one iterator
    # resource created on each worker.
    # We would like users to create iterators outside `tf.function`s so that we
    # can track them.
    if (not context.executing_eagerly() or
        ops.get_default_graph().building_function):
      raise RuntimeError(
          "__iter__() is not supported inside of tf.function or in graph mode.")
    def _create_per_worker_iterator():
      dataset = self._dataset_fn()
      return iter(dataset)
    # If _PerWorkerDistributedDataset.__iter__ is called multiple
    # times, for the same object it should only create and register resource
    # once. Using object id to distinguish different iterator resources.
    per_worker_iterator = self._client._create_per_worker_resources(
        _create_per_worker_iterator)
    # Setting type_spec of each RemoteValue so that functions taking these
    # RemoteValues as inputs can be traced.
    # NOTE(review): this reaches into private members (`_values`,
    # `_set_type_spec`) of the client-side wrapper types.
    for iterator_remote_value in per_worker_iterator._values:
      iterator_remote_value._set_type_spec(
          iterator_ops.IteratorSpec(
              self._dataset_fn.structured_outputs.element_spec))
    return _PerWorkerDistributedIterator(per_worker_iterator._values)
  @property
  def element_spec(self):
    """The type specification of an element of this dataset."""
    raise NotImplementedError("Passing `AsyncDistributedDataset` to a "
                              "tf.function is not supported.")
class _PerWorkerDistributedIterator(PerWorkerValues):
  """Per-worker iterator collection used by `Client`."""
  def __next__(self):
    # Delegate to `get_next`, matching the iterator protocol.
    return self.get_next()
  def get_next(self, name=None):
    """Fetches the next input from the iterator for all replicas."""
    message = ("Iterating over an `AsyncDistributedIterator` "
               "is not supported right now.")
    raise NotImplementedError(message)
def _extract_failed_ps_instances(err_msg):
"""Return a set of potentially failing ps instances from error message."""
tasks = re.findall("/job:ps/replica:0/task:[0-9]+", err_msg)
return set(int(t.split(":")[-1]) for t in tasks)
def _is_ps_failure(error):
  """Whether the error is considered a parameter server failure.

  Args:
    error: the exception to classify.

  Returns:
    True if the error carries the PS RPC-error marker, or is an
    `InvalidArgumentError` whose message mentions a "/job:ps" device;
    False otherwise.
  """
  if (_RPC_ERROR_FROM_PS in str(error) or
      (isinstance(error, errors.InvalidArgumentError) and
       "/job:ps" in str(error))):
    return True
  # BUG FIX: previously fell through and returned None implicitly; return an
  # explicit bool for consistency with `_is_worker_failure` (both values are
  # falsy, so callers using `if` are unaffected).
  return False
def _is_worker_failure(error):
  """Whether the error is considered a worker failure.

  Args:
    error: the exception raised by a remote operation.

  Returns:
    True if the error message implicates a "/job:worker" task and matches
    one of the known worker-failure patterns below; False otherwise.
  """
  # Must mention a worker task at all, and must not be an RPC error coming
  # from the parameter server side.
  if _JOB_WORKER_STRING_IDENTIFIER not in str(error):
    return False
  if _RPC_ERROR_FROM_PS in str(error):
    return False
  # TODO(haoyuzhang): Consider using special status code if error from a
  # remote is derived from RPC errors originated from other hosts.
  if isinstance(error, (errors.UnavailableError, errors.AbortedError)):
    return True
  # The following error could happen when the remote task fails and restarts
  # in a very short interval during which no RPCs were exchanged to detect the
  # failure. In that case, gRPC allows channel (which is different from a
  # connection) to be reused for a replaced server listening to same address.
  if isinstance(error, errors.InvalidArgumentError):
    if ("Unable to find a context_id" in str(error) or
        "unknown device" in str(error) or
        "Unable to find the relevant tensor remote_handle" in str(error)):
      # TODO(b/159961667): Fix "Unable to find the relevant tensor
      # remote_handle" part.
      return True
  # TODO(b/162541228): The following 3 types of errors are very rare and only
  # observed in large-scale testing. The types of errors should be reduced.
  # This error could show up when copying function inputs from remote tasks.
  if isinstance(error, errors.InternalError):
    if ("Failed copying input tensor" in str(error) or
        "Unable to find a context_id" in str(error)):
      return True
  # This could happen when the function registration fails. In the observed
  # cases this only happens to the dataset related functions.
  if isinstance(error, errors.NotFoundError):
    if ("is neither a type of a primitive operation nor a name of a function "
        "registered" in str(error)):
      return True
  # This could happen when the iterator is no longer valid on the remote worker
  # "Resource input tensor contains an invalid device"
  if isinstance(error, errors.CancelledError):
    return True
  return False
| [
"[email protected]"
]
| |
a6c26522a2da68de30a9caebeb0d89d3cde0854f | 82db461036ffb2adbf0424a6f0575cd9d24b48a8 | /library/signal/demo_signal.py | c59427f814a4ca24d3efc82da3ddfcf5469043dd | []
| no_license | webclinic017/option_pdt | fdc559f02cc529b54278e90e04170713fe93684f | dd302c6b2661e26dbfcbea0384b99e85ae9584e1 | refs/heads/master | 2023-03-24T10:43:35.998775 | 2021-03-19T14:08:38 | 2021-03-19T14:08:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,106 | py | from base.signal_base import SignalBase
class Signal(SignalBase):
    """Basis signal: future mid-price over spot mid-price, minus one."""

    def __init__(self, spot_instrument, future_instrument):
        super().__init__(spot_instrument, future_instrument)
        self.spot_instrument = spot_instrument
        self.future_instrument = future_instrument
        self.spot_price = float('nan')
        self.future_price = float('nan')
        # Subscribe to 1-second bars for both legs.
        self.subscription_list = [f'{_}|1s' for _ in [spot_instrument, future_instrument]]

    def on_market_data_1s_ready(self, data):
        """Updates the cached leg price and recomputes the basis value.

        Args:
            data: market-data dict with 'exchange', 'symbol' and
                'contract_type' keys plus a 'metadata' dict carrying 'mid'.
        """
        # BUG FIX: the original indexed `data` with a tuple key
        # (data['exchange', 'symbol', 'contract_type']), which raises
        # KeyError; join the three individual fields instead.
        instrument = '|'.join(
            [data['exchange'], data['symbol'], data['contract_type']])
        if instrument == self.spot_instrument:
            self.spot_price = data['metadata']['mid']
        else:
            self.future_price = data['metadata']['mid']
        self.value = self.future_price / self.spot_price - 1

    def from_hist_data(self, mds):
        """Computes the historical basis series from the two order books."""
        spot_orderbook = mds[self.subscription_list[0]]
        fut_orderbook = mds[self.subscription_list[1]]
        df = spot_orderbook[['local_timestamp']].copy()
        df['value'] = fut_orderbook['mid'] / spot_orderbook['mid'] - 1
        return df
| [
"[email protected]"
]
| |
4364d3d0e4e45b61777839885827ff630e4e8965 | 353def93fa77384ee3a5e3de98cfed318c480634 | /.history/week01/hoework01/gettop10frommaoyam01_20200625232557.py | 6eb6ccba3f51b68abc25670be539f7e1cf36cf46 | []
| no_license | ydbB/Python001-class01 | d680abc3ea1ccaeb610751e3488421417d381156 | ad80037ccfc68d39125fa94d2747ab7394ac1be8 | refs/heads/master | 2022-11-25T11:27:45.077139 | 2020-07-19T12:35:12 | 2020-07-19T12:35:12 | 272,783,233 | 0 | 0 | null | 2020-06-16T18:28:15 | 2020-06-16T18:28:15 | null | UTF-8 | Python | false | false | 1,776 | py | # 使用requests,bs4库,爬取猫眼电影top10的电影名称、电影类型、上映时间,并以utf-8的字符集保存到csv文件中
import requests
from bs4 import BeautifulSoup as bs
maoyanUrl = "https://maoyan.com/board/4";
user_agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36'
header = {
'Content-Type': 'text/plain; charset=UTF-8',
'Cookie' : '__mta=251934006.1593072991075.1593075273346.1593075275703.6; uuid_n_v=v1; uuid=2395D3F0B6BC11EA9F28E30FF5FFF73C9A16AE2FA53A448DA75AEAA9D715CB59; _csrf=8557626db9b655cf9050ae7e5b2aab69278c8061c21eca95e1c3cf2130b0b64c; _lxsdk_cuid=172ea8cb247c8-0a73066b1c0a8b-4353760-100200-172ea8cb248c8; _lxsdk=2395D3F0B6BC11EA9F28E30FF5FFF73C9A16AE2FA53A448DA75AEAA9D715CB59; mojo-uuid=c457eacb7c1eb59d3d2f6c1f8d75b9c9; Hm_lvt_703e94591e87be68cc8da0da7cbd0be2=1593072989,1593073002; _lx_utm=utm_source%3Dgoogle%26utm_medium%3Dorganic; mojo-session-id={"id":"dd5ec1780230b10b3b01a18882424620","time":1593078373432}; Hm_lpvt_703e94591e87be68cc8da0da7cbd0be2=1593078727; __mta=251934006.1593072991075.1593075275703.1593078726963.7; mojo-trace-id=3; _lxsdk_s=172eade6a22-b72-c5-308%7C%7C6',
'Origin': 'https://maoyan.com',
'Referer': 'https://maoyan.com/board/4',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36',
}
response = requests.get(maoyanUrl,headers=header)
response.encoding = 'utf-8'
bs_info = bs(response.text,"html.parser")
# print(response.text)
for tags in bs_info.find_all('div',altrs={'id':'container'}):
print(tags)
for tag in tags.find_all('a',):
print(tag.get('href'))
print(tag.get('title'))
| [
"[email protected]"
]
| |
15c9a7f22850db515e4e18371917447d643d5ef9 | 8be39fd741cbbb08439433188ca1bc59ee5cf11f | /data_handler/logistic_parser.py | e6aef4d466d44f479d23b3cf53d7c015e2c3c050 | [
"MIT"
]
| permissive | tpimentelms/fast-conversational-banking | ca1e699261f989f3b535a50782062c000985ba1e | b9d3ddfe3adb78522fafab91c2d20495db063dda | refs/heads/master | 2021-03-16T21:30:51.253223 | 2018-02-28T20:30:15 | 2018-02-28T20:30:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 431 | py | from .data_parser import DataParser
class LogisticDataParser(DataParser):
    """`DataParser` variant that can strip bracket and comma tokens."""

    def __init__(self, max_len, cuda=True, quiet=True, remove_brackets=False):
        super(LogisticDataParser, self).__init__(max_len, cuda=cuda, quiet=quiet)
        # When True, normalize_string drops '(' ')' ',' tokens.
        self.remove_brackets = remove_brackets

    def normalize_string(self, s):
        """Returns `s`, optionally with bracket/comma tokens removed."""
        if not self.remove_brackets:
            return s
        ignored = ('(', ')', ',')
        return [token for token in s if token not in ignored]
| [
"[email protected]"
]
| |
58fc0eca94dc7087f6281521d213dd735c29351d | 30cffb7452220c2ac2961dd2e0f42e3b359a59c0 | /simscale_sdk/models/one_of_solid_simulation_control_pseudo_time_stepping.py | fa0feb65ab655b8e7c0d7bac2d73c1a47ecd1ee9 | [
"MIT"
]
| permissive | vpurcarea/simscale-python-sdk | 0bf892d8824f8d4599caa0f345d5ba28e038f5eb | 6f2d12b2d21142bd854042c0fb402c2c797629e4 | refs/heads/master | 2023-03-14T04:31:06.226337 | 2021-03-03T16:20:01 | 2021-03-03T16:20:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,107 | py | # coding: utf-8
"""
SimScale API
The version of the OpenAPI document: 0.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from simscale_sdk.configuration import Configuration
# Auto-generated OpenAPI model: regenerate from the spec instead of editing.
class OneOfSolidSimulationControlPseudoTimeStepping(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """
    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    openapi_types = {
        'type': 'str',
        'static_timesteps': 'DimensionalTime',
        'simulation_intervals': 'DimensionalTime',
        'timestep_length': 'RestrictedDimensionalFunctionTime'
    }
    attribute_map = {
        'type': 'type',
        'static_timesteps': 'staticTimesteps',
        'simulation_intervals': 'simulationIntervals',
        'timestep_length': 'timestepLength'
    }
    # Maps the JSON `type` discriminator value to the concrete subclass name.
    discriminator_value_class_map = {
        'SINGLE_STEP': 'SingleStepPseudoTimeStepping',
        'STEPPING_LIST_V18': 'SteppingListPseudoTimeStepping'
    }
    def __init__(self, type='STEPPING_LIST_V18', static_timesteps=None, simulation_intervals=None, timestep_length=None, local_vars_configuration=None): # noqa: E501
        """OneOfSolidSimulationControlPseudoTimeStepping - a model defined in OpenAPI""" # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration
        self._type = None
        self._static_timesteps = None
        self._simulation_intervals = None
        self._timestep_length = None
        self.discriminator = 'type'
        self.type = type
        if static_timesteps is not None:
            self.static_timesteps = static_timesteps
        if simulation_intervals is not None:
            self.simulation_intervals = simulation_intervals
        if timestep_length is not None:
            self.timestep_length = timestep_length
    @property
    def type(self):
        """Gets the type of this OneOfSolidSimulationControlPseudoTimeStepping. # noqa: E501
        :return: The type of this OneOfSolidSimulationControlPseudoTimeStepping. # noqa: E501
        :rtype: str
        """
        return self._type
    @type.setter
    def type(self, type):
        """Sets the type of this OneOfSolidSimulationControlPseudoTimeStepping.
        :param type: The type of this OneOfSolidSimulationControlPseudoTimeStepping. # noqa: E501
        :type: str
        """
        if self.local_vars_configuration.client_side_validation and type is None: # noqa: E501
            raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501
        self._type = type
    @property
    def static_timesteps(self):
        """Gets the static_timesteps of this OneOfSolidSimulationControlPseudoTimeStepping. # noqa: E501
        :return: The static_timesteps of this OneOfSolidSimulationControlPseudoTimeStepping. # noqa: E501
        :rtype: DimensionalTime
        """
        return self._static_timesteps
    @static_timesteps.setter
    def static_timesteps(self, static_timesteps):
        """Sets the static_timesteps of this OneOfSolidSimulationControlPseudoTimeStepping.
        :param static_timesteps: The static_timesteps of this OneOfSolidSimulationControlPseudoTimeStepping. # noqa: E501
        :type: DimensionalTime
        """
        self._static_timesteps = static_timesteps
    @property
    def simulation_intervals(self):
        """Gets the simulation_intervals of this OneOfSolidSimulationControlPseudoTimeStepping. # noqa: E501
        :return: The simulation_intervals of this OneOfSolidSimulationControlPseudoTimeStepping. # noqa: E501
        :rtype: DimensionalTime
        """
        return self._simulation_intervals
    @simulation_intervals.setter
    def simulation_intervals(self, simulation_intervals):
        """Sets the simulation_intervals of this OneOfSolidSimulationControlPseudoTimeStepping.
        :param simulation_intervals: The simulation_intervals of this OneOfSolidSimulationControlPseudoTimeStepping. # noqa: E501
        :type: DimensionalTime
        """
        self._simulation_intervals = simulation_intervals
    @property
    def timestep_length(self):
        """Gets the timestep_length of this OneOfSolidSimulationControlPseudoTimeStepping. # noqa: E501
        :return: The timestep_length of this OneOfSolidSimulationControlPseudoTimeStepping. # noqa: E501
        :rtype: RestrictedDimensionalFunctionTime
        """
        return self._timestep_length
    @timestep_length.setter
    def timestep_length(self, timestep_length):
        """Sets the timestep_length of this OneOfSolidSimulationControlPseudoTimeStepping.
        :param timestep_length: The timestep_length of this OneOfSolidSimulationControlPseudoTimeStepping. # noqa: E501
        :type: RestrictedDimensionalFunctionTime
        """
        self._timestep_length = timestep_length
    def get_real_child_model(self, data):
        """Returns the real base class specified by the discriminator"""
        # Looks up the concrete subclass name for the discriminator value
        # found in the payload.
        discriminator_key = self.attribute_map[self.discriminator]
        discriminator_value = data[discriminator_key]
        return self.discriminator_value_class_map.get(discriminator_value)
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, OneOfSolidSimulationControlPseudoTimeStepping):
            return False
        return self.to_dict() == other.to_dict()
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, OneOfSolidSimulationControlPseudoTimeStepping):
            return True
        return self.to_dict() != other.to_dict()
| [
"simscale"
]
| simscale |
b42b8f6ed64adddc457f57db5aa9a253495901dd | 084a13b6524e21914826e842eeefefd09570a970 | /experiments/atari_hard/montezuma_revenge/ppo_cnd_102_2.py | 833f9590c550cdf94ff45ade8163c8d0dfd4556c | [
"MIT"
]
| permissive | michalnand/reinforcement_learning | 28aa0e2c92b6112cf366eff0e0d6a78b9a56e94f | 01635014a37a4c871766b4cdd2caaa26a0c2d8cc | refs/heads/main | 2023-06-01T10:27:36.601631 | 2023-02-12T19:46:01 | 2023-02-12T19:46:01 | 217,841,101 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,154 | py | import time
import torch
import RLAgents
import models.ppo_cnd_102_2.src.model_ppo as ModelPPO
import models.ppo_cnd_102_2.src.model_cnd_target as ModelCNDTarget
import models.ppo_cnd_102_2.src.model_cnd as ModelCND
import models.ppo_cnd_102_2.src.config as Config
#torch.cuda.set_device("cuda:0")
#print("running on ", torch.cuda.get_device_name())
path = "models/ppo_cnd_102_2/"
config = Config.Config()
#config.envs_count = 1
envs = RLAgents.MultiEnvParallelOptimised("MontezumaRevengeNoFrameskip-v4", RLAgents.WrapperMontezuma, config.envs_count)
#envs = RLAgents.MultiEnvSeq("MontezumaRevengeNoFrameskip-v4", RLAgents.WrapperMontezuma, config.envs_count)
#envs = RLAgents.MultiEnvSeq("MontezumaRevengeNoFrameskip-v4", RLAgents.WrapperMontezumaVideo, config.envs_count)
agent = RLAgents.AgentPPOCND(envs, ModelPPO, ModelCNDTarget, ModelCND, config)
max_iterations = 500000
trainig = RLAgents.TrainingIterations(envs, agent, max_iterations, path, 128)
trainig.run()
'''
agent.load(path)
agent.disable_training()
while True:
reward, done, _ = agent.main()
envs.render(0)
#time.sleep(0.01)
''' | [
"[email protected]"
]
| |
a0ee9508274fd3f6962a34e95d4396b1d61ab45f | af54f9cef3290fc4066cbbe2a66e36006c7c5896 | /plates/investrategy/views.py | 73e91cb779c1463282983372709ef98a9f3ed074 | []
| no_license | zizle/workAssistant | 4e504baf3e8cb0942bdf48e61d96bf9f4ef3779c | 875df36725118e8737fd38d3873bf75434296d1b | refs/heads/master | 2022-08-12T16:16:33.972788 | 2020-08-07T01:02:42 | 2020-08-07T01:02:42 | 248,373,880 | 0 | 0 | null | 2022-05-25T03:09:04 | 2020-03-19T00:30:32 | JavaScript | UTF-8 | Python | false | false | 23,396 | py | # _*_ coding:utf-8 _*_
# Author: zizle
import datetime
import hashlib
import os
import time
import pandas as pd
import xlrd
from flask import jsonify, request, current_app, send_from_directory
from flask.views import MethodView
from db import MySQLConnection
from settings import BASE_DIR
from utils.psd_handler import verify_json_web_token
from vlibs import ORGANIZATIONS
class InvestrategyView(MethodView):
    """Listing (GET) and creation (POST) endpoints for investrategy records."""

    def get(self):
        """Returns one user's paginated strategy records within a date range.

        Query params: `utoken` (JWT), `startDate`/`endDate` (YYYY-MM-DD,
        end date inclusive), `page` (1-based), `pagesize`.
        """
        params = request.args
        # Resolve the requesting user from the token.
        token = params.get('utoken')
        user_info = verify_json_web_token(token)
        if not user_info:
            return jsonify("您的登录已过期,请重新登录查看.")
        user_id = user_info['uid']
        try:
            start_date = params.get('startDate')
            end_date = params.get('endDate')
            # Push end_date to 23:59:59 of the same day so the range is inclusive.
            end_date = datetime.datetime.strptime(end_date, '%Y-%m-%d') + datetime.timedelta(days=1)
            end_date = (end_date + datetime.timedelta(seconds=-1)).strftime('%Y-%m-%d %H:%M:%S')
            current_page = int(params.get('page', 1)) - 1  # zero-based for the OFFSET
            page_size = int(params.get('pagesize', 30))
        except Exception:
            return jsonify("参数错误:DATE FORMAT ERROR & INT TYPE REQUIRED!")
        start_id = current_page * page_size
        db_connection = MySQLConnection()
        cursor = db_connection.get_cursor()
        # Inner join user/strategy/variety; fully parameterized.
        inner_join_statement = "SELECT usertb.name,usertb.org_id,invsgytb.id,invsgytb.custom_time,invsgytb.content,invsgytb.variety_id, varietytb.name AS variety, invsgytb.contract,invsgytb.direction,invsgytb.hands,invsgytb.open_position," \
                               "invsgytb.close_position,invsgytb.profit " \
                               "FROM `user_info` AS usertb INNER JOIN `investrategy` AS invsgytb INNER JOIN `variety` as varietytb ON " \
                               "(usertb.id=%s AND usertb.id=invsgytb.author_id) AND invsgytb.variety_id=varietytb.id AND (invsgytb.custom_time BETWEEN %s AND %s) " \
                               "ORDER BY invsgytb.custom_time DESC " \
                               "limit %s,%s;"
        cursor.execute(inner_join_statement, (user_id, start_date, end_date, start_id, page_size))
        result_records = cursor.fetchall()
        # Total row count and profit sum over the same filter.
        count_statement = "SELECT COUNT(invsgytb.id) as total, SUM(invsgytb.profit) AS `sumprofit` " \
                          "FROM `user_info` AS usertb INNER JOIN `investrategy`AS invsgytb " \
                          "ON usertb.id=%s AND usertb.id=invsgytb.author_id AND (invsgytb.custom_time BETWEEN %s AND %s);"
        cursor.execute(count_statement, (user_id, start_date, end_date))
        fetch_one = cursor.fetchone()
        db_connection.close()
        if fetch_one:
            total_count = fetch_one['total']
            sum_porfit = fetch_one['sumprofit']
        else:
            total_count = sum_porfit = 0
        total_page = int((total_count + page_size - 1) / page_size)
        # Shape the response payload.
        response_data = dict()
        response_data['records'] = list()
        for record_item in result_records:
            record_item['custom_time'] = record_item['custom_time'].strftime('%Y-%m-%d')
            record_item['variety'] = (record_item['variety'] if record_item['variety'] else '') + str(record_item['contract'])
            record_item['org_name'] = ORGANIZATIONS.get(int(record_item['org_id']), '未知')
            record_item['open_position'] = float(record_item['open_position'])
            record_item['close_position'] = float(record_item['close_position'])
            record_item['profit'] = float(record_item['profit'])
            response_data['records'].append(record_item)
        response_data['current_page'] = current_page + 1  # back to one-based
        response_data['total_page'] = total_page
        response_data['current_count'] = len(result_records)
        response_data['total_count'] = total_count
        response_data['sum_profit'] = float(sum_porfit) if sum_porfit else 0
        return jsonify(response_data)

    def post(self):
        """Creates one strategy record for the author given in the JSON body."""
        body_data = request.json
        author_id = body_data.get('author_id', None)
        if not author_id:
            return jsonify("参数错误,HAS NO AUTHORID.")
        # Look up the (active, non-admin) author.
        db_connection = MySQLConnection()
        cursor = db_connection.get_cursor()
        select_user_statement = "SELECT `id`,`name`,`is_admin` FROM `user_info` WHERE `id`=%s AND `is_active`=1;"
        cursor.execute(select_user_statement, author_id)
        user_obj = cursor.fetchone()
        if not user_obj:
            db_connection.close()  # BUG FIX: connection leaked on this early return
            return jsonify("系统没有查到您的信息,无法操作."), 400
        if user_obj['is_admin']:
            db_connection.close()  # BUG FIX: connection leaked on this early return
            return jsonify('请不要使用用管理员用户添加记录.')
        # Required fields.
        content = body_data.get('content', False)
        variety = body_data.get('variety', False)
        direction = body_data.get('direction', False)
        if not content or not variety or not direction:
            db_connection.close()  # BUG FIX: connection leaked on this early return
            return jsonify("参数错误,NOT FOUND CONTENT,VARIETY,DIRECTION."), 400
        # Optional fields with defaults.
        custom_time = body_data.get('custom_time')
        custom_time = datetime.datetime.strptime(custom_time, '%Y-%m-%d') if custom_time else datetime.datetime.now()
        author_id = user_obj['id']
        contract = body_data.get('contract', '')
        hands = body_data.get('hands', 0)
        open_position = body_data.get('open_position', 0)
        close_position = body_data.get('close_position', 0)
        profit = body_data.get('profit')
        note = body_data.get('work_note', '')  # currently unused by the INSERT
        save_invest_statement = "INSERT INTO `investrategy`" \
                                "(`custom_time`,`author_id`,`content`,`variety_id`,`contract`,`direction`,`hands`," \
                                "`open_position`,`close_position`,`profit`)" \
                                "VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
        try:
            # Coerce numeric fields; blank values fall back to 0.
            variety_id = int(variety)
            hands = int(hands) if hands else 0
            open_position = int(open_position) if open_position else 0
            close_position = int(close_position) if close_position else 0
            profit = float(profit) if profit else 0
            cursor.execute(save_invest_statement,
                           (custom_time, author_id, content, variety_id, contract, direction, hands,
                            open_position, close_position, profit)
                           )
            db_connection.commit()
        except Exception as e:
            db_connection.rollback()
            db_connection.close()
            current_app.logger.error("写入投顾策略记录错误:" + str(e))
            return jsonify("参数错误!无法保存。"), 400
        else:
            db_connection.close()
            return jsonify("保存成功!"), 201
class FileHandlerInvestrategyView(MethodView):
    """Bulk-imports investrategy records from an uploaded Excel workbook."""

    def post(self):
        """Parses the '投顾策略记录' sheet and inserts rows newer than the
        user's latest stored record."""
        # Current user and uploaded file from the multipart form.
        user_id = request.form.get('uid')
        file = request.files.get('file', None)
        if not file or not user_id:
            return jsonify('参数错误,NOT FILE OR UID'), 400
        # Look up the user.
        db_connection = MySQLConnection()
        cursor = db_connection.get_cursor()
        select_user_statement = "SELECT `id`,`name`,`is_admin` FROM `user_info` WHERE `id`=%s;"
        cursor.execute(select_user_statement, user_id)
        user_obj = cursor.fetchone()
        # Admins are not allowed to add records.
        # NOTE(review): if the user does not exist, `user_obj` is None and the
        # subscript below raises TypeError; also this early return leaves
        # `db_connection` open — confirm and fix.
        if user_obj['is_admin']:
            return jsonify('请不要使用用管理员用户添加记录.')
        # Build a variety-name -> id lookup.
        # variety_dict = {value: key for key, value in VARIETY_LIB.items()}
        query_variety = "SELECT `id`,`name` FROM `variety` WHERE `parent_id` IS NOT NULL;"
        cursor.execute(query_variety)
        variety_all = cursor.fetchall()
        variety_dict = {variety_item["name"]:variety_item['id'] for variety_item in variety_all}
        db_connection.close()
        # Read the uploaded workbook fully into memory.
        file_contents = file.read()
        file_contents = xlrd.open_workbook(file_contents=file_contents)
        # Open the sheet named “投顾策略记录”.
        table_data = file_contents.sheet_by_name('投顾策略记录')
        # Verify the sheet loaded completely.
        status = file_contents.sheet_loaded('投顾策略记录')
        if not status:
            return jsonify('文件数据导入失败'), 400
        # Header row must match the expected template exactly.
        first_row = table_data.row_values(0)
        if first_row != ["日期", "策略内容", "品种", "合约", "方向(多头,空头,套利)","10万为限对应手数",
                         "策略开仓","策略平仓","策略结果(+/-/0)"]:
            return jsonify("表格格式有误,请修正."), 400
        # Collect rows between the 'start' and 'end' marker rows.
        nrows = table_data.nrows
        # ncols = table_data.ncols
        ready_to_save = list()
        start_row_in = False
        message = "表格列数据类型有误,请检查后上传."
        try:
            for row in range(nrows):
                row_content = table_data.row_values(row)
                if str(row_content[0]).strip() == "start":
                    start_row_in = True
                    continue
                if str(row_content[0]).strip() == "end":
                    start_row_in = False
                    continue
                if start_row_in:
                    record_row = list()
                    try:
                        # Excel stores dates as serial numbers; convert.
                        record_row.append(xlrd.xldate_as_datetime(row_content[0], 0))
                    except Exception as e:
                        message = "第一列【日期】请使用日期格式上传."
                        raise ValueError(message)
                    record_row.append(user_id)
                    record_row.append(str(row_content[1]))
                    try:
                        record_row.append(int(variety_dict.get(str(row_content[2])))) # 品种
                    except Exception as e:
                        message = "系统中没有【" + str(row_content[2]) + "】品种信息."
                        raise ValueError(message)
                    try:
                        # Contract cells may be numeric; normalize to int first.
                        contract = int(row_content[3])
                    except Exception:
                        contract = row_content[3]
                    record_row.append(str(contract))
                    record_row.append(str(row_content[4]))
                    record_row.append(int(row_content[5]) if row_content[5] else 0)
                    record_row.append(float(row_content[6]) if row_content[6] else 0)
                    record_row.append(float(row_content[7]) if row_content[7] else 0)
                    record_row.append(float(row_content[8]) if row_content[8] else 0)
                    ready_to_save.append(record_row)
            if len(ready_to_save) == 0:
                raise ValueError('没有读取到数据.')
            new_df = pd.DataFrame(ready_to_save)
            new_df.columns = ['custom_time', 'author_id', 'content', 'variety_id','contract','direction','hands','open_position','close_position','profit']
            save_list = self.drop_old_date_record(new_df, user_id)
            if len(save_list) > 0:
                message = "数据保存成功!"
                insert_statement = "INSERT INTO `investrategy`" \
                                   "(`custom_time`,`author_id`,`content`,`variety_id`,`contract`,`direction`,`hands`," \
                                   "`open_position`,`close_position`,`profit`)" \
                                   "VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);"
                db_connection = MySQLConnection()
                cursor = db_connection.get_cursor()
                # NOTE(review): inserts `ready_to_save` (all parsed rows), not
                # the filtered `save_list` computed above — looks like the
                # date-based dedup is effectively bypassed; confirm intent.
                cursor.executemany(insert_statement, ready_to_save)
                db_connection.commit()
                db_connection.close()
            else:
                message = "数据上传成功,没有发现新数据!"
        except Exception as e:
            return jsonify(str(e)), 400
        else:
            return jsonify(message)

    @staticmethod
    def drop_old_date_record(new_df, user_id):
        """Drops rows dated at or before the user's latest stored record.

        Returns the surviving rows as a list of lists (custom_time formatted
        back to 'YYYY-MM-DD').
        """
        db_connection = MySQLConnection()
        cursor = db_connection.get_cursor()
        query_statement = "SELECT `id`, MAX(`custom_time`) AS `max_date` FROM `investrategy` WHERE `author_id`=%s;"
        cursor.execute(query_statement, (user_id,))
        fetch_one = cursor.fetchone()
        db_connection.close()
        if new_df.empty:
            return []
        new_df['custom_time'] = pd.to_datetime(new_df['custom_time'], format='%Y-%m-%d')
        if fetch_one:  # keep only rows on/after the stored max date
            # NOTE(review): the comparison is `max_date <= custom_time`, so
            # rows dated exactly on max_date are KEPT (possible duplicates),
            # despite the docstring above — confirm which is intended.
            max_date = pd.to_datetime(fetch_one['max_date'], format='%Y-%m-%d')
            save_df = new_df[max_date <= new_df['custom_time']].copy()  # slice the new rows
        else:
            save_df = new_df.copy()
        save_df['custom_time'] = save_df['custom_time'].apply(lambda x: x.strftime('%Y-%m-%d'))
        return save_df.values.tolist()

    @staticmethod
    def drop_duplicates(new_df, user_id):  # abandoned: this approach proved unreliable (unknown errors)
        # Query the existing rows for this author.
        db_connection = MySQLConnection()
        cursor = db_connection.get_cursor()
        query_statement = "SELECT `custom_time`,`author_id`,`content`,`variety_id`,`contract`,`direction`,`hands`, " \
                          "`open_position`,`close_position`,`profit` " \
                          "FROM `investrategy` WHERE `author_id`=%s;"
        cursor.execute(query_statement, user_id)
        old_df = pd.DataFrame(cursor.fetchall())
        db_connection.close()
        if old_df.empty:
            new_df['custom_time'] = pd.to_datetime(new_df['custom_time'], format='%Y-%m-%d')
            new_df['custom_time'] = new_df['custom_time'].apply(lambda x: x.strftime('%Y-%m-%d'))
            save_df = new_df.drop_duplicates(subset=['custom_time', 'content'], keep='last', inplace=False)
        else:
            old_df['custom_time'] = pd.to_datetime(old_df['custom_time'], format='%Y-%m-%d')
            old_df['custom_time'] = old_df['custom_time'].apply(lambda x: x.strftime('%Y-%m-%d'))
            new_df['custom_time'] = pd.to_datetime(new_df['custom_time'], format='%Y-%m-%d')
            new_df['custom_time'] = new_df['custom_time'].apply(lambda x: x.strftime('%Y-%m-%d'))
            # Sandwich-concat so rows present in old_df cancel out entirely.
            concat_df = pd.concat([old_df, new_df, old_df])
            save_df = concat_df.drop_duplicates(subset=['custom_time', 'content'], keep=False, inplace=False)
        if save_df.empty:
            return []
        else:
            save_df = save_df.copy()
            # save_df['custom_time'] = save_df['custom_time'].apply(lambda x: x.strftime('%Y-%m-%d'))
            return save_df.values.tolist()
class RetrieveInvestrategyView(MethodView):
    """REST endpoints for a single investment-strategy record identified by ``rid``.

    GET returns the record joined with its author's user info; PUT updates it
    (scoped to the requesting user); DELETE removes it (scoped likewise).
    """

    def get(self, rid):
        """Return one strategy record as JSON, with DB types normalized for serialization."""
        db_connection = MySQLConnection()
        cursor = db_connection.get_cursor()
        select_statement = "SELECT usertb.name,usertb.org_id,invsgytb.id,invsgytb.custom_time,invsgytb.content,invsgytb.variety_id,invsgytb.contract,invsgytb.direction,invsgytb.hands,invsgytb.open_position," \
                           "invsgytb.close_position,invsgytb.profit " \
                           "FROM `user_info` AS usertb INNER JOIN `investrategy` AS invsgytb ON " \
                           "invsgytb.id=%s AND usertb.id=invsgytb.author_id;"
        cursor.execute(select_statement, (rid,))
        record_item = cursor.fetchone()
        if record_item is None:
            # Previously this crashed with a TypeError on a missing id; answer 400 instead.
            db_connection.close()
            return jsonify("记录不存在!"), 400
        # Convert DATE/DECIMAL columns to JSON-friendly str/float/int values.
        record_item['custom_time'] = record_item['custom_time'].strftime('%Y-%m-%d')
        record_item['org_name'] = ORGANIZATIONS.get(int(record_item['org_id']), '未知')
        record_item['open_position'] = float(record_item['open_position'])
        record_item['close_position'] = float(record_item['close_position'])
        record_item['profit'] = int(record_item['profit'])
        db_connection.close()
        return jsonify(record_item)

    def put(self, rid):
        """Update record ``rid`` for the authenticated user; 201 on success, 400 on bad input."""
        body_json = request.json
        record_info = body_json.get('record_data')
        utoken = body_json.get('utoken')
        user_info = verify_json_web_token(utoken)
        user_id = user_info['uid']
        # Fields that must be present and non-empty.
        content = record_info.get('content', False)
        variety_id = record_info.get('variety_id', False)
        direction = record_info.get('direction', False)
        if not content or not variety_id or not direction:
            return jsonify("参数错误,NOT FOUND CONTENT,VARIETY,DIRECTION."), 400
        # Optional fields with defaults.
        custom_time = record_info.get('custom_time')
        contract = record_info.get('contract', '')
        hands = record_info.get('hands', 0)
        open_position = record_info.get('open_position', 0)
        close_position = record_info.get('close_position', 0)
        profit = record_info.get('profit')
        # Persist the changes (author_id in the WHERE clause keeps users
        # from editing each other's records).
        save_invest_statement = "UPDATE `investrategy` SET " \
                                "`custom_time`=%s,`content`=%s,`variety_id`=%s,`contract`=%s,`direction`=%s,`hands`=%s," \
                                "`open_position`=%s,`close_position`=%s,`profit`=%s " \
                                "WHERE `id`=%s AND `author_id`=%s;"
        db_connection = MySQLConnection()
        cursor = db_connection.get_cursor()
        try:
            # Coerce incoming strings to their DB types; fall back to now()/0.
            custom_time = datetime.datetime.strptime(custom_time, '%Y-%m-%d') if custom_time else datetime.datetime.now()
            variety_id = int(variety_id)
            hands = int(hands) if hands else 0
            open_position = float(open_position) if open_position else 0
            close_position = float(close_position) if close_position else 0
            profit = float(profit) if profit else 0
            cursor.execute(save_invest_statement,
                           (custom_time, content, variety_id, contract, direction, hands,
                            open_position, close_position, profit, rid, user_id)
                           )
            db_connection.commit()
        except Exception as e:
            db_connection.rollback()
            db_connection.close()
            current_app.logger.error("更新投顾策略记录错误:" + str(e))
            return jsonify("参数错误!无法修改。"), 400
        else:
            db_connection.close()
            return jsonify("修改成功!"), 201

    def delete(self, rid):
        """Delete record ``rid`` if it belongs to the authenticated user."""
        utoken = request.args.get('utoken')
        user_info = verify_json_web_token(utoken)
        db_connection = MySQLConnection()
        try:
            user_id = int(user_info['uid'])
            # Parameterized query (was %d string interpolation, which is
            # injection-prone and inconsistent with the other statements here).
            delete_statement = "DELETE FROM `investrategy` " \
                               "WHERE `id`=%s AND `author_id`=%s;"
            cursor = db_connection.get_cursor()
            lines_changed = cursor.execute(delete_statement, (rid, user_id))
            db_connection.commit()
            if lines_changed <= 0:
                raise ValueError("没有记录被删除>…<")
        except Exception as e:
            db_connection.rollback()
            db_connection.close()
            return jsonify(str(e))
        else:
            db_connection.close()
            return jsonify("删除成功^.^!")
class InvestrategyExportView(MethodView):
    """Export the authenticated user's strategy records in a date range to an .xlsx file."""

    def get(self):
        """Build and send an Excel workbook of the user's records between startDate and endDate."""
        params = request.args
        utoken = params.get('utoken')
        user_info = verify_json_web_token(utoken)
        if not user_info:
            return jsonify("登录已过期!刷新网页重新登录."), 400
        try:
            start_date = params.get('startDate')
            end_date = params.get('endDate')
            # Make the end date inclusive: advance one day, then step back one second.
            end_date = datetime.datetime.strptime(end_date, '%Y-%m-%d') + datetime.timedelta(days=1)
            end_date = (end_date + datetime.timedelta(seconds=-1)).strftime('%Y-%m-%d %H:%M:%S')
        except Exception:
            return jsonify("参数错误:DATE FORMAT ERROR!")
        query_statement = "SELECT usertb.name,usertb.org_id,invsgytb.custom_time,invsgytb.content,invsgytb.variety_id,invsgytb.contract,invsgytb.direction,invsgytb.hands," \
                          "invsgytb.open_position,invsgytb.close_position,invsgytb.profit,invsgytb.note " \
                          "FROM `user_info` AS usertb INNER JOIN `investrategy` AS invsgytb ON " \
                          "usertb.id=%s AND usertb.id=invsgytb.author_id AND (invsgytb.custom_time BETWEEN %s AND %s) " \
                          "ORDER BY invsgytb.custom_time ASC;"
        db_connection = MySQLConnection()
        cursor = db_connection.get_cursor()
        # Build an id -> name map of leaf varieties for display.
        query_variety = "SELECT `id`,`name` FROM `variety` WHERE `parent_id` IS NOT NULL;"
        cursor.execute(query_variety)
        variety_all = cursor.fetchall()
        variety_dict = {variety_item["id"]: variety_item['name'] for variety_item in variety_all}
        cursor.execute(query_statement, (user_info['uid'], start_date, end_date))
        records_all = cursor.fetchall()
        db_connection.close()
        # Derive a unique file name from the current time and the user name.
        t = "%.4f" % time.time()
        md5_hash = hashlib.md5()
        md5_hash.update(t.encode('utf-8'))
        md5_hash.update(user_info['name'].encode('utf-8'))
        md5_str = md5_hash.hexdigest()
        file_folder = os.path.join(BASE_DIR, 'fileStore/exports/')
        if not os.path.exists(file_folder):
            os.makedirs(file_folder)
        file_path = os.path.join(file_folder, '{}.xlsx'.format(md5_str))
        file_records = list()
        for record_item in records_all:
            row_content = list()
            row_content.append(record_item['custom_time'].strftime("%Y-%m-%d"))
            row_content.append(ORGANIZATIONS.get(record_item['org_id'], '未知'))
            row_content.append(record_item['name'])
            row_content.append(record_item['content'])
            row_content.append(variety_dict.get(record_item['variety_id'], ''))
            row_content.append(record_item['contract'])
            row_content.append(record_item['direction'])
            row_content.append(record_item['hands'])
            row_content.append(float(record_item['open_position']))
            row_content.append(float(record_item['close_position']))
            row_content.append(float(record_item['profit']))
            row_content.append(record_item['note'])
            file_records.append(row_content)
        # Pass `columns=` to the constructor: assigning 12 names to the zero-column
        # frame produced by an empty result set raised ValueError before.
        export_df = pd.DataFrame(
            file_records,
            columns=['日期', '部门小组', '姓名', '策略内容', '品种', '合约', '方向', '手数', '策略开仓', '策略平仓', '策略结果', '备注']
        )
        export_df.to_excel(
            excel_writer=file_path,
            index=False,
            sheet_name='投顾策略记录'
        )
        return send_from_directory(directory=file_folder, filename='{}.xlsx'.format(md5_str),
                                   as_attachment=True, attachment_filename='{}.xlsx'.format(md5_str)
                                   )
| [
"[email protected]"
]
| |
452809658c1a86e94a421c66c5acc6bee7d001ae | d86c52f4098fd9c1a102c2d3f5630556e0610fa2 | /fitle/myenv/Lib/site-packages/django/db/migrations/operations/base.py | 0cd0ff38a0d70b87f180e2140d1dff4c187e2c1a | []
| no_license | makadama/bitbucket | 24f05c4946168ed15d4f56bfdc45fd6c0774e0f2 | cabfd551b92fe1af6d9d14ab9eb3d9974b64aa79 | refs/heads/master | 2023-06-19T19:04:03.894599 | 2021-07-15T12:10:39 | 2021-07-15T12:10:39 | 385,203,791 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:fabe9ce10ec2202053f79ef81fbddd479a6ff7812ab28cd2e4fd9a357f15c939
size 5016
| [
"[email protected]"
]
| |
076b7d58c5592f6388a9ee56ad93f451e1d154e1 | cdfb77f5fb782ed8c731c6789ba154fefb34b830 | /Seção 4/tipo_booleano.py | 537bf355e9805854ef01d78c4a063d4455a7b447 | []
| no_license | Yuri-Santiago/curso-udemy-python | 7dc83e0ade45e8d959ce12b81098a13617e0a7ca | 2af0ddad01b08f6afd0bfe35648212d4ee49f52b | refs/heads/master | 2023-04-21T07:11:35.594753 | 2021-05-18T05:14:56 | 2021-05-18T05:14:56 | 350,412,085 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 787 | py | """
Tio Booleano
Álgebra Booleana, criada por George Boole
2 constantes: Verdadeiro ou Falso
True -> Verdadeiro
False -> Falso
A primeira letra precisa ser maiúscula
"""
falso = False
verdadeiro = True
print(falso)
print(verdadeiro)
print(type(verdadeiro))
# Operações
# Negação(not) : troca do valor atual para o seu contrário
print(not falso)
print(not verdadeiro)
# Ou(or) : é uma operação binária onde um ou o outro precisa ser verdadeiro para retornar verdadeiro
print(falso or verdadeiro)
# E(and) : é uma operação binária onde ambos os valores devem ser verdadeiro para retornar verdadeiro
print(falso and verdadeiro)
print(not falso and verdadeiro)
# Você pode comparar valores e variáveis para retornar um valor booleano
print(5 < 6)
print(3 > 4)
| [
"[email protected]"
]
| |
83d9dc1d6b51df08606a0f15b32911dbcf604c9b | e19a9be187126cdf36761fed59c8cb287f0e4126 | /coop_local/migrations/0010_auto__del_categoryess.py | c769ba1cfd6e567df62b47360214452550a092a7 | []
| no_license | gutard/coop-mes | d7ad3a756edfd8a709270b66d3eafc0c2a59e266 | b204d4a830c20dd22cfebba3ac697561ecc63634 | refs/heads/master | 2020-12-28T23:50:48.948145 | 2013-09-23T16:13:43 | 2013-09-23T16:14:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 50,289 | py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    # Apply the migration: drop the table backing the removed CategoryESS model.
    # Deleting model 'CategoryESS'
    db.delete_table('coop_local_categoryess')
def backwards(self, orm):
    # Reverse the migration: recreate the CategoryESS table with its original
    # columns (label, description, auto-generated slug, integer primary key).
    # Adding model 'CategoryESS'
    db.create_table('coop_local_categoryess', (
        ('label', self.gf('django.db.models.fields.CharField')(max_length=100, blank=True)),
        ('description', self.gf('django.db.models.fields.TextField')(blank=True)),
        ('slug', self.gf('django_extensions.db.fields.AutoSlugField')(populate_from='label', allow_duplicates=False, max_length=50, separator=u'-', blank=True, overwrite=True)),
        ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
    ))
    db.send_create_signal('coop_local', ['CategoryESS'])
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'coop_cms.articlecategory': {
'Meta': {'object_name': 'ArticleCategory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'ordering': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '100', 'separator': "u'-'", 'blank': 'True', 'unique': 'True', 'populate_from': "'name'", 'overwrite': 'False'})
},
'coop_cms.navtype': {
'Meta': {'object_name': 'NavType'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'unique': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label_rule': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'search_field': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'})
},
'coop_geo.area': {
'Meta': {'object_name': 'Area'},
'area_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coop_geo.AreaType']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'default_location': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'associated_area'", 'null': 'True', 'to': "orm['coop_geo.Location']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'polygon': ('django.contrib.gis.db.models.fields.MultiPolygonField', [], {'null': 'True', 'blank': 'True'}),
'reference': ('django.db.models.fields.CharField', [], {'max_length': '150', 'null': 'True', 'blank': 'True'}),
'related_areas': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['coop_geo.Area']", 'through': "orm['coop_geo.AreaRelations']", 'symmetrical': 'False'}),
'update_auto': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'uri': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'uri_mode': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'cuus73RXRUQ59mVZbAuoc8'", 'max_length': '50', 'unique': 'True', 'null': 'True'})
},
'coop_geo.arealink': {
'Meta': {'object_name': 'AreaLink'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coop_geo.Area']", 'null': 'True', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'coop_geo.arearelations': {
'Meta': {'object_name': 'AreaRelations'},
'child': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parent_rels'", 'to': "orm['coop_geo.Area']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'child_rels'", 'to': "orm['coop_geo.Area']"})
},
'coop_geo.areatype': {
'Meta': {'object_name': 'AreaType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'txt_idx': ('django.db.models.fields.CharField', [], {'max_length': "'50'"})
},
'coop_geo.located': {
'Meta': {'object_name': 'Located'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coop_geo.LocationCategory']", 'null': 'True', 'blank': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coop_geo.Location']", 'null': 'True', 'blank': 'True'}),
'main_location': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'coop_geo.location': {
'Meta': {'object_name': 'Location'},
'adr1': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'adr2': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'area': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coop_geo.Area']", 'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'geohash': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_ref_center': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '150', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'point': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'blank': 'True'}),
'uri': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'uri_mode': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'EX7SCfB4bSEnfqTVoftmSe'", 'max_length': '50', 'unique': 'True', 'null': 'True'}),
'x_code': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True', 'blank': 'True'})
},
'coop_geo.locationcategory': {
'Meta': {'ordering': "['label']", 'object_name': 'LocationCategory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'label'", 'overwrite': 'False'})
},
'coop_local.activitynomenclature': {
'Meta': {'ordering': "['tree_id', 'lft']", 'unique_together': "(('label', 'parent'),)", 'object_name': 'ActivityNomenclature'},
'avise': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coop_local.ActivityNomenclatureAvise']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['coop_local.ActivityNomenclature']"}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '306'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'coop_local.activitynomenclatureavise': {
'Meta': {'ordering': "['label']", 'object_name': 'ActivityNomenclatureAvise'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
'coop_local.agreementiae': {
'Meta': {'object_name': 'AgreementIAE'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
'coop_local.article': {
'Meta': {'object_name': 'Article'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'coop_local_article_rel'", 'null': 'True', 'blank': 'True', 'to': "orm['coop_cms.ArticleCategory']"}),
'content': ('django.db.models.fields.TextField', [], {'default': "u'Page content'", 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'headline': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_newsletter': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'isSection': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_homepage': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'logo': ('django.db.models.fields.files.ImageField', [], {'default': "''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'articles'", 'null': 'True', 'to': "orm['coop_local.Organization']"}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'articles'", 'null': 'True', 'to': "orm['coop_local.Person']"}),
'publication': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'remote_organization_label': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'remote_organization_uri': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'remote_person_label': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'remote_person_uri': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '100', 'separator': "u'-'", 'blank': 'True', 'unique': 'True', 'populate_from': "'title'", 'overwrite': 'True'}),
'summary': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'temp_logo': ('django.db.models.fields.files.ImageField', [], {'default': "''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'template': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'title': ('django.db.models.fields.TextField', [], {'default': "u'Page title'", 'blank': 'True'}),
'uri': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'uri_mode': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'KADiR4Yc7rtw2ZJjxvvKun'", 'max_length': '50', 'unique': 'True', 'null': 'True'})
},
'coop_local.categoryiae': {
'Meta': {'object_name': 'CategoryIAE'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'label'", 'overwrite': 'True'})
},
'coop_local.client': {
'Meta': {'ordering': "['title']", 'object_name': 'Client', '_ormbases': ['coop_local.Organization']},
'organization_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['coop_local.Organization']", 'unique': 'True', 'primary_key': 'True'})
},
'coop_local.clienttarget': {
'Meta': {'object_name': 'ClientTarget'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
'coop_local.contact': {
'Meta': {'ordering': "['category']", 'object_name': 'Contact'},
'category': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'content': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'display': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'uri': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'uri_mode': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'ctrAYrGRzSiZpQTdCjcdfB'", 'max_length': '50', 'unique': 'True', 'null': 'True'})
},
'coop_local.document': {
'Meta': {'object_name': 'Document'},
'attachment': ('django.db.models.fields.files.FileField', [], {'max_length': '255'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coop_local.Organization']"}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coop_local.DocumentType']", 'null': 'True', 'blank': 'True'})
},
'coop_local.documenttype': {
'Meta': {'object_name': 'DocumentType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
'coop_local.engagement': {
'Meta': {'object_name': 'Engagement'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'engagement_display': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'org_admin': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coop_local.Organization']"}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'engagements'", 'to': "orm['coop_local.Person']"}),
'remote_role_label': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'remote_role_uri': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coop_local.Role']", 'null': 'True', 'blank': 'True'}),
'role_detail': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'uri': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'uri_mode': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'bB3zURYMsAhRzAKJu7TXYb'", 'max_length': '50', 'unique': 'True', 'null': 'True'})
},
'coop_local.exchange': {
'Meta': {'ordering': "('-modified',)", 'object_name': 'Exchange'},
'area': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'exchange_area'", 'null': 'True', 'to': "orm['coop_geo.Area']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'etype': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'eway': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'expiration': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'exchange_location'", 'null': 'True', 'to': "orm['coop_geo.Location']"}),
'methods': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['coop_local.ExchangeMethod']", 'symmetrical': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'exchanges'", 'null': 'True', 'to': "orm['coop_local.Organization']"}),
'permanent': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coop_local.Person']", 'null': 'True', 'blank': 'True'}),
'products': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['coop_local.Product']", 'symmetrical': 'False'}),
'remote_organization_label': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'remote_organization_uri': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'remote_person_label': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'remote_person_uri': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'title'", 'overwrite': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'uri': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'uri_mode': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'rZG3Kc5kHDWtGCErjH4b7b'", 'max_length': '50', 'unique': 'True', 'null': 'True'})
},
'coop_local.exchangemethod': {
'Meta': {'object_name': 'ExchangeMethod'},
'etypes': ('coop.utils.fields.MultiSelectField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'uri': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'})
},
'coop_local.guaranty': {
'Meta': {'object_name': 'Guaranty'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logo': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'type': ('django.db.models.fields.IntegerField', [], {})
},
'coop_local.legalstatus': {
'Meta': {'object_name': 'LegalStatus'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'label'", 'overwrite': 'True'})
},
'coop_local.link': {
'Meta': {'object_name': 'Link'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'object_uri': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'predicate': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coop_local.LinkProperty']"})
},
'coop_local.linkproperty': {
'Meta': {'object_name': 'LinkProperty'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'uri': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'coop_local.navtree': {
'Meta': {'object_name': 'NavTree'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "'default'", 'unique': 'True', 'max_length': '100', 'db_index': 'True'}),
'types': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['coop_cms.NavType']", 'symmetrical': 'False', 'blank': 'True'}),
'uri': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'uri_mode': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'dEnNE5h9RCwoxpDANraN4o'", 'max_length': '50', 'unique': 'True', 'null': 'True'})
},
'coop_local.network': {
'Meta': {'ordering': "['title']", 'object_name': 'Network', '_ormbases': ['coop_local.Organization']},
'organization_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['coop_local.Organization']", 'unique': 'True', 'primary_key': 'True'})
},
'coop_local.offer': {
'Meta': {'object_name': 'Offer'},
'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coop_local.ActivityNomenclature']"}),
'coverage': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'practical_modalities': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'provider': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coop_local.Organization']"}),
'targets': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['coop_local.ClientTarget']", 'null': 'True', 'blank': 'True'}),
'technical_means': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'valuation': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'workforce': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'coop_local.organization': {
'Meta': {'ordering': "['title']", 'object_name': 'Organization'},
'acronym': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['coop_local.OrganizationCategory']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'email_sha1': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logo': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['coop_local.Person']", 'through': "orm['coop_local.Engagement']", 'symmetrical': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'pref_address': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'pref_adress'", 'null': 'True', 'to': "orm['coop_geo.Location']"}),
'pref_email': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'pref_email'", 'null': 'True', 'to': "orm['coop_local.Contact']"}),
'pref_label': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'pref_phone': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'pref_phone'", 'null': 'True', 'to': "orm['coop_local.Contact']"}),
'relations': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['coop_local.Organization']", 'through': "orm['coop_local.Relation']", 'symmetrical': 'False'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'title'", 'overwrite': 'True'}),
'subtitle': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'uri': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'uri_mode': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'nSgNXC8m9dWyM2GWEQeboU'", 'max_length': '50', 'unique': 'True', 'null': 'True'}),
'web': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'coop_local.organizationcategory': {
'Meta': {'object_name': 'OrganizationCategory'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'label'", 'overwrite': 'True'})
},
'coop_local.person': {
'Meta': {'object_name': 'Person'},
'category': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['coop_local.PersonCategory']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_sha1': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coop_geo.Location']", 'null': 'True', 'blank': 'True'}),
'location_display': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '2'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'remote_organization_label': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'remote_organization_uri': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'structure': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'uri': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'uri_mode': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100', 'blank': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'g4uvSE7yeBngafL2VxzgjK'", 'max_length': '50', 'unique': 'True', 'null': 'True'})
},
'coop_local.personcategory': {
'Meta': {'object_name': 'PersonCategory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'label'", 'overwrite': 'True'})
},
'coop_local.product': {
'Meta': {'ordering': "['-modified']", 'object_name': 'Product'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'products'", 'null': 'True', 'to': "orm['coop_local.Organization']"}),
'remote_organization_label': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}),
'remote_organization_uri': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'title'", 'overwrite': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}),
'uri': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'uri_mode': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'RcqAhbVFvBtKaevL83FEsf'", 'max_length': '50', 'unique': 'True', 'null': 'True'})
},
'coop_local.provider': {
'Meta': {'ordering': "['title']", 'object_name': 'Provider', '_ormbases': ['coop_local.Organization']},
'added_value': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'agreement_iae': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['coop_local.AgreementIAE']", 'null': 'True', 'blank': 'True'}),
'annual_integration_number': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'annual_revenue': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'bdis_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'brief_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'category_iae': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['coop_local.CategoryIAE']", 'null': 'True', 'blank': 'True'}),
'correspondence': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'creation': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'guaranties': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['coop_local.Guaranty']", 'null': 'True', 'blank': 'True'}),
'integration_workforce': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'legal_status': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coop_local.LegalStatus']", 'null': 'True', 'blank': 'True'}),
'modification': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
'organization_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['coop_local.Organization']", 'unique': 'True', 'primary_key': 'True'}),
'production_workforce': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'siret': ('django.db.models.fields.CharField', [], {'max_length': '14', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '1', 'blank': 'True'}),
'supervision_workforce': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'transmission': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'transmission_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'transverse_themes': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['coop_local.TransverseTheme']", 'null': 'True', 'blank': 'True'}),
'validation': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'workforce': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'coop_local.reference': {
'Meta': {'object_name': 'Reference'},
'client_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'from_year': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coop_local.Organization']"}),
'services': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'to_year': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'coop_local.relation': {
'Meta': {'object_name': 'Relation'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'reltype': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'source'", 'to': "orm['coop_local.Organization']"}),
'target': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'target'", 'to': "orm['coop_local.Organization']"})
},
'coop_local.role': {
'Meta': {'object_name': 'Role'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coop_local.RoleCategory']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '120'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': "'label'", 'unique_with': '()'}),
'uri': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'uri_mode': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'tyeggzi65vZf2nTDrxSZCY'", 'max_length': '50', 'unique': 'True', 'null': 'True'})
},
'coop_local.rolecategory': {
'Meta': {'ordering': "['label']", 'object_name': 'RoleCategory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'slug': ('django_extensions.db.fields.AutoSlugField', [], {'allow_duplicates': 'False', 'max_length': '50', 'separator': "u'-'", 'blank': 'True', 'populate_from': "'label'", 'overwrite': 'True'}),
'uri': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'})
},
'coop_local.siteprefs': {
'Meta': {'object_name': 'SitePrefs'},
'main_organization': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'main_org'", 'null': 'True', 'to': "orm['coop_local.Organization']"}),
'preferences_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['preferences.Preferences']", 'unique': 'True', 'primary_key': 'True'})
},
'coop_local.tag': {
'Meta': {'object_name': 'Tag'},
'concept_uri': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'default': "'fr'", 'max_length': '10'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coop_local.Person']", 'null': 'True', 'blank': 'True'}),
'person_uri': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'}),
'uri': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'uri_mode': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'jGZRKqfGKrX8btmCVL7mDA'", 'max_length': '50', 'unique': 'True', 'null': 'True'})
},
'coop_local.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'coop_local_taggeditem_taggeditem_items'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'coop_local_taggeditem_items'", 'to': "orm['coop_local.Tag']"})
},
'coop_local.transversetheme': {
'Meta': {'object_name': 'TransverseTheme'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
'preferences.preferences': {
'Meta': {'object_name': 'Preferences'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['sites.Site']", 'null': 'True', 'blank': 'True'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['coop_local'] | [
"[email protected]"
]
| |
bfc0ff00fcc96a5888f1b2574bfc1dd1c21a421b | 0bed82cb6c83b0e0e672c424077eb610eca19f40 | /dasy/__main__.py | 2b87fe8dafb0651676d1a322c3cd30c53eef00af | []
| no_license | Chitrank-Dixit/dasy | 896af55736b2fb8c84a889f1949fa28f48ee632a | 09d2b9b8819928827de5ce7ca6b130436f5f35d9 | refs/heads/master | 2023-04-05T14:41:32.057940 | 2021-04-11T17:12:37 | 2021-04-11T17:12:37 | 356,764,855 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 102 | py | #!/usr/bin/env python3
from .dasy import checkCommand


def _main() -> None:
    """Entry point used when the package is run via ``python -m dasy``."""
    checkCommand()


if __name__ == "__main__":
    _main()
| [
"[email protected]"
]
| |
0c96381cfe7098294ceee4776eec3458bc497a29 | ede96590eee4880ff83d1f1d8db5229e92c6e919 | /leasing/migrations/0042_contract_sign_fields.py | 84877db9dd92cea1360228eff5704d0bf76a4a3d | [
"MIT"
]
| permissive | igordavydsson/mvj | a4c5b39e7be9f95e15a2e906ad61b98611998063 | b467c6229f9d458d56b66f628b0841adb67a2970 | refs/heads/master | 2020-04-22T20:42:06.650182 | 2019-02-12T13:50:57 | 2019-02-12T13:50:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,033 | py | # Generated by Django 2.1.5 on 2019-02-12 07:13
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add four nullable date fields to the ``contract`` model.

    The new columns (``first_call_sent``, ``second_call_sent``,
    ``sign_by_date``, ``third_call_sent``) appear to track the contract
    signing process.  All are ``blank=True, null=True`` so existing rows
    stay valid without a data migration.
    """

    dependencies = [
        ('leasing', '0041_add_collateral'),
    ]

    # The four new columns are structurally identical optional DateFields;
    # build the AddField operations from a (column name, verbose label)
    # table instead of repeating the boilerplate four times.
    operations = [
        migrations.AddField(
            model_name='contract',
            name=column,
            field=models.DateField(blank=True, null=True, verbose_name=label),
        )
        for column, label in (
            ('first_call_sent', 'First call sent'),
            ('second_call_sent', 'Second call sent'),
            ('sign_by_date', 'Sign by date'),
            ('third_call_sent', 'Third call sent'),
        )
    ]
| [
"[email protected]"
]
| |
e29f5a011a08a9ecc107258a5a197159ce95d9d0 | 290394852b7fb70f791c6c4bb96141523ab96090 | /ExpertIdeas_WikipediaProxyServer_Bot_EmailTracking/ExpertIdeas/core/scripts/i18n/redirect.py | e28933b2cd487018c6ff2af043c5dbc1ae0f37cd | [
"MIT"
]
| permissive | ImanYZ/ExpertIdeas | da1564671f2cfe92d9de3fce68b82552cc6f33f7 | 23e23240854aef59108b16b63a567fffb2aabb69 | refs/heads/master | 2022-07-27T08:11:36.481824 | 2022-07-16T20:32:47 | 2022-07-16T20:32:47 | 144,018,052 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 62,009 | py | # -*- coding: utf-8 -*-
msg = {
# Author: Xqt
'en': {
'redirect-broken-redirect-template': u'{{db-r1}}',
'redirect-fix-broken-moved': u'Bot: Fixing broken redirect to moved target page %(to)s',
'redirect-fix-double': u'Bot: Fixing double redirect to %(to)s',
'redirect-fix-loop': u'Bot: Fixing redirect loop to %(to)s',
'redirect-remove-broken': u'Bot: Redirect to a deleted or non-existent page',
'redirect-remove-loop': u'Bot: Redirect target forms a redirect loop',
},
# Author: Csisc
# Author: Lloffiwr
# Author: Nemo bis
# Author: Xqt
# Author: ZxxZxxZ
'qqq': {
'redirect-fix-double': u'Edit summary when the bot fixes double redirects. <code>%(to)s</code> displays the new redirect target as a wiki link.',
'redirect-remove-broken': u'Edit summary when the bot tags a deleted or non-existent page for speedy deletion.',
'redirect-fix-broken-moved': u'Edit summary when the bot fixes a broken redirect to a moved page whose origin has been deleted.\nParameters:\n* <code>%(to)s</code>: the new redirect target, as a wiki link.',
'redirect-fix-loop': u'Edit summary when the bot fixes redirect loops. <code>%(to)s</code> displays the new redirect target as a wiki link.',
'redirect-remove-loop': u'Edit summary when the bot tags a redirect loop for speedy deletion.',
'redirect-broken-redirect-template': u'{{doc-important|Only use your deletion template like <code><nowiki>{{delete}}</nowiki></code> which exist on your local project.}}\n\nNOTE TO TRANSLATOR: This should only be translated by someone on the Wikipedia of your language code. Thank you.\n\nTemplate for speedy deletion of broken redirect or redirect loops which the bot tags onto the redirect page. This message may contain additional informations like template parameters or reasons for the deletion request.\n\nNOTE: If this system message is not given for a language code, speedy deletion request by a bot is not supported on your site except there is a bot with sysop flag.',
},
# Author: Csisc
'aeb': {
'redirect-fix-double': u'بوت: تصليح تحويلة مزدوجة إلى %(to)s',
'redirect-remove-broken': u'تحويلة إلى صفحة محذوفة أو غير موجودة',
'redirect-fix-loop': u'روبوت: تعديل حلقة إعادة التوجيه إلى %(to)s',
'redirect-remove-loop': u'هدف التحويلة يصنع عقدة تحويل: Robot',
},
# Author: Naudefj
# Author: Xqt
'af': {
'redirect-fix-double': u'Robot: dubbele aanstuur na %(to)s reggemaak',
'redirect-remove-broken': u'Robot: Aanstuur na \'n geskrapte of nie-bestaande bladsy',
'redirect-fix-loop': u'Robot: sirkulêre aanstuur na %(to)s reggemaak',
'redirect-remove-loop': u'Robot: Aanstuur vorm \'n sirkulêre lus',
},
# Author: Als-Holder
# Author: Xqt
'als': {
'redirect-fix-double': u'Bötli: Uflösig vun de doppleti Wyterleitig zue %(to)s',
'redirect-remove-broken': u'Bötli: Wyterleitigsziil git s nit',
'redirect-fix-broken-moved': u'Bot: Kaputtigi Wyterleitig uf di verschobe Ziilsyte %(to)s repariert',
'redirect-fix-loop': u' Bot: Wyterleitigschlupf uf %(to)s korrigiert',
'redirect-remove-loop': u'Bot: Wyterleitig goht im ringrum',
'redirect-broken-redirect-template': u'{{delete}}Wyterleitig wo kaputt isch',
},
# Author: DRIHEM
# Author: Meno25
# Author: Zaher kadour
'ar': {
'redirect-fix-double': u'بوت: تصليح تحويلة مزدوجة إلى %(to)s',
'redirect-remove-broken': u'روبوت: تحويلة إلى صفحة محذوفة أو غير موجودة',
'redirect-fix-broken-moved': u'الروبوت: إصلاح إعادة التوجيه المعطل لصفحة الهدف المحركة %(to)s',
'redirect-fix-loop': u'روبوت: تعديل حلقة إعادة التوجيه إلى %(to)s',
'redirect-remove-loop': u'روبوت: هدف التحويلة يصنع عقدة تحويل',
'redirect-broken-redirect-template': u'{{شطب|تحويلة مكسورة}}',
},
# Author: Jaminianurag
'as': {
'redirect-broken-redirect-template': u'{{db-r1}}',
},
# Author: Esbardu
# Author: Xqt
# Author: Xuacu
'ast': {
'redirect-fix-double': u'Robó: Iguando la redireición doble a %(to)s',
'redirect-remove-broken': u'Robó: Redireición a una páxina desaniciada o que nun esiste',
'redirect-fix-broken-moved': u'Robó: Arreglu de redireición frañada a la páxina de destín treslladada "%(to)s"',
'redirect-fix-loop': u'Robó: Iguando la redireición cíclica a %(to)s',
'redirect-remove-loop': u'Robó: El destín de la redireición forma un bucle de redireición',
'redirect-broken-redirect-template': u'{{Destruir|La páxina a la que redirixe nun esiste|--~~~~}}',
},
# Author: AZISS
# Author: Cekli829
# Author: Ebrahimi-amir
# Author: Khutuck
'az': {
'redirect-fix-double': u'Bot: İkiqat yönləndirmənin düzəldilməsi → %(to)s',
'redirect-remove-broken': u'Bot: Silinmiş və ya mövcud olmayan səhifəyə yönləndirmə',
'redirect-fix-broken-moved': u'Bot: İşləməyən yönləndirilmənin yeri dəyişdirilmiş hədəf səhifəyə %(to)s düzəldilməsi',
'redirect-fix-loop': u'Bot: Sonsuz yönləndirilmənin %(to)s düzəldilməsi',
'redirect-remove-loop': u'Bot: Yönləndirilmə sonsuz yönləndirilmə formalaşdırır',
},
# Author: Amir a57
# Author: E THP
# Author: Xqt
'azb': {
'redirect-fix-double': u'روبات :%(to)s صحیفهسینه ایستیقامتلی ایکیقات ایستیقامتلندیرمه دوزلدیلیر',
'redirect-remove-broken': u'روبات : سیلینئن یا دا وار اولمایان صحیفهیه اولان ایستیقامیلندیرمه',
'redirect-fix-loop': u'روبوت: فیخینگ اوزوک اولان%(to)s یؤنلندیرن',
'redirect-remove-loop': u'بوت: ایستیقامتلندیرمه هدفی بیر ایستیقامتلندیرمه دؤورو تشکیل ائدیر',
},
# Author: Haqmar
# Author: Sagan
'ba': {
'redirect-fix-double': u'Робот: %(to)s битенә икеле йүнәлтеүҙе төҙәтеү',
'redirect-remove-broken': u'Робот: булмаған йәки юйылған биткә йүнәлтеү',
'redirect-fix-loop': u'Робот: %(to)s битенә йүнәлтеүҙе төҙәтеү',
'redirect-remove-loop': u'Робот: бер ҡайҙа ла йүнәлтелмәгән',
},
# Author: Mucalexx
# Author: Xqt
'bar': {
'redirect-fix-double': u'Bot: richtt dóppéde Weiderloattung auf %(to)s',
'redirect-remove-broken': u'Bot: Weiderloattungszü gibts néd',
'redirect-remove-loop': u'Bot: Weiderloattungszü auf sé söwer',
'redirect-broken-redirect-template': u'{{Löschen|hinige Weiderloattung}}',
},
'bat-smg': {
'redirect-fix-double': u'Robots: Taisuoms dvėgobs paradresavėms → %(to)s',
},
# Author: EugeneZelenko
# Author: Jim-by
# Author: Renessaince
# Author: Zedlik
'be-x-old': {
'redirect-fix-double': u'Робат: выпраўленьне падвойнага перанакіраваньня → %(to)s',
'redirect-remove-broken': u'Робат: мэта перанакіраваньня не існуе',
'redirect-fix-broken-moved': u'Робат: Выпраўленьне перанакіраваньня на старонку, перанесеную ў %(to)s',
'redirect-fix-loop': u'Робат: Выпраўленьне перанакіраваньня на %(to)s',
'redirect-remove-loop': u'Робат: Пятля перанакіраваньняў',
'redirect-broken-redirect-template': u'{{Выдаліць|некарэктнае перанакіраваньне}}',
},
# Author: DCLXVI
'bg': {
'redirect-fix-double': u'Робот: Поправяне на двойно пренасочване към %(to)s',
},
# Author: Riemogerz
# Author: Xqt
'bjn': {
'redirect-fix-double': u'Robot: Pamasangan paugahan ganda ka %(to)s',
'redirect-remove-broken': u'Robot: Paalihan ka tungkaran nang dihapus atawa kada ada',
'redirect-remove-loop': u'Robot: Bidikan paalihan mahasilakan paalihan siklik',
},
# Author: Wikitanvir
'bn': {
'redirect-fix-double': u'বট: %(to)s-এ দ্বিপুনর্নির্দেশনা ঠিক করছে',
'redirect-broken-redirect-template': u'{{db-r1}}',
},
# Author: Fohanno
# Author: Fulup
# Author: Gwenn-Ael
# Author: Y-M D
'br': {
'redirect-fix-double': u'Kempennet adkas doubl gant robot → %(to)s',
'redirect-remove-broken': u'Robot : Ar bajenn ma vezer adkaset n\'eus ket anezhi',
'redirect-fix-broken-moved': u'Robot : O reizhañ an adkasoù torret war-zu ar bajenn bal %(to)s',
'redirect-fix-loop': u'Robot : O kempenn al lagadenn adkas war-zu %(to)s',
'redirect-remove-loop': u'Robot: Stumm ur c\'helc\'h-tro born zo gant an [[Wikipedia:Redirect|adkas]]',
},
# Author: CERminator
# Author: Edinwiki
# Author: Xqt
'bs': {
'redirect-fix-double': u'Bot: Popravlja dvostruka preusmjerenja na %(to)s',
'redirect-remove-broken': u' [[WP:CSD#G8|G8]]: [[Wikipedia:Redirect|Preusmjerenje]] na obrisanu ili nepostojeću stranicu',
'redirect-fix-broken-moved': u'Bot: Neispravno preusmjerenje prema premještenoj stranici %(to)s',
'redirect-fix-loop': u'Robot: Popravlja petlje preusmjerenja na %(to)s',
'redirect-remove-loop': u' [[WP:CSD#G8|G8]]: [[Wikipedia:Redirect|Preusmjerenje]] pravi petlju na samo sebe',
'redirect-broken-redirect-template': u'{{Brisanje}}',
},
# Author: BroOk
# Author: Grondin
# Author: SMP
# Author: Vriullop
'ca': {
'redirect-fix-double': u'Bot: reparació de les redireccions dobles a %(to)s',
'redirect-remove-broken': u'Bot: la pàgina a la qual redirecciona no existeix',
'redirect-fix-broken-moved': u'Bot: Reparació de les redireccions trencades per moure-les a %(to)s',
'redirect-fix-loop': u'Bot: arreglant redirecció en bucle per %(to)s',
'redirect-remove-loop': u'Robot: el destí de la redirecció crea un bucle de redirecció',
'redirect-broken-redirect-template': u'{{db-r1}}',
},
# Author: Xqt
# Author: Умар
'ce': {
'redirect-fix-double': u'бот: шалха дӀасахьажинарг нисъяр → %(to)s',
},
# Author: Asoxor
# Author: Calak
# Author: Marmzok
'ckb': {
'redirect-fix-double': u'ڕۆبۆت: چاکسازیی دووجار-ڕەوانەکردنەوە بۆ %(to)s',
'redirect-remove-broken': u'ڕۆبۆت: ڕەوانەکەر بۆ پەڕەیەکی سڕاوە یان پەڕەیەک کە بوونی نییە',
},
# Author: Dontlietome7
# Author: JAn Dudík
# Author: Jezevec
# Author: Spiffyk
# Author: Vks
'cs': {
'redirect-fix-double': u'Robot: Opravuji dvojité přesměrování na %(to)s',
'redirect-remove-broken': u'Robot: Přerušené přesměrování',
'redirect-fix-broken-moved': u'Robot: Oprava přerušeného přesměrování na přesunutou cílovou stránku %(to)s',
'redirect-fix-loop': u'Robot: Oprava smyčky přesměrování na %(to)s',
'redirect-remove-loop': u'Robot: Cíl přesměrování tvoří smyčku',
'redirect-broken-redirect-template': u'{{smazat|přerušené přesměrování}}',
},
# Author: Lloffiwr
# Author: Xqt
# Author: Xxglennxx
'cy': {
'redirect-fix-double': u'Bot: Yn trwsio ailgyfeiriad dwbl i %(to)s',
'redirect-remove-broken': u'Bot: Yn ailgyfeirio i dudalen a ddilëwyd neu nad yw ar gael',
'redirect-remove-loop': u'Bot: Mae nod yr ailgyfeiriad yn ffurfio dolen ailgyfeirio',
},
# Author: Christian List
# Author: Kaare
# Author: Sarrus
'da': {
'redirect-fix-double': u'Robot: Retter dobbelt omdirigering til %(to)s',
'redirect-remove-broken': u'Robot: Omdirigering til en slettet eller ikke-eksisterende side',
'redirect-fix-broken-moved': u'Robot: Retter brudt omdirigering til flyttet målside %(to)s',
'redirect-fix-loop': u'Robot: Retter omdirigeringsløkke til %(to)s',
'redirect-remove-loop': u'Robot: Målet for omdirigeringen danner en omdirigeringsløkke',
'redirect-broken-redirect-template': u'{{slet}}',
},
# Author: Geitost
# Author: Metalhead64
# Author: The Evil IP address
'de': {
'redirect-fix-double': u'Bot: Korrigiere doppelte Weiterleitung auf %(to)s',
'redirect-remove-broken': u'Bot: Weiterleitungsziel existiert nicht',
'redirect-fix-broken-moved': u'Bot: Korrigiere defekte Weiterleitung auf Verschiebeziel %(to)s',
'redirect-fix-loop': u'Bot: Korrigiere Weiterleitungschleife auf %(to)s',
'redirect-remove-loop': u'Bot: Weiterleitungsziel auf sich selbst',
'redirect-broken-redirect-template': u'{{Löschen|Defekte Weiterleitung}}',
},
# Author: Eruedin
'de-ch': {
'redirect-fix-double': u'Bot: Korrigiere doppelte Weiterleitung auf %(to)s',
'redirect-remove-broken': u'Bot: Weiterleitungsziel existiert nicht',
'redirect-fix-broken-moved': u'Bot: Korrigiere defekte Weiterleitung auf verschobener Zielseite %(to)s',
'redirect-fix-loop': u'Bot: Korrigiere Weiterleitungschleife auf %(to)s',
'redirect-remove-loop': u'Bot: Weiterleitungsziel auf sich selbst',
},
# Author: Erdemaslancan
# Author: Gorizon
# Author: Mirzali
'diq': {
'redirect-fix-double': u'Boti Tespitê hetanayışê dıleti heta %(to)s',
'redirect-remove-broken': u'Robot: Yew pela ke esteriya ya zi çıniya, aye ser çarne',
'redirect-fix-broken-moved': u'Bot: Hetenayışe xırabi hedefe pela %(to)s vurneno',
'redirect-fix-loop': u'Boti %(to)s rê hetanayışo dılet deke',
'redirect-remove-loop': u'[[WP:CSD#G8|G8]]: [[Wikipedia:Redirect|Hetenayış]] re formê etiketi vıraşt.',
'redirect-broken-redirect-template': u'{{bestere|m1}}',
},
# Author: Derbeth
'dsb': {
'redirect-broken-redirect-template': u'{{lašowaś|defektne dalejpósrědnjenje}}',
},
# Author: Evropi
# Author: Geraki
# Author: Glavkos
'el': {
'redirect-fix-double': u'Ρομπότ: Διόρθωση διπλής ανακατεύθυνσης προς %(to)s',
'redirect-remove-broken': u'Ρομπότ: Ανακατεύθυνση σε μια σελίδα διαγεγραμμένη ή ανύπαρκτη',
'redirect-fix-broken-moved': u'Ρομπότ: Διόρθωση σπασμένης ανακατεύθυνσης προς την μετακινημένη σελίδα στόχο %(to)s',
'redirect-fix-loop': u'Ρομπότ: Διόρθωση βρόχου ανακατεύθυνσης στο %(to)s',
'redirect-remove-loop': u'Robot: Ανακατεύθυνση στόχου σχηματίζει έναν βρόγχο ανακατεύθυνσης',
},
# Author: Blahma
# Author: Mihxil
# Author: Objectivesea
# Author: Xqt
'eo': {
'redirect-fix-double': u'Roboto: Riparas duoblan alidirekton al %(to)s',
'redirect-remove-broken': u'Roboto: Alidirekto indikas forigitan aŭ neekzistantan paĝon',
'redirect-fix-broken-moved': u'Roboto: Riparas rompitan alidirekton al movita celpaĝo %(to)s',
'redirect-fix-loop': u'Roboto: Riparas ciklan alidirektilon al %(to)s',
'redirect-remove-loop': u'Roboto: Alidirekta celas sin mem',
'redirect-broken-redirect-template': u'{{db-r1}}',
},
# Author: Armando-Martin
# Author: Dferg
# Author: Invadinado
# Author: Vivaelcelta
# Author: Xqt
'es': {
'redirect-fix-double': u'Bot: Arreglando doble redirección → %(to)s',
'redirect-remove-broken': u'Bot: Redirige a una página borrada o que no existe',
'redirect-fix-broken-moved': u'Bot: arreglo la redirección rota hacia la página de destino trasladada "%(to)s"',
'redirect-fix-loop': u'Bot: Arreglando redirección infinita → %(to)s',
'redirect-remove-loop': u'[[WP:CSD#G8|G8]]: El destino de la [[Wikipedia:Redirect|redirección]] crea un bucle de redirección',
'redirect-broken-redirect-template': u'{{destruir|1=La página a la que redirige no existe|bot=~~~~}}',
},
# Author: Pikne
'et': {
'redirect-fix-double': u'Robot: parandatud kahekordne ümbersuunamine leheküljele %(to)s',
},
# Author: An13sa
# Author: Xabier Armendaritz
'eu': {
'redirect-fix-double': u'Robota: «%(to)s» orriranzko birbideratze bikoitza konpontzea',
},
# Author: Ebraminio
# Author: Huji
# Author: Mjbmr
# Author: Reza1615
# Author: ZxxZxxZ
# Author: جواد
'fa': {
'redirect-fix-double': u'ربات: اصلاح تغییرمسیر دوتایی به %(to)s',
'redirect-remove-broken': u'ربات: تغییرمسیر به صفحهٔ ناموجود یا حذفشده',
'redirect-fix-broken-moved': u'ربات:اصلاح تغییرمسیرهای خراب به صفحهٔ هدف %(to)s',
'redirect-fix-loop': u'ربات: رفع حلقه در تغییرمسیر به %(to)s',
'redirect-remove-loop': u'ربات: مقصد تغییرمسیر یک تغییرمسیر حلقهای ایجاد میکند',
'redirect-broken-redirect-template': u'{{حذف سریع|بن بست|bot=yes}}',
},
# Author: Crt
# Author: Nedergard
# Author: Nike
# Author: Olli
# Author: Silvonen
'fi': {
'redirect-fix-double': u'Botti korjasi kaksinkertaisen ohjauksen sivulle %(to)s',
'redirect-remove-broken': u'Botti: Ohjaus poistetulle tai olemattomalle sivulle',
'redirect-fix-broken-moved': u'Botti korjasi rikkinäisen ohjauksen siirrettyyn kohdesivuun %(to)s',
'redirect-fix-loop': u'Botti korjasi ohjaussilmukan sivulle %(to)s',
'redirect-remove-loop': u'Botti: Ohjauksen kohde muodostaa ohjaussilmukan',
'redirect-broken-redirect-template': u'{{Delete}}',
},
# Author: EileenSanda
'fo': {
'redirect-fix-double': u'Bottur: Rættar dupulta umdirigering til %(to)s',
'redirect-remove-broken': u'Bottur: Umstjórnan til eina strikaða síðu ella til eina síðu sum ikki er til',
'redirect-fix-broken-moved': u'Bottur: Rættar brotnar umdirigeringar til flutta málsíðu %(to)s',
'redirect-fix-loop': u'Bottur: Rættar endurstýris (redirect) lykkju til %(to)s',
},
# Author: Boniface
# Author: Crochet.david
# Author: Gomoko
# Author: IAlex
# Author: Od1n
# Author: Xqt
'fr': {
'redirect-fix-double': u'Robot : répare une double redirection vers %(to)s',
'redirect-remove-broken': u'[[WP:CSD#G8|G8]]: [[Wikipedia:Redirect|Redirection]] vers une page supprimée ou inexistante',
'redirect-fix-broken-moved': u'Robot: Correction des redirections erronées vers une page cible %(to)s déplacée',
'redirect-fix-loop': u'Robot : répare une boucle de redirection sur %(to)s',
'redirect-remove-loop': u'Bot: la cible de la redirection forme une boucle de redirection',
'redirect-broken-redirect-template': u'{{db-r1}}',
},
# Author: ChrisPtDe
# Author: Xqt
'frp': {
'redirect-fix-double': u'Robot : rèpâre na redirèccion dobla de vers %(to)s',
'redirect-remove-broken': u'Robot : redirèccion de vers na pâge suprimâye ou ben pas ègzistenta',
'redirect-fix-loop': u'Robot : rèpâre na boclla de redirèccion a %(to)s',
'redirect-remove-loop': u'Robot : la ciba de la redirèccion fôrme na boclla de redirèccion',
},
# Author: Murma174
'frr': {
'redirect-fix-double': u'Bot: Ferbeedre dobelt widjerfeerang tu %(to)s',
'redirect-remove-broken': u'Bot: Widjerfeerang tu en duad sidj.',
'redirect-fix-broken-moved': u'Bot: Uunstaken widjerfeerang feert nü tu %(to)s',
'redirect-fix-loop': u'Bot: Maaget widjerfeerangs-sleuf hial tu %(to)s',
'redirect-remove-loop': u'Bot: Widjerfeerang üüb ham salew',
'redirect-broken-redirect-template': u'{{Strik|Widjerfeerang uunstaken}}',
},
# Author: Klenje
'fur': {
'redirect-fix-double': u'Robot: o comedi un re-indreçament dopli a %(to)s',
},
# Author: Alison
'ga': {
'redirect-fix-double': u'Róbó: Ag socrú athsheolta dúbailte → %(to)s',
'redirect-remove-broken': u'Róbó : Targaid athsheoladh ar iarraidh',
'redirect-broken-redirect-template': u'{{scrios|Athsheoladh briste}}',
},
# Author: Toliño
'gl': {
'redirect-fix-double': u'Bot: Arranxo a redirección dobre cara a "%(to)s"',
'redirect-remove-broken': u'Bot: Redirección cara a unha páxina eliminada ou en branco',
'redirect-fix-broken-moved': u'Bot: Arranxo a redirección rota cara á páxina de destino trasladada "%(to)s"',
'redirect-fix-loop': u'Bot: Arranxo a redirección en bucle cara a "%(to)s"',
'redirect-remove-loop': u'Bot: O destino da redirección crea un bucle',
'redirect-broken-redirect-template': u'{{delete}}',
},
# Author: Jetlag
'hak': {
'redirect-fix-double': u'機械人:修正雙重定向至%(to)s',
'redirect-remove-broken': u'機械人:重定向到已刪除或毋存在个頁面',
'redirect-fix-broken-moved': u'機械人:修復損壞个重定向頁到移動目標頁面 %(to)s',
'redirect-fix-loop': u'機械人:修復重定向迴圈至%(to)s',
'redirect-remove-loop': u'機械人:重定向目標構成循環',
},
# Author: Kolonahe
# Author: Xqt
'haw': {
'redirect-fix-double': u'Lopako: Ke kāpili hou i ka hoʻoili pālua iā %(to)s',
'redirect-remove-broken': u'Lopako: Hoʻoili i kekahi ʻaoʻao i holoi ʻia aiʻole he ʻaoʻao i hakuʻole ʻia',
'redirect-fix-broken-moved': u'Lopako: Ke kāpili hou nei i ka hoʻoili haki o kekahi ʻaoʻao hoʻoneʻe ʻia iā %(to)s',
'redirect-fix-loop': u'Lopako: Ke kāpili hou i ka linapoepoe hoʻoili iā %(to)s',
'redirect-remove-loop': u'Lopako: Hoʻokino ka māka hoʻoili i kekahi linapoepoe hoʻoili',
},
# Author: Amire80
# Author: YaronSh
# Author: ערן
'he': {
'redirect-fix-double': u'בוט: מתקן הפניה כפולה → %(to)s',
'redirect-remove-broken': u'רובוט: יעד ההפניה נמחק או שאינו קיים',
'redirect-fix-broken-moved': u'רובוט: תיקון הפניה שבורה לדף היעד %(to)s, שהועבר',
'redirect-fix-loop': u'בוט: תיקון הפניה מעגלית ל%(to)s',
'redirect-remove-loop': u'רובוט: הפניה זו גורמת ללולאה אין־סופית של הפניות',
},
# Author: Ex13
'hr': {
'redirect-fix-double': u'Bot: Popravak dvostrukih preusmjeravanja → %(to)s',
'redirect-remove-broken': u'[[WP:CSD#G8|G8]]: [[Wikipedia:Redirect|Preusmjeravanje] na obrisanu ili nepostojeću stranicu',
'redirect-remove-loop': u'[[WP:CSD#G8|G8]]: Cilj [[Wikipedia:Redirect|preusmjeravanja]] stvara petlju na sebe',
},
# Author: Bináris
# Author: Dj
'hu': {
'redirect-fix-double': u'Bot: %(to)s lapra mutató dupla átirányítás javítása',
'redirect-remove-broken': u'Bot: Törölt vagy nem létező lapra mutató átirányítás törlése',
'redirect-fix-loop': u'Bot: Ide mutató átirányítási hurkok javítása: %(to)s',
'redirect-remove-loop': u'Bot: körkörös átirányítás',
'redirect-broken-redirect-template': u'{{azonnali|Hibás átirányítás}}',
},
# Author: Xelgen
'hy': {
'redirect-fix-double': u'Ռոբոտ․ Շտկվում են կրկնակի վերահղումները %(to)s -ին',
},
# Author: McDutchie
# Author: Xqt
'ia': {
'redirect-fix-double': u'Robot: reparation de duple redirection → %(to)s',
'redirect-remove-broken': u'Robot: Redirection a un pagina delite o non existente',
'redirect-fix-broken-moved': u'Robot: Repara un redirection rupte verso un pagina renominate: %(to)s',
'redirect-fix-loop': u'Robot: Repara redirection circular a %(to)s',
'redirect-remove-loop': u'Robot: Le destination del redirection forma un circulo de redirectiones',
'redirect-broken-redirect-template': u'{{eliminar|Redirection a un pagina delite o non existente}}',
},
# Author: Farras
# Author: IvanLanin
'id': {
'redirect-fix-double': u'Bot: Memperbaiki pengalihan ganda ke %(to)s',
'redirect-remove-broken': u'Robot: Pengalihan ke halaman yang dihapus atau tidak ada',
'redirect-fix-loop': u'Robot: Memperbaiki pengalihan ganda ke %(to)s',
'redirect-remove-loop': u'Robot: Target pengalihan menghasilkan pengalihan siklik',
'redirect-broken-redirect-template': u'{{db-r1}}',
},
# Author: Renan
'ie': {
'redirect-fix-double': u'Machine: Fixant redirection duplic por %(to)s',
'redirect-remove-broken': u'[[WP:CSD#G8|G8]]: [[Wikipedia:Redirect|Redirect]] por un págine deletet o non-existent',
'redirect-remove-loop': u'[[WP:CSD#G8|G8]]: [[Wikipedia:Redirect|Redirection]] cible forma un lace de redirection',
},
# Author: Lam-ang
'ilo': {
'redirect-fix-double': u'Robot: Agsimsimpa ti doble a baw-ing idiay %(to)s',
'redirect-remove-broken': u'Robot: Baw-ing a mapan ti naikkat wenno awan a panid',
'redirect-remove-loop': u'Robot: Ti baw-ing a puntaan ket agporma baw-ing a silo',
'redirect-broken-redirect-template': u'{{delete}}',
},
# Author: Snævar
'is': {
'redirect-fix-double': u'Vélmenni: Lagfæri tvöfalda tilvísun → %(to)s',
'redirect-remove-broken': u'Vélmenni: Tilvísun bendir á síðu sem hefur verið eytt eða er ekki til',
'redirect-fix-loop': u'Vélmenni: Lagfæri tilvísunar lykkju → %(to)s',
'redirect-remove-loop': u'Vélmenni: Tilvísun bendir á óendanlega tilvísunar lykkju',
},
# Author: Beta16
# Author: Nemo bis
# Author: Rippitippi
# Author: Ximo17
'it': {
'redirect-fix-double': u'Bot: Sistemo i redirect doppi a %(to)s',
'redirect-remove-broken': u'Bot: Redirect a una pagina inesistente',
'redirect-fix-broken-moved': u'Bot: Correggo redirect errati alla pagina spostata %(to)s',
'redirect-fix-loop': u'Bot: sistemo ciclo di reindirizzamento a %(to)s',
'redirect-remove-loop': u'Bot: La destinazione del [[{{ns:project}}:Redirect|redirect]] rimanda alla pagina di partenza',
'redirect-broken-redirect-template': u'{{Cancella subito|9}}',
},
# Author: Shirayuki
# Author: 赤の旋律
'ja': {
'redirect-fix-double': u'ロボットによる: 二重リダイレクト修正 → %(to)s',
'redirect-remove-broken': u'ロボットによる: 削除済みまたは存在しないページへのリダイレクト',
'redirect-fix-broken-moved': u'ロボットによる: 迷子のリダイレクトのリダイレクト先を %(to)s に修正',
'redirect-fix-loop': u'ロボットによる: リダイレクトのループの修正 → %(to)s',
'redirect-remove-loop': u'ロボットによる: リダイレクト先にあるリダイレクトのループを修正',
},
# Author: NoiX180
'jv': {
'redirect-fix-double': u'Bot: Mbenakaké rong pralihan nèng %(to)s',
'redirect-remove-broken': u'Bot: Ngalihaké nèng kaca sing ora ana utawa wis kabusak',
'redirect-fix-loop': u'Bot: Mbenakaké ubengan pangalihan nèng %(to)s',
'redirect-remove-loop': u'Bot: Patujon pangalihan dadi ubengan pangalihan',
},
# Author: David1010
'ka': {
'redirect-fix-double': u'რობოტი: ორმაგი გადამისამართების გასწორება → %(to)s',
'redirect-remove-broken': u'რობოტი: გადამისამართება წაშლილ ან არარსებულ გვერდზე',
},
'kk': {
'redirect-fix-double': u'Бот: Шынжырлы айдатуды түзетті → %(to)s',
'redirect-remove-broken': u'Бот: Айдату нысанасы жоқ болды',
},
# Author: Cwt96
# Author: 아라
'ko': {
'redirect-fix-double': u'로봇: %(to)s(으)로 이중 넘겨주기 고침',
'redirect-remove-broken': u'로봇: 끊긴 넘겨주기',
'redirect-fix-broken-moved': u'로봇: %(to)s(으)로 옮긴 대상 문서로 끊긴 넘겨주기 고침',
'redirect-fix-loop': u'로봇: %(to)s(으)로 재귀적인 넘겨주기 고침',
'redirect-remove-loop': u'로봇: 넘겨주기 대상이 재귀적인 넘겨주기로 생김',
'redirect-broken-redirect-template': u'{{ㅅ}}',
},
# Author: Purodha
'ksh': {
'redirect-fix-double': u'Bot: [[Special:Doubleredirects|Dubbel Ömlëijdong]] fottjemaat → %(to)s',
'redirect-remove-broken': u'Bot: Di Ömlëijdong jingk ennet Liiere.',
'redirect-fix-broken-moved': u'Bot: De kappodde Ömleidong op de ömjenannte Sigg %(to)s es reppareert.',
'redirect-fix-loop': u'Bot: En Reih vun Ömleidonge jeng em Kreis eröm. Op %(to)s jescheck.',
'redirect-remove-loop': u'Bot: Di Ömleidonge jonn em Kreis eröm.',
'redirect-broken-redirect-template': u'{{Schmieß fott}}Di [[Wikipedia:Ömleijdung|Ömlëijdong]] jeiht noh nörjendswoh hen.',
},
# Author: Ghybu
'ku': {
'redirect-fix-double': u'Robot: Fixing double redirect to %(to)s',
},
# Author: Maksat
# Author: Викиней
'ky': {
'redirect-fix-double': u'Бот: кош багыттаманы жоюу → %(to)s',
},
# Author: MissPetticoats
# Author: UV
# Author: Xqt
'la': {
'redirect-fix-double': u'automaton: rectificatio redirectionis duplicis → %(to)s',
'redirect-remove-broken': u'automaton: redirectio ad paginam quae non est',
'redirect-remove-loop': u'automaton: redirectio ad eundem titulum',
},
# Author: Les Meloures
# Author: Robby
'lb': {
'redirect-fix-double': u'Bot: Duebel Viruleedung gefléckt → %(to)s',
'redirect-remove-broken': u'Bot: Viruleedung op eng geläscht Säit oder eng Säit déi et net gëtt',
'redirect-fix-broken-moved': u'Bot: Futtis Viruleedung op déi geréckelt Zilsäit %(to)s gouf gefléckt',
'redirect-fix-loop': u'Bot: Viruleedungsschleef op %(to)s verbessert',
'redirect-remove-loop': u'Bot: Viruleedung där hiert Zil zu enger endlos Schleef féiert',
'redirect-broken-redirect-template': u'{{Läschen|Defekt Viruleedung}}',
},
# Author: Pahles
'li': {
'redirect-fix-double': u'Robot: dobbel doorverwiezing aangepas nao %(to)s',
'redirect-remove-broken': u'Robot: de doelpagina van de doorverwiezing besjteit neet',
'redirect-remove-loop': u'Doorverwiezing vörmp \'n óneindige lus',
'redirect-broken-redirect-template': u'{{delete|Weisdoorverwiezing of doorverwiezing nao eweggesjafde pagina}}',
},
# Author: Hugo.arg
# Author: Mantak111
'lt': {
'redirect-fix-double': u'robotas: Taisomas dvigubas peradresavimas → %(to)s',
'redirect-remove-broken': u'robotas: peradresavimas į ištrintą ar nesantį puslapį',
},
# Author: StefanusRA
'map-bms': {
'redirect-fix-double': u'Bot: Mbenerna pengalihan ganda maring %(to)s',
'redirect-remove-broken': u'Robot: Pangalihan ming kaca sing ora ana utawa wis debusek',
'redirect-fix-broken-moved': u'Robot:Ndandani pangalihan ruwag maring kaca target sing dipindah %(to)s',
'redirect-fix-loop': u'Robot: Ndandani pengalihan dobel maring %(to)s',
'redirect-remove-loop': u'Robot: Target pangalihan marekna dadi pengalihan siklik',
},
# Author: Jagwar
'mg': {
'redirect-fix-double': u'Rôbô : mamaha olam-pihodinana mankany amin\'i %(to)s',
'redirect-remove-broken': u'Rôbô : fihodinana mankany amina pejy tsy misy na erfa voafafa.',
'redirect-fix-broken-moved': u'Rôbô : Fanitsiana fihodinana diso mankany amin\'ny pejy tanjona %(to)s nafindra',
'redirect-fix-loop': u'Rôbô: nanamboatra ny fihodinana manao tondro mifolaka amin\'i %(to)s',
'redirect-remove-loop': u'[[WP:CSD#G8|G8]]: Ny tanjon\'ny fihodinana dia manao fihodinana mifolaka',
'redirect-broken-redirect-template': u'{{fafao}}',
},
# Author: Luthfi94
# Author: Naval Scene
'min': {
'redirect-fix-double': u'Bot: Mampeloki pangalihan gando ka %(to)s',
'redirect-remove-broken': u'Robot: Pangaliahan ka laman nan dihapuih atau indak ado',
'redirect-remove-loop': u'Robot: Target pangaliahan mahasilan pangaliahan bapusiang',
},
# Author: Bjankuloski06
# Author: Rancher
# Author: Xqt
'mk': {
'redirect-fix-double': u'Робот: Исправка на двојни пренасочувања → %(to)s',
'redirect-remove-broken': u'[[ВП:КББ|О6]: [[Википедија:Пренасочување|Пренасочување]] кон избришана или непостоечка страница',
'redirect-fix-broken-moved': u'Робот: Исправка на прекинато пренасочување кон преместена целна страница %(to)s',
'redirect-fix-loop': u'Робот: Поправа јамка на пренасочување кон %(to)s',
'redirect-remove-loop': u'[[ВП:КББ|О6]]: Одредницата за [[Википедија:Пренасочување|пренасочување]] образува јамка',
'redirect-broken-redirect-template': u'{{db|[[ВП:КББ|O8]]}}',
},
# Author: Praveenp
'ml': {
'redirect-fix-double': u'യന്ത്രം: %(to)s എന്നതിലോട്ടുള്ള ഇരട്ട തിരിച്ചുവിടൽ ശരിയാക്കുന്നു',
'redirect-remove-broken': u'യന്ത്രം: മായ്ച്ച അല്ലെങ്കിൽ നിലവിലില്ലാത്ത താളിലോട്ടുള്ള തിരിച്ചുവിടൽ',
'redirect-fix-broken-moved': u'യന്ത്രം: മാറ്റപ്പെട്ട വിക്കിതാളായ %(to)s എന്നതിലേയ്ക്കുള്ള പൊട്ടിയ തിരിച്ചുവിടൽ ശരിയാക്കുന്നു',
'redirect-fix-loop': u'യന്ത്രം: %(to)s എന്നതിലോട്ടുണ്ടായിരുന്ന ചാക്രിക തിരിച്ചുവിടൽ ശരിയാക്കുന്നു',
'redirect-remove-loop': u'യന്ത്രം: ലക്ഷ്യത്തിലോട്ടുള്ള തിരിച്ചുവിടൽ ഒരു തിരിച്ചുവിടൽ ചക്രം സൃഷ്ടിക്കുന്നു',
'redirect-broken-redirect-template': u'{{delete}}',
},
# Author: Anakmalaysia
# Author: Kurniasan
'ms': {
'redirect-fix-double': u'Bot: Memperbetulkan pelencongan berganda ke %(to)s',
'redirect-remove-broken': u'Robot: Melencong kepada laman yang terhapus atau tidak wujud',
'redirect-fix-broken-moved': u'Robot: Membaiki lencongan yang terputus ke halaman sasaran yang terpindah iaitu %(to)s',
'redirect-fix-loop': u'Robot: Membaiki gelung lencongan ke %(to)s',
'redirect-remove-loop': u'Robot: Sasaran lencongan membentuk gelung lencongan',
},
# Author: Chrisportelli
'mt': {
'redirect-fix-double': u'Bot: Tranġar ta\' rindirizz doppju lejn %(to)s',
'redirect-remove-broken': u'Bot: Rindirizz lejn paġna mħassra jew li ma teżistix',
'redirect-remove-loop': u'Bot: Id-destinazzjoni tar-rindirizz qiegħed jifforma ċiklu ta\' rindirizzi',
'redirect-broken-redirect-template': u'{{Ħassar minnufih|9}}',
},
# Author: Lionslayer
'my': {
'redirect-fix-double': u'ဘော့ - %(to)s သို့ ပြန်ညွှန်းနှစ်ထပ်ဖြစ်နေသည်ကို ပြင်နေသည်',
'redirect-remove-loop': u'[WP:CSD#G8|G8]]: [[Wikipedia:Redirect|ပြန်ညွှန်း]]သည် ယင်းကို ပြန်ညွှန်းခြင်းခံရသောကြောင့် သံသရာလည်မှု ဖြစ်စေသည်။',
},
# Author: محک
'mzn': {
'redirect-fix-double': u'ربوت:عوض هایتن دکشیهئون دِتایی → %(to)s',
'redirect-remove-broken': u'ربوت:بیجاء ِدکشییهئون',
'redirect-remove-loop': u'ربوت:ناترینگی [[:fa:وپ:تغییرمسیر|دکشییه]]',
'redirect-broken-redirect-template': u'{{حذف سریع|بن بست|bot=yes}}',
},
# Author: Slomox
'nds': {
'redirect-fix-double': u'Bot: Dubbelte Wiederleiden rutmakt → %(to)s',
'redirect-remove-broken': u'Bot: Kaputte Wiederleiden ward nich brukt',
'redirect-remove-loop': u'Bot: Redirect wiest wedder op sik sülvs',
'redirect-broken-redirect-template': u'{{delete}}Kaputte Wiederleiden, wat nich brukt ward.',
},
# Author: Servien
# Author: Xqt
'nds-nl': {
'redirect-fix-double': u'Bot: dubbele deurverwiezing verbeterd naor %(to)s',
'redirect-remove-broken': u'Bot: de doelpagina van de deurverwiezing besteet niet',
'redirect-fix-broken-moved': u'Robot: kapotte deurverwiezing verbeterd deur anpassing naor herneumden doelzied %(to)s',
'redirect-fix-loop': u'Bot: deurverwiessirkel naor %(to)s erepareerd',
'redirect-remove-loop': u'Bot: deurverwiezing verwis weer naor zichzelf',
'redirect-broken-redirect-template': u'{{vort|Kapotte deurverwiezing of deurverwijzing naor vort-edaone pagina}}',
},
# Author: RajeshPandey
# Author: सरोज कुमार ढकाल
'ne': {
'redirect-fix-double': u'बोट: दोहोरो अनुप्रेषणलाई %(to)sमा ठिक गर्दै',
'redirect-remove-broken': u'रोबोट: हुँदै नभएको पृष्ठमा या मेटिएको पृष्ठमा गरिएको रिडाइरेक्ट',
'redirect-fix-broken-moved': u'रोबोट: टुटेको अनुप्रेषण सारिएको लक्ष्यको लागि मिलाउदै %(to)s',
'redirect-fix-loop': u'रोबोट: अनुप्रेषण चक्र %(to)sलाई मिलाउँदै',
'redirect-remove-loop': u'रोबोट: रिडाइरेक्ट गर्न खोजिएको पृष्ठले रिडाइरेक्ट चक्रको निर्माण गर्यो ।',
'redirect-broken-redirect-template': u'{{db-r1}}',
},
# Author: Eukesh
'new': {
'redirect-fix-loop': u'रोबट: पुनर्निदेश लूपयात %(to)sय् मिलय्यानाछ्वगु',
'redirect-remove-loop': u'रोबट:पुनर्निर्देशया लक्ष्यं पुनर्निदेश लूप दयेकी',
},
# Author: Krinkle
# Author: Siebrand
'nl': {
'redirect-fix-double': u'Robot: dubbele doorverwijzing gecorrigeerd naar %(to)s',
'redirect-remove-broken': u'Robot: de doelpagina van de doorverwijzing bestaat niet',
'redirect-fix-broken-moved': u'Robot: kapotte doorverwijzing gecorrigeerd door aanpassing naar hernoemde doelpagina %(to)s',
'redirect-fix-loop': u'Robot: doorverwijscirkel naar %(to)s gerepareerd',
'redirect-remove-loop': u'[[WP:NW|NUWEG]]: [[Wikipedia:Doorverwijzing|Doorverwijzing]] vormt een oneindige lus',
'redirect-broken-redirect-template': u'{{nuweg|Weesdoorverwijzing of doorverwijzing naar verwijderde pagina}}',
},
# Author: Njardarlogar
'nn': {
'redirect-fix-double': u'robot: retta dobbel omdirigering → %(to)s',
'redirect-remove-broken': u'robot: målet for omdirigeringa finst ikkje',
'redirect-fix-loop': u'robot: retta omdirigeringslykkje til %(to)s',
'redirect-broken-redirect-template': u'{{snøggsletting|dette er ei øydelagd omdirigering}}',
},
# Author: Danmichaelo
# Author: Jon Harald Søby
'no': {
'redirect-fix-double': u'robot: Retter dobbel omdirigering til %(to)s',
'redirect-remove-broken': u'Robot: Målet for omdirigeringen eksisterer ikke',
'redirect-fix-broken-moved': u'bot: Fikser ødelagte omdirigeringer til %(to)s som har blitt flyttet',
'redirect-fix-loop': u'Robot: Fikser omdirigeringsløkke til %(to)s',
'redirect-remove-loop': u'Robot: Målet for omdirigeringen danner en omdirigeringsløkke',
'redirect-broken-redirect-template': u'{{hurtigslett|Feilaktig omdirigering}}',
},
# Author: Xqt
'nv': {
'redirect-fix-double': u'Bot: dah astsihígíí łahgo áyiilaa -> %(to)s',
},
# Author: Cedric31
# Author: Xqt
'oc': {
'redirect-fix-double': u'Robòt : repara una dobla redireccion cap a %(to)s',
'redirect-fix-broken-moved': u'Robòt: Correccion de las redireccions erronèas cap a una pagina cibla %(to)s desplaçada',
'redirect-fix-loop': u'Robòt : repara una bocla de redireccion sus %(to)s',
'redirect-remove-loop': u'Robòt: la cibla de la redireccion forma una bocla de redireccion',
},
# Author: Geitost
# Author: Xqt
'pdc': {
'redirect-fix-double': u'Waddefresser: Doppelte Weiderleiding nooch %(to)s gennert',
'redirect-remove-broken': u'Waddefresser: Kaputte Weiderleiding',
'redirect-broken-redirect-template': u'{{verwische|Kaputte Weiderleiding}}',
},
'pfl': {
'redirect-fix-double': u'Bot: E doppelte Waiterlaitung vabessat zu %(to)s',
},
# Author: BeginaFelicysym
# Author: Derbeth
# Author: Nemo bis
# Author: Sp5uhe
# Author: Ty221
'pl': {
'redirect-fix-double': u'Robot naprawił podwójne przekierowanie do %(to)s',
'redirect-remove-broken': u'Robot: przekierowanie do usuniętej lub nieistniejącej strony',
'redirect-fix-broken-moved': u'Bot: Poprawa urwanych przekierowań - przeniesiono cel do %(to)s',
'redirect-fix-loop': u'Robot: Naprawa pętli przekierowań do %(to)s',
'redirect-remove-loop': u'Robot: pętla przekierowań',
'redirect-broken-redirect-template': u'{{ek|przekierowanie do usuniętej lub nieistniejącej strony}}',
},
# Author: Borichèt
# Author: Dragonòt
'pms': {
'redirect-fix-double': u'Trigomiro: a coregg ridiression dobia a %(to)s',
'redirect-remove-broken': u'Trigomiro: Ridiression a na pàgina scancelà o ch\'a esist nen',
'redirect-fix-broken-moved': u'Trigomiro: Coression ëd le ridiression nne bon-e a na pàgina ëd destinassion tramudà %(to)s',
'redirect-fix-loop': u'Trigomiro: Coression dël sicl ëd ridiression a %(to)s',
'redirect-remove-loop': u'Trigomiro: La destinassion ëd la ridiression a forma un sicl ëd ridiression',
},
# Author: Hamilton Abreu
# Author: Luckas
# Author: Malafaya
# Author: Xqt
'pt': {
'redirect-fix-double': u'Robô: A corrigir o redirecionamento duplo para %(to)s',
'redirect-remove-broken': u'Robô: Redirecionamento para uma página eliminada ou inexistente',
'redirect-fix-broken-moved': u'Robô: A corrigir redirecionamento quebrado para página alvo movida %(to)s',
'redirect-fix-loop': u'Robô: A corrigir o ciclo de redirecionamentos para %(to)s',
'redirect-remove-loop': u'Robô: O destino do redirecionamento cria um ciclo de redirecionamentos',
},
# Author: Amgauna
# Author: Hamilton Abreu
# Author: Helder.wiki
# Author: Luckas
# Author: Luckas Blade
# Author: Tuliouel
# Author: Xqt
# Author: 555
'pt-br': {
'redirect-fix-double': u'Bot: Corrigindo redirecionamento duplo para %(to)s',
'redirect-remove-broken': u'Robô: Redirecionamento para uma página eliminada ou inexistente',
'redirect-fix-broken-moved': u'Bot: consertando redirecionamento quebrado para página-alvo movida %(to)s',
'redirect-fix-loop': u'Bot: Corrigindo ciclo de redirecionamentos para %(to)s',
'redirect-remove-loop': u'Bot: O destino do redirecionamento cria um ciclo de redirecionamentos',
},
# Author: Firilacroco
# Author: Minisarm
'ro': {
'redirect-fix-double': u'Robot: Reparat dubla redirecționare înspre %(to)s',
'redirect-remove-broken': u'Robot: Redirecționare către o pagină ștearsă sau inexistentă',
'redirect-fix-loop': u'Robot: Reparat bucla de redirecționare către %(to)s',
'redirect-remove-loop': u'Robot: Ținta redirecționării formează o buclă de redirecționare',
},
# Author: DCamer
# Author: Rubin
# Author: Volkov
# Author: Xqt
'ru': {
'redirect-fix-double': u'бот: исправление двойного перенаправления → %(to)s',
'redirect-remove-broken': u'бот: [[ВП:КБУ#П1|П1]] - перенаправление на удалённую или несуществующую страницу',
'redirect-fix-broken-moved': u'Робот: Исправление перенаправления на перемещенную целевую страницу %(to)s',
'redirect-fix-loop': u'бот: исправление перенаправления на %(to)s',
'redirect-remove-loop': u'бот: перенаправление в никуда',
'redirect-broken-redirect-template': u'{{db-redirnone}}',
},
# Author: Gazeb
'rue': {
'redirect-fix-double': u'Робот: справив двоїте напрямлїня → %(to)s',
'redirect-remove-broken': u'[[WP:CSD#G8|G8]]: [[Wikipedia:Redirect|Напрямлїня]] на змазану або неекзістуючу сторінку',
'redirect-remove-loop': u'[[WP:CSD#G8|G8]]: [[Wikipedia:Redirect|Напрямлїня]] формує петлю напрямлїнь',
},
# Author: Avicennasis
# Author: John Reid
# Author: Xqt
'sco': {
'redirect-fix-double': u'Bot: Fixin dooble reguidal til %(to)s',
'redirect-remove-broken': u'Robot: Reguidal til ae delytit or non-exeestent page',
'redirect-fix-broken-moved': u'Robot: Fixin broken reguidal til muived target page %(to)s',
'redirect-fix-loop': u'Robot: Fixin reguidal luip til %(to)s',
'redirect-remove-loop': u'Robot: Reguidal tairget forms ae reguidal luip',
'redirect-broken-redirect-template': u'{{delete}}',
},
# Author: Teslaton
# Author: Wizzard
'sk': {
'redirect-fix-double': u'Robot: Opravujem dvojité presmerovanie na %(to)s',
'redirect-remove-broken': u'Robot: Presmerovanie na neexistujúcu stránku',
'redirect-fix-broken-moved': u'Robot: Oprava porušeného presmerovania na presunutú cieľovú stránku %(to)s',
'redirect-fix-loop': u'Robot: Oprava cyklického presmerovania na %(to)s',
'redirect-remove-loop': u'Robot: Cieľ presmerovania tvorí slučku',
},
# Author: Dbc334
# Author: Mateju
'sl': {
'redirect-fix-double': u'Bot: Popravljanje dvojnih preusmeritev na %(to)s',
'redirect-remove-broken': u'Robot: Preusmeritev na izbrisano ali neobstoječo stran',
'redirect-fix-broken-moved': u'Robot: Popravljanje okvarjene povezave preusmeritve na premaknjeno ciljno stran %(to)s',
'redirect-fix-loop': u'Robot: Popravljanje preusmeritvene zanke na %(to)s',
'redirect-remove-loop': u'Robot: Cilj preusmeritve ustvarja preusmeritveno zanko',
'redirect-broken-redirect-template': u'{{delete}}',
},
# Author: Abshirdheere
'so': {
'redirect-fix-double': u'Bot: Hagaajin u rogid labalaaban ee %(to)s',
'redirect-remove-broken': u'Bot: U wareejin bog la tirtiray ama aan jirin',
'redirect-fix-broken-moved': u'Bot: Dib u Sixidda qaldantay waxaa loo wareejiyey Bog kale %(to)s',
'redirect-fix-loop': u'Bot: Diyaarinta dib u bilaabista ee %(to)s',
'redirect-remove-loop': u'Bot: Ujeedka u rogista bogga ee dhaqaaqa',
},
# Author: Vinie007
'sq': {
'redirect-fix-double': u'Bot: Fixing dyfishtë përcjellëse tek %(to)s',
'redirect-remove-broken': u'[[WP:CSD#G8|G8]]: [[Wikipedia:Redirect|Redirect]] to a deleted or non-existent page',
'redirect-remove-loop': u'[[WP:CSD#G8|G8]]: [[Wikipedia:Redirect|Redirect]] target forms a redirect loop',
},
# Author: Rancher
'sr': {
'redirect-fix-double': u'Робот: исправљена двострука преусмерења у %(to)s',
'redirect-remove-broken': u'Робот: преусмерење до обрисане или непостојеће странице',
'redirect-fix-broken-moved': u'Робот: исправљено покварено преусмерење до премештене циљне странице %(to)s',
'redirect-fix-loop': u'Робот: исправљена петља преусмерења на %(to)s',
'redirect-remove-loop': u'Робот: одредиште преусмерења образује петљу',
'redirect-broken-redirect-template': u'{{db-r1}}',
},
# Author: Rancher
'sr-el': {
'redirect-fix-double': u'Robot: ispravljena dvostruka preusmerenja u %(to)s',
'redirect-remove-broken': u'Robot: preusmerenje do obrisane ili nepostojeće stranice',
'redirect-fix-broken-moved': u'Robot: ispravljeno pokvareno preusmerenje do premeštene ciljne stranice %(to)s',
'redirect-fix-loop': u'Robot: ispravljena petlja preusmerenja na %(to)s',
'redirect-remove-loop': u'Robot: odredište preusmerenja obrazuje petlju',
},
# Author: Boivie
# Author: Jopparn
# Author: Lokal Profil
# Author: Tobulos1
# Author: WikiPhoenix
'sv': {
'redirect-fix-double': u'Bot: Rättar dubbel omdirigering till %(to)s',
'redirect-remove-broken': u'Bot: Omdirigerar till en raderad eller en icke-existerande sida',
'redirect-fix-broken-moved': u'Bot: Reparerade trasig omdirigering till den flyttade målsidan %(to)s',
'redirect-fix-loop': u'Bot: Fixar omdirigeringsloop till %(to)s',
'redirect-remove-loop': u'Bot: Målet för omdirigeringen bildar en omdirigeringsloop',
'redirect-broken-redirect-template': u'{{radera|Trasig omdirigering}}',
},
# Author: Kwisha
'sw': {
'redirect-remove-broken': u'Boti: Uelekezaji kwa ukurasa uliofutwa au ambao haupatikani',
'redirect-fix-loop': u'Boti: Inarekebisha tanzi la uelekezaji kwa %(to)s',
},
# Author: Przemub
'szl': {
'redirect-fix-double': u'Robot sprowjo tuplowane przekerowańa → %(to)s',
'redirect-remove-broken': u'Robot: pukniyńcie dŏ wyciepanej zajty',
'redirect-fix-loop': u'Robot: Naprawa mocki pukniyńć dŏ (to)s',
'redirect-remove-loop': u'Robot: mocka pukniyńć',
'redirect-broken-redirect-template': u'{{delete|pukniyńciy dŏ wyciepanyj zajty}}',
},
# Author: செல்வா
'ta': {
'redirect-remove-broken': u'[[WP:CSD#G8|G8]]: நீக்கப்பட்ட அல்லது இல்லாத பக்கத்துக்கு [[Wikipedia:Redirect|வழிமாற்று]]',
},
# Author: Nullzero
'th': {
'redirect-fix-double': u'โรบอต: แก้หน้าเปลี่ยนทางซ้ำซ้อน → %(to)s',
'redirect-remove-broken': u'โรบอต: หน้าเปลี่ยนทางเสีย',
'redirect-fix-broken-moved': u'โรบอต: แก้หน้าเปลี่ยนทางเสียไปยังหน้าเป้าหมายการย้าย %(to)s',
'redirect-fix-loop': u'โรบอต: แก้หน้าเปลี่ยนทางวนรอบ → %(to)s',
'redirect-remove-loop': u'โรบอต: หน้าเปลี่ยนทางทำให้เกิดการเปลี่ยนทางวนรอบ',
'redirect-broken-redirect-template': u'{{ลบ|หน้าเปลี่ยนทางเสีย}}',
},
# Author: AnakngAraw
'tl': {
'redirect-fix-double': u'Bot: Kinukumpuni ang nagkadalawang pagpapapunta sa %(to)s',
'redirect-remove-broken': u'[[WP:CSD#G8|G8]]: [[Wikipedia:Redirect|Papuntahin]] sa isang pahinang nabura na o hindi na umiiral',
'redirect-fix-loop': u'Robot: Kinukumpuni ang silo ng pagpapapunta sa %(to)s',
'redirect-remove-loop': u'[[WP:CSD#G8|G8]]: [[Wikipedia:Redirect|Ang pagpapapunta sa ibang pook]] ay bumubuo ng nakalikaw na pagpapapunta',
},
# Author: Gorizon
# Author: Xqt
# Author: Гусейн
'tly': {
'redirect-fix-double': u'Робот: дығатә унвон дәгиш кардеј сәрост карде → %(to)s',
'redirect-remove-broken': u'Робот: Истиғомәти дәгиш карде бә молә быә јаанки бә мывҹуд ныбә сәһифә',
'redirect-fix-broken-moved': u'Робот: Бә вырәку дәгиш кардә быә мәғсәдә сәһифә %(to)s дәгиш быә истиғомәти сәрост кардеј',
'redirect-fix-loop': u'Робот: Истиғомәти дәгиш кардә ангыли сохте бә %(to)s',
'redirect-remove-loop': u'Bot: Унвони дәгиш карде бешә формон унвони дәгиш кардеј мәрә',
},
# Author: Emperyan
# Author: Khutuck
# Author: Stultiwikia
# Author: Vito Genovese
# Author: Xqt
'tr': {
'redirect-fix-double': u'Bot: %(to)s sayfasına yönelik çift yönlendirme düzeltiliyor',
'redirect-remove-broken': u'Bot: Silinen ya da var olmayan sayfaya olan yönlendirme',
'redirect-remove-loop': u'Bot: Yönlendirme hedefi bir yönlendirme döngüsü oluşturuyor',
'redirect-broken-redirect-template': u'{{sil|y1}}',
},
# Author: Ильнар
'tt': {
'redirect-fix-double': u'Робот: икеле күчешне дөресләү → %(to)s',
'redirect-remove-broken': u'[[ВП:ТБК#П1|П1]]: беркаяда күчеш ясамау',
'redirect-remove-loop': u'[[ВП:ТБК#П1|тиз бетерү критерийлары \'\'П.1\'\']] — беркаяда күчеш ясамау',
},
# Author: Ahonc
# Author: Andriykopanytsia
# Author: Base
# Author: Xqt
# Author: Тест
'uk': {
'redirect-fix-double': u'Робот: виправлення подвійного перенаправлення → %(to)s',
'redirect-remove-broken': u'бот: перенаправлення на вилучену або неіснуючу сторінку',
'redirect-fix-broken-moved': u'Робот: Виправлення розірваного перенаправлення на сторінку, перейменовану на %(to)s',
'redirect-fix-loop': u'бот: виправлення петлі перенаправлень на %(to)s',
'redirect-remove-loop': u'бот: перенаправлення формують петлю',
'redirect-broken-redirect-template': u'{{db-reason|перенаправлення до вилученої або неіснуючої сторінки}}',
},
# Author: Muhammad Shuaib
'ur': {
'redirect-fix-double': u'روبالہ: درستگی دوہرا رجوع مکرر بجانب %(to)s',
},
# Author: Alunardon90
# Author: Candalua
# Author: GatoSelvadego
'vec': {
'redirect-fix-double': u'Robot: Sistemo i dopi rimandi a %(to)s',
'redirect-remove-broken': u'Bot: Rinvio a na pagina scancelà o mia esistente',
'redirect-fix-loop': u'Robot: Preparasion de un ciclo de rindirisamento a %(to)s',
'redirect-remove-loop': u'Ła destinasion del rindirisamento rimanda a ła pajina de partensa',
'redirect-broken-redirect-template': u'{{Scanseła suito|9}}',
},
# Author: Emaus
'vep': {
'redirect-fix-double': u'Bot kohenzi kaksitadud läbikosketusen %(to)s',
},
# Author: Minh Nguyen
'vi': {
'redirect-fix-double': u'Bot: Giải quyết đổi hướng kép đến %(to)s',
'redirect-remove-broken': u'Bot: [[Wikipedia:Trang đổi hướng|Đổi hướng]] đến trang xóa hoặc không tồn tại',
'redirect-fix-broken-moved': u'Bot: Sửa đổi hướng sai; trang đích đã được di chuyển đến %(to)s',
'redirect-fix-loop': u'Bot: Sửa vòng lặp đổi hướng đến %(to)s',
'redirect-remove-loop': u'Bot: [[Wikipedia:Trang đổi hướng|Đổi hướng]] qua lại',
'redirect-broken-redirect-template': u'{{Chờ xóa}}',
},
# Author: Malafaya
'vo': {
'redirect-broken-redirect-template': u'{{moükön|Lüodüköm dädik}}',
},
# Author: Harvzsf
'war': {
'redirect-fix-double': u'Robot: Gin-ayad in nagduduha nga redirek → %(to)s',
'redirect-remove-broken': u'Robot: Redirek ngadto hin ginpárà o waray-didâ nga pakli',
'redirect-fix-loop': u'Robot: Gin-aayad in redirek nga loop ngadto ha %(to)s',
'redirect-remove-loop': u'Robot: An redirek nga ginkakadtoan naghihimo hin redirek nga loop',
'redirect-broken-redirect-template': u'{{delete}}Nautod o nagbinalikbalik nga redirek.',
},
# Author: פוילישער
'yi': {
'redirect-fix-double': u'באט: פארראכטן פארטאפלטע ווייטערפירונג → %(to)s',
'redirect-remove-broken': u'באט: ווײַטערפֿירונג צו א בלאט וואס איז אויסגעמעקט אדער עקזיסטירט נישט',
'redirect-fix-broken-moved': u'באט: פאררעכטן צעבראכענע ווייטערפירונג צו באוועגטן ציל בלאט %(to)s',
'redirect-fix-loop': u'באט: פאררעכטן ווייטערפירונג שלייף אויף %(to)s',
'redirect-remove-loop': u'באט: [[װיקיפּעדיע:ווייטערפירונג|ווייטערפירוג]] ציל שאפט א שלייף',
},
# Author: Demmy
'yo': {
'redirect-broken-redirect-template': u'{{db-r1}}',
},
# Author: Liangent
# Author: Linforest
# Author: Yfdyh000
# Author: 阿pp
'zh': {
'redirect-fix-double': u'机器人:修正双重重定向至%(to)s',
'redirect-remove-broken': u'机器人:重定向到已删除或不存在的页面',
'redirect-fix-broken-moved': u'机器人:修复破损的重定向到已移动的目标页面 %(to)s',
'redirect-fix-loop': u'机器人:修复重定向循环至%(to)s',
'redirect-remove-loop': u'机器人:重定向目标构成循环',
'redirect-broken-redirect-template': u'{{db-r1}}',
},
'zh-classical': {
'redirect-fix-double': u'僕:復修渡口 → %(to)s',
},
# Author: Andrew971218
# Author: Justincheng12345
# Author: Liangent
# Author: Simon Shek
'zh-hant': {
'redirect-fix-double': u'機械人:修正雙重定向至%(to)s',
'redirect-remove-broken': u'機械人:重定向到已刪除或不存在的頁面',
'redirect-fix-broken-moved': u'機械人:修復損壞的重定向頁到移動目標頁面 %(to)s',
'redirect-fix-loop': u'機械人:修復重定向迴圈至%(to)s',
'redirect-remove-loop': u'機械人:重定向目標構成循環',
},
# Author: Justincheng12345
'zh-hk': {
'redirect-fix-double': u'機械人修正雙重定向至%(to)s',
'redirect-remove-broken': u'[[WP:SD#G15|G15]]:[[Wikipedia:重定向|重定向]]到已刪除或不存在的頁面',
'redirect-fix-broken-moved': u'機械人修復損壞的重定向到已移動的目標頁面%(to)s',
'redirect-fix-loop': u'機械人修復重定向迴圈至%(to)s',
'redirect-remove-loop': u'[[WP:SD#R5|R5]]:[[Wikipedia:重定向|重定向]]目標構成循環',
},
'zh-yue': {
'redirect-fix-double': u'機械人:拉直連串跳轉 → %(to)s',
'redirect-remove-broken': u'機械人:跳轉目標唔存在',
},
};
| [
"[email protected]"
]
| |
6515cadde03f814f557d7bbe33305e6298e66d6d | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-cse/huaweicloudsdkcse/v1/model/get_kie_configs.py | c61d74b8ecc0b383da41262fbef00cb47742f108 | [
"Apache-2.0"
]
| permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 9,988 | py | # coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
# Auto-generated OpenAPI SDK model: a single KIE (key-value) configuration
# item returned by the CSE configuration-center API.  The repetitive
# property boilerplate and the openapi_types / attribute_map tables are the
# generator's standard (de)serialization convention — keep them structured
# exactly this way so the SDK's generic serializer keeps working.
class GetKieConfigs:
    """
    Attributes:
        openapi_types (dict): The key is attribute name
            and the value is attribute type.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
    """
    # Attribute names whose values must be masked as "****" by to_dict().
    sensitive_list = []
    openapi_types = {
        'id': 'str',
        'key': 'str',
        'labels': 'object',
        'value': 'str',
        'value_type': 'str',
        'status': 'str',
        'create_time': 'int',
        'update_time': 'int',
        'create_revision': 'int',
        'update_revision': 'int'
    }
    attribute_map = {
        'id': 'id',
        'key': 'key',
        'labels': 'labels',
        'value': 'value',
        'value_type': 'value_type',
        'status': 'status',
        'create_time': 'create_time',
        'update_time': 'update_time',
        'create_revision': 'create_revision',
        'update_revision': 'update_revision'
    }
    def __init__(self, id=None, key=None, labels=None, value=None, value_type=None, status=None, create_time=None, update_time=None, create_revision=None, update_revision=None):
        """GetKieConfigs
        The model defined in huaweicloud sdk
        :param id: ID of the configuration item.
        :type id: str
        :param key: Key of the configuration item.
        :type key: str
        :param labels: Labels of the configuration item.
        :type labels: object
        :param value: Value of the configuration item.
        :type value: str
        :param value_type: Type of the configuration item's value.
        :type value_type: str
        :param status: Status of the configuration item.
        :type status: str
        :param create_time: Creation time.
        :type create_time: int
        :param update_time: Last-update time.
        :type update_time: int
        :param create_revision: Revision number at which the configuration was created.
        :type create_revision: int
        :param update_revision: Revision number at which the configuration was last modified.
        :type update_revision: int
        """
        self._id = None
        self._key = None
        self._labels = None
        self._value = None
        self._value_type = None
        self._status = None
        self._create_time = None
        self._update_time = None
        self._create_revision = None
        self._update_revision = None
        self.discriminator = None
        # Only assign attributes the caller actually provided, so optional
        # fields stay None until explicitly set through their properties.
        if id is not None:
            self.id = id
        if key is not None:
            self.key = key
        if labels is not None:
            self.labels = labels
        if value is not None:
            self.value = value
        if value_type is not None:
            self.value_type = value_type
        if status is not None:
            self.status = status
        if create_time is not None:
            self.create_time = create_time
        if update_time is not None:
            self.update_time = update_time
        if create_revision is not None:
            self.create_revision = create_revision
        if update_revision is not None:
            self.update_revision = update_revision
    @property
    def id(self):
        """Gets the id of this GetKieConfigs.
        ID of the configuration item.
        :return: The id of this GetKieConfigs.
        :rtype: str
        """
        return self._id
    @id.setter
    def id(self, id):
        """Sets the id of this GetKieConfigs.
        ID of the configuration item.
        :param id: The id of this GetKieConfigs.
        :type id: str
        """
        self._id = id
    @property
    def key(self):
        """Gets the key of this GetKieConfigs.
        Key of the configuration item.
        :return: The key of this GetKieConfigs.
        :rtype: str
        """
        return self._key
    @key.setter
    def key(self, key):
        """Sets the key of this GetKieConfigs.
        Key of the configuration item.
        :param key: The key of this GetKieConfigs.
        :type key: str
        """
        self._key = key
    @property
    def labels(self):
        """Gets the labels of this GetKieConfigs.
        Labels of the configuration item.
        :return: The labels of this GetKieConfigs.
        :rtype: object
        """
        return self._labels
    @labels.setter
    def labels(self, labels):
        """Sets the labels of this GetKieConfigs.
        Labels of the configuration item.
        :param labels: The labels of this GetKieConfigs.
        :type labels: object
        """
        self._labels = labels
    @property
    def value(self):
        """Gets the value of this GetKieConfigs.
        Value of the configuration item.
        :return: The value of this GetKieConfigs.
        :rtype: str
        """
        return self._value
    @value.setter
    def value(self, value):
        """Sets the value of this GetKieConfigs.
        Value of the configuration item.
        :param value: The value of this GetKieConfigs.
        :type value: str
        """
        self._value = value
    @property
    def value_type(self):
        """Gets the value_type of this GetKieConfigs.
        Type of the configuration item's value.
        :return: The value_type of this GetKieConfigs.
        :rtype: str
        """
        return self._value_type
    @value_type.setter
    def value_type(self, value_type):
        """Sets the value_type of this GetKieConfigs.
        Type of the configuration item's value.
        :param value_type: The value_type of this GetKieConfigs.
        :type value_type: str
        """
        self._value_type = value_type
    @property
    def status(self):
        """Gets the status of this GetKieConfigs.
        Status of the configuration item.
        :return: The status of this GetKieConfigs.
        :rtype: str
        """
        return self._status
    @status.setter
    def status(self, status):
        """Sets the status of this GetKieConfigs.
        Status of the configuration item.
        :param status: The status of this GetKieConfigs.
        :type status: str
        """
        self._status = status
    @property
    def create_time(self):
        """Gets the create_time of this GetKieConfigs.
        Creation time.
        :return: The create_time of this GetKieConfigs.
        :rtype: int
        """
        return self._create_time
    @create_time.setter
    def create_time(self, create_time):
        """Sets the create_time of this GetKieConfigs.
        Creation time.
        :param create_time: The create_time of this GetKieConfigs.
        :type create_time: int
        """
        self._create_time = create_time
    @property
    def update_time(self):
        """Gets the update_time of this GetKieConfigs.
        Last-update time.
        :return: The update_time of this GetKieConfigs.
        :rtype: int
        """
        return self._update_time
    @update_time.setter
    def update_time(self, update_time):
        """Sets the update_time of this GetKieConfigs.
        Last-update time.
        :param update_time: The update_time of this GetKieConfigs.
        :type update_time: int
        """
        self._update_time = update_time
    @property
    def create_revision(self):
        """Gets the create_revision of this GetKieConfigs.
        Revision number at which the configuration was created.
        :return: The create_revision of this GetKieConfigs.
        :rtype: int
        """
        return self._create_revision
    @create_revision.setter
    def create_revision(self, create_revision):
        """Sets the create_revision of this GetKieConfigs.
        Revision number at which the configuration was created.
        :param create_revision: The create_revision of this GetKieConfigs.
        :type create_revision: int
        """
        self._create_revision = create_revision
    @property
    def update_revision(self):
        """Gets the update_revision of this GetKieConfigs.
        Revision number at which the configuration was last modified.
        :return: The update_revision of this GetKieConfigs.
        :rtype: int
        """
        return self._update_revision
    @update_revision.setter
    def update_revision(self, update_revision):
        """Sets the update_revision of this GetKieConfigs.
        Revision number at which the configuration was last modified.
        :param update_revision: The update_revision of this GetKieConfigs.
        :type update_revision: int
        """
        self._update_revision = update_revision
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # Walk the declared attributes (not __dict__) and recursively
        # convert nested SDK models via their own to_dict(); sensitive
        # attributes are masked instead of exposed.
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                if attr in self.sensitive_list:
                    result[attr] = "****"
                else:
                    result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        # NOTE(review): simplejson is a third-party dependency of this SDK;
        # the PY2 branch below is the legacy default-encoding workaround
        # and is dead code on Python 3.
        import simplejson as json
        if six.PY2:
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
    def __repr__(self):
        """For `print`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        # Structural equality: same type and identical attribute dicts.
        if not isinstance(other, GetKieConfigs):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"[email protected]"
]
| |
64c7384fef7e4dd6391e4f314b57c6de80fca160 | 321116aad628f819e13e4e033d819b1bc8ed5c78 | /deploy/dj_scaffold.wsgi | a88cd18489656640d0a1116a2183f5c874e0fcc2 | []
| no_license | sakishum/timeline-site | ccdd9fb34d69e3ed931d9e984934b885cef02a24 | 59981dac0eaa09aed5413dbd1932d3294be958d8 | refs/heads/master | 2021-04-28T07:16:11.941592 | 2017-03-06T02:48:20 | 2017-03-06T02:48:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 433 | wsgi | import os
import site
from dj_scaffold.env import add_site_dir
HERE = os.path.dirname(__file__)
ROOT_PATH = os.path.abspath(os.path.join(HERE, '../'))
ALLDIRS = [os.path.join(ROOT_PATH, 'env/lib/python2.7/site-packages'), os.path.join(ROOT_PATH, 'sites')]
add_site_dir(ALLDIRS)
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
| [
"[email protected]"
]
| |
b18b81bc61a1aca39d22a6fef58527e12ba9bee6 | 24eca673c9b7b6257847bb7aa154994f8e80295b | /pyguide/__init__.py | efc39bb2b1eddc355649a945707ac4dafdcbcae0 | []
| no_license | MacHu-GWU/pyrabbit-python-advance-guide-project | 47916c7cd498bed77e56e15ec948be0086e058f0 | 6cdc081064c53b77631d3406501bfda7450963b4 | refs/heads/master | 2020-04-12T12:26:53.538471 | 2015-12-30T21:58:58 | 2015-12-30T21:58:58 | 42,208,965 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,095 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Description:
This is a project documents everything the author learned from a newbie becoming
a Senior Python developer. This document is written mainly in Chinese, but I am
trying to do the best to make it bilingual.
Because I believe python3 is the future of python, so everything is tested in
python33.
说明:
pyrabbit-python-advance-guide-project 是一个将笔者从一个无任何计算机背景的菜鸟
到成长为一个高级Python开发者 - 大数据科学家的路上, 所学到的所有Python的知识。
经过笔者的思考, 和精心挑选的可执行的代码例子, 并将整个文档网站化, 可搜索化的一个
项目。
本项目基于Python33社区
项目主要分为下面5大部分:
- :mod:`cookbook <pyguide.cookbook>`: 一些有关Python的深入探讨
- :mod:`newbie <pyguide.newbie>`: 从零开始学Python
- :mod:`ref <pyguide.ref>`: Python官方参考文档中的精华总结
- :mod:`stdlib <pyguide.stdlib>`: Python标准库的摘要
- :mod:`package <pyguide.package>`: 第三方库相关文档
""" | [
"[email protected]"
]
| |
c34bf6f617607609bc89a2cc4b71f7dc5a3de12d | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/5/l_6.py | 66f9202215fb4e557d13f11fb40492601c33eae6 | []
| no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
    # Python 2 helper for the 'l_6' mini-language.  `lineRemaining` is the
    # list of whitespace-split tokens following the command tag.  A valid
    # print command is delimited by lone '"' tokens at both ends: the
    # tokens between the quotes are echoed space-separated; an empty quote
    # pair prints a blank line.  Non-quote-delimited input is ignored.
    if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
        if len(lineRemaining) > 2:
            #data to print
            lineRemaining = lineRemaining[1:-1]  # strip the quote tokens
            print ' '.join(lineRemaining)
        else:
            # only the two quote tokens -> empty payload, blank line
            print
def main(fileName):
    # Interpreter driver (Python 2): every line of the input file must
    # start with the language tag 'l_6'; the remaining tokens are handed
    # to printFunction.  The first line with any other tag prints ERROR
    # and aborts processing.
    with open(fileName) as f:
        for line in f:
            data = line.split()
            if data[0] == 'l_6':
                printFunction(data[1:])
            else:
                print 'ERROR'
                return
if __name__ == '__main__':
main(sys.argv[1]) | [
"[email protected]"
]
| |
b79edf89c8828024f571709b514601ebae22c9db | c2fa3b814a7f56ad804dffc767fc54f5099d60f8 | /dataset_processing/contours_gilbert_256_sparse_nonRandomShear.py | 73feb4a72e2d84d541d19dd708bfc9b3a1a8aa08 | []
| no_license | dmely/contextual_circuit_bp | 223b602dbabbe8f8091fbb9106f3103bd5e1dcba | a277bc3146beaa4e3edd2134fc9fb8d3388a6013 | refs/heads/master | 2021-10-07T19:04:14.509951 | 2018-03-31T17:10:33 | 2018-03-31T17:10:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,506 | py | import os
import re
import numpy as np
import tensorflow as tf
from glob import glob
from config import Config
from ops import tf_fun
class data_processing(object):
def __init__(self):
self.name = 'contours_gilbert_256_sparse_nonRandomShear'
self.im_extension = '.png'
self.images_dir = 'images'
self.label_regex = r'(?<=length)\d+'
self.config = Config()
self.im_size = [256, 256, 3] # 600, 600
self.model_input_image_size = [256, 256, 3] # [107, 160, 3]
self.max_ims = 0
self.output_size = [1]
self.label_size = self.output_size
self.default_loss_function = 'cce'
self.score_metric = 'accuracy'
self.store_z = False
self.normalize_im = True
self.shuffle = True
self.input_normalization = 'zscore'
self.preprocess = [''] # ['resize_nn']
self.folds = {
'train': 'train',
'val': 'val'
}
self.cv_split = 0.9
self.cv_balance = True
self.targets = {
'image': tf_fun.bytes_feature,
'label': tf_fun.int64_feature
}
self.tf_dict = {
'image': tf_fun.fixed_len_feature(dtype='string'),
'label': tf_fun.fixed_len_feature(dtype='int64')
}
self.tf_reader = {
'image': {
'dtype': tf.float32,
'reshape': self.im_size
},
'label': {
'dtype': tf.int64,
'reshape': self.output_size
}
}
def get_data(self):
"""Get the names of files."""
files = np.asarray(
glob(
os.path.join(
self.config.data_root,
self.name,
'*%s' % self.im_extension)))
labels = np.asarray(
[int(re.search(self.label_regex, x).group()) for x in files])
labels = (labels > 1).astype(np.int32)
ul, lc = np.unique(labels, return_counts=True)
include_count = np.min(lc)
if self.max_ims:
include_count = np.min([include_count, lc])
# Trim files and labels to include_count
pos_idx = np.where(labels == 1)[0][:include_count]
neg_idx = np.where(labels == 0)[0][:include_count]
# Create CV folds
cv_files, cv_labels = {}, {}
cv_files[self.folds['train']] = {}
cv_files[self.folds['val']] = {}
prev_cv = 0
for k, v in self.folds.iteritems():
if k == self.folds['train']:
cv_split = int(include_count * self.cv_split)
elif k == self.folds['val']:
cv_split = int(include_count * (1 - self.cv_split))
else:
raise NotImplementedError
if prev_cv:
cv_split += prev_cv
cv_inds = np.arange(prev_cv, cv_split)
it_files = np.concatenate((
files[pos_idx][cv_inds],
files[neg_idx][cv_inds]))
it_labels = np.concatenate((
labels[pos_idx][cv_inds],
labels[neg_idx][cv_inds]))
if self.shuffle:
shuffle_idx = np.random.permutation(len(it_files))
it_files = it_files[shuffle_idx]
it_labels = it_labels[shuffle_idx]
cv_files[k] = it_files
cv_labels[k] = it_labels
prev_cv = cv_split
return cv_files, cv_labels
| [
"[email protected]"
]
| |
7c67f33358881bf71360ee38962dfd0d831b637a | cc6a674cab1dc959189b9edff975625f4815bc1c | /ResNet/model.py | 75cb8048b0ad5a819a1f03a4a51e5f67be9b1524 | [
"MIT"
]
| permissive | shreyansh26/DL-Code-Repository | 15173042f566ea42f96eb65283347927a2fab4ff | f1974eedc1fef54b2d274703390a22721e46f502 | refs/heads/master | 2023-07-15T23:15:05.484609 | 2021-08-30T15:41:20 | 2021-08-30T15:41:20 | 382,834,342 | 0 | 0 | null | 2021-07-04T12:11:08 | 2021-07-04T11:27:13 | Python | UTF-8 | Python | false | false | 4,018 | py | from typing import List, Optional
import torch
from torch import tensor
import torch.nn as nn
class ShortcutProjection(nn.Module):
    """Projection shortcut for a residual block: a 1x1 convolution followed
    by batch normalization, used when the skip connection must match the
    main path's channel count and spatial resolution."""

    def __init__(self, in_channels: int, out_channels: int, stride: int):
        """Build the 1x1 conv + batch-norm pair; `stride` mirrors the
        stride of the residual branch this shortcut bypasses."""
        super().__init__()
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=stride)
        self.bn = nn.BatchNorm2d(out_channels)

    def forward(self, x: torch.Tensor):
        """Project `x` and normalize it; no activation is applied here."""
        projected = self.conv(x)
        return self.bn(projected)
class ResidualBlock(nn.Module):
    """Basic residual block: conv3x3 -> BN -> ReLU -> conv3x3 -> BN, with a
    skip connection added before the final ReLU.  The skip is projected
    (1x1 conv + BN) whenever the main path changes channel count or
    spatial size; otherwise it is the identity."""

    def __init__(self, in_channels: int, out_channels: int, stride: int):
        """`stride` applies to the first convolution (and to the
        projection shortcut, when one is needed)."""
        super().__init__()
        self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=stride, padding=1)
        self.bn1 = nn.BatchNorm2d(out_channels)
        self.act1 = nn.ReLU()
        self.conv2 = nn.Conv2d(out_channels, out_channels, kernel_size=3, stride=1, padding=1)
        self.bn2 = nn.BatchNorm2d(out_channels)
        # Project the skip path only when the main path changes the shape.
        shape_changes = stride != 1 or in_channels != out_channels
        self.shortcut = ShortcutProjection(in_channels, out_channels, stride) if shape_changes else nn.Identity()
        self.act2 = nn.ReLU()

    def forward(self, x):
        """Return relu(main_path(x) + shortcut(x))."""
        skip = self.shortcut(x)
        main = self.bn2(self.conv2(self.act1(self.bn1(self.conv1(x)))))
        return self.act2(main + skip)
class BottleneckResidualBlock(nn.Module):
    """Bottleneck residual block (1x1 reduce -> 3x3 -> 1x1 expand), as used
    by the deeper ResNets.  The skip connection is projected only when the
    main path changes the tensor's shape."""

    def __init__(self, in_channels: int, bottleneck_channels: int, out_channels: int, stride: int):
        """`bottleneck_channels` is the squeezed width of the middle 3x3
        convolution; `stride` (applied there) handles any downsampling."""
        super().__init__()
        # 1x1 conv squeezes the channels down to the bottleneck width.
        self.conv1 = nn.Conv2d(in_channels, bottleneck_channels, kernel_size=1, stride=1)
        self.bn1 = nn.BatchNorm2d(bottleneck_channels)
        self.act1 = nn.ReLU()
        # 3x3 stage does the spatial work and carries the stride.
        self.conv2 = nn.Conv2d(bottleneck_channels, bottleneck_channels, kernel_size=3, stride=stride, padding=1)
        self.bn2 = nn.BatchNorm2d(bottleneck_channels)
        self.act2 = nn.ReLU()
        # 1x1 conv expands back to the output width; no activation before the add.
        self.conv3 = nn.Conv2d(bottleneck_channels, out_channels, kernel_size=1, stride=1)
        self.bn3 = nn.BatchNorm2d(out_channels)
        shape_changes = stride != 1 or in_channels != out_channels
        self.shortcut = ShortcutProjection(in_channels, out_channels, stride) if shape_changes else nn.Identity()
        self.act3 = nn.ReLU()

    def forward(self, x: torch.Tensor):
        """Return relu(bottleneck_path(x) + shortcut(x))."""
        out = self.act1(self.bn1(self.conv1(x)))
        out = self.act2(self.bn2(self.conv2(out)))
        out = self.bn3(self.conv3(out))
        return self.act3(out + self.shortcut(x))
class ResNetBase(nn.Module):
    """Generic ResNet trunk: a strided stem convolution, a stack of
    residual stages, and global average pooling over the spatial dims.

    Each entry of `n_channels` defines one stage with `n_blocks[i]`
    residual blocks; passing `bottlenecks` switches every stage from basic
    two-conv blocks to 1x1/3x3/1x1 bottleneck blocks.  Only the very first
    block of the whole stack downsamples (stride 2), matching the original
    implementation."""

    def __init__(self, n_blocks: List[int], n_channels: List[int], bottlenecks: Optional[List[int]] = None, img_channels: int = 3, first_kernel_size: int = 7):
        """`bottlenecks[i]`, when given, is stage i's bottleneck width;
        `img_channels` and `first_kernel_size` configure the stem conv."""
        super().__init__()
        assert len(n_blocks) == len(n_channels)
        assert bottlenecks is None or len(bottlenecks) == len(n_channels)
        # Stem: stride-2 convolution with 'same'-style padding, then BN.
        self.conv = nn.Conv2d(img_channels, n_channels[0], kernel_size=first_kernel_size, stride=2, padding=first_kernel_size // 2)
        self.bn = nn.BatchNorm2d(n_channels[0])

        def new_block(c_in, c_out, stage, stride):
            # One residual block of the flavour chosen for this network;
            # creation order matches configuration order, so parameter
            # initialization is reproducible.
            if bottlenecks is None:
                return ResidualBlock(c_in, c_out, stride=stride)
            return BottleneckResidualBlock(c_in, bottlenecks[stage], c_out, stride=stride)

        stack = []
        prev_channels = n_channels[0]
        for stage, channels in enumerate(n_channels):
            stack.append(new_block(prev_channels, channels, stage, 2 if not stack else 1))
            stack.extend(new_block(channels, channels, stage, 1) for _ in range(n_blocks[stage] - 1))
            prev_channels = channels
        self.blocks = nn.Sequential(*stack)

    def forward(self, x: torch.Tensor):
        """Map (batch, img_channels, H, W) to (batch, n_channels[-1]):
        run the stem and all residual stages, then average every channel
        over its remaining spatial positions."""
        features = self.blocks(self.bn(self.conv(x)))
        flattened = features.view(features.shape[0], features.shape[1], -1)
        return flattened.mean(dim=-1)
| [
"[email protected]"
]
| |
6ba198aa9789fafafad70ec42d47555fbc892bfd | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2499/60764/260203.py | a884a40143476a0e998c5bfbe25a62acd3a735eb | []
| no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 521 | py | n=int(input())
stack=[]
for i in range(n):
command=input().split()
if command[0]=='Add':
stack.append(command)
elif command[0]=='Del':
ind=int(command[1])
if ind-1>=0 and ind-1<len(stack):
stack[ind-1][0]='No'
else:
x=int(command[1])
res=0
for j in range(len(stack)):
if stack[j][0]=="Add":
tem=int(stack[j][1])*x+int(stack[j][2])
if tem>int(stack[j][3]):
res+=1
print(res) | [
"[email protected]"
]
| |
18ad0fb390f8a5da0684d613ebbd0b40b06b19e0 | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/coder_20200618172244.py | e1df5edcdbebf0e88ece5d9f49dfb3cc0efc3a55 | []
| no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,353 | py | def MinWindowSubstring(strArr):
# code goes here
containing_string = strArr[0] # first string
search_string = ''.join(sorted(strArr[1])) # second string
print(len(search_string))
min_chars_required = len(search_string)
solution = ''
solution_array = []
for x in containing_string:
solution += x
total_cnt = 0
print("",solution)
for c in search_string:
found_cnt = solution.count(c)
needed_cnt = search_string.count(c)
if found_cnt >= needed_cnt:
total_cnt += 1
# print(total_cnt)
if total_cnt == min_chars_required:
solution_array.append(solution)
# print(solution_array)
# solution = ''
# actual_solution_array = []
# for word in solution_array:
# word = word [::-1]
# for x in word:
# solution += x
# total_cnt = 0
# # print(solution)
# for c in search_string:
# found_cnt = solution.count(c)
# needed_cnt = search_string.count(c)
# if found_cnt >= needed_cnt:
# total_cnt += 1
# # print(total_cnt)
# if total_cnt == min_chars_required:
# actual_solution_array.append(solution)
# answer = min((word for word in actual_solution_array if word), key=len)
# answer = answer [::-1]
# return answer
print(MinWindowSubstring(["sz","azjskfzts"])) | [
"[email protected]"
]
| |
12c9baca46273c4388c17257f7bd4400874025fc | 485be21ebe0a956b7f4a681968e160a463903ecc | /LibSVMRelate/SVMRunSinglePara.py | 4902b0dc26c4909d9231de75050cbe1097987df6 | []
| no_license | xiongchenyan/cxPyLib | e49da79345006d75a4261a8bbd4cc9a7f730fad2 | 8d87f5a872458d56276a2a2b0533170ede4d5851 | refs/heads/master | 2021-01-10T20:43:20.147286 | 2016-01-14T04:02:45 | 2016-01-14T04:02:45 | 17,610,431 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,523 | py | '''
Created on Mar 25, 2014
input: train, dev|test, para
output: accuracy, label of required
@author: cx
'''
import site
site.addsitedir('/bos/usr0/cx/PyCode/Geektools')
site.addsitedir('/bos/usr0/cx/PyCode/QueryExpansion')
site.addsitedir('/bos/usr0/cx/PyCode/cxPylib')
site.addsitedir('/bos/usr0/cx/LibSVM/libsvm/python/')
from LibSVMRelate.SVMBase import *
from svmutil import *
from cxBase.base import *
from cxBase.ContingencyTable import *
import json
class SVMRunSingleParaC(object):
    """Python 2 libsvm driver template: train with a single parameter set,
    predict on a test set, and dump a contingency table plus per-example
    predictions.  LoadData / DumpPrediction are abstract hooks that
    subclasses override with their own feature-file format.
    """
    def LoadData(self,InName):
        # Hook: parse InName into (labels, feature vectors) for libsvm.
        print "to be implemented by my inherited class"
        return [[],[]]
    def DumpPrediction(self,OutName,TestInName ,p_label,p_val):
        # Hook: write predicted labels / probability values per example.
        print "to be implemented by my inherited class"
        return False
    def Process(self,TrainInName,TestInName,ParaInName,OutName):
        # Train -> predict -> evaluate pipeline for one SVM configuration.
        # Writes the contingency table (JSON) to OutName and raw
        # predictions to OutName + "_pre".
        lY,lX = self.LoadData(TrainInName)
        lSVMPara = ReadSVMParaSet(ParaInName)
        SVMPara = lSVMPara[0] #only use first one
        SVMModel = svm_train(lY,lX,SVMPara.dump())
        lTestY,lTestX = self.LoadData(TestInName)
        # '-b 1' requests probability estimates (returned in p_val).
        p_label,p_acc,p_val = svm_predict(lTestY,lTestX,SVMModel,'-b 1')
        #add a contingency matrix output?
        lCTable = ContingencyTable(p_label,lTestY)
        print json.dumps(lCTable)
        out = open(OutName,'w')
        json.dump(lCTable,out)
        out.close()
        self.DumpPrediction(OutName + "_pre", TestInName, p_label, p_val)
        return True
| [
"[email protected]"
]
| |
fc3532d68cd78aace0e9f911ff245742aa89aed3 | 991eef78c307ebfd5d149d5908270e1e528e75c2 | /models/cells/modelPC2006Akemann.py | b4e850851b05e33ace54ccde2264c8a4c21316f1 | [
"BSD-3-Clause"
]
| permissive | HarshKhilawala/cerebmodels | bd1a2310253de5536a77be08dfdb33e29d6e636d | d2a2f2ef947ef9dc23ddce6e55159240cd3233cb | refs/heads/master | 2023-06-11T09:36:10.969240 | 2021-06-29T17:21:09 | 2021-06-29T17:21:09 | 369,399,200 | 0 | 0 | BSD-3-Clause | 2021-05-21T03:08:41 | 2021-05-21T03:08:40 | null | UTF-8 | Python | false | false | 7,378 | py | # ~/models/cells/modelPC2006Akemann.py
import os
pwd = os.getcwd() # record root directory path ~/cerebmodels
path_to_files = pwd + os.sep + "models" + os.sep + "cells" + os.sep + \
"PC2006Akemann" + os.sep # record path to this model/folder
from models.cells.PC2006Akemann.Purkinje import Purkinje
from executive import ExecutiveControl
from managers.simulation import SimulationManager as sm
from managers.read import ReadManager as rm
from managers.signalprocessing import SignalProcessingManager as spm
import sciunit
from cerebunit.capabilities.cells.response import ProducesElectricalResponse
from cerebunit.capabilities.cells.measurements import ProducesSomaRestingVm, ProducesSomaSpikeHeight
#from pdb import set_trace as breakpoint
class PurkinjeCell( sciunit.Model,
                    ProducesElectricalResponse,
                    ProducesSomaRestingVm ):
    """SciUnit-wrapped Akemann & Knöpfel (2006) Purkinje cell model.

    USE CASE:
        Instantiate and hand to an ExecutiveControl-driven validation test;
        the produce_* capability methods run the simulation and attach the
        resulting prediction/response file to the returned model.
    """
    # AFTER the model is in the HBP Validation Framework Model catalog, set the generated uuid
    #uuid = "22dc8fd3-c62b-4e07-9e47-f5829e038d6d"
    def __init__(self):
        """Set descriptive metadata and instantiate the NEURON cell template."""
        ### ===================== Descriptive Attributes ======================
        self.modelscale = "cells"
        self.modelname = "PC2006Akemann"
        # ------specify cell-regions from which responses are recorded-------
        self.regions = {"soma": ["v"]} #"dend_sm": ["v"], "dend_sp": ["v"]}
        self.recordingunits = {"v": "mV"}
        # -----------attributes inherited from sciunit.Model--------------
        self.name = "Akemann and Knöpfel 2006 model of PurkinjeCell"
        # NOTE(review): "006" below looks like a typo for "2006" -- confirm
        # before publishing (runtime string, intentionally left unchanged here).
        self.description = "Akemann & Knöpfel 006 model of PurkinjeCell (PC) and published in 10.1523/JNEUROSCI.5204-05.2006 This is a single compartment model. This model is the SciUnit wrapped version of the NEURON model in modelDB accession # 80769."
        #
        ### =================== Instantiate cell template ====================
        sm.lock_and_load_model_libraries(modelscale=self.modelscale,
                                         modelname=self.modelname)
        # chdir into the model folder to instantiate (presumably so NEURON can
        # locate its mod files -- TODO confirm), then restore the original cwd.
        os.chdir(path_to_files)
        self.cell = Purkinje()
        os.chdir(pwd)
        ### ===============================================================
        # Populated later by the produce_* capability methods.
        self.fullfilename = "nil"
        self.prediction = "nil"
        #
    # =======================================================================
    # +++++++++++++++++++++++ MODEL CAPABILITIES ++++++++++++++++++++++++++++
    # =======================================================================
    # --------------------- produce_voltage_response ------------------------
    def produce_voltage_response(self, **kwargs):
        """Run a raw simulation and record where the response was saved.

        **Keyword Arguments:**
        kwargs = { "parameters": dictionary with keys,
                   "stimparameters": None or dictionary with keys "type" and "stimlist",
                   "stimloc": stimulation site (passed through to launch_model),
                   "onmodel": instantiated model }
        Returns the launched model with `fullfilename` set to the saved file.
        """
        #ExecutiveControl.launch_model_raw("cells")
        print("Simulation produce_voltage_response starting ...")
        ec = ExecutiveControl() # only works when in ~/cerebmodels
        model = ec.launch_model( parameters = kwargs["parameters"],
                                 stimparameters = kwargs["stimparameters"],
                                 stimloc = kwargs["stimloc"],
                                 onmodel = kwargs["onmodel"], mode = "raw" )
        print("File saving ...")
        fullfilename = ec.save_response()
        setattr(model, "fullfilename", fullfilename)
        print("File saved.")
        print("Simulation produce_voltage_response Done.")
        return model
    # ----------------------- produce_soma_restingVm -----------------------------
    def produce_soma_restingVm(self, **kwargs):
        """Run the simulation and distill the soma resting Vm per epoch.

        kwargs = { "parameters": dictionary with keys,
                   "stimparameters": dictionary with keys "type" and "stimlist",
                   "stimloc": stimulation site,
                   "onmodel": instantiated model }
        Sets model.prediction to the list of pre-epoch baseline Vm values
        distilled from the saved NWB response file, then returns the model.
        """
        print("Sim produce_soma v_restingVm starting ...")
        ec = ExecutiveControl() # only works when in ~/cerebmodels
        model = ec.launch_model( parameters = kwargs["parameters"],
                                 stimparameters = kwargs["stimparameters"],
                                 stimloc = kwargs["stimloc"], onmodel = kwargs["onmodel"],
                                 capabilities = {"model": "produce_voltage_response",
                                                 "vtest": ProducesElectricalResponse},
                                 mode="capability")
        nwbfile = rm.load_nwbfile(model.fullfilename)
        # Gather timestamps and data for every epoch recorded at "soma v".
        orderedepochs = rm.order_all_epochs_for_region(nwbfile=nwbfile, region="soma v")
        timestamps_over_epochs = [ rm.timestamps_for_epoch( orderedepochs[i] )
                                   for i in range(len(orderedepochs)) ]
        data_over_epochs = [ rm.data_for_epoch( orderedepochs[i] )
                             for i in range(len(orderedepochs)) ]
        baseVms = spm.distill_baseVm_pre_epoch( timestamps = timestamps_over_epochs,
                                                datavalues = data_over_epochs )
        setattr(model, "prediction", baseVms)
        print("Simulation produce_soma v_restingVm Done.")
        return model
    # ----------------------- produce_soma_spikeheight ------------------------
    def produce_soma_spikeheight(self, **kwargs):
        """Run the simulation and set model.prediction to peak - baseline Vm.

        kwargs = { "parameters": dictionary with keys,
                   "stimparameters": dictionary with keys "type" and "stimlist",
                   "stimloc": stimulation site,
                   "onmodel": instantiated model }
        """
        print("Sim produce_soma_spikeheight starting ...")
        ec = ExecutiveControl() # only works when in ~/cerebmodels
        model = ec.launch_model( parameters = kwargs["parameters"],
                                 stimparameters = kwargs["stimparameters"],
                                 stimloc = kwargs["stimloc"], onmodel = kwargs["onmodel"],
                                 capabilities = {"model": "produce_voltage_response",
                                                 "vtest": ProducesElectricalResponse},
                                 mode="capability" )
        nwbfile = rm.load_nwbfile(model.fullfilename)
        orderedepochs = rm.order_all_epochs_for_region(nwbfile=nwbfile, region="soma v")
        timestamps_over_epochs = [ rm.timestamps_for_epoch( orderedepochs[i] )
                                   for i in range(len(orderedepochs)) ]
        data_over_epochs = [ rm.data_for_epoch( orderedepochs[i] )
                            for i in range(len(orderedepochs)) ]
        baseVm = spm.distill_baseVm_pre_epoch( timestamps = timestamps_over_epochs,
                                               datavalues = data_over_epochs )
        try:
            peakVms = spm.distill_peakVm_from_spikes( timestamps = timestamps_over_epochs,
                                                      datavalues = data_over_epochs )
        except:
            # Fall back to the baseline (height 0) when spike distillation
            # fails. NOTE(review): this bare except also hides unrelated
            # errors -- consider narrowing it.
            peakVms = baseVm
        setattr(model, "prediction", peakVms[0] - baseVm[0])
        print("Simulation produce_soma_spikeheight Done.")
        return model
    # ----------------------- produce_spike_train ---------------------------
    def produce_spike_train(self, **kwargs):
        """Not implemented yet.

        Use case:
        """
        pass
| [
"[email protected]"
]
| |
b997f16691dc838e057231b7245b34283772c091 | e7e34e2726790686a1f239e22487fe7c957e179f | /homeassistant/components/nam/sensor.py | c5c9c9f2e77ca2bc5e91242996efcb8adf307036 | [
"Apache-2.0"
]
| permissive | AlexxIT/home-assistant | 68a17b49644c5d943b204dc75e1f11fe3b701161 | 8de7966104911bca6f855a1755a6d71a07afb9de | refs/heads/dev | 2022-03-22T14:37:18.774214 | 2021-10-09T16:10:43 | 2021-10-09T16:10:43 | 100,278,871 | 9 | 0 | Apache-2.0 | 2022-01-31T06:18:02 | 2017-08-14T14:50:46 | Python | UTF-8 | Python | false | false | 3,817 | py | """Support for the Nettigo Air Monitor service."""
from __future__ import annotations
from datetime import timedelta
import logging
from typing import cast
from homeassistant.components.sensor import (
DOMAIN as PLATFORM,
SensorEntity,
SensorEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.util.dt import utcnow
from . import NAMDataUpdateCoordinator
from .const import ATTR_UPTIME, DOMAIN, MIGRATION_SENSORS, SENSORS
PARALLEL_UPDATES = 1
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
    """Add a Nettigo Air Monitor entities from a config_entry."""
    coordinator: NAMDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]

    # Two sensors were renamed; move any previously registered entities over
    # to the new unique IDs so existing entity customizations are kept.
    ent_reg = entity_registry.async_get(hass)
    for old_key, new_key in MIGRATION_SENSORS:
        old_unique_id = f"{coordinator.unique_id}-{old_key}"
        new_unique_id = f"{coordinator.unique_id}-{new_key}"
        entity_id = ent_reg.async_get_entity_id(PLATFORM, DOMAIN, old_unique_id)
        if entity_id:
            _LOGGER.debug(
                "Migrating entity %s from old unique ID '%s' to new unique ID '%s'",
                entity_id,
                old_unique_id,
                new_unique_id,
            )
            ent_reg.async_update_entity(entity_id, new_unique_id=new_unique_id)

    # Create one entity per sensor description the device reported data for.
    sensors: list[NAMSensor | NAMSensorUptime] = []
    for description in SENSORS:
        if getattr(coordinator.data, description.key) is None:
            continue
        sensor_cls = NAMSensorUptime if description.key == ATTR_UPTIME else NAMSensor
        sensors.append(sensor_cls(coordinator, description))

    async_add_entities(sensors, False)
class NAMSensor(CoordinatorEntity, SensorEntity):
    """Define an Nettigo Air Monitor sensor."""

    coordinator: NAMDataUpdateCoordinator

    def __init__(
        self,
        coordinator: NAMDataUpdateCoordinator,
        description: SensorEntityDescription,
    ) -> None:
        """Initialize."""
        super().__init__(coordinator)
        self.entity_description = description
        self._attr_unique_id = f"{coordinator.unique_id}-{description.key}"
        self._attr_device_info = coordinator.device_info

    @property
    def native_value(self) -> StateType:
        """Return the state."""
        raw = getattr(self.coordinator.data, self.entity_description.key)
        return cast(StateType, raw)

    @property
    def available(self) -> bool:
        """Return if entity is available."""
        if not super().available:
            return False
        # Shortly after boot the device omits some readings; treat a missing
        # reading as the entity being unavailable.
        return getattr(self.coordinator.data, self.entity_description.key) is not None
class NAMSensorUptime(NAMSensor):
    """Define an Nettigo Air Monitor uptime sensor."""

    @property
    def native_value(self) -> str:
        """Return the state."""
        # Convert "seconds since boot" into the ISO-formatted boot timestamp.
        uptime_sec = getattr(self.coordinator.data, self.entity_description.key)
        boot_time = utcnow() - timedelta(seconds=uptime_sec)
        return boot_time.replace(microsecond=0).isoformat()
| [
"[email protected]"
]
| |
8be966152d0f159f1526e3cbea1aa111e05af8a3 | d3b3a3e8c3a053c74479d588a26dd0343e015663 | /练习代码/Flask/6.7/重定向.py | 50f2523f83def5f96a516f4a5c4c6f8cfa8cd41e | []
| no_license | Mr-big-c/github | b6978ad920bc8f4d2cee1fca1cac72cce5767e12 | d301aa4516a00007980f34b94de3c7b5fb4198fa | refs/heads/master | 2021-10-01T18:57:51.133095 | 2018-11-28T15:00:40 | 2018-11-28T15:00:40 | 300,145,501 | 1 | 0 | null | 2020-10-01T04:42:30 | 2020-10-01T04:42:29 | null | UTF-8 | Python | false | false | 833 | py | # -*- coding: utf-8 -*-
# @File : 重定向.py
# @Author: 一稚杨
# @Date : 2018/6/7/007
# @Desc : 重定向和404页面定义
# redirect实现重定向
from flask import Flask, redirect, render_template, flash
app = Flask(__name__)
app.secret_key = '123456'
@app.route("/index1")
def index1():
flash("登录成功", category="login")
flash("hello",category="hello")
return redirect("/index2/")
@app.route("/index2/")
def index2():
return render_template("flash.html")
@app.errorhandler(404)
def error(error):
return render_template("404.html"),404
# form表单action为空时访问那个页面?结论:当action为空时,数据提交给发送数据的页面
@app.route("/action_none", methods=["GET", "POST"])
def action_none():
return render_template("action.html")
app.run(debug=True) | [
"[email protected]"
]
| |
4472c64391e5233857750bdc27edfff4c207f694 | c189f58db5e339df742a78b9be0a3b768d5bb3e8 | /Python_学习手册/Exercise/timer.py | 6be6efc4c2ebd1be252362535947d39bc6e51233 | []
| no_license | xuyagang/pycon | 19ac0de6ff2ef0569dbc81673aed51aad1ccd8bc | b6be3293f0ffc7d399fd039f6b274fff8f71584c | refs/heads/master | 2021-12-15T05:03:42.992320 | 2021-11-03T23:59:19 | 2021-11-03T23:59:19 | 153,795,002 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 227 | py | import time
reps = 1000
repslist = range(reps)
def timer(func,*pargs, **kargs):
start = time.clock()
for i in repslist:
ret = func(*pargs,**kargs)
elapsed = time.clock() - start
return (elapsed, ret)
| [
"[email protected]"
]
| |
6c44b31939b27278f1be85c015a64c31f9a07491 | 5f8534eadc182d0c79c4089bd803bb63b1178f5d | /src/textbook/chapter5/chapter5-1/chapter5-1-2.py | 6f6d9fd758ac57e658af109068ea7a4c0991a052 | []
| no_license | mryyomutga/TechnicalSeminar | b0f0a3c2fbff469e22896dc782586febdd604919 | 3bd1c3a9c1c3e2331586dbaab0ce745c2f7e86bd | refs/heads/master | 2021-08-28T02:30:02.062637 | 2017-12-11T03:29:15 | 2017-12-11T03:29:15 | 106,644,976 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 285 | py | # -*- coding: utf-8 -*-
# デスクトップアプリケーションの作成2
# ファイル選択ダイアログ
import tkinter.filedialog as fd
path = fd.askopenfilename(
title="ファイルを選択してください",
filetypes=[("python", "py")]
)
print(path)
| [
"[email protected]"
]
| |
a8881eead08e69937d42ebfbb4af65004a20ed91 | 1c6283303ceb883add8de4ee07c5ffcfc2e93fab | /Jinja2/lib/python3.7/site-packages/ixnetwork_restpy/testplatform/sessions/ixnetwork/impairment/profile/delay/delay.py | 1bb184d1931df595e004a78377ad7156ad6ab4ba | []
| no_license | pdobrinskiy/devcore | 0f5b3dfc2f3bf1e44abd716f008a01c443e14f18 | 580c7df6f5db8c118990cf01bc2b986285b9718b | refs/heads/main | 2023-07-29T20:28:49.035475 | 2021-09-14T10:02:16 | 2021-09-14T10:02:16 | 405,919,390 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,817 | py | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
from typing import List, Any, Union
class Delay(Base):
    """Delay each packet.
    The Delay class encapsulates a required delay resource which will be retrieved from the server every time the property is accessed.

    NOTE: this is auto-generated IxNetwork REST-SDK code; the property
    getters/setters round-trip through the server via the inherited
    _get_attribute/_set_attribute helpers.
    """
    __slots__ = ()
    _SDM_NAME = 'delay'
    # Maps Python-side attribute names to server-side attribute names.
    _SDM_ATT_MAP = {
        'Enabled': 'enabled',
        'Units': 'units',
        'Value': 'value',
    }
    # Allowed enum values per server-side attribute (used for validation).
    _SDM_ENUM_MAP = {
        'units': ['kilometers', 'kKilometers', 'kMicroseconds', 'kMilliseconds', 'kSeconds', 'microseconds', 'milliseconds', 'seconds'],
    }
    def __init__(self, parent, list_op=False):
        super(Delay, self).__init__(parent, list_op)
    @property
    def Enabled(self):
        # type: () -> bool
        """
        Returns
        -------
        - bool: If true, delay packets.
        """
        return self._get_attribute(self._SDM_ATT_MAP['Enabled'])
    @Enabled.setter
    def Enabled(self, value):
        # type: (bool) -> None
        self._set_attribute(self._SDM_ATT_MAP['Enabled'], value)
    @property
    def Units(self):
        # type: () -> str
        """
        Returns
        -------
        - str(kilometers | kKilometers | kMicroseconds | kMilliseconds | kSeconds | microseconds | milliseconds | seconds): Specify the units for the delay value.
        """
        return self._get_attribute(self._SDM_ATT_MAP['Units'])
    @Units.setter
    def Units(self, value):
        # type: (str) -> None
        self._set_attribute(self._SDM_ATT_MAP['Units'], value)
    @property
    def Value(self):
        # type: () -> int
        """
        Returns
        -------
        - number: Time to delay each packet.
        """
        return self._get_attribute(self._SDM_ATT_MAP['Value'])
    @Value.setter
    def Value(self, value):
        # type: (int) -> None
        self._set_attribute(self._SDM_ATT_MAP['Value'], value)
    def update(self, Enabled=None, Units=None, Value=None):
        # type: (bool, str, int) -> Delay
        """Updates delay resource on the server.
        Args
        ----
        - Enabled (bool): If true, delay packets.
        - Units (str(kilometers | kKilometers | kMicroseconds | kMilliseconds | kSeconds | microseconds | milliseconds | seconds)): Specify the units for the delay value.
        - Value (number): Time to delay each packet.
        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
| [
"[email protected]"
]
| |
ccaee085a1c2b7c3aa12fe980fb250035df6e7a2 | 63fe333ce975837a3cb9c061ea910dc6923ec724 | /training/c17_numpy/e09-broadcasting.py | 9f9da96038f4bda2d49208ab685d482e3e40508a | []
| no_license | PablitoMoribe/pythontraining | 1976a5297b8316f1295a0d5d85f5bd3c99a096e3 | 49e146a28080c1b4fc7c3a7b37ce8c4593a139ff | refs/heads/master | 2020-07-07T16:56:44.136633 | 2019-08-19T21:47:34 | 2019-08-19T21:47:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 607 | py | import numpy as np
# pip install numpy
arreglo1 = np.arange(1, 10, dtype=np.int32).reshape((3, 3))
print('Numpy Array 1:\n{}'.format(arreglo1))
arreglo2 = np.arange(3, 0, -1, dtype=np.int32).reshape((1, 3))
print('Numpy Array 2:\n{}'.format(arreglo2))
arreglo3 = np.arange(10, 40, 10, dtype=np.int32).reshape((3, 1))
print('Numpy Array 3:\n{}'.format(arreglo3))
print()
# Binary Operations
print('Numpy Addition (a1 + a2)')
print(arreglo1 + arreglo2, end='\n\n')
print('Numpy Addition (a1 + a3)')
print(arreglo1 + arreglo3, end='\n\n')
print('Numpy Addition (a1 + 5)')
print(arreglo1 + 5, end='\n\n')
| [
"[email protected]"
]
| |
2db4525f3313105e7aba42ba26b5a1930ac90257 | 32f5bc330388a96877d93fdd7b21599a40735400 | /Python/intelligenceinfection.py | 49c5fea58ddfdc9d565b71c8d1656d780a9a1244 | []
| no_license | alexlwn123/kattis | 670180d86f0863328a16e12ed937c2fefb3226a2 | c1163bae3fdaf95c1087b216c48e7e19059d3d38 | refs/heads/master | 2021-06-21T16:26:15.642449 | 2020-12-24T20:59:10 | 2020-12-24T20:59:10 | 152,286,208 | 1 | 1 | null | 2018-10-14T22:40:09 | 2018-10-09T16:40:48 | Java | UTF-8 | Python | false | false | 51 | py | def main():
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
5213395f6c53b2b1a1104035cc73eb2592848add | 250003a344e5cb974579d67daeeed7ea6b51f620 | /day18/01_Demo_Logistic_Regression.py | 02a1d8461a66806f24b2a0a7da863c86c35cf46c | []
| no_license | lakshsharma07/training2019 | 540cace993859f06b4f402fc5a7849b26fb11ce8 | d8ae0b0e1e567525e806834379713003a676ec06 | refs/heads/master | 2020-05-24T07:11:25.613107 | 2019-06-25T03:27:58 | 2019-06-25T03:27:58 | 187,154,851 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,381 | py | # -*- coding: utf-8 -*-
import sklearn as sk
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
"""
We will look at data regarding coronary heart disease (CHD) in South Africa. The goal is to use different variables such as tobacco usage, family history, ldl cholesterol levels, alcohol usage, obesity and more.
"""
heart = pd.read_csv('Heart_Disease.csv', sep=',', header=0)
heart.head()
labels = heart.iloc[:,9].values
features = heart.iloc[:,:9].values
# Splitting the dataset into the Training set and Test set
from sklearn.model_selection import train_test_split
features_train, features_test, labels_train, labels_test = train_test_split(features, labels, test_size = 0.25, random_state = 0)
# Feature Scaling
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
features_train = sc.fit_transform(features_train)
features_test = sc.transform(features_test)
# Fitting Logistic Regression to the Training set
from sklearn.linear_model import LogisticRegression
classifier = LogisticRegression()
classifier.fit(features_train, labels_train)
#Calculate Class Probabilities
probability = classifier.predict_proba(features_test)
# Predicting the class labels
labels_pred = classifier.predict(features_test)
# Making the Confusion Matrix
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(labels_test, labels_pred) | [
"[email protected]"
]
| |
a3d809d9847fd1ca1ddb0ce237317c2be57f2b9f | eddb3dfb5e1a0a3e58254f285c3700b45dce76d9 | /repos/spikeextractors/spikeextractors/extractors/mdaextractors/mdaio.py | 1e46e52b8468f65116a852e89d124906536f18b7 | [
"Apache-2.0"
]
| permissive | tjd2002/spikeforest2 | f2281a8d3103b3fbdd85829c176819a5e6d310d0 | 2e393564b858b2995aa2ccccd9bd73065681b5de | refs/heads/master | 2020-04-25T07:55:19.997810 | 2019-02-26T01:19:23 | 2019-02-26T01:19:23 | 172,628,686 | 0 | 0 | Apache-2.0 | 2019-02-26T03:11:27 | 2019-02-26T03:11:26 | null | UTF-8 | Python | false | false | 14,885 | py | import numpy as np
import struct
import os
import requests
import tempfile
import traceback
class MdaHeader:
    """Header metadata for a .mda file: dtype, dimensions and byte offsets."""

    def __init__(self, dt0, dims0):
        """Build a header for dtype name *dt0* and dimension list *dims0*."""
        # Any dimension above ~2e9 exceeds int32 range, so switch to the
        # 64-bit-dimensions variant of the on-disk format.
        big_dims = max(dims0) > 2e9
        self.uses64bitdims = big_dims
        self.dt_code = _dt_code_from_dt(dt0)
        self.dt = dt0
        self.num_bytes_per_entry = get_num_bytes_per_entry_from_dt(dt0)
        self.num_dims = len(dims0)
        self.dimprod = np.prod(dims0)
        self.dims = dims0
        # Three int32 fields, then one integer per dimension
        # (8 bytes each when 64-bit dims are in use, otherwise 4).
        self.header_size = 3 * 4 + self.num_dims * (8 if big_dims else 4)
def npy_dtype_to_string(dt):
    """Map a numpy dtype to the dtype-name string used by this module.

    Raises KeyError for unsupported dtypes (unchanged behavior).
    """
    # dt.str is e.g. '<f4'; drop the leading byte-order character.
    # (The original locals were named `str` and `map`, shadowing builtins.)
    code = dt.str[1:]
    names = {
        "f2": 'float16',
        "f4": 'float32',
        "f8": 'float64',
        "i1": 'int8',
        "i2": 'int16',
        "i4": 'int32',
        "u2": 'uint16',
        "u4": 'uint32'
    }
    return names[code]
class DiskReadMda:
    """Random-access reader for .mda files (experimental .npy support).

    Data are stored on disk in column-major (Fortran) order; chunk reads
    return arrays reshaped with order='F'. *path* may be a local path or an
    http(s) URL, in which case byte ranges are downloaded on demand.
    """
    def __init__(self,path,header=None):
        # An explicit *header* may be supplied for headerless raw files; its
        # header_size is forced to 0 so offsets start at the file beginning.
        self._npy_mode=False
        self._path=path
        if (file_extension(path)=='.npy'):
            # .npy support is unfinished: everything below this raise in the
            # branch is intentionally unreachable dead code.
            raise Exception('DiskReadMda implementation has not been tested for npy files')
            self._npy_mode=True
            if header:
                raise Exception('header not allowed in npy mode for DiskReadMda')
        if header:
            self._header=header
            self._header.header_size=0
        else:
            self._header=_read_header(self._path)
    def dims(self):
        """Return the dimensions of the stored array."""
        if self._npy_mode:
            A=np.load(self._path,mmap_mode='r')
            return A.shape
        return self._header.dims
    def N1(self):
        # Size of the first dimension.
        return self.dims()[0]
    def N2(self):
        # Size of the second dimension.
        return self.dims()[1]
    def N3(self):
        # Size of the third dimension.
        return self.dims()[2]
    def dt(self):
        """Return the dtype name (e.g. 'float32') of the stored data."""
        if self._npy_mode:
            A=np.load(self._path,mmap_mode='r')
            return npy_dtype_to_string(A.dtype)
        return self._header.dt
    def numBytesPerEntry(self):
        """Return the number of bytes per array element."""
        if self._npy_mode:
            A=np.load(self._path,mmap_mode='r')
            return A.itemsize
        return self._header.num_bytes_per_entry
    def readChunk(self,i1=-1,i2=-1,i3=-1,N1=1,N2=1,N3=1):
        """Read a sub-array starting at (i1[,i2[,i3]]) with shape (N1[,N2[,N3]]).

        1-D reads use i1/N1 only. 2-D reads require N1 to span the full first
        dimension, and 3-D reads require N1 and N2 to span the first two
        dimensions, so the chunk is contiguous on disk. Returns None on error.
        """
        #print("Reading chunk {} {} {} {} {} {}".format(i1,i2,i3,N1,N2,N3))
        if (i2<0):
            if self._npy_mode:
                A=np.load(self._path,mmap_mode='r')
                return A[:,:,i1:i1+N1]
            return self._read_chunk_1d(i1,N1)
        elif (i3<0):
            if N1 != self.N1():
                print ("Unable to support N1 {} != {}".format(N1,self.N1()))
                return None
            # Flat offset of column i2, then reshape back to 2-D (col-major).
            X=self._read_chunk_1d(i1+N1*i2,N1*N2)
            if X is None:
                print ('Problem reading chunk from file: '+self._path)
                return None
            if self._npy_mode:
                A=np.load(self._path,mmap_mode='r')
                return A[:,i2:i2+N2]
            return np.reshape(X,(N1,N2),order='F')
        else:
            if N1 != self.N1():
                print ("Unable to support N1 {} != {}".format(N1,self.N1()))
                return None
            if N2 != self.N2():
                print ("Unable to support N2 {} != {}".format(N2,self.N2()))
                return None
            if self._npy_mode:
                A=np.load(self._path,mmap_mode='r')
                return A[:,:,i3:i3+N3]
            X=self._read_chunk_1d(i1+N1*i2+N1*N2*i3,N1*N2*N3)
            return np.reshape(X,(N1,N2,N3),order='F')
    def _read_chunk_1d(self,i,N):
        """Read N entries starting at flat index i (downloads a byte range for URLs)."""
        offset=self._header.header_size+self._header.num_bytes_per_entry*i
        if is_url(self._path):
            tmp_fname=_download_bytes_to_tmpfile(self._path,offset,offset+self._header.num_bytes_per_entry*N)
            try:
                ret=self._read_chunk_1d_helper(tmp_fname,N,offset=0)
            except:
                ret=None
            #os.remove(tmp_fname)
            return ret
        return self._read_chunk_1d_helper(self._path,N,offset=offset)
    def _read_chunk_1d_helper(self,path0,N,*,offset):
        """Read N entries from local file *path0* starting at byte *offset*."""
        f=open(path0,"rb")
        try:
            f.seek(offset)
            ret=np.fromfile(f,dtype=self._header.dt,count=N)
            f.close()
            return ret
        except Exception as e: # catch *all* exceptions
            print (e)
            f.close()
            return None
def is_url(path):
    """Return True when *path* is an http or https URL."""
    return path.startswith(('http://', 'https://'))
def _download_bytes_to_tmpfile(url,start,end):
    """Download bytes [start, end) of *url* into a new temp file; return its path.

    The caller is responsible for deleting the temp file.
    """
    # HTTP Range is inclusive on both ends, hence end - 1.
    headers = {"Range": "bytes={}-{}".format(start,end-1)}
    r = requests.get(url, headers=headers, stream=True)
    fd, tmp_fname = tempfile.mkstemp()
    # Bug fix: the descriptor returned by mkstemp was previously leaked
    # (the file was re-opened by path); wrap the descriptor instead.
    with os.fdopen(fd, 'wb') as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)
    return tmp_fname
def _read_header(path):
    """Read and parse the .mda header at *path* (local path or http(s) URL).

    Returns an MdaHeader on success, or None if the header is malformed.
    """
    if is_url(path):
        # Download just the first 200 bytes -- more than any header needs.
        tmp_fname=_download_bytes_to_tmpfile(path,0,200)
        if not tmp_fname:
            # Bug fix: this message previously referenced the undefined
            # name `url` (NameError); the parameter is called `path`.
            raise Exception('Problem downloading bytes from '+path)
        try:
            ret=_read_header(tmp_fname)
        except Exception:
            ret=None
        os.remove(tmp_fname)
        return ret
    f=open(path,"rb")
    try:
        dt_code=_read_int32(f)
        num_bytes_per_entry=_read_int32(f)
        num_dims=_read_int32(f)
        uses64bitdims=False
        if (num_dims<0):
            # A negative dimension count flags 64-bit dimension sizes.
            uses64bitdims=True
            num_dims=-num_dims
        if (num_dims<1) or (num_dims>6): # allow single dimension as of 12/6/17
            print ("Invalid number of dimensions: {}".format(num_dims))
            f.close()
            return None
        dims=[]
        dimprod=1
        if uses64bitdims:
            for j in range(0,num_dims):
                tmp0=_read_int64(f)
                dimprod=dimprod*tmp0
                dims.append(tmp0)
        else:
            for j in range(0,num_dims):
                tmp0=_read_int32(f)
                dimprod=dimprod*tmp0
                dims.append(tmp0)
        dt=_dt_from_dt_code(dt_code)
        if dt is None:
            print ("Invalid data type code: {}".format(dt_code))
            f.close()
            return None
        H=MdaHeader(dt,dims)
        if (uses64bitdims):
            H.uses64bitdims=True
            H.header_size=3*4+H.num_dims*8
        f.close()
        return H
    except Exception as e: # catch *all* exceptions
        print (e)
        f.close()
        return None
def _dt_from_dt_code(dt_code):
    """Translate an on-disk .mda data-type code to a dtype name.

    Returns None for unrecognized codes (unchanged behavior); the
    if/elif ladder is replaced by a lookup table.
    """
    codes = {
        -2: 'uint8',
        -3: 'float32',
        -4: 'int16',
        -5: 'int32',
        -6: 'uint16',
        -7: 'float64',
        -8: 'uint32',
    }
    return codes.get(dt_code)
def _dt_code_from_dt(dt):
    """Translate a dtype name to its on-disk .mda data-type code.

    Returns None for unsupported names (unchanged behavior); the
    if-chain is replaced by a lookup table (inverse of _dt_from_dt_code).
    """
    codes = {
        'uint8': -2,
        'float32': -3,
        'int16': -4,
        'int32': -5,
        'uint16': -6,
        'float64': -7,
        'uint32': -8,
    }
    return codes.get(dt)
def get_num_bytes_per_entry_from_dt(dt):
    """Return the per-element byte size for a supported dtype name.

    Returns None for unsupported names (unchanged behavior); the
    if-chain is replaced by a lookup table.
    """
    sizes = {
        'uint8': 1,
        'float32': 4,
        'int16': 2,
        'int32': 4,
        'uint16': 2,
        'float64': 8,
        'uint32': 4,
    }
    return sizes.get(dt)
def readmda_header(path):
    """Return the parsed MdaHeader of the .mda file at *path*."""
    if file_extension(path) == '.npy':
        raise Exception('Cannot read mda header for .npy file.')
    return _read_header(path)
def _write_header(path,H,rewrite=False):
    """Write MdaHeader *H* at the start of *path*.

    With rewrite=True the file is opened 'r+b' so existing data after the
    header are preserved; otherwise the file is truncated. Returns True on
    success, False on failure.
    """
    if rewrite:
        f=open(path,"r+b")
    else:
        f=open(path,"wb")
    try:
        _write_int32(f,H.dt_code)
        _write_int32(f,H.num_bytes_per_entry)
        if H.uses64bitdims:
            # A negative dimension count signals 64-bit dimension sizes.
            _write_int32(f,-H.num_dims)
            for j in range(0,H.num_dims):
                _write_int64(f,H.dims[j])
        else:
            _write_int32(f,H.num_dims)
            for j in range(0,H.num_dims):
                _write_int32(f,H.dims[j])
        f.close()
        return True
    except Exception as e: # catch *all* exceptions
        print (e)
        f.close()
        return False
def readmda(path):
    """Read the entire array stored at *path* (.mda, or .npy by extension).

    Data on disk are column-major; the result is reshaped with order='F'.
    Returns None if the header is invalid or the read fails.
    """
    if (file_extension(path)=='.npy'):
        return readnpy(path);
    H=_read_header(path)
    if (H is None):
        print ("Problem reading header of: {}".format(path))
        return None
    ret=np.array([])
    f=open(path,"rb")
    try:
        f.seek(H.header_size)
        #This is how I do the column-major order
        ret=np.fromfile(f,dtype=H.dt,count=H.dimprod)
        ret=np.reshape(ret,H.dims,order='F')
        f.close()
        return ret
    except Exception as e: # catch *all* exceptions
        print (e)
        f.close()
        return None
# Convenience writers: each writemdaXX(X, fname) casts X to the named dtype
# and writes .mda format, or dispatches to the matching .npy writer when
# fname ends in '.npy'. Each returns True on success, False on failure.
def writemda32(X,fname):
    if (file_extension(fname)=='.npy'):
        return writenpy32(X,fname);
    return _writemda(X,fname,'float32')
def writemda64(X,fname):
    if (file_extension(fname)=='.npy'):
        return writenpy64(X,fname);
    return _writemda(X,fname,'float64')
def writemda8(X,fname):
    if (file_extension(fname)=='.npy'):
        return writenpy8(X,fname);
    return _writemda(X,fname,'uint8')
def writemda32i(X,fname):
    if (file_extension(fname)=='.npy'):
        return writenpy32i(X,fname);
    return _writemda(X,fname,'int32')
def writemda32ui(X,fname):
    if (file_extension(fname)=='.npy'):
        return writenpy32ui(X,fname);
    return _writemda(X,fname,'uint32')
def writemda16i(X,fname):
    if (file_extension(fname)=='.npy'):
        return writenpy16i(X,fname);
    return _writemda(X,fname,'int16')
def writemda16ui(X,fname):
    if (file_extension(fname)=='.npy'):
        return writenpy16ui(X,fname);
    return _writemda(X,fname,'uint16')
def writemda(X,fname,*,dtype):
    # Generic variant taking the dtype name explicitly.
    return _writemda(X,fname,dtype)
def _writemda(X,fname,dt):
    """Write array X in .mda format with dtype name *dt*.

    *fname* may be a path (opened and closed here) or an already-open binary
    file object (left open for the caller). Returns True on success, False
    on failure.
    """
    dt_code=0
    num_bytes_per_entry=get_num_bytes_per_entry_from_dt(dt)
    dt_code=_dt_code_from_dt(dt)
    if dt_code is None:
        print ("Unexpected data type: {}".format(dt))
        return False
    if type(fname)==str:
        f=open(fname,'wb')
    else:
        f=fname
    try:
        # Header: dtype code, bytes/entry, ndim, then one int32 per dim.
        _write_int32(f,dt_code)
        _write_int32(f,num_bytes_per_entry)
        _write_int32(f,X.ndim)
        for j in range(0,X.ndim):
            _write_int32(f,X.shape[j])
        #This is how I do column-major order
        #A=np.reshape(X,X.size,order='F').astype(dt)
        #A.tofile(f)
        bytes0=X.astype(dt).tobytes(order='F')
        f.write(bytes0)
        if type(fname)==str:
            f.close()
        return True
    except Exception as e: # catch *all* exceptions
        traceback.print_exc()
        print (e)
        if type(fname)==str:
            f.close()
        return False
# .npy counterparts of the .mda readers/writers above; each writenpyXX casts
# to the named dtype before saving and returns True.
def readnpy(path):
    return np.load(path)
def writenpy8(X,path):
    return _writenpy(X,path,dtype='int8')
def writenpy32(X,path):
    return _writenpy(X,path,dtype='float32')
def writenpy64(X,path):
    return _writenpy(X,path,dtype='float64')
def writenpy16i(X,path):
    return _writenpy(X,path,dtype='int16')
def writenpy16ui(X,path):
    return _writenpy(X,path,dtype='uint16')
def writenpy32i(X,path):
    return _writenpy(X,path,dtype='int32')
def writenpy32ui(X,path):
    return _writenpy(X,path,dtype='uint32')
def writenpy(X,path,*,dtype):
    return _writenpy(X,path,dtype=dtype)
def _writenpy(X,path,*,dtype):
    """Save X to *path* as .npy, cast to *dtype*. Always returns True."""
    np.save(path,X.astype(dtype=dtype,copy=False)) # astype will always create copy if dtype does not match
    # apparently allowing pickling is a security issue. (according to the docs) ??
    #np.save(path,X.astype(dtype=dtype,copy=False),allow_pickle=False) # astype will always create copy if dtype does not match
    return True
def appendmda(X,path):
    """Append array *X* along the last dimension of the .mda file at *path*.

    All leading dimensions of X must match the file's. Returns True on
    success (aligned with the other writers here), None on a validation
    failure, and False on an I/O failure.
    """
    if (file_extension(path)=='.npy'):
        raise Exception('appendmda not yet implemented for .npy files')
    H=_read_header(path)
    if (H is None):
        print ("Problem reading header of: {}".format(path))
        return None
    if (len(H.dims) != len(X.shape)):
        print ("Incompatible number of dimensions in appendmda",H.dims,X.shape)
        return None
    # np.product is deprecated/removed in newer numpy; np.prod is equivalent.
    num_entries_old=np.prod(H.dims)
    num_dims=len(H.dims)
    for j in range(num_dims-1):
        # Bug fix: this previously compared X.shape[j] with itself (always
        # equal), so mismatched leading dimensions were never rejected.
        if (H.dims[j]!=X.shape[j]):
            print ("Incompatible dimensions in appendmda",H.dims,X.shape)
            return None
    H.dims[num_dims-1]=H.dims[num_dims-1]+X.shape[num_dims-1]
    f=None
    try:
        _write_header(path,H,rewrite=True)
        f=open(path,"r+b")
        f.seek(H.header_size+H.num_bytes_per_entry*num_entries_old)
        A=np.reshape(X,X.size,order='F').astype(H.dt)
        A.tofile(f)
        f.close()
        return True
    except Exception as e: # catch *all* exceptions
        print (e)
        # Bug fix: f was unbound here when _write_header or open() raised.
        if f is not None:
            f.close()
        return False
def file_extension(fname):
    """Return the extension of *fname* (including the dot), or None when
    *fname* is not a plain str."""
    if type(fname) != str:
        return None
    return os.path.splitext(fname)[1]
def _read_int32(f):
    """Read one little-endian int32 from binary file object *f*."""
    (value,) = struct.unpack('<i', f.read(4))
    return value
def _read_int64(f):
    """Read one little-endian int64 from binary file object *f*."""
    (value,) = struct.unpack('<q', f.read(8))
    return value
def _write_int32(f,val):
    """Write *val* to *f* as a little-endian int32."""
    f.write(struct.pack('<i', val))
def _write_int64(f,val):
    """Write *val* to *f* as a little-endian int64."""
    f.write(struct.pack('<q', val))
def _header_from_file(f):
    """Parse an .mda header from an already-open binary file object.

    Returns an MdaHeader or None on malformed input.
    NOTE(review): this duplicates the parsing logic of _read_header();
    keep the two in sync (or refactor) if the format ever changes.
    """
    try:
        dt_code=_read_int32(f)
        num_bytes_per_entry=_read_int32(f)
        num_dims=_read_int32(f)
        uses64bitdims=False
        if (num_dims<0):
            # A negative dimension count flags 64-bit dimension sizes.
            uses64bitdims=True
            num_dims=-num_dims
        if (num_dims<1) or (num_dims>6): # allow single dimension as of 12/6/17
            print ("Invalid number of dimensions: {}".format(num_dims))
            return None
        dims=[]
        dimprod=1
        if uses64bitdims:
            for j in range(0,num_dims):
                tmp0=_read_int64(f)
                dimprod=dimprod*tmp0
                dims.append(tmp0)
        else:
            for j in range(0,num_dims):
                tmp0=_read_int32(f)
                dimprod=dimprod*tmp0
                dims.append(tmp0)
        dt=_dt_from_dt_code(dt_code)
        if dt is None:
            print ("Invalid data type code: {}".format(dt_code))
            return None
        H=MdaHeader(dt,dims)
        if (uses64bitdims):
            H.uses64bitdims=True
            H.header_size=3*4+H.num_dims*8
        return H
    except Exception as e: # catch *all* exceptions
        print (e)
        return None
def mdaio_test():
    """Ad-hoc smoke test: round-trip a small array through write/read.

    NOTE(review): DiskWriteMda is not defined anywhere in this module as
    shown here -- confirm it exists (or restore it), otherwise this test
    raises NameError at the A=DiskWriteMda(...) line.
    """
    M=4
    N=12
    X=np.ndarray((M,N))
    for n in range(0,N):
        for m in range(0,M):
            X[m,n]=n*10+m
    writemda32(X,'tmp1.mda')
    Y=readmda('tmp1.mda')
    print (Y)
    # Should print ~0 if the round trip preserved the data.
    print (np.absolute(X-Y).max())
    Z=DiskReadMda('tmp1.mda')
    print (Z.readChunk(i1=0,i2=4,N1=M,N2=N-4))
    A=DiskWriteMda('tmpA.mda',(M,N))
    A.writeChunk(Y,i1=0,i2=0)
    B=readmda('tmpA.mda')
    print (B.shape)
    print (B)
#mdaio_test() | [
"[email protected]"
]
| |
786f5b0e25618a69920c89fa5a7cc933ca584bdc | cad91ae76d2746a6c28ddda0f33a58f9d461378f | /PyTorch/LanguageModeling/BERT/triton/deployment_toolkit/warmup.py | f5e0dc0589a40004ab8c2e93652e1cc11b26cce6 | [
"Apache-2.0"
]
| permissive | NVIDIA/DeepLearningExamples | fe677521e7e2a16e3cb0b77e358f9aab72f8c11a | a5388a45f71a949639b35cc5b990bd130d2d8164 | refs/heads/master | 2023-08-31T20:57:08.798455 | 2023-08-23T10:09:12 | 2023-08-23T10:09:12 | 131,881,622 | 11,838 | 3,124 | null | 2023-08-28T16:57:33 | 2018-05-02T17:04:05 | Jupyter Notebook | UTF-8 | Python | false | false | 3,760 | py | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import pathlib
from distutils.version import LooseVersion
from importlib.metadata import version
from typing import List
# Cached tritonclient version; gates options only supported from 2.11.0 on.
TRITON_CLIENT_VERSION = LooseVersion(version("tritonclient"))
# method from PEP-366 to support relative import in executed modules
if __package__ is None:
    __package__ = pathlib.Path(__file__).parent.name
from .core import BatchingMode, EvaluationMode, MeasurementMode, OfflineMode
from .perf_analyzer import PerfAnalyzer, PerfAnalyzerConfig
from .utils import parse_server_url
LOGGER = logging.getLogger("warmup")
def performance_evaluation_warmup(
    server_url: str,
    model_name: str,
    batch_sizes: List[int],
    number_of_triton_instances: int,
    number_of_model_instances: int,
    input_data: str,
    input_shapes: List[str],
    measurement_mode: MeasurementMode,
    measurement_interval: int,
    measurement_request_count: int,
    batching_mode: BatchingMode,
    offline_mode: OfflineMode,
    evaluation_mode: EvaluationMode,
    output_shared_memory_size: int,
):
    """Warm the Triton server up by running extended perf_analyzer passes.

    Sweeps batch size and client concurrency (derived from the batching
    mode) and runs perf_analyzer once per combination with doubled
    measurement settings, so later benchmark runs start from a hot server.
    """
    protocol, host, port = parse_server_url(server_url)

    # Warmup measures for twice as long as the real evaluation would.
    measurement_interval *= 2
    measurement_request_count *= 2

    if batching_mode == BatchingMode.STATIC:
        # Only the smallest and largest batch sizes, single client.
        batch_sizes = sorted({1, batch_sizes[-1]})
        min_concurrency = max_concurrency = step = 1
    elif batching_mode == BatchingMode.DYNAMIC:
        max_batch_size = max(batch_sizes)
        max_total_requests = (
            2 * max_batch_size * number_of_triton_instances * number_of_model_instances
        )
        max_concurrency = min(256, max_total_requests)
        step = max(1, max_concurrency // 2)
        min_concurrency = step
        batch_sizes = [max(1, max_total_requests // 256)]
    else:
        raise ValueError(f"Unsupported batching mode: {batching_mode}")

    for batch_size in batch_sizes:
        for concurrency in range(min_concurrency, max_concurrency + step, step):
            params = {
                "model-name": model_name,
                "model-version": 1,
                "batch-size": batch_size,
                "url": f"{host}:{port}",
                "protocol": protocol,
                "input-data": input_data,
                "measurement-interval": measurement_interval,
                "concurrency-range": f"{concurrency}:{concurrency}:1",
                "output-shared-memory-size": output_shared_memory_size,
            }
            if TRITON_CLIENT_VERSION >= LooseVersion("2.11.0"):
                # Options only understood by tritonclient >= 2.11.0.
                params["measurement-mode"] = measurement_mode.value
                params["measurement-request-count"] = measurement_request_count

            if evaluation_mode == EvaluationMode.OFFLINE:
                params["shared-memory"] = offline_mode.value
                params["output-shared-memory-size"] = output_shared_memory_size

            config = PerfAnalyzerConfig()
            for key, value in params.items():
                config[key] = value
            for shape in input_shapes:
                config["shape"] = shape

            PerfAnalyzer(config=config).run()
| [
"[email protected]"
]
| |
9030a9aeac0cd19b1e22c38323c28b02eeb34bd6 | 2ce2f39b8997e23ce3e22acbed911bd09b367dff | /tests/test_http_response.py | 052f343814c78b48cc8b6393bb57974cc713763a | [
"Apache-2.0"
]
| permissive | gourneau/Growler | 315270b0d7ae0a81e98efcb028da33f8489529d4 | a057adbed0dbde6e6a15feee59add0f2889b546a | refs/heads/dev | 2021-01-24T15:34:55.851883 | 2016-05-04T17:16:15 | 2016-05-04T17:16:15 | 58,094,178 | 1 | 0 | null | 2016-05-05T01:07:17 | 2016-05-05T01:07:17 | null | UTF-8 | Python | false | false | 3,798 | py | #
# tests/test_http_response.py
#
import growler
import asyncio
import pytest
from unittest import mock
from collections import OrderedDict
from growler.http.response import Headers
from mock_classes import (
request_uri,
)
@pytest.fixture
def res(mock_protocol):
    """Fresh HTTPResponse bound to the mocked protocol for each test."""
    return growler.http.HTTPResponse(mock_protocol)
@pytest.fixture
def mock_app():
    """Mock constrained to the growler.App interface."""
    return mock.Mock(spec=growler.App,
                     )
@pytest.fixture
def mock_protocol(mock_app, request_uri):
    """Mocked GrowlerHTTPProtocol with path/query parsed from request_uri."""
    from urllib.parse import (unquote, urlparse, parse_qs)
    parsed_url = urlparse(request_uri)
    protocol = mock.Mock(spec=growler.http.GrowlerHTTPProtocol,
                         loop=mock.Mock(spec=asyncio.BaseEventLoop),
                         http_application=mock_app,
                         headers=None,
                         path=unquote(parsed_url.path),
                         query=parse_qs(parsed_url.query),)
    # The response consults getpeername() for the client address.
    protocol.socket.getpeername.return_value = ['', '']
    return protocol
def test_constructor(res, mock_protocol):
    """Construction yields an HTTPResponse that remembers its protocol."""
    assert isinstance(res, growler.http.HTTPResponse)
    assert res.protocol is mock_protocol
def test_construct_with_eol(mock_protocol):
    """A custom EOL string passed to the constructor is stored verbatim."""
    separator = ':'
    response = growler.http.HTTPResponse(mock_protocol, separator)
    assert isinstance(response, growler.http.HTTPResponse)
    assert response.protocol is mock_protocol
    assert response.EOL is separator
def test_default_headers(res):
    """Smoke test: _set_default_headers() runs without raising."""
    res._set_default_headers()
    # assert res.protocol is mock_protocol
def test_send_headers(res):
    """Smoke test: send_headers() runs without raising."""
    res.send_headers()
def test_write(res):
    """Smoke test: write() with no arguments runs without raising."""
    res.write()
def test_write_eof(res):
    """Smoke test: write_eof() runs without raising."""
    res.write_eof()
def test_end(res):
    """Smoke test: end() runs without raising."""
    res.end()
@pytest.mark.parametrize('url, status', [
    ('/', 200),
])
def test_redirect(res, url, status):
    """Smoke test: redirect() accepts a URL and status without raising."""
    res.redirect(url, status)
@pytest.mark.parametrize('obj, expect', [
    ({'a': 'b'}, b'{"a": "b"}')
])
def test_json(res, mock_protocol, obj, expect):
    """res.json() sets the JSON content type and writes the encoded body
    to the protocol's transport."""
    res.json(obj)
    assert res.headers['content-type'] == 'application/json'
    # Removed the stray space after the dot ("mock_protocol. transport")
    # so the line matches the identical assertion in test_headers.
    mock_protocol.transport.write.assert_called_with(expect)
@pytest.mark.parametrize('obj, expect', [
    ({'a': 'b'}, b'{"a": "b"}')
])
def test_headers(res, mock_protocol, obj, expect):
    # NOTE(review): this test duplicates test_json verbatim and never
    # exercises a headers-specific API -- looks like a copy/paste stub;
    # confirm intent before extending.
    res.json(obj)
    assert res.headers['content-type'] == 'application/json'
    mock_protocol.transport.write.assert_called_with(expect)
def test_header_construct_with_dict():
    """Headers built from a dict render both entries (either order)."""
    rendered = str(Headers({'a': 'b', 'c': 'D'}))
    assert rendered in ('a: b\r\nc: D\r\n\r\n', 'c: D\r\na: b\r\n\r\n')
def test_header_construct_with_keywords():
    """Headers built from keyword arguments render both entries."""
    rendered = str(Headers(a='b', c='D'))
    assert rendered in ('a: b\r\nc: D\r\n\r\n', 'c: D\r\na: b\r\n\r\n')
def test_header_construct_mixed():
    """Headers accept a dict and keyword arguments together."""
    rendered = str(Headers({'a': 'b'}, c='D'))
    assert rendered in ('a: b\r\nc: D\r\n\r\n', 'c: D\r\na: b\r\n\r\n')
def test_header_set():
    """Item assignment stores a header rendered as 'key: value'."""
    headers = Headers()
    headers['foo'] = 'bar'
    assert str(headers) == 'foo: bar\r\n\r\n'
def test_header_update_with_dict():
    """update() with a single mapping copies its entries in."""
    headers = Headers()
    headers.update({'foo': 'bar'})
    assert str(headers) == 'foo: bar\r\n\r\n'
def test_header_update_with_multiple_dicts():
    """Later mappings passed to update() override earlier ones."""
    headers = Headers()
    base = OrderedDict([('foo', 'baz'), ('a', 'b')])
    override = {'foo': 'bar'}
    headers.update(base, override)
    assert str(headers) == 'foo: bar\r\na: b\r\n\r\n'
def test_header_update_with_keyword():
    """update() accepts headers as keyword arguments."""
    headers = Headers()
    headers.update(foo='bar')
    assert str(headers) == 'foo: bar\r\n\r\n'
def test_header_update_with_mixed():
    """Keyword arguments to update() win over the positional mapping."""
    headers = Headers()
    mapping = {'foo': 'bazz'}
    headers.update(mapping, foo='bar')
    assert str(headers) == 'foo: bar\r\n\r\n'
def test_callable_header_value():
    """A callable header value is invoked when the headers are rendered."""
    headers = Headers()
    headers['foo'] = lambda: 'bar'
    assert str(headers) == 'foo: bar\r\n\r\n'
| [
"[email protected]"
]
| |
4d3aacc2cd74a7a3f40fd55c24ee756ac2daa48c | fe6fe31cda7c367ba480faeadb119e5074d7c8a4 | /src/uproot/dynamic.py | 7ed0b06dd64af7d48e2ea49570b8af30c329e21f | [
"BSD-3-Clause"
]
| permissive | scikit-hep/uproot5 | a03ec9f8f20d8b5f45957ccf280e1cd75bfab89a | cb70ce0715276bbc403cb48a511c8a3b27cffe7f | refs/heads/main | 2023-09-02T10:09:29.669547 | 2023-08-24T13:30:06 | 2023-08-24T13:30:06 | 262,422,450 | 51 | 20 | BSD-3-Clause | 2023-09-14T17:04:36 | 2020-05-08T20:30:09 | Python | UTF-8 | Python | false | false | 861 | py | # BSD 3-Clause License; see https://github.com/scikit-hep/uproot5/blob/main/LICENSE
"""
This module is initially empty, a repository for dynamically adding new classes.
The purpose of this namespace is to allow :doc:`uproot.model.VersionedModel`
classes that were automatically generated from ROOT ``TStreamerInfo`` to be
pickled, with the help of :doc:`uproot.model.DynamicModel`.
In `Python 3.7 and later <https://www.python.org/dev/peps/pep-0562>`__, attempts
to extract items from this namespace generate new :doc:`uproot.model.DynamicModel`
classes, which are used as a container in which data from pickled
:doc:`uproot.model.VersionedModel` instances are filled.
"""
def __getattr__(name):
    """Create and cache a DynamicModel subclass named *name* (PEP 562).

    Called on attribute misses for this module; the generated class is
    stored in the module globals so later lookups return the same object.
    """
    import uproot

    namespace = globals()
    if name not in namespace:
        namespace[name] = uproot._util.new_class(
            name, (uproot.model.DynamicModel,), {}
        )
    return namespace[name]
| [
"[email protected]"
]
| |
21b22b5af215a87059bbfe2e4d13bdfb04694bcf | 9d278285f2bc899ac93ec887b1c31880ed39bf56 | /ondoc/authentication/migrations/0064_spocdetails_source.py | 550d0b16e9c44fdf0ef8a5a92e73079dbecdec30 | []
| no_license | ronit29/docprime | 945c21f8787387b99e4916cb3ba1618bc2a85034 | 60d4caf6c52a8b70174a1f654bc792d825ba1054 | refs/heads/master | 2023-04-01T14:54:10.811765 | 2020-04-07T18:57:34 | 2020-04-07T18:57:34 | 353,953,576 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 411 | py | # Generated by Django 2.0.5 on 2018-11-02 19:13
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 2.0.5: adds an optional free-text ``source``
    # field to SpocDetails. Do not edit by hand once this migration has
    # been applied anywhere.

    dependencies = [
        ('authentication', '0063_merge_20181025_1818'),
    ]

    operations = [
        migrations.AddField(
            model_name='spocdetails',
            name='source',
            # blank=True keeps the field optional in forms/admin.
            field=models.CharField(blank=True, max_length=2000),
        ),
    ]
| [
"[email protected]"
]
| |
51d592ae4b786dcaf62efc0be8022199119a1a5e | 32eeb97dff5b1bf18cf5be2926b70bb322e5c1bd | /benchmark/alwayson/testcase/firstcases/testcase5_000.py | e7b33f20807938d44d3fb60f4af1be47325bb417 | []
| no_license | Prefest2018/Prefest | c374d0441d714fb90fca40226fe2875b41cf37fc | ac236987512889e822ea6686c5d2e5b66b295648 | refs/heads/master | 2021-12-09T19:36:24.554864 | 2021-12-06T12:46:14 | 2021-12-06T12:46:14 | 173,225,161 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,964 | py | #coding=utf-8
import os
import subprocess
import time
import traceback
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from selenium.common.exceptions import NoSuchElementException, WebDriverException
# Appium capabilities: launch the Always On preferences activity on an
# Android 4.4 emulator and collect JaCoCo coverage for the app package.
desired_caps = {
    'platformName' : 'Android',
    'deviceName' : 'Android Emulator',
    'platformVersion' : '4.4',
    'appPackage' : 'com.tomer.alwayson',
    'appActivity' : 'com.tomer.alwayson.activities.PreferencesActivity',
    'resetKeyboard' : True,
    'androidCoverage' : 'com.tomer.alwayson/com.tomer.alwayson.JacocoInstrumentation',
    'noReset' : True
    }
def command(cmd, timeout=5):
    """Run *cmd* in a shell, allow it *timeout* seconds, then stop and reap it.

    The original unconditionally called terminate() and never wait()ed,
    which signals already-finished children and leaks zombie processes.
    Returns None, as before.
    """
    p = subprocess.Popen(cmd, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, shell=True)
    time.sleep(timeout)
    if p.poll() is None:  # still running after the grace period
        p.terminate()
    p.wait()  # reap the child so it does not linger as a zombie
    return
def getElememt(driver, str) :
    """Find a UI element by UiAutomator selector.

    Retries up to five times, sleeping one second after each failure, then
    taps a fixed screen point (to dismiss possible overlays) and makes one
    final attempt, letting any exception propagate.
    """
    attempts = 5
    while attempts > 0:
        try:
            return driver.find_element_by_android_uiautomator(str)
        except NoSuchElementException:
            time.sleep(1)
            attempts -= 1
    os.popen("adb shell input tap 50 50")
    return driver.find_element_by_android_uiautomator(str)
def getElememtBack(driver, str1, str2) :
    """Find an element by primary selector *str1*, falling back to *str2*.

    *str1* is tried twice and *str2* up to five times, sleeping one second
    after each failure; as a last resort a screen tap is issued and *str2*
    is tried one final time, letting any exception propagate.
    """
    for _ in range(2):
        try:
            return driver.find_element_by_android_uiautomator(str1)
        except NoSuchElementException:
            time.sleep(1)
    for _ in range(5):
        try:
            return driver.find_element_by_android_uiautomator(str2)
        except NoSuchElementException:
            time.sleep(1)
    os.popen("adb shell input tap 50 50")
    return driver.find_element_by_android_uiautomator(str2)
def swipe(driver, startxper, startyper, endxper, endyper) :
    """Swipe between two points given as fractions of the screen size.

    Coordinates are the window size scaled by the per-axis fractions and
    truncated to ints. If the first attempt raises WebDriverException,
    waits one second and retries once.
    """
    dims = driver.get_window_size()
    kwargs = dict(
        start_x=int(dims["width"] * startxper),
        start_y=int(dims["height"] * startyper),
        end_x=int(dims["width"] * endxper),
        end_y=int(dims["height"] * endyper),
        duration=2000,
    )
    try:
        driver.swipe(**kwargs)
    except WebDriverException:
        time.sleep(1)
        driver.swipe(**kwargs)
    return
# testcase000
# End-to-end smoke test for the Always On app: open the overflow menu in
# the preferences activity, tap one entry, then flush JaCoCo coverage and
# reset focus if another app took over. (Python 2 syntax throughout.)
try :
    starttime = time.time()
    # Connect to the local Appium server with the capabilities above.
    driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
    driver.press_keycode(82)
    element = getElememtBack(driver, "new UiSelector().text(\"Uninstall\")", "new UiSelector().className(\"android.widget.TextView\").instance(4)")
    TouchAction(driver).tap(element).perform()
except Exception, e:
    # Report the failure with the full traceback for debugging.
    print 'FAIL'
    print 'str(e):\t\t', str(e)
    print 'repr(e):\t', repr(e)
    print traceback.format_exc()
else:
    print 'OK'
finally:
    cpackage = driver.current_package
    endtime = time.time()
    print 'consumed time:', str(endtime - starttime), 's'
    # Ask the instrumented app to dump JaCoCo coverage for this test case.
    command("adb shell am broadcast -a com.example.pkg.END_EMMA --es name \"5_000\"")
    jacocotime = time.time()
    print 'jacoco time:', str(jacocotime - endtime), 's'
    driver.quit()
    # If focus drifted to another app, force-stop it to reset device state.
    if (cpackage != 'com.tomer.alwayson'):
        cpackage = "adb shell am force-stop " + cpackage
        os.popen(cpackage)
| [
"[email protected]"
]
| |
7eab1eee572d2490d59d02e103bda65a10f64328 | 4d9a9546a5dc0b550aede272c4ba85af88dbb673 | /env/lib/python3.8/site-packages/pandas-stubs/core/computation/parsing.pyi | 3139316a492d6b9c573024b3e5e9b527db637ce6 | []
| no_license | LuisGonzalezLopez/Luis-Gonzalez | f4064dc08ccbada80cc7b45f8fbaaf70f54f420c | 109f50e2e26a1c4abed5ba502deda9e212955c69 | refs/heads/master | 2022-08-04T14:24:42.992548 | 2022-07-23T09:03:08 | 2022-07-23T09:03:08 | 103,600,719 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 501 | pyi | import tokenize
from typing import (
Iterator,
Tuple,
)
# Sentinel token-type code used to tag backtick-quoted strings.
BACKTICK_QUOTED_STRING: int

# Make *name* usable as a Python identifier (stub).
def create_valid_python_identifier(name: str) -> str: ...
# Rewrite a (toknum, tokval) pair for a backtick-quoted token (stub).
def clean_backtick_quoted_toks(tok: Tuple[int, str]) -> Tuple[int, str]: ...
# Sanitize a column name for use inside query/eval expressions (stub).
def clean_column_name(name: str) -> str: ...
# Consume tokens of one backtick-quoted string starting at string_start (stub).
def tokenize_backtick_quoted_string(
    token_generator: Iterator[tokenize.TokenInfo], source: str, string_start: int
) -> Tuple[int, str]: ...
# Tokenize *source*, yielding (toknum, tokval) pairs (stub).
def tokenize_string(source: str) -> Iterator[Tuple[int, str]]: ...
| [
"[email protected]"
]
| |
b8ddc127cfcb9245e757ef5500e01dc09c429923 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2513/60795/267936.py | d6640b0f57f31e720990a6b563b00de551112cf6 | []
| no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 193 | py | n=int(input())
result=[]
for i in range(0,n):
arr=[int(n) for n in input().split(',')]
for j in range(0,n):
result.append(arr[j])
k=int(input())
result.sort()
print(result[k-1]) | [
"[email protected]"
]
| |
88f7cf669ad38ec523e65645a0e13c3f281714a6 | c3fc7865f163a43fbd3e832547005cf3c89547c5 | /bin/oldScripts/JobCommands/PINN_ECFP4KSEPTRIGRAM_2_3_commands_5.py | 8823d4a4539d7476a8d27561bca5f92ef60a1eb6 | []
| no_license | ahmetrifaioglu/PyTorch | 936d5317ca90a147e147241cbf75ae0864477f13 | fff02012eb5f05cdd634b5128f8bdceb9ed3b2a7 | refs/heads/master | 2021-10-24T19:27:53.495478 | 2021-10-12T22:05:08 | 2021-10-12T22:05:08 | 165,693,778 | 0 | 0 | null | 2021-10-12T22:04:20 | 2019-01-14T16:24:32 | Jupyter Notebook | UTF-8 | Python | false | false | 11,835 | py | import subprocess
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_1024_512 512 512 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_1024_512 512 512 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_1024_512 512 512 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_1024_512 256 256 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_1024_512 256 256 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_1024_512 256 256 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_1024_512 64 64 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_1024_512 64 64 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_1024_512 64 64 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_1024_128 1024 1024 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_1024_128 1024 1024 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_1024_128 1024 1024 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_1024_128 512 512 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_1024_128 512 512 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_1024_128 512 512 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_1024_128 256 256 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_1024_128 256 256 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_1024_128 256 256 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_1024_128 64 64 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_1024_128 64 64 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_1024_128 64 64 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_512_128 1024 1024 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_512_128 1024 1024 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_512_128 1024 1024 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_512_128 512 512 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_512_128 512 512 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_512_128 512 512 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_512_128 256 256 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_512_128 256 256 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_512_128 256 256 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_512_128 64 64 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_512_128 64 64 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 2048_512_128 64 64 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_512 1024 1024 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_512 1024 1024 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_512 1024 1024 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_512 512 512 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_512 512 512 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_512 512 512 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_512 256 256 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_512 256 256 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_512 256 256 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_512 64 64 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_512 64 64 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_512 64 64 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_128 1024 1024 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_128 1024 1024 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_128 1024 1024 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_128 512 512 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_128 512 512 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_128 512 512 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_128 256 256 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_128 256 256 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_128 256 256 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_128 64 64 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_128 64 64 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_1024_128 64 64 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_512_128 1024 1024 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_512_128 1024 1024 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_512_128 1024 1024 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_512_128 512 512 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_512_128 512 512 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_512_128 512 512 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_512_128 256 256 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_512_128 256 256 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_512_128 256 256 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_512_128 64 64 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_512_128 64 64 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1536_512_128 64 64 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1024_512_128 1024 1024 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1024_512_128 1024 1024 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1024_512_128 1024 1024 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1024_512_128 512 512 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1024_512_128 512 512 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1024_512_128 512 512 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1024_512_128 256 256 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1024_512_128 256 256 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1024_512_128 256 256 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1024_512_128 64 64 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1024_512_128 64 64 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1536 1024_512_128 64 64 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1024 4096_2048_1536 1024 1024 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1024 4096_2048_1536 1024 1024 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1024 4096_2048_1536 1024 1024 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1024 4096_2048_1536 512 512 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1024 4096_2048_1536 512 512 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1024 4096_2048_1536 512 512 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1024 4096_2048_1536 256 256 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1024 4096_2048_1536 256 256 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1024 4096_2048_1536 256 256 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1024 4096_2048_1536 64 64 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1024 4096_2048_1536 64 64 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1024 4096_2048_1536 64 64 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1024 4096_2048_1024 1024 1024 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1024 4096_2048_1024 1024 1024 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1024 4096_2048_1024 1024 1024 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1024 4096_2048_1024 512 512 0.0001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1024 4096_2048_1024 512 512 0.005", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1024 4096_2048_1024 512 512 0.001", shell=True)
subprocess.call("python dream_challenge.py PINN_2_3 ecfp4 trigram 4096_1024 4096_2048_1024 256 256 0.0001", shell=True)
| [
"[email protected]"
]
| |
054eb96e09b87131cdc8f7bea38c96ab8504d39b | 48d8e96e0eba526fb08c74ee17ca0b89b9ea4192 | /find_maximum_subarray.py | a4607b76dd28b8813ac5e9f9bf58c916cd3afc30 | []
| no_license | John-W-Stevens/algorithms_clrs | 12842e246b66f30d4d71c093f7c5919e941eb1f0 | e926418f069de0ac7e5d6195b810df1be41edf3f | refs/heads/master | 2020-12-21T09:16:35.035058 | 2020-04-22T18:25:42 | 2020-04-22T18:25:42 | 236,383,401 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,660 | py |
# Import numpy to use Inf
from numpy import Inf
def find_max_crossing_subarray(arr, low, mid, high):
    """Find a maximum subarray of arr[low..high] that crosses index mid.

    From CLRS pgs. 70-74. Returns (max_left, max_right, total) where
    arr[max_left..max_right] is a maximum-sum subarray constrained to
    contain both arr[mid] and arr[mid+1], and total is its sum.

    Running time: linear in (high - low).

    Fix: the sentinel is now float("-inf") from the standard library; the
    original used numpy.Inf, an alias removed in NumPy 2.0.
    """
    # Best sum of any subarray arr[i..mid], scanning leftwards from mid.
    left_sum = float("-inf")
    sm = 0  # sm for sum
    for i in range(mid, low - 1, -1):
        sm += arr[i]
        if sm >= left_sum:
            left_sum = sm
            max_left = i
    # Best sum of any subarray arr[mid+1..j], scanning rightwards.
    right_sum = float("-inf")
    sm = 0
    for j in range(mid + 1, high + 1):
        sm += arr[j]
        if sm >= right_sum:
            right_sum = sm
            max_right = j
    return (max_left, max_right, left_sum + right_sum)
def find_maximum_subarray(arr, low, high):
    """Return (start, end, total) for a maximum-sum subarray of arr[low..high].

    Divide-and-conquer algorithm from CLRS pgs. 70-74: the answer lies
    entirely in the left half, entirely in the right half, or crosses the
    midpoint; recurse on both halves and compare the three candidates.

    Running time: theta(n lg n) where 'lg n' stands for log2n.
    """
    # Base case: a single element is its own maximum subarray.
    if high == low:
        return (low, high, arr[low])

    mid = (low + high) // 2
    left = find_maximum_subarray(arr, low, mid)
    right = find_maximum_subarray(arr, mid + 1, high)
    cross = find_max_crossing_subarray(arr, low, mid, high)

    # Pick the candidate with the greatest sum (third tuple slot),
    # preferring left, then right, then the crossing subarray on ties.
    if left[2] >= right[2] and left[2] >= cross[2]:
        return left
    if right[2] >= left[2] and right[2] >= cross[2]:
        return right
    return cross
# Smoke test: the maximum subarray of this fixture is test[24:33], sum 145.
test = [17, -25, 6, 18, -23, 8, 28, 6, 34, 31, -50, 3, 46, -33, -45, -26, 14, -23, 45, -24, 21, -31, 19, -41, 49,
        47, 29, -11, 16, 12, -9, -14, 26, -46, -11, 39, -41, -13, -11, 8, -19, -13, -9, -25, -15, 27, 30, 8, 10]
print(find_maximum_subarray(test,0,len(test)-1))
# Output: (24, 32, 145) the maximum subarray exists at test[24:33] and the sum of this subarray is 145
"[email protected]"
]
| |
0f104fa41dfb5aa5dba2362b7d925e046b1292c4 | e8d7e13eb4d26c0a0147f2d0208d70e61f865c2c | /untitled50/bin/python3.6m-config | 54996d6a3566cc7b19ca4b84dbefad58b93fe58f | []
| no_license | fazi4888/APCSP-FreshmanYear | 55c5b5717aadeb2d871582754174f88213a488fe | b4f0f797b2c469e148b0330ad9d309610f1f0668 | refs/heads/master | 2022-11-30T11:51:30.210342 | 2020-08-17T15:00:37 | 2020-08-17T15:00:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,062 | #!/Users/rayaan_siddiqi23/untitled50/bin/python
# -*- python -*-
# Keep this script in sync with python-config.sh.in
import getopt
import os
import sys
import sysconfig
# Long options recognised by this python-config clone; each (except --help)
# prints one piece of build configuration.
valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags',
              'ldflags', 'extension-suffix', 'help', 'abiflags', 'configdir']
def exit_with_usage(code=1):
    """Print a usage line listing every long option to stderr and exit."""
    options = '|'.join('--' + opt for opt in valid_opts)
    print("Usage: {0} [{1}]".format(sys.argv[0], options), file=sys.stderr)
    sys.exit(code)
# Parse long options only; any unrecognised flag is a usage error.
try:
    opts, args = getopt.getopt(sys.argv[1:], '', valid_opts)
except getopt.error:
    exit_with_usage()

if not opts:
    exit_with_usage()

pyver = sysconfig.get_config_var('VERSION')
getvar = sysconfig.get_config_var

opt_flags = [flag for (flag, val) in opts]

if '--help' in opt_flags:
    exit_with_usage(code=0)

# Answer every requested flag, in the order given on the command line.
for opt in opt_flags:
    if opt == '--prefix':
        print(sysconfig.get_config_var('prefix'))

    elif opt == '--exec-prefix':
        print(sysconfig.get_config_var('exec_prefix'))

    elif opt in ('--includes', '--cflags'):
        flags = ['-I' + sysconfig.get_path('include'),
                 '-I' + sysconfig.get_path('platinclude')]
        if opt == '--cflags':
            flags.extend(getvar('CFLAGS').split())
        print(' '.join(flags))

    elif opt in ('--libs', '--ldflags'):
        libs = ['-lpython' + pyver + sys.abiflags]
        libs += getvar('LIBS').split()
        libs += getvar('SYSLIBS').split()
        # add the prefix/lib/pythonX.Y/config dir, but only if there is no
        # shared library in prefix/lib/.
        if opt == '--ldflags':
            if not getvar('Py_ENABLE_SHARED'):
                libs.insert(0, '-L' + getvar('LIBPL'))
            if not getvar('PYTHONFRAMEWORK'):
                libs.extend(getvar('LINKFORSHARED').split())
        print(' '.join(libs))

    elif opt == '--extension-suffix':
        print(sysconfig.get_config_var('EXT_SUFFIX'))

    elif opt == '--abiflags':
        print(sys.abiflags)

    elif opt == '--configdir':
        print(sysconfig.get_config_var('LIBPL'))
| [
"[email protected]"
]
| ||
0820492c6f8f84c03d5ea67148735a46b346b5c4 | 27e890f900bd4bfb2e66f4eab85bc381cf4d5d3f | /tests/unit/plugins/lookup/test_manifold.py | 44050cf36af553019eb84f6a882211c510bf094f | []
| no_license | coll-test/notstdlib.moveitallout | eb33a560070bbded5032385d0aea2f3cf60e690b | 0987f099b783c6cf977db9233e1c3d9efcbcb3c7 | refs/heads/master | 2020-12-19T22:28:33.369557 | 2020-01-23T18:51:26 | 2020-01-23T18:51:26 | 235,865,139 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 28,526 | py | # (c) 2018, Arigato Machine Inc.
# (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible_collections.notstdlib.moveitallout.tests.unit.compat import unittest
from ansible_collections.notstdlib.moveitallout.tests.unit.compat.mock import patch, call
from ansible.errors import AnsibleError
from ansible_collections.notstdlib.moveitallout.plugins.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils import six
from ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold import ManifoldApiClient, LookupModule, ApiError
import json
# Canned Manifold API responses keyed by the full request URL.
# Each value is the decoded JSON body the real marketplace/identity
# endpoints would return; mock_fixture() below serializes an entry back
# to JSON and wires it onto a patched open_url mock.
API_FIXTURES = {
    'https://api.marketplace.manifold.co/v1/resources':
        [
            {
                "body": {
                    "label": "resource-1",
                    "name": "Resource 1"
                },
                "id": "rid-1"
            },
            {
                "body": {
                    "label": "resource-2",
                    "name": "Resource 2"
                },
                "id": "rid-2"
            }
        ],
    'https://api.marketplace.manifold.co/v1/resources?label=resource-1':
        [
            {
                "body": {
                    "label": "resource-1",
                    "name": "Resource 1"
                },
                "id": "rid-1"
            }
        ],
    'https://api.marketplace.manifold.co/v1/resources?label=resource-2':
        [
            {
                "body": {
                    "label": "resource-2",
                    "name": "Resource 2"
                },
                "id": "rid-2"
            }
        ],
    'https://api.marketplace.manifold.co/v1/resources?team_id=tid-1':
        [
            {
                "body": {
                    "label": "resource-1",
                    "name": "Resource 1"
                },
                "id": "rid-1"
            }
        ],
    'https://api.marketplace.manifold.co/v1/resources?project_id=pid-1':
        [
            {
                "body": {
                    "label": "resource-2",
                    "name": "Resource 2"
                },
                "id": "rid-2"
            }
        ],
    'https://api.marketplace.manifold.co/v1/resources?project_id=pid-2':
        [
            {
                "body": {
                    "label": "resource-1",
                    "name": "Resource 1"
                },
                "id": "rid-1"
            },
            {
                "body": {
                    "label": "resource-3",
                    "name": "Resource 3"
                },
                "id": "rid-3"
            }
        ],
    'https://api.marketplace.manifold.co/v1/resources?team_id=tid-1&project_id=pid-1':
        [
            {
                "body": {
                    "label": "resource-1",
                    "name": "Resource 1"
                },
                "id": "rid-1"
            }
        ],
    'https://api.marketplace.manifold.co/v1/projects':
        [
            {
                "body": {
                    "label": "project-1",
                    "name": "Project 1",
                },
                "id": "pid-1",
            },
            {
                "body": {
                    "label": "project-2",
                    "name": "Project 2",
                },
                "id": "pid-2",
            }
        ],
    'https://api.marketplace.manifold.co/v1/projects?label=project-2':
        [
            {
                "body": {
                    "label": "project-2",
                    "name": "Project 2",
                },
                "id": "pid-2",
            }
        ],
    'https://api.marketplace.manifold.co/v1/credentials?resource_id=rid-1':
        [
            {
                "body": {
                    "resource_id": "rid-1",
                    "values": {
                        "RESOURCE_TOKEN_1": "token-1",
                        "RESOURCE_TOKEN_2": "token-2"
                    }
                },
                "id": "cid-1",
            }
        ],
    'https://api.marketplace.manifold.co/v1/credentials?resource_id=rid-2':
        [
            {
                "body": {
                    "resource_id": "rid-2",
                    "values": {
                        "RESOURCE_TOKEN_3": "token-3",
                        "RESOURCE_TOKEN_4": "token-4"
                    }
                },
                "id": "cid-2",
            }
        ],
    'https://api.marketplace.manifold.co/v1/credentials?resource_id=rid-3':
        [
            {
                "body": {
                    "resource_id": "rid-3",
                    "values": {
                        "RESOURCE_TOKEN_1": "token-5",
                        "RESOURCE_TOKEN_2": "token-6"
                    }
                },
                "id": "cid-3",
            }
        ],
    'https://api.identity.manifold.co/v1/teams':
        [
            {
                "id": "tid-1",
                "body": {
                    "name": "Team 1",
                    "label": "team-1"
                }
            },
            {
                "id": "tid-2",
                "body": {
                    "name": "Team 2",
                    "label": "team-2"
                }
            }
        ]
}
def mock_fixture(open_url_mock, fixture=None, data=None, headers=None):
    """Wire canned response data onto a patched ``open_url`` mock.

    When *fixture* names an entry in API_FIXTURES, its JSON serialization
    becomes the payload and a JSON content-type header is supplied unless
    the caller already provided one; otherwise *data*/*headers* are used
    verbatim (falsy *headers* becomes an empty dict).
    """
    headers = headers if headers else {}
    if fixture:
        data = json.dumps(API_FIXTURES[fixture])
        headers.setdefault('content-type', 'application/json')
    response = open_url_mock.return_value
    response.read.return_value = data
    response.headers = headers
class TestManifoldApiClient(unittest.TestCase):
    """Tests for ManifoldApiClient: request plumbing, error mapping, and
    the typed endpoint helpers (resources/teams/projects/credentials).

    ``open_url`` is patched throughout; mock_fixture() wires canned
    payloads from API_FIXTURES onto the mock.
    """
    # --- request(): headers, decoding, parameter passthrough ---
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.open_url')
    def test_request_sends_default_headers(self, open_url_mock):
        mock_fixture(open_url_mock, data='hello')
        client = ManifoldApiClient('token-123')
        client.request('test', 'endpoint')
        open_url_mock.assert_called_with('https://api.test.manifold.co/v1/endpoint',
                                         headers={'Accept': '*/*', 'Authorization': 'Bearer token-123'},
                                         http_agent='python-manifold-ansible-1.0.0')
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.open_url')
    def test_request_decodes_json(self, open_url_mock):
        mock_fixture(open_url_mock, fixture='https://api.marketplace.manifold.co/v1/resources')
        client = ManifoldApiClient('token-123')
        self.assertIsInstance(client.request('marketplace', 'resources'), list)
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.open_url')
    def test_request_streams_text(self, open_url_mock):
        # Non-JSON content types must be returned as raw text.
        mock_fixture(open_url_mock, data='hello', headers={'content-type': "text/plain"})
        client = ManifoldApiClient('token-123')
        self.assertEqual('hello', client.request('test', 'endpoint'))
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.open_url')
    def test_request_processes_parameterized_headers(self, open_url_mock):
        mock_fixture(open_url_mock, data='hello')
        client = ManifoldApiClient('token-123')
        client.request('test', 'endpoint', headers={'X-HEADER': 'MANIFOLD'})
        open_url_mock.assert_called_with('https://api.test.manifold.co/v1/endpoint',
                                         headers={'Accept': '*/*', 'Authorization': 'Bearer token-123',
                                                  'X-HEADER': 'MANIFOLD'},
                                         http_agent='python-manifold-ansible-1.0.0')
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.open_url')
    def test_request_passes_arbitrary_parameters(self, open_url_mock):
        mock_fixture(open_url_mock, data='hello')
        client = ManifoldApiClient('token-123')
        client.request('test', 'endpoint', use_proxy=False, timeout=5)
        open_url_mock.assert_called_with('https://api.test.manifold.co/v1/endpoint',
                                         headers={'Accept': '*/*', 'Authorization': 'Bearer token-123'},
                                         http_agent='python-manifold-ansible-1.0.0',
                                         use_proxy=False, timeout=5)
    # --- request(): error conditions are mapped onto ApiError ---
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.open_url')
    def test_request_raises_on_incorrect_json(self, open_url_mock):
        mock_fixture(open_url_mock, data='noJson', headers={'content-type': "application/json"})
        client = ManifoldApiClient('token-123')
        with self.assertRaises(ApiError) as context:
            client.request('test', 'endpoint')
        self.assertEqual('JSON response can\'t be parsed while requesting https://api.test.manifold.co/v1/endpoint:\n'
                         'noJson',
                         str(context.exception))
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.open_url')
    def test_request_raises_on_status_500(self, open_url_mock):
        open_url_mock.side_effect = HTTPError('https://api.test.manifold.co/v1/endpoint',
                                              500, 'Server error', {}, six.StringIO('ERROR'))
        client = ManifoldApiClient('token-123')
        with self.assertRaises(ApiError) as context:
            client.request('test', 'endpoint')
        self.assertEqual('Server returned: HTTP Error 500: Server error while requesting '
                         'https://api.test.manifold.co/v1/endpoint:\nERROR',
                         str(context.exception))
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.open_url')
    def test_request_raises_on_bad_url(self, open_url_mock):
        open_url_mock.side_effect = URLError('URL is invalid')
        client = ManifoldApiClient('token-123')
        with self.assertRaises(ApiError) as context:
            client.request('test', 'endpoint')
        self.assertEqual('Failed lookup url for https://api.test.manifold.co/v1/endpoint : <url'
                         'open error URL is invalid>',
                         str(context.exception))
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.open_url')
    def test_request_raises_on_ssl_error(self, open_url_mock):
        open_url_mock.side_effect = SSLValidationError('SSL Error')
        client = ManifoldApiClient('token-123')
        with self.assertRaises(ApiError) as context:
            client.request('test', 'endpoint')
        self.assertEqual('Error validating the server\'s certificate for https://api.test.manifold.co/v1/endpoint: '
                         'SSL Error',
                         str(context.exception))
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.open_url')
    def test_request_raises_on_connection_error(self, open_url_mock):
        open_url_mock.side_effect = ConnectionError('Unknown connection error')
        client = ManifoldApiClient('token-123')
        with self.assertRaises(ApiError) as context:
            client.request('test', 'endpoint')
        self.assertEqual('Error connecting to https://api.test.manifold.co/v1/endpoint: Unknown connection error',
                         str(context.exception))
    # --- endpoint helpers: URL construction and payload decoding ---
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.open_url')
    def test_get_resources_get_all(self, open_url_mock):
        url = 'https://api.marketplace.manifold.co/v1/resources'
        mock_fixture(open_url_mock, fixture=url)
        client = ManifoldApiClient('token-123')
        self.assertListEqual(API_FIXTURES[url], client.get_resources())
        open_url_mock.assert_called_with(url,
                                         headers={'Accept': '*/*', 'Authorization': 'Bearer token-123'},
                                         http_agent='python-manifold-ansible-1.0.0')
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.open_url')
    def test_get_resources_filter_label(self, open_url_mock):
        url = 'https://api.marketplace.manifold.co/v1/resources?label=resource-1'
        mock_fixture(open_url_mock, fixture=url)
        client = ManifoldApiClient('token-123')
        self.assertListEqual(API_FIXTURES[url], client.get_resources(label='resource-1'))
        open_url_mock.assert_called_with(url,
                                         headers={'Accept': '*/*', 'Authorization': 'Bearer token-123'},
                                         http_agent='python-manifold-ansible-1.0.0')
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.open_url')
    def test_get_resources_filter_team_and_project(self, open_url_mock):
        url = 'https://api.marketplace.manifold.co/v1/resources?team_id=tid-1&project_id=pid-1'
        mock_fixture(open_url_mock, fixture=url)
        client = ManifoldApiClient('token-123')
        self.assertListEqual(API_FIXTURES[url], client.get_resources(team_id='tid-1', project_id='pid-1'))
        args, kwargs = open_url_mock.call_args
        url_called = args[0]
        # Dict order is not guaranteed, so an url may have querystring parameters order randomized
        self.assertIn('team_id=tid-1', url_called)
        self.assertIn('project_id=pid-1', url_called)
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.open_url')
    def test_get_teams_get_all(self, open_url_mock):
        url = 'https://api.identity.manifold.co/v1/teams'
        mock_fixture(open_url_mock, fixture=url)
        client = ManifoldApiClient('token-123')
        self.assertListEqual(API_FIXTURES[url], client.get_teams())
        open_url_mock.assert_called_with(url,
                                         headers={'Accept': '*/*', 'Authorization': 'Bearer token-123'},
                                         http_agent='python-manifold-ansible-1.0.0')
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.open_url')
    def test_get_teams_filter_label(self, open_url_mock):
        # Team filtering happens client-side: the request URL is unfiltered.
        url = 'https://api.identity.manifold.co/v1/teams'
        mock_fixture(open_url_mock, fixture=url)
        client = ManifoldApiClient('token-123')
        self.assertListEqual(API_FIXTURES[url][1:2], client.get_teams(label='team-2'))
        open_url_mock.assert_called_with(url,
                                         headers={'Accept': '*/*', 'Authorization': 'Bearer token-123'},
                                         http_agent='python-manifold-ansible-1.0.0')
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.open_url')
    def test_get_projects_get_all(self, open_url_mock):
        url = 'https://api.marketplace.manifold.co/v1/projects'
        mock_fixture(open_url_mock, fixture=url)
        client = ManifoldApiClient('token-123')
        self.assertListEqual(API_FIXTURES[url], client.get_projects())
        open_url_mock.assert_called_with(url,
                                         headers={'Accept': '*/*', 'Authorization': 'Bearer token-123'},
                                         http_agent='python-manifold-ansible-1.0.0')
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.open_url')
    def test_get_projects_filter_label(self, open_url_mock):
        url = 'https://api.marketplace.manifold.co/v1/projects?label=project-2'
        mock_fixture(open_url_mock, fixture=url)
        client = ManifoldApiClient('token-123')
        self.assertListEqual(API_FIXTURES[url], client.get_projects(label='project-2'))
        open_url_mock.assert_called_with(url,
                                         headers={'Accept': '*/*', 'Authorization': 'Bearer token-123'},
                                         http_agent='python-manifold-ansible-1.0.0')
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.open_url')
    def test_get_credentials(self, open_url_mock):
        url = 'https://api.marketplace.manifold.co/v1/credentials?resource_id=rid-1'
        mock_fixture(open_url_mock, fixture=url)
        client = ManifoldApiClient('token-123')
        self.assertListEqual(API_FIXTURES[url], client.get_credentials(resource_id='rid-1'))
        open_url_mock.assert_called_with(url,
                                         headers={'Accept': '*/*', 'Authorization': 'Bearer token-123'},
                                         http_agent='python-manifold-ansible-1.0.0')
class TestLookupModule(unittest.TestCase):
    """Tests for the manifold LookupModule: credential merging, team/project
    filtering, error reporting, and the MANIFOLD_API_TOKEN env fallback.

    ManifoldApiClient is patched wholesale; get_credentials side_effects map
    a resource id to the matching canned payload in API_FIXTURES.
    """
    def setUp(self):
        # Fresh lookup per test; _load_name mimics Ansible's plugin loader.
        self.lookup = LookupModule()
        self.lookup._load_name = "manifold"
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.ManifoldApiClient')
    def test_get_all(self, client_mock):
        expected_result = [{'RESOURCE_TOKEN_1': 'token-1',
                            'RESOURCE_TOKEN_2': 'token-2',
                            'RESOURCE_TOKEN_3': 'token-3',
                            'RESOURCE_TOKEN_4': 'token-4'
                            }]
        client_mock.return_value.get_resources.return_value = API_FIXTURES['https://api.marketplace.manifold.co/v1/resources']
        client_mock.return_value.get_credentials.side_effect = lambda x: API_FIXTURES['https://api.marketplace.manifold.co/v1/'
                                                                                      'credentials?resource_id={0}'.format(x)]
        self.assertListEqual(expected_result, self.lookup.run([], api_token='token-123'))
        client_mock.assert_called_with('token-123')
        client_mock.return_value.get_resources.assert_called_with(team_id=None, project_id=None)
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.ManifoldApiClient')
    def test_get_one_resource(self, client_mock):
        expected_result = [{'RESOURCE_TOKEN_3': 'token-3',
                            'RESOURCE_TOKEN_4': 'token-4'
                            }]
        client_mock.return_value.get_resources.return_value = API_FIXTURES['https://api.marketplace.manifold.co/v1/resources?label=resource-2']
        client_mock.return_value.get_credentials.side_effect = lambda x: API_FIXTURES['https://api.marketplace.manifold.co/v1/'
                                                                                      'credentials?resource_id={0}'.format(x)]
        self.assertListEqual(expected_result, self.lookup.run(['resource-2'], api_token='token-123'))
        client_mock.return_value.get_resources.assert_called_with(team_id=None, project_id=None, label='resource-2')
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.ManifoldApiClient')
    def test_get_two_resources(self, client_mock):
        expected_result = [{'RESOURCE_TOKEN_1': 'token-1',
                            'RESOURCE_TOKEN_2': 'token-2',
                            'RESOURCE_TOKEN_3': 'token-3',
                            'RESOURCE_TOKEN_4': 'token-4'
                            }]
        client_mock.return_value.get_resources.return_value = API_FIXTURES['https://api.marketplace.manifold.co/v1/resources']
        client_mock.return_value.get_credentials.side_effect = lambda x: API_FIXTURES['https://api.marketplace.manifold.co/v1/'
                                                                                      'credentials?resource_id={0}'.format(x)]
        self.assertListEqual(expected_result, self.lookup.run(['resource-1', 'resource-2'], api_token='token-123'))
        client_mock.assert_called_with('token-123')
        client_mock.return_value.get_resources.assert_called_with(team_id=None, project_id=None)
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.display')
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.ManifoldApiClient')
    def test_get_resources_with_same_credential_names(self, client_mock, display_mock):
        # Later resources override earlier ones on key clash, with a warning
        # per overwritten credential name.
        expected_result = [{'RESOURCE_TOKEN_1': 'token-5',
                            'RESOURCE_TOKEN_2': 'token-6'
                            }]
        client_mock.return_value.get_resources.return_value = API_FIXTURES['https://api.marketplace.manifold.co/v1/resources?project_id=pid-2']
        client_mock.return_value.get_projects.return_value = API_FIXTURES['https://api.marketplace.manifold.co/v1/projects?label=project-2']
        client_mock.return_value.get_credentials.side_effect = lambda x: API_FIXTURES['https://api.marketplace.manifold.co/v1/'
                                                                                      'credentials?resource_id={0}'.format(x)]
        self.assertListEqual(expected_result, self.lookup.run([], api_token='token-123', project='project-2'))
        client_mock.assert_called_with('token-123')
        display_mock.warning.assert_has_calls([
            call("'RESOURCE_TOKEN_1' with label 'resource-1' was replaced by resource data with label 'resource-3'"),
            call("'RESOURCE_TOKEN_2' with label 'resource-1' was replaced by resource data with label 'resource-3'")],
            any_order=True
        )
        client_mock.return_value.get_resources.assert_called_with(team_id=None, project_id='pid-2')
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.ManifoldApiClient')
    def test_filter_by_team(self, client_mock):
        expected_result = [{'RESOURCE_TOKEN_1': 'token-1',
                            'RESOURCE_TOKEN_2': 'token-2'
                            }]
        client_mock.return_value.get_resources.return_value = API_FIXTURES['https://api.marketplace.manifold.co/v1/resources?team_id=tid-1']
        client_mock.return_value.get_teams.return_value = API_FIXTURES['https://api.identity.manifold.co/v1/teams'][0:1]
        client_mock.return_value.get_credentials.side_effect = lambda x: API_FIXTURES['https://api.marketplace.manifold.co/v1/'
                                                                                      'credentials?resource_id={0}'.format(x)]
        self.assertListEqual(expected_result, self.lookup.run([], api_token='token-123', team='team-1'))
        client_mock.assert_called_with('token-123')
        client_mock.return_value.get_resources.assert_called_with(team_id='tid-1', project_id=None)
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.ManifoldApiClient')
    def test_filter_by_project(self, client_mock):
        expected_result = [{'RESOURCE_TOKEN_3': 'token-3',
                            'RESOURCE_TOKEN_4': 'token-4'
                            }]
        client_mock.return_value.get_resources.return_value = API_FIXTURES['https://api.marketplace.manifold.co/v1/resources?project_id=pid-1']
        client_mock.return_value.get_projects.return_value = API_FIXTURES['https://api.marketplace.manifold.co/v1/projects'][0:1]
        client_mock.return_value.get_credentials.side_effect = lambda x: API_FIXTURES['https://api.marketplace.manifold.co/v1/'
                                                                                      'credentials?resource_id={0}'.format(x)]
        self.assertListEqual(expected_result, self.lookup.run([], api_token='token-123', project='project-1'))
        client_mock.assert_called_with('token-123')
        client_mock.return_value.get_resources.assert_called_with(team_id=None, project_id='pid-1')
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.ManifoldApiClient')
    def test_filter_by_team_and_project(self, client_mock):
        expected_result = [{'RESOURCE_TOKEN_1': 'token-1',
                            'RESOURCE_TOKEN_2': 'token-2'
                            }]
        client_mock.return_value.get_resources.return_value = API_FIXTURES['https://api.marketplace.manifold.co/v1/resources?team_id=tid-1&project_id=pid-1']
        client_mock.return_value.get_teams.return_value = API_FIXTURES['https://api.identity.manifold.co/v1/teams'][0:1]
        client_mock.return_value.get_projects.return_value = API_FIXTURES['https://api.marketplace.manifold.co/v1/projects'][0:1]
        client_mock.return_value.get_credentials.side_effect = lambda x: API_FIXTURES['https://api.marketplace.manifold.co/v1/'
                                                                                      'credentials?resource_id={0}'.format(x)]
        self.assertListEqual(expected_result, self.lookup.run([], api_token='token-123', project='project-1'))
        client_mock.assert_called_with('token-123')
        client_mock.return_value.get_resources.assert_called_with(team_id=None, project_id='pid-1')
    # --- error paths ---
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.ManifoldApiClient')
    def test_raise_team_doesnt_exist(self, client_mock):
        client_mock.return_value.get_teams.return_value = []
        with self.assertRaises(AnsibleError) as context:
            self.lookup.run([], api_token='token-123', team='no-team')
        self.assertEqual("Team 'no-team' does not exist",
                         str(context.exception))
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.ManifoldApiClient')
    def test_raise_project_doesnt_exist(self, client_mock):
        client_mock.return_value.get_projects.return_value = []
        with self.assertRaises(AnsibleError) as context:
            self.lookup.run([], api_token='token-123', project='no-project')
        self.assertEqual("Project 'no-project' does not exist",
                         str(context.exception))
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.ManifoldApiClient')
    def test_raise_resource_doesnt_exist(self, client_mock):
        client_mock.return_value.get_resources.return_value = API_FIXTURES['https://api.marketplace.manifold.co/v1/resources']
        with self.assertRaises(AnsibleError) as context:
            self.lookup.run(['resource-1', 'no-resource-1', 'no-resource-2'], api_token='token-123')
        self.assertEqual("Resource(s) no-resource-1, no-resource-2 do not exist",
                         str(context.exception))
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.ManifoldApiClient')
    def test_catch_api_error(self, client_mock):
        client_mock.side_effect = ApiError('Generic error')
        with self.assertRaises(AnsibleError) as context:
            self.lookup.run([], api_token='token-123')
        self.assertEqual("API Error: Generic error",
                         str(context.exception))
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.ManifoldApiClient')
    def test_catch_unhandled_exception(self, client_mock):
        client_mock.side_effect = Exception('Unknown error')
        with self.assertRaises(AnsibleError) as context:
            self.lookup.run([], api_token='token-123')
        self.assertTrue('Exception: Unknown error' in str(context.exception))
    # --- API token resolution ---
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.os.getenv')
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.ManifoldApiClient')
    def test_falls_back_to_env_var(self, client_mock, getenv_mock):
        getenv_mock.return_value = 'token-321'
        client_mock.return_value.get_resources.return_value = []
        client_mock.return_value.get_credentials.return_value = []
        self.lookup.run([])
        getenv_mock.assert_called_with('MANIFOLD_API_TOKEN')
        client_mock.assert_called_with('token-321')
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.os.getenv')
    @patch('ansible_collections.notstdlib.moveitallout.plugins.lookup.manifold.ManifoldApiClient')
    def test_falls_raises_on_no_token(self, client_mock, getenv_mock):
        getenv_mock.return_value = None
        client_mock.return_value.get_resources.return_value = []
        client_mock.return_value.get_credentials.return_value = []
        with self.assertRaises(AnsibleError) as context:
            self.lookup.run([])
        self.assertEqual('API token is required. Please set api_token parameter or MANIFOLD_API_TOKEN env var',
                         str(context.exception))
| [
"[email protected]"
]
| |
63cff8ecff4e85addb997ed55491b924444e50ac | e1c7c25c22c2f854aa8e3d8f6fffdf80a0b4dfbf | /CodeChef/CodeChef_Contests/Distribute_Candies.py | b7374b54f77afa5e0246f35539e5cff21c1a8d76 | []
| no_license | roshan13ghimire/Competitive_Programming | efc85f9fe6fa46edff96931ca3a1cca78628918b | 0c238a391c6acee8763968ef298b765c133b7111 | refs/heads/master | 2023-04-15T16:35:07.711491 | 2021-04-12T03:00:05 | 2021-04-12T03:00:05 | 273,682,360 | 4 | 1 | null | 2020-08-05T02:11:53 | 2020-06-20T09:59:57 | Python | UTF-8 | Python | false | false | 344 | py | #Distribute_Candies
for _ in range(int(input())):
n = int(input())
a=list(map(int,input().split()))
if(len(a)==0):
print(a[0])
elif(len(a)==1):
print(min(a))
else:
a.sort(reverse=True)
s=0
for i in range(2,len(a),3):
s +=a [i]
print(s)
| [
"[email protected]"
]
| |
ab718c6751938719fe2a3fad3cb11077d9a8ee9a | 82b946da326148a3c1c1f687f96c0da165bb2c15 | /sdk/python/pulumi_azure_native/sql/v20201101preview/get_job_credential.py | 78541af0a1ae2757fdc71c031ccd8aba7355f0fc | [
"Apache-2.0",
"BSD-3-Clause"
]
| permissive | morrell/pulumi-azure-native | 3916e978382366607f3df0a669f24cb16293ff5e | cd3ba4b9cb08c5e1df7674c1c71695b80e443f08 | refs/heads/master | 2023-06-20T19:37:05.414924 | 2021-07-19T20:57:53 | 2021-07-19T20:57:53 | 387,815,163 | 0 | 0 | Apache-2.0 | 2021-07-20T14:18:29 | 2021-07-20T14:18:28 | null | UTF-8 | Python | false | false | 3,672 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
# Public API of this generated module.
__all__ = [
    'GetJobCredentialResult',
    'AwaitableGetJobCredentialResult',
    'get_job_credential',
]
# Generated by the Pulumi SDK generator (see file header) — keep logic
# changes in the generator, not here.
@pulumi.output_type
class GetJobCredentialResult:
    """
    A stored credential that can be used by a job to connect to target databases.
    """
    def __init__(__self__, id=None, name=None, type=None, username=None):
        # Each argument is validated as a string (when provided) and stored
        # via pulumi.set so the @pulumi.getter properties can retrieve it.
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
        if username and not isinstance(username, str):
            raise TypeError("Expected argument 'username' to be a str")
        pulumi.set(__self__, "username", username)
    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Resource ID.
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Resource name.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Resource type.
        """
        return pulumi.get(self, "type")
    @property
    @pulumi.getter
    def username(self) -> str:
        """
        The credential user name.
        """
        return pulumi.get(self, "username")
class AwaitableGetJobCredentialResult(GetJobCredentialResult):
    """Awaitable wrapper: allows ``await get_job_credential(...)`` style use.

    The unreachable ``yield`` makes __await__ a generator function, which is
    what the await protocol requires; the result is returned immediately.
    """
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return GetJobCredentialResult(
            id=self.id,
            name=self.name,
            type=self.type,
            username=self.username)
def get_job_credential(credential_name: Optional[str] = None,
                       job_agent_name: Optional[str] = None,
                       resource_group_name: Optional[str] = None,
                       server_name: Optional[str] = None,
                       opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetJobCredentialResult:
    """
    A stored credential that can be used by a job to connect to target databases.


    :param str credential_name: The name of the credential.
    :param str job_agent_name: The name of the job agent.
    :param str resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
    :param str server_name: The name of the server.
    :return: An awaitable result describing the stored job credential.
    """
    __args__ = dict()
    __args__['credentialName'] = credential_name
    __args__['jobAgentName'] = job_agent_name
    __args__['resourceGroupName'] = resource_group_name
    __args__['serverName'] = server_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        # Default to this SDK's own version when the caller didn't pin one.
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('azure-native:sql/v20201101preview:getJobCredential', __args__, opts=opts, typ=GetJobCredentialResult).value
    return AwaitableGetJobCredentialResult(
        id=__ret__.id,
        name=__ret__.name,
        type=__ret__.type,
        username=__ret__.username)
"[email protected]"
]
| |
5dc5dcbbd705e58f27c0631355130a07689f6f51 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/5/lsm.py | 5556fc62a4f5c02583d9d14afdb961e9c782ce7d | []
| no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
    # Python 2 code (print statements).
    # `lineRemaining` is a list of whitespace-split tokens; the first and
    # last token must each be exactly '"' for anything to be printed.
    if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
        if len(lineRemaining) > 2:
            # Drop the surrounding quote tokens, then print the payload
            # joined by single spaces.
            lineRemaining = lineRemaining[1:-1]
            print ' '.join(lineRemaining)
        else:
            # Just the two quote tokens: print an empty line.
            print
def main(fileName):
    # Interpret the file line by line: lines whose first token is 'lSM'
    # have their remaining tokens handed to printFunction; any other
    # first token prints ERROR and stops processing.
    # NOTE(review): a blank line would make data[0] raise IndexError —
    # confirm inputs never contain empty lines.
    with open(fileName) as f:
        for line in f:
            data = line.split()
            if data[0] == 'lSM':
                printFunction(data[1:])
            else:
                print 'ERROR'
                return
if __name__ == '__main__':
main(sys.argv[1]) | [
"[email protected]"
]
| |
bfbdd12c0144be46d6a9f9cd9681526f91cb2302 | b24e993bfae0e530b7c6ee676b0efa1b2cbea33c | /rsopt/run.py | 0b53f55ed903b0c68a87716fab0c306be408db6a | [
"Apache-2.0"
]
| permissive | tanxicccc/rsopt | f99d8d721ce37647717b41c08b44f69a065444ae | 8705e937f95a4bbe6ed3fb1a04b78f724a5f3931 | refs/heads/master | 2023-01-06T19:21:40.065806 | 2020-10-24T23:48:34 | 2020-10-24T23:48:34 | 288,584,476 | 0 | 0 | Apache-2.0 | 2020-08-18T23:19:55 | 2020-08-18T23:19:54 | null | UTF-8 | Python | false | false | 675 | py | # Functions to run a call from originating from pkcli
# This is just a temporary setup. libEnsembleOptimizer shouldn't actually be tied to execution mode
# It is instantiated because nlopt was requested
# The executor will be set up separately based on 'execution_type' in YAML and registered with libEnsembleOptimizer
from rsopt.libe_tools.optimizer import libEnsembleOptimizer
def serial(config):
    """Build a libEnsemble-backed optimizer from *config* for serial runs.

    Returns the configured optimizer without running it; the caller is
    expected to invoke .run() (the call here is intentionally commented
    out per the temporary-setup notes above).
    """
    opt = libEnsembleOptimizer()
    opt.load_configuration(config)
    return opt #.run()
# These names have to line up with accepted values for setup.execution_type
# (another place where shared names should come from a common source).
# Maps execution-type name -> runner function.
run_modes = {
    'serial': serial
}
"[email protected]"
]
| |
d5e3cb808796ca54150f2786b23de7ecf5ca0361 | 5f57cfb662e7a490235255273114d2eb712a9ce4 | /djd-prog2/manha-aula2/operadores2.py | 8f487d67a5131274c9bbe4910aa05cb36e8f139f | []
| no_license | antoniorcn/fatec-2020-2s | 762e14acfbf0cb42a2e478662e6cf0001794f72c | 4d90cc35d354382ad38c20ce2e32924216d7d747 | refs/heads/master | 2023-01-27T22:52:29.529360 | 2020-12-05T13:51:30 | 2020-12-05T13:51:30 | 289,972,977 | 9 | 7 | null | null | null | null | UTF-8 | Python | false | false | 153 | py | temperatura = 14
frio = temperatura < 15  # comparison yields a bool; 'temperatura' is set on the preceding line
horas_acordado = 18
sono = horas_acordado > 16  # sleepy after more than 16 hours awake
ficar_casa = sono and frio  # stay home only when both conditions hold
print("Ficar em casa: ", ficar_casa)
| [
"[email protected]"
]
| |
b9b986b188c838e28b4a55d6dad2d567a40a0153 | 616c7325ee8c4a7d37467a8ad760d00891d3b450 | /get_linkedin.py | 41f290be48e6b44cc8360af16080bdcde11e899f | []
| no_license | vintasoftware/networking.cool | c8f7b3d027b098966ea4c47a211a465eaf746661 | 314d3b32ab3d611fe2b622d57372dc29fb5eac5e | refs/heads/master | 2023-06-19T19:38:03.557182 | 2015-11-15T19:05:08 | 2015-11-15T19:05:08 | 64,401,363 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,270 | py | # coding: utf-8
from selenium.webdriver import Chrome
from selenium.common.exceptions import WebDriverException
import requests
from retrying import retry
import json
import time
import pprint
import traceback
def wait_for(fn, timeout=5):
    """Poll *fn* until it succeeds or *timeout* seconds elapse.

    A WebDriverException from *fn* is swallowed and the call retried every
    0.1s; once the deadline passes, one final attempt is made so that any
    outstanding error propagates to the caller.
    """
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            return fn()
        except WebDriverException:
            time.sleep(0.1)
    # Final attempt outside the loop: a persistent failure raises here.
    return fn()
def find_by_css(selector):
    # Resolve a CSS selector against the module-level `driver`, retrying
    # via wait_for until the element appears (or the final attempt raises).
    def _find():
        return driver.find_element_by_css_selector(selector)
    return wait_for(_find)
def find_linkedin_url(term):
    # Query the Google Custom Search API for `term` and return the link of
    # the first result carrying structured 'person' pagemap data (i.e. a
    # profile page). Returns None implicitly when no result matches.
    # SECURITY NOTE(review): the API key and cx are hard-coded — anyone with
    # this source can use (or abuse) them; move to environment/config and
    # rotate the key.
    params = {
        'key': 'AIzaSyBs_qRMdd3UxIp6HQ9zMidKupXkRCtmZrQ',
        'cx': '007098471526848933106:c_yqy7e87hi',
        'q': term
    }
    r = requests.get(
        'https://www.googleapis.com/customsearch/v1', params=params)
    r_json = r.json()
    for item in r_json['items']:
        if 'pagemap' in item and 'person' in item['pagemap']:
            return item['link']
@retry(stop_max_delay=6000)
def get_contact_linkedin_html(name, company):
    # Find the person's LinkedIn page via search, load it in the browser,
    # and return (profile HTML, url). Retried for up to 6000 ms by the
    # `retry` decorator because search or page load may transiently fail.
    term = u"{} {}".format(name, company)
    url = find_linkedin_url(term)
    if not url:
        raise ValueError(u"No result for {}".format(term))
    driver.get(url)
    return find_by_css('#profile').get_attribute('innerHTML'), url
# Module-level browser session and shared results accumulator.
driver = Chrome()
driver.set_window_size(1280, 700)
results = []
def main():
    # Python 2 code (print statements).
    # Load attendees, scrape each one's LinkedIn profile (best effort),
    # and dump everything to results.json.
    with open('attendees.json') as f:
        attendees_raw = json.load(f)
    # Keep only attendees that have a company ('info2') field.
    attendees = [(a['name'], a['info2']) for a in attendees_raw
                 if 'info2' in a]
    for name, company in attendees:
        try:
            html, link = get_contact_linkedin_html(name, company)
        except Exception:
            # Best-effort scraping: log the failure and record empty fields
            # so one bad profile doesn't abort the whole run.
            print traceback.print_exc()
            html = None
            link = None
        results.append({
            'name': name,
            'company': company,
            'html': html,
            'link': link
        })
    print len(results)
    with open('results.json', 'w+') as out:
        json.dump(results, out, indent=2)
    driver.quit()
if __name__ == '__main__':
    # Time the whole scrape as a rough progress/throughput indicator.
    start_time = time.time()
    main()
    print("--- ran in %s seconds ---" % (time.time() - start_time))
| [
"[email protected]"
]
| |
d2dfa36bff8f5800e973f158cab51208757331e3 | 7e15a679d37e8d8449871c8f6f3f649d05b33b36 | /web/handlers/comments.py | 7a8402f6a5785678f06d62e2661eec09161bb046 | []
| no_license | SuprDewd/gamma | a7514aaa86790e92df481da75c283279ac7a1678 | 98b291da26d8b54959134e493a25b73234d7df63 | refs/heads/master | 2021-01-17T16:59:05.247942 | 2014-03-22T23:09:28 | 2014-03-22T23:09:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 98 | py | from base import BaseHandler
class CommentsHandler(BaseHandler):
    """Request handler for the comments endpoint."""
    def get(self):
        # TODO: fetch and render comments; currently a stub that returns
        # nothing.
        pass
| [
"[email protected]"
]
| |
e4c28f5e3f4811b1f754c5f93c0fe9abaa34c559 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p4VQE/R3/benchmark/startPyquil415.py | dc4c420c9e6301077dbf98ee6a6da1cfc57977c0 | [
"BSD-3-Clause"
]
| permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,157 | py | # qubit number=4
# total number=11
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np
conn = QVMConnection()
def make_circuit()-> Program:
prog = Program() # circuit begin
prog += H(0) # number=1
prog += H(1) # number=2
prog += H(2) # number=3
prog += X(1) # number=8
prog += H(3) # number=4
prog += Y(3) # number=5
prog += X(3) # number=7
prog += CNOT(3,0) # number=9
prog += CNOT(3,0) # number=10
# circuit end
return prog
def summrise_results(bitstrings) -> dict:
d = {}
for l in bitstrings:
if d.get(l) is None:
d[l] = 1
else:
d[l] = d[l] + 1
return d
if __name__ == '__main__':
prog = make_circuit()
qvm = get_qc('4q-qvm')
results = qvm.run_and_measure(prog,1024)
bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
bitstrings = [''.join(map(str, l)) for l in bitstrings]
writefile = open("../data/startPyquil415.csv","w")
print(summrise_results(bitstrings),file=writefile)
writefile.close()
| [
"[email protected]"
]
| |
31f3bf254a89f1c3c9623a7faa5f180eded32255 | 6f0ceee714bccf2a89c34a06aabd3bcb781a2fa4 | /python/mxnet/symbol/linalg.py | 2bf543ea3217d9ac7fdd54328349f3e724134a22 | [
"Apache-2.0",
"MIT",
"Unlicense",
"BSL-1.0",
"NCSA",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"BSD-2-Clause",
"OFL-1.0",
"BSD-2-Clause-Views",
"Zlib"
]
| permissive | yajiedesign/mxnet | 5a495fd06dd1730c17d2d27d7e46c8a770847f17 | 8e5a16cf673db5aceb48d2cf7a0fc1abd0ee5e51 | refs/heads/master | 2021-03-30T22:37:18.603396 | 2020-10-23T06:40:17 | 2020-10-23T06:40:17 | 43,763,550 | 214 | 59 | Apache-2.0 | 2020-06-01T23:31:15 | 2015-10-06T16:36:40 | C++ | UTF-8 | Python | false | false | 980 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable=wildcard-import, unused-wildcard-import
"""Linear Algebra Symbol API of MXNet."""
try:
from .gen_linalg import *
except ImportError:
pass
__all__ = []
| [
"[email protected]"
]
| |
a5ea2d8532dca777ae398c51f038fb24bb124a41 | bab7cee09c1f90c93f98aad8c83180197a2cdabe | /devel/lib/python2.7/dist-packages/lanelet_map_msgs/msg/_Lanelet.py | 3f6991649c28950e23eaa46d42f21a542cd4593a | []
| no_license | lievech/car_simulation_ws | 25ccad1677e4af9054f8f2298b54ad44e4058676 | d3c74a3af3f242e34fac97d525325ab62d2be3a5 | refs/heads/master | 2020-08-02T06:32:10.944218 | 2019-09-28T02:56:50 | 2019-09-28T02:56:50 | 211,263,284 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 109 | py | /home/lhn/hhh_ws/devel/.private/lanelet_map_msgs/lib/python2.7/dist-packages/lanelet_map_msgs/msg/_Lanelet.py | [
"[email protected]"
]
| |
f165ad0ca683b70f6c06e7d04235a1a1ed8a7f8c | 028a00f712ed1bc005ba0ca60e33c333bd46acb5 | /VL-T5/src/pretrain.py | 6ce85fed51e06824881043597daa9485126fd8ca | []
| no_license | Ch4osMy7h/VL-T5 | 14aa5cd094e7510e110b183bdb5743aead33b3af | 18699e2b1d5b4559f76c88a48cdec7176d356c34 | refs/heads/main | 2023-08-25T14:03:14.946471 | 2021-11-10T16:13:41 | 2021-11-10T16:13:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,796 | py | import collections
import os
import random
from pathlib import Path
import logging
import shutil
from packaging import version
from tqdm import tqdm
import numpy as np
import torch
import torch.nn as nn
from torch.nn.parallel import DistributedDataParallel as DDP
import torch.distributed as dist
import torch.backends.cudnn as cudnn
from param import parse_args
from pretrain_data import get_loader
from utils import LossMeter
from dist_utils import reduce_dict
import wandb
_use_native_amp = False
_use_apex = False
# Check if Pytorch version >= 1.6 to switch between Native AMP and Apex
if version.parse(torch.__version__) < version.parse("1.6"):
from transormers.file_utils import is_apex_available
if is_apex_available():
from apex import amp
_use_apex = True
else:
_use_native_amp = True
from torch.cuda.amp import autocast
from trainer_base import TrainerBase
class Trainer(TrainerBase):
def __init__(self, args, train_loader=None, val_loader=None, test_loader=None, train=True):
super().__init__(
args,
train_loader=train_loader,
val_loader=val_loader,
test_loader=test_loader,
train=train)
from pretrain_model import VLT5Pretraining, VLBartPretraining
model_kwargs = {}
if 't5' in args.backbone:
model_class = VLT5Pretraining
elif 'bart' in args.backbone:
model_class = VLBartPretraining
config = self.create_config()
self.tokenizer = self.create_tokenizer()
if 'bart' in self.args.tokenizer:
num_added_toks = 0
if config.use_vis_order_embedding:
additional_special_tokens = [f'<extra_id_{i}>' for i in range(100-1, -1, -1)] + \
[f'<vis_extra_id_{i}>' for i in range(100-1, -1, -1)]
special_tokens_dict = {'additional_special_tokens': additional_special_tokens}
num_added_toks = self.tokenizer.add_special_tokens(special_tokens_dict)
config.default_obj_order_ids = self.tokenizer.convert_tokens_to_ids([f'<vis_extra_id_{i}>' for i in range(100)])
self.model = self.create_model(model_class, config, **model_kwargs)
if 't5' in self.args.tokenizer:
self.model.resize_token_embeddings(self.tokenizer.vocab_size)
elif 'bart' in self.args.tokenizer:
self.model.resize_token_embeddings(self.model.model.shared.num_embeddings + num_added_toks)
self.model.tokenizer = self.tokenizer
# Load Checkpoint
self.start_epoch = None
if args.load is not None:
ckpt_path = args.load + '.pth'
self.load_checkpoint(ckpt_path)
self.start_epoch = int(args.load.split('Epoch')[-1])
if self.args.from_scratch:
self.init_weights()
# GPU Options
print(f'Model Launching at GPU {self.args.gpu}')
if self.verbose:
from time import time
start = time()
self.model = self.model.to(args.gpu)
# Optimizer
if train:
self.optim, self.lr_scheduler = self.create_optimizer_and_scheduler()
if self.args.fp16 and _use_native_amp:
self.scaler = torch.cuda.amp.GradScaler()
elif _use_apex:
self.model, self.optim = amp.initialize(
self.model, self.optim, opt_level='O1', verbosity=self.verbose)
if args.multiGPU:
if args.distributed:
self.model = DDP(self.model, device_ids=[args.gpu],
find_unused_parameters=True
)
if self.verbose:
print(f'It took {time() - start:.1f}s')
def train(self):
LOSSES_NAME = self.args.LOSSES_NAME
if self.args.dry:
results = self.evaluate_epoch(epoch=0)
if self.verbose:
loss_meters = [LossMeter() for _ in range(len(LOSSES_NAME))]
best_eval_loss = 9595.
if 't5' in self.args.backbone:
project_name = "VLT5_Pretrain"
elif 'bart' in self.args.backbone:
project_name = "VLBart_Pretrain"
wandb.init(project=project_name)
wandb.run.name = self.args.run_name
wandb.config.update(self.args)
wandb.watch(self.model)
src_dir = Path(__file__).resolve().parent
base_path = str(src_dir.parent)
src_dir = str(src_dir)
wandb.save(os.path.join(src_dir + "/*.py"), base_path=base_path)
if self.args.distributed:
dist.barrier()
global_step = 0
for epoch in range(self.args.epochs):
if self.start_epoch is not None:
epoch += self.start_epoch
if self.args.distributed:
self.train_loader.sampler.set_epoch(epoch)
# Train
self.model.train()
if self.verbose:
pbar = tqdm(total=len(self.train_loader), ncols=250)
epoch_results = {}
for loss_name in LOSSES_NAME:
epoch_results[loss_name] = 0.
epoch_results[f'{loss_name}_count'] = 0
for step_i, batch in enumerate(self.train_loader):
if self.args.fp16 and _use_native_amp:
with autocast():
if self.args.distributed:
results = self.model.module.train_step(batch)
else:
results = self.model.train_step(batch)
else:
if self.args.distributed:
results = self.model.module.train_step(batch)
else:
results = self.model.train_step(batch)
loss = results['loss']
if self.args.fp16 and _use_native_amp:
self.scaler.scale(loss).backward()
elif self.args.fp16 and _use_apex:
with amp.scale_loss(loss, self.optim) as scaled_loss:
scaled_loss.backward()
else:
loss.backward()
loss = loss.detach()
# Update Parameters
if self.args.clip_grad_norm > 0:
if self.args.fp16 and _use_native_amp:
self.scaler.unscale_(self.optim)
torch.nn.utils.clip_grad_norm_(self.model.parameters(), self.args.clip_grad_norm)
elif self.args.fp16 and _use_apex:
torch.nn.utils.clip_grad_norm_(amp.master_params(self.optim), self.args.clip_grad_norm)
else:
torch.nn.utils.clip_grad_norm_(self.model.parameters(), self.args.clip_grad_norm)
if self.args.fp16 and _use_native_amp:
self.scaler.step(self.optim)
self.scaler.update()
else:
self.optim.step()
if self.lr_scheduler:
self.lr_scheduler.step()
# self.model.zero_grad()
for param in self.model.parameters():
param.grad = None
global_step += 1
if self.lr_scheduler:
if version.parse(torch.__version__) >= version.parse("1.4"):
lr = self.lr_scheduler.get_last_lr()[0]
else:
lr = self.lr_scheduler.get_lr()[0]
else:
try:
lr = self.optim.get_lr()[0]
except AttributeError:
lr = self.args.lr
for k, v in results.items():
if k in epoch_results:
if isinstance(v, int):
epoch_results[k] += v
elif isinstance(v, torch.Tensor):
epoch_results[k] += v.item()
if self.verbose:
desc_str = f'Epoch {epoch} | LR {lr:.6f} |'
for i, (loss_name, loss_meter) in enumerate(zip(LOSSES_NAME, loss_meters)):
if loss_name in results:
loss_meter.update(results[f'{loss_name}'] / results[f'{loss_name}_count'])
if len(loss_meter) > 0:
loss_count = epoch_results[f'{loss_name}_count']
desc_str += f' {loss_name} ({loss_count}) {loss_meter.val:.3f}'
pbar.set_description(desc_str)
pbar.update(1)
if self.verbose:
pbar.close()
dist.barrier()
results = reduce_dict(epoch_results, average=False)
if self.verbose:
train_loss = results['total_loss']
train_loss_count = results['total_loss_count']
avg_train_loss = train_loss / train_loss_count
losses_str = f"Train Loss: {avg_train_loss:.3f}\n"
for name, loss in results.items():
if name[-4:] == 'loss':
loss_count = int(results[name+'_count'])
if loss_count > 0:
avg_loss = loss/loss_count
losses_str += f"{name} ({loss_count}): {avg_loss:.3f} "
wandb.log({f'Train Loss/{name}': avg_loss}, step=epoch)
losses_str += '\n'
print(losses_str)
dist.barrier()
# Validation
valid_results, valid_uid2ans = self.evaluate_epoch(epoch=epoch)
valid_results = reduce_dict(valid_results, average=False)
if self.verbose:
valid_loss = valid_results['total_loss']
valid_loss_count = valid_results['total_loss_count']
avg_valid_loss = valid_loss / valid_loss_count
losses_str = f"Valid Loss: {avg_valid_loss:.3f}\n"
for name, loss in valid_results.items():
if name[-4:] == 'loss':
loss_count = int(valid_results[name+'_count'])
if loss_count > 0:
avg_loss = loss / loss_count
losses_str += f"{name} ({loss_count}): {avg_loss:.3f} "
wandb.log({f'Valid Loss/{name}': avg_loss}, step=epoch)
losses_str += '\n'
print(losses_str)
if 'qa' in self.args.losses:
dset2score, dset2cnt, score, cnt = self.val_loader.dataset.evaluator.evaluate(valid_uid2ans)
if len(dset2score) == 0:
dset2score = {'vqa': 0, 'gqa': 0, 'visual7w': 0}
dset2cnt = {'vqa': 1, 'gqa': 1, 'visual7w': 1}
cnt = 3
score = 0
dset2score = reduce_dict(dset2score, average=False)
dset2cnt = reduce_dict(dset2cnt, average=False)
score_cnt_dict = reduce_dict({'score': score, 'cnt': cnt}, average=False)
if self.args.gpu == 0:
score = score_cnt_dict['score']
cnt = score_cnt_dict['cnt']
accu = score / cnt
dset2accu = {}
for dset in dset2cnt:
dset2accu[dset] = dset2score[dset] / dset2cnt[dset]
accu_str = "Overall QA Acc %0.4f" % (accu)
wandb.log({f'Valid QA Acc/Overall': accu}, step=epoch)
sorted_keys = sorted(dset2accu.keys())
for key in sorted_keys:
accu_str += ", %s Acc %0.4f" % (key, dset2accu[key])
wandb.log({f'Valid QA Acc/{key}': dset2accu[key]}, step=epoch)
print(accu_str)
accu_str += '\n\n'
dist.barrier()
if self.verbose:
# Save
if avg_valid_loss < best_eval_loss:
best_eval_loss = avg_valid_loss
# self.save("BEST_EVAL_LOSS")
self.save("Epoch%02d" % (epoch + 1))
dist.barrier()
if self.verbose:
wandb.log({'finished': True})
def evaluate_epoch(self, epoch):
LOSSES_NAME = self.args.LOSSES_NAME
epoch_results = {}
for loss_name in LOSSES_NAME:
epoch_results[loss_name] = 0.
epoch_results[f'{loss_name}_count'] = 0
uid2ans = {}
self.model.eval()
with torch.no_grad():
if self.verbose:
loss_meter = LossMeter()
loss_meters = [LossMeter() for _ in range(len(LOSSES_NAME))]
pbar = tqdm(total=len(self.val_loader), ncols=250)
for step_i, batch in enumerate(self.val_loader):
if self.args.distributed:
results = self.model.module.valid_step(batch)
else:
results = self.model.valid_step(batch)
if 'qa' in self.args.losses:
qa_pred = results['qa_pred']
for uid, ans in zip(batch['uid'], qa_pred):
uid2ans[uid] = ans
for k, v in results.items():
if k in epoch_results:
if isinstance(v, int):
epoch_results[k] += v
elif isinstance(v, torch.Tensor):
epoch_results[k] += v.item()
if self.verbose:
desc_str = f'Valid Epoch {epoch} |'
for i, (loss_name, loss_meter) in enumerate(zip(LOSSES_NAME, loss_meters)):
if loss_name in results:
loss_meter.update(results[f'{loss_name}'] / results[f'{loss_name}_count'])
if len(loss_meter) > 0:
loss_count = epoch_results[f'{loss_name}_count']
desc_str += f' {loss_name} ({loss_count}) {loss_meter.val:.3f}'
pbar.set_description(desc_str)
pbar.update(1)
dist.barrier()
if self.verbose:
pbar.close()
dist.barrier()
if 'qa' not in self.args.losses:
uid2ans = None
return epoch_results, uid2ans
def main_worker(gpu, args):
# GPU is assigned
args.gpu = gpu
args.rank = gpu
print(f'Process Launching at GPU {gpu}')
if args.distributed:
torch.cuda.set_device(args.gpu)
dist.init_process_group(backend='nccl')
print(f'Building train loader at GPU {gpu}')
train_loader = get_loader(
args,
split=args.train, mode='train', batch_size=args.batch_size,
distributed=args.distributed, gpu=args.gpu,
workers=args.num_workers,
topk=args.train_topk,)
print(f'Building val loader at GPU {gpu}')
val_loader = get_loader(
args,
split=args.valid, mode='val', batch_size=args.batch_size,
distributed=args.distributed, gpu=args.gpu,
workers=args.num_workers,
topk=args.valid_topk,)
trainer = Trainer(args, train_loader, val_loader, train=True)
trainer.train()
if __name__ == "__main__":
cudnn.benchmark = True
args = parse_args()
if args.local_rank in [0, -1]:
print(args)
ngpus_per_node = torch.cuda.device_count()
args.world_size = ngpus_per_node
LOSSES_NAME = [f'{name}_loss' for name in args.losses.split(',')]
if args.local_rank in [0, -1]:
print(LOSSES_NAME)
LOSSES_NAME.append('total_loss') # total loss
args.LOSSES_NAME = LOSSES_NAME
comments = []
dsets = []
if 'coco' in args.train:
dsets.append('COCO')
if 'vg' in args.train:
dsets.append('VG')
comments.append(''.join(dsets))
if args.backbone:
comments.append(args.backbone)
comments.append(''.join(args.losses.split(',')))
if args.comment != '':
comments.append(args.comment)
comment = '_'.join(comments)
from datetime import datetime
current_time = datetime.now().strftime('%b%d_%H-%M')
project_dir = Path(__file__).resolve().parent.parent
if args.local_rank in [0, -1]:
run_name = f'{current_time}_GPU{args.world_size}'
if len(comments) > 0:
run_name += f'_{comment}'
args.run_name = run_name
if args.distributed:
main_worker(args.local_rank, args)
| [
"[email protected]"
]
| |
6c8f1633699688ed4eac61c01aefb521fce09724 | 1fe8d4133981e53e88abf633046060b56fae883e | /venv/lib/python3.8/site-packages/tensorflow/lite/python/interpreter 2.py | c71daaf1bb4739a1074055c116eca8825cfc74e0 | []
| no_license | Akira331/flask-cifar10 | 6c49db8485038731ce67d23f0972b9574746c7a7 | 283e7a2867c77d4b6aba7aea9013bf241d35d76c | refs/heads/master | 2023-06-14T16:35:06.384755 | 2021-07-05T14:09:15 | 2021-07-05T14:09:15 | 382,864,970 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 130 | py | version https://git-lfs.github.com/spec/v1
oid sha256:7ecbc5673643005289606d082d863298698c13aa95073050a67828c541faabde
size 33756
| [
"[email protected]"
]
| |
45661f2da45c893dd1a6b2204c6601008132cdfd | f079cce1a4f5d1e43ac5d6d83307609577086dd7 | /tests/test_awesome_cli.py | 7f1f9062ff5facf4788bfeb21532131691b9059d | [
"CC-BY-4.0"
]
| permissive | moraesmv/awesome-aws | 0ee446cba83bb9d3201af5cc496333319b3ea65b | b3bac0395f23901d5338d4c036076307ce8c73b1 | refs/heads/master | 2020-05-31T00:09:26.761636 | 2015-12-28T13:19:48 | 2015-12-28T13:19:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 431 | py | # -*- coding: utf-8 -*-
# Copyright 2015 Donne Martin. All Rights Reserved.
#
# Creative Commons Attribution 4.0 International License (CC BY 4.0)
# http://creativecommons.org/licenses/by/4.0/
import unittest
from click.testing import CliRunner
from awesome.awesome_cli import AwesomeCli
class AwesomeCliTest(unittest.TestCase):
def setUp(self):
self.runner = CliRunner()
self.awesome_cli = AwesomeCli()
| [
"[email protected]"
]
| |
3128a7eec6212fb006adc2f806bb0d2303192523 | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /125_algorithms/_exercises/templates/_algorithms_challenges/leetcode/LeetcodePythonProject_with_solution/leetcode_0351_0400/LeetCode358_RearrangeStringKDistanceApart.py | 596860dc901240f03530a0afafccc23be9414195 | []
| no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 1,068 | py | '''
Created on Mar 23, 2017
@author: MT
'''
_______ h__
c_ Solution(o..
___ rearrangeString s, k
hashmap # dict
___ c __ s:
hashmap[c] hashmap.g.. c, 0)+1
heap # list
___ c, freq __ hashmap.i..
h__.heappush(heap, [-freq, c])
queue # list
res # list
w.... heap:
freq, c h__.heappop(heap)
res.a..(c)
queue.a..([freq, c])
__ l..(queue) < k:
_____
freq, c queue.p.. 0)
freq -freq-1
__ freq > 0
h__.heappush(heap, [-freq, c])
r.. ''.j..(res) __ l..(res) __ l..(s) ____ ''
___ test
testCases [
('aabbcc', 3),
('aaabc', 3),
('aaadbbcc', 2),
]
___ s, k __ testCases:
print('s: %s' % (s
print('k: %s' % (k
result rearrangeString(s, k)
print('result: %s' % (result
print('-='*20+'-')
__ _____ __ _____
Solution().test()
| [
"[email protected]"
]
| |
34d5ebde3ef06d928460a3edefc4c0baffb9005f | f00bedd8c6d1293393a5054859e4e0902d650154 | /pattern-1.py | 596e38284d248f4b83fb03386a68d6bbe779d6d4 | []
| no_license | preetising/Loops | fd73f3fcba07a8c62ea436bbf2d7774e1301ac1d | 6783714262c063569c628ec0efeffaf9a9d6b884 | refs/heads/main | 2023-08-05T16:31:30.008834 | 2021-10-10T08:12:36 | 2021-10-10T08:12:36 | 384,902,449 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 86 | py | i=1
while i<=5:
j=1
while j<=5:
print(i,end=' ')
j=j+1
i=i+1
print()
| [
"[email protected]"
]
| |
ba7485f2e42395962091a0ab75e89a4a1d966a1e | c50e7eb190802d7849c0d0cea02fb4d2f0021777 | /src/k8s-extension/azext_k8s_extension/vendored_sdks/v2022_03_01/operations/_extensions_operations.py | 5ebf5788b77bd72c64d60be3a9b296d2febfb7f7 | [
"LicenseRef-scancode-generic-cla",
"MIT"
]
| permissive | Azure/azure-cli-extensions | c1615b19930bba7166c282918f166cd40ff6609c | b8c2cf97e991adf0c0a207d810316b8f4686dc29 | refs/heads/main | 2023-08-24T12:40:15.528432 | 2023-08-24T09:17:25 | 2023-08-24T09:17:25 | 106,580,024 | 336 | 1,226 | MIT | 2023-09-14T10:48:57 | 2017-10-11T16:27:31 | Python | UTF-8 | Python | false | false | 37,393 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
JSONType = Any
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_create_request_initial(
subscription_id: str,
resource_group_name: str,
cluster_rp: str,
cluster_resource_name: str,
cluster_name: str,
extension_name: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2022-03-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
"clusterRp": _SERIALIZER.url("cluster_rp", cluster_rp, 'str'),
"clusterResourceName": _SERIALIZER.url("cluster_resource_name", cluster_resource_name, 'str'),
"clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'),
"extensionName": _SERIALIZER.url("extension_name", extension_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_get_request(
subscription_id: str,
resource_group_name: str,
cluster_rp: str,
cluster_resource_name: str,
cluster_name: str,
extension_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2022-03-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
"clusterRp": _SERIALIZER.url("cluster_rp", cluster_rp, 'str'),
"clusterResourceName": _SERIALIZER.url("cluster_resource_name", cluster_resource_name, 'str'),
"clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'),
"extensionName": _SERIALIZER.url("extension_name", extension_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_delete_request_initial(
subscription_id: str,
resource_group_name: str,
cluster_rp: str,
cluster_resource_name: str,
cluster_name: str,
extension_name: str,
*,
force_delete: Optional[bool] = None,
**kwargs: Any
) -> HttpRequest:
api_version = "2022-03-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
"clusterRp": _SERIALIZER.url("cluster_rp", cluster_rp, 'str'),
"clusterResourceName": _SERIALIZER.url("cluster_resource_name", cluster_resource_name, 'str'),
"clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'),
"extensionName": _SERIALIZER.url("extension_name", extension_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
if force_delete is not None:
query_parameters['forceDelete'] = _SERIALIZER.query("force_delete", force_delete, 'bool')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_update_request_initial(
subscription_id: str,
resource_group_name: str,
cluster_rp: str,
cluster_resource_name: str,
cluster_name: str,
extension_name: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2022-03-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
"clusterRp": _SERIALIZER.url("cluster_rp", cluster_rp, 'str'),
"clusterResourceName": _SERIALIZER.url("cluster_resource_name", cluster_resource_name, 'str'),
"clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'),
"extensionName": _SERIALIZER.url("extension_name", extension_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PATCH",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_list_request(
subscription_id: str,
resource_group_name: str,
cluster_rp: str,
cluster_resource_name: str,
cluster_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2022-03-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
"clusterRp": _SERIALIZER.url("cluster_rp", cluster_rp, 'str'),
"clusterResourceName": _SERIALIZER.url("cluster_resource_name", cluster_resource_name, 'str'),
"clusterName": _SERIALIZER.url("cluster_name", cluster_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
class ExtensionsOperations(object):
    """ExtensionsOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.kubernetesconfiguration.v2022_03_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def _create_initial(
        self,
        resource_group_name: str,
        cluster_rp: str,
        cluster_resource_name: str,
        cluster_name: str,
        extension_name: str,
        extension: "_models.Extension",
        **kwargs: Any
    ) -> "_models.Extension":
        # Initial (non-polling) request of the Create long-running operation.
        # Polling to completion is handled by begin_create().
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Extension"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        # Serialize the model into the request body.
        _json = self._serialize.body(extension, 'Extension')

        request = build_create_request_initial(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            cluster_rp=cluster_rp,
            cluster_resource_name=cluster_resource_name,
            cluster_name=cluster_name,
            extension_name=extension_name,
            content_type=content_type,
            json=_json,
            template_url=self._create_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 = already-completed create, 201 = accepted/created; anything else is an error.
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if response.status_code == 200:
            deserialized = self._deserialize('Extension', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('Extension', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}'}  # type: ignore

    @distributed_trace
    def begin_create(
        self,
        resource_group_name: str,
        cluster_rp: str,
        cluster_resource_name: str,
        cluster_name: str,
        extension_name: str,
        extension: "_models.Extension",
        **kwargs: Any
    ) -> LROPoller["_models.Extension"]:
        """Create a new Kubernetes Cluster Extension.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService,
         Microsoft.Kubernetes, Microsoft.HybridContainerService.
        :type cluster_rp: str
        :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters,
         connectedClusters, provisionedClusters.
        :type cluster_resource_name: str
        :param cluster_name: The name of the kubernetes cluster.
        :type cluster_name: str
        :param extension_name: Name of the Extension.
        :type extension_name: str
        :param extension: Properties necessary to Create an Extension.
        :type extension: ~azure.mgmt.kubernetesconfiguration.v2022_03_01.models.Extension
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either Extension or the result of cls(response)
        :rtype:
         ~azure.core.polling.LROPoller[~azure.mgmt.kubernetesconfiguration.v2022_03_01.models.Extension]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
        polling = kwargs.pop('polling', True)  # type: Union[bool, azure.core.polling.PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Extension"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved poller state: issue the initial request. cls is overridden
            # so the raw pipeline response is kept for the poller.
            raw_result = self._create_initial(
                resource_group_name=resource_group_name,
                cluster_rp=cluster_rp,
                cluster_resource_name=cluster_resource_name,
                cluster_name=cluster_name,
                extension_name=extension_name,
                extension=extension,
                content_type=content_type,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the final response once the LRO completes.
            response = pipeline_response.http_response
            deserialized = self._deserialize('Extension', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized


        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started operation from its continuation token.
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}'}  # type: ignore

    @distributed_trace
    def get(
        self,
        resource_group_name: str,
        cluster_rp: str,
        cluster_resource_name: str,
        cluster_name: str,
        extension_name: str,
        **kwargs: Any
    ) -> "_models.Extension":
        """Gets Kubernetes Cluster Extension.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService,
         Microsoft.Kubernetes, Microsoft.HybridContainerService.
        :type cluster_rp: str
        :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters,
         connectedClusters, provisionedClusters.
        :type cluster_resource_name: str
        :param cluster_name: The name of the kubernetes cluster.
        :type cluster_name: str
        :param extension_name: Name of the Extension.
        :type extension_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Extension, or the result of cls(response)
        :rtype: ~azure.mgmt.kubernetesconfiguration.v2022_03_01.models.Extension
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Extension"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))


        request = build_get_request(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            cluster_rp=cluster_rp,
            cluster_resource_name=cluster_resource_name,
            cluster_name=cluster_name,
            extension_name=extension_name,
            template_url=self.get.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            # Non-success: try to deserialize the service's ErrorResponse for a richer exception.
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('Extension', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}'}  # type: ignore

    def _delete_initial(
        self,
        resource_group_name: str,
        cluster_rp: str,
        cluster_resource_name: str,
        cluster_name: str,
        extension_name: str,
        force_delete: Optional[bool] = None,
        **kwargs: Any
    ) -> None:
        # Initial (non-polling) request of the Delete long-running operation.
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))


        request = build_delete_request_initial(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            cluster_rp=cluster_rp,
            cluster_resource_name=cluster_resource_name,
            cluster_name=cluster_name,
            extension_name=extension_name,
            force_delete=force_delete,
            template_url=self._delete_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200/202 = accepted for (async) deletion, 204 = already gone.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}'}  # type: ignore

    @distributed_trace
    def begin_delete(
        self,
        resource_group_name: str,
        cluster_rp: str,
        cluster_resource_name: str,
        cluster_name: str,
        extension_name: str,
        force_delete: Optional[bool] = None,
        **kwargs: Any
    ) -> LROPoller[None]:
        """Delete a Kubernetes Cluster Extension. This will cause the Agent to Uninstall the extension
        from the cluster.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService,
         Microsoft.Kubernetes, Microsoft.HybridContainerService.
        :type cluster_rp: str
        :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters,
         connectedClusters, provisionedClusters.
        :type cluster_resource_name: str
        :param cluster_name: The name of the kubernetes cluster.
        :type cluster_name: str
        :param extension_name: Name of the Extension.
        :type extension_name: str
        :param force_delete: Delete the extension resource in Azure - not the normal asynchronous
         delete.
        :type force_delete: bool
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, azure.core.polling.PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                cluster_rp=cluster_rp,
                cluster_resource_name=cluster_resource_name,
                cluster_name=cluster_name,
                extension_name=extension_name,
                force_delete=force_delete,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)

        def get_long_running_output(pipeline_response):
            # Delete produces no body; returns None unless a custom cls is supplied.
            if cls:
                return cls(pipeline_response, None, {})


        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}'}  # type: ignore

    def _update_initial(
        self,
        resource_group_name: str,
        cluster_rp: str,
        cluster_resource_name: str,
        cluster_name: str,
        extension_name: str,
        patch_extension: "_models.PatchExtension",
        **kwargs: Any
    ) -> "_models.Extension":
        # Initial (non-polling) request of the Update (PATCH) long-running operation.
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Extension"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        _json = self._serialize.body(patch_extension, 'PatchExtension')

        request = build_update_request_initial(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            cluster_rp=cluster_rp,
            cluster_resource_name=cluster_resource_name,
            cluster_name=cluster_name,
            extension_name=extension_name,
            content_type=content_type,
            json=_json,
            template_url=self._update_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # The service only replies 202 (Accepted) for the initial PATCH.
        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('Extension', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}'}  # type: ignore

    @distributed_trace
    def begin_update(
        self,
        resource_group_name: str,
        cluster_rp: str,
        cluster_resource_name: str,
        cluster_name: str,
        extension_name: str,
        patch_extension: "_models.PatchExtension",
        **kwargs: Any
    ) -> LROPoller["_models.Extension"]:
        """Patch an existing Kubernetes Cluster Extension.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService,
         Microsoft.Kubernetes, Microsoft.HybridContainerService.
        :type cluster_rp: str
        :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters,
         connectedClusters, provisionedClusters.
        :type cluster_resource_name: str
        :param cluster_name: The name of the kubernetes cluster.
        :type cluster_name: str
        :param extension_name: Name of the Extension.
        :type extension_name: str
        :param patch_extension: Properties to Patch in an existing Extension.
        :type patch_extension: ~azure.mgmt.kubernetesconfiguration.v2022_03_01.models.PatchExtension
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either Extension or the result of cls(response)
        :rtype:
         ~azure.core.polling.LROPoller[~azure.mgmt.kubernetesconfiguration.v2022_03_01.models.Extension]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
        polling = kwargs.pop('polling', True)  # type: Union[bool, azure.core.polling.PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Extension"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            raw_result = self._update_initial(
                resource_group_name=resource_group_name,
                cluster_rp=cluster_rp,
                cluster_resource_name=cluster_resource_name,
                cluster_name=cluster_name,
                extension_name=extension_name,
                patch_extension=patch_extension,
                content_type=content_type,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)

        def get_long_running_output(pipeline_response):
            response = pipeline_response.http_response
            deserialized = self._deserialize('Extension', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized


        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions/{extensionName}'}  # type: ignore

    @distributed_trace
    def list(
        self,
        resource_group_name: str,
        cluster_rp: str,
        cluster_resource_name: str,
        cluster_name: str,
        **kwargs: Any
    ) -> Iterable["_models.ExtensionsList"]:
        """List all Extensions in the cluster.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param cluster_rp: The Kubernetes cluster RP - i.e. Microsoft.ContainerService,
         Microsoft.Kubernetes, Microsoft.HybridContainerService.
        :type cluster_rp: str
        :param cluster_resource_name: The Kubernetes cluster resource name - i.e. managedClusters,
         connectedClusters, provisionedClusters.
        :type cluster_resource_name: str
        :param cluster_name: The name of the kubernetes cluster.
        :type cluster_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ExtensionsList or the result of cls(response)
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.mgmt.kubernetesconfiguration.v2022_03_01.models.ExtensionsList]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ExtensionsList"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        def prepare_request(next_link=None):
            # First page uses the operation's templated URL; subsequent pages
            # reuse the service-provided nextLink as the template.
            if not next_link:
                request = build_list_request(
                    subscription_id=self._config.subscription_id,
                    resource_group_name=resource_group_name,
                    cluster_rp=cluster_rp,
                    cluster_resource_name=cluster_resource_name,
                    cluster_name=cluster_name,
                    template_url=self.list.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)

            else:
                request = build_list_request(
                    subscription_id=self._config.subscription_id,
                    resource_group_name=resource_group_name,
                    cluster_rp=cluster_rp,
                    cluster_resource_name=cluster_resource_name,
                    cluster_name=cluster_name,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            # Pull the page's items and the link to the next page out of the response.
            deserialized = self._deserialize("ExtensionsList", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response


        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{clusterRp}/{clusterResourceName}/{clusterName}/providers/Microsoft.KubernetesConfiguration/extensions'}  # type: ignore
| [
"[email protected]"
]
| |
a92affe8b0afb78a1c8610adfff9fe2d407ddb83 | 735a315ea82893f2acd5ac141f1a9b8be89f5cb9 | /pylib/mdsplus_alpha/tests/segmentsUnitTest.py | 67b013c0f7cb3cedebc0e7516d4af242ca439150 | []
| no_license | drsmith48/pppl-mdsplus-python | 5ce6f7ccef4a23ea4b8296aa06f51f3a646dd36f | 0fb5100e6718c8c10f04c3aac120558f521f9a59 | refs/heads/master | 2021-07-08T02:29:59.069616 | 2017-10-04T20:17:32 | 2017-10-04T20:17:32 | 105,808,853 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,712 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from unittest import TestCase,TestSuite
from tree import Tree
from mdsdata import *
from mdsscalar import *
from mdsarray import *
import numpy as np
import random
import os
import sys
import tempfile
_tmpdir=tempfile.mkdtemp()
def setUpModule():
    """Module-level fixture hook; nothing needs to be prepared."""
    return None
def tearDownModule():
    """Remove the temporary tree directory created at import time."""
    from shutil import rmtree
    rmtree(_tmpdir)
class segmentsTests(TestCase):
    """Exercises MDSplus segmented-record writing and shape round-tripping."""

    def setUp(self):
        # The 'seg_tree' tree is resolved through this environment variable;
        # point it at the module-level temporary directory.
        os.environ["seg_tree_path"]=_tmpdir

    def tearDown(self):
        pass

    def arrayDimensionOrder(self):
        # Build a fresh model tree with a single node IMM, then create and
        # open pulse 1 of it.
        ptree=Tree('seg_tree',-1,'NEW')
        ptree.addNode('IMM')
        ptree.write()
        ptree=Tree('seg_tree',-1)
        ptree.createPulse(1)
        ptree=Tree('seg_tree',1)
        node=ptree.getNode('IMM')
        WIDTH = 640
        HEIGHT =480;
        # One random 16-bit "video frame": stored flat (row i, column j at
        # index i*HEIGHT + j), then reshaped to (1, HEIGHT, WIDTH) below.
        currFrame=np.zeros(WIDTH*HEIGHT, dtype = np.int16);
        currTime=float(0);
        for i in range(0,WIDTH):
            for j in range(0,HEIGHT):
                currFrame[i*HEIGHT+j]=random.randint(0,255)
        currTime = float(0)
        startTime = Float32(currTime)
        endTime = Float32(currTime)
        dim = Float32Array(currTime)
        segment = Int16Array(currFrame)
        segment.resize([1,HEIGHT,WIDTH])
        shape = segment.getShape()
        # Write the segment, then verify the shape read back from the node
        # matches the shape written — i.e. dimension order is preserved.
        node.makeSegment(startTime, endTime, dim, segment)
        retShape = node.getShape()
        self.assertEqual(shape[0],retShape[0])
        self.assertEqual(shape[1],retShape[1])
        self.assertEqual(shape[2],retShape[2])

    def runTest(self):
        # Single entry point used when the case is run outside the suite().
        self.arrayDimensionOrder()
def suite():
    """Build the test suite containing the segment tests to run."""
    test_names = ['arrayDimensionOrder']
    return TestSuite([segmentsTests(name) for name in test_names])
| [
"[email protected]"
]
| |
690ca2306342d5c831a89a5cd5ffcc9790800260 | 7a2a58a440c5883a13ded31aecb03cf1923b1b15 | /backend/techh_18306/settings.py | 45e065cc57fa83fa5189cd5eec2b5d8982b76bab | []
| no_license | crowdbotics-apps/techh-18306 | f4a7086dee5ade70c2d19074d5bcad10f8d72ea0 | a39a881da2671bef242009eb58543625aab2bae5 | refs/heads/master | 2022-11-05T21:48:52.603814 | 2020-06-21T20:59:12 | 2020-06-21T20:59:12 | 273,982,664 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,785 | py | """
Django settings for techh_18306 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import environ

env = environ.Env()

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str("SECRET_KEY")

ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1

# Honor the scheme set by the upstream proxy/load balancer.
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.sites'
]

LOCAL_APPS = [
    'home',
    'users.apps.UsersConfig',
]

THIRD_PARTY_APPS = [
    'rest_framework',
    'rest_framework.authtoken',
    'rest_auth',
    'rest_auth.registration',
    'bootstrap4',
    'allauth',
    'allauth.account',
    'allauth.socialaccount',
    'allauth.socialaccount.providers.google',
    'django_extensions',
    'drf_yasg',
    # start fcm_django push notifications
    'fcm_django',
    # end fcm_django push notifications
]

INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'techh_18306.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'techh_18306.wsgi.application'

# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases

# SQLite is the fallback; overridden below when DATABASE_URL is set.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

if env.str("DATABASE_URL", default=None):
    DATABASES = {
        'default': env.db()
    }

# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/

STATIC_URL = '/static/'

# WhiteNoise serves collected static files directly from the app server.
MIDDLEWARE += ['whitenoise.middleware.WhiteNoiseMiddleware']

AUTHENTICATION_BACKENDS = (
    'django.contrib.auth.backends.ModelBackend',
    'allauth.account.auth_backends.AuthenticationBackend'
)

STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, 'static')
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'

# allauth / users
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "mandatory"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)

REST_AUTH_SERIALIZERS = {
    # Replace password reset serializer to fix 500 error
    "PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
    # Use custom serializer that has no username and matches web signup
    "REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}

# Custom user model
AUTH_USER_MODEL = "users.User"

# Outgoing mail is sent through SendGrid SMTP (credentials from the environment).
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True

# start fcm_django push notifications
FCM_DJANGO_SETTINGS = {
    "FCM_SERVER_KEY": env.str("FCM_SERVER_KEY", "")
}
# end fcm_django push notifications

if DEBUG:
    # output email to console instead of sending
    EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
| [
"[email protected]"
]
| |
21b3ef57b8af5a6359865fc56f601d779e0ab0cb | 27ff8115b114f5a78a0f4c9d1a4981df43d5beb6 | /Matplotlib/demo_00Mofandemo/19_animation_demo.py | e0fed7f44984f26b0ebb7667146806704055d518 | [
"MIT"
]
| permissive | Asurada2015/Python-Data-Analysis-Learning-Notes | 3da937504bc996c273da76b78baa814da3c2bc31 | 5697c8de3e5fd6562e77195b198b2d8ff836008e | refs/heads/master | 2021-06-26T01:08:00.788099 | 2020-04-08T07:46:49 | 2020-04-08T07:46:49 | 97,900,161 | 2 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,562 | py | import numpy as np
import numpy as np
from matplotlib import pyplot as plt
from matplotlib import animation

# Create a figure with a single axes; equivalent to fig = plt.figure()
# followed by ax = fig.add_subplot().
fig, ax = plt.subplots()

# Sample x from 0 to 2*pi with a step of 0.01.
x = np.arange(0, 2*np.pi, 0.01)
# ax.plot returns a list of Line2D artists; the trailing comma unpacks the
# single line from that one-element list.
line, = ax.plot(x, np.sin(x))


def animate(i):
    """Per-frame callback: shift the sine wave as the frame index i (0..99) grows."""
    line.set_ydata(np.sin(x + i/10.0))
    return line,


def init():
    """Return the initial state of the line, shown before the animation starts."""
    line.set_ydata(np.sin(x))
    return line,


# frames=100: animate() is called with i = 0..99, then the cycle repeats.
# interval=100: delay between frames in milliseconds.
# blit=False: redraw the whole figure every frame; set True to redraw only the
# artists returned by animate()/init() (faster, but not supported by every backend).
ani = animation.FuncAnimation(fig=fig, func=animate, frames=100, init_func=init,
                              interval=100, blit=False)

plt.show()
| [
"[email protected]"
]
| |
57cca23e4498f672ba7e88f78c1b7c23e7a610ed | d94b6845aeeb412aac6850b70e22628bc84d1d6d | /bam/task_specific/task.py | 8d63fc0feb22d7b92605c5e9906677fe480308dc | [
"CC-BY-4.0",
"Apache-2.0"
]
| permissive | ishine/google-research | 541aea114a68ced68736340e037fc0f8257d1ea2 | c1ae273841592fce4c993bf35cdd0a6424e73da4 | refs/heads/master | 2023-06-08T23:02:25.502203 | 2023-05-31T01:00:56 | 2023-05-31T01:06:45 | 242,478,569 | 0 | 0 | Apache-2.0 | 2020-06-23T01:55:11 | 2020-02-23T07:59:42 | Jupyter Notebook | UTF-8 | Python | false | false | 2,395 | py | # coding=utf-8
# Copyright 2023 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base class for tasks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import csv
import os
import tensorflow.compat.v1 as tf
class Example(object):
  """Base class for a single training/eval example belonging to a task."""
  # NOTE(review): `__metaclass__` is the Python 2 spelling; under Python 3 this
  # attribute is inert, so the class is not actually abstract — confirm whether
  # `abc.ABC` / `metaclass=abc.ABCMeta` was intended.
  __metaclass__ = abc.ABCMeta

  def __init__(self, task_name):
    # Name of the task this example belongs to.
    self.task_name = task_name
class Task(object):
  """Override this class to add a new task."""
  # NOTE(review): inert under Python 3 (see Example above).
  __metaclass__ = abc.ABCMeta

  def __init__(self, config, name, long_sequences=False):
    self.config = config  # run configuration; provides raw_data_dir() and debug
    self.name = name  # task name; also selects the task's data directory
    self.long_sequences = long_sequences  # whether this task uses long sequences

  def get_examples(self, split):
    """Load the examples for a dataset split (e.g. "train", "dev", "test")."""
    return self.load_data(split + ".tsv", split)

  def get_test_splits(self):
    # Subclasses may override to evaluate on additional splits.
    return ["test"]

  def load_data(self, fname, split):
    """Read a TSV file for the task and convert its rows into examples."""
    # In debug mode only the first 50 lines are read to keep iteration fast.
    examples = self._create_examples(
        read_tsv(os.path.join(self.config.raw_data_dir(self.name), fname),
                 max_lines=50 if self.config.debug else None),
        split)
    return examples

  @abc.abstractmethod
  def _create_examples(self, lines, split):
    """Convert raw TSV rows into Example objects for the given split."""
    pass

  @abc.abstractmethod
  def get_scorer(self):
    """Return the scorer used to evaluate predictions for this task."""
    pass

  @abc.abstractmethod
  def get_feature_specs(self):
    """Return the feature specifications for serialized examples."""
    pass

  @abc.abstractmethod
  def featurize(self, example, is_training):
    """Turn an Example into model-input features."""
    pass

  @abc.abstractmethod
  def get_prediction_module(self, bert_model, features, is_training,
                            percent_done):
    """Build the task-specific prediction head on top of the encoder."""
    pass

  def __repr__(self):
    return "Task(" + self.name + ")"
def read_tsv(input_file, quotechar=None, max_lines=None):
  """Read a tab-separated-values file into a list of rows.

  Args:
    input_file: path readable by tf.gfile.
    quotechar: optional quote character forwarded to csv.reader.
    max_lines: if truthy, stop after this many rows.

  Returns:
    A list of rows, each a list of column strings.
  """
  rows = []
  with tf.gfile.Open(input_file, "r") as f:
    for i, row in enumerate(csv.reader(f, delimiter="\t", quotechar=quotechar)):
      if max_lines and i >= max_lines:
        break
      rows.append(row)
  return rows
| [
"[email protected]"
]
| |
039644bfddd186986c491626e0b7127322ccbf58 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/identity/azure-identity/azure/identity/aio/_internal/get_token_mixin.py | 162e6a51da5795a3b797f3692d41a13ade964552 | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
]
| permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 4,791 | py | # ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import abc
import logging
import time
from typing import Any, Optional
from azure.core.credentials import AccessToken
from ..._constants import DEFAULT_REFRESH_OFFSET, DEFAULT_TOKEN_REFRESH_RETRY_DELAY
from ..._internal import within_credential_chain
_LOGGER = logging.getLogger(__name__)
class GetTokenMixin(abc.ABC):
    """Shared ``get_token`` logic: consult a cache first, then call the STS.

    Subclasses supply ``_acquire_token_silently`` (cache lookup / refresh-token
    redemption) and ``_request_token`` (network request); this mixin layers
    proactive-refresh and retry-throttling behavior on top of them.
    """

    def __init__(self, *args, **kwargs) -> None:
        # Unix timestamp of the most recent STS request; used to throttle
        # proactive refresh attempts.
        self._last_request_time = 0
        # https://github.com/python/mypy/issues/5887
        super(GetTokenMixin, self).__init__(*args, **kwargs)  # type: ignore

    @abc.abstractmethod
    async def _acquire_token_silently(self, *scopes: str, **kwargs) -> Optional[AccessToken]:
        """Attempt to acquire an access token from a cache or by redeeming a refresh token.

        :param str scopes: desired scopes for the access token. This method requires at least one scope.
            For more information about scopes, see
            https://learn.microsoft.com/azure/active-directory/develop/scopes-oidc.
        :return: An access token with the desired scopes if successful; otherwise, None.
        :rtype: ~azure.core.credentials.AccessToken or None
        """

    @abc.abstractmethod
    async def _request_token(self, *scopes: str, **kwargs) -> AccessToken:
        """Request an access token from the STS.

        :param str scopes: desired scopes for the access token. This method requires at least one scope.
            For more information about scopes, see
            https://learn.microsoft.com/azure/active-directory/develop/scopes-oidc.
        :return: An access token with the desired scopes.
        :rtype: ~azure.core.credentials.AccessToken
        """

    def _should_refresh(self, token: AccessToken) -> bool:
        # Refresh proactively only when the token is close to expiry (within
        # DEFAULT_REFRESH_OFFSET seconds) AND we haven't already hit the STS
        # within the last DEFAULT_TOKEN_REFRESH_RETRY_DELAY seconds.
        now = int(time.time())
        if token.expires_on - now > DEFAULT_REFRESH_OFFSET:
            return False
        if now - self._last_request_time < DEFAULT_TOKEN_REFRESH_RETRY_DELAY:
            return False
        return True

    async def get_token(
        self, *scopes: str, claims: Optional[str] = None, tenant_id: Optional[str] = None, **kwargs: Any
    ) -> AccessToken:
        """Request an access token for `scopes`.

        This method is called automatically by Azure SDK clients.

        :param str scopes: desired scopes for the access token. This method requires at least one scope.
            For more information about scopes, see
            https://learn.microsoft.com/azure/active-directory/develop/scopes-oidc.
        :keyword str claims: additional claims required in the token, such as those returned in a resource provider's
            claims challenge following an authorization failure.
        :keyword str tenant_id: optional tenant to include in the token request.
        :keyword bool enable_cae: indicates whether to enable Continuous Access Evaluation (CAE) for the requested
            token. Defaults to False.
        :return: An access token with the desired scopes.
        :rtype: ~azure.core.credentials.AccessToken
        :raises CredentialUnavailableError: the credential is unable to attempt authentication because it lacks
            required data, state, or platform support
        :raises ~azure.core.exceptions.ClientAuthenticationError: authentication failed. The error's ``message``
            attribute gives a reason.
        """
        if not scopes:
            raise ValueError('"get_token" requires at least one scope')
        try:
            token = await self._acquire_token_silently(*scopes, claims=claims, tenant_id=tenant_id, **kwargs)
            if not token:
                # Cache miss: request a fresh token and record the request time.
                self._last_request_time = int(time.time())
                token = await self._request_token(*scopes, claims=claims, tenant_id=tenant_id, **kwargs)
            elif self._should_refresh(token):
                # Token is valid but near expiry: refresh best-effort, keeping
                # the still-valid cached token if the refresh attempt fails.
                try:
                    self._last_request_time = int(time.time())
                    token = await self._request_token(*scopes, claims=claims, tenant_id=tenant_id, **kwargs)
                except Exception:  # pylint:disable=broad-except
                    pass
            # Within a credential chain, demote the success log to DEBUG to
            # reduce noise from credentials that are tried and skipped.
            _LOGGER.log(
                logging.DEBUG if within_credential_chain.get() else logging.INFO,
                "%s.get_token succeeded",
                self.__class__.__name__,
            )
            return token
        except Exception as ex:
            _LOGGER.log(
                logging.DEBUG if within_credential_chain.get() else logging.WARNING,
                "%s.get_token failed: %s",
                self.__class__.__name__,
                ex,
                exc_info=_LOGGER.isEnabledFor(logging.DEBUG),
            )
            raise
| [
"[email protected]"
]
| |
cb8a404a6028dd2e0c857ee4f7bbbaa73b7296c9 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03200/s271432048.py | bcd6a8482b6eb023426b8dd72faca95f03661394 | []
def count_swaps(colors):
    """Total moves needed to push every 'B' in *colors* to the rightmost slots.

    colors: sequence of 'B'/'W' characters.
    Each 'B' at index i must end up in one of the last b_count positions;
    the answer is the sum over all 'B's of (target index - current index).
    Integer arithmetic (``// 2``) replaces the original ``* 0.5`` float
    formula, which could lose precision for very long inputs.
    """
    n = len(colors)
    cnt_b = 0
    cnt = 0
    for i, ch in enumerate(colors):
        if ch == 'B':
            cnt_b += 1
            cnt += i
    # Sum of target indices (n-cnt_b .. n-1) minus sum of current indices.
    return (2 * n - cnt_b - 1) * cnt_b // 2 - cnt


if __name__ == '__main__':
    print(count_swaps(list(input())))
"[email protected]"
]
| |
59565e2453584531a21bcde645e428597918d80b | e714dfd95cb74f4e357af8d085e4dcaf8b7ecdf3 | /0x03-python-data_structures/10-divisible_by_2.py | 2d808d5249e958464d33eb26dcdab22c4db79367 | []
| no_license | MCavigli/holbertonschool-higher_level_programming_classic | 2cea769dc1fd39e90f6ef74cdb3191e2472b0282 | 870548f964a3deac4a41918e9c3d0bad6cd732b4 | refs/heads/master | 2022-03-06T09:33:56.839118 | 2019-09-27T06:04:34 | 2019-09-27T06:04:34 | 184,122,977 | 3 | 4 | null | null | null | null | UTF-8 | Python | false | false | 167 | py | #!/usr/bin/python3
def divisible_by_2(my_list=[]):
    """Return a list of booleans, True where the matching element is even.

    The result has the same length as my_list; element i is True when
    my_list[i] is divisible by 2. (The loop with a conditional ``append``
    is replaced by the equivalent comprehension.)
    """
    return [num % 2 == 0 for num in my_list]
| [
"[email protected]"
]
| |
3034898b4c6efff6c979aa9c058eaad3bdfe78e5 | d51b4c766661af65b4ee6e7c30f8cb4bdd8603e3 | /python/oop/method/iter.py | a612e99ce5f3e8c7cc0ed0aa897554f8d66889a4 | []
class Spam(object):
    """Iterator yielding the squares of ``start`` .. ``stop - 1``.

    >>> for i in Spam(1, 5):
    ...     print(i)
    ...
    1
    4
    9
    16
    >>> # iterate manually
    >>> X = Spam(1, 5)
    >>> I = iter(X)
    >>> next(I)
    1
    >>> next(I)
    4
    >>> next(I)
    9
    >>> next(I)
    16
    >>> next(I)
    Traceback (most recent call last):
    StopIteration
    """

    def __init__(self, start, stop):
        self.value = start
        self.stop = stop

    def __iter__(self):
        # The object is its own (single-pass) iterator.
        return self

    def __next__(self):
        """Return the next square and advance; raise StopIteration at stop.

        The original defined only a py2-style ``next`` method, so iteration
        was broken under Python 3; ``__next__`` fixes that.
        """
        if self.value == self.stop:
            raise StopIteration
        rv = self.value ** 2
        self.value += 1
        return rv

    # Python 2 compatibility: its iterator protocol calls .next().
    next = __next__
| [
"[email protected]"
]
| |
e85fdfeeaef3f45656935aec998ebc36b9928833 | 7f3a439c6ac921f07101759e932424792ee9e24e | /std_lib/asyncio/p01_coroutine_task/ch02_awaitables.py | 9d984214e2480f5b6935010ec261bc2252865bb2 | []
| no_license | multiscripter/python | 03f6299af0ee2153e9e3b696aa591e460a3d6a52 | eef7900b7a564e0850659fb59c1218be9c6829ea | refs/heads/master | 2023-04-13T05:24:29.434394 | 2021-04-26T12:11:59 | 2021-04-26T12:11:59 | 289,573,367 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,553 | py | import asyncio
# We say an object is an "awaitable" object if it can be used
# in an await expression.
# Many asyncio APIs are designed to accept awaitables.
# There are three main types of awaitable objects: Coroutine, Task and Future.


# 1. Coroutine.
# Coroutines are awaitable, so they can be "awaited" from other coroutines:
async def nested():
    return 42


async def run_nested():
    print(await nested())


asyncio.run(run_nested())


# 2. Task.
# Tasks are used to schedule coroutines to run concurrently.
async def run_task_nested():
    task = asyncio.create_task(nested())
    print(await task)


asyncio.run(run_task_nested())


# 3. Future.
# A Future is a special low-level awaitable object representing the
# eventual result of an asynchronous operation.
# When a Future object is awaited, it means the coroutine will wait
# until the Future is resolved somewhere else.
# Future objects in asyncio are needed to allow callback-based code
# to be used with async/await.
| [
"[email protected]"
]
| |
d5fc1a9a549e1a6ce9e5fdc380292c712031d489 | c364fca8ae4c896dee2c8b0dc545f4d73c8c8314 | /supervised_learning/0x0C-neural_style_transfer/8-neural_style.py | 75a2d1e77bbd4d3bcd6a460efacec79a4944e555 | [
"MIT"
]
| permissive | ledbagholberton/holbertonschool-machine_learning | 7672509d2dc1775bd6708430d244e8f4dd4cb169 | eaf23423ec0f412f103f5931d6610fdd67bcc5be | refs/heads/master | 2020-12-22T01:12:32.824436 | 2020-10-11T12:36:48 | 2020-10-11T12:36:48 | 236,623,497 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 13,770 | py | #!/usr/bin/env python3
"""class NST"""
import tensorflow as tf
import numpy as np
tf.enable_eager_execution()
class NST:
    """Neural style transfer (Gatys et al.) built on VGG19 features."""

    # Layers whose Gram matrices define the style representation.
    style_layers = ['block1_conv1', 'block2_conv1', 'block3_conv1',
                    'block4_conv1', 'block5_conv1']
    # Layer whose activations define the content representation.
    content_layer = 'block5_conv2'

    def __init__(self, style_image, content_image, alpha=1e4, beta=1):
        """Validate inputs, preprocess the images and build the feature model.

        style_image: numpy.ndarray of shape (h, w, 3), the style reference
        content_image: numpy.ndarray of shape (h, w, 3), the content reference
        alpha: non-negative weight for the content cost
        beta: non-negative weight for the style cost

        Raises TypeError for a wrong type/shape image or a negative weight.
        Sets style_image, content_image (preprocessed), alpha, beta, and
        (via load_model / generate_features) model, gram_style_features and
        content_feature.
        """
        # NOTE: `!=` replaces the original `is not` on int literals, which
        # relied on CPython small-int interning (SyntaxWarning on 3.8+).
        if (not isinstance(style_image, np.ndarray)
                or len(style_image.shape) != 3
                or style_image.shape[2] != 3):
            m1 = 'style_image must be a numpy.ndarray with shape (h, w, 3)'
            raise TypeError(m1)
        if (not isinstance(content_image, np.ndarray)
                or len(content_image.shape) != 3
                or content_image.shape[2] != 3):
            m2 = 'content_image must be a numpy.ndarray with shape (h, w, 3)'
            raise TypeError(m2)
        if alpha < 0:
            raise TypeError('alpha must be a non-negative number')
        if beta < 0:
            raise TypeError('beta must be a non-negative number')
        self.style_image = self.scale_image(style_image)
        self.content_image = self.scale_image(content_image)
        self.alpha = alpha
        self.beta = beta
        self.load_model()
        self.generate_features()

    @staticmethod
    def scale_image(image):
        """Rescale image to pixel values in [0, 1] with longest side 512 px.

        image: numpy.ndarray of shape (h, w, 3)
        Returns a tf.Tensor of shape (1, h_new, w_new, 3), resized with
        bicubic interpolation so that max(h_new, w_new) == 512 and the other
        side is scaled proportionately.
        Raises TypeError for invalid input.
        """
        if (not isinstance(image, np.ndarray)
                or len(image.shape) != 3
                or image.shape[2] != 3):
            m3 = 'image must be a numpy.ndarray with shape (h, w, 3)'
            raise TypeError(m3)
        max_dim = 512
        longest = max(image.shape[0], image.shape[1])
        scale = max_dim / longest
        new_h = round(image.shape[0] * scale)
        new_w = round(image.shape[1] * scale)
        image = np.expand_dims(image, axis=0)
        image = tf.image.resize_bicubic(image, (new_h, new_w))
        # Map 0-255 pixel values into [0, 1].
        image = tf.clip_by_value(image / 255, 0, 1)
        return image

    def load_model(self):
        """Load VGG19 and expose the style/content layer outputs as a model.

        Max pooling is swapped for average pooling (smoother transfer);
        saving and reloading with custom_objects is the simplest way to
        apply that substitution in tf1 Keras.
        """
        vgg_pre = tf.keras.applications.vgg19.VGG19(include_top=False,
                                                    weights='imagenet')
        custom_objects = {'MaxPooling2D': tf.keras.layers.AveragePooling2D}
        vgg_pre.save("base_model")
        vgg = tf.keras.models.load_model("base_model",
                                         custom_objects=custom_objects)
        for layer in vgg.layers:
            layer.trainable = False
        style_outputs = [vgg.get_layer(name).output
                         for name in self.style_layers]
        content_output = vgg.get_layer(self.content_layer).output
        # Style outputs first, content output last; the rest of the class
        # relies on this ordering.
        model_outputs = style_outputs + [content_output]
        self.model = tf.keras.models.Model(vgg.input, model_outputs)

    @staticmethod
    def gram_matrix(input_layer):
        """Return the (1, c, c) Gram matrix of a (1, h, w, c) feature tensor.

        Raises TypeError if input_layer is not a rank-4 tensor/variable.
        """
        if (not isinstance(input_layer, (tf.Tensor, tf.Variable)) or
                len(input_layer.shape) != 4):
            raise TypeError("input_layer must be a tensor of rank 4")
        channels = int(input_layer.shape[-1])
        a = tf.reshape(input_layer, [-1, channels])
        n = tf.shape(a)[0]
        gram = tf.matmul(a, a, transpose_a=True)
        gram = tf.reshape(gram, shape=[1, -1, channels])
        # Normalize by the number of spatial positions (h * w).
        return gram / tf.cast(n, tf.float32)

    def generate_features(self):
        """Extract and cache the style Gram matrices and the content feature.

        Sets:
            gram_style_features: Gram matrices of the style image's
                style-layer outputs.
            content_feature: content-layer output of the content image.
        """
        num_style_layers = len(self.style_layers)
        # VGG19 expects its own preprocessing of 0-255 RGB inputs.
        x = tf.keras.applications.vgg19.preprocess_input(self.style_image * 255)
        y = tf.keras.applications.vgg19.preprocess_input(self.content_image * 255)
        # Concatenating keeps the style-image outputs first, so the first
        # num_style_layers entries are the style features.
        model_outputs = self.model(x) + self.model(y)
        self.gram_style_features = [self.gram_matrix(layer) for layer in
                                    model_outputs[:num_style_layers]]
        self.content_feature = model_outputs[-1:]

    def layer_style_cost(self, style_output, gram_target):
        """Style cost for a single layer.

        style_output: (1, h, w, c) style-layer output of the generated image
        gram_target: (1, c, c) Gram matrix of the target style for the layer
        Returns the mean squared difference between the Gram matrices.
        """
        # NOTE(review): the original input validation was commented out (and
        # buggy); inputs are trusted to have matching channel counts. A stray
        # debug print("1") was also removed here.
        gram_style = self.gram_matrix(style_output)
        return tf.reduce_mean(tf.square(gram_style - gram_target))

    def style_cost(self, style_outputs):
        """Total style cost, each style layer weighted equally (sum of 1).

        style_outputs: list of tf.Tensor style outputs for the generated
            image, one per entry in self.style_layers.
        Raises TypeError if the list length does not match self.style_layers.
        """
        if len(style_outputs) != len(self.style_layers):
            # The original message left the "{l}" placeholder unformatted.
            raise TypeError(
                "style_outputs must be a list with a length of {}".format(
                    len(self.style_layers)))
        weight = 1 / len(style_outputs)
        style_cost = 0
        for layer in range(len(style_outputs)):
            style_cost += weight * self.layer_style_cost(
                style_outputs[layer], self.gram_style_features[layer])
        return style_cost

    def content_cost(self, content_output):
        """Content cost: MSE between content_output and the cached feature.

        content_output: tf.Tensor content-layer output for the generated
            image (same shape as self.content_feature).
        """
        return tf.reduce_mean(tf.square(content_output - self.content_feature))

    def total_cost(self, generated_image):
        """Total cost J = alpha * J_content + beta * J_style.

        generated_image: tf.Tensor of shape (1, nh, nw, 3), values in [0, 1]
        Returns (J, J_content, J_style).
        """
        preprocessed = tf.keras.applications.vgg19.preprocess_input(
            generated_image * 255)
        model_outputs = self.model(preprocessed)
        # Last output is the content layer; the rest are style layers.
        content_output = model_outputs[-1]
        J_content = self.content_cost(content_output)
        style_outputs = model_outputs[:-1]
        J_style = self.style_cost(style_outputs)
        J = self.alpha * J_content + self.beta * J_style
        return (J, J_content, J_style)

    def compute_grads(self, generated_image):
        """Gradients of the total cost w.r.t. the generated image.

        generated_image: tf.Tensor of shape (1, nh, nw, 3)
        Returns (gradients, J_total, J_content, J_style).
        """
        with tf.GradientTape() as tape:
            J, J_content, J_style = self.total_cost(generated_image)
        return (tape.gradient(J, generated_image), J, J_content, J_style)
| [
"[email protected]"
]
| |
53c448f8e4e441f92e2aec10e29d9ca24359595a | 9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97 | /sdBs/AllRun/pg_1412+299/sdB_PG_1412+299_lc.py | 1fc949095568bba26e541e43fa821db7addabfb7 | []
| no_license | tboudreaux/SummerSTScICode | 73b2e5839b10c0bf733808f4316d34be91c5a3bd | 4dd1ffbb09e0a599257d21872f9d62b5420028b0 | refs/heads/master | 2021-01-20T18:07:44.723496 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 346 | py | from gPhoton.gAperture import gAperture
def main():
    """Extract an NUV light curve for sdB_PG_1412+299 with gPhoton's gAperture."""
    # Radii/annulus are in degrees; stepsz is the time-bin size in seconds.
    gAperture(
        band="NUV",
        skypos=[213.598708, 29.684953],
        stepsz=30.,
        csvfile="/data2/fleming/GPHOTON_OUTPU/LIGHTCURVES/sdBs/sdB_PG_1412+299 /sdB_PG_1412+299_lc.csv",
        maxgap=1000.,
        overwrite=True,
        radius=0.00555556,
        annulus=[0.005972227, 0.0103888972],
        verbose=3,
    )


if __name__ == "__main__":
    main()
| [
"[email protected]"
]
| |
91f7b1e95f64d31541ee7a3ad43d81110ac198fd | 1d38c549c07f43cc26b7353ef95300b934eeed33 | /GUI/Edit/Specy.py | fb03a7097cd0a7523d4349c7642606a3422113c2 | []
| no_license | pooyagheyami/Adel3 | a6354fbc5aa56a9c38a8b724c8d22bea689380a1 | 29e257e19fd6914de0e60c303871321e457a858b | refs/heads/master | 2022-11-07T21:53:13.958369 | 2020-06-12T13:22:55 | 2020-06-12T13:22:55 | 271,803,177 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,690 | py | # -*- coding: utf-8 -*-
###########################################################################
## Python code generated with wxFormBuilder (version Jun 17 2015)
## http://www.wxformbuilder.org/
##
## PLEASE DO "NOT" EDIT THIS FILE!
###########################################################################
import wx
import wx.xrc
import wx.grid
import Database.MDataGet as DG
###########################################################################
## Class MyPanel2
###########################################################################
class MyPanel2 ( wx.Panel ):
    """Right-to-left wx panel with a labelled text field and a two-column
    title/attribute grid, plus cancel and save buttons.
    """

    def __init__( self, parent , txts , ccod ,stit):
        # txts: [label text, initial field text]; ccod: record code used to
        # look up stored attributes; stit: key selecting the title list.
        wx.Panel.__init__ ( self, parent, id = wx.ID_ANY, pos = wx.DefaultPosition, size = wx.Size( 273,256 ), style = wx.TAB_TRAVERSAL )
        self.SetLayoutDirection(2)
        self.ccod = ccod
        #print self.ccod
        #print stit
        # Load titles and any previously stored attribute values.
        self.iData = DG.GetData(u'',u'')
        self.itits = self.iData.gTitel(stit)
        self.ispec = self.iData.gSpcy(self.ccod)
        # One grid row per title.
        self.row = len(self.itits)
        Vsz1 = wx.BoxSizer( wx.VERTICAL )
        Hsz1 = wx.BoxSizer( wx.HORIZONTAL )
        self.txt1 = wx.StaticText( self, wx.ID_ANY, txts[0], wx.DefaultPosition, wx.DefaultSize, 0 )
        self.txt1.Wrap( -1 )
        Hsz1.Add( self.txt1, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
        self.fld1 = wx.TextCtrl( self, wx.ID_ANY, txts[1], wx.DefaultPosition, wx.DefaultSize, 0 )
        Hsz1.Add( self.fld1, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
        Vsz1.Add( Hsz1, 0, wx.EXPAND, 5 )
        Hsz2 = wx.BoxSizer( wx.HORIZONTAL )
        self.grid1 = wx.grid.Grid( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.STATIC_BORDER )
        # Grid
        self.grid1.CreateGrid( self.row, 2 )
        self.grid1.EnableEditing( True )
        self.grid1.EnableGridLines( True )
        self.grid1.EnableDragGridSize( False )
        self.grid1.SetMargins( 0, 0 )
        # Columns
        self.grid1.SetColSize( 0, 99 )
        self.grid1.SetColSize( 1, 134 )
        self.grid1.EnableDragColMove( False )
        self.grid1.EnableDragColSize( True )
        self.grid1.SetColLabelSize( 30 )
        self.grid1.SetColLabelValue( 0, u"عنوان" )
        self.grid1.SetColLabelValue( 1, u"مشخصه" )
        self.grid1.SetColLabelAlignment( wx.ALIGN_CENTRE, wx.ALIGN_CENTRE )
        # Rows
        self.grid1.EnableDragRowSize( True )
        self.grid1.SetRowLabelSize( 19 )
        self.grid1.SetRowLabelAlignment( wx.ALIGN_CENTRE, wx.ALIGN_CENTRE )
        # Label Appearance
        # Cell Defaults
        self.grid1.SetDefaultCellAlignment( wx.ALIGN_LEFT, wx.ALIGN_TOP )
        Hsz2.Add( self.grid1, 1, wx.ALL|wx.ALIGN_CENTER_VERTICAL|wx.EXPAND, 5 )
        Vsz1.Add( Hsz2, 1, wx.EXPAND|wx.ALIGN_CENTER_HORIZONTAL, 5 )
        Hsz3 = wx.BoxSizer( wx.HORIZONTAL )
        self.btn1 = wx.Button( self, wx.ID_ANY, u"انصراف", wx.DefaultPosition, wx.DefaultSize, 0 )
        Hsz3.Add( self.btn1, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
        self.btn2 = wx.Button( self, wx.ID_ANY, u"ثبت شود", wx.DefaultPosition, wx.DefaultSize, 0 )
        Hsz3.Add( self.btn2, 0, wx.ALL, 5 )
        Vsz1.Add( Hsz3, 0, wx.ALIGN_CENTER_HORIZONTAL, 5 )
        #self.iData = DG.GetData(u'',u'')
        #self.itits = self.iData.gTitel(stit)
        #self.ispec = self.iData.gSpcy(self.ccod)
        # Populate the grid from the loaded titles/attributes.
        self.lodtit()
        self.SetSizer( Vsz1 )
        self.Layout()
        # Tracks whether the user chose to save (set by the button handlers).
        self.svit = False
        # Connect Events
        self.btn1.Bind( wx.EVT_BUTTON, self.cancl )
        self.btn2.Bind( wx.EVT_BUTTON, self.aplyit )

    def __del__( self ):
        pass

    # Virtual event handlers, overide them in your derived class
    def cancl( self, event ):
        # Cancel button: mark as not-saved and close the parent window.
        self.svit = False
        q = self.GetParent()
        q.Close()

    def aplyit( self, event ):
        # Save button: mark as saved and close; the caller is expected to
        # read the edited values back via gettit().
        self.svit = True
        #self.sData = DG.SetData(u'',u'')
        #idata = self.gettit()
        #print idata
        #self.savtit(idata,self.sData)
        q = self.GetParent()
        q.Close()

    def lodtit(self):
        # Fill the grid: column 0 gets each title, column 1 the stored
        # attribute value whose key matches that title (if any).
        j = 0
        for t in self.itits:
            self.grid1.SetCellValue(j,0,t[0])
            for s in self.ispec:
                if s[1] in t:
                    self.grid1.SetCellValue(j,1,s[0])
            j = j + 1

    def gettit(self):
        # Collect (code, title-key, value) tuples for every non-empty row.
        self.spcy = []
        for i in range(len(self.itits)):
            ispc = self.grid1.GetCellValue(i,1)
            if ispc != '':
                self.spcy.append((self.ccod,self.itits[i][1],ispc))
                #print self.itits[i][1]
        #print self.spcy
        return self.spcy

    def RetRev(self):
        # Report whether the user pressed the save button.
        return self.svit
| [
"[email protected]"
]
| |
8ad7331a5f260ccc92a6d41401ca560c66646425 | 959a1bbaef979183ca0aacf03b748e87aa842197 | /devel/lib/python2.7/dist-packages/pose_graph_msgs/msg/__init__.py | 6847bed2af0e51e2757b804411e0db73023f67d1 | []
| no_license | rishabhgks/quad_ws | 723a95ccc825ddb13f2e27fb9e220cc7b03db6db | 19f7a57a585ca9799b3fd11f5b80d894c5b88167 | refs/heads/master | 2022-11-10T13:44:23.056196 | 2020-06-19T14:32:17 | 2020-06-19T14:32:17 | 250,417,696 | 0 | 0 | null | 2020-06-19T14:32:18 | 2020-03-27T02:03:07 | Makefile | UTF-8 | Python | false | false | 112 | py | from ._KeyedScan import *
from ._PoseGraph import *
from ._PoseGraphEdge import *
from ._PoseGraphNode import *
| [
"[email protected]"
]
| |
41f673888cfffe4c1aa80b2a5a347f7f943800df | 54ddb3f38cd09ac25213a7eb8743376fe778fee8 | /topic_02_syntax/examples/isdigit_isnumeric_isdecimal.py | e1596f3051a547ad360699482c643e2c9d57e600 | []
def check_str(my_str):
    """Print which numeric predicates (isnumeric/isdigit/isdecimal) accept my_str."""
    print(f"mystr: {my_str}")
    print(f"isnumeric: {my_str.isnumeric()}")
    print(f"isdigit: {my_str.isdigit()}")
    print(f"isdecimal: {my_str.isdecimal()}")
    print("-" * 50)


# isdecimal() ⊆ isdigit() ⊆ isnumeric()
if __name__ == '__main__':
    samples = (
        '½',
        'ⅠⅢⅧ',
        '⑩⑬㊿',
        '³',
        '🄀⒊⒏',
        '⓪③⑧',
        '038',
        '038',  # FULLWIDTH DIGIT
        '٠١٢٣٤',  # ARABIC-INDIC DIGIT
        '-38',
        '+38',
        '3_8',
    )
    for sample in samples:
        check_str(sample)
| [
"[email protected]"
]
| |
2f3807cf8f62e47c63a60a47b092def214c58a97 | 524756e5e03465584dcb2d04b8092fbe8840448a | /users/signals.py | 61f3f6acbaa87c5bbd5887d4336cc27d40a10557 | [
"Apache-2.0"
]
| permissive | areebbeigh/greenspace-demo | f9a241fb6c39a724b19094ccf21114647492dd81 | 0754f3b50e845bd5e50239361239f9b0b8aba42b | refs/heads/master | 2022-10-11T05:45:05.036097 | 2020-06-11T11:04:12 | 2020-06-11T11:41:38 | 271,524,789 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 300 | py | import sys
from django.conf import settings
def create_groups(sender, **kwargs):
    """Ensure the nursery-manager auth Group exists (idempotent)."""
    # Imported lazily: Django models are only loadable once apps are ready.
    from django.contrib.auth.models import Group

    group, created = Group.objects.get_or_create(name=settings.NURSERY_MGR_GROUP)
    message = 'Created new nursery group' if created else 'Nursery group already exists'
    print(message)
| [
"[email protected]"
]
| |
9ab1dae85e9196df975840e6c6d4551650fb7216 | 3adec884f06eabfe50d4ab3456123e04d02b02ff | /148. Sort List.py | a2e8d101bc501c111c899032f45427524ee85d4f | []
| no_license | windmzx/pyleetcode | c57ecb855c8e560dd32cf7cf14616be2f91ba50e | d0a1cb895e1604fcf70a73ea1c4b1e6b283e3400 | refs/heads/master | 2022-10-05T17:51:08.394112 | 2020-06-09T09:24:28 | 2020-06-09T09:24:28 | 250,222,719 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,486 | py | # Definition for singly-linked list.
class ListNode:
    """Node of a singly-linked list: a value plus the next-node pointer."""
    def __init__(self, x):
        self.val = x
        self.next = None
class Solution:
    """LeetCode 148: sort a singly-linked list in O(n log n) time."""

    def sortList(self, head: 'ListNode') -> 'ListNode':
        """Sort the list starting at ``head`` in ascending order via merge sort.

        Returns the head of the sorted list (None for an empty list).
        Uses ``is None`` checks (the original compared with ``!= None``) and
        a local sentinel object so the merge step no longer allocates a
        throwaway ListNode.
        """

        class _Sentinel:
            # Minimal stand-in head node for merging; only ``next`` is used.
            __slots__ = ('next',)

        def merge(left, right):
            # Merge two sorted sublists in place, reusing the existing nodes.
            sentinel = _Sentinel()
            sentinel.next = None
            tail = sentinel
            while left is not None and right is not None:
                if left.val < right.val:
                    tail.next = left
                    left = left.next
                else:
                    tail.next = right
                    right = right.next
                tail = tail.next
            # At most one sublist still has nodes; append it whole.
            tail.next = left if left is not None else right
            return sentinel.next

        def merge_sort(node):
            # A single node is already sorted.
            if node.next is None:
                return node
            # Slow/fast pointers find the midpoint; cut the list there.
            prev, slow, fast = None, node, node
            while fast is not None and fast.next is not None:
                prev = slow
                slow = slow.next
                fast = fast.next.next
            prev.next = None
            return merge(merge_sort(node), merge_sort(slow))

        if head is None:
            return None
        return merge_sort(head)
if __name__ == "__main__":
    # Build the list 3 -> 1 -> 4 -> 0 -> 2 -> 5.
    head = ListNode(3)
    tail = head
    for value in (1, 4, 0, 2, 5):
        tail.next = ListNode(value)
        tail = tail.next
    # Sort it and print the values, one per line.
    node = Solution().sortList(head)
    while node is not None:
        print(node.val)
        node = node.next
"[email protected]"
]
| |
313e25acc001a41e971e137fbcf6bee9f8d3a909 | 8e69eee9b474587925e22413717eb82e4b024360 | /v1.0.0.test/toontown/minigame/TwoDEnemyMgr.py | 2b350c515aa1c976abc5e5490424ee267024aaa9 | [
"MIT"
]
| permissive | TTOFFLINE-LEAK/ttoffline | afaef613c36dc3b70514ccee7030ba73c3b5045b | bb0e91704a755d34983e94288d50288e46b68380 | refs/heads/master | 2020-06-12T15:41:59.411795 | 2020-04-17T08:22:55 | 2020-04-17T08:22:55 | 194,348,185 | 5 | 4 | null | null | null | null | UTF-8 | Python | false | false | 1,535 | py | from panda3d.core import *
from direct.directnotify import DirectNotifyGlobal
from direct.showbase.DirectObject import DirectObject
from toontown.minigame import ToonBlitzGlobals
from toontown.minigame import TwoDEnemy
class TwoDEnemyMgr(DirectObject):
    """Owns the 2-D enemies of one ToonBlitz section: creation, play/pause
    transitions, and teardown."""
    notify = DirectNotifyGlobal.directNotify.newCategory('TwoDEnemyMgr')

    def __init__(self, section, enemyList):
        # section: the level section this manager belongs to.
        # enemyList: per-enemy suit attribute entries from the level data.
        self.section = section
        self.enemyList = enemyList
        self.load()

    def destroy(self):
        self.section = None
        # Destroy enemies one at a time, removing each from the list so the
        # list never holds an already-destroyed enemy.
        while len(self.enemies):
            enemy = self.enemies[0]
            enemy.destroy()
            self.enemies.remove(enemy)
        self.enemies = None
        return

    def load(self):
        # Parent node for all enemies; only created when there are enemies.
        if len(self.enemyList):
            self.enemiesNP = NodePath('Enemies')
            self.enemiesNP.reparentTo(self.section.sectionNP)
        self.enemies = []
        for index in xrange(len(self.enemyList)):
            # Enemy ids are namespaced by section so they are unique.
            enemyId = self.section.getSectionizedId(index)
            suitAttribs = self.enemyList[index]
            newEnemy = TwoDEnemy.TwoDEnemy(self, enemyId, suitAttribs)
            newEnemy.suit.reparentTo(self.enemiesNP)
            self.enemies.append(newEnemy)

    def enterPlay(self, elapsedTime):
        # Start every enemy's motion, synchronized to the game clock offset.
        for enemy in self.enemies:
            enemy.start(elapsedTime)

    def exitPlay(self):
        pass

    def enterPause(self):
        for enemy in self.enemies:
            enemy.enterPause()

    def exitPause(self):
        for enemy in self.enemies:
            enemy.exitPause()
"[email protected]"
]
| |
10e3f86ff0569d076876a2d1d0c9d815c172bf83 | 811bf15a5bad488284300b6016c8b77f1e2daf24 | /rllib/core/testing/tf/bc_module.py | cca3a42f4eeb2f0f78c637b944e16f7057e4c21d | [
"BSD-3-Clause",
"MIT",
"Apache-2.0"
]
| permissive | justinwyang/ray | 8c102e00321e669b7e78488d38329e82195e7b58 | 6c356296a01ebf3d8ad0cab6058fb8c03ccbf8f6 | refs/heads/master | 2023-04-28T15:57:24.167579 | 2023-04-22T08:28:06 | 2023-04-22T08:28:06 | 172,777,393 | 0 | 0 | Apache-2.0 | 2020-12-04T02:39:57 | 2019-02-26T19:37:12 | Python | UTF-8 | Python | false | false | 5,312 | py | import tensorflow as tf
import tensorflow_probability as tfp
from typing import Any, Mapping
from ray.rllib.core.rl_module.rl_module import RLModule, RLModuleConfig
from ray.rllib.core.rl_module.marl_module import (
MultiAgentRLModule,
MultiAgentRLModuleConfig,
)
from ray.rllib.core.rl_module.tf.tf_rl_module import TfRLModule
from ray.rllib.core.models.specs.typing import SpecType
from ray.rllib.policy.sample_batch import SampleBatch
from ray.rllib.utils.annotations import override
from ray.rllib.utils.nested_dict import NestedDict
class DiscreteBCTFModule(TfRLModule):
    """Behavioral-cloning RLModule for discrete actions (TF/Keras).

    A single MLP maps observations to action logits. Inference and
    exploration act greedily (Deterministic over argmax); training
    returns a Categorical over the logits.
    """

    def __init__(self, config: RLModuleConfig) -> None:
        super().__init__(config)

    def setup(self):
        # Build the obs -> logits MLP from the space sizes and the first
        # entry of `fcnet_hiddens` in the model config.
        input_dim = self.config.observation_space.shape[0]
        hidden_dim = self.config.model_config_dict["fcnet_hiddens"][0]
        output_dim = self.config.action_space.n
        layers = []
        layers.append(tf.keras.Input(shape=(input_dim,)))
        layers.append(tf.keras.layers.ReLU())
        layers.append(tf.keras.layers.Dense(hidden_dim))
        layers.append(tf.keras.layers.ReLU())
        layers.append(tf.keras.layers.Dense(output_dim))
        self.policy = tf.keras.Sequential(layers)
        self._input_dim = input_dim

    @override(RLModule)
    def output_specs_exploration(self) -> SpecType:
        return ["action_dist"]

    @override(RLModule)
    def output_specs_inference(self) -> SpecType:
        return ["action_dist"]

    @override(RLModule)
    def output_specs_train(self) -> SpecType:
        return ["action_dist"]

    @override(RLModule)
    def _forward_inference(self, batch: NestedDict) -> Mapping[str, Any]:
        obs = batch[SampleBatch.OBS]
        action_logits = self.policy(obs)
        # Greedy action: Deterministic distribution over the argmax.
        action_logits_inference = tf.argmax(action_logits, axis=-1)
        action_dist = tfp.distributions.Deterministic(action_logits_inference)
        return {"action_dist": action_dist}

    @override(RLModule)
    def _forward_exploration(self, batch: NestedDict) -> Mapping[str, Any]:
        # For BC, exploration is identical to greedy inference.
        return self._forward_inference(batch)

    @override(RLModule)
    def _forward_train(self, batch: NestedDict) -> Mapping[str, Any]:
        obs = batch[SampleBatch.OBS]
        action_logits = self.policy(obs)
        action_dist = tfp.distributions.Categorical(logits=action_logits)
        return {"action_dist": action_dist}

    @override(RLModule)
    def get_state(self) -> Mapping[str, Any]:
        return {"policy": self.policy.get_weights()}

    @override(RLModule)
    def set_state(self, state: Mapping[str, Any]) -> None:
        self.policy.set_weights(state["policy"])
class BCTfRLModuleWithSharedGlobalEncoder(TfRLModule):
    """BC module whose global-observation encoder is shared across agents.

    ``encoder`` embeds ``obs["global"]``; the embedding is concatenated
    with ``obs["local"]`` and fed through a per-agent policy head that
    produces action logits.
    """

    def __init__(self, encoder, local_dim, hidden_dim, action_dim):
        super().__init__()
        self.encoder = encoder
        self.policy_head = tf.keras.Sequential(
            [
                tf.keras.layers.Dense(
                    hidden_dim + local_dim,
                    input_shape=(hidden_dim + local_dim,),
                    activation="relu",
                ),
                tf.keras.layers.Dense(hidden_dim, activation="relu"),
                tf.keras.layers.Dense(action_dim),
            ]
        )

    @override(RLModule)
    def _default_input_specs(self):
        return [("obs", "global"), ("obs", "local")]

    @override(RLModule)
    def _forward_inference(self, batch):
        return self._common_forward(batch)

    @override(RLModule)
    def _forward_exploration(self, batch):
        return self._common_forward(batch)

    @override(RLModule)
    def _forward_train(self, batch):
        return self._common_forward(batch)

    def _common_forward(self, batch):
        # Shared path for inference / exploration / train.
        obs = batch["obs"]
        global_enc = self.encoder(obs["global"])
        policy_in = tf.concat([global_enc, obs["local"]], axis=-1)
        action_logits = self.policy_head(policy_in)
        # Bug fix: `tf.distributions` does not exist in TF2 — use the
        # tensorflow_probability Categorical, consistent with
        # DiscreteBCTFModule above.
        return {"action_dist": tfp.distributions.Categorical(logits=action_logits)}
class BCTfMultiAgentModuleWithSharedEncoder(MultiAgentRLModule):
    """Multi-agent container that builds one shared global encoder and
    hands it to every per-agent module it constructs."""

    def __init__(self, config: MultiAgentRLModuleConfig) -> None:
        super().__init__(config)

    def setup(self):
        # constructing the global encoder based on the observation_space of the first
        # module
        module_specs = self.config.modules
        module_spec = next(iter(module_specs.values()))
        global_dim = module_spec.observation_space["global"].shape[0]
        hidden_dim = module_spec.model_config_dict["fcnet_hiddens"][0]
        shared_encoder = tf.keras.Sequential(
            [
                tf.keras.Input(shape=(global_dim,)),
                tf.keras.layers.ReLU(),
                tf.keras.layers.Dense(hidden_dim),
            ]
        )
        # Each sub-module receives the same encoder instance, so its
        # weights are shared across all agents.
        for module_id, module_spec in module_specs.items():
            self._rl_modules[module_id] = module_spec.module_class(
                encoder=shared_encoder,
                local_dim=module_spec.observation_space["local"].shape[0],
                hidden_dim=hidden_dim,
                action_dim=module_spec.action_space.n,
            )

    def serialize(self):
        # TODO (Kourosh): Implement when needed.
        raise NotImplementedError

    def deserialize(self, data):
        # TODO (Kourosh): Implement when needed.
        raise NotImplementedError
| [
"[email protected]"
]
| |
014fc63655d75805b73e1b4f0ca4fe2e2b051844 | 650b516b1214c4d44fd6f04941e87e28e9049cde | /addons/plugin.video.fanfilm/resources/lib/libraries/cache.py | 7d58a18868df7128b7866218d87032edd9459070 | []
| no_license | MultiWu/build | b85cc45a33b871f4ade58de8457fcd094761f385 | f50a64f674b6499668e0a5758fe0879b016f5c38 | refs/heads/master | 2022-10-31T20:35:53.382826 | 2019-12-20T22:50:16 | 2019-12-20T22:50:16 | 228,462,984 | 0 | 3 | null | 2022-10-07T08:47:18 | 2019-12-16T19:46:39 | Python | UTF-8 | Python | false | false | 8,728 | py | # -*- coding: utf-8 -*-
"""
Covenant Add-on
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import ast
import hashlib
import re
import time
from resources.lib.libraries import control
try:
from sqlite3 import dbapi2 as db, OperationalError
except ImportError:
from pysqlite2 import dbapi2 as db, OperationalError
"""
This module is used to get/set cache for every action done in the system
"""
cache_table = 'cache'
def get(function, duration, *args):
    # type: (function, int, object) -> object or None
    """
    Gets cached value for provided function with optional arguments, or executes and stores the result
    :param function: Function to be executed
    :param duration: Duration of validity of cache in hours
    :param args: Optional arguments for the provided function
    """
    try:
        key = _hash_function(function, args)
        cache_result = cache_get(key)

        if cache_result:
            if _is_cache_valid(cache_result['date'], duration):
                return ast.literal_eval(cache_result['value'].encode('utf-8'))

        fresh_result = repr(function(*args))
        if not fresh_result:
            # Fresh call produced nothing: fall back to the stale entry,
            # parsed the same way as a valid hit. (Bug fix: the original
            # returned the raw DB row dict here, giving callers an
            # inconsistent return type.)
            if cache_result:
                return ast.literal_eval(cache_result['value'].encode('utf-8'))
            return None

        cache_insert(key, fresh_result)
        return ast.literal_eval(fresh_result.encode('utf-8'))
    except Exception:
        return None
def timeout(function, *args):
    """Return the Unix timestamp at which function(*args) was cached,
    or None when no cache row exists (or lookup fails)."""
    try:
        row = cache_get(_hash_function(function, args))
        return int(row['date'])
    except Exception:
        return None
def bennu_download_get(function, timeout, *args, **table):
    """Cache wrapper: return the stored response for (function, args)
    from the given table while it is younger than *timeout* hours,
    otherwise call the function, store its repr, and return the result.
    Python 2 code; every stage is wrapped in a best-effort bare except.
    """
    try:
        response = None
        # Cache key: stripped function name + md5 of the arguments.
        f = repr(function)
        f = re.sub('.+\smethod\s|.+function\s|\sat\s.+|\sof\s.+', '', f)
        a = hashlib.md5()
        for i in args: a.update(str(i))
        a = str(a.hexdigest())
    except:
        pass
    try:
        table = table['table']
    except:
        table = 'rel_list'  # default cache table
    try:
        control.makeFile(control.dataPath)
        dbcon = db.connect(control.cacheFile)
        dbcur = dbcon.cursor()
        dbcur.execute("SELECT * FROM %s WHERE func = '%s' AND args = '%s'" % (table, f, a))
        match = dbcur.fetchone()
        # NOTE(review): eval() of DB content — acceptable only because the
        # rows are written by this same function below; do not feed this
        # table from untrusted sources.
        response = eval(match[2].encode('utf-8'))
        t1 = int(match[3])
        t2 = int(time.time())
        # Cache hit is fresh while its age in hours is below `timeout`.
        update = (abs(t2 - t1) / 3600) >= int(timeout)
        if update == False:
            return response
    except:
        pass
    try:
        r = function(*args)
        # Fresh call failed/empty: prefer the stale cached response.
        if (r == None or r == []) and not response == None:
            return response
        elif (r == None or r == []):
            return r
    except:
        return
    try:
        r = repr(r)
        t = int(time.time())
        dbcur.execute(
            "CREATE TABLE IF NOT EXISTS %s (""func TEXT, ""args TEXT, ""response TEXT, ""added TEXT, ""UNIQUE(func, args)"");" % table)
        dbcur.execute("DELETE FROM %s WHERE func = '%s' AND args = '%s'" % (table, f, a))
        dbcur.execute("INSERT INTO %s Values (?, ?, ?, ?)" % table, (f, a, r, t))
        dbcon.commit()
    except:
        pass
    try:
        # Round-trip through repr/eval so the caller gets the same shape
        # whether the value came from cache or a fresh call.
        return eval(r.encode('utf-8'))
    except:
        pass
def cache_get(key):
    # type: (str, str) -> dict or None
    """Fetch the cache row for *key* as a dict, or None when the cache
    table does not exist yet."""
    try:
        cur = _get_connection_cursor()
        query = "SELECT * FROM %s WHERE key = ?" % cache_table
        cur.execute(query, [key])
        return cur.fetchone()
    except OperationalError:
        return None
def cache_insert(key, value):
    # type: (str, str) -> None
    """Upsert the cached *value* for *key*, stamped with the current
    Unix time. Creates the cache table on first use."""
    cursor = _get_connection_cursor()
    now = int(time.time())
    cursor.execute(
        "CREATE TABLE IF NOT EXISTS %s (key TEXT, value TEXT, date INTEGER, UNIQUE(key))"
        % cache_table
    )
    update_result = cursor.execute(
        "UPDATE %s SET value=?,date=? WHERE key=?"
        % cache_table, (value, now, key))
    # Bug fix: `rowcount is 0` compared int identity, which only worked
    # via CPython's small-int cache and warns on modern interpreters.
    if update_result.rowcount == 0:
        cursor.execute(
            "INSERT INTO %s Values (?, ?, ?)"
            % cache_table, (key, value, now)
        )
    cursor.connection.commit()
def cache_clear():
    """Drop the main cache tables and reclaim file space."""
    try:
        cursor = _get_connection_cursor()
        for t in [cache_table, 'rel_list', 'rel_lib']:
            try:
                cursor.execute("DROP TABLE IF EXISTS %s" % t)
                cursor.execute("VACUUM")
                # Bug fix: sqlite3 cursors have no commit(); the original
                # `cursor.commit()` raised AttributeError which the bare
                # except swallowed, so nothing was ever committed here.
                cursor.connection.commit()
            except:
                pass
    except:
        pass
def cache_clear_meta():
    """Drop the metadata cache table and reclaim file space."""
    try:
        cursor = _get_connection_cursor_meta()
        for t in ['meta']:
            try:
                cursor.execute("DROP TABLE IF EXISTS %s" % t)
                cursor.execute("VACUUM")
                # Bug fix: commit on the connection — sqlite3 cursors
                # have no commit() (the original call silently failed).
                cursor.connection.commit()
            except:
                pass
    except:
        pass
def cache_clear_providers():
    """Drop the provider cache tables and reclaim file space."""
    try:
        cursor = _get_connection_cursor_providers()
        for t in ['rel_src', 'rel_url']:
            try:
                cursor.execute("DROP TABLE IF EXISTS %s" % t)
                cursor.execute("VACUUM")
                # Bug fix: commit on the connection — sqlite3 cursors
                # have no commit() (the original call silently failed).
                cursor.connection.commit()
            except:
                pass
    except:
        pass
def cache_clear_search():
    """Drop the search-history tables and reclaim file space."""
    try:
        cursor = _get_connection_cursor_search()
        for t in ['tvshow', 'movies']:
            try:
                cursor.execute("DROP TABLE IF EXISTS %s" % t)
                cursor.execute("VACUUM")
                # Bug fix: commit on the connection — sqlite3 cursors
                # have no commit() (the original call silently failed).
                cursor.connection.commit()
            except:
                pass
    except:
        pass
def cache_clear_all():
    """Clear the main, metadata and provider caches in one call."""
    for clear in (cache_clear, cache_clear_meta, cache_clear_providers):
        clear()
def _get_connection_cursor():
    """Cursor on the main cache database."""
    return _get_connection().cursor()
def _get_connection():
    """Open the main cache DB (creating the data dir if needed); rows
    come back as dicts via _dict_factory."""
    control.makeFile(control.dataPath)
    connection = db.connect(control.cacheFile)
    connection.row_factory = _dict_factory
    return connection
def _get_connection_cursor_meta():
    """Cursor on the metadata cache database."""
    return _get_connection_meta().cursor()
def _get_connection_meta():
    """Open the metadata cache DB; rows come back as dicts."""
    control.makeFile(control.dataPath)
    connection = db.connect(control.metacacheFile)
    connection.row_factory = _dict_factory
    return connection
def _get_connection_cursor_providers():
    """Cursor on the provider cache database."""
    return _get_connection_providers().cursor()
def _get_connection_providers():
    """Open the provider cache DB; rows come back as dicts."""
    control.makeFile(control.dataPath)
    connection = db.connect(control.providercacheFile)
    connection.row_factory = _dict_factory
    return connection
def _get_connection_cursor_search():
    """Cursor on the search-history database."""
    return _get_connection_search().cursor()
def _get_connection_search():
    """Open the search-history DB; rows come back as dicts."""
    control.makeFile(control.dataPath)
    connection = db.connect(control.searchFile)
    connection.row_factory = _dict_factory
    return connection
def _dict_factory(cursor, row):
d = {}
for idx, col in enumerate(cursor.description):
d[col[0]] = row[idx]
return d
def _hash_function(function_instance, *args):
    """Cache key: stripped function name plus md5 of the arguments."""
    name = _get_function_name(function_instance)
    digest = _generate_md5(args)
    return name + digest
def _get_function_name(function_instance):
return re.sub('.+\smethod\s|.+function\s|\sat\s.+|\sof\s.+', '', repr(function_instance))
def _generate_md5(*args):
    """md5 hex digest of str() of each argument, used as the args part
    of a cache key. Python 2 only: hashlib.update() is fed str here,
    which Python 3 would reject (needs bytes)."""
    md5_hash = hashlib.md5()
    # Side-effecting list comprehension (legacy style) feeding each arg.
    [md5_hash.update(str(arg)) for arg in args]
    return str(md5_hash.hexdigest())
def _is_cache_valid(cached_time, cache_timeout):
now = int(time.time())
diff = now - cached_time
return (cache_timeout * 3600) > diff
def cache_version_check():
    """On add-on upgrade, wipe every cache and show the user a notice."""
    if not _find_cache_version():
        return
    cache_clear()
    cache_clear_meta()
    cache_clear_providers()
    control.infoDialog(control.lang(32057).encode('utf-8'), sound=True, icon='INFO')
def _find_cache_version():
    """Return True when the stored add-on version differs from the
    currently installed one (persisting the new version), else False.
    Any failure reading the installed version yields False."""
    import os
    versionFile = os.path.join(control.dataPath, 'cache.v')
    try:
        with open(versionFile, 'rb') as fh:
            oldVersion = fh.read()
    except:
        oldVersion = '0'  # first run: no version file yet
    try:
        curVersion = control.addon('plugin.video.fanfilm').getAddonInfo('version')
        # NOTE(review): the file is read in binary mode, so on Python 3
        # oldVersion would be bytes and never equal the str curVersion;
        # harmless on the Python 2 Kodi runtime this targets — confirm
        # before porting.
        if oldVersion != curVersion:
            with open(versionFile, 'wb') as fh:
                fh.write(curVersion)
            return True
        else:
            return False
    except:
        return False
| [
"[email protected]"
]
| |
553b046fb1e362b9966b86843cf88a190f17cb17 | 544be854639d58b111c345a55597b0580e8106d8 | /example/admin.py | 77862791f8be12447236b147803759aedf1c3d46 | []
| no_license | arineto/django-multi-tenant-example | 152abde09e5362e0ecaab9402d03aa23228f4bf3 | ac74c1fa5ed3a4e3e70758c84398bcc0ec061b30 | refs/heads/master | 2021-01-20T20:44:45.315471 | 2016-07-11T13:48:09 | 2016-07-11T13:48:09 | 63,065,891 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 246 | py | from django.contrib import admin
from .models import Item
from multi_tenant.models import Theme
from multi_tenant.models import Tenant
# Register your models here.
# Expose the demo Item plus the multi-tenant Theme/Tenant models in the
# Django admin with default ModelAdmin options.
admin.site.register(Item)
admin.site.register(Theme)
admin.site.register(Tenant)
| [
"[email protected]"
]
| |
a35078d691fddc504f227125b99e73b18a3a6ad3 | 8f61d6ae3a80eb6c6d45aab55d9e73df402446fe | /kate3/logger/admin.py | 39df6a9788b2b169f0b040a71d1f3ca4d4fa7602 | [
"MIT"
]
| permissive | katemsu/kate_website | 2047314598e215b0e8b3d3d71b21b4c70df36213 | 9e6912156fe7ce07a13f54009ff1823b3558784d | refs/heads/master | 2021-01-16T20:25:16.264407 | 2013-11-02T20:14:40 | 2013-11-02T20:14:40 | 14,073,589 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 245 | py | from django.contrib import admin
from logger.models import Entry
class EntryAdmin(admin.ModelAdmin):
    # Changelist shows the entry, its action and creation time;
    # sidebar filters on action and creation date.
    list_display = ('__unicode__', 'action', 'created_at',)
    list_filter = ('action', 'created_at',)
admin.site.register(Entry, EntryAdmin) | [
"[email protected]"
]
| |
04b264c1c72261ff9d515d75793a43e17fa06c3b | c223a3a88aad65fd48cef0d5cc40db5bf383033a | /mopidy/utils/process.py | 11dafa8af19df78bf86ed1a1de01a867e4361409 | [
"Apache-2.0"
]
| permissive | bok/mopidy | 4cfb19a7c55bad2f45f57c76ec9db550a32aaf27 | 71d791291b83728f38a4e401a0c68180f403b6a9 | refs/heads/master | 2020-12-25T00:49:54.009159 | 2011-01-22T13:33:47 | 2011-01-22T13:33:47 | 1,455,277 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,334 | py | import logging
import multiprocessing
import multiprocessing.dummy
from multiprocessing.reduction import reduce_connection
import pickle
import gobject
gobject.threads_init()
from mopidy import SettingsError
logger = logging.getLogger('mopidy.utils.process')
def pickle_connection(connection):
    """Serialise a multiprocessing Connection so it can travel through a
    queue and be rebuilt with unpickle_connection()."""
    reduced = reduce_connection(connection)
    return pickle.dumps(reduced)
def unpickle_connection(pickled_connection):
    """Inverse of pickle_connection().

    From http://stackoverflow.com/questions/1446004
    """
    rebuild, rebuild_args = pickle.loads(pickled_connection)
    return rebuild(*rebuild_args)
class BaseProcess(multiprocessing.Process):
    """Process base class that funnels fatal errors back to Mopidy core.

    Subclasses implement run_inside_try(); any exception escaping it is
    logged and translated into an 'exit' message on core_queue with a
    status code (0 interrupt, 1 settings, 2 import, 3 unknown).
    """

    def __init__(self, core_queue):
        super(BaseProcess, self).__init__()
        self.core_queue = core_queue

    def run(self):
        logger.debug(u'%s: Starting process', self.name)
        try:
            self.run_inside_try()
        except KeyboardInterrupt:
            logger.info(u'Interrupted by user')
            self.exit(0, u'Interrupted by user')
        except SettingsError as e:
            logger.error(e.message)
            self.exit(1, u'Settings error')
        except ImportError as e:
            logger.error(e)
            self.exit(2, u'Import error')
        except Exception as e:
            logger.exception(e)
            self.exit(3, u'Unknown error')

    def run_inside_try(self):
        # Subclass hook: the actual process body.
        raise NotImplementedError

    def destroy(self):
        self.terminate()

    def exit(self, status=0, reason=None):
        # Tell core we are going away, then tear ourselves down.
        self.core_queue.put({'to': 'core', 'command': 'exit',
            'status': status, 'reason': reason})
        self.destroy()
class BaseThread(multiprocessing.dummy.Process):
    """Thread counterpart of BaseProcess (multiprocessing.dummy wraps
    threading): same error-to-core_queue funnelling, but runs as a
    daemon thread and has a no-op destroy()."""

    def __init__(self, core_queue):
        super(BaseThread, self).__init__()
        self.core_queue = core_queue
        # No thread should block process from exiting
        self.daemon = True

    def run(self):
        logger.debug(u'%s: Starting thread', self.name)
        try:
            self.run_inside_try()
        except KeyboardInterrupt:
            logger.info(u'Interrupted by user')
            self.exit(0, u'Interrupted by user')
        except SettingsError as e:
            logger.error(e.message)
            self.exit(1, u'Settings error')
        except ImportError as e:
            logger.error(e)
            self.exit(2, u'Import error')
        except Exception as e:
            logger.exception(e)
            self.exit(3, u'Unknown error')

    def run_inside_try(self):
        # Subclass hook: the actual thread body.
        raise NotImplementedError

    def destroy(self):
        # Threads cannot be terminated; subclasses override to stop work.
        pass

    def exit(self, status=0, reason=None):
        self.core_queue.put({'to': 'core', 'command': 'exit',
            'status': status, 'reason': reason})
        self.destroy()
class GObjectEventThread(BaseThread):
    """
    A GObject event loop which is shared by all Mopidy components that uses
    libraries that need a GObject event loop, like GStreamer and D-Bus.

    Should be started by Mopidy's core and used by
    :mod:`mopidy.output.gstreamer`, :mod:`mopidy.frontend.mpris`, etc.
    """

    def __init__(self, core_queue):
        super(GObjectEventThread, self).__init__(core_queue)
        self.name = u'GObjectEventThread'
        self.loop = None

    def run_inside_try(self):
        # Bug fix: MainLoop().run() blocks and returns None, so the
        # original `self.loop = gobject.MainLoop().run()` left self.loop
        # as None and destroy() crashed on None.quit(). Keep the loop
        # object first, then block in run().
        self.loop = gobject.MainLoop()
        self.loop.run()

    def destroy(self):
        self.loop.quit()
        super(GObjectEventThread, self).destroy()
| [
"[email protected]"
]
| |
e75f3424076f2e5d15b48b4dd8a7d95a4cab2052 | 4d99350a527a88110b7bdc7d6766fc32cf66f211 | /OpenGLCffi/EGL/EXT/EXT/stream_consumer_egloutput.py | 92220cad6445f2adbe03b6b49917df998efe4191 | [
"MIT"
]
| permissive | cydenix/OpenGLCffi | e790ef67c2f6c9877badd5c38b7d58961c8739cd | c78f51ae5e6b655eb2ea98f072771cf69e2197f3 | refs/heads/master | 2021-01-11T07:31:10.591188 | 2017-04-17T11:04:55 | 2017-04-17T11:04:55 | 80,312,084 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 146 | py | from OpenGLCffi.EGL import params
@params(api='egl', prms=['dpy', 'stream', 'layer'])
def eglStreamConsumerOutputEXT(dpy, stream, layer):
pass
| [
"[email protected]"
]
| |
fdd1c5c9ed081efbfccfd7332b2d1dc9d7131567 | e1b8ae703c84f6a06dd3a3072cfa9afb7f9ebce7 | /settings/base.py | 1efd5df312d1c81f3a2de70992ff31811d366c08 | []
| no_license | Code-Institute-Submissions/renanclothestore | 95a2a161f0f0046e328cb639a88ddaf6afaceae5 | ea295d1643b06a1f5cdbdbafcdbe767d2c286648 | refs/heads/master | 2020-03-26T12:34:21.946183 | 2018-08-13T21:40:09 | 2018-08-13T21:40:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,640 | py | """
Django settings for carapp project.
Generated by 'django-admin startproject' using Django 1.11.13.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'rwl@k11m(xzyh+&^6=#k36hkev0)7s14n1mi7j6$!a9d7o$dh%'
ALLOWED_HOSTS = ["renanclothestore.herokuapp.com", "127.0.0.1", "localhost"]
SITE_ID = 4
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django_forms_bootstrap',
'paypal.standard.ipn',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'django.contrib.flatpages',
'carstore.apps.CarstoreConfig',
'accounts',
'paypal_store',
'products',
]
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'accounts.backends.EmailAuth',
)
LOGIN_URL = '/login/'
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
]
ROOT_URLCONF = 'carapp.urls'
AUTH_USER_MODEL = 'accounts.User'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'carapp.wsgi.application'
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
| [
"[email protected]"
]
| |
6588bb3ceecc40597d34c01833a1ac05336e8806 | f1961c86e6da14f35c21d7235f4fc8a89fabdcad | /DailyProgrammer/DP20130530B.py | 294d74df0d02a55cccef69c8006472e34b7d94ec | [
"MIT"
]
| permissive | DayGitH/Python-Challenges | d4930bdd85cd1a977d8f6192775ca956a375fcde | bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf | refs/heads/master | 2021-01-17T13:01:03.784523 | 2018-06-29T23:49:04 | 2018-06-29T23:49:04 | 58,497,683 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,729 | py | """
[05/30/13] Challenge #126 [Intermediate] Perfect P'th Powers
https://www.reddit.com/r/dailyprogrammer/comments/1fcpnx/053013_challenge_126_intermediate_perfect_pth/
# [](#IntermediateIcon) *(Intermediate)*: Perfect P'th Powers
An integer X is a "perfect square power" if there is some integer Y such that Y^2 = X. An integer X is a "perfect cube
power" if there is some integer Y such that Y^3 = X. We can extrapolate this where P is the power in question: an
integer X is a "perfect p'th power" if there is some integer Y such that Y^P = X.
Your goal is to find the highest value of P for a given X such that for some unknown integer Y, Y^P should equal X. You
can expect the given input integer X to be within the range of an unsigned 32-bit integer (0 to 4,294,967,295).
*Special thanks to the ACM collegiate programming challenges group for giving me the initial idea
[here](http://uva.onlinejudge.org/index.php?option=onlinejudge&page=show_problem&problem=1563).*
# Formal Inputs & Outputs
## Input Description
You will be given a single integer on a single line of text through standard console input. This integer will range
from 0 to 4,294,967,295 (the limits of a 32-bit unsigned integer).
## Output Description
You must print out to standard console the highest value P that fits the above problem description's requirements.
# Sample Inputs & Outputs
## Sample Input
*Note:* These are all considered separate input examples.
17
1073741824
25
## Sample Output
*Note:* The string following the result are notes to help with understanding the example; it is NOT expected of you to
write this out.
1 (17^1)
30 (2^30)
2 (5^2)
"""
def main():
pass
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
76789d8e347ae1d7769b0f4e7cb04bbb19e6ff03 | 8478b0b08ebdd284740d23eb9347640b97a3d93f | /rttl.py | 74fd9d49e54edaa63a3155581b1515c1d0d3477e | [
"MIT"
]
| permissive | koxa22/Symbian-Python-Files | 4278fd4e25ba37cbd649c8ce07a3dd40f1d4213d | 59d5c5ad0ea5c7c4ac28b1e1ee6443ed2d354502 | refs/heads/master | 2022-07-17T05:36:09.351149 | 2018-04-01T17:18:12 | 2018-04-01T17:18:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,751 | py | # http://www.pymcu.com/PlayingSounds.html
# rttl.py version 1.0.1 (ported version to pyS60)
#
import e32
import pitchy
# RTTL variable to hold RTTL song
#RTTL = 'Bond:d=4,o=5,b=50:32p,16c#6,32d#6,32d#6,16d#6,8d#6,16c#6,16c#6,16c#6,16c#6,32e6,32e6,16e6,8e6,16d#6,16d#6,16d#6,16c#6,32d#6,32d#6,16d#6,8d#6,16c#6,16c#6,16c#6,16c#6,32e6,32e6,16e6,8e6,16d#6,16d6,16c#6,16c#7,c.7,16g#6,16f#6,g#.6'
# A Dictionary that contains the frequencies for each note
noteFreq = {'p':0,'a':3520,'a#':3729,'b':3951,'c':4186,'c#':4435,'d':4699,'d#':4978,'e':5274,'f':5588,'f#':5920,'g':6272,'g#':6645}
# This function will return the default Duration, Octave, and BeatsPerMinute from the RTTL data
def dob(dobData):
dobVals = dobData.split(',')
defaultDur = int(dobVals[0].split('=')[1])
defaultOct = int(dobVals[1].split('=')[1])
defaultBeats = int(dobVals[2].split('=')[1])
return defaultDur, defaultOct, defaultBeats
# This function will return the duration per note from the RTTL note data
def durSplit(noteData):
for d in noteData:
if ord(d) >= 97 and ord(d) <= 122:
durSplit = noteData.split(d)
if len(durSplit[0]) > 0:
return int(durSplit[0])
return 0
# This function will return just the note for dictionary look up from the RTTL note data
def noteSplit(noteData):
note = ''
hasDot = False
for d in noteData:
if ord(d) >= 97 and ord(d) <= 122:
note += d
if ord(d) == 35:
note += d
if ord(d) == 46:
hasDot = True
return note, hasDot
# This function will return per note octave changes if specified in the RTTL note data
def noteOctave(noteData):
if noteData[len(noteData)-1] >= 53 and noteData[len(noteData)-1] <= 56:
return 8 - int(noteData[len(noteData)-1])
else:
return None
def get_song_name(noktune):
rttlParts = noktune.split(':') # Split the RTTL song data into it's 3 core parts
return rttlParts[0] # Song Name
def get_duration(noktune):
rttlParts = noktune.split(':') # Split the RTTL song data into it's 3 core parts
dobVals = (rttlParts[1]).split(',')
defaultDur = int(dobVals[0].split('=')[1])
return defaultDur
def get_octave(noktune):
rttlParts = noktune.split(':') # Split the RTTL song data into it's 3 core parts
dobVals = (rttlParts[1]).split(',')
defaultOct = iint(dobVals[1].split('=')[1])
return defaultOct
def get_bpm(noktune):
rttlParts = noktune.split(':') # Split the RTTL song data into it's 3 core parts
dobVals = (rttlParts[1]).split(',')
defaultBeats = int(dobVals[2].split('=')[1])
return defaultBeats
def play_noktune(noktune,vol):
global noteFreq
tune=[]
rttlParts = noktune.split(':') # Split the RTTL song data into it's 3 core parts
defaultDur, defaultOct, defaultBeats = dob(rttlParts[1]) # Get default Duration, Octave, and Beats Per Minute
rttlNotes = rttlParts[2].split(',') # Split all the note data into a list
for note in rttlNotes: # Iterate through the note list
note = note.strip() # Strip out any possible pre or post spaces in the note data
durVal = durSplit(note) # Determine the per note duration if not default
if durVal == 0: # If there is no per note duration then use default for that note
durVal = defaultDur
duration = 60000 / defaultBeats / durVal * 3 # Calculate the proper duration based on Beats Per Minute and Duration Value
noteStr, hasDot = noteSplit(note) # Get note for dictionary lookup and check if the note has a dot
nFreq = noteFreq[noteStr] # Look up note frequency from the dictionary
if hasDot == True: # if it has a dot calculate the new duration
duration *= 3 / 2
octave = noteOctave(note) # Determine if there is per note octave change
if octave != None: # if so calculate the new octave frequency
nFreq /= octave
else: # else use the default octave frequency
nFreq /= defaultOct
if nFreq == 0: # if nFreq is 0 then it's a pause note so pause for the required time
e32.ao_sleep(float(duration / 1000.0))
else: # else play the note from the song
tune.append((nFreq,duration,vol))
pitchy.play_list(tune)
#play_noktune('Bond:d=4,o=5,b=50:32p,16c#6,32d#6,32d#6,16d#6,8d#6,16c#6,16c#6,16c#6,16c#6,32e6,32e6,16e6,8e6,16d#6,16d#6,16d#6,16c#6,32d#6,32d#6,16d#6,8d#6,16c#6,16c#6,16c#6,16c#6,32e6,32e6,16e6,8e6,16d#6,16d6,16c#6,16c#7,c.7,16g#6,16f#6,g#.6',3)
| [
"[email protected]"
]
| |
d6a101b306dc31549d207905b650b709fd7cd2d5 | 48eeb03decf7fa135368c5a5570186c31f5c85db | /dbmanage/mongodb/mongo.py | 5405ce9ec0320cd6e8e7827a7e0359020455ad07 | [
"Apache-2.0"
]
| permissive | zhonghua001/sbdb | d5bb86cb5e4075a8f4dbf0d37c3fa72dd3a05a00 | 40d02767ad58e850bb1df48f87e280424779d30f | refs/heads/master | 2021-01-24T03:52:58.751571 | 2018-02-26T03:51:12 | 2018-02-26T03:51:12 | 122,909,859 | 1 | 1 | Apache-2.0 | 2020-07-22T02:38:13 | 2018-02-26T03:50:57 | JavaScript | UTF-8 | Python | false | false | 8,096 | py | #coding=UTF-8
import MySQLdb,sys,string,time,datetime,uuid,pymongo,json
# from django.contrib.auth.models import User
from accounts.models import UserInfo
from dbmanage.myapp.models import Db_name,Db_account,Db_instance
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
from dbmanage.myapp.include.encrypt import prpcrypt
public_user = settings.PUBLIC_USER
export_limit = int(settings.EXPORT_LIMIT)
def get_mongodb_list(username, tag='tag', search=''):
    """Return the MongoDB dbtags the user may see for the given mode.

    tag='tag'  -> databases the user can read (read/all accounts+instances)
    tag='exec' -> databases the user can write (write/all accounts+instances)
    tag='log'  -> all distinct dbtags, unfiltered
    A non-empty *search* additionally filters on dbname substring.
    """
    dbtype = 'mongodb'
    host_list = []
    if len(search) == 0:
        if (tag == 'tag'):
            a = UserInfo.objects.get(username=username)
            # Skip entries without a role='read' or role='all' account:
            # they would not be usable from the dropdown menu.
            for row in a.db_name_set.all().order_by("dbtag"):
                if row.db_account_set.all().filter(role__in=['read', 'all']):
                    if row.instance.all().filter(role__in=['read', 'all']).filter(db_type=dbtype):
                        host_list.append(row.dbtag)
        elif (tag == 'log'):
            for row in Db_name.objects.values('dbtag').distinct().order_by("dbtag"):
                host_list.append(row['dbtag'])
        elif (tag == 'exec'):
            a = UserInfo.objects.get(username=username)
            # Skip entries without a role='write' or role='all' account.
            for row in a.db_name_set.all().order_by("dbtag"):
                if row.db_account_set.all().filter(role__in=['write', 'all']):
                    # Exclude read-only instances.
                    if row.instance.all().filter(role__in=['write', 'all']).filter(db_type=dbtype):
                        host_list.append(row.dbtag)
    elif len(search) > 0:
        if (tag == 'tag'):
            a = UserInfo.objects.get(username=username)
            # Same read/all filtering as above, restricted by dbname search.
            for row in a.db_name_set.filter(dbname__contains=search).order_by("dbtag"):
                if row.db_account_set.all().filter(role__in=['read', 'all']):
                    if row.instance.all().filter(role__in=['read', 'all']).filter(db_type=dbtype):
                        host_list.append(row.dbtag)
        elif (tag == 'log'):
            for row in Db_name.objects.values('dbtag').distinct().order_by("dbtag"):
                host_list.append(row['dbtag'])
        elif (tag == 'exec'):
            a = UserInfo.objects.get(username=username)
            # Same write/all filtering as above, restricted by dbname search.
            for row in a.db_name_set.filter(dbname__contains=search).order_by("dbtag"):
                if row.db_account_set.all().filter(role__in=['write', 'all']):
                    # Exclude read-only instances.
                    if row.instance.all().filter(role__in=['write', 'all']).filter(db_type=dbtype):
                        host_list.append(row.dbtag)
    return host_list
def get_mongo_coninfo(hosttag, useraccount):
    """Resolve (host, port, user, decrypted password, dbname) for the
    read path of *hosttag*, picking the account granted to *useraccount*
    or falling back to the public account. Python 2 code.
    """
    a = Db_name.objects.filter(dbtag=hosttag)[0]
    # a = Db_name.objects.get(dbtag=hosttag)
    tar_dbname = a.dbname
    try:
        if a.instance.all().filter(role='read')[0]:
            tar_host = a.instance.all().filter(role='read')[0].ip
            tar_port = a.instance.all().filter(role='read')[0].port
    # No role='read' instance configured: IndexError above drops us into
    # the except branch, which picks the first role='all' instance.
    except Exception, e:
        tar_host = a.instance.filter(role='all')[0].ip
        tar_port = a.instance.filter(role='all')[0].port
        # tar_host = a.instance.all()[0].ip
        # tar_port = a.instance.all()[0].port
    for i in a.db_account_set.all():
        if i.role != 'write' and i.role != 'admin':
            # find the specified account for the user
            if i.account.all().filter(username=useraccount):
                tar_username = i.user
                tar_passwd = i.passwd
                break
    # not find specified account for the user ,specified the public account to the user
    # (vars().has_key is Python 2 only)
    if not vars().has_key('tar_username'):
        for i in a.db_account_set.all():
            if i.role != 'write' and i.role != 'admin':
                # find the specified account for the user
                if i.account.all().filter(username=public_user):
                    tar_username = i.user
                    tar_passwd = i.passwd
                    break
    pc = prpcrypt()
    # Password is stored encrypted; decrypt before returning.
    return tar_host, tar_port, tar_username, pc.decrypt(tar_passwd), tar_dbname
def get_db_info(hosttag,useraccount):
    """Return the output of MongoDB's ``dbstats`` command for *hosttag*.

    Connection details come from get_mongo_coninfo(); a failed
    authenticate() is ignored, since auth may be disabled on the target.
    """
    host, port, user, passwd, dbname = get_mongo_coninfo(hosttag, useraccount)
    client = pymongo.MongoClient(host, int(port))
    database = client[dbname]
    try:
        database.authenticate(user, passwd)
    except Exception:
        pass
    return database.command({'dbstats': 1})
def get_tb_info(hosttag,tbname,useraccount):
    """Return MongoDB ``collstats`` output for collection *tbname* on *hosttag*.

    A failed authenticate() is ignored (auth may be disabled on the target).
    """
    host, port, user, passwd, dbname = get_mongo_coninfo(hosttag, useraccount)
    database = pymongo.MongoClient(host, int(port))[dbname]
    try:
        database.authenticate(user, passwd)
    except Exception:
        pass
    return database.command({'collstats': tbname})
def get_tbindex_info(hosttag,tbname,useraccount):
    """Return index_information() for collection *tbname* on *hosttag*.

    A failed authenticate() is ignored (auth may be disabled on the target).
    """
    host, port, user, passwd, dbname = get_mongo_coninfo(hosttag, useraccount)
    database = pymongo.MongoClient(host, int(port))[dbname]
    try:
        database.authenticate(user, passwd)
    except Exception:
        pass
    target = database[tbname]
    return target.index_information()
def get_mongo_collection(hosttag,useraccount):
    """List collection names in the database mapped to *hosttag*.

    Returns the list from ``collection_names()`` on success; on any failure
    returns the error-marker tuple ``([str(e)], '')`` that callers of this
    module use to signal an error row.
    """
    try:
        tar_host, tar_port, tar_username, tar_passwd, tar_dbname = get_mongo_coninfo(hosttag, useraccount)
        connect = pymongo.MongoClient(tar_host, int(tar_port))
        db = connect[tar_dbname]
        try:
            # Authentication may be disabled on the target; ignore failures.
            db.authenticate(tar_username, tar_passwd)
        except Exception:
            pass
        results = db.collection_names()
    except Exception as e:
        # Error marker consumed by callers. (The previous code also bound an
        # unused local ``col`` here via tuple unpacking; that was dead code.)
        results = ([str(e)], '')
    return results
def get_mongo_data(b,hosttag,tbname,useraccount):
    """Run the query string *b* against collection *tbname* on *hosttag*.

    The result set is capped at the user's configured export limit (falling
    back to the module-level ``export_limit`` default) and returned as a list
    of JSON strings with ``_id`` projected out. On any failure the
    error-marker tuple ``(['error'], '')`` is returned instead.
    """
    try:
        num = int(UserInfo.objects.get(username=useraccount).user_profile.export_limit)
    except Exception:
        # User has no per-profile limit configured; use the global default.
        num = export_limit
    try:
        tar_host, tar_port, tar_username, tar_passwd, tar_dbname = get_mongo_coninfo(hosttag, useraccount)
        connect = pymongo.MongoClient(tar_host, int(tar_port))
        db = connect[tar_dbname]
        try:
            # Authentication may be disabled on the target; ignore failures.
            db.authenticate(tar_username, tar_passwd)
        except Exception:
            pass
        collection = db[tbname]
        # SECURITY: eval() on a caller-supplied query string executes
        # arbitrary Python. Kept for backward compatibility with existing
        # callers, but this should be replaced with json.loads() + validation.
        resulta = collection.find(eval(b), {"_id": 0}).limit(num)
        results = []
        for recordjson in resulta:
            # DjangoJSONEncoder handles datetimes etc.; ObjectId is already
            # excluded by the {"_id": 0} projection above.
            results.append(json.dumps(recordjson, ensure_ascii=False, cls=DjangoJSONEncoder))
    except Exception:
        # Error marker consumed by callers.
        results = (['error'], '')
    return results
if __name__ == '__main__':
#x="insert /*sdfs*/into mysql.test ;truncate table mysql.db;rename mysql.db ;rename asdf;delete from `msql`.sa set ;delete ignore from t1 mysql.test values sdasdf;insert into ysql.user values()"
# print x
#x=" /*! */; select /**/ #asdfasdf; \nfrom mysql_replication_history;"
#x = " insert into item_infor (id,name) values(7,'t\\'e\"st');drop t * from test;"
print get_mongo_data('{"_class" : "com.mongodb.BasicDBObject"}','mongodb-easemob','message','root') | [
"123"
]
| 123 |
ee61071610eab8a0aed3f149a7ea92a7aed2bdd9 | 86c30560810feb9ba6a70fa7e5e1c46d039a4bb2 | /Ass/3/gameDemo/gd_fucks/alien.py | 9c9c004fefaba806131bd4d7a66ad1b63244ee67 | []
| no_license | zubairabid/ITWS2 | 3f0608835e79725f88fd1658b40a65f1a4a2ef47 | 4703aba0ada3b0a0a165155ba1177420a110decf | refs/heads/master | 2021-03-27T12:47:17.522346 | 2018-04-15T00:11:43 | 2018-04-15T00:11:43 | 116,122,583 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 619 | py | import globalset as gs
class Alien:
    """A short-lived alien occupying one cell of the session's screen grid."""

    def __init__(self, x, y):
        # Sprite text and initial lifetime come from the global settings module.
        self.txt = gs.alien
        self.x = x
        self.y = y
        self.life = gs.ALIEN_LIFE

    def update(self, sess):
        """Age the alien by one clock tick; despawn it when life runs out."""
        self.life -= gs.CLOCK_CYCLE
        if self.life <= 0:
            self.remove(sess)

    def remove(self, sess):
        """Detach the alien from *sess*: drop it from the alien list and
        clear its cell on the screen grid. Safe to call more than once."""
        if self in sess.alist:
            sess.alist.remove(self)
            sess.screen[self.x][self.y] = None
| [
"[email protected]"
]
| |
a5a8886cd1b54a2f799c66975c4821c04172df57 | 6533c515c936d999c4bd8192d03d01c56172940c | /wlauto/commands/run.py | c5432efe274167162fcfca1b1422e0f8c06fc751 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
]
| permissive | caogao/workload-automation | 330558c1a51b42a6e9e3291333f7c0653975c617 | 44a49db04da3224003f16bf82db0dc309422b710 | refs/heads/master | 2021-01-15T11:07:01.869199 | 2016-04-15T15:39:24 | 2016-04-15T15:39:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,053 | py | # Copyright 2014-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import sys
import shutil
import wlauto
from wlauto import Command, settings
from wlauto.exceptions import ConfigError
from wlauto.core.agenda import Agenda
from wlauto.core.execution import Executor
from wlauto.utils.log import add_log_file
class RunCommand(Command):
    """Implements the ``run`` sub-command: parse an agenda, apply
    command-line overrides, snapshot the config, then hand off to the
    Executor."""

    name = 'run'
    description = 'Execute automated workloads on a remote device and process the resulting output.'

    def initialize(self, context):
        """Register the ``run`` sub-command's arguments on self.parser."""
        self.parser.add_argument('agenda', metavar='AGENDA',
                                 help="""
                                 Agenda for this workload automation run. This defines which
                                 workloads will be executed, how many times, with which
                                 tunables, etc. See example agendas in {} for an example of
                                 how this file should be structured.
                                 """.format(os.path.dirname(wlauto.__file__)))
        self.parser.add_argument('-d', '--output-directory', metavar='DIR', default=None,
                                 help="""
                                 Specify a directory where the output will be generated. If
                                 the directory already exists, the script will abort unless -f
                                 option (see below) is used, in which case the contents of the
                                 directory will be overwritten. If this option is not specified,
                                 then {} will be used instead.
                                 """.format(settings.output_directory))
        self.parser.add_argument('-f', '--force', action='store_true',
                                 help="""
                                 Overwrite output directory if it exists. By default, the script
                                 will abort in this situation to prevent accidental data loss.
                                 """)
        self.parser.add_argument('-i', '--id', action='append', dest='only_run_ids', metavar='ID',
                                 help="""
                                 Specify a workload spec ID from an agenda to run. If this is
                                 specified, only that particular spec will be run, and other
                                 workloads in the agenda will be ignored. This option may be
                                 used to specify multiple IDs.
                                 """)
        self.parser.add_argument('--disable', action='append', dest='instruments_to_disable',
                                 metavar='INSTRUMENT', help="""
                                 Specify an instrument to disable from the command line. This
                                 equivalent to adding "~{metavar}" to the instrumentation list in
                                 the agenda. This can be used to temporarily disable a troublesome
                                 instrument for a particular run without introducing permanent
                                 change to the config (which one might then forget to revert).
                                 This option may be specified multiple times.
                                 """)

    def execute(self, args):  # NOQA
        """Run the workloads described by args.agenda.

        Side effects: creates/overwrites the output directory, copies the
        agenda and config files into the meta directory, and mutates the
        global ``settings`` object.
        """
        # Must happen before logging: the log file lives in the output dir.
        self.set_up_output_directory(args)
        add_log_file(settings.log_file)

        if os.path.isfile(args.agenda):
            agenda = Agenda(args.agenda)
            settings.agenda = args.agenda
            # Snapshot the agenda alongside the results for reproducibility.
            shutil.copy(args.agenda, settings.meta_directory)
        elif '.' in args.agenda or os.sep in args.agenda:
            # Looks like a path (has an extension or a separator) but does
            # not exist -- treat as an error rather than a workload name.
            raise ConfigError('Agenda "{}" does not exist.'.format(args.agenda))
        else:
            # Bare name: build a minimal one-workload agenda on the fly.
            self.logger.debug('{} is not a file; assuming workload name.'.format(args.agenda))
            agenda = Agenda()
            agenda.add_workload_entry(args.agenda)

        if args.instruments_to_disable:
            if 'instrumentation' not in agenda.config:
                agenda.config['instrumentation'] = []
            for itd in args.instruments_to_disable:
                # "~name" is the agenda syntax for disabling an instrument.
                self.logger.debug('Updating agenda to disable {}'.format(itd))
                agenda.config['instrumentation'].append('~{}'.format(itd))

        # Snapshot every config file that was in effect for this run,
        # numbered in the order they were loaded.
        basename = 'config_'
        for file_number, path in enumerate(settings.get_config_paths(), 1):
            file_ext = os.path.splitext(path)[1]
            shutil.copy(path, os.path.join(settings.meta_directory,
                                           basename + str(file_number) + file_ext))

        executor = Executor()
        executor.execute(agenda, selectors={'ids': args.only_run_ids})

    def set_up_output_directory(self, args):
        """Create the run's output and meta directories.

        Honours -d (override location) and -f (wipe an existing directory);
        exits the process with status 1 if the directory exists and -f was
        not given.
        """
        if args.output_directory:
            settings.output_directory = args.output_directory
        self.logger.debug('Using output directory: {}'.format(settings.output_directory))
        if os.path.exists(settings.output_directory):
            if args.force:
                self.logger.info('Removing existing output directory.')
                shutil.rmtree(settings.output_directory)
            else:
                self.logger.error('Output directory {} exists.'.format(settings.output_directory))
                self.logger.error('Please specify another location, or use -f option to overwrite.\n')
                sys.exit(1)
        self.logger.info('Creating output directory.')
        os.makedirs(settings.output_directory)
        os.makedirs(settings.meta_directory)
| [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.