Dataset schema (column: type, observed value range):

repo_name: string (length 5–114)
repo_url: string (length 24–133)
snapshot_id: string (length 40)
revision_id: string (length 40)
directory_id: string (length 40)
branch_name: string (209 distinct values)
visit_date: timestamp[ns]
revision_date: timestamp[ns]
committer_date: timestamp[ns]
github_id: int64 (9.83k–683M)
star_events_count: int64 (0–22.6k)
fork_events_count: int64 (0–4.15k)
gha_license_id: string (17 distinct values)
gha_created_at: timestamp[ns]
gha_updated_at: timestamp[ns]
gha_pushed_at: timestamp[ns]
gha_language: string (115 distinct values)
files: list (length 1–13.2k)
num_files: int64 (1–13.2k)
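The columns above describe one repository record; the per-file fields can be read off the first element of the files list further below. A hypothetical C++ mirror of this layout (the type choices, such as std::optional for the nullable gha_* columns and raw nanosecond integers for timestamp[ns], are my assumptions, not part of the dataset):

#include <cstdint>
#include <optional>
#include <string>
#include <vector>

struct FileRecord {                             // one element of the "files" list (field names as they appear below)
	double       alpha_fraction;                // character-composition statistics
	double       alphanum_fraction;
	double       avg_line_length;
	std::string  blob_id;                       // 40-char hex ids
	std::string  content_id;
	std::vector<std::string> detected_licenses;
	bool         is_generated;
	bool         is_vendor;
	std::string  language;                      // e.g. "C++"
	std::int64_t length_bytes;
	std::string  license_type;                  // e.g. "no_license"
	std::int64_t max_line_length;
	std::int64_t num_lines;
	std::string  path;                          // e.g. "/test_IpCHashT.hpp"
	std::string  repo_name;
	std::string  src_encoding;                  // e.g. "UTF-8"
	std::string  text;                          // full source text (newline-escaped in the dump)
};

struct RepoRow {                                // one dataset row, mirroring the schema above
	std::string  repo_name;
	std::string  repo_url;
	std::string  snapshot_id;                   // snapshot/revision/directory ids are 40-char hex
	std::string  revision_id;
	std::string  directory_id;
	std::string  branch_name;
	std::int64_t visit_date_ns;                 // timestamp[ns] kept as raw nanoseconds (assumption)
	std::int64_t revision_date_ns;
	std::int64_t committer_date_ns;
	std::int64_t github_id;
	std::int64_t star_events_count;
	std::int64_t fork_events_count;
	std::optional<std::string>  gha_license_id; // gha_* columns are null in the sample row
	std::optional<std::int64_t> gha_created_at;
	std::optional<std::int64_t> gha_updated_at;
	std::optional<std::int64_t> gha_pushed_at;
	std::optional<std::string>  gha_language;
	std::vector<FileRecord> files;
	std::int64_t num_files;
};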
Sample row:

repo_name: admiswalker/InPlaceChainedHashTable-IpCHashT-
repo_url: https://github.com/admiswalker/InPlaceChainedHashTable-IpCHashT-
snapshot_id: 3a200ef89624a093526410634305a437e116b9a2
revision_id: 5c73b3667ee2fde14da8c380482d1d2967dea596
directory_id: 306503dfde68dad3caaa4f446b546e3a3aaeb05e
branch_name: refs/heads/master
visit_date: 2021-07-24T14:09:04.359588
revision_date: 2020-06-13T10:49:48
committer_date: 2020-06-13T10:49:48
github_id: 187436123
star_events_count: 1
fork_events_count: 0
gha_license_id: null
gha_created_at: null
gha_updated_at: null
gha_pushed_at: null
gha_language: null
files:
[ { "alpha_fraction": 0.5008738040924072, "alphanum_fraction": 0.5534743070602417, "avg_line_length": 27.589725494384766, "blob_id": "8c8378730b7268a82d32af3b76488df2d526f10d", "content_id": "05a271b4004527134263698925f9494add1dbd07", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 40631, "license_type": "no_license", "max_line_length": 157, "num_lines": 1421, "path": "/test_IpCHashT.hpp", "repo_name": "admiswalker/InPlaceChainedHashTable-IpCHashT-", "src_encoding": "UTF-8", "text": "#include <random>\n#include \"./IpCHashT.hpp\"\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\nvoid printTable_all(const sstd::IpCHashT<uint64, uint64>& hashT){\n\tfor(auto itr=hashT.begin(); itr!=hashT.end(); ++itr){\n\t\titr.print_dbg();\n\t\tprintf(\"\\n\");\n\t}\n}/*\nuint64 countup_tableSize(const sstd::IpCHashT<uint64, uint64>& hashT){\n\tuint64 i=0ull;\n\tfor(auto itr=hashT.begin(); itr!=hashT.end(); ++itr){ i++; }\n\treturn i;\n}\n//*/\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n// definition of the user hash function\n\nnamespace usrDef_in_IpCHash{ class hashFn; }\nclass usrDef_in_IpCHash::hashFn{\nprivate:\npublic:\n\thashFn(){}\n\t~hashFn(){}\n\tsize_t operator()(const uint64& key){ return key; }\n};\n\nTEST(sstd_IpCHashT, use_usr_defined_hash_func){\n\tsstd::IpCHashT<uint64, uint64, usrDef_in_IpCHash::hashFn> hashT(10);\n\t\n\thashT.insert_hard( 1, 10);\n\tauto itr = hashT.find(1); ASSERT_TRUE( itr!=hashT.end() ); ASSERT_TRUE( itr.first()==1 ); ASSERT_TRUE( itr.second()==10 );\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n// insert_soft\n\nTEST(sstd_IpCHashT, insert_soft_case01){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 1; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t// insertion: case01\n\thashT.insert_soft( 1, 10);\n\t{\n\t\t// check: case01\n\t\tASSERT_TRUE( hashT._pT()[1].key == 1 );\n\t\tASSERT_TRUE( hashT._pT()[1].val ==10 );\n\t\tASSERT_TRUE( hashT._pT()[1].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 0 );\n\t}\n}\nTEST(sstd_IpCHashT, insert_soft_case03){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 1; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t{\n\t\t// init\n\t\thashT._pT()[1].key = 1;\n\t\thashT._pT()[1].val =10;\n\t\thashT._pT()[1].prev= 0;\n\t\thashT._pT()[1].next= 1;\n\t\t\n\t\thashT._pT()[2].key = 2;\n\t\thashT._pT()[2].val =20;\n\t\thashT._pT()[2].prev= 1;\n\t\thashT._pT()[2].next= 0;\n\n\t\thashT._elems() = 2;\n\t}\n\t\n\t// insertion: case03\n\thashT.insert_soft( 3, 30);\n//\tprintTable_all(hashT);\n\t{\n\t\t// check: case03\n\t\tASSERT_TRUE( hashT._pT()[1].key == 1 );\n\t\tASSERT_TRUE( hashT._pT()[1].val ==10 );\n\t\tASSERT_TRUE( hashT._pT()[1].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[2].key == 2 );\n\t\tASSERT_TRUE( hashT._pT()[2].val ==20 );\n\t\tASSERT_TRUE( hashT._pT()[2].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[2].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[3].key == 3 );\n\t\tASSERT_TRUE( hashT._pT()[3].val ==30 );\n\t\tASSERT_TRUE( hashT._pT()[3].prev== 
1 );\n\t\tASSERT_TRUE( hashT._pT()[3].next== 0 );\n\t}\n}\nTEST(sstd_IpCHashT, insert_soft_case06){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\t{\n\t\t// init\n\t\thashT._pT()[1].key = 1;\n\t\thashT._pT()[1].val =10;\n\t\thashT._pT()[1].prev= 0;\n\t\thashT._pT()[1].next= 1;\n\t\t\n\t\thashT._pT()[2].key = 2;\n\t\thashT._pT()[2].val =20;\n\t\thashT._pT()[2].prev= 1;\n\t\thashT._pT()[2].next= 0;\n\n\t\thashT._elems() = 2;\n\t}\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 2; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t// insertion: case06\n\thashT.insert_soft( 3, 30);\n//\tprintTable_all(hashT);\n\t\n\t{\n\t\t// check: case06\n\t\tASSERT_TRUE( hashT._pT()[1].key == 1 );\n\t\tASSERT_TRUE( hashT._pT()[1].val ==10 );\n\t\tASSERT_TRUE( hashT._pT()[1].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 2 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[2].key == 3 );\n\t\tASSERT_TRUE( hashT._pT()[2].val ==30 );\n\t\tASSERT_TRUE( hashT._pT()[2].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[2].next== 0 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[3].key == 2 );\n\t\tASSERT_TRUE( hashT._pT()[3].val ==20 );\n\t\tASSERT_TRUE( hashT._pT()[3].prev== 2 );\n\t\tASSERT_TRUE( hashT._pT()[3].next== 0 );\n\t}\n}\nTEST(sstd_IpCHashT, insert_soft_case11){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\t{\n\t\t// init\n\t\thashT._pT()[1].key = 1;\n\t\thashT._pT()[1].val =10;\n\t\thashT._pT()[1].prev= 0;\n\t\thashT._pT()[1].next= 1;\n\t\t\n\t\thashT._pT()[2].key = 2;\n\t\thashT._pT()[2].val =20;\n\t\thashT._pT()[2].prev= 1;\n\t\thashT._pT()[2].next= 1;\n\t\t\n\t\thashT._pT()[3].key = 3;\n\t\thashT._pT()[3].val =30;\n\t\thashT._pT()[3].prev= 1;\n\t\thashT._pT()[3].next= 0;\n\n\t\thashT._elems() = 3;\n\t}\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 2; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t// insertion: case11\n\thashT.insert_soft( 4, 40);\n//\tprintTable_all(hashT);\n\t{\n\t\t// check: case11\n\t\tASSERT_TRUE( hashT._pT()[1].key == 1 );\n\t\tASSERT_TRUE( hashT._pT()[1].val ==10 );\n\t\tASSERT_TRUE( hashT._pT()[1].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 2 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[2].key == 4 );\n\t\tASSERT_TRUE( hashT._pT()[2].val ==40 );\n\t\tASSERT_TRUE( hashT._pT()[2].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[2].next== 0 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[3].key == 3 );\n\t\tASSERT_TRUE( hashT._pT()[3].val ==30 );\n\t\tASSERT_TRUE( hashT._pT()[3].prev== 2 );\n\t\tASSERT_TRUE( hashT._pT()[3].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[4].key == 2 );\n\t\tASSERT_TRUE( hashT._pT()[4].val ==20 );\n\t\tASSERT_TRUE( hashT._pT()[4].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[4].next== 0 );\n\t}\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n// insert_hard\n\nTEST(sstd_IpCHashT, insert_hard_case01){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 1; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t// insertion: case01\n\thashT.insert_hard( 1, 10);\n\t{\n\t\t// check: case01\n\t\tASSERT_TRUE( hashT._pT()[1].key == 1 );\n\t\tASSERT_TRUE( hashT._pT()[1].val ==10 );\n\t\tASSERT_TRUE( hashT._pT()[1].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 0 );\n\t}\n}\nTEST(sstd_IpCHashT, insert_hard_case02){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\thashT.use_tIdx_dbg = true; // enable debug 
option\n\thashT.tIdx_dbg = 1; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t{\n\t\t// init\n\t\thashT._pT()[1].key = 1;\n\t\thashT._pT()[1].val =10;\n\t\thashT._pT()[1].prev= 0;\n\t\thashT._pT()[1].next= 2;\n\t\t\n\t\t// idx==2 is empty\n\t\t\n\t\thashT._pT()[3].key = 3;\n\t\thashT._pT()[3].val =30;\n\t\thashT._pT()[3].prev= 2;\n\t\thashT._pT()[3].next= 0;\n\t\t\n\t\thashT._elems() = 2;\n\t}\n\n\t// insertion: case02\n\thashT.insert_hard( 2, 20);\n//\tprintTable_all(hashT);\n\t{\n\t\t// check: case02\n\t\tASSERT_TRUE( hashT._pT()[1].key == 1 );\n\t\tASSERT_TRUE( hashT._pT()[1].val ==10 );\n\t\tASSERT_TRUE( hashT._pT()[1].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[2].key == 2 );\n\t\tASSERT_TRUE( hashT._pT()[2].val ==20 );\n\t\tASSERT_TRUE( hashT._pT()[2].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[2].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[3].key == 3 );\n\t\tASSERT_TRUE( hashT._pT()[3].val ==30 );\n\t\tASSERT_TRUE( hashT._pT()[3].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[3].next== 0 );\n\t}\n}\nTEST(sstd_IpCHashT, insert_hard_case02_02){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 1; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t{\n\t\t// init\n\t\thashT._pT()[1].key = 1;\n\t\thashT._pT()[1].val =10;\n\t\thashT._pT()[1].prev= 0;\n\t\thashT._pT()[1].next= 1;\n\t\t\n\t\thashT._pT()[2].key = 2;\n\t\thashT._pT()[2].val =20;\n\t\thashT._pT()[2].prev= 1;\n\t\thashT._pT()[2].next= 2;\n\t\t\n\t\t// idx==3 is empty\n\t\t\n\t\thashT._pT()[4].key = 4;\n\t\thashT._pT()[4].val =40;\n\t\thashT._pT()[4].prev= 2;\n\t\thashT._pT()[4].next= 0;\n\n\t\thashT._elems() = 3;\n\t}\n\t\n\t// insertion: case02\n\thashT.insert_hard( 5, 50);\n//\tprintTable_all(hashT);\n\t{\n\t\t// check: case02\n\t\tASSERT_TRUE( hashT._pT()[1].key == 1 );\n\t\tASSERT_TRUE( hashT._pT()[1].val ==10 );\n\t\tASSERT_TRUE( hashT._pT()[1].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[2].key == 2 );\n\t\tASSERT_TRUE( hashT._pT()[2].val ==20 );\n\t\tASSERT_TRUE( hashT._pT()[2].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[2].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[3].key == 5 );\n\t\tASSERT_TRUE( hashT._pT()[3].val ==50 );\n\t\tASSERT_TRUE( hashT._pT()[3].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[3].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[4].key == 4 );\n\t\tASSERT_TRUE( hashT._pT()[4].val ==40 );\n\t\tASSERT_TRUE( hashT._pT()[4].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[4].next== 0 );\n\t}\n}\nTEST(sstd_IpCHashT, insert_hard_case03){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 1; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t{\n\t\t// init\n\t\thashT._pT()[1].key = 1;\n\t\thashT._pT()[1].val =10;\n\t\thashT._pT()[1].prev= 0;\n\t\thashT._pT()[1].next= 1;\n\t\t\n\t\thashT._pT()[2].key = 2;\n\t\thashT._pT()[2].val =20;\n\t\thashT._pT()[2].prev= 1;\n\t\thashT._pT()[2].next= 0;\n\n\t\thashT._elems() = 2;\n\t}\n\t\n\t// insertion: case03\n\thashT.insert_hard( 3, 30);\n//\tprintTable_all(hashT);\n\t{\n\t\t// check: case03\n\t\tASSERT_TRUE( hashT._pT()[1].key == 1 );\n\t\tASSERT_TRUE( hashT._pT()[1].val ==10 );\n\t\tASSERT_TRUE( hashT._pT()[1].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[2].key == 2 );\n\t\tASSERT_TRUE( hashT._pT()[2].val ==20 );\n\t\tASSERT_TRUE( 
hashT._pT()[2].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[2].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[3].key == 3 );\n\t\tASSERT_TRUE( hashT._pT()[3].val ==30 );\n\t\tASSERT_TRUE( hashT._pT()[3].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[3].next== 0 );\n\t}\n}\nTEST(sstd_IpCHashT, insert_hard_case04){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\t{\n\t\t// init\n\t\thashT._pT()[1].key = 1;\n\t\thashT._pT()[1].val =10;\n\t\thashT._pT()[1].prev= 0;\n\t\thashT._pT()[1].next= 2;\n\t\n\t\t// idx==2 is empty\n\t\t\n\t\thashT._pT()[3].key = 3;\n\t\thashT._pT()[3].val =30;\n\t\thashT._pT()[3].prev= 2;\n\t\thashT._pT()[3].next= 0;\n\n\t\thashT._elems() = 2;\n\t}\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 3; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t// insertion: case04\n\thashT.insert_hard( 4, 40);\n//\tprintTable_all(hashT);\n\t{\n\t\t// check: case04\n\t\tASSERT_TRUE( hashT._pT()[1].key == 1 );\n\t\tASSERT_TRUE( hashT._pT()[1].val ==10 );\n\t\tASSERT_TRUE( hashT._pT()[1].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[2].key == 3 );\n\t\tASSERT_TRUE( hashT._pT()[2].val ==30 );\n\t\tASSERT_TRUE( hashT._pT()[2].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[2].next== 0 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[3].key == 4 );\n\t\tASSERT_TRUE( hashT._pT()[3].val ==40 );\n\t\tASSERT_TRUE( hashT._pT()[3].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[3].next== 0 );\n\t}\n}\nTEST(sstd_IpCHashT, insert_hard_case05){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\t{\n\t\t// init\n\t\thashT._pT()[1].key = 1;\n\t\thashT._pT()[1].val =10;\n\t\thashT._pT()[1].prev= 0;\n\t\thashT._pT()[1].next= 2;\n\t\t\n\t\thashT._pT()[3].key = 3;\n\t\thashT._pT()[3].val =30;\n\t\thashT._pT()[3].prev= 2;\n\t\thashT._pT()[3].next= 1;\n\t\t\n\t\thashT._pT()[4].key = 4;\n\t\thashT._pT()[4].val =40;\n\t\thashT._pT()[4].prev= 1;\n\t\thashT._pT()[4].next= 0;\n\n\t\thashT._elems() = 3;\n\t}\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 4; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t// insertion: case05\n\thashT.insert_hard( 5, 50);\n//\tprintTable_all(hashT);\n\t{\n\t\t// check: case05\n\t\tASSERT_TRUE( hashT._pT()[1].key == 1 );\n\t\tASSERT_TRUE( hashT._pT()[1].val ==10 );\n\t\tASSERT_TRUE( hashT._pT()[1].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[2].key == 4 );\n\t\tASSERT_TRUE( hashT._pT()[2].val ==40 );\n\t\tASSERT_TRUE( hashT._pT()[2].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[2].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[3].key == 3 );\n\t\tASSERT_TRUE( hashT._pT()[3].val ==30 );\n\t\tASSERT_TRUE( hashT._pT()[3].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[3].next== 0 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[4].key == 5 );\n\t\tASSERT_TRUE( hashT._pT()[4].val ==50 );\n\t\tASSERT_TRUE( hashT._pT()[4].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[4].next== 0 );\n\t}\n}\nTEST(sstd_IpCHashT, insert_hard_case06){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\t{\n\t\t// init\n\t\thashT._pT()[1].key = 1;\n\t\thashT._pT()[1].val =10;\n\t\thashT._pT()[1].prev= 0;\n\t\thashT._pT()[1].next= 1;\n\t\t\n\t\thashT._pT()[2].key = 2;\n\t\thashT._pT()[2].val =20;\n\t\thashT._pT()[2].prev= 1;\n\t\thashT._pT()[2].next= 0;\n\n\t\thashT._elems() = 2;\n\t}\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 2; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t// insertion: case06\n\thashT.insert_hard( 3, 
30);\n//\tprintTable_all(hashT);\n\t\n\t{\n\t\t// check: case06\n\t\tASSERT_TRUE( hashT._pT()[1].key == 1 );\n\t\tASSERT_TRUE( hashT._pT()[1].val ==10 );\n\t\tASSERT_TRUE( hashT._pT()[1].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 2 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[2].key == 3 );\n\t\tASSERT_TRUE( hashT._pT()[2].val ==30 );\n\t\tASSERT_TRUE( hashT._pT()[2].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[2].next== 0 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[3].key == 2 );\n\t\tASSERT_TRUE( hashT._pT()[3].val ==20 );\n\t\tASSERT_TRUE( hashT._pT()[3].prev== 2 );\n\t\tASSERT_TRUE( hashT._pT()[3].next== 0 );\n\t}\n}\nTEST(sstd_IpCHashT, insert_hard_case07){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\t{\n\t\t// init\n\t\thashT._pT()[1].key = 1;\n\t\thashT._pT()[1].val =10;\n\t\thashT._pT()[1].prev= 0;\n\t\thashT._pT()[1].next= 2;\n\t\t\n\t\t// idx==2 is empty\n\t\t\n\t\thashT._pT()[3].key = 3;\n\t\thashT._pT()[3].val =30;\n\t\thashT._pT()[3].prev= 2;\n\t\thashT._pT()[3].next= 1;\n\t\t\n\t\thashT._pT()[4].key = 4;\n\t\thashT._pT()[4].val =40;\n\t\thashT._pT()[4].prev= 1;\n\t\thashT._pT()[4].next= 0;\n\n\t\thashT._elems() = 3;\n\t}\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 3; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t// insertion: case07\n\thashT.insert_hard( 5, 50);\n//\tprintTable_all(hashT);\n\t{\n\t\t// check: case04\n\t\tASSERT_TRUE( hashT._pT()[1].key == 1 );\n\t\tASSERT_TRUE( hashT._pT()[1].val ==10 );\n\t\tASSERT_TRUE( hashT._pT()[1].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[2].key == 3 );\n\t\tASSERT_TRUE( hashT._pT()[2].val ==30 );\n\t\tASSERT_TRUE( hashT._pT()[2].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[2].next== 2 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[3].key == 5 );\n\t\tASSERT_TRUE( hashT._pT()[3].val ==50 );\n\t\tASSERT_TRUE( hashT._pT()[3].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[3].next== 0 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[4].key == 4 );\n\t\tASSERT_TRUE( hashT._pT()[4].val ==40 );\n\t\tASSERT_TRUE( hashT._pT()[4].prev== 2 );\n\t\tASSERT_TRUE( hashT._pT()[4].next== 0 );\n\t}\n}\nTEST(sstd_IpCHashT, insert_hard_case08){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\t{\n\t\t// init\n\t\thashT._pT()[1].key = 1;\n\t\thashT._pT()[1].val =10;\n\t\thashT._pT()[1].prev= 0;\n\t\thashT._pT()[1].next= 2;\n\t\t\n\t\t// idx==2 is empty\n\t\t\n\t\thashT._pT()[3].key = 3;\n\t\thashT._pT()[3].val =30;\n\t\thashT._pT()[3].prev= 2;\n\t\thashT._pT()[3].next= 1;\n\t\t\n\t\thashT._pT()[4].key = 4;\n\t\thashT._pT()[4].val =40;\n\t\thashT._pT()[4].prev= 1;\n\t\thashT._pT()[4].next= 1;\n\t\t\n\t\thashT._pT()[5].key = 5;\n\t\thashT._pT()[5].val =50;\n\t\thashT._pT()[5].prev= 1;\n\t\thashT._pT()[5].next= 0;\n\n\t\thashT._elems() = 4;\n\t}\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 4; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t// insertion: case08\n\thashT.insert_hard( 6, 60);\n//\tprintTable_all(hashT);\n\t{\n\t\t// check: case08\n\t\tASSERT_TRUE( hashT._pT()[1].key == 1 );\n\t\tASSERT_TRUE( hashT._pT()[1].val ==10 );\n\t\tASSERT_TRUE( hashT._pT()[1].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[2].key == 4 );\n\t\tASSERT_TRUE( hashT._pT()[2].val ==40 );\n\t\tASSERT_TRUE( hashT._pT()[2].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[2].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[3].key == 3 );\n\t\tASSERT_TRUE( hashT._pT()[3].val ==30 );\n\t\tASSERT_TRUE( 
hashT._pT()[3].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[3].next== 2 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[4].key == 6 );\n\t\tASSERT_TRUE( hashT._pT()[4].val ==60 );\n\t\tASSERT_TRUE( hashT._pT()[4].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[4].next== 0 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[5].key == 5 );\n\t\tASSERT_TRUE( hashT._pT()[5].val ==50 );\n\t\tASSERT_TRUE( hashT._pT()[5].prev== 2 );\n\t\tASSERT_TRUE( hashT._pT()[5].next== 0 );\n\t}\n}\nTEST(sstd_IpCHashT, insert_hard_case09){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\t{\n\t\t// init\n\t\thashT._pT()[1].key = 1;\n\t\thashT._pT()[1].val =10;\n\t\thashT._pT()[1].prev= 0;\n\t\thashT._pT()[1].next= 1;\n\t\t\n\t\thashT._pT()[2].key = 2;\n\t\thashT._pT()[2].val =20;\n\t\thashT._pT()[2].prev= 1;\n\t\thashT._pT()[2].next= 2;\n\t\t\n\t\t// idx==3 is empty\n\t\t\n\t\thashT._pT()[4].key = 4;\n\t\thashT._pT()[4].val =40;\n\t\thashT._pT()[4].prev= 2;\n\t\thashT._pT()[4].next= 0;\n\n\t\thashT._elems() = 3;\n\t}\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 2; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t// insertion: case09\n\thashT.insert_hard( 6, 60);\n//\tprintTable_all(hashT);\n\t{\n\t\t// check: case09\n\t\tASSERT_TRUE( hashT._pT()[1].key == 1 );\n\t\tASSERT_TRUE( hashT._pT()[1].val ==10 );\n\t\tASSERT_TRUE( hashT._pT()[1].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 2 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[2].key == 6 );\n\t\tASSERT_TRUE( hashT._pT()[2].val ==60 );\n\t\tASSERT_TRUE( hashT._pT()[2].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[2].next== 0 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[3].key == 2 );\n\t\tASSERT_TRUE( hashT._pT()[3].val ==20 );\n\t\tASSERT_TRUE( hashT._pT()[3].prev== 2 );\n\t\tASSERT_TRUE( hashT._pT()[3].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[4].key == 4 );\n\t\tASSERT_TRUE( hashT._pT()[4].val ==40 );\n\t\tASSERT_TRUE( hashT._pT()[4].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[4].next== 0 );\n\t}\n}\nTEST(sstd_IpCHashT, insert_hard_case10){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\t{\n\t\t// init\n\t\thashT._pT()[1].key = 1;\n\t\thashT._pT()[1].val =10;\n\t\thashT._pT()[1].prev= 0;\n\t\thashT._pT()[1].next= 1;\n\t\t\n\t\thashT._pT()[2].key = 2;\n\t\thashT._pT()[2].val =20;\n\t\thashT._pT()[2].prev= 1;\n\t\thashT._pT()[2].next= 1;\n\t\t\n\t\thashT._pT()[3].key = 3;\n\t\thashT._pT()[3].val =30;\n\t\thashT._pT()[3].prev= 1;\n\t\thashT._pT()[3].next= 2;\n\n\t\t// idx==4 is empty\n\t\t\n\t\thashT._pT()[5].key = 5;\n\t\thashT._pT()[5].val =50;\n\t\thashT._pT()[5].prev= 2;\n\t\thashT._pT()[5].next= 0;\n\n\t\thashT._elems() = 4;\n\t}\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 2; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t// insertion: case10\n\thashT.insert_hard( 6, 60);\n//\tprintTable_all(hashT);\n\t{\n\t\t// check: case10\n\t\tASSERT_TRUE( hashT._pT()[1].key == 1 );\n\t\tASSERT_TRUE( hashT._pT()[1].val ==10 );\n\t\tASSERT_TRUE( hashT._pT()[1].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 2 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[2].key == 6 );\n\t\tASSERT_TRUE( hashT._pT()[2].val ==60 );\n\t\tASSERT_TRUE( hashT._pT()[2].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[2].next== 0 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[3].key == 3 );\n\t\tASSERT_TRUE( hashT._pT()[3].val ==30 );\n\t\tASSERT_TRUE( hashT._pT()[3].prev== 2 );\n\t\tASSERT_TRUE( hashT._pT()[3].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[4].key == 2 );\n\t\tASSERT_TRUE( hashT._pT()[4].val ==20 );\n\t\tASSERT_TRUE( 
hashT._pT()[4].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[4].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[5].key == 5 );\n\t\tASSERT_TRUE( hashT._pT()[5].val ==50 );\n\t\tASSERT_TRUE( hashT._pT()[5].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[5].next== 0 );\n\t}\n}\nTEST(sstd_IpCHashT, insert_hard_case10_02){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\t{\n\t\t// init\n\t\thashT._pT()[1].key = 1;\n\t\thashT._pT()[1].val =10;\n\t\thashT._pT()[1].prev= 0;\n\t\thashT._pT()[1].next= 1;\n\t\t\n\t\thashT._pT()[2].key = 2;\n\t\thashT._pT()[2].val =20;\n\t\thashT._pT()[2].prev= 1;\n\t\thashT._pT()[2].next= 1;\n\t\t\n\t\thashT._pT()[3].key = 3;\n\t\thashT._pT()[3].val =30;\n\t\thashT._pT()[3].prev= 1;\n\t\thashT._pT()[3].next= 1;\n\t\t\n\t\thashT._pT()[4].key = 4;\n\t\thashT._pT()[4].val =40;\n\t\thashT._pT()[4].prev= 1;\n\t\thashT._pT()[4].next= 2;\n\n\t\t// idx==5 is empty\n\t\t\n\t\thashT._pT()[6].key = 6;\n\t\thashT._pT()[6].val =60;\n\t\thashT._pT()[6].prev= 2;\n\t\thashT._pT()[6].next= 0;\n\n\t\thashT._elems() = 5;\n\t}\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 2; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t// insertion: case10\n\thashT.insert_hard( 7, 70);\n//\tprintTable_all(hashT);\n\t{\n\t\t// check: case10\n\t\tASSERT_TRUE( hashT._pT()[1].key == 1 );\n\t\tASSERT_TRUE( hashT._pT()[1].val ==10 );\n\t\tASSERT_TRUE( hashT._pT()[1].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 2 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[2].key == 7 );\n\t\tASSERT_TRUE( hashT._pT()[2].val ==70 );\n\t\tASSERT_TRUE( hashT._pT()[2].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[2].next== 0 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[3].key == 3 );\n\t\tASSERT_TRUE( hashT._pT()[3].val ==30 );\n\t\tASSERT_TRUE( hashT._pT()[3].prev== 2 );\n\t\tASSERT_TRUE( hashT._pT()[3].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[4].key == 4 );\n\t\tASSERT_TRUE( hashT._pT()[4].val ==40 );\n\t\tASSERT_TRUE( hashT._pT()[4].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[4].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[5].key == 2 );\n\t\tASSERT_TRUE( hashT._pT()[5].val ==20 );\n\t\tASSERT_TRUE( hashT._pT()[5].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[5].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[6].key == 6 );\n\t\tASSERT_TRUE( hashT._pT()[6].val ==60 );\n\t\tASSERT_TRUE( hashT._pT()[6].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[6].next== 0 );\n\t}\n}\nTEST(sstd_IpCHashT, insert_hard_case11){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\t{\n\t\t// init\n\t\thashT._pT()[1].key = 1;\n\t\thashT._pT()[1].val =10;\n\t\thashT._pT()[1].prev= 0;\n\t\thashT._pT()[1].next= 1;\n\t\t\n\t\thashT._pT()[2].key = 2;\n\t\thashT._pT()[2].val =20;\n\t\thashT._pT()[2].prev= 1;\n\t\thashT._pT()[2].next= 1;\n\t\t\n\t\thashT._pT()[3].key = 3;\n\t\thashT._pT()[3].val =30;\n\t\thashT._pT()[3].prev= 1;\n\t\thashT._pT()[3].next= 0;\n\n\t\thashT._elems() = 3;\n\t}\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 2; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t// insertion: case11\n\thashT.insert_hard( 4, 40);\n//\tprintTable_all(hashT);\n\t{\n\t\t// check: case11\n\t\tASSERT_TRUE( hashT._pT()[1].key == 1 );\n\t\tASSERT_TRUE( hashT._pT()[1].val ==10 );\n\t\tASSERT_TRUE( hashT._pT()[1].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 2 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[2].key == 4 );\n\t\tASSERT_TRUE( hashT._pT()[2].val ==40 );\n\t\tASSERT_TRUE( hashT._pT()[2].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[2].next== 0 );\n\t\t\n\t\tASSERT_TRUE( 
hashT._pT()[3].key == 3 );\n\t\tASSERT_TRUE( hashT._pT()[3].val ==30 );\n\t\tASSERT_TRUE( hashT._pT()[3].prev== 2 );\n\t\tASSERT_TRUE( hashT._pT()[3].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[4].key == 2 );\n\t\tASSERT_TRUE( hashT._pT()[4].val ==20 );\n\t\tASSERT_TRUE( hashT._pT()[4].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[4].next== 0 );\n\t}\n}\n//-----------------------------------------------------------------------------------------------------------------------------------------------\nTEST(sstd_IpCHashT, insert_soft_out_of_case_00){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\t{\n\t\t// init\n\t\thashT._pT()[1].key = 1;\n\t\thashT._pT()[1].val =10;\n\t\thashT._pT()[1].prev= 0;\n\t\thashT._pT()[1].next= 0;\n\t\t\n\t\thashT._pT()[2].key = 2;\n\t\thashT._pT()[2].val =20;\n\t\thashT._pT()[2].prev= 0;\n\t\thashT._pT()[2].next= 0;\n\n\t\thashT._elems() = 2;\n\t}\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 1; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t// insertion: case03\n\thashT.insert_soft( 3, 30);\n//\tprintTable_all(hashT);\n\t{\n\t\t// check: case03\n\t\tASSERT_TRUE( hashT._pT()[1].key == 1 );\n\t\tASSERT_TRUE( hashT._pT()[1].val ==10 );\n\t\tASSERT_TRUE( hashT._pT()[1].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 2 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[2].key == 2 );\n\t\tASSERT_TRUE( hashT._pT()[2].val ==20 );\n\t\tASSERT_TRUE( hashT._pT()[2].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[2].next== 0 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[3].key == 3 );\n\t\tASSERT_TRUE( hashT._pT()[3].val ==30 );\n\t\tASSERT_TRUE( hashT._pT()[3].prev== 2 );\n\t\tASSERT_TRUE( hashT._pT()[3].next== 0 );\n\t}\n}\n//---\nTEST(sstd_IpCHashT, insert_hard_out_of_case_00){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\t{\n\t\t// init\n\t\thashT._pT()[1].key = 1;\n\t\thashT._pT()[1].val =10;\n\t\thashT._pT()[1].prev= 0;\n\t\thashT._pT()[1].next= 0;\n\t\t\n\t\thashT._pT()[2].key = 2;\n\t\thashT._pT()[2].val =20;\n\t\thashT._pT()[2].prev= 0;\n\t\thashT._pT()[2].next= 0;\n\n\t\thashT._elems() = 2;\n\t}\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 1; // force to set key-val on the table index \"tIdx_dbg\".\n\n\t// insertion: case03\n\thashT.insert_hard( 3, 30);\n//\tprintTable_all(hashT);\n\t{\n\t\t// check: case03\n\t\tASSERT_TRUE( hashT._pT()[1].key == 1 );\n\t\tASSERT_TRUE( hashT._pT()[1].val ==10 );\n\t\tASSERT_TRUE( hashT._pT()[1].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 2 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[2].key == 2 );\n\t\tASSERT_TRUE( hashT._pT()[2].val ==20 );\n\t\tASSERT_TRUE( hashT._pT()[2].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[2].next== 0 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[3].key == 3 );\n\t\tASSERT_TRUE( hashT._pT()[3].val ==30 );\n\t\tASSERT_TRUE( hashT._pT()[3].prev== 2 );\n\t\tASSERT_TRUE( hashT._pT()[3].next== 0 );\n\t}\n}\n//-----------------------------------------------------------------------------------------------------------------------------------------------\nTEST(sstd_IpCHashT, find){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\n\thashT.insert_hard(1, 10);\n\tauto itr = hashT.find(1);\n\tASSERT_TRUE( itr.first() == 1 );\n\tASSERT_TRUE( itr.second() == 10 );\n}\nTEST(sstd_IpCHashT, find_02){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\thashT.insert_hard(1, 10);\n\thashT.insert_hard(1, 20);\n\tauto itr = hashT.find(1);\n\tASSERT_TRUE( itr != hashT.end() );\n\tASSERT_TRUE( itr.first() == 1 );\n\tASSERT_TRUE( itr.second() == 20 );\n\t\n\tauto itr2 
= hashT.find(2);\n\tASSERT_FALSE( itr2 != hashT.end() );\n}\n//-----------------------------------------------------------------------------------------------------------------------------------------------\nTEST(sstd_IpCHashT, erase_case01){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 1; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\thashT.insert_hard(1, 10);\n\thashT.erase(1);\n//\tprintTable_all(hashT);\n\t{\n\t\t// check: case01\n\t\tASSERT_TRUE( hashT._pT()[1].prev==hashT._maxShift() );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 0 );\n\t}\n}\nTEST(sstd_IpCHashT, erase_case02){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\t{\n\t\t// init\n\t\thashT._pT()[1].key = 1;\n\t\thashT._pT()[1].val =10;\n\t\thashT._pT()[1].prev= 0;\n\t\thashT._pT()[1].next= 1;\n\t\t\n\t\thashT._pT()[2].key = 2;\n\t\thashT._pT()[2].val =20;\n\t\thashT._pT()[2].prev= 1;\n\t\thashT._pT()[2].next= 0;\n\t}\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 1; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t// insertion: case02\n\thashT.erase(2);\n//\tprintTable_all(hashT);\n\t{\n\t\t// check: case02\n\t\tASSERT_TRUE( hashT._pT()[1].key == 1 );\n\t\tASSERT_TRUE( hashT._pT()[1].val ==10 );\n\t\tASSERT_TRUE( hashT._pT()[1].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 0 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[2].prev==hashT._maxShift() );\n\t\tASSERT_TRUE( hashT._pT()[2].next== 0 );\n\t}\n}\nTEST(sstd_IpCHashT, erase_case03){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\t{\n\t\t// init\n\t\thashT._pT()[1].key = 1;\n\t\thashT._pT()[1].val =10;\n\t\thashT._pT()[1].prev= 0;\n\t\thashT._pT()[1].next= 1;\n\t\t\n\t\thashT._pT()[2].key = 2;\n\t\thashT._pT()[2].val =20;\n\t\thashT._pT()[2].prev= 1;\n\t\thashT._pT()[2].next= 1;\n\t\t\n\t\thashT._pT()[3].key = 3;\n\t\thashT._pT()[3].val =30;\n\t\thashT._pT()[3].prev= 1;\n\t\thashT._pT()[3].next= 0;\n\t}\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 1; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t// insertion: case11\n\thashT.erase(1);\n//\tprintTable_all(hashT);\n\t{\n\t\t// check: case11\n\t\tASSERT_TRUE( hashT._pT()[1].key == 3 );\n\t\tASSERT_TRUE( hashT._pT()[1].val ==30 );\n\t\tASSERT_TRUE( hashT._pT()[1].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[2].key == 2 );\n\t\tASSERT_TRUE( hashT._pT()[2].val ==20 );\n\t\tASSERT_TRUE( hashT._pT()[2].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[2].next== 0 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[3].prev==hashT._maxShift() );\n\t\tASSERT_TRUE( hashT._pT()[3].next== 0 );\n\t}\n}\nTEST(sstd_IpCHashT, erase_case04){\n\tsstd::IpCHashT<uint64, uint64> hashT(10);\n\t\n\t{\n\t\t// init\n\t\thashT._pT()[1].key = 1;\n\t\thashT._pT()[1].val =10;\n\t\thashT._pT()[1].prev= 0;\n\t\thashT._pT()[1].next= 1;\n\t\t\n\t\thashT._pT()[2].key = 2;\n\t\thashT._pT()[2].val =20;\n\t\thashT._pT()[2].prev= 1;\n\t\thashT._pT()[2].next= 1;\n\t\t\n\t\thashT._pT()[3].key = 3;\n\t\thashT._pT()[3].val =30;\n\t\thashT._pT()[3].prev= 1;\n\t\thashT._pT()[3].next= 0;\n\t}\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 1; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t// insertion: case11\n\thashT.erase(2);\n//\tprintTable_all(hashT);\n\t{\n\t\t// check: case11\n\t\tASSERT_TRUE( hashT._pT()[1].key == 1 );\n\t\tASSERT_TRUE( hashT._pT()[1].val ==10 );\n\t\tASSERT_TRUE( 
hashT._pT()[1].prev== 0 );\n\t\tASSERT_TRUE( hashT._pT()[1].next== 1 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[2].key == 3 );\n\t\tASSERT_TRUE( hashT._pT()[2].val ==30 );\n\t\tASSERT_TRUE( hashT._pT()[2].prev== 1 );\n\t\tASSERT_TRUE( hashT._pT()[2].next== 0 );\n\t\t\n\t\tASSERT_TRUE( hashT._pT()[3].prev==hashT._maxShift() );\n\t\tASSERT_TRUE( hashT._pT()[3].next== 0 );\n\t}\n}\n//-----------------------------------------------------------------------------------------------------------------------------------------------\nbool stressTest_oneCycle_soft(const uint64 seed, const uint64 limitSize){\n\tsstd::IpCHashT<uint64, uint64> hashT(0);\n\thashT.use_pSize_dbg = true; // enable debug option\n\thashT.pSize_dbg = 0; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n//\tuint64 limitSize = 42949672; // 42949672 elements == 1 G Byte or more\n\t// when bool is 1 bytes, one elements of \"struct sstd_CHashT::element\" is 25 bytes (== 1 + key8 + val8 + p8).\n\t// thus, 1 G Bytes table would have 42949672.96 (== 1*1024*1024*1024 Bytes / 25) elements in rough estimate.\n\t// in attention, we needs to consider elements on sligle linked list and new table when rehashing.\n\t\n\t// insert_hard\n\t{\n\t\tstd::mt19937_64 mt(seed); // pseudo random number generator\n\t\t\n\t\tfor(uint64 i=0; i<limitSize; i++){\n\t\t\tuint64 r = mt();\n\t\t\thashT.insert_hard(r, r);\n\t\t}\n\t\tif(!( hashT.size()==limitSize )){ sstd::pdbg(\"ERROR: stressTest_oneCycle_soft\\n\"); sstd::printn( limitSize ); sstd::printn( hashT.size() ); return false; }\n\t}\n//\tprintTable_all(hashT);\n\t\n\t// find\n\t{\n\t\tstd::mt19937_64 mt(seed); // pseudo random number generator\n\t\t\n\t\tfor(uint64 i=0; i<limitSize; i++){\n\t\t\tuint64 r = mt();\n\t\t\tauto itr = hashT.find(r);\n\t\t\tif(!( itr!=hashT.end() )){ sstd::pdbg(\"ERROR: stressTest_oneCycle_soft\\n\"); sstd::printn(r); return false; }\n\t\t\tif(!( itr.first() ==r )){ sstd::pdbg(\"ERROR: stressTest_oneCycle_soft\\n\"); sstd::printn(r); return false; }\n\t\t\tif(!( itr.second()==r )){ sstd::pdbg(\"ERROR: stressTest_oneCycle_soft\\n\"); sstd::printn(r); return false; }\n\t\t}\n\t}\n\t\n\t// erase\n\t{\n\t\tstd::mt19937_64 mt(seed); // pseudo random number generator\n\t\t\n\t\tfor(uint64 i=0; i<limitSize; i++){\n\t\t\tuint64 r = mt();\n\t\t\thashT.erase(r);\n\t\t}\n\t\tif(!( hashT.size()==0 )){ sstd::pdbg(\"ERROR: stressTest_oneCycle_soft\\n\"); return false; }\n\t}\n\treturn true;\n}\nTEST(sstd_IpCHashT, multiple_rehashing_padding0_soft){\n\t// test of multiple rehasing while rehasing (this is a case of 0 passings for initial table size)\n\tuint64 seed = 4163762552;\n\tuint64 limitSize = 100;\n\tbool ret = stressTest_oneCycle_soft(seed, limitSize);\n\tif(ret==false){ sstd::printn(seed); }\n\tASSERT_TRUE( ret );\n}\nTEST(sstd_IpCHashT, stressTest_soft){\n\t// this is a stress test of chained hash table\n\tstd::random_device rnd;\n\tfor(uint limitSize=0; limitSize<10000; limitSize+=500){\n//\t\tfor(uint i=0; i<1000; i++){\n\t\tfor(uint i=0; i<10; i++){\n\t\t\tuint64 seed = rnd();\n\t\t\tbool ret = stressTest_oneCycle_soft(seed, limitSize);\n\t\t\tif(ret==false){ sstd::printn(seed); }\n\t\t\tASSERT_TRUE( ret );\n\t\t}\n\t}\n}\n//---\nbool stressTest_oneCycle_hard(const uint64 seed, const uint64 limitSize){\n\tsstd::IpCHashT<uint64, uint64> hashT(0);\n\thashT.use_pSize_dbg = true; // enable debug option\n\thashT.pSize_dbg = 0; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n//\tuint64 limitSize = 42949672; // 42949672 elements == 1 G Byte or more\n\t// when 
bool is 1 bytes, one elements of \"struct sstd_CHashT::element\" is 25 bytes (== 1 + key8 + val8 + p8).\n\t// thus, 1 G Bytes table would have 42949672.96 (== 1*1024*1024*1024 Bytes / 25) elements in rough estimate.\n\t// in attention, we needs to consider elements on sligle linked list and new table when rehashing.\n\t\n\t// insert_hard\n\t{\n\t\tstd::mt19937_64 mt(seed); // pseudo random number generator\n\t\t\n\t\tfor(uint64 i=0; i<limitSize; i++){\n\t\t\tuint64 r = mt();\n\t\t\thashT.insert_hard(r, r);\n\t\t}\n\t\tif(!( hashT.size()==limitSize )){ sstd::pdbg(\"ERROR: stressTest_oneCycle_hard\\n\"); sstd::printn( limitSize ); sstd::printn( hashT.size() ); return false; }\n\t}\n//\tprintTable_all(hashT);\n\t\n\t// find\n\t{\n\t\tstd::mt19937_64 mt(seed); // pseudo random number generator\n\t\t\n\t\tfor(uint64 i=0; i<limitSize; i++){\n\t\t\tuint64 r = mt();\n\t\t\tauto itr = hashT.find(r);\n\t\t\tif(!( itr!=hashT.end() )){ sstd::pdbg(\"ERROR: stressTest_oneCycle_hard\\n\"); sstd::printn(r); return false; }\n\t\t\tif(!( itr.first() ==r )){ sstd::pdbg(\"ERROR: stressTest_oneCycle_hard\\n\"); sstd::printn(r); return false; }\n\t\t\tif(!( itr.second()==r )){ sstd::pdbg(\"ERROR: stressTest_oneCycle_hard\\n\"); sstd::printn(r); return false; }\n\t\t}\n\t}\n\t\n\t// erase\n\t{\n\t\tstd::mt19937_64 mt(seed); // pseudo random number generator\n\t\t\n\t\tfor(uint64 i=0; i<limitSize; i++){\n\t\t\tuint64 r = mt();\n\t\t\thashT.erase(r);\n\t\t}\n\t\tif(!( hashT.size()==0 )){ sstd::pdbg(\"ERROR: stressTest_oneCycle_hard\\n\"); return false; }\n\t}\n\treturn true;\n}\nTEST(sstd_IpCHashT, multiple_rehasing_padding0_hard){\n\t// test of multiple rehasing while rehasing (this is a case of 0 passings for initial table size)\n\tuint64 seed = 4163762552;\n\tuint64 limitSize = 100;\n\tbool ret = stressTest_oneCycle_hard(seed, limitSize);\n\tif(ret==false){ sstd::printn(seed); }\n\tASSERT_TRUE( ret );\n}\nTEST(sstd_IpCHashT, stressTest_hard){\n\t// this is a stress test of chained hash table\n\tstd::random_device rnd;\n\tfor(uint limitSize=0; limitSize<10000; limitSize+=500){\n//\t\tfor(uint i=0; i<1000; i++){\n\t\tfor(uint i=0; i<10; i++){\n\t\t\tuint64 seed = rnd();\n\t\t\tbool ret = stressTest_oneCycle_hard(seed, limitSize);\n\t\t\tif(ret==false){ sstd::printn(seed); }\n\t\t\tASSERT_TRUE( ret );\n\t\t}\n\t}\n}\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n// insert for STL compatibility\n\nTEST(sstd_IpCHashT, STL_c_insert_01){\n\t// STL insert (1). 
Ref: https://cpprefjp.github.io/reference/unordered_map/unordered_map/insert.html\n\t\n\tsstd::IpCHashT<uint64, uint64> hashT;\n\tauto itr_TF1 = hashT.insert(std::pair<uint64,uint64>(1,1));\n\tASSERT_TRUE( itr_TF1.second==true );\n\tauto itr_TF2 = hashT.insert(std::pair<uint64,uint64>(1,1));\n\tASSERT_TRUE( itr_TF2.second==false );\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\nbool stressTest_oneCycle__failSafe_of_rehashing__hard(const uint64 seed, const uint64 limitSize){\n\tconst uint64 testFOR_size = 553;\n\t\n\tsstd::IpCHashT<uint64, uint64> hashT(0);\n\thashT.use_pSize_dbg = true; // enable debug option\n\thashT.pSize_dbg = 0; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t// insert_hard\n\t{\n\t\tstd::mt19937_64 mt(seed); // pseudo random number generator\n\t\t \n\t\tfor(uint64 i=0; i<limitSize; i++){\n\t\t\tuint64 r = mt();\n\t\t\thashT.insert_hard(r, r);\n\t\t\t\n\t\t\tif(i==testFOR_size){ hashT.use_testFOR_dbg = true; }\n\t\t}\n\t\t\n\t\tif(!( hashT.size()==limitSize )){ sstd::pdbg(\"ERROR: stressTest_oneCycle_hard\\n\"); sstd::printn( limitSize ); sstd::printn( hashT.size() ); return false; }\n\t}\n//\tprintTable_all(hashT);\n\t\n\t// find\n\t{\n\t\tstd::mt19937_64 mt(seed); // pseudo random number generator\n\t\t\n\t\tfor(uint64 i=0; i<limitSize; i++){\n\t\t\tuint64 r = mt();\n\t\t\tauto itr = hashT.find(r);\n\t\t\tif(!( itr!=hashT.end() )){ sstd::pdbg(\"ERROR: stressTest_oneCycle_hard\\n\"); sstd::printn(r); return false; }\n\t\t\tif(!( itr.first() ==r )){ sstd::pdbg(\"ERROR: stressTest_oneCycle_hard\\n\"); sstd::printn(r); return false; }\n\t\t\tif(!( itr.second()==r )){ sstd::pdbg(\"ERROR: stressTest_oneCycle_hard\\n\"); sstd::printn(r); return false; }\n\t\t}\n\t}\n\t\n\t// erase\n\t{\n\t\tstd::mt19937_64 mt(seed); // pseudo random number generator\n\t\t\n\t\tfor(uint64 i=0; i<limitSize; i++){\n\t\t\tuint64 r = mt();\n\t\t\thashT.erase(r);\n\t\t}\n\t\tif(!( hashT.size()==0 )){ sstd::pdbg(\"ERROR: stressTest_oneCycle_hard\\n\"); return false; }\n\t}\n\treturn true;\n}\nTEST(sstd_IpCHashT, stressTest__failSafe_of_rehashing__hard){\n\t// this is a stress test of chained hash table\n\tstd::random_device rnd;\n\tfor(uint limitSize=0; limitSize<10000; limitSize+=500){\n\t\tfor(uint i=0; i<1000; i++){\n//\t\tfor(uint i=0; i<10; i++){\n\t\t\tuint64 seed = rnd();\n\t\t\tbool ret = stressTest_oneCycle__failSafe_of_rehashing__hard(seed, limitSize);\n\t\t\tif(ret==false){ sstd::printn(seed); }\n\t\t\tASSERT_TRUE( ret );\n\t\t}\n\t}\n}\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\nuint64 size2interval(const uint64 size){\n\tuint8 order = (uint8)std::log10((double)size);\n\tuint64 interval = (uint64)std::pow(10, order);\n\tif (order<1){ return interval;\n\t}else if(order<2){ return interval/2;\n\t}else if(order<3){ return interval/4;\n\t}else if(order<5){ return interval/10;\n\t}else { return interval/10;\n\t}\n}\n\ntemplate<typename T_hashTable>\nuint64 get_sizeByItr(T_hashTable& hashT){\n\tuint64 count=0;\n\tfor(auto itr=hashT.begin(); itr!=hashT.end(); ++itr){\n\t\tcount++;\n\t}\n\treturn count;\n}\ntemplate<typename T_hashTable>\nvoid dump(T_hashTable& hashT){\n\tfor(auto itr=hashT.begin(); itr!=hashT.end(); ++itr){\n\t\tprintf(\"index: %5lu, first: %20lu, second: %20lu, prev: %u, next: %u\\n\", itr.index(), itr.first(), itr.second(), itr.prev(), 
itr.next());\n\t}\n}\n\ntemplate<typename T_hashTable>\nbool test_bench_find(T_hashTable& hashT, const uint64 limitSize, uint64 seed1, uint64 seed2){\n\tstd::mt19937_64 rand (seed1); // pseudo random number generator\n\tstd::mt19937_64 rand_toFind(seed2); // pseudo random number generator\n\t\n\tuint64 interval = 1;\n\tstd::vector<uint64> vecR(limitSize); vecR.clear();\n\t\n\tuint64 numFound=0ull, numNotFound=0ull;\n\tuint64 count=0ull; // dor dbg\n\tfor(;;){\n\t\t// insert\n\t\tfor(uint i=0; i<interval; i++){\n\t\t\tuint64 r = rand();\n\t\t\tvecR <<= r;\n\t\t\thashT[r] = r;\n\t\t}\n\t\t\n\t\tstd::vector<uint64> vecR_toFind(vecR.size());\n\t\t{\n\t\t\tstd::uniform_int_distribution<uint64> range(0, vecR.size()-1); // make randome number between [0, vecR.size()-1].\n\t\t\tfor(uint i=0; i<interval; i++){\n\t\t\t\tvecR_toFind[i] = vecR[range(rand_toFind)];\n\t\t\t}\n\t\t}\n\t\t\n\t\t// find (all elements are found)\n\t\tfor(uint i=0; i<interval; i++){\n\t\t\tauto itr = hashT.find( vecR_toFind[i] );\n\t\t\tcount++;\n\t\t\tif(itr!=hashT.end()){ numFound++; }else{ numNotFound++; goto Exit; }\n\t\t}\n\t\t\n\t\tinterval = size2interval(hashT.size());\n\t\tif(hashT.size()+interval>limitSize){ break; }\n\t}\n Exit:\n\tif(numNotFound!=0ull){\n\t\tprintf(\"ERROR: %lu / %lu = %lf\\n\", numFound, numFound+numNotFound, (double)numFound/(double)(numFound+numNotFound));\n\t\tprintf(\" sizeByItr: %lu\\n\", get_sizeByItr(hashT));\n\t\tprintf(\" tSize : %lu\\n\", hashT._tSize());\n//\t\tprintf(\" tSize_m1: %lu\\n\", hashT._tSize_m1());\n\t\tdump(hashT);\n\t\treturn false;\n\t}\n\treturn true;\n}\nTEST(sstd_IpCHashT, stressTest__for__maxLF50){\n\tstd::random_device seed_gen;\n\tuint64 seed1 = seed_gen();\n\tuint64 seed2 = seed_gen();\n//\tuint64 seed1 = 2781098060;\n//\tuint64 seed2 = 2899935815;\n\t\n\tconst uint64 initSize_wRehash = 0ull;\n\tconst uint64 limitSize = 5000000;\n\t\n\tsstd::IpCHashT<uint64,uint64,std::hash<uint64>,std::equal_to<uint64>,uint8, sstd::IpCHashT_opt::maxLF50> hashT(initSize_wRehash);\n\tbool ret = test_bench_find(hashT, limitSize, seed1, seed2);\n\tif(!ret){\n\t\tprintf(\"seed1: %lu, seed2: %lu\\n\", seed1, seed2);\n\t}\n\tASSERT_TRUE( ret );\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\nTEST(sstd_IpCHashT, OPE_bracket){\n\t// []\n\t\n\t{\n\t\tsstd::IpCHashT<uint64, uint64> hashT;\n\t\thashT[1] = 10; // insert()\n\t\t\n\t\tauto itr = hashT.find(1);\n\t\tASSERT_TRUE( itr!=hashT.end() );\n\t\tuint64 ret = hashT[1]; // find()\n\t\tASSERT_TRUE( ret==10 );\n\t}\n\t{\n\t\tsstd::IpCHashT<uint64, uint64> hashT;\n\t\tuint64 ret = hashT[1]; // insert() T_key, init emply T_val and return empty T_val.\n\t\tret++; // avoiding \"warning: unused variable ‘ret’\"\n\t}\n}\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n" }, { "alpha_fraction": 0.6010448932647705, "alphanum_fraction": 0.6311319470405579, "avg_line_length": 51.12709426879883, "blob_id": "1cc38eed901d74c88a1b187603f130b64f6527fa", "content_id": "a0725b69f8e3499da3b26877e62965e2c7eebe88", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 37368, "license_type": "no_license", "max_line_length": 388, "num_lines": 716, "path": "/bench.hpp", "repo_name": "admiswalker/InPlaceChainedHashTable-IpCHashT-", "src_encoding": "UTF-8", "text": "#include <time.h>\n#include <random>\n\n#include 
<unordered_map>\n#include \"./CHashT.hpp\"\n#include \"./IpCHashT.hpp\"\n#include <sparsehash/dense_hash_map> // sparsehash-master\n#include \"./flat_hash_map-master/flat_hash_map.hpp\"\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n// definitions\n//*\ntypedef std::unordered_map<uint64,uint64> uHashT;\ntypedef sstd::CHashT<uint64,uint64> cHashT;\ntypedef sstd::IpCHashT_u8hS<uint64,uint64> iHashT_u8h; // uint8, half (maxLoadfactor50), Successful search major option\ntypedef sstd::IpCHashT_u8fS<uint64,uint64> iHashT_u8f; // uint8, full (maxLoadfactor100), Successful search major option\ntypedef sstd::IpCHashT_u16fS<uint64,uint64> iHashT_u16; // uint16, full (maxLoadfactor100), Successful search major option\ntypedef google::dense_hash_map<uint64,uint64> dHashT;\ntypedef ska::flat_hash_map<uint64,uint64,ska::power_of_two_std_hash<uint64>> fHashT;\n//*/\n/*\ntypedef std::unordered_map<uint64,uint64> uHashT;\ntypedef sstd::CHashT<uint64,uint64> cHashT;\ntypedef sstd::IpCHashT_u8hU<uint64,uint64> iHashT_u8h; // uint8, half (maxLoadfactor50), Unsuccessful search major option\ntypedef sstd::IpCHashT_u8fU<uint64,uint64> iHashT_u8f; // uint8, full (maxLoadfactor100), Unsuccessful search major option\ntypedef sstd::IpCHashT_u16fU<uint64,uint64> iHashT_u16; // uint16, full (maxLoadfactor100), Unsuccessful search major option\ntypedef google::dense_hash_map<uint64,uint64> dHashT;\ntypedef ska::flat_hash_map<uint64,uint64,ska::power_of_two_std_hash<uint64>> fHashT;\n//*/\n#define RUN_BENCH(vvecX, vvecY, initSize, limitSize, Fnc)\t\t\t\t\\\n\tstd::vector<double> vecX_u, vecX_c, vecX_i8h, vecX_i8f, vecX_i16, vecX_d, vecX_f; \\\n\tstd::vector<double> vecY_u, vecY_c, vecY_i8h, vecY_i8f, vecY_i16, vecY_d, vecY_f; \\\n\t{ uHashT hashT(initSize); Fnc(hashT, limitSize, vecX_u, vecY_u ); } \\\n\t{ cHashT hashT(initSize); Fnc(hashT, limitSize, vecX_c, vecY_c ); } \\\n\t{ iHashT_u8h hashT(initSize); Fnc(hashT, limitSize, vecX_i8h, vecY_i8h); } \\\n\t{ iHashT_u8f hashT(initSize); Fnc(hashT, limitSize, vecX_i8f, vecY_i8f); } \\\n\t{ iHashT_u16 hashT(initSize); Fnc(hashT, limitSize, vecX_i16, vecY_i16); } \\\n\t{ dHashT hashT(initSize); hashT.set_empty_key(0ull); /* this meen that 'NULL' will not be able to insert as a key-value. */ \\\n\t Fnc(hashT, limitSize, vecX_d, vecY_d ); } \\\n\t{ fHashT hashT(initSize); Fnc(hashT, limitSize, vecX_f, vecY_f ); } \\\n\tvvecX={vecX_u, vecX_c, vecX_i8h, vecX_i8f, vecX_i16, vecX_d, vecX_f}; \\\n\tvvecY={vecY_u, vecY_c, vecY_i8h, vecY_i8f, vecY_i16, vecY_d, vecY_f};\n\n#define RUN_BENCH_withErase(vvecX, vvecY, initSize, limitSize, Fnc)\t\t\\\n\tstd::vector<double> vecX_u, vecX_c, vecX_i8h, vecX_i8f, vecX_i16, vecX_d, vecX_f; \\\n\tstd::vector<double> vecY_u, vecY_c, vecY_i8h, vecY_i8f, vecY_i16, vecY_d, vecY_f; \\\n\t{ uHashT hashT(initSize); Fnc(hashT, limitSize, vecX_u, vecY_u ); } \\\n\t{ cHashT hashT(initSize); Fnc(hashT, limitSize, vecX_c, vecY_c ); } \\\n\t{ iHashT_u8h hashT(initSize); Fnc(hashT, limitSize, vecX_i8h, vecY_i8h); } \\\n\t{ iHashT_u8f hashT(initSize); Fnc(hashT, limitSize, vecX_i8f, vecY_i8f); } \\\n\t{ iHashT_u16 hashT(initSize); Fnc(hashT, limitSize, vecX_i16, vecY_i16); } \\\n\t{ dHashT hashT(initSize); hashT.set_empty_key(0ull); hashT.set_deleted_key(1ull); /* this meen that 'NULL' and '1' will not be able to insert as a key-value. 
*/ \\\n\t Fnc(hashT, limitSize, vecX_d, vecY_d ); } \\\n\t{ fHashT hashT(initSize); Fnc(hashT, limitSize, vecX_f, vecY_f ); } \\\n\tvvecX={vecX_u, vecX_c, vecX_i8h, vecX_i8f, vecX_i16, vecX_d, vecX_f}; \\\n\tvvecY={vecY_u, vecY_c, vecY_i8h, vecY_i8f, vecY_i16, vecY_d, vecY_f};\n\n#define BENCH_to_CSV(savePath, vvecX, vvecY, vvecHeader)\t\t\t\t\\\n\tsstd::vvec< double> vvec = {vvecX[0]}; for(uint i=0;i<vvecY.size();i++){ vvec <<= {vvecY[i]}; } \\\n\tsstd::vvec<std::string> vvec_str = sstd::double2str(sstd::Tr(vvec)); \\\n\tsstd::vvec<std::string> vvec_csv = vvecHeader << vvec_str;\t\t\t\\\n\tsstd::vvec2csv(savePath, vvec_csv);\n\t\n\t// vvecX[0], vvecY[0], vvecY[1], vvecY[2], vvecY[3], vvecY[4], vvecY[5], vvecY[6]\n\t// [count], uHashT [query/μs], cHashT [query/μs], iHashT_u8h [query/μs], iHashT_u8f [query/μs], iHashT_u16 [query/μs], dHashT [query/μs], fHashT [query/μs]\n\t// 0 , \n\t// 1 , \n\t// ︙ , \n\t// ︙ , \n\nstd::string hashT2typeStr(const uHashT & hashT){ return std::string(\"uHashT\" ); }\nstd::string hashT2typeStr(const cHashT & hashT){ return std::string(\"cHashT\" ); }\nstd::string hashT2typeStr(const iHashT_u8h& hashT){ return std::string(\"iHashT_u8h\"); }\nstd::string hashT2typeStr(const iHashT_u8f& hashT){ return std::string(\"iHashT_u8f\"); }\nstd::string hashT2typeStr(const iHashT_u16& hashT){ return std::string(\"iHashT_u16\"); }\nstd::string hashT2typeStr(const dHashT & hashT){ return std::string(\"dHashT\" ); }\nstd::string hashT2typeStr(const fHashT & hashT){ return std::string(\"fHashT\" ); }\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n// common functions\n\n/*\nuint64 size2interval(const uint64 size){\n\tuint8 order = (uint8)std::log10((double)size);\n\tuint64 interval = (uint64)std::pow(10, order);\n\tif(interval!=1){interval/=10;}\n\treturn interval;\n}//*/\n//*\nuint64 size2interval(const uint64 size){\n\tuint8 order = (uint8)std::log10((double)size);\n\tuint64 interval = (uint64)std::pow(10, order);\n\tif (order<1){ return interval;\n\t}else if(order<2){ return interval/2;\n\t}else if(order<3){ return interval/4;\n\t}else if(order<5){ return interval/10;\n\t}else { return interval/10;\n\t}\n}//*/\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n// used memory size\n\ntemplate<typename T_hashTable>\nvoid bench_usedMemory(T_hashTable& hashT, const uint64 limitSize, std::vector<double>& vecX_num, std::vector<double>& vecY_GB, const double baseSize_GB){\n\tstd::random_device seed_gen;\n\tstd::mt19937_64 rand(seed_gen()); // pseudo random number generator\n\t\n\tvecY_GB <<= baseSize_GB; // base size of allocated memory\n\tvecX_num <<= 0;\n\t\n//\tuint64 splitNum = 100;\n\tuint64 splitNum = 200;\n//\tuint64 splitNum = 1000;\n\tuint64 interval = limitSize/splitNum;\n\tuint64 tSize_prev = hashT.bucket_count(); // table size\n\tfor(uint sn=0; sn<splitNum; sn++){\n\t\tfor(uint i=0; i<interval; i++){\n\t\t\tuint64 r = rand();\n\t\t\thashT.insert(std::pair<uint64,uint64>(r,r));\n\t\t}\n\t\t\n\t\tdouble h_GB = sstd::status_VmHWM() / (1024.0*1024.0);\n\t\tdouble r_GB = sstd::status_VmRSS() / (1024.0*1024.0);\n\t\tuint64 tSize = hashT.bucket_count();\n\t\tdouble GB; if(tSize > tSize_prev){ GB=h_GB; tSize_prev=tSize; }else{ GB=r_GB; }\n\t\tvecY_GB <<= GB;\n\t\tvecX_num <<= hashT.size();\n\t}\n\tvecY_GB -= (double)vecY_GB[0];\n}\n//---\nvoid vvec2plot_usedMemory(const 
std::string& savePath, const std::vector<std::string>& saveAs, const sstd::vvec<double>& vvecX, const sstd::vvec<double>& vvecY){\n\tconst char* xlabel = \"Number of elements on the table [conut]\";\n\tconst char* ylabel = \"Allocated memory size [GB]\";\n\tstd::vector<std::string> vecLabel={\"std::unordered_map<uint64,uint64>\", \"sstd::CHashT<uint64,uint64>\", \"sstd::IpCHashT<uint64,uint64> (as uint8 and maxLF50)\", \"sstd::IpCHashT<uint64,uint64> (as uint8 and maxLF100)\", \"sstd::IpCHashT<uint64,uint64> (as uint16 and maxLF100)\", \"google::dense_hash_map<uint64,uint64>\", \"ska::flat_hash_map<uint64,uint64,ska::power_of_two_std_hash<uint64>>\"};\n\t\n\t// plot2fig\n\tconst char* tmpDir = \"./tmpDir\";\n\tconst char* fileName = \"plots\";\n\tconst char* funcName = \"vvec2graph_memory\";\n\tsstd::c2py<void> vvec2graph(tmpDir, fileName, funcName, \"void, const str, const vec<str>*, const char*, const char*, const vec<str>*, const vvec<double>*, const vvec<double>*\");\n\tvvec2graph(savePath, &saveAs, xlabel, ylabel, &vecLabel, &vvecX, &vvecY);\n}\n//---\nvoid bench2csv_usedMomory(const std::string& savePath, const sstd::vvec<std::string>& vvecHeader, const sstd::vec<double>& vecX, const sstd::vec<double>& vecY){\n\tsstd::vvec< double> vvec = {vecX, vecY};\n\tsstd::vvec<std::string> vvec_str = sstd::double2str(sstd::Tr(vvec));\n\tsstd::vvec<std::string> vvec_csv = vvecHeader << vvec_str;\n\tsstd::vvec2csv(savePath, vvec_csv);\n}\n//---\nvoid call_from__main_bench_usedMemory(int& argc, char**& argv){\n\t\n\tuint argIdx=0;\n\tconst std::string fileName = argv[argIdx++];\n\tconst std::string option = argv[argIdx++];\n\tconst std::string savePath = argv[argIdx++];\n\t\n\tconst uint64 initSize = sstd::str2int(argv[argIdx++]);\n\tconst uint64 limitSize = sstd::str2int(argv[argIdx++]);\n\t\n\tconst std::string xLabel = argv[argIdx++];\n\tconst std::string yLabel = argv[argIdx++];\n\t\n\tstd::vector<double> vecX; // table size\n\tstd::vector<double> vecY; // memory size [GB]\n\tdouble baseSize_GB = sstd::status_VmRSS()/(1024.0*1024.0);\n\tif (sstd::strcmp(\"uHashT\", option)){ uHashT hashT(initSize); bench_usedMemory(hashT, limitSize, vecX, vecY, baseSize_GB);\n\t}else if(sstd::strcmp(\"cHashT\", option)){ cHashT hashT(initSize); bench_usedMemory(hashT, limitSize, vecX, vecY, baseSize_GB);\n\t}else if(sstd::strcmp(\"iHashT_u8h\", option)){ iHashT_u8h hashT(initSize); bench_usedMemory(hashT, limitSize, vecX, vecY, baseSize_GB);\n\t}else if(sstd::strcmp(\"iHashT_u8f\", option)){ iHashT_u8f hashT(initSize); bench_usedMemory(hashT, limitSize, vecX, vecY, baseSize_GB);\n\t}else if(sstd::strcmp(\"iHashT_u16\", option)){ iHashT_u16 hashT(initSize); bench_usedMemory(hashT, limitSize, vecX, vecY, baseSize_GB);\n\t}else if(sstd::strcmp(\"dHashT\", option)){ dHashT hashT(initSize); hashT.set_empty_key(0ull); /* this meen that 'NULL' will not be able to insert as a key-value. 
*/\n\t bench_usedMemory(hashT, limitSize, vecX, vecY, baseSize_GB);\n\t}else if(sstd::strcmp(\"fHashT\", option)){ fHashT hashT(initSize); bench_usedMemory(hashT, limitSize, vecX, vecY, baseSize_GB);\n\t}\n\t\n\tsstd::vvec<std::string> vvecHeader = {{xLabel, yLabel}};\n\tbench2csv_usedMomory(savePath, vvecHeader, vecX, vecY);\n}\n//---\nvoid call__main_bench_usedMemory(const std::string& saveDir, const char* num, const char* option, const char* xLabel, const char* yLabel, const uint64 initSize, const uint64 limitSize){\n\tconst std::string savePath = saveDir+'/'+num+'_'+option+\".csv\";\n\tint ret = sstd::system(sstd::ssprintf(\"./exe_bench_uM %s %s %d %d %s %s\", option, savePath.c_str(), initSize, limitSize, xLabel, yLabel));\n\tif(WIFEXITED(ret)<=0){ printf(\"ERROR:\\n\"); }\n}\nvoid bench2csv_usedMemory_runBench(const std::string& saveDir, const uint64 initSize, const uint64 limitSize){\n\tcall__main_bench_usedMemory(saveDir, \"0\", \"uHashT\", \"[count]\", \"uHashT [GB]\", initSize, limitSize);\n\tcall__main_bench_usedMemory(saveDir, \"1\", \"cHashT\", \"[count]\", \"cHashT [GB]\", initSize, limitSize);\n\tcall__main_bench_usedMemory(saveDir, \"2\", \"iHashT_u8h\", \"[count]\", \"iHashT_u8h [GB]\", initSize, limitSize);\n\tcall__main_bench_usedMemory(saveDir, \"3\", \"iHashT_u8f\", \"[count]\", \"iHashT_u8f [GB]\", initSize, limitSize);\n\tcall__main_bench_usedMemory(saveDir, \"4\", \"iHashT_u16\", \"[count]\", \"iHashT_u16 [GB]\", initSize, limitSize);\n\tcall__main_bench_usedMemory(saveDir, \"5\", \"dHashT\", \"[count]\", \"dHashT [GB]\", initSize, limitSize);\n\tcall__main_bench_usedMemory(saveDir, \"6\", \"fHashT\", \"[count]\", \"fHashT [GB]\", initSize, limitSize);\n}\nvoid bench2csv_usedMemory_meargeResult(const std::string& savePath, const std::string& tmpDir){\n\tstd::string csvPath = tmpDir+ \"/*\";\n\t\n\tstd::vector<std::string> vecPath = sstd::glob(csvPath);\n\tsstd::vvec<double> vvecX(vecPath.size()), vvecY(vecPath.size());\n\tfor(uint i=0; i<vecPath.size(); i++){\n\t\tsstd::vvec<std::string> vvecOrig = sstd::csv2vvec(vecPath[i]);\n\t\tsstd::vvec<std::string> header = vvecOrig && sstd::slice_mv(sstd::begin(), 1);\n\t\tsstd::vvec<std::string> vvecStr = vvecOrig && sstd::slice_mv(1, sstd::end());\n\t\tsstd::vvec< double> vvecD = sstd::Tr(sstd::str2double(vvecStr));\n\t\tvvecX[i] <<= vvecD[0];\n\t\tvvecY[i] <<= vvecD[1];\n\t}\n\tsstd::vvec<std::string> vvecHeader = {{\"[count]\", \"uHashT [GB]\", \"cHashT [GB]\", \"iHashT_u8h [GB]\", \"iHashT_u8f [GB]\", \"iHashT_u16 [GB]\", \"dHashT [GB]\", \"fHashT [GB]\"}};\n\tBENCH_to_CSV(savePath, vvecX, vvecY, vvecHeader);\n}\nvoid bench2csv_usedMemory(const std::string& savePath, const std::string& tmpDir, const uint64 initSize, const uint64 limitSize){\n\tbench2csv_usedMemory_runBench(tmpDir, initSize, limitSize);\n\tbench2csv_usedMemory_meargeResult(savePath, tmpDir);\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n// insert\n\ntemplate<typename T_hashTable>\nvoid bench_insert(T_hashTable& hashT, const uint64 limitSize, std::vector<double>& vecX_num, std::vector<double>& vecY_quely_per_us){\n\tstd::random_device seed_gen;\n\tstd::mt19937_64 rand(seed_gen()); // pseudo random number generator\n\t\n\tuint64 interval = 1;\n\t\n\tfor(;;){\n\t\tstd::vector<uint64> vecR_toInsert(interval);\n\t\tfor(uint i=0; i<interval; i++){\n\t\t\tvecR_toInsert[i] = rand();\n\t\t}\n\t\t\n\t\ttime_m timem; 
sstd::measureTime_start(timem);\n\t\tfor(uint i=0; i<interval; i++){\n\t\t\tuint64 r = vecR_toInsert[i];\n\t\t\thashT.insert(std::pair<uint64,uint64>(r,r)); // insert with \"hashT[r] = r;\" will slow down dense_hash_map and IpCHashT-successfulMajorOption.\n\t\t}\n\t\tdouble nsec = sstd::measureTime_stop_ns(timem);\n\t\t\n\t\tvecX_num <<= hashT.size();\n\t\tvecY_query_per_us <<= ((double)interval * 1000.0) / (nsec);\n\t\t\n\t\tinterval = size2interval(hashT.size());\n\t\tif(hashT.size()+interval>limitSize){ break; }\n\t}\n}\n//---\nvoid vvec2plot_insert(const std::string& savePath, const std::vector<std::string>& saveAs, const sstd::vvec<double>& vvecX, const sstd::vvec<double>& vvecY){\n\tconst char* xlabel = \"Number of elements on the table [count]\";\n\tconst char* ylabel = \"Insertion speed [query/μs]\";\n\tstd::vector<std::string> vecLabel={\"std::unordered_map<uint64,uint64>\", \"sstd::CHashT<uint64,uint64>\", \"sstd::IpCHashT<uint64,uint64> (as uint8 and maxLF50)\", \"sstd::IpCHashT<uint64,uint64> (as uint8 and maxLF100)\", \"sstd::IpCHashT<uint64,uint64> (as uint16 and maxLF100)\", \"google::dense_hash_map<uint64,uint64>\", \"ska::flat_hash_map<uint64,uint64,ska::power_of_two_std_hash<uint64>>\"};\n\t\n\t// plot2fig\n\tconst char* tmpDir = \"./tmpDir\";\n\tconst char* fileName = \"plots\";\n\tconst char* funcName = \"vvec2graph\";\n\tsstd::c2py<void> vvec2graph(tmpDir, fileName, funcName, \"void, const str, const vec<str>*, const char*, const char*, const vec<str>*, const vvec<double>*, const vvec<double>*\");\n\tvvec2graph(savePath, &saveAs, xlabel, ylabel, &vecLabel, &vvecX, &vvecY);\n}\nvoid bench2plot_insert(const std::string& savePath, const std::vector<std::string>& saveAs, const uint64 initSize, const uint64 limitSize){\n\tstd::vector<std::vector<double>> vvecX, vvecY;\n\tRUN_BENCH(vvecX, vvecY, initSize, limitSize, bench_insert);\n\t\n\tvvec2plot_insert(savePath, saveAs, vvecX, vvecY);\n}\n//---\nvoid bench2csv_insert(const std::string& savePath, const uint64 initSize, const uint64 limitSize){\n\tstd::vector<std::vector<double>> vvecX, vvecY;\n\tRUN_BENCH(vvecX, vvecY, initSize, limitSize, bench_insert);\n\t\n\tsstd::vvec<std::string> vvecHeader = {{\"[count]\", \"uHashT [query/μs]\", \"cHashT [query/μs]\", \"iHashT_u8h [query/μs]\", \"iHashT_u8f [query/μs]\", \"iHashT_u16 [query/μs]\", \"dHashT [query/μs]\", \"fHashT [query/μs]\"}};\n\tBENCH_to_CSV(savePath, vvecX, vvecY, vvecHeader);\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n// insert (elapsed time)\n\ntemplate<typename T_hashTable>\nvoid bench_insert_et(T_hashTable& hashT, const uint64 limitSize, std::vector<double>& vecX_num, std::vector<double>& vecY_s){\n\tstd::random_device seed_gen;\n\tstd::mt19937_64 rand(seed_gen()); // pseudo random number generator\n\tuint64 splitNum = 100;\n\tuint64 interval = limitSize/splitNum;\n\t\n\tdouble totalTime_sec = 0.0;\n\tvecX_num <<= hashT.size();\n\tvecY_s <<= totalTime_sec;\n\tfor(uint sn=0; sn<splitNum; sn++){\n\t\tstd::vector<uint64> vecR_toInsert(interval);\n\t\tfor(uint i=0; i<interval; i++){\n\t\t\tvecR_toInsert[i] = rand();\n\t\t}\n\t\t\n\t\ttime_m timem; sstd::measureTime_start(timem);\n\t\tfor(uint i=0; i<interval; i++){\n\t\t\tuint64 r = vecR_toInsert[i];\n\t\t\thashT.insert(std::pair<uint64,uint64>(r,r)); // insert with \"hashT[r] = r;\" will slow down dense_hash_map and IpCHashT-successfulMajorOption.\n\t\t}\n\t\tdouble ms = 
sstd::measureTime_stop_ms(timem);\n\t\ttotalTime_sec += ms / 1000.0;\n\t\tvecX_num <<= hashT.size();\n\t\tvecY_s <<= totalTime_sec;\n\t}\n}\n//---\nvoid vvec2plot_insert_et(const std::string& savePath, const std::vector<std::string>& saveAs, const sstd::vvec<double>& vvecX, const sstd::vvec<double>& vvecY){\n\tconst char* xlabel = \"Number of elements on the table [count]\";\n\tconst char* ylabel = \"Elapsed time [sec]\";\n\tstd::vector<std::string> vecLabel={\"std::unordered_map<uint64,uint64>\", \"sstd::CHashT<uint64,uint64>\", \"sstd::IpCHashT<uint64,uint64> (as uint8 and maxLF50)\", \"sstd::IpCHashT<uint64,uint64> (as uint8 and maxLF100)\", \"sstd::IpCHashT<uint64,uint64> (as uint16 and maxLF100)\", \"google::dense_hash_map<uint64,uint64>\", \"ska::flat_hash_map<uint64,uint64,ska::power_of_two_std_hash<uint64>>\"};\n\t\n\t// plot2fig\n\tconst char* tmpDir = \"./tmpDir\";\n\tconst char* fileName = \"plots\";\n\tconst char* funcName = \"vvec2graph_et_insert\";\n\tsstd::c2py<void> vvec2graph(tmpDir, fileName, funcName, \"void, const str, const vec<str>*, const char*, const char*, const vec<str>*, const vvec<double>*, const vvec<double>*\");\n\tvvec2graph(savePath, &saveAs, xlabel, ylabel, &vecLabel, &vvecX, &vvecY);\n}\nvoid bench2plot_insert_et(const std::string& savePath, const std::vector<std::string>& saveAs, const uint64 initSize, const uint64 limitSize){\n\tstd::vector<std::vector<double>> vvecX, vvecY;\n\tRUN_BENCH(vvecX, vvecY, initSize, limitSize, bench_insert_et);\n\t\n\tvvec2plot_insert_et(savePath, saveAs, vvecX, vvecY);\n}\n//---\nvoid bench2csv_insert_et(const std::string& savePath, const uint64 initSize, const uint64 limitSize){\n\tstd::vector<std::vector<double>> vvecX, vvecY;\n\tRUN_BENCH(vvecX, vvecY, initSize, limitSize, bench_insert_et);\n\t\n\tsstd::vvec<std::string> vvecHeader = {{\"[count]\", \"uHashT [sec]\", \"cHashT [sec]\", \"iHashT_u8h [sec]\", \"iHashT_u8f [sec]\", \"iHashT_u16 [sec]\", \"dHashT [sec]\", \"fHashT [sec]\"}};\n\tBENCH_to_CSV(savePath, vvecX, vvecY, vvecHeader);\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n// find: successful search\n\ntemplate<typename T_hashTable>\nvoid bench_find(T_hashTable& hashT, const uint64 limitSize, std::vector<double>& vecX_num, std::vector<double>& vecY_query_per_us){\n\tstd::random_device seed_gen;\n\tuint64 seed1 = seed_gen();\n\tuint64 seed2 = seed_gen();\n\tstd::mt19937_64 rand (seed1); // pseudo random number generator\n\tstd::mt19937_64 rand_toFind(seed2); // pseudo random number generator\n\t\n\tuint64 interval = 1;\n\tstd::vector<uint64> vecR(limitSize); vecR.clear();\n\t\n\tuint64 numFound=0ull, numNotFound=0ull;\n\tfor(;;){\n\t\tfor(uint i=0; i<interval; i++){\n\t\t\tuint64 r = rand();\n\t\t\tvecR <<= r;\n\t\t\thashT.insert(std::pair<uint64,uint64>(r,r));\n\t\t}\n\t\t\n\t\tvecX_num <<= hashT.size();\n\t\t\n\t\tstd::vector<uint64> vecR_toFind(vecR.size());\n\t\t{\n\t\t\tstd::uniform_int_distribution<uint64> range(0, vecR.size()-1); // make random numbers between [0, vecR.size()-1].\n\t\t\tfor(uint i=0; i<interval; i++){\n\t\t\t\tvecR_toFind[i] = vecR[range(rand_toFind)];\n\t\t\t}\n\t\t}\n\t\t\n\t\t// find (all elements are found)\n\t\ttime_m timem; sstd::measureTime_start(timem);\n\t\tfor(uint i=0; i<interval; i++){\n//\t\t\tuint64 keyVal = vecR_toFind[i];\n//\t\t\tif(hashT[keyVal] != keyVal){ sstd::pdbg(\"ERROR: key val is not same.\"); exit(-1); } // using operator[] will slow down 
google::dense_hash_map and std::unordered_map\n\t\t\tauto itr = hashT.find( vecR_toFind[i] );\n\t\t\tif(itr!=hashT.end()){ numFound++; }else{ numNotFound++; printf(\"seed1: %lu, seed2: %lu\\n\", seed1, seed2); }\n\t\t}\n\t\tdouble nsec = sstd::measureTime_stop_ns(timem);\n\t\tvecY_query_per_us <<= ((double)interval * 1000.0) / (nsec);\n\t\t\n\t\tinterval = size2interval(hashT.size());\n\t\tif(hashT.size()+interval>limitSize){ break; }\n\t}\n\tif(numNotFound!=0ull){\n\t\tprintf(\"ERROR: in bench_find(): \"); // There are some bugs in the software or the keys are actually colliding.\n\t\tprintf(\"%lu / %lu = %lf\\n\", numFound, numFound+numNotFound, (double)numFound/(double)(numFound+numNotFound));\n\t}\n}\n//---\nvoid vvec2plot_find(const std::string& savePath, const std::vector<std::string>& saveAs, const sstd::vvec<double>& vvecX, const sstd::vvec<double>& vvecY){\n\tconst char* xlabel = \"Number of elements on the table [count]\";\n\tconst char* ylabel = \"Successful search speed [query/μs]\";\n\tstd::vector<std::string> vecLabel={\"std::unordered_map<uint64,uint64>\", \"sstd::CHashT<uint64,uint64>\", \"sstd::IpCHashT<uint64,uint64> (as uint8 and maxLF50)\", \"sstd::IpCHashT<uint64,uint64> (as uint8 and maxLF100)\", \"sstd::IpCHashT<uint64,uint64> (as uint16 and maxLF100)\", \"google::dense_hash_map<uint64,uint64>\", \"ska::flat_hash_map<uint64,uint64,ska::power_of_two_std_hash<uint64>>\"};\n\t\n\t// plot2fig\n\tconst char* tmpDir = \"./tmpDir\";\n\tconst char* fileName = \"plots\";\n\tconst char* funcName = \"vvec2graph\";\n\tsstd::c2py<void> vvec2graph(tmpDir, fileName, funcName, \"void, const str, const vec<str>*, const char*, const char*, const vec<str>*, const vvec<double>*, const vvec<double>*\");\n\tvvec2graph(savePath, &saveAs, xlabel, ylabel, &vecLabel, &vvecX, &vvecY);\n}\nvoid bench2plot_find(const std::string& savePath, const std::vector<std::string>& saveAs, const uint64 initSize, const uint64 limitSize){\n\tstd::vector<std::vector<double>> vvecX, vvecY;\n\tRUN_BENCH(vvecX, vvecY, initSize, limitSize, bench_find);\n\t\n\tvvec2plot_find(savePath, saveAs, vvecX, vvecY);\n}\n//---\nvoid bench2csv_find(const std::string& savePath, const uint64 initSize, const uint64 limitSize){\n\tsstd::vvec<double> vvecX, vvecY;\n\tRUN_BENCH(vvecX, vvecY, initSize, limitSize, bench_find);\n\t\n\tsstd::vvec<std::string> vvecHeader = {{\"[count]\", \"uHashT [query/μs]\", \"cHashT [query/μs]\", \"iHashT_u8h [query/μs]\", \"iHashT_u8f [query/μs]\", \"iHashT_u16 [query/μs]\", \"dHashT [query/μs]\", \"fHashT [query/μs]\"}};\n\tBENCH_to_CSV(savePath, vvecX, vvecY, vvecHeader);\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n// find: unsuccessful search\n\ntemplate<typename T_hashTable>\nvoid bench_find_failedAll(T_hashTable& hashT, const uint64 limitSize, std::vector<double>& vecX_num, std::vector<double>& vecY_query_per_us){\n\tstd::random_device seed_gen;\n\tuint64 seed1 = seed_gen();\n\tuint64 seed2 = seed_gen();\n\tstd::mt19937_64 rand (seed1); // pseudo random number generator\n\tstd::mt19937_64 rand_toFind(seed2); // pseudo random number generator\n\t\n\tuint64 interval = 1;\n\tstd::vector<uint64> vecR(limitSize); vecR.clear();\n\t\n\tuint64 numFound=0ull, numNotFound=0ull;\n\tfor(;;){\n\t\tfor(uint i=0; i<interval; i++){\n\t\t\tuint64 r = rand();\n\t\t\tvecR <<= r;\n\t\t\thashT.insert(std::pair<uint64,uint64>(r,r));\n\t\t}\n\t\t\n\t\tvecX_num <<= hashT.size();\n\t\t\n\t\tstd::vector<uint64> 
vecR_toFind(interval);\n\t\tfor(uint i=0; i<interval; i++){\n\t\t\tvecR_toFind[i] = rand_toFind();\n\t\t}\n\t\t\n\t\t// find (all elements are not found)\n\t\ttime_m timem; sstd::measureTime_start(timem);\n\t\tfor(uint i=0; i<interval; i++){\n\t\t\tauto itr = hashT.find( vecR_toFind[i] );\n//\t\t\tif(itr!=hashT.end()){ numFound++; printf(\"seed1: %lu, seed2: %lu\\n\", seed1, seed2); }else{ numNotFound++; }\n\t\t\tif(!(itr!=hashT.end())){ numNotFound++; }else{ numFound++; printf(\"T_hashTable:%s, seed1: %lu, seed2: %lu\\n\", hashT2typeStr(hashT).c_str(), seed1, seed2); }\n\t\t}\n\t\tdouble nsec = sstd::measureTime_stop_ns(timem);\n\t\tvecY_query_per_us <<= ((double)interval * 1000.0) / (nsec);\n\t\t\n\t\tinterval = size2interval(hashT.size());\n\t\tif(hashT.size()+interval>limitSize){ break; }\n\t}\n\tif(numFound!=0ull){\n\t\tprintf(\"ERROR: in bench_find_failedAll(): \"); // There are some bugs in the software or the keys are actually colliding.\n\t\tprintf(\"%lu / %lu = %lf\\n\", numFound, numFound+numNotFound, (double)numFound/(double)(numFound+numNotFound));\n\t}\n}\n//---\nvoid vvec2plot_find_failedAll(const std::string& savePath, const std::vector<std::string>& saveAs, const sstd::vvec<double>& vvecX, const sstd::vvec<double>& vvecY){\n\tconst char* xlabel = \"Number of elements on the table [count]\";\n\tconst char* ylabel = \"Unsuccessful search speed [query/μs]\";\n\tstd::vector<std::string> vecLabel={\"std::unordered_map<uint64,uint64>\", \"sstd::CHashT<uint64,uint64>\", \"sstd::IpCHashT<uint64,uint64> (as uint8 and maxLF50)\", \"sstd::IpCHashT<uint64,uint64> (as uint8 and maxLF100)\", \"sstd::IpCHashT<uint64,uint64> (as uint16 and maxLF100)\", \"google::dense_hash_map<uint64,uint64>\", \"ska::flat_hash_map<uint64,uint64,ska::power_of_two_std_hash<uint64>>\"};\n\t\n\t// plot2fig\n\tconst char* tmpDir = \"./tmpDir\";\n\tconst char* fileName = \"plots\";\n\tconst char* funcName = \"vvec2graph\";\n\tsstd::c2py<void> vvec2graph(tmpDir, fileName, funcName, \"void, const str, const vec<str>*, const char*, const char*, const vec<str>*, const vvec<double>*, const vvec<double>*\");\n\tvvec2graph(savePath, &saveAs, xlabel, ylabel, &vecLabel, &vvecX, &vvecY);\n}\nvoid bench2plot_find_failedAll(const std::string& savePath, const std::vector<std::string>& saveAs, const uint64 initSize, const uint64 limitSize){\n\tstd::vector<std::vector<double>> vvecX, vvecY;\n\tRUN_BENCH(vvecX, vvecY, initSize, limitSize, bench_find_failedAll);\n\t\n\tvvec2plot_find_failedAll(savePath, saveAs, vvecX, vvecY);\n}\n//---\nvoid bench2csv_find_failedAll(const std::string& savePath, const uint64 initSize, const uint64 limitSize){\n\tstd::vector<std::vector<double>> vvecX, vvecY;\n\tRUN_BENCH(vvecX, vvecY, initSize, limitSize, bench_find_failedAll);\n\t\n\tsstd::vvec<std::string> vvecHeader = {{\"[count]\", \"uHashT [query/μs]\", \"cHashT [query/μs]\", \"iHashT_u8h [query/μs]\", \"iHashT_u8f [query/μs]\", \"iHashT_u16 [query/μs]\", \"dHashT [query/μs]\", \"fHashT [query/μs]\"}};\n\tBENCH_to_CSV(savePath, vvecX, vvecY, vvecHeader);\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n// erase\n\ntemplate<typename T_hashTable>\nvoid bench_erase(T_hashTable& hashT, const uint64 limitSize, std::vector<double>& vecX_num, std::vector<double>& vecY_query_per_us){\n\tstd::random_device seed_gen;\n\tstd::mt19937_64 rand (seed_gen()); // pseudo random number generator\n\tstd::mt19937_64 rand_toFind(seed_gen()); // pseudo random 
number generator\n\t\n\tuint64 interval = 1;\n\tstd::vector<uint64> vecR(limitSize); vecR.clear();\n\t\n\tfor(;;){\n\t\tfor(uint i=0; i<interval; i++){\n\t\t\tuint64 r = rand();\n\t\t\tvecR <<= r;\n\t\t\thashT.insert(std::pair<uint64,uint64>(r,r));\n\t\t}\n\t\t\n\t\tvecX_num <<= hashT.size();\n\t\t\n\t\tstd::vector<uint64> vecR_toErase(vecR.size());\n\t\tstd::vector<uint64> vecR_idx (vecR.size());\n\t\t{\n\t\t\tstd::uniform_int_distribution<uint64> range(0, vecR.size()-1); // make random numbers between [0, vecR.size()-1].\n\t\t\tfor(uint i=0; i<interval; i++){\n\t\t\t\tvecR_idx[i] = range(rand_toFind);\n\t\t\t\tvecR_toErase[i] = vecR[vecR_idx[i]];\n\t\t\t}\n\t\t}\n\t\t\n\t\t// erase (all elements are found)\n\t\ttime_m timem; sstd::measureTime_start(timem);\n\t\tfor(uint i=0; i<interval; i++){\n\t\t\thashT.erase( vecR_toErase[i] );\n\t\t}\n\t\tdouble nsec = sstd::measureTime_stop_ns(timem);\n\t\t\n\t\tfor(uint i=0; i<interval; i++){\n\t\t\tuint64 r = rand();\n\t\t\tvecR[vecR_idx[i]] = r;\n\t\t\thashT.insert(std::pair<uint64,uint64>(r,r));\n\t\t}\n\t\tvecY_query_per_us <<= ((double)interval * 1000.0) / (nsec);\n\t\t\n\t\tinterval = size2interval(hashT.size());\n\t\tif(hashT.size()+interval>limitSize){ break; }\n\t}\n}\n//---\nvoid vvec2plot_erase(const std::string& savePath, const std::vector<std::string>& saveAs, const sstd::vvec<double>& vvecX, const sstd::vvec<double>& vvecY){\n\tconst char* xlabel = \"Number of elements on the table [count]\";\n\tconst char* ylabel = \"Erasure speed [query/μs]\";\n\tstd::vector<std::string> vecLabel={\"std::unordered_map<uint64,uint64>\", \"sstd::CHashT<uint64,uint64>\", \"sstd::IpCHashT<uint64,uint64> (as uint8 and maxLF50)\", \"sstd::IpCHashT<uint64,uint64> (as uint8 and maxLF100)\", \"sstd::IpCHashT<uint64,uint64> (as uint16 and maxLF100)\", \"google::dense_hash_map<uint64,uint64>\", \"ska::flat_hash_map<uint64,uint64,ska::power_of_two_std_hash<uint64>>\"};\n\t\n\t// plot2fig\n\tconst char* tmpDir = \"./tmpDir\";\n\tconst char* fileName = \"plots\";\n\tconst char* funcName = \"vvec2graph\";\n\tsstd::c2py<void> vvec2graph(tmpDir, fileName, funcName, \"void, const str, const vec<str>*, const char*, const char*, const vec<str>*, const vvec<double>*, const vvec<double>*\");\n\tvvec2graph(savePath, &saveAs, xlabel, ylabel, &vecLabel, &vvecX, &vvecY);\n}\nvoid bench2plot_erase(const std::string& savePath, const std::vector<std::string>& saveAs, const uint64 initSize, const uint64 limitSize){\n\tstd::vector<std::vector<double>> vvecX, vvecY;\n\tRUN_BENCH_withErase(vvecX, vvecY, initSize, limitSize, bench_erase);\n\t\n\tvvec2plot_erase(savePath, saveAs, vvecX, vvecY);\n}\n//---\nvoid bench2csv_erase(const std::string& savePath, const uint64 initSize, const uint64 limitSize){\n\tstd::vector<std::vector<double>> vvecX, vvecY;\n\tRUN_BENCH_withErase(vvecX, vvecY, initSize, limitSize, bench_erase);\n\t\n\tsstd::vvec<std::string> vvecHeader = {{\"[count]\", \"uHashT [query/μs]\", \"cHashT [query/μs]\", \"iHashT_u8h [query/μs]\", \"iHashT_u8f [query/μs]\", \"iHashT_u16 [query/μs]\", \"dHashT [query/μs]\", \"fHashT [query/μs]\"}};\n\tBENCH_to_CSV(savePath, vvecX, vvecY, vvecHeader);\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\ntemplate<typename T_hashTable>\nvoid bench_maxLoadFactor(T_hashTable& hashT, const uint64 limitSize, std::vector<double>& vecX_tSize, std::vector<double>& vecY_lf){\n\t\n\tstd::random_device seed_gen;\n\tstd::mt19937_64 
rand(seed_gen()); // pseudo random number generator\n\t\n\tdouble lf_prev=0.0;\n\twhile(hashT.size()<limitSize){\n\t\tuint64 r = rand();\n\t\thashT.insert(std::pair<uint64,uint64>(r,r));\n\t\tdouble lf = hashT.load_factor();\n\t\tif(lf < lf_prev){\n\t\t\tvecX_tSize <<= (double)hashT.bucket_count()-1.0;\n\t\t\tvecY_lf <<= lf_prev;\n\t\t}\n\t\tlf_prev = lf;\n\t}\n}\n//---\nvoid vvec2plot_maxLoadFactor(const std::string& savePath, const std::vector<std::string>& saveAs, const sstd::vvec<double>& vvecX, const sstd::vvec<double>& vvecY){\n\tconst char* xlabel = \"Table size [count]\";\n\tconst char* ylabel = \"Maximum load factor [%]\";\n\tstd::vector<std::string> vecLabel={\"std::unordered_map<uint64,uint64>\", \"sstd::CHashT<uint64,uint64>\", \"sstd::IpCHashT<uint64,uint64> (as uint8 and maxLF50)\", \"sstd::IpCHashT<uint64,uint64> (as uint8 and maxLF100)\", \"sstd::IpCHashT<uint64,uint64> (as uint16 and maxLF100)\", \"google::dense_hash_map<uint64,uint64>\", \"ska::flat_hash_map<uint64,uint64,ska::power_of_two_std_hash<uint64>>\"};\n\t\n\t// plot2fig\n\tconst char* tmpDir = \"./tmpDir\";\n\tconst char* fileName = \"plots\";\n\tconst char* funcName = \"vvec2graph_lf\";\n\tsstd::c2py<void> vvec2graph(tmpDir, fileName, funcName, \"void, const str, const vec<str>*, const char*, const char*, const vec<str>*, const vvec<double>*, const vvec<double>*\");\n\tvvec2graph(savePath, &saveAs, xlabel, ylabel, &vecLabel, &vvecX, &vvecY);\n}\nvoid bench2plot_maxLoadFactor(const std::string& savePath, const std::vector<std::string>& saveAs, const uint64 limitSize){\n\tstd::vector<std::vector<double>> vvecX, vvecY;\n\tconst uint64 initSize=0ull;\n\tRUN_BENCH(vvecX, vvecY, initSize, limitSize, bench_maxLoadFactor);\n\t\n\tvvec2plot_maxLoadFactor(savePath, saveAs, vvecX, vvecY);\n}\n//---\nvoid bench2csv_maxLoadFactor(const std::string& savePath, const uint64 limitSize){\n\tstd::vector<std::vector<double>> vvecX, vvecY;\n\tconst uint64 initSize=0ull;\n\tRUN_BENCH(vvecX, vvecY, initSize, limitSize, bench_maxLoadFactor);\n\t\n\tsstd::vvec<std::string> vvecHeader = {{\"[count]\", \"uHashT [%]\", \"cHashT [%]\", \"iHashT_u8h [%]\", \"iHashT_u8f [%]\", \"iHashT_u16 [%]\", \"dHashT [%]\", \"fHashT [%]\"}};\n\tBENCH_to_CSV(savePath, vvecX, vvecY, vvecHeader);\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\nuint fileNum(const std::string& path_wWildCard){ return sstd::glob(path_wWildCard).size(); }\n\nvoid RUN_ALL_BENCHS(){\n//\tconst uint64 limitSize = 200000000; // limit of memory (on 32 GB RAM PC)\n\tconst uint64 limitSize = 5000000;\n\tconst uint64 initSize_wRehash = 0ull;\n\tconst uint64 initSize_preAlloc = limitSize;\n\t\n\tconst std::string saveDir = \"./tmpBench\"; sstd::mkdir(saveDir);\n\tstd::vector<std::string> saveAs = {\".pdf\", \".png\"};\n\t\n\t//---------------------------------------------------------------------------------------------------------------------------------------------\n\t//*\n\t// insert: insertion speed [query/sec]\n\tbench2plot_insert(saveDir+\"/insert\", saveAs, initSize_wRehash, limitSize);\n\t\n\t// insert: elapsed time [sec]\n\tbench2plot_insert_et(saveDir+\"/insert_et\", saveAs, initSize_wRehash, limitSize);\n\tbench2plot_insert_et(saveDir+\"/insert_et_preAlloc\", saveAs, initSize_preAlloc, limitSize);\n\t\n\t// find: search speed [query/sec]\n\tbench2plot_find(saveDir+\"/find_successful_search\", saveAs, initSize_wRehash, 
limitSize);\n\tbench2plot_find_failedAll(saveDir+\"/find_unsuccessful_search\", saveAs, initSize_wRehash, limitSize);\n\t\n\t// erase\n\tbench2plot_erase(saveDir+\"/erase\", saveAs, initSize_wRehash, limitSize);\n\t\n\t// max-load factor\n\tbench2plot_maxLoadFactor(saveDir+\"/maxLoadFactor\", saveAs, limitSize);\n\t//*/\n\t//---------------------------------------------------------------------------------------------------------------------------------------------\n\t/*\n\tstd::string udM = \"usedMemory\"; sstd::mkdir(saveDir+'/'+udM ); sstd::mkdir(saveDir+\"/tmp_\"+udM );\n\tstd::string udM_pA = \"usedMemory_preAlloc\"; sstd::mkdir(saveDir+'/'+udM_pA); sstd::mkdir(saveDir+\"/tmp_\"+udM_pA);\n\tfor(uint i=fileNum(saveDir+'/'+udM+\"/*\"); i<1; i++){\n\t\tstd::string savePath = saveDir +'/'+udM +sstd::ssprintf(\"/%s_%03u\", udM.c_str(), i)+\".csv\";\n\t\tbench2csv_usedMemory(savePath, saveDir+\"/tmp_\"+udM, initSize_wRehash, limitSize);\n\t}\n\tfor(uint i=fileNum(saveDir+'/'+udM_pA+\"/*\"); i<1; i++){\n\t\tstd::string savePath = saveDir +'/'+udM_pA +sstd::ssprintf(\"/%s_%03u\", udM_pA.c_str(), i)+\".csv\";\n\t\tbench2csv_usedMemory(savePath, saveDir+\"/tmp_\"+udM_pA, initSize_preAlloc, limitSize);\n\t}\n\t//*/\n\t//---\n\t\n\tuint loopNum = 100;\n\t/*\n\tstd::string fwR = \"/find_successful_search\";\n\tsstd::mkdir(saveDir+'/'+fwR);\n\tfor(uint i=fileNum(saveDir+'/'+fwR+\"/*\"); i<loopNum; i++){\n\t\tstd::string savePath = saveDir +'/'+fwR +sstd::ssprintf(\"/%s_%03u\", fwR.c_str(), i)+\".csv\";\n\t\tbench2csv_find(savePath, initSize_wRehash, limitSize);\n\t}\n\t\n\tstd::string ffa = \"/find_unsuccessful_search\";\n\tsstd::mkdir(saveDir+'/'+ffa);\n\tfor(uint i=fileNum(saveDir+'/'+ffa+\"/*\"); i<loopNum; i++){\n\t\tstd::string savePath = saveDir +'/'+ffa +sstd::ssprintf(\"/%s_%03u\", ffa.c_str(), i)+\".csv\";\n\t\tbench2csv_find_failedAll(savePath, initSize_wRehash, limitSize);\n\t}\n\t\n\tstd::string iwR = \"insert\";\n\tsstd::mkdir(saveDir+'/'+iwR);\n\tfor(uint i=fileNum(saveDir+'/'+iwR+\"/*\"); i<loopNum; i++){\n\t\tstd::string savePath = saveDir +'/'+iwR +sstd::ssprintf(\"/%s_%03u\", iwR.c_str(), i)+\".csv\";\n\t\tbench2csv_insert(savePath, initSize_wRehash, limitSize);\n\t}\n\t\n\tstd::string iEwR = \"insert_et\";\n\tsstd::mkdir(saveDir+'/'+iEwR);\n\tfor(uint i=fileNum(saveDir+'/'+iEwR+\"/*\"); i<loopNum; i++){\n\t\tstd::string savePath = saveDir +'/'+iEwR +sstd::ssprintf(\"/%s_%03u\", iEwR.c_str(), i)+\".csv\";\n\t\tbench2csv_insert_et(savePath, initSize_wRehash, limitSize);\n\t}\n\t\n\tstd::string iE = \"insert_et_preAlloc\";\n\tsstd::mkdir(saveDir+'/'+iE);\n\tfor(uint i=fileNum(saveDir+'/'+iE+\"/*\"); i<loopNum; i++){\n\t\tstd::string savePath = saveDir +'/'+iE +sstd::ssprintf(\"/%s_%03u\", iE.c_str(), i)+\".csv\";\n\t\tbench2csv_insert_et(savePath, initSize_preAlloc, limitSize);\n\t}\n\t\n\tstd::string ewR = \"erase\";\n\tsstd::mkdir(saveDir+'/'+ewR);\n\tfor(uint i=fileNum(saveDir+'/'+ewR+\"/*\"); i<loopNum; i++){\n\t\tstd::string savePath = saveDir +'/'+ewR +sstd::ssprintf(\"/%s_%03u\", ewR.c_str(), i)+\".csv\";\n\t\tbench2csv_erase(savePath, initSize_wRehash, limitSize);\n\t}\n\t\n\tstd::string mLF = \"maxLoadFactor\";\n\tsstd::mkdir(saveDir+'/'+mLF);\n\tfor(uint i=fileNum(saveDir+'/'+mLF+\"/*\"); i<loopNum; i++){\n\t\tstd::string savePath = saveDir +'/'+mLF +sstd::ssprintf(\"/%s_%03u\", mLF.c_str(), i)+\".csv\";\n\t\tbench2csv_maxLoadFactor(savePath, 
limitSize);\n\t}\n\t//*/\n\t//---------------------------------------------------------------------------------------------------------------------------------------------\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n\n" }, { "alpha_fraction": 0.5565872192382812, "alphanum_fraction": 0.5934973359107971, "avg_line_length": 45.23311996459961, "blob_id": "123d53a8e0c8be916c8d6bbdea19870128ae94a3", "content_id": "fc6096e7a31e946c62742be633ceca8a67a1d868", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 65050, "license_type": "no_license", "max_line_length": 371, "num_lines": 1407, "path": "/IpCHashT.hpp", "repo_name": "admiswalker/InPlaceChainedHashTable-IpCHashT-", "src_encoding": "UTF-8", "text": "#pragma once\n#include \"./typeDef.h\"\n#include <memory>\n#include <assert.h> // for debug\n#include <sstd/sstd.hpp> // for debug\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n// compile options\n\n#define use_insert_soft // select soft or hard\n//#define use_prime_table\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n#define elem_m sstd_IpCHashT::element <T_key,T_val,T_shift> // a macro of table element structure\n#define elem_KV_m sstd_IpCHashT::element_KeyVal<T_key,T_val> // a macro of table element structure\n#define itr_m sstd_IpCHashT::iterator <T_key,T_val,T_shift> // a macro of iterator\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\nnamespace sstd{\n\tnamespace IpCHashT_opt{\n\t\tclass maxLF50 { private: public: maxLF50 (){}; ~maxLF50 (){}; };\n\t\tclass maxLF100{ private: public: maxLF100(){}; ~maxLF100(){}; };\n\t\t\n\t\tclass successfulMajor{ private: public: successfulMajor(){}; ~successfulMajor(){}; };\n\t\tclass unsuccessfulMajor{ private: public: unsuccessfulMajor(){}; ~unsuccessfulMajor(){}; };\n\t}\n\t\n\t// set default options\n\ttemplate <class T_key,\n\t\t\t class T_val,\n\t\t\t class T_hash = std::hash<T_key>,\n\t\t\t class T_key_eq = std::equal_to<T_key>,\n\t\t\t typename T_shift = uint8, // or uint16\n\t\t\t typename T_maxLF = sstd::IpCHashT_opt::maxLF50, // or sstd::IpCHashT_opt::maxLF100\n\t\t\t typename T_major = sstd::IpCHashT_opt::successfulMajor // or sstd::IpCHashT_opt::unsuccessfulMajor\n\t\t\t >\n\tclass IpCHashT; // in-place chained hash table\n\t\n\t// set alias for IpCHashT option\n\ttemplate<class T_key, class T_val, class T_hash=std::hash<T_key>, class T_key_eq=std::equal_to<T_key>>\n\tusing IpCHashT_u8hS = IpCHashT<T_key, T_val, T_hash, T_key_eq, uint8, sstd::IpCHashT_opt::maxLF50, sstd::IpCHashT_opt::successfulMajor>; // uint8, half (maxLoadfactor50), Successful lookup major option\n\t\n\ttemplate<class T_key, class T_val, class T_hash=std::hash<T_key>, class T_key_eq=std::equal_to<T_key>>\n\tusing IpCHashT_u8fS = IpCHashT<T_key, T_val, T_hash, T_key_eq, uint8, sstd::IpCHashT_opt::maxLF100, sstd::IpCHashT_opt::successfulMajor>; // uint8, full (maxLoadfactor100), Successful lookup major option\n\t\n\ttemplate<class T_key, class T_val, class T_hash=std::hash<T_key>, class T_key_eq=std::equal_to<T_key>>\n\tusing IpCHashT_u16hS = IpCHashT<T_key, T_val, T_hash, T_key_eq, uint16, 
sstd::IpCHashT_opt::maxLF50, sstd::IpCHashT_opt::successfulMajor>; // uint16, half (maxLoadfactor50), Successful lookup major option\n\t\n\ttemplate<class T_key, class T_val, class T_hash=std::hash<T_key>, class T_key_eq=std::equal_to<T_key>>\n\tusing IpCHashT_u16fS = IpCHashT<T_key, T_val, T_hash, T_key_eq, uint16, sstd::IpCHashT_opt::maxLF100, sstd::IpCHashT_opt::successfulMajor>; // uint16, full (maxLoadfactor100), Successful lookup major option\n\t\n\ttemplate<class T_key, class T_val, class T_hash=std::hash<T_key>, class T_key_eq=std::equal_to<T_key>>\n\tusing IpCHashT_u8hU = IpCHashT<T_key, T_val, T_hash, T_key_eq, uint8, sstd::IpCHashT_opt::maxLF50, sstd::IpCHashT_opt::unsuccessfulMajor>; // uint8, half (maxLoadfactor50), Unsuccessful lookup major option\n\t\n\ttemplate<class T_key, class T_val, class T_hash=std::hash<T_key>, class T_key_eq=std::equal_to<T_key>>\n\tusing IpCHashT_u8fU = IpCHashT<T_key, T_val, T_hash, T_key_eq, uint8, sstd::IpCHashT_opt::maxLF100, sstd::IpCHashT_opt::unsuccessfulMajor>; // uint8, full (maxLoadfactor100), Unsuccessful lookup major option\n\t\n\ttemplate<class T_key, class T_val, class T_hash=std::hash<T_key>, class T_key_eq=std::equal_to<T_key>>\n\tusing IpCHashT_u16hU = IpCHashT<T_key, T_val, T_hash, T_key_eq, uint16, sstd::IpCHashT_opt::maxLF50, sstd::IpCHashT_opt::unsuccessfulMajor>; // uint16, half (maxLoadfactor50), Unsuccessful lookup major option\n\t\n\ttemplate<class T_key, class T_val, class T_hash=std::hash<T_key>, class T_key_eq=std::equal_to<T_key>>\n\tusing IpCHashT_u16fU = IpCHashT<T_key, T_val, T_hash, T_key_eq, uint16, sstd::IpCHashT_opt::maxLF100, sstd::IpCHashT_opt::unsuccessfulMajor>; // uint16, full (maxLoadfactor100), Unsuccessful lookup major option\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\nnamespace sstd_IpCHashT{\n\ttemplate <class T_key, class T_val, typename T_shift> struct element;\n\ttemplate <class T_key, class T_val> struct element_KeyVal;\n\ttemplate <class T_key, class T_val, typename T_shift> struct iterator;\n\t\n\tbool isSuccessfulMajor(sstd::IpCHashT_opt::successfulMajor dummy);\n\tbool isSuccessfulMajor(sstd::IpCHashT_opt::unsuccessfulMajor dummy);\n\t\n\t inline bool isUint8 (const uint8 dummy){ return true; }\n\ttemplate <class T_dummy> inline bool isUint8 (const T_dummy dummy){ return false; }\n\t inline bool isUint16(const uint16 dummy){ return true; }\n\ttemplate <class T_dummy> inline bool isUint16(const T_dummy dummy){ return false; }\n\t inline bool isMaxLF50 (const sstd::IpCHashT_opt::maxLF50 rhs){ return true; }\n\ttemplate <class T_dummy> inline bool isMaxLF50 (const T_dummy rhs){ return false; }\n\t inline bool isMaxLF100(const sstd::IpCHashT_opt::maxLF100 rhs){ return true; }\n\ttemplate <class T_dummy> inline bool isMaxLF100(const T_dummy rhs){ return false; }\n\t\n\t#ifdef use_prime_table\n\tconst uint64 tSizeL[64] = { // table size list. 
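(each entry is indexed by tSizeL_idx; rehashing simply steps to the next entry) 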
(Smallest prime list larger than power of 2.)\n\t\t2ull, // 2^ 1 + 0 = 2ull\n\t\t5ull, // 2^ 2 + 1 = 5ull\n\t\t11ull, // 2^ 3 + 3 = 11ull\n\t\t17ull, // 2^ 4 + 1 = 17ull\n\t\t37ull, // 2^ 5 + 5 = 37ull\n\t\t67ull, // 2^ 6 + 3 = 67ull\n\t\t131ull, // 2^ 7 + 3 = 131ull\n\t\t257ull, // 2^ 8 + 1 = 257ull\n\t\t521ull, // 2^ 9 + 9 = 521ull\n\t\t1031ull, // 2^10 + 7 = 1031ull\n\t\t2053ull, // 2^11 + 5 = 2053ull\n\t\t4099ull, // 2^12 + 3 = 4099ull\n\t\t8209ull, // 2^13 + 17 = 8209ull\n\t\t16411ull, // 2^14 + 27 = 16411ull\n\t\t32771ull, // 2^15 + 3 = 32771ull\n\t\t65537ull, // 2^16 + 1 = 65537ull\n\t\t131101ull, // 2^17 + 29 = 131101ull\n\t\t262147ull, // 2^18 + 3 = 262147ull\n\t\t524309ull, // 2^19 + 21 = 524309ull\n\t\t1048583ull, // 2^20 + 7 = 1048583ull\n\t\t2097169ull, // 2^21 + 17 = 2097169ull\n\t\t4194319ull, // 2^22 + 15 = 4194319ull\n\t\t8388617ull, // 2^23 + 9 = 8388617ull\n\t\t16777259ull, // 2^24 + 43 = 16777259ull\n\t\t33554467ull, // 2^25 + 35 = 33554467ull\n\t\t67108879ull, // 2^26 + 15 = 67108879ull\n\t\t134217757ull, // 2^27 + 29 = 134217757ull\n\t\t268435459ull, // 2^28 + 3 = 268435459ull\n\t\t536870923ull, // 2^29 + 11 = 536870923ull\n\t\t1073741827ull, // 2^30 + 85 = 1073741827ull\n\t\t2147483659ull, // 2^31 + 11 = 2147483659ull\n\t\t4294967311ull, // 2^32 + 15 = 4294967311ull\n\t\t8589934609ull, // 2^33 + 17 = 8589934609ull\n\t\t17179869209ull, // 2^34 + 25 = 17179869209ull\n\t\t34359738421ull, // 2^35 + 53 = 34359738421ull\n\t\t68719476767ull, // 2^36 + 31 = 68719476767ull\n\t\t137438953481ull, // 2^37 + 9 = 137438953481ull\n\t\t274877906951ull, // 2^38 + 7 = 274877906951ull\n\t\t549755813911ull, // 2^39 + 23 = 549755813911ull\n\t\t1099511627791ull, // 2^40 + 15 = 1099511627791ull\n\t\t2199023255579ull, // 2^41 + 27 = 2199023255579ull\n\t\t4398046511119ull, // 2^42 + 15 = 4398046511119ull\n\t\t8796093022237ull, // 2^43 + 29 = 8796093022237ull\n\t\t17592186044423ull, // 2^44 + 7 = 17592186044423ull\n\t\t35184372088891ull, // 2^45 + 59 = 35184372088891ull\n\t\t70368744177679ull, // 2^46 + 15 = 70368744177679ull\n\t\t140737488355333ull, // 2^47 + 5 = 140737488355333ull\n\t\t281474976710677ull, // 2^48 + 21 = 281474976710677ull\n\t\t562949953421381ull, // 2^49 + 69 = 562949953421381ull\n\t\t1125899906842679ull, // 2^50 + 55 = 1125899906842679ull\n\t\t2251799813685269ull, // 2^51 + 21 = 2251799813685269ull\n\t\t4503599627370517ull, // 2^52 + 21 = 4503599627370517ull\n\t\t9007199254740997ull, // 2^53 + 5 = 9007199254740997ull\n\t\t18014398509482143ull, // 2^54 + 159 = 18014398509482143ull\n\t\t36028797018963971ull, // 2^55 + 3 = 36028797018963971ull\n\t\t72057594037928017ull, // 2^56 + 81 = 72057594037928017ull\n\t\t144115188075855881ull, // 2^57 + 9 = 144115188075855881ull\n\t\t288230376151711813ull, // 2^58 + 69 = 288230376151711813ull\n\t\t576460752303423619ull, // 2^59 + 131 = 576460752303423619ull\n\t\t1152921504606847009ull, // 2^60 + 33 = 1152921504606847009ull\n\t\t2305843009213693967ull, // 2^61 + 15 = 2305843009213693967ull\n\t\t4611686018427388039ull, // 2^62 + 135 = 4611686018427388039ull\n\t\t9223372036854775837ull, // 2^63 + 29 = 9223372036854775837ull\n\t//\t18446744073709551629ull // 2^64 + 13 = 18446744073709551629ull // larger than the range of uint64\n\t\t18446744073709551360ull\t// 2^64 - 254 = 18446744073709551360ull // not prive (need to find out an appropriate prime)\n\t};\n\t#endif\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\ntemplate <class T_key, 
class T_val, typename T_shift>\nstruct sstd_IpCHashT::element{ // defined as elem_m\nprivate:\npublic:\n\telement(){\n\t\tT_shift maxShift=(T_shift)0; maxShift=~maxShift; // 'maxShift' indicates that the array is empty.\n\t\tprev = maxShift;\n\t\tnext = (T_shift)0;\n\t}\n\t~element(){}\n\t\n\tT_key key; // key\n\tT_val val; // value\n\tT_shift prev; // A shift number to the previous element. Or, the head of in-place chain while 'prev==0'. Or, empty element while 'prev==t_shift_max' (while \"T_shift t_shift_max=(T_shift)0; t_shift_max=~t_shift_max;\").\n\tT_shift next; // A shift number to the next element. Only right shift is allowed. 'next==0' means myself.\n};\n\n// definition for rehash\ntemplate <class T_key, class T_val>\nstruct sstd_IpCHashT::element_KeyVal{ // defined as elem_KV_m\nprivate:\npublic:\n\telement_KeyVal(){}\n\telement_KeyVal(T_key&& key_in, T_val&& val_in){\n\t\tkey = std::move(key_in);\n\t\tval = std::move(val_in);\n\t}\n\t~element_KeyVal(){}\n\t\n\tT_key key; // key\n\tT_val val; // value\n};\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n// in order to reduce the calling time of function, macro expansion will be used.\n#define isOverMaxLF_m(elems, elems_maxLF)\t\t\\\n\t(elems>elems_maxLF)\n#define isEmpty_m(ELEM)\t\t\t\t\t\t\t\\\n\t(ELEM.prev==maxShift)\n#define isHead_m(ELEM)\t\t\t\t\t\t\t\\\n\t(ELEM.prev==(T_shift)0)\n#define isTail_m(ELEM)\t\t\t\t\t\t\t\\\n\t(ELEM.next==(T_shift)0)\n#define seek2emptyIndex_m(pT, idx, idx_last)\t\t\t\t\t\t\t\\\n\tfor(;;){\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t\tif( idx>=ttSize || idx>=idx_last+seekLimit ){ return std::pair<struct itr_m, bool>(itr_m(maxShift, ttSize, pT, itr_needRehash_m), false); } \\\n\t\tif( isEmpty_m(pT[idx]) ){ break; }\t\t\t\t\t\t\t\t\\\n\t\tidx++;\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t}\n#define seek2tail(pT, idx)\t\t\t\t\t\t\t\t\t\t\t\t\\\n\tfor(;; idx+=pT[idx].next){\t\t\t\t\t\t\t\t\t\t\t\\\n\t\tif( isTail_m(pT[idx]) ){ break; }\t\t\t\t\t\t\t\t\\\n\t}\n\n#ifdef use_prime_table\n\t#ifdef SSTD_IpCHashT_DEBUG\n\t#define key2tableIdx_m(tIdx, key)\t\t\t\t\t\t\t\t\t\t\\\n\t\tuint64 hVal = (uint64)(*pHashFn)(key); /* generate hashed value */\t\\\n\t\ttIdx = hVal % tSize;\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t\tif(use_tIdx_dbg){ idx=tIdx_dbg; } /* over write tIdx for debug */\n\t#else\n\t#define key2tableIdx_m(tIdx, key)\t\t\t\t\t\t\t\t\t\t\\\n\t\tuint64 hVal = (uint64)(*pHashFn)(key); /* generate hashed value */\t\\\n\t\ttIdx = hVal % tSize;\n\t#endif\n#else\n\t#ifdef SSTD_IpCHashT_DEBUG\n\t#define key2tableIdx_m(tIdx, key)\t\t\t\t\t\t\t\t\t\t\\\n\t\tuint64 hVal = (uint64)(*pHashFn)(key); /* generate hashed value */\t\\\n\t\ttIdx = hVal & tSize_m1;\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t\tif(use_tIdx_dbg){ idx=tIdx_dbg; } /* over write tIdx for debug */\n\t#else\n\t#define key2tableIdx_m(tIdx, key)\t\t\t\t\t\t\t\t\t\t\\\n\t\tuint64 hVal = (uint64)(*pHashFn)(key); /* generate hashed value */\t\\\n\t\ttIdx = hVal & tSize_m1;\n\t#endif\n#endif\n\n#ifdef use_prime_table\n\t#define key2tableIdx_wDivisor_m(tIdx, key, div) /* with divisor */\t\t\\\n\t\tuint64 hVal = (uint64)(*pHashFn)(key); /* generate hashed value */\t\\\n\t\ttIdx = hVal % div;\n#else\n\t#define key2tableIdx_wDivisor_m(tIdx, key, div) /* with divisor */\t\t\\\n\t\tuint64 hVal = (uint64)(*pHashFn)(key); /* generate hashed value */\t\\\n\t\ttIdx = hVal & 
div;\n#endif\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n#define itr_end_m 0xFFFFFFFFFFFFFFFF\n#define itr_needRehash_m 0xFFFFFFFFFFFFFFFE\n\ntemplate <class T_key, class T_val, typename T_shift>\nstruct sstd_IpCHashT::iterator{ // defined as itr_m\nprivate:\n\tconst T_shift maxShift;\n\tconst uint64 ttSize;\n\t\n\tstruct elem_m* pT; // pointer of element\n\tuint64 idx; // when idx==0xFFFFFFFFFFFFFFFF: idx is end flag.\n\t // when idx==0xFFFFFFFFFFFFFFFE: rehash flag.\n\t\npublic:\n\tinline iterator(){}\n\tinline iterator(const T_shift maxShift_in, const uint64 totalTableSize, struct elem_m* pT_in, const uint64 idx_in) : maxShift(maxShift_in), ttSize(totalTableSize) { pT=pT_in; idx=idx_in; }\n\tinline ~iterator(){}\n\t\n\tinline const T_key & first () const { return pT[idx].key; }\n\tinline T_key & first_RW () const { return pT[idx].key; }\n\tinline const T_val & second () const { return pT[idx].val; }\n\tinline T_val & second_RW() { return pT[idx].val; }\n\tinline const uint64 & index () const { return idx; }\n\tinline const T_shift& prev () const { return pT[idx].prev; } // for debug\n\tinline const T_shift& next () const { return pT[idx].next; } // for debug\n\t\n\tinline bool _needRehash(){ return idx==itr_needRehash_m; }\n\t\n\t// operators\n\tinline const bool operator!=(const struct itr_m& rhs){ return this->idx != rhs.index(); }\n\t\n\t// pre-increment (++itr)\n\tinline struct itr_m operator++(){\n\t\tidx++;\n\t\tfor(; idx<ttSize; idx++){\n\t\t\tif(! isEmpty_m(pT[idx]) ){ return *this; }\n\t\t}\n\t\tidx=itr_end_m;\n\t\treturn *this;\n\t}\n\t\n\t// post-increment (itr++)\n//\tclass itr_m operator++(int){ // int is a dummy arg\n//\t\t== not implimented yet. ==\n//\t}\n\t\n\t#ifdef SSTD_IpCHashT_DEBUG\n\tinline void print_dbg(){\n\t\tstd::cout << \" idx: \" << idx << std::endl;\n\t\tstd::cout << \" key: \" << pT[idx].key << std::endl;\n\t\tstd::cout << \" val: \" << pT[idx].val << std::endl;\n\t\tstd::cout << \"prev: \" << pT[idx].prev << std::endl;\n\t\tstd::cout << \"next: \" << pT[idx].next << std::endl;\n\t}\n\t#endif\n};\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\nclass sstd::IpCHashT{\nprivate:\n\tvoid IpCHashT_constructor(uint64 tableSize);\n\t\n\t#ifdef use_prime_table\n\tuint8 tSizeL_idx; // table size list index\n\t#else\n\tuint64 tSize_m1; // tSize minus 1.\n\t#endif\n\tuint64 tSize; // table size. This is a hash value division size.\n\tuint64 pSize; // padding size of the table\n\tuint64 ttSize; // total table size. This is a seek limit size. 
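(linear probing never seeks beyond ttSize) 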
( ttSize == tSize + pSize ).\n\tuint64 elems; // number of elements on the table\n\tuint64 elems_maxLF; // number of elements when the table is maxLF.\n\tT_hash* pHashFn; // pointer to the hash function\n\tstruct elem_m* pT; // pointer to the table\n\t\n\t// constant values\n\tT_shift maxShift;\n\tT_shift seekLimit;\n\t\n\tvoid move_hashT2vecKV(std::vector<elem_KV_m>& vecKV, sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>& hashT);\n\tvoid failSafe_of_rehashing(sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>& hashT_new);\n\npublic:\n\ttypedef std::pair<const T_key, T_val> value_type; // for STL compatibility.\n\t\n\tIpCHashT();\n\tIpCHashT(const uint64 tableSize); // nearest size under power of 2 will be allocated.\n\t#ifdef use_prime_table\n\tIpCHashT(const uint8 tableSizeL_idx, const uint64 tableSize); // allocate same size of tableSize. for rehashing. (select size from prime table, never from the others).\n\t#else\n\tIpCHashT(const uint64 tableSize_minus1, const uint64 tableSize); // allocate same size of tableSize. for rehashing. (select size of power 2, never from the others).\n\t#endif\n\t~IpCHashT();\n\t\n\t#ifdef use_prime_table\n\tinline uint8& _tSizeL_idx(){ return tSizeL_idx; }\n\t#else\n\tinline uint64& _tSize_m1(){ return tSize_m1; }\n\t#endif\n\tinline uint64& _tSize(){ return tSize; }\n\tinline uint64& _pSize(){ return pSize; }\n\tinline uint64& _ttSize(){ return ttSize; }\n\tinline uint64& _elems (){ return elems; }\n\tinline uint64& _elems_maxLF(){ return elems_maxLF; }\n\tinline T_hash*& _pHashFn(){ return pHashFn; }\n\tinline struct elem_m*& _pT(){ return pT; }\n\tinline const T_shift& _maxShift() const { return maxShift; }\n\tinline const T_shift& _seekLimit() const { return seekLimit; }\n\t\n\tinline struct itr_m begin() const {\n\t\tuint64 idx=0ull;\n\t\tfor(; idx<ttSize; idx++){\n\t\t\tif(! isEmpty_m(pT[idx]) ){ return itr_m(maxShift, ttSize, pT, idx); }\n\t\t}\n\t\treturn itr_m(maxShift, ttSize, pT, itr_end_m);\n\t}\n\tinline struct itr_m end() const { return itr_m(maxShift, ttSize, pT, itr_end_m); }\n\tinline const uint64 size(){ return elems; }\n\tinline const uint64 tableSize(){ return tSize; }\n\tinline const uint64 bucket_count(){ return tSize; }\n\tinline const double load_factor() const { return (double)elems/(double)ttSize; }\n\t\n\tvoid rehash();\n\t\n\tstruct itr_m find(const T_key& key_in, uint64 idx);\n\tstruct itr_m find(const T_key& key_in);\n\t\n\t// NOTE: _insertBase_soft() does not overwrite the existing value when the key already exists (it returns the existing element together with 'false').\n\tstd::pair<struct itr_m, bool> _insertBase_soft (T_key&& key_in, T_val&& val_in, uint64 idx); // tail insertion. (Tail insertion without probing between head and tail elements. (Right of tail element will be linear probed.)) This function is used when rehashing or initializing the table.\n//\tstd::pair<struct itr_m, bool> _insertBase_medium(T_key&& key_in, T_val&& val_in, uint64 idx); // half linear probing insertion. (Linear probing for only elements evacuation due to the element insertion.)\n\tstd::pair<struct itr_m, bool> _insertBase_hard (T_key&& key_in, T_val&& val_in, uint64 idx); // full linear probing insertion. 
(Linear probing for all of elements insertion including elements evacuation.)\n\tstruct itr_m insert (const T_key& key_in, const T_val& val_in); // copy key and value.\n\tstruct itr_m insert_soft(const T_key& key_in, const T_val& val_in); // copy key and value.\n\tstruct itr_m insert_hard(const T_key& key_in, const T_val& val_in); // copy key and value.\n//\tstruct itr_m insert( T_key&& key_in, const T_val& val_in); // swap key. (Callable by \"sstd::CHashT<T_key, T_val> hashT; hashT.add(std::move(key), val );\".)\n//\tstruct itr_m insert(const T_key& key_in, T_val&& val_in); // swap value. (Callable by \"sstd::CHashT<T_key, T_val> hashT; hashT.add( key , std::move(val));\".)\n//\tstruct itr_m insert( T_key&& key_in, T_val&& val_in); // swap key and value. (Callable by \"sstd::CHashT<T_key, T_val> hashT; hashT.add(std::move(key), std::move(val));\".)\n\tstruct itr_m insert (const T_key& key_in, const T_val& val_in, uint64 idx); // copy key and value.\n\tstruct itr_m insert_soft(const T_key& key_in, const T_val& val_in, uint64 idx); // copy key and value.\n\tstruct itr_m insert_hard(const T_key& key_in, const T_val& val_in, uint64 idx); // copy key and value.\n//\tstruct itr_m insert( T_key&& key_in, const T_val& val_in, uint64 idx); // swap key. (Callable by \"sstd::CHashT<T_key, T_val> hashT; hashT.add(std::move(key), val );\".)\n//\tstruct itr_m insert(const T_key& key_in, T_val&& val_in, uint64 idx); // swap value. (Callable by \"sstd::CHashT<T_key, T_val> hashT; hashT.add( key , std::move(val));\".)\n//\tstruct itr_m insert( T_key&& key_in, T_val&& val_in, uint64 idx); // swap key and value. (Callable by \"sstd::CHashT<T_key, T_val> hashT; hashT.add(std::move(key), std::move(val));\".)\n\t\n\tstd::pair<struct itr_m, bool> insert(const value_type& v); // for STL (1). Ref: https://cpprefjp.github.io/reference/unordered_map/unordered_map/insert.html\n\tvoid erase(const T_key& key_in, uint64 idx);\n\tvoid erase(const T_key& key_in);\n// erase_recursive(); // recursive erasion.\n\t\n#ifdef SSTD_IpCHashT_DEBUG\n\tbool use_tIdx_dbg = false; uint64 tIdx_dbg;\n\tbool use_pSize_dbg = false; uint64 pSize_dbg;\n\tbool use_testFOR_dbg = false;\n#endif\n\t\n\t// ---\n\t\n\tT_val& operator[](const T_key& rhs);\n//\tT_val& operator[]( T_key&& rhs);\n};\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n#define get_tSizeL_idx(idx)\t\t\t\t\t\t\t\t\t\t\t\t\\\n\tidx=0;\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\tfor(; idx<64; idx++){\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t\tif(sstd_IpCHashT::tSizeL[idx]>=tableSize){ break; }\t\t\t\t\\\n\t}\n#define get_tSize(tSize)\t\t\t\t\t\t\\\n\ttSize=2;\t\t\t\t\t\t\t\t\t\\\n\twhile(tSize<tableSize){ tSize*=2; }\n\ninline double get_maxLF(const sstd::IpCHashT_opt::maxLF50 & rhs){ return 0.50; }\ninline double get_maxLF(const sstd::IpCHashT_opt::maxLF100& rhs){ return 1.00; }\n\n#define constructorBase_init_pSize_m()\t\t\t\t\t\t\t\t\t\\\n\t/* pSize = (1/a) * tSize + b */\t\t\t\t\\\n\t/* */\t\t\t\t\\\n\t/* pSize */\t\t\t\t\\\n\t/* | */\t\t\t\t\\\n\t/* | */\t\t\t\t\\\n\t/* 254 - ----------------------- */\t\t\t\t\\\n\t/* | * */\t\t\t\t\\\n\t/* | * | */\t\t\t\t\\\n\t/* | * */\t\t\t\t\\\n\t/* b (bias) - | */\t\t\t\t\\\n\t/* | */\t\t\t\t\\\n\t/* +---------|-----------------> tSize */\t\t\t\t\\\n\t/* 254*a */\t\t\t\t\\\n\t/* */\t\t\t\t\\\n\t/* Fig. pSize vs tSize */\t\t\t\t\\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\tconst double a = 18; /* hyper parametor for T_shift==uint8. 
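E.g. with a=18 and b=35, tSize=4096 gives pSize = (uint64)(4096.0/18.0 + 35.0) = 262, which is then clamped to 254. 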
*/\t\t\\\n\tconst uint64 b = 35; /* hyper parameter for T_shift==uint8. */\t\t\\\n\tpSize=(uint64)((double)tSize/a + b);\t\t\t\t\t\t\t\t\\\n\tif(pSize>254){ pSize=254ull; } /* when using T_shift=uint8, 0xFF-1==254 is the max-shift. */\n\n#define constructorBase_init_m()\t\t\t\t\t\t\t\t\t\t\\\n\tttSize = tSize + pSize; /* while \"#define use_prime_table\" is disabled, ttSize must satisfy ttSize>=tSize+1, because (hashVal & tSize) will be in [0, tSize], not [0, tSize). (when using the prime table, hashVal % tSize is in [0, tSize).) */ \\\n\tpT = new struct elem_m[ttSize];\t\t\t\t\t\t\t\\\n\tpHashFn = new T_hash();\t\t\t\t\t\t\t\t\t\t\t\\\n\telems = 0ull;\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\telems_maxLF = ttSize * get_maxLF(T_maxLF());\t\t\t\t\t\t\\\n\tmaxShift = (T_shift)0;\t\t\t\t\t\t\t\t\t\t\t\\\n\tmaxShift = ~maxShift; /* 'maxShift' will be filled with '1'. */\t\\\n\tseekLimit = maxShift - 1;\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\ninline void sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::IpCHashT_constructor(uint64 tableSize){\n\tif ( sstd_IpCHashT::isUint8 (T_shift()) && sstd_IpCHashT::isMaxLF50 (T_maxLF()) ){ tableSize = tableSize*2;\n\t}else if( sstd_IpCHashT::isUint8 (T_shift()) && sstd_IpCHashT::isMaxLF100(T_maxLF()) ){ tableSize = (uint64)(((double)tableSize)*1.429); // with uint8 and maxLF100 option, the load factor will reach a peak at about 0.70. So, 1/0.70 = 1.429.\n\t}else if( sstd_IpCHashT::isUint16(T_shift()) && sstd_IpCHashT::isMaxLF50 (T_maxLF()) ){ tableSize = tableSize*2;\n\t}else if( sstd_IpCHashT::isUint16(T_shift()) && sstd_IpCHashT::isMaxLF100(T_maxLF()) ){ tableSize = (uint64)(((double)tableSize)*1.026); // with uint16 and maxLF100 option, the load factor will reach a peak at about 0.975. So, 1/0.975 = 1.026.\n\t}\n\t\n\t#ifdef use_prime_table\n\tget_tSizeL_idx(tSizeL_idx); tSize = sstd_IpCHashT::tSizeL[tSizeL_idx];\n\t#else\n\tget_tSize(tSize); tSize_m1 = tSize - 1;\n\t#endif\n\t\n\tconstructorBase_init_pSize_m();\n\t#ifdef SSTD_IpCHashT_DEBUG\n\tif(use_pSize_dbg){ pSize=(uint64)pSize_dbg; } // over write pSize for debug\n\t#endif\n\tconstructorBase_init_m();\n}\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major> inline sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::IpCHashT( ){ IpCHashT_constructor( 512 ); }\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major> inline sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::IpCHashT(const uint64 tableSize){ IpCHashT_constructor(tableSize); }\n\n//---\n\n#ifdef use_prime_table\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\ninline sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::IpCHashT(const uint8 tableSizeL_idx, const uint64 tableSize){ // allocate same size of tableSize. for rehashing. 
(select size from prime table, never from the others).\n\ttSizeL_idx = tableSizeL_idx;\n\ttSize = sstd_IpCHashT::tSizeL[tSizeL_idx];\n\tconstructorBase_init_pSize_m();\n\t\n\t#ifdef SSTD_IpCHashT_DEBUG\n\tif(use_pSize_dbg){ pSize=(uint64)pSize_dbg; } // over write pSize for debug\n\t#endif\n\tconstructorBase_init_m();\n}\n#else\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\ninline sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::IpCHashT(const uint64 tableSize_minus1, const uint64 tableSize){ // allocate same size of tableSize. for rehashing. (select size of power 2, never from the others).\n\ttSize_m1 = tableSize_minus1;\n\ttSize = tableSize;\n\t\n\tconstructorBase_init_pSize_m();\n\t#ifdef SSTD_IpCHashT_DEBUG\n\tif(use_pSize_dbg){ pSize=(uint64)pSize_dbg; } // over write pSize for debug\n\t#endif\n\tconstructorBase_init_m();\n}\n#endif\n\n//---\n\n#undef get_tSize\n#undef get_tSizeL_idx\n#undef constructorBase_init_m\n\n//---\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\ninline sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::~IpCHashT(){\n\tdelete[] pT;\n\tdelete pHashFn;\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\nvoid swap_hashT(sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>& lhs, sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>& rhs){\n\t// \"using std::swap;\" is defined, in order to preferentially call overloaded function of swap<T>() for type T. (Ref: https://cpprefjp.github.io/reference/utility/swap.html)\n\t// In here, scope of using is limited by \"{}\", this means that scope of using is same as a usual value.\n\tusing std::swap;\n\t\n\t#ifdef use_prime_table\n\tswap(lhs._tSizeL_idx(), rhs._tSizeL_idx() );\n\t#else\n\tswap(lhs._tSize_m1(), rhs._tSize_m1() );\n\t#endif\n\t\n\tswap(lhs._tSize(), rhs._tSize() );\n\tswap(lhs._pSize(), rhs._pSize() );\n\tswap(lhs._ttSize(), rhs._ttSize() );\n\tswap(lhs._pHashFn(), rhs._pHashFn() );\n\tswap(lhs._pT(), rhs._pT() );\n\tswap(lhs._elems(), rhs._elems() );\n\tswap(lhs._elems_maxLF(), rhs._elems_maxLF());\n}\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\nvoid sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::move_hashT2vecKV(std::vector<elem_KV_m>& vecKV, sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>& hashT){\n\tfor(auto itr=hashT.begin(); itr!=hashT.end(); ++itr){\n\t\tvecKV.push_back( elem_KV_m(std::move(itr.first_RW()), std::move(itr.second_RW())) );\n\t}\n}\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\nvoid sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::failSafe_of_rehashing(sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>& hashT){\n\t// \"using std::swap;\" is defined, in order to preferentially call overloaded function of swap<T>() for type T. 
(Ref: https://cpprefjp.github.io/reference/utility/swap.html)\n\t// In here, scope of using is limited by \"{}\", this means that scope of using is same as a usual value.\n\tusing std::swap;\n\t\n\tstd::vector<elem_KV_m> vecKV(hashT.size()); vecKV.clear();\n\t\n CONTINUE_sstd_failSafe_of_rehashing:\n\tmove_hashT2vecKV(vecKV, hashT);\n\t{\n\t\t#ifdef use_prime_table\n\t\tsstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major> hashT_new(hashT._tSizeL_idx()+1, sstd_IpCHashT::tSizeL[hashT._tSizeL_idx()+1]); // twice size of tSize will be allocated.\n\t\t#else\n\t\tsstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major> hashT_new(hashT.tableSize()*2-1, hashT.tableSize()*2); // twice size of tSize will be allocated.\n\t\t#endif\n\t\tswap_hashT(hashT, hashT_new);\n\t}\n\t\n\twhile(vecKV.size()!=0){\n\t\t#ifdef use_prime_table\n\t\tuint64 idx; key2tableIdx_wDivisor_m(idx, vecKV[vecKV.size()-1].key, hashT.tableSize());\n\t\t#else\n\t\tuint64 idx; key2tableIdx_wDivisor_m(idx, vecKV[vecKV.size()-1].key, hashT._tSize_m1());\n\t\t#endif\n\t\t\n\t\t#if defined(use_insert_soft) && (!defined(SSTD_IpCHashT_DEBUG))\n\t\tstd::pair<struct itr_m, bool> itr_TF = hashT._insertBase_soft(std::move(vecKV[vecKV.size()-1].key), std::move(vecKV[vecKV.size()-1].val), idx);\n\t\t#else\n\t\tstd::pair<struct itr_m, bool> itr_TF = hashT._insertBase_hard(std::move(vecKV[vecKV.size()-1].key), std::move(vecKV[vecKV.size()-1].val), idx); // when rehashing, there is no meaning to use _insertBase_hard() without stress test.\n\t\t#endif\n\t\t\n\t\tif(itr_TF.first.index()==itr_needRehash_m){ goto CONTINUE_sstd_failSafe_of_rehashing; }\n\t\t#ifdef SSTD_IpCHashT_DEBUG\n\t\tif(use_testFOR_dbg){ // testing failSafe_of_rehashing()\n\t\t\thashT._elems()++;\n\t\t\tvecKV.pop_back(); // erase the tail element\n\t\t\tuse_testFOR_dbg=false;\n\t\t\tgoto CONTINUE_sstd_failSafe_of_rehashing;\n\t\t}\n\t\t#endif\n\t\t\n\t\thashT._elems()++;\n\t\tvecKV.pop_back(); // erase the tail element\n\t}\n}\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\ninline void sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::rehash(){\n\t// \"using std::swap;\" is defined, in order to preferentially call overloaded function of swap<T>() for type T. 
(Ref: https://cpprefjp.github.io/reference/utility/swap.html)\n\t// In here, scope of using is limited by \"{}\", this means that scope of using is same as a usual value.\n\tusing std::swap;\n\t\n\t#ifdef use_prime_table\n\tsstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major> hashT_new(tSizeL_idx+1, sstd_IpCHashT::tSizeL[tSizeL_idx+1]); // twice size of tSize will be allocated.\n\t#else\n\tsstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major> hashT_new(tSize*2-1, tSize*2); // twice size of tSize will be allocated.\n\t#endif\n\t\n\tfor(auto itr=this->begin(); itr!=this->end(); ){\n\t\t\n\t\t#ifdef use_prime_table\n\t\tuint64 idx; key2tableIdx_wDivisor_m(idx, itr.first(), hashT_new.tableSize());\n\t\t#else\n\t\tuint64 idx; key2tableIdx_wDivisor_m(idx, itr.first(), hashT_new._tSize_m1());\n\t\t#endif\n\t\t\n\t\t#if defined(use_insert_soft) && (!defined(SSTD_IpCHashT_DEBUG))\n\t\tstd::pair<struct itr_m, bool> itr_TF = hashT_new._insertBase_soft(std::move(itr.first_RW()), std::move(itr.second_RW()), idx);\n\t\t#else\n\t\tstd::pair<struct itr_m, bool> itr_TF = hashT_new._insertBase_hard(std::move(itr.first_RW()), std::move(itr.second_RW()), idx); // when rehashing, there is no meaning to use _insertBase_hard() without stress test.\n\t\t#endif\n\t\t\n\t\tif(itr_TF.first.index()==itr_needRehash_m){ failSafe_of_rehashing(hashT_new); continue; } // more rehashing is required while rehashing.\n\t\t#ifdef SSTD_IpCHashT_DEBUG\n\t\tif(use_testFOR_dbg){ // testing failSafe_of_rehashing()\n\t\t\thashT_new._elems()++;\n\t\t\t++itr;\n\t\t\tfailSafe_of_rehashing(hashT_new);\n\t\t\tcontinue;\n\t\t}\n\t\t#endif\n\t\t\n\t\thashT_new._elems()++;\n\t\t++itr;\n\t}\n\tswap_hashT(*this, hashT_new);\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n// What I really want to implement is [A].\n// What I actually implement is [B], considering the balance between successful and unsuccessful lookups.\n// T_key types with a high calculation cost of \"T_key_eq()(X, Y)\" need to use [A].\n// While the calculation cost of \"T_key_eq()(X, Y)\" is low, benefiting unsuccessful lookups is more effective than benefiting successful ones.\n// For example, if the load factor is 50%, 1 of 2 unsuccessful lookups points to an empty element.\n\ninline bool sstd_IpCHashT::isSuccessfulMajor(sstd::IpCHashT_opt::successfulMajor dummy){ return true; }\ninline bool sstd_IpCHashT::isSuccessfulMajor(sstd::IpCHashT_opt::unsuccessfulMajor dummy){ return false; }\n\n#define findBase_m()\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\tif(sstd_IpCHashT::isSuccessfulMajor(T_major())){\t\t\t\t\t\\\n\t\t/* [A] for Successful Major Option */\t\t\t\t\t\t\t\\\n\t\tif(! isHead_m(pT[idx]) ){ return itr_m(maxShift, ttSize, pT, itr_end_m); } /* key is not found. */ \\\n\t\tfor(;;){\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t\t\tif( T_key_eq()(pT[idx].key, key_in) ){ return itr_m(maxShift, ttSize, pT, idx); } /* key is found. */ \\\n\t\t\tif( pT[idx].next == (T_shift)0 ){ return itr_m(maxShift, ttSize, pT, itr_end_m); } /* key is not found. */ \\\n\t\t\tidx += pT[idx].next;\t\t\t\t\t\t\t\t\t\t\\\n\t\t}\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t}else{\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t\t/* [B] for Unsuccessful Major Option */\t\t\t\t\t\t\t\\\n\t\tfor(;;){\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t\t\tif( T_key_eq()(pT[idx].key, key_in) ){\t\t\t\t\t\t\\\n\t\t\t\tif( isEmpty_m(pT[idx]) ){ return itr_m(maxShift, ttSize, pT, itr_end_m); } /* key is not found. 
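(the matched slot is empty; key_in happened to equal the default-constructed key of an empty element) 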
*/ \\\n\t\t\t\treturn itr_m(maxShift, ttSize, pT, idx); /* key is found. */ \\\n\t\t\t}\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t\t\tif( pT[idx].next==(T_shift)0 ){ return itr_m(maxShift, ttSize, pT, itr_end_m); } /* key is not found. */ \\\n\t\t\tidx += pT[idx].next;\t\t\t\t\t\t\t\t\t\t\\\n\t\t}\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t}\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\ninline struct itr_m sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::find(const T_key& key_in, uint64 idx){\n\tfindBase_m();\n}\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\ninline struct itr_m sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::find(const T_key& key_in){\n\tuint64 idx; key2tableIdx_m(idx, key_in); // get table index\n\tfindBase_m();\n}\n#undef findBase_m\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\nstd::pair<struct itr_m, bool> sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::_insertBase_soft(T_key&& key_in, T_val&& val_in, uint64 idx){\n\t\n\t// \"using std::swap;\" is defined, in order to preferentially call overloaded function of swap<T>() for type T. (Ref: https://cpprefjp.github.io/reference/utility/swap.html)\n\t// in here, scope of \"using\" is limited by \"{}\", this means that scope of \"using\" is same as a usual value.\n\tusing std::swap;\n\t\n\tif( isEmpty_m(pT[idx]) ){\n\t\t// 'pT[idx]' is empty\n\t\t\n\t\t// --- case01 ---\n\t\tassert(idx<ttSize);\n\t\tpT[idx].key = std::move(key_in);\n\t\tpT[idx].val = std::move(val_in);\n\t\tpT[idx].prev = (T_shift)0;\n\t\tpT[idx].next = (T_shift)0;\n\t\treturn std::pair<struct itr_m, bool>(itr_m(maxShift, ttSize, pT, idx), true);\n\t}\n\t// below:: 'pT[idx]' is not empty\n\t\n\t//-----------------------------------------------------------------------------------------------------------------------------------------\n\t\n\t// The following process solves a conflict with the existing elements.\n\tif( isHead_m(pT[idx]) ){\n\t\t// If the element is head, append the element between the chain(s).\n\t\t// For insertion case02 and case03.\n\t\t\n\t\t// --- case02 --- ==>> pass for soft\n\t\t\n\t\t// find key and seek idx.\n\t\tfor(;;){\n\t\t\tif( T_key_eq()(pT[idx].key, key_in) ){\n\t\t\t\t// key is found.\n\t\t\t\tassert(idx<ttSize);\n//\t\t\t\tpT[idx].key = std::move(key_in);\n//\t\t\t\tpT[idx].val = std::move(val_in);\n\t\t\t\tpT[idx].prev = (T_shift)0;\n\t\t\t\tpT[idx].next = (T_shift)0;\n\t\t\t\treturn std::pair<struct itr_m, bool>(itr_m(maxShift, ttSize, pT, idx), false);\n\t\t\t}\n\t\t\tif( pT[idx].next == (T_shift)0 ){ break; } // key is not found. 
(At this time, idx indicates the tail of the chain.)\n\t\t\tidx += pT[idx].next;\n\t\t}\n\t\tuint64 prevIdx_e = idx; // prev index from empty\n\t\t\n\t\t// seek to an empty element until tSize.\n\t\tidx++;\n\t\tseek2emptyIndex_m(pT, idx, prevIdx_e); // when false -> returned internally -> a rehash will occur.\n\t\t\n\t\t// --- case03 ---\n\t\tassert(idx<ttSize);\n\t\tT_shift prevShift_e = idx - prevIdx_e;\n\t\tpT[prevIdx_e].next = prevShift_e;\n\t\tpT[ idx ].key  = std::move(key_in);\n\t\tpT[ idx ].val  = std::move(val_in);\n\t\tpT[ idx ].prev = prevShift_e;\n//\t\tpT[ idx ].next = (T_shift)0; // already zero\n\t\treturn std::pair<struct itr_m, bool>(itr_m(maxShift, ttSize, pT, idx), true);\n\t}else if( isTail_m(pT[idx]) ){\n\t\t// For insertion case04, case05 and case06.\n\t\t\n\t\t// --- case04 --- ==>> pass for soft\n\t\t// --- case05 --- ==>> pass for soft\n\t\t\n\t\t// seek to an empty element until tSize.\n\t\tuint64 idx_f = idx;\n\t\tidx = idx_f + 1;\n\t\tuint64 prevIdx_f = idx_f - pT[idx_f].prev;\n\t\tseek2emptyIndex_m(pT, idx, prevIdx_f); // when false -> returned internally -> a rehash will occur.\n\t\t{\n\t\t\t// --- case06 ---\n\t\t\tT_shift prevShift_e = idx - prevIdx_f; assert(prevShift_e<=seekLimit);\n\t\t\tassert(prevIdx_f<ttSize);\n\t\t\tassert(      idx<ttSize);\n\t\t\tassert(    idx_f<ttSize);\n\t\t\t\n\t\t\tpT[prevIdx_f].next = prevShift_e;\n\t\t\tswap(pT[idx].key, pT[idx_f].key); // move pT[idx_f] to pT[idx] and make pT[idx_f] empty.\n\t\t\tswap(pT[idx].val, pT[idx_f].val); // move pT[idx_f] to pT[idx] and make pT[idx_f] empty.\n\t\t\tpT[ idx  ].prev = prevShift_e;\n\t\t\tpT[ idx  ].next = (T_shift)0;\n\t\t\t\n\t\t\tpT[idx_f].key  = std::move(key_in);\n\t\t\tpT[idx_f].val  = std::move(val_in);\n\t\t\tpT[idx_f].prev = (T_shift)0;\n//\t\t\tpT[idx_f].next = (T_shift)0; // already zero\n\t\t\t\n\t\t\treturn std::pair<struct itr_m, bool>(itr_m(maxShift, ttSize, pT, idx_f), true);\n\t\t}\n\t}else{\n\t\t// For insertion case07, case08, case09, case10 and case11.\n\t\tuint64 idx_f = idx; // first index\n\t\tuint64 prev2next_f = (uint64)pT[idx_f].prev + (uint64)pT[idx_f].next;\n\t\tif(prev2next_f>seekLimit){ return std::pair<struct itr_m, bool>(itr_m(maxShift, ttSize, pT, itr_needRehash_m), false); }\n\t\t\n\t\t// --- case07 --- ==>> pass for soft\n\t\t// --- case08 --- ==>> pass for soft\n\t\t// --- case09 --- ==>> pass for soft\n\t\t// --- case10 --- ==>> pass for soft\n\t\t\n\t\tseek2tail(pT, idx);\n\t\t\n\t\t// seek to an empty element until tSize.\n\t\tuint64 prevIdx_e = idx; // prev index from empty\n\t\tseek2emptyIndex_m(pT, idx, prevIdx_e); // when false -> returned internally -> a rehash will occur.\n\t\t{\n\t\t\t// --- case11 ---\n\t\t\tuint64 prevIdx_f = idx_f - pT[idx_f].prev;\n\t\t\tuint64 nextIdx_f = idx_f + pT[idx_f].next;\n\t\t\tassert(prevIdx_f<ttSize);\n\t\t\tassert(    idx_f<ttSize);\n\t\t\tassert(nextIdx_f<ttSize);\n\t\t\tassert(prevIdx_e<ttSize);\n\t\t\tassert(      idx<ttSize);\n\t\t\t\n\t\t\tpT[prevIdx_f].next = prev2next_f;\n\t\t\tswap(pT[idx].key, pT[idx_f].key); // move pT[idx_f] to pT[idx] and make pT[idx_f] empty.\n\t\t\tswap(pT[idx].val, pT[idx_f].val); // move pT[idx_f] to pT[idx] and make pT[idx_f] empty.\n\t\t\tpT[ idx_f].key  = std::move(key_in);\n\t\t\tpT[ idx_f].val  = std::move(val_in);\n\t\t\tpT[ idx_f].prev = (T_shift)0;\n\t\t\tpT[ idx_f].next = (T_shift)0;\n\t\t\tpT[nextIdx_f].prev = prev2next_f;\n\t\t\t\n\t\t\tT_shift prevShift_e = idx - prevIdx_e;\n\t\t\tpT[prevIdx_e].next = prevShift_e;\n\t\t\tpT[ idx  ].prev = prevShift_e;\n\t\t\tpT[ idx  ].next = (T_shift)0;\n\t\t\treturn std::pair<struct itr_m, bool>(itr_m(maxShift, ttSize, pT, idx_f), true);\n\t\t}\n\t}\n}\n
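// (Editor's note) The insertion cases 01-11 refer to the collision-resolution variants handled above: _insertBase_soft()\n// implements only case01 (empty slot), case03 (append after the chain tail), and case06/case11 (evict the colliding\n// element to a later empty slot); the cases marked \"pass for soft\", which search backward for a closer empty slot,\n// are handled by _insertBase_hard() only.\n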
#define insert_init_m()\t\t\t\t\t\t\t\t\t\\\n\tif(isOverMaxLF_m(elems, elems_maxLF)){ rehash(); }\n#define insert_soft_cc_m(key_in, val_in)\t\t\t\t\t\t\t\t\\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\tT_key key = key_in; /* copy -> COPY_OR_NOP <- when implemented as a define */\t\\\n\tT_val val = val_in; /* copy */\t\t\t\t\t\t\t\t\t\t\\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n CONTINUE_sstd_IpCHashT_insert_soft_cc_m:\t\t\t\t\t\t\t\t\\\n\t/* there is not the key-value pair on the table. */\t\t\t\t\t\\\n\tstd::pair<struct itr_m, bool> itrI_TF = this->_insertBase_soft(std::move(key), std::move(val), idx); \\\n\tif(itrI_TF.first._needRehash()){\t\t\t\t\t\t\t\t\t\\\n\t\trehash();\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t\tkey2tableIdx_m(idx, key_in); /* get table index */\t\t\t\t\\\n\t\tgoto CONTINUE_sstd_IpCHashT_insert_soft_cc_m;\t\t\t\t\t\\\n\t}\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\telems++;\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\nstruct itr_m sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::insert_soft(const T_key& key_in, const T_val& val_in){ // copy key and value.\n\tinsert_init_m();\n\tuint64 idx; key2tableIdx_m(idx, key_in);\n\tinsert_soft_cc_m(key_in, val_in);\n\treturn itrI_TF.first;\n}\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\nstruct itr_m sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::insert_soft(const T_key& key_in, const T_val& val_in, uint64 idx){\n\tinsert_init_m();\n\tinsert_soft_cc_m(key_in, val_in);\n\treturn itrI_TF.first;\n} // copy key and value.\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n
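// (Editor's note: a minimal usage sketch of the two insertion flavors; the table size and the key-value pairs are arbitrary.)\n//   sstd::IpCHashT<uint64, uint64> hashT(100);\n//   auto itrS = hashT.insert_soft(1, 10); // fast path: implements only case01/03/06/11\n//   auto itrH = hashT.insert_hard(2, 20); // dense path: additionally searches backward for a closer empty slot\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\nstd::pair<struct itr_m, bool> sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::_insertBase_hard(T_key&& key_in, T_val&& val_in, uint64 idx){\n\t\n\t// \"using std::swap;\" is defined, in order to preferentially call overloaded function of swap<T>() for type T. 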
(Ref: https://cpprefjp.github.io/reference/utility/swap.html)\n\t// in here, scope of using is limited by \"{}\", this means that scope of using is same as a usual value.\n\tusing std::swap;\n\t\n\tif( isEmpty_m(pT[idx]) ){\n\t\t// 'pT[idx]' is empty\n\t\t\n\t\t// --- case01 ---\n\t\tassert(idx<ttSize);\n\t\tpT[idx].key = std::move(key_in);\n\t\tpT[idx].val = std::move(val_in);\n\t\tpT[idx].prev = (T_shift)0;\n\t\tpT[idx].next = (T_shift)0;\n\t\treturn std::pair<struct itr_m, bool>(itr_m(maxShift, ttSize, pT, idx), true);\n\t}\n\t// below:: 'pT[idx]' is not empty\n\t\n\t// The following process solves a conflict with the existing elements.\n\tif( isHead_m(pT[idx]) ){\n\t\t// If the element is head, append the element between the chain(s).\n\t\t// For insertion case02 and case03.\n\n\t\tuint64 prevIdx_e = idx; // prev index from empty\n\t\tuint64 nextIdx_e = idx + pT[idx].next; // next index from empty\n\t\tfor(;; idx++){\n\t\t\t// seek to an empty element until nextIdx.\n\t\t\tif(idx>=nextIdx_e){\n\t\t\t\tprevIdx_e = idx;\n\t\t\t\tif( isTail_m(pT[idx]) ){ break; }\n\t\t\t\tnextIdx_e = idx + pT[idx].next;\n\t\t\t}\n\t\t\tif( isEmpty_m(pT[idx]) ){\n\t\t\t\t// --- case02 ---\n\t\t\t\tT_shift prevShift_e = idx - prevIdx_e; assert(prevShift_e<=seekLimit);\n\t\t\t\tT_shift nextShift_e = nextIdx_e - idx; assert(nextShift_e<=seekLimit);\n\t\t\t\tassert(prevIdx_e<ttSize);\n\t\t\t\tassert( idx <ttSize);\n\t\t\t\tassert(nextIdx_e<ttSize);\n\t\t\t\t\n\t\t\t\tpT[prevIdx_e].next = prevShift_e;\n\t\t\t\tpT[ idx ].key = std::move(key_in);\n\t\t\t\tpT[ idx ].val = std::move(val_in);\n\t\t\t\tpT[ idx ].prev = prevShift_e;\n\t\t\t\tpT[ idx ].next = nextShift_e;\n\t\t\t\tpT[nextIdx_e].prev = nextShift_e;\n\t\t\t\treturn std::pair<struct itr_m, bool>(itr_m(maxShift, ttSize, pT, idx), true);\n\t\t\t}\n\t\t}\n\t\t\n\t\t// seek to an empty element until tSize.\n\t\tseek2emptyIndex_m(pT, idx, nextIdx_e); // when false -> returned intaernally -> rehash will be occurd.\n\t\t\n\t\t// --- case03 ---\n\t\tassert(idx<ttSize);\n\t\tT_shift prevShift_e = idx - prevIdx_e;\n\t\tpT[prevIdx_e].next = prevShift_e;\n\t\tpT[ idx ].key = std::move(key_in);\n\t\tpT[ idx ].val = std::move(val_in);\n\t\tpT[ idx ].prev = prevShift_e;\n//\t\tpT[ idx ].next = (T_shift)0; // aleady zero\n\t\treturn std::pair<struct itr_m, bool>(itr_m(maxShift, ttSize, pT, idx), true);\n\t}else if( isTail_m(pT[idx]) ){\n\t\t// For insertion case04, case05 and case06.\n\t\t\n\t\tuint64 idx_f = idx; // first index\n\t\t{\n\t\t\tuint64 prevIdx_e = idx - pT[idx].prev; // empty prevIdx\n\t\t\tfor(;;){\n\t\t\t\tidx--;\n\t\t\t\tif( idx<=prevIdx_e ){ break; }\n\t\t\t\tif( isEmpty_m(pT[idx]) ){\n\t\t\t\t\t// --- case04 ---\n\t\t\t\t\tT_shift prevShift_e = idx - prevIdx_e; assert(prevShift_e<=seekLimit);\n\t\t\t\t\tassert(prevIdx_e<ttSize);\n\t\t\t\t\tassert( idx <ttSize);\n\t\t\t\t\tassert( idx_f<ttSize);\n\t\t\t\t\t\n\t\t\t\t\tpT[prevIdx_e].next = prevShift_e;\n\t\t\t\t\tswap(pT[idx].key, pT[idx_f].key); // move pT[idx_f] to pT[idx] and make pT[idx_f] empty.\n\t\t\t\t\tswap(pT[idx].val, pT[idx_f].val); // move pT[idx_f] to pT[idx] and make pT[idx_f] empty.\n\t\t\t\t\tpT[ idx ].prev = prevShift_e;\n\t\t\t\t\tpT[ idx ].next = (T_shift)0;\n\t\t\t\t\t\n\t\t\t\t\tpT[idx_f].key = std::move(key_in);\n\t\t\t\t\tpT[idx_f].val = std::move(val_in);\n\t\t\t\t\tpT[idx_f].prev = (T_shift)0;\n\t\t\t\t\tpT[idx_f].next = (T_shift)0;\n\t\t\t\t\t\n\t\t\t\t\treturn std::pair<struct itr_m, bool>(itr_m(maxShift, ttSize, pT, idx_f), true);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\t\n\t\tif(! 
isHead_m(pT[idx]) ){\n\t\t\tuint64 prevIdx_e = idx - pT[idx].prev;\n\t\t\tuint64 nextIdx_e = idx;\n\t\t\tfor(;;){\n\t\t\t\tidx--;\n\t\t\t\tif( idx<=prevIdx_e ){\n\t\t\t\t\tnextIdx_e = idx;\n\t\t\t\t\tif( isHead_m(pT[idx]) ){ break; }\n\t\t\t\t\tprevIdx_e = idx - pT[idx].prev;\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif( isEmpty_m(pT[idx]) ){\n\t\t\t\t\t// --- case05 ---\n\t\t\t\t\tuint64 prevIdx_f = idx_f - pT[idx_f].prev;\n\t\t\t\t\tT_shift prevShift_e = idx - prevIdx_e; assert(prevShift_e<=seekLimit);\n\t\t\t\t\tT_shift nextShift_e = nextIdx_e - idx; assert(nextShift_e<=seekLimit);\n\t\t\t\t\tassert(prevIdx_e<ttSize);\n\t\t\t\t\tassert( idx <ttSize);\n\t\t\t\t\tassert(nextIdx_e<ttSize);\n\t\t\t\t\tassert(prevIdx_f<ttSize);\n\t\t\t\t\tassert( idx_f<ttSize);\n\t\t\t\t\t\n\t\t\t\t\tpT[prevIdx_e].next = prevShift_e;\n\t\t\t\t\tswap(pT[idx].key, pT[idx_f].key); // move pT[idx_f] to pT[idx] and make pT[idx_f] empty.\n\t\t\t\t\tswap(pT[idx].val, pT[idx_f].val); // move pT[idx_f] to pT[idx] and make pT[idx_f] empty.\n\t\t\t\t\tpT[ idx ].prev = prevShift_e;\n\t\t\t\t\tpT[ idx ].next = nextShift_e;\n\t\t\t\t\tpT[nextIdx_e].prev = nextShift_e;\n\t\t\t\t\t\n\t\t\t\t\tpT[prevIdx_f].next = (T_shift)0;\n\t\t\t\t\tpT[ idx_f].key = std::move(key_in);\n\t\t\t\t\tpT[ idx_f].val = std::move(val_in);\n\t\t\t\t\tpT[ idx_f].prev = (T_shift)0;\n//\t\t\t\t\tpT[ idx_f].next = (T_shift)0; // aleady zero\n\t\t\t\t\t\n\t\t\t\t\treturn std::pair<struct itr_m, bool>(itr_m(maxShift, ttSize, pT, idx_f), true);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\t\n\t\t// seek to an empty element until tSize.\n\t\tidx = idx_f + 1;\n\t\tuint64 prevIdx_f = idx_f - pT[idx_f].prev;\n\t\tseek2emptyIndex_m(pT, idx, prevIdx_f); // when false -> returned intaernally -> rehash will be occurd.\n\t\t{\n\t\t\t// --- case06 ---\n\t\t\tT_shift prevShift_e = idx - prevIdx_f; assert(prevShift_e<=seekLimit);\n\t\t\tassert(prevIdx_f<ttSize);\n\t\t\tassert( idx <ttSize);\n\t\t\tassert( idx_f<ttSize);\n\t\t\t\n\t\t\tpT[prevIdx_f].next = prevShift_e;\n\t\t\tswap(pT[idx].key, pT[idx_f].key); // move pT[idx_f] to pT[idx] and make pT[idx_f] empty.\n\t\t\tswap(pT[idx].val, pT[idx_f].val); // move pT[idx_f] to pT[idx] and make pT[idx_f] empty.\n\t\t\tpT[ idx ].prev = prevShift_e;\n\t\t\tpT[ idx ].next = (T_shift)0;\n\t\t\t\n\t\t\tpT[idx_f].key = std::move(key_in);\n\t\t\tpT[idx_f].val = std::move(val_in);\n\t\t\tpT[idx_f].prev = (T_shift)0;\n//\t\t\tpT[idx_f].next = (T_shift)0; // aleady zero\n\t\t\t\n\t\t\treturn std::pair<struct itr_m, bool>(itr_m(maxShift, ttSize, pT, idx_f), true);\n\t\t}\n\t}else{\n\t\t// For insertion case07, case08, case09, case10 and case11.\n\t\t\n\t\tuint64 idx_f = idx; // first index\n\t\t{\n\t\t\tuint64 prevIdx_e = idx - pT[idx].prev; // empty prevIdx\n\t\t\tfor(;;){\n\t\t\t\tidx--;\n\t\t\t\tif( idx<=prevIdx_e ){ break; }\n\t\t\t\tif( isEmpty_m(pT[idx]) ){\n\t\t\t\t\t// --- case07 ---\n\t\t\t\t\tuint64 nextIdx_f = idx_f + pT[idx_f].next;\n\t\t\t\t\tT_shift prevShift_e = idx - prevIdx_e;\n\t\t\t\t\tuint64 nextShift_e = idx_f + (uint64)pT[idx_f].next - idx;\n\t\t\t\t\tif(nextShift_e>seekLimit){ return std::pair<struct itr_m, bool>(itr_m(maxShift, ttSize, pT, itr_needRehash_m), false); }\n\t\t\t\t\t\n\t\t\t\t\tassert(prevIdx_e<ttSize);\n\t\t\t\t\tassert( idx <ttSize);\n\t\t\t\t\tassert(nextIdx_f<ttSize);\n\t\t\t\t\tassert( idx_f<ttSize);\n\t\t\t\t\tassert(prevShift_e<=seekLimit);\n\t\t\t\t\tassert(nextShift_e<=seekLimit);\n\t\t\t\t\t\n\t\t\t\t\tpT[prevIdx_e].next = prevShift_e;\n\t\t\t\t\tswap(pT[idx].key, pT[idx_f].key); // move pT[idx_f] 
to pT[idx] and make pT[idx_f] empty.\n\t\t\t\t\tswap(pT[idx].val, pT[idx_f].val); // move pT[idx_f] to pT[idx] and make pT[idx_f] empty.\n\t\t\t\t\tpT[ idx ].prev = prevShift_e;\n\t\t\t\t\tpT[ idx ].next = (T_shift)nextShift_e;\n\t\t\t\t\tpT[nextIdx_f].prev = (T_shift)nextShift_e;\n\t\t\t\t\n\t\t\t\t\tpT[ idx_f].key = std::move(key_in);\n\t\t\t\t\tpT[ idx_f].val = std::move(val_in);\n\t\t\t\t\tpT[ idx_f].prev = (T_shift)0;\n\t\t\t\t\tpT[ idx_f].next = (T_shift)0;\n\t\t\t\t\n\t\t\t\t\treturn std::pair<struct itr_m, bool>(itr_m(maxShift, ttSize, pT, idx_f), true);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\t\n\t\tuint64 prev2next_f = (uint64)pT[idx_f].prev + (uint64)pT[idx_f].next;\n\t\tif(prev2next_f>seekLimit){ return std::pair<struct itr_m, bool>(itr_m(maxShift, ttSize, pT, itr_needRehash_m), false); }\n\t\tif(! isHead_m(pT[idx]) ){\n\t\t\tuint64 prevIdx_e = idx - pT[idx].prev;\n\t\t\tuint64 nextIdx_e = idx;\n\t\t\tfor(;;){\n\t\t\t\tidx--;\n\t\t\t\tif( idx<=prevIdx_e ){\n\t\t\t\t\tnextIdx_e = idx;\n\t\t\t\t\tif( isHead_m(pT[idx]) ){ break; }\n\t\t\t\t\tprevIdx_e = idx - pT[idx].prev;\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif( isEmpty_m(pT[idx]) ){\n\t\t\t\t\t// --- case08 ---\n\t\t\t\t\tuint64 prevIdx_f = idx_f - pT[idx_f].prev;\n\t\t\t\t\tuint64 nextIdx_f = idx_f + pT[idx_f].next;\n\t\t\t\t\tT_shift prevShift_e = idx - prevIdx_e;\n\t\t\t\t\tT_shift nextShift_e = nextIdx_e - idx;\n\t\t\t\t\t\n\t\t\t\t\tassert(prevIdx_e<ttSize);\n\t\t\t\t\tassert( idx <ttSize);\n\t\t\t\t\tassert(nextIdx_e<ttSize);\n\t\t\t\t\tassert(prevIdx_f<ttSize);\n\t\t\t\t\tassert( idx_f<ttSize);\n\t\t\t\t\tassert(nextIdx_f<ttSize);\n\t\t\t\t\tassert(prevShift_e<=seekLimit);\n\t\t\t\t\tassert(nextShift_e<=seekLimit);\n\t\t\t\t\tassert(prev2next_f<=seekLimit);\n\t\t\t\t\t\n\t\t\t\t\tpT[prevIdx_e].next = prevShift_e;\n\t\t\t\t\tswap(pT[idx].key, pT[idx_f].key); // move pT[idx_f] to pT[idx] and make pT[idx_f] empty.\n\t\t\t\t\tswap(pT[idx].val, pT[idx_f].val); // move pT[idx_f] to pT[idx] and make pT[idx_f] empty.\n\t\t\t\t\tpT[ idx ].prev = prevShift_e;\n\t\t\t\t\tpT[ idx ].next = nextShift_e;\n\t\t\t\t\tpT[nextIdx_e].prev = nextShift_e;\n\t\t\t\t\t\t\n\t\t\t\t\tpT[prevIdx_f].next = (T_shift)prev2next_f;\n\t\t\t\t\tpT[ idx_f].key = std::move(key_in);\n\t\t\t\t\tpT[ idx_f].val = std::move(val_in);\n\t\t\t\t\tpT[ idx_f].prev = (T_shift)0;\n\t\t\t\t\tpT[ idx_f].next = (T_shift)0;\n\t\t\t\t\tpT[nextIdx_f].prev = (T_shift)prev2next_f;\n\t\t\t\t\t\n\t\t\t\t\treturn std::pair<struct itr_m, bool>(itr_m(maxShift, ttSize, pT, idx_f), true);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\t\n\t\tuint64 prevIdx_f = idx_f - pT[idx_f].prev;\n\t\tuint64 nextIdx_f = idx_f + pT[idx_f].next;\n\t\tfor(idx=idx_f+1;; idx++){\n\t\t\tif( idx>=nextIdx_f ){ break; }\n\t\t\tif( isEmpty_m(pT[idx]) ){\n\t\t\t\t// --- case09 ---\n\t\t\t\tuint64 prevIdx_f2idx_e = idx - prevIdx_f;\n\t\t\t\tif(prevIdx_f2idx_e>seekLimit){ return std::pair<struct itr_m, bool>(itr_m(maxShift, ttSize, pT, itr_needRehash_m), false); }\n\t\t\t\tT_shift nextShift_e = nextIdx_f - idx;\n\t\t\t\t\n\t\t\t\tassert(prevIdx_f<ttSize);\n\t\t\t\tassert( idx_f<ttSize);\n\t\t\t\tassert( idx <ttSize);\n\t\t\t\tassert(nextIdx_f<ttSize);\n\t\t\t\tassert(prevIdx_f2idx_e<=seekLimit);\n\t\t\t\tassert(nextShift_e <=seekLimit);\n\t\t\t\t\n\t\t\t\tpT[prevIdx_f].next = (T_shift)prevIdx_f2idx_e;\n\t\t\t\t\n\t\t\t\tswap(pT[idx].key, pT[idx_f].key); // move pT[idx_f] to pT[idx] and make pT[idx_f] empty.\n\t\t\t\tswap(pT[idx].val, pT[idx_f].val); // move pT[idx_f] to pT[idx] and make pT[idx_f] empty.\n\t\t\t\tpT[idx_f].key = 
std::move(key_in);\n\t\t\t\tpT[idx_f].val = std::move(val_in);\n\t\t\t\tpT[idx_f].prev = (T_shift)0;\n\t\t\t\tpT[idx_f].next = (T_shift)0;\n\t\t\t\t\n\t\t\t\tpT[ idx ].prev = (T_shift)prevIdx_f2idx_e;\n\t\t\t\tpT[ idx ].next = nextShift_e;\n\t\t\t\tpT[nextIdx_f].prev = nextShift_e;\n\t\t\t\treturn std::pair<struct itr_m, bool>(itr_m(maxShift, ttSize, pT, idx_f), true);\n\t\t\t}\n\t\t}\n\t\t\n\t\t// The end of case09 guarantees that \"idx==idx_f+1\" is not empty.\n\t\tuint64 prevIdx_e = idx;\n\t\tuint64 nextIdx_e = idx + pT[idx].next;\n\t\tfor(;; idx++){\n\t\t\t// seek to an empty element until nextIdx.\n\t\t\tif( idx>=nextIdx_e ){\n\t\t\t\tprevIdx_e = idx;\n\t\t\t\tif( isTail_m(pT[idx]) ){ break; }\n\t\t\t\tnextIdx_e = idx + pT[idx].next;\n\t\t\t}\n\t\t\tif( isEmpty_m(pT[idx]) ){\n\t\t\t\t// --- case10 ---\n\t\t\t\tassert(prevIdx_f<ttSize);\n\t\t\t\tassert( idx_f<ttSize);\n\t\t\t\tassert(nextIdx_f<ttSize);\n\t\t\t\tassert(prevIdx_e<ttSize);\n\t\t\t\tassert( idx <ttSize);\n\t\t\t\tassert(nextIdx_e<ttSize);\n\t\t\t\tassert(prev2next_f<=seekLimit);\n\t\t\t\t\n\t\t\t\tpT[prevIdx_f].next = prev2next_f;\n\t\t\t\tswap(pT[idx].key, pT[idx_f].key); // move pT[idx_f] to pT[idx] and make pT[idx_f] empty.\n\t\t\t\tswap(pT[idx].val, pT[idx_f].val); // move pT[idx_f] to pT[idx] and make pT[idx_f] empty.\n\t\t\t\tpT[ idx_f].key = std::move(key_in);\n\t\t\t\tpT[ idx_f].val = std::move(val_in);\n\t\t\t\tpT[ idx_f].prev = (T_shift)0;\n\t\t\t\tpT[ idx_f].next = (T_shift)0;\n\t\t\t\tpT[nextIdx_f].prev = prev2next_f;\n\t\t\t\t\n\t\t\t\tT_shift prevShift_e = idx - prevIdx_e;\n\t\t\t\tT_shift nextShift_e = nextIdx_e - idx;\n\t\t\t\tassert(prevShift_e<=seekLimit);\n\t\t\t\tassert(nextShift_e<=seekLimit);\n\t\t\t\t\n\t\t\t\tpT[prevIdx_e].next = prevShift_e;\n\t\t\t\tpT[ idx ].prev = prevShift_e;\n\t\t\t\tpT[ idx ].next = nextShift_e;\n\t\t\t\tpT[nextIdx_e].prev = nextShift_e;\n\t\t\t\treturn std::pair<struct itr_m, bool>(itr_m(maxShift, ttSize, pT, idx_f), true);\n\t\t\t}\n\t\t}\n\t\t\n\t\t// seek to an empty element until tSize.\n\t\tseek2emptyIndex_m(pT, idx, nextIdx_e); // when false -> returned intaernally -> rehash will be occurd.\n\t\t\n\t\t{\n\t\t\t// --- case11 ---\n\t\t\tassert(prevIdx_f<ttSize);\n\t\t\tassert( idx_f<ttSize);\n\t\t\tassert(nextIdx_f<ttSize);\n\t\t\tassert(prevIdx_e<ttSize);\n\t\t\tassert( idx <ttSize);\n\t\t\t\n\t\t\tpT[prevIdx_f].next = prev2next_f;\n\t\t\tswap(pT[idx].key, pT[idx_f].key); // move pT[idx_f] to pT[idx] and make pT[idx_f] empty.\n\t\t\tswap(pT[idx].val, pT[idx_f].val); // move pT[idx_f] to pT[idx] and make pT[idx_f] empty.\n\t\t\tpT[ idx_f].key = std::move(key_in);\n\t\t\tpT[ idx_f].val = std::move(val_in);\n\t\t\tpT[ idx_f].prev = (T_shift)0;\n\t\t\tpT[ idx_f].next = (T_shift)0;\n\t\t\tpT[nextIdx_f].prev = prev2next_f;\n\t\t\t\n\t\t\tT_shift prevShift_e = idx - prevIdx_e;\n\t\t\tpT[prevIdx_e].next = prevShift_e;\n\t\t\tpT[ idx ].prev = prevShift_e;\n\t\t\tpT[ idx ].next = (T_shift)0;\n\t\t\treturn std::pair<struct itr_m, bool>(itr_m(maxShift, ttSize, pT, idx_f), true);\n\t\t}\n\t}\n}\n\n//---\n\n#define insert_hard_cc_m(key_in, val_in)\t\t\t\t\t\t\t\t\\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\tstruct itr_m itrF = this->find(key_in, idx);\t\t\t\t\t\t\\\n\tif(itrF!=this->end()){ itrF.second_RW()=val_in; return itrF; }\t\t\\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\tT_key key = key_in; /* copy -> COPY_OR_NOP <- when impliment as a define */\t\\\n\tT_val val = val_in; /* copy */\t\t\t\t\t\t\t\t\t\t\\\n CONTINUE_sstd_IpCHashT_hard_cc_m:\t\t\t\t\t\t\t\t\t\t\\\n\t/* there is 
not the key-value pair on the table. */\t\t\t\t\t\\\n\tstd::pair<struct itr_m, bool> itrI_TF = this->_insertBase_hard(std::move(key), std::move(val), idx); \\\n\tif(itrI_TF.first._needRehash()){\t\t\t\t\t\t\t\t\t\\\n\t\trehash();\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t\tkey2tableIdx_m(idx, key_in); /* get table index */\t\t\t\t\\\n\t\tgoto CONTINUE_sstd_IpCHashT_hard_cc_m;\t\t\t\t\t\t\t\\\n\t}\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\telems++;\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\nstruct itr_m sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::insert_hard(const T_key& key_in, const T_val& val_in){ // copy key and value.\n\tinsert_init_m();\n\tuint64 idx; key2tableIdx_m(idx, key_in); // get table index\n\tinsert_hard_cc_m(key_in, val_in);\n\treturn itrI_TF.first;\n}\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\nstruct itr_m sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::insert_hard(const T_key& key_in, const T_val& val_in, uint64 idx){\n\tinsert_init_m();\n\tinsert_hard_cc_m(key_in, val_in);\n\treturn itrI_TF.first;\n} // copy key and value.\n//template <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major> void sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::insert( T_key&& key_in, const T_val& val_in){} // swap key. (Callable by \"sstd::CHashT<T_key, T_val> hashT; hashT.add(std::move(key), val );\".)\n//template <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major> void sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::insert(const T_key& key_in, T_val&& val_in){} // swap value. (Callable by \"sstd::CHashT<T_key, T_val> hashT; hashT.add( key , std::move(val));\".)\n//template <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major> void sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::insert( T_key&& key_in, T_val&& val_in){} // swap key and value. 
(Callable by \"sstd::CHashT<T_key, T_val> hashT; hashT.add(std::move(key), std::move(val));\".)\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n#ifdef use_insert_soft\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\nstruct itr_m sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::insert(const T_key& key_in, const T_val& val_in){ // copy key and value.\n\tinsert_init_m();\n\tuint64 idx; key2tableIdx_m(idx, key_in);\n\tinsert_soft_cc_m(key_in, val_in);\n\treturn itrI_TF.first;\n}\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\nstruct itr_m sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::insert(const T_key& key_in, const T_val& val_in, uint64 idx){ // copy key and value.\n\tinsert_init_m();\n\tinsert_soft_cc_m(key_in, val_in);\n\treturn itrI_TF.first;\n}\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\nstd::pair<struct itr_m, bool> sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::insert(const value_type& v){ // for STL (1). Ref: https://cpprefjp.github.io/reference/unordered_map/unordered_map/insert.html\n\tinsert_init_m();\n\tuint64 idx; key2tableIdx_m(idx, v.first);\n\tinsert_soft_cc_m(v.first, v.second);\n\treturn itrI_TF;\n}\n\n#else\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\nstruct itr_m sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::insert(const T_key& key_in, const T_val& val_in){ // copy key and value.\n\tinsert_init_m();\n\tuint64 idx; key2tableIdx_m(idx, key_in);\n\tinsert_hard_cc_m(key_in, val_in);\n\treturn itrI_TF.first;\n}\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\nstruct itr_m sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::insert(const T_key& key_in, const T_val& val_in, uint64 idx){ // copy key and value.\n\tinsert_init_m();\n\tinsert_hard_cc_m(key_in, val_in);\n\treturn itrI_TF.first;\n}\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\nstd::pair<struct itr_m, bool> sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::insert(const value_type& v){ // for STL (1). 
Ref: https://cpprefjp.github.io/reference/unordered_map/unordered_map/insert.html\n\tinsert_init_m();\n\tuint64 idx; key2tableIdx_m(idx, v.first);\n\tinsert_hard_cc_m(v.first, v.second);\n\treturn itrI_TF;\n}\n\n#endif\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\ninline T_val& sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::operator[](const T_key& key_in){\n\tinsert_init_m();\n\tuint64 idx; key2tableIdx_m(idx, key_in); // get table index\n\tauto itrF = find(key_in, idx);\n\tif(itrF!=this->end()){ return itrF.second_RW(); }\n\t\n\tT_val val_in = T_val(); // generation of empty buf for insert()\n\tauto itrI = insert(key_in, val_in, idx);\n//\tauto itrI = insert(key_in, std::move(val_in), idx);\n\treturn itrI.second_RW();\n}\n//T_val& operator[]( T_key&& rhs);\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n//template <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\n//void sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::erase(const T_key& key_in, uint64 idx);\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq, typename T_shift, typename T_maxLF, typename T_major>\nvoid sstd::IpCHashT<T_key, T_val, T_hash, T_key_eq, T_shift, T_maxLF, T_major>::erase(const T_key& key_in){\n\t\n\t// \"using std::swap;\" is defined, in order to preferentially call overloaded function of swap<T>() for type T. (Ref: https://cpprefjp.github.io/reference/utility/swap.html)\n\t// in here, scope of using is limited by \"{}\", this means that scope of using is same as a usual value.\n\tusing std::swap;\n\t\n\tauto itr = find(key_in);\n\tif(! 
(itr!=this->end()) ){ return; }\n\tuint64 idx = itr.index();\n\t\n\tif( isHead_m(pT[idx]) && isTail_m(pT[idx]) ){\n\t\t// --- case01 ---\n\t\tT_key keyBuf; // in order to call destructor\n\t\tT_val valBuf; // in order to call destructor\n\t\tswap(pT[idx].key, keyBuf);\n\t\tswap(pT[idx].val, valBuf);\n\t\tpT[idx].prev = maxShift;\n//\t\tpT[idx].next = (T_shift)0; // already zero\n\t}else if( isTail_m(pT[idx]) ){\n\t\t// --- case02 ---\n\t\tuint64 prevIdx = idx - pT[idx].prev;\n\t\tpT[prevIdx].next = (T_shift)0;\n\t\t\n\t\tT_key keyBuf; // in order to call destructor\n\t\tT_val valBuf; // in order to call destructor\n\t\tswap(pT[idx].key, keyBuf);\n\t\tswap(pT[idx].val, valBuf);\n\t\tpT[idx].prev = maxShift;\n//\t\tpT[idx].next = (T_shift)0; // already zero\n\t}else if( isHead_m(pT[idx]) ){\n\t\tuint64 idx_head = idx;\n\t\tseek2tail(pT, idx);\n\t\t\n\t\t// --- case03 ---\n\t\tswap(pT[idx_head].key, pT[idx].key); // move the tail element to the head.\n\t\tswap(pT[idx_head].val, pT[idx].val); // move the tail element to the head.\n\t\t\n\t\tuint64 prevIdx = idx - pT[idx].prev;\n\t\tpT[prevIdx].next = (T_shift)0;\n\t\tT_key keyBuf; // in order to call destructor\n\t\tT_val valBuf; // in order to call destructor\n\t\tswap(pT[idx].key, keyBuf); // clear the vacated tail element.\n\t\tswap(pT[idx].val, valBuf); // clear the vacated tail element.\n\t\tpT[idx].prev = maxShift;\n//\t\tpT[idx].next = (T_shift)0; // already zero\n\t}else{\n\t\tuint64 idx_middle = idx;\n\t\tseek2tail(pT, idx);\n\t\t\n\t\t// --- case04 ---\n\t\tswap(pT[idx_middle].key, pT[idx].key); // move the tail element to the erased middle position.\n\t\tswap(pT[idx_middle].val, pT[idx].val); // move the tail element to the erased middle position.\n\t\t\n\t\tuint64 prevIdx = idx - pT[idx].prev;\n\t\tpT[prevIdx].next = (T_shift)0;\n\t\tT_key keyBuf; // in order to call destructor\n\t\tT_val valBuf; // in order to call destructor\n\t\tswap(pT[idx].key, keyBuf); // clear the vacated tail element.\n\t\tswap(pT[idx].val, valBuf); // clear the vacated tail element.\n\t\tpT[idx].prev = maxShift;\n//\t\tpT[idx].next = (T_shift)0; // already zero\n\t}\n\telems--;\n\treturn;\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n#undef insert_hard_cc_m\n#undef insert_soft_cc_m\n#undef insert_init_m\n\n#undef key2tableIdx_m\n#undef seek2tail\n#undef seek2emptyIndex_m\n#undef isTail_m\n#undef isHead_m\n#undef isEmpty_m\n#undef isOverMaxLF_m\n\n#undef itr_needRehash_m\n#undef itr_end_m\n\n#undef elem_KV_m\n#undef elem_m\n#undef itr_m\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n#ifdef use_insert_soft\n\t#undef use_insert_soft\n#endif\n\n#ifdef use_prime_table\n\t#undef use_prime_table\n#endif\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n" }, { "alpha_fraction": 0.5612677931785583, "alphanum_fraction": 0.5683954954147339, "avg_line_length": 38.01183319091797, "blob_id": "bfe6cf3e0fd0ec791cbe79a9031cb963fce975bc", "content_id": "5b74c09e06a72c2344bd730b84b885125efaab2f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6598, "license_type": "no_license", "max_line_length": 161, "num_lines": 169, "path": "/main_sProc.cpp", "repo_name": "admiswalker/InPlaceChainedHashTable-IpCHashT-", "src_encoding": "UTF-8", 
"text": "#include <sstd/sstd.hpp>\n#include \"./bench.hpp\"\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\nvoid vecPath2v_vvecXY(std::vector<sstd::vvec<double>>& vC_vT_vecX_out, std::vector<sstd::vvec<double>>& vC_vT_vecY_out, const std::vector<std::string>& vecPath){\n\tvC_vT_vecX_out.resize(vecPath.size());\n\tvC_vT_vecY_out.resize(vecPath.size());\n\t\n\tfor(uint i=0; i<vecPath.size(); i++){\n\t\tsstd::vvec<std::string> vvecOrig = sstd::csv2vvec(vecPath[i]);\n\t\tsstd::vvec<std::string> header = vvecOrig && sstd::slice_mv(sstd::begin(), 1);\n\t\tsstd::vvec<std::string> vvecStr = vvecOrig && sstd::slice_mv(1, sstd::end());\n\t\tsstd::vvec< double> vvecD = sstd::Tr(sstd::str2double(vvecStr));\n\t\t\n\t\tsstd::vvec<double> vT_vecX; for(uint i=0; i<vvecD.size()-1; i++){ vT_vecX<<=vvecD[0]; } // depending on csv format\n\t\tsstd::vvec<double> vT_vecY; for(uint i=1; i<vvecD.size() ; i++){ vT_vecY<<=vvecD[i]; } // depending on csv format\n\t\tvC_vT_vecX_out[i] = std::move(vT_vecX);\n\t\tvC_vT_vecY_out[i] = std::move(vT_vecY);\n\t}\n}\nstd::vector<double> vvec2vecMed(const sstd::vvec<double>& rhs){\n\tstd::vector<double> ret(rhs.size());\n\tfor(uint i=0; i<rhs.size(); i++){\n\t\tret[i] = sstd::med(rhs[i]);\n//\t\tret[i] = sstd::ave(rhs[i]);\n\t}\n\treturn ret;\n}\nvoid vecPath2vvecXY(sstd::vvec<double>& vvecX_out, sstd::vvec<double>& vvecY_out, const std::vector<std::string>& vecPath){\n\t\n\tsstd::vec<sstd::vvec<double>> vC_vT_vecX, vC_vT_vecY; // vecCSV vecType vecVal\n\tvecPath2v_vvecXY(vC_vT_vecX, vC_vT_vecY, vecPath);\n\t\n\tvvecX_out = vC_vT_vecX[0];\n\tsstd::vec<sstd::vvec<double>> vT_vC_vecY = sstd::Tr(vC_vT_vecY);\n\t\n\tuint typeNum = vT_vC_vecY.size();\n\tvvecY_out.resize(typeNum);\n\tfor(uint i=0; i<typeNum; i++){\n\t\tsstd::vvec<double> vecY_vC = sstd::Tr(vT_vC_vecY[i]);\n\t\tvvecY_out[i] = vvec2vecMed(vecY_vC);\n\t}\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\nint main(int argc, char** argv){\n\tprintf(\"\\n\");\n\tprintf(\"+---------------------------------------------------+\\n\");\n\tprintf(\"| |\\n\");\n\tprintf(\"| Welcome to Sub Standard Library (SSTD) ! |\\n\");\n\tprintf(\"| |\\n\");\n\tprintf(\"| > This is an Implementation Plan for |\\n\");\n\tprintf(\"| > In-placeChainedHashTable (IpCHashT) |\\n\");\n\tprintf(\"| > and ChainedHashTable ( CHashT). 
|\\n\");\n\tprintf(\"| |\\n\");\n\tprintf(\"+---------------------------------------------------+\\n\");\n\tprintf(\"\\n\");\n\tprintf(\"■ measureTime_start---------------\\n\\n\"); time_m timem; sstd::measureTime_start(timem);\n\t\n\tstd::vector<std::string> saveAs={\".png\", \".pdf\"};\n\t\n\t{\n\t\tconst char* csvPath = \"./tmpBench/usedMemory/*\";\n\t\tconst char* savePath = \"./tmpBench/usedMemory\";\n\t\tstd::vector<std::string> vecPath = sstd::glob(csvPath);\n\t\t\n\t\tif(vecPath.size()!=0){\n\t\t\tsstd::vvec<double> vvecX, vvecY; vecPath2vvecXY(vvecX, vvecY, vecPath);\n\t\t\tvvec2plot_usedMemory(savePath, saveAs, vvecX, vvecY);\n\t\t}\n\t}\n\t{\n\t\tconst char* csvPath = \"./tmpBench/usedMemory_preAlloc/*\";\n\t\tconst char* savePath = \"./tmpBench/usedMemory_preAlloc\";\n\t\tstd::vector<std::string> vecPath = sstd::glob(csvPath);\n\t\t\n\t\tif(vecPath.size()!=0){\n\t\t\tsstd::vvec<double> vvecX, vvecY; vecPath2vvecXY(vvecX, vvecY, vecPath);\n\t\t\tvvec2plot_usedMemory(savePath, saveAs, vvecX, vvecY);\n\t\t}\n\t}\n\t{\n\t\tconst char* csvPath = \"./tmpBench/find_successful_search/*\";\n\t\tconst char* savePath = \"./tmpBench/find_successful_search_med\";\n\t\tstd::vector<std::string> vecPath = sstd::glob(csvPath);\n\t\t\n\t\tif(vecPath.size()!=0){\n\t\t\tsstd::vvec<double> vvecX, vvecY; vecPath2vvecXY(vvecX, vvecY, vecPath);\n\t\t\tvvec2plot_find(savePath, saveAs, vvecX, vvecY);\n\t\t}\n\t}\n\t{\n\t\tconst char* csvPath = \"./tmpBench/find_unsuccessful_search/*\";\n\t\tconst char* savePath = \"./tmpBench/find_unsuccessful_search_med\";\n\t\tstd::vector<std::string> vecPath = sstd::glob(csvPath);\n\t\t\n\t\tif(vecPath.size()!=0){\n\t\t\tsstd::vvec<double> vvecX, vvecY; vecPath2vvecXY(vvecX, vvecY, vecPath);\n\t\t\tvvec2plot_find_failedAll(savePath, saveAs, vvecX, vvecY);\n\t\t}\n\t}\n\t{\n\t\tconst char* csvPath = \"./tmpBench/insert/*\";\n\t\tconst char* savePath = \"./tmpBench/insert_med\";\n\t\tstd::vector<std::string> vecPath = sstd::glob(csvPath);\n\t\t\n\t\tif(vecPath.size()!=0){\n\t\t\tsstd::vvec<double> vvecX, vvecY; vecPath2vvecXY(vvecX, vvecY, vecPath);\n\t\t\tvvec2plot_insert(savePath, saveAs, vvecX, vvecY);\n\t\t}\n\t}\n\t{\n\t\tconst char* csvPath = \"./tmpBench/insert_et/*\";\n\t\tconst char* savePath = \"./tmpBench/insert_et_med\";\n\t\tstd::vector<std::string> vecPath = sstd::glob(csvPath);\n\t\t\n\t\tif(vecPath.size()!=0){\n\t\t\tsstd::vvec<double> vvecX, vvecY; vecPath2vvecXY(vvecX, vvecY, vecPath);\n\t\t\tvvec2plot_insert_et(savePath, saveAs, vvecX, vvecY);\n\t\t}\n\t}\n\t{\n\t\tconst char* csvPath = \"./tmpBench/insert_et_preAlloc/*\";\n\t\tconst char* savePath = \"./tmpBench/insert_et_preAlloc_med\";\n\t\tstd::vector<std::string> vecPath = sstd::glob(csvPath);\n\t\t\n\t\tif(vecPath.size()!=0){\n\t\t\tsstd::vvec<double> vvecX, vvecY; vecPath2vvecXY(vvecX, vvecY, sstd::glob(csvPath));\n\t\t\tvvec2plot_insert_et(savePath, saveAs, vvecX, vvecY);\n\t\t}\n\t}\n\t{\n\t\tconst char* csvPath = \"./tmpBench/erase/*\";\n\t\tconst char* savePath = \"./tmpBench/erase_med\";\n\t\tstd::vector<std::string> vecPath = sstd::glob(csvPath);\n\t\t\n\t\tif(vecPath.size()!=0){\n\t\t\tsstd::vvec<double> vvecX, vvecY; vecPath2vvecXY(vvecX, vvecY, vecPath);\n\t\t\tfor(uint i=0; i<vvecX.size(); i++){ // vecType\n\t\t\t\tvvecX[i] = sstd::nonzero(vvecX[i]);\n\t\t\t\tvvecY[i] = sstd::nonzero(vvecY[i]);\n\t\t\t\tsstd::suppress(vvecX[i], vvecY[i]); // <-> padding\n\t\t\t}\n\t\t\tvvec2plot_erase(savePath, saveAs, vvecX, vvecY);\n\t\t}\n\t}\n\t{\n\t\tconst char* csvPath = 
\"./tmpBench/maxLoadFactor/*\";\n\t\tconst char* savePath = \"./tmpBench/maxLoadFactor_med\";\n\t\tstd::vector<std::string> vecPath = sstd::glob(csvPath);\n\t\t\n\t\tif(vecPath.size()!=0){\n\t\t\tsstd::vvec<double> vvecX, vvecY; vecPath2vvecXY(vvecX, vvecY, vecPath);\n\t\t\tfor(uint i=0; i<vvecX.size(); i++){ // vecType\n\t\t\t\tvvecX[i] = sstd::nonzero(vvecX[i]);\n\t\t\t\tvvecY[i] = sstd::nonzero(vvecY[i]);\n\t\t\t\tsstd::suppress(vvecX[i], vvecY[i]); // <-> padding\n\t\t\t}\n\t\t\tvvec2plot_maxLoadFactor(savePath, saveAs, vvecX, vvecY);\n\t\t}\n\t}\n\t\n\tprintf(\"\\n■ measureTime_stop----------------\\n\"); sstd::measureTime_stop_print(timem);\n\treturn 0;\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n" }, { "alpha_fraction": 0.4805915653705597, "alphanum_fraction": 0.5457547307014465, "avg_line_length": 43.20186233520508, "blob_id": "a824b9e780f12e30c88ff7901250463e2f90cef9", "content_id": "551c0c0c4f77c48734e10e0c391298ecaa2a5390", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 28467, "license_type": "no_license", "max_line_length": 266, "num_lines": 644, "path": "/CHashT.hpp", "repo_name": "admiswalker/InPlaceChainedHashTable-IpCHashT-", "src_encoding": "UTF-8", "text": "#pragma once\n#include \"./typeDef.h\"\n#include <sstd/sstd.hpp> // for debug\n#include <memory>\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n// compile options\n\n//#define use_prime_table\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n#define elem_m sstd_CHashT::element<T_key,T_val> // a macro of table element structure\n#define itr_m sstd_CHashT::iterator<T_key,T_val> // a macro of iterator\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\nnamespace sstd{\n\ttemplate <class T_key,\n\t\t\t class T_val,\n\t\t\t class T_hash = std::hash<T_key>,\n\t\t\t class T_key_eq = std::equal_to<T_key>\n\t\t\t >\n\tclass CHashT; // chained hash table\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\nnamespace sstd_CHashT{\n\ttemplate <class T_key, class T_val> struct element; \n\ttemplate <class T_key, class T_val> struct iterator;\n\t\n\t#ifdef use_prime_table\n\tconst uint64 tSizeL[64] = { // table size list. 
(Smallest prime list larger than power of 2.)\n\t\t2ull, // 2^ 1 + 0 = 2ull\n\t\t5ull, // 2^ 2 + 1 = 5ull\n\t\t11ull, // 2^ 3 + 3 = 11ull\n\t\t17ull, // 2^ 4 + 1 = 17ull\n\t\t37ull, // 2^ 5 + 5 = 37ull\n\t\t67ull, // 2^ 6 + 3 = 67ull\n\t\t131ull, // 2^ 7 + 3 = 131ull\n\t\t257ull, // 2^ 8 + 1 = 257ull\n\t\t521ull, // 2^ 9 + 9 = 521ull\n\t\t1031ull, // 2^10 + 7 = 1031ull\n\t\t2053ull, // 2^11 + 5 = 2053ull\n\t\t4099ull, // 2^12 + 3 = 4099ull\n\t\t8209ull, // 2^13 + 17 = 8209ull\n\t\t16411ull, // 2^14 + 27 = 16411ull\n\t\t32771ull, // 2^15 + 3 = 32771ull\n\t\t65537ull, // 2^16 + 1 = 65537ull\n\t\t131101ull, // 2^17 + 29 = 131101ull\n\t\t262147ull, // 2^18 + 3 = 262147ull\n\t\t524309ull, // 2^19 + 21 = 524309ull\n\t\t1048583ull, // 2^20 + 7 = 1048583ull\n\t\t2097169ull, // 2^21 + 17 = 2097169ull\n\t\t4194319ull, // 2^22 + 15 = 4194319ull\n\t\t8388617ull, // 2^23 + 9 = 8388617ull\n\t\t16777259ull, // 2^24 + 43 = 16777259ull\n\t\t33554467ull, // 2^25 + 35 = 33554467ull\n\t\t67108879ull, // 2^26 + 15 = 67108879ull\n\t\t134217757ull, // 2^27 + 29 = 134217757ull\n\t\t268435459ull, // 2^28 + 3 = 268435459ull\n\t\t536870923ull, // 2^29 + 11 = 536870923ull\n\t\t1073741827ull, // 2^30 + 85 = 1073741827ull\n\t\t2147483659ull, // 2^31 + 11 = 2147483659ull\n\t\t4294967311ull, // 2^32 + 15 = 4294967311ull\n\t\t8589934609ull, // 2^33 + 17 = 8589934609ull\n\t\t17179869209ull, // 2^34 + 25 = 17179869209ull\n\t\t34359738421ull, // 2^35 + 53 = 34359738421ull\n\t\t68719476767ull, // 2^36 + 31 = 68719476767ull\n\t\t137438953481ull, // 2^37 + 9 = 137438953481ull\n\t\t274877906951ull, // 2^38 + 7 = 274877906951ull\n\t\t549755813911ull, // 2^39 + 23 = 549755813911ull\n\t\t1099511627791ull, // 2^40 + 15 = 1099511627791ull\n\t\t2199023255579ull, // 2^41 + 27 = 2199023255579ull\n\t\t4398046511119ull, // 2^42 + 15 = 4398046511119ull\n\t\t8796093022237ull, // 2^43 + 29 = 8796093022237ull\n\t\t17592186044423ull, // 2^44 + 7 = 17592186044423ull\n\t\t35184372088891ull, // 2^45 + 59 = 35184372088891ull\n\t\t70368744177679ull, // 2^46 + 15 = 70368744177679ull\n\t\t140737488355333ull, // 2^47 + 5 = 140737488355333ull\n\t\t281474976710677ull, // 2^48 + 21 = 281474976710677ull\n\t\t562949953421381ull, // 2^49 + 69 = 562949953421381ull\n\t\t1125899906842679ull, // 2^50 + 55 = 1125899906842679ull\n\t\t2251799813685269ull, // 2^51 + 21 = 2251799813685269ull\n\t\t4503599627370517ull, // 2^52 + 21 = 4503599627370517ull\n\t\t9007199254740997ull, // 2^53 + 5 = 9007199254740997ull\n\t\t18014398509482143ull, // 2^54 + 159 = 18014398509482143ull\n\t\t36028797018963971ull, // 2^55 + 3 = 36028797018963971ull\n\t\t72057594037928017ull, // 2^56 + 81 = 72057594037928017ull\n\t\t144115188075855881ull, // 2^57 + 9 = 144115188075855881ull\n\t\t288230376151711813ull, // 2^58 + 69 = 288230376151711813ull\n\t\t576460752303423619ull, // 2^59 + 131 = 576460752303423619ull\n\t\t1152921504606847009ull, // 2^60 + 33 = 1152921504606847009ull\n\t\t2305843009213693967ull, // 2^61 + 15 = 2305843009213693967ull\n\t\t4611686018427388039ull, // 2^62 + 135 = 4611686018427388039ull\n\t\t9223372036854775837ull, // 2^63 + 29 = 9223372036854775837ull\n\t//\t18446744073709551629ull // 2^64 + 13 = 18446744073709551629ull // larger than the range of uint64\n\t\t18446744073709551360ull\t// 2^64 - 254 = 18446744073709551360ull // not prive (need to find out an appropriate prime)\n\t};\n\t#endif\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\ntemplate <class T_key, 
class T_val>\nstruct sstd_CHashT::element{\nprivate:\npublic:\n\tinline element(){ isUsed=false; pNext=NULL; }\n\tinline ~element(){ if(pNext!=NULL){delete pNext; pNext=NULL;} } // recursive release of the memory\n\t\n\tbool isUsed; // flag\n\tT_key key; // key\n\tT_val val; // value\n\tstruct elem_m* pNext; // singly linked list\n\t\n\t#ifdef SSTD_CHashT_DEBUG\n\tinline void print_dbg(){\n\t\tstd::cout << \" pElem: \" << this << std::endl;\n\t\tstd::cout << \"isUsed: \" << (this->isUsed ? \"true\" : \"false\") << std::endl;\n\t\tstd::cout << \" key: \" << this->key << std::endl;\n\t\tstd::cout << \" val: \" << this->val << std::endl;\n\t\tstd::cout << \" pNext: \" << this->pNext << std::endl;\n\t}\n\t#endif\n};\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n#define itr_end_m 1\n#define itr_needRehash_m 2\n\ntemplate <class T_key, class T_val>\nstruct sstd_CHashT::iterator{\nprivate:\n\tstruct elem_m* pT; // table pointer\n\tuint64 idx; // table index\n\tuint64 tSize; // table size\n\tstruct elem_m* pP; // previous element pointer\n\tstruct elem_m* pE; // current element pointer\n\tstruct elem_m* pN; // next element pointer\n\tuint8 state;\n\t\npublic:\n\tinline iterator(){ pT=NULL; idx=0ull; tSize=0ull; pP=NULL; pE=NULL; pN=NULL; state=0; }\n\tinline iterator(struct elem_m* pT_in, const uint64 tIdx_in, const uint64 tSize_in, struct elem_m* pP_in, struct elem_m* pE_in, struct elem_m* pN_in, const uint8 state_in){\n\t\tpT = pT_in;\n\t\tidx = tIdx_in;\n\t\ttSize = tSize_in;\n\t\tpP = pP_in;\n\t\tpE = pE_in;\n\t\tpN = pN_in;\n\t\tstate = state_in;\n\t}\n\tinline ~iterator(){}\n\t\n\tinline const T_key& first() const { return pE->key; }\n\tinline const T_val& second() const { return pE->val; }\n\tinline const T_key& key() const { return pE->key; }\n\tinline T_key& _key_RW() { return pE->key; }\n\tinline T_val& val() { return pE->val; }\n\t\n\tinline uint64 index() { return idx; }\n\tinline struct elem_m*& _pPrev() { return pP; }\n\tinline struct elem_m*& _pElem() { return pE; }\n\tinline struct elem_m*& _pNext() { return pN; }\n\tinline const uint8 _state_R() const { return state; }\n\t\n\tinline const bool operator!=(const struct itr_m& rhs){ return this->state != rhs._state_R(); }\n\t\n\t// pre-increment (++itr)\n\tinline struct itr_m operator++(){\n\t\t\n\t\tif(this->pN!=NULL){\n\t\t\tthis->pP = this->pE;\n\t\t\tthis->pE = this->pN;\n\t\t\tif(this->pN->pNext!=NULL){ this->pN = this->pN->pNext;\n\t\t\t} else { this->pN = NULL; }\n\t\t\treturn *this; // next key is on the singly linked list.\n\t\t}\n\t\t\n\t\tidx++;\n\t\tfor(;;){\n\t\t\tif(idx>=tSize){\n\t\t\t\tthis->pP = this->pE;\n\t\t\t\tthis->pE = NULL;\n\t\t\t\tthis->pN = NULL;\n\t\t\t\tthis->state = itr_end_m;\n\t\t\t\treturn *this; // next key is not found.\n\t\t\t}\n\t\t\tif(pT[idx].isUsed){ break; }\n\t\t\tidx++;\n\t\t}\n\t\t\n\t\tthis->pP = NULL;\n\t\tthis->pE = &pT[idx];\n\t\tthis->pN = pT[idx].pNext;\n\t\tthis->state = 0;\n\t\treturn *this; // next key is on the table.\n\t}\n\t\n\t// post-increment (itr++)\n//\tclass itr_m operator++(int){ // int is a dummy arg\n//\t\t== not implimented yet. 
==\n//\t}\n\t\n\t#ifdef SSTD_CHashT_DEBUG\n\tinline void print_dbg(){\n\t\tstd::cout << \" pT: \" << this->pT << std::endl;\n\t\tstd::cout << \" idx: \" << this->idx << std::endl;\n\t\tstd::cout << \"tSize: \" << this->tSize << std::endl;\n\t\tstd::cout << \" pP: \" << this->pP << std::endl;\n\t\tstd::cout << \" pE: \" << this->pT << std::endl;\n\t\tstd::cout << \" pN: \" << this->pN << std::endl;\n\t\tstd::cout << \"state: \" << (int)this->state << std::endl;\n\t}\n\t#endif\n};\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq>\nclass sstd::CHashT{\nprivate:\n\tvoid CHashT_constructor(const uint64 tableSize);\n\t\n#ifdef use_prime_table\n\tuint8 tSizeL_idx; // table size list index\n#else\n\tuint64 tSize_m1; // tSize minus 1.\n#endif\n\tuint64 tSize; // table size\n\t\n\tT_hash* pHashFn; // pointer to the hash function\n\t\n\tstruct elem_m* pT; // pointer to the table\n\tuint64 elems; // number of elements on the table\n\tuint64 elems_onSinglyLinkedList; // for detailed load factor\n\t\npublic:\n\ttypedef std::pair<const T_key, T_val> value_type; // for STL compatibility.\n\t\n\tCHashT();\n\tCHashT(const uint64 tableSize); // nearest size under power of 2 will be allocate.\n\t#ifdef use_prime_table\n\tCHashT(const uint8 tableSizeL_idx, const uint64 tableSize); // allocate same size of tableSize. for rehashing. (select size from prime table, never form the others).\n\t#else\n\tCHashT(const uint64 tableSize_minus1, const uint64 tableSize); // allocate same size of tableSize. for rehashing. (select size of power 2, never form the others).\n\t#endif\n\t~CHashT();\n\t\n\t#ifdef use_prime_table\n\tinline uint8& _tSizeL_idx(){ return tSizeL_idx; }\n\t#else\n\tinline uint64& _tSize_m1(){ return tSize_m1; }\n\t#endif\n\tinline uint64& _tSize(){ return tSize; }\n\tinline T_hash*& _pHashFn(){ return pHashFn; }\n\tinline struct elem_m*& _pT(){ return pT; }\n\tinline uint64& _elems(){ return elems; }\n\tinline uint64& _elems_onSinglyLinkedList(){ return elems_onSinglyLinkedList; }\n\t\n\t// iterator\n\tinline struct itr_m begin(){\n\t\tuint64 idx=0ull;\n\t\twhile(!pT[idx].isUsed){\n\t\t\tidx++;\n\t\t\tif(idx>=tSize){\n\t\t\t\t// there is no item.\n\t\t\t\treturn itr_m(pT, 0ull, tSize, NULL, NULL, NULL, itr_end_m); // return .end()\n\t\t\t}\n\t\t}\n\t\treturn itr_m(pT, idx, tSize, NULL, &pT[idx], pT[idx].pNext, 0);\n\t}\n\tinline const struct itr_m end(){ return itr_m(pT, 0ull, tSize, NULL, NULL, NULL, itr_end_m); }\n\t\n\tinline T_val& operator[](const T_key& rhs);\n\tinline T_val& operator[]( T_key&& rhs);\n\t\n\t// bucket();\n\t// bucket_count();\n\t// bucket_size();\n\t// clear();\n\t// load_factor();\n\t// rehash();\n\t// reserve();\n\t\n\t// operator[];\n\t// operator =();\n\t// operator==();\n\t// operator!=();\n\t\n\t// ---------------------------\n\t\n\tstruct itr_m insert(const T_key& key_in, const T_val& val_in); // copy key and value.\n\tstruct itr_m insert( T_key&& key_in, const T_val& val_in); // swap key. (Callable by \"sstd::CHashT<T_key, T_val> hashT; hashT.insert(std::move(key), val );\".)\n\tstruct itr_m insert(const T_key& key_in, T_val&& val_in); // swap value. (Callable by \"sstd::CHashT<T_key, T_val> hashT; hashT.insert( key , std::move(val));\".)\n\tstruct itr_m insert( T_key&& key_in, T_val&& val_in); // swap key and value. 
(Callable by \"sstd::CHashT<T_key, T_val> hashT; hashT.insert(std::move(key), std::move(val));\".)\n\tstd::pair<struct itr_m, bool> insert(const value_type& v); // for STL (1). Ref: https://cpprefjp.github.io/reference/unordered_map/unordered_map/insert.html\n\t\n\tstruct itr_m insert(struct itr_m& itr, const T_key& key_in); // insert from itr value of find() for operator[]\n\tstruct itr_m insert(struct itr_m& itr, T_key&& key_in); // insert from itr value of find() for operator[]\n\t\n\tstruct itr_m find (const T_key& key_in, const uint64 idx);\n\tstruct itr_m find (const T_key& key_in);\n\t\n\tbool erase (struct itr_m& itr); // erase from itr value of insert() or find()\n\tbool erase (const T_key& key_in); // erase from key value\n\t\n\tinline const uint64 size(){ return elems; }\n\tinline const uint64 tableSize(){ return tSize; }\n\tinline const uint64 bucket_count(){ return tSize; }\n//\tinline const double load_factor(){ return (double)elems/(double)tSize; }\n\tinline const double load_factor(){ return (double)(elems)/(double)(tSize + elems_onSinglyLinkedList); }\n\t\n\t// elems_onSinglyLinkedList = elems - elems_onTable;\n\t// LoadFactor = elems / (tSize + elems_onSinglyLinkedList);\n\t// LoadFactor = elems / (tSize + elems - elems_onTable);\n\t\n\tvoid rehash();\n\t\n\t#ifdef SSTD_CHashT_DEBUG\n\tbool use_tIdx_dbg = false;\n\tuint64 tIdx_dbg;\n\tinline struct elem_m*& pT_dbg(){ return pT; }\n\t#endif\n};\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n#define get_tSizeL_idx(idx)\t\t\t\t\t\t\t\t\t\t\t\t\\\n\tidx=0;\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\tfor(; idx<64; idx++){\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t\tif(sstd_IpCHashT::tSizeL[idx]>=tableSize){ break; }\t\t\t\t\\\n\t}\n#define get_tSize(tSize)\t\t\t\t\t\t\\\n\ttSize=2;\t\t\t\t\t\t\t\t\t\\\n\twhile(tSize<tableSize){ tSize*=2; }\n\n#define constructorBase_init_m()\t\t\t\t\t\t\\\n\tpT = new struct elem_m[tSize];\t\t\t\t\\\n\tpHashFn = new T_hash();\t\t\t\t\t\t\t\\\n\telems = 0ull;\t\t\t\t\t\t\t\t\t\\\n\telems_onSinglyLinkedList = 0ull; /* for detailed load factor */\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq>\ninline void sstd::CHashT<T_key, T_val, T_hash, T_key_eq>::CHashT_constructor(const uint64 tableSize){\n\t#ifdef use_prime_table\n\tget_tSizeL_idx(tSizeL_idx); tSize = sstd_CHashT::tSizeL[tSizeL_idx];\n\t#else\n\tget_tSize(tSize); tSize_m1 = tSize - 1;\n\t#endif\n\t\n\tconstructorBase_init_m();\n}\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq> inline sstd::CHashT<T_key, T_val, T_hash, T_key_eq>::CHashT( ){ CHashT_constructor( 512 ); }\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq> inline sstd::CHashT<T_key, T_val, T_hash, T_key_eq>::CHashT(const uint64 tableSize){ CHashT_constructor(tableSize); }\n\n//---\n\n#ifdef use_prime_table\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq>\ninline sstd::CHashT<T_key, T_val, T_hash, T_key_eq>::CHashT(const uint8 tableSizeL_idx, const uint64 tableSize){ // allocate same size of tableSize. for rehashing. (select size from prime table, never form the others).\n\ttSizeL_idx = tableSizeL_idx;\n\ttSize = sstd_CHashT::tSizeL[tSizeL_idx];\n\tconstructorBase_init_m();\n}\n#else\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq>\ninline sstd::CHashT<T_key, T_val, T_hash, T_key_eq>::CHashT(const uint64 tableSize_minus1, const uint64 tableSize){ // allocate same size of tableSize. for rehashing. 
(select size of power 2, never form the others).\n\ttSize_m1 = tableSize_minus1;\n\ttSize = tableSize;\n\tconstructorBase_init_m();\n}\n#endif\n\n//---\n\n#undef get_tSize\n#undef get_tSizeL_idx\n#undef constructorBase_init_m\n\n//---\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq>\ninline sstd::CHashT<T_key, T_val, T_hash, T_key_eq>::~CHashT(){\n\tdelete[] pT;\n\tdelete pHashFn;\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq>\nvoid swap_hashT(sstd::CHashT<T_key, T_val, T_hash, T_key_eq>& lhs, sstd::CHashT<T_key, T_val, T_hash, T_key_eq>& rhs){\n\t// \"using std::swap;\" is defined, in order to preferentially call overloaded function of swap<T>() for type T. (Ref: https://cpprefjp.github.io/reference/utility/swap.html)\n\t// in here, scope of using is limited by \"{}\", this means that scope of using is same as a usual value.\n\tusing std::swap;\n\t\n\t#ifdef use_prime_table\n\tswap(lhs._tSizeL_idx(), rhs._tSizeL_idx());\n\t#else\n\tswap(lhs._tSize_m1(), rhs._tSize_m1() );\n\t#endif\n\t\n\tswap(lhs._tSize(), rhs._tSize() );\n\tswap(lhs._pHashFn(), rhs._pHashFn() );\n\tswap(lhs._pT(), rhs._pT() );\n\tswap(lhs._elems(), rhs._elems() );\n\tswap(lhs._elems_onSinglyLinkedList(), rhs._elems_onSinglyLinkedList());\n}\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq>\ninline void sstd::CHashT<T_key, T_val, T_hash, T_key_eq>::rehash(){\n\t// \"using std::swap;\" is defined, in order to preferentially call overloaded function of swap<T>() for type T. (Ref: https://cpprefjp.github.io/reference/utility/swap.html)\n\t// in here, scope of using is limited by \"{}\", this means that scope of using is same as a usual value.\n\tusing std::swap;\n\t\n#ifdef use_prime_table\n\tuint8 tSizeL_idx_p1 = tSizeL_idx + 1; // p1: plus 1\n\tsstd::CHashT<T_key, T_val, T_hash, T_key_eq> hashT_new(tSizeL_idx_p1, sstd_CHashT::tSizeL[tSizeL_idx_p1]); // twice size of tSize will be allocated.\n#else\n\tuint64 tSize_mul2 = tSize * 2; // mul2: multiply 2\n\tuint64 tSize_mul2_m1 = tSize_mul2 - 1; // m1: minus 1\n\tsstd::CHashT<T_key, T_val, T_hash, T_key_eq> hashT_new(tSize_mul2_m1, tSize_mul2); // twice size of tSize will be allocated.\n#endif\n\t\n\tfor(auto itr=this->begin(); itr!=this->end(); ++itr){\n\t\thashT_new.insert(std::move(itr._key_RW()), std::move(itr.val()));\n\t}\n\tswap_hashT(*this, hashT_new);\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n#define insert_preproc_m()\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t/* checking the needs of rehash. */\t\t\t\t\t\t\t\t\t\\\n\tif(elems>=tSize){ rehash(); } /* elems needs to be measured before find(), because of the address that the itr suggests will be changed. 
*/ \\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\tauto itr = find(key_in);\t\t\t\t\t\t\t\t\t\t\t\\\n\tif(itr._state_R()!=itr_end_m){ return itr; } /* key is already on the table */\n\n#define insert_m(key_in, val_in, CAST_KEY, CAST_VAL)\t\t\t\t\t\\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\telems++;\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\tif(itr._pElem()!=NULL){\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t\t/* inserting on the table */\t\t\t\t\t\t\t\t\t\\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t\titr._pElem()->isUsed = true;\t\t\t\t\t\t\t\t\t\\\n\t\titr._pElem()->key    = (CAST_KEY)(key_in);\t\t\t\t\t\t\\\n\t\titr._pElem()->val    = (CAST_VAL)(val_in);\t\t\t\t\t\t\\\n\t\treturn itr_m(pT, itr.index(), tSize, NULL, itr._pElem(), NULL, 0); \\\n\t}else{\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t\telems_onSinglyLinkedList++;\t\t\t\t\t\t\t\t\t\t\\\n\t\t/* inserting on the singly linked list */\t\t\t\t\t\t\\\n\t\t/* find() guarantees that itr._pPrev() is the tail of the singly linked list */\t\\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t\titr._pPrev()->pNext = new struct elem_m();\t\t\t\t\t\t\\\n\t\titr._pPrev()->pNext->isUsed = true;\t\t\t\t\t\t\t\t\\\n\t\titr._pPrev()->pNext->key    = (CAST_KEY)(key_in);\t\t\t\t\\\n\t\titr._pPrev()->pNext->val    = (CAST_VAL)(val_in);\t\t\t\t\\\n\t\treturn itr_m(pT, itr.index(), tSize, itr._pPrev(), itr._pPrev()->pNext, NULL, 0); \\\n\t}\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq> inline struct itr_m sstd::CHashT<T_key, T_val, T_hash, T_key_eq>::insert(const T_key&  key_in, const T_val&  val_in){ insert_preproc_m(); insert_m(key_in, val_in, T_key,     T_val    ); }\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq> inline struct itr_m sstd::CHashT<T_key, T_val, T_hash, T_key_eq>::insert(      T_key&& key_in, const T_val&  val_in){ insert_preproc_m(); insert_m(key_in, val_in, std::move, T_val    ); }\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq> inline struct itr_m sstd::CHashT<T_key, T_val, T_hash, T_key_eq>::insert(const T_key&  key_in,       T_val&& val_in){ insert_preproc_m(); insert_m(key_in, val_in, T_key,     std::move); }\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq> inline struct itr_m sstd::CHashT<T_key, T_val, T_hash, T_key_eq>::insert(      T_key&& key_in,       T_val&& val_in){ insert_preproc_m(); insert_m(key_in, val_in, std::move, std::move); }\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq>\ninline std::pair<struct itr_m, bool> sstd::CHashT<T_key, T_val, T_hash, T_key_eq>::insert(const value_type& v){ // for STL (1). Ref: https://cpprefjp.github.io/reference/unordered_map/unordered_map/insert.html\n\t// Implemented with reference to operator[].\n\tauto itrF = find(v.first);\n\tif(itrF._state_R()!=itr_end_m){ return std::pair<struct itr_m, bool>(itrF, false); }\n\t\n\tauto itrA = insert(itrF, v.first);\n\titrA._pElem()->val = v.second; // insert(itr, key) only default-constructs the value, so v.second must be stored explicitly.\n\treturn std::pair<struct itr_m, bool>(itrA, true); // return the iterator to the newly inserted element, not the end-state iterator.\n}\n\n//---\n\n#define insert_by_itr_m(key_in, CAST_KEY)\t\t\t\t\t\t\t\t\\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t/* check whether a rehash is needed. */\t\t\t\t\t\t\t\t\\\n\tif(elems>=tSize){\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t\trehash();\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t\titr = find(key_in); /* the addresses the itr points to were changed by rehash(), so we need to find() again. 
*/ \\\n\t}\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\tT_val val_in = T_val(); /* generation of empty buf for insert() */\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq> inline struct itr_m sstd::CHashT<T_key, T_val, T_hash, T_key_eq>::insert(struct itr_m& itr, const T_key& key_in){ insert_by_itr_m(key_in, T_key ); insert_m(key_in, val_in, T_key, T_val ); }\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq> inline struct itr_m sstd::CHashT<T_key, T_val, T_hash, T_key_eq>::insert(struct itr_m& itr, T_key&& key_in){ insert_by_itr_m(key_in, std::move); insert_m(key_in, val_in, std::move, std::move); }\n\n//---\n\n#undef insert_by_itr_m\n#undef insert_m\n#undef insert_preproc_m\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n// in order to reduce the calling time of function, macro expansion will be used.\n#ifdef use_prime_table\n\t#ifdef SSTD_CHashT_DEBUG\n\t#define key2tIdx_m(tIdx, key)\t\t\t\t\t\t\t\t\t\t\t\\\n\t\tuint64 hVal = (uint64)(*pHashFn)(key); /* generate hashed value */\t\\\n\t\ttIdx = hVal % tSize;\t\t\t\t\t\t\t\t\t\t\t \\\n\t\tif(use_tIdx_dbg){ idx=tIdx_dbg; } /* over write idx for debug */\n\t#else\n\t#define key2tIdx_m(tIdx, key)\t\t\t\t\t\t\t\t\t\t\t\\\n\t\tuint64 hVal = (uint64)(*pHashFn)(key); /* generate hashed value */\t\\\n\t\ttIdx = hVal % tSize;\n\t#endif\n#else\n\t#ifdef SSTD_CHashT_DEBUG\n\t#define key2tIdx_m(tIdx, key)\t\t\t\t\t\t\t\t\t\t\t\\\n\t\tuint64 hVal = (uint64)(*pHashFn)(key); /* generate hashed value */\t\\\n\t\ttIdx = hVal & tSize_m1;\t\t\t\t\t\t\t\t\t\t\t \\\n\t\tif(use_tIdx_dbg){ idx=tIdx_dbg; } /* over write idx for debug */\n\t#else\n\t#define key2tIdx_m(tIdx, key)\t\t\t\t\t\t\t\t\t\t\t\\\n\t\tuint64 hVal = (uint64)(*pHashFn)(key); /* generate hashed value */\t\\\n\t\ttIdx = hVal & tSize_m1;\n\t#endif\n#endif\n\n#define findBase_m()\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\tif(!pT[idx].isUsed){ return itr_m(pT, idx, tSize, NULL, &pT[idx], NULL, itr_end_m); } /* key is not found. */ \\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\tstruct elem_m* pP = NULL; /* previous element pointer */\t\t\\\n\tstruct elem_m* pE = &pT[idx]; /* current element pointer */\t\t\t\\\n\twhile(pE!=NULL){\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t\tif(T_key_eq()(pE->key, key_in)){ return itr_m(pT, idx, tSize, pP, pE, pE->pNext, 0); } /* key is found. */ \\\n\t\tpP = pE;\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t\tpE = pE->pNext;\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\t}\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\treturn itr_m(pT, idx, tSize, pP, NULL, NULL, itr_end_m); /* key is not found. 
*/\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq>\ninline struct itr_m sstd::CHashT<T_key, T_val, T_hash, T_key_eq>::find(const T_key& key_in, const uint64 idx){\n\tfindBase_m();\n}\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq>\ninline struct itr_m sstd::CHashT<T_key, T_val, T_hash, T_key_eq>::find(const T_key& key_in){\n\tuint64 idx; key2tIdx_m(idx, key_in);\n\tfindBase_m();\n}\n\n#undef findBase_m\n#undef key2tIdx_m\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq>\ninline bool sstd::CHashT<T_key, T_val, T_hash, T_key_eq>::erase(struct itr_m& itr){\n\t// \"using std::swap;\" is defined, in order to preferentially call overloaded function of swap<T>() for type T. (Ref: https://cpprefjp.github.io/reference/utility/swap.html)\n\t// in here, scope of using is limited by \"{}\", this means that scope of using is same as a usual value.\n\tusing std::swap;\n\t\n\tif(itr._state_R()==itr_end_m){ return false; } // key is not found\n\t\n\telems--;\n\tif(itr._pPrev()==NULL && itr._pNext()==NULL){\n\t\t// case 1. erase an element on the table without element on the singly linked list.\n\t\t\n\t\tT_key keyBuf;// in order to call destructor\n\t\tT_val valBuf;// in order to call destructor\n\t\titr._pElem()->isUsed = false;\n\t\tswap(itr._pElem()->key, keyBuf);\n\t\tswap(itr._pElem()->val, valBuf);\n\t}else if(itr._pPrev()==NULL && itr._pNext()!=NULL){\n\t\telems_onSinglyLinkedList--;\n\t\t// case 2. erase an element on the table with element(s) on the singly linked list.\n\t\t\n\t\t// <- on the table ...... -> | <- on the singly linked list ......................... -> |\n\t\t//\n\t\t// <1> <2> <3>\n\t\t// itr._pElem() +-> itr._pElem()->pNext +-> itr._pElem()->pNext->pNext\n\t\t// == &pT[idx] | == itr._pNext() | == itr._pNext()->pNext\n\t\t// [ isUsed ] | [ isUsed ] | [ isUsed ]\n\t\t// [ key <1-1> ] | [ key <2-1> ] | ...\n\t\t// [ val <1-2> ] | [ val <2-2> ] |\n\t\t// [ pNext <1-3> ] -+ [ pNext <2-3> ] --+\n\t\t\n\t\t// table\n\t\tswap(itr._pElem()->key, itr._pNext()->key); // <1-1>, <2-1>\n\t\tswap(itr._pElem()->val, itr._pNext()->val); // <1-2>, <2-2>\n\t\tstruct elem_m* pENN_buf = itr._pNext()->pNext; // <2-3>\n\t\titr._pNext()->pNext = NULL; // <2-3> // deleting \"itr._pElem()->pNext\" without filling \"itr._pElem()->pNext->pNext\" with zero will cause the recursive release of the memory.\n\t\tdelete itr._pNext(); // <2>\n\t\titr._pElem()->pNext = pENN_buf; // <1-3> = &<3>\n\t\t\n\t\t// itr\n\t\titr._pNext()=itr._pElem(); // for operator++.\n\t\titr._pElem()=NULL; // for operator++ will not access.\n\t}else{\n\t\telems_onSinglyLinkedList--;\n\t\t// case 3. erase element on the singly linked list. 
(\"case 3\" is a interchangeable process by \"case 2\", while ignoring the over head.)\n\t\t\n\t\titr._pPrev()->pNext = itr._pNext();\n\t\titr._pElem()->pNext = NULL; // deleting \"itr._pElem\" without filling \"itr._pElem->pNext\" with zero will cause the recursive release of the memory.\n\t\tdelete itr._pElem();\n\t}\n\t\n\treturn true;\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq>\ninline bool sstd::CHashT<T_key, T_val, T_hash, T_key_eq>::erase(const T_key& key_in){\n\tauto itr = find(key_in);\n\treturn this->erase(itr);\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n#define insert_OPE_bracket_m(CAST_KEY)\t\t\t\t\t\t\t\t\t\\\n\tauto itrF = find(key_in);\t\t\t\t\t\t\t\t\t\t\t\\\n\tif(itrF._state_R()!=itr_end_m){ return itrF._pElem()->val; }\t\t\\\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\\\n\tauto itrA = insert(itrF, (CAST_KEY)(key_in));\t\t\t\t\t\t\\\n\treturn itrA._pElem()->val;\n\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq> inline T_val& sstd::CHashT<T_key, T_val, T_hash, T_key_eq>::operator[](const T_key& key_in){ insert_OPE_bracket_m(T_key ); }\ntemplate <class T_key, class T_val, class T_hash, class T_key_eq> inline T_val& sstd::CHashT<T_key, T_val, T_hash, T_key_eq>::operator[]( T_key&& key_in){ insert_OPE_bracket_m(std::move); }\n#undef insert_OPE_bracket_m\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n#undef itr_end_m\n#undef itr_needRehash_m\n\n#undef elem_m\n#undef itr_m\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n#ifdef use_prime_table\n\t#undef use_prime_table\n#endif\n\n" }, { "alpha_fraction": 0.5098684430122375, "alphanum_fraction": 0.5444079041481018, "avg_line_length": 23.33333396911621, "blob_id": "6d959fbe9ed98d38c885a91d51701acae3a72eb8", "content_id": "462b08a8c2da00c66c2bce19752650f53e277805", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 610, "license_type": "no_license", "max_line_length": 81, "num_lines": 24, "path": "/FNV_Hash.cpp", "repo_name": "admiswalker/InPlaceChainedHashTable-IpCHashT-", "src_encoding": "UTF-8", "text": "/* -*- coding: utf-8 -*- */\r\n#include \"FNV_Hash.hpp\"\r\n\r\n/*\r\n * FNV Hash Algorithm\r\n */\r\nvoid fnv_1_hash_32( UINT32& return_Hash, unsigned char*& bytes, size_t& length ){\r\n\r\n return_Hash = FNV_OFFSET_BASIS_32;\r\n for( size_t i = 0 ; i < length ; i++ ){\r\n return_Hash = (FNV_PRIME_32 * return_Hash) ^ (bytes[i]);\r\n }\r\n\r\n\treturn;\r\n}\r\nvoid fnv_1_hash_64( UINT64& return_Hash, unsigned char*& bytes, size_t& length ){\r\n\r\n return_Hash = FNV_OFFSET_BASIS_64;\r\n for( size_t i = 0 ; i < length ; i++ ){\r\n return_Hash = (FNV_PRIME_64 * return_Hash) ^ (bytes[i]);\r\n }\r\n\r\n return;\r\n}\r\n" }, { "alpha_fraction": 0.37994101643562317, "alphanum_fraction": 0.3852507472038269, "avg_line_length": 36.64444351196289, "blob_id": "ec5fcaf60a83e996c9e696b7ec82609d796a3948", "content_id": "abb194610301ae171ce43a08dc19b67bfe4e75b5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1701, "license_type": 
"no_license", "max_line_length": 145, "num_lines": 45, "path": "/main_test.cpp", "repo_name": "admiswalker/InPlaceChainedHashTable-IpCHashT-", "src_encoding": "UTF-8", "text": "#include <sstd/sstd.hpp>\n#include <gtest/gtest.h>\n\n#define SSTD_CHashT_DEBUG\n#include \"./test_CHashT.hpp\"\n\n#define SSTD_IpCHashT_DEBUG\n#include \"./test_IpCHashT.hpp\"\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n// === usage of Google C++ Testing Framework ===\n//\n// TEST(test_case_name, test_name) {\n// ... test body ...\n// }\n//\n// Ex:\n// namespace SpaceName1{\n// int add1(int x, int y){ return x + y; }\n// };\n// TEST(SpaceName1, add1){ ASSERT_EQ(SpaceName1::add1(1, 1), 2); }\n\nint main(int argc, char** argv){\n\tprintf(\"\\n\");\n\tprintf(\"+---------------------------------------------------+\\n\");\n\tprintf(\"| |\\n\");\n\tprintf(\"| Welcome to Sub Standard Library (SSTD) ! |\\n\");\n\tprintf(\"| |\\n\");\n\tprintf(\"| > This is an Implementation Plan for |\\n\");\n\tprintf(\"| > In-placeChainedHashTable (IpCHashT) |\\n\");\n\tprintf(\"| > and ChainedHashTable ( CHashT). |\\n\");\n\tprintf(\"| |\\n\");\n\tprintf(\"+---------------------------------------------------+\\n\");\n\tprintf(\"\\n\");\n\tprintf(\"■ measureTime_start---------------\\n\\n\"); time_m timem; sstd::measureTime_start(timem);\n\t::testing::InitGoogleTest(&argc, argv);\n\t\n\tauto ret = RUN_ALL_TESTS();\n\t\n\tprintf(\"\\n■ measureTime_stop----------------\\n\"); sstd::measureTime_stop_print(timem);\n\treturn ret;\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n" }, { "alpha_fraction": 0.3035426735877991, "alphanum_fraction": 0.30434781312942505, "avg_line_length": 44.96296310424805, "blob_id": "71e8fdf399ac4644d577cf4be27735657dc820f8", "content_id": "05051aee58a90a137b1fc314fd8dd2f2530a71a5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1248, "license_type": "no_license", "max_line_length": 145, "num_lines": 27, "path": "/main_bench.cpp", "repo_name": "admiswalker/InPlaceChainedHashTable-IpCHashT-", "src_encoding": "UTF-8", "text": "#include <sstd/sstd.hpp>\n\n#include \"./bench.hpp\"\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\nint main(int argc, char** argv){\n\tprintf(\"\\n\");\n\tprintf(\"+---------------------------------------------------+\\n\");\n\tprintf(\"| |\\n\");\n\tprintf(\"| Welcome to Sub Standard Library (SSTD) ! |\\n\");\n\tprintf(\"| |\\n\");\n\tprintf(\"| > This is an Implementation Plan for |\\n\");\n\tprintf(\"| > In-placeChainedHashTable (IpCHashT) |\\n\");\n\tprintf(\"| > and ChainedHashTable ( CHashT). 
|\\n\");\n\tprintf(\"| |\\n\");\n\tprintf(\"+---------------------------------------------------+\\n\");\n\tprintf(\"\\n\");\n\tprintf(\"■ measureTime_start---------------\\n\\n\"); time_m timem; sstd::measureTime_start(timem);\n\t\n\tRUN_ALL_BENCHS();\n\t\n\tprintf(\"\\n■ measureTime_stop----------------\\n\"); sstd::measureTime_stop_print(timem);\n\treturn 0;\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n" }, { "alpha_fraction": 0.5533815622329712, "alphanum_fraction": 0.5542322397232056, "avg_line_length": 29.134614944458008, "blob_id": "18ce5f681cbea7bd4f178c99d87dabdfd8692c8d", "content_id": "e7decfb239f61d157b58ac6248dd5492d7691f11", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 4702, "license_type": "no_license", "max_line_length": 128, "num_lines": 156, "path": "/Makefile", "repo_name": "admiswalker/InPlaceChainedHashTable-IpCHashT-", "src_encoding": "UTF-8", "text": "#------------------------------------------------------------\n# please set each item\n\n# source files\nSRCS_t = main_test.cpp\nSRCS_bm = main_bench.cpp\nSRCS_bm_uM = main_bench_usedMemory.cpp\nSRCS_sp = main_sProc.cpp\nHEADS = CHashT.hpp IpCHashT.hpp\nHEADS_t = test_CHashT.hpp test_IpCHashT.hpp\nHEADS_bm = bench.hpp\n\n# name of generating file\nTARGET_t = exe_test # test\nTARGET_bm = exe_bench # benchmark\nTARGET_bm_uM = exe_bench_uM # benchmark\nTARGET_sp = exe_sProc # Statistical processing\n\n# remove files\nRMs = *.stackdump __pycache__ tmpDir\n\n# compilation options\nCFLAGS += -L./sstd/lib -I./sstd/include -lsstd # sstd\nCFLAGS += -L./googletest-master/build/lib -I./googletest-master/googletest/include -lgtest -pthread # google test\nCFLAGS += -I./sparsehash-master/src # sparsehash\nCFLAGS += -std=c++11 # CFLAGS += -std=gnu++0x\nCFLAGS += -Wall\n#CFLAGS += -Wextra\nCFLAGS += -O3\n\nCFLAGS_t += -DEBUG\nCFLAGS_bm += -DNDEBUG\nCFLAGS_sp += -DNDEBUG\n\n#------------------------------------------------------------\n\nBACKUP_DIR = ./backup\nALL_FILES = $(wildcard ./*)\nTMP_DIRS = $(wildcard ./tmp*)\nLIBS_DIRS = ./sstd ./googletest-master ./sparsehash-master\nBACKUP_FILES = $(filter-out $(TMP_DIRS) $(LIBS_DIRS) ./$(TARGET_t) ./$(TARGET_bm) $(BACKUP_DIR), $(ALL_FILES))\nTIME_STAMP = `date +%Y_%m%d_%H%M`\n\n\n# when you need to check the change of files in lib, you need to change file name to a not-existing name like \"FORCE_XXX\".\n#LIB_SSTD = FORCE_SSTD\n#LIB_GOOGLETEST = FORCE_GOOGLETEST\nLIB_SSTD = ./sstd/lib/libsstd.a\nLIB_GOOGLETEST = ./googletest-master/build/lib/libgtest.a\nLIB_flat = ./flat_hash_map-master/flat_hash_map.hpp\nLIB_sparsehash = ./sparsehash-master/src/sparsehash/internal/sparseconfig.h\n\n\n# generate exe file\nTARGET_all = FORCE_MAKEALL\n$(TARGET_all): $(LIB_SSTD) $(LIB_GOOGLETEST) $(LIB_sparsehash) $(LIB_flat) $(TARGET_t) $(TARGET_bm) $(TARGET_bm_uM) $(TARGET_sp)\n\t@echo \"make all\"\n$(TARGET_t): $(SRCS_t) $(HEADS) $(HEADS_t)\n\t@echo \"\"\n\t@echo \"------------------------------------------------------------\"\n\t@echo \"SRCS_t: \\n$(SRCS_t)\\n\"\n\t@echo \"CFLAGS: \\n$(CFLAGS)\"\n\t@echo \"------------------------------------------------------------\"\n\t$(CXX) -o $(TARGET_t) $(SRCS_t) $(CFLAGS) $(CFLAGS_t)\n\t@echo \"\"\n$(TARGET_bm): $(SRCS_bm) $(HEADS) $(HEADS_bm)\n\t@echo \"\"\n\t@echo \"------------------------------------------------------------\"\n\t@echo \"SRCS_bm: \\n$(SRCS_bm)\\n\"\n\t@echo \"CFLAGS: 
\\n$(CFLAGS)\"\n\t@echo \"------------------------------------------------------------\"\n\t$(CXX) -o $(TARGET_bm) $(SRCS_bm) $(CFLAGS) $(CFLAGS_bm)\n\t@echo \"\"\n$(TARGET_bm_uM): $(SRCS_bm_uM) $(HEADS) $(HEADS_bm)\n\t@echo \"\"\n\t@echo \"------------------------------------------------------------\"\n\t@echo \"SRCS_bm_uM: \\n$(SRCS_bm_uM)\\n\"\n\t@echo \"CFLAGS: \\n$(CFLAGS)\"\n\t@echo \"------------------------------------------------------------\"\n\t$(CXX) -o $(TARGET_bm_uM) $(SRCS_bm_uM) $(CFLAGS) $(CFLAGS_bm)\n\t@echo \"\"\n$(TARGET_sp): $(SRCS_sp)\n\t@echo \"\"\n\t@echo \"------------------------------------------------------------\"\n\t@echo \"SRCS_st: \\n$(SRCS_sp)\\n\"\n\t@echo \"CFLAGS: \\n$(CFLAGS)\"\n\t@echo \"------------------------------------------------------------\"\n\t$(CXX) -o $(TARGET_sp) $(SRCS_sp) $(CFLAGS) $(CFLAGS_sp)\n\t@echo \"\"\n\n\n$(LIB_SSTD):\n\t@echo \"\"\n\t@unzip -n SubStandardLibrary-SSTD--master.zip\n\t@mv SubStandardLibrary-SSTD--master/sstd ./\n\t@rm -rf SubStandardLibrary-SSTD--master/\n\t@(cd ./sstd; make -j)\n\n$(LIB_flat):\n\t@echo \"\"\n\t@unzip -n flat_hash_map-master.zip\n\n$(LIB_GOOGLETEST):\n\t@echo \"\"\n\t@unzip -n googletest-master.zip\n\t@(cd ./googletest-master; mkdir -p build; cd build; cmake ..; make)\n\n$(LIB_sparsehash):\n\t@echo \"\"\n\t@unzip -n sparsehash-master.zip\n\t@(cd ./sparsehash-master; ./configure; make -j)\n\n\n.PHONY: all\nall:\n\t@(make clean)\n\t@(make)\n\n\n.PHONY: clean\nclean:\n\t-rm -rf $(TARGET_t)\n\t-rm -rf $(TARGET_bm)\n\t-rm -rf $(TARGET_bm_uM)\n\t-rm -rf $(TARGET_sp)\n\t-rm -rf flat_hash_map-master\n\t-rm -rf googletest-master\n\t-rm -rf sparsehash-master\n\t-rm -rf sstd\n\t-rm -rf $(RMs)\n#\t$(if $(patch_txt_exists) ,$(rm *.stackdump),)\n#\t-rm -f $(OBJS) $(DEPS) $(TARGET)\n\n\n.PHONY: zip\nzip:\n\t-rm -rf $(RMs)\n\t@mkdir -p $(BACKUP_DIR)\n\tzip -r $(BACKUP_DIR)/${TIME_STAMP}$(m).zip $(BACKUP_FILES)\n.PHONY: backup\nbackup:\n\t@(make zip)\n\t@(make)\n# when you need comments for backup, just type\n# $ make backup m=_comment_will_be_inserted_after_the_date\n\n.PHONY: updateLib\nupdateLib:\n\twget https://github.com/admiswalker/SubStandardLibrary-SSTD-/archive/master.zip -O SubStandardLibrary-SSTD--master.zip\n\t-rm -rf sstd # remove previous versions of sstd\n\n.PHONY: steps\nsteps: $(SRCS_t) $(SRCS_bm) $(HEADS) $(HEADS_t) $(HEADS_bm)\n\t@echo \"$^\" | xargs wc -l\n\t@echo \"\"\n\t@(cd ./sstd; make steps)\n\n" }, { "alpha_fraction": 0.5396648049354553, "alphanum_fraction": 0.708379864692688, "avg_line_length": 23.571428298950195, "blob_id": "ad4af1084da18c45a106472b2efe07e670f9b740", "content_id": "d0c53927c168f44dbe7f82e8d9e149af24bf31c8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 897, "license_type": "no_license", "max_line_length": 81, "num_lines": 35, "path": "/FNV_Hash.hpp", "repo_name": "admiswalker/InPlaceChainedHashTable-IpCHashT-", "src_encoding": "UTF-8", "text": "/* -*- coding: utf-8 -*- */\r\n#pragma once\r\n\r\n#include <stdio.h>\r\n#include <stdint.h>\r\n\r\n#ifdef _WIN32\r\n\ttypedef unsigned __int64 UINT64;\r\n\ttypedef unsigned __int32 UINT32;\r\n#else\r\n\ttypedef unsigned long long UINT64;\r\n\ttypedef unsigned long UINT32;\r\n#endif\r\n\r\n/*\r\n * FNV Constants\r\n */\r\nstatic const UINT32 FNV_OFFSET_BASIS_32 = 2166136261ul;\r\nstatic const UINT64 FNV_OFFSET_BASIS_64 = 14695981039346656037ull;\r\n\r\nstatic const UINT32 FNV_PRIME_32 = 16777619ul;\r\nstatic const UINT64 FNV_PRIME_64 = 1099511628211ull;\r\n/*\r\n#define 
FNV_OFFSET_BASIS_32 2166136261ul\r\n#define FNV_OFFSET_BASIS_64  14695981039346656037ull\r\n\r\n#define FNV_PRIME_32 16777619ul\r\n#define FNV_PRIME_64 1099511628211ull\r\n//*/\r\n\r\n/*\r\n * FNV Hash Algorithm\r\n */\r\nvoid fnv_1_hash_32( UINT32& return_Hash, unsigned char*& bytes, size_t& length );\r\nvoid fnv_1_hash_64( UINT64& return_Hash, unsigned char*& bytes, size_t& length );\r\n" }, { "alpha_fraction": 0.35749688744544983, "alphanum_fraction": 0.3636927008628845, "avg_line_length": 81.03389739990234, "blob_id": "a5033ba2b045db360ff612ea208d57fbc02f9fc6", "content_id": "30e50bef4805d3d7fc4e214fd73a72c66230701f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 4842, "license_type": "no_license", "max_line_length": 196, "num_lines": 59, "path": "/README.md", "repo_name": "admiswalker/InPlaceChainedHashTable-IpCHashT-", "src_encoding": "UTF-8", "text": "# InPlaceChainedHashTable-IpCHashT-\n\nThis is an implementation plan for **IpCHashT (In-place Chained Hash Table)**.\n\n## Usage\n```\n$ git clone [email protected]:admiswalker/InPlaceChainedHashTable-IpCHashT-\n$ cd ./InPlaceChainedHashTable-IpCHashT-\n```\n### Compile\n```\n$ make\n```\n### Run test\n```\n$ ./exe_test\n```\nIn a test, assert() will be enabled.\n### Run benchmark\n```\n$ ./exe_bench\n```\nIn a benchmark, assert() will be disabled using the -DNDEBUG compile option.\n\n### Run statistical process\n```\n$ ./exe_sProc\n```\n\n## File descriptions\n\n| File or directory name                    | Description                                                                              | Origin |\n|-------------------------------------------|----------------------------------------------------------------------------------------|:------:|\n| bench                                     | Results of the benchmarks                                                                |        |\n| tmpBench                                  | Output directory of the benchmarks                                                       |        |\n| tmpDir                                    | Temporary directory for graph plotting                                                   |        |\n| CHashT.hpp                                | Implementation of \"sstd::CHashT\"                                                       |        |\n| FNV\\_Hash.cpp                             | Hash function used only for implementation tests                                         | [Qiita](https://qiita.com/Ushio/items/a19083514d087a57fc72) |\n| FNV\\_Hash.hpp                             | Hash function used only for implementation tests                                         | [Qiita](https://qiita.com/Ushio/items/a19083514d087a57fc72) |\n| IpCHashT.hpp                              | Implementation of \"sstd::IpCHashT\" (the proposed method)                               |        |\n| Makefile                                  | Makefile                                                                                 |        |\n| README.md                                 | This file                                                                                |        |\n| ```SubStandardLibrary-SSTD--master.zip``` | A set of convenience functions                                                           | [GitHub](https://github.com/admiswalker/SubStandardLibrary) |\n| bench.hpp                                 | Benchmark                                                                                |        |\n| exe\\_bench                                | Binary file for benchmark                                                                |        |\n| exe\\_bench\\_uM                            | Binary file for benchmarking allocated memory size called from exe\\_bench               |        |\n| exe\\_sProc                                | Binary file for statistical process of \"main\\_sProc.cpp\"                               |        |\n| exe\\_test                                 | Binary file for \"test_CHashT.hpp\" and \"test_IpCHashT.hpp\" called from \"main\\_test.cpp\" |        |\n| ```flat_hash_map-master.zip```            | Implementation of \"ska::flat\\_hash\\_map\"                                               | [GitHub](https://github.com/skarupke/flat_hash_map) |\n| ```googletest-master.zip```               | Google's C++ test framework                                                              | [GitHub](https://github.com/google/googletest) |\n| main\\_bench.cpp                           | Entry point for \"bench.hpp\"                                                            |        |\n| main\\_bench\\_usedMemory.cpp               | Entry point of exe\\_bench\\_uM for benchmarking allocated memory size                     |        |\n| main\\_sProc.cpp                           | Entry point for generating the merged graph. 
| |\n| main\\_test.cpp                           | Entry point for \"test\\_CHashT.hpp\" and \"test\\_IpCHashT.hpp\"                          |        |\n| plots.py                                  | Plotting functions for benchmark                                                         |        |\n| ```sparsehash-master.zip```               | Implementation of \"google::dense\\_hash\\_map\"                                           | [GitHub](https://github.com/sparsehash/sparsehash) |\n| test\\_CHashT.hpp                          | Test code for \"CHashT.hpp\"                                                             |        |\n| test\\_IpCHashT.hpp                        | Test code for \"IpCHashT.hpp\"                                                           |        |\n| typeDef.h                                 | Type definitions for integer                                                             |        |\n\n\n" }, { "alpha_fraction": 0.5330158472061157, "alphanum_fraction": 0.5794486403465271, "avg_line_length": 32.984893798828125, "blob_id": "2b1104d8c6a74ce1dac972b901099727d8d4b25f", "content_id": "f960f54cb219d759ed463b6d62fd856119cc8fdd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 10927, "license_type": "no_license", "max_line_length": 145, "num_lines": 331, "path": "/test_CHashT.hpp", "repo_name": "admiswalker/InPlaceChainedHashTable-IpCHashT-", "src_encoding": "UTF-8", "text": "#include <random>\n#include \"./CHashT.hpp\"\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n// definition of the user hash function\n\nnamespace usrDef_in_CHashT{ class hashFn; }\nclass usrDef_in_CHashT::hashFn{\nprivate:\npublic:\n\thashFn(){}\n\t~hashFn(){}\n\tsize_t operator()(const uint64& key){ return key; }\n};\nTEST(sstd_CHashT, use_usr_defined_hash_func){\n\tsstd::CHashT<uint64, uint64, usrDef_in_CHashT::hashFn> hashT(10);\n\tauto\n\titr = hashT.insert(1, 10); ASSERT_TRUE( itr!=hashT.end() );\n\titr = hashT.find(1); ASSERT_TRUE( itr!=hashT.end() ); ASSERT_TRUE( itr.first()==1 ); ASSERT_TRUE( itr.second()==10 );\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\nTEST(sstd_CHashT, insert){\n\tsstd::CHashT<uint64, uint64> hashT(100);\n\t\n\thashT.use_tIdx_dbg = true; // enable debug option\n\thashT.tIdx_dbg = 0; // force to set key-val on the table index \"tIdx_dbg\".\n\t\n\t// case 1. insertion of key (there is no conflict)\n\tauto\n\titr = hashT.insert( 1, 10); ASSERT_TRUE( hashT.pT_dbg()[0].key==1 ); ASSERT_TRUE( hashT.pT_dbg()[0].val==10 );\n\t\n\t// case 2. insertion of key solving the conflict of tIdx. (the table index (tIdx) is the hash value modulo the table size).\n\titr = hashT.insert( 2, 20); ASSERT_TRUE( hashT.pT_dbg()[0].pNext->key==2 ); ASSERT_TRUE( hashT.pT_dbg()[0].pNext->val==20 );\n\titr = hashT.insert( 3, 30); ASSERT_TRUE( hashT.pT_dbg()[0].pNext->pNext->key==3 ); ASSERT_TRUE( hashT.pT_dbg()[0].pNext->pNext->val==30 );\n\t\n\t// case 3. conflict of the key value\n\t// type 1. on the table.\n\titr = hashT.insert( 1, 99); ASSERT_TRUE( itr.key()==1 ); ASSERT_TRUE( itr.val()==10 );\n\t// type 2. 
on the singly linked list.\n\titr = hashT.insert( 2, 99); ASSERT_TRUE( itr.key()==2 ); ASSERT_TRUE( itr.val()==20 );\n\titr = hashT.insert( 3, 99); ASSERT_TRUE( itr.key()==3 ); ASSERT_TRUE( itr.val()==30 );\n}\nTEST(sstd_CHashT, insert_f){\n\tsstd::CHashT<uint64, uint64> hashT(100);\n\t\n\thashT.insert( 1, 10);\n\tauto\n\titr = hashT.find(1); ASSERT_TRUE( itr.key()==1 ); ASSERT_TRUE( itr.val()==10 );\n\t\n\titr = hashT.find(1); ASSERT_TRUE( itr!=hashT.end() );\n\tif(itr!=hashT.end()){ itr.val()=20; } // over write\n\titr = hashT.find(1); ASSERT_TRUE( itr.key()==1 ); ASSERT_TRUE( itr.val()==20 );\n\t\n\t// insert by itr\n\titr = hashT.find(4); ASSERT_TRUE(!(itr!=hashT.end()));\n\thashT.insert(itr, 4); itr.val() = 99;\n\t{ auto itr = hashT.find(4); ASSERT_TRUE(itr.val()==99); }\n}\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n// insert for STL compatibility\n\nTEST(sstd_CHashT, STL_c_insert_01){\n\t// STL insert (1). Ref: https://cpprefjp.github.io/reference/unordered_map/unordered_map/insert.html\n\t\n\tsstd::CHashT<uint64, uint64> hashT;\n\tauto itr_TF1 = hashT.insert(std::pair<uint64,uint64>(1,1));\n\tASSERT_TRUE( itr_TF1.second==true );\n\tauto itr_TF2 = hashT.insert(std::pair<uint64,uint64>(1,1));\n\tASSERT_TRUE( itr_TF2.second==false );\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\nTEST(sstd_CHashT, find){\n\t// case 1. when key is on the table\n\t{\n\t\tsstd::CHashT<uint64, uint64> hashT(1024);\n\t\t// key, val\n\t\thashT.insert(123, 1230);\n\t\t\n\t\tauto itr = hashT.find(123); ASSERT_TRUE( itr!=hashT.end() ); ASSERT_TRUE( itr.val()==1230 );\n\t}\n\t\n\t// case 2. when key is on a singly linked list\n\t{\n\t\tsstd::CHashT<uint64, uint64> hashT(1024);\n\t\t\n\t\thashT.use_tIdx_dbg = true;\n\t\thashT.tIdx_dbg = 0;\n\t\t// key, val\n\t\thashT.insert(1, 10);\n\t\thashT.insert(2, 20);\n\t\thashT.insert(3, 30);\n\t\thashT.insert(4, 40);\n\t\thashT.insert(5, 50);\n\t\t\n\t\tauto\n\t\titr = hashT.find(1); ASSERT_TRUE( itr!=hashT.end() ); ASSERT_TRUE( itr.val()==10 );\n\t\titr = hashT.find(2); ASSERT_TRUE( itr!=hashT.end() ); ASSERT_TRUE( itr.val()==20 );\n\t\titr = hashT.find(3); ASSERT_TRUE( itr!=hashT.end() ); ASSERT_TRUE( itr.val()==30 );\n\t\titr = hashT.find(4); ASSERT_TRUE( itr!=hashT.end() ); ASSERT_TRUE( itr.val()==40 );\n\t\titr = hashT.find(5); ASSERT_TRUE( itr!=hashT.end() ); ASSERT_TRUE( itr.val()==50 );\n\t\titr = hashT.find(6); ASSERT_TRUE(!(itr!=hashT.end()));\n\t}\n}\nTEST(sstd_CHashT, sstd_CHashT_operator){\n\tsstd::CHashT<uint64, uint64> hashT(100);\n\tauto itr = sstd_CHashT::iterator<uint64, uint64>();\n\t\n\titr = hashT.insert( 1, 10); ASSERT_TRUE( itr!=hashT.end() );\n\t\n\t// operator !=(const class itr_m& rhs). 
\"inline bool operator!=(const class itr_m& rhs){ return this->TF != false; }\"\n\titr = hashT.find(1); ASSERT_TRUE( itr!=hashT.end() );\n}\nTEST(sstd_CHashT, iterator){\n\tsstd::CHashT<uint64, uint64> hashT(30);\n\thashT.use_tIdx_dbg = true;\n\t\n\thashT.tIdx_dbg = 0;\n\thashT.insert(1, 10);\n\t\n\thashT.tIdx_dbg = 5;\n\thashT.insert(2, 20);\n\thashT.insert(3, 30);\n\thashT.insert(4, 40);\n\t\n\thashT.tIdx_dbg = 7;\n\thashT.insert(5, 50);\n\thashT.tIdx_dbg = 8;\n\thashT.insert(6, 60);\n\t\n\thashT.tIdx_dbg = 20;\n\thashT.insert(7, 70);\n\t\n//\tprintf(\"■ print internal value of iterator\\n\");\n//\tauto itr=hashT.begin();\n//\titr.print_dbg();\n//\tprintf(\"\\n\");\n//\titr._pElem()->print_dbg();\n//\tprintf(\"\\n\");\n\t\n//\tprintf(\"■ print const-key and non-const-value of element\\n\");\n\tuint i=0;\n\tfor(auto itr=hashT.begin(); itr!=hashT.end(); ++itr){\n//\t\titr.print_dbg();\n//\t\tsstd::printn(itr.key());\n//\t\tsstd::printn(itr.val());\n//\t\tprintf(\"\\n\");\n\t\ti++;\n\t}\n\tASSERT_TRUE( i==7 );\n\t\n\t//---\n\t\n\t// erase all\n\tfor(auto itr=hashT.begin(); itr!=hashT.end(); ++itr){\n\t\thashT.erase(itr);\n\t}\n\tASSERT_TRUE( hashT.size()==0ull );\n\t\n}\nTEST(sstd_CHashT, erase){\n\t\n\t// case 1. erase an element on the table without singly linked list.\n\t{\n\t\tsstd::CHashT<uint64, uint64> hashT(100);\n\t\t\n\t\tauto itr = hashT.insert( 1, 10);\n\t\tASSERT_TRUE( hashT.erase(1)==true );\n\t\titr = hashT.find(1); ASSERT_TRUE( !(itr!=hashT.end()) );\n\t}\n\t\n\t// case 2. erase an element on the table with singly linked list.\n\t{\n\t\tsstd::CHashT<uint64, uint64> hashT(100);\n\t\t\n\t\thashT.use_tIdx_dbg = true; // enable debug option\n\t\thashT.tIdx_dbg = 0; // force to set key-val on the table index \"tIdx_dbg\".\n\t\t\n\t\tauto\n\t\titr = hashT.insert( 1, 10);\n\t\titr = hashT.insert( 2, 20);\n\t\titr = hashT.insert( 3, 30);\n\t\titr = hashT.insert( 4, 40);\n\t\t\n\t\tASSERT_TRUE( hashT.erase(1)==true );\n\t\titr = hashT.find(1); ASSERT_TRUE( !(itr!=hashT.end()) );\n\t\t\n\t\tASSERT_TRUE( hashT.pT_dbg()[0].isUsed==true );\n\t\tASSERT_TRUE( hashT.pT_dbg()[0].key==2 );\n\t\tASSERT_TRUE( hashT.pT_dbg()[0].val==20 );\n\t\t\n\t\tASSERT_TRUE( hashT.pT_dbg()[0].pNext->isUsed==true );\n\t\tASSERT_TRUE( hashT.pT_dbg()[0].pNext->key==3 );\n\t\tASSERT_TRUE( hashT.pT_dbg()[0].pNext->val==30 );\n\t\tASSERT_TRUE( hashT.pT_dbg()[0].pNext->pNext!=0ull );\n\t\t\n\t\tASSERT_TRUE( hashT.pT_dbg()[0].pNext->pNext->isUsed==true );\n\t\tASSERT_TRUE( hashT.pT_dbg()[0].pNext->pNext->key==4 );\n\t\tASSERT_TRUE( hashT.pT_dbg()[0].pNext->pNext->val==40 );\n\t\tASSERT_TRUE( hashT.pT_dbg()[0].pNext->pNext->pNext==0ull );\n\t}\n\t\n\t// case 3. 
erase an element on the singly linked list\n\t{\n\t\tsstd::CHashT<uint64, uint64> hashT(100);\n\t\t\n\t\thashT.use_tIdx_dbg = true; // enable debug option\n\t\thashT.tIdx_dbg = 0; // force to set key-val on the table index \"tIdx_dbg\".\n\t\t\n\t\tauto\n\t\titr = hashT.insert( 1, 10);\n\t\titr = hashT.insert( 2, 20);\n\t\titr = hashT.insert( 3, 30);\n\t\titr = hashT.insert( 4, 40);\n\t\t\n\t\tASSERT_TRUE( hashT.erase(3)==true );\n\t\titr = hashT.find(1); ASSERT_TRUE( itr!=hashT.end() );\n\t\titr = hashT.find(2); ASSERT_TRUE( itr!=hashT.end() );\n\t\titr = hashT.find(3); ASSERT_TRUE(!( itr!=hashT.end() ));\n\t\titr = hashT.find(4); ASSERT_TRUE( itr!=hashT.end() );\n\t\t\n\t\tASSERT_TRUE( hashT.erase(4)==true );\n\t\titr = hashT.find(1); ASSERT_TRUE( itr!=hashT.end() );\n\t\titr = hashT.find(2); ASSERT_TRUE( itr!=hashT.end() );\n\t\titr = hashT.find(3); ASSERT_TRUE(!( itr!=hashT.end() ));\n\t\titr = hashT.find(4); ASSERT_TRUE(!( itr!=hashT.end() ));\n\t}\n}\nTEST(sstd_CHashT, erase_byItr){\n\t// find key-val pair, get value and erase the pair\n\t\n\tsstd::CHashT<uint64, uint64> hashT(100);\n\t\n\tauto\n\titr = hashT.insert( 1, 10); ASSERT_TRUE( itr!=hashT.end() );\n\titr = hashT.find(1); ASSERT_TRUE( itr!=hashT.end() );\n\tbool ret = hashT.erase(itr); ASSERT_TRUE( ret==true );\n\titr = hashT.find(1); ASSERT_TRUE(!( itr!=hashT.end() ));\n}\nTEST(sstd_CHashT, rehash){\n\tuint64 seed=12345ull; // using a constant value in order to provide reproducibility.\n\tstd::mt19937_64 mt(seed); // pseudo random number generator\n\t\n\tuint64 limitSize = 1031; // 7th value of 'tSizeL' (table size list).\n\t\n\tsstd::CHashT<uint64, uint64> hashT(0);\n\t\n\t{\n\t\tfor(uint i=0; i<limitSize; i++){\n\t\t\tuint64 r = mt();\n\t\t\thashT.insert(r, r);\n\t\t}\n\t\tASSERT_TRUE( hashT.size()     ==1031 );\n//\t\tASSERT_TRUE( hashT.tableSize()==1031 ); // this test is for prime table.\n\t}\n\t{\n\t\tuint64 r = mt();\n\t\thashT.insert(r, r); // rehash will occur\n\t\tASSERT_TRUE( hashT.size()     ==1032 );\n//\t\tASSERT_TRUE( hashT.tableSize()==2053 ); // this test is for prime table.\n\t}\n}\nTEST(sstd_CHashT, stressTest){\n\t// this is a stress test of the chained hash table\n\t\n\tuint64 seed=12345ull; // using a constant value in order to provide reproducibility.\n\t\n\tsstd::CHashT<uint64, uint64> hashT;\n\tuint64 limitSize = 543210;\n//\tuint64 limitSize = 42949672; // 42949672 elements == 1 GB or more\n\t// when bool is 1 byte, one element of \"struct sstd_CHashT::element\" is 25 bytes (== 1 + key8 + val8 + p8).\n\t// thus, a 1 GB table would have roughly 42949672.96 (== 1*1024*1024*1024 bytes / 25) elements.\n\t// note that we also need to account for the elements on the singly linked lists and for the new table during rehashing.\n\t\n\t// add\n\t{\n\t\tstd::mt19937_64 mt(seed); // pseudo random number generator\n\t\t\n\t\tfor(uint64 i=0; i<limitSize; i++){\n\t\t\tuint64 r = mt();\n\t\t\thashT.insert(r, r);\n\t\t}\n\t\tASSERT_TRUE( hashT.size()==limitSize );\n\t}\n\t\n\t// find\n\t{\n\t\tstd::mt19937_64 mt(seed); // pseudo random number generator\n\t\t\n\t\tfor(uint64 i=0; i<limitSize; i++){\n\t\t\tuint64 r = mt();\n\t\t\tauto itr = hashT.find(r);\n\t\t\tASSERT_TRUE( itr!=hashT.end() );\n\t\t\tASSERT_TRUE( itr.key()==r );\n\t\t\tASSERT_TRUE( itr.val()==r );\n\t\t}\n\t}\n\t\n\t// erase\n\t{\n\t\tstd::mt19937_64 mt(seed); // pseudo random number generator\n\t\t\n\t\tfor(uint64 i=0; i<limitSize; i++){\n\t\t\tuint64 r = mt();\n\t\t\tASSERT_TRUE( hashT.erase(r)==true );\n\t\t}\n\t\tASSERT_TRUE( hashT.size()==0 
);\n\t}\n}\nTEST(sstd_CHashT, OPE_bracket){\n\t// []\n\t\n\t{\n\t\tsstd::CHashT<uint64, uint64> hashT;\n\t\thashT[1] = 10; // insert()\n\t\t\n\t\tauto itr = hashT.find(1);\n\t\tASSERT_TRUE( itr!=hashT.end() );\n\t\tuint64 ret = hashT[1]; // find()\n\t\tASSERT_TRUE( ret==10 );\n\t}\n\t{\n\t\tsstd::CHashT<uint64, uint64> hashT;\n\t\tuint64 ret = hashT[1]; // insert() T_key, initialize an empty T_val, and return the empty T_val.\n\t\tret++; // avoiding \"warning: unused variable ‘ret’\"\n\t}\n}\n\n//-----------------------------------------------------------------------------------------------------------------------------------------------\n\n" }, { "alpha_fraction": 0.5745875239372253, "alphanum_fraction": 0.6191529035568237, "avg_line_length": 41.58695602416992, "blob_id": "a5fd05a41540119476f7d45dbc0d322f4858bb99", "content_id": "02e22b05563fa5d7c6c68ad1efb0efd0640e7abc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6017, "license_type": "no_license", "max_line_length": 258, "num_lines": 138, "path": "/plots.py", "repo_name": "admiswalker/InPlaceChainedHashTable-IpCHashT-", "src_encoding": "UTF-8", "text": "import matplotlib as mpl # workaround for \"QXcbConnection: Could not connect to display\"\nmpl.use('Agg') # workaround for \"QXcbConnection: Could not connect to display\"\nimport matplotlib.pyplot as plt # workaround for \"QXcbConnection: Could not connect to display\"\nimport matplotlib.ticker as tick\nimport math\nimport itertools\n\n#------------------------------------------------------------------------------------------------------------------------------------------------\ndef flatten(rhs):\n    return list(itertools.chain(*rhs))\n\ndef float2digits(f):\n    if f==0:\n        return int(0)\n    elif f<1:\n        return -int(math.log10(1/f))\n    else:\n        return int(math.log10(f)) + 1\ndef vecLim2tickInterval(vecLim):\n    diff = vecLim[1] - vecLim[0]\n    digits = float2digits(diff)\n    interval = pow(10, digits)/10\n    \n    # make the tickNum more than 5.\n    tickNum = diff / interval\n    if tickNum <= 2:\n        interval = interval/10\n    elif tickNum <= 6:\n        interval = interval/2\n    \n    return interval\n\ndef vvec2graph_base(savePath, saveAs, xlabel, ylabel, vecLabel, labelLoc, vvecX, vvecY, vecXlim, vecYlim, xscale, yscale):\n    imgSize = 4\n    \n    plt.clf()\n    plt.rcParams[\"font.size\"] = 10*imgSize\n    fig = plt.figure(figsize=(8.5*imgSize, 4*imgSize)) # set the aspect ratio\n    ax1 = fig.add_subplot(111)\n    \n    ax1.grid(which='minor', linewidth=1, linestyle=':', color='gainsboro')\n    ax1.grid(which='major', linewidth=1, linestyle='-', color='silver'   )\n    \n    ax1.set_xlabel(xlabel)\n    ax1.set_xscale(xscale)\n    ax1.set_xlim(vecXlim[0], vecXlim[1])\n    \n    ax1.set_ylabel(ylabel)\n    ax1.set_yscale(yscale)\n    ax1.set_ylim(vecYlim[0], vecYlim[1])\n    \n    ax1.tick_params(pad=5*imgSize, which='major', direction='in', bottom=True, top=True, left=True, right=True, length=4*imgSize) # axis padding # which: apply the change to both major and minor ticks # draw ticks inward # add ticks on bottom, top, left, and right # tick width # tick length\n    ax1.tick_params(pad=5*imgSize, which='minor', direction='in', bottom=True, top=True, left=True, right=True, length=2*imgSize) # axis padding # which: apply the change to both major and minor ticks # draw ticks inward # add ticks on bottom, top, left, and right # tick width # tick length\n    if xscale=='linear':\n        ax1.xaxis.set_major_formatter(tick.ScalarFormatter(useMathText=True))\n        ax1.ticklabel_format(style='sci', axis='x', scilimits=(0,0))\n        ax1.xaxis.set_major_locator(tick.MultipleLocator( vecLim2tickInterval(vecXlim)   ))\n        ax1.xaxis.set_minor_locator(tick.MultipleLocator( 
vecLim2tickInterval(vecXlim)/4 ))\n if yscale=='linear':\n ax1.yaxis.set_major_locator(tick.MultipleLocator( vecLim2tickInterval(vecYlim) ))\n ax1.yaxis.set_minor_locator(tick.MultipleLocator( vecLim2tickInterval(vecYlim)/4 ))\n \n #cmap = plt.get_cmap(\"tab10\")\n darkred = '#640A1E' # 'crimson'\n vColor=['black', 'blue', 'fuchsia', 'red', darkred, 'green', 'darkorange']\n vLineStyle = ['solid', 'dashed', 'dashed', 'dashed', 'dashdot', 'dotted', 'dotted'] # solid, dashed, dashdot, dotted\n for i in range(len(vecLabel)):\n #ax1.plot(vvecX[i], vvecY[i], linewidth=1, color=cmap(i), label=vecLabel[i])\n ax1.plot(vvecX[i], vvecY[i], linewidth=1, color=vColor[i], linestyle=vLineStyle[i], label=vecLabel[i])\n \n alpha = 0.3\n if len(labelLoc)!=0: ax1.legend(framealpha=alpha, loc=labelLoc)\n else: ax1.legend(framealpha=alpha)\n \n for ext in saveAs:\n plt.savefig(savePath+ext, bbox_inches='tight')\n \n return\n#------------------------------------------------------------------------------------------------------------------------------------------------\n\n# labelLoc = 'upper left'\n# labelLoc = 'upper right'\n# labelLoc = 'lower right'\n# labelLoc = 'lower left'\n\ndef vvec2graph(savePath, saveAs, xlabel, ylabel, vecLabel, vvecX, vvecY):\n #vecXlim = [0.9, 55000000]\n vecXlim = [0.9, 225000000]\n vecYlim = [0.05, 1000]\n xscale = 'log'\n yscale = 'log'\n labelLoc = ''\n vvec2graph_base(savePath, saveAs, xlabel, ylabel, vecLabel, labelLoc, vvecX, vvecY, vecXlim, vecYlim, xscale, yscale)\n return\n\ndef vvec2graph_lf(savePath, saveAs, xlabel, ylabel, vecLabel, vvecX, vvecY):\n #vecXlim = [0.9, 55000000]\n vecXlim = [0.9, 225000000]\n vecYlim = [0, 1.025]\n xscale = 'log'\n yscale = 'linear'\n labelLoc = 'lower right'\n vvec2graph_base(savePath, saveAs, xlabel, ylabel, vecLabel, labelLoc, vvecX, vvecY, vecXlim, vecYlim, xscale, yscale)\n return\n\ndef vvec2graph_memory(savePath, saveAs, xlabel, ylabel, vecLabel, vvecX, vvecY):\n x_max = max(flatten(vvecX))\n margin=1.1\n xmargin=0; vecXlim=[]\n ymargin=0; vecYlim=[]\n if x_max<=5*1000000*margin:\n xmargin=4*10000; vecXlim=[ -xmargin, 5*1000000+xmargin] # GB\n ymargin=0.010; vecYlim=[0-ymargin, 0.6+ymargin] # GB\n else:\n xmargin=4*4*100000; vecXlim=[-xmargin, 2*100000000+xmargin] # GB\n ymargin=0.250; vecYlim=[0-ymargin, 18.00+ymargin] # GB\n xscale = 'linear'\n yscale = 'linear'\n labelLoc = 'upper left'\n vvec2graph_base(savePath, saveAs, xlabel, ylabel, vecLabel, labelLoc, vvecX, vvecY, vecXlim, vecYlim, xscale, yscale)\n return\n\ndef vvec2graph_et_insert(savePath, saveAs, xlabel, ylabel, vecLabel, vvecX, vvecY):\n x_max = max(flatten(vvecX))\n margin=1.1\n xmargin=0; vecXlim=[]\n ymargin=0; vecYlim=[]\n if x_max<=5*1000000*margin:\n xmargin=4*10000; vecXlim=[ -xmargin, 5*1000000+xmargin]\n ymargin=0.025; vecYlim=[0-ymargin, 2.5+ymargin]\n else:\n xmargin=4*10000; vecXlim=[ -xmargin, 202000000+xmargin]\n ymargin=0.025; vecYlim=[0-ymargin, 130+ymargin]\n xscale = 'linear'\n yscale = 'linear'\n labelLoc = 'upper left'\n vvec2graph_base(savePath, saveAs, xlabel, ylabel, vecLabel, labelLoc, vvecX, vvecY, vecXlim, vecYlim, xscale, yscale)\n return \n\n" } ]
13
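A minimal usage sketch for the sstd::CHashT interface captured in the record above, assembled only from calls that appear in the snapshot's own test_CHashT.hpp (insert, find, erase, operator[], and the iterator's key()/val() accessors). It is illustrative rather than a file from the snapshot, and it assumes CHashT.hpp pulls in the uint64 typedef the tests rely on.
```
#include "CHashT.hpp" // assumed to provide uint64, as the repo's tests suggest

int main(){
	sstd::CHashT<uint64, uint64> hashT(100); // initial table size, as used in the tests

	hashT.insert(1, 10);      // insert a key-value pair; returns an iterator
	hashT[2] = 20;            // operator[] inserts a default-constructed value, then assigns

	auto itr = hashT.find(1); // find() returns an iterator; compare against end()
	if(itr!=hashT.end()){
		itr.val() = 11;       // val() is writable; key() is read-only, as in the tests
	}

	hashT.erase(2);           // erase by key (returns bool); erase by iterator also exists
	return (int)hashT.size(); // number of stored elements
}
```
Because erase() returns a bool rather than throwing, a missing key is detected with a plain if-check, matching the ASSERT_TRUE( hashT.erase(r)==true ) pattern in the stress test.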
colinsongf/LeetCode
https://github.com/colinsongf/LeetCode
5a12f12ce110b4eb5bc44fd0c66c5755935a1b47
381c9745ecc9cfab035537807e2898272af78965
2e8f4edd1d7715f4dede179ee00999fbc95412fc
refs/heads/master
2021-06-11T21:12:33.229978
2016-12-03T04:46:07
2016-12-03T04:46:07
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.3681592047214508, "alphanum_fraction": 0.3731343150138855, "avg_line_length": 24.125, "blob_id": "4db1d0859215576781de3efb2c789e9042fbaa04", "content_id": "a977676e8589a447364793cb8469d5f89f84a169", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 603, "license_type": "no_license", "max_line_length": 86, "num_lines": 24, "path": "/132 Pattern.cpp", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution {\npublic:\n bool find132pattern(vector<int>& nums) {\n int min = INT_MIN;\n stack<int> mystack;\n for (vector<int>::reverse_iterator it = nums.rbegin();it != nums.rend(); ++it)\n {\n if (*it < min)\n {\n return true;\n }\n else\n {\n while (!mystack.empty() && mystack.top() < *it)\n {\n min = mystack.top();\n mystack.pop();\n }\n mystack.push(*it);\n }\n }\n return false;\n }\n};\n" }, { "alpha_fraction": 0.3529411852359772, "alphanum_fraction": 0.35863378643989563, "avg_line_length": 18.518518447875977, "blob_id": "02366bd382fe3bcd95be33a8806e4774a958fbb7", "content_id": "509741358730de79579cc5331b0b81e97556b21b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 527, "license_type": "no_license", "max_line_length": 45, "num_lines": 27, "path": "/Random Pick Index.cpp", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution {\npublic:\n Solution(vector<int> nums):numbers(nums) \n {\n srand(time(NULL));\n }\n \n int pick(int target) {\n int fm = 1;\n int select;\n int i;\n for (i = 0;i < numbers.size(); ++i)\n {\n if (numbers[i] == target)\n {\n int r = rand() % fm++;\n if (r < 1)\n {\n select = i;\n }\n }\n }\n return select;\n }\nprivate:\n vector<int> numbers;\n};\n" }, { "alpha_fraction": 0.3914823830127716, "alphanum_fraction": 0.3972153961658478, "avg_line_length": 30.30769157409668, "blob_id": "e05fac84a3510855d4cc3e1fa0d31a25a136a635", "content_id": "5549540339d106328d2d55fe0505906403ced41f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1221, "license_type": "no_license", "max_line_length": 86, "num_lines": 39, "path": "/Circular Array Loop.cpp", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution {\npublic:\n bool circularArrayLoop(vector<int>& nums) {\n vector<int> flags(nums.size());\n for (vector<int>::iterator it = flags.begin();it != flags.end(); ++it)\n {\n *it = -1;\n }\n for (int i = 0;i < nums.size(); ++i)\n {\n int flag = i;\n if (flags[i] != -1)\n {\n continue;\n }\n flags[i] = i;\n int current = i;\n int next = (nums.size() + current + nums[current]) % nums.size();\n bool forward = nums[current] >= 0? 
true: false;\n bool direction = true;\n while (current != next && flags[next] == -1 && direction)\n {\n flags[current] = i;\n current = next;\n if ((forward && nums[current] < 0) || (!forward && nums[current] > 0))\n {\n direction = false;\n }\n next = (nums.size() + current + nums[current]) % nums.size();\n }\n if (current == next || !direction || flags[next] < i)\n {\n continue;\n }\n return true;\n }\n return false;\n }\n};\n" }, { "alpha_fraction": 0.44320711493492126, "alphanum_fraction": 0.4498886466026306, "avg_line_length": 23.94444465637207, "blob_id": "a67ec7de8394f6c26cfd2b1014ff8965656cef1b", "content_id": "2a77abf73ef32f63115c274b1cb8951ba2d15274", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 449, "license_type": "no_license", "max_line_length": 76, "num_lines": 18, "path": "/Minimum Moves to Equal Array Elements II.cpp", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution {\npublic:\n int minMoves2(vector<int>& nums) {\n int answer = 0;\n if (nums.empty())\n {\n return answer;\n }\n sort(nums.begin(), nums.end());\n int tnum = nums[nums.size() / 2];\n int temp;\n for (vector<int>::iterator it = nums.begin();it != nums.end(); ++it)\n {\n answer += *it <= tnum? tnum - *it: *it - tnum;\n }\n return answer;\n }\n};\n" }, { "alpha_fraction": 0.4549839198589325, "alphanum_fraction": 0.4726687967777252, "avg_line_length": 37.875, "blob_id": "a6168357c7be9a8ca7efd503c62a262af9816599", "content_id": "711f5d0c9610da0079b24f3b224fcf20b6c7a808", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 622, "license_type": "no_license", "max_line_length": 86, "num_lines": 16, "path": "/Number of Boomerangs.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution(object):\n def numberOfBoomerangs(self, points):\n counter = 0\n for ipoint in points:\n mapper = {}\n for jpoint in points:\n if ipoint == jpoint:\n continue\n distance = (ipoint[0] - jpoint[0]) ** 2 + (ipoint[1] - jpoint[1]) ** 2\n if distance not in mapper:\n mapper[distance] = 0\n mapper[distance] += 1\n for distance in mapper:\n if mapper[distance] > 1:\n counter += mapper[distance] * (mapper[distance] - 1)\n return counter\n" }, { "alpha_fraction": 0.4422604441642761, "alphanum_fraction": 0.44717445969581604, "avg_line_length": 24.4375, "blob_id": "cca837584df95c27b7ad4cde9792f7517e1e4c45", "content_id": "aff9b9952954d0c88336626802082050a691ed36", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 407, "license_type": "no_license", "max_line_length": 35, "num_lines": 16, "path": "/H-Index.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution(object):\n def hIndex(self, citations):\n \"\"\"\n :type citations: List[int]\n :rtype: int\n \"\"\"\n citations=sorted(citations)\n m=len(citations)\n h=0\n for i in xrange(m):\n if citations[i]<m-1-i:\n h=citations[i]\n elif citations[i]>=m-i:\n h=max(h,m-i)\n break\n return h\n" }, { "alpha_fraction": 0.4373464286327362, "alphanum_fraction": 0.4422604441642761, "avg_line_length": 28.071428298950195, "blob_id": "8888943fcd6dd20771fc3dcc203faf0e8640dd72", "content_id": "5d7f5caf51286552b95f43d01501d2f7d538c8e2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 407, "license_type": "no_license", "max_line_length": 73, "num_lines": 14, "path": "/Sort Characters By 
Frequency.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution(object):\n def frequencySort(self, s):\n dic = {}\n for ch in s:\n if ch not in dic:\n dic[ch] = 1\n else:\n dic[ch] += 1\n keys = sorted(dic.keys(), key = lambda x: dic[x], reverse = True)\n string = ''\n for key in keys:\n counter = dic[key]\n string += key * counter\n return string\n" }, { "alpha_fraction": 0.4779541492462158, "alphanum_fraction": 0.4850088059902191, "avg_line_length": 28.842105865478516, "blob_id": "63247205bc1b0c172984e52e582e996e89868ac2", "content_id": "bf8958dbed8a4c8e10789681f5c0989789746029", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 567, "license_type": "no_license", "max_line_length": 57, "num_lines": 19, "path": "/Partition Equal Subset Sum.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "import copy\n\nclass Solution(object):\n def canPartition(self, nums):\n if len(nums) == 1:\n return False\n summary = sum(nums)\n if summary % 2:\n return False\n summary /= 2\n can_reach_set = set([0])\n for number in nums:\n can_reach_set_copy = copy.copy(can_reach_set)\n for s in can_reach_set_copy:\n if s + number < summary:\n can_reach_set.add(s + number)\n if s + number == summary:\n return True\n return False\n" }, { "alpha_fraction": 0.3943950831890106, "alphanum_fraction": 0.40396445989608765, "avg_line_length": 29.47916603088379, "blob_id": "0e9c4c3e57a84373961edcef8fe3ce1730e3d128", "content_id": "82a25faa920f9946c85d7d2421a045419ae2280c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1463, "license_type": "no_license", "max_line_length": 92, "num_lines": 48, "path": "/Longest Repeating Character Replacement.cpp", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "bool cmp(const pair<const int, int> &i, const pair<const int, int> &j) {\n return i.second < j.second;\n}\n\nclass Solution {\npublic:\n int characterReplacement(string s, int k) {\n if (!s.size())\n {\n return 0;\n }\n int window_left = 0, window_right = 0;\n int max_size = 0;\n map<int, int> counter;\n counter[s[0]] = 1;\n while (window_left < s.size() && window_right < s.size())\n {\n map<int, int>::iterator it = max_element(counter.begin(), counter.end(), cmp);\n if (window_right - window_left + 1 - it->second <= k && window_right < s.size())\n {\n if (window_right - window_left + 1 > max_size)\n {\n max_size = window_right - window_left + 1;\n }\n window_right += 1;\n if (counter.find(s[window_right]) == counter.end())\n {\n counter[s[window_right]] = 1;\n }\n else\n {\n counter[s[window_right]] += 1;\n }\n }\n else\n {\n map<int, int>::iterator it = counter.find(s[window_left]);\n it->second -= 1;\n if (!it->second)\n {\n counter.erase(it);\n }\n window_left += 1;\n }\n }\n return max_size;\n }\n};\n" }, { "alpha_fraction": 0.375308632850647, "alphanum_fraction": 0.3802469074726105, "avg_line_length": 21.5, "blob_id": "b48b6a5d92f4c2f2385e8fe8394e9eacd30d9d2d", "content_id": "a69a61525d9c60db79b56368628f9ece7f34808b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 405, "license_type": "no_license", "max_line_length": 62, "num_lines": 18, "path": "/Is Subsequence.c", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "bool isSubsequence(char* s, char* t) {\n unsigned s_size = strlen(s), t_size = strlen(t);\n int s_pos = 0, t_pos = 0;\n while (s_pos < s_size)\n {\n 
while (t_pos < t_size && *(t + t_pos) != *(s + s_pos))\n {\n ++t_pos; \n } \n if (t_pos == t_size)\n {\n return false;\n } \n ++t_pos;\n ++s_pos;\n } \n return true;\n}\n" }, { "alpha_fraction": 0.301085889339447, "alphanum_fraction": 0.3089832067489624, "avg_line_length": 24.325000762939453, "blob_id": "2021bcc65c66425e7fd82e132be4db0a0dac9ce3", "content_id": "1b28df0b0e9897b70012f8a10733b56c9fcf6a17", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1013, "license_type": "no_license", "max_line_length": 84, "num_lines": 40, "path": "/4Sum-II.cpp", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution {\npublic:\n int fourSumCount(vector<int>& A, vector<int>& B, vector<int>& C, vector<int>& D)\n {\n map<int, int> mapper;\n map<int, int>::iterator it;\n int answer = 0;\n int size = A.size();\n for (int i = 0;i < size; ++i)\n {\n for (int j = 0;j < size; ++j)\n {\n int sum = A[i] + B[j];\n it = mapper.find(sum);\n if (it == mapper.end())\n {\n mapper[sum] = 1;\n }\n else\n {\n mapper[sum] += 1;\n }\n }\n }\n\n for (int i = 0;i < size; ++i)\n {\n for (int j = 0;j < size; ++j)\n {\n int sum = C[i] + D[j];\n it = mapper.find(sum * (-1));\n if (it != mapper.end())\n {\n answer += it->second;\n }\n }\n }\n return answer;\n }\n};\n" }, { "alpha_fraction": 0.4844337999820709, "alphanum_fraction": 0.48888128995895386, "avg_line_length": 29.44791603088379, "blob_id": "3e739d7598afb10e4c911a09ed8256b7894c1186", "content_id": "96e414ab3af6cca7f25a89fd2e8bc5ef5cd973b2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2923, "license_type": "no_license", "max_line_length": 120, "num_lines": 96, "path": "/Evaluate Division.cpp", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class QueryNode\n{\npublic:\n QueryNode(const string& nodename, double vertex, QueryNode* prev): _nodename(nodename), _vertex(vertex), _prev(prev)\n {\n }\n string _nodename;\n double _vertex;\n QueryNode* _prev;\n};\n\nclass Solution\n{\npublic:\n double calcEquationForOne(map<string, map<string, double> >& graph, const string& startnode, const string& endnode)\n {\n set<string> visited;\n visited.insert(startnode);\n stack<QueryNode*> openstack;\n QueryNode* current = new QueryNode(startnode, 1, NULL);\n openstack.push(current);\n while (!openstack.empty())\n {\n current = openstack.top();\n if (current->_nodename.compare(endnode) == 0)\n {\n break;\n }\n openstack.pop();\n map<string, double>& neibors = graph[current->_nodename];\n for (map<string, double>::iterator it = neibors.begin();it != neibors.end(); ++it)\n {\n if (visited.find(it->first) == visited.end())\n {\n visited.insert(it->first);\n QueryNode* neibor = new QueryNode(it->first, it->second, current);\n openstack.push(neibor);\n }\n }\n current = NULL;\n }\n if (!current)\n {\n return -1.0;\n }\n double answer = 1.0;\n while (current)\n {\n answer *= current->_vertex;\n current = current->_prev;\n }\n\n while (!openstack.empty())\n {\n current = openstack.top();\n openstack.pop();\n delete current;\n }\n return answer;\n }\n\n vector<double> calcEquation(\n vector<pair<string, string> > equations, \n vector<double>& values, \n vector<pair<string, string> > queries)\n { \n vector<double> answers;\n map<string, map<string, double> > graph;\n vector<pair<string, string> >::iterator eit = equations.begin();\n vector<double>::iterator vit = values.begin();\n while (eit != equations.end() && vit != values.end())\n 
{\n graph[eit->first][eit->second] = *vit;\n graph[eit->second][eit->first] = 1.0 / (*vit);\n ++eit, ++vit;\n }\n for (vector<pair<string, string> >::iterator qit = queries.begin();qit != queries.end(); ++qit)\n {\n double answer;\n if (graph.find(qit->first) == graph.end() || graph.find(qit->second) == graph.end())\n {\n answer = -1.0;\n }\n else if (qit->first.compare(qit->second) == 0)\n {\n answer = 1.0;\n }\n else\n {\n answer = calcEquationForOne(graph, qit->first, qit->second);\n }\n answers.push_back(answer);\n }\n return answers;\n }\n};\n" }, { "alpha_fraction": 0.3777472674846649, "alphanum_fraction": 0.39423078298568726, "avg_line_length": 33.66666793823242, "blob_id": "a7bc51580d62f4698b38ae411320a418f8b6ea83", "content_id": "e0dc91ec90e44169e245f6b211f999b6f868280e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 728, "license_type": "no_license", "max_line_length": 58, "num_lines": 21, "path": "/Island Perimeter.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution(object):\n def islandPerimeter(self, grid):\n perimeter = 0\n row = len(grid)\n if not row:\n return perimeter\n col = len(grid[0])\n if not col:\n return perimeter\n for i in range(row):\n for j in range(col):\n if grid[i][j]:\n if not i or not grid[i - 1][j]:\n perimeter += 1\n if i == row - 1 or not grid[i + 1][j]:\n perimeter += 1\n if not j or not grid[i][j - 1]:\n perimeter += 1\n if j == col - 1 or not grid[i][j + 1]:\n perimeter += 1\n return perimeter\n" }, { "alpha_fraction": 0.4250113368034363, "alphanum_fraction": 0.4426823854446411, "avg_line_length": 21.98958396911621, "blob_id": "5ff3d640a25091864bd98c7817611c6a8a5f7a78", "content_id": "ffc2e167b77c643b029babd4ea15d6c8e7560c8a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 2207, "license_type": "no_license", "max_line_length": 74, "num_lines": 96, "path": "/Add Two Numbers II.c", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "struct ListNode* reverseList(struct ListNode* head)\n{\n struct ListNode *current = head, *next, *prev = NULL;\n while (current)\n {\n next = current->next;\n current->next = prev;\n prev = current;\n if (!next)\n {\n break;\n }\n current = next;\n }\n return prev;\n}\n\nstruct ListNode* addTwoNumbers(struct ListNode* l1, struct ListNode* l2) {\n struct ListNode* nl1 = reverseList(l1);\n struct ListNode* nl2 = reverseList(l2);\n struct ListNode* head = NULL;\n struct ListNode* prev = NULL, *current;\n int jw = 0;\n while (nl1 && nl2)\n {\n current = malloc(sizeof(struct ListNode));\n current->val = (nl1->val + nl2->val + jw) % 10;\n jw = (nl1->val + nl2->val + jw) / 10;\n current->next = NULL;\n if (!prev)\n {\n prev = current;\n head = current;\n }\n else\n {\n prev->next = current;\n prev = current;\n }\n nl1 = nl1->next;\n nl2 = nl2->next;\n }\n while (nl1)\n {\n current = malloc(sizeof(struct ListNode));\n current->val = (nl1->val + jw) % 10;\n jw = (nl1->val + jw) / 10;\n current->next = NULL;\n if (!prev)\n {\n prev = current;\n head = current;\n }\n else\n {\n prev->next = current;\n prev = current;\n }\n nl1 = nl1->next;\n }\n while (nl2)\n {\n current = malloc(sizeof(struct ListNode));\n current->val = (nl2->val + jw) % 10;\n jw = (nl2->val + jw) / 10;\n current->next = NULL;\n if (!prev)\n {\n prev = current;\n head = current;\n }\n else\n {\n prev->next = current;\n prev = current;\n }\n nl2 = nl2->next;\n }\n if (jw)\n {\n current = 
malloc(sizeof(struct ListNode));\n current->val = jw;\n current->next = NULL;\n if (!prev)\n {\n prev = current;\n head = current;\n }\n else\n {\n prev->next = current;\n prev = current;\n }\n }\n return reverseList(head);\n}\n" }, { "alpha_fraction": 0.3629629611968994, "alphanum_fraction": 0.3876543343067169, "avg_line_length": 18.285715103149414, "blob_id": "91d038a342913150197727be732fefb1b336b66d", "content_id": "dc4483ef2d212265ee31768e9986e9acef42273a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 405, "license_type": "no_license", "max_line_length": 67, "num_lines": 21, "path": "/Arithmetic Slices.c", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "int numberOfArithmeticSlices(int* array, int size) {\n if (size <= 2)\n {\n return 0;\n }\n int answer = 0;\n int adder = 0;\n for (int i = 2;i < size; ++i)\n {\n if (array[i] - array[i - 1] == array[i - 1] - array[i - 2])\n {\n adder += 1;\n answer += adder;\n }\n else\n {\n adder = 0;\n }\n }\n return answer;\n}\n" }, { "alpha_fraction": 0.3691037595272064, "alphanum_fraction": 0.37735849618911743, "avg_line_length": 23.941177368164062, "blob_id": "ad0cc8b81abd475a624da9ede302a8737fed4208", "content_id": "f9b4e8e2ce1468185b1904d1be42b135b78b1d91", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 848, "license_type": "no_license", "max_line_length": 85, "num_lines": 34, "path": "/Longest Palindrome.cpp", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution\n{\npublic:\n int longestPalindrome(string s) {\n map<char, int> counter;\n int answer = 0;\n for (string::iterator it = s.begin();it != s.end(); ++it)\n {\n char ch = *it;\n if (counter.find(ch) == counter.end())\n {\n counter[ch] = 1;\n }\n else\n {\n counter[ch] += 1;\n }\n }\n for (map<char, int>::iterator it = counter.begin();it != counter.end(); ++it)\n {\n answer += (it->second / 2) * 2;\n it->second %= 2;\n }\n for (map<char, int>::iterator it = counter.begin();it != counter.end(); ++it)\n {\n if (it->second)\n {\n answer += 1;\n break;\n }\n }\n return answer;\n }\n};\n" }, { "alpha_fraction": 0.4277857542037964, "alphanum_fraction": 0.4311334192752838, "avg_line_length": 21.9780216217041, "blob_id": "8f7333680f05f24d81100eff95afa09d2a6afed2", "content_id": "1d48d9517d2a718f50e8546f4c7b5959dab59a68", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4182, "license_type": "no_license", "max_line_length": 94, "num_lines": 182, "path": "/LFU Cache.cpp", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "struct Item\n{\n Item(int _key, int _value): key(_key), value(_value), frequence(1), prev(NULL), next(NULL)\n {\n }\n int key;\n int value;\n int frequence;\n Item *prev;\n Item *next;\n};\n\nstruct LinkedList\n{\n LinkedList(): head(NULL), tail(NULL)\n {\n }\n\n void add_node(Item *newnode)\n {\n if (!head)\n {\n head = newnode;\n tail = newnode;\n }\n else\n {\n tail->next = newnode;\n newnode->prev = tail;\n tail = newnode;\n }\n }\n\n void rm_node(Item *targetnode)\n {\n Item *prev = targetnode->prev;\n Item *next = targetnode->next;\n if (prev)\n {\n prev->next = next;\n }\n else\n {\n head = next;\n }\n if (next)\n {\n next->prev = prev;\n }\n else\n {\n tail = prev;\n }\n }\n\n int pop_node()\n {\n if (!head)\n {\n return -1;\n }\n Item *answernode = head;\n Item *next = head->next;\n head = next;\n if (next)\n {\n next->prev = NULL;\n }\n 
else\n {\n tail = next;\n }\n return answernode->key;\n }\n\n bool empty()\n {\n return head == NULL;\n }\n\nprivate:\n Item *head;\n Item *tail;\n};\n\nclass LFUCache {\npublic:\n LFUCache(int _capacity): capacity(_capacity)\n {\n }\n \n int get(int key)\n {\n map<int, Item*>::iterator it = node_mapper.find(key);\n if (it == node_mapper.end())\n {\n return -1;\n }\n Item *node = it->second;\n int value = node->value;\n int frequence = node->frequence;\n map<int, LinkedList*>::iterator lit = list_mapper.find(frequence);\n LinkedList *list = lit->second;\n list->rm_node(node);\n if (list->empty())\n {\n list_mapper.erase(lit);\n delete list;\n }\n \n lit = list_mapper.find(frequence + 1);\n if (lit == list_mapper.end())\n {\n list_mapper[frequence + 1] = new LinkedList();\n }\n node->prev = node->next = NULL;\n node->frequence += 1;\n list_mapper[frequence + 1]->add_node(node);\n return value;\n }\n \n void set(int key, int value)\n {\n if (!capacity)\n {\n return ;\n }\n map<int, Item*>::iterator it = node_mapper.find(key);\n if (it != node_mapper.end())\n {\n Item *node = it->second;\n node->value = value;\n\n int frequence = node->frequence;\n map<int, LinkedList*>::iterator lit = list_mapper.find(frequence);\n LinkedList *list = lit->second;\n list->rm_node(node);\n if (list->empty())\n {\n list_mapper.erase(lit);\n delete list;\n }\n\n lit = list_mapper.find(frequence + 1);\n if (lit == list_mapper.end())\n {\n list_mapper[frequence + 1] = new LinkedList();\n }\n node->prev = node->next = NULL;\n node->frequence += 1;\n list_mapper[frequence + 1]->add_node(node);\n }\n else\n {\n Item *node = new Item(key, value);\n if (node_mapper.size() == capacity)\n {\n map<int, LinkedList*>::iterator lit = list_mapper.begin();\n LinkedList *list = lit->second;\n int rmkey = list->pop_node();\n if (list->empty())\n {\n list_mapper.erase(lit);\n delete list;\n }\n it = node_mapper.find(rmkey);\n delete it->second;\n node_mapper.erase(rmkey);\n }\n if (list_mapper.find(1) == list_mapper.end())\n {\n list_mapper[1] = new LinkedList();\n }\n list_mapper[1]->add_node(node);\n node_mapper[key] = node;\n }\n }\nprivate:\n int capacity;\n map<int, Item*> node_mapper;\n map<int, LinkedList*> list_mapper;\n};\n" }, { "alpha_fraction": 0.38964900374412537, "alphanum_fraction": 0.38964900374412537, "avg_line_length": 23.014286041259766, "blob_id": "5a908cb3eb723c9859c9464ef1365312e1c81aae", "content_id": "62f172cf3924aa8abcf79b80716580402306c46a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1681, "license_type": "no_license", "max_line_length": 59, "num_lines": 70, "path": "/Delete Node in a BST.cpp", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "/**\n * Definition for a binary tree node.\n * struct TreeNode {\n * int val;\n * TreeNode *left;\n * TreeNode *right;\n * TreeNode(int x) : val(x), left(NULL), right(NULL) {}\n * };\n */\nclass Solution {\npublic:\n TreeNode* deleteNode(TreeNode* root, int key) {\n TreeNode *parent = NULL;\n TreeNode *target = root;\n while (target && target->val != key)\n {\n parent = target;\n if (target->val >= key)\n {\n target = target->left;\n }\n else\n {\n target = target->right;\n }\n }\n if (!target)\n {\n return NULL;\n }\n TreeNode *right_left_most = target->right;\n if (!right_left_most)//no right subtree\n {\n if (!parent)\n {\n root = target->left;\n }\n else if (parent->val >= key)\n {\n parent->left = target->left;\n }\n else\n {\n parent->right = target->left;\n }\n }\n else//has 
right subtree\n {\n while (right_left_most->left)\n {\n right_left_most = right_left_most->left;\n }\n right_left_most->left = target->left;\n if (!parent)\n {\n root = target->right;\n }\n else if (parent->val >= key)\n {\n parent->left = target->right;\n }\n else\n {\n parent->right = target->right;\n }\n }\n delete target;\n return root;\n }\n};\n" }, { "alpha_fraction": 0.5581061840057373, "alphanum_fraction": 0.5638450384140015, "avg_line_length": 26.8799991607666, "blob_id": "563506ec9ecd3cd7a40ad30b2a454592d955a188", "content_id": "9af5f92b6d84d5393451ce3c737055e59ee64fcf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 697, "license_type": "no_license", "max_line_length": 92, "num_lines": 25, "path": "/Queue Reconstruction by Height.cpp", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "bool cmp_by_1(const pair<int, int>& i, const pair<int, int>& j)\n{\n return i.first > j.first;\n}\n\nbool cmp_by_2(const pair<int, int>& i, const pair<int, int>& j)\n{\n return i.second < j.second;\n}\n\nclass Solution\n{\npublic:\n vector<pair<int, int>> reconstructQueue(vector<pair<int, int>>& people) {\n vector<pair<int, int> >answer;\n sort(people.begin(), people.end(), cmp_by_2);\n stable_sort(people.begin(), people.end(), cmp_by_1);\n for (vector<pair<int, int> >::iterator it = people.begin();it != people.end(); ++it)\n {\n pair<int, int> p = *it;\n answer.insert(answer.begin() + it->second, p);\n }\n return answer;\n }\n};\n" }, { "alpha_fraction": 0.5528090000152588, "alphanum_fraction": 0.5595505833625793, "avg_line_length": 30.785715103149414, "blob_id": "4182bcae8bdef1c3ee97663747d1dadb2ae70ba4", "content_id": "424109cb2ebca49b31315928fbdd4264d70b9511", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 445, "license_type": "no_license", "max_line_length": 86, "num_lines": 14, "path": "/Count of Range Sum.cpp", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution {\npublic:\n int countRangeSum(vector<int>& nums, int lower, int upper) {\n long long offset=0,subsum=0;\n multiset<long long> ms;\n for(int i=0;i<nums.size();i++){\n offset-=nums[i];\n ms.insert(nums[i]+offset);\n auto itlow=ms.lower_bound(lower+offset),itup=ms.upper_bound(upper+offset);\n subsum+=distance(itlow,itup);\n }\n return (int)(subsum);\n }\n};\n" }, { "alpha_fraction": 0.2653266191482544, "alphanum_fraction": 0.2859296500682831, "avg_line_length": 22.41176414489746, "blob_id": "5baf2304779ffdbdb4441bdd85c0fc251237e4d9", "content_id": "bd33d9c948b17979611a85c2ed827abb72ffb1dd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1990, "license_type": "no_license", "max_line_length": 70, "num_lines": 85, "path": "/Third Maximum Number.cpp", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution {\npublic:\n int thirdMax(vector<int>& nums0) {\n set<int> st;\n for (int i = 0;i < nums0.size(); ++i)\n {\n st.insert(nums0[i]);\n }\n vector<int> nums;\n for (set<int>::iterator it = st.begin(); it != st.end(); ++it)\n {\n nums.push_back(*it);\n }\n if (nums.size() == 1)\n {\n return nums[0];\n }\n if (nums.size() == 2)\n {\n return nums[0] > nums[1]? 
nums[0]: nums[1];\n }\n int big, mid, small;\n if (nums[0] >= nums[1] && nums[0] >= nums[2])\n {\n big = nums[0];\n if (nums[1] > nums[2])\n {\n mid = nums[1];\n small = nums[2];\n }\n else\n {\n mid = nums[2];\n small = nums[1];\n }\n }\n else if (nums[1] >= nums[0] && nums[1] >= nums[2])\n {\n big = nums[1];\n if (nums[0] > nums[2])\n {\n mid = nums[0];\n small = nums[2];\n }\n else\n {\n mid = nums[2];\n small = nums[0];\n }\n }\n else\n {\n big = nums[2];\n if (nums[0] > nums[1])\n {\n mid = nums[0];\n small = nums[1];\n }\n else\n {\n mid = nums[1];\n small = nums[0];\n }\n }\n for (int i = 3;i < nums.size(); ++i)\n {\n if (nums[i] >= big)\n {\n small = mid;\n mid = big;\n big = nums[i];\n }\n else if (nums[i] >= mid)\n {\n small = mid;\n mid = nums[i];\n }\n else if (nums[i] >= small)\n {\n small = nums[i];\n }\n }\n return small;\n }\n};\n" }, { "alpha_fraction": 0.29885056614875793, "alphanum_fraction": 0.31243470311164856, "avg_line_length": 24.864864349365234, "blob_id": "c97555bdf0a88c21628d882d3d15bfe22f67e562", "content_id": "ce177d12dff11241ab7571b06345c54c5cf8c8ff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 957, "license_type": "no_license", "max_line_length": 57, "num_lines": 37, "path": "/Wiggle Subsequence.cpp", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution {\npublic:\n int wiggleMaxLength(vector<int>& nums) {\n if (!nums.size())\n {\n return 0;\n }\n vector<int> up(nums.size());\n vector<int> down(nums.size());\n //init\n up[0] = down[0] = 1;\n //dynamic\n for (int i = 1;i < nums.size(); ++i)\n {\n int num = nums[i];\n //assign for up[i]\n up[i] = up[i - 1];\n for (int j = 0;j < i; ++j)\n {\n if (nums[j] < num && down[j] + 1 > up[i])\n {\n up[i] = down[j] + 1;\n }\n }\n //assign for down[i]\n down[i] = down[i - 1];\n for (int j = 0;j < i; ++j)\n {\n if (nums[j] > num && up[j] + 1 > up[i])\n {\n down[i] = up[j] + 1;\n }\n }\n }\n return max(up.back(), down.back());\n }\n};\n" }, { "alpha_fraction": 0.5315656661987305, "alphanum_fraction": 0.5391414165496826, "avg_line_length": 27.285715103149414, "blob_id": "c8a8a14ed7371f7f33f448ee703a438eb00481ba", "content_id": "ac259d7ad5dd0ae9faa6eff1369534256d76b75e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 792, "license_type": "no_license", "max_line_length": 47, "num_lines": 28, "path": "/Repeated Substring Pattern.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "def getMayBeSubstring(string):\n if len(string) <= 1:\n return False\n str_start = string[0]\n str_end = string[-1]\n\n start_choose = []\n end_choose = []\n\n for i in range(len(string) / 2):\n if string[i] == str_start:\n start_choose.append(i)\n if string[i] == str_end:\n end_choose.append(i)\n for start in start_choose:\n for end in end_choose:\n if start > end:\n continue\n sub_size = end - start + 1\n sub_str = string[start: end + 1]\n counter = len(string) / sub_size\n if sub_str * counter == string:\n return True\n return False\n\nclass Solution(object):\n def repeatedSubstringPattern(self, string):\n return getMayBeSubstring(string)\n" }, { "alpha_fraction": 0.4795321524143219, "alphanum_fraction": 0.483208030462265, "avg_line_length": 21.931034088134766, "blob_id": "1b5832f68d04a22880273b3980209c161cbe1d18", "content_id": "0100d9b54a8602ada098bc3858dfbec5477f222e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6067, 
"license_type": "no_license", "max_line_length": 99, "num_lines": 261, "path": "/All O`one Data Structure.cpp", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "struct TwoWayListNode {\n TwoWayListNode(int _count): prev(NULL), next(NULL), count(_count)\n {\n }\n\n void addMember(const string& member)\n {\n members.insert(member);\n }\n\n void rmMember(const string& member)\n {\n set<string>::iterator it = members.find(member);\n if (it != members.end())\n {\n members.erase(it);\n }\n }\n\n bool hasMember() const\n {\n return members.empty() == false;\n }\n\n string getOneMember() const\n {\n set<string>::iterator it = members.begin();\n return *it;\n }\n\n const int count;\n set<string> members;\n TwoWayListNode* prev;\n TwoWayListNode* next;\n};\n\nstruct TwoWayList {\n TwoWayList(): head(NULL), tail(NULL)\n {\n }\n\n void addNodeInHead(TwoWayListNode* twln)\n {\n if (head)\n {\n head->prev = twln;\n }\n twln->next = head;\n head = twln;\n if (!tail)\n {\n tail = twln;\n }\n }\n\n void addNodeInMiddle(TwoWayListNode* twln)\n {\n\n }\n\n void addNodeBeforeYou(TwoWayListNode* target, TwoWayListNode* oldnode)\n {\n TwoWayListNode* prev = oldnode->prev;\n //prev => target => oldnode\n target->prev = prev;\n target->next = oldnode;\n oldnode->prev = target;\n if (prev)\n {\n prev->next = target;\n }\n else\n {\n head = target;\n }\n }\n\n void addNodeAfterYou(TwoWayListNode* target, TwoWayListNode* oldnode)\n {\n TwoWayListNode* next = oldnode->next;\n //oldnode => target => next\n target->prev = oldnode;\n target->next = next;\n oldnode->next = target;\n if (next)\n {\n next->prev = target;\n }\n else\n {\n tail = target;\n }\n }\n\n void deleteNode(TwoWayListNode* target)\n {\n TwoWayListNode* prev = target->prev;\n TwoWayListNode* next = target->next;\n if (prev)\n {\n prev->next = next;\n }\n else\n {\n head = next;\n }\n if (next)\n {\n next->prev = prev;\n }\n else\n {\n tail = prev;\n }\n }\n\n void incKey(const string& key, int oldvalue)\n {\n if (indexmap.find(oldvalue + 1) == indexmap.end())\n {\n indexmap[oldvalue + 1] = new TwoWayListNode(oldvalue + 1);\n TwoWayListNode* newnode = indexmap[oldvalue + 1];\n newnode->addMember(key);\n if (oldvalue)\n {\n TwoWayListNode* oldnode = indexmap[oldvalue];\n addNodeAfterYou(newnode, oldnode);\n }\n else\n {\n addNodeInHead(newnode);\n }\n }\n else\n {\n TwoWayListNode* newnode = indexmap[oldvalue + 1];\n newnode->addMember(key);\n }\n\n if (oldvalue)\n {\n TwoWayListNode* oldnode = indexmap[oldvalue];\n oldnode->rmMember(key);\n if (!oldnode->hasMember())\n {\n deleteNode(oldnode);\n indexmap.erase(oldvalue);\n }\n }\n }\n\n void decKey(const string& key, int oldvalue)\n {\n TwoWayListNode* oldnode = indexmap[oldvalue];\n oldnode->rmMember(key);\n\n if (oldvalue > 1)\n {\n if (indexmap.find(oldvalue - 1) == indexmap.end())\n {\n indexmap[oldvalue - 1] = new TwoWayListNode(oldvalue - 1);\n TwoWayListNode* newnode = indexmap[oldvalue - 1];\n newnode->addMember(key);\n \n addNodeBeforeYou(newnode, oldnode);\n }\n else\n {\n TwoWayListNode* newnode = indexmap[oldvalue - 1];\n newnode->addMember(key);\n }\n }\n\n if (!oldnode->hasMember())\n {\n deleteNode(oldnode);\n indexmap.erase(oldvalue);\n }\n\n }\n\n string getOneHeadStr() const\n {\n if (!head)\n {\n return \"\";\n }\n return head->getOneMember();\n }\n\n string getOneTailStr() const\n {\n if (!tail)\n {\n return \"\";\n }\n return tail->getOneMember();\n }\n\n TwoWayListNode* head;\n TwoWayListNode* tail;\n map<int, TwoWayListNode*> indexmap;\n};\n\nclass AllOne 
{\npublic:\n    /** Initialize your data structure here. */\n    AllOne() {\n    }\n    \n    /** Inserts a new key <Key> with value 1. Or increments an existing key by 1. */\n    void inc(string key) {\n        map<string, int>::iterator it = countmap.find(key);\n        if (it == countmap.end())\n        {\n            //insert a new key\n            countmap[key] = 1;\n            dlist.incKey(key, 0);\n        }\n        else\n        {\n            //inc an old key\n            int oldvalue = countmap[key];\n            int newvalue = oldvalue + 1;\n            countmap[key] = newvalue;\n            dlist.incKey(key, oldvalue);\n        }\n    }\n    \n    /** Decrements an existing key by 1. If Key's value is 1, remove it from the data structure. */\n    void dec(string key) {\n        map<string, int>::iterator it = countmap.find(key);\n        if (it != countmap.end())\n        {\n            int oldvalue = countmap[key];\n            int newvalue = oldvalue - 1;\n            countmap[key] = newvalue;\n            if (!newvalue)\n            {\n                //need to delete node\n                countmap.erase(it);\n            }\n            dlist.decKey(key, oldvalue);\n        }\n    }\n    \n    /** Returns one of the keys with maximal value. */\n    string getMaxKey() {\n        return dlist.getOneTailStr();\n    }\n    \n    /** Returns one of the keys with Minimal value. */\n    string getMinKey() {\n        return dlist.getOneHeadStr();\n    }\n\nprivate:\n    map<string, int> countmap;\n    TwoWayList dlist;\n};\n//Since each key's count changes by exactly +1 or -1, an insertion-sort-like adjustment on every update is a good fit; each operation is basically O(1)\n" }, { "alpha_fraction": 0.47791165113449097, "alphanum_fraction": 0.48393574357032776, "avg_line_length": 30.125, "blob_id": "91575b4a7a61e46613130e9288121ed107e057be", "content_id": "fae6b7c909e5074e086e4ee74425d12375d66bb2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 498, "license_type": "no_license", "max_line_length": 98, "num_lines": 16, "path": "/Minimum Moves to Equal Array Elements.c", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "int minMoves(int* nums, int numsSize) {\n    //finally, every num in nums = cell\n    //cell * numsSize = sum + (cell - min) * (numsSize - 1) ---> cell = sum - numsSize * min + min\n    //step = cell - min = sum - numsSize * min + min - min = sum - numsSize * min\n    int i, sum = 0, min = INT_MAX, step;\n    for (i = 0;i < numsSize; ++i)\n    {\n        if (nums[i] < min)\n        {\n            min = nums[i];\n        }\n        sum += nums[i];\n    }\n    step = sum - numsSize * min;\n    return step;\n}\n" }, { "alpha_fraction": 0.508571445941925, "alphanum_fraction": 0.5180952548980713, "avg_line_length": 29.882352828979492, "blob_id": "97685ac87aee3e7c9e77374fa1ac94b5bef5a7ac", "content_id": "bf5511c3ca9a9145276f21ef9d86106f59d0b1c9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1050, "license_type": "no_license", "max_line_length": 62, "num_lines": 34, "path": "/Find Right Interval.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "# Definition for an interval.\n# class Interval(object):\n#     def __init__(self, s=0, e=0):\n#         self.start = s\n#         self.end = e\n\ndef minRight(end, intervals):\n    head, rear = 0, len(intervals)\n    while head < rear:\n        mid = (head + rear) >> 1\n        if intervals[mid].start >= end:\n            if mid == head:\n                return mid\n            if intervals[mid - 1].start >= end:\n                rear = mid\n            else:\n                return mid\n        else:\n            head = mid + 1\n    return -1\n\nclass Solution(object):\n    def findRightInterval(self, intervals):\n        mydict = {}\n        for i, interval in enumerate(intervals):\n            mydict[interval] = i\n        intervals.sort(key = lambda x: x.start)\n        answer = [-1 for i in range(len(intervals))]\n        for interval in intervals:\n            i = minRight(interval.end, intervals)\n            myanswer = -1 if i == -1 else mydict[intervals[i]]\n            my_pos = 
mydict[interval]\n answer[my_pos] = myanswer\n return answer\n" }, { "alpha_fraction": 0.4198606312274933, "alphanum_fraction": 0.429006963968277, "avg_line_length": 32.51824951171875, "blob_id": "f75de1dae81a66243cadc55c07ab260d1275f7a7", "content_id": "a626745ae75be7327168ada4ca5ace5282c4a094", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4592, "license_type": "no_license", "max_line_length": 71, "num_lines": 137, "path": "/Reconstruct Original Digits from English.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution(object):\n def originalDigits(self, s):\n counter = {}\n answer_counter = {}\n for ch in s:\n counter[ch] = 1 if ch not in counter else (counter[ch] + 1)\n answer = ''\n #handle 0: z is unique\n zero_cnt = counter['z'] if 'z' in counter else 0\n answer_counter[0] = zero_cnt\n if zero_cnt:\n answer_counter[0] = zero_cnt\n del counter['z']\n counter['e'] -= zero_cnt\n if not counter['e']:\n del counter['e']\n counter['r'] -= zero_cnt\n if not counter['r']:\n del counter['r']\n counter['o'] -= zero_cnt\n if not counter['o']:\n del counter['o']\n #handle 2\n two_cnt = counter['w'] if 'w' in counter else 0\n answer_counter[2] = two_cnt\n if two_cnt:\n del counter['w']\n counter['t'] -= two_cnt\n if not counter['t']:\n del counter['t']\n counter['o'] -= two_cnt\n if not counter['o']:\n del counter['o']\n #handle 4\n four_cnt = counter['u'] if 'u' in counter else 0\n answer_counter[4] = four_cnt\n if four_cnt:\n del counter['u']\n counter['f'] -= four_cnt\n if not counter['f']:\n del counter['f']\n counter['o'] -= four_cnt\n if not counter['o']:\n del counter['o']\n counter['r'] -= four_cnt\n if not counter['r']:\n del counter['r']\n #handle 6\n six_cnt = counter['x'] if 'x' in counter else 0\n answer_counter[6] = six_cnt\n if six_cnt:\n del counter['x']\n counter['s'] -= six_cnt\n if not counter['s']:\n del counter['s']\n counter['i'] -= six_cnt\n if not counter['i']:\n del counter['i']\n #handle 8\n eight_cnt = counter['g'] if 'g' in counter else 0\n answer_counter[8] = eight_cnt\n if eight_cnt:\n del counter['g']\n counter['e'] -= eight_cnt\n if not counter['e']:\n del counter['e']\n counter['i'] -= eight_cnt\n if not counter['i']:\n del counter['i']\n counter['h'] -= eight_cnt\n if not counter['h']:\n del counter['h']\n counter['t'] -= eight_cnt\n if not counter['t']:\n del counter['t']\n #handle 1, 3, 5, 7, 9\n #handle 1\n one_cnt = counter['o'] if 'o' in counter else 0\n answer_counter[1] = one_cnt\n if one_cnt:\n del counter['o']\n counter['n'] -= one_cnt\n if not counter['n']:\n del counter['n']\n counter['e'] -= one_cnt\n if not counter['e']:\n del counter['e']\n #handle 3\n three_cnt = counter['h'] if 'h' in counter else 0\n answer_counter[3] = three_cnt\n if three_cnt:\n del counter['h']\n counter['t'] -= three_cnt\n if not counter['t']:\n del counter['t']\n counter['e'] -= (three_cnt * 2)\n if not counter['e']:\n del counter['e']\n counter['r'] -= three_cnt\n if not counter['r']:\n del counter['r']\n #handle 5\n five_cnt = counter['f'] if 'f' in counter else 0\n answer_counter[5] = five_cnt\n if five_cnt:\n del counter['f']\n counter['i'] -= five_cnt\n if not counter['i']:\n del counter['i']\n counter['e'] -= five_cnt\n if not counter['e']:\n del counter['e']\n counter['v'] -= five_cnt\n if not counter['v']:\n del counter['v']\n #handle 7\n sve_cnt = counter['s'] if 's' in counter else 0\n answer_counter[7] = sve_cnt\n if sve_cnt:\n del counter['s']\n counter['v'] -= 
sve_cnt\n if not counter['v']:\n del counter['v']\n counter['e'] -= (sve_cnt * 2)\n if not counter['e']:\n del counter['e']\n counter['n'] -= sve_cnt\n if not counter['n']:\n del counter['n']\n #handle 9\n nine_cnt = counter['i'] if 'i' in counter else 0\n answer_counter[9] = nine_cnt\n\n for i in range(10):\n if answer_counter[i]:\n answer += (answer_counter[i] * str(i))\n return answer\n" }, { "alpha_fraction": 0.5185929536819458, "alphanum_fraction": 0.5336683392524719, "avg_line_length": 31.09677505493164, "blob_id": "7dda620597501c1ecd9d7b42c419dc4989cc7ad7", "content_id": "2adffaf0282861ca4a1f0ab5a053f5ae8bce2a3a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 995, "license_type": "no_license", "max_line_length": 88, "num_lines": 31, "path": "/Minimum Genetic Mutation.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class SearchNode:\n def __init__(self, vertex, parent = None):\n self.vertex = vertex\n self.parent = parent\n self.height = 0 if not parent else (parent.height + 1)\n\ndef diff(seq1, seq2):\n diff_arrary = []\n i = 0\n for s1, s2 in zip(seq1, seq2):\n if s1 != s2:\n diff_arrary.append(i)\n i += 1\n return diff_arrary\n\nclass Solution(object):\n def minMutation(self, start, end, bank):\n bank = set(bank)\n current = SearchNode(start)\n queue = [current]\n while queue:\n current = queue.pop(0)\n diff_arrary = diff(current.vertex, end)\n if not diff_arrary:\n return current.height\n for pos in diff_arrary:\n next_vertex = current.vertex[:pos] + end[pos] + current.vertex[pos + 1:]\n if next_vertex in bank:\n next_node = SearchNode(next_vertex, current)\n queue.append(next_node)\n return -1\n" }, { "alpha_fraction": 0.311049222946167, "alphanum_fraction": 0.3862581253051758, "avg_line_length": 38.88888931274414, "blob_id": "f7ce5f78c439049ea132a7663ebfdf36a6d7d275", "content_id": "0c9f5561026c022ba94e1723cc1c4b8b68749b16", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1077, "license_type": "no_license", "max_line_length": 160, "num_lines": 27, "path": "/Convert a Number to Hexadecimal.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution(object):\n def positiveToHex(self, num):\n string = ''\n if num == 0:\n return '0'\n hexHash = {0: '0', 1: '1', 2: '2', 3: '3', 4: '4', 5: '5', 6: '6', 7: '7', 8: '8', 9: '9', 10: 'a', 11: 'b', 12: 'c', 13: 'd', 14: 'e', 15: 'f'}\n while num:\n string = hexHash[num % 16] + string\n num /= 16\n return string\n def toHex(self, num):\n if num < 0:\n num = 2**31 + num\n string = self.positiveToHex(num)\n if len(string) == 8:\n first = string[0]\n string = string[1:]\n head = int(first) + 8\n hexHash = {0: '0', 1: '1', 2: '2', 3: '3', 4: '4', 5: '5', 6: '6', 7: '7', 8: '8', 9: '9', 10: 'a', 11: 'b', 12: 'c', 13: 'd', 14: 'e', 15: 'f'}\n string = hexHash[head] + string\n else:\n while len(string) < 7:\n string = '0' + string\n string = '8' + string\n return string\n else:\n return self.positiveToHex(num)\n" }, { "alpha_fraction": 0.5670102834701538, "alphanum_fraction": 0.6159793734550476, "avg_line_length": 26.214284896850586, "blob_id": "c055258b41622698d7cacdaa86b75494346fcd3b", "content_id": "34a43304a3be0ad2f526b96a8ac6cd24cc42f22d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 388, "license_type": "no_license", "max_line_length": 45, "num_lines": 14, "path": "/Plus One.py", "repo_name": 
"colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution:\n # @param digits, a list of integer digits\n # @return a list of integer digits\n def plusOne(self, digits):\n\t\tsize=len(digits)\n\t\tnextbit = (digits[-1]+1)/10\n\t\tdigits[-1] = (digits[-1]+1)%10\n\t\tfor i in range(size-2,-1,-1):\n\t\t\tsub = digits[i]+nextbit\n\t\t\tdigits[i] = sub%10\n\t\t\tnextbit = sub/10\n\t\tif nextbit:\n\t\t\tdigits[0:1]=[nextbit,digits[0]]\n\t\treturn digits \n" }, { "alpha_fraction": 0.4894157350063324, "alphanum_fraction": 0.5038103461265564, "avg_line_length": 25.840909957885742, "blob_id": "3f374bb10d143d2d02c367d3a501d357cc007bd4", "content_id": "2f257accbddeaca906e0d074079c0ed2b3214933", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 1181, "license_type": "no_license", "max_line_length": 136, "num_lines": 44, "path": "/Wiggle Sort II.c", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "void swap(int* p, int* q)\n{\n int t=*p; *p=*q; *q=t;\n}\n\nvoid sort(int* nums, int begin, int end)\n{\n int l=begin, r=end;\n int v = nums[l+(r-l)/2];\n while(l <= r)\n {\n while(nums[l] < v) l++;\n while(nums[r] > v) r--;\n if(l <= r)\n {\n swap(nums+l, nums+r);\n l++; r--;\n }\n }\n if(begin < r)\n sort(nums, begin, r);\n if(l < end)\n sort(nums, l, end);\n}\n\n//AC - 40ms;\nvoid wiggleSort(int* nums, int size)\n{\n sort(nums, 0, size-1); //using quick sort to sort the array first;\n int *arr = (int*)malloc(sizeof(int)*size);\n for(int i = 0; i < size; i++)\n arr[i] = nums[i];\n int small= 0; //the first of smallers;\n int big = (size-1)/2+1; //the first of biggers;\n int index = size-1; //start to fill in reverse direction: from right to left;\n if(size%2 == 0) //if the size is even then the last should be indexed by odd size-1, so place the bigger one in odd position size-1;\n nums[index--] = arr[big++];\n while(index > -1)\n {\n nums[index--] = arr[small++];\n if(index > -1) //in case of \"underflow\";\n nums[index--] = arr[big++];\n }\n}\n" }, { "alpha_fraction": 0.3413597643375397, "alphanum_fraction": 0.37110480666160583, "avg_line_length": 31.090909957885742, "blob_id": "0c047bba7043973c7971641a4c36a2f2614641bd", "content_id": "f8f7111e9a7ad3c8f565a6f109c04d7bb082db1c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 706, "license_type": "no_license", "max_line_length": 50, "num_lines": 22, "path": "/Lexicographical Numbers.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution(object):\n def lexicalOrder(self, n):\n answer = []\n if n < 1:\n return answer\n answer = [1]\n while len(answer) < n:\n number = answer[-1]\n if number * 10 <= n:\n answer.append(number * 10)\n else:\n if number % 10 == 9:\n next = number + 1\n while next % 10 == 0:\n next /= 10\n if next <= n:\n answer.append(next)\n elif number + 1 <= n:\n answer.append(number + 1)\n else:\n answer.append(number / 10 + 1)\n return answer\n" }, { "alpha_fraction": 0.4883945882320404, "alphanum_fraction": 0.5077369213104248, "avg_line_length": 26.945945739746094, "blob_id": "10eb391d49e1017fd0ebc699718c7ddb3a326329", "content_id": "053b5dbb49594eb5e51bec0eac69675b53c23b0e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2156, "license_type": "no_license", "max_line_length": 64, "num_lines": 74, "path": "/Maximum XOR of Two Numbers in an Array.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", 
"text": "#Define Trie Start\nclass TrieNode:\n def __init__(self):\n self.left = None\n self.right = None\n def insert(self, ch):\n if ch == '0':\n if not self.left:\n self.left = TrieNode()\n return self.left\n else:\n if not self.right:\n self.right = TrieNode()\n return self.right\n def getXOR(self, step, newval):\n if step == 31:\n return ''\n if not self.left and not self.right:\n return '0'\n if newval[step] == '0':\n if self.right:\n return '1' + self.right.getXOR(step + 1, newval)\n else:\n return '0' + self.left.getXOR(step + 1, newval)\n else:\n if self.left:\n return '1' + self.left.getXOR(step + 1, newval)\n else:\n return '0' + self.right.getXOR(step + 1, newval)\n\nclass Trie:\n def __init__(self):\n self.root = TrieNode()\n def insertNode(self, newval):\n current = self.root\n for ch in newval:\n current = current.insert(ch)\n def getXOR(self, newval):\n bit_xor = self.root.getXOR(0, newval)\n return bit_xor\n\n#Define Trie End\n\ndef num2Bit(number):\n bit = ''\n while number:\n bit = str(number % 2) + bit\n number /= 2\n return '0' * (31 - len(bit)) + bit\n\ndef bit2Num(bit):\n num = 0\n for i, ch in enumerate(bit[::-1]):\n if ch == '1':\n num += (2 ** i)\n return num\n\nclass Solution:\n def findMaximumXOR(self, nums):\n bitarray = [num2Bit(num) for num in nums]\n trie = Trie()\n for bit in bitarray:\n trie.insertNode(bit)\n max_xor = 0\n for bit in bitarray:\n my_bit_xor = trie.getXOR(bit)\n my_xor = bit2Num(my_bit_xor)\n print my_xor\n if my_xor > max_xor:\n max_xor = my_xor\n return max_xor\n#先按照31位二进制为每个数字插入到Trie树,再用每个数的31位二进制在Trie树中按反序搜索,得到每个数的最大XOR\nins = Solution()\nprint ins.findMaximumXOR([3, 10, 5, 25, 2, 8])\n" }, { "alpha_fraction": 0.4698318541049957, "alphanum_fraction": 0.48367953300476074, "avg_line_length": 32.70000076293945, "blob_id": "d0f13a15c1cc860e37073e624caa539413ff8d59", "content_id": "de50f9699b0565ea8a7b32f2d25e8fdb2101e84c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1011, "license_type": "no_license", "max_line_length": 68, "num_lines": 30, "path": "/Binary Watch.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution(object):\n def bitCount(self, number):\n counter = 0\n while number:\n number &= (number - 1)\n counter += 1\n return counter\n def __init__(self):\n self.hash = {}\n for number in range(60):\n bitcount = self.bitCount(number)\n if bitcount not in self.hash:\n self.hash[bitcount] = []\n self.hash[bitcount].append(number)\n\n def readBinaryWatch(self, num):\n final = []\n for hour in range(num + 1):\n minu = num - hour\n if hour not in self.hash or minu not in self.hash:\n continue\n hourSelect = [h for h in self.hash[hour] if h < 12]\n minSelect = self.hash[minu]\n answer = [(h, m) for h in hourSelect for m in minSelect]\n\n for t in answer:\n hs = str(t[0]) + ':'\n ms = str(t[1]) if t[1] > 9 else ('0' + str(t[1]))\n final.append(hs + ms)\n return final\n" }, { "alpha_fraction": 0.6451612710952759, "alphanum_fraction": 0.6653226017951965, "avg_line_length": 26.55555534362793, "blob_id": "833afc390b49a6d75d68bcf1a32cd99a09ca6e45", "content_id": "ca1fe6ffa3db745b1e515a17ece1e5943188d58f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 248, "license_type": "no_license", "max_line_length": 38, "num_lines": 9, "path": "/Best Time to Buy and Sell Stock II.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution:\n # @param prices, a list of 
integer\n # @return an integer\n\tdef maxProfit(self, prices):\n\t\tsummary,size=0,len(prices)\n\t\tfor i in range(size-1):\n\t\t\tif prices[i+1]-prices[i]>0:\n\t\t\t\tsummary+=prices[i+1]-prices[i]\n\t\treturn summary\n" }, { "alpha_fraction": 0.43525180220603943, "alphanum_fraction": 0.4460431635379791, "avg_line_length": 26.799999237060547, "blob_id": "1c32ef8a69065f8ef5f9d20d8519985436fa58a1", "content_id": "d9c7f30baa9482e0c826d218b2f8fe315ffc84a5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 278, "license_type": "no_license", "max_line_length": 39, "num_lines": 10, "path": "/Find All Duplicates in an Array.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution(object):\n def findDuplicates(self, nums):\n answer = []\n for num in nums:\n pos = abs(num) - 1\n if nums[pos] > 0:\n nums[pos] *= -1\n else:\n answer.append(abs(num))\n return answer\n" }, { "alpha_fraction": 0.4265175759792328, "alphanum_fraction": 0.43769967555999756, "avg_line_length": 19.19354820251465, "blob_id": "45e8f7655f16db4efe4ddf1ed887dc569c160526", "content_id": "226ebeb9cee75fb957f7047216844f861bd5a45c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 626, "license_type": "no_license", "max_line_length": 52, "num_lines": 31, "path": "/Rotate Function.c", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "int calc_diff(int* array, int size, int sp, int sum)\n{\n return sum - array[sp] - (size - 1) * array[sp];\n}\n\nint maxRotateFunction(int* array, int size)\n{\n int sum = 0, i;\n for (i = 0;i < size; ++i)\n {\n sum += *(array + i);\n }\n int max_r = 0;\n for (i = 0;i < size; ++i)\n {\n max_r += i * (*(array + i));\n }\n int sp = size - 1;\n int prev_value = max_r;\n while (sp > 0)\n {\n int diff = calc_diff(array, size, sp, sum);\n prev_value += diff;\n if (prev_value > max_r)\n {\n max_r = prev_value;\n }\n --sp;\n }\n return max_r;\n}\n" }, { "alpha_fraction": 0.39266616106033325, "alphanum_fraction": 0.39724981784820557, "avg_line_length": 24.66666603088379, "blob_id": "861e9c44e375e8239640b9b061caa23d5efd7377", "content_id": "2fd6752f1aa58ae18254b4b2af306970cb0ead8b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1309, "license_type": "no_license", "max_line_length": 90, "num_lines": 51, "path": "/Battleships in a Board.cpp", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution {\npublic:\n void oneship(map<int, set<int> >& visited, int i, int j, vector<vector<char> >& board)\n {\n int v = i;\n while (v < board.size() && board[v][j] == 'X')\n {\n visited[v].insert(j);\n ++v;\n }\n int h = j;\n while (h < board[i].size() && board[i][h] == 'X')\n {\n visited[i].insert(h);\n ++h;\n }\n }\n\n bool isVisited(map<int, set<int> >& visited, int i, int j)\n {\n map<int, set<int> >::iterator it = visited.find(i);\n if (it == visited.end())\n {\n return false;\n }\n set<int>::iterator sit = it->second.find(j);\n return sit != it->second.end();\n }\n\n int countBattleships(vector<vector<char> >& board)\n {\n map<int, set<int> > visited;\n int answer = 0;\n if (!board.size() && !board[0].size())\n {\n return 0;\n }\n for (int i = 0;i < board.size(); ++i)\n {\n for (int j = 0;j < board[i].size(); ++j)\n {\n if (board[i][j] == 'X' && !isVisited(visited, i, j))\n {\n oneship(visited, i, j, board);\n answer += 1;\n }\n }\n }\n return answer;\n }\n};\n" }, { 
"alpha_fraction": 0.39774152636528015, "alphanum_fraction": 0.4127979874610901, "avg_line_length": 20.54054069519043, "blob_id": "f16e2fe8a1a6c5e9beac00aeb6e205fbe9c46f89", "content_id": "1d31e223e57e5a2831d675932b7bd0bee1754950", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 797, "license_type": "no_license", "max_line_length": 55, "num_lines": 37, "path": "/Fizz Buzz.cpp", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "#include <sstream>\n\ntemplate<class T>\nstd::string convert2String(const T& val)\n{\n ostringstream convert;\n convert << val;\n std::string result = convert.str();\n return result;\n}\n\nclass Solution {\npublic:\n vector<string> fizzBuzz(int n) {\n vector<string> answer;\n for (int i = 1;i < n + 1; ++i)\n {\n if (i % 3 == 0 && i % 5 == 0)\n {\n answer.push_back(\"FizzBuzz\");\n }\n else if (i % 3 == 0)\n {\n answer.push_back(\"Fizz\");\n }\n else if (i % 5 == 0)\n {\n answer.push_back(\"Buzz\");\n }\n else\n {\n answer.push_back(convert2String(i)); \n }\n }\n return answer;\n }\n};\n" }, { "alpha_fraction": 0.5278970003128052, "alphanum_fraction": 0.540772557258606, "avg_line_length": 20.18181800842285, "blob_id": "bee14ffa8747e4a958cf602466abe460435ec3ca", "content_id": "7ec796eaac7c87b91553672601f7c08d664a3c87", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 466, "license_type": "no_license", "max_line_length": 85, "num_lines": 22, "path": "/Sum of Left Leaves.c", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "int sumOfLeftLeaves0(struct TreeNode* root, bool isLeftChild) {\n if (!root)\n {\n return 0;\n }\n if (!root->left && !root->right)\n {\n if (isLeftChild)\n {\n return root->val;\n }\n else\n {\n return 0;\n }\n }\n return sumOfLeftLeaves0(root->left, true) + sumOfLeftLeaves0(root->right, false);\n}\n\nint sumOfLeftLeaves(struct TreeNode* root) {\n return sumOfLeftLeaves0(root, false);\n}\n" }, { "alpha_fraction": 0.41153082251548767, "alphanum_fraction": 0.4254473149776459, "avg_line_length": 28.58823585510254, "blob_id": "799381243b65714e5c3bd9158cf6db75c7624b02", "content_id": "8b5eec782f2382d3737bc7674eff83d60b9110c4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 503, "license_type": "no_license", "max_line_length": 58, "num_lines": 17, "path": "/Assign Cookies.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution(object):\n def findContentChildren(self, g, s):\n counter = 0\n g.sort()\n s.sort()\n g_ptr, s_ptr = 0, 0\n g_size, s_size = len(g), len(s)\n while g_ptr < g_size and s_ptr < s_size:\n current_g = g[g_ptr]\n while s_ptr < s_size and s[s_ptr] < current_g:\n s_ptr += 1\n if s_ptr == s_size:\n break\n counter += 1\n g_ptr += 1\n s_ptr += 1\n return counter\n" }, { "alpha_fraction": 0.6125574111938477, "alphanum_fraction": 0.6355283260345459, "avg_line_length": 23.185184478759766, "blob_id": "9a129d836efca633b986f94bd7a32c61910bfbc3", "content_id": "e8751032a5d188c9696262ea584df8618705550f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 653, "license_type": "no_license", "max_line_length": 61, "num_lines": 27, "path": "/Combinations.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution(object):\n\tdef getAnswer(self,answer):\n\t\tfinalAns=[]\n\t\tfor i,ans in enumerate(answer):\n\t\t\tif 
ans:\n\t\t\t\tfinalAns.append(i+1)\n\t\treturn finalAns\n\tdef combine(self,n,k):\n\t\tanswerList = []\n\t\tanswer,record = [0 for x in range(n)],[0 for x in range(n)]\n\t\tstep=0\n\t\trecord[step]=-1\n\t\twhile step>=0:\n\t\t\tsummary = sum(answer[:step])\n\t\t\trecord[step]+=1\n\t\t\twhile record[step]<2 and summary>k:\n\t\t\t\trecord[step]+=1\n\t\t\tif record[step]==2:\n\t\t\t\tstep-=1\n\t\t\telse:\n\t\t\t\tanswer[step]=record[step]\n\t\t\t\tif step==n-1 and summary+record[step]==k:\n\t\t\t\t\tanswerList.append(self.getAnswer(answer))\n\t\t\t\telif step<n-1:\n\t\t\t\t\tstep+=1\n\t\t\t\t\trecord[step]=-1\n\t\treturn answerList\n" }, { "alpha_fraction": 0.40874233841896057, "alphanum_fraction": 0.42561349272727966, "avg_line_length": 27.34782600402832, "blob_id": "f29266026ca9d84decaae7101f6b50575fab7020", "content_id": "2a40f3bbcb0be682557a8d31f8bf2cd2c6b14663", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1304, "license_type": "no_license", "max_line_length": 56, "num_lines": 46, "path": "/Find All Anagrams in a String.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "def hash_equal(hash1, hash2):\n if len(hash1) != len(hash2):\n return False\n for key in hash1:\n if key not in hash2 or hash2[key] != hash1[key]:\n return False\n return True\n\nclass Solution(object):\n def findAnagrams(self, s, p):\n answer = []\n phash = {}\n for ch in p:\n if ch not in phash:\n phash[ch] = 1\n else:\n phash[ch] += 1\n window_size = len(p)\n total_size = len(s)\n if total_size < window_size:\n return answer\n subhash = {}\n for ch in s[:window_size]:\n if ch not in subhash:\n subhash[ch] = 1\n else:\n subhash[ch] += 1\n i, j = 0, window_size - 1\n while i < total_size and j < total_size:\n if hash_equal(subhash, phash):\n answer.append(i)\n delete = s[i]\n if j + 1>= total_size:\n break\n add = s[j + 1]\n if add not in subhash:\n subhash[add] = 1\n else:\n subhash[add] += 1\n if subhash[delete] == 1:\n del subhash[delete]\n else:\n subhash[delete] -= 1\n i += 1\n j += 1\n return answer\n" }, { "alpha_fraction": 0.4625779688358307, "alphanum_fraction": 0.469854474067688, "avg_line_length": 29.0625, "blob_id": "2d6b771e1af4ba72efa87749eea293544547d5f5", "content_id": "d1af18ca1c5ca62d374c2786aeac44588316bf54", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 962, "license_type": "no_license", "max_line_length": 64, "num_lines": 32, "path": "/Remove K Digits.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution(object):\n def function(self, array, delete):\n current_size = len(array)\n candidate = delete + 1\n min_one = 10\n pos = -1\n for i in range(candidate):\n if array[i] < min_one:\n min_one = array[i]\n pos = i\n if candidate == len(array):\n real_delete = delete\n array = []\n else:\n real_delete = pos\n array = array[pos + 1: ]\n choose = min_one\n return real_delete, choose, array\n\n def removeKdigits(self, num, k):\n if len(num) <= k:\n return \"0\"\n array = [int(ch) for ch in num]\n answer = ''\n while k > 0:\n real_delete, choose, array = self.function(array, k)\n k -= real_delete\n answer += str(choose)\n if array:\n answer += (''.join([str(i) for i in array]))\n answer = int(answer)\n return str(answer)\n" }, { "alpha_fraction": 0.5149456262588501, "alphanum_fraction": 0.5163043737411499, "avg_line_length": 29.66666603088379, "blob_id": "2cc0e4adeb12d336e3628225af13eff851e79a55", "content_id": 
"2b8bc9c3357913c0bb9bb225b3ff3233d4be9b91", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 736, "license_type": "no_license", "max_line_length": 57, "num_lines": 24, "path": "/Path Sum III.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "# Definition for a binary tree node.\n# class TreeNode(object):\n# def __init__(self, x):\n# self.val = x\n# self.left = None\n# self.right = None\n\nclass Solution(object):\n def __init__(self):\n self.counter = 0\n def pathSum(self, root, sum):\n def backtrace(root):\n sub = []\n if not root:\n return sub\n left_sub = backtrace(root.left)\n right_sub = backtrace(root.right)\n sub.append(root.val)\n sub.extend([i + root.val for i in left_sub])\n sub.extend([i + root.val for i in right_sub])\n self.counter += sub.count(sum)\n return sub\n backtrace(root)\n return self.counter\n" }, { "alpha_fraction": 0.5466101765632629, "alphanum_fraction": 0.5706214904785156, "avg_line_length": 28.5, "blob_id": "c8554b76f699c240ee070d424bb15469aa1d4500", "content_id": "b40c945237a2870ea9104ad5faa8a7a09ebda3a7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 708, "license_type": "no_license", "max_line_length": 48, "num_lines": 24, "path": "/Non-overlapping Intervals.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "# Definition for an interval.\n# class Interval(object):\n# def __init__(self, s=0, e=0):\n# self.start = s\n# self.end = e\n\ndef mycmp(interval1, interval2):\n if interval1.end == interval2.end:\n return interval1.start - interval2.start\n return interval1.end - interval2.end\n\nclass Solution(object):\n def eraseOverlapIntervals(self, intervals):\n if len(intervals) <= 1:\n return 0\n intervals.sort(cmp = mycmp)\n current_end = -2**31\n counter = 0\n for interval in intervals:\n if interval.start >= current_end:\n current_end = interval.end\n else:\n counter += 1\n return counter\n" }, { "alpha_fraction": 0.37162160873413086, "alphanum_fraction": 0.4172297418117523, "avg_line_length": 28.600000381469727, "blob_id": "bd571f439333e8c07114ee65d8c7dc734f8d1ca3", "content_id": "a4bd6fe95597324d143a961c0dae5d9ae1010c3e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 592, "license_type": "no_license", "max_line_length": 50, "num_lines": 20, "path": "/Add Strings.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution(object):\n def addStrings(self, num1, num2):\n while len(num1) < len(num2):\n num1 = '0' + num1\n while len(num2) < len(num1):\n num2 = '0' + num2\n i, j = len(num1) - 1, len(num2) - 1\n jw = 0\n answer = []\n while i >= 0 and j >= 0:\n sub = int(num1[i]) + int(num2[j]) + jw\n jw = sub / 10\n sub = sub % 10\n answer.append(str(sub))\n i -= 1\n j -= 1\n if jw:\n answer.append(str(jw))\n answer.reverse()\n return ''.join(answer)\n" }, { "alpha_fraction": 0.516629695892334, "alphanum_fraction": 0.5454545617103577, "avg_line_length": 27.1875, "blob_id": "ffd028b39587e5ff90766b7ba27a4b3a477556b1", "content_id": "cdf888bc449c95432f3b60d9ce8fc592df5a95c0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 451, "license_type": "no_license", "max_line_length": 58, "num_lines": 16, "path": "/Elimination Game.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "LEFT_TO_RIGHT, RIGHT_TO_LEFT = 0, 1\n\ndef recursive(n, l_o_r):\n 
if n == 1:\n return 1\n if l_o_r == LEFT_TO_RIGHT:\n return recursive(n / 2, RIGHT_TO_LEFT) * 2\n else:#RIGHT_TO_LEFT\n if n % 2 == 1:\n return recursive(n / 2, LEFT_TO_RIGHT) * 2\n else:\n return recursive(n / 2, LEFT_TO_RIGHT) * 2 - 1\n\nclass Solution(object):\n def lastRemaining(self, n):\n return recursive(n, LEFT_TO_RIGHT)\n" }, { "alpha_fraction": 0.40061160922050476, "alphanum_fraction": 0.43272170424461365, "avg_line_length": 30.14285659790039, "blob_id": "882949445b7487b8e4441eff3f79a4e5a29c6c13", "content_id": "553eac54d5d497f77f5f13efd75e78ed4673a609", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 654, "license_type": "no_license", "max_line_length": 74, "num_lines": 21, "path": "/Nth Digit.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "class Solution(object):\n def __init__(self):\n self.slots = [0]\n for i in range(1, 11):\n self.slots.append(self.slots[i - 1] + 9 * (10 ** (i - 1)) * i)\n def findNthDigit(self, n):\n if n < 10:\n return n\n bits = 0\n for i, slot in enumerate(self.slots):\n if n <= self.slots[i]:\n bits = i\n break\n leave = n - self.slots[bits - 1]\n prev = leave / bits - 1 + 10 ** (bits - 1)\n add = leave % bits\n next = prev + 1\n if add == 0:\n return int(str(prev)[-1])\n else:\n return int(str(next)[add - 1])\n" }, { "alpha_fraction": 0.4771519601345062, "alphanum_fraction": 0.48352816700935364, "avg_line_length": 27.515151977539062, "blob_id": "b7f01e910d9efa351411662881f3215c44cd1cf6", "content_id": "dacf6684f86fc8ceeaa25d79bcc6f128bfdee09c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 941, "license_type": "no_license", "max_line_length": 102, "num_lines": 33, "path": "/Minimum Number of Arrows to Burst Balloons.cpp", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "bool cmp(const pair<int, int>& i, const pair<int, int>& j)\n{\n return i.first < j.first;\n}\n\nclass Solution {\npublic:\n int findMinArrowShots(vector<pair<int, int> >& points)\n {\n int arraw_count = 0;\n if (points.size() == 0)\n {\n return arraw_count;\n }\n sort(points.begin(), points.end(), cmp);\n pair<int, int> currentnode = points[0];\n for (vector<pair<int, int> >::iterator it = points.begin() + 1;it != points.end(); ++it)\n {\n if (it->first <= currentnode.second)\n {\n currentnode.first = it->first;\n currentnode.second = it->second < currentnode.second ? 
it->second: currentnode.second;\n }\n else//no intersection\n {\n arraw_count += 1;\n currentnode = *it;\n } \n }\n arraw_count += 1;\n return arraw_count;\n }\n};\n" }, { "alpha_fraction": 0.45047733187675476, "alphanum_fraction": 0.45942720770835876, "avg_line_length": 29.472726821899414, "blob_id": "ad2abc7e9af7018e1578bea9ae43278d46de8479", "content_id": "1f4d6a962c2612b4fb0015115452f9e37c41399a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1676, "license_type": "no_license", "max_line_length": 60, "num_lines": 55, "path": "/Serialize and Deserialize BST.py", "repo_name": "colinsongf/LeetCode", "src_encoding": "UTF-8", "text": "# Definition for a binary tree node.\n# class TreeNode(object):\n# def __init__(self, x):\n# self.val = x\n# self.left = None\n# self.right = None\n\nclass Codec:\n def serialize(self, root):\n serializeArray = []\n def prefix(root):\n if not root:\n serializeArray.append('#')\n return\n serializeArray.append(str(root.val))\n prefix(root.left)\n prefix(root.right)\n prefix(root)\n return ' '.join(serializeArray)\n\n def deserialize(self, data):\n serializeArray = data.split(' ')\n if serializeArray[0] == '#':\n return None\n root = TreeNode(serializeArray[0])\n stack = [root]\n finished = [0]\n for ch in serializeArray[1:]:\n current = stack[-1]\n status = finished[-1]\n if ch != '#':\n newnode = TreeNode(ch)\n if status == 0:\n current.left = newnode\n finished[-1] = 1\n elif status == 1:\n current.right = newnode\n stack.pop()\n finished.pop()\n stack.append(newnode)\n finished.append(0)\n else:\n if status == 0:\n current.left = None\n finished[-1] = 1\n elif status == 1:\n current.right = None\n stack.pop()\n finished.pop()\n return root\n \n\n# Your Codec object will be instantiated and called as such:\n# codec = Codec()\n# codec.deserialize(codec.serialize(root))\n" } ]
51
terabithian007/telegram_image_classify_bot
https://github.com/terabithian007/telegram_image_classify_bot
2444c3314ef4d16d94722e54cc25a77bc7282e59
4482b6073b0a6271ea203af1adfc5b0f978c7f45
fa1da2d3549e3ccc9d9d4e0631a374be69916ea9
refs/heads/master
2020-08-01T12:29:11.166199
2019-09-26T04:15:01
2019-09-26T04:15:01
210,997,185
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.602486789226532, "alphanum_fraction": 0.6277317404747009, "avg_line_length": 36.39436721801758, "blob_id": "f98e990759c40ccb9d59b1f0b18e9621dfe6353a", "content_id": "faf9cd25cd36dc489295969c5bedc68b7f83839f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2654, "license_type": "no_license", "max_line_length": 117, "num_lines": 71, "path": "/telegram_classification.py", "repo_name": "terabithian007/telegram_image_classify_bot", "src_encoding": "UTF-8", "text": "from torchvision import models,transforms\nimport torch\nfrom PIL import Image\nfrom telegram.ext import Updater, CommandHandler,MessageHandler,BaseFilter,Filters\nimport requests\nimport re\nimport numpy as np\nimport os\n\n#os.environ['TORCH_HOME'] = '~//Documents//CodeBase//pretrained_weights'\n\ndef load(update,context): \n update.message.reply_text(\"Loading the Neural Network\")\n classifier = models.resnet101(pretrained=True)\n context.user_data[0] = classifier\n update.message.reply_text(\"Loading complete\") \n\ndef process_image(update,context):\n chat_id = update.message.chat_id\n file_id = update.message.photo[-1].file_id \n print(\"Time to classify\") \n input_image = context.bot.getFile(file_id) \n input_image.download('test_image.jpg')\n \n transform = transforms.Compose([ #[1]\n transforms.Resize(256), #[2]\n transforms.CenterCrop(224), #[3]\n transforms.ToTensor(), #[4]\n transforms.Normalize( #[5]\n mean=[0.485, 0.456, 0.406], #[6]\n std=[0.229, 0.224, 0.225] #[7]\n )])\n\n img = Image.open(\"test_image.jpg\")\n img_t = transform(img)\n batch_t = torch.unsqueeze(img_t, 0)\n\n classifier = context.user_data[0]\n classifier.eval()\n out = classifier(batch_t)\n percentage = torch.nn.functional.softmax(out, dim=1)[0] * 100 \n\n # Load Imagenet Synsets\n with open('imagenet_synsets.txt', 'r') as f:\n synsets = f.readlines()\n synsets = [x.strip() for x in synsets]\n splits = [line.split(' ') for line in synsets]\n key_to_classname = {spl[0]:' '.join(spl[1:]) for spl in splits}\n\n with open('imagenet_classes.txt', 'r') as f:\n class_id_to_key = f.readlines()\n class_id_to_key = [x.strip() for x in class_id_to_key]\n\n # Make predictions\n _, indices = torch.sort(out, descending=True)\n for idx in indices[0][:5] :\n class_key = class_id_to_key[idx]\n classname = key_to_classname[class_key]\n print(\"The image is of {} with confidence {} %\".format(classname,percentage[idx].item()))\n update.message.reply_text(\"The image is of {} with confidence {} %\".format(classname,percentage[idx].item()))\n\ndef main(): \n updater = Updater('943315344:AAEwI_7FMvQK7NDAcekjFlRM6a4pB6JhIZo',use_context=True) \n dp = updater.dispatcher \n dp.add_handler(CommandHandler('load',load,pass_user_data=True)) \n dp.add_handler(MessageHandler(filters=Filters.photo,callback=process_image, pass_user_data=True)) \n updater.start_polling()\n updater.idle()\n\nif __name__ == '__main__':\n main()" }, { "alpha_fraction": 0.7843137383460999, "alphanum_fraction": 0.813725471496582, "avg_line_length": 50, "blob_id": "8d4edb348cf04905b11b49399fa640cce436556a", "content_id": "898518a642d7ecfdb0b7d89500d82ca57577dff8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 102, "license_type": "no_license", "max_line_length": 71, "num_lines": 2, "path": "/README.md", "repo_name": "terabithian007/telegram_image_classify_bot", "src_encoding": "UTF-8", "text": "# telegram_image_classify_bot\nA simple bot that uses ResNet101 to 
classify any image that it receives\n" } ]
2
rishabhdhenkawat/epitop_pred
https://github.com/rishabhdhenkawat/epitop_pred
0c5209fac984df0b8f28a038949bb78a5f91dad9
69a5a6c99ed5a3cde8dfe1adfe0b9b4a52d94f8a
3994e6d36dd8d5655950fdcd7d01507dae1ecc82
refs/heads/master
2023-05-05T14:44:19.348254
2021-05-24T08:57:10
2021-05-24T08:57:10
370,288,614
0
0
CC0-1.0
2021-05-24T08:56:56
2020-11-30T15:25:19
2020-11-30T15:25:17
null
[ { "alpha_fraction": 0.6376704573631287, "alphanum_fraction": 0.6497420072555542, "avg_line_length": 31.88484764099121, "blob_id": "8aa4f525ab30264c7b37cba0da91be4a60944e60", "content_id": "ce97414f9a7d13f61e9caea7c30a31ed460cc03b", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10852, "license_type": "permissive", "max_line_length": 139, "num_lines": 330, "path": "/utils/generate_binary_double_clustered_training_sets.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "import os\nimport pandas as pd\nimport re\nimport numpy as np\nnp.random.seed(42)\nimport sys\n\nsys.path.insert(0, '/home/go96bix/projects/epitop_pred/')\nfrom utils import DataGenerator\nimport pickle\nimport glob\nfrom sklearn.metrics.pairwise import cosine_similarity\n\n\"\"\"\nembedd all proteins\ngehe uber blast treffer\nmache zwei dicts (epi_dict; non_epi_dict)\n jeweils als value array [tuple (start stop) von window]\ngehe ueber epi_dict \n\tper value slice window embedding into non_epi_area_protein and epi\ngehe ueber non_epi_dict\n\tslice out non_epi\n\"\"\"\n\n# SETTINGS\nslicesize = 49\nshift = 22\nglobal_embedding_bool = True\n# non_epi_in_protein_bool = False\n# circular filling == if windows with to short entries (frame goes over start or end of protein) fill with AAs of start or end of protein\nuse_circular_filling = False\nbig_set = True\n\ncwd = \"/home/go96bix/projects/epitop_pred\"\ndirectory = os.path.join(cwd, \"data_generator_bepipred_binary_double_cluster_0.8_0.5_seqID\")\n# directory = os.path.join(cwd, \"data_generator_bepipred_binary_0.8_seqID_checked_output\")\n# input_dir = \"/home/go96bix/projects/raw_data/bepipred_proteins_with_marking\"\n# input_dir = \"/home/go96bix/projects/raw_data/bepipred_proteins_with_marking_0.8_seqID\"\n\ninput_cluster_file = \"/home/go96bix/projects/raw_data/clustered_protein_seqs/my_double_cluster0.8_05/0.5_seqID.fasta.clstr\"\n# input_cluster_file = \"/home/go96bix/projects/raw_data/clustered_protein_seqs/my_cluster/0.8_seqID.fasta.clstr\"\ndirectory_fasta = \"/home/go96bix/projects/raw_data/bepipred_proteins_with_marking\"\nnumber_proteins = 1798\n\ndef readFasta_extended(file):\n\t## read fasta file\n\theader = \"\"\n\tseq = \"\"\n\tvalues = []\n\twith open(file, \"r\") as infa:\n\t\tfor index, line in enumerate(infa):\n\t\t\tline = line.strip()\n\t\t\tif index == 0:\n\t\t\t\theader = line[1:].split(\"\\t\")\n\t\t\telif index == 1:\n\t\t\t\tseq += line\n\t\t\telif index == 2:\n\t\t\t\tpass\n\t\t\telse:\n\t\t\t\tvalues = line.split(\"\\t\")\n\treturn header, seq, values\n\n\ndef readFasta(file):\n\t## read fasta file\n\tseq = \"\"\n\twith open(file, \"r\") as infa:\n\t\tfor line in infa:\n\t\t\tline = line.strip()\n\t\t\tif re.match(\">\", line):\n\t\t\t\tpass\n\t\t\telse:\n\t\t\t\tseq += line\n\treturn seq\n\n\ndef prepare_sequences(seq_local, header):\n\tif use_circular_filling:\n\t\tprotein_pad_local = list(seq_local[-shift:] + seq_local + seq_local[0:shift])\n\telse:\n\t\tprotein_pad_local = [\"-\"] * (seq_len + (shift * 2))\n\n\tif global_embedding_bool:\n\t\tif big_set:\n\t\t\tfile_name = header[0].split(\"_\")\n\t\t\tassert len(file_name) == 4, f\"filename of unexpected form, expected epi_1234_100_123 but got {header[0]}\"\n\t\t\tfile_name = file_name[0] + \"_\" + file_name[1]\n\t\t\tseq_global_tuple = pickle.load(\n\t\t\t\topen(os.path.join(\"/home/go96bix/projects/raw_data/embeddings_bepipred_samples\",\n\t\t\t\t file_name + \".pkl\"), 
\"rb\"))\n\t\t\tseq_global = seq_global_tuple[1]\n\t\t\t# sample_embedding = elmo_embedder.seqvec.embed_sentence(seq_local)\n\t\t\t# sample_embedding = sample_embedding.sum(axis=0)\n\t\t\t# seq_global2 = sample_embedding\n\t\telse:\n\t\t\tprint(seq_local)\n\t\t\tsample_embedding = elmo_embedder.seqvec.embed_sentence(seq_local)\n\t\t\tsample_embedding = sample_embedding.sum(axis=0)\n\t\t\tseq_global = sample_embedding\n\n\t\tprotein_pad_global = np.zeros((seq_len + (shift * 2), 1024), dtype=np.float32)\n\t\tif use_circular_filling:\n\t\t\tprotein_pad_global[0:shift] = seq_global[-shift:]\n\t\t\tprotein_pad_global[-shift:] = seq_global[0:shift]\n\n\tfor i in range(0, seq_len, 1):\n\t\tprotein_pad_local[i + (shift)] = seq_local[i]\n\n\t\tif global_embedding_bool:\n\t\t\tprotein_pad_global[i + (shift)] = seq_global[i]\n\t\t\t# print(cosine_similarity([seq_global[i], seq_global2[i]]))\n\n\tprotein_pad_local = \"\".join(protein_pad_local)\n\t# epitope_arr_local.append([epitope, values, header, file])\n\n\t# if global_embedding_bool:\n\t# \tepitope_arr_global.append([protein_pad_global, values, header, file])\n\n\tif global_embedding_bool:\n\t\treturn protein_pad_local, protein_pad_global\n\telse:\n\t\treturn protein_pad_local\n\n\n\n\n\nelmo_embedder = DataGenerator.Elmo_embedder()\n\nepitope_arr_local = []\nepitope_arr_global = []\nnon_epitope_arr_local = []\nnon_epitope_arr_global = []\nnon_epi_part_in_protein_arr_local = []\nnon_epi_part_in_protein_arr_global = []\n\n# blast_df = pd.DataFrame.from_csv(\n# \t\"/home/go96bix/projects/epitop_pred/utils/bepipred_samples_like_filtered_blast_table.tsv\",\n# \tsep=\"\\t\", index_col=None)\n\nepi_protein_hits_dict = {}\nnone_epi_protein_hits_dict = {}\n\n# protein_file_list = np.array(glob.glob(f\"{input_dir}/*.fasta\"))\n# shuffle = np.random.permutation(range(len(protein_file_list)))\n# protein_file_list = protein_file_list[shuffle]\nprotein_seq_dict = {}\n\nX_train_global = []\nX_train_local = []\nX_val_global = []\nX_val_local = []\nX_test_global = []\nX_test_local = []\nY_train = []\nY_val = []\nY_test = []\ntest_roc = []\ndo_val = True\ndo_test = False\n\n# min_samples = (len(protein_file_list) // 10) * 2\nmin_samples = (number_proteins // 10) * 2\nwith open(input_cluster_file, \"r\") as infile:\n\tindex_prot = 0\n\tcounter_header=0\n\tallLines = infile.read()\n\tclusters = allLines.split(\">Cluster\")\n\tshuffle = np.random.permutation(range(len(clusters)))\n\tclusters = np.array(clusters)[shuffle]\n\tfor cluster in clusters:\n\t\tY = []\n\t\tX_local = []\n\t\tX_global = []\n\t\tfiles = []\n\t\tif len(cluster) > 0:\n\t\t\tproteins = cluster.strip().split(\"\\n\")\n\t\t\tfor index, protein in enumerate(proteins):\n\t\t\t\tif index==0:\n\t\t\t\t\tcluster_name=\"Cluster_\"+protein\n\t\t\t\telse:\n\t\t\t\t\tindex_prot += 1\n\t\t\t\t\tfilename = protein.split(\" \")[1][1:-3] + \".fasta\"\n\t\t\t\t\tfile = os.path.join(directory_fasta, filename)\n\t\t\t\t\tfiles.append(file)\n\t\t\t\t\theaders, seq_local, values = readFasta_extended(file)\n\t\t\t\t\tcounter_header += len(headers)\n\t\t\t\t\t# for index, file in enumerate(protein_file_list):\n\n\t\t\t\t\t# protein_seq_dict.update({file:seq_local})\n\n\t\t\t\t\tseq_local = seq_local.upper()\n\t\t\t\t\tseq_len = len(seq_local)\n\t\t\t\t\tif seq_len < 25:\n\t\t\t\t\t\tcontinue\n\t\t\t\t\tfor head in headers:\n\n\t\t\t\t\t\tif global_embedding_bool:\n\t\t\t\t\t\t\tprotein_pad_local, protein_pad_global = prepare_sequences(seq_local, [head])\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tprotein_pad_local = 
prepare_sequences(seq_local, [head])\n\n\t\t\t\t\t\thead_arr = head.split(\"_\")\n\t\t\t\t\t\tname = head_arr[0] + \"_\" + head_arr[1]\n\t\t\t\t\t\tstart = int(head_arr[2]) + shift\n\t\t\t\t\t\tstop = int(head_arr[3]) + shift\n\t\t\t\t\t\tmedian_pos = (start+stop-1)//2\n\t\t\t\t\t\tslice_start = median_pos - slicesize // 2\n\t\t\t\t\t\tslice_stop = slice_start + slicesize\n\n\t\t\t\t\t\tif head.startswith(\"Pos\"):\n\t\t\t\t\t\t\thits = epi_protein_hits_dict.get(file, [])\n\n\t\t\t\t\t\t\tif (start, stop) not in hits:\n\t\t\t\t\t\t\t\thits.append((start, stop))\n\t\t\t\t\t\t\t\tY.append(\"true_epitope\")\n\t\t\t\t\t\t\t\tif global_embedding_bool:\n\t\t\t\t\t\t\t\t\tX_global.append([protein_pad_global[slice_start:slice_stop], start, stop, cluster_name, head])\n\t\t\t\t\t\t\t\tX_local.append([protein_pad_local[slice_start:slice_stop], start, stop, cluster_name, head])\n\n\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\tprint(f\"duplicated: {head}\")\n\n\t\t\t\t\t\t\t# save all epitopes\n\t\t\t\t\t\t\tepi_protein_hits_dict.update({file: hits})\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\thits = none_epi_protein_hits_dict.get(file, [])\n\t\t\t\t\t\t\tif (start, stop) not in hits:\n\t\t\t\t\t\t\t\thits.append((start, stop))\n\t\t\t\t\t\t\t\tY.append(\"non_epitope\")\n\t\t\t\t\t\t\t\tif global_embedding_bool:\n\t\t\t\t\t\t\t\t\tX_global.append([protein_pad_global[slice_start:slice_stop],start,stop,cluster_name,head])\n\t\t\t\t\t\t\t\tX_local.append([protein_pad_local[slice_start:slice_stop],start,stop,cluster_name,head])\n\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\tprint(f\"duplicated: {head}\")\n\n\t\t\t\t\t\t\tnone_epi_protein_hits_dict.update({file: hits})\n\n\t\t\tif do_val:\n\t\t\t\tif index_prot > min_samples:\n\t\t\t\t\tdo_val = False\n\t\t\t\t\tdo_test = True\n\t\t\t\telse:\n\t\t\t\t\tif global_embedding_bool:\n\t\t\t\t\t\tX_val_global.extend(X_global)\n\t\t\t\t\tX_val_local.extend(X_local)\n\t\t\t\t\tY_val.extend(Y)\n\t\t\tif do_test:\n\t\t\t\tif index_prot > 2*min_samples:\n\t\t\t\t\tdo_val = False\n\t\t\t\t\tdo_test = False\n\t\t\t\telse:\n\t\t\t\t\ttest_roc.extend(files)\n\t\t\t\t\tif global_embedding_bool:\n\t\t\t\t\t\tX_test_global.extend(X_global)\n\t\t\t\t\tX_test_local.extend(X_local)\n\t\t\t\t\tY_test.extend(Y)\n\t\t\tif not do_test and not do_val:\n\t\t\t\tif global_embedding_bool:\n\t\t\t\t\tX_train_global.extend(X_global)\n\t\t\t\tX_train_local.extend(X_local)\n\t\t\t\tY_train.extend(Y)\n\n\t\t\tprint(counter_header)\n# for file in X_train_global:\n# \tarr = file[0]\n# \tstart = file[1]\n# \tstop = file[2]\n# \tpath = file[3]\n# \theader = file[4]\n# \theaders, seq_local, values = readFasta_extended(path)\n\t# assert header in headers, f\"header {header} not in {headers}, for file {path}\"\n\t# assert f\"{start-shift}_{stop-shift}\" in header, f\"{start}_{stop} not in {header} for file {path}\"\n\nfor i in np.unique(Y_train):\n\tdirectory2 = directory + f\"/train/{i}\"\n\tif not os.path.exists(directory2):\n\t\tos.makedirs(directory2)\n\nX_test_local = np.array(X_test_local)\nX_test_global = np.array(X_test_global)\nX_val_local = np.array(X_val_local)\nX_val_global = np.array(X_val_global)\nX_train_local = np.array(X_train_local)\nX_train_global = np.array(X_train_global)\n\nY_test = np.array(Y_test)\nY_val = np.array(Y_val)\nY_train = np.array(Y_train)\n\nwith open(directory + '/samples_for_ROC.csv', \"w\") as outfile:\n\tfor sample in test_roc:\n\t\toutfile.write(f\"{sample}\\n\")
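\n\n# added note (editorial, not in the original script): the cluster loop above\n# assigns whole clusters to the splits; with number_proteins = 1798,\n# min_samples = (1798 // 10) * 2 = 358, so validation receives roughly the\n# first 358 proteins, test the next 358, and the remainder goes to training.\nfor index, sample in enumerate(Y_train):\n\tdirectory2 = directory + f\"/train/{sample}/{index}.csv\"\n\tf = 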
open(directory2, 'w')\n\tf.write(\n\t\tf\"{X_train_local[index][0]}\\t{X_train_local[index][1]}\\t{X_train_local[index][2]}\\t{X_train_local[index][3]}\\t{X_train_local[index][4]}\")\n\n\tif global_embedding_bool:\n\t\twith open(directory + f\"/train/{sample}/{index}.pkl\", \"wb\") as outfile:\n\t\t\tpickle.dump(X_train_global[index], outfile)\n\nfor index, i in enumerate((X_test_local, X_val_local, X_train_local)):\n\tlen_i = i.shape[0]\n\tshuffle = np.random.permutation(range(len_i))\n\tif index == 0:\n\t\tif global_embedding_bool:\n\t\t\tpickle.dump(X_test_global[shuffle], open(directory + '/X_test.pkl', 'wb'))\n\t\tpd.DataFrame(X_test_local[shuffle]).to_csv(directory + '/X_test.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t index=None)\n\t\tpd.DataFrame(Y_test[shuffle]).to_csv(directory + '/Y_test.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t index=None)\n\n\telif index == 1:\n\t\tif global_embedding_bool:\n\t\t\tpickle.dump(X_val_global[shuffle], open(directory + '/X_val.pkl', 'wb'))\n\t\tpd.DataFrame(X_val_local[shuffle]).to_csv(directory + '/X_val.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t index=None)\n\t\tpd.DataFrame(Y_val[shuffle]).to_csv(directory + '/Y_val.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t index=None)\n\n\telif index == 2:\n\t\tif global_embedding_bool:\n\t\t\tpickle.dump(X_train_global[shuffle], open(directory + '/X_train.pkl', 'wb'))\n\t\tpd.DataFrame(X_train_local[shuffle]).to_csv(directory + '/X_train.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t index=None)\n\t\tpd.DataFrame(Y_train[shuffle]).to_csv(directory + '/Y_train.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t index=None)\n" }, { "alpha_fraction": 0.6224166750907898, "alphanum_fraction": 0.6421782970428467, "avg_line_length": 32.34714889526367, "blob_id": "36ef3b9cb9cd698dcb9d7a0e078d859a38361c30", "content_id": "3f0aeaff82d8159be6b3c6f17b43f8aca7d4cb5b", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6629, "license_type": "permissive", "max_line_length": 186, "num_lines": 193, "path": "/utils/plots_test_set.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "import numpy as np\r\nfrom bokeh.models import ColumnDataSource, Plot, LinearAxis, Grid, Range1d, Label, BoxAnnotation\r\nfrom bokeh.layouts import column\r\nfrom bokeh.models.glyphs import Text\r\nfrom bokeh.models import Legend\r\nfrom bokeh.plotting import figure, output_file, save\r\nimport pandas as pd\r\nimport os\r\nimport time\r\n\r\nepitope_threshold = 0.75\r\ndeepipred_results_dir = f'/home/go96bix/projects/raw_data/test_training_data'\r\noutdir = os.path.join(deepipred_results_dir, 'plots2/')\r\nstarttime = time.time()\r\n\r\n######## Plots #########\r\nprint('\\nPlotting.')\r\n\r\n##### progress vars ####\r\nfilecounter = 1\r\nprintlen = 1\r\ntotal = 1000\r\n\r\n\r\n########################\r\nclass Protein_seq():\r\n\tdef __init__(self, sequence, score, over_threshold, positions=None):\r\n\t\tself.sequence = sequence\r\n\t\tself.score = score\r\n\t\tself.over_threshold = over_threshold\r\n\t\tif positions == None:\r\n\t\t\tself.positions = list(range(1, len(self.sequence) + 1))\r\n\t\telse:\r\n\t\t\tself.positions = positions\r\n\r\n\r\ndef readFasta_extended(file):\r\n\t## read fasta file\r\n\theader = \"\"\r\n\tseq = \"\"\r\n\tvalues = []\r\n\twith open(file, \"r\") as infa:\r\n\t\tfor index, line in enumerate(infa):\r\n\t\t\tline = line.strip()\r\n\t\t\tif index == 0:\r\n\t\t\t\theader 
= line[1:].split(\"\\t\")\r\n\t\t\telif index == 1:\r\n\t\t\t\tseq += line\r\n\t\t\telif index == 2:\r\n\t\t\t\tpass\r\n\t\t\telse:\r\n\t\t\t\tvalues = line.split(\"\\t\")\r\n\treturn header, seq, values\r\n\r\n\r\ndef frame_avg(values, frame_extend=2):\r\n\taverages = []\r\n\tprotlen = len(values)\r\n\tfor pos in range(protlen):\r\n\t\tframelist = []\r\n\t\tfor shift in range(-frame_extend, frame_extend + 1, 1):\r\n\t\t\tif not (pos + shift) < 0 and not (pos + shift) > (protlen - 1):\r\n\t\t\t\tframelist.append(float(values[pos + shift]))\r\n\t\taverages.append(sum(framelist) / len(framelist))\r\n\treturn averages\r\n\r\n\r\nholydict = {}\r\n\r\nfor root, dirs, files in os.walk(os.path.join(deepipred_results_dir, \"epidope/\"), topdown=False):\r\n\tfor name in files:\r\n\t\tfile = os.path.join(root, name)\r\n\t\tdf = pd.read_csv(file, sep='\\t', index_col=False, skiprows=1)\r\n\t\tletter_arr = df.values[:, 1]\r\n\t\tvalue_arr = np.array(df.values[:, 2], dtype=np.float)\r\n\r\n\t\tscore_bool = value_arr > epitope_threshold\r\n\t\tprotein = Protein_seq(sequence=\"\".join(letter_arr), score=value_arr, over_threshold=score_bool)\r\n\t\tholydict.update({name[:-(len(\".csv\"))]: protein})\r\n\r\nfor geneid in holydict:\r\n\r\n\t############### progress ###############\r\n\telapsed_time = time.strftime(\"%H:%M:%S\", time.gmtime(time.time() - starttime))\r\n\tprintstring = f'Plotting: {geneid} File: {filecounter} / {total} Elapsed time: {elapsed_time}'\r\n\tif len(printstring) < printlen:\r\n\t\tprint(' ' * printlen, end='\\r')\r\n\tprint(printstring, end='\\r')\r\n\tprintlen = len(printstring)\r\n\tfilecounter += 1\r\n\t#######################################\r\n\r\n\t# make output dir and create output filename\r\n\tif not os.path.exists(outdir + '/plots'):\r\n\t\tos.makedirs(outdir + '/plots')\r\n\tout = f'{outdir}/plots/{geneid}.html'\r\n\toutput_file(out)\r\n\r\n\tseq = holydict[geneid].sequence\r\n\tpos = holydict[geneid].positions\r\n\tscore = holydict[geneid].score\r\n\tflag = holydict[geneid].over_threshold\r\n\t# pwa_score = pwa(score, frame_extend = 24)\r\n\tprotlen = len(seq)\r\n\r\n\t# create a new plot with a title and axis labels\r\n\tp = figure(title=geneid, y_range=(-0.03, 1.03), y_axis_label='Scores', plot_width=1200, plot_height=460,\r\n\t tools='xpan,xwheel_zoom,reset', toolbar_location='above')\r\n\tp.min_border_left = 80\r\n\r\n\t# add a line renderer with legend and line thickness\r\n\tl1 = p.line(range(1, protlen + 1), score, line_width=1, color='black', visible=True)\r\n\tl2 = p.line(range(1, protlen + 1), ([epitope_threshold] * protlen), line_width=1, color='red', visible=True)\r\n\r\n\tlegend = Legend(items=[('EpiDope', [l1]),\r\n\t ('epitope_threshold', [l2])])\r\n\r\n\tp.add_layout(legend, 'right')\r\n\tp.xaxis.visible = False\r\n\tp.legend.click_policy = \"hide\"\r\n\r\n\tp.x_range.bounds = (-50, protlen + 51)\r\n\r\n\t### plot for sequence\r\n\t# symbol based plot stuff\r\n\r\n\tplot = Plot(title=None, x_range=p.x_range, y_range=Range1d(0, 9), plot_width=1200, plot_height=50, min_border=0,\r\n\t toolbar_location=None)\r\n\r\n\ty = [1] * protlen\r\n\tsource = ColumnDataSource(dict(x=list(pos), y=y, text=list(seq)))\r\n\tglyph = Text(x=\"x\", y=\"y\", text=\"text\", text_color='black', text_font_size='8pt')\r\n\tplot.add_glyph(source, glyph)\r\n\tlabel = Label(x=-80, y=y[1], x_units='screen', y_units='data', text='Sequence', render_mode='css',\r\n\t background_fill_color='white', background_fill_alpha=1.0)\r\n\tplot.add_layout(label)\r\n\r\n\txaxis = 
LinearAxis()\r\n\tplot.add_layout(xaxis, 'below')\r\n\tplot.add_layout(Grid(dimension=0, ticker=xaxis.ticker))\r\n\r\n\t# add predicted epitope boxes\r\n\tpredicted_epitopes = []\r\n\tpred_pos = [i for i, c in enumerate(flag) if c]\r\n\tif len(pred_pos) > 1:\r\n\t\tstart = pred_pos[0]\r\n\t\tstop = pred_pos[0]\r\n\t\tfor i in range(1, len(pred_pos)):\r\n\t\t\tif pred_pos[i] == stop + 1:\r\n\t\t\t\tstop = pred_pos[i]\r\n\t\t\telse:\r\n\t\t\t\tif stop > start:\r\n\t\t\t\t\tpredicted_epitopes.append((start, stop))\r\n\t\t\t\tstart = pred_pos[i]\r\n\t\t\t\tstop = pred_pos[i]\r\n\t\tpredicted_epitopes.append((start, stop))\r\n\t# print(predicted_epitopes)\r\n\r\n\tfor prediction in predicted_epitopes:\r\n\t\tstart = prediction[0]\r\n\t\tstop = prediction[1]\r\n\t\ty = np.array([-0.02] * protlen)\r\n\t\ty[start:stop] = 1.02\r\n\t\tp.vbar(x=list(pos), bottom=-0.02, top=y, width=1, alpha=0.2, line_alpha=0, color='darkgreen',\r\n\t\t legend='predicted_epitopes', visible=True)\r\n\r\n\t# add known epitope boxes\r\n\theader, seq, values = readFasta_extended(\r\n\t\tf'/home/go96bix/projects/raw_data/bepipred_proteins_with_marking/{geneid}.fasta')\r\n\tfor head in header:\r\n\t\tfile_name = head.split(\"_\")\r\n\t\tstart, stop = int(file_name[2]) + 1, int(file_name[3]) + 1\r\n\t\ty = np.array([-0.02] * protlen)\r\n\t\ty[start:stop] = 1.02\r\n\t\tif file_name[0].startswith(\"Negative\"):\r\n\t\t\tp.vbar(x=list(pos), bottom=-0.02, top=y, width=1, alpha=0.2, line_alpha=0, color='darkred',\r\n\t\t\t legend='provided_non_epitope', visible=False)\r\n\t\telse:\r\n\t\t\tp.vbar(x=list(pos), bottom=-0.02, top=y, width=1, alpha=0.2, line_alpha=0, color='blue',\r\n\t\t\t legend='provided_epitope', visible=False)\r\n\r\n\tsave(column(p, plot))\r\n'''\r\n\t# DeepLoc barplot\r\n\tdeeploclocations = ['Membrane','Nucleus','Cytoplasm','Extracellular','Mitochondrion','Cell_membrane','Endoplasmic_reticulum','Plastid','Golgi_apparatus','Lysosome/Vacuole','Peroxisome']\r\n\tdeepplot = figure(x_range=deeploclocations, plot_height=350, title=\"DeepLoc\", toolbar_location=None, tools=\"\")\r\n\tdeepplot.vbar(x = deeploclocations, top=deeploc_dict[geneid], width = 0.8)\r\n\tdeepplot.xgrid.grid_line_color = None\r\n\tdeepplot.xaxis.major_label_orientation = pi/2\r\n\tdeepplot.y_range.start = 0\r\n\r\n\tsave(column(p,plot,deepplot))\r\n'''\r\n" }, { "alpha_fraction": 0.592270016670227, "alphanum_fraction": 0.6080566048622131, "avg_line_length": 33.01852035522461, "blob_id": "5b395951f6294b0158eda1fb1ab233a323b08f9e", "content_id": "5d6027378e176a5d7fa7a9b433796adce97dbfa3", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1837, "license_type": "permissive", "max_line_length": 101, "num_lines": 54, "path": "/utils/iedb_to_filtered_blast_table.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "slicesize = 49\nshift_needed = 0\nwith open(\"/home/go96bix/projects/epitop_pred/utils/iedb_linear_epitopes.fasta\") as input_file:\n\twith open(\"/home/go96bix/projects/epitop_pred/utils/bepipred_samples_like_filtered_blast_table.tsv\",\n\t \"w\") as output_file:\n\t\toutput_file.write(\"#qseqid\\tsseqid\\tqstart\\tsstart\\tsend\\n\")\n\t\tnon_epi_counter = 0\n\t\ttrue_epi_counter = 0\n\t\tfor line in input_file:\n\t\t\tif line.startswith(\">\"):\n\t\t\t\theader = line.strip()\n\t\t\t\tsseqid = header[1:]\n\t\t\t\tif sseqid.startswith(\"Negative\"):\n\t\t\t\t\tnon_epi_counter += 1\n\t\t\t\t\tqseqid = 
f\"nonepi_{non_epi_counter}\"\n\t\t\t\telif sseqid.startswith(\"Positive\"):\n\t\t\t\t\ttrue_epi_counter += 1\n\t\t\t\t\tqseqid = f\"epi_{true_epi_counter}\"\n\t\t\t\telse:\n\t\t\t\t\tprint(f\"error: header {header} not in positive or negative set\")\n\t\t\t\t\texit()\n\n\t\t\telse:\n\t\t\t\tseq = line\n\n\t\t\t\twith open(f\"/home/go96bix/projects/epitop_pred/bepipred_sequences/{qseqid}_{sseqid}.fasta\",\n\t\t\t\t \"w\") as out_fasta:\n\t\t\t\t\tout_fasta.write(f\"{header}\\n\")\n\t\t\t\t\tout_fasta.write(f\"{seq}\\n\")\n\n\t\t\t\tupper_pos = [i for i, c in enumerate(seq) if c.isupper()]\n\t\t\t\tif len(upper_pos) > 1:\n\t\t\t\t\tepitopes = []\n\t\t\t\t\tstart = upper_pos[0]\n\t\t\t\t\tstop = upper_pos[0]\n\t\t\t\t\tfor i in range(1, len(upper_pos)):\n\t\t\t\t\t\tif upper_pos[i] == stop + 1:\n\t\t\t\t\t\t\tstop = upper_pos[i]\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tif stop > start:\n\t\t\t\t\t\t\t\tepitopes.append((start, stop))\n\t\t\t\t\t\t\tstart = upper_pos[i]\n\t\t\t\t\t\t\tstop = upper_pos[i]\n\t\t\t\t\tepitopes.append((start, stop))\n\t\t\t\tfor hit in epitopes:\n\t\t\t\t\tmean_pos = (hit[0] + hit[1]) / 2\n\t\t\t\t\tstart = int(mean_pos - slicesize / 2)\n\t\t\t\t\tstop = int(mean_pos + slicesize / 2)\n\t\t\t\t\tif start * -1 > shift_needed:\n\t\t\t\t\t\tshift_needed = start * -1\n\t\t\t\t\tif stop - len(seq) > shift_needed:\n\t\t\t\t\t\tshift_needed = stop - len(seq)\n\t\t\t\t\toutput_file.write(f\"{qseqid}\\t|{sseqid}|\\t{1}\\t{start+1}\\t{stop+1}\\n\")\nprint(f\"shift needed:{shift_needed}\")\n" }, { "alpha_fraction": 0.6400745511054993, "alphanum_fraction": 0.6482046246528625, "avg_line_length": 29.75, "blob_id": "a7ad1594648ddead9e276e4da02803d3842a45df", "content_id": "1ba00faaafd27be08e615dfb7303d57a8e4845bf", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5904, "license_type": "permissive", "max_line_length": 93, "num_lines": 192, "path": "/utils/input_max.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "import sys\n\nsys.path.insert(0, '/home/go96bix/projects/Masterarbeit/ML')\n\nimport pandas as pd\nimport numpy as np\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.preprocessing import LabelEncoder\nimport pickle\n\n\ndef semicol_to_rows(data):\n\tdata_total = []\n\tfor j in data:\n\t\tdata_row = np.array([])\n\t\tfor i in j:\n\t\t\tdata_cell = np.array(i.split(';'))\n\t\t\tdata_row = np.append(data_row, data_cell)\n\t\tdata_total.append(data_row)\n\n\tdata_total = np.array(data_total, dtype=str)\n\treturn data_total\n\n\ndef parse_amino(x):\n\tamino = \"GALMFWKQESPVICYHRNDTUOBZX\"\n\tencoder = LabelEncoder()\n\tencoder.fit(list(amino))\n\tout = []\n\tfor i in x:\n\t\tdnaSeq = i.upper()\n\t\tencoded_X = encoder.transform(list(dnaSeq))\n\t\tout.append(encoded_X)\n\treturn np.array(out)\n\n\ndef simple_seq(df, directory):\n\t\"\"\"make train test set\"\"\"\n\t\"\"\"sequence x-set\"\"\"\n\tdata_x = df[\"seq_slice\"]\n\tdata_y = df[['episcore', '#gene_ID', 'episcore_pos']]\n\tdata = df.values\n\tx = data_x.values\n\n\ty = data_y.values\n\n\tprint(directory + '/X_train.csv')\n\tX_train, X_test, Y_train, Y_test = train_test_split(x, y, test_size=0.2, shuffle=False)\n\tnp.savetxt(directory + '/X_train.csv', X_train, delimiter='\\t', fmt='%s')\n\tnp.savetxt(directory + '/X_test.csv', X_test, delimiter='\\t', fmt='%s')\n\tnp.savetxt(directory + '/Y_train.csv', Y_train, delimiter='\\t', fmt='%s')\n\tnp.savetxt(directory + '/Y_test.csv', Y_test, delimiter='\\t', 
fmt='%s')\n\n\ndef seq_balanced(df_true, df_false, directory):\n\tdf_true = df_true.drop_duplicates(\"seq_slice\", keep=\"first\")\n\tdf_false = df_false.drop_duplicates(\"seq_slice\", keep=\"first\")\n\tsamples_true = df_true.shape[0]\n\tsamples_false = df_false.shape[0]\n\tnumber_total_samples_per_class = min(samples_false, samples_true)\n\n\tdata_x = pd.DataFrame()\n\tdata_y = pd.DataFrame()\n\n\tfor df in (df_false, df_true):\n\t\tdf = df.sample(n=number_total_samples_per_class)\n\t\tdata_x = data_x.append(df[[\"seq_slice\"]])\n\t\tdata_y = data_y.append(df[['episcore', '#gene_ID', 'episcore_pos']])\n\n\tx = data_x.values\n\ty = data_y.values\n\tprint(directory + '/X_train.csv')\n\tX_train, X_test, Y_train, Y_test = train_test_split(x, y, test_size=0.2, shuffle=True)\n\tnp.savetxt(directory + '/X_train.csv', X_train, delimiter='\\t', fmt='%s')\n\tnp.savetxt(directory + '/X_test.csv', X_test, delimiter='\\t', fmt='%s')\n\tnp.savetxt(directory + '/Y_train.csv', Y_train, delimiter='\\t', fmt='%s')\n\tnp.savetxt(directory + '/Y_test.csv', Y_test, delimiter='\\t', fmt='%s')\n\n\ndef complex(df_true, df_false, directory):\n\t# 3D Version, [Sample,Feature, Pos]\n\tdef expand_columns_3D(df):\n\t\tsamples = df.shape[0]\n\t\tout_arr = []\n\t\tfor col in df.columns:\n\t\t\tarr = np.zeros((samples, seq_length))\n\t\t\tcolumn = df[col]\n\t\t\tif col == \"seq_slice\":\n\t\t\t\tarr = column.values\n\t\t\t\t# arr = parse_amino(column.values)\n\t\t\t\tout_arr.append(arr)\n\n\t\t\telif col in [\"#homologs\", \"deeplocvalues\"]:\n\t\t\t\tcolumn = column.str.split(\";\", expand=False)\n\t\t\t\tnumber_values_in_Col = len(column.values[0])\n\t\t\t\tfor value_index in range(number_values_in_Col):\n\t\t\t\t\tarr = np.zeros((samples, seq_length))\n\t\t\t\t\tfor sample_index, values in enumerate(column.values):\n\t\t\t\t\t\tfor seq_index in range(seq_length):\n\t\t\t\t\t\t\tarr[sample_index, seq_index] = values[value_index]\n\t\t\t\t\tout_arr.append(arr)\n\n\t\t\telse:\n\t\t\t\tcolumn = column.str.split(\";\", expand=False)\n\t\t\t\tfor sample_index, values in enumerate(column.values):\n\t\t\t\t\tfor value_index, value in enumerate(values):\n\t\t\t\t\t\tarr[sample_index, value_index] = value\n\t\t\t\tout_arr.append(arr)\n\t\treturn out_arr\n\n\t# 2D Version [Sample, Feature_pos] advantage homologs and deeplocvalues just one time inside\n\tdef expand_columns_2D(df):\n\n\t\tsamples = df.shape[0]\n\t\tout_arr = []\n\t\tfor index, row in df.iterrows():\n\t\t\tarr = np.array([])\n\t\t\tfor index_value, value in enumerate(row):\n\t\t\t\tif index_value == 0:\n\t\t\t\t\tarr = np.append(arr, list(value))\n\t\t\t\telse:\n\t\t\t\t\tvalue = str(value).split(\";\")\n\t\t\t\t\tarr = np.append(arr, value)\n\t\t\tout_arr.append(arr)\n\n\t\treturn np.array(out_arr)\n\n\tsamples = min(df_false.shape[0], df_true.shape[0])\n\tdf_test = pd.DataFrame()\n\tdf_val = pd.DataFrame()\n\tdf_train = pd.DataFrame()\n\tfor df in (df_true, df_false):\n\t\tseq_length = 50\n\n\t\tdf_test_class = df.sample(n=int(0.2 * samples))\n\t\tdf_test = df_test.append(df_test_class)\n\t\tdf = df.drop(df_test_class.index)\n\n\t\tdf_val_class = df.sample(n=int(0.2 * samples))\n\t\tdf_val = df_val.append(df_val_class)\n\t\tdf = df.drop(df_val_class.index)\n\n\t\tdf_train = df_train.append(df)\n\n\tfor index, df in enumerate([df_test, df_val, df_train]):\n\t\tdata_y = df[['episcore', '#gene_ID', 'episcore_pos']]\n\t\tdf = df.drop(['episcore', '#gene_ID', 'episcore_pos'], axis=1)\n\t\tmake_3D_arr = False\n\t\tif make_3D_arr:\n\t\t\tout_arr = 
expand_columns_3D(df)\n\t\telse:\n\t\t\tout_arr = expand_columns_2D(df)\n\n\t\tif index == 0:\n\t\t\tY_test = data_y.values\n\t\t\tnp.savetxt(directory + '/Y_test.csv', Y_test, delimiter='\\t', fmt='%s')\n\n\t\t\tx = np.array(out_arr)\n\t\t\tif make_3D_arr:\n\t\t\t\tx = np.swapaxes(x, 0, 1)\n\t\t\tX_test_output = open(directory + '/X_test.pkl', 'wb')\n\t\t\tpickle.dump(x, X_test_output)\n\n\t\tif index == 1:\n\t\t\tY_val = data_y.values\n\t\t\tnp.savetxt(directory + '/Y_val.csv', Y_val, delimiter='\\t', fmt='%s')\n\n\t\t\tx = np.array(out_arr)\n\t\t\tif make_3D_arr:\n\t\t\t\tx = np.swapaxes(x, 0, 1)\n\t\t\tX_val_output = open(directory + '/X_val.pkl', 'wb')\n\t\t\tpickle.dump(x, X_val_output)\n\n\t\tif index == 2:\n\t\t\tY_train = data_y.values\n\t\t\tnp.savetxt(directory + '/Y_train.csv', Y_train, delimiter='\\t', fmt='%s')\n\n\t\t\tx = np.array(out_arr)\n\t\t\tif make_3D_arr:\n\t\t\t\tx = np.swapaxes(x, 0, 1)\n\t\t\tX_train_output = open(directory + '/X_train.pkl', 'wb')\n\t\t\tpickle.dump(x, X_train_output)\n\n\npath = \"/home/go96bix/projects/epitop_pred/Full_data\"\ndf_true = pd.read_csv(path + '/epitopes_edited.csv', delimiter=',', dtype='str')\ndf_false = pd.read_csv(path + '/non_epitopes_edited.csv', delimiter=',', dtype='str')\n\ndf_true = df_true.dropna()\ndf_false = df_false.dropna()\ndirectory = \"/home/go96bix/projects/epitop_pred/Full_data_len50_rawSeq\"\ncomplex(df_true, df_false, directory)\n" }, { "alpha_fraction": 0.6544610857963562, "alphanum_fraction": 0.6686735153198242, "avg_line_length": 35.9290771484375, "blob_id": "19034ca72362dbab9f6f1ed84b7025b9c7faefc9", "content_id": "826d316bde65eed796c39f6583424d322d24a15f", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10132, "license_type": "permissive", "max_line_length": 128, "num_lines": 282, "path": "/utils/PSDAAV.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "\"\"\"\nPositive Selection Detection Using Amino Acid Variance Analysis\n\nThis script aims to detect positive selection sites on a given\nAmino Acid MSA by computing a Proper Orthogonal Decomposition\nof each column of the MSA. Here the amino acids are interpreted\nas a pair of two numbers [polarity, molecular volume] as suggested\nby R. Grantham in 1974 and used by K. Katouh et al. in 2002.\nThe biological assumption behind this approach is that amino acids on\nhomolog positive selection sites differ more from their aligned partners than\namino acids on a non-positive selection site.\nThe variances along the two primary components of each MSA column\nare then used to compute an index that measures the diversity of the\namino acids within the respective column.\nThe script also performs a simple significance test, assuming a normal\ndistribution of the non-positive selection sites with the variance and the\nmean of the computed index over the whole MSA.\n\"\"\"\n\n# To run this program you will need the following packages.\nimport sys\nfrom Bio import SeqIO\nimport numpy as np\nfrom scipy.stats import norm\nimport os\n\n\"\"\"\nThis method transforms the amino acid sequences within a MSA into sequences of
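\ntwo dimensional vectors ( [polarity, molecular volume] ). As an added, purely\nillustrative example (not part of the original docstring): with the Grantham\ntables below, arginine \"R\" becomes the pair [10.5, 124] and glycine \"G\" becomes\n[9, 3]; both components are later rescaled and mean-centred per MSA column. 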
The components of these vectors\nare saved in two separate files (pol.csv and mVol.csv).\n\"\"\"\n\n\ndef transformAAtoVectors(inputFile, outputDirectory):\n\twith open(os.path.join(outputDirectory, \"pol.csv\"), \"w\") as outputP, open(os.path.join(outputDirectory, \"mVol.csv\"),\n\t \"w\") as outputV:\n\t\t# This step determines the length of the longest Sequence within the input file.\n\t\t# If the input file always is a MSA (in which all Sequences have the same length) then this step is unnecessary.\n\t\tlength = sys.maxsize\n\t\taaAln = SeqIO.parse(open(inputFile, \"r\"), \"fasta\")\n\t\tfor species in aaAln:\n\t\t\tif length > len(str(species.seq)):\n\t\t\t\tlength = len(str(species.seq))\n\t\t\t\t# Now the one character amino acid code is translated into pairs of polarity and molecular volume.\n\t\t\t\t# This for loop iterates over each column of the input MSA.\n\t\t\t\t# The Amino Acid properties determined by Grantham (1974)\n\t\t\t\tp = {\"R\": 10.5,\n\t\t\t\t \"L\": 4.9,\n\t\t\t\t \"P\": 8,\n\t\t\t\t \"T\": 8.6,\n\t\t\t\t \"S\": 9.2,\n\t\t\t\t \"A\": 8.1,\n\t\t\t\t \"V\": 5.9,\n\t\t\t\t \"G\": 9,\n\t\t\t\t \"I\": 5.2,\n\t\t\t\t \"F\": 5.2,\n\t\t\t\t \"Y\": 6.2,\n\t\t\t\t \"C\": 5.5,\n\t\t\t\t \"H\": 10.4,\n\t\t\t\t \"Q\": 10.5,\n\t\t\t\t \"N\": 11.6,\n\t\t\t\t \"K\": 11.3,\n\t\t\t\t \"D\": 13,\n\t\t\t\t \"E\": 12.3,\n\t\t\t\t \"M\": 5.7,\n\t\t\t\t \"W\": 5.4}\n\n\t\t\t\tv = {\"R\": 124,\n\t\t\t\t \"L\": 111,\n\t\t\t\t \"P\": 32.5,\n\t\t\t\t \"T\": 61,\n\t\t\t\t \"S\": 32,\n\t\t\t\t \"A\": 31,\n\t\t\t\t \"V\": 84,\n\t\t\t\t \"G\": 3,\n\t\t\t\t \"I\": 111,\n\t\t\t\t \"F\": 132,\n\t\t\t\t \"Y\": 136,\n\t\t\t\t \"C\": 55,\n\t\t\t\t \"H\": 96,\n\t\t\t\t \"Q\": 85,\n\t\t\t\t \"N\": 56,\n\t\t\t\t \"K\": 119,\n\t\t\t\t \"D\": 54,\n\t\t\t\t \"E\": 83,\n\t\t\t\t \"M\": 105,\n\t\t\t\t \"W\": 170}\n\t\tgapValues = []\n\t\tfor i in range(length):\n\t\t\t# pValues and vValues will contain the respective value for each amino acid in each MSA column\n\t\t\tpValues = []\n\t\t\tvValues = []\n\t\t\tgapValues_pos_i = []\n\t\t\taaAln = SeqIO.parse(open(inputFile, \"r\"), \"fasta\")\n\t\t\tfor species in aaAln:\n\t\t\t\tsequenceTmp = str(species.seq)\n\t\t\t\taa = sequenceTmp[i]\n\n\t\t\t\t# if a sequence has a gap at this position a \"-\" placeholder is stored, since \"no amino acid\" has no polarity or volume; these positions are masked out below.\n\t\t\t\tif p.get(str(aa), False):\n\t\t\t\t\tpValues.append(p[str(aa)])\n\t\t\t\telse:\n\t\t\t\t\tpValues.append(\"-\")\n\n\t\t\t\tif v.get(str(aa), False):\n\t\t\t\t\tvValues.append(v[str(aa)])\n\t\t\t\telse:\n\t\t\t\t\tvValues.append(\"-\")\n\n\t\t\t\t# gaps are counted so later the bias of them can be removed\n\t\t\t\tif aa not in p.keys():\n\t\t\t\t\tgapValues_pos_i.append(0)\n\t\t\t\telse:\n\t\t\t\t\tgapValues_pos_i.append(1)\n\t\t\tgapValues.append(gapValues_pos_i)\n\n\t\t\t\"\"\"\n\t\t\tIn this step the [p, v] vectors are centered (so that their mean will be [0, 0]).\n\t\t\tThis is necessary for the POD, since this program uses a Singular Value Decomposition and\n\t\t\tthe singular values are the square roots of the eigenvalues of adj(A)*A and adj(A)*A is \n\t\t\tproportional to Cov(A) only if the columns of A are centered.\n\t\t\t\"\"\"\n\t\t\t# remove sequences with gap at position i and center them\n\t\t\tuseable = np.array(gapValues[i], dtype=bool)\n\t\t\tpValues_useable = np.array(np.array(pValues)[useable], dtype=float)\n\t\t\tpValues_useable = pValues_useable / 12.3\n\t\t\tvValues_useable = np.array(np.array(vValues)[useable], dtype=float)\n\t\t\tvValues_useable = 
vValues_useable / 170\n\n\t\t\t#\n\t\t\txMean = np.mean(pValues_useable)\n\t\t\tyMean = np.mean(vValues_useable)\n\n\t\t\tpValues_useable = pValues_useable - xMean\n\t\t\tvValues_useable = vValues_useable - yMean\n\n\t\t\t# Now the p and v values of this column will be written in the respective files.\n\t\t\tpLine = \"\"\n\t\t\tvLine = \"\"\n\t\t\tj = 0\n\n\t\t\t# save all sequences also the ones with gaps\n\t\t\t# but the centered values for the non gap ones\n\t\t\tfor xI in range(len(pValues)):\n\t\t\t\tif pValues[xI] == \"-\":\n\t\t\t\t\tpLine += str(pValues[xI]) + \" \"\n\t\t\t\telse:\n\t\t\t\t\tpLine += str(pValues_useable[j]) + \" \"\n\t\t\t\t\tj += 1\n\n\t\t\tpLine.strip(\" \")\n\t\t\tj = 0\n\t\t\tfor xI in range(len(vValues)):\n\t\t\t\tif vValues[xI] == \"-\":\n\t\t\t\t\tvLine += str(vValues[xI]) + \" \"\n\t\t\t\telse:\n\t\t\t\t\tvLine += str(vValues_useable[j]) + \" \"\n\t\t\t\t\tj += 1\n\t\t\tvLine.strip(\" \")\n\t\t\toutputP.write(pLine + \"\\n\")\n\t\t\toutputV.write(vLine + \"\\n\")\n\treturn np.array(gapValues)\n\n\n\"\"\"\nThis method computes the variance index. It uses a Singular Value Decomposition\nto determine the variance along the primary components of each set of vectors representing\na column of the input MSA.\nThe sum of those two variances is multiplied by the \"diversity weight\" to generate the\nindex. The \"diversity weight\" is the number of different amino acids within each column divided\nby the number of sequences in the alignment. This aims to reduce the index of columns with only \nfew different amino acids.\n\"\"\"\n\n\ndef computeVarianceIndex(pFile, vFile, outputDir):\n\tpData = np.genfromtxt(pFile, delimiter=' ')\n\tvData = np.genfromtxt(vFile, delimiter=' ')\n\n\twith open(os.path.join(outputDir, \"indexRaw.csv\"), \"w\") as output:\n\t\t# This for loop iterates over all columns of the input MSA\n\t\tfor i in range(len(pData)):\n\t\t\t# rowVectors contains the actual column\n\t\t\trowVectors = []\n\t\t\t# pList and vList will contain each amino acid value of the column exactly once (to count the number of different amino acids)\n\t\t\tpData_i = pData[i][np.logical_not(np.isnan(pData[i]))]\n\t\t\tvData_i = vData[i][np.logical_not(np.isnan(vData[i]))]\n\t\t\tpList = np.unique(pData_i)\n\t\t\tvList = np.unique(vData_i)\n\t\t\tfor j in range(len(pData_i)):\n\t\t\t\trowVectors.append([pData_i[j], vData_i[j]])\n\n\t\t\t# compute the \"diversity weight\" as described before\n\t\t\tnumberOfDifferentAA = max(len(pList), len(vList))\n\t\t\tdiversityWeight = numberOfDifferentAA / len(pData_i)\n\n\t\t\t# use numpy for the actual SVD\n\t\t\ttmpArray = np.array(rowVectors)\n\t\t\ttmpArray = np.transpose(tmpArray)\n\t\t\t# s contains the singular values\n\t\t\tU, s, Vh = np.linalg.svd(tmpArray)\n\t\t\t# the singular values are the square root of the eigenvalues of adj(A)*A\n\t\t\t# they need to be squared\n\t\t\tfor k in range(len(s)):\n\t\t\t\ts[k] = s[k] * s[k]\n\t\t\tindex = sum(s) * diversityWeight\n\t\t\toutput.write(str(index) + \"\\n\")
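\n\n\n# added worked example (editorial, not in the original script): a column whose\n# 10 ungapped sequences show only 3 distinct Grantham values gets\n# diversityWeight = 3 / 10 = 0.3, scaling its summed variance down to 30 %.\n\n\"\"\"\nThis method computes the q-values for each MSA column. Please note that this is a very simple\nsignificance test. 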
It assumes that the index of MSA columns on non-positive selection sites is normally\ndistributed with the same variance and mean as the index on the whole input MSA (which might also contain\npositive selection sites).\nHowever, this method generates a value within the interval [0, 1], which indicates whether the index of a certain\ncolumn is extraordinarily high.\n\"\"\"\n\n\ndef computeQvalue(indexFile, outputDirectory, gapValues):\n\twith open(os.path.join(outputDirectory, \"qValues.csv\"), \"w\") as output:\n\t\t# Numpy and Scipy are used in this method\n\t\trawData = np.genfromtxt(indexFile)\n\t\tmean = np.mean(rawData)\n\t\tstdDev = np.std(rawData)\n\t\tif stdDev == 0:\n\t\t\tfor i in range(len(rawData)):\n\t\t\t\tqValue = 0\n\t\t\t\tqValue_norm = 0\n\t\t\t\toutput.write(str(qValue) + \"\\t\" + str(qValue_norm) + \"\\n\")\n\t\telse:\n\t\t\tnorm_to_zero = norm.cdf(0, mean, stdDev)\n\t\t\tfor i in range(len(rawData)):\n\t\t\t\t# The q-value is the cumulative distribution function evaluated at the index value\n\t\t\t\tqValue = norm.cdf(rawData[i], mean, stdDev)\n\t\t\t\t# equivalently, qValue_norm == (qValue - norm_to_zero) / (1 - norm_to_zero),\n\t\t\t\t# i.e. the CDF of the fitted normal truncated at zero (indices are non-negative)\n\t\t\t\tqValue_norm = qValue - ((1 - qValue) / (1 - norm_to_zero)) * norm_to_zero\n\t\t\t\toutput.write(str(qValue) + \"\\t\" + str(qValue_norm) + \"\\n\")\n\n\n# This method just identifies the columns with a q-value greater than or equal to (1 - pValueThreshold)\ndef determinePositiveSelectionSites(qValueFile, outputDirectory):\n\twith open(os.path.join(outputDirectory, \"detectedPositions.csv\"), \"w\") as output:\n\t\tfinalIndex = np.genfromtxt(qValueFile)\n\t\tfinalIndex = np.array(finalIndex)\n\t\tfor i in range(len(finalIndex)):\n\t\t\tif finalIndex[i][1] >= (1 - pValueThreshold):\n\t\t\t\t# the first column will be named \"1\", not \"0\" (and so on)\n\t\t\t\toutput.write(str(i + 1) + \"\\n\")\n\n\n\"\"\"\nThe program needs 2 input arguments:\n\nThe AA-MSA that is to be analyzed\nThe output directory (there will be more than one output file) \n\nYou can also specify a p-Value threshold as a third\nargument. If it is not specified the default p-Value\nthreshold is 0.1 (please note that the significance test used\nin this program is not in any way sophisticated. This is not a \"p-value\"\nin the common sense.)\n\"\"\"\n\ninputAAaln = sys.argv[1]\noutputDir = sys.argv[2]\nif len(sys.argv) == 4:\n\tpValueThreshold = float(sys.argv[3])\nelif len(sys.argv) == 3:\n\tpValueThreshold = 0.1\nelse:\n\tprint(\"UNSUPPORTED!\")\n\n# Now the defined methods are called.\n# The first two steps are essential (they compute the actual index).\n\nif not os.path.isdir(outputDir):\n\tos.makedirs(outputDir)\ngapValues = transformAAtoVectors(inputAAaln, outputDir)\ncomputeVarianceIndex(os.path.join(outputDir, \"pol.csv\"), os.path.join(outputDir, \"mVol.csv\"), outputDir)\n# The following steps can be skipped if they are not needed. 
They perform the significance test.\ncomputeQvalue(os.path.join(outputDir, \"indexRaw.csv\"), outputDir, gapValues)\ndeterminePositiveSelectionSites(os.path.join(outputDir, \"qValues.csv\"), outputDir)\n" }, { "alpha_fraction": 0.5654135346412659, "alphanum_fraction": 0.6135338544845581, "avg_line_length": 50.07692337036133, "blob_id": "c1b8ed2e77bb9744bc6bd7500f682cfe00a22216", "content_id": "e814a4cd8a67c214e3f1a6989586865a1ebf254e", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 665, "license_type": "permissive", "max_line_length": 157, "num_lines": 13, "path": "/utils/split_AAseq.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "# with open(\"/home/le86qiz/Documents/Konrad/prediction_pipeline/raptorx_pipeline/NP_220200.1.faa\",\"r\") as input_file:\nwith open(\"/home/le86qiz/Documents/Konrad/stiko/second_prediction/bacteria/bacteria_gram-/results/single_fastas/sp_P14283_PERT_BORPE.faa\",\"r\") as input_file:\n seq = \"\"\n for line in input_file:\n if line.startswith(\">\"):\n continue\n else:\n if line.endswith(\"\\n\"):\n line = line[:-1]\n seq += line\n with open(\"/home/go96bix/projects/epitop_pred/220200_test.csv\", \"w\") as output_file:\n for i in range(0,len(seq)-50):\n output_file.write(f\"{i+25}\\t{seq[i:i+50]}\\n\")\n\n" }, { "alpha_fraction": 0.6320695877075195, "alphanum_fraction": 0.64263516664505, "avg_line_length": 29.358489990234375, "blob_id": "76428980be23fad538484460d07bea3537009742", "content_id": "08ba745010584982e1ceea8d4486b7c9cc62fbeb", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1609, "license_type": "permissive", "max_line_length": 89, "num_lines": 53, "path": "/utils/cluster_to_proteins_with_markings.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "from shutil import copyfile\nimport os\n\ndirectory = \"/home/go96bix/projects/raw_data/clustered_protein_seqs/my_cluster/\"\ndirectory_fasta = \"/home/go96bix/projects/raw_data/bepipred_proteins_with_marking\"\ndirectory_out = \"/home/go96bix/projects/raw_data/bepipred_proteins_with_marking_\"\nunique_prot_dict = {}\n\n\ndef readFasta_extended(file):\n\t## read fasta file\n\theader = \"\"\n\tseq = \"\"\n\tvalues = []\n\twith open(file, \"r\") as infa:\n\t\tfor index, line in enumerate(infa):\n\t\t\tline = line.strip()\n\t\t\tif index == 0:\n\t\t\t\theader = line[1:].split(\"\\t\")\n\t\t\telif index == 1:\n\t\t\t\tseq += line\n\t\t\telif index == 2:\n\t\t\t\tpass\n\t\t\telse:\n\t\t\t\tvalues = line.split(\"\\t\")\n\treturn header, seq, values\n\n\nfor root, dirs, files in os.walk(directory):\n\tfor name in files:\n\t\tif name.endswith(\"clstr\"):\n\t\t\tname_dir = directory_out + name.split(\".fasta\")[0]\n\t\t\tif not os.path.isdir(name_dir):\n\t\t\t\tos.makedirs(name_dir)\n\t\t\twith open(os.path.join(root, name), \"r\") as infile:\n\t\t\t\tallLines = infile.read()\n\t\t\t\tclusters = allLines.split(\">Cluster\")\n\t\t\t\tfor cluster in clusters:\n\t\t\t\t\tif len(cluster) > 0:\n\t\t\t\t\t\tcount_validations = 0\n\t\t\t\t\t\tproteins = cluster.strip().split(\"\\n\")\n\t\t\t\t\t\tbestProt = \"\"\n\t\t\t\t\t\tfor index, protein in enumerate(proteins):\n\t\t\t\t\t\t\tif index > 0:\n\t\t\t\t\t\t\t\tfilename = protein.split(\" \")[1][1:-3] + \".fasta\"\n\t\t\t\t\t\t\t\theader, seq, values = readFasta_extended(os.path.join(directory_fasta, filename))\n\t\t\t\t\t\t\t\tif len(header) > 
count_validations:\n\t\t\t\t\t\t\t\t\tcount_validations = len(header)\n\t\t\t\t\t\t\t\t\tbestProt = filename\n\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\tcontinue\n\n\t\t\t\t\t\tcopyfile(os.path.join(directory_fasta, bestProt), os.path.join(name_dir, bestProt))\n" }, { "alpha_fraction": 0.649297833442688, "alphanum_fraction": 0.6669864058494568, "avg_line_length": 37.15666198730469, "blob_id": "06d421c9f3779b8ffedbf315fb33e9544cf42fec", "content_id": "cf54c0d3faa9ae93039a41fce77cdce7f8a50cb0", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 26062, "license_type": "permissive", "max_line_length": 279, "num_lines": 683, "path": "/epidope.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "from theano.scan_module.scan_utils import scan_args\n\nprint('\\nLoading packages.')\n\n# suppresses anaconda FutureWarnings\nimport warnings\nwarnings.simplefilter(action='ignore', category=FutureWarning)\nimport pickle\n# import all needed packages\nimport os\nos.environ['KMP_WARNINGS'] = 'off'\nfrom keras import models\nfrom keras import layers\nfrom keras.regularizers import l2\nimport argparse\nimport time\nfrom sklearn.preprocessing import LabelEncoder\nimport sys\nimport numpy as np\n# deeploc needs theano v1.0.4\n# conda install -c conda-forge theano \nos.environ['THEANO_FLAGS']='device=cpu,floatX=float32,optimizer=fast_compile'\n# from DeepLoc.models import *\n# from DeepLoc.utils import *\nfrom math import pi\nfrom bokeh.models import ColumnDataSource, Plot, LinearAxis, Grid, Range1d, Label, BoxAnnotation\nfrom bokeh.layouts import column\nfrom bokeh.models.glyphs import Text\nfrom bokeh.models import Legend\n#from bokeh.io import show\nfrom bokeh.plotting import figure, output_file, save\nimport tensorflow as tf\nfrom multiprocessing import Pool\nimport glob\nfrom utils import DataGenerator\n\n#### Argument parser\n\nparser = argparse.ArgumentParser(description='Runs the epitope prediction Pipeline', epilog='')\nparser.add_argument('-e', '-epitopes', help='File containing a list of known epitope sequences for plotting.',metavar='<File>')\nparser.add_argument('-n', '-nonepitopes', help='File containing a list of non-epitope sequences for plotting.',metavar='<File>')\nparser.add_argument('-i', '-infile', help='Multi- or Single-Fasta file with protein sequences.',metavar='<File>')\nparser.add_argument('-o','-outdir', help='Specifies output directory. Default = .',metavar='<Folder>')\nparser.add_argument('-delim', help='Delimiter char for fasta header. Default = White space.',metavar='<String>')\nparser.add_argument('-idpos', help='Position of gene ID in fasta header. Zero based. Default = 0.',metavar='<Integer>')\nparser.add_argument('-t','-threshold', help='Threshold for epitope score. Default 0.75.', metavar='<Float>')\nparser.add_argument('-p', '-processes', help='Number of processes used for predictions. 
Default 1.', metavar='<Int>')\nargs = parser.parse_args()\n########################\n\n\nclass Protein_seq():\n\tdef __init__(self, sequence, score, over_threshold, positions=None):\n\t\tself.sequence = sequence\n\t\tself.score = score\n\t\tself.over_threshold = over_threshold\n\t\tif positions == None:\n\t\t\tself.positions = list(range(1,len(self.sequence)+1))\n\t\telse:\n\t\t\tself.positions = positions\n\n\ndef build_model_old(nodes, seq_length, dropout=0):\n\tmodel = models.Sequential()\n\tmodel.add(layers.Embedding(21, 10, input_length=seq_length))\n\tmodel.add(layers.Bidirectional(layers.LSTM(nodes, return_sequences=True, dropout=dropout, recurrent_dropout=0.2)))\n\tmodel.add(layers.Bidirectional(layers.LSTM(nodes, dropout=dropout, recurrent_dropout=0.2)))\n\tmodel.add(layers.Dense(nodes))\n\tmodel.add(layers.LeakyReLU(alpha=0.01))\n\tmodel.add(layers.Dense(2, activation='softmax'))\n\n\tmodel.compile(optimizer='adam', loss='binary_crossentropy', metrics=['acc'])\n\tmodel.summary()\n\treturn model\n\ndef build_model(nodes, dropout, seq_length, weight_decay_lstm= 0, weight_decay_dense=0):\n\t\"\"\" model with elmo embeddings for amino acids\"\"\"\n\tinputs = layers.Input(shape=(seq_length, 1024))\n\thidden = layers.Bidirectional(layers.LSTM(nodes, input_shape=(seq_length,1024), return_sequences=True, dropout=dropout, recurrent_dropout=0.2, kernel_regularizer=l2(weight_decay_lstm), recurrent_regularizer=l2(weight_decay_lstm), bias_regularizer=l2(weight_decay_lstm)))(inputs)\n\thidden = layers.Bidirectional(layers.LSTM(nodes, dropout=dropout, recurrent_dropout=0.2, kernel_regularizer=l2(weight_decay_lstm), recurrent_regularizer=l2(weight_decay_lstm), bias_regularizer=l2(weight_decay_lstm)))(hidden)\n\thidden = layers.Dense(nodes, kernel_regularizer=l2(weight_decay_dense), bias_regularizer=l2(weight_decay_dense))(hidden)\n\thidden = layers.LeakyReLU(alpha=0.01)(hidden)\n\n\tout = layers.Dense(2, activation='softmax', kernel_regularizer=l2(weight_decay_dense), bias_regularizer=l2(weight_decay_dense))(hidden)\n\tmodel= models.Model(inputs=inputs,outputs=out)\n\n\tmodel.compile(optimizer='adam', loss='binary_crossentropy', metrics=['acc'])\n\tmodel.summary()\n\treturn model\n\ndef parse_amino(x):\n\t\"\"\"\n\tTakes (position, amino acid sequence) pairs and parses each sequence to a numerical sequence.\n\t\"\"\"\n\tamino = \"GALMFWKQESPVICYHRNDTU\"\n\tencoder = LabelEncoder()\n\tencoder.fit(list(amino))\n\tout = []\n\tfor i in x:\n\t\tdnaSeq = i[1].upper()\n\t\tencoded_X = encoder.transform(list(dnaSeq))\n\t\tout.append(encoded_X)\n\treturn np.array(out)\n\n\ndef split_AA_seq(seq, slicesize, shift):\n\t\"\"\"\n\tTakes input sequence and slicesize: Returns slices of that sequence with a slice length of 'slicesize' with a sliding window of 1.\n\t\"\"\"\n\tsplited_AA_seqs = []\n\tfor i in range(0, len(seq) - slicesize):\n\t\tsplited_AA_seqs.append([i + (slicesize // 2) - shift, seq[i:i + slicesize]])\n\treturn np.array(splited_AA_seqs)\n\ndef split_embedding_seq(embeddings, slicesize, shift):\n\tassert len(embeddings) == 1, \"splitting of embeddings not intended for multiple proteins (state of affairs 12.06.19)\"\n\tsplited_em_seqs = []\n\tfor protein in embeddings:\n\t\tsplited_em_seq = []\n\t\tfor i in range(0, len(protein) - slicesize):\n\t\t\tsplited_em_seq.append([i + (slicesize // 2) - shift, protein[i:i + slicesize]])\n\t\tsplited_em_seqs.append(splited_em_seq)\n\treturn np.array(splited_em_seqs[0])
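\n\n# added illustration (editorial, not in the original script): with slicesize=3\n# and shift=0, split_AA_seq(list('abcde'), 3, 0) returns\n# [[1, ['a', 'b', 'c']], [2, ['b', 'c', 'd']]] -- len(seq) - slicesize windows,\n# each tagged with its centre position.\n\n# filters tensor flow output (the higher the number the more is filtered)\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = 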
'1' # {0, 1, 2 (warnings), 3 (errors)}\n\nstarttime = time.time()\n\nif not args.i:\n\tprint('Error: No input file given.')\n\texit()\n\nmultifasta = args.i\nidpos = 0\noutdir = ''\n\nif args.idpos:\n\tidpos = int(args.idpos)\n\nif args.o:\n\toutdir = args.o\n\tif not os.path.isabs(outdir):\n\t\toutdir = f'{os.getcwd()}/{args.o}'\n\tif not os.path.exists(outdir):\n\t\tos.makedirs(outdir)\nelse:\n\toutdir = os.getcwd()\n\nif not os.path.exists(outdir + '/results'):\n\tos.makedirs(outdir + '/results')\noutdir = outdir + '/results'\nprint(f'\\nOut dir set to: {outdir}')\n\nprocesses = 1\nif args.p:\n\tprocesses = int(args.p)\n\tprint(f'Processes set to: {processes}.')\n\nepitope_threshold = 0.75\n\nif args.t:\n\tepitope_threshold = float(args.t)\nprint(f'The epitope threshold is set to: {epitope_threshold}\\n')\n# slicesize is the number of AAs used as input to predict the likelihood of an epitope\nslicesize = 49\n\n##### reading input fasta file #####\n\nfasta = {}\nfastaheader = {}\nprint('Reading input fasta.')\nwith open(multifasta,'r') as infile:\n\tacNumber = ''\n\tfor line in infile:\n\t\tif line.startswith('>'):\n\t\t\tif args.delim:\n\t\t\t\tacNumber = line.split(args.delim)[idpos].strip().strip('>')\n\t\t\t\tfastaheader[acNumber] = line.strip()\n\t\t\telse:\n\t\t\t\tacNumber = line.split()[idpos].strip().strip('>')\n\t\t\t\tfastaheader[acNumber] = line.strip()\n\t\telse:\n\t\t\tif acNumber in fasta:\n\t\t\t\tfasta[acNumber] += line.strip()\n\t\t\telse:\n\t\t\t\tfasta[acNumber] = line.strip()\n\n\n##### reading provided epitope lists #######\nepitopes = list()\nif args.e:\n\tprint('Reading provided epitope sequences.')\n\twith open(args.e, 'r') as infile:\n\t\tfor line in infile:\n\t\t\tepitopes.append(line.strip())\n\tprint('There were ' + str(len(epitopes)) + ' epitope sequences provided.')\n\nnonepitopes = list()\nif args.n:\n\tprint('Reading provided non-epitope sequences.')\n\twith open(args.n, 'r') as infile:\n\t\tfor line in infile:\n\t\t\tnonepitopes.append(line.strip())\n\tprint('There were ' + str(len(nonepitopes)) + ' non-epitope sequences provided.')\nprint()\n\ndef ensemble_prediction(model, path, inputs_test, suffix, middle_name = \"\", prediction_weights = False, nb_classes = 2):\n\t\"\"\"Averages the class probabilities of every k-fold weight file matching\n\tweights_model_{middle_name}k-fold_run_*_{suffix}.hdf5; without explicit\n\tprediction_weights all models are weighted equally.\"\"\"\n\tmodels_filenames = []\n\tfor file in sorted(os.listdir(path)):\n\t\tif file.endswith(f\"_{suffix}.hdf5\") and file.startswith(f\"weights_model_{middle_name}k-fold_run_\"):\n\t\t\t# print(file)\n\t\t\tmodels_filenames.append(os.path.join(path, file))\n\n\tpreds = []\n\tfor fn in models_filenames:\n\t\tmodel.load_weights(fn, by_name=True)\n\t\tpred = model.predict(inputs_test)\n\t\tpreds.append(pred)\n\n\tif not prediction_weights:\n\t\tprediction_weights = [1. 
/ len(models_filenames)] * len(models_filenames)\n\tweighted_predictions = np.zeros((inputs_test.shape[0], nb_classes), dtype='float32')\n\tfor weight, prediction in zip(prediction_weights, preds):\n\t\tweighted_predictions += weight * np.array(prediction)\n\n\treturn weighted_predictions\n\nmodel_path_local = \"/home/go96bix/projects/epitop_pred/data_generator_bepipred_final/local_embedding/weights.best.auc10.250_nodes_with_decay_local.hdf5\"\t# 250 nodes\nmodel_path_global = \"/home/go96bix/projects/epitop_pred/data_generator_bepipred_final/global_embedding/weights.best.auc10.250_nodes_with_decay.hdf5\"\t# 250 nodes\nshift = 22\nlocal_embedding = False\nuse_circular_filling = False\n\nprint('Deep Neural Network model summary:')\nnodes = 250\nelmo_embedder = DataGenerator.Elmo_embedder()\nmodel_local = build_model(nodes, dropout=0, seq_length=slicesize)\nmodel_global = build_model(nodes, dropout=0, seq_length=slicesize)\nmodel_local.load_weights(model_path_local)\nmodel_global.load_weights(model_path_global)\n\nholydict = {}\n\n\n##############################################\n######### EpiDope score prediction #########\n##############################################\n\n##### progress vars ####\nfilecounter = 1\nprintlen = 1\ntotal = str(len(fasta))\n########################\n\nprint('\\nPredicting EpiDope scores.')\n# go over all entries in dict\nfor geneid in fasta:\n\tscore_both_models = []\n\tfor embedding_version in [\"local\",\"global\"]:\n\t\tif embedding_version == \"local\":\n\t\t\tlocal_embedding = True\n\t\telse:\n\t\t\tlocal_embedding = False\n\t\t############### progress ###############\n\t\telapsed_time = time.strftime(\"%H:%M:%S\", time.gmtime(time.time()-starttime))\n\t\tprintstring = f'Predicting scores for: {geneid} File: {filecounter} / {total} Elapsed time: {elapsed_time}'\n\t\tif len(printstring) < printlen:\n\t\t\tprint(' '*printlen, end='\\r')\n\t\tprint(printstring, end='\\r')\n\t\tprintlen = len(printstring)\n\t\tfilecounter += 1\n\t\t#######################################\n\t\t\"\"\" OLD version\n\t\t# slice the long AA in short segments so it can be used as input for the neural network\n\t\tseq_slices = split_AA_seq(fasta[geneid], slicesize)\n\t\t# parse input to numerical values\n\t\n\t\tX_test = parse_amino(seq_slices)\n\t\t# finally predict the epitopes\n\t\tY_pred_test = model.predict(X_test)\t\n\t\t\"\"\"\n\t\t# embedding per slice version\n\n\t\tif local_embedding:\n\t\t\tseq = fasta[geneid].lower()\n\n\t\t\tif use_circular_filling:\n\t\t\t\tseq_extended = list(seq[-shift:] + seq + seq[0:shift])\n\t\t\telse:\n\t\t\t\tseq_extended = np.array([\"-\"] * (len(seq) + (shift * 2)))\n\t\t\t\tseq_extended[shift:-shift] = np.array(list(seq))\n\n\t\t\tseq_slices = split_AA_seq(seq_extended, slicesize,shift)\n\t\t\tpositions = seq_slices[:,0]\n\t\t\tseq_slices_input = np.array([list(i) for i in seq_slices[:,1]])\n\n\t\t\t# start = time.time()\n\t\t\tX_test = elmo_embedder.elmo_embedding(seq_slices_input, 0, slicesize)\n\t\t\t# stop = time.time()\n\t\t\t# print(stop-start)\n\n\t\t#embedding whole protein version\n\t\telse:\n\t\t\t# seq_slices = split_AA_seq(fasta[geneid], slicesize)\n\t\t\tseq = np.array([list(fasta[geneid].lower())])\n\t\t\tname = geneid.split(\"_\")[2:]\n\t\t\tname = \"_\".join(name)\n\t\t\tX_test = pickle.load(open(f\"/home/go96bix/projects/raw_data/embeddings_bepipred_samples/{name}.pkl\",\"rb\"))[1]\n\t\t\t# X_test = elmo_embedder.elmo_embedding(seq, 0, len(seq[0]))\n\t\t\tseq_extended = np.zeros((1,seq.shape[1] + (shift * 2), 1024), 
dtype=np.float32)\n\t\t\tif use_circular_filling:\n\t\t\t\tseq_extended[0,0:shift] = X_test[-shift:]\n\t\t\t\tseq_extended[0,-shift:] = X_test[0:shift]\n\t\t\tseq_extended[0,shift:-shift] = X_test\n\t\t\tseq_slices = split_embedding_seq(seq_extended,slicesize,shift)\n\t\t\tpositions = seq_slices[:, 0]\n\t\t\tX_test = np.stack(seq_slices[:,1])\n\n\t\t# # finally predict the epitopes\n\t\t# path_weights = "/home/go96bix/projects/epitop_pred/epitope_data"\n\t\t# suffix_weights = "big_embeddings"\n\t\t# Y_pred_test = ensemble_prediction(model,path=path_weights,inputs_test=X_test,suffix=suffix_weights)\n\t\tif local_embedding:\n\t\t\tY_pred_test = model_local.predict(X_test)\n\t\telse:\n\t\t\tY_pred_test = model_global.predict(X_test)\n\t\t# column 0 of Y_pred_test is the likelihood that the slice is NOT an epitope; of interest\n\t\t# here is column 1, which contains the likelihood of being an epitope\n\t\tepi_score = Y_pred_test[:, 1]\n\n\t\t# pad the score array so that it has the same length as the input sequence\n\t\tscore = np.zeros(len(fasta[geneid]))\n\t\t# leading AAs which are not predictable get the first predicted value (where these AAs were part of the window)\n\t\tscore[0:int(positions[0])] = epi_score[0]\n\t\t# trailing AAs which are not predictable get the last predicted value (where these AAs were part of the window)\n\t\tscore[int(positions[-1]):]=epi_score[-1]\n\t\tscore[np.array(positions,dtype=int)] = epi_score\n\t\t#print(score)\n\t\tscore_both_models.append(score)\n\n\tscore_both_models = np.array(score_both_models)\n\tscore = score_both_models.mean(axis=0)\n\tscore_bool = score > epitope_threshold\n\n\tprotein = Protein_seq(sequence=fasta[geneid], score=score, over_threshold=score_bool)\n\tholydict.update({geneid:protein})\n\n# the frame average extends the frame by frame_extend to the left AND right; thus frame_extend 3 results in a total window of 7\ndef frame_avg(values, frame_extend = 2):\n\taverages = []\n\tprotlen = len(values)\n\tfor pos in range(protlen):\n\t\tframelist = []\n\t\tfor shift in range(-frame_extend,frame_extend+1,1):\n\t\t\tif not (pos+shift) < 0 and not (pos+shift) > (protlen -1):\n\t\t\t\tframelist.append(float(values[pos+shift]))\n\t\taverages.append(sum(framelist)/len(framelist))\n\treturn averages\n\n# calculate parker hydrophilicity scores\ndef parker_avg(seq):\n\tparker_scores = {'A':2.1,'C':1.4,'D':10.0,'E':7.8,'F':-9.2,'G':5.7,'H':2.1,'I':-8.0,'K':5.7,'L':-9.2,'M':-4.2,'N':7.0,'P':2.1,'Q':6.0,'R':4.2,'S':6.5,'T':5.2,'V':-3.7,'W':-10.0,'Y':-1.9}\n\tscore = 0\n\tfor AA in seq:\n\t\tscore += parker_scores.get(AA, 0)\n\tscore = (score / len(seq))/20 + 0.5\t\t# Parker values lie in [-10, 10]; dividing the mean by 20 and adding 0.5 normalizes to [0, 1]\n\treturn score\n\n\n'''\n########### calculate amino acid k-mer scores ############\n# all lines added therefore are marked with\t\t# aa k-mer score\naa_scores = {}\nwith open('/home/le86qiz/Documents/Konrad/general_epitope_analyses/aminoacid_epitope_scores.csv') as infile:\n\tfor line in infile:\n\t\taa = line.split('\\t')[0]\n\t\tscore = float(line.strip().split('\\t')[1])\n\t\taa_scores[aa] = score\n'''\n\n# aa_scoredict = {}\n# non_polar_min = {} # GVCLIMWF\n# hydrophilic_max = {} # STDNQRH\n# verzweigt_min = {} # VLIE (verzweigt = branched)\n# strong_pos = {} # PDR\n# strong_neg = {} # VLIF\n# max_min_diff_score = {}\nhyrophilicity_parker = {}\nfor geneid in holydict:\n\tseq = holydict[geneid].sequence\n\t# scores = []\n\t# for aa in seq:\n\t# \tscores.append(aa_scores.get(aa,0))\n\t# aa_scoredict[geneid] = scores\n\t# frame_extend = 
10\n\t# normalizer = frame_extend * 2 + 1\n\tprotlen = len(seq)\n\t# non_polar_min_scores = []\n\t# hydrophilic_max_scores = []\n\t# verzweigt_min_scores = []\n\t# strong_pos_scores = []\n\t# strong_neg_scores = []\n\thydrophilicity_parker_scores = []\n\tfor pos in range(protlen):\n\t\t# framelist = []\n\t\t# non_polar_min_score = hydrophilic_max_score = verzweigt_min_score = strong_pos_score = strong_neg_score = 0\n\t\t# for shift in range(-frame_extend,frame_extend+1,1):\n\t\t# \tif not (pos+shift) < 0 and not (pos+shift) > (protlen - 1):\n\t\t# \t\tframelist.append(seq[pos+shift])\n\t\t# for aa in framelist:\n\t\t# \tif aa in 'GVCLIMWF':\n\t\t# \t\tnon_polar_min_score += 1\n\t\t# \tif aa in 'STDNQRH':\n\t\t# \t\thydrophilic_max_score += 1\n\t\t# \tif aa in 'VLIE':\n\t\t# \t\tverzweigt_min_score += 1\n\t\t# \tif aa in 'PDR':\n\t\t# \t\tstrong_pos_score += 1\n\t\t# \tif aa in 'VLIF':\n\t\t# \t\tstrong_neg_score += 1\n\t\t# non_polar_min_score = non_polar_min_score / normalizer\n\t\t# hydrophilic_max_score = hydrophilic_max_score / normalizer\n\t\t# verzweigt_min_score = verzweigt_min_score / normalizer\n\t\t# strong_pos_score = strong_pos_score / normalizer\n\t\t# strong_neg_score = strong_neg_score / normalizer\n\t\t# non_polar_min_scores.append(non_polar_min_score)\n\t\t# hydrophilic_max_scores.append(hydrophilic_max_score)\n\t\t# verzweigt_min_scores.append(verzweigt_min_score)\n\t\t# strong_pos_scores.append(strong_pos_score)\n\t\t# strong_neg_scores.append(strong_neg_score)\n\t\t# hydrophilicity by parker\n\t\tframelist_parker = []\n\t\tfor shift in range(-3,3+1,1):\n\t\t\tif not (pos+shift) < 0 and not (pos+shift) > (protlen - 1):\n\t\t\t\tframelist_parker.append(seq[pos+shift])\n\t\thydrophilicity_parker_scores.append(parker_avg(framelist_parker))\n\thyrophilicity_parker[geneid] = hydrophilicity_parker_scores\n\t# non_polar_min[geneid] = non_polar_min_scores\n\t# hydrophilic_max[geneid] = hydrophilic_max_scores\n\t# verzweigt_min[geneid] = verzweigt_min_scores\n\t# strong_pos[geneid] = strong_pos_scores\n\t# strong_neg[geneid] = strong_neg_scores\n\t# max_min_diff_score[geneid] = (np.array(hydrophilic_max_scores) - np.array(non_polar_min_scores)) + 0.5\n\n\n########### calculate amino acid k-mer scores end ############\n\n\n###### calculate position weighted frame average scores ######\n\ndef pwa(scores = [], frame_extend = 5):\n\tseqlen = len(scores)\n\t# calculate position weight matrix\n\tweight = frame_extend + 1\n\tweights = []\n\tfor i in range(weight):\n\t\tweights.append(i)\n\tweights = weights[frame_extend:0:-1] + weights\n\tweights = [(weight -x)/(weight) for x in weights]\n\tpwm = []\n\tpwm_adapted = []\n\tfor i in range(seqlen):\n\t\tout = []\n\t\tfor j in range(len(weights)):\n\t\t\tif i + j - frame_extend >= 0 and j - frame_extend + i < seqlen:\n\t\t\t\tout.append(weights[j])\n\t\tif len(out) < seqlen:\n\t\t\tif i + frame_extend < len(out):\n\t\t\t\tout = out + [0] * (seqlen - len(out))\n\t\t\telif frame_extend + i + 1 >= seqlen:\n\t\t\t\tout = [0] * (seqlen - len(out)) + out\n\t\t\telse:\n\t\t\t\tout = [0] * (i - len(out) + frame_extend + 1) + out + [0] * (seqlen - i - frame_extend -1)\n\t\tpwm.append(out)\n\tpwm_adapted = pwm.copy()\n\t# multiply the scores into the pwm\n\tfor i in range(len(scores)):\n\t\tpwm_adapted[i] = [scores[i] * pwm_score for pwm_score in pwm[i]]\n\t# sum up and normalize per position\n\tpwm_scores = np.array(pwm_adapted).sum(axis=0) / np.array(pwm).sum(axis=0)\n\treturn pwm_scores\n
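\n# Worked example for the pwa() weighting (a sketch): with frame_extend=2 the triangular\n# weight vector is [1/3, 2/3, 1, 2/3, 1/3], i.e. a position's own score counts fully and\n# neighbouring scores fade out linearly with distance; near the sequence ends the columns\n# are renormalized by their weight sums.\n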
'''\npwm_scoredict = {}\nfor geneid in holydict:\n\tscores = holydict[geneid].score\n\t\t# sum up and normalize per position\n\tpwm_scoredict[geneid] = pwa(scores, frame_extend = 24)\n'''\n\n\n\n##############################################\n############### Output results ###############\n##############################################\n\nif not os.path.exists(outdir + '/EpiDope'):\n\tos.makedirs(outdir + '/EpiDope')\n\n\n\n######## epitope table #########\npredicted_epitopes = {}\nepitope_slicelen = 15\nslice_shiftsize = 5\n\nprint(f'\\n\\nWriting predicted epitopes to:\\n{outdir}/predicted_epitopes.csv\\n{outdir}/predicted_epitopes_sliced.faa\\n{outdir}/EpiDope_scores.csv\\n{outdir}/hydrophilicity_parker_scores.csv')\nopen(f'{outdir}/predicted_epitopes.csv', 'w').close()\nopen(f'{outdir}/predicted_epitopes_sliced.faa', 'w').close()\nopen(f'{outdir}/EpiDope_scores.csv', 'w').close()\nopen(f'{outdir}/hydrophilicity_parker_scores.csv', 'w').close()\n# open(f'{outdir}/max_min_diff_scores.csv', 'w').close()\nwith open(f'{outdir}/predicted_epitopes.csv','w') as outfile:\n\twith open(f'{outdir}/predicted_epitopes_sliced.faa','w') as outfile2:\n\t\twith open(f'{outdir}/EpiDope_scores.csv', 'w') as outfile3:\n\t\t\twith open(f'{outdir}/hydrophilicity_parker_scores.csv', 'w') as outfile4:\n\t\t\t\twith open(f'{outdir}/max_min_diff_scores.csv', 'w') as outfile5:\n\t\t\t\t\toutfile.write('#Gene_ID\\tstart\\tend\\tsequence\\tscore')\n\t\t\t\t\tfor geneid in holydict:\n\t\t\t\t\t\t#scores = pwa(holydict[geneid].score, 24)\n\t\t\t\t\t\tscores = holydict[geneid].score\n\t\t\t\t\t\tscores_hydrophilicity_parker = frame_avg(hyrophilicity_parker[geneid],frame_extend = 10)\n\t\t\t\t\t\t# scores_max_min_diff = frame_avg(max_min_diff_score[geneid], frame_extend = 10)\n\t\t\t\t\t\tseq = holydict[geneid].sequence\n\t\t\t\t\t\tpredicted_epis = set()\n\t\t\t\t\t\tpredicted_epitopes[geneid] = []\n\t\t\t\t\t\tnewepi = True\n\t\t\t\t\t\tstart = 0\n\t\t\t\t\t\tend = 0\n\t\t\t\t\t\ti = 0\n\t\t\t\t\t\tout = f'{outdir}/EpiDope/{geneid}.csv'\n\t\t\t\t\t\twith open(out,'w') as outfile6:\n\t\t\t\t\t\t\t# write complete scores to file\n\t\t\t\t\t\t\toutfile6.write('#Aminoacid\\tEpiDope\\n')\n\t\t\t\t\t\t\toutfile3.write(f'>{geneid}\\n')\n\t\t\t\t\t\t\toutfile4.write(f'>{geneid}\\n')\n\t\t\t\t\t\t\t# outfile5.write(f'>{geneid}\\n')\n\t\t\t\t\t\t\tfor x in range(len(seq)):\n\t\t\t\t\t\t\t\toutfile3.write(f'{seq[x]}\\t{scores[x]}\\n')\n\t\t\t\t\t\t\t\toutfile4.write(f'{seq[x]}\\t{scores_hydrophilicity_parker[x]}\\n')\n\t\t\t\t\t\t\t\t# outfile5.write(f'{seq[x]}\\t{scores_max_min_diff[x]}\\n')\n\t\t\t\t\t\t\t\toutfile6.write(f'{seq[x]}\\t{scores[x]}\\n')\n\t\t\t\t\t\t\tfor score in scores:\n\t\t\t\t\t\t\t\tif score >= epitope_threshold:\n\t\t\t\t\t\t\t\t\tif newepi:\n\t\t\t\t\t\t\t\t\t\tstart = i\n\t\t\t\t\t\t\t\t\t\tnewepi = False\n\t\t\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\t\t\tend = i\n\t\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\t\tnewepi = True\n\t\t\t\t\t\t\t\t\tif end - start >= 8:\n\t\t\t\t\t\t\t\t\t\t#predicted_epis.add((start + 1, end + 1, seq[start:end+1], np.median(scores[start:end+1]) * deeploc_score))\n\t\t\t\t\t\t\t\t\t\tpredicted_epis.add((start + 1, end + 1, seq[start:end+1], np.median(scores[start:end+1])))\n\t\t\t\t\t\t\t\ti += 1\n\t\t\t\t\t\t\tif end - start >= 8:\n\t\t\t\t\t\t\t\t#predicted_epis.add((start + 1, end + 1, seq[start:end+1], np.median(scores[start:end+1]) * deeploc_score))\n\t\t\t\t\t\t\t\tpredicted_epis.add((start + 1, end + 1, seq[start:end+1], np.median(scores[start:end+1])))\n
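\t\t\t\t\t\t\t# only stretches of more than 8 consecutive residues scoring at or above\n\t\t\t\t\t\t\t# the threshold are reported; every predicted epitope is stored as\n\t\t\t\t\t\t\t# (1-based start, 1-based end, sequence, median score)\n\t\t\t\t\t\t\tpredicted_epis = sorted(predicted_epis)\n\t\t\t\t\t\t\tepiout = ''\n\t\t\t\t\t\t\tfor epi in predicted_epis:\n\t\t\t\t\t\t\t\tepiout = 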
f'{epiout}\\n{geneid}\\t{epi[0]}\\t{epi[1]}\\t{epi[2]}\\t{epi[3]}'\n\t\t\t\t\t\t\t\tpredicted_epitopes[geneid].append(epi[2])\n\t\t\t\t\t\t\t\t# print slices to blast table\n\t\t\t\t### sliced epitope regions\n\t\t\t\t\t\t\t\tif len(epi[2]) > epitope_slicelen:\n\t\t\t\t\t\t\t\t\tfor i in range(0,len(epi[2]) - (epitope_slicelen -1),slice_shiftsize):\n\t\t\t\t\t\t\t\t\t\toutfile2.write(f'>{geneid}|pos_{i+epi[0]}:{i+epi[0]+epitope_slicelen}\\n{epi[2][i:i+epitope_slicelen]}\\n')\n\t\t\t\t### complete epitope regions\n\t\t\t\t#\t\t\t\toutfile2.write(f'>{geneid}|pos_{epi[0]}:{epi[1]}|score_{epi[3]}\\n{epi[2]}\\n')\n\t\t\t\t### complete sequence\n\t\t\t\t#\t\t\t\toutfile2.write(f'>{geneid}|pos_{epi[0]}:{epi[1]}|score_{epi[3]}\\n{seq}\\n')\n\t\t\t\t\t\t\toutfile.write(f'{epiout}')\n\n\n\n######## Plots #########\nprint('\\nPlotting.')\n\n##### progress vars ####\nfilecounter = 1\nprintlen = 1\ntotal = str(len(fasta))\n########################\n\nfor geneid in holydict:\n\n\t############### progress ###############\n\telapsed_time = time.strftime(\"%H:%M:%S\", time.gmtime(time.time()-starttime))\n\tprintstring = f'Plotting: {geneid} File: {filecounter} / {total} Elapsed time: {elapsed_time}'\n\tif len(printstring) < printlen:\n\t\tprint(' '*printlen, end='\\r')\n\tprint(printstring, end='\\r')\n\tprintlen = len(printstring)\n\tfilecounter += 1\n\t#######################################\n\n\n\t# make output dir and create output filename\n\tif not os.path.exists(outdir + '/plots'):\n\t\tos.makedirs(outdir + '/plots')\n\tout = f'{outdir}/plots/{geneid}.html'\n\toutput_file(out)\n\n\tseq = holydict[geneid].sequence\n\tpos = holydict[geneid].positions\n\tscore = holydict[geneid].score\n\tflag = holydict[geneid].over_threshold\n\tpwa_score = pwa(score, frame_extend = 24)\n\tprotlen = len(seq)\n\thyrophilicity_parker_score = frame_avg(hyrophilicity_parker[geneid], frame_extend = 10)\n\n\n\t# create a new plot with a title and axis labels\n\tp = figure(title=fastaheader[geneid][1:], y_range = (-0.03,1.03), y_axis_label='Scores',plot_width=1200,plot_height=460,tools='xpan,xwheel_zoom,reset', toolbar_location='above')\n\tp.min_border_left = 80\n\n\t# add a line renderer with legend and line thickness\n\tl1 = p.line(range(1,protlen+1), score, line_width=1,color='black', visible = True)\n\tl2 = p.line(range(1,protlen+1), ([epitope_threshold] * protlen), line_width=1,color='red', visible = True)\n#\tl10 = p.line(range(1,protlen+1), pwa_score, line_width=1,color='darkgreen', visible = False)\n#\tl12 = p.line(range(1,protlen+1), hyrophilicity_parker_score, line_width=1,color='black', visible = False)\n\n\t#legend = Legend(items=[('EpiDope',[l1]), ('epitope_threshold',[l2]) ] )\n\tlegend = Legend(items=[('EpiDope',[l1]),\n\t('epitope_threshold',[l2]) ])\n#\t('pwa_score',[l10]),\n#\t('hydrophilicity by parker', [l12]) ] )\t# aa k-mer score and pwm\n\n\tp.add_layout(legend,'right')\n#\tp.legend.orientation = 'vertical'\n#\tp.legend.location = 'right'\n\tp.xaxis.visible = False\n\tp.legend.click_policy=\"hide\"\n\n\tp.x_range.bounds = (-50, protlen+51)\n\n\t### plot for sequence\n\t# symbol based plot stuff\n\n\tplot = Plot(title=None, x_range=p.x_range, y_range=Range1d(0,9), plot_width=1200, plot_height=50, min_border=0, toolbar_location=None)\n\n\ty = [1]*protlen\n\tsource = ColumnDataSource(dict(x=list(pos), y=y, text=list(seq)))\n\tglyph = Text(x=\"x\", y=\"y\", text=\"text\", text_color='black', text_font_size='8pt')\n\tplot.add_glyph(source, glyph)\n\tlabel = 
Label(x=-80,y=y[1],x_units='screen',y_units='data',text = 'Sequence', render_mode='css', background_fill_color='white',background_fill_alpha=1.0)\n\tplot.add_layout(label)\n\n\txaxis = LinearAxis()\n\tplot.add_layout(xaxis, 'below')\n\tplot.add_layout(Grid(dimension=0, ticker=xaxis.ticker))\n\n\n\t# add predicted epitope boxes\n\tif predicted_epitopes[geneid]:\n\t\tfor epi in predicted_epitopes[geneid]:\n\t\t\tif seq.find(epi) > -1:\n\t\t\t\tstart = seq.find(epi) + 1\n\t\t\t\tend = start + len(epi) + 1\n\t\t\t\tnon_epitope = [-0.02] * (start - 1) + [1.02] * len(epi) + [-0.02] * ((protlen - (start-1) - len(epi)))\n\t\t\t\tp.vbar(x = list(pos), bottom = -0.02, top = non_epitope, width = 1, alpha = 0.2, line_alpha = 0, color = 'darkgreen', legend = 'predicted_epitopes', visible = True)\n\n\t# add known epitope boxes\n\tif epitopes:\n\t\tfor epi in epitopes:\n\t\t\tif seq.find(epi) > -1:\n\t\t\t\tstart = seq.find(epi) + 1\n\t\t\t\tend = start + len(epi) + 1\n\t\t\t\tepitope = [-0.02] * (start - 1) + [1.02] * len(epi) + [-0.02] * ((protlen - (start-1) - len(epi)))\n\t\t\t\tp.vbar(x = list(pos), bottom = -0.02, top = epitope, width = 1, alpha = 0.2, line_alpha = 0, color = 'blue', legend = 'provided_epitope', visible = False)\n#\t\t\t\toutput_file(f'{outdir}/plots/{geneid}_epi.html') # adds _epi suffix to outfile if a supplied epitope was provided\n\n\t# add non-epitope boxes\n\tif nonepitopes:\n\t\tfor epi in nonepitopes:\n\t\t\tif seq.find(epi) > -1:\n\t\t\t\tstart = seq.find(epi) + 1\n\t\t\t\tend = start + len(epi) + 1\n\t\t\t\tnon_epitope = [-0.02] * (start - 1) + [1.02] * len(epi) + [-0.02] * ((protlen - (start-1) - len(epi)))\n\t\t\t\tp.vbar(x = list(pos), bottom = -0.02, top = non_epitope, width = 1, alpha = 0.2, line_alpha = 0, color = 'darkred', legend = 'provided_non_epitope', visible = False)\n\n\tsave(column(p,plot))\n\n" }, { "alpha_fraction": 0.6723973155021667, "alphanum_fraction": 0.68767911195755, "avg_line_length": 32.774192810058594, "blob_id": "a7e7ebb87da436490ec14e0a4f5cb6aa81e9a64a", "content_id": "bfb6cdb2e7075aeb4dc4d4fbc6ea5f2cbe79a3b8", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1047, "license_type": "permissive", "max_line_length": 134, "num_lines": 31, "path": "/utils/get_fastas_test_set.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "import os\nimport glob\nimport re\n\ntable = '/home/go96bix/projects/epitop_pred/data_generator_bepipred_final/local_embedding/X_test.csv'\n\nids = []\nwith open(table) as infile:\n\tfor line in infile:\n\t\tids.append(line.strip().split('\\t')[3])\n\n\nfor x in ids:\n\tos.system(f'cp /home/go96bix/projects/raw_data/bepipred_sequences/{x}.fasta /home/go96bix/projects/raw_data/bepipred_sequences_test')\n\nfastas = glob.glob('/home/go96bix/projects/raw_data/bepipred_sequences_test/*.fasta')\n\nremove_lower = lambda text: re.sub('[a-z]+', '\\n', text)\n\nepitopelist = '/home/go96bix/projects/raw_data/epitopes.csv'\nmultifasta = '/home/go96bix/projects/raw_data/bepipred_sequences_test.fasta'\nwith open(multifasta, 'w') as outfile:\n\twith open(epitopelist, 'w') as epiout:\n\t\tfor fasta in fastas:\n\t\t\twith open(fasta) as infile:\n\t\t\t\tfor line in infile:\n\t\t\t\t\tif line.startswith('>'):\n\t\t\t\t\t\toutfile.write('>' + fasta.rsplit('/',1)[1][:-6] + '\\n')\n\t\t\t\t\telif line.strip():\n\t\t\t\t\t\toutfile.write(line.strip().upper() + '\\n')\n\t\t\t\t\t\tepiout.write(remove_lower(line.strip()) + '\\n')\n" }, { 
"alpha_fraction": 0.6187454462051392, "alphanum_fraction": 0.6287527680397034, "avg_line_length": 26.132450103759766, "blob_id": "523a44819756624a283ce3a49894c4afee8c0c2c", "content_id": "df274f8ae35d82fb9553abe2421868e9afb03c5e", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4097, "license_type": "permissive", "max_line_length": 121, "num_lines": 151, "path": "/utils/download_proteins_from_epitopeNumber.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "import numpy as np\nimport pandas as pd\nimport urllib.request\n\n\"\"\"\npreviously \n1. download home/go96bix/projects/raw_data/bcell_full_v3.csv\n2. Download \"all\" entries of emble with download_from_iedb.py\nnow \n\"\"\"\n\n\ndf = pd.read_csv(\"/home/go96bix/projects/raw_data/bcell_full_v3.csv\", \",\", skiprows=1)\ndf = df.drop_duplicates(\"Epitope IRI\")\ndf = df.rename(columns={\"Epitope IRI\": \"epitope\"})\n\n\ndef check_ncbi(hit, old_url, old_page_source):\n\t# print(\"try ncbi\")\n\turl = hit[\"Antigen IRI\"].values[0]\n\tprotein_name = \"\"\n\n\tif url == old_url:\n\t\tprotein_name = url.split(\"/\")[-1]\n\t\treturn old_page_source, url, protein_name\n\n\tif type(url) != str:\n\t\tprint(\"did not work\")\n\t\tpage_source = np.nan\n\n\telif \"ncbi\" in url:\n\t\tprotein_name = url.split(\"/\")[-1]\n\t\ttry:\n\t\t\tresponse = urllib.request.urlopen(\n\t\t\t\tf\"https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?db=protein&id={protein_name}&rettype=fasta&retmode=text\",\n\t\t\t\ttimeout=1)\n\t\t\tdata = response.read()\n\t\t\tassert len(data) > 0, \"no answer\"\n\t\t\tpage_source = data.decode(\"utf-8\")\n\n\t\texcept:\n\t\t\tprint(\"did not work\")\n\t\t\tpage_source = np.nan\n\telse:\n\t\tprint(\"did not work\")\n\t\tpage_source = np.nan\n\n\treturn page_source, url, protein_name\n\ndef check_uniprot(hit, old_url, old_page_source):\n\tprint(\"try uniprot\")\n\turl = hit['Parent Protein IRI'].values[0]\n\tprotein_name = \"\"\n\n\tif url == old_url:\n\t\tprotein_name = url.split(\"/\")[-1]\n\t\treturn old_page_source, url, protein_name\n\n\tif type(url) != str:\n\t\tprint(\"did not work\")\n\t\tpage_source = np.nan\n\n\telif \"uniprot\" in url:\n\t\tprotein_name = url.split(\"/\")[-1]\n\t\ttry:\n\t\t\tif url == old_url:\n\t\t\t\treturn old_page_source, start, stop, epi_seq, url, protein_name\n\n\t\t\tresponse = urllib.request.urlopen(url + \".fasta\", timeout=1)\n\t\t\tdata = response.read() # a `bytes` object\n\t\t\tassert len(data) > 0, \"no answer\"\n\t\t\tpage_source = data.decode(\"utf-8\")\n\n\t\texcept:\n\t\t\tprint(\"did not work\")\n\t\t\tpage_source = np.nan\n\telse:\n\t\tprint(\"did not work\")\n\t\tpage_source = np.nan\n\n\treturn page_source, url, protein_name\n\ndef get_protein(id_epi, old_url, old_page_source):\n\thit = df.query(f'epitope==\"http://www.iedb.org/epitope/{id_epi}\"')\n\n\tstart = hit[\"Starting Position\"].values[0]-1\n\tstop = hit[\"Ending Position\"].values[0]\n\n\tepi_seq = hit[\"Description\"].values[0]\n\n\tif np.isnan(start) or np.isnan(stop):\n\t\tstart = -1\n\t\tstop = -1\n\telse:\n\t\tstart = int(start)\n\t\tstop = int(stop)\n\n\n\tpage_source, url, protein_name = check_ncbi(hit, old_url, old_page_source)\n\tif type(page_source) != str:\n\t\tpage_source, url, protein_name = check_uniprot(hit, old_url, old_page_source)\n\n\treturn page_source, start, stop, epi_seq, url, protein_name\n\n\nold_line = \"\"\nold_url = \"\"\npage_source = \"\"\nwith 
open(\"iedb_linear_epitopes_27_11_2019_version3.fasta\", \"w\") as out_fasta:\n\twith open(\"negative_samples.txt\", \"r\") as input_negativ:\n\t\twith open(\"positive_samples.txt\", \"r\") as input_positiv:\n\t\t\tfor i, samples in enumerate([input_negativ, input_positiv]):\n\t\t\t\tepi_bool = i\n\t\t\t\t# for line in samples:\n\t\t\t\tfor line in np.unique(list(samples)):\n\t\t\t\t\tif line == old_line:\n\t\t\t\t\t\tcontinue\n\t\t\t\t\telse:\n\t\t\t\t\t\told_line = line\n\t\t\t\t\t\tid_epi = line.strip()\n\t\t\t\t\t\tpage_source, start, stop, epi_seq, old_url, protein_name = get_protein(id_epi, old_url, page_source)\n\t\t\t\t\t\tif type(page_source) != str:\n\t\t\t\t\t\t\tcontinue\n\t\t\t\t\t\tseq = \"\"\n\t\t\t\t\t\tlines = page_source.split(\"\\n\")\n\t\t\t\t\t\theader = \"\"\n\t\t\t\t\t\tfor index, line in enumerate(lines):\n\t\t\t\t\t\t\tif index == 0:\n\t\t\t\t\t\t\t\theader = f\">{protein_name}\"\n\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\tseq += line\n\n\t\t\t\t\t\tif start == -1 or stop == -1:\n\t\t\t\t\t\t\tstart = seq.find(epi_seq)\n\t\t\t\t\t\t\tstop = start + len(epi_seq)\n\t\t\t\t\t\t\tprint(f\"sequence slice: {seq[start:stop]}, from table: {epi_seq}\")\n\t\t\t\t\t\t\tif start == -1:\n\t\t\t\t\t\t\t\tcontinue\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tif seq[start:stop] != epi_seq:\n\t\t\t\t\t\t\t\tprint(f\"sequence slice: {seq[start:stop]}, from table: {epi_seq}\")\n\t\t\t\t\t\t\t\tcontinue\n\n\t\t\t\t\t\tif epi_bool:\n\t\t\t\t\t\t\tout_fasta.write(header + f\"|{start}_{stop}|PositiveID_{id_epi}\\n\")\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tout_fasta.write(header + f\"|{start}_{stop}|NegativeID_{id_epi}\\n\")\n\t\t\t\t\t\tout_fasta.write(seq + \"\\n\")\n\t\t\t\t\t\tprint(header)\n\t\t\t# print(seq[start-1:stop])\n\t\t\t# print(epi_seq)\n" }, { "alpha_fraction": 0.6168091297149658, "alphanum_fraction": 0.6260683536529541, "avg_line_length": 30.200000762939453, "blob_id": "ab6d8531fd054529226ed40f7541f39deb3984f6", "content_id": "7435a1e3c09511b4bd592b31fe378bb9c6a21a1b", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1404, "license_type": "permissive", "max_line_length": 113, "num_lines": 45, "path": "/utils/fix_local_embedding_set.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "import os\nimport numpy as np\nimport pandas as pd\n\nshift = 22\n\n\ndef fill_with_gaps(samples):\n\tout = []\n\tfor sample in samples:\n\t\tseq = np.array(list(sample[0]))\n\t\tstart = int(sample[1])\n\t\tstop = int(sample[2])\n\t\tfile = sample[3]\n\t\twith open(os.path.join(\"/home/go96bix/projects/raw_data/bepipred_sequences\", f\"{file}.fasta\"), \"r\") as in_file:\n\t\t\tfor line in in_file:\n\t\t\t\tif line.startswith(\">\") or line == \"\\n\":\n\t\t\t\t\tcontinue\n\t\t\t\telse:\n\t\t\t\t\tline = line.strip()\n\t\t\t\t\tseq_len = len(line)\n\t\t\t\t\tline = np.array(shift * [\"-\"] + list(line.lower()) + shift * [\"-\"])\n\n\t\t\t\t\tif stop > seq_len and file.startswith(\"nonepi\"):\n\t\t\t\t\t\tprint(\"foo\")\n\n\t\t\t\t\tseq_new = line[start + shift:stop + shift]\n\t\t\t\t\tassert len(\n\t\t\t\t\t\tseq_new) == 49, f\"Wrong len {len(seq_new)} in file {file}, start {start}, stop {stop}\"\n\t\t\t\t\tseq = seq_new\n\t\t\t\t\tseq = \"\".join(seq)\n\t\tout.append([seq, start, stop, file])\n\tout_df = pd.DataFrame(np.array(out))\n\n\treturn out_df\n\n\ndirectory = \"/home/go96bix/projects/epitop_pred/data_generator_bepipred/local_embedding\"\nfor root, dirs, files in os.walk(directory):\n\tfor file in 
files:\n\t\tif file.endswith(".csv") and not file.startswith("Y"):\n\t\t\tsample_csv = pd.read_csv(os.path.join(root, file), delimiter='\\t', dtype='str', header=None).values\n\t\t\tsample_df = fill_with_gaps(sample_csv)\n\t\t\tsample_df.to_csv(os.path.join(root, file), sep='\\t', encoding='utf-8', header=None,\n\t\t\t                 index=None)\n" }, { "alpha_fraction": 0.6391791105270386, "alphanum_fraction": 0.6488790512084961, "avg_line_length": 29.804597854614258, "blob_id": "ead989fcc3107c11e375388bff7b75c2214965fd", "content_id": "86213f6afa7805aba584180673cbce191907e597", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2680, "license_type": "permissive", "max_line_length": 109, "num_lines": 87, "path": "/utils/curate_iedb_linear_epitopes.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "import numpy as np\n\nunique_prot_dict = {}\n"""\n1: read file\n2: get positions of non-/epitopes\n3: save positions and header in a dict with the sequence as key\n4: merge non-conflicting overlapping epitopes/non-epitopes within proteins"""\nwith open("/home/go96bix/projects/epitop_pred/utils/iedb_linear_epitopes.fasta") as input_file:\n\tfor line in input_file:\n\t\tif line.startswith(">"):\n\t\t\theader = line.strip()\n\n\t\telse:\n\t\t\tseq = line.strip()\n\n\t\t\tepitopes = []  # reset for every sequence, so entries cannot leak over from the previous protein\n\t\t\tupper_pos = [i for i, c in enumerate(seq) if c.isupper()]\n\t\t\tif len(upper_pos) > 1:\n\t\t\t\tstart = upper_pos[0]\n\t\t\t\tstop = upper_pos[0]\n\t\t\t\tfor i in range(1, len(upper_pos)):\n\t\t\t\t\tif upper_pos[i] == stop + 1:\n\t\t\t\t\t\tstop = upper_pos[i]\n\t\t\t\t\telse:\n\t\t\t\t\t\tif stop > start:\n\t\t\t\t\t\t\t# store runs as (start, stop_exclusive, header) so the unpacking below works for every entry\n\t\t\t\t\t\t\tepitopes.append((start, stop + 1, header[1:]))\n\t\t\t\t\t\tstart = upper_pos[i]\n\t\t\t\t\t\tstop = upper_pos[i]\n\n\t\t\t\tepitopes.append((start, stop + 1, header[1:]))\n\n\t\t\tseq_lower = seq.lower()\n\t\t\told_entry = unique_prot_dict.get(seq_lower, [])\n\t\t\told_entry.extend(epitopes)\n\t\t\tunique_prot_dict.update({seq_lower: old_entry})\n\nprint(len(unique_prot_dict.values()))\nnumber_conflicts = 0\nprotein_counter = 0\n\nfor protein, hits in unique_prot_dict.items():\n\tepitope_arr = np.zeros(len(protein))\n\tnon_epitope_arr = np.zeros(len(protein))\n\theader_long = []\n\tmask = np.array([-1] * len(protein))\n\tfor index, marked_area in enumerate(hits):\n\t\t# \tsolve merging\n\t\tstart, stop, header = marked_area\n\t\theader_long.append(f"{header}_{start}_{stop}")\n\t\tif header.startswith("Positive"):\n\t\t\tepitope_arr[start:stop] += 1\n\t\telse:\n\t\t\tnon_epitope_arr[start:stop] += 1\n\t\tif any(mask[start: stop] == 0) and header.startswith("Positive"):\n\t\t\tmask[start: stop] = 1\n\t\telif any(mask[start:stop] == 1) and header.startswith("Negative"):\n\t\t\t# mark as non-epitope all AAs which are not yet labeled as epitope or non-epitope\n\t\t\tmask[start:stop][mask[start:stop] == -1] = 0\n\t\telse:\n\t\t\tif header.startswith("Positive"):\n\t\t\t\tmask[start: stop] = 1\n\t\t\telse:\n\t\t\t\tmask[start: stop] = 0\n\n\tquantity = []\n\tfor i in range(len(protein)):\n\t\tepi_count = epitope_arr[i]\n\t\tnon_epi_count = non_epitope_arr[i]\n\t\tif epi_count + non_epi_count == 0:\n\t\t\tquantity.append("-")\n\t\telse:\n\t\t\tquantity.append(str(epi_count / (epi_count + non_epi_count)))\n\n\tquantity_str = "\\t".join(quantity)\n\tmask_str = ["-" if i == -1 else str(i) for i in list(mask)]\n\tmask_str = "\\t".join(mask_str)\n\theader_long_str = "\\t".join(header_long)\n
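\n\t# output format (one four-line record per unique protein): line 1 lists all\n\t# contributing epitope IDs with their start/stop positions, line 2 is the\n\t# upper-cased sequence, line 3 a per-residue mask (1 = epitope, 0 = non-epitope,\n\t# '-' = unlabelled) and line 4 the fraction of positive annotations per residue\n\twith 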
open(f\"/home/go96bix/projects/raw_data/bepipred_proteins_with_marking/protein_{protein_counter}.fasta\",\n\t \"w\") as out_fasta:\n\t\tout_fasta.write(f\">{header_long_str}\\n\")\n\t\tout_fasta.write(f\"{protein.upper()}\\n\")\n\t\tout_fasta.write(f\"{mask_str}\\n\")\n\t\tout_fasta.write(f\"{quantity_str}\\n\")\n\n\tprotein_counter += 1\n" }, { "alpha_fraction": 0.6508875489234924, "alphanum_fraction": 0.6670611500740051, "avg_line_length": 31.922077178955078, "blob_id": "533620fbbbfbbc449ce0ba72b2af242fde726113", "content_id": "8c937b871b8b28166cd9f9c580056068e2539174", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2535, "license_type": "permissive", "max_line_length": 128, "num_lines": 77, "path": "/utils/filter_test_set_fastas.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "import sys\nimport os\n\ndef readFasta_extended(file):\n\t## read fasta file\n\theader = \"\"\n\tseq = \"\"\n\tvalues = []\n\twith open(file, \"r\") as infa:\n\t\tfor index, line in enumerate(infa):\n\t\t\tline = line.strip()\n\t\t\tif index == 0:\n\t\t\t\theader = line[1:].split(\"\\t\")\n\t\t\telif index == 1:\n\t\t\t\tseq += line\n\t\t\telif index == 2:\n\t\t\t\tpass\n\t\t\telse:\n\t\t\t\tvalues = line.split(\"\\t\")\n\treturn header, seq, values\n\ndef cluster_to_dict(file=\"/home/go96bix/projects/raw_data/clustered_protein_seqs/my_double_cluster0.8_05/0.5_seqID.fasta.clstr\",\n directory_fasta=\"/home/go96bix/projects/raw_data/bepipred_proteins_with_marking\"):\n\tout_dict = {}\n\twith open(file, \"r\") as infile:\n\t\tallLines = infile.read()\n\t\tclusters = allLines.split(\">Cluster\")\n\t\tfor cluster in clusters:\n\t\t\tif len(cluster) > 0:\n\t\t\t\tproteins = cluster.strip().split(\"\\n\")\n\t\t\t\tfiles = []\n\t\t\t\tfor index, protein in enumerate(proteins):\n\t\t\t\t\tif index == 0:\n\t\t\t\t\t\tcluster_name = \"Cluster_\" + protein\n\t\t\t\t\telse:\n\t\t\t\t\t\tfilename = protein.split(\" \")[1][1:-3] + \".fasta\"\n\t\t\t\t\t\tprotein_file = os.path.join(directory_fasta, filename)\n\t\t\t\t\t\tfiles.append(protein_file)\n\t\t\t\tout_dict.update({cluster_name:files})\n\treturn out_dict\n\nif len(sys.argv)==1:\n\t# testfiletable = '/home/go96bix/projects/epitop_pred/data_generator_bepipred_binary_0.5_seqID/samples_for_ROC.csv'\n\ttestfiletable = '/home/go96bix/projects/raw_data/allprotein.csv'\n\tout_path = \"/home/go96bix/projects/raw_data/bepipred_sequences_allProteins.fasta\"\nelse:\n\ttestfiletable = sys.argv[1]\n\tprint(f\"testfiletable = {sys.argv[1]}\")\n\tout_path = sys.argv[2]\n\tprint(f\"out_path = {sys.argv[2]}\")\n\tcluster_dict = cluster_to_dict()\n\ntestproteinIDs = []\nwith open(testfiletable) as infile:\n\tfor line in infile:\n\t\tif line.startswith(\"/\"):\n\t\t\tfile = line.strip().rsplit('/', 1)[1]\n\t\t\ttestproteinIDs.append(file[:-6])\n\t\telif line.startswith(\"Cluster\"):\n\t\t\tfiles = cluster_dict[line.strip()]\n\t\t\tfor file in files:\n\t\t\t\tfile = file.strip().rsplit('/', 1)[-1]\n\t\t\t\ttestproteinIDs.append(file[:-6])\n\t\telse:\n\t\t\tprint(\"Error: input test set csv should contain either a path to a protein or a name of a Cluster \"\n\t\t\t f\"but contained {line}\")\n\t\t\texit()\n\nif os.path.isfile(out_path):\n\tos.remove(out_path)\nwith open(out_path, \"a\") as outfile:\n\tfor testid in testproteinIDs:\n\t\tfile = f'/home/le86qiz/Documents/Konrad/tool_comparison/comparison3/bepipred_proteins_with_marking/{testid}.fasta'\n\t\theader, seq_local, values = 
readFasta_extended(file)\n\t\theader_long_str = "\\t".join(header)  # note: currently unused; the short testid is written instead\n\t\toutfile.write(f'>{testid}\\n')\n\t\toutfile.write(f'{seq_local}\\n')\n" }, { "alpha_fraction": 0.6503263115882874, "alphanum_fraction": 0.6589770913124084, "avg_line_length": 26.919490814208984, "blob_id": "3e8687a6821a40640c730d0aae84e1e5bb04230c", "content_id": "4f3378945d77841c48f90150353ac86f8b46ce99", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6589, "license_type": "permissive", "max_line_length": 132, "num_lines": 236, "path": "/utils/parse_input_full_embedding.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "import os\nimport pandas as pd\nimport re\nimport numpy as np\n\nfrom utils import DataGenerator\nimport pickle\n\n"""\nembed all proteins\ngo over the blast hits\nbuild a dict holding an array of [tuple (start, stop) per window]\ngo over that dict\n\tper value, slice the window out of the embedding\n"""\n\n\ndef readFasta(file):\n\t## read fasta file\n\tseq = ""\n\twith open(file, "r") as infa:\n\t\tfor line in infa:\n\t\t\tline = line.strip()\n\t\t\tif re.match(">", line):\n\t\t\t\tpass\n\t\t\telse:\n\t\t\t\tseq += line\n\treturn seq\n\n\ncwd = "/home/go96bix/projects/epitop_pred"\ndirectory = os.path.join(cwd, "data_generator")\nelmo_embedder = DataGenerator.Elmo_embedder()\n\n"""\ndecide what to put into the test set\n"""\n\nin_test_set = []\ntest_df = pd.DataFrame.from_csv(\n\t"/home/le86qiz/Documents/Konrad/general_epitope_analyses/bepipred_evaluation/deepipred_results/test_samples.csv",\n\tsep=",", header=None, index_col=None)\ntest_df = test_df[test_df[2] == 'true_epitopes']\nfor index, row in test_df.iterrows():\n\tseq_id = int(row[0]) + 1\n\tname = f"seq_{seq_id}"\n\tin_test_set.append(name)\n\nepitope_arr = []\nepitope_arr_test = []\nnon_epitope_arr = []\n\nslicesize = 49\n\nblast_df = pd.DataFrame.from_csv(\n\t"/home/le86qiz/Documents/Konrad/prediction_pipeline/raptorx_pipeline/epitopes/epitope_results/filtered_blast_results.csv",\n\tsep="\\t")\n\n# generate a dict which holds for each protein the start and stop position of each epitope window\n# generate the same dict only containing the test set protein epitopes\nprotein_hits_dict = {}\nprotein_hits_dict_test_set = {}\nfoo = 0\nfor index, row in blast_df.iterrows():\n\tfile = str(row['sseqid']).split("|")[1]\n\tstart = row['sstart'] - row['qstart']\n\tstop = start + slicesize\n\thits = protein_hits_dict.get(file, [])\n\tif (start, stop) not in hits:\n\t\thits.append((start, stop))\n\telse:\n\t\tprint(f"duplicated: {index}")\n\n\t# save only epitopes in test set\n\tif index in in_test_set:\n\t\thits_test = protein_hits_dict_test_set.get(file, [])\n\t\thits_test.append((start, stop))\n\t\tprotein_hits_dict_test_set.update({file: hits_test})\n\t# save all epitopes\n\tprotein_hits_dict.update({file: hits})\n\n# embed each protein\n# extend proteins with 20 zero columns on each side\n# make a mask where the protein is NOT an epitope\n# save epitopes and non-epitopes in different arrays\nfor key, values in protein_hits_dict.items():\n\tseq = readFasta(os.path.join(\n\t\t"/home/le86qiz/Documents/Konrad/prediction_pipeline/raptorx_pipeline/epitopes/epitope_results/complete_epitope_protein_sequences",\n\t\tkey + ".txt"))\n\tseq_len = len(seq)\n\tshift = 20\n\tprotein_pad = np.zeros((seq_len + (shift * 2), 1024))\n
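\n\t# SeqVec's embed_sentence returns one 1024-dimensional vector per residue for each\n\t# of the three ELMo layers (roughly shape (3, seq_len, 1024)); averaging over axis 0\n\t# collapses the layers into a single (seq_len, 1024) matrix\n\tsample_embedding = elmo_embedder.seqvec.embed_sentence(seq)\n\tsample_embedding = sample_embedding.mean(axis=0)\n\tseq = 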
sample_embedding\n\n\tfor i in range(0, seq_len, 1):\n\t\tprotein_pad[i + (shift)] = seq[i]\n\n\tseq_len = len(protein_pad)\n\tnot_epi_mask = np.ones(seq_len)\n\t# print(values)\n\tfor val in values:\n\t\tnot_epi_mask[val[0] + shift:val[1] + shift] = 0\n\n\t\tepitope = protein_pad[val[0] + shift:val[1] + shift]\n\n\t\tif val in protein_hits_dict_test_set.get(key, []):\n\t\t\tepitope_arr_test.append(epitope)\n\t\telse:\n\t\t\tepitope_arr.append(epitope)\n\n\tstart_bool = False\n\tstart = 0\n\tstop = False\n\tfor index, i in enumerate(not_epi_mask):\n\t\tif i == 1 and start_bool == False:\n\t\t\tstart = index\n\t\t\tstart_bool = True\n\t\telif i == 0 and start_bool == True:\n\t\t\tstop = index\n\t\t\tif stop - start > slicesize:\n\t\t\t\tnon_epitope = protein_pad[start:stop]\n\t\t\t\tnon_epitope_arr.append(non_epitope)\n\t\t\t\tstart_bool = False\n\t\t\t\tstop = False\n\t\telse:\n\t\t\tpass\n\tif start_bool == True:\n\t\tstop = index + 1\n\t\tif stop - start > slicesize:\n\t\t\tnon_epitope = protein_pad[start:stop]\n\t\t\t# non_epitope = "".join(non_epitope)\n\t\t\tnon_epitope_arr.append(non_epitope)\n\nnum_samples = []\nall_samples = []\nfor arr in [non_epitope_arr, epitope_arr]:\n\tcount_non_overlapping_windows_samples = [len(i) // slicesize for i in arr]\n\tnum_samples.append(sum(count_non_overlapping_windows_samples))\n\n# if the same test set as before is wanted\nmin_samples = 5 * len(epitope_arr_test)\n# classic\n# min_samples = min(num_samples)\nval_df = pd.DataFrame()\ntest_df = pd.DataFrame()\ntrain_df = pd.DataFrame()\n\nX_train = []\nX_val = []\nX_test = []\nY_train = []\nY_val = []\nY_test = []\n\n# generate different sets\nfor index, arr in enumerate([non_epitope_arr, epitope_arr]):\n\tdo_val = True\n\tdo_test = False\n\tsamples = 0\n\tselection = np.random.permutation(range(len(arr)))\n\n\ty = ["non_epitope", "true_epitope"][index]\n\n\tfor i in selection:\n\t\tlen_sample = len(arr[i])\n\t\tmax_shift = len_sample % slicesize\n\t\tstart_pos = np.random.random_integers(0, max_shift)\n\t\tif do_val:\n\t\t\tfor j in range(start_pos, len_sample - slicesize + 1, slicesize):\n\n\t\t\t\tX_val.append(arr[i][j:j + slicesize])\n\t\t\t\tY_val.append(y)\n\t\t\t\tsamples += 1\n\t\t\t\tif samples >= int(0.2 * min_samples):\n\t\t\t\t\tdo_test = True\n\t\t\t\t\tdo_val = False\n\t\t\t\t\tsamples = 0\n\t\t\t\t\tbreak\n\t\telif do_test:\n\t\t\tif y == "true_epitope":\n\t\t\t\tfor j in epitope_arr_test:\n\t\t\t\t\tX_test.append(j)\n\t\t\t\t\tY_test.append(y)\n\t\t\t\tdo_test = False\n\t\t\t\tdo_val = False\n\t\t\t\tsamples = 0\n\t\t\telse:\n\t\t\t\tfor j in range(start_pos, len_sample - slicesize, slicesize):\n\t\t\t\t\tX_test.append(arr[i][j:j + slicesize])\n\t\t\t\t\tY_test.append(y)\n\t\t\t\t\tsamples += 1\n\t\t\t\t\tif samples >= len(epitope_arr_test):\n\t\t\t\t\t\tdo_test = False\n\t\t\t\t\t\tdo_val = False\n\t\t\t\t\t\tsamples = 0\n\t\t\t\t\t\tbreak\n\t\telse:\n\t\t\tX_train.append(arr[i])\n\t\t\tY_train.append(y)\n\nfor i in np.unique(Y_train):\n\tdirectory2 = directory + f"/train/{i}"\n\tif not os.path.exists(directory2):\n\t\tos.makedirs(directory2)\n\nX_test = np.array(X_test)\nX_val = np.array(X_val)\nX_train = np.array(X_train)\n\nY_test = np.array(Y_test)\nY_val = np.array(Y_val)\nY_train = np.array(Y_train)\n\nfor index, sample in enumerate(Y_train):\n\twith open(directory + f"/train/{sample}/{index}.pkl", "wb") as outfile:\n\t\tpickle.dump(X_train[index], outfile)\n\nfor index, i in enumerate((X_test, X_val, X_train)):\n\tlen_i = i.shape[0]\n\tshuffle = 
np.random.permutation(range(len_i))\n\tif index == 0:\n\t\tpickle.dump(X_test[shuffle], open(directory + '/X_test.pkl', 'wb'))\n\t\tpd.DataFrame(Y_test[shuffle]).to_csv(directory + '/Y_test.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t index=None)\n\telif index == 1:\n\t\tpickle.dump(X_val[shuffle], open(directory + '/X_val.pkl', 'wb'))\n\t\tpd.DataFrame(Y_val[shuffle]).to_csv(directory + '/Y_val.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t index=None)\n\telif index == 2:\n\t\tpickle.dump(X_train[shuffle], open(directory + '/X_train.pkl', 'wb'))\n\t\tpd.DataFrame(Y_train[shuffle]).to_csv(directory + '/Y_train.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t index=None)\n" }, { "alpha_fraction": 0.6011083722114563, "alphanum_fraction": 0.6111612319946289, "avg_line_length": 32.73478317260742, "blob_id": "ef628e2dea15edf971eeda4bf48859cc8b24bc46", "content_id": "3bd4f03845740f1f9ff4a02b76a3d3dd9998f690", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7759, "license_type": "permissive", "max_line_length": 115, "num_lines": 230, "path": "/utils/parse_input.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "import os\nfrom builtins import enumerate\n\nimport pandas as pd\nimport numpy as np\nfrom utils import DataGenerator\nimport pickle\n\ncwd = \"/home/go96bix/projects/epitop_pred\"\ndirectory = os.path.join(cwd, \"data_generator\")\nclasses = 0\nnum_samples = []\nall_samples = []\nclassic = False\nembedding = False\nelmo_embedder = DataGenerator.Elmo_embedder()\nslicesize = 49\nuse_old_test_set = True\n\nif use_old_test_set:\n\ttest_df_old = pd.DataFrame.from_csv(\n\t\t\"/home/le86qiz/Documents/Konrad/general_epitope_analyses/bepipred_evaluation/deepipred_results/test_samples.csv\",\n\t\tsep=\",\", header=None, index_col=None)\n\ttest_df_old_y = test_df_old[2].values\n\ttest_df_old = test_df_old[1].values\nelse:\n\ttest_df_old = []\n\nfor root, dirs, files in os.walk(directory):\n\tfor file in files:\n\t\tif file.endswith(\".csv\"):\n\t\t\tclasses += 1\n\t\t\tdf_input = pd.DataFrame.from_csv(os.path.join(directory, file), header=None, index_col=False)\n\t\t\tdf_input[\"y\"] = file[:-4]\n\t\t\tdf_input = df_input.drop_duplicates(keep='first')\n\t\t\tmask_not_old_test_set = np.array([seq not in test_df_old for seq in df_input[0]])\n\t\t\tdf_input = df_input[mask_not_old_test_set]\n\t\t\tcount_non_overlapping_windows_samples = [len(i) // slicesize for i in df_input[0].values]\n\t\t\tnum_samples.append(sum(count_non_overlapping_windows_samples))\n\t\t\tall_samples.append(df_input)\n\nmin_samples = min(num_samples)\nval_df = pd.DataFrame()\ntest_df = pd.DataFrame()\ntrain_df = pd.DataFrame()\n\nif use_old_test_set:\n\tmin_samples_test_set = len(test_df_old) / 2\nelse:\n\tmin_samples_test_set = min_samples\n\nif classic:\n\tfor index, df_class in enumerate(all_samples):\n\t\t# validation set\n\t\tval_df_class = df_class.sample(n=int(0.2 * min_samples))\n\t\tval_df = val_df.append(val_df_class)\n\t\tdf_help = df_class.drop(val_df_class.index)\n\n\t\ttest_df_class = df_help.sample(n=int(0.2 * min_samples))\n\t\ttest_df = test_df.append(test_df_class)\n\t\tdf_help = df_help.drop(test_df_class.index)\n\t\t# train set\n\t\ttrain_df = train_df.append(df_help.sample(frac=1))\n\n\tfor i in train_df['y'].unique():\n\t\tdirectory2 = directory + f\"/train/{i}\"\n\t\tif not os.path.exists(directory2):\n\t\t\tos.makedirs(directory2)\n\n\tfor index, sample in 
train_df.iterrows():\n\t\tdirectory2 = directory + f\"/train/{sample['y']}/{index}.csv\"\n\t\tf = open(directory2, 'w')\n\t\tf.write(f\"{sample.name}\\t{sample[0]}\")\n\t\tf.close()\n\n\t# shuffle\n\ttest_df = test_df.sample(frac=1)\n\tX_test = test_df[0]\n\tY_test = test_df[\"y\"]\n\tX_test.to_csv(directory + '/X_test.csv', sep='\\t', encoding='utf-8')\n\tY_test.to_csv(directory + '/Y_test.csv', sep='\\t', encoding='utf-8')\n\n\ttrain_df = train_df.sample(frac=1)\n\tX_train = train_df[0]\n\tY_train = train_df[\"y\"]\n\tX_train.to_csv(directory + '/X_train.csv', sep='\\t', encoding='utf-8')\n\tY_train.to_csv(directory + '/Y_train.csv', sep='\\t', encoding='utf-8')\n\n\tval_df = val_df.sample(frac=1)\n\tX_val = val_df[0]\n\tY_val = val_df[\"y\"]\n\tX_val.to_csv(directory + '/X_val.csv', sep='\\t', encoding='utf-8')\n\tY_val.to_csv(directory + '/Y_val.csv', sep='\\t', encoding='utf-8')\n\nelse:\n\tX_train = []\n\tX_val = []\n\tX_test = []\n\tY_train = []\n\tY_val = []\n\tY_test = []\n\n\tfor index, df_class in enumerate(all_samples):\n\t\tdo_val = True\n\t\tdo_test = False\n\t\tsamples = 0\n\t\tselection = np.random.permutation(range(len(df_class.index)))\n\t\tnp_class_0 = df_class[0].values\n\t\tnp_class_y = df_class['y'].values\n\t\tfor i in selection:\n\t\t\tlen_sample = len(np_class_0[i])\n\t\t\tif embedding:\n\t\t\t\tsample_embedding = elmo_embedder.seqvec.embed_sentence(np_class_0[i])\n\t\t\t\tsample_embedding = sample_embedding.mean(axis=0)\n\n\t\t\tmax_shift = len_sample % slicesize\n\t\t\t# start from random position and generate non overlapping windows\n\t\t\tstart_pos = np.random.random_integers(0, max_shift)\n\t\t\tif do_val:\n\t\t\t\tfor j in range(start_pos, len_sample - slicesize + 1, slicesize):\n\t\t\t\t\tif embedding:\n\t\t\t\t\t\tX_val.append(sample_embedding[j:j + slicesize])\n\t\t\t\t\telse:\n\t\t\t\t\t\tX_val.append(np_class_0[i][j:j + slicesize])\n\t\t\t\t\tY_val.append(np_class_y[i])\n\t\t\t\t\tsamples += 1\n\n\t\t\t\t\tif samples >= int(0.2 * min_samples):\n\t\t\t\t\t\tdo_test = True\n\t\t\t\t\t\tdo_val = False\n\t\t\t\t\t\tsamples = 0\n\t\t\t\t\t\tbreak\n\t\t\telif do_test:\n\t\t\t\tfor j in range(start_pos, len_sample - slicesize, slicesize):\n\t\t\t\t\tif embedding:\n\t\t\t\t\t\tX_test.append(sample_embedding[j:j + slicesize])\n\t\t\t\t\telse:\n\t\t\t\t\t\tif use_old_test_set:\n\t\t\t\t\t\t\tif len(X_test) < min_samples_test_set:\n\t\t\t\t\t\t\t\tfor index, j in enumerate(test_df_old):\n\t\t\t\t\t\t\t\t\tX_test.append(j[0:slicesize])\n\t\t\t\t\t\t\t\t\tclass_i = test_df_old_y[index]\n\t\t\t\t\t\t\t\t\tif class_i == 'true_epitopes':\n\t\t\t\t\t\t\t\t\t\tclass_i = 'true_epitope'\n\t\t\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\t\t\tclass_i = 'non_epitope'\n\t\t\t\t\t\t\t\t\tY_test.append(class_i)\n\t\t\t\t\t\t\tdo_test = False\n\t\t\t\t\t\t\tdo_val = False\n\t\t\t\t\t\t\tsamples = 0\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tX_test.append(np_class_0[i][j:j + slicesize])\n\t\t\t\t\t\t\tY_test.append(np_class_y[i])\n\t\t\t\t\t\t\tsamples += 1\n\n\t\t\t\t\tif not use_old_test_set:\n\t\t\t\t\t\tif samples >= int(min_samples_test_set):\n\t\t\t\t\t\t\tdo_test = False\n\t\t\t\t\t\t\tdo_val = False\n\t\t\t\t\t\t\tsamples = 0\n\t\t\t\t\t\t\tbreak\n\t\t\telse:\n\t\t\t\tif embedding:\n\t\t\t\t\tX_train.append(sample_embedding)\n\t\t\t\telse:\n\t\t\t\t\tX_train.append(np_class_0[i])\n\t\t\t\tY_train.append(np_class_y[i])\n\n\tfor i in np.unique(Y_train):\n\t\tdirectory2 = directory + f\"/train/{i}\"\n\t\tif not 
os.path.exists(directory2):\n\t\t\tos.makedirs(directory2)\n\n\tX_test = np.array(X_test)\n\tX_val = np.array(X_val)\n\tX_train = np.array(X_train)\n\n\tY_test = np.array(Y_test)\n\tY_val = np.array(Y_val)\n\tY_train = np.array(Y_train)\n\n\tif embedding:\n\t\tfor index, sample in enumerate(Y_train):\n\t\t\twith open(directory + f\"/train/{sample}/{index}.pkl\", \"wb\") as outfile:\n\t\t\t\tpickle.dump(X_train[index], outfile)\n\n\t\tfor index, i in enumerate((X_test, X_val, X_train)):\n\t\t\tlen_i = i.shape[0]\n\t\t\tshuffle = np.random.permutation(range(len_i))\n\t\t\tif index == 0:\n\t\t\t\tpickle.dump(X_test[shuffle], open(directory + '/X_test.pkl', 'wb'))\n\t\t\t\tpd.DataFrame(Y_test[shuffle]).to_csv(directory + '/Y_test.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t\t\t index=None)\n\t\t\telif index == 1:\n\t\t\t\tpickle.dump(X_val[shuffle], open(directory + '/X_val.pkl', 'wb'))\n\t\t\t\tpd.DataFrame(Y_val[shuffle]).to_csv(directory + '/Y_val.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t\t\t index=None)\n\t\t\telif index == 2:\n\t\t\t\tpickle.dump(X_train[shuffle], open(directory + '/X_train.pkl', 'wb'))\n\t\t\t\tpd.DataFrame(Y_train[shuffle]).to_csv(directory + '/Y_train.csv', sep='\\t', encoding='utf-8',\n\t\t\t\t header=None, index=None)\n\n\telse:\n\t\tfor index, sample in enumerate(Y_train):\n\t\t\tdirectory2 = directory + f\"/train/{sample}/{index}.csv\"\n\t\t\tf = open(directory2, 'w')\n\t\t\tf.write(f\"{index}\\t{X_train[index]}\")\n\t\t\tf.close()\n\n\t\tfor index, i in enumerate((X_test, X_val, X_train)):\n\t\t\tlen_i = i.shape[0]\n\t\t\tshuffle = np.random.permutation(range(len_i))\n\t\t\tif index == 0:\n\t\t\t\tpd.DataFrame(X_test[shuffle]).to_csv(directory + '/X_test.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t\t\t index=None)\n\t\t\t\tpd.DataFrame(Y_test[shuffle]).to_csv(directory + '/Y_test.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t\t\t index=None)\n\n\t\t\tif index == 1:\n\t\t\t\tpd.DataFrame(X_val[shuffle]).to_csv(directory + '/X_val.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t\t\t index=None)\n\t\t\t\tpd.DataFrame(Y_val[shuffle]).to_csv(directory + '/Y_val.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t\t\t index=None)\n\n\t\t\tif index == 2:\n\t\t\t\tpd.DataFrame(X_train[shuffle]).to_csv(directory + '/X_train.csv', sep='\\t', encoding='utf-8',\n\t\t\t\t header=None, index=None)\n\t\t\t\tpd.DataFrame(Y_train[shuffle]).to_csv(directory + '/Y_train.csv', sep='\\t', encoding='utf-8',\n\t\t\t\t header=None, index=None)\n" }, { "alpha_fraction": 0.6375032663345337, "alphanum_fraction": 0.6506921052932739, "avg_line_length": 31.041841506958008, "blob_id": "df06cb839eaede9cc54b1174a44c3f15bc189511", "content_id": "433666d372d769f7dbf623d50a4ec2cfc73d35ec", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7658, "license_type": "permissive", "max_line_length": 137, "num_lines": 239, "path": "/utils/generate_non_binary_training_sets.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "import os\nimport pandas as pd\nimport re\nimport numpy as np\nimport sys\n\nsys.path.insert(0, '/home/go96bix/projects/epitop_pred/')\nfrom utils import DataGenerator\nimport pickle\n\nimport glob\n\n\ndef readFasta_extended(file):\n\t## read fasta file\n\theader = \"\"\n\tseq = \"\"\n\tvalues = []\n\twith open(file, \"r\") as infa:\n\t\tfor index, line in enumerate(infa):\n\t\t\tline = line.strip()\n\t\t\tif index == 0:\n\t\t\t\theader = 
line[1:].split("\\t")\n\t\t\telif index == 1:\n\t\t\t\tseq += line\n\t\t\telif index == 2:\n\t\t\t\tpass\n\t\t\telse:\n\t\t\t\tvalues = line.split("\\t")\n\treturn header, seq, values\n\n\n# SETTINGS\nslicesize = 49\nshift = 24\nglobal_embedding_bool = True\n# circular filling == if a window is too short (the frame reaches over the start or end of the protein), fill it with AAs from the other end of the protein\nuse_circular_filling = False\nbig_set = True\n\ncwd = "/home/go96bix/projects/epitop_pred"\ndirectory = os.path.join(cwd, "data_generator_bepipred_NON_binary_0.5_seqID")\nif not os.path.isdir(directory):\n\tos.makedirs(directory)\n\nelmo_embedder = DataGenerator.Elmo_embedder()\n\nepitope_arr_local = []\nepitope_arr_global = []\n\nprotein_arr = []\n\nfor file in glob.glob("/home/go96bix/projects/raw_data/bepipred_proteins_with_marking_0.5_seqID/*.fasta"):\n\theader, seq_local, values = readFasta_extended(file)\n\n\tseq_local = seq_local.lower()\n\tseq_len = len(seq_local)\n\tif seq_len < 25:\n\t\tcontinue\n\n\tvalues = ["-"] * shift + values + ["-"] * shift\n\tif use_circular_filling:\n\t\tprotein_pad_local = list(seq_local[-shift:] + seq_local + seq_local[0:shift])\n\telse:\n\t\tprotein_pad_local = ["-"] * (seq_len + (shift * 2))\n\n\tif global_embedding_bool:\n\t\tif big_set:\n\t\t\tfile_name = header[0].split("_")\n\t\t\tassert len(file_name) == 4, f"filename of unexpected form, expected epi_1234_100_123 but got {header[0]}"\n\t\t\tfile_name = file_name[0] + "_" + file_name[1]\n\t\t\tseq_global_tuple = pickle.load(\n\t\t\t\topen(os.path.join("/home/go96bix/projects/raw_data/embeddings_bepipred_samples",\n\t\t\t\t                  file_name + ".pkl"), "rb"))\n\t\t\tseq_global = seq_global_tuple[1]\n\n\t\telse:\n\t\t\tprint(seq_local)\n\t\t\tsample_embedding = elmo_embedder.seqvec.embed_sentence(seq_local)\n\t\t\t# sample_embedding = sample_embedding.mean(axis=0)\n\t\t\tseq_global = sample_embedding\n\n\t\tprotein_pad_global = np.zeros((seq_len + (shift * 2), 1024), dtype=np.float32)\n\t\tif use_circular_filling:\n\t\t\tprotein_pad_global[0:shift] = seq_global[-shift:]\n\t\t\tprotein_pad_global[-shift:] = seq_global[0:shift]\n\n\tfor i in range(0, seq_len, 1):\n\t\tprotein_pad_local[i + (shift)] = seq_local[i]\n\n\t\tif global_embedding_bool:\n\t\t\tprotein_pad_global[i + (shift)] = seq_global[i]\n\n\tepitope = "".join(protein_pad_local)\n\tepitope_arr_local.append([epitope, values, header, file])\n\n\tif global_embedding_bool:\n\t\tepitope_arr_global.append([protein_pad_global, values, header, file])\n\nnum_samples = []\n\nfor arr in epitope_arr_local:\n\t# not yet sure to what extent this is needed\n\tpossible_samples = sum(np.array(arr[1]) != "-")\n\tnum_samples.append(possible_samples)\nmax_samples = sum(num_samples)\n\nprint(f"all predictable positions are {max_samples}")\n# min_samples = len(epitope_arr_local)\n\nval_df = pd.DataFrame()\ntest_df = pd.DataFrame()\ntrain_df = pd.DataFrame()\n\nX_train_global = []\nX_train_local = []\nX_val_global = []\nX_val_local = []\nX_test_global = []\nX_test_local = []\nY_train = []\nY_val = []\nY_test = []\n\nshuffle = np.random.permutation(range(len(epitope_arr_local)))\nepitope_arr_local = np.array(epitope_arr_local)[shuffle]\nif global_embedding_bool:\n\tepitope_arr_global = np.array(epitope_arr_global)[shuffle]\n\n# make sure the number of samples per class in the val and test set is an even number\nmin_samples = (max_samples // 10) * 2\nsamples = 0\ntest_roc = []\ndo_val = True\ndo_test = False\n
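# Split logic (sketch): 'samples' grows in steps of 10 per drawn position, so with\n# min_samples = 2 * (max_samples // 10) the validation and the test set each stop after\n# roughly 2% of all predictable positions, and at most ~10% of a single protein's\n# positions are drawn; every protein seen afterwards goes into the training set whole.\nfor index, arr in 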
enumerate(epitope_arr_local):\n\n\tpossible_postions = np.where(np.array(arr[1]) != \"-\")[0]\n\tselection = np.random.permutation(possible_postions)\n\tif do_test or do_val:\n\t\tfor index_selection, i in enumerate(selection):\n\t\t\tif index_selection > len(possible_postions) // 10:\n\t\t\t\tbreak\n\t\t\tstart = i - slicesize // 2\n\t\t\tstop = start + slicesize\n\t\t\tassert start >= 0, f\"error calculating start position: start {start}, {header[0]}\"\n\n\t\t\t# len_sample = len(arr[0])\n\t\t\t# max_shift = len_sample % slicesize\n\t\t\tif do_val:\n\t\t\t\tX_val_local.append(epitope_arr_local[index][0][start:stop])\n\t\t\t\tif global_embedding_bool:\n\t\t\t\t\tX_val_global.append(epitope_arr_global[index][0][start:stop])\n\t\t\t\tY_val.append(epitope_arr_local[index][1][i])\n\t\t\t\tsamples += 10\n\n\t\t\t\tif samples >= int(min_samples):\n\t\t\t\t\tdo_test = True\n\t\t\t\t\tdo_val = False\n\t\t\t\t\tsamples = 0\n\t\t\t\t\tbreak\n\t\t\telif do_test:\n\t\t\t\tif index_selection == 0:\n\t\t\t\t\ttest_roc.append(arr[3])\n\t\t\t\tX_test_local.append(epitope_arr_local[index][0][start:stop])\n\t\t\t\t# X_test.append(arr[i][j:j + slicesize])\n\t\t\t\tif global_embedding_bool:\n\t\t\t\t\tX_test_global.append(epitope_arr_global[index][0][start:stop])\n\n\t\t\t\tY_test.append(epitope_arr_local[index][1][i])\n\t\t\t\tsamples += 10\n\t\t\t\tif samples >= int(min_samples):\n\t\t\t\t\tdo_test = False\n\t\t\t\t\tdo_val = False\n\t\t\t\t\tsamples = 0\n\t\t\t\t\tbreak\n\telse:\n\t\tY_str = \"\\t\".join(epitope_arr_local[index][1])\n\t\tX_train_local.append([epitope_arr_local[index][0], Y_str])\n\t\tif global_embedding_bool:\n\t\t\tX_train_global.append([epitope_arr_global[index][0], Y_str])\n\n\t\tY_train.append(epitope_arr_local[index][1])\n\ndirectory2 = directory + f\"/train/all/\"\nif not os.path.exists(directory2):\n\tos.makedirs(directory2)\n# test_roc = np.array(test_roc)\nX_test_local = np.array(X_test_local)\nX_test_global = np.array(X_test_global)\nX_val_local = np.array(X_val_local)\nX_val_global = np.array(X_val_global)\nX_train_local = np.array(X_train_local)\nX_train_global = np.array(X_train_global)\n\nY_test = np.array(Y_test)\nY_val = np.array(Y_val)\nY_train = np.array(Y_train)\n\nwith open(directory + '/samples_for_ROC.csv', \"w\")as outfile:\n\tfor sample in test_roc:\n\t\toutfile.write(f\"{sample}\\n\")\n\nfor index, sample in enumerate(Y_train):\n\tf = open(os.path.join(directory2, f\"{index}.csv\"), 'w')\n\tseq = '\\t'.join(X_train_local[index][0])\n\tvalues = X_train_local[index][1]\n\tf.write(f\"{seq}\\n{values}\")\n\n\tif global_embedding_bool:\n\t\twith open(os.path.join(directory2, f\"{index}.pkl\"), \"wb\") as outfile:\n\t\t\tpickle.dump(X_train_global[index], outfile)\n\nfor index, i in enumerate((X_test_local, X_val_local, X_train_local)):\n\tlen_i = i.shape[0]\n\tshuffle = np.random.permutation(range(len_i))\n\tif index == 0:\n\t\tif global_embedding_bool:\n\t\t\tpickle.dump(X_test_global[shuffle], open(directory + '/X_test.pkl', 'wb'))\n\t\tpd.DataFrame(X_test_local[shuffle]).to_csv(directory + '/X_test.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t index=None)\n\t\tpd.DataFrame(Y_test[shuffle]).to_csv(directory + '/Y_test.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t index=None)\n\n\telif index == 1:\n\t\tif global_embedding_bool:\n\t\t\tpickle.dump(X_val_global[shuffle], open(directory + '/X_val.pkl', 'wb'))\n\t\tpd.DataFrame(X_val_local[shuffle]).to_csv(directory + '/X_val.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t 
index=None)\n\t\tpd.DataFrame(Y_val[shuffle]).to_csv(directory + '/Y_val.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t index=None)\n\n\telif index == 2:\n\t\tif global_embedding_bool:\n\t\t\tpickle.dump(X_train_global[shuffle], open(directory + '/X_train.pkl', 'wb'))\n\t\tpd.DataFrame(X_train_local[shuffle]).to_csv(directory + '/X_train.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t index=None)\n\t\tpd.DataFrame(Y_train[shuffle]).to_csv(directory + '/Y_train.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t index=None)\n" }, { "alpha_fraction": 0.631512463092804, "alphanum_fraction": 0.6488790512084961, "avg_line_length": 33.994476318359375, "blob_id": "e2c0f8a0bf224fa5cdf354bd8778f3daa8e985b8", "content_id": "0049b00c8b94a5d4a25b574cbf808a09a19dcebb", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 19002, "license_type": "permissive", "max_line_length": 135, "num_lines": 543, "path": "/utils/filter_NonDominat_prot.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "import matplotlib.pyplot as plt\nimport numpy as np\nimport os\nimport pandas as pd\nfrom sklearn import metrics\n\n\ndef get_testproteinIDS(testfiletable):\n\t\"\"\"\n\tget all training proteins\n\t:param testfiletable:\n\t:return:\n\t\"\"\"\n\ttestproteinIDs = []\n\twith open(testfiletable) as infile:\n\t\tfor line in infile:\n\t\t\t# file = line.strip().rsplit('/', 1)[1]\n\t\t\t# testproteinIDs.append(file[:-6])\n\n\t\t\tif line.startswith(\"/\"):\n\t\t\t\tfile = line.strip().rsplit('/', 1)[-1]\n\t\t\t\ttestproteinIDs.append(file[:-6])\n\t\t\telif line.startswith(\"Cluster\"):\n\t\t\t\tfiles = cluster_dict[line.strip()]\n\t\t\t\tfor file in files:\n\t\t\t\t\tfile = file.strip().rsplit('/', 1)[-1]\n\t\t\t\t\ttestproteinIDs.append(file[:-6])\n\treturn testproteinIDs\n\n\ndef bepipred_samples():\n\t\"\"\"\n\twhich samples are in the training set\n\t:return:\n\t\"\"\"\n\tsamples = set()\n\twith open(\"iedb_linear_epitopes.fasta\", \"r\") as bepi_samples:\n\t\tfor line in bepi_samples:\n\t\t\tif line.startswith(\">\"):\n\t\t\t\tsamples.add(line[1:].strip())\n\treturn samples\n\n\ndominant = []\nnon_dominant = []\ntestfiletable = '/home/go96bix/projects/raw_data/08_allprotein.csv'\ntestproteinIDs = get_testproteinIDS(testfiletable)\n\nepiID_to_testid = {}\n\nfor testid in testproteinIDs:\n\t\"\"\"\n\thow many potentail non dom proteins are in the training data\n\t\"\"\"\n\tfile = f'/home/le86qiz/Documents/Konrad/tool_comparison/comparison3/bepipred_proteins_with_marking/{testid}.fasta'\n\tdominant_bool = False\n\twith open(file) as infile:\n\t\tfor line in infile:\n\t\t\tline = line[1:].strip().split()\n\t\t\tfor epiID in line:\n\n\t\t\t\tepiID = epiID.split('_')\n\t\t\t\tepiID_to_testid.update({f\"{epiID[0]}_{epiID[1]}\": testid})\n\t\t\t\tflag = epiID[0]\n\t\t\t\t# start = int(epiID[2])\n\t\t\t\t# stop = int(epiID[3])\n\t\t\t\tif flag == \"PositiveID\":\n\t\t\t\t\tdominant_bool = True\n\t\t\t\t\tbreak\n\t\t\tbreak\n\tif dominant_bool:\n\t\tdominant.append(testid)\n\telse:\n\t\tnon_dominant.append(testid)\n\nprint(\"dominant: \", len(dominant))\nprint(\"non_dominant: \", len(non_dominant))\n\nbepi_sampels = bepipred_samples()\n\nprot_dict = {}\n\nwith open(\"iedb_linear_epitopes_27_11_2019.fasta\", \"r\") as fasta:\n\t\"\"\"\n\tget all validated regions per protein in the eval set\n\t\"\"\"\n\t# with open(\"/home/go96bix/projects/raw_data/validation_samples_nov_2019_08_seqID.fasta\", \"r\") as fasta:\n\tfor line 
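get_testproteinIDS above accepts two kinds of lines: absolute fasta paths and "Cluster ..." keys that are expanded through cluster_dict into their member files. A minimal sketch of that expansion with invented inputs follows; note that filter_NonDominat_prot.py uses cluster_dict inside get_testproteinIDS without defining it in this file (a "Cluster" line would raise a NameError here), while make_ROC_curves.py further down builds it via cluster_to_dict from a CD-HIT .clstr file.

# Sketch: expand a mixed list of fasta paths and CD-HIT cluster keys
# into bare protein IDs. cluster_dict maps "Cluster N" -> member paths.
cluster_dict = {"Cluster 0": ["/data/epi_1_P12345_1.fasta",
                              "/data/epi_2_P67890_3.fasta"]}

def expand_ids(lines, cluster_dict):
	ids = []
	for line in lines:
		if line.startswith("/"):                      # a single fasta path
			ids.append(line.rsplit("/", 1)[-1][:-6])  # strip dir and ".fasta"
		elif line.startswith("Cluster"):              # a whole cluster
			for f in cluster_dict[line]:
				ids.append(f.rsplit("/", 1)[-1][:-6])
	return ids

print(expand_ids(["Cluster 0", "/data/epi_9_Q11111_2.fasta"], cluster_dict))
# -> ['epi_1_P12345_1', 'epi_2_P67890_3', 'epi_9_Q11111_2']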
in fasta:\n\t\tif line.startswith(\">\"):\n\t\t\t# put_in_dict = True\n\n\t\t\tline = line[1:].split(\"|\")\n\t\t\ttestid = line[0]\n\n\t\t\tstart_stop = line[-2].split('_')\n\n\t\t\tepiID = line[-1].strip()\n\n\t\t\t# if already in bepipred samples than dont add to validation set\n\t\t\t# if epiID in bepi_sampels:\n\t\t\t# put_in_dict = False\n\t\t\t# continue\n\t\t\tids = prot_dict.get(testid, [])\n\t\t\tids.append(epiID)\n\t\t\tprot_dict.update({testid: ids})\n\t# epiID = epiID.split('_')\n\t# flag = epiID[0]\n\ndominant = []\nnon_dominant = []\n\nfor testid, values in prot_dict.items():\n\tdominant_bool = False\n\t# print(epiID_to_testid)\n\ttestid_training = epiID_to_testid.get(values[0], None)\n\t# print(values[0])\n\t# print(testid_training)\n\t# exit()\n\tif testid_training != None:\n\t\ttestid = f\"/home/go96bix/projects/raw_data/binary_both_embeddings_0.8_0.5_seqID_benchmark/results/epidope/{testid_training}.csv\"\n\telse:\n\t\tpath = f\"/home/go96bix/projects/raw_data/validation_11_2019/results/epidope/{testid}.csv\"\n\t\tif os.path.isfile(path):\n\t\t\ttestid = path\n\t\telse:\n\t\t\tcontinue\n\tfor epiID in values:\n\t\t# if epiID in bepi_sampels:\n\t\t# put_in_dict = False\n\t\t# continue\n\t\tepiID = epiID.split('_')\n\t\tflag = epiID[0]\n\t\tif flag == \"PositiveID\":\n\t\t\tdominant_bool = True\n\t\t\tbreak\n\tif dominant_bool:\n\t\tdominant.append(testid)\n\telif len(values) < 10:\n\t\tpass\n\telse:\n\t\tnon_dominant.append(testid)\n\nprint(\"dominant: \", len(dominant))\nprint(\"non_dominant: \", len(non_dominant))\n# print(non_dominant)\n\n# teil der proteine ist unter anderen namen getestet wurden im trainingset\n# teil wurde gar nicht getestet da sie in die Cluster gefallen sind\n# teil ist von eval set\n\n\nfpr = {}\ntpr = {}\nroc_auc = {}\nthresholds = {}\nthresh = 1\n\nscores = {}\nmin_scores = []\nmax_scores = []\nmedian_scores = []\nquantile_scores = []\n\nepidope_flag = []\n\nfor i in non_dominant:\n\tdf = pd.read_csv(i, sep='\\t')\n\n\tscore = np.array(df['Deepipred']).mean()\n\tscores['mean'] = np.append(scores.get('mean', np.array([])), score)\n\n\tscore = np.array(df['Deepipred']).min()\n\tscores['min'] = np.append(scores.get('min', np.array([])), score)\n\n\tscore = np.array(df['Deepipred']).max()\n\tscores['max'] = np.append(scores.get('max', np.array([])), score)\n\n\tscore = np.median(np.array(df['Deepipred']))\n\tscores['median'] = np.append(scores.get('median', np.array([])), score)\n\n\tscore = np.quantile(np.array(df['Deepipred']), 0.8)\n\tscores['quantile_high'] = np.append(scores.get('quantile_high', np.array([])), score)\n\n\tscore = np.quantile(np.array(df['Deepipred']), 0.2)\n\tscores['quantile_low'] = np.append(scores.get('quantile_low', np.array([])), score)\n\n\tepidope_flag.append(0)\n\nfor i in dominant:\n\tdf = pd.read_csv(i, sep='\\t')\n\n\tscore = np.array(df['Deepipred']).mean()\n\tscores['mean'] = np.append(scores.get('mean', np.array([])), score)\n\n\tscore = np.array(df['Deepipred']).min()\n\tscores['min'] = np.append(scores.get('min', np.array([])), score)\n\n\tscore = np.array(df['Deepipred']).max()\n\tscores['max'] = np.append(scores.get('max', np.array([])), score)\n\n\tscore = np.median(np.array(df['Deepipred']))\n\tscores['median'] = np.append(scores.get('median', np.array([])), score)\n\n\tscore = np.quantile(np.array(df['Deepipred']), 0.8)\n\tscores['quantile_high'] = np.append(scores.get('quantile_high', np.array([])), score)\n\n\tscore = np.quantile(np.array(df['Deepipred']), 0.2)\n\tscores['quantile_low'] = 
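Each per-protein result table is collapsed into six summary statistics over its 'Deepipred' column. The two near-identical copies of that block (non-dominant and dominant proteins) could share one helper like the sketch below, which also documents what ends up in scores:

import numpy as np

def summarize(values):
	"""Per-protein statistics, as collected into `scores` above."""
	v = np.asarray(values, dtype=float)
	return {"mean": v.mean(), "min": v.min(), "max": v.max(),
	        "median": np.median(v),
	        "quantile_high": np.quantile(v, 0.8),   # 80th percentile
	        "quantile_low": np.quantile(v, 0.2)}    # 20th percentile

print(summarize([0.1, 0.4, 0.8, 0.9]))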
np.append(scores.get('quantile_low', np.array([])), score)\n\n\tepidope_flag.append(1)\n\nepidope_flag = np.array(epidope_flag)\n\"\"\"\ncalc ROC\n\"\"\"\nfor key in ['mean', 'min', 'max', 'median', 'quantile_high', 'quantile_low']:\n\tfpr[key], tpr[key], thresholds[key] = metrics.roc_curve(epidope_flag, scores[key], pos_label=1)\n\troc_auc[key] = metrics.roc_auc_score(epidope_flag, scores[key], max_fpr=thresh)\n\n\"\"\"\nplotting\n\"\"\"\noutdir = \".\"\n\nplt.figure(figsize=(6, 6))\nlw = 2\nmaxtpr = 0\nfor x in tpr:\n\tmaxtpr = max(maxtpr, max(tpr[x]))\n\nmean_roc, = plt.plot(fpr['mean'], tpr['mean'], color='green', lw=lw,\n\t\t\t\t\t label='mean (AUC = %0.4f)' % roc_auc['mean'])\nmin_roc, = plt.plot(fpr['min'], tpr['min'], color='lightcoral', linestyle='-.', lw=lw,\n\t\t\t\t\tlabel='min (AUC = %0.4f)' % roc_auc['min'])\nmax_roc, = plt.plot(fpr['max'], tpr['max'], color='goldenrod', lw=lw, label='max (AUC = %0.4f)' % roc_auc['max'])\nmedian_roc, = plt.plot(fpr['median'], tpr['median'], color='grey', linestyle=':', lw=lw,\n\t\t\t\t\t label='median (AUC = %0.4f)' % roc_auc['median'])\nquantile_high_roc, = plt.plot(fpr['quantile_high'], tpr['quantile_high'], color='peru', lw=lw, linestyle='--',\n\t\t\t\t\t\t label='quantile 0.8 (AUC = %0.4f)' % roc_auc['quantile_high'])\nquantile_low_roc, = plt.plot(fpr['quantile_low'], tpr['quantile_low'], color='teal', lw=lw, linestyle='--',\n\t\t\t\t\t\t label='quantile 0.2 (AUC = %0.4f)' % roc_auc['quantile_low'])\n\nrandom_roc, = plt.plot([0, thresh], [0, thresh], color='navy', lw=lw, linestyle='--', label='random (AUC = 0.50)')\n\nplt.xlim([0.0, thresh])\nplt.ylim([0.0, 1.0 * maxtpr])\nplt.xlabel('False Positive Rate')\nplt.ylabel('True Positive Rate')\nplt.title('Receiver operating characteristic for imuno dominance')\nplt.legend()\nplt.legend(\n\thandles=[mean_roc, min_roc, max_roc, median_roc, quantile_high_roc, quantile_low_roc, random_roc],\n\tloc=\"lower right\", prop={'size': 9})\nplt.savefig(os.path.join(outdir, f\"ROC_prediction_imunodominance_{thresh}.pdf\"), bbox_inches=\"tight\", pad_inches=0)\nplt.show()\nplt.close()\n\n\"\"\"\ncalc precision recall\n\"\"\"\nprecision = {}\nrecall = {}\nthresholds = {}\nfor key in ['mean', 'min', 'max', 'median', 'quantile_high', 'quantile_low']:\n\tprecision[key], recall[key], thresholds[key] = metrics.precision_recall_curve(epidope_flag, scores[key], pos_label=1)\n\troc_auc[key] = metrics.auc(recall[key], precision[key])\n\n\"\"\"\nplotting\n\"\"\"\noutdir = \".\"\n\nplt.figure(figsize=(6, 6))\nlw = 2\nmaxtpr = 0\nfor x in tpr:\n\tmaxtpr = max(maxtpr, max(tpr[x]))\n\nmean_roc, = plt.plot(recall['mean'], precision['mean'], color='green', lw=lw, label='mean (AUC = %0.4f)' % roc_auc['mean'])\nmin_roc, = plt.plot(recall['min'], precision['min'], color='lightcoral', linestyle='-.', lw=lw,\n\t\t\t\t\tlabel='min (AUC = %0.4f)' % roc_auc['min'])\nmax_roc, = plt.plot(recall['max'], precision['max'], color='goldenrod', lw=lw, label='max (AUC = %0.4f)' % roc_auc['max'])\nmedian_roc, = plt.plot(recall['median'], precision['median'], color='grey', linestyle=':', lw=lw,\n\t\t\t\t\t label='median (AUC = %0.4f)' % roc_auc['median'])\nquantile_high_roc, = plt.plot(recall['quantile_high'], precision['quantile_high'], color='peru', lw=lw, linestyle='--',\n\t\t\t\t\t\t label='quantile 0.8 (AUC = %0.4f)' % roc_auc['quantile_high'])\nquantile_low_roc, = plt.plot(recall['quantile_low'], precision['quantile_low'], color='teal', lw=lw, linestyle='--',\n\t\t\t\t\t\t label='quantile 0.2 (AUC = %0.4f)' % 
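The "calc ROC" loop above reduces to one scikit-learn call pattern per summary statistic. Isolated with toy labels (note that with thresh = 1, roc_auc_score(..., max_fpr=1) is simply the ordinary full AUC):

import numpy as np
from sklearn import metrics

flags = np.array([0, 0, 1, 1])             # 1 = immunodominant protein
scores = np.array([0.2, 0.4, 0.35, 0.8])   # e.g. the per-protein mean score

fpr, tpr, thresholds = metrics.roc_curve(flags, scores, pos_label=1)
print(metrics.roc_auc_score(flags, scores))  # 0.75 on this toy input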
roc_auc['quantile_low'])\n\nratio_true_false = epidope_flag.sum() / len(epidope_flag)\nrandom_pr, = plt.plot([0, 1], [ratio_true_false, ratio_true_false], color='navy', linestyle='--',\n\t\t\t\t\t label='random (AUC = %0.4f)' % ratio_true_false)\nplt.xlim([0.0, thresh])\nplt.ylim([0.97, 1.0 * maxtpr])\nplt.xlabel('False Positive Rate')\nplt.ylabel('True Positive Rate')\nplt.title('Precision-Recall Curve for imuno dominance')\nplt.legend()\nplt.legend(handles=[mean_roc, min_roc, max_roc, median_roc, quantile_high_roc, quantile_low_roc, random_pr], loc=\"lower right\",\n\t\t prop={'size': 9})\nplt.savefig(os.path.join(outdir, f\"precision_recall_imunodominance_{thresh}.pdf\"), bbox_inches=\"tight\", pad_inches=0)\nplt.show()\nplt.close()\n\nNr = 3\nNc = 2\n\nleft = 0.125 # the left side of the subplots of the figure\nright = 0.9 # the right side of the subplots of the figure\nbottom = 0.1 # the bottom of the subplots of the figure\ntop = 0.9 # the top of the subplots of the figure\nwspace = 0.2 # the amount of width reserved for space between subplots,\n # expressed as a fraction of the average axis width\nhspace = 0.2 # the amount of height reserved for space between subplots,\n # expressed as a fraction of the average axis height\n\n\nfig, axs = plt.subplots(Nr, Nc, sharex=True, sharey=True)\nplt.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=None, hspace=0.4)\nfig.suptitle('score distribution')\nk = 0\nfor i in range(Nr):\n\tfor j in range(Nc):\n\t\tkey = ['mean', 'median', 'max', 'min', 'quantile_high', 'quantile_low'][k]\n\t\tyPred_0 = scores[key][epidope_flag == 0]\n\t\tyPred_1 = scores[key][epidope_flag == 1]\n\t\tyPred_total = [yPred_0, yPred_1]\n\n\t\tfoo = axs[i,j].hist(yPred_total, bins=20, range=(0, 1), stacked=False, label=['no Epitope', 'true Epitope'], density=True)\n\t\taxs[i,j].set_title(key)\n\t\tif i == 0 and j == 1:\n\t\t\taxs[i,j].legend(prop={'size': 9})\n\t\t# if i < Nr -1:\n\t\t# \taxs[i,j].set_title(key)\n# axs.legend()\n\t\t# plt.savefig(os.path.join(epidope_results_dir, f\"prediction_distribution.pdf\"))\n\t\t# plt.close()\n\t\tk += 1\n\nplt.savefig(os.path.join(outdir, f\"prediction_distribution.pdf\"))\n\n\n############################################################### MAX DATA\ndf = pd.read_csv(\"/home/go96bix/projects/raw_data/imunodominat_prots/uniprot-proteome_UP000000800.csv\", \"\\t\")\nprot_to_geneName = {}\nfor i,j in zip(df[\"Entry\"],df[\"Gene names\"]):\n\tgeneName = j.split()[-1]\n\tgeneName = geneName.replace(\"_\",\"\")\n\tprot_to_geneName.update({i:geneName})\n\ndf2 = pd.read_csv(\"/home/go96bix/projects/raw_data/imunodominat_prots/immunodominant.txt\",\"\\t\",header=None)\ndominant = set(df2[0])\n\n\nfpr = {}\ntpr = {}\nroc_auc = {}\nthresholds = {}\nthresh = 1\n\nscores = {}\nmin_scores = []\nmax_scores = []\nmedian_scores = []\nquantile_scores = []\n\nepidope_flag = []\n\nwith open(\"/home/go96bix/projects/raw_data/imunodominat_prots/uniprot-proteome_UP000000800.fasta\", \"r\") as fasta:\n\t\"\"\"\n\tget all validated regions per protein in the eval set\n\t\"\"\"\n\t# with open(\"/home/go96bix/projects/raw_data/validation_samples_nov_2019_08_seqID.fasta\", \"r\") as fasta:\n\tfor line in fasta:\n\t\tif line.startswith(\">\"):\n\t\t\tprot_file_name = line[1:].split()[0]\n\t\t\tprot = prot_file_name.split(\"|\")[1]\n\n\t\t\tgeneName = prot_to_geneName[prot]\n\t\t\tif geneName in dominant:\n\t\t\t\tepidope_flag.append(1)\n\t\t\telse:\n\t\t\t\tepidope_flag.append(0)\n\n\t\t\tpath = 
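The precision-recall block mirrors the ROC block, but the AUC is taken over (recall, precision) pairs and the random baseline is the positive-class ratio rather than 0.5. One point worth flagging: the PR figure above still labels its axes 'False Positive Rate'/'True Positive Rate', apparently left over from the ROC plot. The core calls, isolated:

import numpy as np
from sklearn import metrics

flags = np.array([0, 0, 1, 1])
scores = np.array([0.2, 0.4, 0.35, 0.8])

precision, recall, _ = metrics.precision_recall_curve(flags, scores, pos_label=1)
pr_auc = metrics.auc(recall, precision)      # x must be monotonic; recall is
baseline = flags.sum() / len(flags)          # precision of a random classifier
print(pr_auc, baseline)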
f\"/home/go96bix/projects/raw_data/imunodominat_prots/epidope/{prot_file_name}.csv\"\n\t\t\tdf = pd.read_csv(path, sep='\\t')\n\t\t\tscore = np.array(df['Deepipred']).mean()\n\t\t\tscores['mean'] = np.append(scores.get('mean', np.array([])), score)\n\n\t\t\tscore = np.array(df['Deepipred']).min()\n\t\t\tscores['min'] = np.append(scores.get('min', np.array([])), score)\n\n\t\t\tscore = np.array(df['Deepipred']).max()\n\t\t\tscores['max'] = np.append(scores.get('max', np.array([])), score)\n\n\t\t\tscore = np.median(np.array(df['Deepipred']))\n\t\t\tscores['median'] = np.append(scores.get('median', np.array([])), score)\n\n\t\t\tscore = np.quantile(np.array(df['Deepipred']), 0.8)\n\t\t\tscores['quantile_high'] = np.append(scores.get('quantile_high', np.array([])), score)\n\n\t\t\tscore = np.quantile(np.array(df['Deepipred']), 0.2)\n\t\t\tscores['quantile_low'] = np.append(scores.get('quantile_low', np.array([])), score)\n\n\tscores['dif_max_min'] = scores['max'] - scores['min']\n\nepidope_flag = np.array(epidope_flag)\n\n\"\"\"\ncalc ROC\n\"\"\"\nfor key in ['mean', 'min', 'max', 'median', 'quantile_high', 'quantile_low', 'dif_max_min']:\n\tfpr[key], tpr[key], thresholds[key] = metrics.roc_curve(epidope_flag, scores[key], pos_label=1)\n\troc_auc[key] = metrics.roc_auc_score(epidope_flag, scores[key], max_fpr=thresh)\n\n\"\"\"\nplotting\n\"\"\"\noutdir = \".\"\n\nplt.figure(figsize=(6, 6))\nlw = 2\nmaxtpr = 0\nfor x in tpr:\n\tmaxtpr = max(maxtpr, max(tpr[x]))\n\nmean_roc, = plt.plot(fpr['mean'], tpr['mean'], color='green', lw=lw,\n\t\t\t\t\t label='mean (AUC = %0.4f)' % roc_auc['mean'])\nmin_roc, = plt.plot(fpr['min'], tpr['min'], color='lightcoral', linestyle='-.', lw=lw,\n\t\t\t\t\tlabel='min (AUC = %0.4f)' % roc_auc['min'])\nmax_roc, = plt.plot(fpr['max'], tpr['max'], color='goldenrod', lw=lw, label='max (AUC = %0.4f)' % roc_auc['max'])\nmedian_roc, = plt.plot(fpr['median'], tpr['median'], color='grey', linestyle=':', lw=lw,\n\t\t\t\t\t label='median (AUC = %0.4f)' % roc_auc['median'])\nquantile_high_roc, = plt.plot(fpr['quantile_high'], tpr['quantile_high'], color='peru', lw=lw, linestyle='--',\n\t\t\t\t\t\t label='quantile 0.8 (AUC = %0.4f)' % roc_auc['quantile_high'])\nquantile_low_roc, = plt.plot(fpr['quantile_low'], tpr['quantile_low'], color='teal', lw=lw, linestyle='--',\n\t\t\t\t\t\t label='quantile 0.2 (AUC = %0.4f)' % roc_auc['quantile_low'])\ndif_roc, = plt.plot(fpr['dif_max_min'], tpr['dif_max_min'], color='lightsteelblue', linestyle='-.',\n\t\t\t\t\t\t label='differenz max min (AUC = %0.4f)' % roc_auc['dif_max_min'])\n\nrandom_roc, = plt.plot([0, thresh], [0, thresh], color='navy', lw=lw, linestyle='--', label='random (AUC = 0.50)')\n\nplt.xlim([0.0, thresh])\nplt.ylim([0.0, 1.0 * maxtpr])\nplt.xlabel('False Positive Rate')\nplt.ylabel('True Positive Rate')\nplt.title('Receiver operating characteristic for imuno dominance')\nplt.legend()\nplt.legend(\n\thandles=[mean_roc, min_roc, max_roc, median_roc, quantile_high_roc, quantile_low_roc, dif_roc, random_roc],\n\tloc=\"lower right\", prop={'size': 9})\nplt.savefig(os.path.join(outdir, f\"ROC_prediction_imunodominance_{thresh}_max_data.pdf\"), bbox_inches=\"tight\", pad_inches=0)\nplt.show()\nplt.close()\n\n\"\"\"\ncalc precision recall\n\"\"\"\nprecision = {}\nrecall = {}\nthresholds = {}\nfor key in ['mean', 'min', 'max', 'median', 'quantile_high', 'quantile_low', 'dif_max_min']:\n\tprecision[key], recall[key], thresholds[key] = metrics.precision_recall_curve(epidope_flag, scores[key], pos_label=1)\n\troc_auc[key] = 
metrics.auc(recall[key], precision[key])\n\n\"\"\"\nplotting\n\"\"\"\noutdir = \".\"\n\nplt.figure(figsize=(6, 6))\nlw = 2\nmaxtpr = 0\nfor x in tpr:\n\tmaxtpr = max(maxtpr, max(tpr[x]))\n\nmean_roc, = plt.plot(recall['mean'], precision['mean'], color='green', lw=lw, label='mean (AUC = %0.4f)' % roc_auc['mean'])\nmin_roc, = plt.plot(recall['min'], precision['min'], color='lightcoral', linestyle='-.', lw=lw,\n\t\t\t\t\tlabel='min (AUC = %0.4f)' % roc_auc['min'])\nmax_roc, = plt.plot(recall['max'], precision['max'], color='goldenrod', lw=lw, label='max (AUC = %0.4f)' % roc_auc['max'])\nmedian_roc, = plt.plot(recall['median'], precision['median'], color='grey', linestyle=':', lw=lw,\n\t\t\t\t\t label='median (AUC = %0.4f)' % roc_auc['median'])\nquantile_high_roc, = plt.plot(recall['quantile_high'], precision['quantile_high'], color='peru', lw=lw, linestyle='--',\n\t\t\t\t\t\t label='quantile 0.8 (AUC = %0.4f)' % roc_auc['quantile_high'])\nquantile_low_roc, = plt.plot(recall['quantile_low'], precision['quantile_low'], color='teal', lw=lw, linestyle='--',\n\t\t\t\t\t\t label='quantile 0.2 (AUC = %0.4f)' % roc_auc['quantile_low'])\ndif_roc, = plt.plot(recall['dif_max_min'], precision['dif_max_min'], color='lightsteelblue', linestyle='-.',\n\t\t\t\t\t\t label='dif max min (AUC = %0.4f)' % roc_auc['dif_max_min'])\n\nratio_true_false = epidope_flag.sum() / len(epidope_flag)\nrandom_pr, = plt.plot([0, 1], [ratio_true_false, ratio_true_false], color='navy', linestyle='--',\n\t\t\t\t\t label='random (AUC = %0.4f)' % ratio_true_false)\nplt.xlim([0.0, thresh])\n# plt.ylim([0.97, 1.0 * maxtpr])\nplt.xlabel('False Positive Rate')\nplt.ylabel('True Positive Rate')\nplt.title('Precision-Recall Curve for imuno dominance')\nplt.legend()\nplt.legend(handles=[mean_roc, min_roc, max_roc, median_roc, quantile_high_roc, quantile_low_roc,dif_roc, random_pr], loc=\"lower right\",\n\t\t prop={'size': 9})\nplt.savefig(os.path.join(outdir, f\"precision_recall_imunodominance_{thresh}_max_data.pdf\"), bbox_inches=\"tight\", pad_inches=0)\nplt.show()\nplt.close()\n\nNr = 4\nNc = 2\n\nleft = 0.125 # the left side of the subplots of the figure\nright = 0.9 # the right side of the subplots of the figure\nbottom = 0.1 # the bottom of the subplots of the figure\ntop = 0.9 # the top of the subplots of the figure\nwspace = 0.2 # the amount of width reserved for space between subplots,\n # expressed as a fraction of the average axis width\nhspace = 0.2 # the amount of height reserved for space between subplots,\n # expressed as a fraction of the average axis height\n\n\nfig, axs = plt.subplots(Nr, Nc, sharex=True, sharey=True)\nplt.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=None, hspace=0.4)\nfig.suptitle('score distribution')\nk = 0\nfor i in range(Nr):\n\tfor j in range(Nc):\n\t\tkey = ['mean', 'median', 'max', 'min', 'quantile_high', 'quantile_low','dif_max_min'][k]\n\t\tyPred_0 = scores[key][epidope_flag == 0]\n\t\tyPred_1 = scores[key][epidope_flag == 1]\n\t\tyPred_total = [yPred_0, yPred_1]\n\n\t\tfoo = axs[i,j].hist(yPred_total, bins=20, range=(0, 1), stacked=False, label=['no Epitope', 'true Epitope'], density=True)\n\t\taxs[i,j].set_title(key)\n\t\tif i == 0 and j == 1:\n\t\t\taxs[i,j].legend(prop={'size': 9})\n\t\t# if i < Nr -1:\n\t\t# \taxs[i,j].set_title(key)\n# axs.legend()\n\t\t# plt.savefig(os.path.join(epidope_results_dir, f\"prediction_distribution.pdf\"))\n\t\t# plt.close()\n\t\tk += 1\n\t\tprint(key)\n\t\tif k == len(key):\n\t\t\tbreak\n\nplt.savefig(os.path.join(outdir, 
f\"prediction_distribution_max_data.pdf\"))\n" }, { "alpha_fraction": 0.5492957830429077, "alphanum_fraction": 0.6830986142158508, "avg_line_length": 27.399999618530273, "blob_id": "e047cef32316c30febc950fc7c44625d49cf9435", "content_id": "57689a36e512fd0914c8b40948abaa41275b5540", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 284, "license_type": "permissive", "max_line_length": 40, "num_lines": 10, "path": "/utils/plot_number_proteins_seq_identity.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "import matplotlib.pyplot as plt\n\ny = [3067, 2032, 1799, 1662, 1525, 1378]\nx = [1, 0.9, 0.8, 0.7, 0.6, 0.5]\nplt.xlim([1, 0.5])\nplt.title('#proteins-sequence identity')\nplt.ylabel('number clusters')\nplt.xlabel('sequence identity')\nplt.plot(x, y)\nplt.savefig(\"proteins-sequence_id.pdf\")\n" }, { "alpha_fraction": 0.6404282450675964, "alphanum_fraction": 0.6521903276443481, "avg_line_length": 32.74809265136719, "blob_id": "bf3828199eadb77e0d3c795db822970b030ebba6", "content_id": "3257c84745d548e6f551d7962d835b91010657db", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13263, "license_type": "permissive", "max_line_length": 137, "num_lines": 393, "path": "/utils/generate_final_sets.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "import os\nimport pandas as pd\nimport re\nimport numpy as np\nimport sys\n\nsys.path.insert(0, '/home/go96bix/projects/epitop_pred/')\nfrom utils import DataGenerator\nimport pickle\n\n\"\"\"\nembedd all proteins\ngehe uber blast treffer\nmache zwei dicts (epi_dict; non_epi_dict)\n jeweils als value array [tuple (start stop) von window]\ngehe ueber epi_dict \n\tper value slice window embedding into non_epi_area_protein and epi\ngehe ueber non_epi_dict\n\tslice out non_epi\n\"\"\"\n\n\ndef readFasta(file):\n\t## read fasta file\n\tseq = \"\"\n\twith open(file, \"r\") as infa:\n\t\tfor line in infa:\n\t\t\tline = line.strip()\n\t\t\tif re.match(\">\", line):\n\t\t\t\tpass\n\t\t\telse:\n\t\t\t\tseq += line\n\treturn seq\n\n\n# SETTINGS\nslicesize = 49\nshift = 22\nglobal_embedding_bool = True\nnon_epi_in_protein_bool = False\n# circular filling == if windows with to short entries (frame goes over start or end of protein) fill with AAs of start or end of protein\nuse_circular_filling = False\nbig_set = True\n\ncwd = \"/home/go96bix/projects/epitop_pred\"\ndirectory = os.path.join(cwd, \"data_generator_NON_clustered\")\nif not os.path.exists(directory):\n\tos.makedirs(directory)\n\nelmo_embedder = DataGenerator.Elmo_embedder()\n\nepitope_arr_local = []\nepitope_arr_global = []\nnon_epitope_arr_local = []\nnon_epitope_arr_global = []\nnon_epi_part_in_protein_arr_local = []\nnon_epi_part_in_protein_arr_global = []\n\nblast_df = pd.DataFrame.from_csv(\n\t\"/home/go96bix/projects/epitop_pred/utils/bepipred_samples_like_filtered_blast_table.tsv\",\n\tsep=\"\\t\", index_col=None)\n\nepi_protein_hits_dict = {}\nnone_epi_protein_hits_dict = {}\nfoo = 0\n\nfor index, row in blast_df.iterrows():\n\ttype_hit = row['#qseqid']\n\tfile = type_hit + \"_\" + str(row['sseqid']).split(\"|\")[1]\n\tstart = row['sstart'] - row['qstart']\n\tstop = start + slicesize\n\tif type_hit.startswith(\"epi\"):\n\t\thits = epi_protein_hits_dict.get(file, [])\n\t\tif (start, stop) not in hits:\n\t\t\thits.append((start, stop))\n\t\telse:\n\t\t\tprint(f\"doublicated: {index}\")\n\n\t\t# save all 
epitopes\n\t\tepi_protein_hits_dict.update({file: hits})\n\telse:\n\t\thits = none_epi_protein_hits_dict.get(file, [])\n\t\tif (start, stop) not in hits:\n\t\t\thits.append((start, stop))\n\t\telse:\n\t\t\tprint(f\"doublicated: {index}\")\n\n\t\t# save all epitopes\n\t\tnone_epi_protein_hits_dict.update({file: hits})\n\nfor index, (key, values) in enumerate(epi_protein_hits_dict.items()):\n\n\tif big_set:\n\t\tseq_local = readFasta(os.path.join(\n\t\t\t\"/home/go96bix/projects/raw_data/bepipred_sequences\",\n\t\t\tkey + \".fasta\"))\n\telse:\n\t\tseq_local = readFasta(os.path.join(\n\t\t\t\"/home/le86qiz/Documents/Konrad/deepipred_training_data/complete_protein_sequences/\",\n\t\t\tkey + \".fasta\"))\n\tseq_local = seq_local.lower()\n\tseq_len = len(seq_local)\n\tif seq_len < 25:\n\t\tcontinue\n\n\tif use_circular_filling:\n\t\tprotein_pad_local = list(seq_local[-shift:] + seq_local + seq_local[0:shift])\n\telse:\n\t\tprotein_pad_local = [\"-\"] * (seq_len + (shift * 2))\n\n\tif global_embedding_bool:\n\t\tif big_set:\n\t\t\tfile_name = key.split(\"_\")\n\t\t\tassert len(file_name) == 4, f\"filename of unexpected form, expected epi_1234_ID_123.fasta, but got {key}\"\n\t\t\tfile_name = file_name[2] + \"_\" + file_name[3]\n\t\t\tseq_global_tuple = pickle.load(\n\t\t\t\topen(os.path.join(\"/home/go96bix/projects/raw_data/embeddings_bepipred_samples\",\n\t\t\t\t file_name + \".pkl\"), \"rb\"))\n\t\t\tseq_global = seq_global_tuple[1]\n\n\t\telse:\n\t\t\tprint(seq_local)\n\t\t\tsample_embedding = elmo_embedder.seqvec.embed_sentence(seq_local)\n\t\t\tsample_embedding = sample_embedding.mean(axis=0)\n\t\t\tseq_global = sample_embedding\n\n\t\tprotein_pad_global = np.zeros((seq_len + (shift * 2), 1024), dtype=np.float32)\n\t\tif use_circular_filling:\n\t\t\tprotein_pad_global[0:shift] = seq_global[-shift:]\n\t\t\tprotein_pad_global[-shift:] = seq_global[0:shift]\n\n\tfor i in range(0, seq_len, 1):\n\t\tprotein_pad_local[i + (shift)] = seq_local[i]\n\n\t\tif global_embedding_bool:\n\t\t\tprotein_pad_global[i + (shift)] = seq_global[i]\n\n\tseq_len = len(protein_pad_local)\n\tnot_epi_mask = np.ones(seq_len, dtype=np.int)\n\tfor val in values:\n\t\tnot_epi_mask[val[0] + shift:val[1] + shift] = 0\n\n\t\tepitope = protein_pad_local[val[0] + shift:val[1] + shift]\n\t\tepitope = \"\".join(epitope)\n\t\tassert len(epitope) == slicesize, f\"error {epitope} in {key} is not {slicesize} long but {len(epitope)}\"\n\t\tepitope_arr_local.append([epitope, val[0], val[1], key])\n\n\t\tif global_embedding_bool:\n\t\t\tepitope = protein_pad_global[val[0] + shift:val[1] + shift]\n\t\t\tepitope_arr_global.append([epitope, val[0], val[1], key])\n\n\tif non_epi_in_protein_bool:\n\t\tstart_bool = False\n\t\tstart = 0\n\t\tstop = False\n\t\tfor index, i in enumerate(not_epi_mask):\n\t\t\tif i == 1 and start_bool == False:\n\t\t\t\tstart = index\n\t\t\t\tstart_bool = True\n\t\t\telif i == 0 and start_bool == True:\n\t\t\t\tstop = index\n\t\t\t\tif stop - start > slicesize:\n\n\t\t\t\t\tnon_epitope = protein_pad_local[start:stop]\n\t\t\t\t\tnon_epitope = \"\".join(non_epitope)\n\t\t\t\t\tnon_epi_part_in_protein_arr_local.append([non_epitope, start - shift, stop - shift, key])\n\n\t\t\t\t\tif global_embedding_bool:\n\t\t\t\t\t\tnon_epitope = protein_pad_global[start:stop]\n\t\t\t\t\t\tnon_epi_part_in_protein_arr_global.append([non_epitope, start - shift, stop - shift, key])\n\t\t\t\t\tstart_bool = False\n\t\t\t\t\tstop = False\n\t\t\telse:\n\t\t\t\tpass\n\t\tif start_bool == True:\n\t\t\tstop = index + 1\n\t\t\tif stop - 
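The not_epi_mask scan here walks a binary mask once and emits every non-epitope stretch longer than slicesize, including a run that reaches the end of the protein (handled after the loop). The same run extraction, condensed into a hypothetical helper:

def long_runs(mask, min_len):
	"""(start, stop) of each run of 1s in `mask` strictly longer than min_len."""
	runs, start = [], None
	for i, v in enumerate(mask):
		if v == 1 and start is None:
			start = i
		elif v == 0 and start is not None:
			if i - start > min_len:
				runs.append((start, i))
			start = None
	if start is not None and len(mask) - start > min_len:  # run hits the end
		runs.append((start, len(mask)))
	return runs

print(long_runs([1, 1, 1, 1, 0, 0, 1, 1, 1], min_len=2))  # [(0, 4), (6, 9)]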
start > slicesize:\n\t\t\t\tnon_epitope = protein_pad_local[start:stop]\n\t\t\t\tnon_epitope = \"\".join(non_epitope)\n\t\t\t\tnon_epi_part_in_protein_arr_local.append([non_epitope, start - shift, stop - shift, key])\n\n\t\t\t\tif global_embedding_bool:\n\t\t\t\t\tnon_epitope = protein_pad_global[start:stop]\n\t\t\t\t\tnon_epi_part_in_protein_arr_global.append([non_epitope, start - shift, stop - shift, key])\n\n# include non-epitopes presented in other papers\nfor key, values in none_epi_protein_hits_dict.items():\n\tif big_set:\n\t\tseq_local = readFasta(os.path.join(\n\t\t\t\"/home/go96bix/projects/raw_data/bepipred_sequences\",\n\t\t\tkey + \".fasta\"))\n\telse:\n\t\tseq_local = readFasta(os.path.join(\n\t\t\t\"/home/le86qiz/Documents/Konrad/deepipred_training_data/complete_protein_sequences/\",\n\t\t\tkey + \".fasta\"))\n\tseq_local = seq_local.lower()\n\tseq_len = len(seq_local)\n\tif seq_len < 25:\n\t\tcontinue\n\n\tif use_circular_filling:\n\t\tprotein_pad_local = list(seq_local[-shift:] + seq_local + seq_local[0:shift])\n\telse:\n\t\tprotein_pad_local = [\"-\"] * (seq_len + (shift * 2))\n\n\tif global_embedding_bool:\n\t\tif big_set:\n\t\t\tfile_name = key.split(\"_\")\n\t\t\tassert len(file_name) == 4, f\"filename of unexpected form, expected epi_1234_ID_123.fasta, but got {key}\"\n\t\t\tfile_name = file_name[2] + \"_\" + file_name[3]\n\t\t\tseq_global_tuple = pickle.load(\n\t\t\t\topen(os.path.join(\"/home/go96bix/projects/raw_data/embeddings_bepipred_samples\",\n\t\t\t\t file_name + \".pkl\"), \"rb\"))\n\t\t\tseq_global = seq_global_tuple[1]\n\n\n\t\telse:\n\t\t\tprint(seq_local)\n\t\t\tsample_embedding = elmo_embedder.seqvec.embed_sentence(seq_local)\n\t\t\tsample_embedding = sample_embedding.mean(axis=0)\n\t\t\tseq_global = sample_embedding\n\n\t\tprotein_pad_global = np.zeros((seq_len + (shift * 2), 1024), dtype=np.float32)\n\t\tif use_circular_filling:\n\t\t\tprotein_pad_global[0:shift] = seq_global[-shift:]\n\t\t\tprotein_pad_global[-shift:] = seq_global[0:shift]\n\n\tfor i in range(0, seq_len, 1):\n\t\tprotein_pad_local[i + (shift)] = seq_local[i]\n\n\t\tif global_embedding_bool:\n\t\t\tprotein_pad_global[i + (shift)] = seq_global[i]\n\n\tfor val in values:\n\t\tnon_epitope = protein_pad_local[val[0] + shift:val[1] + shift]\n\t\tnon_epitope = \"\".join(non_epitope)\n\t\tassert len(epitope) == slicesize, f\"error {epitope} in {key} is not {slicesize} long but {len(epitope)}\"\n\t\tnon_epitope_arr_local.append([non_epitope, val[0], val[1], key])\n\n\t\tif global_embedding_bool:\n\t\t\tnon_epitope = protein_pad_global[val[0] + shift:val[1] + shift]\n\t\t\tnon_epitope_arr_global.append([non_epitope, val[0], val[1], key])\n\nnum_samples = []\nall_samples = []\n\nif non_epi_in_protein_bool:\n\tnon_epi_all_arr = [i for j in (non_epi_part_in_protein_arr_local, non_epitope_arr_local) for i in j]\nelse:\n\tnon_epi_all_arr = non_epitope_arr_local\n\nfor arr in [non_epi_all_arr, epitope_arr_local]:\n\tcount_non_overlapping_windows_samples = [len(i[0]) // slicesize for i in arr]\n\tnum_samples.append(sum(count_non_overlapping_windows_samples))\nmin_samples = min(num_samples)\n\nval_df = pd.DataFrame()\ntest_df = pd.DataFrame()\ntrain_df = pd.DataFrame()\n\nX_train_global = []\nX_train_local = []\nX_val_global = []\nX_val_local = []\nX_test_global = []\nX_test_local = []\nY_train = []\nY_val = []\nY_test = []\n\nif non_epi_in_protein_bool:\n\tlocal_arrays = [non_epi_part_in_protein_arr_local, non_epitope_arr_local, epitope_arr_local]\nelse:\n\tlocal_arrays = [non_epitope_arr_local, 
epitope_arr_local]\nif global_embedding_bool:\n\tif non_epi_in_protein_bool:\n\t\tglobal_arrays = [non_epi_part_in_protein_arr_global, non_epitope_arr_global, epitope_arr_global]\n\telse:\n\t\tglobal_arrays = [non_epitope_arr_global, epitope_arr_global]\n\n# make sure number samples per class in val and test set have straight number\nmin_samples = (min_samples // 10) * 2\nfor index, arr in enumerate(local_arrays):\n\tdo_val = True\n\tdo_test = False\n\tsamples = 0\n\tselection = np.random.permutation(range(len(arr)))\n\n\tif non_epi_in_protein_bool:\n\t\ty = [\"non_epitope\", \"non_epitope\", \"true_epitope\"][index]\n\telse:\n\t\ty = [\"non_epitope\", \"true_epitope\"][index]\n\n\tfor i in selection:\n\t\tlen_sample = len(arr[i][0])\n\t\tmax_shift = len_sample % slicesize\n\t\tstart_pos = np.random.random_integers(0, max_shift)\n\t\tif do_val:\n\t\t\tfraction = 1\n\t\t\tif y == \"non_epitope\" and non_epi_in_protein_bool:\n\t\t\t\tfraction = 0.5\n\n\t\t\tfor j in range(start_pos, len_sample - slicesize + 1, slicesize):\n\t\t\t\tX_val_local.append([local_arrays[index][i][0][j:j + slicesize], local_arrays[index][i][1] + j,\n\t\t\t\t local_arrays[index][i][1] + j + slicesize, local_arrays[index][i][3]])\n\t\t\t\tif global_embedding_bool:\n\t\t\t\t\tX_val_global.append([global_arrays[index][i][0][j:j + slicesize], global_arrays[index][i][1] + j,\n\t\t\t\t\t global_arrays[index][i][1] + j + slicesize, global_arrays[index][i][3]])\n\t\t\t\tY_val.append(y)\n\t\t\t\tsamples += 1\n\t\t\t\tif samples >= int(fraction * min_samples):\n\t\t\t\t\tdo_test = True\n\t\t\t\t\tdo_val = False\n\t\t\t\t\tsamples = 0\n\t\t\t\t\tbreak\n\t\telif do_test:\n\n\t\t\tfraction = 1\n\t\t\tif y == \"non_epitope\" and non_epi_in_protein_bool:\n\t\t\t\tfraction = 0.5\n\t\t\tfor j in range(start_pos, len_sample - slicesize + 1, slicesize):\n\t\t\t\tX_test_local.append([local_arrays[index][i][0][j:j + slicesize], local_arrays[index][i][1] + j,\n\t\t\t\t local_arrays[index][i][1] + j + slicesize, local_arrays[index][i][3]])\n\t\t\t\tif global_embedding_bool:\n\t\t\t\t\tX_test_global.append([global_arrays[index][i][0][j:j + slicesize], global_arrays[index][i][1] + j,\n\t\t\t\t\t global_arrays[index][i][1] + j + slicesize, global_arrays[index][i][3]])\n\n\t\t\t\tY_test.append(y)\n\t\t\t\tsamples += 1\n\t\t\t\tif samples >= int(fraction * min_samples):\n\t\t\t\t\tdo_test = False\n\t\t\t\t\tdo_val = False\n\t\t\t\t\tsamples = 0\n\t\t\t\t\tbreak\n\t\telse:\n\t\t\tX_train_local.append(local_arrays[index][i])\n\t\t\tif global_embedding_bool:\n\t\t\t\tX_train_global.append(global_arrays[index][i])\n\n\t\t\tY_train.append(y)\n\nfor i in np.unique(Y_train):\n\tdirectory2 = directory + f\"/train/{i}\"\n\tif not os.path.exists(directory2):\n\t\tos.makedirs(directory2)\n\nX_test_local = np.array(X_test_local)\nX_test_global = np.array(X_test_global)\nX_val_local = np.array(X_val_local)\nX_val_global = np.array(X_val_global)\nX_train_local = np.array(X_train_local)\nX_train_global = np.array(X_train_global)\n\nY_test = np.array(Y_test)\nY_val = np.array(Y_val)\nY_train = np.array(Y_train)\n\nfor index, sample in enumerate(Y_train):\n\tdirectory2 = directory + f\"/train/{sample}/{index}.csv\"\n\tf = open(directory2, 'w')\n\tf.write(\n\t\tf\"{X_train_local[index][0]}\\t{X_train_local[index][1]}\\t{X_train_local[index][2]}\\t{X_train_local[index][3]}\")\n\n\tif global_embedding_bool:\n\t\twith open(directory + f\"/train/{sample}/{index}.pkl\", \"wb\") as outfile:\n\t\t\tpickle.dump(X_train_global[index], outfile)\n\nfor index, i in 
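Validation and test windows above are cut non-overlapping at a random start offset, so the len % slicesize leftover residues land at a random position instead of always at the sequence end. One caveat: np.random.random_integers is long deprecated (and removed from recent NumPy); np.random.randint(0, max_shift + 1) is the drop-in replacement, as in this sketch:

import numpy as np

def nonoverlapping_windows(seq, slicesize):
	"""Non-overlapping slicesize windows with a random start offset."""
	max_shift = len(seq) % slicesize
	start_pos = np.random.randint(0, max_shift + 1)   # inclusive upper bound
	return [seq[j:j + slicesize]
	        for j in range(start_pos, len(seq) - slicesize + 1, slicesize)]

print(nonoverlapping_windows("abcdefghijk", slicesize=4))  # e.g. ['abcd', 'efgh']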
enumerate((X_test_local, X_val_local, X_train_local)):\n\tlen_i = i.shape[0]\n\tshuffle = np.random.permutation(range(len_i))\n\tif index == 0:\n\t\tif global_embedding_bool:\n\t\t\tpickle.dump(X_test_global[shuffle], open(directory + '/X_test.pkl', 'wb'))\n\t\tpd.DataFrame(X_test_local[shuffle]).to_csv(directory + '/X_test.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t index=None)\n\t\tpd.DataFrame(Y_test[shuffle]).to_csv(directory + '/Y_test.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t index=None)\n\n\telif index == 1:\n\t\tif global_embedding_bool:\n\t\t\tpickle.dump(X_val_global[shuffle], open(directory + '/X_val.pkl', 'wb'))\n\t\tpd.DataFrame(X_val_local[shuffle]).to_csv(directory + '/X_val.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t index=None)\n\t\tpd.DataFrame(Y_val[shuffle]).to_csv(directory + '/Y_val.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t index=None)\n\n\telif index == 2:\n\t\tif global_embedding_bool:\n\t\t\tpickle.dump(X_train_global[shuffle], open(directory + '/X_train.pkl', 'wb'))\n\t\tpd.DataFrame(X_train_local[shuffle]).to_csv(directory + '/X_train.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t index=None)\n\t\tpd.DataFrame(Y_train[shuffle]).to_csv(directory + '/Y_train.csv', sep='\\t', encoding='utf-8', header=None,\n\t\t index=None)\n" }, { "alpha_fraction": 0.6782821416854858, "alphanum_fraction": 0.6923047304153442, "avg_line_length": 39.89374923706055, "blob_id": "80f2509d5fe3e90932d09bdb9bf1908ede367d5d", "content_id": "35cefdb5a984cc0c43373d7597e2f3137f3c724e", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 26172, "license_type": "permissive", "max_line_length": 179, "num_lines": 640, "path": "/utils/make_ROC_curves.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Sun Sep 15 15:35:52 2019\n\n@author: le86qiz\n\"\"\"\n\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nfrom sklearn import metrics\nfrom sklearn.metrics import precision_recall_curve\nimport os\nfrom scipy import interp\nimport pickle\n\ndeepipred_results_dir = f'/home/go96bix/projects/raw_data/binary_25_nodes_100_epochs_08DO_0.5_seqID_new/results/'\n\n# read test file table\n# testfiletable = '/home/go96bix/projects/epitop_pred/with_errors/data_generator_bepipred_binary_0.5_seqID/samples_for_ROC.csv'\n# testfiletable = '/home/go96bix/projects/raw_data/allprotein.csv'\ntestfiletable = '/home/go96bix/projects/raw_data/08_allprotein.csv'\n# testfiletable = '/home/go96bix/projects/raw_data/05_allprotein.csv'\n# kfoldtable_dir = \"/home/go96bix/projects/epitop_pred/data_generator_bepipred_binary_allProteins\"\nkfoldtable_dir = \"/home/go96bix/projects/epitop_pred/data_generator_bepipred_binary_double_cluster_0.8_0.5_seqID\"\n# kfoldtable_dir = \"/home/go96bix/projects/epitop_pred/data_generator_bepipred_binary_0.5_seqID\"\n# testfiletable = '/home/le86qiz/Documents/Konrad/tool_comparison/comparison3/samples_for_ROC.csv'\n\nthresh = 1\n\ndef lbtope_results():\n\tlbtope_results_dict = dict()\n\twith open(\"/home/go96bix/projects/paper/EpiDope/raw_data/all_seq_used_for_training/training.fasta\", \"r\") as input:\n\t\theader = [i.strip() for i in input.readlines() if i.startswith(\">\")]\n\t# with open(\"/home/go96bix/projects/epitop_pred/lbtope_results.txt\",\"r\") as input:\n\twith open(\"/home/go96bix/projects/epitop_pred/lbtope_results_bepipred_dataset.txt\",\"r\") as 
input:\n\t\theader_index = 0\n\t\tscores = []\n\t\tfor line in input:\n\t\t\tif line.startswith(\"Epitopes\"):\n\t\t\t\tif header_index > 0:\n\t\t\t\t\tlbtope_results_dict.update({testid:scores})\n\t\t\t\thead = header[header_index]\n\t\t\t\theader_index += 1\n\t\t\t\thead = head[1:].split(\"\\t\")\n\t\t\t\ttestid = head[0]\n\t\t\t\tscores = []\n\n\t\t\telse:\n\t\t\t\tscores.append(float(line.strip().split(\"\\t\")[-1]))\n\n\t\tlbtope_results_dict.update({testid: scores})\n\n\t\treturn lbtope_results_dict\n\ndef _binary_roc_auc_score(fpr, tpr, max_fpr=1):\n\tif max_fpr is None or max_fpr == 1:\n\t\treturn metrics.auc(fpr, tpr)\n\tif max_fpr <= 0 or max_fpr > 1:\n\t\traise ValueError(\"Expected max_frp in range ]0, 1], got: %r\"\n\t\t % max_fpr)\n\n\t# Add a single point at max_fpr by linear interpolation\n\tstop = np.searchsorted(fpr, max_fpr, 'right')\n\tx_interp = [fpr[stop - 1], fpr[stop]]\n\ty_interp = [tpr[stop - 1], tpr[stop]]\n\ttpr = np.append(tpr[:stop], np.interp(max_fpr, x_interp, y_interp))\n\tfpr = np.append(fpr[:stop], max_fpr)\n\tpartial_auc = metrics.auc(fpr, tpr)\n\n\t# McClish correction: standardize result to be 0.5 if non-discriminant\n\t# and 1 if maximal\n\tmin_area = 0.5 * max_fpr ** 2\n\tmax_area = max_fpr\n\treturn 0.5 * (1 + (partial_auc - min_area) / (max_area - min_area))\n\ndef get_testproteinIDS(testfiletable):\n\ttestproteinIDs = []\n\twith open(testfiletable) as infile:\n\t\tfor line in infile:\n\t\t\t# file = line.strip().rsplit('/', 1)[1]\n\t\t\t# testproteinIDs.append(file[:-6])\n\n\t\t\tif line.startswith(\"/\"):\n\t\t\t\tfile = line.strip().rsplit('/', 1)[-1]\n\t\t\t\ttestproteinIDs.append(file[:-6])\n\t\t\telif line.startswith(\"Cluster\"):\n\t\t\t\tfiles = cluster_dict[line.strip()]\n\t\t\t\tfor file in files:\n\t\t\t\t\tfile = file.strip().rsplit('/', 1)[-1]\n\t\t\t\t\ttestproteinIDs.append(file[:-6])\n\treturn testproteinIDs\n\ndef cluster_to_dict(file=\"/home/go96bix/projects/raw_data/clustered_protein_seqs/my_double_cluster0.8_05/0.5_seqID.fasta.clstr\",\n directory_fasta=\"/home/go96bix/projects/raw_data/bepipred_proteins_with_marking\"):\n\tout_dict = {}\n\twith open(file, \"r\") as infile:\n\t\tallLines = infile.read()\n\t\tclusters = allLines.split(\">Cluster\")\n\t\tfor cluster in clusters:\n\t\t\tif len(cluster) > 0:\n\t\t\t\tproteins = cluster.strip().split(\"\\n\")\n\t\t\t\tfiles = []\n\t\t\t\tfor index, protein in enumerate(proteins):\n\t\t\t\t\tif index == 0:\n\t\t\t\t\t\tcluster_name = \"Cluster_\" + protein\n\t\t\t\t\telse:\n\t\t\t\t\t\tfilename = protein.split(\" \")[1][1:-3] + \".fasta\"\n\t\t\t\t\t\tprotein_file = os.path.join(directory_fasta, filename)\n\t\t\t\t\t\tfiles.append(protein_file)\n\t\t\t\tout_dict.update({cluster_name:files})\n\treturn out_dict\n\ncluster_dict = cluster_to_dict()\ntestproteinIDs = get_testproteinIDS(testfiletable)\n\n# get start/stop postions of epitopes/nonepitopes\nstartstop_epi = {}\nstartstop_nonepi = {}\ncounter = 0\ncounter_pos = 0\ncounter_neg = 0\nlength = np.array([])\nlength_pos = np.array([])\nlength_neg = np.array([])\n\nlbtope_train = set()\nwith open(\"/home/go96bix/projects/epitop_pred/LBtope_Variable_Negative_epitopes.txt\", \"r\") as input:\n\tlbtope_train.update(set([i.strip() for i in input.readlines()]))\nwith open(\"/home/go96bix/projects/epitop_pred/LBtope_Variable_Positive_epitopes.txt\", \"r\") as input:\n\tlbtope_train.update(set([i.strip() for i in input.readlines()]))\n\nfor testid in testproteinIDs:\n\tfile = 
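_binary_roc_auc_score above reimplements scikit-learn's partial AUC with the McClish correction: after interpolating the curve at max_fpr, the raw area is rescaled so that a random classifier scores 0.5 and a perfect one scores 1 regardless of the cutoff. Worked through for max_fpr = 0.1 (the raw area below is an assumed value for illustration):

# McClish standardisation, mirroring the tail of the function above.
max_fpr = 0.1
min_area = 0.5 * max_fpr ** 2   # area under the diagonal up to max_fpr (0.005)
max_area = max_fpr              # area of a perfect classifier up to max_fpr (0.1)

partial_auc = 0.08              # assumed raw partial area, for illustration
print(0.5 * (1 + (partial_auc - min_area) / (max_area - min_area)))  # ~0.8947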
f'/home/le86qiz/Documents/Konrad/tool_comparison/comparison3/bepipred_proteins_with_marking/{testid}.fasta'\n\twith open(file) as infile:\n\t\tfor index, line in enumerate(infile):\n\t\t\tif index == 0:\n\t\t\t\thead = line[1:].strip().split()\n\n\t\t\telif index ==1:\n\t\t\t\tfor epiID in head:\n\t\t\t\t\tepiID = epiID.split('_')\n\t\t\t\t\tflag = epiID[0]\n\t\t\t\t\tstart = int(epiID[2])\n\t\t\t\t\tstop = int(epiID[3])\n\t\t\t\t\t# if stop - start <= 11:\n\t\t\t\t\t# \tcontinue\n\t\t\t\t\tif line[start:stop] in lbtope_train:\n\t\t\t\t\t\tcontinue\n\t\t\t\t\tcounter += 1\n\t\t\t\t\tlength = np.append(length, stop-start)\n\t\t\t\t\tif flag == 'PositiveID':\n\t\t\t\t\t\tcounter_pos += 1\n\t\t\t\t\t\tlength_pos = np.append(length_pos, stop - start)\n\n\t\t\t\t\t\tif testid in startstop_epi:\n\t\t\t\t\t\t\tstartstop_epi[testid].append([start, stop])\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tstartstop_epi[testid] = [[start, stop]]\n\t\t\t\t\telse:\n\t\t\t\t\t\tcounter_neg += 1\n\t\t\t\t\t\tlength_neg = np.append(length_neg, stop - start)\n\n\t\t\t\t\t\tif testid in startstop_nonepi:\n\t\t\t\t\t\t\tstartstop_nonepi[testid].append([start, stop])\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tstartstop_nonepi[testid] = [[start, stop]]\n\n\t\t\telse:\n\t\t\t\tbreak\n\nprint(\"all\",counter)\nprint(\"positive\",counter_pos)\nprint(\"negative\",counter_neg)\nprint(\"mean length\", np.mean(length))\nprint(\"median length\", np.median(length))\nprint(\"mean length positive\", np.mean(length_pos))\nprint(\"median length positive\", np.median(length_pos))\nprint(\"mean length negative\", np.mean(length_neg))\nprint(\"median length negative\", np.median(length_neg))\n# exit()\n# read bepipred\nbepipred_scores = []\nbepipred_flag = []\nfor testid in testproteinIDs:\n\tbepipred_file = f'/home/le86qiz/Documents/Konrad/tool_comparison/comparison3/bepipred/results/{testid}.csv'\n\tbepipred_table = pd.read_csv(bepipred_file, sep=\"\\t\", index_col=None, skiprows=1).values\n\tbepipred_table = bepipred_table[:, 7]\n\tfor startstop in startstop_epi.get(testid, []):\n\t\tstart = startstop[0]\n\t\tstop = startstop[1]\n\t\tscores = bepipred_table[start:stop]\n\t\tscore = sum(scores) / len(scores)\n\t\tbepipred_scores.append(score)\n\t\t# bepipred_scores.extend(scores)\n\t\tbepipred_flag.append(1)\n\t\t# bepipred_flag.extend([1]*len(scores))\n\tfor startstop in startstop_nonepi.get(testid, []):\n\t\tstart = startstop[0]\n\t\tstop = startstop[1]\n\t\tscores = bepipred_table[start:stop]\n\t\tscore = sum(scores) / len(scores)\n\t\tbepipred_scores.append(score)\n\t\t# bepipred_scores.extend(scores)\n\t\tbepipred_flag.append(0)\n\t\t# bepipred_flag.extend([0] * len(scores))\n\nbepipred_scores = np.array(bepipred_scores)\nbepipred_flag = np.array(bepipred_flag)\n\n# read antigenicity\nantigenicity_scores = []\nantigenicity_flag = []\nfor testid in testproteinIDs:\n\tantigenicity_file = f'/home/le86qiz/Documents/Konrad/tool_comparison/comparison3/aa_scores/antigenicity/{testid}.csv'\n\tantigenicity_table = pd.read_csv(antigenicity_file, sep=\"\\t\", index_col=None).values\n\tantigenicity_table = antigenicity_table[:, 1]\n\tantigenicity_table = antigenicity_table - np.mean(antigenicity_table)\n\tfor startstop in startstop_epi.get(testid, []):\n\t\tstart = startstop[0]\n\t\tstop = startstop[1]\n\t\tscores = antigenicity_table[start:stop]\n\t\tscore = sum(scores) / len(scores)\n\t\tantigenicity_scores.append(score)\n\t\tantigenicity_flag.append(1)\n\tfor startstop in startstop_nonepi.get(testid, []):\n\t\tstart = startstop[0]\n\t\tstop = 
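The marking headers parsed above pack flag, epitope ID and coordinates into underscore-separated tokens such as PositiveID_123_10_25. The parsing, isolated with an invented header line:

head = "PositiveID_123_10_25 NegativeID_124_40_55".split()  # invented header

startstop_epi, startstop_nonepi = {}, {}
for token in head:
	flag, _id, start, stop = token.split("_")
	target = startstop_epi if flag == "PositiveID" else startstop_nonepi
	target.setdefault("P12345", []).append([int(start), int(stop)])

print(startstop_epi)     # {'P12345': [[10, 25]]}
print(startstop_nonepi)  # {'P12345': [[40, 55]]}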
startstop[1]\n\t\tscores = antigenicity_table[start:stop]\n\t\tscore = sum(scores) / len(scores)\n\t\tantigenicity_scores.append(score)\n\t\tantigenicity_flag.append(0)\nantigenicity_scores = np.array(antigenicity_scores)\nantigenicity_flag = np.array(antigenicity_flag)\n\n# read betaturn\nbetaturn_scores = []\nbetaturn_flag = []\nfor testid in testproteinIDs:\n\tbetaturn_file = f'/home/le86qiz/Documents/Konrad/tool_comparison/comparison3/aa_scores/betaturn/{testid}.csv'\n\tbetaturn_table = pd.read_csv(betaturn_file, sep=\"\\t\", index_col=None).values\n\tbetaturn_table = betaturn_table[:, 1]\n\tfor startstop in startstop_epi.get(testid, []):\n\t\tstart = startstop[0]\n\t\tstop = startstop[1]\n\t\tscores = betaturn_table[start:stop]\n\t\tscore = sum(scores) / len(scores)\n\t\tbetaturn_scores.append(score)\n\t\tbetaturn_flag.append(1)\n\tfor startstop in startstop_nonepi.get(testid, []):\n\t\tstart = startstop[0]\n\t\tstop = startstop[1]\n\t\tscores = betaturn_table[start:stop]\n\t\tscore = sum(scores) / len(scores)\n\t\tbetaturn_scores.append(score)\n\t\tbetaturn_flag.append(0)\nbetaturn_scores = np.array(betaturn_scores)\nbetaturn_flag = np.array(betaturn_flag)\n\n# read hydrophilicity\nhydrophilicity_scores = []\nhydrophilicity_flag = []\nfor testid in testproteinIDs:\n\thydrophilicity_file = f'/home/le86qiz/Documents/Konrad/tool_comparison/comparison3/aa_scores/hydrophilicity/{testid}.csv'\n\thydrophilicity_table = pd.read_csv(hydrophilicity_file, sep=\"\\t\", index_col=None).values\n\thydrophilicity_table = hydrophilicity_table[:, 1]\n\tfor startstop in startstop_epi.get(testid, []):\n\t\tstart = startstop[0]\n\t\tstop = startstop[1]\n\t\tscores = hydrophilicity_table[start:stop]\n\t\tscore = sum(scores) / len(scores)\n\t\thydrophilicity_scores.append(score)\n\t\thydrophilicity_flag.append(1)\n\tfor startstop in startstop_nonepi.get(testid, []):\n\t\tstart = startstop[0]\n\t\tstop = startstop[1]\n\t\tscores = hydrophilicity_table[start:stop]\n\t\tscore = sum(scores) / len(scores)\n\t\thydrophilicity_scores.append(score)\n\t\thydrophilicity_flag.append(0)\nhydrophilicity_scores = np.array(hydrophilicity_scores)\nhydrophilicity_flag = np.array(hydrophilicity_flag)\n\n# read accessibility\naccessibility_scores = []\naccessibility_flag = []\nfor testid in testproteinIDs:\n\taccessibility_file = f'/home/le86qiz/Documents/Konrad/tool_comparison/comparison3/aa_scores/accessibility/{testid}.csv'\n\taccessibility_table = pd.read_csv(accessibility_file, sep=\"\\t\", index_col=None).values\n\taccessibility_table = accessibility_table[:, 1]\n\tfor startstop in startstop_epi.get(testid, []):\n\t\tstart = startstop[0]\n\t\tstop = startstop[1]\n\t\tscores = accessibility_table[start:stop]\n\t\tscore = sum(scores) / len(scores)\n\t\taccessibility_scores.append(score)\n\t\taccessibility_flag.append(1)\n\tfor startstop in startstop_nonepi.get(testid, []):\n\t\tstart = startstop[0]\n\t\tstop = startstop[1]\n\t\tscores = accessibility_table[start:stop]\n\t\tscore = sum(scores) / len(scores)\n\t\taccessibility_scores.append(score)\n\t\taccessibility_flag.append(0)\naccessibility_scores = np.array(accessibility_scores)\naccessibility_flag = np.array(accessibility_flag)\n\n# read lbtope\nlbtope_results_dict = lbtope_results()\nlbtope_scores = []\nlbtope_flag = []\n\n# for file in sorted(os.listdir(kfoldtable_dir)):\n# \tif file.endswith(f\"_test_set.csv\") and file.startswith(f\"k-fold_run_\"):\n# \t\ttestproteinIDs_kfold = get_testproteinIDS(f\"{kfoldtable_dir}/{file}\")\nfor testid in 
testproteinIDs:\n\tlbtope_table = lbtope_results_dict[testid]\n\tfor startstop in startstop_epi.get(testid, []):\n\t\tstart = startstop[0]\n\t\tstop = startstop[1]\n\t\tscores = lbtope_table[start:stop]\n\t\tscore = sum(scores) / len(scores)\n\t\tlbtope_scores.append(score)\n\t\tlbtope_flag.append(1)\n\tfor startstop in startstop_nonepi.get(testid, []):\n\t\tstart = startstop[0]\n\t\tstop = startstop[1]\n\t\tscores = lbtope_table[start:stop]\n\t\tscore = sum(scores) / len(scores)\n\t\tlbtope_scores.append(score)\n\t\tlbtope_flag.append(0)\n\nlbtope_scores = np.array(lbtope_scores)\nlbtope_flag = np.array(lbtope_flag)\n\n# read deepipred\ndeepipred_scores = []\ndeepipred_flag = []\n# for testid in testproteinIDs:\ntprs = []\naucs = []\nmean_fpr = np.linspace(0, 1, 100)\n\ntprs_prot = []\naucs_prot = []\nmean_fpr_prot = np.linspace(0, 1, 100)\n\nfor file in sorted(os.listdir(kfoldtable_dir)):\n\tdeepipred_scores_kfold = []\n\tdeepipred_flag_kfold = []\n\tif file.endswith(f\"_test_set.csv\") and file.startswith(f\"k-fold_run_\"):\n\t\ttestproteinIDs_kfold = get_testproteinIDS(f\"{kfoldtable_dir}/{file}\")\n\t\tfor testid in testproteinIDs_kfold:\n\t\t\tepidope_scores_protein = []\n\t\t\tepidope_flag_protein = []\n\t\t\tdeepipred_file = f'{os.path.join(deepipred_results_dir,\"epidope/\")}{testid}.csv'\n\t\t\tdeepipred_table = pd.read_csv(deepipred_file, sep=\"\\t\", index_col=None).values\n\t\t\tdeepipred_table = deepipred_table[:, 1]\n\t\t\t# help_array = 1 - deepipred_table\n\t\t\t# weights = np.std(np.array([help_array,deepipred_table], dtype=np.float), axis=0)\n\t\t\t# deepipred_table = deepipred_table*weights\n\n\t\t\tfor startstop in startstop_epi.get(testid, []):\n\t\t\t\tstart = startstop[0]\n\t\t\t\tstop = startstop[1]\n\t\t\t\t# score = deepipred_table[(start+stop)//2]\n\t\t\t\t# print(score)\n\t\t\t\tscores = deepipred_table[start:stop]\n\t\t\t\tscore = sum(scores) / len(scores)\n\n\t\t\t\tdeepipred_scores_kfold.append(score)\n\t\t\t\tepidope_scores_protein.append(score)\n\n\t\t\t\tdeepipred_flag_kfold.append(1)\n\t\t\t\tepidope_flag_protein.append(1)\n\t\t\tfor startstop in startstop_nonepi.get(testid, []):\n\t\t\t\tstart = startstop[0]\n\t\t\t\tstop = startstop[1]\n\t\t\t\tscores = deepipred_table[start:stop]\n\t\t\t\tscore = sum(scores) / len(scores)\n\n\t\t\t\tdeepipred_scores_kfold.append(score)\n\t\t\t\tepidope_scores_protein.append(score)\n\n\t\t\t\tdeepipred_flag_kfold.append(0)\n\t\t\t\tepidope_flag_protein.append(0)\n\n\t\t\tif 0 in epidope_flag_protein and 1 in epidope_flag_protein:\n\t\t\t\tfpr, tpr, thresholds = metrics.roc_curve(epidope_flag_protein, epidope_scores_protein)\n\t\t\t\ttprs_prot.append(interp(mean_fpr, fpr, tpr))\n\t\t\t\ttprs_prot[-1][0] = 0.04\n\t\t\t\troc_auc_prot = metrics.auc(fpr, tpr)\n\t\t\t\taucs_prot.append(roc_auc_prot)\n\n\t\tdeepipred_scores_kfold = np.array(deepipred_scores_kfold)\n\t\tdeepipred_scores = np.append(deepipred_scores,deepipred_scores_kfold)\n\t\tdeepipred_flag_kfold = np.array(deepipred_flag_kfold)\n\t\tdeepipred_flag = np.append(deepipred_flag,deepipred_flag_kfold)\n\n\t\tfpr, tpr, thresholds = metrics.roc_curve(deepipred_flag_kfold, deepipred_scores_kfold)\n\t\ttprs.append(interp(mean_fpr, fpr, tpr))\n\t\ttprs[-1][0] = 0.0\n\t\t# roc_auc = _binary_roc_auc_score(fpr, tpr, thresh)\n\t\troc_auc = metrics.auc(fpr[fpr<thresh], tpr[fpr<thresh])\n\t\taucs.append(roc_auc)\n\nmean_tpr = np.mean(tprs, axis=0)\nmean_tpr[-1] = 1.0\n# mean_auc = _binary_roc_auc_score(mean_fpr, mean_tpr, thresh)\nmean_auc = 
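The k-fold block above averages ROC curves by interpolating each fold's TPR onto a shared FPR grid before taking the mean. Two side notes: scipy's interp is just a re-export of numpy.interp (and is gone from newer SciPy releases), and the per-protein branch pins tprs_prot[-1][0] to 0.04, which looks like a typo for 0.0. The averaging core, isolated with toy curves:

import numpy as np

mean_fpr = np.linspace(0, 1, 100)             # common grid for all folds
folds = [(np.array([0, .2, 1.]), np.array([0, .7, 1.])),   # toy (fpr, tpr) pairs
         (np.array([0, .5, 1.]), np.array([0, .9, 1.]))]

tprs = []
for fpr, tpr in folds:
	t = np.interp(mean_fpr, fpr, tpr)         # resample this fold's curve
	t[0] = 0.0                                # pin the origin, as above
	tprs.append(t)

mean_tpr = np.mean(tprs, axis=0)
mean_tpr[-1] = 1.0                            # pin the end point, as above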
metrics.auc(mean_fpr[mean_fpr<thresh], mean_tpr[mean_fpr<thresh])\nstd_auc = np.std(aucs)\nstd_tpr = np.std(tprs, axis=0)\n\nmean_tpr_prot = np.mean(tprs_prot, axis=0)\nmean_tpr_prot[-1] = 1.0\nmean_auc_prot = metrics.auc(mean_fpr_prot, mean_tpr_prot)\nstd_auc_prot = np.std(aucs_prot)\nstd_tpr_prot = np.std(tprs_prot, axis=0)\n\ntprs_upper = np.minimum(mean_tpr + std_tpr, 1)\ntprs_lower = np.maximum(mean_tpr - std_tpr, 0)\n# print(any(deepipred_flag == bepipred_flag))\n\n# read raptorx\nraptorx_scores = []\nraptorx_flag = []\niupred_scores = []\niupred_flag = []\nfor testid in testproteinIDs:\n\traptorx_file = f'/home/le86qiz/Documents/Konrad/tool_comparison/comparison3/raptorx/results/flo_files/{testid}.csv'\n\ttry:\n\t\traptorx_table = pd.read_csv(raptorx_file, sep=\"\\t\", index_col=None).values\n\texcept:\n\t\tcontinue\n\tiupred_table = raptorx_table[:, 6]\n\tstructure_table = raptorx_table[:, 2] - raptorx_table[:, 1] - raptorx_table[:, 0] # coil - helix - sheet\n\taccessibility_table = raptorx_table[:, 5] - raptorx_table[:, 3] # exposed - bury\n\traptorx_table = structure_table + accessibility_table\n\tfor startstop in startstop_epi.get(testid, []):\n\t\tstart = startstop[0]\n\t\tstop = startstop[1]\n\t\tscores = raptorx_table[start:stop]\n\t\tscore = sum(scores) / len(scores)\n\t\traptorx_scores.append(score)\n\t\traptorx_flag.append(1)\n\t\tscores = iupred_table[start:stop]\n\t\tscore = sum(scores) / len(scores)\n\t\tiupred_scores.append(score)\n\t\tiupred_flag.append(1)\n\tfor startstop in startstop_nonepi.get(testid, []):\n\t\tstart = startstop[0]\n\t\tstop = startstop[1]\n\t\tscores = raptorx_table[start:stop]\n\t\tscore = sum(scores) / len(scores)\n\t\traptorx_scores.append(score)\n\t\traptorx_flag.append(0)\n\t\tscores = iupred_table[start:stop]\n\t\tscore = sum(scores) / len(scores)\n\t\tiupred_scores.append(score)\n\t\tiupred_flag.append(0)\nraptorx_scores = np.array(raptorx_scores)\nraptorx_flag = np.array(raptorx_flag)\niupred_scores = np.array(iupred_scores)\niupred_flag = np.array(iupred_flag)\n\n# calculate roc curve\nfrom sklearn.metrics import roc_curve, auc\n\n\nfpr = {}\ntpr = {}\nroc_auc = {}\nthresholds = {}\n\nkey = 'bepipred'\nfpr[key], tpr[key], thresholds[key] = metrics.roc_curve(bepipred_flag, bepipred_scores, pos_label=1)\n# roc_auc[key] = metrics.roc_auc_score(bepipred_flag, bepipred_scores, max_fpr=thresh)\nroc_auc[key] = metrics.auc(fpr[key][fpr[key]<=thresh], tpr[key][fpr[key]<=thresh])\nkey = 'antigenicity'\nfpr[key], tpr[key], thresholds[key] = metrics.roc_curve(antigenicity_flag, antigenicity_scores, pos_label=1)\n# roc_auc[key] = metrics.roc_auc_score(antigenicity_flag, antigenicity_scores, max_fpr=thresh)\nroc_auc[key] = metrics.auc(fpr[key][fpr[key]<=thresh], tpr[key][fpr[key]<=thresh])\nkey = 'hydrophilicity'\nfpr[key], tpr[key], thresholds[key] = metrics.roc_curve(hydrophilicity_flag, hydrophilicity_scores, pos_label=1)\n# roc_auc[key] = metrics.roc_auc_score(hydrophilicity_flag, hydrophilicity_scores, max_fpr=thresh)\nroc_auc[key] = metrics.auc(fpr[key][fpr[key]<=thresh], tpr[key][fpr[key]<=thresh])\nkey = 'accessibility'\nfpr[key], tpr[key], thresholds[key] = metrics.roc_curve(accessibility_flag, accessibility_scores, pos_label=1)\n# roc_auc[key] = metrics.roc_auc_score(accessibility_flag, accessibility_scores, max_fpr=thresh)\nroc_auc[key] = metrics.auc(fpr[key][fpr[key]<=thresh], tpr[key][fpr[key]<=thresh])\nkey = 'betaturn'\nfpr[key], tpr[key], thresholds[key] = metrics.roc_curve(betaturn_flag, betaturn_scores, pos_label=1)\n# roc_auc[key] = 
metrics.roc_auc_score(betaturn_flag, betaturn_scores, max_fpr=thresh)\nroc_auc[key] = metrics.auc(fpr[key][fpr[key]<=thresh], tpr[key][fpr[key]<=thresh])\nkey = 'deepipred'\nfpr[key], tpr[key], thresholds[key] = metrics.roc_curve(deepipred_flag, deepipred_scores, pos_label=1)\n# roc_auc[key] = metrics.roc_auc_score(deepipred_flag, deepipred_scores, max_fpr=thresh)\nroc_auc[key] = metrics.auc(fpr[key][fpr[key]<=thresh], tpr[key][fpr[key]<=thresh])\nkey = 'lbtope'\nfpr[key], tpr[key], thresholds[key] = metrics.roc_curve(lbtope_flag, lbtope_scores, pos_label=1)\n# roc_auc[key] = metrics.roc_auc_score(lbtope_flag, lbtope_scores, max_fpr=thresh)\nroc_auc[key] = metrics.auc(fpr[key][fpr[key]<=thresh], tpr[key][fpr[key]<=thresh])\nkey = 'iupred'\nfpr[key], tpr[key], thresholds[key] = metrics.roc_curve(iupred_flag, iupred_scores, pos_label=1)\n# roc_auc[key] = metrics.roc_auc_score(iupred_flag, iupred_scores, max_fpr=thresh)\nroc_auc[key] = metrics.auc(fpr[key][fpr[key]<=thresh], tpr[key][fpr[key]<=thresh])\n\n# plot\nplt.figure(figsize=(6, 6))\nlw = 2\nfor i in fpr:\n\tfpr[i] = [x for x in fpr[i] if x <= thresh]\n\ttpr[i] = tpr[i][:len(fpr[i])]\n\n\t# youden j score\n\tinterpolated_tpr = np.interp([0.1],fpr[i], tpr[i])[0]\n\tprint(i, interpolated_tpr - 0.1)\nmaxtpr = 0\nfor x in tpr:\n\tmaxtpr = max(maxtpr, max(tpr[x]))\n\nCOLORS = pickle.load(open('/home/mu42cuq/scripts/mypymo/colordictionary.pydict', 'rb'))\n\n# show std-div\nstd_roc = plt.fill_between(mean_fpr, tprs_lower, tprs_upper, color='grey', alpha=.2, label=r'$\\pm$ 1 std. dev.')\n\nepidope_roc, = plt.plot(mean_fpr, mean_tpr, color='green', lw=2, label='Mean ROC EpiDope \\n(AUC = %0.4f $\\pm$ %0.4f)' % (mean_auc, std_auc))\n# plt.plot(mean_fpr_prot, mean_tpr_prot, color='red', lw=2, label=r'Mean ROC EpiDope per protein (AUC = %0.4f $\\pm$ %0.4f)' % (mean_auc_prot, std_auc_prot))\n# plt.plot(fpr['deepipred'], tpr['deepipred'], color='green', lw=lw,\n# label='EpiDope (area = %0.4f)' % roc_auc['deepipred'])\nlbtope_roc, = plt.plot(fpr['lbtope'], tpr['lbtope'], color='orange', linestyle='--', lw=lw,\n label='LBtope (AUC = %0.4f)' % roc_auc['lbtope'])\niupred_roc, = plt.plot(fpr['iupred'], tpr['iupred'], color='lightcoral', linestyle='-.', lw=lw,\n label='IUPred (AUC = %0.4f)' % roc_auc['iupred'])\nbepipred_roc, = plt.plot(fpr['bepipred'], tpr['bepipred'], color='goldenrod', lw=lw, label='Bepipred 2 (AUC = %0.4f)' % roc_auc['bepipred'])\nantigen_roc, = plt.plot(fpr['antigenicity'], tpr['antigenicity'], color='grey', linestyle=':', lw=lw,\n label='Antigenicity-avg (AUC = %0.4f)' % roc_auc['antigenicity'])\nhydro_roc, = plt.plot(fpr['hydrophilicity'], tpr['hydrophilicity'], color='peru', lw=lw, linestyle='--',\n label='Hydrophilicity-avg (AUC = %0.4f)' % roc_auc['hydrophilicity'])\naccess_roc, = plt.plot(fpr['accessibility'], tpr['accessibility'], color='teal', linestyle='--', lw=lw,\n label='Accessibility-avg (AUC = %0.4f)' % roc_auc['accessibility'])\nbetaturn_roc, = plt.plot(fpr['betaturn'], tpr['betaturn'], color='lightsteelblue', linestyle='-.', lw=lw,\n label='Betaturn-avg (AUC = %0.4f)' % roc_auc['betaturn'])\n\nrandom_roc, = plt.plot([0, thresh], [0, thresh], color='navy', lw=lw, linestyle='--', label='random (AUC = 0.50)' )\n\n\nplt.xlim([0.0, thresh])\nplt.ylim([0.0, 1.0 * maxtpr])\nplt.xlabel('False Positive Rate')\nplt.ylabel('True Positive Rate')\nplt.title('Receiver operating characteristic')\nplt.legend(handles=[epidope_roc, betaturn_roc, access_roc, antigen_roc, hydro_roc, iupred_roc, bepipred_roc, std_roc, lbtope_roc, 
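The small loop above that prints interpolated_tpr - 0.1 reports the Youden index (TPR − FPR) of every method at one fixed operating point, FPR = 0.1, rather than its maximum over all thresholds. Isolated with a toy curve:

import numpy as np

fpr = np.array([0.0, 0.05, 0.2, 1.0])   # a toy ROC curve
tpr = np.array([0.0, 0.40, 0.6, 1.0])

tpr_at_01 = np.interp(0.1, fpr, tpr)    # TPR where the curve crosses FPR = 0.1
print(tpr_at_01 - 0.1)                  # Youden J at this operating point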
random_roc], loc=\"lower right\", prop={'size': 9})\nplt.savefig(os.path.join(deepipred_results_dir, f\"ROC_prediction_comparison_{thresh}_auc10%.pdf\"), bbox_inches=\"tight\",\n pad_inches=0)\nplt.show()\nplt.close()\n\n# calculate precision-recall curve\nprecision = {}\nrecall = {}\nthresholds = {}\n\nkey = 'bepipred'\nprecision[key], recall[key], thresholds[key] = precision_recall_curve(bepipred_flag, bepipred_scores, pos_label=1)\nroc_auc[key] = auc(recall[key], precision[key])\nkey = 'antigenicity'\nprecision[key], recall[key], thresholds[key] = precision_recall_curve(antigenicity_flag, antigenicity_scores,\n pos_label=1)\nroc_auc[key] = auc(recall[key], precision[key])\nkey = 'hydrophilicity'\nprecision[key], recall[key], thresholds[key] = precision_recall_curve(hydrophilicity_flag, hydrophilicity_scores,\n pos_label=1)\nroc_auc[key] = auc(recall[key], precision[key])\nkey = 'accessibility'\nprecision[key], recall[key], thresholds[key] = precision_recall_curve(accessibility_flag, accessibility_scores,\n pos_label=1)\nroc_auc[key] = auc(recall[key], precision[key])\nkey = 'betaturn'\nprecision[key], recall[key], thresholds[key] = precision_recall_curve(betaturn_flag, betaturn_scores, pos_label=1)\nroc_auc[key] = auc(recall[key], precision[key])\nkey = 'deepipred'\nprecision[key], recall[key], thresholds[key] = precision_recall_curve(deepipred_flag, deepipred_scores, pos_label=1)\nroc_auc[key] = auc(recall[key], precision[key])\nkey = 'lbtope'\nprecision[key], recall[key], thresholds[key] = precision_recall_curve(lbtope_flag, lbtope_scores, pos_label=1)\nroc_auc[key] = auc(recall[key], precision[key])\nkey = 'iupred'\nprecision[key], recall[key], thresholds[key] = precision_recall_curve(iupred_flag, iupred_scores, pos_label=1)\nroc_auc[key] = auc(recall[key], precision[key])\n\n# plot\nplt.figure(figsize=(6, 6))\nlw = 2\nthresh = 1\nfor i in recall:\n\trecall[i] = [x for x in recall[i] if x <= thresh]\n\tprecision[i] = precision[i][:len(recall[i])]\nmaxtpr = 0\nfor x in precision:\n\tmaxtpr = max(maxtpr, max(precision[x]))\nepidope_pr, = plt.plot(recall['deepipred'], precision['deepipred'], color='green', lw=lw,\n label='EpiDope (AUC = %0.4f)' % roc_auc['deepipred'])\nbepipred_pr, = plt.plot(recall['bepipred'], precision['bepipred'], color='goldenrod', lw=lw,\n label='Bepipred 2 (AUC = %0.4f)' % roc_auc['bepipred'])\nantigen_pr, = plt.plot(recall['antigenicity'], precision['antigenicity'], color='grey', linestyle=':', lw=lw,\n label='Antigenicity-avg (AUC = %0.4f)' % roc_auc['antigenicity'])\nhydro_pr, = plt.plot(recall['hydrophilicity'], precision['hydrophilicity'], color='peru', lw=lw, linestyle='--',\n label='Hydrophilicity-avg (AUC = %0.4f)' % roc_auc['hydrophilicity'])\naccess_pr, = plt.plot(recall['accessibility'], precision['accessibility'], color='teal', linestyle='--', lw=lw,\n label='Accessibility-avg (AUC = %0.4f)' % roc_auc['accessibility'])\nbetaturn_pr, = plt.plot(recall['betaturn'], precision['betaturn'], color='lightsteelblue', linestyle='-.', lw=lw,\n label='Betaturn-avg (AUC = %0.4f)' % roc_auc['betaturn'])\nlbtope_pr, = plt.plot(recall['lbtope'], precision['lbtope'], color='orange', linestyle='--', lw=lw,\n label='LBtope (AUC = %0.4f)' % roc_auc['lbtope'])\niupred_pr, = plt.plot(recall['iupred'], precision['iupred'], color='lightcoral', linestyle=':', lw=lw,\n label='IUPred (AUC = %0.4f)' % roc_auc['iupred'])\n\nratio_true_false = deepipred_flag.sum() / len(deepipred_flag)\nrandom_pr, = plt.plot([0, 1], [ratio_true_false, ratio_true_false], color='navy', 
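linestyle='--', label='random (AUC = %0.4f)' % ratio_true_false)\n\n# NOTE: minimal sketch -- the horizontal baseline equals the positive-class prevalence, i.e. the\n# expected precision of a random ranker. As a cross-check, average precision summarises the same\n# curve without trapezoid interpolation (illustrative only, not part of the figure):\nfrom sklearn.metrics import average_precision_score\nprint('AP EpiDope:', average_precision_score(deepipred_flag, deepipred_scores))\n\nrandom_pr, = plt.plot([0, 1], [ratio_true_false, ratio_true_false], color='navy', 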
linestyle='--',\n label='random (AUC = %0.4f)' % ratio_true_false)\nplt.xlim([0.0, thresh])\nplt.ylim([0.0, 1.0 * maxtpr])\nplt.xlabel('Recall')\nplt.ylabel('Precision')\nplt.title('Precision-Recall Curve')\nplt.legend(loc=\"lower right\", handles=[epidope_pr, betaturn_pr, access_pr, antigen_pr, hydro_pr, iupred_pr, bepipred_pr, lbtope_pr, random_pr], prop={'size': 9})\nplt.savefig(os.path.join(deepipred_results_dir, f\"precision_recall_comparison_{thresh}.pdf\"), bbox_inches=\"tight\",\n pad_inches=0)\nplt.close()\n\nyPred_0 = deepipred_scores[deepipred_flag == 0]\nyPred_1 = deepipred_scores[deepipred_flag == 1]\nyPred_total = [yPred_0, yPred_1]\n\nplt.hist(yPred_total, bins=20, range=(0, 1), stacked=False, label=['no Epitope', 'true Epitope'], density=True)\nplt.legend()\nplt.savefig(os.path.join(deepipred_results_dir, f\"prediction_distribution.pdf\"))\nplt.close()\n" }, { "alpha_fraction": 0.6648568511009216, "alphanum_fraction": 0.6744817495346069, "avg_line_length": 36.85981369018555, "blob_id": "e0fa5d595db164371eff08f7c2a25367a31912fd", "content_id": "8771156764ff1b08a4d944ccfef3ab26fbfbb45a", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4052, "license_type": "permissive", "max_line_length": 123, "num_lines": 107, "path": "/predict_with_DL.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "from keras import models\nfrom keras import layers\nimport os\nimport pandas as pd\nfrom sklearn.preprocessing import LabelEncoder\nimport sys\n\nimport numpy as np\n# import tensorflow as tf\n# # tf.logging.set_verbosity(tf.logging.ERROR)\n# os.environ['TF_CPP_MIN_LOG_LEVEL'] = '0'\n\nclass Protein_seq():\n def __init__(self, sequence, score, over_threshold, positions=None):\n self.sequence = sequence\n self.score = score\n self.over_threshold = over_threshold\n if positions == None:\n self.positions = list(range(1, len(self.sequence) + 1))\n else:\n self.positions = positions\n\n\ndef build_model(nodes, seq_length, dropout=0):\n model = models.Sequential()\n model.add(layers.Embedding(20, 10, input_length=seq_length))\n model.add(layers.Bidirectional(layers.LSTM(nodes, return_sequences=True, dropout=dropout, recurrent_dropout=0.2)))\n model.add(layers.Bidirectional(layers.LSTM(nodes, dropout=dropout, recurrent_dropout=0.2)))\n model.add(layers.Dense(nodes))\n model.add(layers.LeakyReLU(alpha=0.01))\n model.add(layers.Dense(2, activation='softmax'))\n\n model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['acc'])\n model.summary()\n return model\n\n\ndef load_raw_AA_data(path):\n X_test_old = pd.read_csv(path, delimiter='\\t', dtype='str', header=None).values\n X_test = parse_amino(x=X_test_old, generator=False)\n\n return X_test\n\n\ndef parse_amino(x):\n amino = \"GALMFWKQESPVICYHRNDT\"\n encoder = LabelEncoder()\n encoder.fit(list(amino))\n out = []\n for i in x:\n dnaSeq = i[1].upper()\n encoded_X = encoder.transform(list(dnaSeq))\n out.append(encoded_X)\n return np.array(out)\n\n\ndef split_AA_seq(seq, slicesize):\n splited_AA_seqs = []\n for i in range(0, len(seq) - slicesize):\n splited_AA_seqs.append([i + (slicesize // 2), seq[i:i + 50]])\n\n return np.array(splited_AA_seqs)\n\n\nif __name__ == \"__main__\":\n # dummy dict is the input\n # dummy_dict = {\"name\": \"GALMFWKQESPVICYHRNDTGALMFWKQESPVICY\"}\n dummy_dict = {\"name\": \"GALMFWKQESPVICYHRNDTGALMFWKQESPVICYHRNDTGALMFWKQESPVICYHRNDTGALMFWKQESPVICYHRNDTGALMFWKQESPVDT\"}\n cutoff = 0.5\n # slicesize is the amount 
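of amino acids in one input window\n    # NOTE (comment-only sketch): split_AA_seq slides a slicesize-long window over the protein\n    # and stores each window together with its centre position, e.g. for a 100-AA sequence and\n    # slicesize 50 the first element is [25, seq[0:50]]; the per-window scores are later written\n    # back to exactly those centre indices.\n    # slicesize is the amount 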
of AA which are used as input to predict likelihood of epitope\n    slicesize = 50\n    nodes = slicesize\n\n    model = build_model(nodes, seq_length=slicesize)\n    # exit()\n    # location of weights from the previously trained model\n    model_path = \"/home/go96bix/projects/epitop_pred/epitope_data/weights.best.loss.test_generator.hdf5\"\n    # load weights, after this step the model behaves as if we trained it\n    model.load_weights(model_path)\n\n    output_dict = {}\n\n    # go over all entries in dict\n    for file_name in dummy_dict.keys():\n        # slice the long AA sequence into short segments so it can be used as input for the neural network\n        seq_slices = split_AA_seq(dummy_dict[file_name], slicesize)\n        # parse input to numerical values\n        X_test = parse_amino(seq_slices)\n        # finally predict the epitopes\n        Y_pred_test = model.predict(X_test)\n\n        # column 0 in Y_pred_test is the likelihood that the slice is NOT an Epitope; mostly interesting\n        # for us is col 1, which contains the likelihood of being an epitope\n        epi_score = Y_pred_test[:, 1]\n\n        # use leading and ending zeros so that the score array has the same length as the input sequence\n        score = np.zeros(len(dummy_dict[file_name]))\n        # leading AAs which are not predictable get the value of the first prediction (where this AA was involved)\n        score[0:int(seq_slices[0][0])] = epi_score[0]\n        # trailing AAs which are not predictable get the value of the last prediction (where this AA was involved)\n        score[int(seq_slices[-1][0]):] = epi_score[-1]\n        score[np.array(seq_slices[:,0],dtype=int)] = epi_score\n\n        score_bool = score > cutoff\n\n        protein = Protein_seq(sequence=dummy_dict[file_name], score=score, over_threshold=score_bool)\n        output_dict.update({file_name:protein})\n\n" }, { "alpha_fraction": 0.6318743228912354, "alphanum_fraction": 0.6453422904014587, "avg_line_length": 26.015151977539062, "blob_id": "ad898d203728d8a34056569659a362b8c2680c02", "content_id": "55cfcfd67d0fcd6a7626667b335349e049fc2469", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1782, "license_type": "permissive", "max_line_length": 119, "num_lines": 66, "path": "/utils/download_from_iedb.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "import urllib.request\nimport pickle\nimport pandas as pd\n\n\"\"\"\nDownload \"all\" linear B-cell epitope entries from the IEDB\n\"\"\"\n\n\n# IDArray = get_accessions()\nlength = 492000\nnegative_samples = []\npositive_samples = []\nnot_working = []\n\nsamples = {}\ndf = pd.read_csv(\"/home/go96bix/projects/raw_data/bcell_full_v3.csv\",\",\",skiprows=1)\n\nfor i, line in df.iterrows():\n\tif line[\"Object Type\"] != \"Linear peptide\":\n\t\tcontinue\n\telse:\n\t\t# url = f'http://www.iedb.org/epitope/{i}'\n\t\turl = line[\"Epitope IRI\"]\n\t\tj = url.split(\"/\")[-1]\n\t\tresponse = urllib.request.urlopen(url)\n\t\tdata = response.read() # a `bytes` object\n\t\tpage_source = str(data)\n\n\t\tif '404 Error' in page_source:\n\t\t\tnot_working.append(j)\n\t\t\tcontinue\n\n\t\t# find b_cell_assays part\n\t\tstart = page_source.find('\"type\":\"bcell\"')\n\t\tstop = page_source[start::].find(\"]\")\n\t\tb_cell_assays = page_source[start:start + stop]\n\t\tparameters = b_cell_assays.split(\",\")\n\n\t\t# count positive assays\n\t\tpos_counts = sum([int(i.split(\":\")[1].strip('\"')) for i in parameters if i.startswith('\"positive_count\"')])\n\t\t# count total assays\n\t\ttotal_counts = sum([int(i.split(\":\")[1].strip('}').strip('\"')) for i in parameters if 
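i.startswith('\"total_count\"')])\n\n\t\t# NOTE: decision rule applied below: entries with fewer than two assays are skipped, zero\n\t\t# positive assays puts a peptide into the negative set, two or more into the positive set;\n\t\t# exactly one positive assay only triggers the diagnostic print and the peptide ends up in\n\t\t# neither output file.\n\t\ttotal_counts = sum([int(i.split(\":\")[1].strip('}').strip('\"')) for i in parameters if 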
i.startswith('\"total_count\"')])\n\n\t\tsamples.update({int(j):(pos_counts,total_counts)})\n\n\t\tif total_counts < 2:\n\t\t\tcontinue\n\t\telif pos_counts == 0:\n\t\t\tnegative_samples.append(str(j)+\"\\n\")\n\t\telif pos_counts >= 2:\n\t\t\tpositive_samples.append(str(j)+\"\\n\")\n\t\telse:\n\t\t\tprint(f\"{pos_counts}/{total_counts}\")\n\n\t\tprint(f\"\\t===== {i+1} / {length} -- {int(((i+1) / length)*100)}% =====\") #, end='\\r')\n\npickle.dump(samples, open(\"all_samples.pkl\",\"wb\"))\n\nwith open(\"negative_samples.txt\", \"w\") as out:\n\tout.writelines(negative_samples)\n\nwith open(\"positive_samples.txt\", \"w\") as out:\n\tout.writelines(positive_samples)\n\nprint(f\"samples that could not be retrieved: {not_working}\")" }, { "alpha_fraction": 0.5783497095108032, "alphanum_fraction": 0.5980318188667297, "avg_line_length": 25.403846740722656, "blob_id": "b0a40e8bd7b29bcb41d13d550d4146b13167ec4d", "content_id": "9fbede122e2cd4c1b89a44464951f4f2d75b0a08", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1321, "license_type": "permissive", "max_line_length": 95, "num_lines": 52, "path": "/utils/plot_distribution_epitopes.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "import matplotlib.pyplot as plt\n\nnon_epi_pos = []\ntrue_epi_pos = []\nnon_epi = True\n\nwith open(\"/home/go96bix/projects/epitop_pred/utils/iedb_linear_epitopes.fasta\") as input_file:\n\tfor line in input_file:\n\t\tif line.startswith(\">\"):\n\t\t\theader = line.strip()\n\t\t\tsseqid = header[1:]\n\t\t\tif sseqid.startswith(\"Negative\"):\n\t\t\t\tnon_epi = True\n\t\t\telif sseqid.startswith(\"Positive\"):\n\t\t\t\tnon_epi = False\n\t\t\telse:\n\t\t\t\tprint(f\"error: header {header} not in positive or negative set\")\n\t\t\t\texit()\n\n\t\telse:\n\t\t\tseq = line\n\n\t\t\tupper_pos = [i for i, c in enumerate(seq) if c.isupper()]\n\t\t\tepitopes = []  # reset per sequence; otherwise hits of the previous record would leak into this one\n\t\t\tif len(upper_pos) > 1:\n\t\t\t\tstart = upper_pos[0]\n\t\t\t\tstop = upper_pos[0]\n\t\t\t\tfor i in range(1, len(upper_pos)):\n\t\t\t\t\tif upper_pos[i] == stop + 1:\n\t\t\t\t\t\tstop = upper_pos[i]\n\t\t\t\t\telse:\n\t\t\t\t\t\tif stop > start:\n\t\t\t\t\t\t\tepitopes.append((start, stop))\n\t\t\t\t\t\tstart = upper_pos[i]\n\t\t\t\t\t\tstop = upper_pos[i]\n\t\t\t\tepitopes.append((start, stop))\n\t\t\tfor hit in epitopes:\n\t\t\t\tmean_pos = (hit[0] + hit[1]) / 2\n\t\t\t\tstart = hit[0]\n\t\t\t\tstop = hit[1]\n\t\t\t\trel_start = round((start / len(seq)) * 100)\n\t\t\t\trel_stop = round((stop / len(seq)) * 100)\n\t\t\t\tfor i in range(rel_start, rel_stop + 1):\n\t\t\t\t\tif non_epi:\n\t\t\t\t\t\tnon_epi_pos.append(i)\n\t\t\t\t\telse:\n\t\t\t\t\t\ttrue_epi_pos.append(i)\n\nplt.hist(true_epi_pos, 100)\nplt.show()\nplt.hist(non_epi_pos, 100)\nplt.show()\n" }, { "alpha_fraction": 0.6590364575386047, "alphanum_fraction": 0.6742569804191589, "avg_line_length": 32.216217041015625, "blob_id": "7e9f21fd72de557058cf9fa50bd894c4911786a4", "content_id": "2b5f43f67211136741431a1d74c0191904a4b4ad", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8344, "license_type": "permissive", "max_line_length": 123, "num_lines": 259, "path": "/utils/input_to_embeddings.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "\"\"\"Load pre-trained model:\"\"\"\nfrom allennlp.commands.elmo import ElmoEmbedder\nfrom allennlp.modules.elmo import Elmo, batch_to_ids\nfrom pathlib import Path\nimport 
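os\n\n# NOTE: minimal sketch of using the SeqVec/ELMo embedder wrapped further below; illustrative\n# only. The 1024-d per-residue representation used in this file is the sum over the three\n# ELMo layers.\ndef embed_single_sequence_sketch(embedder, seq):\n\temb = embedder.seqvec.embed_sentence(list(seq))  # shape (3, len(seq), 1024)\n\treturn np.array(emb).sum(axis=0)\n\nimport 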
os\nimport numpy as np\nimport pickle\nimport time\nimport torch.nn as nn\nimport torch\nimport subprocess\nimport collections, gc, resource, torch\n\nclass Elmo_embedder():\n\tdef __init__(self, model_dir='/home/go96bix/projects/deep_eve/seqvec/uniref50_v2', weights=\"/weights.hdf5\",\n\t\t\t\t options=\"/options.json\"):\n\t\ttorch.set_num_threads(multiprocessing.cpu_count() // 2)\n\t\tself.model_dir = model_dir\n\t\tself.weights = self.model_dir + weights\n\t\tself.options = self.model_dir + options\n\t\tself.seqvec = ElmoEmbedder(self.options, self.weights, cuda_device=-1)\n\n\tdef elmo_embedding(self, X, start=None, stop=None):\n\t\tprint(X.shape)\n\t\tif start != None and stop != None:\n\t\t\tX_trimmed = X[:, start:stop]\n\t\t\tX_parsed = self.seqvec.embed_sentences(X_trimmed)\n\t\t\tX_parsed = (np.array(list(X_parsed)).mean(axis=1))\n\n\t\telse:\n\t\t\tX_parsed = []\n\t\t\t# X.sort(key=len)\n\t\t\tembedding = self.seqvec.embed_sentences(X,batch_size=2)\n\t\t\tfor i in embedding:\n\t\t\t\tX_parsed.append(np.array(i).sum(axis=0))\n\t\treturn X_parsed\n\ndef debug_memory():\n\tprint('maxrss = {}'.format(\n\t\tresource.getrusage(resource.RUSAGE_SELF).ru_maxrss))\n\ttensors = collections.Counter((str(o.device), o.dtype, tuple(o.shape))\n\t for o in gc.get_objects()\n\t if torch.is_tensor(o))\n\n\tfor line in tensors.items():\n\t\tprint('{}\\t{}'.format(*line))\n\n\ndef dump_tensors(gpu_only=True):\n\ttorch.cuda.empty_cache()\n\tfor obj in gc.get_objects():\n\t\ttry:\n\t\t\tif torch.is_tensor(obj):\n\t\t\t\tif not gpu_only or obj.is_cuda:\n\t\t\t\t\tdel obj\n\t\t\t\t\tgc.collect()\n\t\t\telif hasattr(obj, \"data\") and torch.is_tensor(obj.data):\n\t\t\t\tif not gpu_only or obj.is_cuda:\n\t\t\t\t\tdel obj\n\t\t\t\t\tgc.collect()\n\t\texcept Exception as e:\n\t\t\tpass\n\n\ndef get_gpu_memory_map():\n\tresult = subprocess.check_output(\n\t\t[\n\t\t\t'nvidia-smi', '--query-gpu=memory.used',\n\t\t\t'--format=csv,nounits,noheader'\n\t\t])\n\n\treturn result\n\n\nclass Elmo_embedder():\n\tdef __init__(self, model_dir='/home/go96bix/projects/deep_eve/seqvec/uniref50_v2', weights=\"/weights.hdf5\",\n\t options=\"/options.json\"):\n\t\t# torch.set_num_threads(multiprocessing.cpu_count()//2)\n\t\tself.model_dir = model_dir\n\t\tself.weights = self.model_dir + weights\n\t\tself.options = self.model_dir + options\n\t\tself.seqvec = ElmoEmbedder(self.options, self.weights, cuda_device=-1)\n\n\tdef elmo_embedding(self, X):\n\t\tX_parsed = self.seqvec.embed_sentences(X, 100)\n\t\treturn list(X_parsed)\n\n\nclass DL_embedding():\n\tdef __init__(self, header, seq_embedding):\n\t\tself.sequences = np.array(seq_embedding)\n\t\tself.header = header\n\t\tself.seq_length = self.sequences.shape[0]\n\ndef file_len(fname):\n\twith open(fname) as f:\n\t\tfor i, l in enumerate(f):\n\t\t\tpass\n\treturn i + 1\n\n\ndef sort_file(fname, start_index=0, line_batch_size=1000000):\n\t\"\"\"\n\tCAUTION longest 1% will not be returned\n\t1. open file\n\t2. collect header and seqs from line start_index to start_index + line_batch_size\n\t3. sort by length\n\t4. 
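return the sorted lists\n\n\tUsage sketch ('proteins.fa' is a placeholder path, illustrative only):\n\t\theader, seqs, next_start, reached_end = sort_file('proteins.fa', start_index=0)\n\n\t4. 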
return sorted lists of header and seqs\n\t:param fname: input file path\n\t:param start_index: number line were to start\n\t:param line_batch_size: how many line to sort\n\t:return: sorted list of header, seqs\n\t\"\"\"\n\n\twith open(fname) as f:\n\t\tstart = time.time()\n\t\theader = []\n\t\tseqs = []\n\t\tend = False\n\t\tfor index, line in enumerate(f):\n\t\t\tif index < start_index:\n\t\t\t\tpass\n\t\t\telif index < start_index + line_batch_size:\n\t\t\t\tif line.startswith(\">\"):\n\t\t\t\t\theader.append(line.strip())\n\t\t\t\telse:\n\t\t\t\t\tseqs.append(line.strip())\n\t\t\t\tend = True\n\t\t\telse:\n\t\t\t\tend = False\n\t\t\t\tbreak\n\n\t\tlengths = [len(i) for i in seqs]\n\t\t# sorting = np.argsort(lengths)[int(0.99 * len(lengths)):]\n\t\tsorting = np.argsort(lengths)[int(0.955 * len(lengths)):int(0.965 * len(lengths))]\n\t\t# sorting = np.argsort(lengths)[0:int(0.99 * len(lengths))]\n\t\theader = np.array([header[i] for i in sorting])\n\t\tseqs = np.array([seqs[i] for i in sorting])\n\t\tstop = time.time()\n\t\tprint(stop - start)\n\treturn header, seqs, index, end\n\n\ndef parse_file(path='../../input/Archeae_rep_seq.fa', max_array_size=65000):\n\t\"\"\"\n\t1. open fasta file\n\t2. sort batch of lines by length of sequences\n\t3. build batches smaller max_array_size (65.000 ~ 32GB GPU RAM)\n\t4. embed batch and save each sequence as single file\n\n\t:param path: path to input fasta file\n\t:param max_array_size: size of data proccessed on same time on GPU should be set as high as possible for faster runtime\n\t:return: save each seq in path as \"number\".pkl\n\t\"\"\"\n\n\tdef embedd2(num_tokens, startindex, seq, header):\n\t\tprint(num_tokens)\n\t\tcharacter_ids = batch_to_ids(seq)\n\t\t# if device != \"cpu\":\n\t\t# torch.cuda.empty_cache()\n\t\tcharacter_ids.to(device)\n\t\tembedding = elmo(character_ids)\n\t\ttensors = embedding['elmo_representations']\n\t\tdel character_ids, embedding\n\t\t# print(f\"GPU MEMORY: {get_gpu_memory_map()}\")\n\t\tembedding = [tensor.detach().cpu().numpy() for tensor in tensors]\n\t\tembedding = (np.array(embedding).mean(axis=0))\n\n\t\tfor index, i in enumerate(embedding):\n\t\t\t# with open(f\"{directory}/{startindex+index}.pkl\", \"wb\") as outfile:\n\t\t\t\t# embedding_object = DL_embedding(header[index], i)\n\t\t\t\t# pickle.dump(embedding_object, outfile)\n\t\t\twith open(f\"{directory}/{header[index][1:]}.pkl\", \"wb\") as outfile:\n\t\t\t\tembedding_i = (header[index],i[0:len(seq[index])])\n\t\t\t\tpickle.dump(embedding_i, outfile)\n\n\tdef embedd3(num_tokens, startindex, seq, header):\n\t\t# print(\"HELLOOOOOOO\")\n\t\tprint(num_tokens)\n\t\tseq_nested_list = np.array([list(i.upper()) for i in seq])\n\t\t# print(seq_nested_list)\n\t\tembedding = elmo_embedder.seqvec.embed_sentences(seq_nested_list)\n\t\tfor index, i in enumerate(embedding):\n\t\t\twith open(f\"{directory}/{header[index][1:]}.pkl\", \"wb\") as outfile:\n\t\t\t\tembedding_i = (header[index],(np.array(i).sum(axis=0)))\n\t\t\t\tpickle.dump(embedding_i, outfile)\n\n\telmo_embedder = Elmo_embedder()\n\tnumber_lines = file_len(path)\n\tend = False\n\tstartindex_file = 0\n\tindex_output = 0\n\tk = 0\n\twhile end == False:\n\t\theaders, seqs, index, end = sort_file(path, start_index=startindex_file)\n\t\tstartindex_file = index\n\t\theader = []\n\t\tseq = []\n\t\tprint(f\"max length: {len(seqs[-1])}\")\n\t\tstart = time.time()\n\t\tstart_origin = start\n\t\t# num_tokens = 0\n\t\tmax_length = 0\n\t\tarray_size = 0\n\t\tfor index, seq_i in enumerate(seqs):\n\t\t\tdirectory = 
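os.path.dirname(path)\n\t\t\t# NOTE: array_size below approximates the padded-batch footprint as\n\t\t\t# max_length * (number of collected seqs + 1); e.g. with max_length 500 and 120 collected\n\t\t\t# sequences it reaches 60500, exceeds a max_array_size of 50000 (the value used in the\n\t\t\t# __main__ call) and the accumulated batch is flushed through embedd3 with the previous,\n\t\t\t# still fitting, size.\n\t\t\tdirectory = 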
os.path.dirname(path)\n\t\t\t# num_tokens += len(seq_i)\n\t\t\tif len(seq_i) > max_length:\n\t\t\t\tmax_length = len(seq_i)\n\t\t\tk += 1\n\t\t\tarray_size_old = array_size\n\t\t\tarray_size = max_length * (len(seq) + 1)\n\t\t\tif array_size > max_array_size:\n\t\t\t\t# if num_tokens > 60000:\n\t\t\t\t# header_last = header[-1]\n\t\t\t\tprint(array_size_old)\n\t\t\t\tembedd3(array_size_old, index_output, seq, header)\n\t\t\t\t# embedd2(num_tokens - len(seq_i), startindex, seq, header)\n\t\t\t\tstop = time.time()\n\t\t\t\tprint(stop - start)\n\t\t\t\tprint(f\"{k} ca. {k*100/(number_lines/2):>.3f}%\")\n\t\t\t\tstart = stop\n\t\t\t\tseq = []\n\t\t\t\theader = []\n\t\t\t\t# header.append(header_last)\n\t\t\t\t# num_tokens = len(seq_i)\n\t\t\t\tindex_output = k\n\t\t\theader.append(headers[index])\n\t\t\tseq.append(seq_i)\n\t\tembedd3(array_size, index_output, seq, header)\n\t\tindex_output = k\n\t\tprint(\"finish\")\n\t\ttotalTime = time.time() - start_origin\n\t\tprint(f\"total time {totalTime} s\")\n\nif __name__ == \"__main__\":\n\tfrom input_to_embeddings import DL_embedding\n\tcwd = os.getcwd()\n\t# model_dir = '/home/go96bix/virus_detection/seqvec/uniref50_v2/'\n\tmodel_dir = '/home/go96bix/projects/deep_eve/seqvec/uniref50_v2/'\n\tweights = 'weights.hdf5'\n\toptions = 'options.json'\n\t#\n\telmo = Elmo(model_dir + options, model_dir + weights, 3)\n\tdevice = \"cpu\"\n\t# device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n\tif torch.cuda.device_count() > 1:\n\t\tprint(\"Let's use\", torch.cuda.device_count(), \"GPUs!\")\n\t\telmo = nn.DataParallel(elmo)\n\n\telmo.to(device)\n\t# elmo_embedder = Elmo_embedder(model_dir=model_dir,weights=weights,options=options)\n\n\t# parse_file(\"/home/go96bix/virus_detection/Viruses/Viruses.fa\")\n\t# parse_file(\"/home/go96bix/virus_detection/input/Archeae_rep_seq.fa\")\n\t# parse_file(\"/mnt/local/uniprot_taxonomy/Eukaryota/Eukaryota_rep_seq.fa\")\n\t# parse_file(\"/home/go96bix/virus_detection/input_files/Eukaryota/Eukaryota_rep_seq.fa\")\n\tparse_file(\"/home/go96bix/projects/raw_data/embeddings_bepipred_samples/iedb_linear_epitopes.fasta\", max_array_size=50000)\n\t# parse_file(\"/home/go96bix/virus_detection/input_files/Bacteria/Bacteria_rep_seq.fa\")\n" }, { "alpha_fraction": 0.655297040939331, "alphanum_fraction": 0.6732982397079468, "avg_line_length": 42.961082458496094, "blob_id": "c14010647066e384fddb52dc46333c8539bdf499", "content_id": "b10b23310b3eb277638fe5979ebd7b3e9bc79aff", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 81328, "license_type": "permissive", "max_line_length": 281, "num_lines": 1850, "path": "/train_DL.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "from keras.callbacks import ModelCheckpoint, TensorBoard, EarlyStopping\nimport pickle\nfrom keras import layers, optimizers, models\nfrom keras.utils import to_categorical\nimport os\nimport pandas as pd\nfrom sklearn.preprocessing import LabelEncoder, scale, MinMaxScaler\nfrom sklearn.model_selection import train_test_split\nimport sys\nfrom keras.utils import plot_model\nfrom keras.regularizers import l2\nfrom utils import DataGenerator, DataParsing\nimport tensorflow as tf\n\nsys.path.insert(0, '/home/go96bix/projects/Masterarbeit/ML')\nimport matplotlib\n\nmatplotlib.rcParams['backend'] = 'Agg'\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport pickle\nfrom sklearn.utils import class_weight as clw\nimport sklearn.metrics as 
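metrics\n\n# NOTE: minimal sketch -- class_weight is imported above as clw; for the imbalanced epitope vs.\n# non-epitope labels, per-class weights could be derived like this (illustrative only, not wired\n# into the fit calls below):\ndef class_weights_sketch(y_int):\n\tweights = clw.compute_class_weight('balanced', classes=np.unique(y_int), y=y_int)\n\treturn dict(enumerate(weights))\n\nimport sklearn.metrics as 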
metrics\nfrom sklearn.model_selection import StratifiedKFold\nfrom sklearn.metrics import log_loss, accuracy_score, roc_auc_score\nfrom scipy.optimize import minimize\nfrom keras import backend as K\nimport math\nfrom keras.engine import Layer\n# import tensorflow_hub as hub\nfrom scipy import interp\n\n\n# class ElmoEmbeddingLayer(Layer):\n# \tdef __init__(self, **kwargs):\n# \t\tself.dimensions = 1024\n# \t\tself.trainable = True\n# \t\tsuper(ElmoEmbeddingLayer, self).__init__(**kwargs)\n#\n# \tdef build(self, input_shape):\n# \t\tself.elmo = hub.Module('https://tfhub.dev/google/elmo/2', trainable=self.trainable,\n# \t\t name=\"{}_module\".format(self.name))\n#\n# \t\tself.trainable_weights += K.tf.trainable_variables(scope=\"^{}_module/.*\".format(self.name))\n# \t\tsuper(ElmoEmbeddingLayer, self).build(input_shape)\n#\n# \tdef call(self, x, mask=None):\n# \t\tresult = self.elmo(K.squeeze(K.cast(x, tf.string), axis=1),\n# \t\t as_dict=True,\n# \t\t signature='default',\n# \t\t )['default']\n# \t\treturn result\n#\n# \tdef compute_mask(self, inputs, mask=None):\n# \t\treturn K.not_equal(inputs, '--PAD--')\n#\n# \tdef compute_output_shape(self, input_shape):\n# \t\treturn (input_shape[0], self.dimensions)\n\n\ndef set_trainability(model, trainable=False):\n\tmodel.trainable = trainable\n\tfor layer in model.layers:\n\t\tlayer.trainable = trainable\n\n\ndef auc_10_perc_fpr(y_true, y_pred):\n\tdef my_roc_auc_score(y_true_np, y_pred_np, max_fpr=0.1):\n\t\ty_true_np = np.append(y_true_np, np.array([1, 1]) - y_true_np[0])\n\t\ty_pred_np = np.append(y_pred_np, np.array([1, 1]) - y_true_np[0])\n\t\treturn roc_auc_score(y_true_np, y_pred_np, max_fpr=max_fpr)\n\n\treturn tf.py_func(my_roc_auc_score, (y_true, y_pred), tf.double)\n\n\ndef auc_10_perc_fpr_binary(y_true, y_pred):\n\tdef my_roc_auc_score(y_true_np, y_pred_np, max_fpr=0.1):\n\t\ty_true_np = np.append(y_true_np, np.array([1, 1]) - y_true_np[0])\n\t\ty_pred_np = np.append(y_pred_np, np.array([1, 1]) - y_true_np[0])\n\t\ty_bin = np.array(y_true_np >= 0.5, np.int)\n\t\treturn roc_auc_score(y_bin, y_pred_np, max_fpr=max_fpr)\n\n\treturn tf.py_func(my_roc_auc_score, (y_true, y_pred), tf.double)\n\n\ndef accuracy_binary(y_true, y_pred):\n\tdef my_acc_score(y_true_np, y_pred_np):\n\t\ty_true_bin = np.array(y_true_np >= 0.5, np.int)\n\t\ty_pred_bin = np.array(y_pred_np >= 0.5, np.int)\n\t\tacc = accuracy_score(y_true_bin, y_pred_bin)\n\t\treturn acc\n\n\treturn tf.py_func(my_acc_score, (y_true, y_pred), tf.double)\n\n\ndef calc_n_plot_ROC_curve(y_true, y_pred, name=\"best\", plot=True):\n\tfpr, tpr, thresholds = metrics.roc_curve(y_true, y_pred)\n\tauc = metrics.roc_auc_score(y_true, y_pred)\n\toptimal_idx = np.argmax(np.abs(tpr - fpr))\n\n\t# alternatives\n\t# optimal = tpr / (tpr + fpr)\n\tfnr = 1 - tpr\n\ttnr = 1 - fpr\n\toptimal_threshold = thresholds[optimal_idx]\n\tprint(optimal_threshold)\n\tprint(len(fpr))\n\tif plot:\n\t\tplt.plot(fpr, tpr, label=f'ROC curve {name.replace(\"_\", \" \")} (area = {auc:0.2f})')\n\t\tplt.scatter(fpr[optimal_idx], tpr[optimal_idx], c=\"green\")\n\t\tplt.plot([0, 1], [0, 1], 'k--', lw=2)\n\t\tplt.xlim([0.0, 1.0])\n\t\tplt.ylim([0.0, 1.05])\n\t\tplt.xlabel('False Positive Rate')\n\t\tplt.ylabel('True Positive Rate')\n\t\tplt.title('Receiver operating characteristic for multiple classes')\n\t\tplt.legend(loc=\"lower right\")\n\t\tplt.savefig(directory + f\"/roc_curve_{name}.pdf\")\n\t\tplt.close()\n\treturn optimal_threshold\n\n\nclass X_Data():\n\tdef __init__(self, sequences, table):\n\t\tself.sequences = 
np.array(sequences)\n\t\tself.table = table\n\t\tself.seq_length = self.sequences.shape[1]\n\n\ndef parse_amino(x, generator):\n\tamino = \"GALMFWKQESPVICYHRNDTUOBZX\"\n\tencoder = LabelEncoder()\n\tencoder.fit(list(amino))\n\tprint(encoder.classes_)\n\tprint(encoder.transform(encoder.classes_))\n\tout = []\n\tfor i in x:\n\t\tif generator:\n\t\t\tdnaSeq = i[1].upper()\n\t\telse:\n\t\t\tdnaSeq = i[0].upper()\n\t\tencoded_X = encoder.transform(list(dnaSeq))\n\t\tout.append(encoded_X)\n\treturn np.array(out)\n\n\n# hydrophilicity by parker\nhydrophilicity_scores = {'A': 2.1, 'C': 1.4, 'D': 10.0, 'E': 7.8, 'F': -9.2, 'G': 5.7, 'H': 2.1, 'I': -8.0, 'K': 5.7,\n\t\t\t\t\t\t 'L': -9.2, 'M': -4.2, 'N': 7.0, 'P': 2.1, 'Q': 6.0, 'R': 4.2, 'S': 6.5, 'T': 5.2, 'V': -3.7,\n\t\t\t\t\t\t 'W': -10.0, 'Y': -1.9}\n# Chou Fasman beta turn prediction (avg = 1)\nbetaturn_scores = {'A': 0.66, 'C': 1.19, 'D': 1.46, 'E': 0.74, 'F': 0.6, 'G': 1.56, 'H': 0.95, 'I': 0.47, 'K': 1.01,\n\t\t\t\t 'L': 0.59, 'M': 0.6, 'N': 1.56, 'P': 1.52, 'Q': 0.98, 'R': 0.95, 'S': 1.43, 'T': 0.96, 'V': 0.5,\n\t\t\t\t 'W': 0.96, 'Y': 1.14}\n# Emini surface accessibility scale (avg = 0.62)\nsurface_accessibility_scores = {'A': 0.49, 'C': 0.26, 'D': 0.81, 'E': 0.84, 'F': 0.42, 'G': 0.48, 'H': 0.66, 'I': 0.34,\n\t\t\t\t\t\t\t\t'K': 0.97, 'L': 0.4, 'M': 0.48, 'N': 0.78, 'P': 0.75, 'Q': 0.84, 'R': 0.95, 'S': 0.65,\n\t\t\t\t\t\t\t\t'T': 0.7, 'V': 0.36, 'W': 0.51, 'Y': 0.76}\n# Kolaskar and Tongaokar antigenicity scale (avg = 1.0)\nantigenicity_scores = {'A': 1.064, 'C': 1.412, 'D': 0.866, 'E': 0.851, 'F': 1.091, 'G': 0.874, 'H': 1.105, 'I': 1.152,\n\t\t\t\t\t 'K': 0.93, 'L': 1.25, 'M': 0.826, 'N': 0.776, 'P': 1.064, 'Q': 1.015, 'R': 0.873, 'S': 1.012,\n\t\t\t\t\t 'T': 0.909, 'V': 1.383, 'W': 0.893, 'Y': 1.161}\n\n\ndef normalize_dict(in_dict):\n\t\"\"\"\n\tnormalizes values in dict to range [0, 1]\n\t:param in_dict:\n\t:return:\n\t\"\"\"\n\tkeys = []\n\tvalues = []\n\tfor key, value in dict(in_dict).items():\n\t\tkeys.append(key)\n\t\tvalues.append(value)\n\n\tvalues_nestedlist = np.array([[i] for i in values])\n\tmin_max_scaler = MinMaxScaler()\n\t# feed in a numpy array\n\tvalues = min_max_scaler.fit_transform(values_nestedlist).flatten()\n\n\tout_dict = {}\n\tfor i in range(len(values)):\n\t\tout_dict.update({keys[i]: values[i]})\n\n\treturn out_dict\n\n\n# hydrophilicity by parker\nhydrophilicity_scores = normalize_dict(hydrophilicity_scores)\n# Chou Fasman beta turn prediction (avg = 1)\nbetaturn_scores = normalize_dict(betaturn_scores)\n# Emini surface accessibility scale (avg = 0.62)\nsurface_accessibility_scores = normalize_dict(surface_accessibility_scores)\n# Kolaskar and Tongaokar antigenicity scale (avg = 1.0)\nantigenicity_scores = normalize_dict(antigenicity_scores)\n\n\ndef load_data(complex, directory, val_size=0.3, generator=False, sequence_length=50, full_seq_embedding=False,\n\t\t\t final_set=True, include_raptorx_iupred=False, include_dict_scores=False, non_binary=False,\n\t\t\t own_embedding=False):\n\tdef load_raptorx_iupred(samples):\n\t\tout = []\n\t\tshift = 20\n\t\tfor sample in samples:\n\t\t\tstart = int(sample[0])\n\t\t\tstop = int(sample[1])\n\t\t\tfile = sample[2]\n\t\t\ttry:\n\t\t\t\ttable_numpy = pd.read_csv(\n\t\t\t\t\tos.path.join(\"/home/le86qiz/Documents/Konrad/tool_comparison/raptorx/flo_files\", f\"{file}.csv\"),\n\t\t\t\t\tsep=\"\\t\", index_col=None).values\n\t\t\t\tseq_len = table_numpy.shape[0]\n\t\t\t\ttable_numpy_big = np.zeros((seq_len + (shift * 2), 7))\n\t\t\t\ttable_numpy_big[shift:shift + 
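seq_len] = table_numpy\n\t\t\t\t# NOTE: the 7-column RaptorX/IUPred table is embedded into a zero matrix with shift=20\n\t\t\t\t# padding rows on each end, so a window whose start/stop indices reach past the protein\n\t\t\t\t# termini still yields a full-size slice (zeros standing in for missing residues).\n\t\t\t\ttable_numpy_big[shift:shift + 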
seq_len] = table_numpy\n\t\t\t\ttable_numpy_sliced = table_numpy_big[start + shift:stop + shift]\n\n\t\t\texcept:\n\t\t\t\tprint(f\"not able to load {file}\")\n\t\t\t\tprint(start)\n\t\t\t\ttable_numpy_sliced = np.zeros((49, 7))\n\n\t\t\tout.append(table_numpy_sliced)\n\t\treturn np.array(out)\n\n\tdef get_dict_scores(seqs):\n\t\tout_arr = []\n\t\tfor index_seq, seq in enumerate(seqs):\n\t\t\tseq_arr = np.zeros((49, 4))\n\t\t\tfor index, char in enumerate(seq):\n\t\t\t\tchar = char.upper()\n\t\t\t\t# look up the value for char in each dict; if char is not in the dict use 0.5\n\t\t\t\thydro = hydrophilicity_scores.get(char, 0.5)\n\t\t\t\tbeta = betaturn_scores.get(char, 0.5)\n\t\t\t\tsurface = surface_accessibility_scores.get(char, 0.5)\n\t\t\t\tantigen = antigenicity_scores.get(char, 0.5)\n\t\t\t\tfeatures = np.array([hydro, beta, surface, antigen])\n\t\t\t\tseq_arr[index] = features\n\t\t\tout_arr.append(seq_arr)\n\t\treturn np.array(out_arr)\n\n\tif full_seq_embedding:\n\t\tY_train_old = pd.read_csv(directory + '/Y_train.csv', delimiter='\\t', dtype='str', header=None).values\n\t\tY_test_old = pd.read_csv(directory + '/Y_test.csv', delimiter='\\t', dtype='str', header=None).values\n\t\tX_train_old = np.array(pickle.load(open(directory + '/X_train.pkl', \"rb\")))\n\t\t# X_train_old = []\n\t\tX_test_old = np.array(pickle.load(open(directory + '/X_test.pkl', \"rb\")))\n\n\t\ttry:\n\t\t\tY_val_old = pd.read_csv(directory + '/Y_val.csv', delimiter='\\t', dtype='str', header=None).values\n\t\t\tX_val_old = np.array(pickle.load(open(directory + '/X_val.pkl', \"rb\")))\n\t\t\tprint(\"loaded validation set from: \" + directory + '/Y_val.pkl')\n\t\texcept:\n\t\t\tassert generator == False, \"if generator is in use, don't create the validation set from train, this would lead to overfitting of the validation set\"\n\t\t\tprint(\"create validation set from train\")\n\t\t\tX_train_old, X_val_old, Y_train_old, Y_val_old = train_test_split(X_train_old, Y_train_old,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t test_size=val_size,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t shuffle=True)\n\t\tif final_set:\n\t\t\tif include_raptorx_iupred:\n\t\t\t\tsamples_test = np.array([i[1:] for i in X_test_old])\n\t\t\t\ttable_test = load_raptorx_iupred(samples_test)\n\t\t\t\tsequences_test = np.array([i[0] for i in X_test_old])\n\t\t\t\tX_test = X_Data(sequences=sequences_test,\n\t\t\t\t\t\t\t\ttable=table_test)\n\n\t\t\t\tsamples_val = np.array([i[1:] for i in X_val_old])\n\t\t\t\ttable_val = load_raptorx_iupred(samples_val)\n\t\t\t\tsequences_val = np.array([i[0] for i in X_val_old])\n\t\t\t\tX_val = X_Data(sequences=sequences_val,\n\t\t\t\t\t\t\t table=table_val)\n\t\t\t\tX_train = X_train_old\n\n\t\t\telif include_dict_scores:\n\t\t\t\tX_test_old_seq = pd.read_csv(directory + '/X_test.csv', delimiter='\\t', dtype='str', header=None).values\n\t\t\t\tX_val_old_seq = pd.read_csv(directory + '/X_val.csv', delimiter='\\t', dtype='str', header=None).values\n\t\t\t\tX_test_old_seq = X_test_old_seq[:, 0]\n\t\t\t\tX_val_old_seq = X_val_old_seq[:, 0]\n\t\t\t\ttable_test = get_dict_scores(X_test_old_seq)\n\t\t\t\ttable_val = get_dict_scores(X_val_old_seq)\n\n\t\t\t\tX_train = X_train_old\n\t\t\t\tX_test = np.stack(X_test_old[:, 0])\n\t\t\t\tX_val = np.stack(X_val_old[:, 0])\n\n\t\t\t\tX_test = X_Data(sequences=X_test,\n\t\t\t\t\t\t\t\ttable=table_test)\n\t\t\t\tX_val = X_Data(sequences=X_val,\n\t\t\t\t\t\t\t table=table_val)\n\n\t\t\telif non_binary:\n\t\t\t\tX_train = X_train_old\n\t\t\t\tX_test = X_test_old\n\t\t\t\tX_val = 
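X_val_old\n\t\t\t\t# NOTE: in non_binary mode the labels are real-valued soft targets in [0, 1] rather\n\t\t\t\t# than hard classes; further down they are expanded to two columns [1 - y, y] so the\n\t\t\t\t# two-unit softmax head can be trained on them with binary_crossentropy.\n\t\t\t\tX_val = 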
X_val_old\n\t\t\telse:\n\t\t\t\tprotein_mapping = {}\n\t\t\t\ttrain_val_proteins = np.append(X_train_old[:, 3], X_val_old[:, 3])\n\t\t\t\ttrain_val_proteins = np.append(train_val_proteins, X_test_old[:, 3])\n\t\t\t\tfor index, i in enumerate(np.unique(train_val_proteins)):\n\t\t\t\t\tprotein_mapping[index] = (i, np.where(train_val_proteins == i)[0])\n\n\t\t\t\tX_train = np.stack(X_train_old[:, 0])\n\t\t\t\t# X_train = X_train_old\n\t\t\t\tX_test = np.stack(X_test_old[:, 0])\n\t\t\t\tX_val = np.stack(X_val_old[:, 0])\n\t\telse:\n\t\t\tX_train = X_train_old\n\t\t\tX_test = X_test_old\n\t\t\tX_val = X_val_old\n\t\tif non_binary:\n\t\t\tY_train = Y_train_old[:, 0]\n\t\t\tY_test = np.array(Y_test_old[:, 0], np.float)\n\t\t\tY_test = np.array([1 - Y_test, Y_test]).swapaxes(0, 1)\n\t\t\tY_val = np.array(Y_val_old[:, 0], np.float)\n\t\t\tY_val = np.array([1 - Y_val, Y_val]).swapaxes(0, 1)\n\t\telse:\n\t\t\tY_train, y_encoder = DataParsing.encode_string(y=Y_train_old[:, 0])\n\t\t\tY_test = DataParsing.encode_string(y=Y_test_old[:, 0], y_encoder=y_encoder)\n\t\t\tY_val = DataParsing.encode_string(y=Y_val_old[:, 0], y_encoder=y_encoder)\n\n\t\t# original_length = 49\n\t\t# start_float = (original_length - sequence_length) / 2\n\t\t# start = math.floor(start_float)\n\t\t# stop = original_length - math.ceil(start_float)\n\t\t# X_test = X_test[:,start:stop]\n\t\t# X_train = X_train[:,start:stop]\n\t\t# X_val = X_val[:,start:stop]\n\t\treturn X_train, X_val, X_test, Y_train, Y_val, Y_test, None, protein_mapping\n\n\tif not complex:\n\t\tY_train_old = pd.read_csv(directory + '/Y_train.csv', delimiter='\\t', dtype='str', header=None).values\n\t\tY_test_old = pd.read_csv(directory + '/Y_test.csv', delimiter='\\t', dtype='str', header=None).values\n\t\tX_train_old = pd.read_csv(directory + '/X_train.csv', delimiter='\\t', dtype='str', header=None).values\n\t\tX_test_old = pd.read_csv(directory + '/X_test.csv', delimiter='\\t', dtype='str', header=None).values\n\n\t\ttry:\n\t\t\tY_val_old = pd.read_csv(directory + '/Y_val.csv', delimiter='\\t', dtype='str', header=None).values\n\t\t\tX_val_old = pd.read_csv(directory + '/X_val.csv', delimiter='\\t', dtype='str', header=None).values\n\t\t\tprint(\"loaded validation set from: \" + directory + '/Y_val.csv')\n\t\texcept:\n\t\t\tassert generator == False, \"if generator is in use, don't create the validation set from train, this would lead to overfitting of the validation set\"\n\t\t\tprint(\"create validation set from train\")\n\t\t\tX_train_old, X_val_old, Y_train_old, Y_val_old = train_test_split(X_train_old, Y_train_old,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t test_size=val_size, random_state=42)\n\n\t\tif final_set:\n\t\t\tif include_raptorx_iupred:\n\t\t\t\tsamples_test = np.array([i[1:] for i in X_test_old])\n\t\t\t\ttable_test = load_raptorx_iupred(samples_test)\n\t\t\t\tsequences_test = np.array([i[0] for i in X_test_old])\n\n\t\t\t\tsamples_val = np.array([i[1:] for i in X_val_old])\n\t\t\t\ttable_val = load_raptorx_iupred(samples_val)\n\t\t\t\tsequences_val = np.array([i[0] for i in X_val_old])\n\n\t\t\t\telmo_embedder = DataGenerator.Elmo_embedder()\n\n\t\t\t\tsequences_test = np.array([list(i) for i in sequences_test])\n\t\t\t\tsequences_val = np.array([list(i) for i in sequences_val])\n\n\t\t\t\toriginal_length = 49\n\t\t\t\tstart_float = (original_length - sequence_length) / 2\n\t\t\t\tstart = math.floor(start_float)\n\t\t\t\tstop = original_length - math.ceil(start_float)\n\n\t\t\t\tprint(\"embedding test\")\n\t\t\t\tsequences_test = 
elmo_embedder.elmo_embedding(sequences_test, start, stop)\n\t\t\t\tprint(\"embedding val\")\n\t\t\t\tsequences_val = elmo_embedder.elmo_embedding(sequences_val, start, stop)\n\n\t\t\t\tX_test = X_Data(sequences=sequences_test, table=table_test)\n\t\t\t\tX_val = X_Data(sequences=sequences_val, table=table_val)\n\t\t\t\tX_train = X_train_old\n\n\t\t\t\tY_train, y_encoder = DataParsing.encode_string(y=Y_train_old)\n\t\t\t\tY_test = DataParsing.encode_string(y=Y_test_old, y_encoder=y_encoder)\n\t\t\t\tY_val = DataParsing.encode_string(y=Y_val_old, y_encoder=y_encoder)\n\n\t\t\t\treturn X_train, X_val, X_test, Y_train, Y_val, Y_test, None, None\n\n\t\t\telif include_dict_scores:\n\t\t\t\tpass\n\n\t\t\telse:\n\t\t\t\tprotein_mapping = {}\n\t\t\t\ttrain_val_proteins = np.append(X_train_old[:, 3], X_val_old[:, 3])\n\t\t\t\ttrain_val_proteins = np.append(train_val_proteins, X_test_old[:, 3])\n\t\t\t\tfor index, i in enumerate(np.unique(train_val_proteins)):\n\t\t\t\t\tprotein_mapping[index] = (i, np.where(train_val_proteins == i)[0])\n\n\t\t\t\tX_train_old = X_train_old[:, 0]\n\t\t\t\tX_test_old = X_test_old[:, 0]\n\t\t\t\tX_val_old = X_val_old[:, 0]\n\n\t\tpos_y_test = Y_test_old[:, 1:]\n\t\tif sequence_length != 50:\n\t\t\tprint(\"WARNING not using full length of seq\")\n\n\t\t# cut out middle if necessary cut more away from end\n\t\t# e.g. sequence = ABCD, new_seq_length=1-> B\n\t\toriginal_length = 49\n\t\t# original_length = 50\n\t\tstart_float = (original_length - sequence_length) / 2\n\t\tstart = math.floor(start_float)\n\t\tstop = original_length - math.ceil(start_float)\n\n\t\tif not generator:\n\t\t\tif own_embedding:\n\t\t\t\tamino = \"ABCDEFGHIJKLMNOPQRSTUVWXYZ-\"\n\t\t\t\tencoder = LabelEncoder()\n\t\t\t\tencoder.fit(list(amino))\n\t\t\t\t# print(np.unique(np.array([list(i.upper()) for i in X_train_old]).flatten()))\n\t\t\t\tX_train = np.array(list(map(encoder.transform, np.array([list(i.upper()) for i in X_train_old]))))\n\t\t\t\t# print(X_train)\n\t\t\t\tX_test = np.array(list(map(encoder.transform, np.array([list(i.upper()) for i in X_test_old]))))\n\t\t\t\tX_val = np.array(list(map(encoder.transform, np.array([list(i.upper()) for i in X_val_old]))))\n\t\t\telse:\n\t\t\t\telmo_embedder = DataGenerator.Elmo_embedder()\n\t\t\t\tX_train = elmo_embedder.elmo_embedding(X_train_old[:, 1], start, stop)\n\t\t\t\tprint(X_train.shape)\n\t\t\t\tX_test = elmo_embedder.elmo_embedding(X_test_old[:, 1], start, stop)\n\t\t\t\tX_val = elmo_embedder.elmo_embedding(X_val_old[:, 1], start, stop)\n\t\t\t\tprint(X_val.shape)\n\n\t\telse:\n\t\t\telmo_embedder = DataGenerator.Elmo_embedder()\n\t\t\tprint(\"embedding test\")\n\t\t\tif final_set:\n\t\t\t\tX_test_old = np.array([list(i) for i in X_test_old])\n\t\t\t\tX_val_old = np.array([list(i) for i in X_val_old])\n\t\t\telse:\n\t\t\t\tX_test_old = np.array([list(i) for j in X_test_old for i in j])\n\t\t\t\tX_val_old = np.array([list(i) for j in X_val_old for i in j])\n\t\t\tX_test = elmo_embedder.elmo_embedding(X_test_old, start, stop)\n\t\t\tprint(\"embedding val\")\n\t\t\tX_val = elmo_embedder.elmo_embedding(X_val_old, start, stop)\n\n\t\t\tprint(\"embedding train\")\n\t\t\tX_train = []\n\n\telse:\n\t\tY_train_old = pd.read_csv(directory + '/Y_train.csv', delimiter='\\t', dtype='str', header=None).values\n\t\tY_test_old = pd.read_csv(directory + '/Y_test.csv', delimiter='\\t', dtype='str', header=None).values\n\t\tX_train_old = pickle.load(open(directory + '/X_train.pkl', \"rb\"))\n\t\tX_test_old = pickle.load(open(directory + '/X_test.pkl', 
\"rb\"))\n\n\ttry:\n\t\tY_val_old = pd.read_csv(directory + '/Y_val.csv', delimiter='\\t', dtype='str', header=None).values\n\t\tX_val_old = pickle.load(open(directory + '/X_val.pkl', \"rb\"))\n\t\tprint(\"loaded validation set from: \" + directory + '/Y_val.pkl')\n\texcept:\n\t\tassert generator == False, \"if generator is in use, don't create the validation set from train, this would lead to overfitting of the validation set\"\n\t\tprint(\"create validation set from train\")\n\t\tX_train_old, X_val_old, Y_train_old, Y_val_old = train_test_split(X_train_old, Y_train_old,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t test_size=val_size,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t shuffle=True)\n\toriginal_length = 50\n\tstart_float = (original_length - sequence_length) / 2\n\tstart = math.floor(start_float)\n\tstop = original_length - math.ceil(start_float)\n\telmo_embedder = DataGenerator.Elmo_embedder()\n\tamino = \"GALMFWKQESPVICYHRNDT\"\n\tencoder = LabelEncoder()\n\tencoder.fit(list(amino))\n\n\tX_train_old_seq = np.array(list(map(encoder.inverse_transform, np.array(X_train_old[:, 0:50], dtype=np.int))))\n\tX_test_old_seq = np.array(list(map(encoder.inverse_transform, np.array(X_test_old[:, 0:50], dtype=np.int))))\n\tX_val_old_seq = np.array(list(map(encoder.inverse_transform, np.array(X_val_old[:, 0:50], dtype=np.int))))\n\n\tprint(\"burn in\")\n\telmo_embedder.elmo_embedding(X_test_old_seq, start, stop)\n\tprint(\"embedding\")\n\tX_train = X_Data(sequences=elmo_embedder.elmo_embedding(X_train_old_seq, start, stop),\n\t\t\t\t\t table=X_train_old[:, sequence_length:])\n\tX_test = X_Data(sequences=elmo_embedder.elmo_embedding(X_test_old_seq, start, stop),\n\t\t\t\t\ttable=X_test_old[:, sequence_length:])\n\tX_val = X_Data(sequences=elmo_embedder.elmo_embedding(X_val_old_seq, start, stop),\n\t\t\t\t table=X_val_old[:, sequence_length:])\n\n\tpos_y_test = Y_test_old[:, 1:]\n\n\tif generator:\n\t\tif non_binary:\n\t\t\tY_train = Y_train_old[:, 0]\n\t\t\tY_test = np.array(Y_test_old[:, 0], np.float)\n\t\t\tY_test = np.array([1 - Y_test, Y_test]).swapaxes(0, 1)\n\t\t\tY_val = np.array(Y_val_old[:, 0], np.float)\n\t\t\tY_val = np.array([1 - Y_val, Y_val]).swapaxes(0, 1)\n\t\telse:\n\t\t\tY_train, y_encoder = DataParsing.encode_string(y=Y_train_old)\n\t\t\tY_test = DataParsing.encode_string(y=Y_test_old, y_encoder=y_encoder)\n\t\t\tY_val = DataParsing.encode_string(y=Y_val_old, y_encoder=y_encoder)\n\n\telif complex:\n\t\tY_train = to_categorical(np.array(Y_train_old[:, 0], dtype=np.float))\n\t\tY_test = to_categorical(np.array(Y_test_old[:, 0], dtype=np.float))\n\t\tY_val = to_categorical(np.array(Y_val_old[:, 0], dtype=np.float))\n\telse:\n\t\tif own_embedding:\n\t\t\tY_train, y_encoder = DataParsing.encode_string(y=Y_train_old)\n\t\t\tY_test = DataParsing.encode_string(y=Y_test_old, y_encoder=y_encoder)\n\t\t\tY_val = DataParsing.encode_string(y=Y_val_old, y_encoder=y_encoder)\n\t\telse:\n\t\t\tY_train, y_encoder = DataParsing.encode_string(y=Y_train_old[:, 1])\n\t\t\tY_test = DataParsing.encode_string(y=Y_test_old[:, 1], y_encoder=y_encoder)\n\t\t\tY_val = DataParsing.encode_string(y=Y_val_old[:, 1], y_encoder=y_encoder)\n\n\treturn X_train, X_val, X_test, Y_train, Y_val, Y_test, pos_y_test, protein_mapping\n\n\ndef build_model(nodes, dropout, seq_length, weight_decay_lstm=1e-6, weight_decay_dense=1e-3, non_binary=False,\n\t\t\t\town_embedding=False, both_embeddings=False):\n\tif own_embedding:\n\t\tinputs = 
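layers.Input(shape=(seq_length,))\n\t\t# NOTE: own_embedding replaces the precomputed 1024-d ELMo vectors with a learned 10-d\n\t\t# embedding over the 27-symbol alphabet 'ABCDEFGHIJKLMNOPQRSTUVWXYZ-' used by the\n\t\t# LabelEncoder in load_data, trading context-aware features for a much smaller input.\n\t\tinputs = 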
layers.Input(shape=(seq_length,))\n\t\tseq_input = layers.Embedding(27, 10, input_length=seq_length)(inputs)\n\t\thidden = layers.Bidirectional(\n\t\t\tlayers.LSTM(nodes, return_sequences=True, dropout=dropout,\n\t\t\t\t\t\trecurrent_dropout=0.2, kernel_regularizer=l2(weight_decay_lstm),\n\t\t\t\t\t\trecurrent_regularizer=l2(weight_decay_lstm), bias_regularizer=l2(weight_decay_lstm)))(seq_input)\n\t\thidden = layers.Bidirectional(\n\t\t\tlayers.LSTM(nodes, dropout=dropout, recurrent_dropout=0.2, kernel_regularizer=l2(weight_decay_lstm),\n\t\t\t\t\t\trecurrent_regularizer=l2(weight_decay_lstm), bias_regularizer=l2(weight_decay_lstm)))(hidden)\n\n\telif both_embeddings:\n\t\tembedding_input = layers.Input(shape=(seq_length, 1024))\n\t\tleft = layers.Bidirectional(\n\t\t\tlayers.LSTM(nodes, input_shape=(seq_length, 1024), return_sequences=True, dropout=dropout,\n\t\t\t\t\t\trecurrent_dropout=0.2, kernel_regularizer=l2(weight_decay_lstm),\n\t\t\t\t\t\trecurrent_regularizer=l2(weight_decay_lstm), bias_regularizer=l2(weight_decay_lstm)))(\n\t\t\tembedding_input)\n\t\tleft = layers.Dense(nodes)(left)\n\t\tleft = layers.LeakyReLU(alpha=0.01)(left)\n\t\tout_left = layers.Flatten()(left)\n\t\t# big_model = models.Model(embedding_input, out_left)\n\n\t\tseq_input = layers.Input(shape=(seq_length,))\n\t\tright = layers.Embedding(27, 10, input_length=seq_length)(seq_input)\n\t\tright = layers.Bidirectional(\n\t\t\tlayers.LSTM(nodes, return_sequences=True, dropout=dropout,\n\t\t\t\t\t\trecurrent_dropout=0.2, kernel_regularizer=l2(weight_decay_lstm),\n\t\t\t\t\t\trecurrent_regularizer=l2(weight_decay_lstm), bias_regularizer=l2(weight_decay_lstm)))(right)\n\t\tright = layers.Dense(nodes)(right)\n\t\tright = layers.LeakyReLU(alpha=0.01)(right)\n\t\tout_right = layers.Flatten()(right)\n\t\t# small_model = models.Model(seq_input, out_right)\n\n\t\t# hidden = layers.concatenate([big_model(embedding_input),small_model(seq_input)])\n\t\thidden = layers.concatenate([out_left, out_right])\n\n\telse:\n\t\tinputs = layers.Input(shape=(seq_length, 1024))\n\t\thidden = layers.Bidirectional(\n\t\t\tlayers.LSTM(nodes, input_shape=(seq_length, 1024), return_sequences=True, dropout=dropout,\n\t\t\t\t\t\trecurrent_dropout=0.2, kernel_regularizer=l2(weight_decay_lstm),\n\t\t\t\t\t\trecurrent_regularizer=l2(weight_decay_lstm), bias_regularizer=l2(weight_decay_lstm)))(inputs)\n\t\thidden = layers.Bidirectional(\n\t\t\tlayers.LSTM(nodes, dropout=dropout, recurrent_dropout=0.2, kernel_regularizer=l2(weight_decay_lstm),\n\t\t\t\t\t\trecurrent_regularizer=l2(weight_decay_lstm), bias_regularizer=l2(weight_decay_lstm)))(hidden)\n\n\t# hidden = layers.Dense(nodes, kernel_regularizer=l2(weight_decay_dense), bias_regularizer=l2(weight_decay_dense))(\n\t# \tinputs)\n\t# hidden = layers.LeakyReLU(alpha=0.01)(hidden)\n\t# hidden = layers.Flatten()(hidden)\n\thidden = layers.Dense(nodes, kernel_regularizer=l2(weight_decay_dense), bias_regularizer=l2(weight_decay_dense))(\n\t\thidden)\n\n\thidden = layers.LeakyReLU(alpha=0.01)(hidden)\n\n\tout = layers.Dense(2, activation='softmax', kernel_regularizer=l2(weight_decay_dense),\n\t\t\t\t\t bias_regularizer=l2(weight_decay_dense))(hidden)\n\tif both_embeddings:\n\t\tmodel = models.Model(inputs=[embedding_input, seq_input], outputs=out)\n\telse:\n\t\tmodel = models.Model(inputs=inputs, outputs=out)\n\n\tadam = optimizers.Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=None, decay=0.0, amsgrad=False)\n\tnadam = optimizers.Nadam(lr=0.00002, beta_1=0.9, beta_2=0.999)\n\tif 
non_binary:\n\t\tmodel.compile(optimizer=\"adam\", loss='binary_crossentropy', metrics=[accuracy_binary, auc_10_perc_fpr_binary])\n\telse:\n\t\tif both_embeddings:\n\t\t\t# set_trainability(big_model, False)\n\t\t\t# small_model.compile(optimizer=\"adam\", loss='binary_crossentropy', metrics=['acc', auc_10_perc_fpr])\n\t\t\t# big_model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['acc', auc_10_perc_fpr])\n\t\t\tmodel.compile(optimizer=adam, loss='binary_crossentropy', metrics=['acc', auc_10_perc_fpr])\n\t\t\t# model.compile(optimizer=nadam, loss='binary_crossentropy', metrics=['acc', auc_10_perc_fpr])\n\n\t\t\tmodel.summary()\n\t\t\treturn model, None, None\n\t\t# return model, small_model, big_model\n\t\tmodel.compile(optimizer=adam, loss='binary_crossentropy', metrics=['acc', auc_10_perc_fpr])\n\tmodel.summary()\n\treturn model, None, None\n\n\ndef build_complex_model(nodes, dropout, seq_length=19, vector_dim=1, table_columns=0):\n\tseq_input = layers.Input(shape=(seq_length,), name=\"seq_input\")\n\tleft = layers.Embedding(21, 10, input_length=seq_length)(seq_input)\n\n\tleft = layers.Bidirectional(layers.LSTM(nodes, return_sequences=True, dropout=dropout, recurrent_dropout=0.2))(left)\n\tleft = layers.Bidirectional(layers.LSTM(nodes, dropout=dropout, recurrent_dropout=0.2))(left)\n\tleft = layers.Dense(nodes)(left)\n\tout_left = layers.LeakyReLU(alpha=0.01)(left)\n\tSeq_model = models.Model(seq_input, out_left)\n\tauxiliary_input = layers.Input(shape=(table_columns,), dtype='float', name='aux_input')\n\n\tright = layers.Dense(nodes)(auxiliary_input)\n\tright = layers.LeakyReLU(alpha=0.01)(right)\n\n\tTable_model = models.Model(auxiliary_input, right)\n\n\tmiddle_input = layers.concatenate([Seq_model(seq_input), Table_model(auxiliary_input)])\n\tmiddle = layers.Dense(nodes)(middle_input)\n\tmiddle = layers.LeakyReLU(alpha=0.01)(middle)\n\n\toutput = layers.Dense(2, activation='softmax', name='output')(middle)\n\tmodel = models.Model(inputs=[seq_input, auxiliary_input], outputs=output)\n\n\tset_trainability(Table_model, False)\n\tSeq_model.compile(optimizer=\"adam\", loss='binary_crossentropy')\n\tmodel.compile(optimizer='adam', loss='binary_crossentropy', metrics=['acc'])\n\tTable_model.compile(optimizer=\"adam\", loss='binary_crossentropy')\n\tmodel.summary()\n\treturn model, Seq_model, Table_model\n\n\ndef acc_with_cutoffs(Y_true, Y_pred, cutoffs):\n\tassert Y_true.shape[1] == len(cutoffs), \"number of cutoffs and classes must be equal\"\n\t# binarize the pred with cutoffs\n\tY_pred_bin = np.zeros(Y_true.shape)\n\tfor i in range(0, Y_true.shape[1]):\n\t\tY_pred_bin[:, i] = Y_pred[:, i] > cutoffs[i]\n\n\tfor index, sample in enumerate(Y_pred_bin):\n\t\t# if no entry over threshold use highest value as prediction\n\t\tif sum(sample) == 0:\n\t\t\tY_pred_bin[index, np.argmax(sample)] = 1\n\n\t# find multiple predictions\n\tpositions = np.argwhere(Y_pred_bin == 1)\n\tY_true_extended = np.array([])\n\tY_pred_extended = np.array([])\n\tfor i in positions:\n\t\ttrue = np.argmax(Y_true[i[0]])\n\t\tpred = i[1]\n\t\tY_true_extended = np.append(Y_true_extended, true)\n\t\tY_pred_extended = np.append(Y_pred_extended, pred)\n\n\tprint(len(Y_true_extended))\n\tacc = sum(Y_true_extended == Y_pred_extended) / len(Y_true_extended)\n\tprint(f\"acc: {acc}\")\n\n\ndef compare_quality(model, path, X_test, Y_test, X_val, Y_val, pos_y_test, complex_model=False,\n\t\t\t\t\tinclude_raptorx_iupred=False):\n\tdef calc_quality(model, X_test, Y_test, X_val, Y_val, pos_y_test, complex_model=False, 
path=False, middle_name=\"\",\n\t\t\t\t\t include_raptorx_iupred=False):\n\t\tif path:\n\t\t\tprint(\"load model:\")\n\t\t\tprint(model_path)\n\t\t\tmodel.load_weights(model_path)\n\n\t\tif complex_model:\n\t\t\tpred = model.predict({'seq_input': X_test.sequences, 'aux_input': X_test.table})\n\t\t\tpred_val = model.predict({'seq_input': X_val.sequences, 'aux_input': X_val.table})\n\t\t\tcomplex_model = False\n\t\telif include_raptorx_iupred:\n\t\t\tpred = model.predict({'seq_input': X_test.sequences, 'aux_input': X_test.table})\n\t\t\tpred_val = model.predict({'seq_input': X_val.sequences, 'aux_input': X_val.table})\n\t\telse:\n\t\t\tpred = model.predict(X_test)\n\t\t\tpred_val = model.predict(X_val)\n\t\terror = metrics.mean_absolute_error(Y_test, pred)\n\t\tprint(f\"error {error}\")\n\n\t\taccuracy, error = calculate_weighted_accuracy([1], [pred], [pred_val], 2,\n\t\t\t\t\t\t\t\t\t\t\t\t\t Y=Y_test, Y_val=Y_val,\n\t\t\t\t\t\t\t\t\t\t\t\t\t ROC=True, name=f\"{middle_name}\")\n\n\tdef calc_acc_with_cutoff(name=\"best\"):\n\t\tif complex_model:\n\t\t\tY_pred_test = model.predict({'seq_input': X_test.sequences, 'aux_input': X_test.table})\n\t\t\tY_pred_val = model.predict({'seq_input': X_val.sequences, 'aux_input': X_val.table})\n\n\t\telse:\n\t\t\tY_pred_test = model.predict(X_test)\n\t\t\tY_pred_val = model.predict(X_val)\n\n\t\tcutoff = calc_n_plot_ROC_curve(y_true=Y_val[:, 1], y_pred=Y_pred_val[:, 1], name=name)\n\n\t\t\"\"\"cutoff adapted\"\"\"\n\t\tprint(name)\n\t\tprint(\"cutoff adapted\")\n\t\tY_pred_test[:, 1] = Y_pred_test[:, 1] > cutoff\n\t\tY_pred_test[:, 0] = Y_pred_test[:, 1] == 0\n\t\ttable = pd.crosstab(\n\t\t\tpd.Series(np.argmax(Y_test, axis=1)),\n\t\t\tpd.Series(np.argmax(Y_pred_test, axis=1)),\n\t\t\trownames=['True'],\n\t\t\tcolnames=['Predicted'],\n\t\t\tmargins=True)\n\t\tprint(table)\n\t\tacc = sum(np.argmax(Y_test, axis=1) == np.argmax(Y_pred_test, axis=1)) / len(np.argmax(Y_pred_test, axis=1))\n\t\tprint(f\"Accuracy: {acc}\")\n\n\t# last weights\n\tcalc_quality(model, X_test, Y_test, X_val, Y_val, pos_y_test, complex_model, path=False,\n\t\t\t\t middle_name=f\"last_model_{suffix}\",\n\t\t\t\t include_raptorx_iupred=include_raptorx_iupred)\n\tif path:\n\t\tfor middle_name in (\"loss\", \"acc\", \"auc10\"):\n\t\t\tmodel_path = f\"{path}/weights.best.{middle_name}.{suffix}.hdf5\"\n\n\t\t\tcalc_quality(model, X_test, Y_test, X_val, Y_val, pos_y_test, complex_model, path=path,\n\t\t\t\t\t\t middle_name=f\"{middle_name}_{suffix}\", include_raptorx_iupred=include_raptorx_iupred)\n\n\ndef build_multi_length_model(nodes, dropout):\n\tmodels_multi_length = []\n\tinputs_multi_length = []\n\n\tfor sequence_length in range(9, 51, 10):\n\t\tinput_name = f\"seq_input_{sequence_length}\"\n\n\t\t# build model\n\t\tseq_input = layers.Input(shape=(sequence_length,), name=input_name)\n\t\tleft = layers.Embedding(21, 10, input_length=sequence_length)(seq_input)\n\n\t\tleft = layers.Bidirectional(layers.LSTM(nodes, return_sequences=True, dropout=dropout, recurrent_dropout=0.2))(\n\t\t\tleft)\n\t\tleft = layers.Bidirectional(layers.LSTM(nodes, dropout=dropout, recurrent_dropout=0.2))(left)\n\t\tleft = layers.Dense(nodes)(left)\n\t\tleft = layers.LeakyReLU(alpha=0.01)(left)\n\t\tout_left = layers.Dense(2, activation='softmax')(left)\n\t\tSeq_model = models.Model(seq_input, out_left)\n\t\tSeq_model.compile(optimizer=\"adam\", loss='binary_crossentropy')\n\n\t\tmodels_multi_length.append(Seq_model(seq_input))\n\t\tinputs_multi_length.append(seq_input)\n\n\tmiddle_input = 
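layers.concatenate(models_multi_length)\n\t# NOTE: one BiLSTM branch per window length (9, 19, 29, 39, 49); each branch already emits\n\t# its own 2-way softmax, and the concatenated per-branch verdicts are merged by a final\n\t# dense + softmax head into a single prediction.\n\tmiddle_input = 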
layers.concatenate(models_multi_length)\n\tmiddle = layers.Dense(nodes)(middle_input)\n\tmiddle = layers.LeakyReLU(alpha=0.01)(middle)\n\n\toutput = layers.Dense(2, activation='softmax', name='output')(middle)\n\tmodel = models.Model(inputs=inputs_multi_length, outputs=output)\n\tmodel.compile(optimizer='adam', loss='binary_crossentropy', metrics=['acc'])\n\tmodel.summary()\n\tplot_model(model, to_file='multi_length.png')\n\treturn model\n\n\n# def build_elmo_embedding_model():\n# \timport tensorflow as tf\n# \timport tensorflow_hub as hub\n# \telmo = hub.Module(\"/home/go96bix/projects/deep_eve/elmo\", trainable=False)\n#\n# \tdef ELMoEmbedding(x):\n# \t\treturn elmo(tf.squeeze(tf.cast(x, tf.string)), signature=\"default\", as_dict=True)[\"default\"]\n#\n# \tinput_text = layers.Input(shape=(1,), dtype=\"string\")\n# \tembedding = layers.Lambda(ELMoEmbedding, output_shape=(1024,))(input_text)\n\n\ndef build_model_with_raptorx_iupred(nodes, dropout, seq_length=50, weight_decay_lstm=0, weight_decay_dense=0):\n\tseq_input = layers.Input(shape=(seq_length, 1024), name=\"seq_input\")\n\tleft = layers.Bidirectional(\n\t\tlayers.LSTM(nodes, input_shape=(seq_length, 1024), return_sequences=True, dropout=dropout,\n\t\t\t\t\trecurrent_dropout=0.2, kernel_regularizer=l2(weight_decay_lstm),\n\t\t\t\t\trecurrent_regularizer=l2(weight_decay_lstm), bias_regularizer=l2(weight_decay_lstm)))(seq_input)\n\tleft = layers.Bidirectional(\n\t\tlayers.LSTM(nodes, dropout=dropout, recurrent_dropout=0.2, kernel_regularizer=l2(weight_decay_lstm),\n\t\t\t\t\trecurrent_regularizer=l2(weight_decay_lstm), bias_regularizer=l2(weight_decay_lstm)))(left)\n\tleft = layers.Dense(nodes, kernel_regularizer=l2(weight_decay_dense), bias_regularizer=l2(weight_decay_dense))(left)\n\tout_left = layers.LeakyReLU(alpha=0.01)(left)\n\tSeq_model = models.Model(seq_input, out_left)\n\n\tauxiliary_input = layers.Input(shape=(seq_length, 7), dtype='float', name='aux_input')\n\tright = layers.Bidirectional(\n\t\tlayers.LSTM(nodes, input_shape=(seq_length, 1024), return_sequences=True, dropout=dropout,\n\t\t\t\t\trecurrent_dropout=0.2, kernel_regularizer=l2(weight_decay_lstm),\n\t\t\t\t\trecurrent_regularizer=l2(weight_decay_lstm), bias_regularizer=l2(weight_decay_lstm)))(\n\t\tauxiliary_input)\n\tright = layers.Bidirectional(\n\t\tlayers.LSTM(nodes, dropout=dropout, recurrent_dropout=0.2, kernel_regularizer=l2(weight_decay_lstm),\n\t\t\t\t\trecurrent_regularizer=l2(weight_decay_lstm), bias_regularizer=l2(weight_decay_lstm)))(right)\n\tright = layers.Dense(nodes, kernel_regularizer=l2(weight_decay_dense), bias_regularizer=l2(weight_decay_dense))(\n\t\tright)\n\tout_right = layers.LeakyReLU(alpha=0.01)(right)\n\tTable_model = models.Model(auxiliary_input, out_right)\n\n\tmiddle_input = layers.concatenate([Seq_model(seq_input), Table_model(auxiliary_input)])\n\tmiddle = layers.Dense(nodes)(middle_input)\n\tmiddle = layers.LeakyReLU(alpha=0.01)(middle)\n\toutput = layers.Dense(2, activation='softmax', name='output')(middle)\n\tmodel = models.Model(inputs=[seq_input, auxiliary_input], outputs=output)\n\n\tset_trainability(Table_model, False)\n\tSeq_model.compile(optimizer=\"adam\", loss='binary_crossentropy')\n\tmodel.compile(optimizer='adam', loss='binary_crossentropy', metrics=['acc', auc_10_perc_fpr])\n\tTable_model.compile(optimizer=\"adam\", loss='binary_crossentropy')\n\tmodel.summary()\n\treturn model, Seq_model, Table_model\n\n\ndef build_model_with_table(nodes, dropout, seq_length=50, weight_decay_lstm=0, 
weight_decay_dense=0):\n\tseq_input = layers.Input(shape=(seq_length, 1024), name=\"seq_input\")\n\tleft = layers.Bidirectional(\n\t\tlayers.LSTM(nodes, input_shape=(seq_length, 1024), return_sequences=True, dropout=dropout,\n\t\t\t\t\trecurrent_dropout=0.2, kernel_regularizer=l2(weight_decay_lstm),\n\t\t\t\t\trecurrent_regularizer=l2(weight_decay_lstm), bias_regularizer=l2(weight_decay_lstm)))(seq_input)\n\tleft = layers.Bidirectional(\n\t\tlayers.LSTM(nodes, dropout=dropout, recurrent_dropout=0.2, kernel_regularizer=l2(weight_decay_lstm),\n\t\t\t\t\trecurrent_regularizer=l2(weight_decay_lstm), bias_regularizer=l2(weight_decay_lstm)))(left)\n\tleft = layers.Dense(nodes, kernel_regularizer=l2(weight_decay_dense), bias_regularizer=l2(weight_decay_dense))(left)\n\tout_left = layers.LeakyReLU(alpha=0.01)(left)\n\tSeq_model = models.Model(seq_input, out_left)\n\n\tauxiliary_input = layers.Input(shape=(seq_length, 4), dtype='float', name='aux_input')\n\tright = layers.Bidirectional(\n\t\tlayers.LSTM(nodes // 2, input_shape=(seq_length, 1024), return_sequences=True, dropout=dropout,\n\t\t\t\t\trecurrent_dropout=0.2, kernel_regularizer=l2(weight_decay_lstm),\n\t\t\t\t\trecurrent_regularizer=l2(weight_decay_lstm), bias_regularizer=l2(weight_decay_lstm)))(\n\t\tauxiliary_input)\n\tright = layers.Bidirectional(\n\t\tlayers.LSTM(nodes // 2, dropout=dropout, recurrent_dropout=0.2, kernel_regularizer=l2(weight_decay_lstm),\n\t\t\t\t\trecurrent_regularizer=l2(weight_decay_lstm), bias_regularizer=l2(weight_decay_lstm)))(right)\n\tright = layers.Dense(nodes // 2, kernel_regularizer=l2(weight_decay_dense),\n\t\t\t\t\t\t bias_regularizer=l2(weight_decay_dense))(\n\t\tright)\n\tout_right = layers.LeakyReLU(alpha=0.01)(right)\n\tTable_model = models.Model(auxiliary_input, out_right)\n\n\tmiddle_input = layers.concatenate([Seq_model(seq_input), Table_model(auxiliary_input)])\n\tmiddle = layers.Dense(nodes)(middle_input)\n\tmiddle = layers.LeakyReLU(alpha=0.01)(middle)\n\toutput = layers.Dense(2, activation='softmax', name='output')(middle)\n\tmodel = models.Model(inputs=[seq_input, auxiliary_input], outputs=output)\n\n\tset_trainability(Table_model, False)\n\tSeq_model.compile(optimizer=\"adam\", loss='binary_crossentropy')\n\tmodel.compile(optimizer='adam', loss='binary_crossentropy', metrics=['acc', auc_10_perc_fpr])\n\tTable_model.compile(optimizer=\"adam\", loss='binary_crossentropy')\n\tmodel.summary()\n\treturn model, Seq_model, Table_model\n\n\ndef build_broad_complex_model(nodes, dropout, sequence_length=50, weight_decay=1e-6):\n\tadam = optimizers.Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=None, decay=0.0, amsgrad=False)\n\n\ttensors_models = []\n\tlist_models_table_cols = []\n\tinputs_table_cols = []\n\n\t\"\"\"sequence model\"\"\"\n\tseq_input = layers.Input(shape=(sequence_length, 1024), name=\"seq_input\")\n\tleft = layers.Bidirectional(layers.LSTM(nodes, return_sequences=True, dropout=dropout, recurrent_dropout=0.2,\n\t\t\t\t\t\t\t\t\t\t\tkernel_regularizer=l2(weight_decay), recurrent_regularizer=l2(weight_decay),\n\t\t\t\t\t\t\t\t\t\t\tbias_regularizer=l2(weight_decay)))(seq_input)\n\tleft = layers.Bidirectional(\n\t\tlayers.LSTM(nodes, dropout=dropout, recurrent_dropout=0.2, kernel_regularizer=l2(weight_decay),\n\t\t\t\t\trecurrent_regularizer=l2(weight_decay), bias_regularizer=l2(weight_decay)))(left)\n\tleft = layers.Dense(nodes, kernel_regularizer=l2(weight_decay), bias_regularizer=l2(weight_decay))(left)\n\tout_left = layers.LeakyReLU(alpha=0.01)(left)\n\tSeq_model = 
models.Model(seq_input, out_left)\n\tSeq_model.summary()\n\n\ttensors_models.append(Seq_model(seq_input))\n\tinputs_table_cols.append(seq_input)\n\n\t\"\"\"loop creating multiple table input models\"\"\"\n\tlength_arr = [sequence_length] * 9\n\tlength_arr.append(13)\n\tfor index, length in enumerate(length_arr):\n\t\tinput_name = f\"table_input_{index}\"\n\n\t\t# build model\n\t\tif length == sequence_length:\n\t\t\ttable_input = layers.Input(shape=(length, 1,), name=input_name)\n\t\t\tmiddle = layers.Bidirectional(\n\t\t\t\tlayers.LSTM(nodes, return_sequences=True, dropout=dropout, recurrent_dropout=0.2,\n\t\t\t\t\t\t\tkernel_regularizer=l2(weight_decay), recurrent_regularizer=l2(weight_decay),\n\t\t\t\t\t\t\tbias_regularizer=l2(weight_decay)))(table_input)\n\t\t\tmiddle = layers.Bidirectional(\n\t\t\t\tlayers.LSTM(nodes, dropout=dropout, recurrent_dropout=0.2, kernel_regularizer=l2(weight_decay),\n\t\t\t\t\t\t\trecurrent_regularizer=l2(weight_decay), bias_regularizer=l2(weight_decay)))(middle)\n\t\t\tmiddle = layers.Dense(nodes, kernel_regularizer=l2(weight_decay), bias_regularizer=l2(weight_decay))(middle)\n\t\t\tmiddle_out = layers.LeakyReLU(alpha=0.01)(middle)\n\t\t\tTable_model = models.Model(table_input, middle_out)\n\n\t\telse:\n\t\t\ttable_input = layers.Input(shape=(length,), dtype='float', name=input_name)\n\t\t\tright = layers.Dense(nodes, kernel_regularizer=l2(weight_decay), bias_regularizer=l2(weight_decay))(\n\t\t\t\ttable_input)\n\t\t\tright_out = layers.LeakyReLU(alpha=0.01)(right)\n\t\t\tTable_model = models.Model(table_input, right_out)\n\n\t\tTable_model.compile(optimizer=adam, loss='binary_crossentropy')\n\t\tlist_models_table_cols.append(Table_model)\n\t\ttensors_models.append(Table_model(table_input))\n\t\tinputs_table_cols.append(table_input)\n\n\tmiddle_input = layers.concatenate(tensors_models)\n\tmiddle = layers.Dense(nodes, kernel_regularizer=l2(weight_decay), bias_regularizer=l2(weight_decay))(middle_input)\n\tmiddle = layers.LeakyReLU(alpha=0.01)(middle)\n\n\toutput = layers.Dense(2, activation='softmax', name='output', kernel_regularizer=l2(weight_decay),\n\t\t\t\t\t\t bias_regularizer=l2(weight_decay))(middle)\n\tmodel = models.Model(inputs=inputs_table_cols, outputs=output)\n\tmodel.compile(optimizer=adam, loss='binary_crossentropy', metrics=['acc', auc_10_perc_fpr])\n\tmodel.summary()\n\tplot_model(model, to_file='broad_complex.png')\n\treturn model, Seq_model, list_models_table_cols\n\n\ndef test_broad_complex_model(path, suffix, complex_model=True, shuffleTraining=True,\n\t\t\t\t\t\t\t nodes=32, use_generator=True, epochs=100, dropout=0.0, faster=False, batch_size=32,\n\t\t\t\t\t\t\t sequence_length=50, tensorboard=False, gpus=False, cross_val=True,\n\t\t\t\t\t\t\t modular_training=True, weight_decay=1e-6, **kwargs):\n\tinputs_train = {}\n\tinputs_val = {}\n\tinputs_test = {}\n\n\tX_train, X_val, X_test, Y_train, Y_val, Y_test, pos_y_test = load_data(complex_model, path, val_size=0.3,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t generator=use_generator,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t sequence_length=sequence_length)\n\t# define inputs\n\tfor col_num in range(10):\n\t\tinput_name = f\"table_input_{col_num}\"\n\t\tif col_num == 0:\n\t\t\tinputs_train.update({\"seq_input\": X_train.sequences})\n\t\t\tinputs_val.update({\"seq_input\": X_val.sequences})\n\t\t\tinputs_test.update({\"seq_input\": X_test.sequences})\n\n\t\tcol_train = X_train.table[:, 0:sequence_length]\n\t\tcol_val = X_val.table[:, 0:sequence_length]\n\t\tcol_test = X_test.table[:, 
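The slicing loop in test_broad_complex_model consumes the flattened feature table block by block: the first sequence_length columns become one per-residue input (reshaped to rank 3 for the LSTM branches), then those columns are dropped before the next iteration. A toy illustration with hypothetical shapes:

import numpy as np

table = np.arange(2 * 15).reshape(2, 15)  # 2 samples, 3 blocks of width 5
block = table[:, 0:5].reshape(2, -1, 1)   # -> (2, 5, 1), an LSTM-shaped input
table = table[:, 5:]                      # drop the consumed block, repeat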
0:sequence_length]\n\n\t\tif col_num != 9:\n\t\t\tcol_train = np.array(col_train).reshape(X_train.table.shape[0], -1, 1)\n\t\t\tcol_val = np.array(col_val).reshape(X_val.table.shape[0], -1, 1)\n\t\t\tcol_test = np.array(col_test).reshape(X_test.table.shape[0], -1, 1)\n\n\t\tX_train.table = X_train.table[:, sequence_length::]\n\t\tX_val.table = X_val.table[:, sequence_length::]\n\t\tX_test.table = X_test.table[:, sequence_length::]\n\n\t\tinputs_train.update({input_name: col_train})\n\t\tinputs_val.update({input_name: col_val})\n\t\tinputs_test.update({input_name: col_test})\n\n\tmodel, Seq_model, list_models_table_cols = build_broad_complex_model(nodes, dropout, sequence_length, weight_decay)\n\n\tfilepath = path + \"/weights.best.acc.\" + suffix + \".hdf5\"\n\tfilepath2 = path + \"/weights.best.loss.\" + suffix + \".hdf5\"\n\tfilepath3 = path + \"/weights.best.auc10.\" + suffix + \".hdf5\"\n\tcheckpoint = ModelCheckpoint(filepath, monitor='val_acc', verbose=1, save_best_only=True, save_weights_only=True,\n\t\t\t\t\t\t\t\t mode='max')\n\tcheckpoint2 = ModelCheckpoint(filepath2, monitor='val_loss', verbose=1, save_best_only=True, save_weights_only=True,\n\t\t\t\t\t\t\t\t mode='min')\n\tcheckpoint3 = ModelCheckpoint(filepath3, monitor='val_auc_10_perc_fpr', verbose=1, save_best_only=True,\n\t\t\t\t\t\t\t\t save_weights_only=True, mode='max')\n\ttensorboard = TensorBoard(f'./tensorboard_log_dir')\n\tcallbacks_list = [checkpoint, checkpoint2, checkpoint3, tensorboard]\n\n\tif cross_val:\n\t\t# define 10-fold cross validation test harness\n\t\tkfold = StratifiedKFold(n_splits=10, shuffle=True, random_state=1337)\n\t\tcvscores = []\n\t\tfor train, val in kfold.split(col_train, np.argmax(Y_train, axis=1)):\n\t\t\tcheckpoint = ModelCheckpoint(filepath, monitor='val_acc', verbose=1, save_best_only=True,\n\t\t\t\t\t\t\t\t\t\t save_weights_only=True, mode='max')\n\t\t\tcheckpoint2 = ModelCheckpoint(filepath2, monitor='val_loss', verbose=1, save_best_only=True,\n\t\t\t\t\t\t\t\t\t\t save_weights_only=True, mode='min')\n\t\t\tcheckpoint3 = ModelCheckpoint(filepath3, monitor='val_auc_10_perc_fpr', verbose=1, save_best_only=True,\n\t\t\t\t\t\t\t\t\t\t save_weights_only=True, mode='max')\n\t\t\tif tf.gfile.Exists(f\"./tensorboard_log_dir/run{len(cvscores)}\"):\n\t\t\t\ttf.gfile.DeleteRecursively(f\"./tensorboard_log_dir/run{len(cvscores)}\")\n\t\t\ttensorboard = TensorBoard(f'./tensorboard_log_dir/run{len(cvscores)}')\n\t\t\tcallbacks_list = [checkpoint, checkpoint2, checkpoint3, tensorboard]\n\n\t\t\tK.clear_session()\n\t\t\tdel model\n\n\t\t\tmodel, Seq_model, list_models_table_cols = build_broad_complex_model(nodes, dropout, sequence_length)\n\n\t\t\tinputs_train_K_fold = {}\n\t\t\tinputs_val_K_fold = {}\n\t\t\tfor item in inputs_train.items():\n\t\t\t\tinputs_train_K_fold.update({item[0]: item[1][train]})\n\t\t\t\tinputs_val_K_fold.update({item[0]: item[1][val]})\n\n\t\t\tif modular_training:\n\t\t\t\tseq_train_length = (epochs // 14) * 3\n\t\t\t\tcol_train_length = (epochs - seq_train_length) // 11\n\n\t\t\t\tepo = 0\n\t\t\t\twhile epo < epochs:\n\t\t\t\t\tif epo == 0:\n\t\t\t\t\t\tfor model_table_col in list_models_table_cols:\n\t\t\t\t\t\t\tset_trainability(model_table_col, False)\n\t\t\t\t\t\t\tmodel_table_col.compile(optimizer='adam', loss='binary_crossentropy')\n\t\t\t\t\t\tset_trainability(Seq_model, True)\n\t\t\t\t\t\tSeq_model.layers[0].trainable = False\n\t\t\t\t\t\tset_trainability(model, True)\n\n\t\t\t\t\t\tSeq_model.compile(optimizer=\"adam\", 
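The modular-training schedule is driven by two derived lengths; a worked example makes the phases concrete (epochs = 140, purely illustrative):

epochs = 140
seq_train_length = (epochs // 14) * 3                 # 30: sequence branch only
col_train_length = (epochs - seq_train_length) // 11  # 10: per table-column branch
# epochs  0-29 : only the sequence model is trainable
# epochs 30-119: the table-column branches are unfrozen one after another
# epoch  120   : everything unfrozen, Adam restarted at lr=1e-4 to fine-tune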
loss='binary_crossentropy')\n\t\t\t\t\t\tmodel.compile(optimizer='adam', loss='binary_crossentropy', metrics=['acc', auc_10_perc_fpr])\n\n\n\t\t\t\t\telif epo >= seq_train_length and epo < col_train_length * 12 and epo % col_train_length == 0:\n\t\t\t\t\t\tmodel_table_col = list_models_table_cols[(epo // col_train_length) - 2]\n\t\t\t\t\t\tset_trainability(model_table_col, True)\n\t\t\t\t\t\tmodel_table_col.compile(optimizer='adam', loss='binary_crossentropy')\n\t\t\t\t\t\tmodel_table_col = list_models_table_cols[(epo // col_train_length) - 3]\n\t\t\t\t\t\tset_trainability(model_table_col, True)\n\t\t\t\t\t\tmodel_table_col.compile(optimizer='adam', loss='binary_crossentropy')\n\t\t\t\t\t\tset_trainability(Seq_model, False)\n\t\t\t\t\t\tset_trainability(model, True)\n\n\t\t\t\t\t\tSeq_model.compile(optimizer=\"adam\", loss='binary_crossentropy')\n\t\t\t\t\t\tmodel.compile(optimizer='adam', loss='binary_crossentropy', metrics=['acc', auc_10_perc_fpr])\n\n\t\t\t\t\telif epo == epochs - (col_train_length * 2):\n\t\t\t\t\t\tadam = optimizers.Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=None, decay=0,\n\t\t\t\t\t\t\t\t\t\t\t amsgrad=False)\n\t\t\t\t\t\tfor model_table_col in list_models_table_cols:\n\t\t\t\t\t\t\tset_trainability(model_table_col, True)\n\t\t\t\t\t\t\tmodel_table_col.compile(optimizer=adam, loss='binary_crossentropy')\n\t\t\t\t\t\tset_trainability(Seq_model, True)\n\t\t\t\t\t\tset_trainability(model, True)\n\n\t\t\t\t\t\tSeq_model.compile(optimizer=adam, loss='binary_crossentropy')\n\t\t\t\t\t\tmodel.compile(optimizer=adam, loss='binary_crossentropy', metrics=['acc', auc_10_perc_fpr])\n\n\t\t\t\t\tmodel.fit(inputs_train_K_fold, {'output': Y_train[train]}, callbacks=callbacks_list,\n\t\t\t\t\t\t\t validation_data=(inputs_val_K_fold, {'output': Y_train[val]}),\n\t\t\t\t\t\t\t epochs=epo + col_train_length, batch_size=batch_size, shuffle=shuffleTraining, verbose=2,\n\t\t\t\t\t\t\t initial_epoch=epo)\n\n\t\t\t\t\tepo += col_train_length\n\t\t\telse:\n\t\t\t\tmodel.fit(inputs_train_K_fold, {'output': Y_train[train]}, callbacks=callbacks_list,\n\t\t\t\t\t\t validation_data=(inputs_val_K_fold, {'output': Y_train[val]}),\n\t\t\t\t\t\t epochs=epochs, batch_size=batch_size, shuffle=shuffleTraining, verbose=1)\n\n\t\t\t\t# load \"best\" model and train finer\n\t\t\t\tmodel.load_weights(filepath2)\n\t\t\t\tadam = optimizers.Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=None, decay=0,\n\t\t\t\t\t\t\t\t\t amsgrad=False)\n\t\t\t\tfor model_table_col in list_models_table_cols:\n\t\t\t\t\tset_trainability(model_table_col, True)\n\t\t\t\t\tmodel_table_col.compile(optimizer=adam, loss='binary_crossentropy')\n\t\t\t\tset_trainability(Seq_model, True)\n\t\t\t\tset_trainability(model, True)\n\n\t\t\t\tSeq_model.compile(optimizer=adam, loss='binary_crossentropy')\n\t\t\t\tmodel.compile(optimizer=adam, loss='binary_crossentropy', metrics=['acc', auc_10_perc_fpr])\n\n\t\t\t\tmodel.fit(inputs_train_K_fold, {'output': Y_train[train]}, callbacks=callbacks_list,\n\t\t\t\t\t\t validation_data=(inputs_val_K_fold, {'output': Y_train[val]}),\n\t\t\t\t\t\t epochs=5, batch_size=batch_size, shuffle=shuffleTraining, verbose=1)\n\n\t\t\tscores = model.evaluate(inputs_val, Y_val, verbose=0, batch_size=len(Y_val))\n\t\t\tprint(\"%s: %.2f%%\" % (model.metrics_names[1], scores[1] * 100))\n\t\t\tcvscores.append(scores[1] * 100)\n\t\t\tmodel.save_weights(f\"{path}/weights_model_k-fold_run_{len(cvscores)}_{suffix}.hdf5\")\n\t\t\t# if real val_set exists, use models with lowest val_loss as models in 
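Each schedule phase resumes training with initial_epoch=epo and epochs=epo + col_train_length; Keras interprets epochs as the final epoch index when initial_epoch is set, so every call adds exactly one phase to a single continuous run and the checkpoint/TensorBoard callbacks see monotonically increasing epochs. A self-contained toy of the pattern (hypothetical data and model):

import numpy as np
from keras import models, layers

x, y = np.random.rand(32, 4), np.random.randint(0, 2, (32, 1))
m = models.Sequential([layers.Dense(1, activation='sigmoid', input_shape=(4,))])
m.compile('adam', 'binary_crossentropy')
for epo in range(0, 6, 2):
	# each call trains epochs [epo, epo + 2) of one continuous run
	m.fit(x, y, initial_epoch=epo, epochs=epo + 2, verbose=0)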
ensemble\n\t\t\tos.rename(filepath, f\"{path}/weights_model_highest_val_acc_k-fold_run_{len(cvscores)}_{suffix}.hdf5\")\n\t\t\tos.rename(filepath2, f\"{path}/weights_model_lowest_val_loss_k-fold_run_{len(cvscores)}_{suffix}.hdf5\")\n\t\t\tos.rename(filepath3, f\"{path}/weights_model_highest_val_auc10_k-fold_run_{len(cvscores)}_{suffix}.hdf5\")\n\n\t\tprint(\"summary\")\n\t\tprint(\"%.2f%% (+/- %.2f%%)\" % (np.mean(cvscores), np.std(cvscores)))\n\n\t\tmodel, Seq_model, list_models_table_cols = build_broad_complex_model(nodes, dropout, sequence_length)\n\t\tset_trainability(Seq_model, True)\n\t\tSeq_model.compile(optimizer='adam', loss='binary_crossentropy')\n\t\tmodel.compile(optimizer='adam', loss='binary_crossentropy', metrics=['acc', auc_10_perc_fpr])\n\t\tSeq_model.summary()\n\t\tmodel.summary()\n\n\t\tvalidate_cross_val_models(model, path, inputs_test, inputs_val, Y_test, Y_val)\n\telse:\n\t\tmodel.fit(inputs_train,\n\t\t\t\t {'output': Y_train}, callbacks=callbacks_list,\n\t\t\t\t validation_data=(inputs_val, {'output': Y_val}),\n\t\t\t\t epochs=epochs, batch_size=batch_size, shuffle=shuffleTraining, verbose=2)\n\n\t\tcompare_quality(model, path, inputs_test, Y_test, inputs_val, Y_val, pos_y_test, complex_model=False)\n\n\ndef test_multiple_length(path, suffix, complex_model=False, online_training=False, shuffleTraining=True,\n\t\t\t\t\t\t one_hot_encoding=True, val_size=0.3, design=1, sampleSize=1, nodes=32, use_generator=True,\n\t\t\t\t\t\t snapShotEnsemble=False, epochs=100, dropout=0.0, faster=False, batch_size=32,\n\t\t\t\t\t\t sequence_length=50,\n\t\t\t\t\t\t voting=False, tensorboard=False, gpus=False, titel='', x_axes='', y_axes='', accuracy=False,\n\t\t\t\t\t\t loss=False, runtime=False, label1='', label2='', label3='', label4='', cross_val=True,\n\t\t\t\t\t\t modular_training=True, **kwargs):\n\t# create input for different models\n\tinputs_train = {}\n\tinputs_val = {}\n\tinputs_test = {}\n\n\tinput_X = {}\n\n\tfor sequence_length in range(9, 51, 10):\n\t\tX_train_i, X_val_i, X_test_i, Y_train_i, Y_val_i, Y_test_i, pos_y_test_i = load_data(complex_model, path,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t val_size=0.3,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t generator=use_generator,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t sequence_length=sequence_length)\n\n\t\t# define inputs\n\t\tinput_name = f\"seq_input_{sequence_length}\"\n\t\tinputs_train.update({input_name: X_train_i})\n\t\tinputs_val.update({input_name: X_val_i})\n\t\tinputs_test.update({input_name: X_test_i})\n\n\t\tif cross_val:\n\t\t\t# if you want weighted esemble dont join train and val set\n\t\t\tX_i = X_train_i\n\t\t\tY_i = Y_train_i\n\t\t\tinput_X.update({input_name: X_i})\n\n\tmodel = build_multi_length_model(nodes, dropout)\n\n\tfilepath2 = path + \"/weights.best.loss.\" + suffix + \".hdf5\"\n\tcheckpoint2 = ModelCheckpoint(filepath2, monitor='val_loss', verbose=1, save_best_only=True, mode='min')\n\tcallbacks_list = [checkpoint2]\n\n\tif cross_val:\n\t\t# define 10-fold cross validation test harness\n\t\tkfold = StratifiedKFold(n_splits=10, shuffle=True, random_state=1337)\n\t\tcvscores = []\n\t\tfor train, val in kfold.split(X_i, np.argmax(Y_i, axis=1)):\n\t\t\tcheckpoint2 = ModelCheckpoint(filepath2, monitor='val_loss', verbose=1, save_best_only=True, mode='min')\n\t\t\tcallbacks_list = [checkpoint2]\n\t\t\tK.clear_session()\n\t\t\tdel model\n\t\t\tmodel = build_multi_length_model(nodes, dropout)\n\t\t\tinputs_train_K_fold = {}\n\t\t\tinputs_val_K_fold = {}\n\t\t\tfor item in 
input_X.items():\n\t\t\t\tinputs_train_K_fold.update({item[0]: item[1][train]})\n\t\t\t\tinputs_val_K_fold.update({item[0]: item[1][val]})\n\n\t\t\tmodel.fit(inputs_train_K_fold, {'output': Y_i[train]}, callbacks=callbacks_list,\n\t\t\t\t\t validation_data=(inputs_val_K_fold, {'output': Y_i[val]}),\n\t\t\t\t\t epochs=epochs, batch_size=batch_size, shuffle=shuffleTraining, verbose=2)\n\n\t\t\tscores = model.evaluate(inputs_val_K_fold, Y_i[val], verbose=0)\n\t\t\tprint(\"%s: %.2f%%\" % (model.metrics_names[1], scores[1] * 100))\n\t\t\tcvscores.append(scores[1] * 100)\n\t\t\tmodel.save_weights(f\"{path}/weights_model_k-fold_run_{len(cvscores)}.hdf5\")\n\t\t\t# if real val_set exists, use models with lowest val_loss as models in ensemble\n\t\t\tos.rename(filepath2, f\"{path}/weights_model_lowest_val_loss_k-fold_run_{len(cvscores)}.hdf5\")\n\n\t\tprint(\"summary\")\n\t\tprint(\"%.2f%% (+/- %.2f%%)\" % (np.mean(cvscores), np.std(cvscores)))\n\n\t\tfor middle_name in (\"\", \"lowest_val_loss_\"):\n\t\t\tmodels_filenames = []\n\t\t\tfor file in sorted(os.listdir(path)):\n\t\t\t\tif file.endswith(\".hdf5\") and file.startswith(f\"weights_model_{middle_name}k-fold_run_\"):\n\t\t\t\t\tprint(file)\n\t\t\t\t\tmodels_filenames.append(os.path.join(path, file))\n\n\t\t\tpreds = []\n\t\t\tpreds_val = []\n\t\t\tfor fn in models_filenames:\n\t\t\t\tprint(\"load model and predict\")\n\t\t\t\tmodel.load_weights(fn)\n\t\t\t\tpred = model.predict(inputs_test)\n\t\t\t\tpreds.append(pred)\n\t\t\t\tpred_val = model.predict(inputs_val)\n\t\t\t\tpreds_val.append(pred_val)\n\n\t\t\tprediction_weights = [1. / len(models_filenames)] * len(models_filenames)\n\t\t\taccuracy, error = calculate_weighted_accuracy(prediction_weights, preds, preds_val, 2,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t Y=Y_test_i, Y_val=Y_val_i,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t ROC=True, name=f\"{middle_name}ensemble\")\n\n\t\t\tbest_weights = weighted_ensemble(preds_val, 2, nb_models=len(models_filenames), Y=Y_val_i)\n\t\t\taccuracy, error = calculate_weighted_accuracy(best_weights, preds, preds_val, 2,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t Y=Y_test_i, Y_val=Y_val_i,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t ROC=True, name=f\"{middle_name}ensemble_weighted\")\n\n\telse:\n\t\tmodel.fit(inputs_train,\n\t\t\t\t {'output': Y_train_i}, callbacks=callbacks_list,\n\t\t\t\t validation_data=(inputs_val, {'output': Y_val_i}),\n\t\t\t\t epochs=epochs, batch_size=batch_size, shuffle=shuffleTraining, verbose=2)\n\n\t\tcompare_quality(model, path, inputs_test, Y_test_i, inputs_val, Y_val_i, pos_y_test_i, complex_model=False)\n\n\ndef test_and_plot(path, suffix, complex_model=False, online_training=False, shuffleTraining=True,\n\t\t\t\t full_seq_embedding=False,\n\t\t\t\t one_hot_encoding=True, val_size=0.3, design=1, sampleSize=1, nodes=32, use_generator=True,\n\t\t\t\t snapShotEnsemble=False, epochs=100, dropout=0.0, faster=False, batch_size=32, sequence_length=50,\n\t\t\t\t voting=False, tensorboard=False, gpus=False, titel='', x_axes='', y_axes='', accuracy=False,\n\t\t\t\t loss=False, runtime=False, label1='', label2='', label3='', label4='', cross_val=True,\n\t\t\t\t modular_training=True, include_raptorx_iupred=False, include_dict_scores=False, non_binary=False,\n\t\t\t\t own_embedding=False, both_embeddings=False, **kwargs):\n\t# SAVE SETTINGS\n\twith open(path + '/' + suffix + \"_config.txt\", \"w\") as file:\n\t\tfor i in list(locals().items()):\n\t\t\tfile.write(str(i) + '\\n')\n\n\tX_train, X_val, X_test, Y_train, Y_val, Y_test, pos_y_test, protein_mapping = load_data(complex_model, 
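calculate_weighted_accuracy (defined further down) reduces the k-fold ensemble to a weighted average of the per-model softmax outputs. Stripped to its core with hypothetical predictions:

import numpy as np

preds = [np.array([[0.2, 0.8], [0.6, 0.4]]),   # model 1
         np.array([[0.4, 0.6], [0.7, 0.3]])]   # model 2
weights = [0.5, 0.5]                           # the equal-weight case above
ensemble = sum(w * p for w, p in zip(weights, preds))
labels = np.argmax(ensemble, axis=1)           # -> array([1, 0])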
path,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tval_size=0.2,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tgenerator=use_generator,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tnon_binary=non_binary,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tsequence_length=sequence_length,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfull_seq_embedding=full_seq_embedding,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tinclude_raptorx_iupred=include_raptorx_iupred,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tinclude_dict_scores=include_dict_scores,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\town_embedding=own_embedding)\n\t# X_test_2 = X_val.copy()\n\t# Y_test_2 = Y_val.copy()\n\t# X_val = X_test.copy()\n\t# Y_val = Y_test.copy()\n\t# X_test = X_test_2\n\t# Y_test = Y_test_2\n\n\tfilepath = path + \"/weights.best.acc.\" + suffix + \".hdf5\"\n\tfilepath2 = path + \"/weights.best.loss.\" + suffix + \".hdf5\"\n\tfilepath2_model = path + \"/weights.best.loss.\" + suffix + \"_complete_model.hdf5\"\n\tfilepath3 = path + \"/weights.best.auc10.\" + suffix + \".hdf5\"\n\tcheckpoint = ModelCheckpoint(filepath, monitor='val_acc', verbose=1, save_best_only=True, save_weights_only=True,\n\t\t\t\t\t\t\t\t mode='max')\n\t# checkpoint = ModelCheckpoint(filepath, monitor='val_accuracy_binary', verbose=1, save_best_only=True, save_weights_only=True,\n\t# mode='max')\n\t# checkpoint = EarlyStopping('val_loss', min_delta=0, patience=epochs//10, restore_best_weights=True)\n\n\tcheckpoint2 = ModelCheckpoint(filepath2, monitor='val_loss', verbose=1, save_best_only=True,\n\t\t\t\t\t\t\t\t save_weights_only=True, mode='min')\n\t# checkpoint2_model = ModelCheckpoint(filepath2_model, monitor='val_loss', verbose=1, save_best_only=True,\n\t# save_weights_only=False, mode='min')\n\tcheckpoint3 = ModelCheckpoint(filepath3, monitor='val_auc_10_perc_fpr', verbose=1, save_best_only=True,\n\t\t\t\t\t\t\t\t save_weights_only=True, mode='max')\n\t# checkpoint3 = ModelCheckpoint(filepath3, monitor='val_auc_10_perc_fpr_binary', verbose=1, save_best_only=True,\n\t# \t\t\t\t\t\t\t save_weights_only=True, mode='max')\n\n\t# tensorboard = TensorBoard(f'./tensorboard_log_dir')\n\t# checkpoint2 = ModelCheckpoint(filepath2, monitor='loss', verbose=1, save_best_only=True,\n\t# save_weights_only=True, mode='min')\n\t# callbacks_list = [checkpoint2]\n\tcallbacks_list = [checkpoint, checkpoint2, checkpoint3]\n\n\tif not complex_model:\n\t\tif include_raptorx_iupred:\n\t\t\tmodel, Seq_model, Table_model = build_model_with_raptorx_iupred(nodes, dropout, seq_length=sequence_length)\n\t\t\tinputs_val = {}\n\t\t\tinputs_test = {}\n\t\t\tfor col_num in range(2):\n\t\t\t\tif col_num == 0:\n\t\t\t\t\tinputs_val.update({\"seq_input\": X_val.sequences})\n\t\t\t\t\tinputs_test.update({\"seq_input\": X_test.sequences})\n\n\t\t\t\tif col_num == 1:\n\t\t\t\t\tinputs_val.update({\"aux_input\": X_val.table})\n\t\t\t\t\tinputs_test.update({\"aux_input\": X_test.table})\n\t\telif include_dict_scores:\n\t\t\tmodel, Seq_model, Table_model = build_model_with_table(nodes, dropout, seq_length=sequence_length)\n\t\t\tinputs_val = {}\n\t\t\tinputs_test = {}\n\t\t\tfor col_num in range(2):\n\t\t\t\tif col_num == 0:\n\t\t\t\t\tinputs_val.update({\"seq_input\": X_val.sequences})\n\t\t\t\t\tinputs_test.update({\"seq_input\": X_test.sequences})\n\n\t\t\t\tif col_num == 1:\n\t\t\t\t\tinputs_val.update({\"aux_input\": X_val.table})\n\t\t\t\t\tinputs_test.update({\"aux_input\": X_test.table})\n\t\telse:\n\t\t\tmodel = build_model(nodes, dropout, 
seq_length=sequence_length, non_binary=non_binary,\n\t\t\t\t\t\t\t\town_embedding=own_embedding, both_embeddings=both_embeddings) # X_train.shape[1])\n\t\tif use_generator:\n\t\t\tif include_raptorx_iupred or include_dict_scores:\n\t\t\t\tparams = {\"number_subsequences\": 1, \"dim\": X_test.sequences.shape[1],\n\t\t\t\t\t\t \"n_channels\": X_test.sequences.shape[-1],\n\t\t\t\t\t\t \"n_classes\": Y_test.shape[-1], \"shuffle\": shuffleTraining, \"online_training\": online_training,\n\t\t\t\t\t\t \"seed\": 1, \"faster\": batch_size}\n\t\t\telse:\n\t\t\t\tparams = {\"number_subsequences\": 1, \"dim\": X_test.shape[1], \"n_channels\": X_test.shape[-1],\n\t\t\t\t\t\t \"n_classes\": Y_test.shape[-1], \"shuffle\": shuffleTraining, \"online_training\": online_training,\n\t\t\t\t\t\t \"seed\": 1, \"faster\": batch_size}\n\n\t\t\ttraining_generator = DataGenerator.DataGenerator(directory=directory + \"/train\",\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t sequence_length=sequence_length, non_binary=non_binary,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t full_seq_embedding=full_seq_embedding,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t include_raptorx_iupred=include_raptorx_iupred,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t include_dict_scores=include_dict_scores,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t **params, **kwargs)\n\n\t\t\tif include_raptorx_iupred or include_dict_scores:\n\t\t\t\tmodel.fit_generator(generator=training_generator, epochs=epochs, callbacks=callbacks_list,\n\t\t\t\t\t\t\t\t\tvalidation_data=((inputs_val, {'output': Y_val})), shuffle=shuffleTraining,\n\t\t\t\t\t\t\t\t\tverbose=1)\n\n\t\t\t\tmodel.load_weights(filepath)\n\t\t\t\tadam = optimizers.Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=None, decay=0,\n\t\t\t\t\t\t\t\t\t amsgrad=False)\n\t\t\t\tset_trainability(Table_model, True)\n\t\t\t\tTable_model.compile(optimizer=adam, loss='binary_crossentropy')\n\t\t\t\tset_trainability(Seq_model, False)\n\t\t\t\tset_trainability(model, True)\n\n\t\t\t\tSeq_model.compile(optimizer=adam, loss='binary_crossentropy')\n\t\t\t\tmodel.compile(optimizer=adam, loss='binary_crossentropy', metrics=['acc', auc_10_perc_fpr])\n\n\t\t\t\tmodel.fit_generator(generator=training_generator, epochs=5, callbacks=callbacks_list,\n\t\t\t\t\t\t\t\t\tvalidation_data=((inputs_val, {'output': Y_val})), shuffle=shuffleTraining,\n\t\t\t\t\t\t\t\t\tverbose=1)\n\n\t\t\t\tmodel.load_weights(filepath)\n\t\t\t\tadam = optimizers.Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=None, decay=0,\n\t\t\t\t\t\t\t\t\t amsgrad=False)\n\t\t\t\tset_trainability(Table_model, True)\n\t\t\t\tTable_model.compile(optimizer=adam, loss='binary_crossentropy')\n\t\t\t\tset_trainability(Seq_model, True)\n\t\t\t\tset_trainability(model, True)\n\n\t\t\t\tSeq_model.compile(optimizer=adam, loss='binary_crossentropy')\n\t\t\t\tmodel.compile(optimizer=adam, loss='binary_crossentropy', metrics=['acc', auc_10_perc_fpr])\n\n\t\t\t\t# model.load_weights(filepath2)\n\t\t\t\tmodel.fit_generator(generator=training_generator, epochs=5, callbacks=callbacks_list,\n\t\t\t\t\t\t\t\t\tvalidation_data=((inputs_val, {'output': Y_val})), shuffle=shuffleTraining,\n\t\t\t\t\t\t\t\t\tverbose=1)\n\t\t\telse:\n\t\t\t\tmodel.fit_generator(generator=training_generator, epochs=epochs, callbacks=callbacks_list,\n\t\t\t\t\t\t\t\t\tvalidation_data=(X_val, Y_val),\n\t\t\t\t\t\t\t\t\tshuffle=shuffleTraining, verbose=1)\n\n\t\t\tmodel.save_weights(path + \"/weights.last_model.\" + suffix + \".hdf5\")\n\n\t\telse:\n\t\t\tjoin_train_test_val = True\n\t\t\tprotein_list = list(protein_mapping.keys())\n\t\t\t# define 10-fold 
cross validation test harness\n\t\t\tkfold = StratifiedKFold(n_splits=10, shuffle=True, random_state=1337)\n\t\t\tcvscores = []\n\t\t\tif cross_val:\n\t\t\t\tif join_train_test_val:\n\n\t\t\t\t\tif both_embeddings:\n\t\t\t\t\t\tX_train_seq, X_val_seq, X_test_seq, Y_train, Y_val, Y_test, pos_y_test, protein_mapping = load_data(\n\t\t\t\t\t\t\tcomplex_model, path, val_size=0.2,\n\t\t\t\t\t\t\tgenerator=use_generator,\n\t\t\t\t\t\t\tnon_binary=non_binary,\n\t\t\t\t\t\t\tsequence_length=sequence_length,\n\t\t\t\t\t\t\tfull_seq_embedding=False,\n\t\t\t\t\t\t\tinclude_raptorx_iupred=include_raptorx_iupred,\n\t\t\t\t\t\t\tinclude_dict_scores=include_dict_scores,\n\t\t\t\t\t\t\town_embedding=True)\n\t\t\t\t\t\tX_seq = np.append(X_train_seq, X_val_seq, axis=0)\n\t\t\t\t\t\tX_seq = np.append(X_seq, X_test_seq, axis=0)\n\t\t\t\t\t\tdel X_train_seq, X_val_seq, X_test_seq\n\t\t\t\t\t# inputs_train ={}\n\t\t\t\t\t# inputs_train.update({\"seq_input\": X_seq, \"embedding_input\": X})\n\n\t\t\t\t\tX = np.append(X_train, X_val, axis=0)\n\t\t\t\t\tdel X_train, X_val\n\t\t\t\t\tX = np.append(X, X_test, axis=0)\n\t\t\t\t\tdel X_test\n\t\t\t\t\tY = np.append(Y_train, Y_val, axis=0)\n\t\t\t\t\tY = np.append(Y, Y_test, axis=0)\n\n\t\t\t\t\tn_proteins = np.zeros(len(protein_list))\n\n\t\t\t\t\ttprs = []\n\t\t\t\t\taucs = []\n\t\t\t\t\tmean_fpr = np.linspace(0, 1, 100)\n\n\t\t\t\t\tfor train, test in kfold.split(protein_list, n_proteins):\n\t\t\t\t\t\tcheckpoint = ModelCheckpoint(filepath, monitor='val_acc', verbose=1, save_best_only=True,\n\t\t\t\t\t\t\t\t\t\t\t\t\t save_weights_only=True, mode='max')\n\t\t\t\t\t\tcheckpoint2 = ModelCheckpoint(filepath2, monitor='val_loss', verbose=1, save_best_only=True,\n\t\t\t\t\t\t\t\t\t\t\t\t\t save_weights_only=True, mode='min')\n\t\t\t\t\t\tcheckpoint3 = ModelCheckpoint(filepath3, monitor='val_auc_10_perc_fpr', verbose=1,\n\t\t\t\t\t\t\t\t\t\t\t\t\t save_best_only=True, save_weights_only=True, mode='max')\n\t\t\t\t\t\tif tf.gfile.Exists(f\"./tensorboard_log_dir_{suffix}/run{len(cvscores)}\"):\n\t\t\t\t\t\t\ttf.gfile.DeleteRecursively(f\"./tensorboard_log_dir_{suffix}/run{len(cvscores)}\")\n\t\t\t\t\t\ttensorboard = TensorBoard(f'./tensorboard_log_dir_{suffix}/run{len(cvscores)}')\n\t\t\t\t\t\tcallbacks_list = [checkpoint, checkpoint2, checkpoint3, tensorboard]\n\n\t\t\t\t\t\tK.clear_session()\n\t\t\t\t\t\tdel model\n\n\t\t\t\t\t\tmodel, small_model, big_model = build_model(nodes, dropout, seq_length=sequence_length,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\town_embedding=own_embedding,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tboth_embeddings=both_embeddings)\n\t\t\t\t\t\twith open(f\"{path}/k-fold_run_{len(cvscores) + 1}_test_set.csv\", \"w\") as outfile:\n\t\t\t\t\t\t\tfoo = [protein_mapping[i][0] + \"\\n\" for i in test]\n\t\t\t\t\t\t\toutfile.writelines(foo)\n\t\t\t\t\t\ttrain = [j for i in train for j in protein_mapping[i][1]]\n\t\t\t\t\t\ttest = [j for i in test for j in protein_mapping[i][1]]\n\n\t\t\t\t\t\tassert len(test) == len(np.unique(test)), \"duplicates found in k_fold test split\"\n\t\t\t\t\t\tif both_embeddings:\n\t\t\t\t\t\t\tmodel.fit([X[train], X_seq[train]], Y[train], epochs=epochs, batch_size=batch_size,\n\t\t\t\t\t\t\t\t\t verbose=2,\n\t\t\t\t\t\t\t\t\t validation_data=([X[test], X_seq[test]], Y[test]),\n\t\t\t\t\t\t\t\t\t callbacks=callbacks_list, shuffle=shuffleTraining)\n\n\t\t\t\t\t\t# # model.load_weights(filepath3)\n\t\t\t\t\t\t# adam = optimizers.Adam(lr=0.0001, beta_1=0.9, beta_2=0.999, epsilon=None, decay=0,\n\t\t\t\t\t\t# \t\t\t\t\t 
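The fold split above is made over proteins, not over windows: kfold.split runs on the protein list, and the chosen proteins are then expanded to their window indices via protein_mapping, so overlapping windows from one protein can never leak across the train/test boundary (the assert above guards exactly that). A toy version with hypothetical names:

protein_mapping = {0: ("protA", [0, 1, 2]), 1: ("protB", [3, 4]), 2: ("protC", [5])}
train_prot, test_prot = [0, 2], [1]
train_idx = [j for i in train_prot for j in protein_mapping[i][1]]  # [0, 1, 2, 5]
test_idx = [j for i in test_prot for j in protein_mapping[i][1]]    # [3, 4]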
amsgrad=False)\n\t\t\t\t\t\t# set_trainability(big_model, True)\n\t\t\t\t\t\t# big_model.compile(optimizer=adam, loss='binary_crossentropy')\n\t\t\t\t\t\t# set_trainability(small_model, True)\n\t\t\t\t\t\t# set_trainability(model, True)\n\t\t\t\t\t\t#\n\t\t\t\t\t\t# small_model.compile(optimizer=adam, loss='binary_crossentropy')\n\t\t\t\t\t\t# model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['acc', auc_10_perc_fpr])\n\t\t\t\t\t\t#\n\t\t\t\t\t\t# model.fit([X[train], X_seq[train]], Y[train], epochs=10, batch_size=batch_size, verbose=2,\n\t\t\t\t\t\t# \t\t validation_data=([X[test], X_seq[test]], Y[test]),\n\t\t\t\t\t\t# \t\t callbacks=callbacks_list, shuffle=shuffleTraining)\n\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tmodel.fit(X[train], Y[train], epochs=epochs, batch_size=batch_size, verbose=2,\n\t\t\t\t\t\t\t\t\t validation_data=(X[test], Y[test]),\n\t\t\t\t\t\t\t\t\t callbacks=callbacks_list, shuffle=shuffleTraining)\n\t\t\t\t\t\tif both_embeddings:\n\t\t\t\t\t\t\tscores = model.evaluate([X[test], X_seq[test]], Y[test], verbose=0, batch_size=batch_size)\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tscores = model.evaluate(X[test], Y[test], verbose=0, batch_size=batch_size)\n\t\t\t\t\t\tprint(\"%s: %.4f\" % (model.metrics_names[2], scores[2]))\n\t\t\t\t\t\tcvscores.append(scores[2])\n\n\t\t\t\t\t\tmodel.save_weights(f\"{path}/weights_model_k-fold_run_{len(cvscores)}_{suffix}.hdf5\")\n\t\t\t\t\t\tos.rename(filepath,\n\t\t\t\t\t\t\t\t f\"{path}/weights_model_highest_val_acc_k-fold_run_{len(cvscores)}_{suffix}.hdf5\")\n\t\t\t\t\t\tos.rename(filepath2,\n\t\t\t\t\t\t\t\t f\"{path}/weights_model_lowest_val_loss_k-fold_run_{len(cvscores)}_{suffix}.hdf5\")\n\t\t\t\t\t\tos.rename(filepath3,\n\t\t\t\t\t\t\t\t f\"{path}/weights_model_highest_val_auc10_k-fold_run_{len(cvscores)}_{suffix}.hdf5\")\n\n\t\t\t\t\t\t# Compute ROC curve and area the curve\n\t\t\t\t\t\tif both_embeddings:\n\t\t\t\t\t\t\tpred = model.predict([X[test], X_seq[test]])\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tpred = model.predict(X[test])\n\t\t\t\t\t\tfpr, tpr, thresholds = metrics.roc_curve(Y[:, 1][test], pred[:, 1])\n\t\t\t\t\t\ttprs.append(interp(mean_fpr, fpr, tpr))\n\t\t\t\t\t\ttprs[-1][0] = 0.0\n\t\t\t\t\t\troc_auc = metrics.auc(fpr, tpr)\n\t\t\t\t\t\taucs.append(roc_auc)\n\t\t\t\t\t\tplt.plot(fpr, tpr, lw=1, alpha=0.3,\n\t\t\t\t\t\t\t\t label='ROC fold %d (AUC = %0.2f)' % (len(cvscores), roc_auc))\n\n\t\t\t\t\tmean_tpr = np.mean(tprs, axis=0)\n\t\t\t\t\tmean_tpr[-1] = 1.0\n\t\t\t\t\tmean_auc = metrics.auc(mean_fpr, mean_tpr)\n\t\t\t\t\tstd_auc = np.std(aucs)\n\n\t\t\t\t\tstd_tpr = np.std(tprs, axis=0)\n\t\t\t\t\ttprs_upper = np.minimum(mean_tpr + std_tpr, 1)\n\t\t\t\t\ttprs_lower = np.maximum(mean_tpr - std_tpr, 0)\n\t\t\t\t\tplt.fill_between(mean_fpr, tprs_lower, tprs_upper, color='grey', alpha=.2,\n\t\t\t\t\t\t\t\t\t label=r'$\\pm$ 1 std. 
dev.')\n\n\t\t\t\t\tplt.plot(mean_fpr, mean_tpr, color='b',\n\t\t\t\t\t\t\t label=r'Mean ROC (AUC = %0.2f $\\pm$ %0.2f)' % (mean_auc, std_auc),\n\t\t\t\t\t\t\t lw=2, alpha=.8)\n\t\t\t\t\tplt.plot([0, 1], [0, 1], 'k--', lw=2)\n\t\t\t\t\tplt.xlim([0.0, 1.0])\n\t\t\t\t\tplt.ylim([0.0, 1.05])\n\t\t\t\t\tplt.xlabel('False Positive Rate')\n\t\t\t\t\tplt.ylabel('True Positive Rate')\n\t\t\t\t\tplt.title('Receiver operating characteristic for multiple classes')\n\t\t\t\t\tplt.legend(loc=\"lower right\")\n\t\t\t\t\tplt.savefig(directory + f\"/roc_curve_{suffix}_ensemble.pdf\")\n\t\t\t\t\tplt.close()\n\n\t\t\t\t\tprint(\"%.4f (+/- %.4f)\" % (np.mean(cvscores), np.std(cvscores)))\n\t\t\t\telse:\n\t\t\t\t\tX = X_train\n\t\t\t\t\tY = Y_train\n\n\t\t\t\t\tfor train, test in kfold.split(X, np.argmax(Y, axis=1)):\n\t\t\t\t\t\tcheckpoint = ModelCheckpoint(filepath, monitor='val_acc', verbose=1, save_best_only=True,\n\t\t\t\t\t\t\t\t\t\t\t\t\t save_weights_only=True, mode='max')\n\t\t\t\t\t\tcheckpoint2 = ModelCheckpoint(filepath2, monitor='val_loss', verbose=1, save_best_only=True,\n\t\t\t\t\t\t\t\t\t\t\t\t\t save_weights_only=True, mode='min')\n\t\t\t\t\t\tcheckpoint3 = ModelCheckpoint(filepath3, monitor='val_auc_10_perc_fpr', verbose=1,\n\t\t\t\t\t\t\t\t\t\t\t\t\t save_best_only=True, save_weights_only=True, mode='max')\n\t\t\t\t\t\tif tf.gfile.Exists(f\"./tensorboard_log_dir/run{len(cvscores)}\"):\n\t\t\t\t\t\t\ttf.gfile.DeleteRecursively(f\"./tensorboard_log_dir/run{len(cvscores)}\")\n\t\t\t\t\t\ttensorboard = TensorBoard(f'./tensorboard_log_dir/run{len(cvscores)}')\n\t\t\t\t\t\tcallbacks_list = [checkpoint, checkpoint2, checkpoint3, tensorboard]\n\n\t\t\t\t\t\tK.clear_session()\n\t\t\t\t\t\tdel model\n\n\t\t\t\t\t\tmodel = build_model(nodes, dropout, seq_length=sequence_length, own_embedding=own_embedding)\n\n\t\t\t\t\t\tmodel.fit(X[train], Y[train], epochs=epochs, batch_size=batch_size, verbose=2,\n\t\t\t\t\t\t\t\t validation_data=(X_val, Y_val),\n\t\t\t\t\t\t\t\t callbacks=callbacks_list, shuffle=shuffleTraining)\n\t\t\t\t\t\tscores = model.evaluate(X[test], Y[test], verbose=0)\n\t\t\t\t\t\tscores = model.evaluate(X[test], Y[test], verbose=0, batch_size=batch_size)\n\n\t\t\t\t\t\tprint(\"%s: %.2f%%\" % (model.metrics_names[1], scores[1] * 100))\n\t\t\t\t\t\tcvscores.append(scores[1] * 100)\n\n\t\t\t\t\t\tmodel.save_weights(f\"{path}/weights_model_k-fold_run_{len(cvscores)}_{suffix}.hdf5\")\n\t\t\t\t\t\t# if real val_set exists, use models with lowest val_loss as models in ensemble\n\t\t\t\t\t\tos.rename(filepath,\n\t\t\t\t\t\t\t\t f\"{path}/weights_model_highest_val_acc_k-fold_run_{len(cvscores)}_{suffix}.hdf5\")\n\t\t\t\t\t\tos.rename(filepath2,\n\t\t\t\t\t\t\t\t f\"{path}/weights_model_lowest_val_loss_k-fold_run_{len(cvscores)}_{suffix}.hdf5\")\n\t\t\t\t\t\tos.rename(filepath3,\n\t\t\t\t\t\t\t\t f\"{path}/weights_model_highest_val_auc10_k-fold_run_{len(cvscores)}_{suffix}.hdf5\")\n\n\t\t\t\t\t\tprint(\"summary\")\n\t\t\t\t\tprint(\"%.2f%% (+/- %.2f%%)\" % (np.mean(cvscores), np.std(cvscores)))\n\n\t\t\t\t\tvalidate_cross_val_models(model, path, X_test, X_val, Y_test, Y_val)\n\n\t\t\telse:\n\t\t\t\tmodel.fit(X_train, Y_train, epochs=epochs, batch_size=batch_size, validation_data=(X_val, Y_val),\n\t\t\t\t\t\t verbose=1, callbacks=callbacks_list, shuffle=shuffleTraining)\n\telse:\n\t\tclass_weight = clw.compute_class_weight('balanced', np.unique(np.argmax(Y_train, axis=1)),\n\t\t\t\t\t\t\t\t\t\t\t\tnp.argmax(Y_train, axis=1))\n\t\tprint(class_weight)\n\t\tmodel = build_complex_model(nodes, dropout, 
seq_length=X_train.seq_length,\n\t\t\t\t\t\t\t\t\ttable_columns=X_train.table.shape[1])\n\n\t\t# define 10-fold cross validation test harness\n\t\tkfold = StratifiedKFold(n_splits=10, shuffle=True, random_state=1337)\n\t\tcvscores = []\n\t\tX = X_train\n\t\tY = Y_train\n\n\t\tfor train, test in kfold.split(X.sequences, np.argmax(Y, axis=1)):\n\t\t\tif cross_val:\n\t\t\t\tclass_weight = clw.compute_class_weight('balanced', np.unique(np.argmax(Y[train], axis=1)),\n\t\t\t\t\t\t\t\t\t\t\t\t\t\tnp.argmax(Y[train], axis=1))\n\t\t\t\tprint(class_weight)\n\t\t\tcheckpoint2 = ModelCheckpoint(filepath2, monitor='val_acc', verbose=1, save_best_only=True, mode='max')\n\t\t\tcallbacks_list = [checkpoint2]\n\t\t\tK.clear_session()\n\t\t\tdel model\n\t\t\tmodel, Seq_model, Table_model = build_complex_model(nodes, dropout, seq_length=sequence_length,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttable_columns=X_train.table.shape[1])\n\t\t\t# plot graph\n\t\t\tplot_model(model, to_file='multiple_inputs.png')\n\t\t\tmodular_training = True\n\n\t\t\tif modular_training:\n\t\t\t\tfor epo in range(epochs):\n\t\t\t\t\tif epo == 50:\n\t\t\t\t\t\tset_trainability(Seq_model, False)\n\t\t\t\t\t\tset_trainability(Table_model, True)\n\n\t\t\t\t\t\tSeq_model.compile(optimizer=\"adam\", loss='binary_crossentropy')\n\t\t\t\t\t\tTable_model.compile(optimizer='adam', loss='binary_crossentropy')\n\t\t\t\t\t\tmodel.compile(optimizer='adam', loss='binary_crossentropy', metrics=['acc'])\n\n\t\t\t\t\tif cross_val:\n\t\t\t\t\t\tmodel.fit({'seq_input': X.sequences[train], 'aux_input': X.table[train]},\n\t\t\t\t\t\t\t\t {'output': Y[train]}, callbacks=callbacks_list, validation_data=(\n\t\t\t\t\t\t\t\t{'seq_input': X_val.sequences, 'aux_input': X_val.table}, {'output': Y_val}),\n\t\t\t\t\t\t\t\t epochs=epo + 1, batch_size=batch_size, shuffle=shuffleTraining, verbose=2,\n\t\t\t\t\t\t\t\t class_weight=class_weight, initial_epoch=epo)\n\t\t\t\t\telse:\n\t\t\t\t\t\tmodel.fit({'seq_input': X_train.sequences, 'aux_input': X_train.table},\n\t\t\t\t\t\t\t\t {'output': Y_train}, callbacks=callbacks_list,\n\t\t\t\t\t\t\t\t validation_data=(\n\t\t\t\t\t\t\t\t\t {'seq_input': X_val.sequences, 'aux_input': X_val.table}, {'output': Y_val}),\n\t\t\t\t\t\t\t\t epochs=epo + 1, batch_size=batch_size, shuffle=shuffleTraining, verbose=2,\n\t\t\t\t\t\t\t\t class_weight=class_weight, initial_epoch=epo)\n\t\t\telse:\n\t\t\t\tif cross_val:\n\t\t\t\t\tmodel.fit({'seq_input': X.sequences[train], 'aux_input': X.table[train]},\n\t\t\t\t\t\t\t {'output': Y[train]}, callbacks=callbacks_list,\n\t\t\t\t\t\t\t epochs=epochs, batch_size=batch_size, shuffle=shuffleTraining, verbose=2,\n\t\t\t\t\t\t\t class_weight=class_weight)\n\t\t\t\telse:\n\t\t\t\t\tmodel.fit({'seq_input': X_train.sequences, 'aux_input': X_train.table},\n\t\t\t\t\t\t\t {'output': Y_train}, callbacks=callbacks_list,\n\t\t\t\t\t\t\t validation_data=(\n\t\t\t\t\t\t\t\t {'seq_input': X_val.sequences, 'aux_input': X_val.table}, {'output': Y_val}),\n\t\t\t\t\t\t\t epochs=epochs, batch_size=batch_size, shuffle=shuffleTraining, verbose=2,\n\t\t\t\t\t\t\t class_weight=class_weight)\n\n\t\t\tif cross_val:\n\t\t\t\tscores = model.evaluate({'seq_input': X.sequences[test], 'aux_input': X.table[test]},\n\t\t\t\t\t\t\t\t\t\t{'output': Y[test]}, verbose=0)\n\t\t\t\tprint(\"%s: %.2f%%\" % (model.metrics_names[1], scores[1] * 100))\n\t\t\t\tcvscores.append(scores[1] * 100)\n\t\t\t\tmodel.save_weights(f\"{path}/weights_model_k-fold_run_{len(cvscores)}.hdf5\")\n\t\t\t\t# if real val_set exists, use models with lowest val_loss 
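compute_class_weight('balanced', ...) as used here weights each class by n_samples / (n_classes * class_count), so the minority class contributes proportionally more to the loss. A tiny worked example:

import numpy as np
from sklearn.utils import class_weight as clw

y = np.array([0, 0, 0, 1])                      # 3 negatives, 1 positive
w = clw.compute_class_weight('balanced', np.unique(y), y)
# 4 / (2 * 3) and 4 / (2 * 1) -> approx. array([0.667, 2.0])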
as models in ensemble\n\t\t\t\tos.rename(filepath2, f\"{path}/weights_model_lowest_val_loss_k-fold_run_{len(cvscores)}.hdf5\")\n\n\t\tif cross_val:\n\t\t\tprint(\"%.2f%% (+/- %.2f%%)\" % (np.mean(cvscores), np.std(cvscores)))\n\n\t\t\t# single last model test acc\n\t\t\tpred = model.predict({'seq_input': X_test.sequences, 'aux_input': X_test.table})\n\t\t\ttable = pd.crosstab(\n\t\t\t\tpd.Series(np.argmax(Y_test, axis=1)),\n\t\t\t\tpd.Series(np.argmax(pred, axis=1)),\n\t\t\t\trownames=['True'],\n\t\t\t\tcolnames=['Predicted'],\n\t\t\t\tmargins=True)\n\t\t\tprint(table)\n\t\t\tacc = sum(np.argmax(pred, axis=1) == np.argmax(Y_test, axis=1)) / len(np.argmax(Y_test, axis=1))\n\t\t\tprint(f\"Test accuracy last model: {acc}\")\n\n\t\t\tfor middle_name in (\"\", \"lowest_val_loss_\"):\n\t\t\t\tmodels_filenames = []\n\t\t\t\tfor file in sorted(os.listdir(path)):\n\t\t\t\t\tif file.endswith(\".hdf5\") and file.startswith(f\"weights_model_{middle_name}k-fold_run_\"):\n\t\t\t\t\t\tprint(file)\n\t\t\t\t\t\tmodels_filenames.append(os.path.join(path, file))\n\n\t\t\t\tpreds = []\n\t\t\t\tpreds_val = []\n\t\t\t\tfor fn in models_filenames:\n\t\t\t\t\tprint(\"load model and predict\")\n\t\t\t\t\tmodel.load_weights(fn)\n\t\t\t\t\tpred = model.predict({'seq_input': X_test.sequences, 'aux_input': X_test.table})\n\t\t\t\t\tpreds.append(pred)\n\t\t\t\t\tpred_val = model.predict({'seq_input': X_val.sequences, 'aux_input': X_val.table})\n\t\t\t\t\tpreds_val.append(pred_val)\n\n\t\t\t\tprediction_weights = [1. / len(models_filenames)] * len(models_filenames)\n\t\t\t\taccuracy, error = calculate_weighted_accuracy(prediction_weights, preds, preds_val, 2,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t Y=Y_test, Y_val=Y_val,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t ROC=True, name=f\"{middle_name}ensemble\")\n\n\t\t\t\tbest_weights = weighted_ensemble(preds_val, 2, nb_models=len(models_filenames), Y=Y_val)\n\t\t\t\taccuracy, error = calculate_weighted_accuracy(best_weights, preds, preds_val, 2,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t Y=Y_test, Y_val=Y_val,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t ROC=True, name=f\"{middle_name}ensemble_weighted\")\n\n\tif not cross_val:\n\t\tcompare_quality(model, path, X_test, Y_test, X_val, Y_val, pos_y_test, complex_model=complex_model,\n\t\t\t\t\t\tinclude_raptorx_iupred=include_raptorx_iupred or include_dict_scores)\n\n\ndef validate_cross_val_models(model, path, inputs_test, inputs_val, Y_test, Y_val):\n\tfor middle_name in (\"\", \"lowest_val_loss_\", \"highest_val_acc_\", \"highest_val_auc10_\"):\n\t\tmodels_filenames = []\n\t\tfor file in sorted(os.listdir(path)):\n\t\t\tif file.endswith(f\"_{suffix}.hdf5\") and file.startswith(f\"weights_model_{middle_name}k-fold_run_\"):\n\t\t\t\tprint(file)\n\t\t\t\tmodels_filenames.append(os.path.join(path, file))\n\n\t\tpreds = []\n\t\tpreds_val = []\n\t\tfor fn in models_filenames:\n\t\t\tprint(\"load model and predict\")\n\t\t\tmodel.load_weights(fn, by_name=True)\n\t\t\tpred = model.predict(inputs_test)\n\t\t\tpreds.append(pred)\n\t\t\tpred_val = model.predict(inputs_val)\n\t\t\tpreds_val.append(pred_val)\n\n\t\tprediction_weights = [1. 
/ len(models_filenames)] * len(models_filenames)\n\t\taccuracy, error = calculate_weighted_accuracy(prediction_weights, preds, preds_val, 2,\n\t\t\t\t\t\t\t\t\t\t\t\t\t Y=Y_test, Y_val=Y_val,\n\t\t\t\t\t\t\t\t\t\t\t\t\t ROC=True, name=f\"{middle_name}ensemble\")\n\n\t\tbest_weights = weighted_ensemble(preds_val, 2, nb_models=len(models_filenames), Y=Y_val)\n\t\taccuracy, error = calculate_weighted_accuracy(best_weights, preds, preds_val, 2,\n\t\t\t\t\t\t\t\t\t\t\t\t\t Y=Y_test, Y_val=Y_val,\n\t\t\t\t\t\t\t\t\t\t\t\t\t ROC=True, name=f\"{middle_name}ensemble_weighted\")\n\n\ndef calculate_weighted_accuracy(prediction_weights, preds, preds_val, nb_classes, Y, Y_val, ROC=True,\n\t\t\t\t\t\t\t\tname=\"ensemble\"):\n\t\"\"\"\n\tequally weighted model prediction accuracy\n\t:param prediction_weights: array with weights of single models e.g. [0,0.6,0.4]\n\t:param preds: array with the predicted classes/labels of the models\n\t:param nb_classes: how many different classes/labels exist\n\t:param X: raw-data which should be predicted\n\t:param Y: true labels for X\n\t:return: y_true_small == True labels for complete sequences, yTrue == True labels for complete subsequences, y_pred_mean == with mean predicted labels for complete sequences, y_pred_voted == voted labels for complete sequences, y_pred == predicted labels for complete subsequences\n\t\"\"\"\n\tweighted = any([i != prediction_weights[0] for i in prediction_weights])\n\n\tweighted_predictions = np.zeros((Y.shape[0], nb_classes), dtype='float32')\n\n\tfor weight, prediction in zip(prediction_weights, preds):\n\t\tweighted_predictions += weight * np.array(prediction)\n\n\tyPred = np.argmax(weighted_predictions, axis=1)\n\tyTrue = np.argmax(Y, axis=-1)\n\tyTrue_val = np.argmax(Y_val, axis=-1)\n\taccuracy = metrics.accuracy_score(yTrue, yPred) * 100\n\terror = 100 - accuracy\n\n\tif ROC:\n\t\t# plot histogram of predictions\n\t\tyPred_0 = weighted_predictions[:, 1][yTrue == 0]\n\t\tyPred_1 = weighted_predictions[:, 1][yTrue == 1]\n\t\tyPred_total = [yPred_0, yPred_1]\n\t\timport matplotlib.pyplot as plt\n\n\t\tplt.hist(yPred_total, bins=20, range=(0, 1), stacked=False, label=['no Epitope', 'true Epitope'])\n\t\tplt.legend()\n\t\tplt.savefig(directory + f\"/{name}.png\")\n\t\tplt.close()\n\n\t\tweighted_predictions_val = np.zeros((Y_val.shape[0], nb_classes), dtype='float32')\n\n\t\tfor weight, prediction in zip(prediction_weights, preds_val):\n\t\t\tweighted_predictions_val += weight * np.array(prediction)\n\n\t\tcutoff = calc_n_plot_ROC_curve(y_true=yTrue_val, y_pred=weighted_predictions_val[:, 1], name=name, plot=False)\n\t\tcalc_n_plot_ROC_curve(y_true=yTrue, y_pred=weighted_predictions[:, 1], name=name)\n\t\ttable = pd.crosstab(\n\t\t\tpd.Series(yTrue),\n\t\t\tpd.Series(yPred),\n\t\t\trownames=['True'],\n\t\t\tcolnames=['Predicted'],\n\t\t\tmargins=True)\n\t\tprint(table)\n\t\tprint(f\"Accuracy ensemble \" + (not weighted) * f\"not \" + f\"weighted: {accuracy}\")\n\t\tprint(f\"Error ensemble: {error}\")\n\n\t\t\"\"\"cutoff adapted\"\"\"\n\t\tprint(\"cutoff adapted\")\n\t\tweighted_predictions2 = weighted_predictions.copy()\n\t\tweighted_predictions[:, 1] = weighted_predictions[:, 1] > cutoff\n\t\tweighted_predictions[:, 0] = weighted_predictions[:, 1] == 0\n\t\tyPred = np.argmax(weighted_predictions, axis=1)\n\t\ttable = pd.crosstab(\n\t\t\tpd.Series(yTrue),\n\t\t\tpd.Series(yPred),\n\t\t\trownames=['True'],\n\t\t\tcolnames=['Predicted'],\n\t\t\tmargins=True)\n\t\tprint(table)\n\t\taccuracy = metrics.accuracy_score(yTrue, yPred) * 
100\n\t\terror = 100 - accuracy\n\t\tprint(f\"Accuracy ensemble \" + (not weighted) * f\"not \" + f\"weighted: {accuracy}\")\n\t\tprint(f\"Error ensemble: {error}\")\n\n\t\t\"\"\"cutoff adapted\"\"\"\n\t\tprint(\"cutoff adapted 50/50\")\n\t\tprint(f\"new cutoff {cutoff}\")\n\t\tcutoff_median = np.median(weighted_predictions2[:, 1])\n\t\tweighted_predictions2[:, 1] = weighted_predictions2[:, 1] > cutoff_median\n\t\tweighted_predictions2[:, 0] = weighted_predictions2[:, 1] == 0\n\t\tyPred = np.argmax(weighted_predictions2, axis=1)\n\t\ttable = pd.crosstab(\n\t\t\tpd.Series(yTrue),\n\t\t\tpd.Series(yPred),\n\t\t\trownames=['True'],\n\t\t\tcolnames=['Predicted'],\n\t\t\tmargins=True)\n\t\tprint(table)\n\t\taccuracy = metrics.accuracy_score(yTrue, yPred) * 100\n\t\terror = 100 - accuracy\n\t\tprint(f\"Accuracy ensemble \" + (not weighted) * f\"not \" + f\"weighted: {accuracy}\")\n\t\tprint(f\"Error ensemble: {error}\")\n\n\treturn accuracy, error\n\n\ndef weighted_ensemble(preds, nb_classes, nb_models, Y, NUM_TESTS=250):\n\t\"\"\"\n\tcalculates the best weights\n\t:param preds: array with predicted labels for X\n\t:param nb_classes: how many different classes/labels exist\n\t:param nb_models: how many different models exist\n\t:param X: raw-data which should be predicted\n\t:param Y: true labels for X\n\t:param NUM_TESTS: how many test should be done for the derteming the best weight\n\t:return: array with best weights\n\t\"\"\"\n\n\t# Create the loss metric\n\tdef log_loss_func(weights, Y, preds, nb_classes):\n\t\t''' scipy minimize will pass the weights as a numpy array\n\t\thttps://github.com/titu1994/Snapshot-Ensembles/blob/master/optimize_cifar100.ipynb\n\t\t'''\n\t\tfinal_prediction = np.zeros((Y.shape[0], nb_classes), dtype='float32')\n\n\t\tfor weight, prediction in zip(weights, preds):\n\t\t\tfinal_prediction += weight * np.array(prediction)\n\n\t\treturn log_loss(np.argmax(Y, axis=-1), final_prediction)\n\n\tbest_acc = 0.0\n\tbest_weights = None\n\n\t# Parameters for optimization\n\tconstraints = ({'type': 'eq', 'fun': lambda w: 1 - sum(w)})\n\tbounds = [(0, 1)] * len(preds)\n\tfoo = []\n\t# Check for NUM_TESTS times\n\tfor iteration in range(NUM_TESTS):\n\t\t# Random initialization of weights\n\t\tprediction_weights = np.random.random(nb_models)\n\n\t\t# Minimise the loss\n\t\tresult = minimize(log_loss_func, prediction_weights, args=(Y, preds, nb_classes), method='SLSQP',\n\t\t\t\t\t\t bounds=bounds, constraints=constraints)\n\t\t# print('Best Ensemble Weights: {weights}'.format(weights=result['x']))\n\t\tweights = result['x']\n\t\tfoo.append(weights)\n\t\taccuracy, error = calculate_weighted_accuracy(prediction_weights, preds, None, 2, Y=Y, Y_val=None,\n\t\t\t\t\t\t\t\t\t\t\t\t\t ROC=False)\n\n\t\tif accuracy > best_acc:\n\t\t\tbest_acc = accuracy\n\t\t\tbest_weights = weights\n\n\tprint(\"Best accuracy: \" + str(best_acc))\n\tprint(\"Best weigths: \" + str(best_weights))\n\treturn best_weights\n\n\nlength = []\nacc = []\nstd_div = []\n\nif __name__ == \"__main__\":\n\tdirectory = \"/home/go96bix/projects/epitop_pred/data_generator_bepipred_binary_double_cluster_0.8_0.5_seqID\"\n\t# directory = \"/home/go96bix/projects/epitop_pred/data_generator_bepipred_binary_0.5_seqID\"\n\t# directory = \"/home/go96bix/projects/epitop_pred/data_generator_bepipred_binary_0.8_seqID_checked_output\"\n\t# directory = \"/home/go96bix/projects/epitop_pred/data_generator_bepipred_binary_allProteins\"\n\tsuffix = \"both_embeddings_LSTM_equal_short_training\"\n\tcomplex_model = False\n\tnodes = 
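One detail worth noting in weighted_ensemble above: inside the search loop the accuracy is computed from the random initial prediction_weights, while the SLSQP result is stored in weights, so the returned best_weights were never the weight set that was actually scored. A sketch of the loop with the optimized weights evaluated instead (a fragment reusing the enclosing function's locals, not a standalone snippet):

for _ in range(NUM_TESTS):
	init = np.random.random(nb_models)
	result = minimize(log_loss_func, init, args=(Y, preds, nb_classes),
	                  method='SLSQP', bounds=bounds, constraints=constraints)
	weights = result['x']
	# score the *optimized* weights, then keep the best-scoring set
	accuracy, _ = calculate_weighted_accuracy(weights, preds, None, 2,
	                                          Y=Y, Y_val=None, ROC=False)
	if accuracy > best_acc:
		best_acc, best_weights = accuracy, weights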
10\n\tdropout = 0.4\n\tsequence_length = 49\n\tfull_seq_embedding = True\n\tinclude_raptorx_iupred = False\n\tinclude_dict_scores = False\n\tnon_binary = False\n\town_embedding = False\n\tboth_embeddings = True\n\tif both_embeddings:\n\t\town_embedding = False\n\ttest_and_plot(path=directory, suffix=suffix, complex_model=complex_model, nodes=nodes, dropout=dropout,\n\t\t\t\t epochs=3, use_generator=False, batch_size=64, sequence_length=sequence_length, cross_val=True,\n\t\t\t\t full_seq_embedding=full_seq_embedding, include_raptorx_iupred=include_raptorx_iupred,\n\t\t\t\t include_dict_scores=include_dict_scores, non_binary=non_binary, own_embedding=own_embedding,\n\t\t\t\t both_embeddings=both_embeddings)\n" }, { "alpha_fraction": 0.6193673014640808, "alphanum_fraction": 0.6425331234931946, "avg_line_length": 34.176597595214844, "blob_id": "89301d32d28d702de54c8bd59d1755ffcea72137", "content_id": "e4d86c6e46e5e304bfba63f1cac96b38493206f3", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 16533, "license_type": "permissive", "max_line_length": 119, "num_lines": 470, "path": "/utils/DataGenerator.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "from pathlib import Path\nimport pickle\nimport numpy as np\nimport keras\nimport os\nimport multiprocessing.pool\nfrom functools import partial\nimport keras_preprocessing.image.utils as utils\nfrom random import sample as randsomsample\nimport pandas as pd\nimport math\nfrom sklearn.preprocessing import LabelEncoder, MinMaxScaler\nfrom allennlp.commands.elmo import ElmoEmbedder\nimport torch\n\nfrom train_DL import X_Data\n\n\nclass Elmo_embedder():\n\tdef __init__(self, model_dir='/home/go96bix/projects/deep_eve/seqvec/uniref50_v2', weights=\"/weights.hdf5\",\n\t options=\"/options.json\"):\n\t\ttorch.set_num_threads(multiprocessing.cpu_count() // 2)\n\t\tself.model_dir = model_dir\n\t\tself.weights = self.model_dir + weights\n\t\tself.options = self.model_dir + options\n\t\tself.seqvec = ElmoEmbedder(self.options, self.weights, cuda_device=-1)\n\n\tdef elmo_embedding(self, X, start=None, stop=None):\n\t\t# X_trimmed = X[:, start:stop]\n\t\tassert start == None and stop == None, \"deprecated to use start stop, please trim seqs beforehand\"\n\n\t\tif type(X[0]) == str:\n\t\t\tX = np.array([list(i.upper()) for i in X])\n\t\tembedding = self.seqvec.embed_sentences(X)\n\t\tX_parsed = []\n\t\tfor i in embedding:\n\t\t\tX_parsed.append(i.mean(axis=0))\n\t\treturn X_parsed\n\n\nclass DataGenerator(keras.utils.Sequence):\n\t'Generates data for Keras'\n\n\tdef __init__(self, directory, classes=None, number_subsequences=32, dim=(32, 32, 32), n_channels=6,\n\t n_classes=10, shuffle=True, n_samples=None, seed=None, faster=True, online_training=False, repeat=True,\n\t use_spacer=False, randomrepeat=False, sequence_length=50, full_seq_embedding=False, final_set=True,\n\t include_raptorx_iupred=False, include_dict_scores=False, non_binary=False, **kwargs):\n\t\t'Initialization'\n\t\tself.directory = directory\n\t\tself.classes = classes\n\t\tself.dim = dim\n\t\tself.labels = None\n\t\tself.list_IDs = None\n\t\tself.n_channels = n_channels\n\t\tself.shuffle = shuffle\n\t\tself.seed = seed\n\t\tself.online_training = online_training\n\t\tself.repeat = repeat\n\t\tself.use_spacer = use_spacer\n\t\tself.randomrepeat = randomrepeat\n\t\tself.maxLen = kwargs.get(\"maxLen\", None)\n\t\tself.sequence_length = sequence_length\n\t\tself.full_seq_embedding = 
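In elmo_embedding above, seqvec.embed_sentences yields one array per sequence of shape (3, L, 1024): the three ELMo layers over L residues. Averaging over axis 0 collapses the layers into a single per-residue embedding, which is what the downstream LSTMs consume. Shape walk-through with a stand-in array:

import numpy as np

emb = np.random.rand(3, 49, 1024)  # stand-in for one embedded sequence
per_residue = emb.mean(axis=0)     # -> (49, 1024), ELMo layers averaged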
full_seq_embedding\n\t\tself.final_set = final_set\n\t\tself.include_raptorx_iupred = include_raptorx_iupred\n\t\tself.include_dict_scores = include_dict_scores\n\t\tself.non_binary = non_binary\n\n\t\tif full_seq_embedding:\n\t\t\tfile_format = 'pkl'\n\t\telse:\n\t\t\tfile_format = 'csv'\n\n\t\tif number_subsequences == 1:\n\t\t\tself.shrink_timesteps = False\n\t\telse:\n\t\t\tself.shrink_timesteps = True\n\n\t\tself.number_subsequences = number_subsequences\n\n\t\tif faster == True:\n\t\t\tself.faster = 16\n\t\telif type(faster) == int and faster > 0:\n\t\t\tself.faster = faster\n\t\telse:\n\t\t\tself.faster = 1\n\n\t\tself.number_samples_per_batch = self.faster\n\n\t\tself.number_samples_per_class_to_pick = n_samples\n\n\t\tif not classes:\n\t\t\tclasses = []\n\t\t\tfor subdir in sorted(os.listdir(directory)):\n\t\t\t\tif os.path.isdir(os.path.join(directory, subdir)):\n\t\t\t\t\tclasses.append(subdir)\n\t\t\tself.classes = classes\n\n\t\tself.n_classes = len(classes)\n\t\tself.class_indices = dict(zip(classes, range(len(classes))))\n\t\tprint(self.class_indices)\n\t\t# want a dict which contains dirs and number usable files\n\t\tpool = multiprocessing.pool.ThreadPool()\n\t\tfunction_partial = partial(_count_valid_files_in_directory,\n\t\t white_list_formats={file_format},\n\t\t follow_links=None,\n\t\t split=None)\n\t\tself.samples = pool.map(function_partial, (os.path.join(directory, subdir) for subdir in classes))\n\t\tself.samples = dict(zip(classes, self.samples))\n\n\t\tresults = []\n\n\t\tfor dirpath in (os.path.join(directory, subdir) for subdir in classes):\n\t\t\tresults.append(pool.apply_async(utils._list_valid_filenames_in_directory,\n\t\t\t (dirpath, {file_format}, None, self.class_indices, None)))\n\n\t\tself.filename_dict = {}\n\t\tfor res in results:\n\t\t\tclasses, filenames = res.get()\n\t\t\tfor index, class_i in enumerate(classes):\n\t\t\t\tself.filename_dict.update({f\"{class_i}_{index}\": filenames[index]})\n\n\t\tpool.close()\n\t\tpool.join()\n\n\t\tif not n_samples:\n\t\t\tself.number_samples_per_class_to_pick = min(self.samples.values())\n\n\t\tself.elmo_embedder = Elmo_embedder()\n\n\t\tself.on_epoch_end()\n\n\tdef __len__(self):\n\t\t'Denotes the number of batches per epoch'\n\t\treturn int(np.floor(len(self.list_IDs) / self.number_samples_per_batch))\n\n\tdef __getitem__(self, index):\n\t\t'Generate one batch of data'\n\t\t# Generate indexes of the batch\n\t\tindexes = self.indexes[index * self.number_samples_per_batch:(index + 1) * self.number_samples_per_batch]\n\n\t\t# Find list of IDs\n\t\tlist_IDs_temp = [self.list_IDs[k] for k in indexes]\n\n\t\t# Generate data\n\t\tX, y, sample_weight = self.__data_generation(list_IDs_temp, indexes)\n\n\t\treturn X, y, sample_weight\n\n\tdef on_epoch_end(self):\n\t\t'make X-train sample list'\n\t\t\"\"\"\n\t\t1. go over each class\n\t\t2. select randomly #n_sample samples of each class\n\t\t3. 
add selection list to dict with class as key \n\t\t\"\"\"\n\n\t\tself.class_selection_path = np.array([])\n\t\tself.labels = np.array([])\n\t\tfor class_i in self.classes:\n\t\t\tsamples_class_i = randsomsample(range(0, self.samples[class_i]), self.number_samples_per_class_to_pick)\n\t\t\tself.class_selection_path = np.append(self.class_selection_path,\n\t\t\t [self.filename_dict[f\"{self.class_indices[class_i]}_{i}\"] for i in\n\t\t\t samples_class_i])\n\t\t\tself.labels = np.append(self.labels, [self.class_indices[class_i] for i in samples_class_i])\n\n\t\tself.list_IDs = self.class_selection_path\n\n\t\t'Updates indexes after each epoch'\n\t\tself.indexes = np.arange(len(self.list_IDs))\n\t\tif self.shuffle == True:\n\t\t\tif self.seed:\n\t\t\t\tnp.random.seed(self.seed)\n\t\t\tnp.random.shuffle(self.indexes)\n\n\tdef __data_generation(self, list_IDs_temp, indexes):\n\t\t'Generates data containing batch_size samples' # X : (n_samples, *dim, n_channels)\n\t\t# Initialization\n\t\tX = np.empty((self.number_samples_per_batch), dtype=object)\n\t\tY = np.empty((self.number_samples_per_batch), dtype=int)\n\t\tX_seq = np.empty((self.number_samples_per_batch), dtype=object)\n\t\tsample_weight = np.array([])\n\n\t\t# Generate data\n\t\tfor i, ID in enumerate(list_IDs_temp):\n\t\t\t# Store sample\n\t\t\t# load tsv, parse to numpy array, get str and set as value in X[i]\n\t\t\tsample_weight = np.append(sample_weight, 1)\n\t\t\tif self.full_seq_embedding:\n\t\t\t\tif self.final_set:\n\t\t\t\t\tif self.include_raptorx_iupred:\n\t\t\t\t\t\tX[i] = np.array(pickle.load(open(os.path.join(self.directory, ID), \"rb\")))\n\n\t\t\t\t\telif self.include_dict_scores:\n\t\t\t\t\t\tX[i] = np.array(pickle.load(open(os.path.join(self.directory, ID), \"rb\")))\n\t\t\t\t\t\tX_seq[i] = \\\n\t\t\t\t\t\tpd.read_csv(os.path.join(self.directory, ID[:-4] + \".csv\"), delimiter='\\t', dtype='str',\n\t\t\t\t\t\t header=None).values[0][0]\n\n\t\t\t\t\telse:\n\t\t\t\t\t\tif self.non_binary:\n\t\t\t\t\t\t\tX[i] = pickle.load(open(os.path.join(self.directory, ID), \"rb\"))\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tX[i] = pickle.load(open(os.path.join(self.directory, ID), \"rb\"))[0]\n\t\t\t\telse:\n\t\t\t\t\tX[i] = pickle.load(open(os.path.join(self.directory, ID), \"rb\"))\n\n\t\t\telse:\n\t\t\t\tif self.final_set:\n\t\t\t\t\tif self.include_raptorx_iupred:\n\t\t\t\t\t\tX[i] = \\\n\t\t\t\t\t\tpd.read_csv(os.path.join(self.directory, ID), delimiter='\\t', dtype='str', header=None).values[\n\t\t\t\t\t\t\t0]\n\t\t\t\t\telse:\n\t\t\t\t\t\tif self.non_binary:\n\t\t\t\t\t\t\tprint(os.path.join(self.directory, ID))\n\t\t\t\t\t\t\tX[i] = pd.read_csv(os.path.join(self.directory, ID), delimiter='\\t', dtype='str',\n\t\t\t\t\t\t\t header=None).values\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tX[i] = pd.read_csv(os.path.join(self.directory, ID), delimiter='\\t', dtype='str',\n\t\t\t\t\t\t\t header=None).values[0][0]\n\t\t\t\telse:\n\t\t\t\t\tX[i] = \\\n\t\t\t\t\tpd.read_csv(os.path.join(self.directory, ID), delimiter='\\t', dtype='str', header=None)[1].values[0]\n\t\t\t# Store class\n\t\t\tY[i] = self.labels[indexes[i]]\n\n\t\tsample_weight = np.array([[i] * self.number_subsequences for i in sample_weight]).flatten()\n\t\tif self.include_raptorx_iupred:\n\t\t\tsamples_test = [i[1:] for i in X]\n\t\t\ttable_X, filtered = load_raptorx_iupred(samples_test)\n\t\t\tX = np.array([i[0] for i in X])\n\t\telif self.include_dict_scores:\n\t\t\tX = np.array([i[0] for i in X])\n\t\t\ttable_X = get_dict_scores(X_seq)\n\n\t\tif self.non_binary:\n\t\t\tslicesize = 
self.sequence_length\n\t\t\tX_2 = []\n\t\t\tY_2 = []\n\t\t\tfor x_i in X:\n\t\t\t\tif self.full_seq_embedding:\n\t\t\t\t\ty_i = x_i[1].split(\"\\t\")\n\t\t\t\telse:\n\t\t\t\t\ty_i = x_i[1]\n\t\t\t\tpossible_postions = np.where(np.array(y_i) != \"-\")[0]\n\t\t\t\tselection = np.random.permutation(possible_postions)\n\t\t\t\tfor selection_index, i in enumerate(selection):\n\t\t\t\t\tif selection_index >= 5:\n\t\t\t\t\t\tbreak\n\t\t\t\t\tstart = i - slicesize // 2\n\t\t\t\t\tstop = start + slicesize\n\t\t\t\t\tX_2.append(x_i[0][start:stop])\n\t\t\t\t\tY_2.append([1 - float(y_i[i]), float(y_i[i])])\n\t\t\tX = np.array(X_2)\n\t\t\tY_2 = np.array(Y_2)\n\t\t\tsample_weight = np.ones(len(Y_2))\n\n\t\tif not self.full_seq_embedding:\n\t\t\tif self.final_set:\n\t\t\t\toriginal_length = 49\n\t\t\telse:\n\t\t\t\toriginal_length = 50\n\t\t\tstart_float = (original_length - self.sequence_length) / 2\n\t\t\tstart = math.floor(start_float)\n\t\t\tstop = original_length - math.ceil(start_float)\n\n\t\t\tX = np.array([list(j) for j in X])\n\t\t\tX, mapping_X, slice_position = split_seq_n_times(X, self.sequence_length, self.number_subsequences)\n\t\t\tX = self.elmo_embedder.elmo_embedding(X, start, stop)\n\t\telse:\n\t\t\tX, mapping_X, slice_position = split_embedding_seq_n_times(X, self.sequence_length,\n\t\t\t self.number_subsequences)\n\n\t\tif self.non_binary:\n\t\t\treturn X, Y_2, sample_weight\n\t\tif self.include_raptorx_iupred:\n\t\t\ttable_sliced = np.empty((len(table_X), 49, 7))\n\t\t\tfor index, i in enumerate(slice_position):\n\t\t\t\tif len(table_X[index]) == 49:\n\t\t\t\t\ttable_sliced[index] = table_X[index]\n\t\t\t\telse:\n\t\t\t\t\ttable_sliced[index] = table_X[index][i:i + 49]\n\n\t\t\tX_dict = {}\n\t\t\tX_dict.update({\"seq_input\": X})\n\t\t\tX_dict.update({\"aux_input\": table_sliced})\n\t\t\treturn X_dict, keras.utils.to_categorical(Y, num_classes=self.n_classes), sample_weight\n\t\telif self.include_dict_scores:\n\t\t\ttable_sliced = np.empty((len(table_X), 49, 4))\n\t\t\tfor index, i in enumerate(slice_position):\n\t\t\t\tif len(table_X[index]) == 49:\n\t\t\t\t\ttable_sliced[index] = table_X[index]\n\t\t\t\telse:\n\t\t\t\t\ttable_sliced[index] = table_X[index][i:i + 49]\n\n\t\t\tX_dict = {}\n\t\t\tX_dict.update({\"seq_input\": X})\n\t\t\tX_dict.update({\"aux_input\": table_sliced})\n\t\t\treturn X_dict, keras.utils.to_categorical(Y, num_classes=self.n_classes), sample_weight\n\t\telse:\n\t\t\treturn X, keras.utils.to_categorical(Y, num_classes=self.n_classes), sample_weight\n\n\ndef _count_valid_files_in_directory(directory, white_list_formats, split,\n follow_links):\n\t\"\"\"\n\tCopy from keras 2.1.5\n\tCount files with extension in `white_list_formats` contained in directory.\n\n\tArguments:\n\t\tdirectory: absolute path to the directory\n\t\t\tcontaining files to be counted\n\t\twhite_list_formats: set of strings containing allowed extensions for\n\t\t\tthe files to be counted.\n\t\tsplit: tuple of floats (e.g. 
`(0.2, 0.6)`) to only take into\n\t\t\taccount a certain fraction of files in each directory.\n\t\t\tE.g.: `segment=(0.6, 1.0)` would only account for last 40 percent\n\t\t\tof images in each directory.\n\t\tfollow_links: boolean.\n\n\tReturns:\n\t\tthe count of files with extension in `white_list_formats` contained in\n\t\tthe directory.\n\t\"\"\"\n\tnum_files = len(\n\t\tlist(utils._iter_valid_files(directory, white_list_formats, follow_links)))\n\tif split:\n\t\tstart, stop = int(split[0] * num_files), int(split[1] * num_files)\n\telse:\n\t\tstart, stop = 0, num_files\n\treturn stop - start\n\n\ndef parse_amino(x, encoder):\n\tout = []\n\tfor i in x:\n\t\tdnaSeq = i[0].upper()\n\t\tencoded_X = encoder.transform(list(dnaSeq))\n\t\tout.append(encoded_X)\n\treturn np.array(out)\n\n\ndef split_embedding_seq_n_times(embeddings, slicesize, amount_samples=10):\n\tsplited_em_seqs = []\n\tmapping_slices_to_protein = []\n\tslice_position = []\n\n\tfor index, protein in enumerate(embeddings):\n\t\tif len(protein) < slicesize:\n\t\t\tprotein_pad = np.zeros((slicesize, 1024))\n\t\t\tfor i in range(0, len(protein)):\n\t\t\t\tprotein_pad[i] = protein[i]\n\t\t\tprotein = protein_pad\n\t\tfor i in np.random.choice(len(protein) - slicesize + 1, amount_samples):\n\t\t\tsplited_em_seqs.append(protein[i:i + slicesize])\n\t\t\tmapping_slices_to_protein.append(index)\n\t\t\tslice_position.append(i)\n\treturn np.array(splited_em_seqs), np.array(mapping_slices_to_protein), slice_position\n\n\ndef split_seq_n_times(seqs, slicesize, amount_samples=10):\n\tsplited_em_seqs = []\n\tmapping_slices_to_protein = []\n\tslice_position = []\n\n\tfor index, protein in enumerate(seqs):\n\t\tif len(protein) < slicesize:\n\t\t\tprotein_pad = ['-'] * slicesize\n\t\t\tfor i in range(0, len(protein)):\n\t\t\t\tprotein_pad[i] = protein[i]\n\t\t\tprotein = protein_pad\n\t\tfor i in np.random.choice(len(protein) - slicesize + 1, amount_samples):\n\t\t\tsplited_em_seqs.append(protein[i:i + slicesize])\n\t\t\tmapping_slices_to_protein.append(index)\n\t\t\tslice_position.append(i)\n\treturn np.array(splited_em_seqs), np.array(mapping_slices_to_protein), slice_position\n\n\ndef load_raptorx_iupred(samples):\n\tout = []\n\tfiltered = []\n\tshift = 20\n\tfor index, sample in enumerate(samples):\n\t\tstart = int(sample[0])\n\t\tstop = int(sample[1])\n\t\tfile = sample[2]\n\t\ttry:\n\t\t\ttable_numpy = pd.read_csv(\n\t\t\t\tos.path.join(\"/home/le86qiz/Documents/Konrad/tool_comparison/raptorx/flo_files\", f\"{file}.csv\"),\n\t\t\t\tsep=\"\\t\", index_col=None).values\n\t\t\tseq_len = table_numpy.shape[0]\n\t\t\ttable_numpy_big = np.zeros((seq_len + (shift * 2), 7))\n\t\t\ttable_numpy_big[shift:shift + seq_len] = table_numpy\n\t\t\ttable_numpy_sliced = table_numpy_big[start + shift:stop + shift]\n\n\t\texcept:\n\t\t\tfiltered.append(index)\n\t\t\tprint(f\"not able to load {file}\")\n\t\t\ttable_numpy_sliced = np.zeros((49, 7))\n\n\t\tout.append(table_numpy_sliced)\n\treturn np.array(out), np.array(filtered)\n\n\ndef get_dict_scores(seqs):\n\tout_arr = []\n\tfor index_seq, seq in enumerate(seqs):\n\t\tseq_arr = np.zeros((49, 4))\n\t\tfor index, char in enumerate(seq):\n\t\t\tchar = char.upper()\n\t\t\thydro = hydrophilicity_scores.get(char, 0.5)\n\t\t\tbeta = betaturn_scores.get(char, 0.5)\n\t\t\tsurface = surface_accessibility_scores.get(char, 0.5)\n\t\t\tantigen = antigenicity_scores.get(char, 0.5)\n\t\t\tfeatures = np.array([hydro, beta, surface, antigen])\n\t\t\tseq_arr[index] = features\n\t\tout_arr.append(seq_arr)\n\treturn 
np.array(out_arr)\n\n\ndef normalize_dict(in_dict):\n\t\"\"\"\n\tnormalizes values in dict to range [0, 1]\n\t:param in_dict:\n\t:return:\n\t\"\"\"\n\tkeys = []\n\tvalues = []\n\tfor key, value in dict(in_dict).items():\n\t\tkeys.append(key)\n\t\tvalues.append(value)\n\n\tvalues_nestedlist = np.array([[i] for i in values])\n\tmin_max_scaler = MinMaxScaler()\n\t# feed in a numpy array\n\tvalues = min_max_scaler.fit_transform(values_nestedlist).flatten()\n\n\tout_dict = {}\n\tfor i in range(len(values)):\n\t\tout_dict.update({keys[i]: values[i]})\n\n\treturn out_dict\n\n\n# hydrophilicity by parker\nhydrophilicity_scores = {'A': 2.1, 'C': 1.4, 'D': 10.0, 'E': 7.8, 'F': -9.2, 'G': 5.7, 'H': 2.1, 'I': -8.0, 'K': 5.7,\n 'L': -9.2, 'M': -4.2, 'N': 7.0, 'P': 2.1, 'Q': 6.0, 'R': 4.2, 'S': 6.5, 'T': 5.2, 'V': -3.7,\n 'W': -10.0, 'Y': -1.9}\n# Chou Fasman beta turn prediction (avg = 1)\nbetaturn_scores = {'A': 0.66, 'C': 1.19, 'D': 1.46, 'E': 0.74, 'F': 0.6, 'G': 1.56, 'H': 0.95, 'I': 0.47, 'K': 1.01,\n 'L': 0.59, 'M': 0.6, 'N': 1.56, 'P': 1.52, 'Q': 0.98, 'R': 0.95, 'S': 1.43, 'T': 0.96, 'V': 0.5,\n 'W': 0.96, 'Y': 1.14}\n# Emini surface accessibility scale (avg = 0.62)\nsurface_accessibility_scores = {'A': 0.49, 'C': 0.26, 'D': 0.81, 'E': 0.84, 'F': 0.42, 'G': 0.48, 'H': 0.66, 'I': 0.34,\n 'K': 0.97, 'L': 0.4, 'M': 0.48, 'N': 0.78, 'P': 0.75, 'Q': 0.84, 'R': 0.95, 'S': 0.65,\n 'T': 0.7, 'V': 0.36, 'W': 0.51, 'Y': 0.76}\n# Kolaskar and Tongaokar antigenicity scale (avg = 1.0)\nantigenicity_scores = {'A': 1.064, 'C': 1.412, 'D': 0.866, 'E': 0.851, 'F': 1.091, 'G': 0.874, 'H': 1.105, 'I': 1.152,\n 'K': 0.93, 'L': 1.25, 'M': 0.826, 'N': 0.776, 'P': 1.064, 'Q': 1.015, 'R': 0.873, 'S': 1.012,\n 'T': 0.909, 'V': 1.383, 'W': 0.893, 'Y': 1.161}\n\n# hydrophilicity by parker\nhydrophilicity_scores = normalize_dict(hydrophilicity_scores)\n# Chou Fasman beta turn prediction (avg = 1)\nbetaturn_scores = normalize_dict(betaturn_scores)\n# Emini surface accessibility scale (avg = 0.62)\nsurface_accessibility_scores = normalize_dict(surface_accessibility_scores)\n# Kolaskar and Tongaokar antigenicity scale (avg = 1.0)\nantigenicity_scores = normalize_dict(antigenicity_scores)\n" }, { "alpha_fraction": 0.5486908555030823, "alphanum_fraction": 0.5557941794395447, "avg_line_length": 37.43492126464844, "blob_id": "0c8750427bb655e3e3361fc58a29ca98692461e2", "content_id": "354bfa6c8868ce5447a570551c235a1cb477d75c", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12107, "license_type": "permissive", "max_line_length": 132, "num_lines": 315, "path": "/utils/DataParsing.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "import numpy as np\nfrom sklearn.preprocessing import LabelEncoder\nimport re\nseed = 42\nimport random\nrandom.seed(seed)\nfrom keras.utils import to_categorical\nfrom logging import warning\nfrom multiprocessing.dummy import Pool as ThreadPool\nimport multiprocessing\nimport operator\n\n\nclass CircularList(list):\n def __getitem__(self, x):\n if isinstance(x, slice):\n return [self[x] for x in self._rangeify(x)]\n\n index = operator.index(x)\n try:\n return super().__getitem__(index % len(self))\n except ZeroDivisionError:\n raise IndexError('list index out of range')\n\n def _rangeify(self, slice):\n start, stop, step = slice.start, slice.stop, slice.step\n if start is None:\n start = 0\n if stop is None:\n stop = len(self)\n if step is None:\n step = 1\n return range(start, stop, step)\n\n\ndef 
encode_string(maxLen=None, x=[], y=[], y_encoder=None, repeat=True, use_spacer=False, online_Xtrain_set=False,\n randomrepeat=False):\n \"\"\"\n One hot encoding for classes\n to convert the \"old\" exported int data via OHE to binary matrix\n http://machinelearningmastery.com/multi-class-classification-tutorial-keras-deep-learning-library/\n\n for dna ony to int values\n \"\"\"\n\n def pad_n_repeat_sequences(sequences, maxlen=None, dtype='int32',\n padding='post', truncating='post', value=0.):\n \"\"\"extended version of pad_sequences()\"\"\"\n if not hasattr(sequences, '__len__'):\n raise ValueError('`sequences` must be iterable.')\n lengths = []\n for x in sequences:\n if not hasattr(x, '__len__'):\n raise ValueError('`sequences` must be a list of iterables. '\n 'Found non-iterable: ' + str(x))\n lengths.append(len(x))\n num_samples = len(sequences)\n if maxlen is None:\n maxlen = np.max(lengths)\n\n # take the sample shape from the first non empty sequence\n # checking for consistency in the main loop below.\n sample_shape = tuple()\n for s in sequences:\n if len(s) > 0:\n sample_shape = np.asarray(s).shape[1:]\n break\n\n # make new array and fill with input seqs\n x = (np.ones((num_samples, maxlen) + sample_shape) * value).astype(dtype)\n for idx, s in enumerate(sequences):\n if not len(s):\n continue # empty list/array was found\n if truncating == 'pre':\n trunc = s[-maxlen:]\n elif truncating == 'post':\n trunc = s[:maxlen]\n else:\n raise ValueError('Truncating type \"%s\" not understood' % truncating)\n\n # check `trunc` has expected shape\n trunc = np.asarray(trunc, dtype=dtype)\n if trunc.shape[1:] != sample_shape:\n raise ValueError(\n 'Shape of sample %s of sequence at position %s is different from expected shape %s' %\n (trunc.shape[1:], idx, sample_shape))\n\n if repeat:\n # repeat seq multiple times\n repeat_seq = np.array([], dtype=dtype)\n while len(repeat_seq) < maxLen:\n if use_spacer:\n spacer_length = random.randint(1, 50)\n spacer = [value for i in range(spacer_length)]\n repeat_seq = np.append(repeat_seq, spacer)\n if randomrepeat:\n random_start = random.randint(0, len(trunc))\n repeat_seq = np.append(repeat_seq,\n CircularList(trunc)[random_start:random_start + len(trunc)])\n else:\n repeat_seq = np.append(repeat_seq, trunc)\n else:\n if randomrepeat:\n random_start = random.randint(0, len(trunc))\n repeat_seq = np.append(repeat_seq,\n CircularList(trunc)[random_start:random_start + len(trunc)])\n else:\n repeat_seq = np.append(repeat_seq, trunc)\n x[idx, :] = repeat_seq[-maxLen:]\n\n else:\n if padding == 'post':\n x[idx, :len(trunc)] = trunc\n elif padding == 'pre':\n x[idx, -len(trunc):] = trunc\n else:\n raise ValueError('Padding type \"%s\" not understood' % padding)\n\n return x\n\n encoder = LabelEncoder()\n\n if len(x) > 0:\n a = \"ATGCN-\"\n # TODO reverse\n # a = \"GALMFWKQESPVICYHRNDT\"\n\n encoder.fit(list(a))\n # print(encoder.classes_)\n # print(encoder.transform(encoder.classes_))\n out = []\n if type(x)==str:\n dnaSeq = re.sub(r\"[^ACGTUacgtu]\", 'N', x)\n encoded_X = encoder.transform(list(dnaSeq))\n out.append(encoded_X)\n else:\n for i in x:\n dnaSeq = re.sub(r\"[^ACGTUacgtu]\", 'N', i)\n # TODO reverse\n # dnaSeq = i[0]\n encoded_X = encoder.transform(list(dnaSeq))\n out.append(encoded_X)\n\n if online_Xtrain_set:\n X_train_categorial = []\n for seq in out:\n X_train_categorial.append(np.array(to_categorical(seq, num_classes=len(a)), dtype=np.bool))\n return X_train_categorial\n else:\n out = pad_n_repeat_sequences(out, maxlen=maxLen, 
dtype='int16', truncating='pre', value=0)\n\n # print(out[0][-10:-1])\n return np.array(to_categorical(out, num_classes=len(a)), dtype=np.bool)\n else:\n if y_encoder != None:\n encoder.fit(y)\n # print(encoder.classes_)\n # print(encoder.transform(encoder.classes_))\n if np.array(encoder.classes_ != y_encoder.classes_).all():\n warning(f\"Warning not same classes in training and test set\")\n useable_classes = set(encoder.classes_).intersection(y_encoder.classes_)\n # print(useable_classes)\n try:\n assert np.array(encoder.classes_ == y_encoder.classes_).all()\n except AssertionError:\n warning(\n f\"not all test classes in training data, only {useable_classes} predictable \"\n f\"from {len(encoder.classes_)} different classes\\ntest set will be filtered so only predictable\"\n f\" classes are included\")\n\n try:\n assert len(useable_classes) == len(encoder.classes_)\n except AssertionError:\n print(f\"not all test classes in training data, only \" \\\n f\"{useable_classes} predictable from {len(encoder.classes_)} different classes\" \\\n f\"\\ntest set will be filtered so only predictable classes are included\")\n\n if not len(useable_classes) == len(encoder.classes_):\n global X_test, Y_test\n arr = np.zeros(X_test.shape[0], dtype=int)\n for i in useable_classes:\n arr[y == i] = 1\n\n X_test = X_test[arr == 1, :]\n y = y[arr == 1]\n encoded_Y = y_encoder.transform(y)\n else:\n encoded_Y = encoder.transform(y)\n\n return to_categorical(encoded_Y, num_classes=len(y_encoder.classes_))\n\n else:\n encoder.fit(y)\n # print(encoder.classes_)\n # print(encoder.transform(encoder.classes_))\n encoded_Y = encoder.transform(y)\n return to_categorical(encoded_Y), encoder\n\n\ndef manipulate_training_data(X, Y, subSeqLength, number_subsequences):\n pool = ThreadPool(multiprocessing.cpu_count())\n\n def make_manipulation(sample):\n if len(sample) >= subSeqLength:\n X_train_manipulated = []\n # sample_long = sample.tolist() * number_subsequences\n for i in range(number_subsequences):\n start = random.randint(0, len(sample) - subSeqLength)\n subSeq = sample[start:start + subSeqLength]\n X_train_manipulated.append(subSeq)\n # i = 0\n # while i < number_subsequences:\n # X_train_manipulated.append(sample_long[i*subSeqLength:i*subSeqLength+subSeqLength])\n # i += 1\n\n return np.array(X_train_manipulated)\n else:\n return\n\n # X_train_manipulated_total = list(map(make_manipulation, X))\n X_train_manipulated_total = pool.map(make_manipulation, X)\n pool.close()\n pool.join()\n X_train_manipulated_total = np.array(X_train_manipulated_total)\n shape = X_train_manipulated_total.shape\n X_train_manipulated_total = X_train_manipulated_total.reshape(\n (len(X) * number_subsequences, shape[2], shape[3]))\n\n y = []\n for i in Y:\n y.append(number_subsequences * [i])\n\n Y = np.array(y)\n if len(Y.shape) == 2:\n Y = np.array(y).flatten()\n elif len(Y.shape) == 3:\n Y = Y.reshape((Y.shape[0] * Y.shape[1], Y.shape[2]))\n\n return X_train_manipulated_total, Y\n\n\ndef calc_shrink_size(seqlength):\n subSeqlength = 100\n for i in range(100, 400):\n if (seqlength % i == 0):\n subSeqlength = i\n\n batch_size = int(seqlength / subSeqlength)\n return subSeqlength, batch_size\n\n\ndef shrink_timesteps(X, Y, input_subSeqlength=0):\n \"\"\"\n needed for Truncated Backpropagation Through Time\n If you have long input sequences, such as thousands of timesteps,\n you may need to break the long input sequences into multiple contiguous subsequences.\n\n e.g. 
100 subseq.\n Care would be needed to preserve state across each 100 subsequences and reset\n the internal state after each 100 samples either explicitly or by using a batch size of 100.\n :param input_subSeqlength: set for specific subsequence length\n :return:\n \"\"\"\n # TODO return boolean values\n # assert input_subSeqlength != 0, \"must provide variable \\\"input_subSeqlength\\\" when using shrink_timesteps for specific subset\"\n if len(X.shape) == 3:\n seqlength = X.shape[1]\n features = X.shape[-1]\n\n if input_subSeqlength == 0:\n subSeqlength, batch_size = calc_shrink_size(seqlength)\n else:\n subSeqlength = input_subSeqlength\n batch_size = int(seqlength / subSeqlength)\n\n newSeqlength = int(seqlength / subSeqlength) * subSeqlength\n\n bigarray = []\n for sample in X:\n sample = np.array(sample[0:newSeqlength], dtype=np.bool)\n subarray = sample.reshape((int(seqlength / subSeqlength), subSeqlength, features))\n bigarray.append(subarray)\n bigarray = np.array(bigarray)\n X = bigarray.reshape((bigarray.shape[0] * bigarray.shape[1], bigarray.shape[2], bigarray.shape[3]))\n\n elif len(X.shape) == 2:\n seqlength = X.shape[0]\n features = X.shape[-1]\n\n if input_subSeqlength == 0:\n subSeqlength, batch_size = calc_shrink_size(seqlength)\n else:\n subSeqlength = input_subSeqlength\n batch_size = int(seqlength / subSeqlength)\n\n newSeqlength = int(seqlength / subSeqlength) * subSeqlength\n\n sample = np.array(X[0:newSeqlength], dtype=np.bool)\n subarray = sample.reshape((int(seqlength / subSeqlength), subSeqlength, features))\n X = np.array(subarray)\n\n else:\n assert len(X.shape) == 2 or len(\n X.shape) == 3, f\"wrong shape of input X, expect len(shape) to be 2 or 3 but is instead {len(X.shape)}\"\n y = []\n for i in Y:\n y.append(int(seqlength / subSeqlength) * [i])\n\n Y = np.array(y)\n if len(Y.shape) == 2:\n Y = np.array(y).flatten()\n elif len(Y.shape) == 3:\n Y = Y.reshape((Y.shape[0] * Y.shape[1], Y.shape[2]))\n\n return X, Y, batch_size\n" }, { "alpha_fraction": 0.6430205702781677, "alphanum_fraction": 0.6510297656059265, "avg_line_length": 30.214284896850586, "blob_id": "ecf02a798664374ada46626b652c8ead647a5ef2", "content_id": "e9e0fa7dc0a5af2c5c22294db263fa2acac05db7", "detected_licenses": [ "CC0-1.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 874, "license_type": "permissive", "max_line_length": 80, "num_lines": 28, "path": "/utils/cluster_to_histogram.py", "repo_name": "rishabhdhenkawat/epitop_pred", "src_encoding": "UTF-8", "text": "import matplotlib.pyplot as plt\nimport os\n\ndirectory = \"/home/go96bix/projects/raw_data/clustered_protein_seqs/my_cluster/\"\n\nfor root, dirs, files in os.walk(directory):\n\tfor name in files:\n\t\tif name.endswith(\"clstr\"):\n\t\t\twith open(os.path.join(root, name), \"r\") as infile:\n\t\t\t\tcount_proteins = []\n\t\t\t\tallLines = infile.read()\n\t\t\t\tclusters = allLines.split(\">Cluster\")\n\t\t\t\tfor cluster in clusters:\n\t\t\t\t\tif len(cluster) > 0:\n\t\t\t\t\t\tcount_validations = []\n\t\t\t\t\t\tproteins = cluster.strip().split(\"\\n\")\n\t\t\t\t\t\tfor index, protein in enumerate(proteins):\n\t\t\t\t\t\t\tif index > 0:\n\t\t\t\t\t\t\t\tpass\n\t\t\t\t\t\t\tpass\n\t\t\t\t\t\tcount_proteins.append(index)\n\n\t\t\t\tplt.hist(count_proteins, bins=20, log=True)\n\t\t\t\tplt.xlabel(\"number proteins clustered together\")\n\t\t\t\tplt.ylabel(\"number of clusters\")\n\t\t\t\tfigname = name.split(\"_\")[0] + \"hist.pdf\"\n\t\t\t\tplt.savefig(os.path.join(root, 
figname))\n\t\t\t\tplt.close()\n" } ]
28
Arnav190709/calculate_data
https://github.com/Arnav190709/calculate_data
295a0163105a775fd9f0bd598ee0a5bd2c1ed722
205bafdb1c58047c1ec264be5af6c4253a42a45a
9de7eaba82346325446eede7885a47cc9f02e2f5
refs/heads/main
2023-08-14T07:20:54.631386
2021-10-06T09:32:55
2021-10-06T09:32:55
414,150,088
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6431818008422852, "alphanum_fraction": 0.706818163394928, "avg_line_length": 26.5625, "blob_id": "a6e4d2381ce349612537df35ab39a59475f04bf5", "content_id": "c3482b8599d51d56603cc09e7012a27e9a77c494", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 440, "license_type": "no_license", "max_line_length": 76, "num_lines": 16, "path": "/calculate_data/calculate_data.py", "repo_name": "Arnav190709/calculate_data", "src_encoding": "UTF-8", "text": "import matplotlib.pyplot as plt\n\nx_values = range(1,1000)\ny_values = [x**2 for x in x_values] #x**2 means x squared in the values of x\n\nplt.style.use('seaborn')\nfig, ax = plt.subplots()\nax.scatter(x_values, y_values, s=10)\n#Set title and labels\nax.set_title(\"Square Numbers\", fontsize=24)\nax.set_xlabel(\"Value\", fontsize=14)\nax.set_ylabel(\"Square of Value\", fontsize=14)\n#Set the range of the axes\nax.axis([0, 1100, 0, 1100000])\n\nplt.show()" } ]
1
CodyManess/A-StarSearch
https://github.com/CodyManess/A-StarSearch
818678cf0ec3a8eecd0598c22c1ce2ff5028fdd1
ce8c6715f34b01128dfe0bbeaf81974f50d07594
cdf100cb87aa6c9988005a207f43bbc774234cf2
refs/heads/master
2020-12-12T02:34:53.478398
2020-01-15T07:23:05
2020-01-15T07:23:05
234,018,380
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7302083373069763, "alphanum_fraction": 0.7666666507720947, "avg_line_length": 37.400001525878906, "blob_id": "47f95efb6ef232379b029385906b61f458c1cb51", "content_id": "e367b0e9a0600c56e35ad8f8b2463e323b10802e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 968, "license_type": "no_license", "max_line_length": 201, "num_lines": 25, "path": "/README.md", "repo_name": "CodyManess/A-StarSearch", "src_encoding": "UTF-8", "text": "# A-Star Search\n\nThis is an open lab assignment for CSCI 5350, Intro to Artificial Intelligent.\n\nThis goal of this code is to explore the results of using different algorithms on the 8-puzzle problem.\n\n![](https://miro.medium.com/max/1046/1*_n4hcTM-akUEoWL1i05xVg.png)\n\n## a-star.py\na-star.py is a problem-solving software agent that performs A* search for the 8-puzzle problem. a-star.py should read a 8-puzzle board configuration from standard input:\n\n2 8 1\\\n0 4 3\\\n7 6 5\n\nand take two arguments(integer: heuristic to use, integer: cost per step).\n\n## random_board.py\nRandom board.py uses random actions to generate random starting states for the 8-puzzle problem. Random board.py should read the input (the goal) from standard input:\n\n0 1 2\\\n3 4 5\\\n6 7 8\n\naccept two arguments (integer: random number generator seed, integer: number of random moves to make), and print a final board configuration to standard output in the same format as the input file format.\n" }, { "alpha_fraction": 0.4413461685180664, "alphanum_fraction": 0.4660256505012512, "avg_line_length": 27.62385368347168, "blob_id": "64c5bda84479e0e339501a0fa7f8124893656c22", "content_id": "c4ea1eb117ef1487be106090fb06e4fb15ed5d2b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3120, "license_type": "no_license", "max_line_length": 79, "num_lines": 109, "path": "/random_board.py", "repo_name": "CodyManess/A-StarSearch", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\nimport sys, random, copy\n\n\nif (len(sys.argv) != 3):\n print()\n print(\"Usage: %s [seed] [number of random moves]\" %(sys.argv[0]))\n print()\n sys.exit(1)\n\n# Class state\n# Description: This class represents a particular state of the 8 number puzzle.\nclass state():\n def __init__(self, input):\n self.xpos = 0\n self.ypos = 0\n self.tiles = [[0 for x in range(3)]for y in range(3)]\n count = 0\n if input == 0:\n self.tiles = [[1,2,3],[4,5,6],[7,8,9]]\n else:\n for i in range(3):\n for j in range (3):\n self.tiles[i][j] = int(input[count])\n count += 1\n def left(self):\n if (self.ypos == 0):\n return self\n s = self.copy()\n s.tiles[s.xpos][s.ypos] = s.tiles[s.xpos][s.ypos-1]\n s.ypos -= 1\n s.tiles[s.xpos][s.ypos] = 0\n return s\n def right(self):\n if (self.ypos == 2):\n return self\n s = self.copy()\n s.tiles[s.xpos][s.ypos] = s.tiles[s.xpos][s.ypos+1]\n s.ypos += 1\n s.tiles[s.xpos][s.ypos] = 0\n return s\n def up(self):\n if (self.xpos == 0):\n return self\n s = self.copy()\n s.tiles[s.xpos][s.ypos] = s.tiles[s.xpos-1][s.ypos]\n s.xpos -= 1\n s.tiles[s.xpos][s.ypos] = 0\n return s\n def down(self):\n if (self.xpos == 2):\n return self\n s = self.copy()\n s.tiles[s.xpos][s.ypos] = s.tiles[s.xpos+1][s.ypos]\n s.xpos += 1\n s.tiles[s.xpos][s.ypos] = 0\n return s\n def __hash__(self):\n return (tuple(self.tiles[0]),tuple(self.tiles[1]),tuple(self.tiles[2]))\n def __str__(self):\n return '%d %d %d\\n%d %d %d\\n%d %d %d\\n'%(\n 
self.tiles[0][0],self.tiles[0][1],self.tiles[0][2],\n self.tiles[1][0],self.tiles[1][1],self.tiles[1][2],\n self.tiles[2][0],self.tiles[2][1],self.tiles[2][2])\n def copy(self):\n s = copy.deepcopy(self)\n return s\n\ndef main():\n \n # Get and properly store input\n random.seed(int(sys.argv[1]))\n number_of_moves = int(sys.argv[2])\n data = sys.stdin.read().split()\n table = state(data)\n x = 0\n \n # Make run moves until at requested amount\n while x < number_of_moves:\n # These moves will be 0,1,2,3 which can each be\n # associated with a particular movement direction\n # (i.e. up, down, left, right).\n move = random.randrange(4)\n if(move == 0):\n if table.up() != None:\n table = table.up()\n x+=1\n elif(move == 1):\n if table.down() != None:\n table = table.down()\n x+=1\n elif(move == 2):\n if table.left() != None:\n table = table.left()\n x+=1\n else:\n if table.right() != None:\n table = table.right()\n x+=1\n \n # Output table \n print(table)\n\n\n \n \nmain()\n" }, { "alpha_fraction": 0.5060241222381592, "alphanum_fraction": 0.5216375589370728, "avg_line_length": 31.670682907104492, "blob_id": "4ff57a647bfe9c62e9b296152038a7b8130eb0d9", "content_id": "eb7cfdf4e9912dc133888848cf99c0060af94595", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8134, "license_type": "no_license", "max_line_length": 99, "num_lines": 249, "path": "/a-star.py", "repo_name": "CodyManess/A-StarSearch", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\nimport sys, copy, heapq, math\n\n# Ensures all parameters are filled. Error message otherwise\nif (len(sys.argv) != 3):\n print()\n print(\"Usage: %s [Heuristic] [Cost per Step]\" %(sys.argv[0]))\n print()\n sys.exit(1)\n\n# Class Set\n# Description: Represents a set of states.\nclass Set():\n def __init__(self):\n self.thisSet = set()\n def add(self,entry):\n if entry is not None:\n self.thisSet.add(entry.__hash__())\n def length(self):\n return len(self.thisSet)\n def isMember(self,query):\n return query.__hash__() in self.thisSet \n def printSet(self):\n for x in self.thisSet:\n print('%d %d %d\\n%d %d %d\\n%d %d %d\\n'%(\n x[0][0],x[0][1],x[0][2],\n x[1][0],x[1][1],x[1][2],\n x[2][0],x[2][1],x[2][2]))\n\n# Class state\n# Description: This class represents a particular state of the 8 number puzzle.\nclass state():\n def __init__(self, input):\n self.tiles = [[0 for x in range(3)]for y in range(3)]\n if input == 0:\n self.tiles = [[0,1,2],[3,4,5],[6,7,8]]\n self.xpos = 0\n self.ypos = 0\n else: \n count = 0\n for i in range(3):\n for j in range (3):\n self.tiles[i][j] = int(input[count])\n if int(input[count]) == 0:\n self.xpos = i\n self.ypos = j\n count += 1\n def left(self):\n if (self.ypos == 0):\n return None\n s = self.copy()\n s.tiles[s.xpos][s.ypos] = s.tiles[s.xpos][s.ypos-1]\n s.ypos -= 1\n s.tiles[s.xpos][s.ypos] = 0\n return s\n def right(self):\n if (self.ypos == 2):\n return None\n s = self.copy()\n s.tiles[s.xpos][s.ypos] = s.tiles[s.xpos][s.ypos+1]\n s.ypos += 1\n s.tiles[s.xpos][s.ypos] = 0\n return s\n def up(self):\n if (self.xpos == 0):\n return None\n s = self.copy()\n s.tiles[s.xpos][s.ypos] = s.tiles[s.xpos-1][s.ypos]\n s.xpos -= 1\n s.tiles[s.xpos][s.ypos] = 0\n return s\n def down(self):\n if (self.xpos == 2):\n return None\n s = self.copy()\n s.tiles[s.xpos][s.ypos] = s.tiles[s.xpos+1][s.ypos]\n s.xpos += 1\n s.tiles[s.xpos][s.ypos] = 0\n return s\n def tilesDisplaced(self, goal):\n sum = 0\n for i in range(3):\n for j in range (3):\n if 
self.tiles[i][j] != 0 and (goal.tiles[i][j] != self.tiles[i][j]):\n sum = sum + 1\n return sum\n def manhattanDistance(self, goal):\n sum = 0\n for i in range(0, 3):\n for j in range(0, 3):\n tile = self.tiles[i][j]\n for m in range(0, 3):\n for n in range(0, 3):\n if tile == goal.tiles[m][n] and tile != 0:\n sum += abs(i-m) + abs(j-n)\n return sum\n def __hash__(self):\n return (tuple(self.tiles[0]),tuple(self.tiles[1]),tuple(self.tiles[2]))\n def __str__(self):\n return '%d %d %d\\n%d %d %d\\n%d %d %d\\n'%(\n self.tiles[0][0],self.tiles[0][1],self.tiles[0][2],\n self.tiles[1][0],self.tiles[1][1],self.tiles[1][2],\n self.tiles[2][0],self.tiles[2][1],self.tiles[2][2])\n def isGoal(self, other):\n for i in range(3):\n for j in range (3):\n if self.tiles[i][j] != other.tiles[i][j]:\n return False\n return True\n def copy(self):\n s = copy.deepcopy(self)\n return s\n \n# Class PriorityQueue\n# Description: Priority Queue that stores nodes and organizes them by value\nclass PriorityQueue():\n def __init__(self):\n self.thisQueue = []\n def push(self, thisNode):\n heapq.heappush(self.thisQueue, (thisNode.val, -thisNode.id, thisNode))\n def pop(self):\n return heapq.heappop(self.thisQueue)[2]\n def isEmpty(self):\n return len(self.thisQueue) == 0\n def length(self):\n return len(self.thisQueue)\n \n\nnodeid = 0\n# Class Node\n# Description: Holds information about current state, cost, path cost, depth, and the previous node\nclass Node():\n def __init__(self, val, pathCost, depth, inputTable, previousNode=None):\n global nodeid\n self.id = nodeid\n nodeid += 1\n self.val = val\n self.pathCost = pathCost\n self.depth = depth\n self.table = inputTable\n self.previousNode = previousNode\n def __str__(self):\n return 'Node: id=%d val=%d'%(self.id,self.val)\n def getTable(self):\n return self.table\n def printPath(self):\n path = []\n temp = self\n while temp is not None:\n path.append(temp.table)\n temp = temp.previousNode\n count = len(path)\n while(count != 0):\n print(path[count-1])\n count -= 1\n def getDepth(self):\n count = 0\n temp = self\n while temp is not None:\n count += 1\n temp = temp.previousNode\n return count\n \n \n# Function Heuristic\n# Description: Calculates the correct heuristic requested from the command line argument\ndef heuristic(hType, state, goal):\n if hType == 0:\n return 0\n elif hType == 1:\n return state.tilesDisplaced(goal)\n elif hType == 2:\n return state.manhattanDistance(goal)\n elif hType == 3:\n return state.tilesDisplaced(goal) + state.manhattanDistance(goal)\n \ndef main():\n #Gather input\n hType = int(sys.argv[1])\n stepCost = int(sys.argv[2])\n pathCost = 0\n data = sys.stdin.read().split()\n \n #Set goal, table, closedList, and openList\n goal = state(0)\n currState = state(data)\n closedList = Set()\n openList = PriorityQueue()\n \n #Create first node\n h = heuristic(hType, currState, goal)\n node = Node(h, 0, 0, currState)\n openList.push(node)\n maxNodes = 1\n #Check if openList is empty. 
If not, keep checking and expanding\n    while(openList.isEmpty() != True):\n        \n        node = openList.pop()\n        currState = node.getTable()\n        \n        # Skip states that have already been expanded\n        if( closedList.isMember(currState) != False ):\n            continue\n        \n        #Check if current state is goal\n        if currState.isGoal(goal) == True:\n            break\n        \n        closedList.add(currState)\n        \n        #Create children and push onto openList\n        if (currState.up() != None) and (closedList.isMember(currState.up()) != True):\n            h = heuristic(hType, currState.up(), goal)\n            f = node.pathCost + stepCost\n            openList.push(Node(h + f, f, node.depth+1, currState.up(), node))\n        \n        if (currState.down() != None) and (closedList.isMember(currState.down()) != True):\n            h = heuristic(hType, currState.down(), goal)\n            f = node.pathCost + stepCost\n            openList.push(Node(h + f, f, node.depth+1, currState.down(), node))\n        \n        if (currState.left() != None) and (closedList.isMember(currState.left()) != True):\n            h = heuristic(hType, currState.left(), goal)\n            f = node.pathCost + stepCost\n            openList.push(Node(h + f, f, node.depth+1, currState.left(), node))\n        \n        if (currState.right() != None) and (closedList.isMember(currState.right()) != True):\n            h = heuristic(hType, currState.right(), goal)\n            f = node.pathCost + stepCost\n            openList.push(Node(h + f, f, node.depth+1, currState.right(), node))\n        \n        if (closedList.length() + openList.length()) > maxNodes:\n            maxNodes = closedList.length() + openList.length()\n        \n        \n\n    # Print data and path\n    print(\"V=%d\" %closedList.length())\n    print(\"N=%d\" %maxNodes)\n    print(\"d=%d\" %node.getDepth())\n    if node.depth == 0:\n        print(\"b=0\\n\")\n    else:\n        print(\"b=%.5f\\n\" %pow(closedList.length(), 1/node.depth))\n    node.printPath()\n    \nmain()" } ]
3
futuresystems-courses/475-Analysis-of-Malware-Connections-to-Command-and-Control-Servers-Ralph
https://github.com/futuresystems-courses/475-Analysis-of-Malware-Connections-to-Command-and-Control-Servers-Ralph
239270ace9fd81c126126dfb8237c86320e0ec58
c535dc96488a625b0f5e0412ed3c485067a4dfb2
868b3b0e9ca9429f764f3f7acbf7b5814260e502
refs/heads/master
2021-01-10T11:11:15.478938
2015-12-30T06:17:10
2015-12-30T06:17:10
48,785,383
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5114551782608032, "alphanum_fraction": 0.5256754755973816, "avg_line_length": 40.3725471496582, "blob_id": "135f157a0fc1bc392317c13bc74a29a1ae3baa78", "content_id": "a32119d4a5d166a8f7f698162c709f47bdcfd883", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6329, "license_type": "no_license", "max_line_length": 113, "num_lines": 153, "path": "/geocoder.py", "repo_name": "futuresystems-courses/475-Analysis-of-Malware-Connections-to-Command-and-Control-Servers-Ralph", "src_encoding": "UTF-8", "text": "#Packet Parser Demo\n#Ray Zupancic\nimport socket\nimport pygeoip\nimport dpkt\nfrom collections import Counter\n\ndef get_geocode(address):\n try:\n gic = pygeoip.GeoIP('c:\\\\projects\\\\python\\\\beacon\\GeoLiteCity.dat')\n record = gic.record_by_addr(address)\n #get the latitude, longitude\n geoCodeLcl = (record['latitude'], record['longitude'])\n return geoCodeLcl\n except Exception as e:\n geoCodeLcl = (0,0)\n return geoCodeLcl\n\n\ndef print_markers_map(geoLat, geoLong):\n latLong = []\n element = [[] for i in range(3) ]\n #need a variable to capture the set.datapoint for Google gmaps\n geoMapFileDataPoints = \"\"\n \n #create a master LatLong list so that packets can be sorted or counted\n for index, value in enumerate(geoLat):\n latLong.append(str(geoLat[index]) + ' ' + str(geoLong[index]))\n \n #get unique lat longs and a count of same packets in terms of src, dst\n countedList = Counter(latLong)\n \n for index, value in enumerate(countedList):\n print index, value, countedList[value] \n element[index].append(value.split())\n element[index].append(countedList[value])\n #for troubleshooting\n #print element[index][0][0]\n \n #may have use for a unique list later\n #set will return a unique list\n #uniqueList = list(set(latLong)) \n #for index, value in enumerate(uniqueList):\n # print \" unique \", index, value\n \n for index, values in enumerate (element):\n dataPoint1 = '\\t\\t\\tdata.setValue(' + str(index) + ',' + '0' + ',' + str(element[index][0][0]) + ');\\n'\n dataPoint2 = '\\t\\t\\tdata.setValue(' + str(index) + ',' + '1' + ',' + str(element[index][0][1]) + ');\\n'\n dataPoint3 = '\\t\\t\\tdata.setValue(' + str(index) + ',' + '2' + ',' + str(element[index][1]) + ');\\n'\n dataPoint4 = '\\t\\t\\tdata.setValue(' + str(index) + ',' + '3' + ', \\'packet_count\\' );\\n'\n geoMapFileDataPoints = geoMapFileDataPoints + dataPoint1 + dataPoint2 + dataPoint3 + dataPoint4\n \n try:\n geoFile = open(\"c:\\\\projects\\\\python\\\\beacon\\\\geoFile.html\", 'w')\n geoFile.truncate() \n geoFile.write(get_geo_map_file_header(index+1) + geoMapFileDataPoints + get_geo_map_file_footer())\n except Exception as e:\n print e\n \n\ndef get_geo_map_file_header(count):\n geoMapFileHeader = (\n '<html>\\n'\n '<head>\\n'\n '\\t<script type=\\'text/javascript\\' src=\\'https://www.google.com/jsapi\\'></script>\\n'\n '\\t<script type=\\'text/javascript\\'>\\n'\n '\\t\\tgoogle.load(\\'visualization\\', \\'1\\', {\\'packages\\': [\\'geomap\\']});\\n'\n '\\t\\tgoogle.setOnLoadCallback(drawMap);\\n'\n\n '\\t\\tfunction drawMap() {\\n'\n '\\t\\t\\tvar data = new google.visualization.DataTable();\\n'\n '\\t\\t\\tdata.addRows(' + str(count) + ');\\n'\n '\\t\\t\\tdata.addColumn(\\'number\\', \\'LATITUDE\\', \\'Latitude\\');\\n'\n '\\t\\t\\tdata.addColumn(\\'number\\', \\'LONGITUDE\\', \\'Longitude\\');\\n'\n '\\t\\t\\tdata.addColumn(\\'number\\', \\'count\\', \\'packet_count\\')\\n'\n '\\t\\t\\tdata.addColumn(\\'string\\', 
\\'HOVER\\',\\'port\\' )\\n'\n )\n return geoMapFileHeader\n\ndef get_geo_map_file_footer():\n geoMapFileFooter = (\n '\\t\\t\\tvar options = {};\\n'\n '\\t\\t\\toptions[\\'region\\'] = \\'world\\';\\n'\n '\\t\\t\\toptions[\\'colors\\'] = [0xaa8747, 0xccB581, 0x446000];\\n'\n '\\t\\t\\toptions[\\'dataMode\\'] = \\'markers\\';\\n'\n '\\t\\t\\toptions[\\'width\\'] = 900\\n'\n '\\t\\t\\toptions[\\'height\\']= 600\\n'\n '\\n'\n '\\t\\t\\tvar container = document.getElementById(\\'map_canvas\\');\\n'\n '\\t\\t\\tvar geomap = new google.visualization.GeoMap(container);\\n'\n '\\t\\t\\tgeomap.draw(data, options);\\n'\n '\\t\\t};\\n'\n '\\n'\n '\\t</script>\\n'\n '</head>\\n'\n '\\n'\n '\\t<body>\\n'\n '\\t\\t<div id=\\'map_canvas\\'></div>\\n'\n '\\t\\t<div id=\\'map_canvas2\\'></div>\\n'\n '\\t</body>\\n'\n '\\n'\n '</html>\\n'\n ) \n return geoMapFileFooter\n\ndef main():\n\n try:\n capFile = open('c:\\\\projects\\\\python\\\\beacon\\\\pcaps\\\\malware1.pcap', 'rb')\n except Exception as e:\n print e\n \n latList = []\n longList = []\n \n pCap = dpkt.pcap.Reader(capFile)\n for (timestamp, packet) in pCap :\n try:\n ethernet = dpkt.ethernet.Ethernet(packet)\n tcpip = ethernet.data\n #the tcpip.src and tcpip.dst contain the values, but in binary\n #need to convert to decimal\n src = socket.inet_ntoa(tcpip.src)\n dst = socket.inet_ntoa(tcpip.dst)\n geoCodeSrc = get_geocode(src)\n #print (\"======================================================\")\n if (geoCodeSrc[0] != 0 or geoCodeSrc[1] != 0):\n #append the overall lat/long list\n latList.append (geoCodeSrc[0])\n longList.append(geoCodeSrc[1])\n print (\"lat\" + str(geoCodeSrc[0]) + \"long\" + str(geoCodeSrc[1]) )\n #else:\n #if the address is private, then simply use the home office lat and long - below is Denver\n #no reason to add private addresses to overall lat/long list\n #print (\"Src: \" + src + \" Src Lat: \" + '39.7392' + \" Dst Long: \" + \"104.9903\")\n geoCodeDst = get_geocode(dst)\n if (geoCodeDst[0] != 0 or geoCodeDst[1] != 0):\n latList.append(geoCodeDst[0])\n longList.append(geoCodeDst[1])\n print (\"lat\" + str(geoCodeDst[0]) + \"long\" + str(geoCodeDst[1]) )\n #else:\n #if the address is private, then simply use the home office lat and long - below is Denver\n #no reason to add private addresses to overall lat/long list\n #print (\"Dst: \" + dst + \" Dst Lat: \" + \"39.7392\", + \" Dst Long: \" + \"104.9903\") \n \n except Exception as e:\n print ( e )\n \n #create the geo map file\n print_markers_map(latList, longList)\n\nif __name__ == \"__main__\":\n main()" } ]
1
HeeJinee/Blogprojecthj
https://github.com/HeeJinee/Blogprojecthj
31447fb2ef956c6db3c8fc5bf059867b5a87e054
ba22acfdcf699644288413f960849ab6bb05d273
258aa4f7c5a0dc5827a74b66ff396b27d0ed369d
refs/heads/master
2020-05-29T14:30:08.423004
2019-06-24T05:46:38
2019-06-24T05:46:38
189,197,386
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6454183459281921, "alphanum_fraction": 0.6633465886116028, "avg_line_length": 29.5, "blob_id": "f239e62aaf1d447e7a5b37daf145f11779c739e9", "content_id": "ade9e49783b81b8d46fbceca221ae0beab8d0201", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 706, "license_type": "no_license", "max_line_length": 61, "num_lines": 16, "path": "/thirdproject/blogproject/blog/models.py", "repo_name": "HeeJinee/Blogprojecthj", "src_encoding": "UTF-8", "text": "from django.db import models\r\n\r\n# Create your models here.\r\n# class랑 블로그를 만들고 블로그라는 클래스가 어떻게 생겼는지 정의해주는 것\r\n# 제목을 쓰고 charfield라는 타이틀을 정하고 날짜랑 글 쓴 시간을 알아내는 변수를 처리하는 것\r\n# body는 텍스트 필드라는 긴 글의 형식을 저장하는 변수\r\nclass Blog(models.Model):\r\n title = models.CharField(max_length = 200)\r\n pub_date = models.DateTimeField('date published')\r\n body = models.TextField()\r\n\r\n def __str__(self):\r\n return self.title\r\n\r\n def summary(self):\r\n return self.body[:100] #글이 길어지면 자름. 글자 100개까지만 볼 수 있게" } ]
1
ChebuRashkaRF/Example-MQTT-2
https://github.com/ChebuRashkaRF/Example-MQTT-2
25ebe1e0f71379baeb1b0e1db274aadb2fc438cc
da80f9534a99a6c4f9733e9cae636b71e1536032
3756a6379ddf0f5e9b7693387e24012edf102fd2
refs/heads/main
2023-05-19T03:23:08.315306
2021-06-06T14:10:12
2021-06-06T14:10:12
374,376,197
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5402141809463501, "alphanum_fraction": 0.5841877460479736, "avg_line_length": 27.6862735748291, "blob_id": "c7b40537b5a903ed6f3790c6f71def468c0b13ec", "content_id": "cb90cf9435f0278c85bdd686f9cf6f9b54636a02", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4658, "license_type": "no_license", "max_line_length": 78, "num_lines": 153, "path": "/subscriber.py", "repo_name": "ChebuRashkaRF/Example-MQTT-2", "src_encoding": "UTF-8", "text": "import pygame\nfrom random import randint as rnd\nimport paho.mqtt.client as mqtt\nimport time\nfrom queue import Queue\n\n# Параметры для окна\nWIDTH, HEIGHT = 626, 417\nfps = 60\nGreen = 0\nRed = 0\n\n# Создание окна\npygame.init()\nsc = pygame.display.set_mode((WIDTH, HEIGHT))\n\npygame.display.set_caption(\"Subscriber\")\nclock = pygame.time.Clock()\n\n# Параметры текста\ngreen_ = pygame.font.SysFont('Arial', 18, bold=True)\nred_ = pygame.font.SysFont('Arial', 18, bold=True)\nspeed_ = pygame.font.SysFont('Arial', 66, bold=True)\n\n\n# Параметры шарика\nball_radius = 30\nball_rect = int(ball_radius * 2 ** 0.5)\nball = pygame.Rect(WIDTH-2*ball_radius, HEIGHT-ball_rect-30, ball_rect,\n ball_rect)\ndy = -1\n\n\n# Параметры блоков\nblock_list = [pygame.Rect(WIDTH // 3, 50 + 120 * j, 250, 80)\n for j in range(3)]\n\ncolor_list = [(212, 3, 3), (53, 145, 6)]\n\n# Парматры фона\nfon_ball = pygame.Rect(WIDTH-2*ball_rect, 0, 2*ball_rect, HEIGHT)\nfon_block = pygame.Rect(0, 0, 20*2+100, HEIGHT)\n\n\n# Функция для получение данных\ndef on_message(client, userdata, message):\n data = str(message.payload.decode(\"utf-8\"))\n if message.topic == 'block':\n print(\"message received cart coords: \",\n str(message.payload.decode(\"utf-8\")))\n print(\"message topic: \", message.topic)\n q1.put(data)\n elif message.topic == 'speed':\n print(\"message received ball coords: \",\n str(message.payload.decode(\"utf-8\")))\n print(\"message topic: \", message.topic)\n q2.put(data)\n\n\n# Соединение с брокером\nq1 = Queue()\nq2 = Queue()\nclient = mqtt.Client(\"Subscriber\")\nclient.on_message = on_message\nclient.connect(\"127.0.0.1\", 1883, 60)\nclient.loop_start()\nclient.subscribe('block')\nclient.subscribe('speed')\n\n\n# Начальные данные\nblock_list_result = []\ncolor_list_result = []\nrand_list = []\nrand_speed = 0\nrun = True\nj = 0\n\n# Пуск программы\nwhile run:\n client.on_message = on_message\n while not q1.empty():\n message = q1.get()\n rand_list = list(map(int, message.split(',')))\n print(\"received from queue cart: \", message)\n while not q2.empty():\n rand_speed = int(q2.get())\n print(\"received from queue ball: \", message)\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n run = False\n exit()\n client.loop_stop() # Stop loop\n client.disconnect() # disconnect\n\n # Отрисовка фона\n sc.fill((36, 135, 166))\n pygame.draw.rect(sc, (250, 150, 0), fon_block)\n pygame.draw.rect(sc, (250, 150, 0), fon_ball)\n\n # Отрисовка блоков и шарика\n if rand_list and rand_speed:\n\n # Отрисовка блоков\n [pygame.draw.rect(sc, color_list[color], block) for color, block in\n zip(rand_list, block_list)]\n\n if all(rand_list) or not any(rand_list):\n block_result = pygame.Rect(20, 20 + 50 * j, 100, 30),\n block_list_result += block_result\n color_list_result.append(color_list[rand_list[0]])\n j += 1\n if rand_list[0]:\n Green += 1\n else:\n Red += 1\n # print(block_list_result, color_list_result)\n if block_list_result:\n [pygame.draw.rect(sc, color, block) for color, block in\n 
zip(color_list_result, block_list_result)]\n if len(block_list_result) == 8:\n block_list_result = []\n color_list_result = []\n j = 0\n\n GreenText = green_.render(f'Зеленый: {Green}', 1, (255, 255, 255))\n sc.blit(GreenText, (WIDTH//3, 10))\n RedText = red_.render(f'Красный: {Red}', 1, (255, 255, 255))\n sc.blit(RedText, (WIDTH//2+20, 10))\n\n pygame.draw.circle(sc, (192, 0, 219), ball.center, ball_radius)\n\n # скорость шарика\n if rand_speed <= 10:\n ball.y += 10 * dy\n ball_speed = 10\n elif rand_speed <= 20 and rand_speed >= 10:\n ball.y += 20 * dy\n ball_speed = 20\n else:\n ball.y += 30 * dy\n ball_speed = 30\n\n SpeedText = red_.render(f'Скорость: {ball_speed}', 1, (255, 255, 255))\n sc.blit(SpeedText, (WIDTH//2-30, HEIGHT-30))\n\n # Изменение направление шарика\n if ball.centery < ball_radius or ball.centery > HEIGHT - ball_radius:\n dy = -dy\n\n pygame.display.flip()\n clock.tick(fps)\n time.sleep(1)\n" }, { "alpha_fraction": 0.5605536103248596, "alphanum_fraction": 0.5986159443855286, "avg_line_length": 33, "blob_id": "cdc337fe182534e543a1a332aa353ab28ddc95ba", "content_id": "628c9d47ac23c1477f51fcb35695ab2c7860c7e0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 643, "license_type": "no_license", "max_line_length": 79, "num_lines": 17, "path": "/publisher.py", "repo_name": "ChebuRashkaRF/Example-MQTT-2", "src_encoding": "UTF-8", "text": "from random import randint as rnd\nimport paho.mqtt.client as mqtt\nimport time\n\nclient = mqtt.Client(\"Publisher\") # создание клиента\n\nclient.connect(\"127.0.0.1\", 1883, 60) # подключение к брокеру\nclient.loop_start() # start the loop\n\n\nwhile True:\n rand_list = [rnd(0, 1) for i in range(3)] # данные для цвета\n rand_speed = rnd(5, 30) # данные для скорости\n client.publish(\"block\", str(rand_list[0])+', '+str(rand_list[1])+', ' +\n str(rand_list[2]))\n client.publish(\"speed\", rand_speed)\n time.sleep(1)\n" }, { "alpha_fraction": 0.732354998588562, "alphanum_fraction": 0.7414395809173584, "avg_line_length": 20.044116973876953, "blob_id": "5cbff95ce6afd82c952523a6f2d0b52b08523961", "content_id": "8ee5c5c2ac7ed30849a130563cc9926e2ff80983", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2235, "license_type": "no_license", "max_line_length": 293, "num_lines": 68, "path": "/README.md", "repo_name": "ChebuRashkaRF/Example-MQTT-2", "src_encoding": "UTF-8", "text": "# Пример MQTT\n\n***\n\n## Описание\n\nВ проекте показан пример использования MQTT-брокера.\n\nВ качестве брокера выбрал сервер Mosquitto MQTT.\n\nMQTT-издателем является Python программа _publisher.py_. Она отправляет рандомные данные на топики _block_ и _speed_, эти данные необходимы, чтобы подписчик MQTT мог задать цвет (красный или зеленый) для трех блоков и скорость движения шарика.\n\nПодписчиком MQTT является Python программа _subscriber.py_. 
Она получает значения от брокера и по написанной логике задает цвет (красный или зеленый) блокам и скорость шарику, также программ фиксирует количество случаев, когда три блока принимают одинаковый цвет, и отображает этот цвет слева.\n\nИспользуются:\n* Приложения на Python\n* Mosquitto\n\n---\n\n## Запуск\n\nВ терминале необходимо прописать следующие команды для запуска приложений:\n\n\n_Терминал 1_\n```\n>>> docker compose build\n\n\n>>> docker compose up\n```\n\nДалее необходимо создать виртуальное окружение с любым именем (в примере это имя _venv_) в текущей папке и активировать его (пример для Linux):\n\n_Терминал 2_\n```\n>>> python3 -m venv venv\n```\n\n_Терминал 2_\n```\n>>> source venv/bin/activate\n\n```\n\nДалее устанавливаем все пакеты указанные в _reqs.txt_\n\n_Терминал 2_\n```\n>>> pip install -r reqs.txt\n```\n\nПосле запускаем файл subscriber.py\n\n_Терминал 2_\n```\n>>> python3 subscriber.py\n```\n\n---\n\n## Результат\n\n![ПРимер №1](img/Example1.jpg \"ПРимер №1\")\n\n\n![Пример №2](img/Example2.jpg \"ПРимер №2\")\n" }, { "alpha_fraction": 0.5561224222183228, "alphanum_fraction": 0.6096938848495483, "avg_line_length": 17.380952835083008, "blob_id": "c9ef9a5cb92d7ea852c47389058885cb2a8794a1", "content_id": "7dacc74b38516a0741340f4c4d566ae5b594fbd5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "YAML", "length_bytes": 392, "license_type": "no_license", "max_line_length": 38, "num_lines": 21, "path": "/docker-compose.yml", "repo_name": "ChebuRashkaRF/Example-MQTT-2", "src_encoding": "UTF-8", "text": "version: '3.1'\n\nservices:\n \n mosquitto:\n image: eclipse-mosquitto:1.6\n hostname: mosquitto\n container_name: mosquitto\n ports:\n - \"1883:1883\"\n - \"9001:9001\"\n \n publisher:\n build:\n context: .\n dockerfile: publisher.Dockerfile\n command: \"python3 publisher.py\"\n container_name: publisher\n depends_on:\n - mosquitto\n network_mode: host\n\n \n" } ]
4
harjeevs17/instagramMessageBot
https://github.com/harjeevs17/instagramMessageBot
ca7d196d14923de73458d570325701ae710cd7e5
b0a69bd62b2bf01cfd3e256ca579473c6881be68
c9328ec05af097dd48823473a0409b78329b9c40
refs/heads/master
2022-12-09T07:40:52.338930
2020-08-25T14:49:36
2020-08-25T14:49:36
290,241,617
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.683591902256012, "alphanum_fraction": 0.7136812210083008, "avg_line_length": 33.86885070800781, "blob_id": "aad9d2f37ac430611e8b88b2d6102786f64f7126", "content_id": "649505e6c6c378f528f84a870a8cb2dbd3502471", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2127, "license_type": "no_license", "max_line_length": 135, "num_lines": 61, "path": "/instagram.py", "repo_name": "harjeevs17/instagramMessageBot", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom time import sleep\nfrom selenium.webdriver.support import expected_conditions as EC\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.common.keys import Keys\n\ndriver = webdriver.Chrome()\ndriver.get('https://www.instagram.com/')\n\nsleep(3)\n\nusername = driver.find_element_by_xpath(\n '/html/body/div[1]/section/main/article/div[2]/div[1]/div/form/div[2]/div/label/input')\nusername.send_keys(\"\"\"Enter Email address\"\"\")\n\npassword = driver.find_element_by_xpath(\n '/html/body/div[1]/section/main/article/div[2]/div[1]/div/form/div[3]/div/label/input')\npassword.send_keys(\"\"\"Enter password\"\"\")\n\nsignin = driver.find_element_by_xpath(\n '/html/body/div[1]/section/main/article/div[2]/div[1]/div/form/div[4]/button')\nsignin.click()\nsleep(3)\nnotnow = driver.find_element_by_xpath(\n '/html/body/div[1]/section/main/div/div/div/div/button')\nnotnow.click()\nsleep(3)\nagainNotNow = driver.find_element_by_xpath(\n '/html/body/div[4]/div/div/div/div[3]/button[2]')\nagainNotNow.click()\nsleep(3)\nmessage = driver.find_element_by_xpath(\n '/html/body/div[1]/section/nav/div[2]/div/div/div[3]/div/div[2]/a')\nmessage.click()\nsleep(5)\n\ndirect = driver.find_element_by_xpath('/html/body/div[1]/section/div/div[2]/div/div/div[1]/div[1]/div/div[3]/button')\ndirect.click()\nsleep(3)\nsearch_user = driver.find_element_by_xpath('/html/body/div[4]/div/div/div[2]/div[1]/div/div[2]/input')\nsearch_user.send_keys('tannyable_')\nsleep(5)\n\n\nel=driver.find_elements_by_xpath(\"/html/body/div[4]/div/div/div[2]/div[2]/div[1]\")[0]\nel.click()\n\n#user = driver.find_elements_by_class_name('-qQT3')\n#user[0].click()\n#action = webdriver.common.action_chains.ActionChains(driver)\n#action.move_to_element_with_offset(el, 225, 179)\n#action.click()\n\nsleep(2)\nselectUser = driver.find_element_by_xpath('/html/body/div[4]/div/div/div[1]/div/div[2]')\nselectUser.click()\nsleep(5)\ntextbox = driver.find_element_by_xpath('/html/body/div[1]/section/div/div[2]/div/div/div[2]/div[2]/div/div[2]/div/div/div[2]/textarea')\nfor i in range(1000):\n textbox.send_keys('Kya chal la !!!!!')\n textbox.send_keys(Keys.ENTER)\n" } ]
1
shubhi13/Python_Scripts
https://github.com/shubhi13/Python_Scripts
c2a82a550fa26cc8a578b1e1263689c29b87facc
9bd8fd402c61f24355a4d6b0d0815ad913becac3
1967af25a4e18b053682b2dbe8116f4bd38a0479
refs/heads/master
2023-08-22T00:45:21.423803
2021-10-29T06:54:16
2021-10-29T06:54:16
422,703,282
0
0
Unlicense
2021-10-29T20:20:32
2021-10-29T20:20:33
2021-10-29T20:40:02
null
[ { "alpha_fraction": 0.6343779563903809, "alphanum_fraction": 0.6419752836227417, "avg_line_length": 33, "blob_id": "7046436b4e6e24871a06cee21637c28c63379bfe", "content_id": "dce185480812aecbeb826ad4f592c9b4308a13e9", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1053, "license_type": "permissive", "max_line_length": 98, "num_lines": 31, "path": "/Automation/Spam Bot/src/Modules/RageSpam.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "from Modules.Colours import *\nfrom Modules.Spammer import *\nfrom time import sleep\nimport random\nimport string\n\ndef message():\n chars = string.ascii_lowercase + string.ascii_uppercase + string.digits\n num = random.randint(5,10)\n spam = \"\"\n for x in range(num):\n spam = spam + \"\".join(random.choice(chars))\n return(spam)\n\ndef rage():\n cyan(\"\\n-----RAGE SPAM-----\")\n print(\"Rage spam is just a combination of random letters and numbers that make no sense.\",\n \"\\nUsed primarily only for the purpose of absolute spam.\\n\")\n try:\n count = int(input(\"Enter the number of spam messages you want to send \\n> \"))\n sleep = float(input(\"Enter time delay(in seconds) between each message \\n> \"))\n except:\n red(\"ERROR : Enter Only Numbers\")\n grey(\"Press enter to exit \")\n input()\n sys.exit(0)\n print(\"Open Your Social Media Platform and select your text box. Wait for atleast 15 seconds\")\n time.sleep(15)\n for x in range(count):\n msg = message()\n spammer(msg,sleep)" }, { "alpha_fraction": 0.49514561891555786, "alphanum_fraction": 0.6990291476249695, "avg_line_length": 13.714285850524902, "blob_id": "0b97db934c5419d088f0add8e773861bd54d1a2f", "content_id": "3519de2dee8d2bc6120286ad91baa7cf45616560", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 103, "license_type": "permissive", "max_line_length": 17, "num_lines": 7, "path": "/Machine Learning/Manual-Parameter-Tuner/requirements.txt", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "streamlit==0.71.0\nnumpy==1.18.5\npandas==1.1.4\nsklearn\nmatplotlib==3.3.2\nseaborn==0.11.0\nxgboost==1.3.1\n" }, { "alpha_fraction": 0.7373737096786499, "alphanum_fraction": 0.7373737096786499, "avg_line_length": 18.200000762939453, "blob_id": "f4ae307698414f194736086c358367e191b6e1d7", "content_id": "d3b394544c610910c48cb2572b4133d346fa4822", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 99, "license_type": "permissive", "max_line_length": 27, "num_lines": 5, "path": "/Basic Scripts/request.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "import requests\r\n\r\nresponse = requests.get(\"\")\r\nprint(response.json())\r\nprint(response.status_code)" }, { "alpha_fraction": 0.7154695987701416, "alphanum_fraction": 0.7292817831039429, "avg_line_length": 29.16666603088379, "blob_id": "c84bbf93226780eaa5e5181b3ae1079350b979cd", "content_id": "b0f8fe939a35f10e77971fc299006d8cf4f8d506", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 362, "license_type": "permissive", "max_line_length": 57, "num_lines": 12, "path": "/Automation/QrScanner/QrScanner.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "import cv2 ##import the module opencv\nimport numpy as np ##import the module numpy\nimport pyzbar.pyzbar as pyzbar 
##import the module pyzbar\n\nimg = cv2.imread(\"QR.png\") ##read your image \ndecode_QR = pyzbar.decode(img) ##function to decode QR\n\nfor i in decode_QR:\n\tprint(\"your scanned QR :\",i.data)\n\ncv2.imshow(\"QR \", img) ##display Cropped image\ncv2.waitKey(0)\n" }, { "alpha_fraction": 0.5752754211425781, "alphanum_fraction": 0.5875152945518494, "avg_line_length": 34.5217399597168, "blob_id": "4c4a110aa3752ef7ebb884b2dad9508ef1438fe3", "content_id": "9abb8a0ce4b1728d43f455ab1e8dbb4cc98dfeb0", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 817, "license_type": "permissive", "max_line_length": 104, "num_lines": 23, "path": "/Algorithms/sieve_of_eratosthenes.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "n = int(input())\n\nsieve = [True] * n\n\n# inspect until i is equal to sqrt(n) because the greatest divider of n is less than or equal to sqrt(n)\nm = int(n ** 0.5)\nfor i in range(2, m + 1):\n    if sieve[i] == True: # if i is a prime number,\n        for j in range(i+i, n, i): # let all multiples of i to false\n            sieve[j] = False\n\nprime_number_list = [i for i in range(2, n) if sieve[i] == True]\n" }, { "alpha_fraction": 0.6483705043792725, "alphanum_fraction": 0.6535162925720215, "avg_line_length": 27.049999237060547, "blob_id": "82e2b1ec6d8198c07c82da5625986443733fefc2", "content_id": "0cb757e8f01d4e5fb5983a45da2c7eee582368a5", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 583, "license_type": "permissive", "max_line_length": 73, "num_lines": 20, "path": "/Basic Scripts/password-generator.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "import string\r\nfrom random import *\r\nfrom os import sys\r\n\r\ndef password_create():\r\n    characters= string.ascii_letters + string.punctuation + string.digits\r\n    password=\"\".join(choice(characters) for x in range(randint(8,16)))\r\n    print(\"Your Password is: {}\".format(password))\r\n\r\n\r\nprint(\"Password Generator\")\r\npassword_create()\r\nchange=input(\"Do you want another password? Press 'Y' or 'N': \")\r\nprint('\\n')\r\nwhile change=='Y' or change=='y':\r\n    password_create()\r\n    change=input(\"Do you want another password? 
Press 'Y' or 'N': \")\r\n print('\\n')\r\nelse:\r\n sys.exit()\r\n\r\n" }, { "alpha_fraction": 0.4261763095855713, "alphanum_fraction": 0.43699297308921814, "avg_line_length": 30.3389835357666, "blob_id": "f28688a69c6082e332e572d59a1b9e47d90b1034", "content_id": "0a896522c217504b17dbdbf94341cee410a2d576", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1849, "license_type": "permissive", "max_line_length": 79, "num_lines": 59, "path": "/Data Structures/stack.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "from os import sys\nclass Stack(object):\n def __init__(self):\n self.array=[]\n self.reverse=[]\n self.top=0\n\n def push(self,item):\n self.array.append(item)\n self.top+=1\n \n\n def pop(self):\n self.top-=1\n self.array.pop()\n \n\n def isEmpty(self):\n return self.array==[]\n\n def menu(self):\n char=0\n while char<6:\n print(\"Press 1 -> To add a element in the Stack\")\n print(\"Press 2 -> To remove a element from the Stack\")\n print(\"Press 3 -> To veiw the top element of the Stack\")\n print(\"Press 4 -> To display all the elements of the Stack\")\n print(\"Press 5 -> To Exit\")\n print('\\n')\n char=int(input(\"Enter your choice: \"))\n print('\\n')\n if char==1:\n val=int(input(\"Enter the value you want to add: \"))\n self.push(val)\n print(\"Your item {} has been added.\".format(val))\n print('\\n')\n elif char==2:\n if self.isEmpty():\n print(\"Stack underflowed! Please add items into the stack\")\n print('\\n')\n break\n else:\n self.pop()\n elif char==3:\n print(\"The top item is -> {}\".format(self.array[self.top-1]))\n print('\\n')\n elif char==4:\n print(\"The Stack: \")\n self.reverse=self.array[::-1]\n for i in range (0,len(self.reverse)):\n if i == 0:\n print(\"{} <- Top Item\".format(self.reverse[i]))\n else:\n print(self.reverse[i])\n else:\n sys.exit()\n \nObject1=Stack()\nObject1.menu()\n" }, { "alpha_fraction": 0.4249422550201416, "alphanum_fraction": 0.4411085546016693, "avg_line_length": 19.571428298950195, "blob_id": "079112fe9d54c71778194a7f382b71c0f1809f67", "content_id": "231dc9dc9129bd1c4c0425b8c0a5b01a5edb19de", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 433, "license_type": "permissive", "max_line_length": 52, "num_lines": 21, "path": "/Games/tower_of_hanoi.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "def toh(n,s,t,d):\n if n==1:\n print(s,'-->',d)\n return\n toh(n-1,s,d,t)\n print(s,'-->',d)\n toh(n-1,t,s,d)\n\nif __name__==\"__main__\":\n while 1:\n\n n = int(input('''Enter number of disks:'''))\n\n if n<0:\n print(\"Try Again with a valid input\")\n continue\n elif n==0:\n break\n toh(n,'Source','Temporary','Destination')\n\n print('ENTER 0 TO EXIT')\n\n" }, { "alpha_fraction": 0.6741071343421936, "alphanum_fraction": 0.6741071343421936, "avg_line_length": 31.14285659790039, "blob_id": "4716bf99934dc2391ddd4c4d2efb16f335524a04", "content_id": "23cd0a3dc16b2607fce1df94607d1701f55a31f8", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 224, "license_type": "permissive", "max_line_length": 41, "num_lines": 7, "path": "/Basic Scripts/swap.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "x=int(input(\"Enter first number:\\n\"))\ny=int(input(\"Enter second number:\\n\"))\nprint(\"Before swapping:\\n\",x,\"\\n\",y,\"\\n\")\n#Inputting two numbers 
from user\nx,y=y,x\n#Swapping two variables\nprint(\"After swapping:\\n\",x,\"\\n\",y,\"\\n\")" }, { "alpha_fraction": 0.6104651093482971, "alphanum_fraction": 0.6395348906517029, "avg_line_length": 43.86666488647461, "blob_id": "73c40f3fe0a24db61a26dd4a106d2424bbe38f1c", "content_id": "6410da01bdee2f763f359ad1a8035df6ab605a3c", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 688, "license_type": "permissive", "max_line_length": 108, "num_lines": 15, "path": "/Automation/spiro-text.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "import turtle\r\n\r\nmake = turtle.Pen()\r\nturtle.bgcolor('mistyrose') #You can change the bg to your fav color\r\ncolors = [\"red\", \"orange\", \"yellow\", \"green\", \"blue\", \"purple\", \"gray\", \"brown\", \"aqua\", \"sea green\"]\r\nyour_name = turtle.textinput(\"Waiting for your Input :) \", \"Enter Something or it will return nothing :D\")\r\nsides = int(turtle.numinput(\"Add your Color Sides\", \"How Many Color Sides Do You Want (1-10)\", 5, 1, 10))\r\n\r\nfor x in range(100):\r\n make.pencolor(colors[x%sides%10])\r\n make.penup()\r\n make.forward(x*4)\r\n make.pendown()\r\n make.write(your_name, font=(\"Times\", int( (x*2 + 4) /4), \"bold\")) #You can add your custom fonts here..!\r\n make.left(360/sides+2)\r\n" }, { "alpha_fraction": 0.640406608581543, "alphanum_fraction": 0.6467598676681519, "avg_line_length": 38.400001525878906, "blob_id": "a1955b8dbc0dfb12fa9ea709d41bba364da3b297", "content_id": "d4c0e0dffcc1548eed9cf47f396d3018abba309e", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 787, "license_type": "permissive", "max_line_length": 98, "num_lines": 20, "path": "/Automation/Spam Bot/src/Modules/Static.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "from Modules.Spammer import *\nfrom Modules.Colours import *\nfrom time import sleep\n\ndef static():\n cyan(\"\\n-----FIXED MESSAGE SPAM-----\")\n print(\"This is the most iconic, yet basic spamming method. Spams a fixed string n times\\n\")\n message = input(\"Enter the String you want to spam \\n> \")\n try:\n count = int(input(\"Enter the number of times you want to spam the message \\n> \"))\n sleep = float(input(\"Enter time delay(in seconds) between each message \\n> \"))\n except:\n red(\"ERROR : Enter Only Numbers\")\n grey(\"Press enter to exit \")\n input()\n sys.exit(0)\n print(\"Open Your Social Media Platform and select your text box. Wait for atleast 15 seconds\")\n time.sleep(15)\n for i in range(count):\n spammer(message,sleep)" }, { "alpha_fraction": 0.41407090425491333, "alphanum_fraction": 0.4760625958442688, "avg_line_length": 37.562129974365234, "blob_id": "2a79108a0cebf17ee5c59d9669dc3b705194fd4f", "content_id": "6085c55520f6340eb6bf31450e80607f972ddec7", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 13034, "license_type": "permissive", "max_line_length": 361, "num_lines": 338, "path": "/README.md", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "# Python_Scripts\n----------------------------------\nIt contains all the Python Programs and Scripts, whether it's a GUI, basic, Data Structures, or even Machine Learning Algorithms etc. 
It's a collection of some great **Python** scripts from basic to advance levels for automating some monotonous tasks.This project contains 5 folders:\n\n- Algorithms\n- Data Structures\n- Automation programs\n- Basic Scripts\n- GUI & Bot\n- Machine Learning\n \nThis project is for newbies and rookies; who are new to the idea of Open Source contributions but want to be an Open Source Contributor, to advance coder; who have prolific knowledge in __Open Source Contribution__. If your PR gets merged for [**Hacktoberfest-21**](https://hacktoberfest.digitalocean.com/), your PR will get \"**hacktoberfest-accepeted**\" label.\n\n# How to Contribute:\n--------------------------------\nAll contributions are welcomed. Follow these __Guidelines__:\n- Step 1: Fork this Repo. (This will create a separate clonned repo of this one for you)\n- Step 2: Make the Changes. Keep in mind the changes should be according to the contributing guidelines.\n- Step 3: Commit the changes.\n- Step 4: Click on Pull Request.\n- Step 5: If PR gets merged, edit the Readme.md file and add yourself in the **Contributors' Wall** section.\n- For more detailed steps for __How to Contribute__ or __How to Sync__, follow [__CONTRIBUTING.md__](https://github.com/DeepNinja07x/Python_Scripts/blob/master/CONTRIBUTING.md) file.\n#### Voila..... You just made your first Pull Request.\n\n# Contributors Wall:\nAll *Contributions* of the *Contributors* can be found in the [__Contributions__](https://github.com/DeepNinja07x/Python_Scripts/graphs/contributors) section.\n<table>\n <tr>\n <td align=\"center\">\n <a href=\"https://github.com/DeepNinja07x\">\n <img src=\"https://avatars0.githubusercontent.com/u/52314477?s=400&u=1887ecc3afa1e867af50336a3af7ed56b21dc604&v=4\" width=\"100px;\" alt=\"DeepNinja07x\"/><br />\n <sub>\n <b>\n <strong>Deepraj Rakshit</strong>\n </b>\n </sub>\n </a>\n </td>\n <td align=\"center\">\n <a href=\"https://github.com/subhayu99\">\n <img src=\"https://avatars3.githubusercontent.com/u/38143013?s=400&u=28405ea45018cee30268bd61408515033741e87e&v=4\" width=\"100px;\" alt=\"subhayu99\"/><br />\n <sub>\n <b>\n <strong>Subhayu Kumar Bala</strong>\n </b>\n </sub>\n </a>\n </td>\n <td align=\"center\">\n <a href=\"https://github.com/Alekhya003\">\n <img src=\"https://avatars2.githubusercontent.com/u/69395178?s=400&u=c33cc751d7e9bc66730e91e4a901ee9ba2e01a0b&v=4\" width=\"100px;\" alt=\"Alekhya003\"/><br />\n <sub>\n <b>\n <strong>Alekhya Roy</strong>\n </b>\n </sub>\n </a>\n </td>\n <td align=\"center\">\n <a href=\"https://github.com/abirbhattacharya82\">\n <img src=\"https://avatars3.githubusercontent.com/u/70687014?s=400&u=896c00dc4e1927f41364a56d38d6c91be133f387&v=4\" width=\"100px;\" alt=\"abirbhattacharya82\"/><br />\n <sub>\n <b>\n <strong>Abir Bhattacharya</strong>\n </b>\n </sub>\n </a>\n </td>\n <td align=\"center\">\n <a href=\"https://github.com/utsavgadhiya\">\n <img src=\"https://avatars1.githubusercontent.com/u/44888423?s=400&u=32974fa39ea0d4be02d27896da2637ea4bbfb9f5&v=4\" width=\"100px;\" alt=\"utsavgadhiya\"/><br />\n <sub>\n <b>\n <strong>Utsav Gadhiya</strong>\n </b>\n </sub>\n </a>\n </td>\n <td align=\"center\">\n <a href=\"https://github.com/SVijayB\">\n <img src=\"https://avatars1.githubusercontent.com/u/54742586?s=400&u=73e90870560e3707468ca877afef6a74ca2bdd92&v=4\" width=\"100px;\" alt=\"SVijayB\"/><br />\n <sub>\n <b>\n <strong>Vijay</strong>\n </b>\n </sub>\n </a>\n </td>\n </tr>\n <tr>\n <td align=\"center\">\n <a href=\"https://github.com/sarthak1905\">\n <img 
src=\"https://avatars0.githubusercontent.com/u/61883822?s=400&v=4\" width=\"100px;\" alt=\"sarthaksaxena\"><br />\n <sub>\n <b>\n <strong>Sarthak Saxena</strong>\n </b>\n </sub>\n </a>\n </td>\n <td align=\"center\">\n <a href=\"https://github.com/Raghavarora27\">\n <img src=\"https://avatars2.githubusercontent.com/u/66276244?s=460&u=16746f7b8f2f8c3db7f803b25269078ef34d2e4e&v=4\" width=\"100px;\" alt=\"RaghavArora\"><br />\n <sub>\n <b>\n <strong>Raghav Arora</strong>\n </b>\n </sub>\n </a>\n </td>\n <td align=\"center\">\n <a href=\"https://github.com/TheFenrisLycaon\">\n <img src=\"https://avatars0.githubusercontent.com/u/54172306?s=460&u=b4834344142abbc0f0b742dd579cc9054c112d8c&v=4\" width=\"100px;\" alt=\"TheFenrisLycaon\"><br />\n <sub>\n <b>\n <strong>Rishabh Anand</strong>\n </b>\n </sub>\n </a>\n </td>\n <td align=\"center\">\n <a href=\"https://github.com/someshnarwade\">\n <img src=\"https://avatars3.githubusercontent.com/u/37812370?s=400&u=a3e9ead47d15081bcb783a8e8fc02b70bfa4add8&v=4\" width=\"100px;\" alt=\"someshnarwade\"><br />\n <sub>\n <b>\n <strong>Somesh Narwade</strong>\n </b>\n </sub>\n </a>\n </td> \n <td align=\"center\">\n <a href=\"https://github.com/pastre\">\n <img src=\"https://avatars0.githubusercontent.com/u/6251198?s=400&u=aaa4f9c03f6527b760212ab2784b9be8a2ca3990&v=4\" width=\"100px;\" alt=\"pastre\"><br />\n <sub>\n <b>\n <strong>Bruno Pastre</strong>\n </b>\n </sub>\n </a>\n </td>\n <td align=\"center\">\n <a href=\"https://github.com/MasterMeet\">\n <img src=\"https://avatars2.githubusercontent.com/u/58728390?s=400&v=4\" width=\"100px;\" alt=\"MasterMeet\"><br />\n <sub>\n <b>\n <strong>MasterMeet</strong>\n </b>\n </sub>\n </a>\n </td>\n </tr>\n <tr>\n <td align=\"center\">\n <a href=\"https://github.com/pedrosv20\">\n <img src=\"https://avatars1.githubusercontent.com/u/14371245?s=400&u=16b13f4b1ee6692260713a859af340eb1fc05518&v=4\" width=\"100px;\" alt=\"pedrosv20\"><br />\n <sub>\n <b>\n <strong>Pedro Vargas</strong>\n </b>\n </sub>\n </a>\n </td>\n <td align=\"center\">\n <a href=\"https://github.com/amaanalam0707\">\n <img src=\"https://avatars.githubusercontent.com/u/69518300?v=4\" width=\"100px;\" alt=\"amaanalam0707\"><br />\n <sub>\n <b>\n <strong>Amaan Alam</strong>\n </b>\n </sub>\n </a>\n </td>\n <td align=\"center\">\n <a href=\"https://github.com/Shuchi2211\">\n <img src=\"https://avatars.githubusercontent.com/u/69510684?v=4\" width=\"100px;\" alt=\"Shuchi2211\"><br />\n <sub>\n <b>\n <strong>Shuchismita Mukhopadhyay</strong>\n </b>\n </sub>\n </a>\n </td>\n <td align=\"center\">\n <a href=\"https://github.com/vidhichadha2507\">\n <img src=\"https://avatars.githubusercontent.com/u/74606188?v=4\" width=\"100px;\" alt=\"vidhichadha2507\"><br />\n <sub>\n <b>\n <strong>Vidhi Chadha</strong>\n </b>\n </sub>\n </a>\n </td>\n <td align=\"center\">\n <a href=\"https://github.com/chandankumar1307\">\n <img src=\"https://avatars.githubusercontent.com/u/70543351?v=4\" width=\"100px;\" alt=\"chandankumar1307\"><br />\n <sub>\n <b>\n <strong>Chandan Kumar Saha</strong>\n </b>\n </sub>\n </a>\n </td>\n <td align=\"center\">\n <a href=\"https://github.com/ervaibhavkumar\">\n <img src=\"https://avatars.githubusercontent.com/u/28685411?v=4\" width=\"100px;\" alt=\"ervaibhavkumar\"><br />\n <sub>\n <b>\n <strong>Vaibhav</strong>\n </b>\n </sub>\n </a>\n </td>\n </tr>\n <tr>\n <td align=\"center\">\n <a href=\"https://github.com/rohansharma4050\">\n <img src=\"https://avatars.githubusercontent.com/u/69635604?v=4\" width=\"100px;\" alt=\"rohansharma4050\"><br 
/>\n <sub>\n <b>\n <strong>Rohan Sharma</strong>\n </b>\n </sub>\n </a>\n </td>\n <td align=\"center\">\n <a href=\"https://github.com/priyanshusingh0610\">\n <img src=\"https://avatars.githubusercontent.com/u/62669697?v=4\" width=\"100px;\" alt=\"priyanshusingh0610\"><br />\n <sub>\n <b>\n <strong>Priyanshu Singh</strong>\n </b>\n </sub>\n </a>\n </td>\n <td align=\"center\">\n <a href=\"https://github.com/ABHIGPT401\">\n <img src=\"https://avatars.githubusercontent.com/u/90904360?v=4\" width=\"100px;\" alt=\"ABHIGPT401\"><br />\n <sub>\n <b>\n <strong>Abhishek Gupta</strong>\n </b>\n </sub>\n </a>\n </td>\n <td align=\"center\">\n <a href=\"https://github.com/Spyy004\">\n <img src=\"https://avatars.githubusercontent.com/u/54628130?v=4\" width=\"100px;\" alt=\"Spyy004\"><br />\n <sub>\n <b>\n <strong>Ayush Pawar</strong>\n </b>\n </sub>\n </a>\n </td>\n <td align=\"center\">\n <a href=\"https://github.com/OfficialNMN\">\n <img src=\"https://avatars.githubusercontent.com/u/51831819?v=4\" width=\"100px;\" alt=\"OfficialNMN\"><br />\n <sub>\n <b>\n <strong>Namanjeet Singh</strong>\n </b>\n </sub>\n </a>\n </td>\n <td align=\"center\">\n <a href=\"https://github.com/prathimacode-hub\">\n <img src=\"https://avatars.githubusercontent.com/u/74645302?v=4\" width=\"100px;\" alt=\"prathimacode-hub\"><br />\n <sub>\n <b>\n <strong>Prathima Kadari</strong>\n </b>\n </sub>\n </a>\n </td>\n </tr>\n <tr>\n <td align=\"center\">\n <a href=\"https://github.com/rustzz-27/\">\n <img src=\"https://avatars.githubusercontent.com/u/53906186?v=4\" width=\"100px;\" alt=\"rustzz-27\"><br />\n <sub>\n <b>\n <strong>Reshu Agarwal</strong>\n </b>\n </sub>\n </a>\n </td> \n <td align=\"center\">\n <a href=\"https://github.com/IndraP24\">\n <img src=\"https://avatars.githubusercontent.com/u/64627762?v=4\" width=\"100px;\" alt=\"IndraP24\"><br />\n <sub>\n <b>\n <strong>Indrashis Paul</strong>\n </b>\n </sub>\n </a>\n </td> \n <td align=\"center\">\n <a href=\"https://github.com/ShreyaDayma-cse\">\n <img src=\"https://avatars.githubusercontent.com/u/90904747?v=4\" width=\"100px;\" alt=\"ShreyaDayma-cse\"><br />\n <sub>\n <b>\n <strong>Shreya Dayma</strong>\n </b>\n </sub>\n </a>\n </td> \n <td align=\"center\">\n <a href=\"https://github.com/sagarparmar881\">\n <img src=\"https://avatars.githubusercontent.com/u/47896458?v=4\" width=\"100px;\" alt=\"sagarparmar881\"><br />\n <sub>\n <b>\n <strong>Sagar Parmar</strong>\n </b>\n </sub>\n </a>\n </td>\n <td align=\"center\">\n <a href=\"https://github.com/coodos\">\n <img src=\"https://avatars.githubusercontent.com/u/69296233?v=4\" width=\"100px;\" alt=\"coodos\"><br />\n <sub>\n <b>\n <strong>Merul</strong>\n </b>\n </sub>\n </a>\n </td>\n <td align=\"center\">\n <a href=\"https://github.com/Shivang-Agarwal11\">\n <img src=\"https://avatars.githubusercontent.com/u/65328598?v=4\" width=\"100px;\" alt=\"Shivang-Agarwal11\"><br />\n <sub>\n <b>\n <strong>Shivang Agarwal</strong>\n </b>\n </sub>\n </a>\n </td>\n </tr>\n</table>\n" }, { "alpha_fraction": 0.7068741917610168, "alphanum_fraction": 0.7114137411117554, "avg_line_length": 19.03896141052246, "blob_id": "e66a77e4c69d8baab49f18ce6bb8665a4c4ae155", "content_id": "675b602691604ed3d9c1d1e2633ec6fcffd56e4c", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1542, "license_type": "permissive", "max_line_length": 69, "num_lines": 77, "path": "/Basic Scripts/web_scrapper.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": 
"#!/usr/bin/env python\n\n#Requirements\n #requests\n #bs4\n\n\nfrom bs4 import BeautifulSoup\nimport requests\nfrom selenium import webdriver\n\n\nurl= raw_input(\"enter url: \")\nsource=requests.get(url)\n\n\ndef get_chrome_web_driver(options):\n return webdriver.Chrome(\"./chromedriver\", chrome_options=options)\n\n\ndef get_web_driver_options():\n return webdriver.ChromeOptions()\n\n\ndef set_ignore_certificate_error(options):\n options.add_argument('--ignore-certificate-errors')\n\n\ndef set_browser_as_incognito(options):\n options.add_argument('--incognito')\n\nsoup=BeautifulSoup(source.text,'html')\n\ntitle=soup.find('title')\nprint(\"this is with html tags :\",title)\n\nqwery=soup.find('h1')\nprint(\"this is without html tags:\",qwery.text) \n\n\nlinks=soup.find('a')\nprint(links)\nprint(links['href']) \nprint(links['class'])\n\nmany_link=soup.find_all('a')\ntotal_links=len(many_link)\nprint(\"total links in my website :\",total_links)\nprint()\nfor i in many_link[:6]:\n print(i)\n\nsecond_link=many_link[1]\nprint(second_link)\nprint()\nprint(\"href is :\",second_link['href'])\n\nnested_div=second_link.find('div')\nprint(nested_div)\nprint()\nz=(nested_div['class'])\nprint(z)\nprint(type(z))\nprint()\nprint(\"class name of div is :\",\" \".join(nested_div['class'])) \n\nwiki=requests.get(\"https://en.wikipedia.org/wiki/World_War_II\")\nsoup=BeautifulSoup(wiki.text,'html')\nprint(soup.find('title'))\n\nww2_contents=soup.find_all(\"div\",class_='toc')\nfor i in ww2_contents:\n print(i.text)\n\noverview=soup.find_all('table',class_='infobox vevent')\nfor z in overview:\n print(z.text)" }, { "alpha_fraction": 0.5537848472595215, "alphanum_fraction": 0.587649405002594, "avg_line_length": 40.83333206176758, "blob_id": "b7caee925134eeb9d534f50bcf045af9aea72e0e", "content_id": "2b6216531f33d30f7031009217bf2735e14b0b63", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1004, "license_type": "permissive", "max_line_length": 105, "num_lines": 24, "path": "/Algorithms/ShellSort.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "def shellSort(array: list):\n \"\"\"Returns the array printed using the shell sort algorithm\n \t>>> import random\n \t>>> unordered_list = [i for i in range(10)]\n \t>>> random.shuffle(unordered_list)\n \t>>> shellSort(unordered_list)\n \t[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]\n \t\"\"\"\n array_size = len(array)\n # Break array in n/2, n/4, n/8 intervals\n interval = array_size // 2\n while interval > 0:\n for middle_position in range(interval, array_size):\n temp = array[middle_position]\n middle_position_number = middle_position\n while middle_position_number >= interval and array[middle_position_number - interval] > temp:\n array[middle_position_number] = array[middle_position_number - interval]\n middle_position_number -= interval\n array[middle_position_number] = temp\n interval = interval // 2\n print(array)\n\nif __name__ == \"__main__\":\n shellSort([9, 8, 3, 7, 5, 6, 4, 1, 9, 8, 3, 7, 5, 6, 4, 1])\n" }, { "alpha_fraction": 0.552342414855957, "alphanum_fraction": 0.6084442138671875, "avg_line_length": 25.44444465637207, "blob_id": "c93fdc3be3bde7db501b282d789c283e70d66357", "content_id": "582160b159043196e079f8eb1b89b2be618b1477", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1729, "license_type": "permissive", "max_line_length": 93, "num_lines": 63, "path": "/Automation/Invisibility Cloak.py", 
"repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "import numpy as np\r\nimport cv2\r\nimport time\r\n\r\nprint(\"\"\"\r\nBE PREPARE YOU WILL BE INVISIBLE SOON............\r\n\"\"\")\r\n\r\nif __name__ == '__main__':\r\n cap = cv2.VideoCapture(0)\r\n \r\n#For capturing output video\r\n fourcc = cv2.VideoWriter_fourcc(*'XVID')\r\n out = cv2.VideoWriter('invisibleYou.avi' , fourcc, 20.0, (640,480))\r\n time.sleep(2)\r\n background = 0\r\n\r\n#capturing background\r\nfor i in range(30):\r\n ret, background = cap.read()\r\n\r\n #capturing image\r\nwhile(cap.isOpened()):\r\n ret, img = cap.read()\r\n \r\n if not ret:\r\n break\r\n#HSV stands for Hue Satrurated Value\r\n hsv=cv2.cvtColor(img, cv2.COLOR_BGR2HSV)\r\n\r\n#YOU CAN CHANGE THE COLOR VALUE BELOW ACCORDING TO YOUR CLOTH COLOR\r\n lower_red = np.array([0,120,70])\r\n upper_red = np.array([10,255,255])\r\n mask1 = cv2.inRange(hsv , lower_red , upper_red)\r\n \r\n lower_red = np.array([170,120,70])\r\n upper_red = np.array([180,255,255])\r\n mask2 = cv2.inRange(hsv , lower_red , upper_red)\r\n \r\n mask1 = mask1 + mask2\r\n\r\n#Open and clean the mask image\r\n mask1=cv2.morphologyEx(mask1, cv2.MORPH_OPEN ,np.ones((3,3) , np.uint8) , iterations=2)\r\n \r\n mask2=cv2.morphologyEx(mask1, cv2.MORPH_DILATE ,np.ones((3,3) , np.uint8) , iterations=1)\r\n \r\n mask2 = cv2.bitwise_not(mask1)\r\n\r\n#Generating the final output\r\n res1 = cv2.bitwise_and(background, background, mask=mask1)\r\n res2 = cv2.bitwise_and(img, img, mask=mask2)\r\n \r\n final_output = cv2.addWeighted(res1 , 1, res2 , 1, 0)\r\n \r\n cv2.imshow('Invisibility Game' , final_output)\r\n k=cv2.waitKey(10)\r\n if k==27:\r\n print(\"Escape hit, closing...\")\r\n break\r\n \r\ncap.release()\r\nout.release()\r\ncv2.destroyAllWindows()\r\n" }, { "alpha_fraction": 0.6774743795394897, "alphanum_fraction": 0.6808874011039734, "avg_line_length": 26.904762268066406, "blob_id": "ddff5de85c1ae7317ff6e8c16fb6b883808c10f0", "content_id": "8e2606bbd382861bd1e64a4a844cb05563ec752b", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 586, "license_type": "permissive", "max_line_length": 74, "num_lines": 21, "path": "/GUI & Bot/covid-tracker.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "from covid import Covid\nimport matplotlib.pyplot as plt\n\ncovid=Covid() #storing calling function of Covid\n\nname=input(\"Enter your country name: \")\n\nvirusdata=covid.get_status_by_country_name(name)\n\nremove=['id', 'country', 'latitude', 'longitude', 'last_update']\nfor i in remove:\n virusdata.pop(i)\n\nall_val = virusdata.pop('confirmed')\nids = list(virusdata.keys())\nvalue = [str(i) for i in virusdata.values()]\n\nplt.pie(value,labels=ids, colors = ['r','y', 'g', 'b'], autopct='%1.1f%%')\nplt.title(\"Country: \"+ name.upper() + \"\\nTotal Cases: \" + str(all_val))\nplt.legend()\nplt.show()\n" }, { "alpha_fraction": 0.6675191521644592, "alphanum_fraction": 0.6743392944335938, "avg_line_length": 39.482757568359375, "blob_id": "5b465e7d6970f7b7806f64c9787709fdc77f191b", "content_id": "384bede258c1f9f16111aada3a7b7700132ee89f", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1173, "license_type": "permissive", "max_line_length": 104, "num_lines": 29, "path": "/Basic Scripts/QuotesScraper.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "from bs4 import BeautifulSoup\nimport 
requests\nimport json\nbase_url=\"https://www.goodreads.com/quotes/tag/{0}?page={1}\" # the url of the site from where quotes \n#will be scrapped emotion and page number will be inserted later\n\ndef process(content,emotion): # function to clean the content of the webpage \n soup=BeautifulSoup(content,features=\"html5lib\")\n quotes_div=soup.find_all(\"div\",attrs={\"class\",\"quote\"}) \n quotes=[]\n for div in quotes_div:\n q_text=div.find(\"div\",attrs={\"class\",\"quoteText\"})\n quote=(q_text.text.strip().split('\\n')[0])\n author=q_text.find(\"span\",attrs={\"class\",\"authorOrTitle\"}).text.strip()\n q_dict={\"quote\":quote,\"author\":author,\"emotion\":emotion}\n quotes.append(q_dict)\n return quotes\n\nemotions=['friend','sad'] # you can select any other emotion\nquotes=[]\nfor emotion in emotions:\n for index in range(1,5): # here 5 pages have been taken \n final_url=base_url.format(emotion,index)\n page=requests.get(final_url)\n content=page.text\n quotes+=process(content,emotion)\n\nwith open('quote.json','w') as file: # dump the quotes in json file\n json.dump(quotes,file)" }, { "alpha_fraction": 0.5965772867202759, "alphanum_fraction": 0.6080864667892456, "avg_line_length": 43.55356979370117, "blob_id": "a2816b03888e6ef4cc34055bd7c5628ebb0b5fb9", "content_id": "ff2e6bbc7622c75edfcae0630c77269422571e0d", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9992, "license_type": "permissive", "max_line_length": 169, "num_lines": 224, "path": "/Games/Drawing/gridModule.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "# GRID MODULE BY TIM\n#This module contains 5 classes (grid, menu, pixelArt, pixel,colorPallet)\n#the grid class is the abstract/parent class, the pixelArt and menu class inherit from it.(they are childs)\n#The color pallet class inherits from pixel art class as they both use similar methods.\n#The pixel art and colorPallet class creates pixel objects to\n#populate the grid, therfore they are dependant of pixel.\n#------------------------------------------------------\n#Class Descriptions are given above each class.\nimport pygame\npygame.init()\n\n#Main abstract class (parent)\n#This class is capable of creating a grid containing different rows and different columns, bases upon those arguments it\n#will automatically alter the pixel size. To display the grid simply call ____.drawGrid(). 
To find the item in the grid\n#that was clicked on call ____.clicked().\nclass grid(object):\n def __init__(self, win, width, height, cols, rows, showGrid=False, startx = 0, starty = 0, bg=(255,255,255)):\n self.width = width\n self.height = height\n self.cols = cols\n self.rows = rows\n self.bg = bg\n self.startx = startx\n self.starty = starty\n self.lineThick = 1\n self.showGrid = showGrid #If we should show the black outline\n self.isSelected = None\n self.grid = None\n\n self.screen = win\n pygame.display.update()\n \n def getGrid(self):\n return self.grid #Return the grid list\n\n def drawGrid(self, lineColor=(0,0,0)): #This will draw the lines to create the grid, this is done so by simply creating overlapping boxes\n x = self.startx\n y = self.starty\n \n for i in range(self.cols):\n y = self.starty + self.height\n if i > 0:\n x += (self.width / self.cols)\n for j in range(self.rows):\n y -= self.height / self.rows \n pygame.draw.rect(self.screen, (0,0,0),(x, y, self.width / self.cols, self.height/ self.rows), 1)\n\n def clicked(self, pos): #Return the position in the grid that user clicked on\n try:\n t = pos[0]\n w = pos[1]\n g1 = int((t - self.startx) / self.grid[0][0].w)\n g2 = int((w - self.starty) / self.grid[0][0].h)\n\n self.selected = self.grid[g1][g2]\n\n return self.grid[g1][g2]\n \n except IndexError: #If we run into an index error that means that the user did not click on a position in the grid\n return False\n\n def isSelected(self): #Return the currently selected object\n return self.selected\n\n\n#This is the concrete class used to draw pixels in a grid\n#The draw grid function in this class uses polymorphism to create a grid\n#full of pixel objects. It still contains the methods from the aboce class\n#has its own specific clearGrid(). Using ____.clearGrid() will simply set the color\n#to the original background color.\nclass pixelArt(grid): \n def drawGrid(self):\n self.grid = []\n # Create pixels in the grid\n for i in range(self.cols):\n self.grid.append([])\n for j in range(self.rows):\n self.grid[i].append(pixel(i, j, self.width, self.height, self.cols, self.rows, self.startx, self.starty, self.showGrid))\n self.grid[i][j].show(self.screen, (255,255,255), self.lineThick)\n if self.showGrid:\n self.grid[i][j].show(self.screen, (0,0,0), 1,False,True)\n \n #This generates the neighbours of each pixel so that we can draw multiple thickness of lines\n for c in range(self.cols):\n for r in range(self.rows):\n self.grid[c][r].getNeighbors(self.grid)\n \n self.selected = self.grid[self.cols - 1][self.rows - 1]\n\n \n def clearGrid(self): #This will set all of the pixels to the same color as the background color\n for pixels in self.grid:\n for p in pixels:\n if self.showGrid: #If the grid is to be showing we must redraw the pixels so that we can see the grid after we change their color\n p.show(self.screen, self.bg, 0)\n p.show(self.screen, (0,0,0), 1)\n else:\n p.show(self.screen, self.bg, 0)\n\n\n#This class is responsible for creating the color pallet in the bottom left hand side of the screen\n#and is a concrete class. The setColor() method simply takes a list of colors and assigns them to pixels\n#in the grid. 
This can only be called after the grid has been created.\nclass colorPallet(pixelArt):\n def setColor(self, colorList): #The colorList argument passed to the function must be equal to the number of pixels in the grid\n colourCount = 0\n\n for pixels in self.getGrid():\n for p in pixels:\n p.show(self.screen, colorList[colourCount],0)\n colourCount += 1\n\n\n#This class creates basic grid menus that can contain text.\n#It uses all of the methods from the parent grid class and is a concrete class\n#The setText method takes a list of strings and displays them in the grid.\nclass menu(grid):\n def setText(self, textList): #The textList argument passed must be equal to the number of spots in the grid\n \n self.grid = []\n # Create textObjects in the grid\n for i in range(self.cols):\n self.grid.append([])\n for j in range(self.rows):\n self.grid[i].append(textObject(i, j, self.width, self.height, self.cols, self.rows, self.startx, self.starty))\n #Set the text for each of those objects\n c = 0\n for spots in self.getGrid():\n for s in spots:\n s.showText(self.screen, textList[c])\n c += 1\n\n \n#This class is responsible for displaying text and these objects are added into the grid. \n#The showText() method will display the text while the show() method will draw a square showing thr grid.\nclass textObject():\n def __init__(self, i, j, width, height, cols, rows, startx=0, starty=0):\n self.col = i #The column of the current instance in the grid\n self.row = j #The row of the current instance in the grid\n self.rows = rows #Total amount of rows\n self.cols = cols #Total amount of columns\n self.w = width / cols\n self.h = height / rows\n self.x = self.col * self.w + startx\n self.y = self.row * self.h + starty\n self.text = ''\n \n def showText(self, win, txt): #This will render and draw the text on the screen\n self.text = txt\n myFont = pygame.font.SysFont('comicsansms', 15)\n text = myFont.render(self.text, 1, (0,0,0))\n win.blit(text, (self.x + (self.w /2 - text.get_width() / 2), self.y + (self.h/2 - text.get_height() / 2))) #This will make sure the text is center in the screen.\n\n def show(self, screen, color, st, outline=False): #Draws a square displaying the area in the grid \n pygame.draw.rect(screen, color, (self.x, self.y, self.w, self.h), st)\n\n\n#This pixel object is responsible for stroing a color and displaying it to the screen. 
These objects are added into the grid.\n#The methods are named according to what they do.\nclass pixel():\n def __init__(self, i,j, width, height, cols, rows, startx=0, starty=0, showGrid=False):\n self.col = i #The column of the current instance\n self.row = j #The row of the current instance\n self.color = (255,255,255)\n self.rows = rows #Amount of rows in whole grid\n self.cols = cols #Amount of cols in whole grid\n self.showGrid = showGrid\n self.w = width / cols\n self.h = height / rows\n self.x = self.col * self.w + startx\n self.y = self.row * self.h + starty\n self.neighbors = [] \n \n def show(self, screen, color, st, outline=False, first=False): #Display the current pixel\n if not(first):\n self.color = color\n \n pygame.draw.rect(screen, color, (self.x, self.y, self.w, self.h), st)\n if self.showGrid and not(outline):\n pygame.draw.rect(screen, (0,0,0), (self.x, self.y, self.w, self.h), 1)\n \n def getPos(self):\n return (self.col * self.w, self.row * self.h)#Return a tuple (x,y) of the top left co-ords of the pixel\n\n def click(self, screen, color): #If the pixel has been clicked on call this and it will display the new color and set the color attribute for that pixel\n self.show(screen, color, 0)\n self.color = color\n\n def getColor(self):\n return self.color\n\n def getNeighbors(self, grid):\n # Get the neighbours of each pixel in the grid, this is used for drawing thicker lines\n i = self.col #the var i is responsible for denoting the current col value in the grid\n j = self.row #the var j is responsible for denoting the current row value in the grid\n rows = self.rows\n cols = self.cols\n\n #Horizontal and vertical neighbors\n if i < cols-1: #Right\n self.neighbors.append(grid[i + 1][j])\n if i > 0: #Left\n self.neighbors.append(grid[i - 1][j])\n if j < rows-1: #Up\n self.neighbors.append(grid[i][j + 1])\n if j > 0 : #Down\n self.neighbors.append(grid[i][j - 1])\n\n \n #Diagonal neighbors \n if j > 0 and i > 0: #Top Left\n self.neighbors.append(grid[i - 1][j - 1])\n\n if j + 1 < rows and i > -1 and i - 1 > 0: #Bottom Left\n self.neighbors.append(grid[i - 1][j + 1])\n\n if j - 1 < rows and i < cols - 1 and j - 1 > 0: #Top Right\n self.neighbors.append(grid[i + 1][j - 1])\n \n if j < rows - 1 and i < cols - 1: #Bottom Right\n self.neighbors.append(grid[i + 1][j + 1])\n \n def neighborsReturn(self):\n return self.neighbors #Return a list of the neighbours of the current pixel\n\n \n\n\n" }, { "alpha_fraction": 0.466855525970459, "alphanum_fraction": 0.4753541052341461, "avg_line_length": 26.153846740722656, "blob_id": "01a969d3c134fb4d89c070c59aab572434c568a9", "content_id": "51ede60c9143fb86c047075522f35edc9fd1df60", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1765, "license_type": "permissive", "max_line_length": 61, "num_lines": 65, "path": "/Basic Scripts/steganographer.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "def read_data():\n with open('image.jpg', 'rb') as f:\n content = f.read()\n offset = content.index(bytes.fromhex('FFD9'))+2\n f.seek(offset)\n data = f.read().decode('ascii')\n return data\n\ndef insert_data():\n with open('image.jpg', 'ab') as f:\n print('Choose an option\\n \\\n 1. Encode a file\\n \\\n 2. 
Encode a text')\n        ch = int(input('Enter choice : '))\n        if ch == 1:\n            file_to_encode = input('Enter file to encode : ')\n            with open(file_to_encode, 'rb') as fe:\n                data = fe.read()\n                f.write(data)\n        if ch == 2:\n            data = input('Enter data to add : ')\n            data += ' '\n            f.write(data.encode('ascii'))\n\ndef delete_data():\n    with open('image.jpg', 'rb') as f: \n        content = f.read()\n        offset = content.index(bytes.fromhex('FFD9'))+2\n        f.seek(0)\n        original_data = f.read(offset)\n    \n    with open('image.jpg', 'wb') as f:\n        f.write(original_data)\n        print('Past data has been deleted')\n\n\n\n\n\nprint('Choices available\\n \\\n        1. Input data into image\\n \\\n        2. Read data from image\\n \\\n        3. Delete extra data')\n\nchoice = int(input('Enter choice : '))\n\nif choice == 1:\n    past_data = read_data()\n    if past_data != '':\n        print(f'Past data exists : \\'{past_data}\\'')\n        c = input('Delete past data [y/n] : ')\n        if c == 'y':\n            delete_data()\n            insert_data()\n        else:\n            insert_data()\n    else:\n        insert_data()\n    print('Inserted data : \\'', read_data(), '\\'')\n\nif choice == 2:\n    print(read_data())\n\nif choice == 3:\n    delete_data()\n" }, { "alpha_fraction": 0.5384615659713745, "alphanum_fraction": 0.5602564215660095, "avg_line_length": 27.925926208496094, "blob_id": "1c6e255d5ba1f2976d7da9b736dec3071f92af90", "content_id": "fb93bc4202929959132c88ddda82b1519907591f", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 780, "license_type": "permissive", "max_line_length": 74, "num_lines": 27, "path": "/Data Strucrures/Middle traversing linked list.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "class Node:\n    def __init__(self, data):\n        self.data = data \n        self.next = None\nclass LinkedList: \n    def __init__(self):\n        self.start = None\n    def push(self, value):\n        new_node = Node(value)\n        new_node.next = self.start\n        self.start = new_node \n    def printMiddle(self):\n        slow_ptr = self.start\n        fast_ptr = self.start\n        if self.start is not None:\n            while (fast_ptr is not None and fast_ptr.next is not None):\n                fast_ptr = fast_ptr.next.next\n                slow_ptr = slow_ptr.next\n            print(\"The middle element is: \", slow_ptr.data)\nlist1 = LinkedList()\nlist1.push(5)\nlist1.push(9) \nlist1.push(12)\nlist1.push(20) \nlist1.push(3) \nlist1.push(14) \nlist1.printMiddle()" }, { "alpha_fraction": 0.6245421171188354, "alphanum_fraction": 0.6739926934242249, "avg_line_length": 26.299999237060547, "blob_id": "8f9baacce683b70c8b4afa05524905cff09830f9", "content_id": "4ef445c4f1bee6aa34bbdf870f4bf9f7449b33a5", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1092, "license_type": "permissive", "max_line_length": 105, "num_lines": 40, "path": "/GUI & Bot/GUI form.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "from tkinter import *\n\ndef submit():\n    getf=first.get()\n    getl=last.get()\n    geta=age.get()\n\n    file=open('database.txt','a')\n    file.write(getf+\",\"+getl+\",\"+str(geta)+\"\\n\")\n    file.close()\n    print(\"user registered\")\n\n    entry_first.delete(0,END)\n    entry_last.delete(0,END)\n    entry_age.delete(0,END)\n    \nwindow=Tk()\nwindow.title(\"Registration Form\")\nwindow.geometry(\"350x350\")\n\nl1=Label(window, text=\"Please Register Now\", bg=\"black\", fg=\"white\", font=\"times 12\")\nl1.pack()\n\nfirst=StringVar()\nlast=StringVar()\nage=IntVar()\n\nLabel(window, text=\"First Name: \",bg=\"black\", 
fg=\"white\").place(x=50,y=60)\nentry_first=Entry(window,textvariable=first)\nentry_first.place(x=150,y=60)\n\nLabel(window, text=\"Last Name: \",bg=\"black\", fg=\"white\").place(x=50,y=100)\nentry_last=Entry(window, textvariable=last)\nentry_last.place(x=150,y=120)\n\nLabel(window, text=\"Age: \",bg=\"black\", fg=\"white\").place(x=50,y=1800)\nentry_age=Entry(window, textvariable=age)\nentry_age.place(x=150,y=1800)\n\nButton(window, text=\"Submit\", bg=\"black\", fg=\"white\", font=\"times 12\", command=submit).place(x=100,y=240)\n" }, { "alpha_fraction": 0.6178266406059265, "alphanum_fraction": 0.6483516693115234, "avg_line_length": 34.65217208862305, "blob_id": "253a02f83edc7328b0b9357dd00a199947a1f4f5", "content_id": "21b3d6e989a7145cabe04400c4f9614af77a5987", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 819, "license_type": "permissive", "max_line_length": 102, "num_lines": 23, "path": "/Automation/Spam Bot/src/Modules/RandNumbers.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "from Modules.Spammer import *\nfrom Modules.Colours import *\nfrom time import sleep\nimport random\nimport sys\n\ndef randomnum():\n cyan(\"\\n-----LARGE NUMBERS SPAM-----\")\n print(\"This spamming method spams random numbers from 1 - 999999999 each as a seperate message\\n\")\n try:\n count = int(input(\"Enter the number of spam messages you want to send \\n> \"))\n sleep = float(input(\"Enter time delay(in seconds) between each message \\n> \"))\n except:\n red(\"ERROR : Enter Only Numbers\")\n grey(\"Press enter to exit \")\n input()\n sys.exit(0)\n print(\"Open Your Social Media Platform and select your text box. Wait for atleast 15 seconds\")\n time.sleep(15)\n for x in range(count):\n num = random.randint(1,999999999)\n term = str(num)\n spammer(term,sleep)" }, { "alpha_fraction": 0.40351414680480957, "alphanum_fraction": 0.4157906770706177, "avg_line_length": 48.18113327026367, "blob_id": "eb7346a341c97a3483880e5e5f30b06405ff7def", "content_id": "2cedd2eb83af12e0e916c7d067f46b58b3f934fd", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13033, "license_type": "permissive", "max_line_length": 133, "num_lines": 265, "path": "/Automation/mail-automation/mailing/views.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "import smtplib\nfrom django.http import request\nfrom django.shortcuts import render,redirect\n# from django.http import HttpResponse\nfrom django.core.files.storage import FileSystemStorage\nfrom email.mime.multipart import MIMEMultipart\nfrom email.mime.text import MIMEText\nfrom email.mime.base import MIMEBase\nfrom validate_email import validate_email\nfrom email import encoders\nimport pandas as pd\n# from email.mime.image import MIMEImage\nfrom django.conf import settings\nimport os\nimport time\n\n# Create your views here.\nimage_src=\"\"\nerror=\"No Error\"\nmatter=\"\"\nvid=\"\"\nval=\"\"\nmail=\"\"\npswd=\"\"\ncsv_file_name=\"\"\ni=0\ncolnam=\"\"\nsubject=\"\"\nend=0\nfilename=''\nfilename1=''\ndef home(request):\n return render(request,'mailing/home.html')\ndef starting(request):\n global matter,val,pswd,mail,csv_file_name,colnam,i,subject,end,error,filename1,filename,vid,image_src\n val=request.POST['pdfs']\n mail=request.POST['mail']\n pswd=request.POST['pass']\n matter=request.POST['mailtext']\n vid=request.POST['video']\n directry=settings.MEDIA_ROOT\n 
files=os.listdir(directry)\n filtered_files=[file for file in files if file.endswith(\".csv\")]\n for file in filtered_files:\n\t path_to_file = os.path.join(directry, file)\n\t os.remove(path_to_file)\n filtered_files_pdf=[file for file in files if file.endswith(\".pdf\")]\n for file in filtered_files_pdf:\n\t path_to_file_pdf = os.path.join(directry, file)\n\t os.remove(path_to_file_pdf)\n filtered_files_mp4=[file for file in files if file.endswith(\".mp4\")]\n for file in filtered_files_mp4:\n\t path_to_file_pdf = os.path.join(directry, file)\n\t os.remove(path_to_file_pdf)\n i=int(request.POST['counting'])\n subject=request.POST['subject']\n end=int(request.POST['ending'])\n print(pswd,mail,end)\n if(request.method==\"POST\"):\n csv_file=request.FILES['csv']\n csv_file_name=csv_file.name\n fs=FileSystemStorage()\n fs.save(csv_file.name,csv_file)\n data=pd.read_csv(settings.MEDIA_ROOT / csv_file_name,encoding=\"cp1252\")\n if(vid==\"NO\"):\n if(val=='0'):\n return render(request,'mailing/pdf0.html',{'Columns':str(data.columns)[8:-19]})\n elif(val=='1'):\n return render(request,'mailing/pdf1.html',{'Columns':str(data.columns)[8:-19]})\n else:\n return render(request,'mailing/pdf2.html',{'Columns':str(data.columns)[8:-19]})\n else:\n if(val=='0'):\n return render(request,'mailing/pdfv0.html',{'Columns':str(data.columns)[8:-19]})\n elif(val=='1'):\n return render(request,'mailing/pdfv1.html',{'Columns':str(data.columns)[8:-19]})\n else:\n return render(request,'mailing/pdfv2.html',{'Columns':str(data.columns)[8:-19]})\ndef startingmails(request):\n global csv_file_name,colnam,val,subject,end,i,error,filename,filename1,vid\n colnam=request.POST['col-nam']\n startindex=i\n data=pd.read_csv(settings.MEDIA_ROOT / csv_file_name,encoding=\"cp1252\")\n limit=i\n try:\n if(request.method==\"POST\"):\n print(\"ENTERING POST REQUEST\")\n if(val=='0'):\n fromaddr = mail\n body = matter\n count=i\n s = smtplib.SMTP('smtp.gmail.com', 587,timeout=600)\n s.ehlo()\n s.starttls()\n s.ehlo()\n s.login(mail, pswd)\n while(count<=i+200 and (i!=data.shape[0]) and count<=end):\n msg = MIMEMultipart('alternative')\n msg.attach(MIMEText(body, 'plain'))\n msg['From'] = mail\n msg['Subject'] = subject\n var = data[f\"{colnam}\"][i]\n i=i+1\n msg['To'] = var\n text = msg.as_string()\n is_valid = validate_email(var)\n if(is_valid):\n s.sendmail(fromaddr, var, text)\n count=count+1\n print(var,count)\n if(limit+60==i):\n s.quit() \n time.sleep(10)\n s = smtplib.SMTP('smtp.gmail.com', 587,timeout=600)\n s.ehlo()\n s.starttls()\n s.ehlo()\n s.login(mail, pswd)\n limit+=60\n if(count==startindex+90):\n break\n #time.sleep(5)\n s.quit()\n elif(val==\"1\"):\n uploaded_video=\"\"\n if(vid==\"YES\"):\n uploaded_video=request.FILES['pdfv1']\n fs=FileSystemStorage()\n fs.save(uploaded_video.name,uploaded_video)\n uploaded_file=request.FILES['pdf1']\n fs=FileSystemStorage()\n fs.save(uploaded_file.name,uploaded_file)\n # print(uploaded_file.name,csv_file_name)\n fromaddr = mail\n body = matter\n count=i\n s = smtplib.SMTP('smtp.gmail.com', 587,timeout=600)\n s.ehlo()\n s.starttls()\n s.ehlo()\n s.login(mail, pswd)\n while(count<=i+200 and (i!=data.shape[0]) and count<=end):\n msg = MIMEMultipart()\n msg.attach(MIMEText(body, 'plain'))\n filename = uploaded_file.name\n if(vid==\"YES\"):\n attachmentv = open(settings.MEDIA_ROOT / uploaded_video.name, \"rb\")\n v = MIMEBase('application', 'octet-stream',Name=uploaded_video.name)\n v.set_payload((attachmentv).read())\n encoders.encode_base64(v)\n v.add_header('Content-Disposition', 
\"attachment; filename= %s\" % uploaded_video.name)\n msg.attach(v) \n attachmentv.close() \n attachment = open(settings.MEDIA_ROOT / filename, \"rb\")\n p = MIMEBase('application', 'octet-stream',Name=filename)\n p.set_payload((attachment).read())\n encoders.encode_base64(p)\n p.add_header('Content-Disposition', \"attachment; filename= %s\" % filename)\n msg.attach(p)\n msg['From'] = mail\n msg['Subject'] = subject\n var = data[f\"{colnam}\"][i]\n i=i+1\n msg['To'] = var\n text = msg.as_string()\n is_valid = validate_email(var)\n if(is_valid):\n s.sendmail(fromaddr, var, text)\n count=count+1\n print(var,count)\n if(limit+60==i):\n s.quit() \n #time.sleep(5)\n s = smtplib.SMTP('smtp.gmail.com', 587,timeout=600)\n s.ehlo()\n s.starttls()\n s.ehlo()\n s.login(mail, pswd)\n limit+=60\n if(count==startindex+90):\n break\n #time.sleep(5)\n s.quit()\n attachment.close()\n # os.remove(settings.MEDIA_ROOT / csv_file_name)\n\n else:\n uploaded_video=\"\"\n if(vid==\"YES\"):\n uploaded_video=request.FILES['pdfv2']\n fs=FileSystemStorage()\n fs.save(uploaded_video.name,uploaded_video)\n \n uploaded_file1=request.FILES['pdf2']\n uploaded_file2=request.FILES['pdf3']\n fs1=FileSystemStorage()\n fs1.save(uploaded_file1.name,uploaded_file1)\n fs1.save(uploaded_file2.name,uploaded_file2)\n # print(uploaded_file1.name)\n # print(uploaded_file2.name)\n # data=pd.read_csv(settings.MEDIA_ROOT / csv_file_name)\n fromaddr = mail\n body = matter\n count=i\n s = smtplib.SMTP('smtp.gmail.com', 587,timeout=600)\n s.ehlo()\n s.starttls()\n s.ehlo()\n s.login(mail,pswd)\n while(count<=i+200 and (i!=data.shape[0]) and count <=end):\n msg = MIMEMultipart()\n msg.attach(MIMEText(body, 'plain'))\n filename = uploaded_file1.name\n if(vid==\"YES\"):\n attachmentv = open(settings.MEDIA_ROOT / uploaded_video.name, \"rb\")\n v = MIMEBase('application', 'octet-stream',Name=uploaded_video.name)\n v.set_payload((attachmentv).read())\n encoders.encode_base64(v)\n v.add_header('Content-Disposition', \"attachment; filename= %s\" % uploaded_video.name)\n msg.attach(v) \n attachmentv.close()\n attachment = open(settings.MEDIA_ROOT / filename, \"rb\")\n p = MIMEBase('application', 'octet-stream',Name=filename)\n p.set_payload((attachment).read())\n encoders.encode_base64(p)\n p.add_header('Content-Disposition', \"attachment; filename= %s\" % filename)\n msg.attach(p)\n filename1 = uploaded_file2.name\n attachment1 = open(settings.MEDIA_ROOT / filename1, \"rb\")\n q = MIMEBase('application', 'octet-stream',Name=filename1)\n q.set_payload((attachment1).read())\n encoders.encode_base64(q)\n q.add_header('Content-Disposition', \"attachment; filename= %s\" % filename1)\n msg.attach(q)\n msg['From'] = fromaddr\n msg['Subject'] = subject\n var = data[f\"{colnam}\"][i]\n i=i+1\n msg['To'] = var\n text = msg.as_string()\n is_valid = validate_email(var)\n if(is_valid):\n s.sendmail(fromaddr, var, text)\n count=count+1\n print(var,count)\n if(limit+60==i):\n s.quit() \n #time.sleep(5)\n s = smtplib.SMTP('smtp.gmail.com', 587,timeout=600)\n s.ehlo()\n s.starttls()\n s.ehlo()\n s.login(mail, pswd)\n limit+=60\n if(count==startindex+90):\n break\n #time.sleep(5)\n s.quit()\n attachment.close()\n attachment1.close()\n except Exception as e:\n error=str(e.__class__)\n print(error)\n\n return render(request,'mailing/continued.html',{'index':i,'error':error})\n" }, { "alpha_fraction": 0.6756926774978638, "alphanum_fraction": 0.7065491080284119, "avg_line_length": 29.538461685180664, "blob_id": "967d27ad7d9187ee3a7dfb4db1a60010827c3b21", "content_id": 
"ca4622231212fa9a9ce547278fedc525c256c2ba", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1588, "license_type": "permissive", "max_line_length": 126, "num_lines": 52, "path": "/GUI & Bot/Currency Convertor/currency_convertor.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "#Currency Convertor\n\nimport tkinter as tk\nimport tkinter.ttk as ttk\nfrom forex_python.converter import CurrencyRates\n\ndef convertcurr(rate):\n\tx = amount.get()\n\ty = currency_from.get()\n\tz = currency_to.get()\n\tcurr = CurrencyRates()\n\tf = curr.convert(y,z,x)\n\tfinal.set(format(f, '.2f'))\n\nroot = tk.Tk()\nroot.geometry('450x400')\nroot.title('Currency Converter')\n\namount = tk.IntVar()\ncurrency_from = tk.StringVar()\ncurrency_to = tk.StringVar()\nfinal = tk.StringVar()\n\ntk.Label(root, text='Input amount',font='Times').grid(row=0, column=0, columnspan=5,sticky='NSEW')\n\nq = ttk.Entry(root, textvariable=amount)\nq.grid(row=1, column=1, columnspan=3, sticky='NSWE', padx=5, pady=5)\n\ntk.Label(root, text='Input Convert From (USD,INR,EUR,GBP etc)',font='Times').grid(row=2, column=0, columnspan=5,sticky='NSEW')\n\nq = ttk.Entry(root, textvariable=currency_from)\nq.grid(row=3, column=1, columnspan=3, sticky='NSWE', padx=5, pady=5)\n\ntk.Label(root, text='Input Convert To (USD,INR,EUR,GBP etc)',font='Times').grid(row=4, column=0, columnspan=5,sticky='NSEW')\n\nq = ttk.Entry(root, textvariable=currency_to)\nq.grid(row=5, column=1, columnspan=3, sticky='NSWE', padx=5, pady=5)\n\n\nw = ttk.Button(root, text='Convert', command=lambda r=1.08: convertcurr(r))\nw.grid(row=7, column=2, padx=5, pady=5,sticky='NSWE')\n\n\ntk.Label(root).grid(row=9, column=0, columnspan=5)\n\ntk.Label(root, text='--Converted Amount--',font='Times').grid(row=10, column=1, columnspan=3, sticky='NSWE')\n\nl = ttk.Label(root, textvariable=final, relief='groove')\nl.grid(row=11, column=1, columnspan=3, sticky='NSWE')\n\n\nroot.mainloop()\n" }, { "alpha_fraction": 0.7244139313697815, "alphanum_fraction": 0.7352772951126099, "avg_line_length": 37.0217399597168, "blob_id": "2807ef11e88c7cee90946a7601d9b4d63f1ee918", "content_id": "52a34bbfd621d04db7303edf3051ba129cf881d1", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1749, "license_type": "permissive", "max_line_length": 187, "num_lines": 46, "path": "/Machine Learning/Manual-Parameter-Tuner/README.md", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "<h1 align=\"center\"> Manual Parameter Tuner </h2>\n<h3 align=\"center\"> A web app for beginners in Machine Learning and Data Science to fiddle with different parameters of various ML algorithms on the Framingham Heart Disease dataset. 
<h4>\n \n[![UI ](https://img.shields.io/badge/Deployed-%20---->-blue?style=for-the-badge&logo=appveyor)](https://share.streamlit.io/indrap24/manual-parameter-tuner/main/app.py)\n[![Streamlit App](https://static.streamlit.io/badges/streamlit_badge_black_red.svg)](https://share.streamlit.io/indrap24/manual-parameter-tuner/main/app.py)\n\n\n### Functionalities\n- Manually tune parameters of different ML algorithms to get varied result on the **Framingham** Heart Disease dataset and understand the importance of Hyperparameter Tuning.\n\n#### *Detailed description of the Dataset and the functionalities are present in the Website itself.*\n\n\n### Check out the published Website here:\n\nhttps://share.streamlit.io/indrap24/manual-parameter-tuner/main/app.py\n\n<br>\n\n### Instructions to run the web app locally\n \n* Pre-requisites:\n\t- Python 3.6 or 3.7 or 3.8\n\t- Dependencies from requirements.txt\n \n* Install locally:\n\n - First clone this repository onto your system and traverse to this folder.<br>\n - Then, create a Virtual Environment and activate it. <br>\n ```bash\n cd path/to/cloned/repo\n python3 -m venv env\n source env/bin/activate\n ```\n - Install the python dependencies from requirements.txt:\n ```bash\n pip install -r requirements.txt\n ```\n* Directions to Execute\n\n From the same project directory, run the following command in the terminal -\n ```bash\n streamlit run app.py\n ```\n \n This will prompt a localhost and you can view and make changes to the source file locally.\n" }, { "alpha_fraction": 0.5883392095565796, "alphanum_fraction": 0.6060070395469666, "avg_line_length": 20.769229888916016, "blob_id": "aed37889f8b50e2df0247ea69d05526a27eb9b36", "content_id": "4a4a91474c85d3af54e98fc0f029dedf3a84f2e3", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 566, "license_type": "permissive", "max_line_length": 70, "num_lines": 26, "path": "/Algorithms/SelectionSort.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "def selection_sort(arr):\n \"\"\"Returns the array arr sorted using the selection sort algorithm\n\n >>> import random\n >>> unordered = [i for i in range(5)]\n >>> random.shuffle(unordered)\n >>> selection_sort(unordered)\n [0, 1, 2, 3, 4]\n \"\"\"\n if len(arr) <= 1: return arr\n\n smallest = min(arr)\n del arr[arr.index(smallest)]\n\n return [smallest] + selection_sort(arr)\n\n\n\nif __name__ == \"__main__\":\n import random\n\n unordered = [i for i in range(500)]\n random.shuffle(unordered)\n sort = selection_sort(unordered)\n\n print(sort)\n" }, { "alpha_fraction": 0.4997498393058777, "alphanum_fraction": 0.5246403813362122, "avg_line_length": 37.07143020629883, "blob_id": "62ed9b69ce3c03821284cdd8bcfea572c473c706", "content_id": "4e73a606b079f0f05dc7c10186f5361a36a0b1fb", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 15990, "license_type": "permissive", "max_line_length": 206, "num_lines": 420, "path": "/Games/Drawing/main.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "# DRAWING PROGRAM BY TIM\n# main.py file (RUN THIS)\n# Description: This program draws a grid of 600, 600 with a given\n# pixel size/rows and cols. The user can then interact with the grid\n# using a variety of tools from the menu bar on the bottom. They can draw\n# by selecting a color from the pallet and clicking the \"D\" button. 
This program\n# also offers a save feature where a user can save their work to a selected directory.\n# It can then be opened by selecting the file from the file nav within the program.\n#\n#Input: Input is taken at the beginning of the program for the pixel size/rows and cols.\n# It is also taken whenever the user clicks.\n\ntry:\n import pygame\nexcept:\n import install_requirements\n import pygame\nfrom tkinter import *\nfrom tkinter import messagebox\nfrom tkinter.filedialog import askopenfilename, asksaveasfilename\nimport gridModule\nfrom gridModule import colorPallet\nfrom gridModule import pixelArt\nfrom gridModule import menu\nfrom gridModule import grid\nimport sys\nimport time\n\nsys.setrecursionlimit(1000000)\n\npygame.init() #initalize pygame\npaintBrush = pygame.image.load(\"Paintbrush.png\")\ncurrentVersion = 1.1\n\n#Set defaults for our screen size and rows and columns\nrows = 50\ncols = 50\nwid = 600\nheigh = 600\n\nchecked = []\ndef fill(spot, grid, color, c):\n if spot.color != c:\n pass\n else:\n spot.click(grid.screen, color)\n pygame.display.update()\n \n i = spot.col #the var i is responsible for denoting the current col value in the grid\n j = spot.row #the var j is responsible for denoting the current row value in the grid\n\n #Horizontal and vertical neighbors\n if i < cols-1: #Right\n fill(grid.getGrid()[i + 1][j], grid, color, c)\n if i > 0: #Left\n fill(grid.getGrid()[i - 1][j], grid, color, c)\n if j < rows-1: #Up\n fill(grid.getGrid()[i][j + 1], grid, color, c)\n if j > 0 : #Down\n fill(grid.getGrid()[i][j - 1], grid, color, c)\n \n\n# Saves the current project into a text file that contains the size of the screen, if the gird is showing and all the colors of all the pixels\ndef save(cols, rows, show, grid, path):\n if len(path) >= 4: # This just makes sure we have .txt at the end of our file selection\n if path[-4:] != '.txt':\n path = path + '.txt'\n else:\n path = path + '.txt'\n\n # Overwrite the current file, or if it doesn't exist create a new one\n file = open(path, 'w')\n file.write(str(cols) + ' ' + str(rows) + ' ' + str(show) +'\\n')\n\n for pixel in grid:\n for p in pixel: #For every pixel write the color in the text file\n wr = str(p.color[0]) + ',' + str(p.color[1]) + ',' + str(p.color[2])\n file.write(wr + '\\n')\n file.write(str(currentVersion))\n\n file.close()\n name = path.split(\"/\")\n changeCaption(name[-1])\n\n\n#Opens the file from the given path and displays it to the screen\ndef openFile(path):\n global grid\n \n file = open(path, 'r')\n f = file.readlines()\n if f[-1] == str(currentVersion):\n \n dimensions = f[0].split() #Dimesnions for the rows and cols\n columns = int(dimensions[0])\n rows = int(dimensions[1])\n \n if dimensions[2] == '0': #If the show grid attribute at the end of our dimensions line is 0 then don't show grid\n v = False\n else:\n v = True\n initalize(columns, rows, v) #Redraw the grid, tool bars, menu bars etc. \n name = path.split(\"/\")\n changeCaption(name[-1])\n \n line = 0\n for i in range(columns): # For every pixel, read the color and format it into a tuple\n for j in range(rows):\n line += 1\n nColor = []\n for char in f[line].strip().split(','):\n nColor.append(int(char))\n \n \n grid.getGrid()[i][j].show(win, tuple(nColor), 0) #Show the color on the grid\n else:\n window = Tk()\n window.withdraw()\n messagebox.showerror(\"Unsupported Version\", \"The file you have opened is created using a previous version of this program. 
Please open it in that version.\")\n\n\n#Change pygame caption\ndef changeCaption(txt):\n pygame.display.set_caption(txt)\n \n\n# This shows the file navigator for opening and saving files\ndef showFileNav(op=False):\n #Op is short form for open as open is a key word\n window = Tk()\n window.attributes(\"-topmost\", True)\n window.withdraw()\n myFormats = [('Windows Text File','*.txt')]\n if op:\n filename = askopenfilename(title=\"Open File\",filetypes=myFormats) # Ask the user which file they want to open\n else:\n filename = asksaveasfilename(title=\"Save File\",filetypes=myFormats) # Ask the user choose a path to save their file to\n \n if filename: #If the user seletced something \n x = filename[:] # Make a copy\n return x\n\n# Onsubmit function for tkinter form for choosing pixel size\ndef onsubmit(x=0):\n global cols, rows, wid, heigh\n \n st = rowsCols.get().split(',') # Get the input from the text box\n window.quit()\n window.destroy()\n try: # Make sure both cols and rows are integers\n if st[0].isdigit(): \n cols = int(st[0])\n while 600//cols != 600/cols:\n if cols < 300:\n cols += 1\n else:\n cols -= 1\n if st[1].isdigit():\n rows = int(st[1])\n while 600//rows != 600/rows:\n if rows < 300:\n rows += 1\n else:\n rows -= 1\n if cols > 300:\n cols = 300\n if rows > 300:\n rows = 300\n\n except:\n pass\n\n# Update the lbale which shows the pixel size by getting input on rows and cols\ndef updateLabel(a, b, c):\n sizePixel = rowsCols.get().split(',') #Get the contents of the label\n l = 12\n w = 12\n \n try:\n l = 600/int(sizePixel[0])\n except:\n pass\n\n try:\n w = 600/(int(sizePixel[1]))\n except:\n pass\n\n label1.config(text='Pixel Size: ' + str(l) + ', ' + str(w)) #Change label to show pixel size\n\n\n#CREATE SCREEN\ndef initalize(cols, rows, showGrid=False):\n global pallet, grid, win, tools, lineThickness, saveMenu\n\n #if grid already exsists delete it then recreate it\n try:\n del grid\n except:\n pass\n \n pygame.display.set_icon(paintBrush) \n win = pygame.display.set_mode((int(wid), int(heigh) + 100))\n pygame.display.set_caption('Untitled')\n win.fill((255,255,255))\n\n #CREATION OF OBJECTS\n grid = pixelArt(win, int(wid), int(heigh), cols, rows, showGrid)\n grid.drawGrid()\n\n pallet = colorPallet(win, 90, 90, 3, 3, True, 10, grid.height + 2)\n pallet.drawGrid()\n\n colorList = [(0,0,0), (255,255,255), (255,0,0), (0,255,0), (0,0,255), (255,255,0), (255,168,0), (244, 66, 173), (65, 244, 226)]\n pallet.setColor(colorList)\n\n tools = menu(win, 200, 40, 5, 1, True, grid.width - 210, grid.height + 50)\n tools.drawGrid()\n\n buttons = ['D', 'E', 'F', 'R', 'C']\n tools.setText(buttons)\n tools.drawGrid()\n\n l = tools.getGrid()\n l[0][0].show(grid.screen, (255,0,0),1, True)\n\n lineThickness = menu(win, 180, 40, 4, 1, True, grid.width - 200, grid.height + 10)\n lineThickness.drawGrid()\n\n buttons = ['1', '2', '3', '4']\n lineThickness.setText(buttons)\n\n saveMenu = menu(win, 140, 40, 2, 1, True, grid.width - 400, grid.height + 25)\n saveMenu.drawGrid()\n\n buttons = ['Save', 'Open']\n saveMenu.setText(buttons)\n\n pygame.display.update()\n\n#-----------------------------------------------------------------------#\n #TKINTER FORM FOR GETTING INPUT#\nwindow = Tk()\nwindow.title('Paint Program')\n\nt_var = StringVar()\nt_var.trace('w', updateLabel)\n\nlabel = Label(window, text='# Of Rows and Columns (25,50): ')\nrowsCols = Entry(window, textvariable=t_var)\n\nlabel1 = Label(window, text=\"Pixel Size: 12.0, 12.0\")\nvar = IntVar()\nc = Checkbutton(window, text=\"View 
Grid\", variable=var)\nsubmit = Button(window, text='Submit', command=onsubmit)\nwindow.bind('<Return>', onsubmit)\n\nsubmit.grid(columnspan=1, row=3, column=1,pady=2)\nc.grid(column=0, row=3)\nlabel1.grid(row=2)\nrowsCols.grid(row=0, column=1, pady=3, padx=8)\nlabel.grid(row=0, pady=3)\n\nwindow.update()\nmainloop()\n\n#------------------------------------------------------------------------#\n\n\n#MAIN LOOP\ninitalize(cols, rows, var.get())\npygame.display.update()\ncolor = (0,0,0) # Current drawing color\nthickness = 1\nreplace = False\ndoFill = False\nsavedPath = '' #Current path of file\n\nrun = True\nwhile run:\n #Main loop for mouse collision\n ev = pygame.event.get()\n\n for event in ev:\n if event.type == pygame.QUIT:\n window = Tk()\n window.withdraw()\n #Ask the user if they want to save before closing\n if pygame.display.get_caption()[0].count('*') > 0: \n if messagebox.askyesno(\"Save Work?\", \"Would you like to save before closing?\"):\n # If they have already saved the file simply save to that path otherwise they need to chose a location\n if savedPath != \"\":\n save(cols, rows, grid.showGrid, grid.getGrid(),savedPath)\n else:\n path = showFileNav()\n if path:\n savedPath = path\n save(cols, rows, grid.showGrid, grid.getGrid(),savedPath)\n run = False\n \n if pygame.mouse.get_pressed()[0]: #See if the user has clicked or dragged their mouse\n try:\n pos = pygame.mouse.get_pos()\n if pos[1] >= grid.height: # If the mouse is below the main drawing grid\n if pos[0] >= tools.startx and pos[0] <= tools.startx + tools.width and pos[1] >= tools.starty and pos[1] <+ tools.starty + tools.height: #If the mouse ic clicking on the tools grid\n replace = False\n doFill = False\n tools.drawGrid() #Redraw the grid so that we dont see the red highlight\n buttons = ['D', 'E', 'F', 'R', 'C']\n tools.setText(buttons)\n \n clicked = tools.clicked(pos)\n clicked.show(grid.screen, (255,0,0), 1, True)\n\n #Depending what tool they click\n if clicked.text == 'D': #Draw tool \n color = (0,0,0)\n elif clicked.text == 'E': #Erase tool\n color = (255,255,255)\n elif clicked.text == 'F':# Fill tool\n doFill = True\n elif clicked.text == 'R':# Replace tool\n replace = True\n elif clicked.text == 'C':# Clear grid tool\n grid.clearGrid()\n tools.drawGrid() #Redraw the grid so that we dont see the red highlight\n buttons = ['D', 'E', 'F', 'R', 'C']\n tools.setText(buttons)\n l = tools.getGrid()\n l[0][0].show(grid.screen, (255,0,0),1, True)\n \n #If they click on the color pallet\n elif pos[0] >= pallet.startx and pos[0] <= pallet.startx + pallet.width and pos[1] >= pallet.starty and pos[1] <= pallet.starty + pallet.height:\n clicked = pallet.clicked(pos)\n color = clicked.getColor() # Set current drawing color\n\n pallet = colorPallet(win, 90, 90, 3, 3, True, 10, grid.height + 2)\n pallet.drawGrid()\n\n colorList = [(0,0,0), (255,255,255), (255,0,0), (0,255,0), (0,0,255), (255,255,0), (255,168,0), (244, 66, 173), (65, 244, 226)]\n pallet.setColor(colorList)\n clicked.show(grid.screen, (255,0,0), 3, True)\n \n elif pos[0] >= lineThickness.startx and pos[0] <= lineThickness.startx + lineThickness.width and pos[1] >= lineThickness.starty and pos[1] <= lineThickness.starty + lineThickness.height:\n lineThickness.drawGrid() #Redraw the grid so that we dont see the red highlight\n buttons = ['1', '2', '3', '4']\n lineThickness.setText(buttons)\n \n clicked = lineThickness.clicked(pos)\n clicked.show(grid.screen, (255,0,0), 1, True)\n\n thickness = int(clicked.text) # set line thickness\n\n #If they click on 
the save menu \n elif pos[0] >= saveMenu.startx and pos[0] <= saveMenu.startx + saveMenu.width and pos[1] >= saveMenu.starty and pos[1] <= saveMenu.starty + saveMenu.height:\n clicked = saveMenu.clicked(pos)\n\n if clicked.text == 'Save': # save if they click save\n path = showFileNav()\n if path:\n savedPath = path\n save(cols, rows, grid.showGrid, grid.getGrid(),savedPath)\n else: #otherwise open\n path = showFileNav(True)\n if path:\n openFile(path)\n savedPath = path\n #open file\n\n \n else:\n if replace: #If we have the replace tool selected then replace the color\n tools.drawGrid() #Redraw the grid so that we dont see the red highlight\n buttons = ['D', 'E', 'F', 'R', 'C']\n tools.setText(buttons)\n \n tools.getGrid()[0][0].show(grid.screen, (255,0,0), 1, True)\n\n clicked = grid.clicked(pos)\n c = clicked.color\n replace = False\n\n for x in grid.getGrid():\n for y in x:\n if y.color == c:\n y.click(grid.screen, color)\n elif doFill:\n clicked = grid.clicked(pos)\n if clicked.color != color:\n fill(clicked, grid, color, clicked.color)\n pygame.display.update()\n \n else: #otherwise draw the pixels accoding to the line thickness\n name = pygame.display.get_caption()[0]\n if name.find(\"*\") < 1:\n changeCaption(name + '*')\n\n clicked = grid.clicked(pos)\n clicked.click(grid.screen,color)\n if thickness == 2:\n for pixel in clicked.neighbors:\n pixel.click(grid.screen, color)\n elif thickness == 3:\n for pixel in clicked.neighbors:\n pixel.click(grid.screen, color)\n for p in pixel.neighbors:\n p.click(grid.screen, color)\n elif thickness == 4:\n for pixel in clicked.neighbors:\n pixel.click(grid.screen, color)\n for p in pixel.neighbors:\n p.click(grid.screen, color)\n for x in p.neighbors:\n x.click(grid.screen, color)\n \n pygame.display.update()\n except AttributeError:\n pass\n\npygame.quit()\n" }, { "alpha_fraction": 0.44029849767684937, "alphanum_fraction": 0.48507463932037354, "avg_line_length": 11.666666984558105, "blob_id": "740421d0d7875191601ff78b3e0a3dfa54e83c58", "content_id": "d04228df8d1aa820c3cba33158a31fecb173016d", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 268, "license_type": "permissive", "max_line_length": 36, "num_lines": 21, "path": "/Algorithms/Two Pointer.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "def isPairSum(A, N, X):\n\n\tfor i in range(N):\n\t\tfor j in range(N):\n\n\t\t\tif(i == j):\n\t\t\t\tcontinue\n\n\t\t\n\t\t\tif (A[i] + A[j] == X):\n\t\t\t\treturn True\n\n\t\t\tif (A[i] + A[j] > X):\n\t\t\t\tbreak\n\n\treturn 0\n\narr = [3, 5, 9, 2, 8, 10, 11]\nval = 17\n\nprint(isPairSum(arr, len(arr), val))\n\n\n" }, { "alpha_fraction": 0.6996197700500488, "alphanum_fraction": 0.6996197700500488, "avg_line_length": 23, "blob_id": "99b01a60bfa8e7ce72332e467fdaa15df34154ec", "content_id": "0be9a68deda7ab1db5baeb1443af5b95378d6909", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 263, "license_type": "permissive", "max_line_length": 31, "num_lines": 11, "path": "/Automation/Spam Bot/src/Modules/Spammer.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "from pynput.keyboard import *\nimport time\n\nkeyboard = Controller()\ndef spammer(message,sleep):\n for unit in message:\n keyboard.press(unit)\n keyboard.release(unit)\n keyboard.press(Key.enter)\n keyboard.release(Key.enter)\n time.sleep(sleep)" }, { "alpha_fraction": 0.5449516177177429, 
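A note on the Two Pointer.py entry above: despite its filename, that script checks every pair with two nested loops, which is O(n²). A minimal sketch of the actual two-pointer scan the name refers to is below; it assumes the input is sorted first, and `has_pair_with_sum` is an illustrative name, not part of the repo.

```python
# Hedged sketch of the two-pointer pair-sum check (assumes a sorted list).
def has_pair_with_sum(sorted_arr, target):
    lo, hi = 0, len(sorted_arr) - 1
    while lo < hi:
        s = sorted_arr[lo] + sorted_arr[hi]
        if s == target:
            return True      # found a pair summing to target
        if s < target:
            lo += 1          # need a larger sum: move the left pointer right
        else:
            hi -= 1          # need a smaller sum: move the right pointer left
    return False

print(has_pair_with_sum(sorted([3, 5, 9, 2, 8, 10, 11]), 17))  # True (8 + 9)
```

After an O(n log n) sort, this reduces the pair search itself to a single O(n) pass.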
"alphanum_fraction": 0.5622406601905823, "avg_line_length": 30.897058486938477, "blob_id": "49c226ac19493aea6d86f7e4e3c42f74804ab675", "content_id": "e9f83cc774545b0c14144dd0e3f966e6d1e2266f", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4338, "license_type": "permissive", "max_line_length": 93, "num_lines": 136, "path": "/GUI & Bot/The_Snake_Game.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "import pygame,sys\nimport time\nimport random\n\npygame.init() #Initializing PyGame Module\n\n#Setting colors\nwhite=(255,255,255)\nblack=(0,0,0)\nred=(255,0,0)\n\n#Setting boreders in px\nwindow_width=800\nwindow_height=600\n\ngameDisplay=pygame.display.set_mode((window_width,window_height))\npygame.display.set_caption('Slither.io - The Snake Game')\n\nclock=pygame.time.Clock()#varible for getting time within the program\nFPS=5 #Frame_per_second \nblockSize=20\nnoPixel=0\n\ndef myquit():\n '''Self explanatory'''\n pygame.quit()\n sys.exit(0)\nfont=pygame.font.SysFont(None,35,bold=True)\n\ndef drawGrid():\n sizeGrd=window_width//blockSize\n\ndef snake(blockSize,snakelist):\n #x=250-(segment_width+segment_margin)*i\n for size in snakelist:\n pygame.draw.rect(gameDisplay,white,[size[0]+5,size[1],blockSize,blockSize],2)\n\ndef message_to_screen(msg,color):\n screen_text=font.render(msg,True,color)\n gameDisplay.blit(screen_text,[100,window_height/2])\n\ndef gameLoop():\n gameExit=False\n gameOver=False\n\n lead_x=window_width/2\n lead_y=window_height/2\n\n change_pixels_of_x=0\n change_pixels_of_y=0\n snakelist = []\n snakeLength = 1\n randomAppleX = int(random.randrange(0, window_width-blockSize)/10)*10\n randomAppleY = int(random.randrange(0, window_height-blockSize)/10)*10\n\n while not gameExit:\n while gameOver == True:\n gameDisplay.fill(black)\n message_to_screen(\"Game over, press 'c' to play again or 'q' to quit\", red)\n pygame.display.update()\n\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n gameOver = False\n gameExit = True \n if event.type == pygame.KEYDOWN:\n if event.key == pygame.K_q:\n gameExit = True\n gameOver = False\n if event.key == pygame.K_c:\n gameLoop()\n \n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n gameExit = True\n if event.type == pygame.KEYDOWN:\n if event.key == pygame.K_ESCAPE:\n myquit()\n \n leftArrow = event.key == pygame.K_LEFT\n rightArrow = event.key == pygame.K_RIGHT\n upArrow = event.key == pygame.K_UP\n downArrow = event.key == pygame.K_DOWN\n\n if leftArrow:\n change_pixels_of_x = -blockSize\n change_pixels_of_y = noPixel\n elif rightArrow:\n change_pixels_of_x = blockSize\n change_pixels_of_y = noPixel\n elif upArrow:\n change_pixels_of_y = -blockSize\n change_pixels_of_x = noPixel\n elif downArrow:\n change_pixels_of_y = blockSize\n change_pixels_of_x = noPixel\n\n if lead_x >= window_width or lead_x < 0 or lead_y >= window_height or lead_y < 0:\n gameOver = True \n\n lead_x += change_pixels_of_x\n lead_y += change_pixels_of_y\n gameDisplay.fill(black)\n AppleThickness = 20\n\n print([int(randomAppleX),int(randomAppleY),AppleThickness,AppleThickness])\n pygame.draw.circle(gameDisplay, red, [randomAppleX,randomAppleY],10)\n\n allspriteslist = []\n allspriteslist.append(lead_x)\n allspriteslist.append(lead_y)\n snakelist.append(allspriteslist)\n\n if len(snakelist) > snakeLength:\n del snakelist[0] \n\n for eachSegment in snakelist [:-1]:\n if eachSegment == allspriteslist:\n gameOver = True \n\n 
snake(blockSize, snakelist) \n        pygame.display.update()\n        \n        if lead_x >= randomAppleX and lead_x <= randomAppleX + AppleThickness:\n            if lead_y >= randomAppleY and lead_y <= randomAppleY + AppleThickness:\n                randomAppleX = int(random.randrange(0, window_width-blockSize)/10)*10\n                randomAppleY = int(random.randrange(0, window_height-blockSize)/10)*10\n                snakeLength += 1 \n\n        clock.tick(FPS)\n    \n    pygame.quit()\n    quit()\n\n\ngameLoop()\n" }, { "alpha_fraction": 0.6110019683837891, "alphanum_fraction": 0.6313065886497498, "avg_line_length": 24.450000762939453, "blob_id": "aa05738b1a06aa64b81c265fba814d83a7886c2d", "content_id": "5c8da1b37a120bafa7ee10e20dc152d955d675ec", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3054, "license_type": "permissive", "max_line_length": 69, "num_lines": 120, "path": "/GUI & Bot/random_password_gui.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "'''Python program for generating random password'''\n\n#Importing the modules\nimport tkinter as tk\nfrom tkinter import ttk\n\nimport random\nimport string\n\n#Data setting\nlowercase = list(string.ascii_lowercase)\nuppercase = list(string.ascii_uppercase)\ndigits = list(string.digits)\nsymbols = list(string.punctuation)\n\n\nclass Password:\n\n    \"\"\"Class for generating random strong passwords\n    Attributes:\n        length (int): Length of the password\n        pwd (str): The password\n    \"\"\"\n\n    def __init__(self, char, length):\n        self.char = char\n        self.length = length\n        self.charset = []\n        self.pwd = None\n\n    def setchar(self):\n        \"\"\"Setting character set.\"\"\"\n\n        if 'l' in self.char: self.charset.extend(lowercase)\n        if 'u' in self.char: self.charset.extend(uppercase)\n        if 'd' in self.char: self.charset.extend(digits)\n        if 's' in self.char: self.charset.extend(symbols)\n\n    def password_gen(self):\n        \"\"\"Return the password\n\n        Returns:\n            str: The password\n        \"\"\"\n        if len(self.char) == 0:\n            self.charset.extend(lowercase)\n\n        if len(self.length) == 0:\n            self.length = 10 # By default, length is 10\n        else:\n            self.length = int(self.length)\n\n        pwdlist = random.choices(self.charset, k=self.length)\n        self.pwd = ''.join(pwdlist)\n        return self.pwd\n\n\nwind = tk.Tk()\n\ndef generate():\n    global ch\n\n    if u.get(): ch += 'u'\n    if l.get(): ch += 'l'\n    if d.get(): ch += 'd'\n    if s.get(): ch += 's'\n\n    password = Password(ch, len_entry.get())\n    password.setchar()\n\n    pwd.set(password.password_gen())\n    ch = ''\n\n# Initialise int variables\nch = ''\nu = tk.IntVar()\nd = tk.IntVar()\ns = tk.IntVar()\nl = tk.IntVar()\npwd = tk.StringVar()\n\n# Main program\nwind.title('Password Generator')\nwind.geometry('400x300')\nwind.resizable(0, 0)\n\nhead_label = ttk.Label(wind, text='Password Generator',\n                       font=('Bodoni MT', 30, 'bold'))\nhead_label.grid(row=0, column=0, columnspan=2, pady=5)\n\n\nlen_label = ttk.Label(wind, text='Enter length', font=('Arial', 10))\nlen_label.grid(row=1, column=0, pady=10)\n\nlen_entry = ttk.Entry(wind, font=('Arial', 10, 'bold'))\nlen_entry.grid(row=1, column=1, pady=10)\n\nupper_box = ttk.Checkbutton(wind, text='Uppercase', variable=u)\nupper_box.grid(row=2, column=0)\n\nlower_box = ttk.Checkbutton(wind, text='Lowercase', variable=l)\nlower_box.grid(row=2, column=1)\n\ndigit_box = ttk.Checkbutton(wind, text='Digits', variable=d)\ndigit_box.grid(row=3, column=0)\n\nsymbol_box = ttk.Checkbutton(wind, text='Symbols', variable=s)\nsymbol_box.grid(row=3, column=1)\n\ngenerate_button = ttk.Button(wind, text='Generate', 
command=generate)\ngenerate_button.grid(row=4, column=0, columnspan=2, pady=10)\n\nt_label = ttk.Label(wind, text=' Password :', font=('Arial', 10))\nt_label.grid(row=5, column=0, pady=10)\n\npassword_disp = ttk.Entry(wind, font=('Arial', 10, 'bold'),\n textvariable=pwd, width=30)\npassword_disp.grid(row=5, column=1, pady=10)\n\nwind.mainloop()\n" }, { "alpha_fraction": 0.7296072244644165, "alphanum_fraction": 0.7356495261192322, "avg_line_length": 32.150001525878906, "blob_id": "3d7c97e9eff87e117fb904ca0a98741b3698f053", "content_id": "7d8fdfbdea40dcb6cc9dbb90b8ab40b2a5e6859f", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 662, "license_type": "permissive", "max_line_length": 133, "num_lines": 20, "path": "/GUI & Bot/Simple Calculator/README.md", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "# Simple Calculator\n\n<p align=\"center\">\n <img src=\"https://i.ibb.co/G5KDM7k/SS.png\" alt=\"SS\" border=\"0\">\n <br>A simple calculator built using python's tkinter module.\n</p>\n\n## Motivation\n\nThis is the first GUI project I worked on. I built it to learn and understand tkinter's various features and functionalities. \n\n## Usage\n\n**Prerequisite** : Python 3 and PIL\n\nBefore launching the application, make sure you have PIL installed. To do so, just launch your terminal and type `pip install Pillow`\n\nRun the `Simple-Calculator.py` file present in the `src` folder.\n\nOr, launch your terminal and `cd/` to the src folder and type `python Simple-Calculator.py`." }, { "alpha_fraction": 0.550270676612854, "alphanum_fraction": 0.5556844472885132, "avg_line_length": 32.153846740722656, "blob_id": "b9fd7b53b95161aed5c88cda4ced44fe703a2387", "content_id": "35e4b610bce3a38cafe3287d83013a2202b65009", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2586, "license_type": "permissive", "max_line_length": 89, "num_lines": 78, "path": "/Games/Stone paper scissors/code.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "import random\n\n\nclass RockPaperScissors:\n commands = {\"!exit\", \"!rating\"}\n\n def __init__(self, file_name=\"rating.txt\"):\n self.game_figures = {\"rock\": 0, \"paper\": 1, \"scissors\": 2}\n self.file_name = file_name\n self.player_choice = \"\"\n self.player_name = \"\"\n self.player_score = 0\n self.game_run = True\n\n def set_player_choice(self):\n self.player_choice = input()\n\n def set_player_name(self):\n self.player_name = input(\"Enter your name: \").strip()\n print(f\"Hello, {self.player_name}\")\n\n def set_player_score(self, value):\n self.player_score = value\n\n def read_player_score(self):\n with open(self.file_name, \"r\") as file:\n for line in file:\n line = line.split()\n if line[0] == self.player_name:\n self.player_score = int(line[1])\n break\n\n def set_game_figures(self):\n figures = input(\"Input your figures for game, separated by comma without space.\")\n self.game_figures = self.game_figures if not figures else \\\n {name: i for i, name in enumerate(figures.split(\",\"))}\n print(\"Okay, let's start\")\n\n def match(self):\n pc_choice = random.choice(list(self.game_figures.keys()))\n result = (self.game_figures[self.player_choice] - self.game_figures[pc_choice]) \\\n % len(self.game_figures)\n if result == 0:\n print(f\"There is a draw ({self.player_choice})\")\n return 50\n if result > len(self.game_figures) / 2:\n print(f\"Sorry, but computer chose 
{pc_choice}\")\n return 0\n print(f\"Well done. Computer chose {pc_choice} and failed\")\n return 100\n\n def do_command(self, command):\n if command == \"!exit\":\n self.game_run = False\n elif command == \"!rating\":\n print(f\"Your rating: {self.player_score}\")\n\n def check_and_run(self):\n while self.game_run:\n self.set_player_choice()\n if self.player_choice in self.game_figures:\n score = self.player_score + self.match()\n self.set_player_score(score)\n elif self.player_choice in RockPaperScissors.commands:\n self.do_command(self.player_choice)\n else:\n print(\"Invalid input\")\n print(\"Bye!\")\n\n def main(self):\n self.set_player_name()\n self.read_player_score()\n self.set_game_figures()\n self.check_and_run()\n\n\nif __name__ == \"__main__\":\n RockPaperScissors().main()\n" }, { "alpha_fraction": 0.4601770043373108, "alphanum_fraction": 0.6725663542747498, "avg_line_length": 15.142857551574707, "blob_id": "82c0a664b74162e2969cb0929134235a96ad44a4", "content_id": "f219c607c88cb2cb36f84e7ad4ad5361b3127412", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 113, "license_type": "permissive", "max_line_length": 22, "num_lines": 7, "path": "/Automation/mail-automation/requirements.txt", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "Django==3.2.5\nemail-forwarder==0.1.1\nemails==0.6\nnumpy==1.19.5.7\npandas==1.1.5\npdf-mail==3.0.0\npdf2image==1.16.0\n" }, { "alpha_fraction": 0.700236976146698, "alphanum_fraction": 0.700236976146698, "avg_line_length": 20.100000381469727, "blob_id": "a8a055b344533cbc2cb5dba32402d4f3c89a0b79", "content_id": "40c474965beb3504b23de665dc312208b86659e0", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 844, "license_type": "permissive", "max_line_length": 44, "num_lines": 40, "path": "/Data Strucrures/Graph.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "# Python program for\n# validation of a graph\n\n# import dictionary for graph\nfrom collections import defaultdict\n\n# function for adding edge to graph\ngraph = defaultdict(list)\ndef addEdge(graph,u,v):\n\tgraph[u].append(v)\n\n# definition of function\ndef generate_edges(graph):\n\tedges = []\n\n\t# for each node in graph\n\tfor node in graph:\n\t\t\n\t\t# for each neighbour node of a single node\n\t\tfor neighbour in graph[node]:\n\t\t\t\n\t\t\t# if edge exists then append\n\t\t\tedges.append((node, neighbour))\n\treturn edges\n\n# declaration of graph as dictionary\naddEdge(graph,'a','c')\naddEdge(graph,'b','c')\naddEdge(graph,'b','e')\naddEdge(graph,'c','d')\naddEdge(graph,'c','e')\naddEdge(graph,'c','a')\naddEdge(graph,'c','b')\naddEdge(graph,'e','b')\naddEdge(graph,'d','c')\naddEdge(graph,'e','c')\n\n# Driver Function call\n# to print generated graph\nprint(generate_edges(graph))\n" }, { "alpha_fraction": 0.40925267338752747, "alphanum_fraction": 0.41992881894111633, "avg_line_length": 22.41666603088379, "blob_id": "3c77d62ebe58a170f431ff3bbb15662d36d7a384", "content_id": "3c1f34e5fe59dbbd66a03e3fc7057c06d8aa5b68", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 281, "license_type": "permissive", "max_line_length": 33, "num_lines": 12, "path": "/Algorithms/Hashing.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "class Solution:\n # @param A : list of integers\n # @return an integer\n def solve(self, 
A):\n        s = set(A)\n        if len(s) == len(A):\n            return -1\n        for i in A:\n            if A.count(i) > 1: # first value that occurs more than once\n                return i\n        return -1\n" }, { "alpha_fraction": 0.5306633114814758, "alphanum_fraction": 0.5419273972511292, "avg_line_length": 21.769229888916016, "blob_id": "014bc31514c588d6abddb75d3b046d2864a87eaa", "content_id": "7bdec70f8fa683c5331fd35fca648b1bf3d5247e", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 799, "license_type": "permissive", "max_line_length": 66, "num_lines": 36, "path": "/Algorithms/QuickSort.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "def quick_sort(arr):\n    \"\"\"Returns the array arr sorted using the quick sort algorithm\n\n    >>> import random\n    >>> unordered = [i for i in range(5)]\n    >>> random.shuffle(unordered)\n    >>> quick_sort(unordered)\n    [0, 1, 2, 3, 4]\n    \"\"\"\n    less = []\n    equal = []\n    greater = []\n\n    if len(arr) <= 1:\n        return arr\n\n    pivot = arr[len(arr) // 2] # pivot at mid point\n    for num in arr:\n        if num < pivot:\n            less.append(num)\n        elif num == pivot:\n            equal.append(num)\n        elif num > pivot:\n            greater.append(num)\n\n    return quick_sort(less) + equal + quick_sort(greater)\n\n\nif __name__ == \"__main__\":\n    import random\n\n    unordered = [i for i in range(5)]\n    random.shuffle(unordered)\n    sort = quick_sort(unordered)\n\n    print(sort)\n" }, { "alpha_fraction": 0.6872928142547607, "alphanum_fraction": 0.6872928142547607, "avg_line_length": 35.2400016784668, "blob_id": "0f68b3d66b0ae7bb3d0b4db2cca034d53c85ed4e", "content_id": "2534b116bea6799f2cdefb7a8155dda195f8265f", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 905, "license_type": "permissive", "max_line_length": 79, "num_lines": 25, "path": "/Machine Learning/House price prediction/server/app.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "from flask import Flask , request , jsonify,render_template\nimport util\napp=Flask(__name__)\n\[email protected]('/')\ndef get_location_names():\n    response = util.get_location_names()\n    print(response)\n    #response.headers.add('Access-control-Allow-origin','*')\n    return render_template('app.html',response=response)\n\[email protected]('/predict_house_price',methods=['POST'])\ndef predict_house_price():\n    total_sqft=float(request.form['total_sqft'])\n    location = float(request.form['location'])\n    bhk = int(request.form['bhk'])\n    bath = float(request.form['bath']) # was request.form['bhk'], which read the wrong form field\n    response = util.get_location_names()\n    #response =jsonify({\n    estimated_price = util.get_estimateud_price(location,total_sqft,bhk,bath)\n    #})\n    return render_template('app.html', response=response,price=estimated_price)\nif __name__==\"__main__\":\n    print(\"Starting Python flask server for Home price prediction...\")\n    app.run()" }, { "alpha_fraction": 0.5, "alphanum_fraction": 0.7058823704719543, "avg_line_length": 16.5, "blob_id": "aa78c2c8310582a771963b25a963d1ad4f9d6972", "content_id": "0a8b9572154140e0c3384508428533bf142979fd", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 34, "license_type": "permissive", "max_line_length": 17, "num_lines": 2, "path": "/Automation/Yts Discord Bot/requirements.txt", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "discord.py==1.7.1\nrequests==2.25.0" }, { "alpha_fraction": 0.6397180557250977, "alphanum_fraction": 0.6480099558830261, "avg_line_length": 35.530303955078125, "blob_id": 
"4e82616b46858fd11eae9bb793af5cef809b6eb0", "content_id": "f6ba89e551bef051b8b2b2df999676d6a1bd77dc", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2412, "license_type": "permissive", "max_line_length": 111, "num_lines": 66, "path": "/Automation/JpgToAnyFormatConvertor/JpgToAllFormatConvertor.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "'''\nScript uses Pillow module to read the image and convert it to png.\nsys module for accepting inputs from terminal and os module \nfor operations on pathnames.\n\nInstall Pillow module through \"pip install pillow\"\n'''\n\nimport sys,os\nfrom PIL import Image\n\nsource_folder = sys.argv[1] # Accepts source folder given in terminal\ndestination_folder = sys.argv[2] # Accepts destination folder given in terminal\n\nif not os.path.exists(destination_folder): #Check if destination folder exists,if not creates one\n os.makedirs(destination_folder)\n\nchoice=1\nwhile choice!=5:\n print(\"Press 1 -> To convert to PNG\")\n print(\"Press 2 -> To convert to SVG\")\n print(\"Press 3 -> To convert to GIF\")\n print(\"Press 4 -> To Exit\")\n choice=int(input(\"Enter your Choice: \"))\n print()\n if choice==1:\n for filename in os.listdir(source_folder): # For each file present in Source folder\n file = os.path.splitext(filename)[0] # Splits file name into as tuple as ('filename','.extension')\n img = Image.open(f'{source_folder}/{filename}')\n img.save(f'{destination_folder}/{file}.png','png') #Converts to png format\n print(\"Image converted to PNG!\")\n print()\n \n elif choice==2:\n for filename in os.listdir(source_folder): # For each file present in Source folder\n file = os.path.splitext(filename)[0] # Splits file name into as tuple as ('filename','.extension')\n img = Image.open(f'{source_folder}/{filename}')\n img.save(f'{destination_folder}/{file}.svg','svg') #Converts to svg format\n print(\"Image converted to SVG!\")\n print()\n\n elif choice==3:\n for filename in os.listdir(source_folder): # For each file present in Source folder\n file = os.path.splitext(filename)[0] # Splits file name into as tuple as ('filename','.extension')\n img = Image.open(f'{source_folder}/{filename}')\n img.save(f'{destination_folder}/{file}.gif','gif') #Converts to gif format\n print(\"Image converted to GIF!\")\n print()\n\n else:\n sys.exit()\n \n\n'''\nSample input to run in terminal:\n->Python3 JpgToPngConvertor.py Source_Images Destination_Images\n\nOutput:\nPress 1 -> To convert to PNG\nPress 2 -> To convert to SVG\nPress 3 -> To convert to GIF\nPress 4 -> To Exit\nEnter your Choice: 1\n\nImages converted to PNG!\n'''\n\n" }, { "alpha_fraction": 0.4151785671710968, "alphanum_fraction": 0.4508928656578064, "avg_line_length": 13.933333396911621, "blob_id": "aa5afdcfb3fb47f9603cb59c7be9375cbbddc883", "content_id": "837125ea1f6082b83da5b8b58f1c85c22f2420be", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 224, "license_type": "permissive", "max_line_length": 37, "num_lines": 15, "path": "/Basic Scripts/simpleHardMathProblem.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "#!/usr/bin/python3\n\ndef problem(n):\n print(n, \" \")\n if( n==1 ):\n return 0\n elif( n%2==0 ):\n n = int(n/2)\n else:\n n = 3*n+1\n problem(n)\n\n\nn = int(input(\"Enter any number : \"))\nproblem(n)\n" }, { "alpha_fraction": 0.560669481754303, "alphanum_fraction": 0.6025104522705078, 
"avg_line_length": 18.875, "blob_id": "1b4b3df8e1c7f95891044d685689f36faa5f49ba", "content_id": "a75925d9b9daafa631c136fb88a83b2d997b2ca6", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 478, "license_type": "permissive", "max_line_length": 38, "num_lines": 24, "path": "/Algorithms/stalinSort.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "\n# Python3 implementation to sort\n# the array by using the variation\n# of the Stalin sort\n\n# Function to sort the array\ndef variationStalinsort(arr):\n\tj = 0\n\twhile True:\n\t\tmoved = 0\n\t\tfor i in range(len(arr) - 1 - j):\n\t\t\tif arr[i] > arr[i + 1]:\n\t\t\t\tarr.insert(moved, arr.pop(i + 1))\n\t\t\t\tmoved += 1\n\t\tj += 1\n\t\tif moved == 0:\n\t\t\tbreak\n\treturn arr\n\n# Driver Code\nif __name__ == \"__main__\":\n\tarr = [2, 1, 4, 3, 6, 5, 8, 7, 10, 9]\n\t\n\t# Function Call\n\tprint(variationStalinsort(arr))\n" }, { "alpha_fraction": 0.6214592456817627, "alphanum_fraction": 0.6248927116394043, "avg_line_length": 43.80769348144531, "blob_id": "379cfe9f927b8ebf976de288263f16f46c62c237", "content_id": "07c7f0939a3ae12a7160251818a7e2ebdb19015f", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1165, "license_type": "permissive", "max_line_length": 84, "num_lines": 26, "path": "/Algorithms/CaesarCipher.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "def caesar_cipher(message_to_encrypt: str, key: int, encrypt_direction: str) -> str:\n alphabet = \"abcdefghijklmnopqrstuvwyzABCDEFGHIJKLMNOPQRSTUVWYZ\"\n result = \"\"\n\n for character in message_to_encrypt:\n # returns the position of the chara\"cter in alphabet array\n position = alphabet.find(character)\n if position == -1:\n # character not found\n result += character\n else:\n if encrypt_direction == \"backward\":\n # if backward direction return 1 position in alphabet array\n new_position = position - key\n elif encrypt_direction == \"forward\":\n # if forward direction advance 1 position in alphabet array\n new_position = position + key\n result += alphabet[new_position: new_position+1]\n return result\n\nif __name__ == \"__main__\":\n message_to_encrypt = input(\"insert the message you want to encrypt: \")\n key = int(input(\"insert the key you want to encrypt your text: \"))\n mode = input(\"insert the direction of the cipher: (backward) or (forward) \")\n\n print(caesar_cipher(message_to_encrypt, key, mode))\n" }, { "alpha_fraction": 0.58890700340271, "alphanum_fraction": 0.6247960925102234, "avg_line_length": 21.66666603088379, "blob_id": "c63575143af215aa099460e722f915a4329140a5", "content_id": "2be26093e6dae8a301d878cece9c62e21030a509", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 613, "license_type": "permissive", "max_line_length": 73, "num_lines": 27, "path": "/Algorithms/PancakeSort.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "def pancakeFlip(arr, k):\n\t\"\"\"Returns the array arr reverse sorted using the pancake sort algorithm\n\n\t>>> pancakeFlip([1, 2, 3, 4], 3)\n\t[4, 3, 2, 1]\n\t\"\"\"\n\treturn arr[:k + 1][::-1] + arr[k + 1:]\n\ndef pancake_sort(arr):\n\t\"\"\"Returns the array arr reverse sorted using the pancake sort algorithm\n\n\t>>> import random\n\t>>> unordered = [i for i in range(5)]\n\t>>> random.shuffle(unordered)\n\t>>> pancake_sort(unordered)\n\t[0, 1, 
2, 3, 4]\n\t\"\"\"\n\n\tif len(arr) <= 1: return arr\n\n\tlargest = arr.index(max(arr))\n\n\tarr = pancakeFlip(arr, largest)\n\n\tarr = pancakeFlip(arr, len(arr) - 1)\n\n\treturn pancake_sort(arr[:-1]) + [arr[-1]] \n" }, { "alpha_fraction": 0.5911329984664917, "alphanum_fraction": 0.6305418610572815, "avg_line_length": 17.545454025268555, "blob_id": "1d3a8abba824eb047d874842156d883899cbcb10", "content_id": "5f9a2002220065a85f96a784fd615fef11f40c71", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 203, "license_type": "permissive", "max_line_length": 38, "num_lines": 11, "path": "/Basic Scripts/palindrome.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "num=int(input(\"Enter a number:\"))\ntemp=num\nrev=0\nwhile(num>0):\n dig=num%10\n rev=rev*10+dig\n num=num//10\nif(temp==rev):\n print(\"The number is palindrome!\")\nelse:\n print(\"Not a palindrome!\")" }, { "alpha_fraction": 0.6110392212867737, "alphanum_fraction": 0.6313065886497498, "avg_line_length": 30.557823181152344, "blob_id": "7c1d236c02b5e1c737b406b02dc7c9b835218977", "content_id": "c528d39fbca068799c8b643a0e62f2526c099ac5", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4638, "license_type": "permissive", "max_line_length": 140, "num_lines": 147, "path": "/Games/Ludo/gui/Ludo.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "from tkinter import * #Tkinter is used as the GUI.\nimport random\nroot= Tk()\n\n#root.geometry('1000x1000')\n\n#base= PhotoImage(file= \"ludo board.gif\")\n\n#Label(root, image=base).pack(side=\"left\")\n\ncanvas = Canvas(width = 1000, height = 800, bg = 'yellow')\nroot.resizable(width=False, height=False)\n\ncanvas.pack(expand = YES, fill = BOTH)\n\ngif1 = PhotoImage(file = 'ludo board.gif')\ncanvas.create_image(50, 10, image = gif1, anchor = NW)\n\n\ng3 = canvas.create_oval(50,290,80,320, outline=\"green\", fill=\"green\", tags=\"oval\")\n #40, 380, 90, 430\ng4 = canvas.create_oval(50,390,80,420, outline=\"green\", fill=\"green\", tags=\"oval\")\n\ndrag_data = {\"x\": 0, \"y\": 0, \"item\": None}\ninit_data = {\"x\": 0, \"y\": 0, \"item\": None}\nfinal_coordinate = [0, 0]\n\n\ndef OnTokenButtonPress(event):\n # record the item and its location\n drag_data[\"item\"] = canvas.find_closest(event.x, event.y)[0]\n drag_data[\"x\"] = event.x\n drag_data[\"y\"] = event.y\n\n init_data[\"item\"] = drag_data[\"item\"] # defining new destination\n init_data[\"x\"] = drag_data[\"x\"]\n init_data[\"y\"] = drag_data[\"y\"]\n\n item_below = canvas.find_overlapping(event.x, event.y, event.x, event.y)[0]\n\n\n\n\n# when the button is released\n# kindof a Destructor\ndef OnTokenButtonRelease(event):\n # reset the drag information\n drag_data[\"item\"] = None\n drag_data[\"x\"] = 0\n drag_data[\"y\"] = 0\n\n\ndef OnTokenMotion(event):\n # compute how much this object has moved\n moved_x = event.x - drag_data[\"x\"]\n moved_y = event.y - drag_data[\"y\"]\n # new location of the dragged item\n\n\n # move the object the appropriate amount\n canvas.move(drag_data[\"item\"], moved_x, moved_y)\n # record the new position\n drag_data[\"x\"] = event.x\n drag_data[\"y\"] = event.y\n if drag_data[\"x\"]>=444 and drag_data[\"x\"]<=582 and drag_data[\"y\"]>=330 and drag_data[\"y\"]<462:\n print ('pug gayi')\n\n# put gif image on canvas\n# pic's upper left corner (NW) on the canvas is at x=50 y=10\n#canvas.create_image(50, 10, image = 
gif1, anchor = NW)\n\ncanvas.tag_bind(\"oval\", \"<ButtonPress-1>\", OnTokenButtonPress)\ncanvas.tag_bind(\"oval\", \"<B1-Motion>\", OnTokenMotion)\nclass RollTheDice:\n def __init__(self, parent):\n self.dieParent = parent\n self.dieContainer = Frame(parent).pack()\n\n self.dieLabel = Label(self.dieContainer, text=\"Number of Dice you will be rolling:\")\n self.dieLabel.pack(side=TOP)\n\n self.dieEntry = Entry(self.dieContainer)\n self.dieEntry.pack(side=TOP)\n\n self.sideLabel = Label(self.dieContainer, text=\"Number of Sides per Die:\")\n self.sideLabel.pack(side=TOP)\n\n self.sideEntry = Entry(self.dieContainer)\n self.sideEntry.pack(side=TOP)\n\n\n\n global rolldisp\n rolldisp = StringVar()\n self.rollResult = Label(self.dieContainer, textvariable=rolldisp)\n self.rollResult.pack(side=TOP)\n\n self.diceButton = Button(self.dieContainer)\n self.diceButton.configure(text=\"Roll the Dice!\", background=\"orangered1\")\n self.diceButton.pack(side=LEFT)\n self.diceButton.bind(\"<Button-1>\", self.diceButtonClick)\n self.diceButton.bind(\"<Return>\", self.diceButtonClick)\n\n self.quitButton = Button(self.dieContainer)\n self.quitButton.configure(text=\"Quit\", background=\"blue\")\n self.quitButton.pack(side=RIGHT)\n self.quitButton.bind(\"<Button-1>\", self.quitButtonClick)\n self.quitButton.bind(\"<Return>\", self.quitButtonClick)\n\n def diceButtonClick(self, event):\n die = int(self.dieEntry.get())\n side = int(self.sideEntry.get())\n DieRoll(die, side)\n\n def quitButtonClick(self, event):\n self.dieParent.destroy()\n\ndef DieRoll(dice, sides):\n import random\n rollnumber = 1\n runningtotal = 0\n endresult = \"\"\n while rollnumber <= dice:\n roll = random.randint(1, sides)\n endresult += \"Roll #\"\n endresult += str(rollnumber)\n endresult += \": \"\n endresult += str(roll)\n endresult += \"\\n\"\n runningtotal += roll\n rollnumber += 1\n finalresult = \"Your Roll:\\n\"\n finalresult += endresult\n rolldisp.set(finalresult)\n\ndef leftClick(event): #Main play function is called on every left click.\n x = root.winfo_pointerx() #- root.winfo_rootx() # This formula returns the x,y co-ordinates of the mouse pointer relative to the board.\n y = root.winfo_pointery() # root.winfo_rooty()\n\n print(\"Click at: \",x,y)\n\nroot.bind(\"<Button-1>\", leftClick)\n\nroot = Tk()\nroot.title(\"Die Roller\")\nmyapp = RollTheDice(root)\nroot.mainloop()" }, { "alpha_fraction": 0.7416974306106567, "alphanum_fraction": 0.7539975643157959, "avg_line_length": 35.95454406738281, "blob_id": "8a10284f80f1fd08f1457e4a1549d31d30a735b1", "content_id": "2471378c44731090d20b80b20f280ac302e694d4", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 813, "license_type": "permissive", "max_line_length": 139, "num_lines": 22, "path": "/Automation/QrScanner/README.md", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "# Image Processing\n\nImage Processing is most commonly termed as 'Digital Image Processing' and the domain in which it is frequently used is 'Computer Vision'. \nDon't be confused - we are going to talk about both of these terms and how they connect. 
\nBoth Image Processing algorithms and Computer Vision (CV) algorithms take an image as input; however, in image processing,\nthe output is also an image, whereas in computer vision the output can be some features/information about the image.\n\n## OpenCV\n\n![](https://logodix.com/logo/1989939.png)\n\n## Installation\n\n### Windows\n $ pip install opencv-python\n $ pip install pyzbar\n### MacOS\n $ brew install opencv3 --with-contrib --with-python3\n $ brew install zbar\n### Linux\n $ sudo apt-get install libopencv-dev python-opencv\n $ sudo apt-get install libzbar0\n" }, { "alpha_fraction": 0.5839655995368958, "alphanum_fraction": 0.626707136631012, "avg_line_length": 25.013158798217773, "blob_id": "4126bf8b68132170599da5a4d1dfc5779c11b81a", "content_id": "bbe33b815c9c1cc38500857b494a82d100bf67d0", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3954, "license_type": "permissive", "max_line_length": 101, "num_lines": 152, "path": "/GUI & Bot/Calculator.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "import tkinter as tk\nfrom tkinter import *\nroot = tk.Tk()\nroot.geometry(\"300x230\")\nroot.title(\"Calculator\")\nroot.maxsize(250,230)\nroot.minsize(250,230)\n\n#Entry Widgets to show calculations\ninp = Entry(root,width=16,borderwidth=3,relief=RIDGE)\ninp.grid(pady=10,row=0,sticky=\"w\",padx=15)\n\n\n# <==================== Button Operation code starts here.. ==============>\ndef nine():\n inp.insert(\"end\",\"9\")\n\ndef eight():\n inp.insert(\"end\",\"8\")\n\ndef seven():\n inp.insert(\"end\",\"7\")\n\ndef six():\n inp.insert(\"end\",\"6\")\n\ndef five():\n inp.insert(\"end\",\"5\")\n\ndef four():\n inp.insert(\"end\",\"4\")\n\ndef three():\n inp.insert(\"end\",\"3\")\n\ndef two():\n inp.insert(\"end\",\"2\")\n\ndef one():\n inp.insert(\"end\",\"1\")\n\ndef zero():\n inp.insert(\"end\",\"0\")\n\ndef double_zero():\n inp.insert(\"end\",\"00\")\n\ndef dot():\n inp.insert(\"end\",\".\")\n\ndef plus():\n inp.insert(\"end\",\"+\")\n\ndef minus():\n inp.insert(\"end\",\"-\")\n\ndef mul():\n inp.insert(\"end\",\"*\")\n\ndef divide():\n inp.insert(\"end\",\"/\")\n\ndef modulus():\n inp.insert(\"end\",\"%\")\n\ndef result():\n\n\n if inp.get() == \"\":\n inp.insert(\"end\",\"error\")\n elif inp.get()[0] == \"0\":\n inp.delete(0,\"end\")\n inp.insert(\"end\",\"error\")\n else:\n res = inp.get()\n res = eval(res)\n inp.insert(\"end\",\" = \")\n inp.insert(\"end\",res)\n\ndef clear():\n inp.delete(0,\"end\")\n\n\n# <============ end code ================>\n\n\n\n# <============= Button Design Code starts here.. 
==================>\n\nclear = Button(root,text=\"C\",width=2,command=clear,bg=\"red\",fg=\"white\",relief=RIDGE)\nclear.grid(row=0,sticky=\"w\",padx=125)\n\n\nnine = Button(text=\"9\",width=2,command=nine,borderwidth=3,relief=RIDGE)\nnine.grid(row=1,sticky=\"w\",padx=15)\n\neight = Button(text=\"8\",width=2,command=eight,borderwidth=3,relief=RIDGE)\neight.grid(row=1,sticky=\"w\",padx=45)\n\nseven = Button(root,text=\"7\",width=2,command=seven,borderwidth=3,relief=RIDGE)\nseven.grid(row=1,sticky=\"w\",padx=75)\n\nplus = Button(root,text=\"+\",width=2,command=plus,borderwidth=3,relief=RIDGE)\nplus.grid(row=1,sticky=\"e\",padx=125)\n\n\nsix = Button(text=\"6\",width=2,command=six,borderwidth=3,relief=RIDGE)\nsix.grid(row=2,sticky=\"w\",padx=15,pady=5)\n\nfive = Button(text=\"5\",width=2,command=five,borderwidth=3,relief=RIDGE)\nfive.grid(row=2,sticky=\"w\",padx=45,pady=5)\n\nfour = Button(root,text=\"4\",width=2,command=four,borderwidth=3,relief=RIDGE)\nfour.grid(row=2,sticky=\"w\",padx=75,pady=5)\n\nminus = Button(root,text=\"-\",width=2,command=minus,borderwidth=3,relief=RIDGE)\nminus.grid(row=2,sticky=\"e\",padx=125,pady=5)\n\n\n\nthree = Button(text=\"3\",width=2,command=three,borderwidth=3,relief=RIDGE)\nthree.grid(row=3,sticky=\"w\",padx=15,pady=5)\n\ntwo = Button(text=\"2\",width=2,command=two,borderwidth=3,relief=RIDGE)\ntwo.grid(row=3,sticky=\"w\",padx=45,pady=5)\n\none = Button(root,text=\"1\",width=2,command=one,borderwidth=3,relief=RIDGE)\none.grid(row=3,sticky=\"w\",padx=75,pady=5)\n\nmultiply = Button(root,text=\"*\",width=2,command=mul,borderwidth=3,relief=RIDGE)\nmultiply.grid(row=3,sticky=\"e\",padx=125,pady=5)\n\n\nzero = Button(text=\"0\",width=2,command=zero,borderwidth=3,relief=RIDGE)\nzero.grid(row=4,sticky=\"w\",padx=15,pady=5)\n\ndouble_zero = Button(text=\"00\",width=2,command=double_zero,borderwidth=3,relief=RIDGE)\ndouble_zero.grid(row=4,sticky=\"w\",padx=45,pady=5)\n\ndot = Button(root,text=\".\",width=2,command=dot,borderwidth=3,relief=RIDGE)\ndot.grid(row=4,sticky=\"w\",padx=75,pady=5)\n\ndivide = Button(root,text=\"/\",width=2,command=divide,borderwidth=3,relief=RIDGE)\ndivide.grid(row=4,sticky=\"e\",padx=125,pady=5)\n\nresult = Button(root,text=\"=\",width=10,command=result,bg=\"red\",fg=\"white\",borderwidth=3,relief=RIDGE)\nresult.grid(row=5,sticky=\"w\",padx=15,pady=5)\n\nmodulus = Button(root,text=\"%\",width=2,command=modulus,borderwidth=3,relief=RIDGE)\nmodulus.grid(row=5,sticky=\"e\",padx=125,pady=5)\n\nroot.mainloop()\n" }, { "alpha_fraction": 0.6316450238227844, "alphanum_fraction": 0.6442025899887085, "avg_line_length": 29.512821197509766, "blob_id": "e9ee37425c3e85d149799754a73d99ce72441b9f", "content_id": "5b8c24362d9de4c4a78e5e6b05711cde6c6357a9", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2389, "license_type": "permissive", "max_line_length": 150, "num_lines": 78, "path": "/GUI & Bot/Paint_box.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "from tkinter import *\nfrom tkinter.colorchooser import askcolor\n\nclass Paint(object):\n DEFAULT_PEN_SIZE=5.0\n DEFAULT_COLOR='black'\n\n def __init__ (self):\n self.root=Tk()\n\n self.pen_button=Button(self.root,text='Pen',command=self.use_pen)\n self.pen_button.grid(row=0,column=0)\n\n self.brush_button=Button(self.root,text='Brush',command=self.use_brush)\n self.brush_button.grid(row=0,column=1)\n\n self.color_button=Button(self.root,text='Color',command=self.choose_color)\n 
self.color_button.grid(row=0,column=2)\n\n        self.eraser_button=Button(self.root,text='Eraser',command=self.use_eraser)\n        self.eraser_button.grid(row=0,column=3)\n\n        self.choose_size_button=Scale(self.root,from_=1,to=10,orient=HORIZONTAL)\n        self.choose_size_button.grid(row=0,column=4)\n\n        self.c=Canvas(self.root,bg='white',width=1000, height=1000)\n        self.c.grid(row=1,columnspan=5)\n\n        self.setup()\n        self.root.mainloop()\n\n    def setup(self):\n        self.old_x=None\n        self.old_y=None\n        self.line_width=self.choose_size_button.get()\n        self.color=self.DEFAULT_COLOR\n        self.eraser_on=False\n        self.active_button=self.pen_button\n        self.c.bind('<B1-Motion>',self.paint)\n        self.c.bind('<ButtonRelease-1>',self.reset)\n\n\n    def use_pen(self):\n        self.activate_button(self.pen_button)\n\n    def use_brush(self):\n        self.activate_button(self.brush_button)\n\n    def choose_color(self):\n        self.eraser_on=False\n        self.color=askcolor(color=self.color)[1]\n\n    def use_eraser(self):\n        self.activate_button(self.eraser_button)\n\n\n    def activate_button(self,some_button,eraser_mode=False):\n        self.active_button.config(relief=RAISED) # raise the previously active tool; self.activate_button would be this method itself\n        some_button.config(relief=SUNKEN)\n        self.active_button=some_button\n        self.eraser_on=eraser_mode\n\n\n    def paint(self,event):\n        self.line_width=self.choose_size_button.get()\n        paint_color='white' if self.eraser_on else self.color\n        if self.old_x and self.old_y:\n            self.c.create_line(self.old_x,self.old_y,event.x,event.y,width=self.line_width,fill=paint_color,capstyle=ROUND,smooth=TRUE,splinesteps=36)\n        self.old_x=event.x\n        self.old_y=event.y\n\n    def reset(self,event):\n        self.old_x,self.old_y=None,None\n\n\n\nif __name__=='__main__':\n    Paint()\n    \n" }, { "alpha_fraction": 0.6491002440452576, "alphanum_fraction": 0.6555269956588745, "avg_line_length": 34.40909194946289, "blob_id": "97742e7df921cafc8904c59c646cd900bd73daa9", "content_id": "46b4960b10c41e48490564eb2f03fb57208dbb7a", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 778, "license_type": "permissive", "max_line_length": 98, "num_lines": 22, "path": "/Automation/Spam Bot/src/Modules/RandomWords.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "from Modules.Spammer import *\nfrom Modules.Colours import *\nfrom time import sleep\nimport random\n\ndef randwords(data):\n    cyan(\"\\n-----RANDOM DICTIONARY WORDS SPAM-----\")\n    print(\"This spamming method spams random dictionary words\\n\")\n    words = data.splitlines()\n    try:\n        count = int(input(\"Enter the number of words you want to spam \\n> \"))\n        sleep = float(input(\"Enter time delay(in seconds) between each message \\n> \"))\n    except:\n        red(\"ERROR : Enter Only Numbers\")\n        grey(\"Press enter to exit \")\n        input()\n        sys.exit(0)\n    randwords = random.sample(words,count)\n    print(\"Open Your Social Media Platform and select your text box. 
Wait for at least 15 seconds\")\n    time.sleep(15)\n    for x in randwords:\n        spammer(x,sleep)" }, { "alpha_fraction": 0.4265664219856262, "alphanum_fraction": 0.43809524178504944, "avg_line_length": 32.23728942871094, "blob_id": "f27c6b027747e62c87db5baf849487c18d1c38d5", "content_id": "1a22e7fe901302282f2f826acd964e0bdc57c5f6", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1995, "license_type": "permissive", "max_line_length": 77, "num_lines": 59, "path": "/Data Strucrures/Queue.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "from os import sys\nclass Queue(object):\n    def __init__(self):\n        self.array=[]\n        self.top=0\n        self.rear=0\n\n    def isEmpty(self):\n        return self.array==[]\n\n    def push(self,item):\n        self.array.insert(0,item)\n        self.rear+=1\n\n    def pop(self):\n        self.array.pop()\n        self.rear-=1\n\n    def menu(self):\n        char=0\n        while char<6:\n            print(\"Press 1 -> To add an element to the Queue\")\n            print(\"Press 2 -> To remove an element from the Queue\")\n            print(\"Press 3 -> To view the top and rear element of the Queue\")\n            print(\"Press 4 -> To view all the elements of the Queue\")\n            print(\"Press 5 -> To Exit\")\n            char=int(input(\"Enter your choice: \"))\n            print('\\n')\n            if char==1:\n                val=int(input(\"Enter the element: \"))\n                self.push(val)\n                print(\"Your element has been added.\")\n                print('\\n')\n            elif char==2:\n                if self.isEmpty():\n                    print(\"Queue is underflowed. Please add elements to it.\")\n                    break\n                else:\n                    self.pop()\n                    print(\"Your element has been removed\")\n                    print('\\n')\n            elif char==3:\n                print(\"Top element -> {}\".format(self.array[self.top]))\n                print(\"Rear element -> {}\".format(self.array[self.rear-1]))\n                print('\\n')\n            elif char==4:\n                for i in range(0,len(self.array)):\n                    if i==len(self.array) - 1:\n                        print(\"{} <- Rear Element\".format(self.array[i]))\n                    elif i==0:\n                        print(\"{} <- Top Element\".format(self.array[0]))\n                    else:\n                        print(self.array[i])\n                print('\\n')\n            else:\n                sys.exit()\n\nObject1=Queue()\nObject1.menu()\n    \n    \n" }, { "alpha_fraction": 0.8016877770423889, "alphanum_fraction": 0.8016877770423889, "avg_line_length": 51.66666793823242, "blob_id": "e3306b43af34e9a3bf99b32f8ef5a18aa1a63f99", "content_id": "67a35aef1a9da9e8c0067e4fe305675a5b7d305a", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 474, "license_type": "permissive", "max_line_length": 100, "num_lines": 9, "path": "/Automation/JpgToAnyFormatConvertor/README.md", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "# JPG to All Picture Format Convertor\n\n* This script converts the list of JPG images available in the source folder to PNG, SVG, and GIF images,\nwhich are stored in the destination folder with the help of the Pillow module.\n\n* Pillow is a free and open-source additional library for the Python programming language \nthat adds support for opening, manipulating, and saving many different image file formats.\n\n* The Pillow module can be installed using the following command: \"pip install pillow\"\n" }, { "alpha_fraction": 0.5175257921218872, "alphanum_fraction": 0.5484536290168762, "avg_line_length": 33.71428680419922, "blob_id": "71a0eccf88043a87c9087001af336ca5e4d2ed80", "content_id": "467c8699f972aad97728bb63c41018a1076ec10c", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 485, "license_type": "permissive", "max_line_length": 94, "num_lines": 14, 
"path": "/Basic Scripts/number_to_hex.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "hex_numbers = [\"0\", \"1\", \"2\", \"3\", \"4\", \"5\", \"6\", \"7\", \"8\", \"9\", \"A\", \"B\", \"C\", \"D\", \"E\", \"F\"]\ndef number_to_hex(number_to_convert: int):\n result = number_to_convert\n hexadecimal = \"\"\n while result != 0:\n remainder = hex_numbers[result % 16]\n hexadecimal = str(remainder) + hexadecimal\n result = int(result / 16)\n print(hexadecimal)\n\nif __name__ == \"__main__\":\n num= int(input(\"Enter number in decimal:\\n\"))\n number_to_hex(num)\n #added user input" }, { "alpha_fraction": 0.6473214030265808, "alphanum_fraction": 0.6607142686843872, "avg_line_length": 23.88888931274414, "blob_id": "d8e6a9caf8a4b9caf28e170fcfb4b6765cfbb93a", "content_id": "2e44fc845c1066b16f034304e1b0638a55bdd751", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 448, "license_type": "permissive", "max_line_length": 66, "num_lines": 18, "path": "/Data Strucrures/Recursion.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "# Program to print the fibonacci series upto n_terms\n \n# Recursive function\ndef recursive_fibonacci(n):\n if n <= 1:\n return n\n else:\n return(recursive_fibonacci(n-1) + recursive_fibonacci(n-2))\n \nn_terms = 10\n \n# check if the number of terms is valid\nif n_terms <= 0:\n print(\"Invalid input ! Please input a positive value\")\nelse:\n print(\"Fibonacci series:\")\n for i in range(n_terms):\n print(recursive_fibonacci(i))\n" }, { "alpha_fraction": 0.6833333373069763, "alphanum_fraction": 0.6904761791229248, "avg_line_length": 34, "blob_id": "88b10ad45723b5cd5cc2a253fa0e859b30e79b4a", "content_id": "aa6e21338544571bc062321a5113690649614a90", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 420, "license_type": "permissive", "max_line_length": 75, "num_lines": 12, "path": "/Algorithms/kadaneAlgorithm.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "# Kadane's algorithm takes a non-empty array of integers and returns the \n# maximum sum that can be obtained by summing up the subarrays of the main \n# input array\n\ndef kadanesAlgorithm(array):\n\tmaxEnding = array[0]\n currMaximum = array[0]\n for i in range(1, len(array)):\n num = array[i]\n maxEnding = max(num, maxEnding + num)\n currMaximum = max(currMaximum, maxEnding)\n return currMaximum\n" }, { "alpha_fraction": 0.7894737124443054, "alphanum_fraction": 0.7982456088066101, "avg_line_length": 13.375, "blob_id": "cb67da91460d85e78c2f4b66c12f9bd08ec088de", "content_id": "8a1945670d520b7622324b65807863e5fe38b6a5", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 114, "license_type": "permissive", "max_line_length": 32, "num_lines": 8, "path": "/Games/Sudoku/setup.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "from distutils.core import setup\nimport py2exe\nimport solver\nimport pygame\nimport time\n\n\nsetup(console=['gui.py'])" }, { "alpha_fraction": 0.6004746556282043, "alphanum_fraction": 0.628164529800415, "avg_line_length": 24.795917510986328, "blob_id": "f43f312d2931e6cfa128a1e37c9d6f9d8f9a6360", "content_id": "dd9fabd88589d2a632560082ff7e1e0ddb6560af", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1264, 
"license_type": "permissive", "max_line_length": 80, "num_lines": 49, "path": "/Machine Learning/House price prediction/server/util.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "import json\nimport pickle\nimport numpy as np\n\n__locations=None\n__data_columns=None\n__model=None\n\ndef get_location_names():\n load_saved_artifacts()\n return __locations\n\ndef load_saved_artifacts():\n print(\"Loading saved artifacts.....start\")\n global __data_columns\n global __locations\n\n with open(\"./artifacts/columns.json\",'r') as f:\n __data_columns=json.load(f)['data_columns']\n __locations=__data_columns[3:]\n global __model\n with open(\"./artifacts/banglore_home_price_predict_model.pickel\",'rb') as f:\n __model=pickle.load(f)\n print(\"Loading saved artifacts......done\")\n\n\ndef get_estimated_price(location,sqft,bhk,bath):\n try:\n loc_index=__data_columns.index(location.lower())\n except:\n loc_index=-1\n x=np.zeros(len(__data_columns))\n x[0]=sqft\n x[1]=bath\n x[2]=bhk\n if loc_index>=0:\n x[loc_index]=1\n return round(__model.predict([x])[0],2)\n\n\n\n''' if __name__==\"__main__\":\n load_saved_artifacts()\n print(get_location_names())\n print(get_estimated_price('1st Phase JP Nagar',1000,3,3))\n print(get_estimated_price('1st Phase JP Nagar', 1000, 2,2))\n print(get_estimated_price('Jhotwara', 1000, 3, 3))\n print(get_estimated_price('vashali', 1000, 3, 3)\n'''\n" }, { "alpha_fraction": 0.48426151275634766, "alphanum_fraction": 0.6174334287643433, "avg_line_length": 20.789474487304688, "blob_id": "7262451224026339dd1c6438b880bb2a4efdf1bc", "content_id": "749fbff3e4a32265b6309109adec643ba46c78bd", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 413, "license_type": "permissive", "max_line_length": 44, "num_lines": 19, "path": "/Automation/Spam Bot/src/Modules/Colours.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "black = \"\\033[1;0;40m\"\n\ndef red(message):\n print(\"\\033[1;31;40m\"+ message + black)\n\ndef grey(message):\n print(\"\\033[1;30;40m\" + message + black)\n\ndef green(message):\n print(\"\\033[1;32;40m\" + message + black)\n\ndef yellow(message):\n print(\"\\033[1;33;40m\" + message + black)\n\ndef blue(message):\n print(\"\\033[1;34;40m\" + message + black)\n\ndef cyan(message):\n print(\"\\033[1;36;40m\" + message + black)" }, { "alpha_fraction": 0.5476166605949402, "alphanum_fraction": 0.5647327899932861, "avg_line_length": 40.40495681762695, "blob_id": "49d9fef3dc0ec1109114b1c1f0438fe909653b07", "content_id": "ca64197f17aa761ed9e126a1f091cfddd62a6b8c", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10049, "license_type": "permissive", "max_line_length": 128, "num_lines": 242, "path": "/Machine Learning/House price prediction/model/price prediction.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "import pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport matplotlib\nmatplotlib.rcParams[\"figure.figsize\"]=(20,10)\n\n\ndf=pd.read_csv(r\"D:\\ML\\Data sets\\project 1 (house price)\\house data.csv\") #reading data\n\n #DATA PREPROCESSING\n\ndf0=df.drop(['area_type','availability','society','balcony'],axis='columns') # droping colunms\n#print(df0.isnull().sum()) #checking for null values before preprocessing data\nfinal_df=df0.dropna()\n#print(final_df.isnull().sum()) #checking for null values after preprocessing 
data\n\n#print(final_df['size'].unique()) #checking for the unique values in size column\n\nfinal_df['BHK']=final_df['size'].apply(lambda x:int(x.split(' ')[0])) #new clm having int values for bedrooms\ndf1=final_df.drop(['size'],axis='columns') #droping size column\n#print(df1['total_sqft'].unique()) #checking for non uniform data\n\ndef isfloat(x): #function for checking for non uniform data\n try:\n float(x)\n except:\n return False\n return True\n#print(df1[~df1['total_sqft'].apply(isfloat)].head(10)) #checking for non uniform data\n\n\ndef sqft_to_num(x): #function for changing diff values in float\n tokens=x.split(\"-\")\n if(len(tokens)==2):\n return(float(tokens[0])+float(tokens[1]))/2\n try:\n return float(x)\n except:\n return None\n\ndf2=df1.copy()\ndf2[\"total_sqft\"]=df2[\"total_sqft\"].apply(sqft_to_num) #changing diff values in float values\n#print(df2['total_sqft'].unique())\n\n #FEATURE ENGINEERING\n\ndf3=df2.copy()\ndf3['price_per_sqft']=df3['price']*100000/df3['total_sqft'] #creating a new feature\n\n#print(len(df3['location'].unique()))\n\ndf3['location']=df3['location'].apply(lambda x: x.strip()) #removing extra spaces from location values\n\n#removing problem of having so many dummy columns since the unique locations value is 1304 by categorising them in \"other\"\n\nlocation_stats=df3.groupby('location')[\"location\"].agg(\"count\").sort_values(ascending=False)\n#print(location_stats)\n#print(len(location_stats[location_stats<=10]))\n\nlocation_stats_less_than_10=location_stats[location_stats<=10]\n#print(location_stats_less_than_10)\n\n#print(len(df3['location'].unique()))\n\ndf3[\"location\"]=df3[\"location\"].apply(lambda x: \"other\" if x in location_stats_less_than_10 else x)\n#print(len(df3['location'].unique()))\n\n #FINDING AND REMOVING OUTLIERS\n\n#print(df3[df3.total_sqft/df3.BHK<300].head())\ndf4=df3[~(df3.total_sqft/df3.BHK<300)]\n#print(df4.shape)\n\n#print(df4.price_per_sqft.describe())\n\ndef remove_pps_outliers(df): #removing price_per_sqft column outliers\n df_out=pd.DataFrame()\n for key,subdf in df.groupby(\"location\"):\n m=np.mean(subdf.price_per_sqft)\n st=np.std(subdf.price_per_sqft)\n reduced_df=subdf[(subdf.price_per_sqft>(m-st)) & (subdf.price_per_sqft<=(m+st))]\n df_out=pd.concat([df_out,reduced_df],ignore_index=True)\n return df_out\n\ndf5=remove_pps_outliers(df4)\n#print(df5.shape)\n\ndef plot_scatter_chart(df,location): #plottinig chart btw square feet area and price\n bhk2=df[(df.location==location) & (df.BHK==2)]\n bhk3=df[(df.location==location) & (df.BHK==3)]\n matplotlib.rcParams[\"figure.figsize\"]=(15,10)\n plt.scatter(bhk2.total_sqft,bhk2.price,color=\"blue\",label=\"2 BHK\",s=50)\n plt.scatter(bhk3.total_sqft,bhk3.price,marker=\"+\",color=\"green\",label=\"3 BHK\",s=50)\n plt.xlabel(\"Total square feet area\")\n plt.ylabel(\"price\")\n plt.title(location)\n plt.legend()\n plt.show()\n\n#plot_scatter_chart(df5,\"Rajaji Nagar\")\n\ndef remove_bhk_outliers(df): #removing BHK columns outliers\n exclude_indices=np.array([])\n for location,location_df in df.groupby(\"location\"):\n bhk_stats={}\n for bhk,bhk_df in location_df.groupby(\"BHK\"):\n bhk_stats[bhk]={\n \"mean\":np.mean(bhk_df.price_per_sqft),\n \"std\":np.std(bhk_df.price_per_sqft),\n \"count\":bhk_df.shape[0]\n }\n for bhk,bhk_df in location_df.groupby(\"BHK\"):\n stats=bhk_stats.get(bhk-1)\n if stats and stats[\"count\"]>5:\n exclude_indices=np.append(exclude_indices,bhk_df[bhk_df.price_per_sqft<(stats[\"mean\"])].index.values)\n return 
df.drop(exclude_indices,axis=\"index\")\n\ndf6=remove_bhk_outliers(df5)\n#print(df6.shape)\n\n#plot_scatter_chart(df6,\"Rajaji Nagar\")\n\nimport matplotlib #plotting chart btw price_per_sqft and count\nmatplotlib.rcParams[\"figure.figsize\"]=(20,10)\nplt.hist(df6.price_per_sqft*0.8)\nplt.xlabel(\"price per square feet\")\nplt.ylabel(\"count\")\n#plt.show()\n\n#print(df6.bath.unique())\n#print(df6[df6.bath>10])\n\nplt.hist(df6.bath,rwidth=0.8) #plotting chart btw Number of bathrooms and count\nplt.xlabel(\"Number of bathrooms\")\nplt.ylabel(\"count\")\n\n#plt.show()\n\n#print(df6[df6.bath>df6.BHK+2])\n\ndf7=df6[df6.bath<df6.BHK+2]\n#print(df7.shape)\n\n #MODEL BUILDING\n\ndf8=df7.drop([\"price_per_sqft\"],axis=\"columns\")\n\ndummies=pd.get_dummies(df8.location) #creating dummy columns for categoial column locations\n#print(dummies.head(3))\n\ndf9=pd.concat([df8,dummies.drop([\"other\"],axis=\"columns\")],axis=\"columns\") #making complete numerical data frame\ndf10=df9.drop([\"location\"],axis=\"columns\")\n\nx=df10.drop([\"price\"],axis=\"columns\") # creating dataframe of independent variables\ny=df10.price # creating dataframe of dependent variable\n#print(x.head(3))\n#print(y.head(3))\n\nfrom sklearn.model_selection import train_test_split #splitting data into test data and training data\nx_train,x_test,y_train,y_test=train_test_split(x,y,test_size=0.2,random_state=10)\n\nfrom sklearn.model_selection import GridSearchCV #finding best model,score,parameters for data using GridSearchCV\nfrom sklearn.linear_model import Lasso # Lasso model for evaluation\nfrom sklearn.tree import DecisionTreeRegressor #Dession Tree model for Evaluation\nfrom sklearn.linear_model import LinearRegression #LinearRegression model for evaluation\nfrom sklearn.model_selection import ShuffleSplit #for shuffling the data\nfrom sklearn.model_selection import cross_val_score #K_fold model for finding diff score of model at different data\n\n\n\ndef find_best_model_using_gridsearchcv(x,y): #Applying GridSearchCV method on different models\n algos={\n \"linear_regression\": {\n \"model\":LinearRegression(),\n \"params\": {\n \"normalize\":[True,False]\n }\n },\n\n \"Lasso\": {\n \"model\":Lasso(),\n \"params\": {\n \"alpha\":[1,2],\n \"selection\":[\"random\",\"cyclic\"]\n }\n },\n\n \"decision tree\" : {\n \"model\":DecisionTreeRegressor(),\n \"params\": {\n \"criterion\":[\"mse\",\"friedman_mse\"],\n \"splitter\":[\"best\",\"random\"]\n }\n }\n\n }\n\n scores=[]\n cv=ShuffleSplit(n_splits=15,test_size=0.2,random_state=0)\n for algo_name,config in algos.items():\n gs=GridSearchCV(config[\"model\"],config[\"params\"],cv=cv,return_train_score=False)\n gs.fit(x,y)\n scores.append({\n \"model\" : algo_name,\n \"best_score\" : gs.best_score_,\n \"best_params\" : gs.best_params_\n })\n return pd.DataFrame(scores,columns=[\"model\",\"best_score\",\"best_params\"])\n\n#print(find_best_model_using_gridsearchcv(x,y)) #getting the best model,score and parameters\n#Linear Regression model has most score among all tested models so we create LinearRegression model\nmodel=LinearRegression() #creating a linear regression model\nmodel.fit(x_train,y_train)\n#print(model.score(x_test,y_test))\n\ncv=ShuffleSplit(n_splits=15,test_size=0.2,random_state=0) #By k_fold validation checking score of model at diff data\n#print(cross_val_score(LinearRegression(),x,y,cv=cv))\n\ndef predict_price(location,sqft,bath,BHK): #function for price prediction\n loc_index=np.where(x.columns==location)[0][0]\n X=np.zeros(len(x.columns))\n X[0]=sqft\n 
X[1]=bath\n    X[2]=BHK\n    if loc_index>=0:\n        X[loc_index]=1\n    return model.predict([X])[0]\n\nprint(round(predict_price(\"Indira Nagar\",1000,3,3),2)) # predicting price; round to 2 decimals (the 2 was previously passed to print, not round)\n\n          # TRANSFERRING IN PICKEL FILE AND JSON FILE\n\nimport pickle    #transferring model in pickel file\nwith open(\"banglore_home_price_predict_model.pickel\",\"wb\") as f:\n    pickle.dump(model,f)\n\nimport json    #transferring columns info in json file\ncolumns={\n    \"data_columns\":[col.lower() for col in x.columns]\n}\nwith open(\"columns.json\",\"w\") as f:\n    f.write(json.dumps(columns))\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.7317731976509094, "alphanum_fraction": 0.7353735566139221, "avg_line_length": 34.83871078491211, "blob_id": "505e59e8d027672010af43a36adcd41cb00c92c1", "content_id": "d6ebf9fa2c14576a487f2163ea91568cfb8127be", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1111, "license_type": "permissive", "max_line_length": 111, "num_lines": 31, "path": "/Automation/Spam Bot/README.md", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "# Spam-Bot\n<p align=\"center\">\n  <img src=\"assets/images/Logo.PNG\" alt=\"Logo\" border=\"0\">\n  <br>A simple and easy to use Spam Bot\n</p>\n\n---\n\n## This application allows you to spam in variations, like : \n- Spamming a fixed static message.\n- Spamming random words from a dictionary.\n- Sending sequences of numbers.\n- Randomly spamming large numbers.\n- Breaking a sentence into its component words and spamming them as separate messages.\n- Good old Rage spam, but this time without breaking your keyboard.\n\n## Usage\n\n<p align=\"center\">\n  <img src=\"assets/images/SS.PNG\" alt=\"Logo\" border=\"0\">\n</p>\n\nRun the `Spam_botz.py` file present in the `src` folder.\n\n**NOTE :** It is best to open your messaging window before launching `Spam_botz.py`.\n\n**NOTE :** Make sure your cursor is pointing to the correct text box after you set your preferences.\nYou have only **15 Seconds** before the automation process begins.\n\nSometimes you might get banned from the server for spamming too many messages in a short period. To avoid this,\nthis application has a custom time delay feature. 
Make sure you enter the appropriate value.\n" }, { "alpha_fraction": 0.735023021697998, "alphanum_fraction": 0.774193525314331, "avg_line_length": 23.11111068725586, "blob_id": "0258814eb76566c283627055c0778a05735d5637", "content_id": "df6197bef0d288858d4fa47458ad349881fb5809", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 434, "license_type": "permissive", "max_line_length": 74, "num_lines": 18, "path": "/GUI & Bot/ScreenshotTaker.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "import pyautogui\nimport tkinter as tk\nfrom tkinter.filedialog import *\n\nroot=tk.Tk()\n\ncanvas1=tk.Canvas(root,width=300,height=300)\ncanvas1.pack()\n\ndef takeScreenshot():\n myScreenshot=pyautogui.screenshot()\n save_path=asksaveasfilename()\n myScreenshot.save(save_path+\"_screenshot.png\")\n\nmyButton=tk.Button(text=\"Take Screenshot\", command=takeScreenshot,font=10)\ncanvas1.create_window(150,150,window=myButton)\n\nroot.mainloop()\n" }, { "alpha_fraction": 0.5140056014060974, "alphanum_fraction": 0.5492997169494629, "avg_line_length": 20.768293380737305, "blob_id": "b36b18c72e087db1e1af7d8f1cce2578b95126ed", "content_id": "5bfa1e2ffd74f31923810edbf6659ffa50fb6b48", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3570, "license_type": "permissive", "max_line_length": 85, "num_lines": 164, "path": "/Games/SoloPong/solopong.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "\"\"\"\nSingle Player Pong Game\nBy @someshnarwade\n\"\"\"\n\n# TODO Add Blocks\n# TODO Add Block Collision\n# TODO Randomize block colors\n# TODO Add ScoreBoard\n# TODO Add player lives\n# TODO Add Levels\n\nimport os\nimport turtle\nfrom random import choice\n\nwn = turtle.Screen()\nwn.title(\"Solo Pong by Somesh Narwade\")\nwn.bgcolor(\"black\")\nwn.setup(width=800, height=600)\nwn.tracer(0)\n\n# paddle\npaddle = turtle.Turtle()\npaddle.speed(0)\npaddle.shape(\"square\") # 20 x 20\npaddle.color(\"white\")\npaddle.shapesize(stretch_wid=0.5, stretch_len=5) # 10 x 100\npaddle.penup()\npaddle.goto(0, -280)\n\n# Ball\nball = turtle.Turtle()\nball.speed(0)\nball.shape(\"circle\")\nball.color(\"white\")\nball.shapesize(stretch_wid=0.5, stretch_len=0.5)\nball.penup()\nball.goto(0, 0)\nball.dx = 1\nball.dy = 1\n\ncolors = [\"red\", \"blue\", \"green\", \"yellow\", \"purple\", \"violet\"]\n# Bricks\nbricks = []\nxcor = -390\ny = 0\nfor y in range(0, 100, 20):\n for i in range(40):\n brick = turtle.Turtle()\n brick.speed(0)\n brick.shape(\"square\") # 20 x 20\n brick.color(choice(colors))\n brick.penup()\n brick.goto(xcor + i * 20, 100 + y)\n bricks.append(brick)\n\nscore = 0\nlife = 3\n# Pen\npen = turtle.Turtle()\npen.speed(0)\npen.color(\"white\")\npen.penup()\npen.hideturtle()\npen.goto(0, 260)\nlives = \"$ \" * life\npen.write(\n f\"Score: {score} Lives = {lives}\", align=\"center\", font=(\"Courier\", 24, \"normal\")\n)\n\n\n# Functions\ndef paddle_left():\n \"\"\"paddle movement\"\"\"\n x = paddle.xcor()\n x -= 20\n paddle.setx(x)\n\n\ndef paddle_right():\n \"\"\"paddle movement\"\"\"\n x = paddle.xcor()\n x += 20\n paddle.setx(x)\n\n\n# Keyboard Bindings\nwn.listen()\nwn.onkeypress(paddle_left, \"Left\")\nwn.onkeypress(paddle_right, \"Right\")\n\n\n# Main Game Loop\nloop = True\nwhile loop:\n wn.update()\n\n # Move the ball\n ball.setx(ball.xcor() + ball.dx)\n ball.sety(ball.ycor() + ball.dy)\n\n # Exit Conditions\n if not life:\n loop = 
False\n print(\"Game Over!\")\n\n if len(bricks) == 0:\n print(\"You Win!\")\n loop = False\n\n # Border Checking\n if ball.ycor() < -290: # bottom collision\n ball.goto(0, 0)\n ball.dy *= -1\n os.system(\"aplay --quiet sounds/fail.wav&\")\n life -= 1\n lives = \"$ \" * life\n pen.clear()\n pen.write(\n f\"Score: {score} Lives: {lives}\",\n align=\"center\",\n font=(\"Courier\", 24, \"normal\"),\n )\n\n if ball.ycor() > 290:\n ball.sety(290)\n ball.dy *= -1\n os.system(\"aplay --quiet sounds/bounce.wav&\")\n\n if ball.xcor() > 390:\n ball.setx(390)\n ball.dx *= -1\n os.system(\"aplay --quiet sounds/bounce.wav&\")\n\n if ball.xcor() < -390:\n ball.setx(-390)\n ball.dx *= -1\n os.system(\"aplay --quiet sounds/bounce.wav&\")\n\n # Collision\n if -280 < ball.ycor() < -270 and (paddle.xcor() - 50) < ball.xcor() < (\n paddle.xcor() + 50\n ):\n ball.sety(-270)\n ball.dy *= -1\n os.system(\"aplay --quiet sounds/bounce.wav&\")\n\n for brick in bricks:\n if (brick.ycor() - 10) <= ball.ycor() <= (brick.ycor() + 10) and (\n brick.xcor() - 10\n ) <= ball.xcor() <= (brick.xcor() + 10):\n os.system(\"aplay --quiet sounds/bounce.wav&\")\n brick.hideturtle()\n ball.dx *= -1\n ball.dy *= -1\n bricks.remove(brick)\n score += 1\n pen.clear()\n pen.write(\n f\"Score: {score} Lives: {lives}\",\n align=\"center\",\n font=(\"Courier\", 24, \"normal\"),\n )\n" }, { "alpha_fraction": 0.5626007318496704, "alphanum_fraction": 0.5910800695419312, "avg_line_length": 50.70833206176758, "blob_id": "a988ba5a9092d323e6325c48625ab23fcdeb8bd5", "content_id": "7a18294cf79994dec6b0b792e39d4a61c579134b", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3722, "license_type": "permissive", "max_line_length": 472, "num_lines": 72, "path": "/Automation/Yts Discord Bot/yts.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "import requests, discord\nfrom discord.ext import commands\n\nclient = commands.Bot(command_prefix='$')\n\nasync def machine(search_mov):\n raw_query = requests.get(f'https://yts.mx/api/v2/list_movies.json?query_term={search_mov}').json()\n finalResult = \"\"\n setStatus = \"\"\n # print(f\"{raw_query['data']['movie_count']} Results Found!!!\")\n finalResult += f\"**{raw_query['data']['movie_count']} Result(s) Found!!!**\\n\\n\"\n\n if raw_query['data']['movie_count']!=0:\n setStatus += raw_query['data']['movies'][0]['title_english']\n for movie in raw_query['data']['movies']:\n # print(f\"Name: {movie['title_long']}\")\n finalResult += f\"**Name:** {movie['title_long']}\\n\"\n # print('Genres: ', end='')\n finalResult += '**Genres:** '\n for genre in movie['genres']:\n # print(f'{genre}\\t')\n finalResult += f'{genre}\\t'\n # print(f\"\\n\\nSummary: {movie['summary']}\")\n finalResult += f\"\\n**Summary:** {movie['summary']}\\n\"\n for torrent in movie['torrents']:\n # print(f\"{torrent['quality']} {torrent['type']} {torrent['size']}\")\n finalResult += f\"**Quality | Size:** {torrent['quality']} {torrent['type']} | {torrent['size']}\\n\"\n # print(f\"Magnet URL: magnet:?xt=urn:btih:{torrent['hash']}&dn={movie['title_long']} {torrent['quality']} {torrent['type']}&tr=udp://open.demonii.com:1337/announce&tr=udp://tracker.openbittorrent.com:80&tr=udp://tracker.coppersurfer.tk:6969&tr=udp://glotorrents.pw:6969/announce&tr=udp://tracker.opentrackr.org:1337/announce&tr=udp://torrent.gresille.org:80/announce&tr=udp://p4p.arenabg.com:1337&tr=udp://tracker.leechers-paradise.org:6969\")\n finalResult += f\"**Magnet URL:** 
magnet:?xt=urn:btih:{torrent['hash']}&dn={movie['title_long']} {torrent['quality']} {torrent['type']}&tr=udp://open.demonii.com:1337/announce&tr=udp://tracker.openbittorrent.com:80&tr=udp://tracker.coppersurfer.tk:6969&tr=udp://glotorrents.pw:6969/announce&tr=udp://tracker.opentrackr.org:1337/announce&tr=udp://torrent.gresille.org:80/announce&tr=udp://p4p.arenabg.com:1337&tr=udp://tracker.leechers-paradise.org:6969\\n\\n\"\n        finalResult += '-'*100 + '\\n\\n'\n    return (finalResult, setStatus)\n\[email protected]\nasync def on_ready():\n    print(\"Bot Ready!\")\n\[email protected]()\nasync def search(ctx, mov1=\"\", mov2=\"\", mov3=\"\", mov4=\"\", mov5=\"\", mov6=\"\", mov7=\"\", mov8=\"\", mov9=\"\", mov10=\"\"):\n    search_mov = mov1 + \" \" + mov2 + \" \" + mov3 + \" \" + mov4 + \" \" + mov5 + \" \" + mov6 + \" \" + mov7 + \" \" + mov8 + \" \" + mov9 + \" \" + mov10\n    if mov1 == \"\":\n        await ctx.send(f\"Please Enter **Movie** Name, like: `/search The Vault`\")\n    else:\n        (finalResult, setStatus) = await machine(search_mov)\n        await watching(ctx, setStatus)\n        if len(finalResult)>2000:\n            chunk=0\n            while True:\n                try:\n                    await ctx.send(f\"{finalResult[chunk:2000+chunk]}\")\n                    chunk += 2000\n                except discord.errors.HTTPException:\n                    break  # an empty slice means everything was sent; `pass` here looped forever\n                except:\n                    await ctx.send(f'{finalResult[chunk:]}')\n                    break\n        else:\n            await ctx.send(f\"{finalResult}\")\n\[email protected]()\nasync def ping(ctx):\n    latency = str(int(client.latency * 1000))\n    await ctx.send(f'Pong :)\\t{latency}ms')\n\[email protected]()\nasync def watching(ctx, setStatus):\n    await client.change_presence(activity=discord.Activity(type=discord.ActivityType.watching, name=setStatus.upper()))\n\[email protected]()\nasync def clear(ctx, amount=5):\n    await ctx.channel.purge(limit=amount+1)\n\nclient.run(\"Your Token Here\")" }, { "alpha_fraction": 0.6409313678741455, "alphanum_fraction": 0.6495097875595093, "avg_line_length": 36.1363639831543, "blob_id": "1b66c371493e0a11fc146dbb1a0d18237489e289", "content_id": "594aa93144444e489a9625775897dc5c371b58e9", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 816, "license_type": "permissive", "max_line_length": 105, "num_lines": 22, "path": "/Automation/Spam Bot/src/Modules/SentenceBreaker.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "from Modules.Spammer import *\nfrom Modules.Colours import *\nfrom time import sleep\n\ndef sentencebreaker():\n    cyan(\"\\n-----SENTENCE BREAKER SPAM-----\")\n    print(\"Sentence breaker is a type of spam that breaks a given sentence into its components (words),\", \n          \"\\nIt then sends them separately one by one\\n\")\n    message = input(\"Enter the String you want to spam \\n> \")\n    try:\n        sleep = float(input(\"Enter time delay(in seconds) between each message \\n> \"))\n    except:\n        red(\"ERROR : Enter Only Numbers\")\n        grey(\"Press enter to exit \")\n        input()\n        sys.exit(0)\n    print(\"Open Your Social Media Platform and select your text box. 
Wait for at least 15 seconds\")\n    words = message.split()\n    time.sleep(15)\n    for unit in words:\n        time.sleep(0.1)\n        spammer(unit,sleep)" }, { "alpha_fraction": 0.49150383472442627, "alphanum_fraction": 0.5012744069099426, "avg_line_length": 31.70833396911621, "blob_id": "008ec65d1a8b1abf52c0d54677dd6c1eea5d8dcd", "content_id": "92d142ec5db464b5752f57d44e181ba9511dc2c7", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2354, "license_type": "permissive", "max_line_length": 87, "num_lines": 72, "path": "/Automation/Spam Bot/src/Spam_botz.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "from Modules.Numbers import *\nfrom Modules.RandNumbers import *\nfrom Modules.Static import *\nfrom Modules.RandomWords import *\nfrom Modules.RageSpam import * \nfrom Modules.SentenceBreaker import *\nfrom Modules.Colours import *\nfrom time import sleep\nfrom os import system\n\nif __name__ == \"__main__\":\n    while(True):\n        os.system('cls')\n        logo = open(\"../assets/logo.txt\",\"r\")\n        output = \"\".join(logo.readlines())\n        grey(output)\n        green(\"\\n\"+\"-\"*20)\n        data = open(\"../assets/version.txt\" , \"r\").read()\n        cyan(\"Spam-Botz | \" + data)\n        time.sleep(1)\n        print(\"\"\"How would you like to spam ? \n        1) Static Message Spam\n        2) Spam Random Words From A Dictionary\n        3) Spam A Series of Sequential Numbers\n        4) Spam Random Large Numbers\n        5) Sentence breaker(into words) Spam\n        6) Rage Spam\n        7) EXIT\"\"\")\n        choice = 0\n        while(choice<1 or choice>7):\n            try:\n                choice = int(input(\"> \"))\n                if(choice<1 or choice>7):\n                    raise ValueError\n            except ValueError:\n                red(\"ERROR : INVALID NUMBER\")\n        if(choice==1):\n            static()\n        if(choice==2):\n            x = open(\"../assets/dictionary.txt\",\"r\")\n            data = x.read()\n            randwords(data)\n            x.close()\n        if(choice==3):\n            numbers()\n        if(choice==4):\n            randomnum()\n        if(choice==5):\n            sentencebreaker()\n        if(choice==6):\n            rage()\n        if(choice==7):\n            green(\"-----x Thank You For Using Spam-Botz x-----\")\n            grey(\"Press enter to exit \")\n            input()\n            break\n        green(\"\\nSuccessfully Completed Spamming\")\n        ans = input(\"Do you want to continue your adventure on spamming? 
(Yes/No)\\n> \")\n if(ans==\"Yes\" or ans==\"yes\" or ans==\"y\" or ans==\"Y\"):\n green(\"\\nJourney Continues...\\n\")\n time.sleep(1)\n os.system('cls')\n elif(ans==\"no\" or ans==\"No\" or ans==\"N\" or ans==\"n\"):\n green(\"-----x Thank You For Using Spam-Botz x-----\")\n grey(\"Press enter to exit \")\n input()\n break\n else:\n red(\"ERROR : INVALID INPUT\")\n grey(\"Press enter to exit \")\n input()\n break" }, { "alpha_fraction": 0.6568132638931274, "alphanum_fraction": 0.6633020639419556, "avg_line_length": 27.32653045654297, "blob_id": "b084cd357f6a614c700d962889cd1a077fbb720e", "content_id": "c22d8c1d81cc72749803ff55860d47299c9f3431", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1387, "license_type": "permissive", "max_line_length": 61, "num_lines": 49, "path": "/Automation/LinksExtractor/LinksExtractor.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "# Importing required libraries\nfrom urllib.request import Request, urlopen\nfrom bs4 import BeautifulSoup\nfrom xlwt import Workbook \nfrom datetime import datetime\n\n#Tacking URL input from user\nurl=input(\"Enter site to get links\\n\")\nlinks=[]\nwhile(len(url)==0):\n url=input(\"Enter site to get links\\n\")\ntry:\n # Sending request to server using Urllib\n req = Request(url, headers={'User-Agent': 'Mozilla/5.0'})\n html_data = urlopen(req).read()\n\n #Beautyfying all data to html form \n soup=BeautifulSoup(html_data,'html.parser')\n\n #Retriving all anchor tags in html data\n tags=soup('a')\n\n #Adding all href attribute values to list\n for tag in tags:\n if tag.has_attr('href'):\n links.append(tag['href'])\nexcept:\n #Check if any errors\n print(\"Please check the URL properly\")\nif(len(links)==0):\n print(\"No links to fetch\")\nelse:\n # Tackning workbook\n wb=Workbook()\n\n #Creaing sheet in workbook\n sheet1 = wb.add_sheet('Links')\n\n #adding all data in list to excel sheet\n for i in range(0,len(links)):\n sheet1.write(i,0,links[i])\n \n #Getting date and time to create file\n data_time=datetime.now()\n current_time = str(data_time.strftime(\"%H-%M-%S\"))\n \n #Adding time to file name and saving file locally\n wb.save('links for '+current_time+'.xls')\n print(\"Done writing data to excel sheet\")" }, { "alpha_fraction": 0.7785714268684387, "alphanum_fraction": 0.7809523940086365, "avg_line_length": 41, "blob_id": "c963224daa2ef434a82286becd24be6ae972f412", "content_id": "c61712117ca119fdb6501815471d983fd6fafe32", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 420, "license_type": "permissive", "max_line_length": 90, "num_lines": 10, "path": "/Automation/LinksExtractor/README.md", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "# Links extractor script\nLinks extractor is a python script which helps in extracting all the links in a web page. 
\nIt goes through all the content in the HTML data and extracts the link from every anchor tag.\n\n## Libraries used\n* urllib is a Python library used to send requests to a website and retrieve its HTML content\n\n* bs4 is used to extract data from HTML content\n\n* xlwt is a Python library used to write data to an Excel sheet\n" }, { "alpha_fraction": 0.5267422795295715, "alphanum_fraction": 0.5445705056190491, "avg_line_length": 23.68000030517578, "blob_id": "396120dc1f2ccf702bfff1b488a771e47440ce02", "content_id": "27ee8a1d81c8ad2719d35381a183747fd5d30fa4", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 617, "license_type": "permissive", "max_line_length": 67, "num_lines": 25, "path": "/Algorithms/BubbleSort.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "def bubble_sort(arr): \n    \"\"\"Returns the array arr sorted using the bubble sort algorithm\n\n    >>> import random\n    >>> unordered = [i for i in range(5)]\n    >>> random.shuffle(unordered)\n    >>> bubble_sort(unordered)\n    [0, 1, 2, 3, 4]\n    \"\"\"\n    for i, j in enumerate(arr):\n        if i + 1 >= len(arr): continue\n        if j > arr[i + 1]:\n            arr[i], arr[i + 1] = arr[i + 1], arr[i]\n            return bubble_sort(arr)\n\n    return arr\n\nif __name__ == \"__main__\":\n    import random\n\n    unordered = [i for i in range(5)]\n    random.shuffle(unordered)\n    sort = bubble_sort(unordered)\n\n    print(sort)\n" }, { "alpha_fraction": 0.6261022686958313, "alphanum_fraction": 0.6261022686958313, "avg_line_length": 28.94444465637207, "blob_id": "77cb90bc82f5c968e2f164d6f64f82486153288e", "content_id": "b8bbbe1c439dded4465bfd004e86d2f9f6e26240", "detected_licenses": [ "Unlicense" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 567, "license_type": "permissive", "max_line_length": 67, "num_lines": 18, "path": "/Basic Scripts/Insta_pic_download.py", "repo_name": "shubhi13/Python_Scripts", "src_encoding": "UTF-8", "text": "import os\r\nimport instaloader\r\n\r\ndef pic_download(name):\r\n    parser=instaloader.Instaloader()\r\n    os.chdir(os.path.join(os.path.expanduser('~'),'Downloads'))\r\n\r\n    if os.path.isdir(\"Insta Downloads\"):\r\n        os.chdir(\"Insta Downloads\")\r\n        return parser.download_profile(name, profile_pic_only=True)\r\n    else:\r\n        os.mkdir(\"Insta Downloads\")\r\n        os.chdir(\"Insta Downloads\")\r\n        return parser.download_profile(name, profile_pic_only=True)\r\n\r\nif __name__==\"__main__\":\r\n    user=input(\"Enter the Username: \")\r\n    pic_download(user)\r\n    \r\n" } ]
69
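The LinksExtractor records in the repository above describe their urllib + BeautifulSoup + xlwt pipeline only through inline comments. As a condensed illustration of the extraction step, here is a minimal, self-contained sketch; it is not part of the repository, the function name `extract_links` and the example URL are assumptions, and the xlwt export step is omitted.

```python
# Minimal sketch of the link-extraction step from the LinksExtractor records above.
from urllib.request import Request, urlopen
from bs4 import BeautifulSoup

def extract_links(url):
    # Spoof a browser user agent, as the original script does, so simple
    # bot filters do not reject the request.
    req = Request(url, headers={'User-Agent': 'Mozilla/5.0'})
    html = urlopen(req).read()
    soup = BeautifulSoup(html, 'html.parser')
    # find_all('a', href=True) yields only anchors that actually carry an href,
    # which replaces the original has_attr('href') check.
    return [tag['href'] for tag in soup.find_all('a', href=True)]

if __name__ == '__main__':
    for link in extract_links('https://example.com'):
        print(link)
```

The original script collects the same list by iterating `soup('a')` and filtering with `tag.has_attr('href')`; the comprehension above is an equivalent, slightly more idiomatic form.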
Chung2/APIChung
https://github.com/Chung2/APIChung
ad552d8e70e728ecd57465b3dfda57cfe8d824f6
9ed70972f56a38bdf191fa69880a41f8e10c7b03
912c5f8e80bc869b655342774d5ddef03a568c5b
refs/heads/master
2021-06-09T06:35:52.226085
2016-11-16T20:32:41
2016-11-16T20:32:41
71,806,118
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6840148568153381, "alphanum_fraction": 0.7137546539306641, "avg_line_length": 18.285715103149414, "blob_id": "9f9839841dff57f6bac09bbf0c018a86caf8f07f", "content_id": "ef7984513050d7d2b77bed20ad5484a558d3ee30", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 269, "license_type": "no_license", "max_line_length": 56, "num_lines": 14, "path": "/Guitkiner2.py", "repo_name": "Chung2/APIChung", "src_encoding": "UTF-8", "text": "from tkinter import *\n\nroot = Tk()\n\nlabel = Label(master=root, text='Hello world', height=2)\nlabel.pack()\n\nbutton = Button(master=root, text='Druk hier')\nbutton.pack(pady=10)\n\nbutton2 = Button(master=root, text='Druk hier 2 keer')\nbutton2.pack(pady=40)\n\nroot.mainloop()" }, { "alpha_fraction": 0.557630181312561, "alphanum_fraction": 0.5852782726287842, "avg_line_length": 29.604394912719727, "blob_id": "dbe77085a93f7829c29b55601c64e8d26a2c97fe", "content_id": "56ffcc9b0f72ca46e05964276ab5858fa68b9913", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5570, "license_type": "no_license", "max_line_length": 323, "num_lines": 182, "path": "/GUI.py", "repo_name": "Chung2/APIChung", "src_encoding": "UTF-8", "text": "from tkinter import *\nfrom tkinter.messagebox import showinfo\nfrom API_test_marvel import *\nfrom Databasemarvelscore import *\n\nroot = Tk()\npunten = 25\ncounter = 30\ndatumVandaag = datetime.date.today()\n\n\ndef Scores():\n def close2():\n subwindow2.withdraw()\n\n subwindow2 = Toplevel(master=root)\n\n lijstUitslag = Gegevensuithalen()\n\n for i in range(len(lijstUitslag)):\n score = Label(master=subwindow2,\n background='#C92D39',\n text=lijstUitslag[i],\n wraplength=500,\n height=3,\n padx=50)\n score.grid(row=i, column=2)\n\n\ndef toonVenster():\n gebruiker = entry_naam.get()\n root.withdraw()\n\n def close():\n subwindow.withdraw()\n\n subwindow = Toplevel(master=root)\n\n # Maakt het hoofdscherm\n\n naam = gebruiker # dit natuurlijk veranderen\n\n # result wordt opgehaald en in een lijst gezet\n lijst1 = result\n NaamHeld = lijst1[0].get('name')\n IdHeld = lijst1[0].get('id')\n BeschrijvingHeld = lijst1[0].get('description')\n\n if NaamHeld in BeschrijvingHeld:\n BeschrijvingHeld2 = BeschrijvingHeld.replace(NaamHeld, '-----')\n\n # aantal punten bij het begin\n # pop-up checked of iron man wordt opgegeven\n def clicked():\n global punten\n global counter\n invoer = entry_antwoord.get()\n\n if invoer == NaamHeld:\n bericht = 'Dit was het juiste antwoord!'\n counter += 1\n print(naam, datumVandaag, str(punten))\n showinfo(title='Goed', message=bericht)\n Gegevensinvoeren(naam, datumVandaag, punten)\n else:\n bericht = 'Je hebt het fout'\n punten -= 1\n showinfo(title='Fout', message=bericht)\n label3['text'] = punten\n\n # achtergrond wordt rood gemaakt\n subwindow.configure(background='#C92D39')\n\n label3 = Label(master=subwindow,\n background='#C92D39',\n text=punten,\n wraplength=500,\n height=3,\n padx=50)\n label3.grid(row=0, column=5)\n\n # Titel label ( wordt veranderd )\n logo = PhotoImage(file=\"background_helden.png\")\n Titel = Label(master=subwindow, image=logo, background=\"#C92D39\")\n\n Titel.grid(row=1, column=5)\n\n # functie timer (maakt hiervoor label_timer aan!!)\n def counter_label():\n Timer = Label(master=subwindow,\n background='#C92D39',\n text=counter)\n Timer.grid(row=1, column=6)\n\n def count():\n global counter\n global punten\n if counter != 0:\n counter -= 1\n 
Timer.config(text=str(counter))\n Timer.after(1000, count)\n label3['text'] = punten\n else:\n punten -= 10\n label3['text'] = punten\n\n count()\n\n counter_label()\n\n # De Question knop\n def QuestionKnop():\n bericht = \"De bedoeling van het spel is dat je een Marvel Hero raadt, Je krijgt hiervoor in het begin al een zetje in de goede richting Je start met 25 punten. Een fout antwoord kost je 1 punt en een hint kost je 3 punten! Probeer zo snel mogelijk de Marvel Hero te raden om met zoveel mogelijk punten te eindigen.\"\n showinfo(title='Informatie', message=bericht)\n\n # de Functie voor de hintknop\n def HintKnop():\n global punten\n Hint1[\"text\"] = BeschrijvingHeld2\n punten -= 3\n label3['text'] = punten\n\n # de Hint labellen\n Hint1 = Label(master=subwindow,\n background='#C92D39',\n text='Hint 1',\n wraplength=500,\n height=6,\n padx=50)\n Hint1.grid(row=2, column=5)\n Hint3 = Label(master=subwindow,\n background='#C92D39',\n text='Deze personage heeft ' + str(len(NaamHeld)) + ' letters in zijn naam',\n wraplength=500,\n height=3,\n padx=50)\n Hint3.grid(row=4, column=5)\n\n Naam = Label(master=subwindow,\n background='#C92D39',\n text=naam, # variable waar de naam inzit met input.\n wraplength=500,\n font=(10),\n height=6,\n padx=50)\n Naam.grid(row=3, column=4)\n\n # de invoer button\n InvoerButton = Button(master=subwindow, text=' Enter ', command=clicked)\n InvoerButton.grid(row=6, column=6)\n\n # de Hint button\n HintButton = Button(master=subwindow, text=' Hint ', command=HintKnop)\n HintButton.grid(row=6, column=4)\n\n # de Help button ( spelregels )\n QuestionButton = Button(master=subwindow, text='?', command=QuestionKnop)\n QuestionButton.place(x=1500, y=0)\n\n # de entry balk (input)\n entry_antwoord = Entry(master=subwindow)\n entry_antwoord.grid(row=5, column=5)\n\n # toon het hoofdscherm\n subwindow.mainloop()\n\n\nroot.configure(background='#C92D39')\nlogo = PhotoImage(file=\"Marvel_Beginscherm.gif\")\nlabel_logo = Label(root, image=logo).grid(row=0, column=3)\nlabel_naam = Label(master=root, text='Naam graag!', background='#C92D39')\nlabel_naam.grid(row=1, column=3)\nentry_naam = Entry(master=root)\nentry_naam.grid(row=2, column=3, padx=10, pady=10, )\n\nStartButton = Button(master=root, text=\"Start\", command=toonVenster)\nStartButton.grid(row=3, column=3)\n\nScoreButton = Button(master=root, text=\"Score\", command=Scores)\nScoreButton.grid(row=6, column=0)\n\nroot.mainloop()\n" }, { "alpha_fraction": 0.5446428656578064, "alphanum_fraction": 0.6404221057891846, "avg_line_length": 25.212766647338867, "blob_id": "4fe4a7d04d284f00489ce8a1235dffccc981e426", "content_id": "2f31e51e0ffbfc379c63c5b2313f9b60ec47aa6a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "SQL", "length_bytes": 1233, "license_type": "no_license", "max_line_length": 67, "num_lines": 47, "path": "/marvelgamescore.sql", "repo_name": "Chung2/APIChung", "src_encoding": "UTF-8", "text": "-- phpMyAdmin SQL Dump\n-- version 4.1.12\n-- http://www.phpmyadmin.net\n--\n-- Machine: 127.0.0.1\n-- Gegenereerd op: 11 nov 2016 om 14:26\n-- Serverversie: 5.6.16\n-- PHP-versie: 5.5.11\n\nSET SQL_MODE = \"NO_AUTO_VALUE_ON_ZERO\";\nSET time_zone = \"+00:00\";\n\n\n/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;\n/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;\n/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;\n/*!40101 SET NAMES utf8 */;\n\n--\n-- Databank: `marvelgamescore`\n--\n\n-- 
--------------------------------------------------------\n\n--\n-- Tabelstructuur voor tabel `score`\n--\n\nCREATE TABLE IF NOT EXISTS `score` (\n `gebruiker` char(50) DEFAULT NULL,\n `datum` char(50) DEFAULT NULL,\n `score` int(11) DEFAULT NULL\n) ENGINE=InnoDB DEFAULT CHARSET=latin1;\n\n--\n-- Gegevens worden geëxporteerd voor tabel `score`\n--\n\nINSERT INTO `score` (`gebruiker`, `datum`, `score`) VALUES\n('Chung', '2016-11-10', 22),\n('Chung2', '2016-11-11', 21),\n('Chung3', '2016-11-11', 21),\n('Chung4', '2016-11-11', 10);\n\n/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;\n/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;\n/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;\n" }, { "alpha_fraction": 0.7484939694404602, "alphanum_fraction": 0.7635542154312134, "avg_line_length": 29.18181800842285, "blob_id": "4ea8dc4316ff44d9e83bff7d7dc00fa312c4e039", "content_id": "e6d0af69b14e7ef09b84ebdd25d1eefc1feecd66", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 664, "license_type": "no_license", "max_line_length": 124, "num_lines": 22, "path": "/README.md", "repo_name": "Chung2/APIChung", "src_encoding": "UTF-8", "text": "# APIChung\nV1Q Team 3\nSUPER-WONDER-CAPTAIN\n\nhttps://github.com/Chung2/APIChung\n\n1. Start de applicatie: Marvel-spel.py\n2. Voer je naam in op het startscherm\n3. Kies vervolgens 'Start'\n\nHet spel start!\n\nDe bedoeling van het spel is dat je een Marvel Hero raadt, Je krijgt hiervoor in het begin al een zetje in de goede richting\nJe start met 25 punten. Een fout antwoord kost je 1 punt en een hint kost je 3 punten!\nProbeer zo snel mogelijk de Marvel Hero te raden om met zoveel mogelijk punten te eindigen.\n\nWil je de spelregels tijdens het spel zien klik dan op de 'Help' knop.\n\n\nDeze applicatie wordt mede mogelijk gemaakt door:\n\nChung Wong, Lennart Heinen, Roy van Vliet en Koen Broers\n" }, { "alpha_fraction": 0.6565096974372864, "alphanum_fraction": 0.6901464462280273, "avg_line_length": 29.433734893798828, "blob_id": "52777faa0fa03474f3c0df823ff9496fb6a78ac1", "content_id": "f9ee0b147b3cde8b92e52120a8d0dfcca26758c5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2527, "license_type": "no_license", "max_line_length": 127, "num_lines": 83, "path": "/API_test_marvel.py", "repo_name": "Chung2/APIChung", "src_encoding": "UTF-8", "text": "import requests\nimport hashlib\nimport time\nimport random\n\n#lijst met de helden\ndef helden():\n #1008368\n lijstHelden = [1009368,1009351,1009220,1009610,1009718,1009697,1009664,1009262,1009619]\n return lijstHelden\n\n#nodig voor apikey\ndef setup_Api(rawLijstHelden):\n #api gegevens\n privKey ='0dbf711f6cbec1de65d0fb27d5c224bcf8957d47'\n publKey ='3b55fc225642ff0cc6e37e5cdf18f29f'\n\n lijstLinkjes =[]\n #tijd in seconden voor\n ts = time.time()\n\n #alles wordt apikey (time + privkey + publkey\n apikey = str(ts) + privKey + publKey\n\n #het wordt gehashed dus naar md5 encryptie veranderd\n hash_object = hashlib.md5(apikey.encode())\n hex_dig = hash_object.hexdigest()\n\n #hele url met time public key en apikey\n #api_url ='http://gateway.marvel.com/v1/public/characters?ts='+(str(ts)+'&apikey='+publKey+'&hash='+hex_dig)\n\n #door gebruik te maken van een character id kan je specifiek een character eruit halen in een lijst\n for i in rawLijstHelden:\n api_url = 
'http://gateway.marvel.com/v1/public/characters/'+str(i)+'?ts='+(str(ts)+'&apikey='+publKey+'&hash='+hex_dig)\n lijstLinkjes.append(api_url)\n\n #url wordt gereturned\n return lijstLinkjes\n\n#deze maakt connectie met de api en api_url wordt ingelezen\ndef connect_Api(lijstLinkjes):\n resultHelden = []\n\n #met requests.get haal je data uit het api_url waar alles op staat\n for s in lijstLinkjes:\n response = requests.get(s)\n resultApi = response.json()\n resultHelden.append(resultApi['data']['results'][0])\n\n #alle gegevens worden opgeslagen in een json en die kan worden uitgelezen in python\n return resultHelden\n\n # hier wordt het resultaat gereturned\n\n#Alle namen weergeven uit API, naam, id en description\ndef nameCharacter(result):\n #Alleen de eerste 20 namen worden weergeven\n\n for i in range(len(result)):\n print(result[i]['id'])\n print(result[i]['name'])\n print(result[i]['description'])\n\n#functie specifiek character selecteren uit Marvel API en print id, naam en description\ndef exactNameCharacter(result):\n\n for i in range(len(result)):\n if result[i].get('id') == 1009697:\n print(result[i].get('id'))\n print(result[i].get('name'))\n print(result[i].get('description'))\n\n#functie in willekeurige volgorde zetten\ndef Selectrandomhero(result):\n\n random.shuffle(result)\n\nheroes = helden()\nlijst = setup_Api(heroes)\nresult = connect_Api(lijst)\n#nameResult = nameCharacter(result)\n#nameResult2 = exactNameCharacter(result)\nrandomHero = Selectrandomhero(result)\n\n" }, { "alpha_fraction": 0.6025329828262329, "alphanum_fraction": 0.635138750076294, "avg_line_length": 26.488889694213867, "blob_id": "db3b29535be82c916fefe6c96b89e63e4da24039", "content_id": "f41bc6492913bce1ae0620f43f584f3e46a33431", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3711, "license_type": "no_license", "max_line_length": 319, "num_lines": 135, "path": "/tweedepaginaGUI.py", "repo_name": "Chung2/APIChung", "src_encoding": "UTF-8", "text": "from tkinter import *\nfrom tkinter.messagebox import showinfo\nfrom API_test_marvel import *\nfrom Databasemarvelscore import *\nfrom time import sleep\n\n# Maakt het hoofdscherm\nroot = Tk()\n\npunten = 25\ncounter = 30\nnaam = 'Chung' # dit natuurlijk veranderen\ndatumVandaag = datetime.date.today()\n\n# result wordt opgehaald en in een lijst gezet\nlijst1 = result\nNaamHeld = lijst1[0].get('name')\nIdHeld = lijst1[0].get('id')\nBeschrijvingHeld = lijst1[0].get('description')\n\nif NaamHeld in BeschrijvingHeld:\n BeschrijvingHeld2 = BeschrijvingHeld.replace(NaamHeld, '-----')\n\n\n# aantal punten bij het begin\n# pop-up checked of iron man wordt opgegeven\ndef clicked():\n global punten\n invoer = entry.get()\n\n if invoer == NaamHeld:\n bericht = 'Dit was het juiste antwoord!'\n print(naam,datumVandaag,str(punten))\n showinfo(title='Goed', message=bericht)\n Gegevensinvoeren(naam, datumVandaag, punten)\n else:\n bericht = 'Je hebt het fout'\n punten -= 1\n showinfo(title='Fout', message=bericht)\n label3['text'] = punten\n\n\n# achtergrond wordt rood gemaakt\nroot.configure(background='#C92D39')\n\nlabel3 = Label(master=root,\n background='#C92D39',\n text=punten,\n wraplength=500,\n height=3,\n padx=50)\nlabel3.grid(row=0, column=5)\n\n# Titel label ( wordt veranderd )\nlogo = PhotoImage(file=\"background_helden.png\")\nTitel = Label(master=root, image=logo, background=\"#C92D39\")\n\nTitel.grid(row=1, column=5)\n\n\n# functie timer (maakt hiervoor label_timer aan!!)\ndef 
counter_label(label_timer):\n def count():\n global counter\n counter -= 1\n label_timer.config(text=str(counter))\n label_timer.after(1000, count)\n\n count()\n\n\n# De Question knop\ndef QuestionKnop():\n bericht = \"De bedoeling van het spel is dat je een Marvel Hero raadt, Je krijgt hiervoor in het begin al een zetje in de goede richting Je start met 25 punten. Een fout antwoord kost je 1 punt en een hint kost je 3 punten! Probeer zo snel mogelijk de Marvel Hero te raden om met zoveel mogelijk punten te eindigen.\"\n showinfo(title='Informatie', message=bericht)\n\n\n# de Functie voor de hintknop\ndef HintKnop():\n global punten\n Hint1[\"text\"] = BeschrijvingHeld2\n punten -= 3\n label3['text'] = punten\n\n\n# de Hint labellen\nHint1 = Label(master=root,\n background='#C92D39',\n text='Hint 1',\n wraplength=500,\n height=6,\n padx=50)\nHint1.grid(row=2, column=5)\nHint2 = Label(master=root,\n background='#C92D39',\n text='Hint 2',\n wraplength=500,\n height=3,\n padx=50)\nHint2.grid(row=3, column=5)\nHint3 = Label(master=root,\n background='#C92D39',\n text='Hint 3',\n wraplength=500,\n height=3,\n padx=50)\nHint3.grid(row=4, column=5)\n\nNaam = Label(master=root,\n background='#C92D39',\n text=naam, # variable waar de naam inzit met input.\n wraplength=500,\n font=(10),\n height=6,\n padx=50)\nNaam.grid(row=3, column=4)\n\n# de invoer button\nInvoerButton = Button(master=root, text=' Enter ', command=clicked)\nInvoerButton.grid(row=6, column=6)\n\n# de Hint button\nHintButton = Button(master=root, text=' Hint ', command=HintKnop)\nHintButton.grid(row=6, column=4)\n\n# de Help button ( spelregels )\nQuestionButton = Button(master=root, text='?', command=QuestionKnop)\nQuestionButton.place(x=1500, y=0)\n\n# de entry balk (input)\nentry = Entry(master=root)\nentry.grid(row=5, column=5)\n\n# toon het hoofdscherm\nroot.mainloop()\n" }, { "alpha_fraction": 0.5959252715110779, "alphanum_fraction": 0.6196944117546082, "avg_line_length": 25.29464340209961, "blob_id": "0ad207b633de01a667954c310ace755484831dfd", "content_id": "5e7dfef502d62bace58e14bd028bf4d95c8ad29e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2945, "license_type": "no_license", "max_line_length": 89, "num_lines": 112, "path": "/TESTpaginas.py", "repo_name": "Chung2/APIChung", "src_encoding": "UTF-8", "text": "from tkinter import *\nfrom tkinter.messagebox import showinfo\nfrom API_test_marvel import *\n\n\ndef toonVenster():\n gebruiker = entry_1.get()\n\n def close():\n subwindow.withdraw()\n subwindow = Toplevel(master=root)\n\n lijst1 = result\n NaamHeld = lijst1[0].get('name')\n IdHeld = lijst1[0].get('id')\n BeschrijvingHeld = lijst1[0].get('description')\n\n\n #if NaamHeld == 'Iron Man':\n # BeschrijvingHeld2 = BeschrijvingHeld.replace('Tony Stark', '-----')\n # if NaamHeld in BeschrijvingHeld2:\n # BeschrijvingHeld2 = BeschrijvingHeld.replace(NaamHeld, '-----')\n\n if NaamHeld in BeschrijvingHeld:\n BeschrijvingHeld2 = BeschrijvingHeld.replace(NaamHeld, '-----')\n\n\n\n # pop-up checked of iron man wordt opgegeven\n def clicked():\n invoer = entry.get()\n if invoer == NaamHeld:\n bericht = 'Je hebt het goed'\n showinfo(title='Goed', message=bericht)\n else:\n bericht = 'Je hebt het fout'\n showinfo(title='Fout', message=bericht)\n\n subwindow.configure(background='#C92D39')\n\n\n\n #button 2\n def clicked2():\n print(\"Hello\")\n pass\n #button 3\n def clicked3():\n T = Text(subwindow,height=8, width=50)\n T.pack(side=RIGHT,fill=Y)\n T.insert(END, 
BeschrijvingHeld2)\n bericht = BeschrijvingHeld2\n showinfo(title='hint1', message=bericht)\n\n pass\n\n #label = Label(master=root,\n # text=BeschrijvingHeld2,\n # background='red',\n # font=('Helvetica', 10, 'bold italic'),\n # height=10,\n # width=100)\n #label.pack()\n\n\n\n\n label2 = Label(master=subwindow,\n text=entry_1.get(),\n background='red',\n font=('Helvetica', 10, 'bold italic'),\n height=3)\n label2.pack()\n\n #rechterframe = Frame(master=subwindow)\n #rechterframe.pack(side=RIGHT)\n\n #linkerframe = Frame(master=subwindow)\n #linkerframe.pack(side=LEFT)\n\n\n Randomize = Button(master=subwindow, text='nieuwe superheld', command=clicked2)\n Randomize.pack(pady=10)\n\n InvoerButton = Button(master=subwindow, text=' Invoer ', command=clicked)\n InvoerButton.pack(pady=10,ipadx=50)\n #de Hint button\n HintButton = Button(master=subwindow, text=' Hint ', command=clicked3)\n HintButton.pack(pady=10, ipadx=50)\n\n BackButton = Button(master=subwindow, text='Back', command=close)\n BackButton.pack(side=BOTTOM,padx=10, pady=10)\n\n entry = Entry(master=subwindow)\n entry.pack(padx=10, pady=10)\n\n subwindow.mainloop() # toon het hoofdscherm\n\n\n\n\nroot = Tk()\nroot.configure(background='#C92D39')\nlogo = PhotoImage(file=\"Marvel_Beginscherm.gif\")\nlabel_logo = Label(root, image=logo).pack()\nentry_1 = Entry(master=root)\nentry_1.pack(padx=10, pady=10)\n\nStartButton = Button(master=root, text=\"Start\", command=toonVenster)\nStartButton.pack()\n\nroot.mainloop()\n" }, { "alpha_fraction": 0.5282511115074158, "alphanum_fraction": 0.5309417247772217, "avg_line_length": 26.219512939453125, "blob_id": "714ffdbb1e96169e6907cb8676b9cf1bca2d4034", "content_id": "f4c1837e695ffb239c3dab02bf52f7e8c332e612", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1115, "license_type": "no_license", "max_line_length": 66, "num_lines": 41, "path": "/Databasemarvelscore.py", "repo_name": "Chung2/APIChung", "src_encoding": "UTF-8", "text": "import datetime\nimport pymysql\n\nconnection = pymysql.connect(host='localhost',\n user='root',\n password='',\n db='marvelgamescore')\n\ncursor = connection.cursor()\n\nlijstUitslag =[]\ndef Gegevensuithalen() :\n sql = 'SELECT * FROM score ORDER BY score DESC'\n try:\n cursor.execute(sql)\n results = cursor.fetchall()\n for row in results:\n naam = row[0]\n datum = row[1]\n score = row[2]\n lijstUitslag.append('naam= %s,datum= %s,score= %d' % \\\n (naam, datum, score))\n return lijstUitslag\n except:\n #erior \n print('haha het werkt niet')\n connection.close()\n\ndef Gegevensinvoeren(naam,datumVandaag,punten):\n\n sql ='INSERT INTO score(gebruiker, ' \\\n 'datum, score)VALUES(\"%s\",\"%s\",\"%d\")' % \\\n (naam,datumVandaag,punten)\n\n try:\n cursor.execute(sql)\n connection.commit()\n except pymysql.Error as e:\n connection.rollback()\n print('Error Mysql')\n connection.close()" } ]
8
w348729/Python
https://github.com/w348729/Python
00c999e5d2ca1db38bb12ed755ccfda5aa923b67
d7d5ab44f167178a3f860a36e689d7c100ac9579
f2ff38043972f5de902799c1086f6e7cc773abbb
refs/heads/master
2022-12-24T22:51:10.535909
2018-12-18T09:42:28
2018-12-18T09:42:28
129,866,941
0
1
null
2018-04-17T07:48:59
2018-12-18T09:42:35
2018-12-18T09:42:33
Python
[ { "alpha_fraction": 0.6414384841918945, "alphanum_fraction": 0.6448991894721985, "avg_line_length": 53.024391174316406, "blob_id": "87facbd31bfe8b6c35a1adc2806386a4265847b3", "content_id": "ba69e8e735023e59bfea26cd155436dc3c96b025", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6646, "license_type": "no_license", "max_line_length": 116, "num_lines": 123, "path": "/mine.py", "repo_name": "w348729/Python", "src_encoding": "UTF-8", "text": "\n\n# assume this is python environment\n\nclass PersonalStatement(object):\n def __init__(self, property, status, my_description, company, job_title, job_description, my_goal, leisure_time,\n it_journey):\n self.property = property\n self.status = status\n self.my_description = my_description\n self.company = company\n self.job_title = job_title\n self.job_description = job_description\n self.my_goal = my_goal\n self.leisure_time = leisure_time\n self.it_journey = it_journey\n\n def get_property(self):\n for each in property:\n print(each)\n\n def my_it_journey(self):\n for each_journey in self.it_journey:\n print(each_journey)\n return 'list my it journey successfully'\n >>>'start my IT journey at the end of 2015 as I was obsessed for those those excellent ' \\\n 'website or mobile app,then I started to learn some basic knowledge by myself.In 2016,' \\\n ' I heard System Analysis programme in ISS(NUS Institution Of System Science) from ' \\\n 'internet and I decided to apply for it and successfully get a offer.So after 1 year ' \\\n 'of studying, I can officially telling others that I am programmer.Now I am working in Fintec' \\\n 'company as a back-end developer'\n\n def describe_myself(self):\n finish_journey = my_it_journey(self.it_journey)\n if finish_journey:\n if self.description:\n print(self.description)\n return 'describe myself successfully'\n >>>'I believe I am a positive person who always can find out the affirmative side of things ' \\\n 'and constantly absorb new knowledge from people around me. My colleagues always say I am ' \\\n 'humorous and friendly person,this two features help me to make to new friends during daily ' \\\n 'life. As I did not graduate from IT relevant major, so I paid much more effort and spend ' \\\n 'quite a lot of time to learn more and I believe that disadvantage does not affect me right ' \\\n 'now. For the IT career, if you wanna be something in future, the most important one ' \\\n 'is you have to keep learning new stuff and get in contact with the latest technologies as ' \\\n 'the computer science updating so significant fast.'\n else:\n return 'print IT journey first'\n\n def describe_leisure_time(self):\n # start to describe what usually I do in my leisure time\n if self.leisure_time:\n print(self.leisure_time)\n return 'description finished'\n >>>'In my daily life, I go work out almost every day to stay healthy and stay fit cause you ' \\\n 'know as a programmer, you barely have to time to walk around in office, almost spend whole ' \\\n 'day in my chair. I do some reading almost every day, sometimes its about IT stuff, sometimes ' \\\n 'its could be a famous science fiction like Te Galactic Empire Trilogy from Isaac Asimov, known' \\\n 'as the creator of the The Laws of Robotics. Besides, I like drawing very much, I draw somthine ' \\\n 'like sketch every week, I put some of my work in the mail. Currently, I am learning Angular and ' \\\n 'some basic theory about machine learning. 
I believe the AI implementation in the financial industry '\n                  'could cause a huge impact; for example, an AI system may recommend the best portfolio for a user or '\n                  'calculate the risk of each trade based on massive historical data automatically. Even more, an online '\n                  'intelligent supporter could reply to different questions from all kinds of customers. As I learn '\n                  'more about this knowledge, I realize that I am so tiny in the deep, endless sea of computer science;'\n                  ' I believe it is the time for me to learn deeper.')\n            return 'description finished'\n\n\n    def current_job_description(self):\n        above_description_finished = bool(self.status)\n        if above_description_finished:\n            print(self.job_description)\n            print('I mainly work on the backend now, including developing API interfaces for the project, taking '\n                  'part in designing the database and maintaining it, and developing and optimizing finance-relevant '\n                  'algorithms. Sometimes I also need to maintain the meta data retrieved from the Exchange Center; '\n                  'basically, we grab or buy the data from them, '\n                  'then convert it to the format we are using. As I work on the backend, I need to cooperate '\n                  'quite a lot with the frontend to satisfy their requirements for the response. I always follow '\n                  'one rule in my development: keep my code clean, readable and neat, so that it will be '\n                  'much easier to maintain; even when you leave the company, others can take it over smoothly.')\n            return 'describe my daily tasks successfully'\n        else:\n            return 'failed'\n\n    def list_my_goal(self):\n        for each in self.my_goal:\n            print(each)\n        print('1. being a professional programmer, not someone who can just develop some business logic, '\n              'but someone who also focuses on developing advanced algorithms and constantly optimizes the system. '\n              '2. I want to go deeper and further, so I want to learn more about AI and machine learning. '\n              '3. someday I will try to be a product manager equipped with professional technical '\n              'knowledge, not someone who only does paper talk')\n\ndef personal_statement_complete(count):\n    if count > 0:\n        print('Thank you very much for your time reading this')\n        return 'Wish you have a good day'\n    else:\n        return 'Automatically print above contents.... 
'\n\n\n\nif __name__ == '__main__':\n    property = {'name': 'wang ruixue', 'gender': 'male', 'birthday': '1990.05.04,', 'nationality': 'china'}\n    status = 'working in sg'\n    my_description = ['...']\n    company = 'Mafint(fintech company in singapore)'\n    job_title = 'backend developer'\n    job_description = 'long text here...'\n    my_goal = ['...']\n    it_journey = ['long text here...']\n    leisure_time = 'long text here...'\n\n    step_count = 0\n    me = PersonalStatement(property, status, my_description, company, job_title, job_description, my_goal,\n                           leisure_time, it_journey)\n    me.get_property()\n    me.my_it_journey()\n    me.describe_myself()\n    me.describe_leisure_time()\n    me.current_job_description()\n    me.list_my_goal()\n    step_count += 1\n    personal_statement_complete(step_count)" }, { "alpha_fraction": 0.8035714030265808, "alphanum_fraction": 0.8035714030265808, "avg_line_length": 36, "blob_id": "3b808dd2c672502717a1dde271a501467b0d40bd", "content_id": "193eb1c9bcd6ee9ebbf4c5a56523a2e2617a3cae", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 112, "license_type": "no_license", "max_line_length": 100, "num_lines": 3, "path": "/README.md", "repo_name": "w348729/Python", "src_encoding": "UTF-8", "text": "# Python\n\nthese are python projects I developed during the period of learning this simple and natural language.\n\n" }, { "alpha_fraction": 0.5500612854957581, "alphanum_fraction": 0.5800980925559998, "avg_line_length": 27.794116973876953, "blob_id": "675503fa142128ed70e48c63842584dee8ee4126", "content_id": "077eb31aab52ecb7b5362aa66596ba430d5ce896", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4894, "license_type": "no_license", "max_line_length": 125, "num_lines": 170, "path": "/Practice/liner_regeression/linner_regression.py", "repo_name": "w348729/Python", "src_encoding": "UTF-8", "text": "import numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nfrom mpl_toolkits.mplot3d import Axes3D\n\nnp.random.seed(36)\nX = 2 * np.random.rand(100, 1)\nY = 4 + 3 * X + .2 * np.random.rand(100, 1)\nplt.figure(figsize=(10, 7))\nplt.scatter(X, Y)\nplt.xlabel('$x$',fontsize=18)\nplt.ylabel('$y$', rotation=0, fontsize=18)\nplt.axis([0, 2, 0, 15])\nplt.show()\n\nfrom sklearn.metrics import mean_squared_error\n\nX_b = np.c_[np.ones((100, 1)), X]\nlr = 0.1 #step_rate\niterations = 1000\nm = 100\ntheta = np.random.randn(2, 1)\nmse = []\nfor iter in range(iterations):\n    gradients = 2. 
/ m * X_b.T.dot(X_b.dot(theta) - Y)\n theta = theta - lr * gradients\n preds = X_b.dot(theta)\n mse.append(mean_squared_error(preds, Y))\n\nX_new = np.array([[0], [2]])\nX_new_b = np.c_[np.ones((2, 1)), X_new]\nplt.figure(figsize=(10, 7))\nplt.scatter(X, Y)\nplt.plot(X_new, X_new_b.dot(theta), 'r-', label='prediction')\nplt.xlabel(\"$x_1$\", fontsize=18)\nplt.ylabel(\"$y$\", rotation=0, fontsize=18)\nplt.legend(loc='upper right')\nplt.show()\n\n\n# data_count = 100\n#\n# x_data = np.linspace(-20, 20, data_count)\n# y_data = np.multiply(2, x_data) + 3 + np.random.normal(loc=0, scale=8.0, size=(data_count,))\n# plt.figure(figsize=(16, 6))\n# plt.scatter(x_data, y_data, s=10, color='g')\n# plt.xlabel('x')\n# plt.ylabel('y')\n# plt.show()\n#\n# w_sample = np.linspace(-10, 10, data_count).reshape((-1, 1))\n# b_sample = np.linspace(-10, 10, data_count).reshape((-1, 1))\n#\n# x_data = x_data.reshape((-1, 1))\n# y_data = y_data.reshape((-1, 1))\n#\n# loss = np.square(np.dot(w_sample, x_data.T) + b_sample - y_data) / data_count\n#\n# figure = plt.figure(figsize=(16, 6))\n# axes = Axes3D(figure)\n# axes.set_xlabel('w')\n# axes.set_ylabel('b')\n# axes.plot_surface(w_sample.T, b_sample, loss, cmap='rainbow')\n\n\n# def compute_error(b,m,data):\n#\n# totalError = 0\n# #Two ways to implement this\n# #first way\n# # for i in range(0,len(data)):\n# # x = data[i,0]\n# # y = data[i,1]\n# #\n# # totalError += (y-(m*x+b))**2\n#\n# #second way\n# x = data[:,0]\n# y = data[:,1]\n# totalError = (y-m*x-b)**2\n# totalError = np.sum(totalError,axis=0)\n#\n# return totalError/float(len(data))\n#\n# def optimizer(data,starting_b,starting_m,learning_rate,num_iter):\n# b = starting_b\n# m = starting_m\n#\n# #gradient descent\n# for i in range(num_iter):\n# #update b and m with the new more accurate b and m by performing\n# # thie gradient step\n# b,m =compute_gradient(b,m,data,learning_rate)\n# if i%100==0:\n# print 'iter {0}:error={1}'.format(i,compute_error(b,m,data))\n# return [b,m]\n#\n# def compute_gradient(b_current,m_current,data ,learning_rate):\n#\n# b_gradient = 0\n# m_gradient = 0\n#\n# N = float(len(data))\n# #Two ways to implement this\n# #first way\n# # for i in range(0,len(data)):\n# # x = data[i,0]\n# # y = data[i,1]\n# #\n# # #computing partial derivations of our error function\n# # #b_gradient = -(2/N)*sum((y-(m*x+b))^2)\n# # #m_gradient = -(2/N)*sum(x*(y-(m*x+b))^2)\n# # b_gradient += -(2/N)*(y-((m_current*x)+b_current))\n# # m_gradient += -(2/N) * x * (y-((m_current*x)+b_current))\n#\n# #Vectorization implementation\n# x = data[:,0]\n# y = data[:,1]\n# b_gradient = -(2/N)*(y-m_current*x-b_current)\n# b_gradient = np.sum(b_gradient,axis=0)\n# m_gradient = -(2/N)*x*(y-m_current*x-b_current)\n# m_gradient = np.sum(m_gradient,axis=0)\n# #update our b and m values using out partial derivations\n#\n# new_b = b_current - (learning_rate * b_gradient)\n# new_m = m_current - (learning_rate * m_gradient)\n# return [new_b,new_m]\n#\n#\n# def plot_data(data,b,m):\n#\n# #plottting\n# x = data[:,0]\n# y = data[:,1]\n# y_predict = m*x+b\n# pylab.plot(x,y,'o')\n# pylab.plot(x,y_predict,'k-')\n# pylab.show()\n#\n#\n# def Linear_regression():\n# # get train data\n# data =np.loadtxt('/Users/alexwang/Downloads/GitHub/Python/Practice/liner_regeression/data.csv',delimiter=',')\n#\n# #define hyperparamters\n# #learning_rate is used for update gradient\n# #defint the number that will iteration\n# # define y =mx+b\n# learning_rate = 0.001\n# initial_b =0.0\n# initial_m = 0.0\n# num_iter = 1000\n#\n# #train model\n# #print b 
m error\n#     print 'initial variables:\\n initial_b = {0}\\n intial_m = {1}\\n error of begin = {2} \\n'\\\n#         .format(initial_b,initial_m,compute_error(initial_b,initial_m,data))\n#\n#     #optimizing b and m\n#     [b ,m] = optimizer(data,initial_b,initial_m,learning_rate,num_iter)\n#\n#     #print final b m error\n#     print 'final formula parmaters:\\n b = {1}\\n m={2}\\n error of end = {3} \\n'.format(num_iter,b,m,compute_error(b,m,data))\n#\n#     #plot result\n#     plot_data(data,b,m)\n#\n# if __name__ =='__main__':\n#\n#     Linear_regression()" }, { "alpha_fraction": 0.6335616707801819, "alphanum_fraction": 0.6506849527359009, "avg_line_length": 21.30769157409668, "blob_id": "c1b3a8e40af0b358dd6b1f2073edf6529cb84df3", "content_id": "952e905d016b87d8cc74dadca633a095dce07631", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 430, "license_type": "no_license", "max_line_length": 38, "num_lines": 13, "path": "/Practice/hellowrold.py", "repo_name": "w348729/Python", "src_encoding": "UTF-8", "text": "\n\nprint(\"你好\")\nstr = 'Hello World!'\n\nprint(str)  # print the complete string\nprint(str[0])  # print the first character of the string\nprint(str[2:5])  # print the substring from the third to the fifth character\nprint(str[2:])  # print the string starting from the third character\nprint(str * 2)  # print the string twice\nprint(str + \"TEST\")  # print the concatenated string\n\nprint(str.find(\"o\"))\nprint(str.replace(\"He\", \"hhe\"))\nprint(str.upper())\n" }, { "alpha_fraction": 0.4808429181575775, "alphanum_fraction": 0.48563218116760254, "avg_line_length": 31.65625, "blob_id": "61755a8e6777c531b4d5b32(2b1a518e2f86f1c", "content_id": "03ce64c460be2c23a7a08042ecd9a49957cb2f46", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1044, "license_type": "no_license", "max_line_length": 92, "num_lines": 32, "path": "/Practice/Cap.py", "repo_name": "w348729/Python", "src_encoding": "UTF-8", "text": "import csv\nimport sys\nfrom geotext import GeoText\n\nwith open('abcnews-date-text.csv', encoding='utf-8') as csvFile:\n    reader = csv.reader(csvFile)\n    try:\n        newList = []\n        for i, rows in enumerate(reader):\n            if 0 < i <= 20:\n                row = rows[1].capitalize()\n\n                eachWord = row.split(' ')\n                out = []\n                for part in eachWord:\n                    abc = part.capitalize()\n                    place = GeoText(abc).cities\n                    if abc in place:\n                        out.append(abc)\n                    else:\n                        out.append(part)\n                line = \" \".join(out)\n                newList.append(line)\n        csvFile.close()\n        print(newList)\n    except csv.Error as e:\n        sys.exit('file {}, line {}: {}'.format('abcnews-date-text.csv', reader.line_num, e))\n\nwith open('example.csv', 'w', newline='') as new:\n    writer = csv.writer(new, delimiter=\" \")\n    for newRow in newList:\n        writer.writerow(newRow.split(' '))\n    new.close()" }, { "alpha_fraction": 0.5269308686256409, "alphanum_fraction": 0.5386179089546204, "avg_line_length": 21.49142837524414, "blob_id": "a6979cf0a75bd8bbf853f40ab1cdccc33f5d00e2", "content_id": "e22510ca33bf890588aed745e3e1997b33c0632c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3936, "license_type": "no_license", "max_line_length": 98, "num_lines": 175, "path": "/Practice/algorithm/algorithm.py", "repo_name": "w348729/Python", "src_encoding": "UTF-8", "text": "# 1. 
check whether two strings contain the same letters\ndef al(s1, s2):\n    c1 = [0] * 26\n    c2 = [0] * 26\n    for s in s1:\n        pos = ord(s) - ord('a')\n        c1[pos] = c1[pos] + 1\n    for j in s2:\n        pos = ord(j) - ord('a')\n        c2[pos] = c2[pos] + 1\n    k = 0\n    still_ok = True\n    while k < 26 and still_ok:\n        if c1[k] == c2[k]:\n            k = k + 1\n        else:\n            still_ok = False\n    return still_ok\n\n\n# 2. create a Stack in python\nclass Stack:\n    def __init__(self):\n        self.items = []\n\n    def is_empty(self):\n        return self.items == []\n\n    def push(self, new_item):\n        self.items.append(new_item)\n\n    def pop(self):\n        return self.items.pop()  # return the popped item so callers can use it\n\n    def peek(self):\n        return self.items[len(self.items) - 1]\n\n    def size(self):\n        return len(self.items)\n\n# 3. bracket_checker, check whether a bracket string is balanced, for example '((()))' or '((()()'\ndef bracket_checker(bracket_string):\n    s = Stack()\n    balanced = True\n    index = 0\n    while index < len(bracket_string) and balanced:\n        symbol = bracket_string[index]\n        if symbol == '(':\n            s.push(symbol)\n\n        else:\n            if s.is_empty():\n                balanced = False\n            else:\n                s.pop()\n        index += 1\n    if balanced and s.is_empty():\n        return True\n    else:\n        return False\n\n# 4. advanced bracket checking method, can check strings like '{{([][])}()}'\ndef advanced_bracket_checker(bracket_string):\n    def matched(open, close):\n        opens = '([{'\n        closers = ')]}'\n        return opens.index(open) == closers.index(close)\n\n    s = Stack()\n    balanced = True\n    index = 0\n    while index < len(bracket_string) and balanced:\n        symbol = bracket_string[index]\n        if symbol in '([{':\n            s.push(symbol)\n\n        else:\n            if s.is_empty():\n                balanced = False\n            else:\n                top = s.pop()\n                if not matched(top, symbol):\n                    balanced = False\n        index += 1\n    if balanced and s.is_empty():\n        return True\n    else:\n        return False\n\n# 5. convert integer to binary\ndef divide_by_2(dec_number):\n    rem_stack = Stack()\n\n    while dec_number > 0:\n        rem = dec_number % 2\n        rem_stack.push(rem)\n        dec_number = dec_number // 2\n\n    bin_string = \"\"\n    while not rem_stack.is_empty():\n        bin_string = bin_string + str(rem_stack.pop())\n\n    return bin_string\n# 6. Queue\nclass Queue:\n    def __init__(self):\n        self.items = []\n\n    def isEmpty(self):\n        return self.items == []\n\n    def enqueue(self, item):\n        self.items.insert(0,item)\n\n    def dequeue(self):\n        return self.items.pop()\n\n    def size(self):\n        return len(self.items)\n\n\ndef hotPotato(namelist, num):\n    simqueue = Queue()\n    for name in namelist:\n        simqueue.enqueue(name)\n\n    while simqueue.size() > 1:\n        for i in range(num):\n            simqueue.enqueue(simqueue.dequeue())\n\n        simqueue.dequeue()\n\n    return simqueue.dequeue()\n\n# 7. Deque\nclass Deque:\n    def __init__(self):\n        self.items = []\n\n    def isEmpty(self):\n        return self.items == []\n\n    def addFront(self, item):\n        self.items.append(item)\n\n    def addRear(self, item):\n        self.items.insert(0,item)\n\n    def removeFront(self):\n        return self.items.pop()\n\n    def removeRear(self):\n        return self.items.pop(0)\n\n    def size(self):\n        return len(self.items)\n# 8. use a Deque to check whether a string is a palindrome\ndef palchecker(aString):\n    chardeque = Deque()\n\n    for ch in aString:\n        chardeque.addRear(ch)\n\n    stillEqual = True\n\n    while chardeque.size() > 1 and stillEqual:\n        first = chardeque.removeFront()\n        last = chardeque.removeRear()\n        if first != last:\n            stillEqual = False\n\n    return stillEqual\n\nprint(palchecker(\"lsdkjfskf\"))\nprint(palchecker(\"radar\"))\n
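\n# A few quick sanity checks for the helpers above — a minimal usage sketch that\n# only calls functions already defined in this file; the expected values in the\n# comments follow directly from the code:\nprint(divide_by_2(42))  # '101010'\nprint(bracket_checker('((()))'))  # True\nprint(advanced_bracket_checker('{{([][])}()}'))  # True\nprint(hotPotato(['Bill', 'David', 'Susan', 'Jane', 'Kent', 'Brad'], 7))  # the last name remaining\n" } ]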
6
samz406/python_start
https://github.com/samz406/python_start
75ccb3eca8887470db54258d8a96f5cbabfc7f3c
f5e849688001710abf4923c1a69996b119f20c38
9a83a1e4ac45fd4987fa924bf1f137a9ee4ae2d9
refs/heads/main
2022-12-30T19:35:07.430242
2020-10-22T14:58:49
2020-10-22T14:58:49
306,372,101
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7857142686843872, "alphanum_fraction": 0.7857142686843872, "avg_line_length": 13, "blob_id": "c788af3711c2633c0933600bf3404204459aa676", "content_id": "5e7fcebbe59dec34ebd060e6041854f00face96f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 28, "license_type": "no_license", "max_line_length": 14, "num_lines": 2, "path": "/README.md", "repo_name": "samz406/python_start", "src_encoding": "UTF-8", "text": "# python_start\npython start\n" }, { "alpha_fraction": 0.5656565427780151, "alphanum_fraction": 0.5883838534355164, "avg_line_length": 17.85714340209961, "blob_id": "0e8620451f0a015247691aaeef0dff106b9d381a", "content_id": "7a1c4835b59a055cbe79b474595aef50e842a289", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 396, "license_type": "no_license", "max_line_length": 54, "num_lines": 21, "path": "/t_thread.py", "repo_name": "samz406/python_start", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n# -*- coding: UTF-8 -*-\n\n\nimport thread\nimport time\n\n\ndef print_time(t, d):\n count = 0\n while count < 10:\n time.sleep(d)\n count = count + 1\n print \"%s:%s\" % (t, time.ctime(time.time()))\n\n\ntry:\n thread.start_new_thread(print_time(\"thead-1\", 2))\n thread.start_new_thread(print_time(\"thread-2\", 4))\nexcept:\n print \"Error:unable to start thread\"\n" }, { "alpha_fraction": 0.7222222089767456, "alphanum_fraction": 0.7222222089767456, "avg_line_length": 18, "blob_id": "06df68fa5951b280105bc5c638982859145f96fa", "content_id": "a65cb89d5da88b257ef6d929b8d6d4b872a357cf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 18, "license_type": "no_license", "max_line_length": 18, "num_lines": 1, "path": "/mysql.py", "repo_name": "samz406/python_start", "src_encoding": "UTF-8", "text": "### import MySQLdb" }, { "alpha_fraction": 0.44134896993637085, "alphanum_fraction": 0.47214075922966003, "avg_line_length": 17.97222137451172, "blob_id": "5c305aeb243055fac746c3a64366eae6b3179654", "content_id": "9a5756bdf1be94018809fddbd4b36a43d9ce8e3f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 682, "license_type": "no_license", "max_line_length": 51, "num_lines": 36, "path": "/if_else.py", "repo_name": "samz406/python_start", "src_encoding": "UTF-8", "text": "def p_out(name):\n print(\"Hi {0}\".format(name))\n\n\ndef age_type(age):\n if age < 10:\n print('you are little son')\n elif age <= 20:\n print('You are yong son')\n elif age <= 30:\n print (\"you are qingnian son\")\n elif age <= 45:\n print (\"you are middle son\")\n else:\n print (\"you enter error \")\n\n\ndef f():\n for a in [\"a\",\"b\",\"c\"]:\n print a\n\ndef dic():\n d = {\"name\": \"lilin\", \"age\": 10, \"sex\": \"male\"}\n print(d)\n print (d[\"name\"])\n del d[\"name\"]\n print (d[\"age\"])\n\nif __name__ == '__main__':\n # p_out(\"samz\")\n # age_type(10)\n # age_type(19)\n # age_type(22)\n # age_type(31)\n # age_type(50)\n f()" }, { "alpha_fraction": 0.6437346339225769, "alphanum_fraction": 0.6572481393814087, "avg_line_length": 18.380952835083008, "blob_id": "08635fd7514a182a21a6a66fa542a4d748a9fa57", "content_id": "4efbff3c09a307d4802a5af55b221812d4a8b5d1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 814, "license_type": "no_license", "max_line_length": 57, "num_lines": 42, "path": "/zk.py", "repo_name": 
"samz406/python_start", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: UTF-8 -*-\n\nfrom kazoo.client import KazooClient\nimport logging\n\nfrom kazoo.protocol.states import KazooState\n\nzk = KazooClient(hosts='127.0.0.1:2181')\nzk.start()\n\nv_path = \"/my/favorite\"\nzk.ensure_path(v_path)\n\n\ndef exist(path):\n if zk.exists(path):\n print(\"/my/favorite exist\")\n else:\n print(\"/my/favorite not exist\")\n\n\nexist(v_path)\n\nzk.delete(v_path)\n\nexist(v_path)\n\n\ndef my_listener(state):\n if state == KazooState.LOST:\n # Register somewhere that the session was lost\n print(\"zk lost\")\n elif state == KazooState.SUSPENDED:\n # Handle being disconnected from Zookeeper\n print(\"zk SUSPENDED\")\n else:\n # Handle being connected/reconnected to Zookeeper\n print(\"zk connected\")\n\n\nzk.add_listener(my_listener)\n" } ]
5
Ralxil/Library
https://github.com/Ralxil/Library
075ff18adff04936b73d4c695d716c8bdaea6dc7
777da558634a54099440cd2b192cd8ad9fff0d20
500552a38e4ec8334b7d9bc0fb67c121dc48e3c7
refs/heads/master
2021-10-11T23:00:06.893180
2021-09-28T20:00:24
2021-09-28T20:00:24
209,885,214
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6410256624221802, "alphanum_fraction": 0.6500754356384277, "avg_line_length": 35.83333206176758, "blob_id": "fd3bbfcfde9bb79010f01d3167af7e6eafd7cb4f", "content_id": "f0973b1eb30dbf4fd55c0bd8dd55d96b41abd0b9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1989, "license_type": "no_license", "max_line_length": 89, "num_lines": 54, "path": "/flask_lib/main/routes.py", "repo_name": "Ralxil/Library", "src_encoding": "UTF-8", "text": "from flask import render_template, flash, redirect, url_for, request, abort, Blueprint\nfrom flask_lib import db\nfrom flask_lib.main.forms import InviteForm\nfrom flask_lib.models import User, Library, BCopy\nfrom flask_login import current_user, login_required\n\nmain = Blueprint('main', __name__)\n\n\[email protected]('/')\[email protected]('/home')\n@login_required\ndef home():\n page = request.args.get('page', 1, type=int)\n main_lib = Library.query.filter_by(owner_id=current_user.id).first()\n # libs = Library.query.filter_by(member=current_user).paginate(page=page, per_page=5)\n libs = current_user.Libraries\n return render_template('home.html', libs=libs, main_lib=main_lib)\n\n\[email protected]('/library/<int:lib_id>')\n@login_required\ndef library(lib_id):\n page = request.args.get('page', 1, type=int)\n lib = Library.query.get_or_404(lib_id)\n user = lib.owner\n if user != current_user and lib not in current_user.Libraries:\n flash('You cant view this library.', 'danger')\n return redirect(url_for('main.home'))\n books = BCopy.query.filter_by(part=lib).paginate(page=page, per_page=5)\n data = {'Reading': 0, 'Finished': 0, 'Lend': 0, 'New': 0, 'PD': 0}\n for book in books.items:\n if book.lend:\n data['Lend'] += 1\n else:\n data[book.status] += 1\n if user.days > 0:\n data['PD'] = user.pages / user.days\n return render_template('library.html', books=books, lib=lib, data=data)\n\n\[email protected]('/library/<int:lib_id>/invite', methods=['GET', 'POST'])\n@login_required\ndef invite(lib_id):\n form = InviteForm()\n if form.validate_on_submit():\n user = User.query.filter_by(username=form.username.data).first()\n lib = Library.query.filter_by(id=lib_id).first()\n if lib.owner != current_user:\n abort(403)\n user.Libraries.append(lib)\n db.session.commit()\n return redirect(url_for('main.home'))\n return render_template('invite.html', title='Invite', form=form)\n" }, { "alpha_fraction": 0.6255843043327332, "alphanum_fraction": 0.6333748698234558, "avg_line_length": 38.128047943115234, "blob_id": "3fa6e437819dbf90756c6ad89ca4ec5b7dbf5333", "content_id": "c2fe8654931b2a44173a531b276ef13808281226", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6418, "license_type": "no_license", "max_line_length": 106, "num_lines": 164, "path": "/flask_lib/books/routes.py", "repo_name": "Ralxil/Library", "src_encoding": "UTF-8", "text": "from flask import render_template, flash, redirect, url_for, request, abort, Blueprint\nfrom flask_lib import db\nfrom flask_lib.books.forms import SearchForm, BookForm, LendForm\nfrom flask_lib.models import User, Library, Book, BCopy, History\nfrom flask_login import current_user, login_required\nfrom datetime import datetime\n\nbooks = Blueprint('books', __name__)\n\n\[email protected]('/book/list', methods=['GET', 'POST'])\n@login_required\ndef book_list():\n page = request.args.get('page', 1, type=int)\n book = Book.query\n form = SearchForm()\n if form.validate_on_submit() or 
request.args.get('page', 1, type=int):\n        if request.values.get(\"author\"):\n            form.author.data = request.values.get(\"author\")\n        search = '%{}%'.format(form.author.data.upper())\n        book = book.filter(Book.author.like(search))\n        if request.values.get(\"title\"):\n            form.title.data = request.values.get(\"title\")\n        search = '%{}%'.format(form.title.data.upper())\n        book = book.filter(Book.title.like(search))\n    book = book.paginate(page=page, per_page=2, error_out=False)\n    return render_template('book_list.html', title='Book List', form=form, legend='Book List', books=book)\n\n\[email protected]('/book/add/<int:book_id>', methods=['GET', 'POST'])\n@login_required\ndef add_book(book_id):\n    lib = Library.query.filter_by(owner_id=current_user.id).first()\n    original = Book.query.filter_by(id=book_id).first()\n    bcopy = BCopy(owner=current_user, part=lib, original=original)\n    db.session.add(bcopy)\n    db.session.commit()\n    flash('New Book has been added', 'success')\n    return redirect(url_for('main.library', lib_id=current_user.Library.id))\n\n\[email protected]('/book/new', methods=['GET', 'POST'])\n@login_required\ndef new_book():\n    form = BookForm()\n    if form.validate_on_submit():\n        book = Book(title=form.title.data.upper(), author=form.author.data.upper(), pages=form.pages.data)\n        db.session.add(book)\n        db.session.commit()\n        flash('New Book has been added', 'success')\n        return redirect(url_for('books.book_list'))\n    return render_template('create_book.html', title='New Book', form=form, legend='New Book')\n\n\[email protected]('/book/<int:book_id>')\n@login_required\ndef book(book_id):\n    book = BCopy.query.get_or_404(book_id)\n    lib = Library.query.get_or_404(book.lib_id)\n    return render_template('book.html', title=book.original.title, book=book, lib=lib)\n\n\[email protected]('/book/<int:book_id>/<string:status>', methods=['GET'])\n@login_required\ndef update_book(book_id, status):\n    book = BCopy.query.get_or_404(book_id)\n    if book.owner != current_user:\n        abort(403)\n    if status == 'Return':\n        book.lend = False\n        book.guest = False\n        his = History(date=datetime.utcnow(), action='return', book=book, username='guest')\n        db.session.add(his)  # the History record must be added before committing\n        db.session.commit()\n        flash('Your book status has been updated!', 'success')\n    if status == 'Reading':\n        book.status = status\n        book.date = datetime.utcnow()\n        db.session.commit()\n        flash('Your book status has been updated!', 'success')\n    if status == 'Finished':\n        if book.status == 'Reading':\n            delta = datetime.utcnow()-book.date\n            current_user.days += delta.days\n            current_user.pages += book.original.pages\n        book.status = status\n        db.session.commit()\n        flash('Your book status has been updated!', 'success')\n    return redirect(url_for('books.book', book_id=book.id))\n\n\[email protected]('/book/<int:book_id>/delete', methods=['POST'])\n@login_required\ndef delete_book(book_id):\n    book = BCopy.query.get_or_404(book_id)\n    if book.owner != current_user:\n        abort(403)\n    if book.lend:\n        flash(\"You can't delete this book now.\", 'danger')\n    else:\n        db.session.delete(book)\n        db.session.commit()\n        flash('Your book has been deleted!', 'success')\n    return redirect(url_for('main.library', lib_id=current_user.Library.id))\n\n\[email protected]('/lend/<int:book_id>', methods=['GET', 'POST'])\n@login_required\ndef lend(book_id):\n    form = LendForm()\n    if form.validate_on_submit():\n        book = BCopy.query.get_or_404(book_id)\n        if book.owner != current_user:\n            abort(403)\n        if form.remember.data:\n            book.guest = True\n            book.lend = True\n            his = History(date=datetime.utcnow(), action='lending', book=book, 
username='guest')\n db.session.add(his)\n db.session.commit()\n return redirect(url_for('main.library', lib_id=current_user.Library.id))\n else:\n user = User.query.filter_by(username=form.username.data).first()\n if user:\n book.lend = True\n lib = Library.query.filter_by(owner_id=user.id).first()\n Lbook = BCopy(lend_id=book.id, owner=current_user, original=book.original, part=lib)\n his = History(date=datetime.utcnow(), action='lending', book=book, username=user.username)\n db.session.add(Lbook)\n db.session.add(his)\n db.session.commit()\n return redirect(url_for('main.library', lib_id=current_user.Library.id))\n else:\n flash('There is no such user.', 'danger')\n return render_template('lend.html', title='Lend', form=form)\n\n\[email protected]('/book/<int:book_id>/history')\n@login_required\ndef history(book_id):\n page = request.args.get('page', 1, type=int)\n his = History.query.filter_by(book_id=book_id).paginate(page=page, per_page=5)\n book = BCopy.query.get_or_404(book_id)\n lib = Library.query.get_or_404(book.lib_id)\n if lib.owner != current_user:\n abort(403)\n return render_template('history.html', his=his, book=book)\n\n\[email protected]('/return/<int:book_id>')\n@login_required\ndef return_book(book_id):\n book = BCopy.query.get_or_404(book_id)\n Obook = BCopy.query.get_or_404(book.lend_id)\n lib = Library.query.get_or_404(book.lib_id)\n if lib.owner != current_user:\n abort(403)\n Obook.lend = False\n # Obook.return_date = datetime.utcnow\n his = History(date=datetime.utcnow(), action='return', book=Obook, username=current_user.username)\n db.session.add(his)\n db.session.delete(book)\n db.session.commit()\n flash('Book has been returned.', 'success')\n return redirect(url_for('main.library', lib_id=current_user.Library.id))\n\n" }, { "alpha_fraction": 0.6716867685317993, "alphanum_fraction": 0.6801204681396484, "avg_line_length": 41.56410217285156, "blob_id": "521e22e90ef09e7fb1d7de2e01af0458ac7fe9e0", "content_id": "14cf67017fd701f58ab1221f50c5232a084d2512", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3320, "license_type": "no_license", "max_line_length": 93, "num_lines": 78, "path": "/flask_lib/models.py", "repo_name": "Ralxil/Library", "src_encoding": "UTF-8", "text": "from itsdangerous import TimedJSONWebSignatureSerializer as Serializer\nfrom datetime import datetime\nfrom flask import current_app\nfrom flask_lib import db, login_manager\nfrom flask_login import UserMixin\n\n\n@login_manager.user_loader\ndef load_user(user_id):\n return User.query.get(int(user_id))\n\n\nmembership = db.Table('membership', db.Model.metadata,\n db.Column('member_id', db.Integer, db.ForeignKey('user.id')),\n db.Column('lib_id', db.Integer, db.ForeignKey('library.id'))\n )\n\n\nclass User(db.Model, UserMixin):\n id = db.Column(db.Integer, primary_key=True)\n username = db.Column(db.String(20), unique=True, nullable=False)\n email = db.Column(db.String(120), unique=True, nullable=False)\n image_file = db.Column(db.String(20), nullable=False, default='default.jpg')\n password = db.Column(db.String(60), nullable=False)\n active = db.Column(db.Boolean, nullable=False, default=False)\n pages = db.Column(db.Integer, nullable=False, default=0)\n days = db.Column(db.Integer, nullable=False, default=0)\n Library = db.relationship('Library', backref='owner', lazy=True, uselist=False)\n Libraries = db.relationship('Library', secondary=membership, backref='member', lazy=True)\n Books = db.relationship('BCopy', backref='owner', 
lazy=True)\n\n    def get_token(self, expires_sec=1800):\n        s = Serializer(current_app.config['SECRET_KEY'], expires_sec)\n        return s.dumps({'user_id': self.id}).decode('utf-8')\n\n    @staticmethod\n    def verify_token(token):\n        s = Serializer(current_app.config['SECRET_KEY'])\n        try:\n            user_id = s.loads(token)['user_id']\n        except:\n            return None\n        return User.query.get(user_id)\n\n\nclass Library(db.Model):\n    id = db.Column(db.Integer, primary_key=True)\n    owner_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False, unique=True)\n    books = db.relationship('BCopy', backref='part', lazy=True)\n\n\nclass BCopy(db.Model):\n    id = db.Column(db.Integer, primary_key=True)\n    status = db.Column(db.String(20), nullable=False, default='New')\n    lend = db.Column(db.Boolean, nullable=False, default=False)\n    guest = db.Column(db.Boolean, nullable=False, default=False)\n    date = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)  # pass the callable, not utcnow(), so each row gets its own timestamp\n    owner_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)\n    lib_id = db.Column(db.Integer, db.ForeignKey('library.id'), nullable=False)\n    book_id = db.Column(db.Integer, db.ForeignKey('book.id'), nullable=False)\n    lend_id = db.Column(db.Integer, nullable=True)\n    History = db.relationship('History', backref='book', lazy=True)\n\n\nclass Book(db.Model):\n    id = db.Column(db.Integer, primary_key=True)\n    title = db.Column(db.String(100), nullable=False, unique=True)\n    author = db.Column(db.String(100), nullable=False)\n    pages = db.Column(db.Integer, nullable=False)\n    Copies = db.relationship('BCopy', backref='original', lazy=True)\n\n\nclass History(db.Model):\n    id = db.Column(db.Integer, primary_key=True)\n    date = db.Column(db.DateTime, nullable=False)\n    action = db.Column(db.String(20), nullable=False)\n    book_id = db.Column(db.Integer, db.ForeignKey('b_copy.id'), nullable=False)\n    username = db.Column(db.String(20), nullable=False)\n" }, { "alpha_fraction": 0.656095564365387, "alphanum_fraction": 0.6565074324607849, "avg_line_length": 39.14049530029297, "blob_id": "7785024e9039dc1ec8945c6135a3613c9ea4e2f1", "content_id": "6dc973b980202bcf2da713d1efc7f313039a90e3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4856, "license_type": "no_license", "max_line_length": 117, "num_lines": 121, "path": "/flask_lib/users/routes.py", "repo_name": "Ralxil/Library", "src_encoding": "UTF-8", "text": "from flask import render_template, flash, redirect, url_for, request, Blueprint\nfrom flask_lib import db, bcrypt\nfrom flask_lib.models import User, Library\nfrom flask_login import login_user, current_user, logout_user, login_required\n\nfrom flask_lib.users.forms import RegistrationForm, LoginForm, UpdateAccountForm, RequestResetForm, ResetPasswordForm\nfrom flask_lib.users.utils import save_picture\n\nusers = Blueprint('users', __name__)\n\n\[email protected]('/register', methods=['GET', 'POST'])\ndef register():\n    if current_user.is_authenticated:\n        return redirect(url_for('main.home'))\n    form = RegistrationForm()\n    if form.validate_on_submit():\n        hashed_password = bcrypt.generate_password_hash(form.password.data).decode('utf-8')\n        user = User(username=form.username.data, email=form.email.data, password=hashed_password)\n        user.active = True\n        db.session.add(user)\n        lib = Library(owner=user)\n        db.session.add(lib)\n        db.session.commit()\n        # send_active_email(user)\n        login_user(user, remember=False)\n        flash('An email has been sent to you to activate your account.', 'info')\n        # db.session.rollback()\n        return 
redirect(url_for('users.login'))\n    return render_template('register.html', title='Register', form=form)\n\n\[email protected]('/login', methods=['GET', 'POST'])\ndef login():\n    if current_user.is_authenticated:\n        return redirect(url_for('main.home'))\n    form = LoginForm()\n    if form.validate_on_submit():\n        user = User.query.filter_by(email=form.email.data).first()\n        if user and bcrypt.check_password_hash(user.password, form.password.data):\n            if user.active:\n                login_user(user, remember=form.remember.data)\n                next_page = request.args.get('next')\n                return redirect(next_page) if next_page else redirect(url_for('main.home'))\n            else:\n                flash('Account inactive.', 'danger')\n        else:\n            flash('Login Unsuccessful.', 'danger')\n    return render_template('login.html', title='Login', form=form)\n\n\[email protected]('/logout')\ndef logout():\n    logout_user()\n    return redirect(url_for('main.home'))\n\n\[email protected]('/account', methods=['GET', 'POST'])\n@login_required\ndef account():\n    form = UpdateAccountForm()\n    if form.validate_on_submit():\n        if form.picture.data:\n            picture_file = save_picture(form.picture.data)\n            current_user.image_file = picture_file\n        current_user.username = form.username.data\n        current_user.email = form.email.data\n        db.session.commit()\n        flash('Your account has been updated.', 'success')\n        return redirect(url_for('users.account'))\n    elif request.method == 'GET':\n        form.username.data = current_user.username\n        form.email.data = current_user.email\n    image_file = url_for('static', filename='profile_pic/' + current_user.image_file)\n    return render_template('account.html', title='Account', image_file=image_file, form=form)\n\n\[email protected]('/reset_password', methods=['GET', 'POST'])\ndef reset_request():\n    if current_user.is_authenticated:\n        return redirect(url_for('main.home'))\n    form = RequestResetForm()\n    if form.validate_on_submit():\n        user = User.query.filter_by(email=form.email.data).first()\n        if user:\n            # send_reset_email(user)\n            flash('An email has been sent to reset your password.', 'info')\n            token = user.get_token()\n            return redirect(url_for('users.reset_token', token=token, _external=True))\n        flash('There is no such user.', 'danger')\n    return render_template('reset_request.html', title='Reset Password', form=form)\n\n\[email protected]('/reset_password/<token>', methods=['GET', 'POST'])\ndef reset_token(token):\n    if current_user.is_authenticated:\n        return redirect(url_for('main.home'))\n    user = User.verify_token(token)\n    if user is None:\n        flash('That is an invalid/expired token', 'warning')\n        return redirect(url_for('users.reset_request'))\n    form = ResetPasswordForm()\n    if form.validate_on_submit():\n        hashed_password = bcrypt.generate_password_hash(form.password.data).decode('utf-8')\n        user.password = hashed_password\n        db.session.commit()\n        flash('Your password has been updated', 'success')\n        return redirect(url_for('main.home'))\n    return render_template('reset_token.html', title='Reset Password', form=form)\n\n\[email protected]('/active/<token>', methods=['GET', 'POST'])\ndef active_token(token):\n    user = User.verify_token(token)\n    if user is None:\n        flash('That is an invalid/expired token', 'warning')\n        return redirect(url_for('users.register'))\n    user.active = True\n    db.session.commit()\n    login_user(user, remember=False)\n    flash('Your account has been activated', 'success')\n    return redirect(url_for('main.home'))" }, { "alpha_fraction": 0.7081807255744934, "alphanum_fraction": 0.7228327393531799, "avg_line_length": 36.272727966308594, "blob_id": 
"c8b6134286f97c1675038548b3447304d222d0f9", "content_id": "5660c6f3d337770e31c6382a5af751ebb77ada94", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 819, "license_type": "no_license", "max_line_length": 87, "num_lines": 22, "path": "/flask_lib/books/forms.py", "repo_name": "Ralxil/Library", "src_encoding": "UTF-8", "text": "from flask_wtf import FlaskForm\nfrom wtforms import StringField, SubmitField, BooleanField, IntegerField\nfrom wtforms.validators import DataRequired, Length, NumberRange\n\n\nclass BookForm(FlaskForm):\n title = StringField('Title', validators=[DataRequired(), Length(min=0, max=100)])\n author = StringField('Author', validators=[DataRequired(), Length(min=0, max=100)])\n pages = IntegerField('Pages', validators=[DataRequired(), NumberRange(min=1)])\n submit = SubmitField('Add')\n\n\nclass LendForm(FlaskForm):\n username = StringField('Username', validators=[Length(min=2, max=20)])\n remember = BooleanField('Guest')\n submit = SubmitField('Lend')\n\n\nclass SearchForm(FlaskForm):\n author = StringField('Author', default='')\n title = StringField('Title', default='')\n submit = SubmitField('Search')" } ]
5
dveratrallero/SimpleChess
https://github.com/dveratrallero/SimpleChess
65764b57bd6855e71f7439c95719067b02c95dd7
29553653975960cf9f5e276b9d748c09404e4cc9
233736feb7479b7254aa158899d58fc41f3d6db0
refs/heads/master
2022-12-14T10:45:21.283062
2019-11-10T15:25:24
2019-11-10T15:25:24
219,253,590
1
0
null
2019-11-03T04:56:32
2021-01-17T07:33:16
2022-11-22T04:47:22
Python
[ { "alpha_fraction": 0.4464285671710968, "alphanum_fraction": 0.6785714030265808, "avg_line_length": 15, "blob_id": "45cb6eb54192307b4f600117a4d1d42abbf20d59", "content_id": "2b6ec8a04c4fb1b9d6190beae433dd63118a6e4f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 112, "license_type": "no_license", "max_line_length": 22, "num_lines": 7, "path": "/requirements.txt", "repo_name": "dveratrallero/SimpleChess", "src_encoding": "UTF-8", "text": "numpy==1.17.3\npandas==0.25.2\nPillow==6.2.1\npkg-resources==0.0.0\npython-dateutil==2.8.0\npytz==2019.3\nsix==1.12.0\n" }, { "alpha_fraction": 0.5440660715103149, "alphanum_fraction": 0.5577365756034851, "avg_line_length": 47.00480651855469, "blob_id": "6730bb7470657acd6f7ac57e6a29bc1af7eee540", "content_id": "5153c843b6427a850424f1b1d1176956a27ed6ba", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 19970, "license_type": "no_license", "max_line_length": 224, "num_lines": 416, "path": "/Run.py", "repo_name": "dveratrallero/SimpleChess", "src_encoding": "UTF-8", "text": "import tkinter as tk\nfrom PIL import Image, ImageTk\n\nclass AbstractPiece(tk.Label):\n \"\"\"docstring for Piece\"\"\"\n def __init__(self, root, row, column, *args, **kwargs):\n tk.Label.__init__(self, root, *args, **kwargs)\n self.current_piece=\"\"\n self.row=row\n self.column=column\n self.color = 0\n self.defaultbg=\"\"\n self.selected = False\n self.used = False\n\nclass ChessApp(tk.Frame):\n Brown_Pawn_Image =Image.open(\"ChessPieces/Brown_Pawn.png\")\n Brown_Horse_Image = Image.open(\"ChessPieces/Brown_Horse.png\")\n Brown_Knight_Image =Image.open(\"ChessPieces/Brown_Knight.png\")\n Brown_Tower_Image = Image.open(\"ChessPieces/Brown_Tower.png\")\n Brown_King_Image =Image.open(\"ChessPieces/Brown_King.png\")\n Brown_Queen_Image = Image.open(\"ChessPieces/Brown_Queen.png\")\n Gray_Pawn_Image =Image.open(\"ChessPieces/Gray_Pawn.png\")\n Gray_Horse_Image = Image.open(\"ChessPieces/Gray_Horse.png\")\n Gray_Knight_Image =Image.open(\"ChessPieces/Gray_Knight.png\")\n Gray_Tower_Image = Image.open(\"ChessPieces/Gray_Tower.png\")\n Gray_King_Image =Image.open(\"ChessPieces/Gray_King.png\")\n Gray_Queen_Image = Image.open(\"ChessPieces/Gray_Queen.png\")\n\n def __init__(self, chess_window):\n super().__init__(chess_window)\n chess_window.title(\"QUESS\")\n self.place(width=800, height=800)\n chess_window.geometry(\"800x800\")\n self.selected_square=False\n self.selected_square_tuple=0\n self.selected_square_moves=0\n self.selected_piece=\"\"\n self.color_turn=2\n self.check=False\n self.PhotoPixel=tk.PhotoImage(width=1, height=1)\n self.BrownPiecesPhotos={\"Pawn\": ImageTk.PhotoImage(ChessApp.Brown_Pawn_Image),\n \"Horse\": ImageTk.PhotoImage(ChessApp.Brown_Horse_Image),\n \"Knight\": ImageTk.PhotoImage(ChessApp.Brown_Knight_Image),\n \"Tower\": ImageTk.PhotoImage(ChessApp.Brown_Tower_Image),\n \"King\": ImageTk.PhotoImage(ChessApp.Brown_King_Image),\n \"Queen\": ImageTk.PhotoImage(ChessApp.Brown_Queen_Image)}\n self.GrayPiecesPhotos={\"Pawn\": ImageTk.PhotoImage(ChessApp.Gray_Pawn_Image),\n \"Horse\": ImageTk.PhotoImage(ChessApp.Gray_Horse_Image),\n \"Knight\": ImageTk.PhotoImage(ChessApp.Gray_Knight_Image),\n \"Tower\": ImageTk.PhotoImage(ChessApp.Gray_Tower_Image),\n \"King\": ImageTk.PhotoImage(ChessApp.Gray_King_Image),\n \"Queen\": ImageTk.PhotoImage(ChessApp.Gray_Queen_Image)}\n self.GrayPiecesAlive={\"Pawn\": [(6, i) for i in range(8)],\n \"Knight\": [(7, 2), (7, 
5)],\n \"Tower\": [(7, 0), (7, 7)],\n \"Horse\": [(7, 1), (7, 6)],\n \"King\": [(7, 4)],\n \"Queen\": [(7, 3)]}\n self.BrownPiecesAlive={\"Pawn\": [(1, i) for i in range(8)],\n \"Knight\": [(0, 2), (0, 5)],\n \"Tower\": [(0, 0), (0, 7)],\n \"Horse\": [(0, 1), (0, 6)],\n \"King\": [(0, 4)],\n \"Queen\": [(0, 3)]}\n self.Board=[[AbstractPiece(self, y, x, image=self.PhotoPixel, width=99, height=87, compound=\"c\") for x in range(8)] for y in range(8)]\n self.CreateBoard()\n self.CreatePieces()\n self.targetedbrown=self.CheckPlayerMoves(self.GrayPiecesAlive)\n self.targetedgray=self.CheckPlayerMoves(self.BrownPiecesAlive)\n\n def CreatePieces(self):\n def IterRender(PieceAliveDict, PieceAlivePhoto, color):\n for a, b in PieceAliveDict.items():\n if b:\n for item in b:\n self.RenderPiece(self.Board[item[0]][item[1]], PieceAlivePhoto[a], a, color)\n IterRender(self.BrownPiecesAlive, self.BrownPiecesPhotos, 1)\n IterRender(self.GrayPiecesAlive, self.GrayPiecesPhotos, 2)\n\n def CreateBoard(self):\n for i, row in enumerate(self.Board):\n for h, square in enumerate(row):\n if (i+1)%2!=0:\n if (h+1)%2!=0:\n square.defaulbg=\"green\"\n elif (h+1)%2==0:\n square.defaulbg=\"black\"\n elif (i+1)%2==0:\n if (h+1)%2!=0:\n square.defaulbg=\"black\"\n elif (h+1)%2==0:\n square.defaulbg=\"green\"\n square.configure(bg=square.defaulbg)\n square.bind('<Button-1>', lambda event, Square=square: self.PieceSelect(Square))\n square.grid(row=i, column=h)\n\n def UpdateBoard(self):\n for row in self.Board:\n for square in row:\n square.configure(bg=square.defaulbg, image=self.PhotoPixel)\n square.image=self.PhotoPixel\n square.current_piece=\"\"\n square.color=0\n square.selected=False\n square.update()\n self.CreatePieces()\n self.after(0)\n\n def RenderPiece(self, Square, img, name, color):\n Square.configure(image=img)\n Square.image=img\n Square.current_piece=name\n Square.color= color\n Square.update()\n\n def EndTurn(self, Square):\n if self.color_turn==1:\n print(f\"Brown player just moved its {self.selected_piece} from {self.selected_square_tuple} to {(Square.row, Square.column)}\")\n self.targetedgray=self.CheckPlayerMoves(self.BrownPiecesAlive)\n print(f\"Brown player Possible Moves: {sorted(self.targetedgray)}\")\n for item in self.targetedgray:\n if item==self.GrayPiecesAlive[\"King\"][0]:\n self.check=True\n print(\"CHECK\")\n self.color_turn=2\n elif self.color_turn==2:\n print(f\"Gray player just moved its {self.selected_piece} from {self.selected_square_tuple} to {(Square.row, Square.column)}\")\n self.targetedbrown=self.CheckPlayerMoves(self.GrayPiecesAlive)\n print(f\"Gray Player Possible Moves: {sorted(self.targetedbrown)}\")\n for item in self.targetedbrown:\n if item==self.BrownPiecesAlive[\"King\"][0]:\n self.check=True\n print(\"CHECK\")\n self.color_turn=1\n print(\"---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------\")\n self.selected_square=False\n self.selected_square_tuple=0\n self.selected_square_moves=0\n self.selected_piece=\"\"\n\n def ExecuteMove(self, Square):\n def MovePiece(Square, piecename, piecearr):\n if self.selected_piece==piecename:\n for item in piecearr:\n if item[0]==self.selected_square_tuple[0] and item[1]==self.selected_square_tuple[1]:\n holder = list(item)\n piecearr.remove(item)\n holder[0]=Square.row\n holder[1]=Square.column\n piecearr.append(tuple(holder))\n for item in self.selected_square_moves:\n if 
Square.row==item[0] and Square.column==item[1]:\n if Square.current_piece!=\"\":\n self.EatPiece(Square)\n if self.color_turn==1:\n MovePiece(Square, self.selected_piece, self.BrownPiecesAlive[self.selected_piece])\n elif self.color_turn==2:\n MovePiece(Square, self.selected_piece, self.GrayPiecesAlive[self.selected_piece])\n self.UpdateBoard()\n self.after(0)\n\n def PieceSelect(self, Square):\n if not Square.selected and not self.selected_square and Square.color==self.color_turn:\n Square.configure(bg=\"yellow\")\n Square.selected=True\n self.selected_piece=Square.current_piece\n self.selected_square=True\n self.selected_square_tuple=(Square.row, Square.column)\n self.selected_square_moves=self.PieceMove(Square)\n try:\n for item in self.selected_square_moves:\n self.Board[item[0]][item[1]].configure(bg=\"white\")\n self.Board[item[0]][item[1]].update()\n except IndexError:\n pass\n # elif not Square.selected and not self.selected_square and Square.color==0:\n # print(\"Please select a Piece\")\n # elif not Square.selected and not self.selected_square and Square.color!=self.color_turn:\n # print(\"Please wait until the other player moves\")\n # elif not Square.selected and self.selected_square and Square.color==self.color_turn:\n # print(\"Another piece has been selected!\")\n elif (not Square.selected and self.selected_square and Square.color==0)\\\n or (not Square.selected and self.selected_square and Square.color!=self.color_turn):\n if self.CheckCalc(Square):\n self.ExecuteMove(Square)\n self.EndTurn(Square)\n Square.used = True\n else:\n print(\"You can't do that move, take care of your King\")\n # elif not Square.selected and self.selected_square and Square.color==self.color_turn:\n # print(\"Hello World! You broke the game!\")\n # elif Square.selected and self.selected_square and Square.color==0:\n # print(\"Yay! 
You broke the game!\")\n elif Square.selected and self.selected_square and Square.color==self.color_turn:\n print(\"Rethink!\")\n Square.selected=False\n self.selected_square=False\n self.selected_square_moves=0\n self.selected_square_tuple=0\n self.selected_piece=\"\"\n self.UpdateBoard()\n\n def EatPiece(self, Square):\n def eatsetup(piecearr):\n for item in piecearr:\n if item[0]==Square.row and item[1]==Square.column:\n print(f\"{'Gray' if Square.color==1 else 'Brown'} Player\\'s {self.selected_piece} ate {'Brown' if Square.color==1 else 'Gray'} Player\\'s {Square.current_piece}\")\n piecearr.remove(item)\n if Square.color==1:\n eatsetup(self.BrownPiecesAlive[Square.current_piece])\n elif Square.color==2:\n eatsetup(self.GrayPiecesAlive[Square.current_piece])\n\n def MinOfTwo(self, a, b):\n if a>b:\n return b\n else:\n return a\n\n def IterMove(self, a, b, movelist, Square):\n if a<0 or b<0:\n return False\n elif self.Board[a][b].current_piece!=\"\" and self.Board[a][b].color==Square.color:\n return False\n elif self.Board[a][b].current_piece!=\"\" and self.Board[a][b].color!=Square.color:\n movelist.append((a, b))\n return False\n else:\n movelist.append((a, b))\n return True\n\n def NormalMove(self, a, b, movelist, Square):\n if self.Board[a][b].current_piece==\"\" or self.Board[a][b].color!=Square.color:\n movelist.append((a, b))\n\n def EmptySpaceAdd(self, a, b, movelist):\n if self.Board[a][b].current_piece==\"\":\n movelist.append((a, b))\n\n def PossibleMove(self, a, b, movelist, Square):\n if self.Board[a][b].current_piece!=\"\" and self.Board[a][b].color!=Square.color:\n movelist.append((a, b))\n\n def HorseMove(self, a, b, movelist, Square):\n if self.Board[a][b].current_piece==\"\" or self.Board[a][b].color!=Square.color:\n movelist.append((a, b))\n\n def CheckPlayerMoves(self, piecedict):\n movelist=[]\n for values in piecedict.values():\n for item in values:\n movelist+=self.PieceMove(self.Board[item[0]][item[1]])\n return list(set(movelist))\n\n def CheckMovesHelper(self, piecedict, piece, Square):\n movelist=[]\n class square(object):\n def __init__(self, Square):\n self.color=Square.color\n self.row=Square.row\n self.column=Square.column\n self.current_piece=super.current_piece\n for key, values in piecedict.items():\n for item in values:\n if item!=self.selected_square_tuple:\n movelist+=self.PieceMove(self.Board[item[0]][item[1]])\n elif item==self.selected_square_tuple:\n movelist+=self.PieceMove(square(Square))\n return list(set(movelist))\n\n def CheckCalc(self, Square):\n a = self.selected_piece\n b = self.selected_square_tuple\n x = Square\n if self.check:\n if self.color_turn==1:\n for item in self.CheckMovesHelper(self.GrayPiecesAlive, Square.current_piece, Square):\n if self.selected_piece!=\"King\":\n if item==self.BrownPiecesAlive[\"King\"][0]:\n return False\n else:\n if item==(Square.row, Square.column):\n return False\n return True\n elif self.color_turn==2:\n for item in self.CheckMovesHelper(self.BrownPiecesAlive, Square.current_piece, Square):\n if self.selected_piece!=\"King\":\n if item==self.GrayPiecesAlive[\"King\"][0]:\n return False\n else:\n if item==(Square.row, Square.column):\n return False\n return True\n else:\n return True\n\n def PieceMove(self, Square):\n movelist=[]\n if Square.current_piece==\"Pawn\":\n if Square.color==2:\n self.EmptySpaceAdd(Square.row-1, Square.column, movelist)\n if not Square.used and self.Board[Square.row-1][Square.column].current_piece==\"\":\n self.EmptySpaceAdd(Square.row-2, Square.column, movelist)\n try:\n 
self.PossibleMove(Square.row-1, Square.column-1, movelist, Square)\n self.PossibleMove(Square.row-1, Square.column+1, movelist, Square)\n except IndexError:\n pass\n elif Square.color==1:\n self.EmptySpaceAdd(Square.row+1, Square.column, movelist)\n if not Square.used and self.Board[Square.row+1][Square.column].current_piece==\"\":\n self.EmptySpaceAdd(Square.row+2, Square.column, movelist)\n try:\n self.PossibleMove(Square.row+1, Square.column-1, movelist, Square)\n self.PossibleMove(Square.row+1, Square.column+1, movelist, Square)\n except IndexError:\n pass\n elif Square.current_piece==\"Horse\":\n if Square.row+2<=7 and Square.column+1<=7:\n self.HorseMove(Square.row+2, Square.column+1, movelist, Square)\n if Square.row+2<=7 and Square.column-1>=0:\n self.HorseMove(Square.row+2, Square.column-1, movelist, Square)\n if Square.row-2>=0 and Square.column+1<=7:\n self.HorseMove(Square.row-2, Square.column+1, movelist, Square)\n if Square.row-2>=0 and Square.column-1>=0:\n self.HorseMove(Square.row-2, Square.column-1, movelist, Square)\n if Square.row-1>=0 and Square.column-2>=0:\n self.HorseMove(Square.row-1, Square.column-2, movelist, Square)\n if Square.row-1>=0 and Square.column+2<=7:\n self.HorseMove(Square.row-1, Square.column+2, movelist, Square)\n if Square.row+1<=7 and Square.column-2>=0:\n self.HorseMove(Square.row+1, Square.column-2, movelist, Square)\n if Square.row+1<=7 and Square.column+2<=7:\n self.HorseMove(Square.row+1, Square.column+2, movelist, Square)\n elif Square.current_piece==\"Tower\":\n for i in range(Square.row-1, -1, -1):\n if not self.IterMove(i, Square.column, movelist, Square):\n break\n for i in range(Square.column-1, -1, -1):\n if not self.IterMove(Square.row, i, movelist, Square):\n break\n for i in range(1, 8-Square.row):\n if not self.IterMove(Square.row+i, Square.column, movelist, Square):\n break\n for i in range(1, 8-Square.column):\n if not self.IterMove(Square.row, Square.column+i, movelist, Square):\n break\n elif Square.current_piece==\"Knight\":\n for i in range(1, self.MinOfTwo(Square.row, Square.column)+1):\n if not self.IterMove(Square.row-i, Square.column-i, movelist, Square):\n break\n for i in range(1, self.MinOfTwo(Square.row, 7-Square.column)+1):\n if not self.IterMove(Square.row-i, Square.column+i, movelist, Square):\n break\n for i in range(1, self.MinOfTwo(7-Square.row, Square.column)+1):\n if not self.IterMove(Square.row+i, Square.column-i, movelist, Square):\n break\n for i in range(1, self.MinOfTwo(7-Square.row, 7-Square.column)+1):\n if not self.IterMove(Square.row+i, Square.column+i, movelist, Square):\n break\n elif Square.current_piece==\"King\":\n try:\n if Square.row-1>=0:\n self.NormalMove(Square.row-1, Square.column, movelist, Square)\n if Square.row+1<=7:\n self.NormalMove(Square.row+1, Square.column, movelist, Square)\n if Square.column-1>=0:\n self.NormalMove(Square.row, Square.column-1, movelist, Square)\n if Square.column+1<=7:\n self.NormalMove(Square.row, Square.column+1, movelist, Square)\n if Square.row-1>=0 and Square.column-1>=0:\n self.NormalMove(Square.row-1, Square.column-1, movelist, Square)\n if Square.row+1<=7 and Square.column-1>=0:\n self.NormalMove(Square.row+1, Square.column-1, movelist, Square)\n if Square.row-1>=0 and Square.column+1<=7:\n self.NormalMove(Square.row-1, Square.column+1, movelist, Square)\n if Square.row+1<=7 and Square.column+1<=7:\n self.NormalMove(Square.row+1, Square.column+1, movelist, Square)\n except IndexError:\n pass\n elif Square.current_piece==\"Queen\":\n for i in 
range(Square.row-1, -1, -1):\n if not self.IterMove(i, Square.column, movelist, Square):\n break\n for i in range(Square.column-1, -1, -1):\n if not self.IterMove(Square.row, i, movelist, Square):\n break\n for i in range(1, 8-Square.row):\n if not self.IterMove(Square.row+i, Square.column, movelist, Square):\n break\n for i in range(1, 8-Square.column):\n if not self.IterMove(Square.row, Square.column+i, movelist, Square):\n break\n for i in range(1, self.MinOfTwo(Square.row, Square.column)+1):\n if not self.IterMove(Square.row-i, Square.column-i, movelist, Square):\n break\n for i in range(1, self.MinOfTwo(Square.row, 7-Square.column)+1):\n if not self.IterMove(Square.row-i, Square.column+i, movelist, Square):\n break\n for i in range(1, self.MinOfTwo(7-Square.row, Square.column)+1):\n if not self.IterMove(Square.row+i, Square.column-i, movelist, Square):\n break\n for i in range(1, self.MinOfTwo(7-Square.row, 7-Square.column)+1):\n if not self.IterMove(Square.row+i, Square.column+i, movelist, Square):\n break\n return movelist\n\n\ndef main():\n aa = tk.Tk()\n cc = ChessApp(aa)\n cc.mainloop()\n\nif __name__ == \"__main__\":\n main()\n" }, { "alpha_fraction": 0.8115941882133484, "alphanum_fraction": 0.8115941882133484, "avg_line_length": 33.5, "blob_id": "02e8be05d6a6d9276546d654bd474fe7bb53f2af", "content_id": "ac5000f5ff52ee5dbd495a35f668f53fd88728b6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 69, "license_type": "no_license", "max_line_length": 54, "num_lines": 2, "path": "/README.md", "repo_name": "dveratrallero/SimpleChess", "src_encoding": "UTF-8", "text": "# SimpleChess\nSimple Chess Game written in Python with a Tkinter GUI\n" } ]
3
Abhiram124/Swapfile
https://github.com/Abhiram124/Swapfile
ae3b7dc650476c0e600e6b2bd3e1abc745a0df7d
4a3bcf3f691a8abe782931b4cdc359fc6af7dcaa
7cf2ff5d0a3272f846a9dda5e0695e81288cfbd4
refs/heads/master
2022-12-11T09:23:53.543085
2020-09-01T10:52:35
2020-09-01T10:52:35
291,971,849
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6125355958938599, "alphanum_fraction": 0.6524216532707214, "avg_line_length": 22.785715103149414, "blob_id": "fca003a263364b971c6be3318fced028610a9a0e", "content_id": "115158251ceccb2fa97558ec3ff3262085d7dd63", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 351, "license_type": "no_license", "max_line_length": 63, "num_lines": 14, "path": "/swapfile.py", "repo_name": "Abhiram124/Swapfile", "src_encoding": "UTF-8", "text": "sm1 = \"this is sample text1\"\r\nsm2 = \"this is sample text2\"\r\n\r\n\r\nsm1 = input('Enter string to be swapped with other file: ')\r\nsm2 = input('Enter string to be swapped with the first file: ')\r\n\r\n\r\ntemp = sm1\r\nsm1 = sm2\r\nsm2 = temp\r\n\r\nprint('The value of sm1 after swapping: {}'.format(sm1))\r\nprint('The value of sm2 after swapping: {}'.format(sm2))\r\n " } ]
1
al2dev/gbhw_python
https://github.com/al2dev/gbhw_python
da339cd1b3268889917c5aeed14ec1b6e5fe6ffd
4c93f3eafbe7f1e50f3526d7bbd35f4629cd4856
bf5f3f09b2ba62593da5f2eb96da03b1aa762a17
refs/heads/main
2023-04-05T05:17:44.835030
2021-03-23T17:54:32
2021-03-23T17:54:32
338,177,224
0
0
null
2021-02-11T23:12:26
2021-03-23T17:54:35
2021-04-05T00:53:57
Python
[ { "alpha_fraction": 0.5051656365394592, "alphanum_fraction": 0.5108656883239746, "avg_line_length": 35.219356536865234, "blob_id": "7a0ec9fe762109071c8c2cfb0fdbb0f37bd6f52d", "content_id": "78415dbe51daa94f55682196a8c9c7e2a4c16aef", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6724, "license_type": "no_license", "max_line_length": 119, "num_lines": 155, "path": "/lesson_3.py", "repo_name": "al2dev/gbhw_python", "src_encoding": "UTF-8", "text": "# Home work of 3 lesson\nimport random\n\n\ndef start():\n modules = [[getattr(Tasks, 'num_translate'), 'Translate', 'Translate entered number'],\n [getattr(Tasks, 'thesaurus'), 'Thesaurus', '2'],\n [getattr(Tasks, 'get_jokes'), 'Get Jokes', 'Generate and return an array of the number of jokes']]\n\n print('''Hi, please select the required script module:''')\n while True:\n print('')\n for idx, val in enumerate(modules):\n print(f'{idx + 1:>3} - {val[1]}')\n print(f'{str():>6}{val[2]}')\n\n print(f'''\\t0 - Exit''')\n\n try:\n user_choice = int(input('>>'))\n if not user_choice:\n print('Goodbye!')\n exit()\n elif user_choice < 0 or user_choice > len(modules):\n print('Please select available identification')\n else:\n print('')\n print(modules[user_choice - 1][1])\n modules[user_choice - 1][0]()\n except ValueError:\n print('Please enter an integer selector')\n\n\nclass Tasks:\n\n \"\"\"\n Написать функцию num_translate(), переводящую числительные от 0 до 10 c английского на русский язык.\n\n * (вместо задачи 1) Доработать предыдущую функцию num_translate_adv():\n реализовать корректную работу с числительными, начинающимися с заглавной буквы.\n \"\"\"\n @staticmethod\n def num_translate():\n user_input = input('Число: ')\n symbols = ['O', 'T', 'F', 'S', 'E', 'N']\n numbers = {'one': 'один',\n 'two': 'два',\n 'tree': 'три',\n 'four': 'четыре',\n 'five': 'пять',\n 'six': 'шесть',\n 'seven': 'семь',\n 'eight': 'восемь',\n 'nine': 'девять',\n 'ten': 'десять'}\n\n if user_input[0] in symbols:\n print(f'{user_input} - {numbers.get(user_input.lower()).title()}')\n else:\n print(f'{user_input} - {numbers.get(user_input)}')\n\n \"\"\"\n Написать функцию thesaurus(), принимающую в качестве аргументов имена сотрудников и возвращающую словарь, в \n котором ключи — первые буквы имен, а значения — списки, содержащие имена, начинающиеся с соответствующей буквы.\n \n * (вместо задачи 3) Написать функцию thesaurus_adv(), принимающую в качестве аргументов строки в формате \n «Имя Фамилия» и возвращающую словарь, в котором ключи — первые буквы фамилий, а значения — словари, \n реализованные по схеме предыдущего задания и содержащие записи, в которых фамилия начинается с \n соответствующей буквы\n \"\"\"\n @staticmethod\n def thesaurus(*arr):\n arr = list(arr)\n if not arr:\n arr = ['Мурат Ибрагимов',\n 'Иван Иванов',\n 'Катя Дьяко',\n 'Ксюша Володькина',\n 'Маша Немидова',\n 'Вова Васильев',\n 'Вася Пупкин']\n result_dict = dict()\n\n ''' simple version\n for el in arr:\n if el[0] not in result_dict.keys():\n result_dict[el[0]] = [el]\n else:\n result_dict[el[0]].append(el)\n '''\n\n for el in arr:\n f_name, l_name = tuple(el.split())\n if l_name[0] not in result_dict.keys():\n result_dict[l_name[0]] = {f_name[0]: [el]}\n else:\n if f_name[0] not in result_dict[l_name[0]].keys():\n result_dict[l_name[0]][f_name[0]] = [el]\n else:\n result_dict[l_name[0]][f_name[0]].append(el)\n\n print(f'Not sorted dict: \\n{result_dict}\\n')\n\n sorted_dict = {}\n for k in sorted(result_dict.keys()):\n d = {}\n for kk in 
sorted(result_dict[k].keys()):\n d[kk] = result_dict[k][kk]\n sorted_dict[k] = d\n\n print(f'Sorted dict: \\n{sorted_dict}')\n\n \"\"\"\n Реализовать функцию get_jokes(), возвращающую n шуток, сформированных из трех случайных слов, \n взятых из трёх списков:\n nouns = [\"автомобиль\", \"лес\", \"огонь\", \"город\", \"дом\"]\n adverbs = [\"сегодня\", \"вчера\", \"завтра\", \"позавчера\", \"ночью\"]\n adjectives = [\"веселый\", \"яркий\", \"зеленый\", \"утопичный\", \"мягкий\"]\n \n Усложнение: \n * Сможете ли вы добавить еще один аргумент — флаг, разрешающий или запрещающий повторы слов в шутках\n (когда каждое слово можно использовать только в одной шутке)? \n Сможете ли вы сделать аргументы именованными?\n \"\"\"\n @staticmethod\n def get_jokes(count: int = 3, repeat: bool = False):\n \"\"\"\n Generate and return an array of the number of jokes\n\n :param count: counter for the number of jokes\n :param repeat: permission to repeat words\n :return: Array of jokes\n \"\"\"\n nouns = [\"автомобиль\", \"лес\", \"огонь\", \"город\", \"дом\"]\n adverbs = [\"сегодня\", \"вчера\", \"завтра\", \"позавчера\", \"ночью\"]\n adjectives = [\"веселый\", \"яркий\", \"зеленый\", \"утопичный\", \"мягкий\"]\n\n tmp_a = nouns.copy()\n tmp_b = adverbs.copy()\n tmp_c = adjectives.copy()\n\n arr_jokes = []\n\n for i in range(count):\n a, b, c = (random.choice(tmp_a), random.choice(tmp_b), random.choice(tmp_c))\n if not repeat:\n tmp_a.remove(a)\n tmp_b.remove(b)\n tmp_c.remove(c)\n arr_jokes.append(' '.join([a, b, c]))\n print(arr_jokes)\n\n\nif __name__ == '__main__':\n start()\n" }, { "alpha_fraction": 0.5511363744735718, "alphanum_fraction": 0.5511363744735718, "avg_line_length": 24.14285659790039, "blob_id": "11612170ba0eef8082f17bb722719de3be4726c1", "content_id": "6a966aa3989560f672db322a103ea9c40be962c9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 176, "license_type": "no_license", "max_line_length": 54, "num_lines": 7, "path": "/get_valute.py", "repo_name": "al2dev/gbhw_python", "src_encoding": "UTF-8", "text": "from utils import currency_rates\n\nif __name__ == '__main__':\n codes = ['USD', 'BYN', 'CNY', 'JPY', 'TRY', 'NSV']\n\n for code in codes:\n print(currency_rates(code))\n" }, { "alpha_fraction": 0.46715328097343445, "alphanum_fraction": 0.4825940430164337, "avg_line_length": 34.97979736328125, "blob_id": "d597c488b32f881e20c6ec5869ad3ed9f881e694", "content_id": "97019b7627121b23649e770c8564af8238f54541", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3597, "license_type": "no_license", "max_line_length": 118, "num_lines": 99, "path": "/lession_1.py", "repo_name": "al2dev/gbhw_python", "src_encoding": "UTF-8", "text": "# Home work of 1 lesson\n\ndef start():\n modules = [[getattr(Tasks, 'task1'), 'duration of duration', 'Convert duration seconds to human readable format'],\n [getattr(Tasks, 'task2'), 'sum of digits', 'Sum of digits, odd numbers from 1 ^ 3 to 1000 ^ 3,'\n ' which are divisible by 7'],\n [getattr(Tasks, 'task3'), 'declension of a word', 'Selection of the end of a word depending on'\n ' the number']]\n\n print('''Hi, please select the required script module:''')\n while True:\n print('''\\n''')\n for idx, val in enumerate(modules):\n print(f''' {idx + 1} - {val[1]}\\n {val[2]}''')\n print(''' 0 - Exit''')\n\n try:\n user_choice = int(input())\n if not user_choice:\n print('Goodbye!')\n exit()\n elif user_choice < 0 or user_choice > len(modules):\n print('Please 
select available identification')\n else:\n modules[user_choice - 1][0]()\n except ValueError:\n print('Please enter an integer selector')\n\n\nclass Tasks:\n @staticmethod\n def task1():\n # Change to display null values\n output_zero = True\n\n second = 1\n minute = second * 60\n hours = minute * 60\n day = hours * 24\n res = ''\n\n try:\n duration_remainder = int(input())\n if duration_remainder // day:\n res += f'{duration_remainder // day} дн '\n duration_remainder = duration_remainder - duration_remainder // day * day\n check_zero = True\n\n if duration_remainder // hours or not output_zero:\n res += f'{duration_remainder // hours} час '\n duration_remainder = duration_remainder - duration_remainder // hours * hours\n check_zero = True\n\n if duration_remainder // minute or not output_zero:\n res += f'{duration_remainder // minute} мин '\n duration_remainder = duration_remainder - duration_remainder // minute * minute\n\n res += f'{duration_remainder} сек '\n print(res)\n except ValueError:\n print('Please enter an integer selector')\n\n @staticmethod\n def task2():\n for n in range(1, 1000):\n n_cube = n ** 3\n remainder = n_cube\n summa = 0\n while True:\n if remainder > 10:\n summa += remainder % 10\n remainder = int((remainder - remainder % 10) / 10)\n else:\n summa += remainder\n break\n if not summa % 7:\n print(f'''number: {n_cube} sum: {summa}''')\n\n @staticmethod\n def task3():\n ending_array = [' процент', ' процента', ' процентов']\n try:\n user_percent = input()\n if not int(user_percent) < 1 and not int(user_percent) > 20:\n if user_percent in ['1']:\n user_percent += ending_array[0]\n elif user_percent in ['2', '3', '4']:\n user_percent += ending_array[1]\n else:\n user_percent += ending_array[2]\n else:\n raise ValueError\n print(user_percent)\n except ValueError:\n print('Please enter an available value')\n\n\nif __name__ == '__main__':\n start()\n" }, { "alpha_fraction": 0.5330246686935425, "alphanum_fraction": 0.5549382567405701, "avg_line_length": 34.02702713012695, "blob_id": "f8aba2d7399fce43d8d5743190b0b630b8a44f70", "content_id": "bb2b7b250cb1791f370db79e02ebb7e64eb33358", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8278, "license_type": "no_license", "max_line_length": 124, "num_lines": 185, "path": "/lesson_2.py", "repo_name": "al2dev/gbhw_python", "src_encoding": "UTF-8", "text": "# Home work of 2 lesson\nimport re\nimport random\n\n\ndef start():\n modules = [[getattr(Tasks, 'task1'), 'Type of', 'Show type of result different operations'],\n [getattr(Tasks, 'task2'), 'Change list', 'Change and show weather'],\n [getattr(Tasks, 'task3'), 'Say hello', 'Say hello by name'],\n [getattr(Tasks, 'task4'), 'Cost', 'Show modified cost']]\n\n print('''Hi, please select the required script module:''')\n while True:\n print('')\n for idx, val in enumerate(modules):\n print(f'{idx + 1:>3} - {val[1]}')\n print(f'{str():>6}{val[2]}')\n\n print(f'''\\t0 - Exit''')\n\n try:\n user_choice = int(input('>>'))\n if not user_choice:\n print('Goodbye!')\n exit()\n elif user_choice < 0 or user_choice > len(modules):\n print('Please select available identification')\n else:\n print('')\n print(modules[user_choice - 1][1])\n modules[user_choice - 1][0]()\n except ValueError:\n print('Please enter an integer selector')\n\n\nclass Tasks:\n\n \"\"\" 1. 
Выяснить тип результата выражений:\n 15 * 3\n 15 / 3\n 15 // 2\n 15 ** 2\n \"\"\"\n\n @staticmethod\n def task1():\n op1 = 15 * 3\n op2 = 15 / 3\n op3 = 15 // 2\n op4 = 15 ** 2\n print(f'{\"15 * 3\"} type {type(op1)}')\n print(f'{\"15 / 3\"} type {type(op2)}')\n print(f'{\"15 // 3\"} type {type(op3)}')\n print(f'{\"15 ** 3\"} type {type(op4)}')\n\n \"\"\"Дан список: ['в', '5', 'часов', '17', 'минут', 'температура', 'воздуха', 'была', '+5', 'градусов']\n \n Необходимо его обработать — обособить каждое целое число (вещественные не трогаем) кавычками (добавить кавычку до \n и кавычку после элемента списка, являющегося числом) и дополнить нулём до двух целочисленных разрядов:\n \n ['в', '\"', '05', '\"', 'часов', '\"', '17', '\"', 'минут', 'температура', 'воздуха', 'была', '\"', '+05', '\"', 'градусов']\n \n Сформировать из обработанного списка строку:\n \n в \"05\" часов \"17\" минут температура воздуха была \"+05\" градусов\n \n Подумать, какое условие записать, чтобы выявить числа среди элементов списка? \n Как модифицировать это условие для чисел со знаком?\n Примечание: если обособление чисел кавычками не будет получаться - можете вернуться к его реализации позже. \n Главное: дополнить числа до двух разрядов нулем!\n\n *(вместо задачи 2) Решить задачу 2 не создавая новый список (как говорят, in place). \n Эта задача намного серьезнее, чем может сначала показаться.\n \"\"\"\n\n @staticmethod\n def task2():\n lst = ['в', '5', 'часов', '17', 'минут', 'температура', 'воздуха', 'была', '+5', 'градусов']\n without_space = [0]\n need_add = False\n\n for idx, val in enumerate(lst):\n pattern = r'([-+])(\\d)'\n num = re.findall(pattern, val)\n\n if num and idx not in without_space:\n exec_elem = lst.pop(idx)\n a, b = num[0]\n mode_elem = a + '0' + b if len(b) < 2 else exec_elem\n lst.insert(idx, mode_elem)\n need_add = True\n\n elif val.isnumeric() and len(val) < 2:\n exec_elem = lst.pop(idx)\n mode_elem = '0' + exec_elem\n lst.insert(idx, mode_elem)\n\n if need_add or val.isnumeric() and lst[idx - 1] != '\"' and lst[idx + 1] != '\"':\n need_add = False\n\n # Так как пробелы добавляем перед словом\n without_space.extend([idx + 1, idx + 2])\n\n exec_elem = lst.pop(idx)\n\n lst.insert(idx, '\"')\n lst.insert(idx + 1, exec_elem)\n lst.insert(idx + 2, '\"')\n\n result_str = ''\n for idx, val in enumerate(lst):\n result_str += val if idx in without_space else f' {val}'\n\n print(lst)\n print(result_str)\n\n \"\"\"Дан список, содержащий искажённые данные с должностями и именами сотрудников:\n ['инженер-конструктор Игорь', 'главный бухгалтер МАРИНА', 'токарь высшего разряда нИКОЛАй', 'директор аэлита']\n Известно, что имя сотрудника всегда в конце строки. Сформировать из этих имен и вывести на экран фразы вида: \n 'Привет, Игорь!' Подумать, как получить имена сотрудников из элементов списка, как привести их к корректному виду. \n Можно ли при этом не создавать новый список?\n \"\"\"\n\n @staticmethod\n def task3():\n lst = ['инженер-конструктор Игорь', 'главный бухгалтер МАРИНА', 'токарь высшего разряда нИКОЛАй', 'директор аэлита']\n\n for v in lst:\n i = len(v) - 1\n name = ''\n\n while i > 0:\n if v[i] == ' ':\n break\n name += v[i]\n i -= 1\n\n hello = 'Привет, ' + name[:-2:-1].upper() + name[-2::-1].lower() + '!'\n print(hello)\n\n \"\"\"Создать вручную список, содержащий цены на товары (10–20 товаров), например: [57.8, 46.51, 97, ...]\n \n Вывести на экран эти цены через запятую в одну строку, цена должна отображаться в виде <r> руб <kk> коп \n (например «5 руб 04 коп»). 
\n \n Подумать, как из цены получить рубли и копейки, как добавить нули, если, например, получилось 7 копеек или 0 копеек \n (должно быть 07 коп или 00 коп). \n \n Вывести цены, отсортированные по возрастанию, новый список не создавать \n (доказать, что объект списка после сортировки остался тот же).\n \n Создать новый список, содержащий те же цены, но отсортированные по убыванию.\n Вывести цены пяти самых дорогих товаров. Сможете ли вывести цены этих товаров по возрастанию, написав минимум кода?\n \"\"\"\n\n @staticmethod\n def task4():\n costs = []\n\n for v in range(random.randint(10, 20)):\n costs.append(round(random.uniform(1, 99), 2))\n\n print(costs)\n\n Tasks.show_price(costs)\n Tasks.show_price(sorted(costs))\n\n costs_small_to_big = []\n costs_small_to_big.extend(reversed(sorted(costs)))\n print(costs_small_to_big)\n\n Tasks.show_price(costs_small_to_big[:5])\n\n @staticmethod\n def show_price(arr):\n print()\n for val in arr:\n rub = int(val)\n cent = int(val % rub * 100)\n rounded_cent = cent if cent > 9 else '0' + str(cent)\n print(f'{rub} руб {rounded_cent} коп')\n\n\nif __name__ == '__main__':\n start()\n" }, { "alpha_fraction": 0.5636672377586365, "alphanum_fraction": 0.5806451439857483, "avg_line_length": 25.772727966308594, "blob_id": "a161b524efb6e61d7f37c7dcf570ac286fb66c0c", "content_id": "842082e4b80e5ecfcd7a07f284e8b52182901ddb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 589, "license_type": "no_license", "max_line_length": 121, "num_lines": 22, "path": "/show_sale.py", "repo_name": "al2dev/gbhw_python", "src_encoding": "UTF-8", "text": "from sys import argv\n\n\nbakery_filename = 'bakery.csv'\n\nif __name__ == '__main__':\n bakery_file = open(bakery_filename, 'r', encoding='utf-8')\n bakery_file.seek(0)\n print('Ready to show')\n\n if len(argv) < 2:\n print(bakery_file.read())\n\n elif len(argv) == 2:\n gen = [line for idx, line in enumerate(bakery_file.readlines()) if idx + 2 > int(argv[1])]\n print(gen)\n\n elif len(argv) == 3:\n gen = [line for idx, line in enumerate(bakery_file.readlines()) if idx + 2 > int(argv[1]) and idx < int(argv[2])]\n print(gen)\n\n bakery_file.close()\n" }, { "alpha_fraction": 0.6193990707397461, "alphanum_fraction": 0.6267791390419006, "avg_line_length": 28.184616088867188, "blob_id": "7ad3c5ba54060b02ed74adf085a0732eb6306cde", "content_id": "8e6dac2970c8eea3dd640a5a2e3105bf5a572e13", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4468, "license_type": "no_license", "max_line_length": 115, "num_lines": 130, "path": "/lesson_6.py", "repo_name": "al2dev/gbhw_python", "src_encoding": "UTF-8", "text": "# Home work of 6 lesson\nfrom requests import get\nfrom itertools import islice\nfrom itertools import zip_longest\n\n# global variable <'ip: str': query: int>\nclients = {}\n\n\n# Methods for 1 tasks\n\ndef download_log(url):\n local_filename = f\"{url.split('/')[-1]}.txt\"\n with get(url, stream=True) as r:\n r.raise_for_status()\n with open(local_filename, 'wb') as f:\n for chunk in r.iter_content(chunk_size=8192):\n f.write(chunk)\n return local_filename\n\n\ndef get_clients_query(file):\n return [get_data(line) for line in file]\n\n\ndef get_data(line):\n global clients\n str_line = line.replace('\"', '')\n arr_line = str_line.split()\n ip, method, path = arr_line[0], arr_line[5], arr_line[6]\n if ip in clients:\n clients[ip] += 1\n else:\n clients[ip] = 1\n return ip, method, path\n\n\n# Methods for 2 task\n\ndef 
get_spamer(file):\n get_clients_query(file)\n sorted_clients = sorted(clients.items(), key=lambda item: item[1], reverse=True)\n for ip, query in sorted_clients:\n yield ip, query\n\n\n# Methods for 3 task\n\ndef create_test_files(users_filename, hobbies_filename):\n users = '''Иванов,Иван,Иванович\\nПетров,Петр,Петрович\\nМаксимов,Максим,Максимыч'''\n hobbies = '''скалолазание,охота\\nгорные лыжи'''\n\n file_user = open(users_filename, 'w')\n file_hobby = open(hobbies_filename, 'w')\n\n file_user.write(users)\n file_hobby.write(hobbies)\n\n file_user.close()\n file_hobby.close()\n\n\ndef create_end_file(users_filename, hobbies_filename):\n file_user = open(users_filename, 'r')\n file_hobby = open(hobbies_filename, 'r')\n\n result = {key: val for key, val in zip_longest(file_user, file_hobby, fillvalue=None) if key is not None}\n print(result)\n\n end_file = open('users_hobby.csv', 'w')\n end_file.write(str(result)\n .replace('{', '')\n .replace('}', '')\n .replace('\\'', '')\n .replace('\\\\n', '')\n .replace(', ', '\\n'))\n\n end_file.close()\n file_user.close()\n file_hobby.close()\n\n\nif __name__ == '__main__':\n\n # download logs\n url = 'https://github.com/elastic/examples/raw/master/Common%20Data%20Formats/nginx_logs/nginx_logs'\n logs_filename = download_log(url)\n print(logs_filename)\n\n # open file\n logs_file = open(logs_filename, 'r')\n\n '''\n Не используя библиотеки для парсинга, распарсить файл логов web-сервера nginx_logs.txt\n — получить список кортежей вида: (<remote_addr>, <request_type>, <requested_resource>).\n '''\n # 1 task\n print('\\n1 task')\n clients_query = get_clients_query(logs_file)\n print(clients_query[:20])\n\n '''\n Найти IP адрес спамера и количество отправленных им запросов по данным файла логов из предыдущего задания. \n Спамер — это клиент, отправивший больше всех запросов; \n код должен работать даже с файлами, размер которых превышает объем ОЗУ компьютера.\n '''\n # 2 task\n print('\\n2 task')\n show_bad_guys = 5\n spam = get_spamer(logs_file)\n print(*islice(spam, show_bad_guys))\n\n # close file\n logs_file.close()\n\n '''\n Есть два файла: в одном хранятся ФИО пользователей сайта, а в другом — данные об их хобби. \n Известно, что при хранении данных используется принцип: одна строка — один пользователь, \n разделитель между значениями — запятая. \n Написать код, загружающий данные из обоих файлов и формирующий из них словарь: \n ключи — ФИО, значения — данные о хобби. 
\n Сохранить словарь в файл.\n '''\n\n # 3 task\n print('\\n3 task')\n users_filename = 'users.csv'\n hobbies_filename = 'hobby.csv'\n create_test_files(users_filename, hobbies_filename)\n create_end_file(users_filename, hobbies_filename)\n" }, { "alpha_fraction": 0.6157718300819397, "alphanum_fraction": 0.6258389353752136, "avg_line_length": 28.799999237060547, "blob_id": "70829eab0ad9058368c5effe72e9097ec3c43c17", "content_id": "b005b4a54f60928723653829f71f52fe6b912a6b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1192, "license_type": "no_license", "max_line_length": 102, "num_lines": 40, "path": "/utils.py", "repo_name": "al2dev/gbhw_python", "src_encoding": "UTF-8", "text": "from requests import get, utils\nfrom datetime import date\n\n\ndef currency_rates(argv):\n if type(argv) == list:\n programm, valute = argv\n else:\n valute = argv\n\n response = get('https://www.cbr.ru/scripts/XML_daily.asp')\n enc = utils.get_encoding_from_headers(response.headers)\n content = response.content.decode(enc)\n\n # Get date\n date_find = 'Date=\"'\n date_pos = content.find(date_find)\n date_len = len('2020.20.20')\n date_str = content[date_pos + len(date_find): date_pos + len(date_find) + date_len]\n date_arr = date_str.split('.')\n date_obj = date(int(date_arr[2]), int(date_arr[1]), int(date_arr[0]))\n\n # Get value valute\n valute_find = valute.upper()\n tag_start = '<Value>'\n tag_stop = '</Value>'\n valute_pos = content.find(valute_find)\n if valute_pos < 0:\n return None\n value_start = content[valute_pos:].find(tag_start) + len(tag_start)\n value_stop = content[valute_pos:].find(tag_stop)\n valute_value = float(content[valute_pos + value_start: valute_pos + value_stop].replace(',', '.'))\n\n return valute_value, date_obj\n\n\nif __name__ == '__main__':\n import sys\n\n exit(currency_rates(sys.argv))\n" }, { "alpha_fraction": 0.5675675868988037, "alphanum_fraction": 0.5769442915916443, "avg_line_length": 26.469696044921875, "blob_id": "bbce0b12682f5e21873f2bb892cf01e3a3ce6512", "content_id": "1820aa8f8638b54139e783a91a7bb63ea43960fa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2153, "license_type": "no_license", "max_line_length": 118, "num_lines": 66, "path": "/lesson_8.py", "repo_name": "al2dev/gbhw_python", "src_encoding": "UTF-8", "text": "import re\n\n\n\"\"\"\n Написать функцию email_parse(<email_address>), которая при помощи регулярного выражения извлекает имя пользователя\n и почтовый домен из email адреса и возвращает их в виде словаря. 
\n Если адрес не валиден, выбросить исключение ValueError.\n\"\"\"\n\n\ndef email_parse(email_address):\n RE_EMAIL = re.compile(r'(?P<name>\\w+|(\\w+(.|-|_)\\w+){1,10})[@](?P<domain>\\w+\\.\\w{2,6})')\n res = RE_EMAIL.search(email_address)\n if res:\n return {'name': res.group('name'), 'domain': res.group('domain')}\n else:\n raise ValueError('Email not found')\n\n\n\"\"\"\n Написать декоратор для логирования типов позиционных аргументов функции\n\n * Написать декоратор с аргументом-функцией (callback), позволяющий валидировать входные значения функции\n и выбрасывать исключение ValueError, если что-то не так\n\"\"\"\n\n\ndef val_checker(func):\n res = func.__name__\n\n def get_wrapper(*args):\n if len(args):\n markups = []\n types = []\n for el in args:\n if el > 0:\n markups.append(func(el))\n types.append(f'{res}({el}: {type(el)})')\n else:\n raise ValueError(f'wrong val {el}')\n print(types)\n return markups\n else:\n raise ValueError(f'wrong val {args}')\n return get_wrapper\n\n\n@val_checker\ndef calc_cube(x):\n return x ** 3\n\n\nif __name__ == '__main__':\n # task 1\n print(email_parse('[email protected]')) # ok\n print(email_parse('[email protected]')) # ok\n print(email_parse('someone@geekbrainsru')) # exception\n print(email_parse('example@gmail_com')) # exception\n print(email_parse('[email protected]')) # ok\n\n # task 3, 4\n print(calc_cube(3))\n print(calc_cube(3, 5, 7))\n print(calc_cube(9, -3))\n print(calc_cube(-1))\n print(calc_cube())\n" }, { "alpha_fraction": 0.582812488079071, "alphanum_fraction": 0.6273437738418579, "avg_line_length": 29.4761905670166, "blob_id": "9e712d6c624e6c3682b3051c55c8cbae93e9e889", "content_id": "9ab8ccd0d00bd3e8472569b261ae9c65c6f07ff2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5208, "license_type": "no_license", "max_line_length": 120, "num_lines": 126, "path": "/lesson_5.py", "repo_name": "al2dev/gbhw_python", "src_encoding": "UTF-8", "text": "# Home work of 5 lesson\nfrom itertools import islice\n\n\n''' 1\n Написать генератор нечётных чисел от 1 до n (включительно), без использования ключевого слова yield,\n полностью истощить генератор.\n'''\n\n\ndef iterator_without_yield(max_num):\n if max_num ** 2 > 200:\n return None\n return (num for num in range(1, max_num + 1, 2))\n\n\n''' 2\n Написать генератор нечётных чисел от 1 до n (включительно), используя ключевое слово yield. \n Полностью истощить генератор.\n Усложнение(*):\n С ключевым словом yield - как в задании 1: генератор нечётных чисел от 1 до n (включительно), \n для чисел, квадрат которых меньше 200.\n \n Усложнение(**):\n С ключевым словом yield: Вычислять и возвращать само число и накопительную сумму этого и предыдущих чисел. \n'''\n\n\ndef iterator_with_yield(max_num):\n if max_num ** 2 > 200:\n return None\n amount = 0\n for num in range(1, max_num + 1, 2):\n amount += num\n yield num, amount\n\n\n''' 3\n Есть два списка:\n tutors = [\n 'Иван', 'Анастасия', 'Петр', 'Сергей', \n 'Дмитрий', 'Борис', 'Елена'\n ]\n klasses = [\n '9А', '7В', '9Б', '9В', '8Б', '10А', '10Б', '9А'\n ]\n Необходимо реализовать генератор, возвращающий кортежи вида (<tutor>, <klass>), например:\n \n ('Иван', '9А')\n ('Анастасия', '7В')\n ...\n \n Количество генерируемых кортежей не должно быть больше длины списка tutors. Если в списке klasses меньше элементов, \n чем в списке tutors, необходимо вывести последние кортежи в виде: (<tutor>, None), например:\n \n ('Станислав', None)\n \n Доказать, что вы создали именно генератор. 
Проверить его работу вплоть до истощения. Подумать, в каких ситуациях \n генератор даст эффект.\n'''\n\n\ndef tutors_classes(tut, kl):\n for num in range(0, len(tut)):\n c = kl[num] if num < len(kl) else None\n yield tut[num], c\n\n\n''' 4\n Представлен список чисел. Необходимо вывести те его элементы, значения которых больше предыдущего, например:\n src = [300, 2, 12, 44, 1, 1, 4, 10, 7, 1, 78, 123, 55]\n result = [12, 44, 4, 10, 78, 123]\n Выводит или не выводить первый элемент - решите сами. Используйте генераторы или генераторные выражения.\n Подумайте, как можно сделать оптимизацию кода по памяти, по скорости.\n'''\n\n\ndef bigger(arr):\n return [num_2 for num_1, num_2 in zip(arr, arr[1:]) if num_2 > num_1]\n\n\n''' 5\n Представлен список чисел. Определить элементы списка, не имеющие повторений. Сформировать из этих элементов список \n с сохранением порядка их следования в исходном списке, например:\n src = [2, 2, 2, 7, 23, 1, 44, 44, 3, 2, 10, 7, 4, 11]\n result = [23, 1, 3, 10, 4, 11]\n Используйте генераторы или генераторные выражения.\n Сначала найдите способ определить уникальность элемента в списке. Подумайте о сохранении порядка исходного списка.\n'''\n\n\ndef liquidation_clone(arr):\n return [num for idx, num in enumerate(arr) if num not in arr[:idx] and num not in arr[idx+1:]]\n\n\nif __name__ == '__main__':\n num = 11\n\n # 1 task\n print('\\n1 task')\n gen1 = iterator_without_yield(num)\n print(*islice(gen1, num))\n\n # 2 task\n print('\\n2 task')\n gen2 = iterator_with_yield(num)\n print(*islice(gen2, num))\n\n # 3 task\n print('\\n3 task')\n tutors = ['Иван', 'Анастасия', 'Петр', 'Сергей', 'Дмитрий', 'Борис', 'Елена']\n klasses = ['9А', '7В', '9Б', '9В']\n res_tutors_classes = tutors_classes(tutors, klasses)\n print(*res_tutors_classes)\n\n # 4 task\n print('\\n4 task')\n src = [300, 2, 12, 44, 1, 1, 4, 10, 7, 1, 78, 123, 55]\n res_bigger = bigger(src)\n print(res_bigger)\n\n # 5 task\n print('\\n5 task')\n clones = [2, 2, 2, 7, 23, 1, 44, 44, 3, 2, 10, 7, 4, 11]\n res_liquidation_clone = liquidation_clone(clones)\n print(clones, res_liquidation_clone)\n" }, { "alpha_fraction": 0.4993935823440552, "alphanum_fraction": 0.5160703659057617, "avg_line_length": 26.483333587646484, "blob_id": "a04e4f716f85c7de0aca76e749231f8dfbdf3554", "content_id": "40b91adc1148d72a1ea635fb40d28b63120d547e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3812, "license_type": "no_license", "max_line_length": 117, "num_lines": 120, "path": "/lesson_7.py", "repo_name": "al2dev/gbhw_python", "src_encoding": "UTF-8", "text": "# # Home work of 7 lesson\nimport os\nimport shutil\n\n'''\n 1. 
Написать скрипт, создающий стартер (заготовку) для проекта со следующей структурой папок:\n |--my_project\n |--settings\n |--mainapp\n |--adminapp\n |--authapp\n'''\n\n\ndef create_blank():\n main_folder_name = 'my_project'\n child_folders_name = ['settings', 'mainapp', 'adminapp', 'authapp']\n\n for folder in child_folders_name:\n path = os.path.join(main_folder_name, folder)\n os.makedirs(path)\n\n\n'''\n |--my_project\n |--settings\n | |--__init__.py\n | |--dev.py\n | |--prod.py\n |--mainapp\n | |--__init__.py\n | |--models.py\n | |--views.py\n | |--templates\n | |--mainapp\n | |--base.html\n | |--index.html\n |--authapp\n | |--__init__.py\n | |--models.py\n | |--views.py\n | |--templates\n | |--authapp\n | |--base.html\n | |--index.html\n \n Создать структуру файлов и папок, как написано в задании 2 (при помощи скрипта или «руками» в проводнике).\n Написать скрипт, который собирает все шаблоны в одну папку templates, например:\n |--my_project\n ...\n |--templates\n | |--mainapp\n | | |--base.html\n | | |--index.html\n | |--authapp\n | |--base.html\n | |--index.html\n'''\n\n\ndef copy_templates2():\n base_folder_path = os.path.abspath('my_project')\n name_folder_templates = 'templates'\n folder_templates_path = os.path.join(base_folder_path, name_folder_templates)\n\n for folder in os.listdir(base_folder_path):\n local_path = os.path.join(base_folder_path, folder)\n if os.path.isdir(local_path):\n for root, dirs, files in os.walk(local_path):\n if name_folder_templates in root:\n if len(dirs):\n s = os.path.join(root, dirs[0])\n d = os.path.join(folder_templates_path, dirs[0])\n if not os.path.exists(d):\n shutil.copytree(s, d)\n\n\n'''\n Написать скрипт, который выводит статистику для заданной папки в виде словаря, в котором ключи — верхняя граница \n размера файла (пусть будет кратна 10), а значения — общее количество файлов (в том числе и в подпапках), \n размер которых не превышает этой границы, но больше предыдущей (начинаем с 0), например:\n {\n 100: 15,\n 1000: 3,\n 10000: 7,\n 100000: 2\n }\n Тут 15 файлов размером не более 100 байт; 3 файла больше 100 и не больше 1000 байт...\n'''\n\n\ndef show_sizes(name_dir):\n db = {}\n path_to_dir = os.path.abspath(name_dir)\n for file in os.listdir(path_to_dir):\n path_to_file = os.path.join(path_to_dir, file)\n if os.path.isfile(path_to_file):\n size = os.stat(path_to_file).st_size\n k = 10\n while True:\n if not bool(size // k):\n break\n else:\n k *= 10\n if db.get(k):\n db[k] += 1\n else:\n db[k] = 1\n print(db)\n\n\nif __name__ == '__main__':\n # task 1\n create_blank()\n\n # task 3\n copy_templates2()\n\n # task 4\n show_sizes('some_data')\n" }, { "alpha_fraction": 0.5597345232963562, "alphanum_fraction": 0.5730088353157043, "avg_line_length": 24.11111068725586, "blob_id": "b3a03196caa9ffe0aec2860abe07c9984834eca5", "content_id": "fe9bf366b9fdd5ead777f2d57792ea597218ddde", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 452, "license_type": "no_license", "max_line_length": 63, "num_lines": 18, "path": "/add_sale.py", "repo_name": "al2dev/gbhw_python", "src_encoding": "UTF-8", "text": "from sys import argv\n\n\nbakery_filename = 'bakery.csv'\n\nif __name__ == '__main__':\n bakery_file = open(bakery_filename, 'a+', encoding='utf-8')\n print('Ready to add')\n\n if len(argv) < 2:\n print('Please enter number')\n elif len(argv) == 2:\n bakery_file.write(str(argv[1]) + '\\n')\n print('Was added:', argv[1])\n elif len(argv) == 3:\n print(\"Doesn't support many args, enter one 
number\")\n\n bakery_file.close()\n" } ]
11
lwl27/travel_itinerary
https://github.com/lwl27/travel_itinerary
154b16fd5b8935c4aa2dda16f72d5c96b33f7b0b
ec61604c5bf43349242b1a47e9a3a2c100d671b6
76a7a7f3b9dbd5f28604d37bdd7d5751b39f394f
refs/heads/master
2019-04-30T13:08:13.460541
2015-02-23T09:22:31
2015-02-23T09:22:31
31,193,985
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7249698638916016, "alphanum_fraction": 0.7322074770927429, "avg_line_length": 27.586206436157227, "blob_id": "d73345324d0849fc8983a4cce675d0a9ab4eec08", "content_id": "8a680883c15e40f7615022d46dda85abbb77a740", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 829, "license_type": "permissive", "max_line_length": 66, "num_lines": 29, "path": "/travel_itinerary/itinerary/models.py", "repo_name": "lwl27/travel_itinerary", "src_encoding": "UTF-8", "text": "from django.db import models\nfrom django.core.urlresolvers import reverse\n\nclass ItineraryQuerySet(models.QuerySet):\n def published(self):\n return self.filter(published=True)\n\nclass Itinerary(models.Model):\n title = models.CharField(max_length=200)\n body = models.TextField()\n slug = models.SlugField(max_length=200, unique=True)\n published = models.BooleanField(default=False)\n created = models.DateTimeField(auto_now_add=True)\n modified = models.DateTimeField(auto_now=True)\n\n objects = ItineraryQuerySet.as_manager()\n\n def get_absolute_url(self):\n return reverse(\"itinerary_detail\", kwargs={\"slug\": self.slug})\n\n def __str__(self):\n return self.title\n\n class Meta:\n verbose_name = \"Travel Itinerary\"\n verbose_name_plural = \"Travel Itineraries\"\n ordering = [\"-created\"]\n\n# Create your models here.\n" }, { "alpha_fraction": 0.8275862336158752, "alphanum_fraction": 0.8275862336158752, "avg_line_length": 28, "blob_id": "7c988cb3a8f72c51c5c7d6d789aa19be0c1e4f7f", "content_id": "699ce05132b4e9930ec2f074855e8931072affb6", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 58, "license_type": "permissive", "max_line_length": 38, "num_lines": 2, "path": "/README.md", "repo_name": "lwl27/travel_itinerary", "src_encoding": "UTF-8", "text": "# travel_itinerary\nA simple travel itinerary sharing site\n" }, { "alpha_fraction": 0.7688022255897522, "alphanum_fraction": 0.7715877294540405, "avg_line_length": 24.64285659790039, "blob_id": "937d322349af6944820ee40ee65b9f668f21484a", "content_id": "aa7b8b7c95470c334d689eb754955b6a14f2714a", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 359, "license_type": "permissive", "max_line_length": 49, "num_lines": 14, "path": "/travel_itinerary/itinerary/views.py", "repo_name": "lwl27/travel_itinerary", "src_encoding": "UTF-8", "text": "from django.shortcuts import render\nfrom django.views import generic\nfrom . import models\n\nclass ItineraryIndex(generic.ListView):\n queryset = models.Itinerary.objects.published()\n template_name = 'home.html'\n paginate_by = 2\n\nclass ItineraryDetail(generic.DetailView):\n model = models.Itinerary\n template_name = \"detail.html\"\n\n# Create your views here.\n" }, { "alpha_fraction": 0.6746031641960144, "alphanum_fraction": 0.6746031641960144, "avg_line_length": 30.5, "blob_id": "56a71cb6c643ce7f1580fa30c2dc479e04cc90a6", "content_id": "3e88d8fce620053cd45569cf39dbf6d166e8af10", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 252, "license_type": "permissive", "max_line_length": 86, "num_lines": 8, "path": "/travel_itinerary/itinerary/urls.py", "repo_name": "lwl27/travel_itinerary", "src_encoding": "UTF-8", "text": "from django.conf.urls import patterns, include, url\nfrom . 
import views\n\nurlpatterns = patterns(\n '',\n url(r'^$', views.ItineraryIndex.as_view(), name=\"index\"),\n url(r'^i/(?P<slug>\\S+)$', views.ItineraryDetail.as_view(), name=\"itinerary_detail\"),\n)\n" }, { "alpha_fraction": 0.5243902206420898, "alphanum_fraction": 0.6951219439506531, "avg_line_length": 15.399999618530273, "blob_id": "cbc3c3ce1130278ee72cbee3b1a8ed38ab407776", "content_id": "84e68d962462e3e4d74b3613ba2f31406cb66412", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 82, "license_type": "permissive", "max_line_length": 22, "num_lines": 5, "path": "/requirements.txt", "repo_name": "lwl27/travel_itinerary", "src_encoding": "UTF-8", "text": "Django==1.7.4\nMarkdown==2.6\nargparse==1.2.1\ndjango-markdown==0.8.4\nwsgiref==0.1.2\n" } ]
5
wallymathieu/smileynoise
https://github.com/wallymathieu/smileynoise
21c06bb940922755c24190fa0f12ee61c8addc3b
2b7646f9e08aa42bddd4f8cabf0dbccd26c7ef50
fc62c070c522409625ac9a49a10043130afc84ea
refs/heads/master
2020-06-08T20:52:14.979075
2017-02-09T19:22:20
2017-02-09T19:22:20
519,963
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.4609120488166809, "alphanum_fraction": 0.4959283471107483, "avg_line_length": 35.14706039428711, "blob_id": "c650eec401c0c00ea484c021851ca38ff11a9b99", "content_id": "d0af9cef0411bd7b5416aa61fe2e258016c8e850", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1235, "license_type": "no_license", "max_line_length": 509, "num_lines": 34, "path": "/smileys.py", "repo_name": "wallymathieu/smileynoise", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# encoding: utf-8\n\"\"\"\nsmileys.py\n\nCreated by Oskar Gewalli on 2010-02-15.\nCopyright (c) 2010 __MyCompanyName__. All rights reserved.\n\"\"\"\n\nimport sys\nimport os\nimport unittest\nimport re\n\nclass SmileysValidation:\n\tdef __init__(self):\n\t\tself.chars = re.compile(u\"^\\s*['%sÞ]*\\s*$\" % re.escape(u'^#&)(*-,/.0398€;:=<>@CBDOPSTX[]\\_cbdmoqpuwv}|~!\"'),re.UNICODE)\n\t\tpass\n\tdef isValid(self,txt):\n\t\treturn self.chars.match(txt)\n\nclass smileysTests(unittest.TestCase):\n\tdef setUp(self):\n\t\tself.smileys = u\":-) :) :o) :D :] :3 :c) :> =] 8) =) C: :-D :D 8D XD =D =3 <=3 <=8 <=3 <=8 8===D ( o )( o ) :-( :( :c :< :[ D: D8 D; D= DX v.v :-9 ;-) ;) *) ;] ;D :-P :P XP :-p :p =p :-Þ :Þ :-b :b :-O :O O_O o_o 8O OwO O-O 0_o O_o O3O o0o ;o_o; o...o 0w0 :-/ :/ :\\ =/ =\\ :S :| d:-) qB-) :)~ :-X :X :-# :# O:-) 0:3 O:) :'( ;*( T_T TT_TT T.T :-* :* >:) >;) B) B-) 8) 8-) ^>.>^ ^<.<^ ^>_>^ ^<_<^ <3 <333 =^_^= =>.>= =<_<= =>.<= \\,,/ \\m/ \\m/\\>.</\\m/ \\o/ \\o o/ o/\\o :& :u @}-;-'--- 8€ (_!_) ') \\\"_\\\"\".split(\" \")\n\t\tself.val = SmileysValidation()\n\t\tpass\n\tdef test(self):\n\t\tfor smiley in self.smileys:\n\t\t\tisValid = self.val.isValid(smiley)\n\t\t\tif not isValid: print smiley\n\t\t\tself.assertTrue(isValid)\n\nif __name__ == '__main__':\n\tunittest.main()" }, { "alpha_fraction": 0.6579925417900085, "alphanum_fraction": 0.6579925417900085, "avg_line_length": 23.272727966308594, "blob_id": "7ce38a7dfa81ae002fb932f5e0b2910cf9ef933f", "content_id": "93a537573505489c57d2f8f4ed3ba8e7b15e7467", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 269, "license_type": "no_license", "max_line_length": 67, "num_lines": 11, "path": "/README.md", "repo_name": "wallymathieu/smileynoise", "src_encoding": "UTF-8", "text": "Smileynoise\n===========\nYou can view this app on [appspot](http://smileynoise.appspot.com/)\n\nWhere is the code from?\n-----------------------\n\nThe code is based on \nhttp://code.google.com/appengine/\n\nThe urlencode from django is used in order to support unicode text.\n\n\n" }, { "alpha_fraction": 0.7112597823143005, "alphanum_fraction": 0.7157190442085266, "avg_line_length": 24.628570556640625, "blob_id": "c7fb418708f7661ba8ed3d990ecd854ab1767cb3", "content_id": "92311d57bc1404045b3371d3fa84c55c224a7212", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 897, "license_type": "no_license", "max_line_length": 62, "num_lines": 35, "path": "/main.py", "repo_name": "wallymathieu/smileynoise", "src_encoding": "UTF-8", "text": "from google.appengine.ext import db\nfrom google.appengine.api import users\nfrom model import *\nimport wsgiref.handlers\nimport os\nimport jinja2\nfrom google.appengine.ext.webapp import template\n\nfrom google.appengine.ext import webapp\nJENV = jinja2.Environment(\n loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),\n extensions=['jinja2.ext.autoescape'],\n autoescape=True)\n\nclass 
MainHandler(webapp.RequestHandler):\n\tdef get(self):\n\t\tdata = db.GqlQuery(\"\"\"SELECT * \n\t\t\t\t\t\tFROM Message\n\t\t\t\t\t\tORDER BY updated DESC \"\"\")\n\t\ttxt = []\n\t\tfor item in data:\n\t\t\ttxt.append( Message.toDictionary(item) )\n\t\ttemplate = JENV.get_template('views/main/index.html')\n\t\tself.response.write(template.render({'messages':txt}))\n\n\ndef main():\n\tapplication = webapp.WSGIApplication(\n\t\t[('/', MainHandler),\\\n\t\t],debug=True)\n\twsgiref.handlers.CGIHandler().run(application)\n\n\nif __name__ == '__main__':\n\tmain()\n" }, { "alpha_fraction": 0.7409732937812805, "alphanum_fraction": 0.7409732937812805, "avg_line_length": 32.52631759643555, "blob_id": "fa00a29cee3193a702cd02aa540e7a1017a16e13", "content_id": "cfab6a257d159ae5b02611ca6e384a9dc4367b94", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 637, "license_type": "no_license", "max_line_length": 57, "num_lines": 19, "path": "/model.py", "repo_name": "wallymathieu/smileynoise", "src_encoding": "UTF-8", "text": "import datetime\nfrom google.appengine.ext import db\nfrom google.appengine.api import users\nfrom urlhelper import urlencode,urldecode\n\nclass Message(db.Model):\n\tvalue = db.StringProperty(multiline=False,required=True)\n\twriter = db.UserProperty(required=True)\n\tcreated = db.DateTimeProperty(auto_now_add=True)\n\tupdated = db.DateTimeProperty(auto_now=True)\n\tdef __str__(self):\n\t\treturn self.value\n\tdef assert_access(self):\n\t\tif self.writer != users.get_current_user():\n\t\t\traise 'no access'\n\tdef toDictionary(item):\n\t\treturn {'id':item.key().id(),'value':item.value,\\\n\t\t\t'writer':item.writer.nickname(),\\\n\t\t\t'urlid': urlencode(item.value) }\n" }, { "alpha_fraction": 0.6819161772727966, "alphanum_fraction": 0.6838323473930359, "avg_line_length": 29.253623962402344, "blob_id": "386e567f98ecf12ef30fee74784c5c8d82154fa7", "content_id": "ebe161b142bd42a00677d2e130b904ed48fd0ffd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4175, "license_type": "no_license", "max_line_length": 103, "num_lines": 138, "path": "/message.py", "repo_name": "wallymathieu/smileynoise", "src_encoding": "UTF-8", "text": "import datetime\nfrom google.appengine.ext import db\nfrom google.appengine.api import users\nfrom google.appengine.ext.webapp import template\nimport cgi\nimport wsgiref.handlers\nfrom google.appengine.ext import webapp\nimport os\nimport jinja2\n\nfrom model import *\nfrom smileys import SmileysValidation\nfrom urlhelper import urlencode,urldecode\nimport urllib\nJENV = jinja2.Environment(\n loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),\n extensions=['jinja2.ext.autoescape'],\n autoescape=True)\n\ndef str_to_long(_id):\n\tid = int(_id) if _id != '' else None\n\treturn id\n\nclass ViewMessage(webapp.RequestHandler):\n\tdef get(self,value):\n\t\tids=None\n\t\ttry:\n\t\t\tids=int(value)\n\t\texcept ValueError:\n\t\t\tpass\n\t\tif ids:#support both int id and url encoded smiley\n\t\t\titem = Message.get_by_id(ids=ids,parent=None)\n\t\telse:\n\t\t\titem = None\n\t\t\tfor data in db.GqlQuery(\"\"\"SELECT * \n\t\t\t\t\t\t\t\tFROM Message WHERE value = :value\n\t\t\t\t\t\t\t\tORDER BY updated DESC \"\"\",value=urldecode(value)):\n\t\t\t\tif not item : \n\t\t\t\t\titem = data \n\t\t\t\t\titem.writers =[]\n\t\t\t\titem.writers.append(\"%s at %s\" % (item.writer,item.updated.date()))\n\n\t\tif item:\n\t\t\ttemplate = 
JENV.get_template('views/message/view.html')\n\n\t\t\tself.response.write(template.render(\\\n\t\t\t\t{ 'data':item,\\\n\t\t\t\t 'url': urllib.quote_plus(\"http://smileynoise.appspot.com/view/\"+str(item.key().id())),\\\n\t\t\t\t 'urlid': urlencode(item.value)\\\n\t\t\t\t}))\n\t\telse:\n\t\t\tmissing_view = 'views/message/missing_id.html' if ids else 'views/message/missing_smiley.html'\n\t\t\ttemplate = JENV.get_template(missing_view)\n\t\t\tself.error(404)\n\t\t\tself.response.write(template.render(\\\n\t\t\t\t{ 'urlid': value,\\\n\t\t\t\t 'id': urldecode(value)\\\n\t\t\t\t}))\n\nclass EditMessageForm(webapp.RequestHandler):\n\tType = Message\n\tHome = '/message/'\n\tdef assert_type(self,id,tp):\n\t\tif tp== 'create' and id != None :\n\t\t\traise Exception('New messages should not have an id')\n\t\tif tp== 'edit' and id == None :\n\t\t\traise Exception('When editing messages you must have an id')\n\n\tdef get(self,pagetype,id):\n\t\tid = str_to_long( id)\n\t\tself.assert_type(id,pagetype)\n\t\titem = self.Type.get_by_id(ids=id,parent=None) if id else {'id':\"\",'value':self.request.get('value')}\n\t\tif id: item.assert_access()\n\t\ttemplate = JENV.get_template('views/message/edit.html')\n\t\tself.response.write(template.render({'data':item,'type':pagetype}))\n\tdef post(self,pagetype,id):\n\t\tid = str_to_long( id)\n\t\tself.assert_type(id,pagetype)\n\t\terrors = []\n\t\tvalue=self.request.get('value')\n\t\tif not SmileysValidation().isValid(value):\n\t\t\terrors.append( \"Not a smiley!\")\n\t\t#else:\n\t\tif id :\n\t\t\titem = self.Type.get_by_id(ids=id,parent=None)\n\t\t\titem.assert_access()\n\t\t\titem.value = value\n\t\telse:\n\t\t\titem = Message(value=value,writer=users.get_current_user())\n\t\t\titem.writer = users.get_current_user()\n\t\t\n\t\tif len(errors) == 0:\n\t\t\t# Save the data, and redirect to the view page\n\t\t\titem.put();\n\t\t\tself.redirect(self.Home)\n\t\telse:\n\t\t\t# Reprint the form\n\t\t\ttemplate = JENV.get_template('views/message/edit.html')\n\t\t\tself.response.write(template.render({'errors':errors, 'data':item,'type':pagetype}))\n\nclass ListForm(webapp.RequestHandler):\n\tdef get(self):\n\t\tdata = db.GqlQuery(\"\"\"SELECT * \n\t\t\t\t\t\t\tFROM Message WHERE writer=:writer\n\t\t\t\t\t\t\tORDER BY updated DESC \"\"\", writer=users.get_current_user())\n\t\ttxt = []\n\t\tfor item in data:\n\t\t\ttxt.append(Message.toDictionary(item))\n\t\ttemplate = JENV.get_template('views/message/list.html')\n\t\tself.response.write(template.render({'messages':txt}))\n\nclass ConfirmDelete(webapp.RequestHandler):\n\tdef get(self,id):\n\t\titem = Message.get_by_id(ids=int(id),parent=None) \n\t\titem.assert_access()\n\t\ttemplate = JENV.get_template('views/message/confirmdelete.html')\n\t\tself.response.write(template.render({ 'data':item}))\n\tdef post(self,id):\n\t\titem = Message.get_by_id(ids=int(id),parent=None) \n\t\titem.assert_access()\n\t\titem.delete()\n\t\tself.redirect(\"/message/\")\n\napplication = webapp.WSGIApplication([\n\n('/message/(create|edit)/(.*)', EditMessageForm),\n('/message/confirmdelete/(.*)', ConfirmDelete),\n('/view/(.*)', ViewMessage),\n('/message/view/(.*)', ViewMessage),\n('/message/.*', ListForm),\n\n], debug=True)\n\ndef main():\n\twsgiref.handlers.CGIHandler().run(application)\n\nif __name__ == '__main__':\n\tmain()\n" }, { "alpha_fraction": 0.6060011386871338, "alphanum_fraction": 0.6152551770210266, "avg_line_length": 34.65999984741211, "blob_id": "32f9b9800143fadc455111bce79ddef0d187b343", "content_id": 
"492ec83f3775d4a15c5a969d8473a71f7d67fb62", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3582, "license_type": "no_license", "max_line_length": 78, "num_lines": 100, "path": "/urlhelper.py", "repo_name": "wallymathieu/smileynoise", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# encoding: utf-8\n\"\"\"\nurlhelper.py\n\nCreated by Oskar Mathieu Gewalli on 2010-04-29.\nCopyright (c) 2010 Gewalli. All rights reserved.\n\"\"\"\n\nimport sys\nimport os\nimport unittest\nimport urllib\n\ndef urlencode(val):\n\treturn force_unicode(urllib.quote_plus(smart_str(val)))\ndef urldecode(val):\n\treturn force_unicode(urllib.unquote_plus(val.encode(\"utf-8\")))\n#django\ndef force_unicode(s, encoding='utf-8', strings_only=False, errors='strict'):\n \"\"\"\n Similar to smart_unicode, except that lazy instances are resolved to\n strings, rather than kept as lazy objects.\n\n If strings_only is True, don't convert (some) non-string-like objects.\n \"\"\"\n if strings_only and is_protected_type(s):\n return s\n if not isinstance(s, basestring,):\n if hasattr(s, '__unicode__'):\n s = unicode(s)\n else:\n try:\n s = unicode(str(s), encoding, errors)\n except UnicodeEncodeError:\n if not isinstance(s, Exception):\n raise\n # If we get to here, the caller has passed in an Exception\n # subclass populated with non-ASCII data without special\n # handling to display as a string. We need to handle this\n # without raising a further exception. We do an\n # approximation to what the Exception's standard str()\n # output should be.\n s = ' '.join([force_unicode(arg, encoding, strings_only,\n errors) for arg in s])\n elif not isinstance(s, unicode):\n # Note: We use .decode() here, instead of unicode(s, encoding,\n # errors), so that if s is a SafeString, it ends up being a\n # SafeUnicode at the end.\n s = s.decode(encoding, errors)\n return s\n\n#django\ndef smart_str(s, encoding='utf-8', strings_only=False, errors='strict'):\n \"\"\"\n Returns a bytestring version of 's', encoded as specified in 'encoding'.\n\n If strings_only is True, don't convert (some) non-string-like objects.\n \"\"\"\n if strings_only and isinstance(s, (types.NoneType, int)):\n return s\n #if isinstance(s, Promise):\n # return unicode(s).encode(encoding, errors)\n elif not isinstance(s, basestring):\n try:\n return str(s)\n except UnicodeEncodeError:\n if isinstance(s, Exception):\n # An Exception subclass containing non-ASCII data that doesn't\n # know how to print itself properly. 
We shouldn't raise a\n # further exception.\n return ' '.join([smart_str(arg, encoding, strings_only,\n errors) for arg in s])\n return unicode(s).encode(encoding, errors)\n elif isinstance(s, unicode):\n return s.encode(encoding, errors)\n elif s and encoding != 'utf-8':\n return s.decode('utf-8', errors).encode(encoding, errors)\n else:\n return s\n\nclass urlhelperTests(unittest.TestCase):\n\tdef setUp(self):\n\t\tpass\n\tdef test(self):\n\t\tprint urlencode(u\"€\")\n\t\tprint urllib.quote_plus(u\"€\".encode('utf-8'))\n\t\tself.assertEqual(u\"€\",urldecode(urlencode(u\"€\")));\n\tdef testOfNonUnicode(self):\n\t\tself.assertEqual(u\"€\",urldecode(urlencode(\"€\")));\n\tdef testOfVal(self):\n\t\tself.assertEqual(u\":€\",urldecode(\"%3A%E2%82%AC\"));\n\t\tself.assertEqual(u\":€\",urldecode(u\"%3A%E2%82%AC\"));\n\tdef testOfHeadbangingSmiley(self):\n\t\tencoded = \"%5Cm%2F%5C>.<%2F%5Cm%2F\"\n\t\tunencoded = \"\\m/\\>.</\\m/\"\n\t\tprint unencoded\n\t\tself.assertEqual(unencoded,urldecode(encoded));\nif __name__ == '__main__':\n\tunittest.main()\n" } ]
6
paulwicking/inventing-games-with-pygame
https://github.com/paulwicking/inventing-games-with-pygame
8cec12987b0e227ac43488fcb39510ad5db9ade3
ed1b6254051e08d13e581c76e79999d4c0de5745
70962d15574104e27efae42fd63a442ac581f102
refs/heads/master
2021-08-18T18:55:15.842816
2017-11-23T15:38:31
2017-11-23T15:38:31
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5139372944831848, "alphanum_fraction": 0.6533101201057434, "avg_line_length": 26.975608825683594, "blob_id": "23ee3658e10379e7fc758b1a8ef30b04ba080e9d", "content_id": "3e40cd46ffe1d0e9d952aeb31c54c6eee75d16cb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1148, "license_type": "no_license", "max_line_length": 99, "num_lines": 41, "path": "/drawing/drawing.py", "repo_name": "paulwicking/inventing-games-with-pygame", "src_encoding": "UTF-8", "text": "import pygame\nimport sys\n\nfrom pygame.locals import *\n\n\npygame.init()\n\nDISPLAYSURFACE = pygame.display.set_mode((500, 400), 0, 32)\npygame.display.set_caption('Drawing')\n\nBLACK = (0, 0, 0)\nWHITE = (255, 255, 255)\nRED = (255, 0, 0)\nGREEN = (0, 255, 0)\nBLUE = (0, 0, 255)\nRED_ALPHA = (255, 0, 0, 20)\n\nDISPLAYSURFACE.fill(WHITE)\npygame.draw.polygon(DISPLAYSURFACE, GREEN, ((146, 0), (291, 106), (236, 277), (56, 277), (0, 106)))\npygame.draw.line(DISPLAYSURFACE, BLUE, (60, 60), (120, 60), 4)\npygame.draw.line(DISPLAYSURFACE, BLUE, (120, 60), (60, 120))\npygame.draw.line(DISPLAYSURFACE, BLUE, (60, 120), (120, 120), 4)\npygame.draw.circle(DISPLAYSURFACE, BLUE, (300, 50), 20, 0)\npygame.draw.ellipse(DISPLAYSURFACE, RED, (300, 250, 40, 80), 1)\npygame.draw.rect(DISPLAYSURFACE, RED_ALPHA, (200, 150, 100, 50))\n\npix_obj = pygame.PixelArray(DISPLAYSURFACE)\npix_obj[480][380] = BLACK\npix_obj[482][382] = BLACK\npix_obj[484][384] = BLACK\npix_obj[486][386] = BLACK\npix_obj[488][388] = BLACK\ndel pix_obj\n\nwhile True:\n for event in pygame.event.get():\n if event.type == QUIT:\n pygame.quit()\n sys.exit()\n pygame.display.update()\n\n" }, { "alpha_fraction": 0.6239193081855774, "alphanum_fraction": 0.6714697480201721, "avg_line_length": 24.703702926635742, "blob_id": "06e3918e0efcc8c823dc22861078d04f487ba368", "content_id": "7fd41fb7a2473a592e80c6d021a6d39249b3da20", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 694, "license_type": "no_license", "max_line_length": 81, "num_lines": 27, "path": "/font_text/fonttext.py", "repo_name": "paulwicking/inventing-games-with-pygame", "src_encoding": "UTF-8", "text": "import pygame\nimport sys\nfrom pygame.locals import *\n\n\npygame.init()\nDISPLAY_SURFACE = pygame.display.set_mode((400, 300))\npygame.display.set_caption('Hello font world!')\n\nWHITE = (255, 255, 255)\nGREEN = (0, 255, 0)\nBLUE = (0, 0, 128)\n\nfont_object = pygame.font.Font('freesansbold.ttf', 32)\ntext_surface_object = font_object.render('Hello, font world!', True, GREEN, BLUE)\ntext_rect = text_surface_object.get_rect()\ntext_rect.center = (200, 150)\n\nwhile True:\n DISPLAY_SURFACE.fill(WHITE)\n DISPLAY_SURFACE.blit(text_surface_object, text_rect)\n\n for event in pygame.event.get():\n if event.type == QUIT:\n pygame.quit()\n sys.exit()\n pygame.display.update()\n" }, { "alpha_fraction": 0.8409090638160706, "alphanum_fraction": 0.8409090638160706, "avg_line_length": 43, "blob_id": "fd37f6081080e592e6f54badca87b9de55c5513c", "content_id": "937bb214d6d295d7a20e01a1b042a62245b50ddd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 44, "license_type": "no_license", "max_line_length": 43, "num_lines": 1, "path": "/README.md", "repo_name": "paulwicking/inventing-games-with-pygame", "src_encoding": "UTF-8", "text": "http://inventwithpython.com/makinggames.pdf\n" }, { "alpha_fraction": 0.5748980045318604, "alphanum_fraction": 
0.5917189121246338, "avg_line_length": 32.158416748046875, "blob_id": "072e7c79b756dffe1c2a149ff8a2b14aeea1b9cc", "content_id": "6084edd718c41785f767826d5f0be92a2dc8be21", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10047, "license_type": "no_license", "max_line_length": 120, "num_lines": 303, "path": "/memory_puzzle/memory_puzzle.py", "repo_name": "paulwicking/inventing-games-with-pygame", "src_encoding": "UTF-8", "text": "\"\"\"\nMemory puzzle.\nhttp://inventwithpython.com/pygame\n\"\"\"\nimport pygame\nimport random\nimport sys\nfrom pygame.locals import *\n\nFPS = 30\nWINDOW_WIDTH = 640\nWINDOW_HEIGHT = 480\nREVEAL_SPEED = 8\nBOX_SIZE = 40\nGAP_SIZE = 10\nBOARD_WIDTH = 10\nBOARD_HEIGHT = 7\n\nassert (BOARD_WIDTH * BOARD_HEIGHT) % 2 == 0, 'Board needs to have an even number of boxes for pairs of matches.'\n\nX_MARGIN = int((WINDOW_WIDTH - (BOARD_WIDTH * (BOX_SIZE + GAP_SIZE))) / 2)\nY_MARGIN = int((WINDOW_HEIGHT - (BOARD_HEIGHT * (BOX_SIZE + GAP_SIZE))) / 2)\n\nGRAY = (100, 100, 100)\nNAVY_BLUE = (60, 60, 100)\nWHITE = (255, 255, 255)\nRED = (255, 0, 0)\nGREEN = (0, 255, 0)\nBLUE = (0, 0, 255)\nYELLOW = (255, 255, 0)\nORANGE = (255, 128, 0)\nPURPLE = (255, 0, 255)\nCYAN = (0, 255, 255)\n\nBG_COLOR = NAVY_BLUE\nLIGHT_BG_COLOR = GRAY\nBOX_COLOR = WHITE\nHIGHLIGHT_COLOR = BLUE\n\nDONUT = 'donut'\nSQUARE = 'square'\nDIAMOND = 'diamond'\nLINES = 'lines'\nOVAL = 'oval'\n\nALL_COLORS = (RED, GREEN, BLUE, YELLOW, ORANGE, PURPLE, CYAN)\nALL_SHAPES = (DONUT, SQUARE, DIAMOND, LINES, OVAL)\n\nassert len(ALL_COLORS) * len(ALL_SHAPES) * 2 >= BOARD_WIDTH * BOARD_HEIGHT, \\\n \"Board is too big for the number of shapes/colors.\"\n\n\ndef main():\n global FPS_CLOCK, DISPLAY_SURFACE\n pygame.init()\n FPS_CLOCK = pygame.time.Clock()\n DISPLAY_SURFACE = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT))\n\n mouse_x = 0\n mouse_y = 0\n pygame.display.set_caption('Memory Game')\n\n game_board = get_randomized_board()\n revealed_boxes = generate_revealed_boxes_data(False)\n\n first_selection = None\n\n DISPLAY_SURFACE.fill(BG_COLOR)\n start_game_animation(game_board)\n\n while True:\n mouse_clicked = False\n\n DISPLAY_SURFACE.fill(BG_COLOR)\n draw_board(game_board, revealed_boxes)\n\n for event in pygame.event.get():\n if event.type == QUIT or (event.type == KEYUP and event.key == K_ESCAPE):\n pygame.quit()\n sys.exit()\n elif event.type == MOUSEMOTION:\n mouse_x, mouse_y = event.pos\n elif event.type == MOUSEBUTTONUP:\n mouse_x, mouse_y = event.pos\n mouse_clicked = True\n\n box_x, box_y = get_box_at_pixel(mouse_x, mouse_y)\n if box_x is not None and box_y is not None: # The mouse is over a box\n if not revealed_boxes[box_x][box_y]:\n draw_highlight_box(box_x, box_y)\n if not revealed_boxes[box_x][box_y] and mouse_clicked:\n reveal_boxes_animation(game_board, [(box_x, box_y)])\n revealed_boxes[box_x][box_y] = True # set the box as revealed\n\n if first_selection is None:\n first_selection = (box_x, box_y)\n else:\n icon1shape, icon1color = get_shape_and_color(game_board, first_selection[0], first_selection[1])\n icon2shape, icon2color = get_shape_and_color(game_board, box_x, box_y)\n\n if icon1shape != icon2shape or icon1color != icon2color:\n pygame.time.wait(1000) # milliseconds\n cover_boxes_animation(game_board, [(first_selection[0], first_selection[1]), (box_x, box_y)])\n revealed_boxes[first_selection[0]][first_selection[1]] = False\n revealed_boxes[box_x][box_y] = False\n elif has_won(revealed_boxes):\n game_won_animation(game_board)\n 
pygame.time.wait(2000)\n\n game_board = get_randomized_board()\n revealed_boxes = generate_revealed_boxes_data(False)\n\n draw_board(game_board, revealed_boxes)\n pygame.display.update()\n pygame.time.wait(1000)\n\n start_game_animation(game_board)\n first_selection = None\n\n pygame.display.update()\n FPS_CLOCK.tick(FPS)\n\n\ndef generate_revealed_boxes_data(value):\n revealed_boxes = []\n for i in range(BOARD_WIDTH):\n revealed_boxes.append([value] * BOARD_HEIGHT)\n return revealed_boxes\n\n\ndef get_randomized_board():\n \"\"\"Get a list of every possible shape in every possible color.\"\"\"\n icons = []\n for color in ALL_COLORS:\n for shape in ALL_SHAPES:\n icons.append((shape, color))\n\n random.shuffle(icons)\n number_of_icons_used = int(BOARD_WIDTH * BOARD_HEIGHT / 2)\n icons = icons[:number_of_icons_used] * 2\n random.shuffle(icons)\n\n board = []\n for x in range(BOARD_WIDTH):\n column = []\n for y in range(BOARD_HEIGHT):\n column.append(icons[0])\n del icons[0]\n board.append(column)\n\n return board\n\n\ndef split_into_groups_of(group_size, the_list):\n \"\"\"Splits a list into a list of lists, where the inner list have at most group_size number of items.\"\"\"\n result = []\n for i in range(0, len(the_list), group_size):\n result.append(the_list[i:i + group_size])\n\n return result\n\n\ndef left_top_coords_of_box(box_x, box_y):\n \"\"\"Convert board coordinates to pixel coordinates.\"\"\"\n left = box_x * (BOX_SIZE + GAP_SIZE) + X_MARGIN\n top = box_y * (BOX_SIZE + GAP_SIZE) + Y_MARGIN\n\n return left, top\n\n\ndef get_box_at_pixel(x, y):\n for box_x in range(BOARD_WIDTH):\n for box_y in range(BOARD_HEIGHT):\n left, top = left_top_coords_of_box(box_x, box_y)\n box_rect = pygame.Rect(left, top, BOX_SIZE, BOX_SIZE)\n if box_rect.collidepoint(x, y):\n return box_x, box_y\n\n return None, None\n\n\ndef draw_icon(shape, color, box_x, box_y):\n quarter = int(BOX_SIZE * 0.25)\n half = int(BOX_SIZE * 0.5)\n\n left, top = left_top_coords_of_box(box_x, box_y)\n\n if shape == DONUT:\n pygame.draw.circle(DISPLAY_SURFACE, color, (left + half, top + half), half - 5)\n pygame.draw.circle(DISPLAY_SURFACE, BG_COLOR, (left + half, top + half), quarter - 5)\n elif shape == SQUARE:\n pygame.draw.rect(DISPLAY_SURFACE, color, (left + quarter, top + quarter, BOX_SIZE - half, BOX_SIZE - half))\n elif shape == DIAMOND:\n pygame.draw.polygon(\n DISPLAY_SURFACE,\n color,\n (\n (left + half, top),\n (left + BOX_SIZE - 1, top + half),\n (left + half, top + BOX_SIZE - 1),\n (left, top + half)\n ))\n elif shape == LINES:\n for i in range(0, BOX_SIZE, 4):\n pygame.draw.line(DISPLAY_SURFACE, color, (left, top + i), (left + i, top))\n pygame.draw.line(DISPLAY_SURFACE, color, (left + i, top + BOX_SIZE - 1), (left + BOX_SIZE - 1, top + i))\n elif shape == OVAL:\n pygame.draw.ellipse(DISPLAY_SURFACE, color, (left, top + quarter, BOX_SIZE, half))\n\n\ndef get_shape_and_color(board, box_x, box_y):\n \"\"\"\n Shape value for x, y spot is stored in board[x][y][0].\n Color value for x, y spot is stored in board[x][y][1].\n \"\"\"\n return board[box_x][box_y][0], board[box_x][box_y][1]\n\n\ndef draw_box_covers(board, boxes, coverage):\n \"\"\"Draws boxes being covered/revealed. 
\"boxes\" is a list of two-item lists, which have the x & y spot of the box.\"\"\"\n for box in boxes:\n left, top = left_top_coords_of_box(box[0], box[1])\n pygame.draw.rect(DISPLAY_SURFACE, BG_COLOR, (left, top, BOX_SIZE, BOX_SIZE))\n shape, color = get_shape_and_color(board, box[0], box[1])\n draw_icon(shape, color, box[0], box[1])\n if coverage > 0: # only draw the cover if there is a coverage\n pygame.draw.rect(DISPLAY_SURFACE, BOX_COLOR, (left, top, coverage, BOX_SIZE))\n\n pygame.display.update()\n FPS_CLOCK.tick(FPS)\n\n\ndef reveal_boxes_animation(board, boxes_to_reveal):\n \"\"\"Do the \"box reveal\" animation.\"\"\"\n for coverage in range(BOX_SIZE, (-REVEAL_SPEED) - 1, - REVEAL_SPEED):\n draw_box_covers(board, boxes_to_reveal, coverage)\n\n\ndef cover_boxes_animation(board, boxes_to_cover):\n \"\"\"Do the \"box cover\" animation.\"\"\"\n for coverage in range(0, BOX_SIZE + REVEAL_SPEED, REVEAL_SPEED):\n draw_box_covers(board, boxes_to_cover, coverage)\n\n\ndef draw_board(board, revealed):\n \"\"\"Draws all the boxes in their covered or revealed state.\"\"\"\n for box_x in range(BOARD_WIDTH):\n for box_y in range(BOARD_HEIGHT):\n left, top = left_top_coords_of_box(box_x, box_y)\n if not revealed[box_x][box_y]:\n # draw a covered box\n pygame.draw.rect(DISPLAY_SURFACE, BOX_COLOR, (left, top, BOX_SIZE, BOX_SIZE))\n else:\n shape, color = get_shape_and_color(board, box_x, box_y)\n draw_icon(shape, color, box_x, box_y)\n\n\ndef draw_highlight_box(box_x, box_y):\n left, top = left_top_coords_of_box(box_x, box_y)\n pygame.draw.rect(DISPLAY_SURFACE, HIGHLIGHT_COLOR, (left - 5, top - 5, BOX_SIZE + 10, BOX_SIZE + 10), 4)\n\n\ndef start_game_animation(board):\n \"\"\"Randomly reveal the boxes 8 at a time.\"\"\"\n covered_boxes = generate_revealed_boxes_data(False)\n boxes = []\n for x in range(BOARD_WIDTH):\n for y in range(BOARD_HEIGHT):\n boxes.append((x, y))\n random.shuffle(boxes)\n box_groups = split_into_groups_of(8, boxes)\n\n draw_board(board, covered_boxes)\n for box_group in box_groups:\n reveal_boxes_animation(board, box_group)\n cover_boxes_animation(board, box_group)\n\n\ndef game_won_animation(board):\n \"\"\"Flash the background color when the player has won.\"\"\"\n covered_boxes = generate_revealed_boxes_data(True)\n color1 = LIGHT_BG_COLOR\n color2 = BG_COLOR\n\n for i in range(13):\n color1, color2 = color2, color1\n DISPLAY_SURFACE.fill(color1)\n draw_board(board, covered_boxes)\n pygame.display.update()\n pygame.time.wait(300)\n\n\ndef has_won(revealed_boxes):\n \"\"\"Returns True if all the boxes have been revealed, otherwise False.\"\"\"\n for i in revealed_boxes:\n if False in i:\n return False\n\n return True\n\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.512922465801239, "alphanum_fraction": 0.5506958365440369, "avg_line_length": 19.95833396911621, "blob_id": "05d198447dbe6f9046ad2a93b9b4675b3f09c0e2", "content_id": "44e33ac9d5d3f79329a64bf0854d1575370f98bc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1006, "license_type": "no_license", "max_line_length": 59, "num_lines": 48, "path": "/cat_animation/cat_animation.py", "repo_name": "paulwicking/inventing-games-with-pygame", "src_encoding": "UTF-8", "text": "import pygame\nimport sys\nfrom pygame.locals import *\n\n\npygame.init()\n\nFPS = 60\nfps_clock = pygame.time.Clock()\n\nDISPLAYSURFACE = pygame.display.set_mode((400, 300), 0, 32)\npygame.display.set_caption('Cat Animation')\n\nWHITE = (255, 255, 255)\ncat_image = 
pygame.image.load('cat.png')\ncat_x = 10\ncat_y = 10\ndirection = 'right'\n\nwhile True:\n    DISPLAYSURFACE.fill(WHITE)\n\n    if direction == 'right':\n        cat_x += 5\n        if cat_x == 280:\n            direction = 'down'\n    elif direction == 'down':\n        cat_y += 5\n        if cat_y == 220:\n            direction = 'left'\n    elif direction == 'left':\n        cat_x -= 5\n        if cat_x == 10:\n            direction = 'up'\n    elif direction == 'up':\n        cat_y -= 5\n        if cat_y == 10:\n            direction = 'right'\n\n    DISPLAYSURFACE.blit(cat_image, (cat_x, cat_y))\n\n    for event in pygame.event.get():\n        if event.type == QUIT:\n            pygame.quit()\n            sys.exit()\n\n    pygame.display.update()\n    fps_clock.tick(FPS)\n" }, { "alpha_fraction": 0.6426331996917725, "alphanum_fraction": 0.653082549571991, "avg_line_length": 28.875, "blob_id": "4054274afbac5dd01bc33b8d4f6afdd44698731f", "content_id": "12d95f010864059b2a6029b45a48b430bf577f4c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 957, "license_type": "no_license", "max_line_length": 89, "num_lines": 32, "path": "/guess_the_number/guess_the_number.py", "repo_name": "paulwicking/inventing-games-with-pygame", "src_encoding": "UTF-8", "text": "import random\n\nMAX_VALUE = 20\nMAX_GUESSES = 6\nwin_condition = False\nnumber_of_guesses = 0\n\nnumber_to_guess = random.randint(1, MAX_VALUE)\n\nname = input('Hello! What is your name?\\n')\nprint(f'Well, {name}, I am thinking of a number between 1 and {MAX_VALUE}.')\n\nwhile not win_condition and number_of_guesses < MAX_GUESSES: # allow at most MAX_GUESSES attempts\n    try:\n        current_guess = int(input('Take a guess.\\n'))\n    except ValueError:\n        print('Please give me a number I can understand.')\n        continue\n    number_of_guesses += 1\n\n    if current_guess == number_to_guess:\n        win_condition = True\n        break\n    elif current_guess > number_to_guess:\n        print('Your guess is too high.')\n    elif current_guess < number_to_guess:\n        print('Your guess is too low.')\n\nif win_condition:\n    print(f'Good job, {name}! You guessed my number in {number_of_guesses} guesses!')\nelse:\n    print(f'Too bad! I was thinking about {number_to_guess}')\n\n" } ]
6
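A note on the pygame repo above: memory_puzzle.py converts board coordinates to pixels with left = box_x * (BOX_SIZE + GAP_SIZE) + X_MARGIN, and inverts the mapping by colliding the mouse position against a pygame.Rect per box. The sketch below replays that conversion outside pygame; the constants are copied from the file, but pixel_to_box is an illustrative arithmetic alternative to the repo's Rect loop, not code from the repo.

BOX_SIZE, GAP_SIZE = 40, 10
BOARD_WIDTH, BOARD_HEIGHT = 10, 7
WINDOW_WIDTH, WINDOW_HEIGHT = 640, 480
X_MARGIN = (WINDOW_WIDTH - BOARD_WIDTH * (BOX_SIZE + GAP_SIZE)) // 2
Y_MARGIN = (WINDOW_HEIGHT - BOARD_HEIGHT * (BOX_SIZE + GAP_SIZE)) // 2

def box_to_pixel(box_x, box_y):
    # Each box advances one (BOX_SIZE + GAP_SIZE) cell from the margin.
    return (box_x * (BOX_SIZE + GAP_SIZE) + X_MARGIN,
            box_y * (BOX_SIZE + GAP_SIZE) + Y_MARGIN)

def pixel_to_box(x, y):
    # Inverse mapping; None when the pixel falls in a gap or outside the board.
    cell = BOX_SIZE + GAP_SIZE
    bx, off_x = divmod(x - X_MARGIN, cell)
    by, off_y = divmod(y - Y_MARGIN, cell)
    if 0 <= bx < BOARD_WIDTH and 0 <= by < BOARD_HEIGHT and off_x < BOX_SIZE and off_y < BOX_SIZE:
        return bx, by
    return None

assert box_to_pixel(0, 0) == (X_MARGIN, Y_MARGIN)
assert pixel_to_box(*box_to_pixel(3, 2)) == (3, 2)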
Helloezzi/python_fileio
https://github.com/Helloezzi/python_fileio
0d888ab8bc715282f73b2deae0ffac57acda5d55
86bb2d1173c5598df0e1d6816f353bd3b860a88d
25043d256c2d6eceb1c83c2e158ce4f9d25fd08f
refs/heads/master
2021-01-13T23:35:54.816172
2020-02-23T15:12:07
2020-02-23T15:12:07
242,530,159
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7526881694793701, "alphanum_fraction": 0.7526881694793701, "avg_line_length": 19.77777862548828, "blob_id": "447416b64f70155bc6e1590367360e26c9fa758f", "content_id": "76e8b0f972a16067213887f52eba6e7179fb7ced", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 186, "license_type": "no_license", "max_line_length": 61, "num_lines": 9, "path": "/main.py", "repo_name": "Helloezzi/python_fileio", "src_encoding": "UTF-8", "text": "from fileManager import *\n\ninstance = FileManager()\n\nstr = input()\n\ninstance.WriteFile(\"C:/Dev/Python/filemanager/test.txt\", str)\n\ninstance.OpenFile(\"C:/Dev/Python/filemanager/test.txt\")" }, { "alpha_fraction": 0.4801587164402008, "alphanum_fraction": 0.4801587164402008, "avg_line_length": 22, "blob_id": "9725b82122fc10a24e5acd16296cb111cd66829d", "content_id": "d2a3d4a1a9385c608528134c2765689a6121ef9e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 252, "license_type": "no_license", "max_line_length": 35, "num_lines": 11, "path": "/fileManager.py", "repo_name": "Helloezzi/python_fileio", "src_encoding": "UTF-8", "text": "class FileManager:\n def WriteFile(self, path, str):\n f = open(path, 'w')\n f.write(str)\n f.close()\n\n def OpenFile(self, path):\n f = open(path, 'r')\n data = f.read()\n print(\"data:\" + data)\n f.close()" } ]
2
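The FileManager in the repo above originally opened and closed file handles by hand and shadowed the built-in str. Below is a minimal sketch of the same write/read round trip using context managers; the SafeFileManager name and the tempfile path are illustrative choices, not taken from the repo.

import os
import tempfile

class SafeFileManager:
    def write_file(self, path, text):
        # 'with' closes the handle even if the write raises.
        with open(path, 'w', encoding='utf-8') as f:
            f.write(text)

    def read_file(self, path):
        with open(path, 'r', encoding='utf-8') as f:
            return f.read()

manager = SafeFileManager()
path = os.path.join(tempfile.gettempdir(), 'test.txt')
manager.write_file(path, 'hello')
print('data:', manager.read_file(path))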
return1/return1.at
https://github.com/return1/return1.at
2b20d98c8196869005c4ba5dd9f52faab5fd58e1
35c30b2b1026b1d7f1a78f120c3c90f842ef30b2
1ccf12205d43b411358291f2724ca01aba050535
refs/heads/main
2022-10-08T10:00:50.094193
2022-09-28T12:02:42
2022-09-28T12:02:42
5,780,989
0
0
null
2012-09-12T14:31:04
2012-09-24T08:38:08
2012-09-24T08:38:08
JavaScript
[ { "alpha_fraction": 0.6952662467956543, "alphanum_fraction": 0.7204142212867737, "avg_line_length": 36.55555725097656, "blob_id": "e8a085056b03b61f8c36c5ff805ef927db757e37", "content_id": "f334fb24293a01baaae9a15162d03c72730241f4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 676, "license_type": "no_license", "max_line_length": 83, "num_lines": 18, "path": "/content/2011-11-24-how-to-shrink-vmware-fusion-virtual-disks.rst", "repo_name": "return1/return1.at", "src_encoding": "UTF-8", "text": "How to shrink VMware Fusion virtual disks\n#########################################\n:date: 2011-11-24 09:32\n:category: Server\n:tags: virtual disk, vmware\n:slug: how-to-shrink-vmware-fusion-virtual-disks\n\nVMWare has an included tool (vmware-vdiskmanager) for shrinking virtual\ndisks. But before performing the shrink you need to zero out the unused\ndisk space first. So start your VM and do a\n\n``dd if=/dev/zero of=/empty_file; rm /empty_file`` \n\nAfter that, shut down the VM and start the vmware-vdiskmanager, which is located in\n/Applications/VMware Fusion.app/Contents/Library (OSX 10.7.2, VMware\nFusion 4) and shrink the disk:\n\n``./vmware-vdiskmanager -k pathtodisk.vmdk``\n" }, { "alpha_fraction": 0.707317054271698, "alphanum_fraction": 0.7127371430397034, "avg_line_length": 91.25, "blob_id": "9d10f781e32f5460d05baced27d3f53f089ecb85", "content_id": "e01fcd014e24245c5d8fc2b37ca3665085d479a9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 369, "license_type": "no_license", "max_line_length": 187, "num_lines": 4, "path": "/content/sandbox/writeCapture/chain/chain.js", "repo_name": "return1/return1.at", "src_encoding": "UTF-8", "text": "console.log(\"i am chain.js\");\ndocument.write('<div style=\"border:1px solid red; padding:3px\">i am chain.js and i wrote this with document.write(). no i will add chainchain.js via script tag and document.write()</p>');\ndocument.write('<script src=\\\"\\/static\\/sandbox\\/writeCapture\\/chain\\/chainchain.js\\\" type=\\\"text/javascript\\\"><\\/script>');\ndocument.write('</div>');\n" }, { "alpha_fraction": 0.7174301147460938, "alphanum_fraction": 0.7278405427932739, "avg_line_length": 51.53125, "blob_id": "506c4b0a7858e2c0073ce60bc9bbe1f70c53e61a", "content_id": "79f3b6a6dd977adece04ed1a71b64dd48acd4eae", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 3362, "license_type": "no_license", "max_line_length": 121, "num_lines": 64, "path": "/content/2010-11-02-iframe-resizing-how-to-fit-ad-banner-ajax-websites-writecapture.rst", "repo_name": "return1/return1.at", "src_encoding": "UTF-8", "text": "Painful iframe resizing, or: how to fit ad banners into ajax heavy web sites with writeCapture\n##############################################################################################\n:date: 2010-11-02 11:56\n:category: Javascript\n:tags: ads, iframe, jquery, javascript\n:slug: iframe-resizing-how-to-fit-ad-banner-ajax-websites-writecapture\n:summary: The crux with online advertising.\n\nThe crux with online advertising\n================================\n\nThe online advertising industry seems not quite up to date yet.\nTo my knowledge, no AJAX APIs are being so far provided, all services must be embedded as external javascripts,\nexecuting several nested ``document.write()`` instructions. 
\n(The first JS loads a second one, which in turn loads another, often structured even further.)\nFor a web application where content is reloaded dynamically, this is a catastrophe because ``document.write()`` \ndestroys the page after building the DOM, displaying just an empty page.\n\n**iFrame solution?**\n--------------------\n\nThis problem leads to amazing workarounds, like loading the ad in an iFrame.\nUnfortunately, the iFrame does not know what kind of content the ad-supplier is going to provide at some point. \n\"Context sensitive\" iFrames are called for, which can adapt to the external content's size. \nA simple and efficient jQuery method is the following:\n\n.. raw:: html\n\n <script src=\"https://gist.github.com/4032581.js?file=jquery-ad-iframe-resize.js\"></script>\n\niFrame size is simply checked every x seconds (every second in this example). \nSince it is impossible to determine correctly, easily and rapidly - and working in every browser - the moment when an\niFrame has finished loading its content, this is a legit solution, which of course only makes sense when you have just\na few of those iFrames on your page (apart from the fact that iFrames are \"naughty\" anyway).\n\n**BUT**:\n\nRescue is at hand: just overwrite ``document.write()``. \nIf it were indeed that easy, I would have done it myself, resp. there would have been functional solutions on the web\na long time ago. There is evidence that for several years, developers have `struggled`_ `with`_ such a solution for\nall current browsers. It took me a while to find it, but the solution does exist, somewhere out there on the net.\n`Newsweek.com`_ baptized it the \"Jesus script\". This may seem a bit lofty, but it's okay because it works and it saves\na lot of trouble:\n\n**writeCapture**\n----------------\n\n:Github: \n\t`http://github.com/iamnoah/writeCapture/`_ \n:my test:\n\t`http://return1.at/sandbox/writeCapture/`_ \n\n**Problems?** \n\nProblems just occur when the externally loaded ad-scripts depend on the ``load`` event,\nbecause writeCapture intercepts ``document.write()`` instructions and executes them after the DOM has been established. \nBut that will never happen, since the ``load`` event has already passed.\nThe day is saved by the call-back functionality of writeCapture, which can trigger the ``window.load()`` event once more.\n\n.. _struggled: http://ajax.phpmagazine.net/2006/11/xhtml_and_documentwrite_replac.html\n.. _with: http://www.intertwingly.net/blog/2006/11/10/Thats-Not-Write\n.. _Newsweek.com: http://newsweek.com/\n.. _`http://github.com/iamnoah/writeCapture/`: http://github.com/iamnoah/writeCapture/\n.. 
_`http://return1.at/sandbox/writeCapture/`: /static/sandbox/writeCapture/\n" }, { "alpha_fraction": 0.5970149040222168, "alphanum_fraction": 0.7164179086685181, "avg_line_length": 15.75, "blob_id": "28a95ef8c9da5eccd3e9fba2cd4e19e948a3236d", "content_id": "c159285a838e09997f48cb57a850fa59fff1037a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 67, "license_type": "no_license", "max_line_length": 20, "num_lines": 4, "path": "/requirements.txt", "repo_name": "return1/return1.at", "src_encoding": "UTF-8", "text": "pelican==3.7.1\npelican-minify\n#pelican==3.4.0\n#pelican-minify==0.4\n" }, { "alpha_fraction": 0.6625722646713257, "alphanum_fraction": 0.7203757166862488, "avg_line_length": 35.44736862182617, "blob_id": "1e4742de3a5706c9defbcf8b3ba769d44e5e2358", "content_id": "fd9d57ef8ccddab1ceec64eb02705216cd3078f8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 1384, "license_type": "no_license", "max_line_length": 87, "num_lines": 38, "path": "/content/2011-11-24-apache2-analyze-tweak-mpm-perl-script.rst", "repo_name": "return1/return1.at", "src_encoding": "UTF-8", "text": "Analyze and tweak your apache2 mpm settings\n###########################################\n:date: 2011-11-24 08:43\n:category: Server\n:tags: apache, perl, ubuntu\n:slug: apache2-analyze-tweak-mpm-perl-script\n:wide: true\n:summary: A handy perl script, which analyzes your\n currently running apache clients and helps you with tweaking your MPM\n settings based on calculations.\n\nRecently I stumbled over a handy perl script, which analyzes your\ncurrently running apache clients and helps you with tweaking your MPM\nsettings based on calculations:\n\n.. raw:: html\n\n <script src=\"https://gist.github.com/4031588.js?file=apache2-mpm-tweak.pl\"></script>\n\nThe script outputs looks like this:\n\n.. sourcecode:: console\n\n Total memory available: 7.80G\n Total used by apache2 (97 instances): 2.66G\n Total used by other processes: 2.57G\n\n Average memory used per apache2 process: 28.03M\n Recommended number of processes based on Average: 191\n Needed memory for 500 processes based on Average: 13.69G\n\n Max memory used for apache2 process: 45.22M\n Recommended number of processes based on Max: 118\n Needed memory for 500 processes based on Max: 22.08G\n\n Mean plus two Standard Deviations (bulk of usage under max): 43.11M\n Recommended number of processes based on Mean + 2*Stdev: 124\n Needed memory for 500 processes based on Mean + 2*Stdev: 21.05G" }, { "alpha_fraction": 0.7979002594947815, "alphanum_fraction": 0.8041040301322937, "avg_line_length": 53.42856979370117, "blob_id": "616866e8da874f7b523ef7f4609276d4302fd821", "content_id": "5eb5bf060f01c2af1f33e27ac29a07eb7280d810", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 4222, "license_type": "no_license", "max_line_length": 132, "num_lines": 77, "path": "/content/2010-09-13-python-und-zope-als-unterrichtswerkzeuge.rst", "repo_name": "return1/return1.at", "src_encoding": "UTF-8", "text": "Python und Zope als Unterrichtswerkzeuge\n########################################\n:date: 2010-09-13 15:01\n:category: Python\n:tags: didactics, education, python, thesis, zope\n:slug: python-und-zope-als-unterrichtswerkzeuge\n:summary: My german thesis about python and zope in education\n\nGerman Thesis, so this post is written in German. 
If you are\ninterested in the English abstract, please feel free to grab it from `here`_.\n\nTo complete my studies at FH\nTechnikum Wien (ICSS programme), I chose a thesis topic that a) interested\nme and b) might open doors for my future path. Choosing Mr. `Göschka`_ as\nsupervisor took some courage beforehand, but in hindsight it was exactly\nthe right decision. His strict approach and competence contributed\nsignificantly to the quality of the thesis. Let me take this opportunity\nto thank my supervisors. \n\nLet me quote the introduction of the thesis: \n\n\t*\"For beginners, learning a programming language is hard. This is\n\tbecause today's teaching prefers to use languages from industry.\n\tStudents want to get jobs in industry, and therefore value having\n\tsought-after technologies as part of their education. Industry, in\n\tturn, wants its demand satisfied. What is overlooked is that\n\tprogramming languages are not technologies themselves, but tools for\n\ttechnologies. In teaching, a programming language must be a tool that\n\tmakes it possible to convey the fundamental ideas of a subject without\n\tdrifting into a course about the programming language itself.*\n\n\t*This thesis presents Python as such a tool. It shows that Python, in\n\tcontrast to languages frequently used in teaching today (such as C,\n\tC++ or Java), is well suited for beginners. Thanks to its easy entry,\n\trelevant concepts of computer science and software engineering can be\n\tdiscussed much earlier with Python. Another substantial advantage is\n\tthat the teachers' work is eased as well. The language is kept compact\n\tand simple and tries not to get in the developer's way. At the same\n\ttime, it is a widely recognized language and is used in industry.*\n\n\t*For advanced concepts of software engineering, one can switch to more\n\tcomplex languages, with the students profiting greatly from their\n\texperience with Python. Teaching can, however, continue to build on\n\tPython. This thesis shows how the component framework Zope could be\n\tused for this, covering topics such as component orientation, reuse,\n\tdatabases, experiencing a software development process, and testing\n\tand documenting software. Zope, which is based on Python, also has the\n\tadvantage that, compared to other application frameworks, an easier\n\tentry enables quick learning success.\"*\n\nThe thesis was graded 1 (the best mark), feedback from the community was\npositive, and the thesis has been cited. For a degree thesis, a\nthoroughly satisfying result for me. Whether I will ever put myself\nthrough teaching, which my thesis envisions in the long run, remains\nopen. We'll see ;)\nMy thesis can be ordered from Amazon, or of course also read for free: \n\n:Amazon: `Python und Zope als Unterrichtswerkzeuge`_\\ \n:PDF: `Python und Zope als Unterrichtswerkzeuge (PDF)`_\n:Stadtgespräche: `http://www.stadtgespräche.com/`_\n:Github: `https://github.com/return1/masterthesis`_\n\n\n.. _here: /static/assets/Python_und_Zope_als_Unterrichtswerkzeuge.pdf\n.. 
`Wuala`_ is still great, but cancelling their free storage.\n\nI have been using `Dropbox`_ just to sync my `Keepass`_ database. \nIt's no secret that Dropbox data transmission is not encrypted, neither is their data storage.\nTo use Dropbox securely, a `Truecrypt`_ volume must be synced, \nbut `special settings`_ would be necessary so that not the entire volume gets synced every time you make a small change somewhere.\n\nMy business data was synced with `MobileMe`_ and iDisk, but I didn't really like that a lot.\nI'd had an eye on `Wuala`_ for some time now, but their pricing was always rather discouraging. \n\nBecause of Dropbox' lasting `security issues`_ and their bold `security lie`_, \nWuala has reacted, providing all their functionalities (including 1GB) for free. \nI'd been waiting for just that! In a minute, MobileMe got canceled, Dropbox was deleted and replaced by Wuala.\nThere are Wuala clients for every platform, including iOS and Android. \n\nSwitching to Wuala has nothing but advantages for experienced users.\nI must admit that Dropbox has got the more intuitive interface, which may also\nexplain its previous success. But after the latest incidents, Dropbox\nshould be a no-go for everyone. \n\nOn the net you can find all reasons why Wuala is better than Dropbox, \nhere I will just pitch on the points that particularly appeal to me:\n\n- local data encryption, zero knowledge on the part of Wuala (i.e.\n if you forget the Wuala password, unsynced data is lost)\n- flexible file and folder syncing, not just one global folder like\n with Dropbox\n\nWuala has one drawback though: For folders shared with non-Wuala\ncontacts, a \"secret\" weblink is generated, through which the shared data can be accessed. \nTo make this totally secure, the link would have to be sent encrypted, which is not always possible. \nThat means, only sharing between Wuala accounts is 100% secure. \n\nI am really pleased so far.\nIf you want a little more disk space, there is a multitude of `promo codes`_, just google for them.\n\n.. _BitTorrent Sync: http://www.bittorrent.com/sync\n.. _Dropbox: https://www.dropbox.com/\n.. _Keepass: http://keepass.info/\n.. _MobileMe: http://me.com/idisk/\n.. _Wuala: http://www.wuala.com/\n.. _Truecrypt: http://www.truecrypt.org/\n.. _special settings: http://stadt-bremerhaven.de/dropbox-und-truecrypt-verschluesselte-daten-in-der-cloud\n.. _security issues: http://www.thinq.co.uk/2011/6/21/dropbox-flaw-left-thousands-users-risk/\n.. _security lie: http://www.wired.com/threatlevel/2011/05/dropbox-ftc/\n.. _promo codes: http://www.macmacken.com/2011/04/21/nochmals-4-gb-kostenlosen-speicherplatz-fuer-wuala/\n" }, { "alpha_fraction": 0.5354330539703369, "alphanum_fraction": 0.5826771855354309, "avg_line_length": 20.33333396911621, "blob_id": "3044f08b875df5dc1dbd0b74ec835384cadd8e2d", "content_id": "a00a7b93717dce3f65d9a77791f78411d2a53651", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 127, "license_type": "no_license", "max_line_length": 43, "num_lines": 6, "path": "/content/pages/500.rst", "repo_name": "return1/return1.at", "src_encoding": "UTF-8", "text": "500 - Internal Server Error\n###########################\n:status: hidden\n:slug: 500\n\nSorry, but i do not know wtf just happened!" 
}, { "alpha_fraction": 0.6881973147392273, "alphanum_fraction": 0.7246036529541016, "avg_line_length": 47.657142639160156, "blob_id": "2c6d29cf545f090b7b723001ed86d54f22ae973f", "content_id": "8c8d1d12b9d9442f23349c820a2f34d20c413640", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 1707, "license_type": "no_license", "max_line_length": 124, "num_lines": 35, "path": "/content/2011-10-25-ubuntu-ec2-ami-with-xfs-root-ebs-volume-10-04-lts-lucid-lynx.rst", "repo_name": "return1/return1.at", "src_encoding": "UTF-8", "text": "Ubuntu EC2 AMI with XFS root EBS Volume (10.04 LTS, Lucid Lynx)\n###############################################################\n:date: 2011-10-25 22:33\n:category: Server\n:tags: amazon web services, ami, aws, ebs, ec2, grub, lucid lynx, ubuntu, xfs\n:slug: ubuntu-ec2-ami-with-xfs-root-ebs-volume-10-04-lts-lucid-lynx\n\nAre you also in need of a current Ubuntu LTS (10.04, Lucid Lynx) AMI for\nAmazon Web Services, with EBS Boot and XFS as file system? Maybe because\nyou would like to make consistent snapshots via xfs\\_freeze and\n`ec2\\_consistent\\_snapshot`_? Then this `article`_ by Scott Moser helps\nyou out. But hey. It will not work, because latest 10.04 Ubuntu EC2 AMIs\nhave their boot partition labeled\n``cloudimg-rootfs``, which is too long for an XFS\nfile system label, XFS supports only 12 characters. Ubuntu´s Maverick\nand Natty AMIs for EC2 have already been fixed, the label there is\n``uec-rootfs``. If you follow Scott´s instructions\nfrom the link above, your system will not boot, the error looks like\nthis: \n\n\tALERT! /dev/disk/by-label/cloudimg-rootfs does not exist. Dropping to a shell!\n\nAll you have to do is change the label of the XFS partition from ``cloudimg-rootfs`` to \n``uec-rootfs``, also replace the old label in\n\n- /boot/grub/menu.lst\n- /boot/grub/grub.cfg\n- /etc/fstab (missed that, thanks @scott)\n\nI have built an AMI, complete with above steps. So you can launch Ubuntu\n10.04 LTS Lucid Lynx, with EBS root and XFS: https://console.aws.amazon.com/ec2/home?region=eu-west-1#launchAmi=ami-61c4f615\n\n\n.. _ec2\\_consistent\\_snapshot: http://alestic.com/2009/09/ec2-consistent-snapshot\n.. 
_article: http://ubuntu-smoser.blogspot.com/2010/11/create-image-with-xfs-root-filesystem.html\n" }, { "alpha_fraction": 0.7215189933776855, "alphanum_fraction": 0.7341772317886353, "avg_line_length": 77.5, "blob_id": "6086b9fab68be353875c33a30c1d203f147c98fe", "content_id": "eeaa44292082fa88b6f4b22a39d93058217282fc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 158, "license_type": "no_license", "max_line_length": 124, "num_lines": 2, "path": "/content/sandbox/writeCapture/chain/chainchain.js", "repo_name": "return1/return1.at", "src_encoding": "UTF-8", "text": "console.log(\"i am chainchain\");\ndocument.write('<div style=\"border:1px solid red; padding:3px\">i am chainchain.js and i wrote this with document.write().');\n\n" }, { "alpha_fraction": 0.8266666531562805, "alphanum_fraction": 0.8266666531562805, "avg_line_length": 74, "blob_id": "b1bbd1a00bf827e41384799a081ffae7a3747e60", "content_id": "d990a2655ea561234993df39b67300b99fb3b83b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 75, "license_type": "no_license", "max_line_length": 74, "num_lines": 1, "path": "/README.md", "repo_name": "return1/return1.at", "src_encoding": "UTF-8", "text": "my unfinished new webpage build with pelican and python (byebye wordpress)\n" }, { "alpha_fraction": 0.6251944303512573, "alphanum_fraction": 0.6858475804328918, "avg_line_length": 39.25, "blob_id": "18b83423c79e1ce034812ae9104f57e52e4432b3", "content_id": "d4d42bc0f713d9bb2d8d78a8ec721d22e5bf2333", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 645, "license_type": "no_license", "max_line_length": 191, "num_lines": 16, "path": "/content/2012-11-12-trim-enabler-for-mountain-lion.rst", "repo_name": "return1/return1.at", "src_encoding": "UTF-8", "text": "TRIM Enabler for OSX Yosemite\n#############################################\n:date: 2012-11-12 10:20\n:modified: 2015-04-09 10:30\n:category: General\n:tags: osx, trim, yosemite\n:slug: trim-enabler-for-osx\n:summary: Re-enable TRIM support on OSX Yosemite.\n\nHere’s the proper way to re-enable TRIM support on OSX Yosemite. Run these commands in Terminal:\n\n.. raw:: html\n\n <script src=\"https://gist.github.com/4058659.js?file=trim_enabler.txt\"></script>\n\nAll credits to `Grant Pannell <https://digitaldj.net/blog/2011/11/17/trim-enabler-for-os-x-lion-mountain-lion-mavericks/>`_, i am just putting his code into a gist, so it es easier to read and copy." }, { "alpha_fraction": 0.6870588064193726, "alphanum_fraction": 0.7176470756530762, "avg_line_length": 43.68421173095703, "blob_id": "80e0fea684b5f3fd69a9ba4b6bfcc585b7ef9be6", "content_id": "959e0b48b82219ea2e7022ad1331ae9f9073086d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 850, "license_type": "no_license", "max_line_length": 147, "num_lines": 19, "path": "/content/2013-04-04-bitcoin-mining-ubuntu.rst", "repo_name": "return1/return1.at", "src_encoding": "UTF-8", "text": "Playing with Bitcoin (CPU) Mining on Ubuntu\n#############################################\n:date: 2013-04-04 11:23\n:category: General\n:tags: bitcoin, ubuntu, debian\n:slug: bitcoin-miner-ubuntu\n:summary: Fooling around with a CPU Miner\n\nThe bitcoin is on an all time high at the moment, that's why i finally decided to fool around with it. 
But notice that mining with CPU's is NOT\ncost-effective, if you want to earn some coins, you have to mine with one or more GPU's. And don't mine alone, not worth it until you have a lot of\ncomputational power, join a miner pool instead.\n\nAnyway, here are the simple steps to run a CPU Miner on Debian or Ubuntu\n\n.. raw:: html\n\n <script src=\"https://gist.github.com/return1/5309843.js\"></script>\n\nAlso, i am accepting bitcoins, if you like to donate for whatever reason :): 1BUq7ijvVedTgG7xhKDgTMd6t3JdDqsH4i\n\n" }, { "alpha_fraction": 0.6826666593551636, "alphanum_fraction": 0.6986666917800903, "avg_line_length": 30.29166603088379, "blob_id": "61c42f83cc59c7cdc9344e830c1a5e76180fdcb3", "content_id": "3ecc94766833672042dde95a72b9aa65094a40b2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 750, "license_type": "no_license", "max_line_length": 75, "num_lines": 24, "path": "/content/2012-03-15-problems-installing-monit-and-munin-on-ubuntu.rst", "repo_name": "return1/return1.at", "src_encoding": "UTF-8", "text": "Problems installing monit and munin on Ubuntu\n#############################################\n:date: 2012-03-15 11:04\n:category: Server\n:tags: ubuntu, munin, mysql\n:slug: problems-installing-monit-and-munin-on-ubuntu\n:summary: What i needed to tweak to monitor mysql.\n\nTo monitor mysql, you need to activate the mysql pid file creation. In\n``/etc/mysql/my.cnf``, in the ``[mysqld]`` section add\n\n.. sourcecode:: console\n\n pid-file = /var/run/mysqld/mysqld.pid\n\nTo run munin with apache and mysql monitoring, you need to install two\nperl modules.\n\n.. sourcecode:: console\n\n aptitude install libwww-perl libcache-cache-perl\n\nOr else ``munin-node-configure --suggest`` will report ``LWP::UserAgent not\nfound`` and ``Missing dependency Cache::Cache``" }, { "alpha_fraction": 0.6583067774772644, "alphanum_fraction": 0.7120687365531921, "avg_line_length": 66.44860076904297, "blob_id": "dd4ca7567443878ed0ef8f1d63919ceed63a3d90", "content_id": "2bb957c355713bbe7e5881ba1c0f2a475e6aa107", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 7234, "license_type": "no_license", "max_line_length": 404, "num_lines": 107, "path": "/content/pages/home.rst", "repo_name": "return1/return1.at", "src_encoding": "UTF-8", "text": "About\n#####\n:slug: home\n:save_as: index.html\n\nMy name is Dominique Lederer, and I have been a web application developer since 1999/2000. Having worked for renowned corporate\nclients from the start (like Telekom Austria, Allianz, o2 or Red Bull), and after completing my studies at FH Technikum\nWien (Information and Communication Systems and Services), I became self-employed in 2007, in order to offer my clients\nfirst-hand quality.\n\nFocusing on current and reliable technologies, I design, develop and maintain web applications. With constant education\nand training, I make sure to retain the cutting edge.\n\n.. 
raw:: html\n\n <p>For further information, or if you are interested in a collaboration, please contact me on&nbsp;\n <a class=\"reference external\" href=\"https://www.xing.com/profile/Dominique_Lederer\">Xing</a>,&nbsp;\n <a class=\"reference external\" href=\"http://at.linkedin.com/in/dominiquelederer\">LinkedIn</a> or&nbsp;\n <a class=\"reference external\" href=\"https://twitter.com/return1_at\">Twitter</a>,\n or drop me a mail @ <a target=\"_blank\" href=\"https://mailhide.io/e/sNWWU\">[email protected]</a></p>\n\nCurrent Projects\n----------------\n\n- `Relaunch Kronehit Radio 2022 <http://www.kronehit.at/>`_: Flask API development, AWS Server maintenance. *2022-now()* [1]_\n- `OEAD Student Housing <https://www.oeadstudenthousing.at/>`_: django project (wagtail cms), ansible, AWS server maintenance *2021-now()* [1]_\n- `Registration System for the University Preparation Programme of the Vienna Universities <https://anmeldung.vwu.at/>`_: django project (wagtail cms), ansible, AWS server maintenance *2020-now()* [1]_\n- `kronehit smart / skipfm – next generation radio player <https://www.wunderweiss.com/de/kronehit-smart/>`_: award wining (`Constantinus Award 2018 <https://www.constantinus.net/de/wall-of-fame/84238.html>`_, `Futurezone Award 2017 <https://futurezone.at/myfuzo/das-sind-die-gewinner-des-futurezone-award-2017/296.577.448>`_) large scale radio platform built on Flask, PostgreSQL, AWS. *2015-now()* [1]_\n- `ViennaEventCrew CRM Frontend Rewrite <http://www.viennaeventcrew.at>`_: custom CRM with PostgreSQL, Zend Framework, vue.js and Devextreme, hosted on AWS. *2019-now()*\n- `Radio Arabella <https://www.arabella.at/>`_: Flask API development, AWS Server maintenance. *2020-now()* [1]_\n- `Radio Austria <http://www.radioaustria.at/>`_: Flask API development, AWS Server maintenance. *2019-now()* [1]_\n- `Antenne Salzburg <https://antennesalzburg.oe24.at/>`_: Flask API development, AWS Server maintenance. *2019-now()* [1]_\n- `Topradio <http://www.topradio.de/>`_: serverless hosting of a django project (wagtail cms) via zappa *2019-now()* [1]_\n- `Halle für Kunst und Medien (Graz) Online Journal <https://journal.km-k.at/>`_: django project (wagtail cms), server maintenance *2017-now()*\n- `Radio Ton <http://www.radioton.de/>`_: Flask API development, AWS Server maintenance. *2017-now()* [1]_\n- `CRM for KroneHIT`: Flask, PostgreSQL. *2015-now()* [1]_\n- `Safely <http://www.safely.at/>`_:: Webbased, secure Passwordmanager developed with Flask and PostgreSQL. *2015-now()* [1]_\n- `Halle für Kunst und Medien (Graz) <http://www.km-k.at/>`_: django project, server maintenance *2013-now()*\n- `Nicole Six & Paul Petritsch <http://www.six-petritsch.com/>`_: django project, server maintenance *2012-now()*\n- `Wunderweiss <http://www.wunderweiss.com>`_: maintenance of the wunderweiss server environment (Debian, Ubuntu, AWS). *2007-now()*\n\nProjects - done\n---------------\n\n- `Relaunch Kronehit Radio 2013 <http://www.kronehit.at/>`_: Flask API development, AWS Server maintenance. *2013-2022* [1]_\n- `Relaunch KISS FM, der Beat von Berlin`: Flask API development, AWS Server maintenance. *2017-2020* [1]_\n- `Relaunch Berliner Rundfunk 91.4`: Flask API development, AWS Server maintenance. *2017-2020* [1]_\n- `Relaunch rs2 94,3`: Flask API development, AWS Server maintenance. *2017-2020* [1]_\n- `ViennaEventCrew CRM <http://www.viennaeventcrew.at>`_: custom CRM with PostgreSQL, Zend Framework and ExtJS, hosted on AWS. 
*2009-2019*\n- `Wenatex Intranet <http://mein.wenatex.com/loki/>`_: highly frequented complex application built on Python, Bluebream\n (formerly known as Zope 3) with RelStorage and SQLAlchemy, PostgreSQL with Postgis, jQuery, Reportlab and Google Maps. *2007-2017* [1]_\n- `Radio Service Berlin <http://ras.berlin/>`_: server maintenance. *2014* [1]_\n- `Kronehit Phonegap App <https://itunes.apple.com/at/app/kronehit-online-radio-charts/id324558085>`_: concept work, phonegap javascript interface. *2014* [1]_\n- `Topradio <http://www.topradio.de/>`_: server maintenance. *2013* [1]_\n- `Kindergruppe Sternenland <http://www.kindergruppe-sternenland.at/>`_: django, mezzanine project. *2013*\n- `Brandmair <http://brandmair.net/>`_: conversion of existing flash page to html only. *2012*\n- `rs2 94,3`: development and server maintenance. *2010* [1]_\n- `Berliner Rundfunk 91.4`: development and server maintenance. *2010* [1]_\n- `KISS FM, der Beat von Berlin`: development and server maintenance. *2010* [1]_\n- `Kronehit Portal <http://www.kronehit.at>`_: social network based on Zend Framework. *2009-2013* [1]_\n- `Kronehit iPhone App <http://itunes.apple.com/WebObjects/MZStore.woa/wa/viewSoftware?id=324558085&amp;amp;mt=8>`_:\n most downloaded music app (2009) in the austrian app store. *2009* [1]_\n- `univercity 2015 <http://www.univercity2015.at/>`_: templating with SIFR. *2009* [1]_\n- `Wenatex Portal <http://www.wenatex.com/>`_: relaunch of the corporate website based on Plone. *2009* [1]_\n- `Wenapower`: job platform, implemented with Zope 3. *2009 †* [1]_\n- `NummerSicher <https://nummer-sicher.at/>`_: zend framework project. *2009* [1]_\n- `Kronehit Fanradio`: KroneHit fan radio based on wordpress during the soccer EURO in Austria. *2008 †* [1]_\n- `arch08 <http://www.architekturtage.at/2008/>`_: Architekturtage Wien based on drupal. *2008* [1]_\n- `Werbeagentur Andrea Preiss <http://www.preiss-wa.at/>`_: silverstripe project. *2007* [1]_\n- `VAT`: installation and implementation of mailman. *2007 †* [1]_\n- `Bacher Systems <http://www.bacher.at>`_: consulting/installation of OTRS (Open Source Trouble Ticket System). *2007*\n- `Stadtgespräche <http://www.stadtgespraeche.com>`_: as practical part of `my degree thesis <http://return1.at/python-und-zope-als-unterrichtswerkzeuge/>`_,\n the website was relaunched on the technical basis of Zope3. *2007*\n\n\nFurther experience\n------------------\n\nBy 09/2006 I had gained a lot of experience through salaried cooperation\non projects for the following customers:\n\nAgrana :: Allianz Deutschland :: Allianz Österreich :: Bank Winter ::\nBAWAG P.S.K. :: Dr. Richard :: EXPO 2000 Österreich :: Fiat :: Graz 2003\n:: Gründerservice :: Kronehit :: Lancia :: Magna Steyr :: Mayr Melnhof\n:: Medwell 24 :: o2 :: ONE :: Red Bull :: Schenker :: Telekom Austria ::\nTurbo Schuh :: Vereinigte Bühnen Wien :: Viewturn :: WWF Österreich\n\n\n.. [1] developed for `wunderweiss <http://www.wunderweiss.com>`_\n\n\nImprint\n-------\n\n.. 
raw:: html\n\n <p>return1<br>\n Web Application Development</p>\n\n <p>Dominique Lederer<br>\n Josef-Lind-Straße 3/21<br>\n A-8230 Hartberg<br>\n Austria</p>\n\n <p>UID: ATU63029216</p>\n\n <p>BIC: NTSBDEB1XXX, IBAN: DE15 1001 1001 2623 6374 05</p>\n" }, { "alpha_fraction": 0.5736433863639832, "alphanum_fraction": 0.6201550364494324, "avg_line_length": 20.66666603088379, "blob_id": "038da5edbf7478e6b1431a4623d57184acd262e9", "content_id": "f60ff7be01ceb771136032138b605645c946e49b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 129, "license_type": "no_license", "max_line_length": 59, "num_lines": 6, "path": "/content/pages/404.rst", "repo_name": "return1/return1.at", "src_encoding": "UTF-8", "text": "404 - Page not found\n####################\n:status: hidden\n:slug: 404\n\nSorry, but the page you are looking for has not been found." }, { "alpha_fraction": 0.6292466521263123, "alphanum_fraction": 0.6440176963806152, "avg_line_length": 24.074073791503906, "blob_id": "4969306c91cd08baef15fba19626ac3b7d9d382a", "content_id": "398d120986cc6a58c6bd0b502fd2a4510f9bccde", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1354, "license_type": "no_license", "max_line_length": 80, "num_lines": 54, "path": "/pelicanconf.py", "repo_name": "return1/return1.at", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*- #\n\nAUTHOR = u\"Dominique Lederer\"\nSITENAME = u\"return1\"\nSITESUBTITLE = u'\"return true\" quality webapplications'\nSITEURL = 'http://localhost:8000' #'http://return1.at'\n#TWITTER_USERNAME = 'return1_at'\n\nTIMEZONE = 'Europe/Vienna'\n\nDEFAULT_LANG = 'en'\n\n# Social widget\nSOCIAL = (\n ('twitter', 'https://twitter.com/return1_at'),\n ('linkedin', 'http://at.linkedin.com/in/dominiquelederer'),\n ('xing', 'https://www.xing.com/profile/Dominique_Lederer'),\n ('github', 'https://github.com/return1'),\n ('about.me', 'https://about.me/dominique.lederer/'),\n)\n\nDEFAULT_PAGINATION = 50\n\nTHEME = \"themes/return1\"\n\nSTATIC_PATHS = ['images', 'assets', 'sandbox']\n\nLOCALE = 'en_US.UTF-8'\nDEFAULT_DATE_FORMAT = '%B %d, %Y'\n\nSUMMARY_MAX_LENGTH = 30\n\n# A list of files to copy from the source to the destination\nEXTRA_PATH_METADATA = {\n 'assets/robots.txt': {'path': 'robots.txt'},\n 'assets/favicon.ico': {'path': 'favicon.ico'},\n}\n# (\n# ('assets/robots.txt', 'robots.txt'),\n# ('assets/favicon.ico', 'favicon.ico'),\n#)\n\nREADERS = {'html': None} # see https://github.com/getpelican/pelican/issues/1157\n\nDIRECT_TEMPLATES = ('index', 'tags', 'categories', 'archives', 'sitemap')\nSITEMAP_SAVE_AS = 'sitemap.xml'\n\n#seo\nARTICLE_URL = '{slug}/'\nARTICLE_SAVE_AS = '{slug}/index.html'\n\n#PLUGINS = ['minify']\nPLUGINS = []\n" }, { "alpha_fraction": 0.7084447741508484, "alphanum_fraction": 0.7264043092727661, "avg_line_length": 37.485294342041016, "blob_id": "559c66554a48e831ab40c9c12f6f177a24619e95", "content_id": "e4ce950b434f4ef2e600c69958f6f2834379346a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 2617, "license_type": "no_license", "max_line_length": 164, "num_lines": 68, "path": "/content/2011-01-05-perspective-2011.rst", "repo_name": "return1/return1.at", "src_encoding": "UTF-8", "text": "Perspective 2011\n################\n:date: 2011-01-05 12:00\n:category: General\n:tags: amazon web services, bluebream, eclipse, emacs, extjs, node.js, python, sencha touch, 
textmate, twig, zend framework, intellij idea, pycharm, php, javascript\n:slug: perspective-2011\n\nThe beginning of a new year makes me consider current developments on\nthe market, and which of those might be of interest to me. It is also a\nkind of fresh start, an attempt at motivation that probably everybody\nneeds in their job to enjoy working. \n\n`Sencha Touch`_ \n---------------\nMobile apps with HTML5 and CSS3, of course in fact cropped in their functionality, but\ndevelopment time and comfort just speak for themselves. Besides, it's\ncool ;). The API relies strongly on ExtJS, which is advantageous to me\ndue to my previous knowledge. \n\n`ExtJS 4`_ \n----------\nA new version of ExtJS? It can\nonly improve a lot. ExtJS3 is very professional already. I'm looking\nforward to possibly employing it in two of my current projects.\n\n`node.js`_ \n----------\nJavascript on the server, AJAX push/pull, server load\nrelieving...... there is an interesting technology indeed in the offing.\n\n`Twig`_ \n-------\nSince I am currently working more with Zend Framework, i.e. PHP,\nI'm quite enthusiastic about Twig, because it reminds me a lot of Zope\npage templates, and of the beautiful world of Python. \n\n`Zend Framework 2`_ \n-------------------\nThe admittedly very useful and convenient PHP framework, completely\noverhauled. ``/me`` in sceptical but eager anticipation. \n\n`Bluebream 1.0`_ \n----------------\n(formerly Zope 3) Will Bluebream become the more organized Zope3?\n\n`Amazon Web Services`_ \n----------------------\nIt would appeal to me to use more AWS services\nthan heretofore, to fully exploit this scalability. At the same time\ntrying out possible alternative providers. \n\nEditor \n------\nI am at odds with all\neditors that I have used up to now. Eclipse is too inflated and slow for\nme, Emacs is too dry (I can use Emacs shortcuts in any other editor\ntoo), Textmate has no proper code outline view. I hope to come across\nthe perfect editor in 2011. What other interesting technologies for us\nweb developers are out there? Trends? Dark horses? Hints are\nappreciated.\n\n.. _Sencha Touch: http://www.sencha.com/products/touch/\n.. _ExtJS 4: http://www.sencha.com/blog/2010/11/22/ext-js-4-preview-faster-easier-more-stable/\n.. _node.js: http://nodejs.org/\n.. _Twig: http://www.twig-project.org/\n.. _Zend Framework 2: https://github.com/zendframework/zf2\n.. _Bluebream 1.0: http://pypi.python.org/pypi/bluebream\n.. _Amazon Web Services: http://aws.amazon.com/\n" }, { "alpha_fraction": 0.5913226008415222, "alphanum_fraction": 0.6864940524101257, "avg_line_length": 46.66666793823242, "blob_id": "c817f023289b415fdbcb4f864d96484be0c7bc3a", "content_id": "b08bff5474e42a8ddc8757a3ae7764c602a90283", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 1429, "license_type": "no_license", "max_line_length": 517, "num_lines": 30, "path": "/content/pages/photography.rst", "repo_name": "return1/return1.at", "src_encoding": "UTF-8", "text": "Photography\n###########\n\n.. 
raw:: html\n\n <p>If you like my photography portfolio, feel free to drop me a <a href=\"http://www.google.com/recaptcha/mailhide/d?k=01ZilVG1t_mruOsKRl84zCUg==&amp;c=mu8xPEnugt-jSqIWB0x-3QBUGQ-6f8f-MxWhR5gTLus=\" onclick=\"window.open('http://www.google.com/recaptcha/mailhide/d?k\\07501ZilVG1t_mruOsKRl84zCUg\\75\\75\\46c\\75mu8xPEnugt-jSqIWB0x-3QBUGQ-6f8f-MxWhR5gTLus\\075', '', 'toolbar=0,scrollbars=0,location=0,statusbar=0,menubar=0,resizable=0,width=500,height=300'); return false;\" title=\"Reveal this e-mail address\">message</a>.\n These are only the last 50 pictures, if you like to see more visit my <a href=\"https://plus.google.com/photos/113955961996209596236/albums/5843749124426720065\">Google+ page</a> or my <a href=\"https://www.facebook.com/return1.photography\">Facebook page</a>.</p>\n\n.. raw:: html\n\n <link rel=\"stylesheet\" href=\"/theme/css/plusgallery.css\">\n\n <div id=\"plusgallery\"\n data-type=\"google\"\n data-userid=\"113955961996209596236\"\n data-image-path=\"/theme/images/plusgallery\"\n data-album-id=\"5843749124426720065\"\n data-credit=\"false\"\n data-limit=\"50\">\n </div>\n\n <!-- Load jQuery ahead of this -->\n <script src=\"/theme/js/vendor/jquery-1.7.2.min.js\"></script>\n <script src=\"/theme/js/vendor/plusgallery.js\"></script>\n <script>\n \t$(function(){\n //DOM loaded\n $('#plusgallery').plusGallery();\n });\n </script>" }, { "alpha_fraction": 0.6519284844398499, "alphanum_fraction": 0.7064910531044006, "avg_line_length": 34.46666717529297, "blob_id": "375fbd28956c2131ffad4525949b963335af78ab", "content_id": "d043759582f7a7d26d33ac6f1ac6a2040d7bb11d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 1065, "license_type": "no_license", "max_line_length": 144, "num_lines": 30, "path": "/content/2014-10-15-hardening-ssl-webserver.rst", "repo_name": "return1/return1.at", "src_encoding": "UTF-8", "text": "Hardening Your Web Server’s SSL Ciphers\n#############################################\n:date: 2014-10-15 18:03\n:category: Server\n:tags: apache2, nginx, ssl, security\n:slug: hardening-your-web-servers-ssl-ciphers\n:summary: Just archiving the actual required web server configs for nginx and apache2\n\nJust archiving the actual required web server configs:\n\n* disable SSL 2.0 (FUBAR) and SSL 3.01 (POODLE),\n* disable TLS 1.0 compression (CRIME),\n* disable weak ciphers (DES, RC4), prefer modern ciphers (AES), modes (GCM), and protocols (TLS 1.2).\n\n**Nginx**\n\n.. code-block:: nginx\n\n ssl_prefer_server_ciphers On;\n ssl_protocols TLSv1 TLSv1.1 TLSv1.2;\n ssl_ciphers ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:!aNULL:!MD5:!DSS;\n\n**Apache2**\n\n.. 
code-block:: apache\n\n    SSLProtocol ALL -SSLv2 -SSLv3\n    SSLHonorCipherOrder On\n    SSLCipherSuite ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:!aNULL:!MD5:!DSS\n    SSLCompression Off" }, { "alpha_fraction": 0.7218237519264221, "alphanum_fraction": 0.7448770403862, "avg_line_length": 32.655174255371094, "blob_id": "c05cd13e2f732db38dfa87ed278b6f30c2984b8d", "content_id": "37969106f00f35d0b12af4b138b5d02158ce992f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 1952, "license_type": "no_license", "max_line_length": 111, "num_lines": 58, "path": "/content/2010-09-30-integration-doctrine2-zend-framework.rst", "repo_name": "return1/return1.at", "src_encoding": "UTF-8", "text": "Integration of Doctrine2 and Zend Framework\n###########################################\n:date: 2010-09-30 11:54\n:category: PHP\n:tags: autoloader, doctrine, zend framework\n:slug: integration-doctrine2-zend-framework\n:wide: true\n\n*Update*: Meanwhile, Doctrine2 is stable, and its integration into Zend\nFramework has changed slightly. I have incorporated the changes into the\narticle.\n\nIntegrating Doctrine2 into Zend Framework (1.11) has\ncost me some time; enough to write about it, and maybe make it easier\nfor others. \n\nThe procedure is actually easy:\n\n#. download Zend Framework and Doctrine2\n#. create a project, e.g. via Zend\\_Tool:\n\n   ::\n\n       zf create project quickstart\n\n#. link the Zend Framework and Doctrine libraries with the newly created\n   project, either via symlink in the library folder, or just copy and\n   paste\n#. configure the web server\n\nIn case of problems with the above steps, Zend Framework's `quickstart\nguide`_ is recommended. `Doctrine's install guide`_ helps with\ninstalling Doctrine2. My problem was with integrating the Doctrine2\nautoloader into the Zend Framework autoloader. I was able to make it work\nwith support from the web. First, the relevant excerpt from\nbootstrap.php; here it is important that the application\nnamespace is configured before the Doctrine namespace. Sources are\ncited in the comments:\n\n.. raw:: html\n\n    <script src=\"https://gist.github.com/4032609.js?file=bootstrap.php\"></script>\n\nThen you simply build a model:\n\n.. raw:: html\n\n    <script src=\"https://gist.github.com/4032609.js?file=Controller.php\"></script>\n\nand use it in an action, voila! (first create the table in the database\n;) )\n\n.. raw:: html\n\n    <script src=\"https://gist.github.com/4032609.js?file=Model.php\"></script>\n\n.. _quickstart guide: http://framework.zend.com/manual/en/learning.quickstart.create-project.html\n.. _Doctrine's install guide: http://www.doctrine-project.org/docs/orm/2.0/en/reference/introduction.html#github\n" } ]
22
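The SSL-hardening post in the record above lists the cipher strings to configure, but no way to verify them. A quick check of what a server actually negotiates can be done from Python's standard library; this is an illustrative sketch rather than part of the repository, with ``example.com`` standing in as a placeholder for the host under test:

.. code-block:: python

    # Minimal TLS handshake probe: connects to a host and prints the
    # negotiated protocol version and cipher suite. With the server-side
    # cipher ordering from the configs above, a modern client will
    # typically land on one of the ECDH/AES-GCM suites listed first.
    import socket
    import ssl

    HOST = "example.com"  # placeholder host, assumed to serve HTTPS on 443

    context = ssl.create_default_context()
    with socket.create_connection((HOST, 443)) as sock:
        with context.wrap_socket(sock, server_hostname=HOST) as tls:
            print(tls.version())  # e.g. 'TLSv1.2'
            print(tls.cipher())   # (cipher name, protocol, secret bits)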
ednamartins/Qualidade-de-software
https://github.com/ednamartins/Qualidade-de-software
bc8542f4d30b0bcebb3a65d4e4a6fa04dc33f411
3213d75c035246c23cf65f4fa73ca1c44f1dc484
ff4e411f32f8751bdef5ba4c2b7334059860dacf
refs/heads/main
2022-12-20T08:53:15.469606
2020-10-12T19:06:09
2020-10-12T19:06:09
303,486,253
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5552193522453308, "alphanum_fraction": 0.5892586708068848, "avg_line_length": 26.47916603088379, "blob_id": "1b2d8ae4341c18a71d3120daebe6d77cd27c9437", "content_id": "1b8d3881c7b150da6f93d87eb2fed38237eb128c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1331, "license_type": "no_license", "max_line_length": 59, "num_lines": 48, "path": "/calculadora.py", "repo_name": "ednamartins/Qualidade-de-software", "src_encoding": "UTF-8", "text": "# 1 trabalho de Qualidade de Sotware, Calculadora\n#Recebe uma expressão de números e operadores\n#precorre toda a espreção a busca de um operador\ndef buscar_op(exp):\n vetor=['+','-','*','/']\n for valor in exp:\n if valor in vetor:\n return exp.find(valor)\n# verifica a o nível de prioridade do operador\n#calcula de acordo com o operador\ndef calcular(n1,n2,op):\n cal=0\n if op == '+':\n cal=n1 + n2\n elif op == '-':\n cal=n1 - n2\n elif op == '*':\n cal = n1 * n2\n elif op == '/':\n cal=n1 / n2\n else :\n print(\"Operador Invalido\")\n return cal\n#resultado=calcular(n1,n2,op)\n#print(\"resultado\",resultado)\ndef operacao(exp):\n\n posi_op1 = buscar_op(exp)\n n1 = float(exp[0:posi_op1])\n exp2 = exp[posi_op1 + 1:len(exp)]\n posi_op2 = buscar_op(exp2)\n # não tem mais operadores\n resultado=0\n if not posi_op2:\n #Condição se for vazio\n n2 = float(exp[posi_op1 + 1:len(exp)])\n resultado = calcular(n1, n2, exp[posi_op1])\n print(\"Resultado\", resultado)\n else :\n n2 = float(exp[posi_op1 + 1:posi_op1 + 1+posi_op2])\n resultado = calcular(n1, n2, exp[posi_op1])\n epx2=str(resultado)+exp2[posi_op2:len(exp2)]\n operacao(epx2)\n\n\n#main\nexp= input(\"Digite uma expressão\")\noperacao(exp)\n\n\n\n" }, { "alpha_fraction": 0.7903226017951965, "alphanum_fraction": 0.7903226017951965, "avg_line_length": 29.5, "blob_id": "75df5efb27400b6637a4aba56cd7f59f3be00631", "content_id": "7d9a2624722b83f0c90caf9f11070d2871476315", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 62, "license_type": "no_license", "max_line_length": 36, "num_lines": 2, "path": "/README.md", "repo_name": "ednamartins/Qualidade-de-software", "src_encoding": "UTF-8", "text": "# Qualidade-de-software\n# trabalhos de qualidade de software \n" } ]
2
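The calculator in the record above evaluates strictly left to right: buscar_op always returns the index of the first operator it finds, so operator precedence is ignored and ``1+2*3`` comes out as 9.0 rather than 7.0. A small non-interactive driver makes that visible; it is an illustrative sketch, not part of the repository, and assumes the functions from calculadora.py are in scope with the trailing ``input()`` call removed or guarded:

.. code-block:: python

    # Demonstrates the strict left-to-right evaluation of operacao():
    # each intermediate result is spliced back into the remaining
    # expression, so precedence rules are never applied.
    for expression in ["1+2*3", "10-4/2", "2*3+4"]:
        operacao(expression)  # prints 9.0, 3.0 and 10.0 (one line each)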
leearic/spider
https://github.com/leearic/spider
424617009cec02bd860986656bfd0ddf62c3309a
80580904c42226db8584859edcba7598cd41b471
c453ca76d1a2a617ff7ba65b0de46988e66f95a2
refs/heads/master
2023-04-05T10:40:50.525554
2023-03-30T08:56:48
2023-03-30T08:56:48
86,020,347
0
1
null
null
null
null
null
[ { "alpha_fraction": 0.5373232960700989, "alphanum_fraction": 0.5496907830238342, "avg_line_length": 41.32710266113281, "blob_id": "aeebd4b402d23e3f31dd593376eafe4aba98c0be", "content_id": "194c900f2727291a671027762291f853c20d15bf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4616, "license_type": "no_license", "max_line_length": 121, "num_lines": 107, "path": "/qiubai_CrawlSpider/spiders/qiubai/spiders/qb2.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport scrapy\nfrom scrapy.linkextractors import LinkExtractor\nfrom scrapy.spiders import CrawlSpider, Rule\n\nfrom qiubai.items import User, QiuShiItem\n\nclass Qb2Spider(CrawlSpider):\n name = 'cq'\n allowed_domains = ['qiushibaike.com']\n start_urls = ['http://www.qiushibaike.com/']\n\n rules = (\n Rule(LinkExtractor(allow=r'article/'), callback='get_per_xiaohua', follow=True),\n Rule(LinkExtractor(allow=r'users/'), callback='Get_User_Info', follow=True),\n\n )\n\n def get_per_xiaohua(self, response):\n # 获取用户信息\n user = response.xpath('//div[@class=\"article block untagged noline mb15\"]/div/a[2]/h2/text()').extract()\n # 获取笑话信息\n content = response.xpath('//div[@class=\"content\"]/text()').extract()\n # 获取图片\n thumb = response.xpath('//div[@class=\"thumb\"]/img/@src').extract()\n # 获取Video\n video = response.xpath('//div[@class=\"video_holder\"]/video/source/@src').extract()\n video_image = response.xpath('//div[@class=\"video_holder\"]/video/@poster').extract()\n\n if len(thumb) != 0:\n type = ['1']\n if len(video_image) != 0:\n type = ['2']\n\n if len(thumb) == 0 and len(video_image) == 0:\n type = ['0']\n thumb = ['None']\n video = ['None']\n video_image = ['None']\n if len(video_image) == 0:\n video = ['None']\n video_image = ['None']\n\n smiling = response.xpath('//div[@class=\"stats\"]/span[@class=\"stats-vote\"]/i/text()').extract()\n comment_count = response.xpath('//div[@class=\"stats\"]/span[@class=\"stats-comments\"]/i/text()').extract()\n\n # 采集评论信息, 通过评论信息,采集用户信息\n cuser = response.xpath('//div[@class=\"comments-wrap\"]/div/div/div/div[@class=\"replay\"]/a/text()').extract()\n #urls = response.xpath('//div[@class=\"comments-wrap\"]/div/div/div/div[@class=\"replay\"]/a/@href').extract()\n ccomment = response.xpath('//div[@class=\"comments-wrap\"]/div/div/div/div[@class=\"replay\"]/span/text()').extract()\n\n try:\n qiushiItems = QiuShiItem()\n qiushiItems[\"page_url\"] = response.url\n qiushiItems[\"user\"] = user[0]\n qiushiItems[\"content\"] = content[0]\n qiushiItems[\"type\"] = type[0]\n if type[0] == '1':\n qiushiItems[\"url\"] = thumb[0]\n qiushiItems[\"url0\"] = 'None'\n if type[0] == '2':\n qiushiItems[\"url\"] = video_image[0]\n qiushiItems[\"url0\"] = video\n if type[0] == '0':\n qiushiItems[\"url\"] = 'None'\n qiushiItems[\"url0\"] = 'None'\n qiushiItems[\"smiling\"] = smiling[0]\n qiushiItems[\"comment_count\"] = comment_count[0]\n\n qiushiItems[\"cuser\"] = cuser\n qiushiItems[\"comment\"] = ccomment\n\n yield qiushiItems\n except Exception, e:\n pass\n\n\n def Get_User_Info(self, response):\n user = response.xpath('//div[@class=\"user-header-cover\"]/h2/text()').extract()\n fans = response.xpath('//div[@class=\"user-statis user-block\"][1]/ul/li[1]/text()').extract()\n follow = response.xpath('//div[@class=\"user-statis user-block\"][1]/ul/li[2]/text()').extract()\n comment = response.xpath('//div[@class=\"user-statis user-block\"][1]/ul/li[3]/text()').extract()\n marriage = 
response.xpath('//div[@class=\"user-statis user-block\"][2]/ul/li[1]/text()').extract()\n constellation = response.xpath('//div[@class=\"user-statis user-block\"][2]/ul/li[2]/text()').extract()\n occupation = response.xpath('//div[@class=\"user-statis user-block\"][2]/ul/li[3]/text()').extract()\n age = response.xpath('//div[@class=\"user-statis user-block\"][2]/ul/li[5]/text()').extract()\n\n if len(marriage) == 0:\n marriage = ['Security']\n if len(occupation) == 0:\n occupation = ['Security']\n if len(constellation) == 0:\n constellation = ['Security']\n try:\n useritem = User()\n useritem[\"name\"] = user[0]\n useritem[\"fans\"] = fans[0]\n useritem[\"follow\"] = follow[0]\n useritem[\"comment\"] = comment[0]\n useritem[\"marriage\"] = marriage[0]\n useritem[\"constellation\"] = constellation[0]\n useritem[\"occupation\"] = occupation[0]\n useritem[\"age\"] = age[0]\n # print \"-----已经将数据放到Item啦------\"\n yield useritem\n except Exception, e:\n pass" }, { "alpha_fraction": 0.6247838735580444, "alphanum_fraction": 0.6363112330436707, "avg_line_length": 32.36538314819336, "blob_id": "3a54d1a3adc41841144682cb553290818074a33b", "content_id": "9105c533b3891eeac772b5567a908e4c612d9bae", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1871, "license_type": "no_license", "max_line_length": 88, "num_lines": 52, "path": "/szsj/sp_sz/szsti/spiders/hx-fhrz.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport scrapy\nfrom selenium import webdriver\n# from selenium.webdriver.common.keys import Keys\n# from selenium.webdriver import DesiredCapabilities\nfrom scrapy.selector import Selector\nimport time\nfrom szsti.hexunutils.Save_2_DB import Hexun\nfrom szsti.hexunutils.PageAnalysis import hexun_Analysis\n\n\n\nclass SzSpider(scrapy.Spider):\n name = \"hx_fhrz\"\n # allowed_domains = [\"hexun--delete.com\"]\n # start_urls = ['http://stockdata.stock.hexun.com/gszl/fhrz-000001.shtml']\n\n allowed_domains = [\"hexun.com\"]\n start_urls = ['http://www.hexun.com/']\n\n\n # 初始化,这里要调用无头浏览器\n def __init__(self):\n self.driver = webdriver.Chrome()\n # id,如果中间爬取失败,可以设置继续爬取值重新爬取数据,默认从0开始\n id = 0\n self.units = Hexun.Search_ID(id)\n self.Base_Request_Url = 'http://stockdata.stock.hexun.com/gszl/fhrz-'\n\n def parse(self, response):\n\n # 从数据中拿取数据,组装 URL\n for unit in self.units:\n self.Request_url = self.Base_Request_Url + str(unit.Code) + '.shtml'\n\n print u\"分红融资 search id: \" + unit.Code\n print \"*\" * 30\n\n time.sleep(3)\n # 调用模拟器取访问组装好的URL\n self.driver.get(self.Request_url)\n # self.driver.get('http://stockdata.stock.hexun.com/gszl/fhrz-000402.shtml')\n time.sleep(3)\n\n self.driver.current_window_handle\n aa = self.driver.page_source\n response = Selector(text=aa)\n\n hexun_Analysis.Fenhong_Info(response, str(unit.Code))\n hexun_Analysis.Fenhong_Zhuanzeng_Info(response, str(unit.Code))\n hexun_Analysis.Fenhong_Peigu_Info(response, str(unit.Code))\n hexun_Analysis.Fenhong_Huigou_Info(response, str(unit.Code))\n" }, { "alpha_fraction": 0.6907514333724976, "alphanum_fraction": 0.7225433588027954, "avg_line_length": 37.55555725097656, "blob_id": "0ac56b3c56e8e5bc245fba253754f3aaee2b3994", "content_id": "07256d6d3baedb1d9956db230abf43def6e70a66", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 366, "license_type": "no_license", "max_line_length": 77, "num_lines": 9, "path": "/irole/cnirole/models.py", "repo_name": "leearic/spider", 
"src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom django.db import models\n# Create your models here.\n\n\nclass cosplay8dotcom(models.Model):\n base_html_url = models.CharField(max_length=255, verbose_name=u\"网页地址\")\n base_image_url = models.CharField(max_length=255, verbose_name=u\"图片地址\")\n base_image_content = models.CharField(max_length=255, verbose_name=u\"标题\")" }, { "alpha_fraction": 0.636629581451416, "alphanum_fraction": 0.6424809694290161, "avg_line_length": 33.20000076293945, "blob_id": "460f991d9b57265caf872348ded383a24f8721e5", "content_id": "2f356e5b345ec45e103b9a27df9a349f724f3894", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1729, "license_type": "no_license", "max_line_length": 111, "num_lines": 50, "path": "/baixihecom/bossproxy/utils/Boss_Proxy.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n#coding:utf-8\n\"\"\"\n Author: 'Aric'\n Purpose: u'Bossproxy的方法和函数'\n Created: '2015/7/13'\n\"\"\"\nfrom django.http import HttpResponse\nimport hashlib\nimport baixihecom.settings as sts\nimport httplib,urllib\nimport xml.etree.ElementTree as ET\n\nclass DO_proxy(object):\n @staticmethod\n def CheckSignature(request):\n token = sts.TOKEN\n signature = request.GET.get(\"signature\", '')\n timestamp = request.GET.get(\"timestamp\", '')\n nonce = request.GET.get(\"nonce\", '')\n echoStr = request.GET.get(\"echostr\", '')\n tmp_str = hashlib.sha1(''.join(sorted([token, timestamp, nonce]))).hexdigest()\n if tmp_str == signature:\n return HttpResponse(echoStr)\n else:\n return HttpResponse((u\"禁止访问\".encode('gbk')))\n\n @staticmethod\n def Msg_Gateway(request):\n url = sts.MONSTER_URL\n host = sts.MONSTER_HOST\n fromUserName, toUserName, msgType, content = _Get_xml_data(request)\n content.encode('gbk')\n url = url+\"?FromUser=\"+ fromUserName +\"&toUserName=\"+toUserName+\"&msgType=\"+msgType+\"&content=\"+content\n conn = httplib.HTTPConnection(host)\n conn.request(method='GET', url=url)\n\n res = conn.getresponse()\n reply = res.read()\n return HttpResponse(reply)\ndef _Get_xml_data(req):\n xml_str = req.body\n myxml = ET.fromstring(xml_str)\n content = myxml.find('Content').text\n content = content.encode('utf-8')\n msgType = myxml.find('MsgType').text\n fromUserName = myxml.find('FromUserName').text\n toUserName = myxml.find('ToUserName').text\n createTime = myxml.find('CreateTime').text\n return fromUserName, toUserName, msgType, content" }, { "alpha_fraction": 0.5325714349746704, "alphanum_fraction": 0.5748571157455444, "avg_line_length": 16.612245559692383, "blob_id": "5065c081cfc36cb412ad4898830276db812d164d", "content_id": "890aa08cb9096c6971133cb146d28b09db907dc6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 875, "license_type": "no_license", "max_line_length": 81, "num_lines": 49, "path": "/szsj/sp_sz/t.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n#! 
/usr/bin/env python\n\n\n\n\nfrom selenium import webdriver\nfrom scrapy.selector import Selector\n\n\nimport time\n\n\n\n\n\ndriver = webdriver.Firefox()\ndriver.get(\"http://www.qichacha.com/firm_d38086e115661b46f68c31070231d0ab.shtml\")\n\naa = driver.page_source\n\nprint aa\n\ndriver.close()\n\n\n\n# response = Selector(text=aa)\n# #\n#\n#\n# tbody = response.xpath('//*[@id=\"Changelist\"]/table/tbody/tr')\n#\n#\n# for i in tbody:\n# # print i.extract()\n# try:\n# print i.xpath('td[1]/text()').extract()[0]\n# print i.xpath('td[2]/text()').extract()[0].strip()\n# print i.xpath('td[3]/text()').extract()[0].strip()\n# print i.xpath('td[4]/div/text()').extract()[0].strip()\n# print i.xpath('td[5]/div/text()').extract()[0].strip()\n# except Exception:\n# print \"error\"\n# pass\n#\n# print \"*\" * 30\n#\n#\n\n\n\n\n\n\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.5714285969734192, "alphanum_fraction": 0.6502463221549988, "avg_line_length": 18.899999618530273, "blob_id": "c7c38e1d070b515d05489a2440f921478b61a271", "content_id": "6c60034bcababd4fffe5463e445f14a443e9e4b6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 377, "license_type": "no_license", "max_line_length": 65, "num_lines": 10, "path": "/cos17173/ReadMe.txt", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "系统:  Xubuntu 16.04 64bit\npython: 2.7\nscrapy: 1.3.2\n\n\n说明:   项目比较简单, 仅仅是爬取17173上的数据,然后把图片保存到本地.\n\n\n运行: 到项目目录  执行 scrapy list 会罗列出项目. 然后执行 scrapy crawl <项目名> 即可.\n 图片会保存到当前目录的 images 目录下.\n\n\n\n\n" }, { "alpha_fraction": 0.7091661095619202, "alphanum_fraction": 0.738111674785614, "avg_line_length": 26.923076629638672, "blob_id": "0531215ba16d909f467cdb9057d076379eb9295e", "content_id": "4139a2d2e49d545d3087c081051281b3c9deff08", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1451, "license_type": "no_license", "max_line_length": 80, "num_lines": 52, "path": "/irole/irolespider/irolespider/settings.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# Scrapy settings for cos8 project\n#\n# For simplicity, this file contains only the most important settings by\n# default. 
All the other settings are documented here:\n#\n# http://doc.scrapy.org/en/latest/topics/settings.html\n#\n\nBOT_NAME = 'irolespider'\n\nSPIDER_MODULES = ['irolespider.spiders']\nNEWSPIDER_MODULE = 'irolespider.spiders'\n\n# Crawl responsibly by identifying yourself (and your website) on the user-agent\n#USER_AGENT = 'cos8 (+http://www.yourdomain.com)'\nLOG_LEVEL = 'ERROR'\n\nbase_url = \"http://www.cosplay8.com/\"\npath = \"/home/aric/PycharmProjects/irole\"\n\n\nDOWNLOADER_MIDDLEWARES = {\n # 'scrapy.downloadermiddlewares.useragent.UserAgentMiddleware' : None,\n # 'irolespider.utils.user_agent.RotateUserAgentMiddleware' :400,\n\n 'irolespider.middleware.splash.SplashMiddleware': 725,\n\n # 'scrapy.downloadermiddlewares.httpproxy.HttpProxyMiddleware': 110,\n # 'irolespider.utils.proxy_midware.ProxyMiddleware': 100,\n}\n\nDEFAULT_ITEM_CLASS = 'irolespider.items.Cos8Item'\nIMAGES_MIN_HEIGHT = 50\nIMAGES_MIN_WIDTH = 50\nIMAGES_STORE = 'images'\nDOWNLOAD_TIMEOUT = 1200\nITEM_PIPELINES ={'irolespider.pipelines.Cos8Pipeline': 300, }\n\n\n\nSPLASH_ENABLED = True\nSPLASH_ENDPOINT = 'http://127.0.0.1:5000/render.html'\nSPLASH_WAIT = 2\nSPLASH_IMAGES = False\n#SPLASH_URL_PASS = (r'example\\.com',)\n#SPLASH_URL_BLOCK = (r'badexample\\.com',)\n\nCONCURRENT_REQUESTS_PER_DOMAIN = 5\nCONCURRENT_REQUESTS = 5\nRETRY_ENABLED = False" }, { "alpha_fraction": 0.6821191906929016, "alphanum_fraction": 0.695364236831665, "avg_line_length": 17.875, "blob_id": "e90f562470b9cceebea260f231420616a91afe3d", "content_id": "33d67cf8ff2321998d9b7a01fa2a9796bee2e852", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 151, "license_type": "no_license", "max_line_length": 39, "num_lines": 8, "path": "/szsj/dj_sz/hexun/apps.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.apps import AppConfig\n\n\nclass Hexun2Config(AppConfig):\n name = 'hexun'\n" }, { "alpha_fraction": 0.593279242515564, "alphanum_fraction": 0.6071842312812805, "avg_line_length": 24.382352828979492, "blob_id": "e6814a1c329608af33f05d229339f1d6547c3a69", "content_id": "959af59a784cd81c62a60a31510c8291504aabbb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 863, "license_type": "no_license", "max_line_length": 76, "num_lines": 34, "path": "/irole/irolespider/irolespider/utils/Spider_Django.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "__author__ = 'aric'\n\nimport sys,os\n\nfrom irolespider.settings import path\n\nif path not in sys.path:\n sys.path.append(path)\n\nimport django\nos.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"irole.settings\")\ndjango.setup()\n\n\n\nfrom cnirole.models import cosplay8dotcom\n\nclass django_sql(object):\n def save_Item(self, item):\n cosplay8 = cosplay8dotcom()\n cosplay8.base_html_url = item[\"html_base_url\"]\n cosplay8.base_image_url = item[\"img_base_url\"][0]\n cosplay8.base_image_content = item[\"img_content\"][0].encode(\"utf-8\")\n cosplay8.save()\n\n def is_crawled(self, html_url):\n try:\n aa = cosplay8dotcom.objects.filter(base_html_url=html_url)\n if len(aa) != 0:\n return False\n else:\n return True;\n except Exception, e:\n return True\n" }, { "alpha_fraction": 0.5121486186981201, "alphanum_fraction": 0.6083849668502808, "avg_line_length": 52.82051467895508, "blob_id": "e5c835be1394f6833ff241cff90aa2dd1f0ac7a5", "content_id": "dfab8556c368e10b3db356a968a758d17ac25f09", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2099, "license_type": "no_license", "max_line_length": 156, "num_lines": 39, "path": "/hr/frontend/hrweb/lagou/migrations/0001_initial.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11 on 2017-04-13 07:39\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='Companys_Info',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('searching_company', models.CharField(default='-', max_length=255, verbose_name='\\u9700\\u8981\\u67e5\\u8be2\\u7684\\u516c\\u53f8\\u540d\\u5b57')),\n ],\n ),\n migrations.CreateModel(\n name='Position_Info',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('searching_company', models.CharField(default='-', max_length=255, verbose_name='\\u9700\\u8981\\u67e5\\u8be2\\u7684\\u516c\\u53f8\\u540d\\u5b57')),\n ('searched_company', models.CharField(default='-', max_length=255, verbose_name='\\u67e5\\u8be2\\u5230\\u7684\\u516c\\u53f8\\u540d\\u5b57')),\n ('bumen', models.CharField(default='-', max_length=255, verbose_name='\\u90e8\\u95e8')),\n ('zhiwei', models.CharField(default='-', max_length=255, verbose_name='\\u804c\\u4f4d')),\n ('yaoqiu', models.CharField(default='-', max_length=255, verbose_name='\\u8981\\u6c42')),\n ('fabushijian', models.CharField(default='-', max_length=255, verbose_name='\\u53d1\\u5e03\\u65f6\\u95f4')),\n ('zhiweiyouhuo', models.CharField(default='-', max_length=255, verbose_name='\\u804c\\u4f4d\\u8bf1\\u60d1')),\n ('zhiweimiaoshu', models.CharField(default='-', max_length=255, verbose_name='\\u804c\\u4f4d\\u63cf\\u8ff0')),\n ('gongzuodidian', models.CharField(default='-', max_length=255, verbose_name='\\u5de5\\u4f5c\\u5730\\u70b9')),\n ('fabuzhe', models.CharField(default='-', max_length=255, verbose_name='\\u53d1\\u5e03\\u8005')),\n ],\n ),\n ]\n" }, { "alpha_fraction": 0.6416040062904358, "alphanum_fraction": 0.6691729426383972, "avg_line_length": 47.39393997192383, "blob_id": "276ad70653e82a461c25a65c2d8c04dbb0e66580", "content_id": "8d22a26c3c5b8d0b48132fabecc9d861d9318e61", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1728, "license_type": "no_license", "max_line_length": 98, "num_lines": 33, "path": "/qiubai_CrawlSpider/frontend/qiubaifrontend/spiders/models.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models\n\n# Create your models here.\nclass QiuShi(models.Model):\n page_url = models.CharField(max_length=255, verbose_name=u\"网站地址\")\n user = models.CharField(max_length=255, verbose_name=u\"用户昵称\")\n contents = models.CharField(max_length=255, verbose_name=u\"文字内容\")\n type = models.CharField(max_length=255, verbose_name=u\"类型\")\n url = models.CharField(blank=True, max_length=255, verbose_name=u\"图片内容\")\n url0 = models.CharField(blank=True, default=None, max_length=255, verbose_name=u\"视频内容\")\n smiling = models.IntegerField(verbose_name=u\"笑脸数量\")\n comment_count = models.IntegerField(verbose_name=u\"评论数量\")\n\n\nclass User(models.Model):\n name = models.CharField(max_length=255, 
verbose_name=u\"用户昵称\")\n fans = models.IntegerField(verbose_name=u\"粉丝数量\")\n follow = models.IntegerField(verbose_name=u\"关注数量\")\n comment = models.IntegerField(verbose_name=u\"评论数量\")\n\n marriage = models.CharField(max_length=255, verbose_name=u\"婚姻状态\")\n occupation = models.CharField(max_length=255, verbose_name=u\"职业\")\n constellation = models.CharField(max_length=255, verbose_name=u\"星座\")\n age = models.CharField(max_length=255, verbose_name=u\"糗龄\")\n\n\nclass Comment(models.Model):\n user = models.CharField(max_length=255, verbose_name=u\"用户昵称\")\n qiushiURL = models.CharField(max_length=255, verbose_name=u\"糗事URL\")\n comment = models.CharField(max_length=255, verbose_name=u\"评论内容\")" }, { "alpha_fraction": 0.5599173307418823, "alphanum_fraction": 0.5743801593780518, "avg_line_length": 47.43333435058594, "blob_id": "7f02eb320b672fd3d875aa3c86b1d291d3c9c84f", "content_id": "ad85c1a9543498e55e766875f8354fdce5bdf7d7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1452, "license_type": "no_license", "max_line_length": 118, "num_lines": 30, "path": "/hr/spider/hr/utils/lagou/PageAnalysis.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "from hr.utils.lagou.Save_2_DB import lagou_DB\n\nclass lagou_Analysis(object):\n\n @classmethod\n def Analysis_Position_Info(self, response, searching_company):\n\n self.item = {}\n try:\n self.item[\"searching_company\"] = [searching_company]\n\n self.item[\"searched_company\"] = response.xpath('//*[@id=\"job_company\"]/dt/a/div/h2/text()').extract()\n self.item[\"bumen\"] = response.xpath('/html/body/div[3]/div/div[1]/div/div[1]/text()').extract()\n self.item[\"zhiwei\"] =response.xpath(\"/html/body/div[3]/div/div[1]/div/span/text()\").extract()\n\n self.item[\"yaoqiu\"] = response.xpath('/html/body/div[3]/div/div[1]/dd/p[1]').extract()\n self.item[\"fabushijian\"] = response.xpath('/html/body/div[3]/div/div[1]/dd/p[2]/text()').extract()\n self.item[\"zhiweiyouhuo\"] = response.xpath('//*[@id=\"job_detail\"]/dd[1]/p/text()').extract()\n self.item[\"zhiweimiaoshu\"] = response.xpath('//*[@id=\"job_detail\"]/dd[2]/div').extract()\n self.item[\"gongzuodidian\"] = response.xpath('//*[@id=\"job_detail\"]/dd[3]/div[1]').extract()\n\n self.item[\"fabuzhe\"] = response.xpath('//*[@id=\"job_detail\"]/dd[4]/div/div[1]/a/span[1]/text()').extract()\n\n for i in self.item:\n self.item[i] = self.item[i][0].strip()\n\n lagou_DB.Save_Position_Info(self.item)\n print \"Save Position Info ok....\"\n except:\n pass" }, { "alpha_fraction": 0.5388026833534241, "alphanum_fraction": 0.5461936593055725, "avg_line_length": 26.59183692932129, "blob_id": "03ddebb1e4618106172ceb07fa656f3dbec9c9cc", "content_id": "07c94c9f0b26da004351fe0c5a1f0d08133aad6e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1357, "license_type": "no_license", "max_line_length": 94, "num_lines": 49, "path": "/mikufan/spider/cosplay/cosplay/utils/DjangoORM.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n__author__ = 'aric'\nimport sys, os\nimport hashlib\n\nfrom cosplay.settings import DJpath\n\n\nif DJpath not in sys.path:\n sys.path.append(DJpath)\n\nimport django\nos.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"mikufan.settings\")\ndjango.setup()\n\n\nfrom webapps.models import Coser, Images, Category\nclass CoserORM(object):\n def Save_Coser_Info(self, item):\n DJcoser = Coser()\n cate = Category()\n try:\n 
DJcoser.title = item[\"title\"]\n DJcoser.content = item[\"content\"]\n DJcoser.come_from = item[\"url\"]\n DJcoser.topimage = item[\"image\"][0]\n\n # category = models.ForeignKey(DJcoser_Category, related_name=u\"DJcoser_Category\")\n DJcoser.save()\n\n dir_guid = hashlib.sha1(item[\"url\"].encode('utf8')).hexdigest()\n\n for image in item[\"image\"]:\n image_guid = hashlib.sha1(image.encode('utf8')).hexdigest()\n i = Images()\n i.real_url = image\n i.relate_url = '/images/%s/%s.jpg' % (dir_guid, image_guid)\n i.coser = DJcoser\n i.save()\n\n cate.category = '游戏'\n cate.coser = DJcoser\n cate.save()\n\n\n print(\"ok\" * 10)\n except Exception as e:\n print(\"=error=l\"*15)\n print(e)\n\n" }, { "alpha_fraction": 0.676616907119751, "alphanum_fraction": 0.7002487778663635, "avg_line_length": 41.3684196472168, "blob_id": "433cebf8e139eafc7c78b17aa8f6c18904295db8", "content_id": "bc277db453f8f44a2624657b32c4ae7b6bcf5440", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 878, "license_type": "no_license", "max_line_length": 84, "num_lines": 19, "path": "/myqiubai/frontend/qiubai/models.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom django.db import models\n\n# Create your models here.\n\n\nclass QiuShi(models.Model):\n user_image = models.CharField(max_length=255, verbose_name=u\"用户头像\")\n user_name = models.CharField(max_length=255, verbose_name=u\"用户昵称\")\n content = models.CharField(max_length=255, verbose_name=u\"文字内容\")\n thumb = models.CharField(max_length=255, verbose_name=u\"图片内容\")\n video_image = models.CharField(blank=True, max_length=255, verbose_name=u\"视频图片\")\n video = models.CharField(blank=True, max_length=255, verbose_name=u\"视频内容\")\n laugh = models.IntegerField(verbose_name=u\"笑脸数量\")\n coments = models.IntegerField(verbose_name=u\"评论数量\")\n played = models.IntegerField(verbose_name=u\"视频播放量\")\n\n# def __unicode__(self):\n# return u'%s %s '(self.user_name, self.content)" }, { "alpha_fraction": 0.6009988784790039, "alphanum_fraction": 0.6048834919929504, "avg_line_length": 27.619047164916992, "blob_id": "6e16ab11f9c964faa151faea4485b8dc46cabe03", "content_id": "b9e584ca956ba3d85cd450f4ca3f1c1d34465da5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1826, "license_type": "no_license", "max_line_length": 67, "num_lines": 63, "path": "/hr/spider/hr/utils/lagou/Save_2_DB.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport sys, os\nfrom hr.settings import djpath\nif djpath not in sys.path:\n sys.path.append(djpath)\nimport django\nos.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"hrweb.settings\")\ndjango.setup()\n\nfrom lagou.models import Companys_Info, Position_Info, Position_URL\n\n\nclass lagou_DB(object):\n\n @classmethod\n def Search_ID(self, id):\n if id == 0:\n aa = Companys_Info.objects.all()\n # print \"ID == 0\"\n else:\n aa = Companys_Info.objects.filter(id__gte=id)\n print \"ID != 0\"\n return aa\n\n @classmethod\n def Search_Position_URL(self, id):\n if id == 0:\n aa = Position_URL.objects.all()\n # print \"ID == 0\"\n else:\n aa = Position_URL.objects.filter(id__gte=id)\n print \"ID != 0\"\n return aa\n\n\n\n\n @classmethod\n def Save_Position_Info(self, item):\n position_Info = Position_Info()\n # lagou 网站招聘信息\n position_Info.searching_company = item[\"searching_company\"]\n position_Info.searched_company = item[\"searched_company\"]\n 
position_Info.bumen = item[\"bumen\"]\n position_Info.zhiwei = item[\"zhiwei\"]\n\n position_Info.yaoqiu = item[\"yaoqiu\"]\n position_Info.fabushijian = item[\"fabushijian\"]\n position_Info.zhiweiyouhuo = item[\"zhiweiyouhuo\"]\n position_Info.zhiweimiaoshu = item[\"zhiweimiaoshu\"]\n position_Info.gongzuodidian = item[\"gongzuodidian\"]\n position_Info.fabuzhe = item[\"fabuzhe\"]\n\n position_Info.save()\n\n @classmethod\n def Save_Position_URL(self, item):\n position_URL = Position_URL()\n # lagou 网站招聘信息\n position_URL.searching_company = item[\"searching_company\"]\n position_URL.url = item[\"url\"]\n\n position_URL.save()" }, { "alpha_fraction": 0.6579612493515015, "alphanum_fraction": 0.6672282814979553, "avg_line_length": 25.399999618530273, "blob_id": "c4a3db6c811b80a08c56052aa1d32c60068cf66d", "content_id": "fe74602f777c7f38163e2e2fe23886118f9f9e7a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1187, "license_type": "no_license", "max_line_length": 105, "num_lines": 45, "path": "/mikufan/frontend/webapps/views.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "from django.shortcuts import render_to_response\n\n# Create your views here.\nfrom .models import Coser, Images\nfrom django.db.models import Q\nfrom django.core.paginator import Paginator, EmptyPage,PageNotAnInteger\n\n\n\n\n\ndef index(request, pages=1):\n cosers = Coser.objects.order_by('-id')\n paginator = Paginator(cosers, 8)\n page = pages\n try:\n coser = paginator.page(page)\n except PageNotAnInteger:\n coser = paginator.page(1)\n except EmptyPage:\n coser = paginator.page(paginator.num_pages)\n\n\n\n topcoser = Coser.objects.filter(istop = 1)\n\n return render_to_response('webapps/index.html', {\"coser\": coser, 'TopCoser': topcoser})\n\n\ndef detail(request, id):\n image = Images.objects.filter(coser_id=id)\n mycoser = Coser.objects.get(id=id)\n\n\n cosers = Coser.objects.order_by('-id')[:18]\n # paginator = Paginator(cosers, 18)\n # # page = 1\n # try:\n # coser = paginator.page(1)\n # except PageNotAnInteger:\n # coser = paginator.page(1)\n # except EmptyPage:\n # coser = paginator.page(paginator.num_pages)\n\n return render_to_response('webapps/detail.html', {\"coser\": cosers, \"images\": image, \"Coser\":mycoser})" }, { "alpha_fraction": 0.439119815826416, "alphanum_fraction": 0.4628361761569977, "avg_line_length": 42.9892463684082, "blob_id": "86888996263b4e94e644092175e83423d1583f79", "content_id": "28a8542d161b699281f40d3ba87be47e9cfb8f19", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4130, "license_type": "no_license", "max_line_length": 164, "num_lines": 93, "path": "/szsj/sp_sz/szsti/spiders/sz.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport scrapy\nfrom selenium import webdriver\nfrom selenium.webdriver import DesiredCapabilities\nfrom scrapy.selector import Selector\nimport time\nfrom szsti.utils.Save2DB import DjORM\n\nclass SzSpider(scrapy.Spider):\n name = \"unit\"\n allowed_domains = [\"szsti.gov.cn\"]\n start_urls = ['http://www.szsti.gov.cn/services/hightech/']\n\n # #\n # allowed_domains = [\"*\"]\n # start_urls = ['http://172.16.201.249']\n\n # 初始化,这里要调用无头浏览器\n def __init__(self):\n\n # capabilities = DesiredCapabilities.PHANTOMJS.copy()\n # capabilities['phantomjs.page.settings.userAgent'] = \"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:51.0) Gecko/20100101 Firefox/51.0\"\n # capabilities = 
DesiredCapabilities.FIREFOX.copy()\n # # capabilities['general.useragent.override'] = user_agent_string\n self.driver = webdriver.Remote(command_executor='http://172.16.201.250:8080/wd/hub',\n desired_capabilities = {\n 'browserName': \"Firefox\",\n 'takeScreenshot': False,\n \"version\": \"5.1\",\n \"platform\": \"Linux\",\n \"javascriptEnabled\": True,\n # \"marionette\": False,\n\n 'phantomjs.page.settings.userAgent': \"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:51.0) Gecko/20100101 Firefox/51.0\",\n },\n )\n\n def parse(self, response):\n print response.url\n # pass\n self.driver.get(response.url)\n time.sleep(2)\n item = {}\n index = 371\n self.driver.find_element_by_id(\"PagerControl_input\").clear()\n self.driver.find_element_by_id(\"PagerControl_input\").send_keys('371')\n self.driver.find_element_by_id(\"PagerControl_btn\").click()\n self.driver.current_window_handle\n\n try:\n while(self.driver.find_element_by_link_text(u\"下一页\")):\n\n self.driver.current_window_handle\n aa = self.driver.page_source\n response = Selector(text=aa)\n tbody = response.xpath('//table[@id=\"data_list_container\"]/tbody/tr')\n\n\n\n for body in tbody:\n try:\n number = body.xpath('td[1]/text()').extract()[0].strip()\n KeyID = body.xpath('td[2]/text()').extract()[0].strip()\n Unit_name = body.xpath('td[3]/text()').extract()[0].strip()\n address = body.xpath('td[4]/text()').extract()[0].strip()\n Subordinate_Domain = body.xpath('td[5]/text()').extract()[0].strip()\n type = body.xpath('td[6]/text()').extract()[0].strip()\n item[\"number\"] = number\n item[\"KeyID\"] = KeyID\n item[\"Unit_name\"] = Unit_name\n item[\"address\"] = address\n item[\"Subordinate_Domain\"] = Subordinate_Domain\n item[\"type\"] = type\n print item[\"Unit_name\"]\n # DjORM.save(item)\n print number + \" Saved ..... \"\n except Exception:\n print \"tbody Error. ignor .... \"\n pass\n # print \" error \"\n self.driver.find_element_by_link_text(u\"下一页\").click()\n print \"clicked next page .... \"\n index = index + 1\n print \"current page: \" + str(index)\n time.sleep(15)\n self.driver.close()\n except Exception as e:\n print \"do while error .... 
\" + str(e)\n pass\n\n\n time.sleep(2)\n self.driver.quit()" }, { "alpha_fraction": 0.6669838428497314, "alphanum_fraction": 0.6669838428497314, "avg_line_length": 20.46938705444336, "blob_id": "4784b76e09bf9d86876f0f02cce572bc3e47ceee", "content_id": "2ecae67735cd72049bf765c734a7745aaedbca32", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1051, "license_type": "no_license", "max_line_length": 84, "num_lines": 49, "path": "/mikufan/frontend/webapps/admin.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "from django.contrib import admin\n\n# Register your models here.\n\nfrom .models import Coser, Category, Images, Ads\n\n\n\n\n\nclass ImagesInline(admin.TabularInline):\n model = Images\n fk_name = 'coser'\n\nclass CategoryInline(admin.TabularInline):\n model = Category\n fk_name = 'coser'\n\n# class CategoryAdmin(admin.ModelAdmin):\n# pass\n\nclass CoserAdmin(admin.ModelAdmin):\n list_display = ('title', 'istop')\n search_fields = ('title', 'istop')\n list_filter = ('istop',)\n\n readonly_fields = ('topimage_tag', 'addtime', 'come_from', 'topimage')\n exclude = ('topimage',)\n inlines = [ImagesInline, CategoryInline, ]\n\n\n\n#\n# class ImagesAdmin(admin.ModelAdmin):\n# list_display = ('id', 'real_url')\n# readonly_fields = ('relate_url_tag', 'real_url_tag', 'real_url', 'relate_url')\n# exclude = ('relate_url_tag', 'real_url_tag',)\n# # inlines = [CoserInline, ]\n\n\nadmin.site.register(Coser, CoserAdmin)\n# admin.site.register(Category, CategoryAdmin)\n\n\n\n\n\n# admin.site.register(Images, ImagesAdmin)\nadmin.site.register(Ads)" }, { "alpha_fraction": 0.5190712809562683, "alphanum_fraction": 0.583747923374176, "avg_line_length": 24.125, "blob_id": "50296f70930b529a1c09775420c3e1d1b0430985", "content_id": "5b4ed07496c926fe917954ba00b8a63013a06bdb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 603, "license_type": "no_license", "max_line_length": 97, "num_lines": 24, "path": "/qiubai_CrawlSpider/frontend/qiubaifrontend/spiders/migrations/0004_auto_20160520_0912.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.9.6 on 2016-05-20 09:12\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('spiders', '0003_auto_20160520_0903'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='comment',\n name='qiushi',\n ),\n migrations.AddField(\n model_name='comment',\n name='qiushiURL',\n field=models.CharField(default=None, max_length=255, verbose_name='\\u7cd7\\u4e8bURL'),\n ),\n ]\n" }, { "alpha_fraction": 0.5640000104904175, "alphanum_fraction": 0.6079999804496765, "avg_line_length": 15.533333778381348, "blob_id": "9ca0619e892344727bf713d6dc623fc985a19ad0", "content_id": "5198f494a48c73ea9616bdddea06c3df27d2e768", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 454, "license_type": "no_license", "max_line_length": 39, "num_lines": 15, "path": "/hr/ReadMe.txt", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "项目简介:\n 爬取 lagou 51job 等等招聘网站的招聘信息.\n\n项目原理:\n 1. 我们本来就有一些公司的信息.\n 2. 从数据库中取出公司名称,然后在招聘网站上去爬起这些公司的招聘信息\n 3. 
将爬取到的招聘信息保存到数据库中\n\n\n服务器相关:\n 操作系统: Ubuntu 16.04 64bit\n 爬虫 : Scrapy\n 浏览器 : Chrome\n 数据库 : Mysql\n CRM : Django\n\n\n" }, { "alpha_fraction": 0.6977611780166626, "alphanum_fraction": 0.7014925479888916, "avg_line_length": 28.88888931274414, "blob_id": "b3b1a01f30c913347d6892660e6863778946247f", "content_id": "0c97bb11f90d52c60fb39512451f02cae24ad1ba", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 268, "license_type": "no_license", "max_line_length": 51, "num_lines": 9, "path": "/myqiubai/frontend/qiubai/admin.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom django.contrib import admin\nfrom qiubai.models import QiuShi\n\nclass AuthorAdmin(admin.ModelAdmin):\n list_display = ('user_name', 'laugh', 'played')\n search_fields = ('user_name', 'content')\n\nadmin.site.register(QiuShi, AuthorAdmin)" }, { "alpha_fraction": 0.5045118927955627, "alphanum_fraction": 0.6021329164505005, "avg_line_length": 44.14814758300781, "blob_id": "daaf683232f09e95b083c6fd2452817c92ac6d48", "content_id": "8aea9d0151090f2ad849d8fc5e61782482bfb275", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1219, "license_type": "no_license", "max_line_length": 114, "num_lines": 27, "path": "/myqiubai/frontend/qiubai/migrations/0001_initial.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models, migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='QiuShi',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('user_image', models.CharField(max_length=255, verbose_name='\\u7528\\u6237\\u5934\\u50cf')),\n ('user_name', models.CharField(max_length=255, verbose_name='\\u7528\\u6237\\u6635\\u79f0')),\n ('content', models.CharField(max_length=255, verbose_name='\\u6587\\u5b57\\u5185\\u5bb9')),\n ('thumb', models.CharField(max_length=255, verbose_name='\\u56fe\\u7247\\u5185\\u5bb9')),\n ('video', models.CharField(max_length=255, verbose_name='\\u89c6\\u9891\\u5185\\u5bb9')),\n ('laugh', models.IntegerField(verbose_name='\\u7b11\\u8138\\u6570\\u91cf')),\n ('coments', models.IntegerField(verbose_name='\\u8bc4\\u8bba\\u6570\\u91cf')),\n ('played', models.IntegerField(verbose_name='\\u89c6\\u9891\\u64ad\\u653e\\u91cf')),\n ],\n ),\n ]\n" }, { "alpha_fraction": 0.6668097376823425, "alphanum_fraction": 0.6672389507293701, "avg_line_length": 40.58928680419922, "blob_id": "fa250188ad0941d7b829da7c9380ffd93f59f5b7", "content_id": "ac6af0bafc47b83e8606262e569d03ccf1869238", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9475, "license_type": "no_license", "max_line_length": 190, "num_lines": 224, "path": "/szsj/sp_sz/szsti/hexunutils/Save_2_DB.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport sys, os\nfrom szsti.settings import djpath\nif djpath not in sys.path:\n sys.path.append(djpath)\nimport django\nos.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"dj_sz.settings\")\ndjango.setup()\n\nfrom hexun.models import Companys, base_Info, Gaoguan_Info, Dongshihui_Info, Jianshihui_Info, Fenhong_Info, Fenhong_Zhuanzeng_Info, Fenhong_Peigu_Info, Fenhong_Huigou_Info, Showrufenbu_Info\n\n\nclass Hexun(object):\n\n 
@classmethod\n def Save_Base_Info(self, item):\n base_info = base_Info()\n # 公司简介-基本信息\n base_info.company_code = item[\"company_code\"]\n base_info.name = item[\"name\"]\n base_info.daima = item[\"daima\"]\n base_info.quancheng = item[\"quancheng\"]\n base_info.englishname = item[\"Englishname\"]\n base_info.cengyongming = item[\"cengyongming\"]\n base_info.chengliriqi = item[\"chengliriqi\"]\n base_info.suoshuhangye = item[\"suoshuhangye\"]\n base_info.suoshugannian = item[\"suoshugannian\"]\n base_info.suoshudiyu = item[\"suoshudiyu\"]\n base_info.fadingdabiaoren = item[\"fadingdabiaoren\"]\n base_info.dulidongshi = item[\"dulidongshi\"]\n base_info.zixunfuwujigou = item[\"zixunfuwujigou\"]\n base_info.kuaijishiwusuo = item[\"kuaijishiwusuo\"]\n base_info.zhengquanshifudaibiao = item[\"zhengquanshifudaibiao\"]\n # 公司简介-证券信息\n base_info.faxingriqi = item[\"faxingriqi\"]\n base_info.shangshiriqi = item[\"shangshiriqi\"]\n base_info.shangshijiaoyisuo = item[\"shangshijiaoyisuo\"]\n base_info.zhengquanleixing = item[\"zhengquanleixing\"]\n base_info.liutongguben = item[\"liutongguben\"]\n base_info.zongguben = item[\"zongguben\"]\n base_info.zhuchengxiaoshang = item[\"zhuchengxiaoshang\"]\n base_info.faxingjia = item[\"faxingjia\"]\n base_info.shangshisourikaipanjia = item[\"shangshisourikaipanjia\"]\n base_info.shangshishourizhangdiefu = item[\"shangshishourizhangdiefu\"]\n base_info.shangshishourihuanshoulv = item[\"shangshishourihuanshoulv\"]\n base_info.tebiechulihetuishi = item[\"tebiechulihetuishi\"]\n base_info.faxingshiyinglv = item[\"faxingshiyinglv\"]\n base_info.zuixinshiyinglv = item[\"zuixinshiyinglv\"]\n\n # 公司简介-工商信息\n base_info.zhuceziben = item[\"zhuceziben\"]\n base_info.zhucedizhi = item[\"zhucedizhi\"]\n base_info.suodeisuilv = item[\"suodeisuilv\"]\n base_info.bangongdizhi = item[\"bangongdizhi\"]\n base_info.zhuyaochanpin = item[\"zhuyaochanpin\"]\n\n # 公司简介-联系方式\n base_info.lianxidianhua = item[\"lianxidianhua\"]\n base_info.gongsichuanzhen = item[\"gongsichuanzhen\"]\n base_info.dianziyouxiang = item[\"dianziyouxiang\"]\n base_info.gongsiwangzhi = item[\"gongsiwangzhi\"]\n base_info.lianxiren = item[\"lianxiren\"]\n base_info.youzhengbianma = item[\"youzhengbianma\"]\n\n base_info.jingyingfanwei = item[\"jingyingfanwei\"]\n\n base_info.gongsijianjie = item[\"gongsijianjie\"]\n\n base_info.save()\n\n @classmethod\n def Save_Gaoguan_Info(self, item):\n gaoguan_Info = Gaoguan_Info()\n\n gaoguan_Info.company_code = item[\"company_code\"]\n # 高管-高管成员\n gaoguan_Info.dongjiangao = item[\"dongjiangao\"]\n gaoguan_Info.gaoguanzhiwu = item[\"gaoguanzhiwu\"]\n gaoguan_Info.renzhiriqi = item[\"renzhiriqi\"]\n gaoguan_Info.lizhiriqi = item[\"lizhiriqi\"]\n gaoguan_Info.xueli = item[\"xueli\"]\n gaoguan_Info.nianxin = item[\"nianxin\"]\n gaoguan_Info.chiguzonge = item[\"chiguzonge\"]\n gaoguan_Info.chigushuliang = item[\"chigushuliang\"]\n\n\n gaoguan_Info.save()\n\n @classmethod\n def Save_Dongshihui_Info(self, item):\n dongshihui_Info = Dongshihui_Info()\n\n dongshihui_Info.company_code = item[\"company_code\"]\n # 高管-高管成员\n dongshihui_Info.dongjiangao = item[\"dongjiangao\"]\n dongshihui_Info.gaoguanzhiwu = item[\"gaoguanzhiwu\"]\n dongshihui_Info.renzhiriqi = item[\"renzhiriqi\"]\n dongshihui_Info.lizhiriqi = item[\"lizhiriqi\"]\n dongshihui_Info.xueli = item[\"xueli\"]\n dongshihui_Info.nianxin = item[\"nianxin\"]\n dongshihui_Info.chiguzonge = item[\"chiguzonge\"]\n dongshihui_Info.chigushuliang = item[\"chigushuliang\"]\n\n dongshihui_Info.save()\n\n @classmethod\n def 
Save_Jianshihui_Info(self, item):\n jianshihui_Info = Jianshihui_Info()\n\n jianshihui_Info.company_code = item[\"company_code\"]\n # 高管-监事\n jianshihui_Info.dongjiangao = item[\"dongjiangao\"]\n jianshihui_Info.gaoguanzhiwu = item[\"gaoguanzhiwu\"]\n jianshihui_Info.renzhiriqi = item[\"renzhiriqi\"]\n jianshihui_Info.lizhiriqi = item[\"lizhiriqi\"]\n jianshihui_Info.xueli = item[\"xueli\"]\n jianshihui_Info.nianxin = item[\"nianxin\"]\n jianshihui_Info.chiguzonge = item[\"chiguzonge\"]\n jianshihui_Info.chigushuliang = item[\"chigushuliang\"]\n\n jianshihui_Info.save()\n\n\n\n @classmethod\n def Save_Fenhong_Info(self, item):\n fenhong_Info = Fenhong_Info()\n\n fenhong_Info.company_code = item[\"company_code\"]\n # 分红、历年分红详表\n fenhong_Info.gonggaoshijian = item[\"gonggaoshijian\"]\n fenhong_Info.kuaijiniandu = item[\"kuaijiniandu\"]\n fenhong_Info.songgu = item[\"songgu\"]\n fenhong_Info.paixi = item[\"paixi\"]\n fenhong_Info.guquandengjiri = item[\"guquandengjiri\"]\n fenhong_Info.guquanchuxiri = item[\"guquanchuxiri\"]\n fenhong_Info.honggushangshiri = item[\"honggushangshiri\"]\n fenhong_Info.shifoshisi = item[\"shifoshisi\"]\n fenhong_Info.xiangqing = item[\"xiangqing\"]\n fenhong_Info.save()\n\n @classmethod\n def Save_Fenhong_Zhuanzeng_Info(self, item):\n fenhong_Zhuanzeng_Info = Fenhong_Zhuanzeng_Info()\n\n fenhong_Zhuanzeng_Info.company_code = item[\"company_code\"]\n # 分红、转增股本\n fenhong_Zhuanzeng_Info.gonggaoshijian = item[\"gonggaoshijian\"]\n fenhong_Zhuanzeng_Info.zhuanzeng = item[\"zhuanzeng\"]\n fenhong_Zhuanzeng_Info.chuquanchuxiri = item[\"chuquanchuxiri\"]\n fenhong_Zhuanzeng_Info.chuquandengjiri = item[\"chuquandengjiri\"]\n fenhong_Zhuanzeng_Info.zhuanzenggushangshiri = item[\"zhuanzenggushangshiri\"]\n fenhong_Zhuanzeng_Info.tongqisonggu = item[\"tongqisonggu\"]\n fenhong_Zhuanzeng_Info.fanganjianjie = item[\"fanganjianjie\"]\n fenhong_Zhuanzeng_Info.shifoshisi = item[\"shifoshisi\"]\n fenhong_Zhuanzeng_Info.xiangqing = item[\"xiangqing\"]\n\n fenhong_Zhuanzeng_Info.save()\n\n @classmethod\n def Save_Fenhong_Peigu_Info(self, item):\n fenhong_Peigu_Info = Fenhong_Peigu_Info()\n\n fenhong_Peigu_Info.company_code = item[\"company_code\"]\n # 分红、配 股\n fenhong_Peigu_Info.gonggaoshijian = item[\"gonggaoshijian\"]\n fenhong_Peigu_Info.peigufangan = item[\"peigufangan\"]\n fenhong_Peigu_Info.peigujia = item[\"peigujia\"]\n fenhong_Peigu_Info.jizhunguben = item[\"jizhunguben\"]\n fenhong_Peigu_Info.chuquanri = item[\"chuquanri\"]\n fenhong_Peigu_Info.guquandengjiri = item[\"guquandengjiri\"]\n fenhong_Peigu_Info.jiaokuanqishiri = item[\"jiaokuanqishiri\"]\n fenhong_Peigu_Info.jiaokuanzhongzhiri = item[\"jiaokuanzhongzhiri\"]\n fenhong_Peigu_Info.peigushangshiri = item[\"peigushangshiri\"]\n fenhong_Peigu_Info.mujizijin = item[\"mujizijin\"]\n fenhong_Peigu_Info.xiangqing = item[\"xiangqing\"]\n fenhong_Peigu_Info.save()\n\n @classmethod\n def Save_Fenhong_Huigou_Info(self, item):\n fenhong_Huigou_Info = Fenhong_Huigou_Info()\n\n fenhong_Huigou_Info.company_code = item[\"company_code\"]\n # 分红、回 购\n fenhong_Huigou_Info.gonggaoshijian = item[\"gonggaoshijian\"]\n fenhong_Huigou_Info.huigouzanzonggubenbili = item[\"huigouzanzonggubenbili\"]\n fenhong_Huigou_Info.huigougushu = item[\"huigougushu\"]\n fenhong_Huigou_Info.nihuigoujiage = item[\"nihuigoujiage\"]\n fenhong_Huigou_Info.gonggaoqianrigujia = item[\"gonggaoqianrigujia\"]\n fenhong_Huigou_Info.goumaizuigaojia = item[\"goumaizuigaojia\"]\n fenhong_Huigou_Info.goumaizuidijia = item[\"goumaizuidijia\"]\n 
fenhong_Huigou_Info.huigouzongjine = item[\"huigouzongjine\"]\n fenhong_Huigou_Info.shifoshisi = item[\"shifoshisi\"]\n fenhong_Huigou_Info.xiangqing = item[\"xiangqing\"]\n fenhong_Huigou_Info.save()\n\n @classmethod\n def Showrufenbu_Info(self, item):\n showrufenbu_Info = Showrufenbu_Info()\n showrufenbu_Info.company_code = item[\"company_code\"]\n # 收入分布\n showrufenbu_Info.leibiemingcheng = item[\"leibiemingcheng\"]\n showrufenbu_Info.yinyeshouru = item[\"yinyeshouru\"]\n showrufenbu_Info.zhanyinyeshourubili = item[\"zhanyinyeshourubili\"]\n showrufenbu_Info.yinyechengben = item[\"yinyechengben\"]\n showrufenbu_Info.zhanchengbenbili = item[\"zhanchengbenbili\"]\n showrufenbu_Info.yingyelirun = item[\"yingyelirun\"]\n showrufenbu_Info.zhanlirunbili = item[\"zhanlirunbili\"]\n showrufenbu_Info.maolilv =item[\"maolilv\"]\n\n showrufenbu_Info.save()\n\n\n\n @classmethod\n def Search_ID(self, id):\n\n if id == 0:\n aa = Companys.objects.all()\n # print \"ID == 0\"\n else:\n aa = Companys.objects.filter(id__gte=id)\n print \"ID != 0\"\n return aa\n\n\n\n" }, { "alpha_fraction": 0.6767676472663879, "alphanum_fraction": 0.6818181872367859, "avg_line_length": 32.16666793823242, "blob_id": "cb0eba69def102b7e37532edd966a122b5c92a1f", "content_id": "268402a25c950c6783ce61565b8a58587f097f59", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 198, "license_type": "no_license", "max_line_length": 90, "num_lines": 6, "path": "/irole/irolespider/irolespider/utils/Log.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "__author__ = 'aric'\nimport syslog\nclass Mylog():\n def log(self, loginfo):\n syslog.openlog(ident=\"irole\",logoption=syslog.LOG_PID, facility=syslog.LOG_LOCAL1)\n syslog.syslog(loginfo)" }, { "alpha_fraction": 0.6014271378517151, "alphanum_fraction": 0.6065239310264587, "avg_line_length": 36.03773498535156, "blob_id": "b6b13b5f9afec911cfda4eba2e09411c93626716", "content_id": "2249fc5ea7c0c8ee58d4787225643ee55b5c6236", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2000, "license_type": "no_license", "max_line_length": 100, "num_lines": 53, "path": "/mikufan/spider/cosplay/cosplay/pipelines.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# Define your item pipelines here\n#\n# Don't forget to add your pipeline to the ITEM_PIPELINES setting\n# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html\n\nfrom scrapy.pipelines.images import ImagesPipeline\nfrom scrapy.http import Request\nfrom scrapy.exceptions import DropItem\nimport hashlib\n\nfrom cosplay.utils.DjangoORM import CoserORM\n\nclass AcgPipeline(ImagesPipeline):\n # 从item中获取图片的真实地址,并执行下载请求\n def get_media_requests(self, item, info):\n self.dirname = item[\"url\"]\n for image_url in item['image']:\n # print(\"*\" * 20)\n # print(image_url)\n # print(\"*\"*20)\n yield Request(image_url)\n\n\n def item_completed(self, results, item, info):\n image_paths = [[x['path'] for ok, x in results if ok]]\n if not image_paths:\n raise DropItem(\"Item contains no images\")\n save_Data = CoserORM()\n save_Data.Save_Coser_Info(item)\n return\n def file_path(self, request, response=None, info=None):\n def _warn():\n from scrapy.exceptions import ScrapyDeprecationWarning\n import warnings\n warnings.warn('ImagesPipeline.image_key(url) and file_key(url) methods are deprecated, '\n 'please use file_path(request, response=None, info=None) instead',\n 
category=ScrapyDeprecationWarning, stacklevel=1)\n if not isinstance(request, Request):\n _warn()\n url = request\n else:\n url = request.url\n if not hasattr(self.file_key, '_base'):\n _warn()\n return self.file_key(url)\n elif not hasattr(self.image_key, '_base'):\n _warn()\n return self.image_key(url)\n image_guid = hashlib.sha1(url.encode(\"utf8\")).hexdigest()\n dir_guid = hashlib.sha1(self.dirname.encode(\"utf8\")).hexdigest()\n return '%s/%s.jpg' % (dir_guid, image_guid)" }, { "alpha_fraction": 0.6529351472854614, "alphanum_fraction": 0.656024694442749, "avg_line_length": 35, "blob_id": "060a420fe0cdd4e7c13586370ad02f8ca9a9668d", "content_id": "99c5ff6a720c7aaf527e0995ad6ebd0b6a67c8eb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1023, "license_type": "no_license", "max_line_length": 98, "num_lines": 27, "path": "/baixihecom/monster/views.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "#encoding: UTF-8\nfrom django.shortcuts import render\nimport sys\ndefault_encoding = 'utf-8'\nif sys.getdefaultencoding() != default_encoding:\n reload(sys)\n sys.setdefaultencoding(default_encoding)\n\n# Create your views here.\nfrom django.http import HttpResponse\nfrom utils import Do_Monster\ndef gateway(request):\n if request.method == \"POST\":\n return HttpResponse(u\"这里是monster的测试.默认拒绝访问哦\".encode('gbk'), content_type=\"text/plain\")\n if request.method == \"GET\":\n try:\n data = request.GET\n content = data[\"content\"].encode('utf-8')\n response = Do_Monster.Message_Session(data)\n res = response.Text_Msg_Response(content)\n print res\n return HttpResponse(res, content_type=\"application/xml\")\n except Exception as e:\n print e\n return HttpResponse(u\"这里是monster的测试.默认拒绝访问哦\".encode('gbk'), content_type=\"text/plain\")\n else:\n return HttpResponse('false')" }, { "alpha_fraction": 0.5588235259056091, "alphanum_fraction": 0.5692041516304016, "avg_line_length": 29.36842155456543, "blob_id": "d6bff753d4aa7137134ad014fce0b530ad5d35d7", "content_id": "40449a7a87acfcbed5cdc064c787295f043e5d1e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1164, "license_type": "no_license", "max_line_length": 95, "num_lines": 38, "path": "/monster/spider/spider/spiders/ifeng_2.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport scrapy\n\nfrom scrapy.http import Request\n\n\nfrom spider.utils import orm\n\n\nimport time\nclass ExampleSpider(scrapy.Spider):\n name = 's'\n allowed_domains = ['ifeng.com']\n start_urls = ['http://news.ifeng.com/listpage/11502/0/1/rtlist.shtml']\n\n def parse(self, response):\n aa = orm.News()\n news = aa.get_day_news_url()\n for i in news:\n print i.url\n time.sleep(2)\n yield Request(url=i.url, callback=self.parse_news_url, meta={'response': response})\n\n def parse_news_url(self, response):\n aa = orm.News()\n\n news_lists = response.xpath('//div[@class=\"newsList\"]/ul')\n for i in news_lists:\n news_list = i.xpath('li/a/@href').extract()\n for news in news_list:\n print u'新闻地址: ' + news\n aa.save_Daily_url(news)\n try:\n next_page = response.xpath('//div[@class=\"m_page\"]/span[2]/a/@href').extract()[0]\n except:\n next_page = response.xpath('//div[@class=\"m_page\"]/span/a/@href').extract()[0]\n\n yield Request(url=next_page, callback=self.parse_news_url)\n\n\n" }, { "alpha_fraction": 0.5155875086784363, "alphanum_fraction": 0.5899280309677124, "avg_line_length": 35.260868072509766, 
"blob_id": "01d7f5aa758dd8d0f06f8796b3513042add76483", "content_id": "44f50c67f848fa2098e6d2078b762d5d93d8e293", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 834, "license_type": "no_license", "max_line_length": 114, "num_lines": 23, "path": "/szmap/szmap/webapp/migrations/0001_initial.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models, migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='szmap',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('zone', models.CharField(max_length=255, verbose_name='\\u533a\\u57df\\u540d\\u79f0 ')),\n ('zhishu', models.CharField(max_length=255, verbose_name='\\u4ea4\\u901a\\u6307\\u6570 ')),\n ('chesu', models.CharField(max_length=255, verbose_name='\\u5e73\\u5747\\u8f66\\u901f ')),\n ('dengji', models.CharField(max_length=255, verbose_name='\\u62e5\\u5835\\u7b49\\u7ea7 ')),\n ],\n ),\n ]\n" }, { "alpha_fraction": 0.6212534308433533, "alphanum_fraction": 0.6376021504402161, "avg_line_length": 21.78125, "blob_id": "fa6fbd401c2905179da7379e27716e3301a9c65e", "content_id": "e7e95f0de6153b3708d27ca1f566e76182e173e3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 1178, "license_type": "no_license", "max_line_length": 72, "num_lines": 32, "path": "/szsj/ReadMe.txt", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "1. 环境说明\n 本环境在xubuntu 16.04 64Bit 下完成.\n python 2\n scrapy\n django\n mysql\n Chrome浏览器\n\n2. 部署\n 需要安装 scrapy: pip install scrapy\n 需要安装 Django: pip install django\n 如果木有mysqldb库,则需要另外安装: apt-get install python-mysqldb\n\n\n3. 文件夹说明:\n Brower-Driver: 是chrome浏览器的驱动, chromedriver放到 /usr/bin 下 并且给执行权限\n HeadlessBrowser: 是无头浏览器,这个项目没有用到,忽略\n dj_sz: Django项目, 主要用django来完成数据库的操作.\n sp_sz: 爬虫项目,爬取数据\n\n4. 爬虫说明:\n 到sp_sz 目录下,执行 scrapy list 会获取到几个爬虫.\n a. unit: 到深圳政府网站爬取公司列表\n b. unitinfo: 获取到了列表后,到企查查去查公司详细信息\n c. IDC: 爬取IDC相关数据\n\n\n\n5. 其他:\n 企查查上的源码tag里有中文字符, 所以目前没有爬非首页内容.\n\n 没有按照标准来, 直接一个for 循环搞定. 
中间设置了标志,如果中断,被ban等情况发生, 可以从ban的位置继续爬下去\n\n\n\n\n\n" }, { "alpha_fraction": 0.5010471343994141, "alphanum_fraction": 0.606151819229126, "avg_line_length": 62.66666793823242, "blob_id": "3020300a5e017ff05b3ee69266f9a4fef8ea1bfd", "content_id": "7f34e19ddeaced33fc5d945dfd150ae9acec9d75", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7640, "license_type": "no_license", "max_line_length": 156, "num_lines": 120, "path": "/szsj/dj_sz/hexun/migrations/0004_auto_20170411_0958.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11 on 2017-04-11 09:58\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('hexun', '0003_auto_20170411_0152'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Fenhong_Huigou_Info',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('company_code', models.CharField(default='-', max_length=255, verbose_name='\\u516c\\u53f8\\u80a1\\u7968\\u4ee3\\u7801')),\n ('gonggaoshijian', models.CharField(default='-', max_length=255, verbose_name='\\u516c\\u544a\\u65f6\\u95f4')),\n ('huigouzanzonggubenbili', models.CharField(default='-', max_length=255, verbose_name='\\u56de\\u8d2d\\u5360\\u603b\\u80a1\\u672c\\u6bd4\\u4f8b%')),\n ('huigougushu', models.CharField(default='-', max_length=255, verbose_name='\\u56de\\u8d2d\\u80a1\\u6570(\\u80a1)')),\n ('nihuigoujiage', models.CharField(default='-', max_length=255, verbose_name='\\u62df\\u56de\\u8d2d\\u4ef7\\u683c(\\u5143)')),\n ('gonggaoqianrigujia', models.CharField(default='-', max_length=255, verbose_name='\\u516c\\u544a\\u524d\\u65e5\\u80a1\\u4ef7(\\u5143)')),\n ('goumaizuigaojia', models.CharField(default='-', max_length=255, verbose_name='\\u8d2d\\u4e70\\u6700\\u9ad8\\u4ef7(\\u5143)')),\n ('goumaizuidijia', models.CharField(default='-', max_length=255, verbose_name='\\u8d2d\\u4e70\\u6700\\u4f4e\\u4ef7(\\u5143)')),\n ('huigouzongjine', models.CharField(default='-', max_length=255, verbose_name='\\u56de\\u8d2d\\u603b\\u91d1\\u989d(\\u4e07\\u5143)')),\n ('shifoshisi', models.CharField(default='-', max_length=255, verbose_name='\\u662f\\u5426\\u5b9e\\u65bd')),\n ('xiangqing', models.CharField(default='-', max_length=255, verbose_name='\\u5206\\u7ea2\\u8be6\\u60c5')),\n ],\n ),\n migrations.CreateModel(\n name='Fenhong_Peigu_Info',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('company_code', models.CharField(default='-', max_length=255, verbose_name='\\u516c\\u53f8\\u80a1\\u7968\\u4ee3\\u7801')),\n ('gonggaoshijian', models.CharField(default='-', max_length=255, verbose_name='\\u516c\\u544a\\u65f6\\u95f4')),\n ('peigufangan', models.CharField(default='-', max_length=255, verbose_name='\\u914d\\u80a1\\u65b9\\u6848(\\u80a1/10\\u80a1)')),\n ('peigujia', models.CharField(default='-', max_length=255, verbose_name='\\u914d\\u80a1\\u4ef7(\\u5143)')),\n ('jizhunguben', models.CharField(default='-', max_length=255, verbose_name='\\u57fa\\u51c6\\u80a1\\u672c(\\u4e07\\u80a1)')),\n ('chuquanri', models.CharField(default='-', max_length=255, verbose_name='\\u9664\\u6743\\u65e5')),\n ('guquandengjiri', models.CharField(default='-', max_length=255, verbose_name='\\u80a1\\u6743\\u767b\\u8bb0\\u65e5')),\n ('jiaokuanqishiri', models.CharField(default='-', max_length=255, 
verbose_name='\\u7f34\\u6b3e\\u8d77\\u59cb\\u65e5')),\n ('jiaokuanzhongzhiri', models.CharField(default='-', max_length=255, verbose_name='\\u7f34\\u6b3e\\u7ec8\\u6b62\\u65e5')),\n ('peigushangshiri', models.CharField(default='-', max_length=255, verbose_name='\\u914d\\u80a1\\u4e0a\\u5e02\\u65e5')),\n ('mujizijin', models.CharField(default='-', max_length=255, verbose_name='\\u52df\\u96c6\\u8d44\\u91d1\\u5408\\u8ba1(\\u5143)')),\n ('xiangqing', models.CharField(default='-', max_length=255, verbose_name='\\u5206\\u7ea2\\u8be6\\u60c5')),\n ],\n ),\n migrations.CreateModel(\n name='Fenhong_Zhuanzeng_Info',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('company_code', models.CharField(default='-', max_length=255, verbose_name='\\u516c\\u53f8\\u80a1\\u7968\\u4ee3\\u7801')),\n ('gonggaoshijian', models.CharField(default='-', max_length=255, verbose_name='\\u516c\\u544a\\u65f6\\u95f4')),\n ('zhuanzeng', models.CharField(default='-', max_length=255, verbose_name='\\u8f6c\\u589e(\\u80a1/10\\u80a1)')),\n ('chuquanchuxiri', models.CharField(default='-', max_length=255, verbose_name='\\u9664\\u6743\\u9664\\u606f\\u65e5')),\n ('chuquandengjiri', models.CharField(default='-', max_length=255, verbose_name='\\u9664\\u6743\\u767b\\u8bb0\\u65e5')),\n ('zhuanzenggushangshiri', models.CharField(default='-', max_length=255, verbose_name='\\u8f6c\\u589e\\u80a1\\u4e0a\\u5e02\\u65e5')),\n ('tongqisonggu', models.CharField(default='-', max_length=255, verbose_name='\\u540c\\u671f\\u9001\\u80a1(\\u80a1/10\\u80a1)')),\n ('fanganjianjie', models.CharField(default='-', max_length=255, verbose_name='\\u65b9\\u6848\\u7b80\\u4ecb')),\n ('shifoshisi', models.CharField(default='-', max_length=255, verbose_name='\\u662f\\u5426\\u5b9e\\u65bd')),\n ('xiangqing', models.CharField(default='-', max_length=255, verbose_name='\\u5206\\u7ea2\\u8be6\\u60c5')),\n ],\n ),\n migrations.RemoveField(\n model_name='fenhong_info',\n name='time',\n ),\n migrations.AddField(\n model_name='fenhong_info',\n name='company_code',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u516c\\u53f8\\u80a1\\u7968\\u4ee3\\u7801'),\n ),\n migrations.AddField(\n model_name='fenhong_info',\n name='gonggaoshijian',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u516c\\u544a\\u65f6\\u95f4'),\n ),\n migrations.AddField(\n model_name='fenhong_info',\n name='guquanchuxiri',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u9664\\u6743\\u9664\\u606f\\u65e5'),\n ),\n migrations.AddField(\n model_name='fenhong_info',\n name='guquandengjiri',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u80a1\\u6743\\u767b\\u8bb0\\u65e5'),\n ),\n migrations.AddField(\n model_name='fenhong_info',\n name='honggushangshiri',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u7ea2\\u80a1\\u4e0a\\u5e02\\u65e5'),\n ),\n migrations.AddField(\n model_name='fenhong_info',\n name='kuaijiniandu',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u4f1a\\u8ba1\\u5e74\\u5ea6'),\n ),\n migrations.AddField(\n model_name='fenhong_info',\n name='paixi',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u6d3e\\u606f(\\u5143/10\\u80a1)'),\n ),\n migrations.AddField(\n model_name='fenhong_info',\n name='shifoshisi',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u662f\\u5426\\u5b9e\\u65bd'),\n ),\n migrations.AddField(\n model_name='fenhong_info',\n name='songgu',\n 
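# songgu = bonus shares granted per 10 held ('song gu', per the verbose_name), kept as raw page text like the other dividend columns\n            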
field=models.CharField(default='-', max_length=255, verbose_name='\\u9001\\u80a1(\\u80a1/10\\u80a1)'),\n ),\n migrations.AddField(\n model_name='fenhong_info',\n name='xiangqing',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u5206\\u7ea2\\u8be6\\u60c5'),\n ),\n ]\n" }, { "alpha_fraction": 0.5332278609275818, "alphanum_fraction": 0.5427215099334717, "avg_line_length": 30.600000381469727, "blob_id": "e30fcf21733ef4e83727f1ee61107123990a6147", "content_id": "76fa260be7ce8fb4bffbf37d637948b7fa18c55e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1904, "license_type": "no_license", "max_line_length": 120, "num_lines": 60, "path": "/monster/spider/spider/spiders/ifeng_3.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport scrapy\n\nfrom scrapy.http import Request\n\n\nfrom spider.utils import orm\nimport time\n\n\nfrom spider.items import newsItem\n\n\nclass ExampleSpider(scrapy.Spider):\n name = 't'\n allowed_domains = ['ifeng.com']\n start_urls = ['http://news.ifeng.com/listpage/11502/0/1/rtlist.shtml']\n\n def parse(self, response):\n aa = orm.News()\n news = aa.get_daily_news_url()\n for i in news:\n print i.url\n time.sleep(2)\n yield Request(url=i.url, callback=self.parse_news, meta={'response': response, 'news': i})\n\n def parse_news(self, response):\n aa = orm.News()\n item = newsItem()\n try:\n # news = response.meta['news']\n url = response.url\n title = response.xpath('//div[@id=\"artical\"]/h1[@id=\"artical_topic\"]/text()').extract()[0]\n content = response.xpath('//div[@id=\"main_content\"]').extract()[0]\n content_time =response.xpath('//*[@id=\"artical_sth\"]/p/span[@itemprop=\"datePublished\"]/text()').extract()[0]\n content_from = response.xpath('//*[@id=\"artical_sth\"]/p/span[3]/span/a/text()').extract()[0]\n content_type = u'文字新闻'\n content_web = self.allowed_domains[0]\n save_time = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))\n content_html = response.body\n\n # item[\"news\"] = news\n item[\"url\"] = url\n item[\"title\"] = title\n item[\"content\"] = content\n item[\"content_time\"] = content_time\n item[\"content_from\"] = content_from\n item[\"content_type\"] = content_type\n item[\"content_web\"] = content_web\n item[\"save_time\"] = save_time\n item[\"content_html\"] = content_html\n\n\n aa.save_News(item)\n\n # print \"*\" * 10\n\n\n except:\n pass\n" }, { "alpha_fraction": 0.5142717957496643, "alphanum_fraction": 0.5894120335578918, "avg_line_length": 40.4040412902832, "blob_id": "2ccbb583a826b227d18464e78b669313109b5c0d", "content_id": "d356586890c48f4ab5144e041d8ae25b503efe44", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4099, "license_type": "no_license", "max_line_length": 123, "num_lines": 99, "path": "/szsj/dj_sz/hightech/migrations/0015_auto_20170412_0740.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11 on 2017-04-12 07:40\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('hightech', '0014_gao_company_info'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='unit_annual_promoters_and_reports_investment_info',\n name='Unit_id',\n ),\n migrations.RemoveField(\n model_name='unit_annual_reports_base_info',\n name='Unit_id',\n ),\n migrations.RemoveField(\n 
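# as with the two removals above, the per-table Unit_id column is dropped; the searching_name/searched_name string columns added below become the lookup key instead\n            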
model_name='unit_annual_reports_website_info',\n name='Unit_id',\n ),\n migrations.RemoveField(\n model_name='unit_base_info',\n name='Unit_id',\n ),\n migrations.RemoveField(\n model_name='unit_base_info_changed_info',\n name='Unit_id',\n ),\n migrations.RemoveField(\n model_name='unit_base_info_shareholder_info',\n name='Unit_id',\n ),\n migrations.AddField(\n model_name='unit_annual_promoters_and_reports_investment_info',\n name='searched_name',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u67e5\\u8be2\\u5230\\u7684\\u540d\\u5b57'),\n ),\n migrations.AddField(\n model_name='unit_annual_promoters_and_reports_investment_info',\n name='searching_name',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u9700\\u8981\\u67e5\\u8be2\\u7684\\u540d\\u5b57'),\n ),\n migrations.AddField(\n model_name='unit_annual_reports_base_info',\n name='searched_name',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u67e5\\u8be2\\u5230\\u7684\\u540d\\u5b57'),\n ),\n migrations.AddField(\n model_name='unit_annual_reports_base_info',\n name='searching_name',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u9700\\u8981\\u67e5\\u8be2\\u7684\\u540d\\u5b57'),\n ),\n migrations.AddField(\n model_name='unit_annual_reports_website_info',\n name='searched_name',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u67e5\\u8be2\\u5230\\u7684\\u540d\\u5b57'),\n ),\n migrations.AddField(\n model_name='unit_annual_reports_website_info',\n name='searching_name',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u9700\\u8981\\u67e5\\u8be2\\u7684\\u540d\\u5b57'),\n ),\n migrations.AddField(\n model_name='unit_base_info',\n name='searched_name',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u67e5\\u8be2\\u5230\\u7684\\u540d\\u5b57'),\n ),\n migrations.AddField(\n model_name='unit_base_info',\n name='searching_name',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u9700\\u8981\\u67e5\\u8be2\\u7684\\u540d\\u5b57'),\n ),\n migrations.AddField(\n model_name='unit_base_info_changed_info',\n name='searched_name',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u67e5\\u8be2\\u5230\\u7684\\u540d\\u5b57'),\n ),\n migrations.AddField(\n model_name='unit_base_info_changed_info',\n name='searching_name',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u9700\\u8981\\u67e5\\u8be2\\u7684\\u540d\\u5b57'),\n ),\n migrations.AddField(\n model_name='unit_base_info_shareholder_info',\n name='searched_name',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u67e5\\u8be2\\u5230\\u7684\\u540d\\u5b57'),\n ),\n migrations.AddField(\n model_name='unit_base_info_shareholder_info',\n name='searching_name',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u9700\\u8981\\u67e5\\u8be2\\u7684\\u540d\\u5b57'),\n ),\n ]\n" }, { "alpha_fraction": 0.5151097178459167, "alphanum_fraction": 0.6253291368484497, "avg_line_length": 74.95237731933594, "blob_id": "eecd6654eb32a796eb0e9d4b3a82585444d168ce", "content_id": "f3762e7da5aa99414cf394e58f41fee06b623e68", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7975, "license_type": "no_license", "max_line_length": 216, "num_lines": 105, "path": "/szsj/dj_sz/hightech/migrations/0003_unit_annual_promoters_and_reports_investment_infor_unit_annual_reports_base_infor_unit_annual_report.py", "repo_name": 
"leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.5 on 2017-02-27 01:31\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('hightech', '0002_auto_20170223_0555'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Unit_annual_Promoters_and_reports_investment_Infor',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('Unit_id', models.CharField(max_length=255, verbose_name='\\u4f01\\u4e1a\\u5e8f\\u53f7')),\n ('Sponsor', models.CharField(max_length=255, verbose_name='\\u53d1\\u8d77\\u4eba')),\n ('Time_of_subscription', models.CharField(max_length=255, verbose_name='\\u8ba4\\u7f34\\u51fa\\u8d44\\u65f6\\u95f4')),\n ('Subscribed_capital_contribution', models.CharField(max_length=255, verbose_name='\\u8ba4\\u7f34\\u51fa\\u8d44\\u65b9\\u5f0f')),\n ('Paid_in_capital_contribution', models.CharField(max_length=255, verbose_name='\\u5b9e\\u7f34\\u51fa\\u8d44\\u989d\\uff08\\u4e07\\u5143\\uff09')),\n ('Investment_time', models.CharField(max_length=255, verbose_name='\\u51fa\\u8d44\\u65f6\\u95f4')),\n ('Investment_method', models.CharField(max_length=255, verbose_name='\\u51fa\\u8d44\\u65b9\\u5f0f')),\n ],\n ),\n migrations.CreateModel(\n name='Unit_annual_reports_Base_Infor',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('Unit_id', models.CharField(max_length=255, verbose_name='\\u4f01\\u4e1a\\u5e8f\\u53f7')),\n ('Registration_number', models.CharField(max_length=255, verbose_name='\\u6ce8\\u518c\\u53f7')),\n ('Business_state', models.CharField(max_length=255, verbose_name='\\u4f01\\u4e1a\\u7ecf\\u8425\\u72b6\\u6001')),\n ('Enterprise_telephone', models.CharField(max_length=255, verbose_name='\\u4f01\\u4e1a\\u8054\\u7cfb\\u7535\\u8bdd')),\n ('Email', models.CharField(max_length=255, verbose_name='\\u7535\\u5b50\\u90ae\\u7bb1')),\n ('Postcode', models.CharField(max_length=255, verbose_name='\\u90ae\\u653f\\u7f16\\u7801')),\n ('number_of_people_engaged', models.CharField(max_length=255, verbose_name='\\u4ece\\u4e1a\\u4eba\\u6570')),\n ('residence', models.CharField(max_length=255, verbose_name='\\u4f4f\\u6240')),\n ('transfer_of_shareholder_equity', models.CharField(max_length=255, verbose_name='\\u6709\\u9650\\u8d23\\u4efb\\u516c\\u53f8\\u672c\\u5e74\\u5ea6\\u662f\\u5426\\u53d1\\u751f\\u80a1\\u4e1c\\u80a1\\u6743\\u8f6c\\u8ba9')),\n ('investment_information', models.CharField(max_length=255, verbose_name='\\u4f01\\u4e1a\\u662f\\u5426\\u6709\\u6295\\u8d44\\u4fe1\\u606f\\u6216\\u8d2d\\u4e70\\u5176\\u4ed6\\u516c\\u53f8\\u80a1\\u6743')),\n ],\n ),\n migrations.CreateModel(\n name='Unit_annual_reports_Website_Infor',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('Unit_id', models.CharField(max_length=255, verbose_name='\\u4f01\\u4e1a\\u5e8f\\u53f7')),\n ('Web_Type', models.CharField(max_length=255, verbose_name='\\u7c7b\\u578b')),\n ('Web_Name', models.CharField(max_length=255, verbose_name='\\u540d\\u79f0')),\n ('Web_Site', models.CharField(max_length=255, verbose_name='\\u7f51\\u5740')),\n ],\n ),\n migrations.CreateModel(\n name='Unit_Base_Info',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('Unit_id', models.CharField(max_length=255, 
verbose_name='\\u4f01\\u4e1a\\u5e8f\\u53f7')),\n ('phone_nunber', models.CharField(max_length=255, verbose_name='\\u7535\\u8bdd\\u53f7\\u7801')),\n ('email', models.CharField(max_length=255, verbose_name='\\u90ae\\u7bb1')),\n ('website', models.CharField(max_length=255, verbose_name='\\u7f51\\u5740')),\n ('address', models.CharField(max_length=255, verbose_name='\\u5730\\u5740')),\n ('code', models.CharField(max_length=255, verbose_name='\\u7edf\\u4e00\\u793e\\u4f1a\\u4fe1\\u7528\\u4ee3\\u7801')),\n ('Registration_number', models.CharField(max_length=255, verbose_name='\\u6ce8\\u518c\\u53f7')),\n ('Organization_code', models.CharField(max_length=255, verbose_name='\\u7ec4\\u7ec7\\u673a\\u6784\\u4ee3\\u7801')),\n ('Operating_state', models.CharField(max_length=255, verbose_name='\\u7ecf\\u8425\\u72b6\\u6001')),\n ('Legal_representative', models.CharField(max_length=255, verbose_name='\\u6cd5\\u5b9a\\u4ee3\\u8868\\u4eba')),\n ('registered_capital', models.CharField(max_length=255, verbose_name='\\u6ce8\\u518c\\u8d44\\u672c')),\n ('Company_type', models.CharField(max_length=255, verbose_name='\\u516c\\u53f8\\u7c7b\\u578b')),\n ('date_of_establishment', models.CharField(max_length=255, verbose_name='\\u6210\\u7acb\\u65e5\\u671f')),\n ('Operating_period', models.CharField(max_length=255, verbose_name='\\u8425\\u4e1a\\u671f\\u9650')),\n ('registration_authority', models.CharField(max_length=255, verbose_name='\\u767b\\u8bb0\\u673a\\u5173')),\n ('Date_of_issue', models.CharField(max_length=255, verbose_name='\\u53d1\\u7167\\u65e5\\u671f')),\n ('company_size', models.CharField(max_length=255, verbose_name='\\u516c\\u53f8\\u89c4\\u6a21')),\n ('Subordinate_industry', models.CharField(max_length=255, verbose_name='\\u6240\\u5c5e\\u884c\\u4e1a')),\n ('English_name', models.CharField(max_length=255, verbose_name='\\u82f1\\u6587\\u540d')),\n ('Name_used_Before', models.CharField(max_length=255, verbose_name='\\u66fe\\u7528\\u540d')),\n ('Enterprise_address', models.CharField(max_length=255, verbose_name='\\u4f01\\u4e1a\\u5730\\u5740')),\n ('Business_scope', models.CharField(max_length=255, verbose_name='\\u7ecf\\u8425\\u8303\\u56f4')),\n ],\n ),\n migrations.CreateModel(\n name='Unit_Base_Info_Changed_Information',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('Unit_id', models.CharField(max_length=255, verbose_name='\\u4f01\\u4e1a\\u5e8f\\u53f7')),\n ('Change_date', models.CharField(max_length=255, verbose_name='\\u53d8\\u66f4\\u65e5\\u671f')),\n ('Change_item', models.CharField(max_length=255, verbose_name='\\u53d8\\u66f4\\u9879\\u76ee')),\n ('Before_change', models.CharField(max_length=255, verbose_name='\\u53d8\\u66f4\\u524d')),\n ('After_change', models.CharField(max_length=255, verbose_name='\\u53d8\\u66f4\\u540e')),\n ],\n ),\n migrations.CreateModel(\n name='Unit_Base_Info_Shareholder_Information',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('Unit_id', models.CharField(max_length=255, verbose_name='\\u4f01\\u4e1a\\u5e8f\\u53f7')),\n ('Shareholder', models.CharField(max_length=255, verbose_name='\\u80a1\\u4e1c')),\n ('Shareholding_ratio', models.CharField(max_length=255, verbose_name='\\u6301\\u80a1\\u6bd4\\u4f8b')),\n ('Subscribed_capital_contribution', models.CharField(max_length=255, verbose_name='\\u8ba4\\u7f34\\u51fa\\u8d44\\u989d')),\n ('Subscription_Date', models.CharField(max_length=255, verbose_name='\\u8ba4\\u7f34\\u51fa\\u8d44\\u65e5\\u671f')),\n 
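# note: every column in these scraped-register tables is a CharField(max_length=255) -- ratios, amounts and dates are stored exactly as the source page renders them, so no numeric or date types appear\n                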
('Shareholder_type', models.CharField(max_length=255, verbose_name='\\u80a1\\u4e1c\\u7c7b\\u578b')),\n ],\n ),\n ]\n" }, { "alpha_fraction": 0.515911877155304, "alphanum_fraction": 0.5183598399162292, "avg_line_length": 30.365385055541992, "blob_id": "204c2613bde1f3ddd932753c523363a9ba119b73", "content_id": "175acfa45719e2757a2aa55b8768ef182d649a3f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1652, "license_type": "no_license", "max_line_length": 74, "num_lines": 52, "path": "/qiubai_CrawlSpider/spiders/qiubai/QiubaiORM/SaveToDB.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding:utf-8 -*-\n\nimport sys, os\n\nfrom qiubai.settings import path\nif path not in sys.path:\n sys.path.append(path)\nimport django\nos.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"qiubaifrontend.settings\")\ndjango.setup()\nfrom spiders.models import QiuShi, User, Comment\n\nclass QiushiORM(object):\n def Saveqiushi(self, items):\n\n try:\n qiushi = QiuShi()\n commenta = Comment()\n qiushi.page_url = items[\"page_url\"]\n qiushi.user = items[\"user\"]\n qiushi.contents = items[\"content\"]\n qiushi.type = items[\"type\"]\n qiushi.url = items[\"url\"]\n qiushi.url0 = items[\"url0\"]\n qiushi.smiling = items[\"smiling\"]\n qiushi.comment_count = items[\"comment_count\"]\n\n # print \"开始写入关键数据库\"\n for i in range(0, len(items[\"cuser\"])):\n commenta.user = items[\"cuser\"][i]\n commenta.qiushiURL = items[\"page_url\"]\n commenta.comment = items[\"comment\"][i]\n commenta.save()\n qiushi.save()\n except:\n pass\n\n\n def SaveUser(self, items):\n try:\n user = User()\n user.name = items['name']\n user.fans = items['fans']\n user.follow = items['follow']\n user.comment = items['comment']\n user.marriage = items['marriage']\n user.occupation = items['occupation']\n user.constellation = items['constellation']\n user.age = items['age']\n user.save()\n except:\n pass\n\n\n\n" }, { "alpha_fraction": 0.7575757503509521, "alphanum_fraction": 0.7575757503509521, "avg_line_length": 17.85714340209961, "blob_id": "e5663a09790fa2982d7a36feb30ea7a84428987d", "content_id": "8bc06ea6ead86e25993834e5e65e8c33e0839da2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 132, "license_type": "no_license", "max_line_length": 39, "num_lines": 7, "path": "/szsj/dj_sz/hightech/apps.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "from __future__ import unicode_literals\n\nfrom django.apps import AppConfig\n\n\nclass HightechConfig(AppConfig):\n name = 'hightech'\n" }, { "alpha_fraction": 0.5274949073791504, "alphanum_fraction": 0.6008146405220032, "avg_line_length": 23.549999237060547, "blob_id": "5793f9a4d8de0dcaacd8cc0b89f9dfb6c899b8e5", "content_id": "2c6026f412057ca8079c80264ba11fe4c0394e4d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 491, "license_type": "no_license", "max_line_length": 86, "num_lines": 20, "path": "/szsj/dj_sz/hightech/migrations/0010_idc_base_info_qicq.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.5 on 2017-03-07 07:06\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('hightech', '0009_auto_20170307_0232'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='idc_base_info',\n name='qicq',\n 
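# nb: the spider's Save2DB assigns idc_Base_Info.QQ while the column added here is named 'qicq' (verbose_name 'qq'); unless the model maps QQ onto this field, that assignment is likely never persisted\n            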
field=models.CharField(default='null', max_length=255, verbose_name='qq'),\n ),\n ]\n" }, { "alpha_fraction": 0.6742897033691406, "alphanum_fraction": 0.7020096778869629, "avg_line_length": 38.02702713012695, "blob_id": "3b39326b5b4c74671684762508a17b6bf2e4c55b", "content_id": "c8ef7ed719f2a68502b2d51d3c0595def9a188f3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1623, "license_type": "no_license", "max_line_length": 96, "num_lines": 37, "path": "/hr/frontend/hrweb/lagou/models.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models\n\n# Create your models here.\n\n\nclass Companys_Info(models.Model):\n # lagou 网站招聘信息\n searching_company = models.CharField(max_length=255, verbose_name=u\"需要查询的公司名字\", default='-')\n\n\n\nclass Position_URL(models.Model):\n # lagou 网站招聘信息\n searching_company = models.CharField(max_length=255, verbose_name=u\"需要查询的公司名字\", default='-')\n url = models.CharField(max_length=255, verbose_name=u\"查到公司的某个工作岗位的URL\", default='-')\n\n\n\n\n\nclass Position_Info(models.Model):\n # lagou 网站招聘信息\n searching_company = models.CharField(max_length=255, verbose_name=u\"需要查询的公司名字\", default='-')\n searched_company = models.CharField(max_length=255, verbose_name=u\"查询到的公司名字\", default='-')\n \n bumen = models.CharField(max_length=255, verbose_name=u\"部门\", default='-')\n zhiwei = models.CharField(max_length=255, verbose_name=u\"职位\", default='-')\n yaoqiu = models.CharField(max_length=255, verbose_name=u\"要求\", default='-')\n fabushijian = models.CharField(max_length=255, verbose_name=u\"发布时间\", default='-')\n \n zhiweiyouhuo = models.CharField(max_length=255, verbose_name=u\"职位诱惑\", default='-')\n zhiweimiaoshu = models.CharField(max_length=255, verbose_name=u\"职位描述\", default='-')\n gongzuodidian = models.CharField(max_length=255, verbose_name=u\"工作地点\", default='-')\n fabuzhe = models.CharField(max_length=255, verbose_name=u\"发布者\", default='-')" }, { "alpha_fraction": 0.6266666650772095, "alphanum_fraction": 0.6284444332122803, "avg_line_length": 28.63157844543457, "blob_id": "8a4102dad402ae2c11e5a440ac2ce893f365110c", "content_id": "6be22f5a36b1e5d935fc10e4e4aba01d1af11354", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1183, "license_type": "no_license", "max_line_length": 65, "num_lines": 38, "path": "/cos17173/spider/acg/acg/pipelines.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# Define your item pipelines here\n#\n# Don't forget to add your pipeline to the ITEM_PIPELINES setting\n# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html\n\nfrom scrapy.pipelines.images import ImagesPipeline\nfrom scrapy.http import Request\nfrom scrapy.exceptions import DropItem\n# import logging\n\n\n\nclass AcgPipeline(ImagesPipeline):\n # 从item中获取图片的真实地址,并执行下载请求\n title = ''\n def get_media_requests(self, item, info):\n for image_url in item['image']:\n print image_url\n # title = item[\"name\"]\n\n # logging.error(image_url)\n\n yield Request(image_url)\n def item_completed(self, results, item, info):\n image_paths = [[x['path'] for ok, x in results if ok]]\n if not image_paths:\n raise DropItem(\"Item contains no images\")\n # save_Data = Spider_Django.django_sql()\n # save_Data.save_Item(item)\n return\n # 修改保存图片的文件名...\n def file_path(self, request, response=None, info=None):\n\n url = 
request.url\n image_guid = url.split('/')[-1]\n return '%s.jpg' % (image_guid)" }, { "alpha_fraction": 0.6157205104827881, "alphanum_fraction": 0.6168122291564941, "avg_line_length": 32.96296310424805, "blob_id": "438927bcbcc3558e98c6ea137c51f44aba713b51", "content_id": "dd63d82cfb66213589a72a8d4319cafaf30c87c7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 964, "license_type": "no_license", "max_line_length": 62, "num_lines": 27, "path": "/myqiubai/QiuBaiSpider/qiushibaike/pipelines.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom scrapy.contrib.pipeline.images import ImagesPipeline\nfrom scrapy.http import Request\nfrom scrapy.exceptions import DropItem\nfrom qiushibaike import settings\nfrom qiushibaike.utils import SQL\nimport MySQLdb\n\nclass QiushibaikePipeline(ImagesPipeline):\n sql = SQL.SQL_Conn()\n def get_media_requests(self, item, info):\n\n for user_image in item[\"user_image\"]:\n if user_image != \"N\":\n #print u\"开始下载图片咯,用户头像:\"+ user_image\n yield Request(user_image)\n for thumb in item[\"thumb\"]:\n if thumb != \"N\":\n #print u\"开始下载图片咯,图片文件:\"+ thumb\n yield Request(thumb)\n def item_completed(self, results, item, info):\n image_paths = [[x['path'] for ok, x in results if ok]]\n if not image_paths:\n raise DropItem(\"Item contains no images\")\n self.sql.save(item)\n return" }, { "alpha_fraction": 0.7216358780860901, "alphanum_fraction": 0.7255936861038208, "avg_line_length": 30.625, "blob_id": "0aeeba3d53dd29d40da00283791b2f092878071c", "content_id": "de8bb1a63dccc8128375a4295015ba07be3af3e6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 758, "license_type": "no_license", "max_line_length": 94, "num_lines": 24, "path": "/myqiubai/frontend/qiubai/views.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "from django.shortcuts import render\nfrom django.shortcuts import render_to_response\n\n# Create your views here.\nfrom django.template import RequestContext\nfrom qiubai.models import QiuShi\nfrom django.core.paginator import Paginator, EmptyPage,PageNotAnInteger\n\ndef qiushi(request, pages=1):\n Qius = QiuShi.objects.all()\n paginator = Paginator(Qius, 5)\n page = pages #request.GET.get('page')\n try:\n contacts = paginator .page(page)\n except PageNotAnInteger:\n contacts = paginator .page(1)\n except EmptyPage:\n contacts = paginator .page(paginator.num_pages)\n return render_to_response('index.html',{'qiubais':contacts})\n\n\n\n\n # return render('index.html', {'qiubais':Qius}, context_instance=RequestContext(request))" }, { "alpha_fraction": 0.5791855454444885, "alphanum_fraction": 0.6153846383094788, "avg_line_length": 12.8125, "blob_id": "d878cecc7a59432057f48b12ccd4db36693e95f6", "content_id": "288e2b467d18b6684eb7b242ad88fab024593b06", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 221, "license_type": "no_license", "max_line_length": 45, "num_lines": 16, "path": "/baixihecom/monster/urls.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n#coding:utf-8\n\"\"\"\n Author: 'Aric'\n Purpose: ''\n Created: '2015/7/13'\n\"\"\"\n\nfrom django.conf.urls import url\n\nfrom . 
import views\n\nurlpatterns = [\n url(r'$', views.gateway, name=\"gateway\"),\n\n]\n" }, { "alpha_fraction": 0.6000000238418579, "alphanum_fraction": 0.6909090876579285, "avg_line_length": 17, "blob_id": "64a3fcb2469c1aaefd307d68a468faaebbdd7251", "content_id": "78ed0bf2fc5fe7c4a2b8713b6f2b6595c8697fa6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 85, "license_type": "no_license", "max_line_length": 26, "num_lines": 3, "path": "/szsj/Brower-Driver/ReadMe.txt", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "1. ubuntu系统 安装 Chrome 64位\n2. 将驱动放到 /usr/bin下\n3. 重启服务\n\n" }, { "alpha_fraction": 0.526190459728241, "alphanum_fraction": 0.5738095045089722, "avg_line_length": 20, "blob_id": "b2178d938061915dfba4f5fd5574a15d2bedf6f1", "content_id": "7ac4058d9f050b6cc67c3194aa2d4d59cc0da72c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 420, "license_type": "no_license", "max_line_length": 46, "num_lines": 20, "path": "/szsj/dj_sz/hexun/migrations/0002_auto_20170410_0754.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11 on 2017-04-10 07:54\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('hexun', '0001_initial'),\n ]\n\n operations = [\n migrations.RenameField(\n model_name='base_info',\n old_name='Company_name',\n new_name='name',\n ),\n ]\n" }, { "alpha_fraction": 0.5044464468955994, "alphanum_fraction": 0.524899959564209, "avg_line_length": 46.8510627746582, "blob_id": "4d67b8c1e92cd98151036097e6b22679165a5f0b", "content_id": "cf84ecd55eb0a64ff2b39240d11a7f8b8d74cee3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9112, "license_type": "no_license", "max_line_length": 140, "num_lines": 188, "path": "/szsj/sp_sz/szsti/utils/PageAnaylist.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n\nfrom szsti.utils.Save2DB import DjORM\n\n\nclass qichachca(object):\n\n @classmethod\n def BaseInfo(self, response, number):\n self.item = {}\n self.item[\"searching_name\"] = number\n self.item['searched_name'] = \\\n response.xpath('//*[@id=\"company-top\"]/div/div[1]/span[2]/span/span[1]/text()').extract()[0].strip()\n self.item[\"phone_nunber\"] = response.xpath('//*[@id=\"company-top\"]/div/div[1]/span[2]/small[1]/text()').extract()[0]\n self.item[\"code\"] = response.xpath('//*[@id=\"Cominfo\"]/table/tbody/tr[1]/td[2]/text()').extract()[0]\n\n self.item[\"Registration_number\"] = response.xpath('//*[@id=\"Cominfo\"]/table/tbody/tr[1]/td[4]/text()').extract()[0]\n\n\n self.item[\"Organization_code\"] = response.xpath('//*[@id=\"Cominfo\"]/table/tbody/tr[2]/td[2]/text()').extract()[0]\n self.item[\"Operating_state\"] = response.xpath('//*[@id=\"Cominfo\"]/table/tbody/tr[2]/td[4]/text()').extract()[0]\n self.item[\"Legal_representative\"] = response.xpath('//*[@id=\"Cominfo\"]/table/tbody/tr[3]/td[2]/text()').extract()[0]\n self.item[\"registered_capital\"] = response.xpath('//*[@id=\"Cominfo\"]/table/tbody/tr[3]/td[4]/text()').extract()[0]\n self.item[\"Company_type\"] = response.xpath('//*[@id=\"Cominfo\"]/table/tbody/tr[4]/td[2]/text()').extract()[0]\n self.item[\"date_of_establishment\"] = response.xpath('//*[@id=\"Cominfo\"]/table/tbody/tr[4]/td[4]/text()').extract()[0]\n self.item[\"Operating_period\"] 
= response.xpath('//*[@id=\"Cominfo\"]/table/tbody/tr[5]/td[2]/text()').extract()[0]\n self.item[\"registration_authority\"] = response.xpath('//*[@id=\"Cominfo\"]/table/tbody/tr[5]/td[4]/text()').extract()[0]\n self.item[\"Date_of_issue\"] = response.xpath('//*[@id=\"Cominfo\"]/table/tbody/tr[6]/td[2]/text()').extract()[0]\n self.item[\"company_size\"] = response.xpath('//*[@id=\"Cominfo\"]/table/tbody/tr[6]/td[4]/text()').extract()[0]\n self.item[\"Subordinate_industry\"] = response.xpath('//*[@id=\"Cominfo\"]/table/tbody/tr[7]/td[2]/text()').extract()[0]\n self.item[\"English_name\"] = response.xpath('//*[@id=\"Cominfo\"]/table/tbody/tr[7]/td[4]/text()').extract()[0]\n self.item[\"Name_used_Before\"] = response.xpath('//*[@id=\"Cominfo\"]/table/tbody/tr[8]/td[2]/text()').extract()[0]\n self.item[\"Enterprise_address\"] = response.xpath('//*[@id=\"Cominfo\"]/table/tbody/tr[9]/td[2]/text()').extract()[0]\n self.item[\"Business_scope\"] = response.xpath('//*[@id=\"Cominfo\"]/table/tbody/tr[10]/td[2]/text()').extract()[0]\n\n\n for i in self.item:\n try:\n self.item[i] = self.item[i].strip()\n\n except Exception:\n print \"BaseInfo error\"\n pass\n\n DjORM.save_Unit_Base_Info(self.item)\n\n print \"* BaseInfo *\"\n\n\n\n @classmethod\n def Unit_Base_Shareholder_Info(self, response, number):\n # 股东信息\n self.item = {}\n self.item[\"searching_name\"] = number\n self.item['searched_name'] = \\\n response.xpath('//*[@id=\"company-top\"]/div/div[1]/span[2]/span/span[1]/text()').extract()[0].strip()\n tbody = response.xpath('//*[@id=\"Sockinfo\"]/table/tbody/tr')\n\n for i in tbody:\n # print i.extract()\n try:\n self.item[\"Shareholder\"] = i.xpath('td[1]/a[1]/text()').extract()[0]\n self.item[\"Shareholding_ratio\"] = i.xpath('td[2]/text()').extract()[0].strip()\n self.item[\"Subscribed_capital_contribution\"] = i.xpath('td[3]/text()').extract()[0].strip()\n self.item[\"Subscription_Date\"] = i.xpath('td[4]/text()').extract()[0].strip()\n self.item[\"Shareholder_type\"] = i.xpath('td[5]/text()').extract()[0].strip()\n except Exception:\n print \"Unit_Base_Shareholder_Info error\"\n pass\n DjORM.save_Unit_Base_Info_Shareholder_Info(self.item)\n print \"* Unit_Base_Shareholder_Info *\"\n\n\n @classmethod\n def Unit_Base_Changed_Info(self, response, number):\n # 变更信息\n self.item = {}\n self.item[\"searching_name\"] = number\n\n self.item['searched_name'] = response.xpath('//*[@id=\"company-top\"]/div/div[1]/span[2]/span/span[1]/text()').extract()[0].strip()\n tbody = response.xpath('//*[@id=\"Changelist\"]/table/tbody/tr')\n\n for i in tbody:\n # print i.extract()\n try:\n self.item[\"Unit_id\"] = i.xpath('td[1]/text()').extract()[0]\n self.item[\"Change_date\"] = i.xpath('td[2]/text()').extract()[0].strip()\n self.item[\"Change_item\"] = i.xpath('td[3]/text()').extract()[0].strip()\n self.item[\"Before_change\"] = i.xpath('td[4]/div/text()').extract()[0].strip()\n self.item[\"After_change\"] = i.xpath('td[5]/div/text()').extract()[0].strip()\n except Exception as e:\n # print e\n print \"Unit_Base_Changed_Info error \"\n pass\n DjORM.save_Unit_Base_Info_Changed_Info(self.item)\n print \"* Unit_Base_Changed_Info *\"\n\n @classmethod\n def Unit_annual_reports_Base_Info(self, response, number):\n # 企业年报-企业基本信息\n\n print u\"Unit_annual_reports_Base_Info\"\n self.item = {}\n self.item[\"searching_name\"] = number\n\n self.item[\"Registration_number\"] = response.xpath('//*[@id=u\"2015年度报告\"]/table[1]/tbody/tr[1]/td[2]/text()').extract()[0]\n self.item[\"Business_state\"] = 
response.xpath('//*[@id=u\"2015年度报告\"]/table[1]/tbody/tr[1]/td[4]/text()').extract()[0]\n self.item[\"Enterprise_telephone\"] = response.xpath('//*[@id=u\"2015年度报告\"]/table[1]/tbody/tr[2]/td[2]/text()').extract()[0]\n self.item[\"Email\"] = response.xpath('//*[@id=u\"2015年度报告\"]/table[1]/tbody/tr[2]/td[4]/a/text()').extract()[0]\n self.item[\"Postcode\"] = response.xpath('//*[@id=u\"2015年度报告\"]/table[1]/tbody/tr[3]/td[2]/text()').extract()[0]\n self.item[\"number_of_people_engaged\"] = response.xpath('//*[@id=\"2015年度报告\"]/table[1]/tbody/tr[3]/td[4]/text()').extract()[0]\n self.item[\"residence\"] = response.xpath('//*[@id=u\"2015年度报告\"]/table[1]/tbody/tr[4]/td[2]/text()').extract()[0]\n self.item[\"transfer_of_shareholder_equity\"] = response.xpath('//*[@id=u\"2015年度报告\"]/table[1]/tbody/tr[5]/td[2]/text()').extract()[0]\n self.item[\"investment_Info\"] = response.xpath('//*[@id=u\"2015年度报告\"]/table[1]/tbody/tr[5]/td[4]/text()').extract()[0]\n\n\n for i in self.item:\n try:\n self.item[i] = self.item[i].strip()\n\n except Exception:\n pass\n print \"*\" * 30\n\n\n @classmethod\n def Miss_Gao_Info(self, response, category):\n # Miss Gao\n\n print \"Miss Gao Info saving ....\"\n self.item = {}\n self.item[\"category\"] = category\n\n\n tbody = response.xpath('//*[@id=\"searchlist\"]/table/tbody/tr')\n\n\n for i in tbody:\n # print i.extract()\n # print \"%\" * 10\n try:\n allinfo = i.xpath(\"td[2]\").extract()[0]\n # print \"================\"\n # print \"category: \" + category\n # print \"================\"\n\n try:\n self.item[\"phone_nunber\"] = allinfo.split(\"<br>\")[2].strip()\n print \"phone_number\" + allinfo.split(\"<br>\")[2].strip()\n except:\n self.item[\"phone_nunber\"] = \"-\"\n\n try:\n self.item[\"Enterprise_address\"] = allinfo.split(\"<br>\")[3].split(\"</td>\")[0].strip()\n print \"Enterprise_address\" + allinfo.split(\"<br>\")[3].split(\"</td>\")[0].strip()\n except:\n self.item[\"Enterprise_address\"] = \"-\"\n\n try:\n self.item[\"company_name\"] = i.xpath(\"td[2]/a/text()\").extract()[0].strip()\n print \"company_name\" + i.xpath(\"td[2]/a/text()\").extract()[0].strip()\n except:\n self.item[\"company_name\"] = \"-\"\n\n try:\n self.item[\"Legal_representative\"] = allinfo.split(\"<br>\")[1].strip()\n print \"Legal_representative: \" + allinfo.split(\"<br>\")[1].strip()\n except:\n self.item[\"Legal_representative\"] = \"-\"\n\n self.item[\"Business_scope\"] = category\n\n try:\n self.item[\"status\"] = i.xpath('td/span[@class=\"ma_cbt_green m-l-xs\"]/text()').extract()[0].strip()\n print \"status: \" + i.xpath('td/span[@class=\"ma_cbt_green m-l-xs\"]/text()').extract()[0].strip()\n except:\n self.item[\"status\"] = \"-\"\n\n self.item[\"category\"] = category\n\n DjORM.save_info_For_Miss_Gao(self.item)\n\n # print \"***\" * 9\n except:\n print \"---- page analysis error -----\"\n pass\n" }, { "alpha_fraction": 0.5152224898338318, "alphanum_fraction": 0.6143637895584106, "avg_line_length": 44.75, "blob_id": "f8caffdedba8449162799bd02b40167a41fa3687", "content_id": "6a03cc86341d8ab8736328bb6b8a15a31a553df5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1281, "license_type": "no_license", "max_line_length": 122, "num_lines": 28, "path": "/szsj/dj_sz/hightech/migrations/0014_gao_company_info.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.5 on 2017-03-17 10:01\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, 
models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('hightech', '0013_auto_20170316_1018'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Gao_Company_Info',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('phone_nunber', models.CharField(max_length=255, verbose_name='\\u7535\\u8bdd\\u53f7\\u7801')),\n ('Enterprise_address', models.CharField(max_length=255, verbose_name='\\u4f01\\u4e1a\\u5730\\u5740')),\n ('company_name', models.CharField(max_length=999, verbose_name='\\u4f01\\u4e1a\\u5168\\u540d')),\n ('Legal_representative', models.CharField(max_length=255, verbose_name='\\u6cd5\\u5b9a\\u4ee3\\u8868\\u4eba')),\n ('Business_scope', models.CharField(max_length=255, verbose_name='\\u7ecf\\u8425\\u8303\\u56f4')),\n ('status', models.CharField(max_length=255, verbose_name='\\u72b6\\u6001')),\n ('category', models.CharField(max_length=255, verbose_name='\\u5206\\u7c7b')),\n ],\n ),\n ]\n" }, { "alpha_fraction": 0.6499999761581421, "alphanum_fraction": 0.6499999761581421, "avg_line_length": 5.666666507720947, "blob_id": "e7f860a029293a5c4d2d64ee87ed8e0239f8400e", "content_id": "d877635cf64ccbfdfd133e109b051a095c53b4d8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 54, "license_type": "permissive", "max_line_length": 12, "num_lines": 6, "path": "/proxy_pool/README.md", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# proxy_pool\npython 代理池\n\n### TODO\n\n慢慢实现\n" }, { "alpha_fraction": 0.5162538886070251, "alphanum_fraction": 0.5201238393783569, "avg_line_length": 32.921051025390625, "blob_id": "d2c881519554b4d720a702813bcfb2bda58e3f16", "content_id": "970272d11417f319bac1eeda895db56f684ea8c3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1504, "license_type": "no_license", "max_line_length": 70, "num_lines": 38, "path": "/qiubai_CrawlSpider/spiders/qiubai/items.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# Define here the models for your scraped items\n#\n# See documentation in:\n# http://doc.scrapy.org/en/latest/topics/items.html\n\nimport scrapy\n\n\nclass QiuShiItem(scrapy.Item):\n # define the fields for your item here like:\n # name = scrapy.Field()\n # 笑话相关\n page_url = scrapy.Field() # 来源地址\n user = scrapy.Field() # 用户\n content = scrapy.Field() # 内容\n type = scrapy.Field() # 附带样式,比如图片或者视频 1 为图片, 2为视频, 0为只有文字\n url = scrapy.Field() # 图片地址(视频默认有封面)\n url0 = scrapy.Field() # 如果是视频的话,这里就是视频地址\n smiling = scrapy.Field() # 笑脸数量\n comment_count = scrapy.Field()# 评论数量\n # comment = scrapy.Field() # 评论\n\n# class Comment(scrapy.Item):\n cuser = scrapy.Field() # 评论的用户\n comment = scrapy.Field() # 评论的内容\n\nclass User(scrapy.Item):\n name = scrapy.Field() # 用户名\n fans = scrapy.Field() # 粉丝数量\n follow = scrapy.Field() # 关注数量\n comment = scrapy.Field() # 评论数量\n\n marriage = scrapy.Field() # 婚姻状态\n occupation = scrapy.Field() # 职业\n constellation = scrapy.Field() # 星座\n age = scrapy.Field() # 糗龄\n\n\n\n" }, { "alpha_fraction": 0.514998197555542, "alphanum_fraction": 0.529815673828125, "avg_line_length": 29.700000762939453, "blob_id": "fcce9a89f751136d86c1d54a39eef5ff8191a86e", "content_id": "0080daa9731eaabdc4c07703cd6849a3bb7a71b7", "detected_licenses": [ "BSD-2-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2785, "license_type": "permissive", 
"max_line_length": 126, "num_lines": 90, "path": "/akali/spider/spider/spiders/a17173.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport scrapy\nfrom scrapy.http import Request\n\nimport syslog\n\nimport logging\n\nfrom scrapy.mail import MailSender\n\n# from . import settings\n\n\nfrom spider import settings\n\nsyslog.openlog(ident=\"spider\", logoption=syslog.LOG_PID, facility=syslog.LOG_LOCAL7)\n\nlogger = logging.getLogger(\"spiders\")\n\n\n\nclass A17173Spider(scrapy.Spider):\n name = \"17173\"\n allowed_domains = [\"17173.com\"]\n start_urls = (\n 'http://acg.17173.com/cosplay/',\n )\n\n\n def parse(self, response):\n\n\n image_page_url = response.xpath('//div[@class=\"comm-pn\"]/ul/li[@class=\"item\"]/div/a/@href').extract()\n for i in image_page_url:\n yield Request(url=i, callback=self.get_image_details)\n\n try:\n next = response.xpath('//div[@class=\"pagination\"]/ul/li/a/@href').extract()\n for i in next:\n\n logger.error(u\"开始爬详细页面信息\")\n # try:\n # # mailer = MailSender(smtphost=\"172.16.201.200\", mailfrom=\"[email protected]\", smtpuser=\"aric\",\n # # smtppass=\"123456\", smtpport=25)\n #\n # # mailer.send(to=\"[email protected]\", subject=\"helloworld\", body=\"hello body\")\n # except Exception as e:\n # print e\n yield Request(url=i, callback=self.parse)\n\n except:\n pass\n\n\n def get_image_details(self, response):\n\n image_url = response.xpath('//div[@class=\"gb-final-mod-article\"]/p[@class=\"p-image\"]/a/@href').extract()\n if len(image_url) < 1:\n image_url= response.xpath('//div[@class=\"gb-final-mod-article\"]/p[@align=\"center\"]/a/img/@src').extract()\n if len(image_url) < 1:\n image_url = response.xpath('//div[@class=\"gb-final-mod-article\"]/p[@class=\"p-image\"]/img/@src').extract()\n\n title = response.xpath('//div[@class=\"gb-final-pn-article\"]/h1[@class=\"gb-final-tit-article\"]/text()').extract()\n item = []\n for i in image_url:\n if len(image_url) < 1:\n pass\n else:\n aa = i.split('url=')\n if len(aa) > 1:\n item.append(aa[1])\n else:\n item.append(i)\n try:\n next = response.xpath('//div[@class=\"gb-final-mod-pagination-in\"]/a[@class=\"gb-final-page-next\"]/@href').extract()\n for i in next:\n Request(url=i, callback=self.get_image_details)\n\n syslog.syslog(syslog.LOG_INFO, i)\n syslog.syslog(syslog.LOG_ERR, i)\n\n except:\n pass\n\n # items = AcgItem()\n # items[\"name\"] = title\n # items[\"url\"] = response.url\n # items[\"image\"] = item\n\n # return items\n\n\n\n\n" }, { "alpha_fraction": 0.5245901346206665, "alphanum_fraction": 0.5381998419761658, "avg_line_length": 37.488094329833984, "blob_id": "3d1545e5af0090cedd115015b9976e7328c499e2", "content_id": "ac449249d8c1a56e7af6267a02b9834a2bc6b542", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3483, "license_type": "no_license", "max_line_length": 124, "num_lines": 84, "path": "/szsj/sp_sz/szsti/spiders/gao.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport scrapy\nfrom selenium import webdriver\n# from selenium.webdriver.common.keys import Keys\n# from selenium.webdriver import DesiredCapabilities\nfrom scrapy.selector import Selector\nimport time\nfrom szsti.utils.Save2DB import DjORM\nfrom szsti.utils.PageAnaylist import qichachca\n\n\nclass SzSpider(scrapy.Spider):\n name = \"gao\"\n allowed_domains = [\"qichacha.com\"]\n start_urls = ['http://www.qichacha.com/user_login']\n\n # 初始化,这里要调用无头浏览器\n def __init__(self):\n self.driver 
= webdriver.Chrome()\n # 定义Number值,如果中间爬取失败,可以设置继续爬取值重新爬取数据,默认从0开始\n # Number = 2053\n # self.units = DjORM.query_itjuzi(Number)\n\n self.units = [u\"洗衣机\", u\"玻璃瓶\", u\"玻璃容器\", u\"化妆品瓶\", u\"玻璃瓶涂装\"]\n\n\n def parse(self, response):\n\n self.driver.get(response.url)\n\n # 打开登录页面,这里需要手动输入验证码: 默操作时间是20秒\n print response.url\n\n self.driver.find_element_by_name(\"nameNormal\").send_keys(\"13771691089\")\n self.driver.find_element_by_name(\"pwdNormal\").send_keys(\"liang152191\")\n print u\"你有20秒钟时间获取验证码并点击登录按钮....\"\n time.sleep(20)\n\n # 随便查询一个试试看\n self.driver.find_element_by_id(\"searchkey\").send_keys(u\"百度\")\n self.driver.find_element_by_id(\"V3_Search_bt\").click()\n time.sleep(2)\n # 正式查询开始\n\n for unit in self.units:\n\n self.driver.find_element_by_id(\"headerKey\").clear()\n self.driver.find_element_by_id(\"headerKey\").send_keys(unit)\n self.driver.find_element_by_css_selector(\".btn.btn-primary.top-searchbtn.btn-icon.btn-top\").click()\n index = 1\n time.sleep(3)\n try:\n while self.driver.find_element_by_link_text(\">\"):\n print \"================\"\n print \"current index: \" + str(index)\n print \"current search category: \" + unit\n print \"================\"\n try:\n self.driver.current_window_handle\n aa = self.driver.page_source\n response = Selector(text=aa)\n qichachca.Miss_Gao_Info(response, unit)\n\n index = index + 1\n\n self.driver.find_element_by_xpath('//*[@id=\"ajaxlist\"]/div[3]/ul/li[8]/input').clear()\n self.driver.find_element_by_xpath('//*[@id=\"ajaxlist\"]/div[3]/ul/li[8]/input').send_keys(index)\n\n except:\n\n try:\n self.driver.find_element_by_xpath('//*[@id=\"ajaxlist\"]/div[3]/ul/li[10]/input').clear()\n self.driver.find_element_by_xpath('//*[@id=\"ajaxlist\"]/div[3]/ul/li[10]/input').send_keys(index)\n except:\n self.driver.find_element_by_xpath('//*[@id=\"ajaxlist\"]/div[3]/ul/li[9]/input').clear()\n self.driver.find_element_by_xpath('//*[@id=\"ajaxlist\"]/div[3]/ul/li[9]/input').send_keys(index)\n\n\n self.driver.find_element_by_id(\"jumpPage\").click()\n print \"next page clicked ....\"\n time.sleep(3)\n except:\n pass\n self.driver.quit()\n" }, { "alpha_fraction": 0.5880705118179321, "alphanum_fraction": 0.5887935161590576, "avg_line_length": 38.65949630737305, "blob_id": "882f9f28987da5fcc6f74ace733842fafced7d9f", "content_id": "1ddfedef1a1dcde9b7fccb7e282ded130c00d4d8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11563, "license_type": "no_license", "max_line_length": 288, "num_lines": 279, "path": "/szsj/sp_sz/szsti/utils/Save2DB.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "#-*- coding: utf-8 -*-\n\n\nimport sys, os\nfrom szsti.settings import djpath\nif djpath not in sys.path:\n sys.path.append(djpath)\nimport django\nos.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"dj_sz.settings\")\ndjango.setup()\nfrom hightech.models import HightechInfo, Unit_Base_Info, Unit_Base_Info_Shareholder_Info, Unit_Base_Info_Changed_Info, Unit_annual_reports_Base_Info, Unit_annual_reports_Website_Info, Unit_annual_Promoters_and_reports_investment_Info, IDC_Base_Info, Itjuzi_Company_Info, Gao_Company_Info\n\nclass DjORM(object):\n\n #--------------------------------------------------------------\n # 以下数据来自: szsti.gov.cn\n #--------------------------------------------------------------\n\n @classmethod\n def save_hightechInfo(self, item):\n # 企业信息 szsti.gov.cn\n hightechInfo = HightechInfo()\n try:\n hightechInfo.number = item['number'] #private key\n hightechInfo.KeyID = 
item['KeyID']\n            hightechInfo.Unit_name = item['Unit_name']\n            hightechInfo.address = item['address']\n            hightechInfo.Subordinate_Domain = item['Subordinate_Domain']\n            hightechInfo.type = item['type']\n\n            hightechInfo.save()\n\n        except Exception:\n            # An error occurred here; keep this URL so it can be handled separately later\n            print \"error================\"\n            pass\n\n
    # --------------------------------------------------------------\n    # The data below comes from: http://dian.idcquan.com\n    # --------------------------------------------------------------\n    @classmethod\n    def save_IDC_Base_Info(self, item):\n        idc_Base_Info = IDC_Base_Info()\n        try:\n            idc_Base_Info.url = item['url']\n            idc_Base_Info.name = item['name']\n            idc_Base_Info.company = item['company']\n            idc_Base_Info.zone = item['zone']\n            idc_Base_Info.address = item['address']\n            idc_Base_Info.phone_number = item['phone_number']\n            idc_Base_Info.website = item['website']\n            idc_Base_Info.Main_business = item['Main_business']\n            idc_Base_Info.Satisfaction = item['Satisfaction']\n\n            idc_Base_Info.QQ = item['qq']\n\n            idc_Base_Info.save()\n\n        except Exception as e:\n            # print e\n            print \"...save IDC Base INFO Error ...\"\n            # print \"___________________________________\"\n            pass\n\n\n\n\n
    #--------------------------------------------------------------\n    # The data below comes from: qichacha.com\n    #--------------------------------------------------------------\n    @classmethod\n    def save_Unit_Base_Info(self, item):\n        # Basic company information\n        unit_Base_Info = Unit_Base_Info()\n        try:\n            # Detailed company information - 1\n            unit_Base_Info.searching_name = item['searching_name']\n            unit_Base_Info.searched_name = item['searched_name']\n\n\n\n            # NOTE: 'phone_nunber' is misspelled, but presumably matches the field name defined on the model\n            unit_Base_Info.phone_nunber = item['phone_nunber']\n            # unit_Base_Info.email = item['email']\n            # unit_Base_Info.website = item['website']\n            # unit_Base_Info.address = item['address']\n            # Business registration information\n            unit_Base_Info.code = item['code']\n            unit_Base_Info.Registration_number = item['Registration_number']\n            unit_Base_Info.Organization_code = item['Organization_code']\n            unit_Base_Info.Operating_state = item['Operating_state']\n            unit_Base_Info.Legal_representative = item['Legal_representative']\n            unit_Base_Info.registered_capital = item['registered_capital']\n            unit_Base_Info.Company_type = item['Company_type']\n            unit_Base_Info.date_of_establishment = item['date_of_establishment']\n            unit_Base_Info.Operating_period = item['Operating_period']\n            unit_Base_Info.registration_authority = item['registration_authority']\n            unit_Base_Info.Date_of_issue = item['Date_of_issue']\n            unit_Base_Info.company_size = item['company_size']\n            unit_Base_Info.Subordinate_industry = item['Subordinate_industry']\n            unit_Base_Info.English_name = item['English_name']\n            unit_Base_Info.Name_used_Before = item['Name_used_Before']\n            unit_Base_Info.Enterprise_address = item['Enterprise_address']\n            unit_Base_Info.Business_scope = item['Business_scope']\n\n            unit_Base_Info.save()\n\n        except Exception:\n            # An error occurred here; keep this URL so it can be handled separately later\n            print \"error================\"\n            pass\n\n\n
    @classmethod\n    def save_Unit_Base_Info_Shareholder_Info(self, item):\n        # Shareholder information\n        unit_Base_Info_Shareholder_Info = Unit_Base_Info_Shareholder_Info()\n        try:\n            unit_Base_Info_Shareholder_Info.searching_name = item['searching_name']\n            unit_Base_Info_Shareholder_Info.searched_name = item['searched_name']\n\n\n\n            unit_Base_Info_Shareholder_Info.Shareholder = item['Shareholder']\n            unit_Base_Info_Shareholder_Info.Shareholding_ratio = item['Shareholding_ratio']\n            unit_Base_Info_Shareholder_Info.Subscribed_capital_contribution = item['Subscribed_capital_contribution']\n            unit_Base_Info_Shareholder_Info.Subscription_Date = item['Subscription_Date']\n            unit_Base_Info_Shareholder_Info.Shareholder_type = item['Shareholder_type']\n            unit_Base_Info_Shareholder_Info.save()\n\n        except Exception:\n            # An error occurred here; keep this URL so it can be handled separately later\n            print \"error================\"\n            pass\n\n
    @classmethod\n    def save_Unit_Base_Info_Changed_Info(self, item):\n        # Change-record information\n        unit_Base_Info_Changed_Info = Unit_Base_Info_Changed_Info()\n        try:\n            unit_Base_Info_Changed_Info.searching_name = item['searching_name']\n            unit_Base_Info_Changed_Info.searched_name = item['searched_name']\n\n            unit_Base_Info_Changed_Info.Change_date = item['Change_date']\n            unit_Base_Info_Changed_Info.Change_item = item['Change_item']\n            unit_Base_Info_Changed_Info.Before_change = item['Before_change']\n            unit_Base_Info_Changed_Info.After_change = item['After_change']\n            unit_Base_Info_Changed_Info.save()\n\n        except Exception:\n            # An error occurred here; keep this URL so it can be handled separately later\n            print \"error================\"\n            pass\n\n
    @classmethod\n    def save_Unit_annual_reports_Base_Info(self, item):\n        # Annual report - basic company information\n        unit_annual_reports_Base_Info = Unit_annual_reports_Base_Info()\n        try:\n            unit_annual_reports_Base_Info.searching_name = item['searching_name']\n            unit_annual_reports_Base_Info.searched_name = item['searched_name']\n\n            unit_annual_reports_Base_Info.Registration_number = item['Registration_number']\n            unit_annual_reports_Base_Info.Business_state = item['Business_state']\n            unit_annual_reports_Base_Info.Enterprise_telephone = item['Enterprise_telephone']\n            unit_annual_reports_Base_Info.Email = item['Email']\n            unit_annual_reports_Base_Info.Postcode = item['Postcode']\n            unit_annual_reports_Base_Info.number_of_people_engaged = item['number_of_people_engaged']\n            unit_annual_reports_Base_Info.residence = item['residence']\n            unit_annual_reports_Base_Info.transfer_of_shareholder_equity = item['transfer_of_shareholder_equity']\n            unit_annual_reports_Base_Info.investment_Info = item['investment_Info']\n            unit_annual_reports_Base_Info.save()\n\n        except Exception:\n            # An error occurred here; keep this URL so it can be handled separately later\n            print \"error================\"\n            pass\n\n
    @classmethod\n    def save_Unit_annual_reports_Website_Info(self, item):\n        # Annual report - website / online-store information\n        unit_annual_reports_Website_Info = Unit_annual_reports_Website_Info()\n        try:\n            unit_annual_reports_Website_Info.searching_name = item['searching_name']\n            unit_annual_reports_Website_Info.searched_name = item['searched_name']\n            unit_annual_reports_Website_Info.Web_Type = item['Web_Type']\n            unit_annual_reports_Website_Info.Web_Name = item['Web_Name']\n            unit_annual_reports_Website_Info.Web_Site = item['Web_Site']\n            unit_annual_reports_Website_Info.save()\n\n        except Exception:\n            # An error occurred here; keep this URL so it can be handled separately later\n            print \"error================\"\n            pass\n\n\n
    @classmethod\n    def save_Unit_annual_Promoters_and_reports_investment_Info(self, item):\n        # Annual report - promoters and capital contribution information\n        unit_annual_Promoters_and_reports_investment_Info = Unit_annual_Promoters_and_reports_investment_Info()\n        try:\n            unit_annual_Promoters_and_reports_investment_Info.searching_name = item['searching_name']\n            unit_annual_Promoters_and_reports_investment_Info.searched_name = item['searched_name']\n\n\n\n            unit_annual_Promoters_and_reports_investment_Info.Sponsor = item['Sponsor']\n            unit_annual_Promoters_and_reports_investment_Info.Subscribed_capital_contribution = item['Subscribed_capital_contribution']\n            unit_annual_Promoters_and_reports_investment_Info.Time_of_subscription = item['Time_of_subscription']\n            # NOTE: the next line repeated the Subscribed_capital_contribution assignment above, so it is kept commented out\n            # unit_annual_Promoters_and_reports_investment_Info.Subscribed_capital_contribution = item['Subscribed_capital_contribution']\n            
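# --- Aside: a minimal sketch of a reusable error handler for the DjORM.save_* methods
# --- above, which currently swallow every failure with a bare `print "error..." / pass`.
# --- This is illustrative, not the project's code; `_log_save_error` is a hypothetical
# --- name, and only the standard-library `logging` module is assumed.
import logging

logger = logging.getLogger('szsti.save2db')

def _log_save_error(model_name, item):
    # Called from inside an `except` block, logging.exception records the active
    # traceback, so the real cause of the failed save is preserved for later triage.
    logger.exception(u"failed to save %s item: %r", model_name, item)

# usage, inside any of the save_* except blocks:
#     _log_save_error('Unit_Base_Info', item)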
unit_annual_Promoters_and_reports_investment_Info.Paid_in_capital_contribution = item['Paid_in_capital_contribution']\n unit_annual_Promoters_and_reports_investment_Info.Investment_time = item['Investment_time']\n unit_annual_Promoters_and_reports_investment_Info. Investment_method = item['Investment_method']\n\n unit_annual_Promoters_and_reports_investment_Info.save()\n\n except Exception:\n # 这有报错信息,将这个URL留下来以后单独处理\n print \"error================\"\n pass\n\n\n\n @classmethod\n def save_info_For_Miss_Gao(self, item):\n # 企业年报-发起人及出资信息\n gao_Company_Info = Gao_Company_Info()\n try:\n gao_Company_Info.phone_nunber = item[\"phone_nunber\"]\n gao_Company_Info.Enterprise_address = item[\"Enterprise_address\"]\n gao_Company_Info.company_name = item[\"company_name\"]\n gao_Company_Info.Legal_representative = item[\"Legal_representative\"]\n gao_Company_Info.Business_scope = item[\"Business_scope\"]\n gao_Company_Info.status = item[\"status\"]\n gao_Company_Info.category = item[\"category\"]\n\n gao_Company_Info.save()\n\n\n except Exception:\n # 这有报错信息,将这个URL留下来以后单独处理\n print \" = save info error =\"\n pass\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n @classmethod\n def query(self, Number):\n if Number == 0:\n aa = HightechInfo.objects.all()\n # print \"ID == 0\"\n\n else:\n aa = HightechInfo.objects.filter(id__gte=Number)\n # print \"ID != 0\"\n return aa\n\n @classmethod\n def query_itjuzi(self, Number):\n if Number == 0:\n aa = Itjuzi_Company_Info.objects.all()\n # print \"ID == 0\"\n\n else:\n aa = Itjuzi_Company_Info.objects.filter(id__lte=Number)\n # print \"ID != 0\"\n return aa\n" }, { "alpha_fraction": 0.7099999785423279, "alphanum_fraction": 0.7116666436195374, "avg_line_length": 29.049999237060547, "blob_id": "16488596140d2506f2aba15525c223cd5f781b8f", "content_id": "a9b54765a405dff4634515970ab5e3f4cd13dbff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 608, "license_type": "no_license", "max_line_length": 88, "num_lines": 20, "path": "/baixihecom/bossproxy/views.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "#coding:utf-8\n# from django.shortcuts import render\n\n# Create your views here.\nfrom django.http import HttpResponse\nfrom django.views.decorators.csrf import csrf_exempt\nfrom utils import Boss_Proxy\n\nbp = Boss_Proxy.DO_proxy\n\n@csrf_exempt\ndef interface(request):\n if request.method == 'GET':\n response = HttpResponse(bp.CheckSignature(request), content_type=\"text/plain\")\n return response\n if request.method == 'POST':\n response = HttpResponse(bp.Msg_Gateway(request), content_type=\"application/xml\")\n return response\n else:\n return HttpResponse(u\"禁止访问\")" }, { "alpha_fraction": 0.5554279088973999, "alphanum_fraction": 0.562894880771637, "avg_line_length": 19.690475463867188, "blob_id": "7ceb0b6892e8e18017d610c412e1fd07dbf440b6", "content_id": "00d4caf03a9bb6bfc6cf3a84623fcb07854ab31c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1807, "license_type": "no_license", "max_line_length": 66, "num_lines": 84, "path": "/monster/spider/spider/utils/orm.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport sys, os\n\nfrom spider.settings import ormpath\nif ormpath not in sys.path:\n sys.path.append(ormpath)\n\nimport django\nos.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"orm.settings\")\ndjango.setup()\n\n\n\nimport hashlib\n\n\n\nfrom webapps.models import Day_News, Daily_News, 
News_Real_Content\n\n\nclass News(object):\n\n def _url_2_md5(self, url):\n\n m2 = hashlib.md5()\n m2.update(url)\n return m2.hexdigest()\n\n\n def save_Day_url(self, url):\n # 保存前一天的URL\n\n day = Day_News()\n day.url = url\n day.md5 = self._url_2_md5(url)\n day.save()\n\n\n def save_Daily_url(self, url):\n # 保存当天的所有URL\n daily = Daily_News()\n daily.url = url\n daily.md5 = self._url_2_md5(url)\n daily.save()\n\n\n def save_News(self, content):\n # 保存当条新闻\n inews = News_Real_Content()\n\n\n try:\n # news.news = content['news']\n inews.url = content['url']\n inews.title = content['title']\n inews.content = content['content']\n inews.content_time = content['content_time']\n inews.content_from = content['content_from']\n inews.content_type = content['content_type']\n inews.content_web = content['content_web']\n inews.save_time = content['save_time']\n inews.content_html = content['content_html']\n\n # print inews.content_html\n inews.save()\n except Exception as e:\n print e\n\n\n\n\n\n\n\n def get_day_news_url(self):\n # 获取按天计算的URL\n news = Day_News.objects.all()\n return news\n\n def get_daily_news_url(self):\n # 获取按天计算的URL\n news = Daily_News.objects.all()\n return news\n\n\n\n" }, { "alpha_fraction": 0.5308784246444702, "alphanum_fraction": 0.5811277627944946, "avg_line_length": 46.400001525878906, "blob_id": "8e9b971f32787bc30d91728192cbda285fc55de9", "content_id": "95094700399d32cf64d348806b255271150f3e33", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2607, "license_type": "no_license", "max_line_length": 138, "num_lines": 55, "path": "/mikufan/frontend/webapps/migrations/0001_initial.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.utils.timezone\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='Ads',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('ad', models.CharField(max_length=255, verbose_name='\\u5e7f\\u544a\\u5185\\u5bb9')),\n ('isshow', models.BooleanField(default=False)),\n ],\n ),\n migrations.CreateModel(\n name='Coser',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('title', models.CharField(default='NUll', max_length=255, verbose_name='\\u6807\\u9898')),\n ('content', models.TextField(default='NUll', max_length=255, verbose_name='\\u6b63\\u6587')),\n ('istop', models.BooleanField(default=False)),\n ('comefrom', models.CharField(default='Mikufan', max_length=255, verbose_name='\\u6765\\u6e90\\u7f51\\u7ad9')),\n ('topimage', models.CharField(default='null', max_length=255, verbose_name='\\u5c01\\u9762\\u56fe\\u7247')),\n ('addtime', models.DateTimeField(default=django.utils.timezone.now, verbose_name='\\u6dfb\\u52a0\\u65f6\\u95f4', blank=True)),\n ],\n ),\n migrations.CreateModel(\n name='Coser_Category',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('category', models.CharField(default='\\u6e38\\u620f', max_length=255, verbose_name='\\u5206\\u7c7b')),\n ],\n ),\n migrations.CreateModel(\n name='Images',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('relate_url', models.CharField(default='NUll', max_length=255, 
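# --- Aside: a minimal sketch of md5-keyed URL de-duplication built on the News helper
# --- above. It assumes the same Django setup that orm.py configures, that Day_News's
# --- 'md5' column is what makes a URL unique (as _url_2_md5 suggests), and uses Django's
# --- standard get_or_create; 'save_day_url_once' is a hypothetical name.
import hashlib

def save_day_url_once(url):
    md5 = hashlib.md5(url).hexdigest()   # same digest the News._url_2_md5 helper produces
    obj, created = Day_News.objects.get_or_create(md5=md5, defaults={'url': url})
    return created                       # False means the URL was already stored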
verbose_name='\\u56fe\\u7247\\u5730\\u5740')),\n ('real_url', models.CharField(default='NUll', max_length=255, verbose_name='\\u771f\\u5b9e\\u56fe\\u7247\\u5730\\u5740')),\n ('coser', models.ForeignKey(related_name='Coser_Photo', to='webapps.Coser')),\n ],\n ),\n migrations.AddField(\n model_name='coser',\n name='category',\n field=models.ForeignKey(related_name='Coser_Category', to='webapps.Coser_Category'),\n ),\n ]\n" }, { "alpha_fraction": 0.5186194181442261, "alphanum_fraction": 0.6030880808830261, "avg_line_length": 38.32143020629883, "blob_id": "4b4148c5b3dce4f6e1376a6530e8252c881d4ade", "content_id": "40b0fa22b1ad17a99161139623cd76d3fdae8ac7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1101, "license_type": "no_license", "max_line_length": 114, "num_lines": 28, "path": "/szsj/dj_sz/hightech/migrations/0001_initial.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.5 on 2017-02-23 05:14\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='HightechInfo',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('url', models.CharField(max_length=255, verbose_name='\\u5e8f\\u53f7')),\n ('KeyID', models.CharField(max_length=255, verbose_name='\\u8bc1\\u4e66\\u7f16\\u53f7')),\n ('Unit_name', models.CharField(max_length=255, verbose_name='\\u5355\\u4f4d\\u540d\\u79f0')),\n ('address', models.CharField(max_length=255, verbose_name='\\u5730\\u5740')),\n ('Subordinate_Domain', models.CharField(max_length=255, verbose_name='\\u6240\\u5c5e\\u9886\\u57df')),\n ('type', models.CharField(max_length=255, verbose_name='\\u9ad8\\u4f01\\u7c7b\\u522b')),\n ],\n ),\n ]\n" }, { "alpha_fraction": 0.5491867065429688, "alphanum_fraction": 0.5584818124771118, "avg_line_length": 24.84000015258789, "blob_id": "82afc7f53a053d27384175173ea0e41de1e501ff", "content_id": "44ae529375df1701480d00d826cf239ba8bdeeb6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1299, "license_type": "no_license", "max_line_length": 95, "num_lines": 50, "path": "/monster/spider/spider/spiders/ifeng_1.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport scrapy\n\nfrom scrapy.http import Request\n\n\nfrom spider.utils import orm\n\nclass ExampleSpider(scrapy.Spider):\n name = 'f'\n allowed_domains = ['ifeng.com']\n start_urls = ['http://news.ifeng.com/listpage/11502/0/1/rtlist.shtml']\n\n\n def parse(self, response):\n\n current_day_news = response.url\n aa = orm.News()\n\n aa.save_Day_url(current_day_news)\n last_day_news = response.xpath('//*[@id=\"backDay\"]/a/@href').extract()[0]\n\n\n\n yield Request(url=last_day_news, callback=self.parse)\n\n\n\n #\n # def parse_lastday(self, response):\n #\n #\n # news_lists = response.xpath('//div[@class=\"newsList\"]/ul')\n #\n # for i in news_lists:\n # news_list = i.xpath('li/a/@href').extract()\n # for news in news_list:\n # print u'新闻地址: ' + news\n # yield Request(url=news, callback=self.parse_content)\n #\n # try:\n # next_page = response.xpath('//div[@class=\"m_page\"]/span[2]/a/@href').extract()[0]\n # except:\n # next_page = response.xpath('//div[@class=\"m_page\"]/span/a/@href').extract()[0]\n #\n #\n # yield 
Request(url=next_page, callback=self.parse_lastday)\n #\n #\n # def parse_content(self, responese):" }, { "alpha_fraction": 0.5275915265083313, "alphanum_fraction": 0.5353274941444397, "avg_line_length": 28.378787994384766, "blob_id": "a60cf2af6332756f40b231bcf09ffd105852e8a1", "content_id": "84feceb79b70d386ad7d9394e9a526614af022e0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1963, "license_type": "no_license", "max_line_length": 108, "num_lines": 66, "path": "/szmap/spider/spider/spiders/aa.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport scrapy\nfrom scrapy.http import Request\n\n\n# Get Django Env\nimport sys, os\ndjpath = \"/home/aric/PycharmProjects/aaa/szmap\"\nif djpath not in sys.path:\n sys.path.append(djpath)\nimport django\nos.environ.setdefault(\"DJANGO_SETTINGS_MODULE\", \"szmap.settings\")\ndjango.setup()\n\n# from Django get models\n\nfrom webapp.models import szmap\n\nimport time\n\nclass AaSpider(scrapy.Spider):\n name = \"aa\"\n allowed_domains = [\"sutpc.com\"]\n start_urls = ['http://szmap.sutpc.com/sectcongmore.aspx']\n\n def parse(self, response):\n # print response.body\n table = response.xpath('//*[@id=\"zyweb\"]/div[2]/div[2]/div/table/tr')\n\n for i in table:\n szmap_data = szmap()\n zone = i.xpath('td[1]/text()').extract()[0]\n zhishu = i.xpath('td[2]/text()').extract()[0]\n chesu = i.xpath('td[3]/text()').extract()[0]\n try:\n dengji = i.xpath('td[4]/span/text()').extract()[0]\n except:\n dengji = u\"等级\"\n # pass\n print \"*\" * 30\n szmap_data.zone = zone.strip()\n szmap_data.zhishu = zhishu.strip()\n szmap_data.chesu = chesu.strip()\n szmap_data.dengji = dengji.strip()\n os.environ['TZ'] = 'Asia/Shanghai'\n time.tzset()\n szmap_data.addtime = time.strftime('%Y-%m-%d %H:%M', time.localtime(time.time()))\n if szmap_data.dengji == u'等级':\n pass\n else:\n szmap_data.save()\n\n print u\"区域: \" + szmap_data.zone\n print u\"区域: \" + szmap_data.zhishu\n print u\"区域: \" + szmap_data.chesu\n print u\"区域: \" + szmap_data.dengji\n\n print \"Save.....OK\"\n\n\n for i in range(2,4):\n # print i\n yield Request(url='http://szmap.sutpc.com/sectcongmore.aspx?page='+str(i), callback=self.parse)\n\n\n pass\n" }, { "alpha_fraction": 0.538530707359314, "alphanum_fraction": 0.5499250292778015, "avg_line_length": 34.105262756347656, "blob_id": "0863395017eedb6aee85a2624f91deaf049ad934", "content_id": "e3b14fe79ca1a255e9d9a8ce5afbe6ebf16e7502", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3567, "license_type": "no_license", "max_line_length": 111, "num_lines": 95, "path": "/szsj/sp_sz/szsti/spiders/qichacha.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport scrapy\nfrom selenium import webdriver\nfrom selenium.webdriver.common.proxy import *\n# from selenium.webdriver.common.keys import Keys\n# from selenium.webdriver import DesiredCapabilities\nfrom scrapy.selector import Selector\nimport time\nfrom szsti.utils.Save2DB import DjORM\n\nfrom szsti.utils.PageAnaylist import qichachca\n\n\nclass SzSpider(scrapy.Spider):\n name = \"qichacha\"\n allowed_domains = [\"qichacha.com\"]\n start_urls = ['http://www.qichacha.com/user_login']\n\n # 初始化,这里要调用无头浏览器\n def __init__(self):\n\n myProxy = \"http://192.168.2.160:1080\"\n\n proxy = Proxy({\n 'proxyType': ProxyType.MANUAL,\n 'httpProxy': myProxy,\n 'ftpProxy': myProxy,\n 'sslProxy': myProxy,\n 'noProxy': 
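# --- Aside (illustrative comments, not part of the original source): the Proxy mapping
# --- being built at this point in qichacha.py routes Firefox's HTTP, FTP and SSL traffic
# --- through the same upstream (http://192.168.2.160:1080 above), and 'noProxy' is left
# --- empty so that no host bypasses it. 'proxyType': ProxyType.MANUAL is what makes
# --- selenium honor these explicit entries instead of system or autodetected settings.
# --- For example, 'noProxy': 'localhost,127.0.0.1' would exempt local traffic.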
''})\n\n\n\n self.driver = webdriver.Firefox(proxy=proxy)\n # 定义Number值,如果中间爬取失败,可以设置继续爬取值重新爬取数据,默认从0开始\n # Number = 1849\n Number = 1605\n\n self.units = DjORM.query(Number)\n\n def parse(self, response):\n\n self.driver.get(response.url)\n\n # 打开登录页面,这里需要手动输入验证码: 默操作时间是20秒\n print response.url\n\n self.driver.find_element_by_name(\"nameNormal\").send_keys(\"13771691089\")\n self.driver.find_element_by_name(\"pwdNormal\").send_keys(\"liang152191\")\n print u\"你有20秒钟时间获取验证码并点击登录按钮....\"\n time.sleep(20)\n\n # 随便查询一个试试看\n self.driver.find_element_by_id(\"searchkey\").send_keys(u\"百度\")\n self.driver.find_element_by_id(\"V3_Search_bt\").click()\n time.sleep(2)\n # 正式查询开始\n\n for unit in self.units:\n print \"===========================\"\n print \"current search number:\" + str(unit.id)\n print \"===========================\"\n self.driver.find_element_by_id(\"headerKey\").clear()\n self.driver.find_element_by_id(\"headerKey\").send_keys(unit.Unit_name)\n self.driver.find_element_by_css_selector(\".btn.btn-primary.top-searchbtn.btn-icon.btn-top\").click()\n searching = self.driver.current_window_handle\n time.sleep(6)\n try:\n self.driver.find_element_by_class_name(\"ma_h1\").click()\n time.sleep(6)\n tabs = self.driver.window_handles\n for tab in tabs:\n if tab != searching:\n self.driver.switch_to_window(tab)\n aa = self.driver.page_source\n response = Selector(text=aa)\n\n qichachca.BaseInfo(response, unit.Unit_name)\n\n qichachca.Unit_Base_Shareholder_Info(response, unit.Unit_name)\n\n qichachca.Unit_Base_Changed_Info(response, unit.Unit_name)\n\n # 点击页面 进行二次查询\n # self.driver.find_element_by_xpath('//*[@id=\"company-nav\"]/ul/li[5]/a').click()\n # self.driver.current_window_handle\n # aa = self.driver.page_source\n # response = Selector(text=aa)\n # qichachca.Unit_annual_reports_Base_Info(response, unit.number)\n else:\n self.driver.close()\n time.sleep(2)\n except Exception as e:\n print str(e)\n pass\n self.driver.quit()\n" }, { "alpha_fraction": 0.5060039758682251, "alphanum_fraction": 0.6282109618186951, "avg_line_length": 73.76136016845703, "blob_id": "24211eba9da9e9950e075672b6308ab96d2fb81c", "content_id": "dd05d5fb81b4c159d225ced03ce779c5c079f947", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6579, "license_type": "no_license", "max_line_length": 139, "num_lines": 88, "path": "/szsj/dj_sz/hexun/migrations/0001_initial.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11 on 2017-04-10 07:52\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='base_Info',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('company_code', models.CharField(max_length=255, verbose_name='\\u516c\\u53f8\\u80a1\\u7968\\u4ee3\\u7801')),\n ('Company_name', models.CharField(max_length=255, verbose_name='\\u516c\\u53f8\\u540d\\u5b57')),\n ('daima', models.CharField(max_length=255, verbose_name='\\u80a1\\u7968\\u4ee3\\u7801')),\n ('quancheng', models.CharField(max_length=255, verbose_name='\\u516c\\u53f8\\u5168\\u79f0')),\n ('englishname', models.CharField(max_length=255, verbose_name='\\u516c\\u53f8\\u82f1\\u6587\\u540d\\u79f0')),\n ('cengyongming', models.CharField(max_length=255, verbose_name='\\u66fe\\u7528\\u540d')),\n ('chengliriqi', 
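# --- Aside: a minimal sketch of replacing the fixed time.sleep(...) pauses in the
# --- qichacha spider above with explicit waits. WebDriverWait and expected_conditions
# --- are standard selenium APIs; 'wait_for' is a hypothetical helper name and the CSS
# --- selector passed in is whatever element the caller is actually waiting on.
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

def wait_for(driver, css_selector, timeout=20):
    # Blocks until the element is present (or raises TimeoutException), so slow pages
    # no longer depend on a guessed sleep interval.
    return WebDriverWait(driver, timeout).until(
        EC.presence_of_element_located((By.CSS_SELECTOR, css_selector)))

# usage (illustrative), instead of time.sleep(6) after a search click:
#     wait_for(self.driver, '.ma_h1')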
models.CharField(max_length=255, verbose_name='\\u6210\\u7acb\\u65e5\\u671f')),\n ('suoshuhangye', models.CharField(max_length=255, verbose_name='\\u6240\\u5c5e\\u884c\\u4e1a')),\n ('suoshugannian', models.CharField(max_length=255, verbose_name='\\u6240\\u5c5e\\u6982\\u5ff5')),\n ('suoshudiyu', models.CharField(max_length=255, verbose_name='\\u6240\\u5c5e\\u5730\\u57df')),\n ('fadingdabiaoren', models.CharField(max_length=255, verbose_name='\\u6cd5\\u5b9a\\u4ee3\\u8868\\u4eba')),\n ('dulidongshi', models.CharField(max_length=255, verbose_name='\\u72ec\\u7acb\\u8463\\u4e8b')),\n ('zixunfuwujigou', models.CharField(max_length=255, verbose_name='\\u54a8\\u8be2\\u670d\\u52a1\\u673a\\u6784')),\n ('kuaijishiwusuo', models.CharField(max_length=255, verbose_name='\\u4f1a\\u8ba1\\u5e08\\u4e8b\\u52a1\\u6240')),\n ('zhengquanshifudaibiao', models.CharField(max_length=255, verbose_name='\\u8bc1\\u5238\\u4e8b\\u52a1\\u4ee3\\u8868')),\n ('faxingriqi', models.CharField(max_length=255, verbose_name='\\u53d1\\u884c\\u65e5\\u671f')),\n ('shangshiriqi', models.CharField(max_length=255, verbose_name='\\u4e0a\\u5e02\\u65e5\\u671f')),\n ('shangshijiaoyisuo', models.CharField(max_length=255, verbose_name='\\u4e0a\\u5e02\\u4ea4\\u6613\\u6240')),\n ('zhengquanleixing', models.CharField(max_length=255, verbose_name='\\u8bc1\\u5238\\u7c7b\\u578b')),\n ('liutongguben', models.CharField(max_length=255, verbose_name='\\u6d41\\u901a\\u80a1\\u672c')),\n ('zongguben', models.CharField(max_length=255, verbose_name='\\u603b\\u80a1\\u672c')),\n ('zhuchengxiaoshang', models.CharField(max_length=255, verbose_name='\\u4e3b\\u627f\\u9500\\u5546')),\n ('faxingjia', models.CharField(max_length=255, verbose_name='\\u53d1\\u884c\\u4ef7')),\n ('shangshisourikaipanjia', models.CharField(max_length=255, verbose_name='\\u4e0a\\u5e02\\u9996\\u65e5\\u5f00\\u76d8\\u4ef7')),\n ('shangshishourizhangdiefu', models.CharField(max_length=255, verbose_name='\\u4e0a\\u5e02\\u9996\\u65e5\\u6da8\\u8dcc\\u5e45')),\n ('shangshishourihuanshoulv', models.CharField(max_length=255, verbose_name=' \\u4e0a\\u5e02\\u9996\\u65e5\\u6362\\u624b\\u7387')),\n ('tebiechulihetuishi', models.CharField(max_length=255, verbose_name='\\u7279\\u522b\\u5904\\u7406\\u548c\\u9000\\u5e02')),\n ('faxingshiyinglv', models.CharField(max_length=255, verbose_name='\\u53d1\\u884c\\u5e02\\u76c8\\u7387')),\n ('zuixinshiyinglv', models.CharField(max_length=255, verbose_name='\\u6700\\u65b0\\u5e02\\u76c8\\u7387')),\n ('zhuceziben', models.CharField(max_length=255, verbose_name='\\u6ce8\\u518c\\u8d44\\u672c')),\n ('zhucedizhi', models.CharField(max_length=255, verbose_name='\\u6ce8\\u518c\\u5730\\u5740')),\n ('suodeisuilv', models.CharField(max_length=255, verbose_name='\\u6240\\u5f97\\u7a0e\\u7387')),\n ('bangongdizhi', models.CharField(max_length=255, verbose_name='\\u529e\\u516c\\u5730\\u5740')),\n ('zhuyaochanpin', models.CharField(max_length=255, verbose_name='\\u4e3b\\u8981\\u4ea7\\u54c1(\\u4e1a\\u52a1)')),\n ('lianxidianhua', models.CharField(max_length=255, verbose_name='\\u8054\\u7cfb\\u7535\\u8bdd(\\u8463\\u79d8)')),\n ('gongsichuanzhen', models.CharField(max_length=255, verbose_name='\\u516c\\u53f8\\u4f20\\u771f')),\n ('dianziyouxiang', models.CharField(max_length=255, verbose_name='\\u7535\\u5b50\\u90ae\\u7bb1')),\n ('gongsiwangzhi', models.CharField(max_length=255, verbose_name='\\u516c\\u53f8\\u7f51\\u5740')),\n ('lianxiren', models.CharField(max_length=255, verbose_name='\\u8054\\u7cfb\\u4eba')),\n ('youzhengbianma', models.CharField(max_length=255, 
verbose_name='\\u90ae\\u653f\\u7f16\\u7801')),\n ('jingyingfanwei', models.CharField(max_length=999, verbose_name='\\u7ecf\\u8425\\u8303\\u56f4')),\n ('gongsijianjie', models.CharField(max_length=999, verbose_name='\\u516c\\u53f8\\u7b80\\u4ecb')),\n ],\n ),\n migrations.CreateModel(\n name='Companys',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('Code', models.CharField(max_length=255, verbose_name='\\u80a1\\u7968\\u4ee3\\u7801')),\n ('Name', models.CharField(max_length=255, verbose_name='\\u80a1\\u7968\\u7b80\\u79f0')),\n ('ALl_Name', models.CharField(max_length=255, verbose_name='\\u516c\\u53f8\\u5168\\u79f0')),\n ('History_Name', models.CharField(max_length=255, verbose_name='\\u66fe\\u7528\\u540d')),\n ],\n ),\n migrations.CreateModel(\n name='Fenhong_Info',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('time', models.CharField(max_length=255, verbose_name='\\u516c\\u544a\\u65f6\\u95f4')),\n ],\n ),\n migrations.CreateModel(\n name='Gaoguan_Info',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=255, verbose_name='\\u516c\\u53f8\\u540d\\u5b57')),\n ],\n ),\n ]\n" }, { "alpha_fraction": 0.5078219175338745, "alphanum_fraction": 0.6161251664161682, "avg_line_length": 35.130435943603516, "blob_id": "0d9ffe6143f4d61e1757c4c536fa3ebefd81591e", "content_id": "40b2560e8c57828a41a9bbade99b5ca977d67de9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 831, "license_type": "no_license", "max_line_length": 163, "num_lines": 23, "path": "/hr/frontend/hrweb/lagou/migrations/0002_position_url.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11 on 2017-04-13 08:56\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('lagou', '0001_initial'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Position_URL',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('searching_company', models.CharField(default='-', max_length=255, verbose_name='\\u9700\\u8981\\u67e5\\u8be2\\u7684\\u516c\\u53f8\\u540d\\u5b57')),\n ('url', models.CharField(default='-', max_length=255, verbose_name='\\u67e5\\u5230\\u516c\\u53f8\\u7684\\u67d0\\u4e2a\\u5de5\\u4f5c\\u5c97\\u4f4d\\u7684URL')),\n ],\n ),\n ]\n" }, { "alpha_fraction": 0.6795803308486938, "alphanum_fraction": 0.7141960263252258, "avg_line_length": 60.44198989868164, "blob_id": "0379d4ce0d6bab9a922dc6d9ba27715bf0c73ad8", "content_id": "3383f29d9fbbfa350b68206a2578d3be9f78b8c3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12507, "license_type": "no_license", "max_line_length": 101, "num_lines": 181, "path": "/szsj/dj_sz/hexun/models.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models\n\n\n\nclass Companys(models.Model):\n Code = models.CharField(max_length=255, verbose_name=u\"股票代码\")\n Name = models.CharField(max_length=255, verbose_name=u\"股票简称\")\n ALl_Name = models.CharField(max_length=255, verbose_name=u\"公司全称\")\n History_Name = models.CharField(max_length=255, 
verbose_name=u\"曾用名\")\n\n\nclass base_Info(models.Model):\n\n company_code = models.CharField(max_length=255, verbose_name=u\"公司股票代码\")\n\n # 公司简介-基本信息\n\n name = models.CharField(max_length=255, verbose_name=u\"公司名字\")\n daima = models.CharField(max_length=255, verbose_name=u\"股票代码\")\n quancheng = models.CharField(max_length=255, verbose_name=u\"公司全称\")\n englishname = models.CharField(max_length=255, verbose_name=u\"公司英文名称\")\n cengyongming = models.CharField(max_length=255, verbose_name=u\"曾用名\")\n chengliriqi = models.CharField(max_length=255, verbose_name=u\"成立日期\")\n suoshuhangye = models.CharField(max_length=255, verbose_name=u\"所属行业\")\n suoshugannian = models.CharField(max_length=255, verbose_name=u\"所属概念\")\n suoshudiyu = models.CharField(max_length=255, verbose_name=u\"所属地域\")\n fadingdabiaoren = models.CharField(max_length=255, verbose_name=u\"法定代表人\")\n dulidongshi = models.CharField(max_length=255, verbose_name=u\"独立董事\")\n zixunfuwujigou = models.CharField(max_length=255, verbose_name=u\"咨询服务机构\")\n kuaijishiwusuo = models.CharField(max_length=255, verbose_name=u\"会计师事务所\")\n zhengquanshifudaibiao = models.CharField(max_length=255, verbose_name=u\"证券事务代表\")\n # 公司简介-证券信息\n faxingriqi = models.CharField(max_length=255, verbose_name=u\"发行日期\")\n shangshiriqi = models.CharField(max_length=255, verbose_name=u\"上市日期\")\n shangshijiaoyisuo = models.CharField(max_length=255, verbose_name=u\"上市交易所\")\n zhengquanleixing = models.CharField(max_length=255, verbose_name=u\"证券类型\")\n liutongguben = models.CharField(max_length=255, verbose_name=u\"流通股本\")\n zongguben = models.CharField(max_length=255, verbose_name=u\"总股本\")\n zhuchengxiaoshang = models.CharField(max_length=255, verbose_name=u\"主承销商\")\n faxingjia = models.CharField(max_length=255, verbose_name=u\"发行价\")\n shangshisourikaipanjia = models.CharField(max_length=255, verbose_name=u\"上市首日开盘价\")\n shangshishourizhangdiefu = models.CharField(max_length=255, verbose_name=u\"上市首日涨跌幅\")\n shangshishourihuanshoulv = models.CharField(max_length=255, verbose_name=u\" 上市首日换手率\")\n tebiechulihetuishi = models.CharField(max_length=255, verbose_name=u\"特别处理和退市\")\n faxingshiyinglv = models.CharField(max_length=255, verbose_name=u\"发行市盈率\")\n zuixinshiyinglv = models.CharField(max_length=255, verbose_name=u\"最新市盈率\")\n\n # 公司简介-工商信息\n zhuceziben = models.CharField(max_length=255, verbose_name=u\"注册资本\")\n zhucedizhi = models.CharField(max_length=255, verbose_name=u\"注册地址\")\n suodeisuilv = models.CharField(max_length=255, verbose_name=u\"所得税率\")\n bangongdizhi = models.CharField(max_length=255, verbose_name=u\"办公地址\")\n zhuyaochanpin = models.CharField(max_length=255, verbose_name=u\"主要产品(业务)\")\n\n # 公司简介-联系方式\n lianxidianhua = models.CharField(max_length=255, verbose_name=u\"联系电话(董秘)\")\n gongsichuanzhen = models.CharField(max_length=255, verbose_name=u\"公司传真\")\n dianziyouxiang = models.CharField(max_length=255, verbose_name=u\"电子邮箱\")\n gongsiwangzhi = models.CharField(max_length=255, verbose_name=u\"公司网址\")\n lianxiren = models.CharField(max_length=255, verbose_name=u\"联系人\")\n youzhengbianma = models.CharField(max_length=255, verbose_name=u\"邮政编码\")\n\n jingyingfanwei = models.CharField(max_length=999, verbose_name=u\"经营范围\")\n\n gongsijianjie = models.CharField(max_length=999, verbose_name=u\"公司简介\")\n\n\nclass Gaoguan_Info(models.Model):\n company_code = models.CharField(max_length=255, verbose_name=u\"公司股票代码\", default='-')\n # 高管-高管成员\n dongjiangao = models.CharField(max_length=255, verbose_name=u\"董监高姓名\", default='-')\n 
gaoguanzhiwu = models.CharField(max_length=255, verbose_name=u\"高管职务\", default='-')\n renzhiriqi = models.CharField(max_length=255, verbose_name=u\"任职日期\", default='-')\n lizhiriqi = models.CharField(max_length=255, verbose_name=u\"离职日期\", default='-')\n xueli = models.CharField(max_length=255, verbose_name=u\"学历\", default='-')\n nianxin = models.CharField(max_length=255, verbose_name=u\"年薪(万元)\", default='-')\n chiguzonge = models.CharField(max_length=255, verbose_name=u\"持股总额(万元)\", default='-')\n chigushuliang = models.CharField(max_length=255, verbose_name=u\"持股数量(万股)\", default='-')\n\n\nclass Dongshihui_Info(models.Model):\n company_code = models.CharField(max_length=255, verbose_name=u\"公司股票代码\", default='-')\n # 高管-董事会成员\n dongjiangao = models.CharField(max_length=255, verbose_name=u\"董监高姓名\", default='-')\n gaoguanzhiwu = models.CharField(max_length=255, verbose_name=u\"高管职务\", default='-')\n renzhiriqi = models.CharField(max_length=255, verbose_name=u\"任职日期\", default='-')\n lizhiriqi = models.CharField(max_length=255, verbose_name=u\"离职日期\", default='-')\n xueli = models.CharField(max_length=255, verbose_name=u\"学历\", default='-')\n nianxin = models.CharField(max_length=255, verbose_name=u\"年薪(万元)\", default='-')\n chiguzonge = models.CharField(max_length=255, verbose_name=u\"持股总额(万元)\", default='-')\n chigushuliang = models.CharField(max_length=255, verbose_name=u\"持股数量(万股)\", default='-')\n\nclass Jianshihui_Info(models.Model):\n company_code = models.CharField(max_length=255, verbose_name=u\"公司股票代码\", default='-')\n # 高管-监事会成员\n dongjiangao = models.CharField(max_length=255, verbose_name=u\"董监高姓名\", default='-')\n gaoguanzhiwu = models.CharField(max_length=255, verbose_name=u\"高管职务\", default='-')\n renzhiriqi = models.CharField(max_length=255, verbose_name=u\"任职日期\", default='-')\n lizhiriqi = models.CharField(max_length=255, verbose_name=u\"离职日期\", default='-')\n xueli = models.CharField(max_length=255, verbose_name=u\"学历\", default='-')\n nianxin = models.CharField(max_length=255, verbose_name=u\"年薪(万元)\", default='-')\n chiguzonge = models.CharField(max_length=255, verbose_name=u\"持股总额(万元)\", default='-')\n chigushuliang = models.CharField(max_length=255, verbose_name=u\"持股数量(万股)\", default='-')\n\n\n\nclass Fenhong_Info(models.Model):\n company_code = models.CharField(max_length=255, verbose_name=u\"公司股票代码\", default='-')\n # 分红、历年分红详表\n gonggaoshijian = models.CharField(max_length=255, verbose_name=u\"公告时间\", default='-')\n kuaijiniandu = models.CharField(max_length=255, verbose_name=u\"会计年度\", default='-')\n songgu = models.CharField(max_length=255, verbose_name=u\"送股(股/10股)\", default='-')\n paixi = models.CharField(max_length=255, verbose_name=u\"派息(元/10股)\", default='-')\n guquandengjiri = models.CharField(max_length=255, verbose_name=u\"股权登记日\", default='-')\n guquanchuxiri = models.CharField(max_length=255, verbose_name=u\"除权除息日\", default='-')\n honggushangshiri = models.CharField(max_length=255, verbose_name=u\"红股上市日\", default='-')\n shifoshisi = models.CharField(max_length=255, verbose_name=u\"是否实施\", default='-')\n xiangqing = models.CharField(max_length=255, verbose_name=u\"分红详情\", default='-')\n\n\nclass Fenhong_Zhuanzeng_Info(models.Model):\n company_code = models.CharField(max_length=255, verbose_name=u\"公司股票代码\", default='-')\n # 分红、转增股本\n gonggaoshijian = models.CharField(max_length=255, verbose_name=u\"公告时间\", default='-')\n zhuanzeng = models.CharField(max_length=255, verbose_name=u\"转增(股/10股)\", default='-')\n chuquanchuxiri = 
models.CharField(max_length=255, verbose_name=u\"除权除息日\", default='-')\n chuquandengjiri = models.CharField(max_length=255, verbose_name=u\"除权登记日\", default='-')\n zhuanzenggushangshiri = models.CharField(max_length=255, verbose_name=u\"转增股上市日\", default='-')\n tongqisonggu = models.CharField(max_length=255, verbose_name=u\"同期送股(股/10股)\", default='-')\n fanganjianjie = models.CharField(max_length=255, verbose_name=u\"方案简介\", default='-')\n shifoshisi = models.CharField(max_length=255, verbose_name=u\"是否实施\", default='-')\n xiangqing = models.CharField(max_length=255, verbose_name=u\"分红详情\", default='-')\n\nclass Fenhong_Peigu_Info(models.Model):\n company_code = models.CharField(max_length=255, verbose_name=u\"公司股票代码\", default='-')\n # 分红、配 股\n gonggaoshijian = models.CharField(max_length=255, verbose_name=u\"公告时间\", default='-')\n peigufangan = models.CharField(max_length=255, verbose_name=u\"配股方案(股/10股)\", default='-')\n peigujia = models.CharField(max_length=255, verbose_name=u\"配股价(元)\", default='-')\n jizhunguben = models.CharField(max_length=255, verbose_name=u\"基准股本(万股)\", default='-')\n chuquanri = models.CharField(max_length=255, verbose_name=u\"除权日\", default='-')\n guquandengjiri = models.CharField(max_length=255, verbose_name=u\"股权登记日\", default='-')\n jiaokuanqishiri = models.CharField(max_length=255, verbose_name=u\"缴款起始日\", default='-')\n jiaokuanzhongzhiri = models.CharField(max_length=255, verbose_name=u\"缴款终止日\", default='-')\n peigushangshiri = models.CharField(max_length=255, verbose_name=u\"配股上市日\", default='-')\n mujizijin = models.CharField(max_length=255, verbose_name=u\"募集资金合计(元)\", default='-')\n xiangqing = models.CharField(max_length=255, verbose_name=u\"分红详情\", default='-')\n\nclass Fenhong_Huigou_Info(models.Model):\n company_code = models.CharField(max_length=255, verbose_name=u\"公司股票代码\", default='-')\n # 分红、回 购\n gonggaoshijian = models.CharField(max_length=255, verbose_name=u\"公告时间\", default='-')\n huigouzanzonggubenbili = models.CharField(max_length=255, verbose_name=u\"回购占总股本比例%\", default='-')\n huigougushu = models.CharField(max_length=255, verbose_name=u\"回购股数(股)\", default='-')\n nihuigoujiage = models.CharField(max_length=255, verbose_name=u\"拟回购价格(元)\", default='-')\n gonggaoqianrigujia = models.CharField(max_length=255, verbose_name=u\"公告前日股价(元)\", default='-')\n goumaizuigaojia = models.CharField(max_length=255, verbose_name=u\"购买最高价(元)\", default='-')\n goumaizuidijia = models.CharField(max_length=255, verbose_name=u\"购买最低价(元)\", default='-')\n huigouzongjine = models.CharField(max_length=255, verbose_name=u\"回购总金额(万元)\", default='-')\n shifoshisi = models.CharField(max_length=255, verbose_name=u\"是否实施\", default='-')\n xiangqing = models.CharField(max_length=255, verbose_name=u\"分红详情\", default='-')\n\n\n\n\n\n\nclass Showrufenbu_Info(models.Model):\n company_code = models.CharField(max_length=255, verbose_name=u\"公司股票代码\", default='-')\n # 收入分布\n leibiemingcheng = models.CharField(max_length=255, verbose_name=u\"类别名称\", default='-')\n yinyeshouru = models.CharField(max_length=255, verbose_name=u\"营业收入(万元)\", default='-')\n zhanyinyeshourubili = models.CharField(max_length=255, verbose_name=u\"占营业收入比例(%)\", default='-')\n yinyechengben = models.CharField(max_length=255, verbose_name=u\"营业成本(万元)\", default='-')\n zhanchengbenbili = models.CharField(max_length=255, verbose_name=u\"占成本比例\", default='-')\n yingyelirun = models.CharField(max_length=255, verbose_name=u\"营业利润(万元)\", default='-')\n zhanlirunbili = models.CharField(max_length=255, 
verbose_name=u\"占利润比例\", default='-')\n maolilv = models.CharField(max_length=255, verbose_name=u\"毛利率(%)\", default='-')\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.5227272510528564, "alphanum_fraction": 0.6016042828559875, "avg_line_length": 31.521739959716797, "blob_id": "527fc2d54eae183cdaeeda289a792b6f3fba4a98", "content_id": "7423250d2ba23071f0a65ca1f9fa54ec24992e98", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 748, "license_type": "no_license", "max_line_length": 114, "num_lines": 23, "path": "/szsj/dj_sz/hightech/migrations/0012_itjuzi_company_info.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.5 on 2017-03-16 10:16\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('hightech', '0011_auto_20170307_0706'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Itjuzi_Company_Info',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('company_allname', models.CharField(max_length=255, verbose_name='\\u4f01\\u4e1a\\u5168\\u540d')),\n ('company_name', models.CharField(max_length=255, verbose_name='\\u4f01\\u4e1a\\u540d')),\n ],\n ),\n ]\n" }, { "alpha_fraction": 0.5380059480667114, "alphanum_fraction": 0.5409674048423767, "avg_line_length": 36.55555725097656, "blob_id": "6bf28b72e0b7d302f3251052e08811c7fb26d98f", "content_id": "bd1065cff87d43999673ef195a26f428589aa7a1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1071, "license_type": "no_license", "max_line_length": 76, "num_lines": 27, "path": "/irole/irolespider/irolespider/pipelines.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom scrapy.pipelines.images import ImagesPipeline\nfrom scrapy.http import Request\nfrom scrapy.exceptions import DropItem\nfrom utils import Spider_Django\n\n\nclass Cos8Pipeline(ImagesPipeline):\n # 从item中获取图片的真实地址,并执行下载请求\n def get_media_requests(self, item, info):\n for image_url in item['img_base_url']:\n yield Request(image_url)\n# ----------------------------------------------------------------------\n def item_completed(self, results, item, info):\n image_paths = [[x['path'] for ok, x in results if ok]]\n if not image_paths:\n raise DropItem(\"Item contains no images\")\n save_Data = Spider_Django.django_sql()\n save_Data.save_Item(item)\n return\n # 修改保存图片的文件名...\n # ----------------------------------------------------------------------\n def file_path(self, request, response=None, info=None):\n \n url = request.url\n image_guid = url.split('/')[-1]\n return '%s.jpg' % (image_guid)" }, { "alpha_fraction": 0.47926977276802063, "alphanum_fraction": 0.49550268054008484, "avg_line_length": 35.283729553222656, "blob_id": "f8f24a1cf238952022d9a08308157d31a538aeec", "content_id": "30800b8c6b5c2f5762e9b3df9ae443b357fb2e56", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 21753, "license_type": "no_license", "max_line_length": 397, "num_lines": 504, "path": "/mikufan/frontend/templates/webapps.old1/templates-html/index.html", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "<!DOCTYPE HTML>\r\n<html>\r\n<head>\r\n <meta charset=\"utf-8\">\r\n <title>我的博客</title>\r\n\r\n <meta 
name=\"关键字\" content=\"\">\r\n\r\n <link href=\"/static/webapps/css/bootstrap.min.css\" rel=\"stylesheet\">\r\n <link href=\"/static/webapps/css/style.css\" rel=\"stylesheet\">\r\n\r\n <link href=\"/static/webapps/css/buju.css\" rel=\"stylesheet\">\r\n <link href=\"/static/webapps/css/index.css\" rel=\"stylesheet\">\r\n\r\n\r\n <script type=\"text/javascript\" src=\"/static/webapps/js/jquery.min.js\"></script>\r\n <script type=\"text/javascript\" src=\"/static/webapps/js/sliders.js\"></script>\r\n</head>\r\n<body>\r\n\r\n<div class=\"header\" />\r\n{# <nav class=\"navbar navbar-default navbar-fixed-top\" role=\"navigation\">#}\r\n{# <div class=\"container-fluid\">#}\r\n{# <!-- Brand and toggle get grouped for better mobile display -->#}\r\n{# <div class=\"navbar-header\">#}\r\n{# <button type=\"button\" class=\"navbar-toggle collapsed\" data-toggle=\"collapse\" data-target=\"#bs-example-navbar-collapse-1\">#}\r\n{# <span class=\"sr-only\">Toggle navigation</span>#}\r\n{# <span class=\"icon-bar\"></span>#}\r\n{# <span class=\"icon-bar\"></span>#}\r\n{# <span class=\"icon-bar\"></span>#}\r\n{# </button>#}\r\n{# <a class=\"navbar-brand\" href=\"#\">Brand</a>#}\r\n{# </div>#}\r\n{##}\r\n{# <!-- Collect the nav links, forms, and other content for toggling -->#}\r\n{# <div class=\"collapse navbar-collapse\" id=\"bs-example-navbar-collapse-1\">#}\r\n{# <ul class=\"nav navbar-nav\">#}\r\n{# <li class=\"active\"><a href=\"#\">Link</a></li>#}\r\n{# <li><a href=\"#\">Link</a></li>#}\r\n{# <li class=\"dropdown\">#}\r\n{# <a href=\"#\" class=\"dropdown-toggle\" data-toggle=\"dropdown\">Dropdown <span class=\"caret\"></span></a>#}\r\n{# <ul class=\"dropdown-menu\" role=\"menu\">#}\r\n{# <li><a href=\"#\">Action</a></li>#}\r\n{# <li><a href=\"#\">Another action</a></li>#}\r\n{# <li><a href=\"#\">Something else here</a></li>#}\r\n{# <li class=\"divider\"></li>#}\r\n{# <li><a href=\"#\">Separated link</a></li>#}\r\n{# <li class=\"divider\"></li>#}\r\n{# <li><a href=\"#\">One more separated link</a></li>#}\r\n{# </ul>#}\r\n{# </li>#}\r\n{# </ul>#}\r\n{# <form class=\"navbar-form navbar-left\" role=\"search\">#}\r\n{# <div class=\"form-group\">#}\r\n{# <input type=\"text\" class=\"form-control\" placeholder=\"Search\">#}\r\n{# </div>#}\r\n{# <button type=\"submit\" class=\"btn btn-default\">Submit</button>#}\r\n{# </form>#}\r\n{# <ul class=\"nav navbar-nav navbar-right\">#}\r\n{# <li><a href=\"#\">Link</a></li>#}\r\n{# <li class=\"dropdown\">#}\r\n{# <a href=\"#\" class=\"dropdown-toggle\" data-toggle=\"dropdown\">Dropdown <span class=\"caret\"></span></a>#}\r\n{# <ul class=\"dropdown-menu\" role=\"menu\">#}\r\n{# <li><a href=\"#\">Action</a></li>#}\r\n{# <li><a href=\"#\">Another action</a></li>#}\r\n{# <li><a href=\"#\">Something else here</a></li>#}\r\n{# <li class=\"divider\"></li>#}\r\n{# <li><a href=\"#\">Separated link</a></li>#}\r\n{# </ul>#}\r\n{# </li>#}\r\n{# </ul>#}\r\n{# </div><!-- /.navbar-collapse -->#}\r\n{# </div><!-- /.container-fluid -->#}\r\n{#</nav>#}\r\n\r\n\r\n <header class=\"header\">\r\n <nav class=\"navbar navbar-default navbar-fixed-top\" id=\"navbar\">\r\n <div class=\"container\">\r\n{# <div class=\"header-topbar hidden-xs link-border\">#}\r\n{# <ul class=\"site-nav topmenu\">#}\r\n{# <li><a href=\"http://www.muzhuangnet.com/tags/\" >标签云</a></li>#}\r\n{# <li><a href=\"http://www.muzhuangnet.com/readers/\" rel=\"nofollow\" >读者墙</a></li>#}\r\n{# <li><a href=\"http://www.muzhuangnet.com/rss.html\" title=\"RSS订阅\" >#}\r\n{# <i class=\"fa fa-rss\">#}\r\n{# </i> RSS订阅#}\r\n{# 
</a></li>#}\r\n{# </ul>#}\r\n{# 勤记录 懂分享</div>#}\r\n <div class=\"navbar-header\">\r\n <button type=\"button\" class=\"navbar-toggle collapsed\" data-toggle=\"collapse\" data-target=\"#header-navbar\" aria-expanded=\"false\"> <span class=\"sr-only\"></span> <span class=\"icon-bar\"></span> <span class=\"icon-bar\"></span> <span class=\"icon-bar\"></span> </button>\r\n <h1 class=\"logo hvr-bounce-in\"><a href=\"/\" title=\"mikufans\"><img src=\"/static/webapps/images/logo.png\" alt=\"mikufan\"></a></h1>\r\n </div>\r\n <div class=\"collapse navbar-collapse\" id=\"header-navbar\">\r\n <form class=\"navbar-form visible-xs\" action=\"/Search\" method=\"post\">\r\n <div class=\"input-group\">\r\n <input type=\"text\" name=\"keyword\" class=\"form-control\" placeholder=\"请输入关键字\" maxlength=\"20\" autocomplete=\"off\">\r\n <span class=\"input-group-btn\">\r\n <button class=\"btn btn-default btn-search\" name=\"search\" type=\"submit\">搜索</button>\r\n </span> </div>\r\n </form>\r\n <ul class=\"nav navbar-nav navbar-right\">\r\n{# <li><a data-cont=\"木庄网络博客\" title=\"木庄网络博客\" href=\"index.html\">首页</a></li>#}\r\n{# <li><a data-cont=\"列表页\" title=\"列表页\" href=\"list.html\">列表页</a></li>#}\r\n{# <li><a data-cont=\"详细页\" title=\"详细页\" href=\"show.html\">详细页</a></li>#}\r\n{# <li><a data-cont=\"404\" title=\"404\" href=\"404.html\">404</a></li>#}\r\n{# <li><a data-cont=\"MZ-NetBolg主题\" title=\"MZ-NetBolg主题\" href=\"http://www.muzhuangnet.com/list/mznetblog/\" >MZ-NetBolg主题</a></li>#}\r\n{# <li><a data-cont=\"IT技术笔记\" title=\"IT技术笔记\" href=\"http://www.muzhuangnet.com/list/code/\" >IT技术笔记</a></li>#}\r\n{# <li><a data-cont=\"源码分享\" title=\"源码分享\" href=\"http://www.muzhuangnet.com/list/share/\" >源码分享</a></li>#}\r\n{# <li><a data-cont=\"靠谱网赚\" title=\"靠谱网赚\" href=\"http://www.muzhuangnet.com/list/money/\" >靠谱网赚</a></li>#}\r\n{# <li><a data-cont=\"资讯分享\" title=\"资讯分享\" href=\"http://www.muzhuangnet.com/list/news/\" >资讯分享</a></li>#}\r\n </ul>\r\n </div>\r\n </div>\r\n </nav>\r\n</header>\r\n\r\n\r\n\r\n\r\n\r\n<br/><br/><br/>\r\n\r\n</div>\r\n\r\n\r\n\r\n\r\n\r\n\r\n <article>\r\n <div class=\"l_box f_l\">\r\n\t <div class=\"banner\">\r\n\t <div id=\"slide-holder\">\r\n\t\t <div id=\"slide-runner\">\r\n\t\t <a href=\"/\" target=\"_blank\">\r\n\t\t\t <img id=\"slide-img-1\" src=\"/static/webapps/images/a1.jpg\" alt style>\r\n\t\t\t </a>\r\n\t\t\t <a href=\"/\" target=\"_blank\">\r\n\t\t\t <img id=\"slide-img-2\" src=\"/static/webapps/images/a2.jpg\" alt style>\r\n\t\t\t </a>\r\n\t\t\t <a href=\"/\" target=\"_blank\">\r\n\t\t\t <img id=\"slide-img-3\" src=\"/static/webapps/images/a3.jpg\" alt style>\r\n\t\t\t </a>\r\n\t\t\t <a href=\"/\" target=\"_blank\">\r\n\t\t\t <img id=\"slide-img-4\" src=\"/static/webapps/images/a4.jpg\" alt style>\r\n\t\t\t </a>\r\n\t\t\t <div id=\"slide-controls\" style=\"display:block;\">\r\n\t\t\t <p id=\"slide-client\" class=\"text\">\r\n\t\t\t\t <strong></strong>\r\n\t\t\t\t\t <span></span>\r\n\t\t\t\t </p>\r\n <p id=\"slide-desc\" class=\"text\"></p> \t\r\n <p id=\"slide-nav\"></p>\t\t \t\t\t \r\n\t\t\t </div>\r\n\t\t </div> \r\n\t\t </div>\r\n\t </div>\r\n\t <script>\r\n\t if(!window.slider){\r\n\t\t var slider={};\r\n\t\t }\r\n\t\t slider.data=[\r\n\t\t {\r\n\t\t \"id\":\"slide-img-1\", //与slide-runner中的img标签id对应\r\n\t\t\t \"client\":\"醉牛逼\",\r\n\t\t\t \"desc\":\"醉牛逼是武魂醉牛逼的存在\" //这里描述图片内容\r\n\t\t },\r\n\t\t {\r\n\t\t \"id\":\"slide-img-2\",\r\n\t\t\t \"client\":\"醉牛逼\",\r\n\t\t\t \"desc\":\"醉牛逼是武魂醉牛逼的存在\"\r\n\t\t },\r\n\t\t {\r\n\t\t \"id\":\"slide-img-3\",\r\n\t\t\t 
\"client\":\"醉牛逼\",\r\n\t\t\t \"desc\":\"醉牛逼是武魂醉牛逼的存在\"\r\n\t\t },\r\n\t\t {\r\n\t\t \"id\":\"slide-img-4\",\r\n\t\t\t \"client\":\"醉牛逼\",\r\n\t\t\t \"desc\":\"醉牛逼是武魂醉牛逼的存在\"\r\n\t\t }\r\n\t\t ];\r\n\t </script>\r\n\t <div class=\"topnews\">\r\n\t <h2>\r\n{#\t <span>#}\r\n{#\t <a href=\"/\" target=\"_blank\">武魂大罗</a>#}\r\n{#\t\t <a href=\"/\" target=\"_blank\">装逼大神</a>#}\r\n{#\t\t <a href=\"/\" target=\"_blank\">�琶�推荐</a>#}\r\n{#\t </span>\t #}\r\n\t 文章推荐\r\n\t </h2>\r\n\t <div class=\"blogs excerpt excerpt-6\">\r\n\t\t <figure>\r\n\t\t\t <img src=\"/static/webapps/images/01.jpg\">\r\n\t\t\t </figure>\r\n\t\t\t <ul>\r\n\t\t\t <h3><a href=\"/\">住在手机里的朋友</a></h3>\r\n\t\t\t\t <p>\"通信时代,无论是初次相见还是老友重逢,交换联系方式,常常是彼此交换名片,然后郑重或是出于礼貌用手机记下对方的电话号码。在快节奏的生活里,我们不知不觉中就成为住在别人手机里的朋友。又因某些意外,变成了别人手机里匆忙的过客,这种快餐式的友谊 ...\"</p>\r\n\t\t\t <p class=\"autor\">\r\n\t\t\t\t <span class=\"lm f_l\">\r\n\t\t\t\t\t <a href=\"/\">个人博客</a>\r\n\t\t\t\t\t </span>\r\n\t\t\t\t\t <span class=\"dtime f_l\">2016-02-16</span>\r\n\t\t\t\t\t <span class=\"viewnum f_r\">\r\n\t\t\t\t\t 浏览(<a href=\"/\">666</a>)</span>\r\n\t\t\t\t\t <span class=\"pingl f_r\">\t\r\n\t\t\t\t\t 评论(<a href=\"/\">60</a>)</span>\r\n\t\t\t\t </p>\r\n\t\t\t </ul>\r\n\t </div>\r\n\t\t <div class=\"blogs excerpt excerpt-6\">\r\n\t\t <figure>\r\n\t\t\t <img src=\"/static/webapps/images/02.jpg\">\r\n\t\t\t </figure>\r\n\t\t\t <ul>\r\n\t\t\t <h3><a href=\"/\">教你怎样用欠费手机拨打电话</a></h3>\r\n\t\t\t\t <p>\"初次相识的喜悦,让你觉得似乎找到了知音。于是,对于投缘的人,开始了较频繁的交往。渐渐地,初识的喜悦退尽,接下来就是仅仅保持着联系,平淡到偶尔在节假曰发短信互致问候...\"</p>\r\n\t\t\t <p class=\"autor\">\r\n\t\t\t\t <span class=\"lm f_l\">\r\n\t\t\t\t\t <a href=\"/\">个人博客</a>\r\n\t\t\t\t\t </span>\r\n\t\t\t\t\t <span class=\"dtime f_l\">2016-02-16</span>\r\n\t\t\t\t\t <span class=\"viewnum f_r\">\r\n\t\t\t\t\t 浏览(<a href=\"/\">666</a>)</span>\r\n\t\t\t\t\t <span class=\"pingl f_r\">\t\r\n\t\t\t\t\t 评论(<a href=\"/\">60</a>)</span>\r\n\t\t\t\t </p>\r\n\t\t\t </ul>\r\n\t </div>\r\n\t\t <div class=\"blogs excerpt excerpt-6\">\r\n\t\t <figure>\r\n\t\t\t <img src=\"/static/webapps/images/03.jpg\">\r\n\t\t\t </figure>\r\n\t\t\t <ul>\r\n\t\t\t <h3><a href=\"/\">原来以为,一个人的勇敢是,删掉他的手机号码...</a></h3>\r\n\t\t\t\t <p>\"原来以为,一个人的勇敢是,删掉他的手机号码、QQ号码等等一切,努力和他保持距离。等着有一天,习惯不想念他,习惯他不在身边,习惯时间把他在我记忆里的身影磨蚀干净...\"</p>\r\n\t\t\t <p class=\"autor\">\r\n\t\t\t\t <span class=\"lm f_l\">\r\n\t\t\t\t\t <a href=\"/\">个人博客</a>\r\n\t\t\t\t\t </span>\r\n\t\t\t\t\t <span class=\"dtime f_l\">2016-02-16</span>\r\n\t\t\t\t\t <span class=\"viewnum f_r\">\r\n\t\t\t\t\t 浏览(<a href=\"/\">666</a>)</span>\r\n\t\t\t\t\t <span class=\"pingl f_r\">\t\r\n\t\t\t\t\t 评论(<a href=\"/\">60</a>)</span>\r\n\t\t\t\t </p>\r\n\t\t\t </ul>\r\n\t </div>\r\n\t <div class=\"blogs excerpt excerpt-6\">\r\n\t\t <figure>\r\n\t\t\t <img src=\"/static/webapps/images/04.jpg\">\r\n\t\t\t </figure>\r\n\t\t\t <ul>\r\n\t\t\t <h3><a href=\"/\">手机的16个惊人小秘密,据说99.999%的人都不知</a></h3>\r\n\t\t\t\t <p>\"引导语:知道么,手机有备用电池,手机拨号码12593+电话号码=陷阱……手机具有很多你不知道的小秘密,说出来一定很惊奇!不信的话就来看看吧!...\"</p>\r\n\t\t\t <p class=\"autor\">\r\n\t\t\t\t <span class=\"lm f_l\">\r\n\t\t\t\t\t <a href=\"/\">个人博客</a>\r\n\t\t\t\t\t </span>\r\n\t\t\t\t\t <span class=\"dtime f_l\">2016-02-16</span>\r\n\t\t\t\t\t <span class=\"viewnum f_r\">\r\n\t\t\t\t\t 浏览(<a href=\"/\">666</a>)</span>\r\n\t\t\t\t\t <span class=\"pingl f_r\">\t\r\n\t\t\t\t\t 评论(<a href=\"/\">60</a>)</span>\r\n\t\t\t\t </p>\r\n\t\t\t </ul>\r\n\t </div>\r\n\t\t <div class=\"blogs excerpt excerpt-6\">\r\n\t\t <figure>\r\n\t\t\t <img src=\"/static/webapps/images/05.jpg\">\r\n\t\t\t </figure>\r\n\t\t\t 
<ul>\r\n\t\t\t <h3><a href=\"/\">你面对的是生活而不是手机</a></h3>\r\n\t\t\t\t <p>\"每一次与别人吃饭,总会有人会拿出手机。以为他们在打电话或者有紧急的短信,但用余光瞟了一眼之后发现无非就两件事:1、看小说,2、上人人或者QQ...\"</p>\r\n\t\t\t <p class=\"autor\">\r\n\t\t\t\t <span class=\"lm f_l\">\r\n\t\t\t\t\t <a href=\"/\">个人博客</a>\r\n\t\t\t\t\t </span>\r\n\t\t\t\t\t <span class=\"dtime f_l\">2016-02-16</span>\r\n\t\t\t\t\t <span class=\"viewnum f_r\">\r\n\t\t\t\t\t 浏览(<a href=\"/\">666</a>)</span>\r\n\t\t\t\t\t <span class=\"pingl f_r\">\t\r\n\t\t\t\t\t 评论(<a href=\"/\">60</a>)</span>\r\n\t\t\t\t </p>\r\n\t\t\t </ul>\r\n\t </div>\r\n\t\t <div class=\"blogs excerpt excerpt-6\">\r\n\t\t <figure>\r\n\t\t\t <img src=\"/static/webapps/images/06.jpg\">\r\n\t\t\t </figure>\r\n\t\t\t <ul>\r\n\t\t\t <h3><a href=\"/\">豪雅手机正式发布! 在法国全手工打造的奢侈品</a></h3>\r\n\t\t\t\t <p>\"现在跨界联姻,时尚、汽车以及运动品牌联合手机制造商联合发布手机产品在行业里已经不再新鲜,上周我们给大家报道过著名手表制造商瑞士泰格・豪雅(Tag Heuer) 联合法国的手机制造商Modelabs发布的一款奢华手机的部分谍照,而近日该手机终于被正式发布了...\"</p>\r\n\t\t\t <p class=\"autor\">\r\n\t\t\t\t <span class=\"lm f_l\">\r\n\t\t\t\t\t <a href=\"/\">个人博客</a>\r\n\t\t\t\t\t </span>\r\n\t\t\t\t\t <span class=\"dtime f_l\">2016-02-16</span>\r\n\t\t\t\t\t <span class=\"viewnum f_r\">\r\n\t\t\t\t\t 浏览(<a href=\"/\">666</a>)</span>\r\n\t\t\t\t\t <span class=\"pingl f_r\">\t\r\n\t\t\t\t\t 评论(<a href=\"/\">60</a>)</span>\r\n\t\t\t\t </p>\r\n\t\t\t </ul>\r\n\t </div>\r\n\t\t <div class=\"blogs excerpt excerpt-6\">\r\n\t\t <figure>\r\n\t\t\t <img src=\"/static/webapps/images/04.jpg\">\r\n\t\t\t </figure>\r\n\t\t\t <ul>\r\n\t\t\t <h3><a href=\"/\">手机的16个惊人小秘密,据说99.999%的人都不知</a></h3>\r\n\t\t\t\t <p>\"引导语:知道么,手机有备用电池,手机拨号码12593+电话号码=陷阱……手机具有很多你不知道的小秘密,说出来一定很惊奇!不信的话就来看看吧!...\"</p>\r\n\t\t\t <p class=\"autor\">\r\n\t\t\t\t <span class=\"lm f_l\">\r\n\t\t\t\t\t <a href=\"/\">个人博客</a>\r\n\t\t\t\t\t </span>\r\n\t\t\t\t\t <span class=\"dtime f_l\">2016-02-16</span>\r\n\t\t\t\t\t <span class=\"viewnum f_r\">\r\n\t\t\t\t\t 浏览(<a href=\"/\">666</a>)</span>\r\n\t\t\t\t\t <span class=\"pingl f_r\">\t\r\n\t\t\t\t\t 评论(<a href=\"/\">60</a>)</span>\r\n\t\t\t\t </p>\r\n\t\t\t </ul>\r\n\t </div>\r\n\t </div>\r\n\t </div>\r\n\r\n\r\n\r\n\r\n <div class=\"r_box f_r\">\r\n\t <div class=\"tit01\">\r\n{# <h3>关注我</h3>#}\r\n{#\t\t <div class=\"gzwm\">#}\r\n{#\t\t <ul>#}\r\n{#\t\t\t <li><a class=\"xlwb\" href=\"#\" target=\"_blank\">新浪微博</a></li>#}\r\n{#\t\t\t <li><a class=\"txwb\" href=\"#\" target=\"_blank\">腾讯微博</a></li>#}\r\n{#\t\t\t <li><a class=\"rss\" href=\"#\" target=\"_blank\">RSS</a></li>#}\r\n{#\t\t\t <li><a class=\"wx\" href=\"#\" target=\"_blank\">邮箱</a></li>#}\r\n{#\t\t\t</ul>#}\r\n{#\t\t </div>#}\r\n\r\n <div class=\"widget widget_search\">\r\n <form class=\"navbar-form\" id=\"searchform\">\r\n<div class=\"input-group\">\r\n <input class=\"form-control\" onkeydown=\"if (event.keyCode == 13) {SiteSearch('http://www.muzhuangnet.com/search.html', '#keywords');return false};\" size=\"35\" placeholder=\"请输入关键字\" id=\"keywords\" name=\"keywords\" maxlength=\"15\" autocomplete=\"off\" type=\"text\">\r\n <span class=\"input-group-btn\">\r\n <input value=\"搜索\" onclick=\"SiteSearch('http://www.muzhuangnet.com/search.html', '#keywords');\" class=\"btn btn-default btn-search\" id=\"searchsubmit\" type=\"button\">\r\n </span>\r\n</div>\r\n</form>\r\n\r\n </div>\r\n\r\n\r\n\r\n </div>\r\n\r\n{##}\r\n{#\t <div class=\"ad300x100\">#}\r\n{#\t <img src=\"/static/webapps/images/wh.jpg\">#}\r\n{#\t </div>#}\r\n\r\n{#\t <div class=\"tab\" id=\"lp_right_select\">#}\r\n{#\t <script>#}\r\n{#\t\t window.onload=function()#}\r\n{#\t\t\t {#}\r\n{#\t\t\t var 
oLi=document.getElementById(\"tb\").getElementsByTagName(\"li\");#}\r\n{#\t\t\t\t var oUl=document.getElementById(\"tb-main\").getElementsByTagName(\"div\");#}\r\n{#\t\t\t\t for(var i=0;i<oLi.length;i++)#}\r\n{#\t\t\t\t {#}\r\n{#\t\t\t\t oLi[i].index=i;#}\r\n{#\t\t\t\t\t oLi[i].onmouseover=function()#}\r\n{#\t\t\t\t\t {#}\r\n{#\t\t\t\t\t for(var n=0;n<oLi.length;n++)#}\r\n{#\t\t\t\t\t\t oLi[n].className=\"\";#}\r\n{#\t\t\t\t\t\t\tthis.className=\"cur\";#}\r\n{#\t\t\t\t\t\tfor(var n=0;n<oUl.length;n++)#}\r\n{# oUl[n].style.display=\"none\";#}\r\n{# oUl[this.index].style.display=\"block\";\t\t\t\t\t\t\t#}\r\n{#\t\t\t\t\t }#}\r\n{#\t\t\t\t }#}\r\n{#\t\t\t }#}\r\n{#\t\t </script>#}\r\n{#\t <div class=\"tab-top\">#}\r\n{#\t\t <ul class=\"hd\" id=\"tb\">#}\r\n{#\t\t\t <li class=\"cur\"><a href=\"/\">点击排行</a></li>#}\r\n{#\t\t\t\t <li><a href=\"/\">最新文章</a></li>#}\r\n{#\t\t\t\t <li><a href=\"/\">站长推荐</a></li>#}\r\n{#\t\t\t </ul>#}\r\n{#\t\t </div>#}\r\n{#\t\t <div class=\"tab-main\" id=\"tb-main\">#}\r\n{#\t\t <div class=\"bd bd-news\" style=\"display:block;\"><ul>#}\r\n{#\t\t\t <li><a href=\"/\" target=\"_blank\">住在手机里的朋友</a></li>#}\r\n{#\t\t\t\t <li><a href=\"/\" target=\"_blank\">教你怎样用欠费手机拨打电话</a></li>#}\r\n{#\t\t\t\t <li><a href=\"/\" target=\"_blank\">原来以为,一个人的勇敢是,删掉他的手机号码...</a></li>#}\r\n{#\t\t\t\t <li><a href=\"/\" target=\"_blank\">手机的16个惊人小秘密,据说99.999%的人都不知</a></li>#}\r\n{#\t\t\t\t <li><a href=\"/\" target=\"_blank\">你面对的是生活而不是手机</a></li>#}\r\n{#\t\t\t\t <li><a href=\"/\" target=\"_blank\">豪雅手机正式发布! 在法国全手工打造的奢侈品</a></li>#}\r\n{#\t\t\t </ul></div>#}\r\n{#\t\t\t <div class=\"bd bd-news\" ><ul>#}\r\n{#\t\t\t <li><a href=\"/\" target=\"_blank\">原来以为,一个人的勇敢是,删掉他的手机号码...</a></li>#}\r\n{#\t\t\t\t <li><a href=\"/\" target=\"_blank\">手机的16个惊人小秘密,据说99.999%的人都不知</a></li>#}\r\n{#\t\t\t\t <li><a href=\"/\" target=\"_blank\">住在手机里的朋友</a></li>#}\r\n{#\t\t\t\t <li><a href=\"/\" target=\"_blank\">教你怎样用欠费手机拨打电话</a></li>#}\r\n{#\t\t\t\t <li><a href=\"/\" target=\"_blank\">你面对的是生活而不是手机</a></li>#}\r\n{#\t\t\t\t <li><a href=\"/\" target=\"_blank\">豪雅手机正式发布! 在法国全手工打造的奢侈品</a></li>#}\r\n{#\t\t\t </ul></div>#}\r\n{#\t\t\t <div class=\"bd bd-news\" ><ul>#}\r\n{#\t\t\t <li><a href=\"/\" target=\"_blank\">手机的16个惊人小秘密,据说99.999%的人都不知</a></li>#}\r\n{#\t\t\t\t <li><a href=\"/\" target=\"_blank\">你面对的是生活而不是手机</a></li>#}\r\n{#\t\t\t\t <li><a href=\"/\" target=\"_blank\">住在手机里的朋友</a></li>#}\r\n{#\t\t\t\t <li><a href=\"/\" target=\"_blank\">豪雅手机正式发布! 
在法国全手工打造的奢侈品</a></li>#}\r\n{#\t\t\t\t <li><a href=\"/\" target=\"_blank\">你面对的是生活而不是手机</a></li>#}\r\n{#\t\t\t\t <li><a href=\"/\" target=\"_blank\">原来以为,一个人的勇敢是,删掉他的手机号码...</a></li>#}\r\n{#\t\t\t </ul></div>#}\r\n{#\t\t </div>#}\r\n{#\t </div>#}\r\n{#\t <div class=\"cloud\">#}\r\n{#\t <h3>标签云</h3>#}\r\n{#\t\t <ul>#}\r\n{#\t\t <li><a href=\"/\">个人博客</a></li>#}\r\n{#\t\t\t <li><a href=\"/\">web开发</a></li>#}\r\n{#\t\t\t <li><a href=\"/\">前端设计</a></li>#}\r\n{#\t\t\t <li><a href=\"/\">Html</a></li>#}\r\n{#\t\t\t <li><a href=\"/\">CSS3</a></li>#}\r\n{#\t\t\t <li><a href=\"/\">CSS3+HTML5</a></li>#}\r\n{#\t\t\t <li><a href=\"/\">百度</a></li>#}\r\n{#\t\t\t <li><a href=\"/\">JavaScript</a></li>#}\r\n{#\t\t\t <li><a href=\"/\">C/C++</a></li>#}\r\n{#\t\t\t <li><a href=\"/\">ASP.NET</a></li>#}\r\n{#\t\t\t <li><a href=\"/\">IOS开发</a></li>#}\r\n{#\t\t\t <li><a href=\"/\">Android开发</a></li>#}\r\n{#\t\t\t <li><a href=\"/\">PHP</a></li>#}\r\n{#\t\t\t <li><a href=\"/\">VS</a></li>#}\r\n{#\t\t </ul>#}\r\n{#\t </div>#}\r\n\t <div class=\"tuwen\">\r\n\t <h3>图文推荐</h3>\r\n\t\t <ul>\r\n\t\t <li><a href=\"/\"><img src=\"/static/webapps/images/01.jpg\"><b>住在手机里的朋友</b></a>\r\n\t\t <p>\r\n\t\t\t <span class=\"tulanum\"><a href=\"/\">手机配件</a></span>\r\n\t\t\t\t <span class=\"tutime\">2016-02-16</span>\r\n\t\t\t </p>\r\n\t\t </li>\r\n\t\t <li><a href=\"/\"><img src=\"/static/webapps/images/02.jpg\"><b>教你怎样用欠费手机拨打电话</b></a>\r\n\t\t <p>\r\n\t\t\t <span class=\"tulanum\"><a href=\"/\">手机配件</a></span>\r\n\t\t\t\t <span class=\"tutime\">2016-02-16</span>\r\n\t\t\t </p></li>\r\n\t\t <li><a href=\"/\"><img src=\"/static/webapps/images/03.jpg\"><b>手机的16个惊人小秘密,据说99.999%的人都不知</b></a>\r\n\t\t <p>\r\n\t\t\t <span class=\"tulanum\"><a href=\"/\">手机配件</a></span>\r\n\t\t\t\t <span class=\"tutime\">2016-02-16</span>\r\n\t\t\t </p></li>\r\n\t\t <li><a href=\"/\"><img src=\"/static/webapps/images/06.jpg\"><b>原来以为,一个人的勇敢是,删掉他的手机号码...</b></a>\r\n\t\t <p>\r\n\t\t\t <span class=\"tulanum\"><a href=\"/\">手机配件</a></span>\r\n\t\t\t\t <span class=\"tutime\">2016-02-16</span>\r\n\t\t\t </p></li>\r\n\t\t <li><a href=\"/\"><img src=\"/static/webapps/images/04.jpg\"><b>豪雅手机正式发布! 
在法国全手工打造的奢侈品</b></a>\r\n\t\t <p>\r\n\t\t\t <span class=\"tulanum\"><a href=\"/\">手机配件</a></span>\r\n\t\t\t\t <span class=\"tutime\">2016-02-16</span>\r\n\t\t\t </p></li>\r\n\t\t </ul>\r\n\t </div>\r\n\t <div class=\"ad\"><img src=\"/static/webapps/images/03.jpg\"></div>\r\n\r\n\t </div>\r\n\r\n </article>\r\n\r\n<footer class=\"footer\">\r\n <div class=\"container\">\r\n{# \t <div class=\"links\">#}\r\n{#\t\t <ul>#}\r\n{#\t\t <li><a href=\"/\">醉牛逼的武魂生涯</a></li>#}\r\n{#\t\t <li><a href=\"/\">观察者网</a></li>#}\r\n{#\t\t\t <li><a href=\"/\">中国投资</a></li>#}\r\n{#\t\t\t <li><a href=\"/\">强国论坛</a></li>#}\r\n{#\t\t\t <li><a href=\"/\">车讯网</a></li>#}\r\n{#\t\t\t <li><a href=\"/\">360导航</a></li>#}\r\n{#\t\t\t\t <li><a href=\"/\">一带一路门户网</a></li>#}\r\n{#\t\t </ul>#}\r\n <p>本站[<a href=\"http://www.muzhuangnet.com/\" >木庄网络博客</a>]的部分内容来源于网络,若侵犯到您的利益,请联系站长删除!谢谢!Powered By [<a href=\"http://www.dtcms.net/\" target=\"_blank\" rel=\"nofollow\" >DTcms</a>] Version 4.0 &nbsp;<a href=\"http://www.miitbeian.gov.cn/\" target=\"_blank\" rel=\"nofollow\" >闽ICP备00000000号-1</a> &nbsp; <a href=\"http://www.muzhuangnet.com/sitemap.xml\" target=\"_blank\" class=\"sitemap\" >网站地图</a></p>\r\n\t </div>\r\n\r\n\r\n </div>\r\n{# <div id=\"gotop\"><a class=\"gotop\"></a></div>#}\r\n</footer>\r\n\r\n\r\n\r\n\r\n\r\n\r\n</body>\r\n</html>" }, { "alpha_fraction": 0.6281938552856445, "alphanum_fraction": 0.6370044350624084, "avg_line_length": 27.399999618530273, "blob_id": "5994b3fa7299014b9baa176b7bac9f796d7e2703", "content_id": "912687fac9877832a5c4fe5ab1fe86c593398f3c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1269, "license_type": "no_license", "max_line_length": 83, "num_lines": 40, "path": "/hr/spider/hr/spiders/lg_position.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport scrapy\nfrom selenium import webdriver\n# from selenium.webdriver.common.keys import Keys\n# from selenium.webdriver import DesiredCapabilities\nfrom scrapy.selector import Selector\nimport time\n\nfrom hr.utils.lagou.Save_2_DB import lagou_DB\nfrom hr.utils.lagou.PageAnalysis import lagou_Analysis\n\n\nclass LgSpider(scrapy.Spider):\n name = \"lgp\"\n allowed_domains = [\"lagou.com\"]\n start_urls = ['http://www.lagou.com/']\n\n # 初始化,这里要调用无头浏览器\n def __init__(self):\n self.driver = webdriver.Chrome()\n # id,如果中间爬取失败,可以设置继续爬取值重新爬取数据,默认从0开始\n id = 2276\n self.units = lagou_DB.Search_Position_URL(id)\n\n def parse(self, response):\n # 从数据中拿取数据,组装 URL\n for unit in self.units:\n # 从数据库中拿取 URL\n print u\"拉钩人才招聘网站 search id: \" + str(unit.id)\n print \"*\" * 30\n\n self.driver.get(unit.url)\n\n\n self.driver.current_window_handle\n aa = self.driver.page_source\n response = Selector(text=aa)\n\n lagou_Analysis.Analysis_Position_Info(response, unit.searching_company)\n time.sleep(3)" }, { "alpha_fraction": 0.48711517453193665, "alphanum_fraction": 0.5091472268104553, "avg_line_length": 43.47921371459961, "blob_id": "c4a9d0629cd9180845a1ddd289431541cd2b314d", "content_id": "bb41cc09d413b427fb212420587dda8897941d28", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 20680, "license_type": "no_license", "max_line_length": 143, "num_lines": 457, "path": "/szsj/sp_sz/szsti/hexunutils/PageAnalysis.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n\nfrom szsti.hexunutils.Save_2_DB import Hexun\n\n\nclass hexun_Analysis(object):\n\n @classmethod\n def 
BaseInfo(self, response, id):\n self.item = {}\n\n self.item[\"company_code\"] = id\n # 公司简介-基本信息\n try:\n self.item[\"name\"] = response.xpath('/html/body/div[5]/div[8]/div[1]/div[1]/table/tbody/tr[1]/td[2]/text()').extract()[0]\n except:\n self.item[\"name\"] = '-'\n try:\n self.item[\n \"daima\"] = response.xpath('/html/body/div[5]/div[8]/div[1]/div[1]/table/tbody/tr[2]/td[2]/text()').extract()[0]\n except:\n self.item[\"daima\"] = '-'\n try:\n self.item[\n \"quancheng\"] = response.xpath('/html/body/div[5]/div[8]/div[1]/div[1]/table/tbody/tr[3]/td[2]/text()').extract()[0]\n except:\n self.item[\"quancheng\"] = '-'\n try:\n self.item[\n \"Englishname\"] = response.xpath('/html/body/div[5]/div[8]/div[1]/div[1]/table/tbody/tr[4]/td[2]/text()').extract()[0]\n except:\n self.item[\"Englishname\"] = '-'\n try:\n self.item[\n \"cengyongming\"] = response.xpath('/html/body/div[5]/div[8]/div[1]/div[1]/table/tbody/tr[5]/td[2]/text()').extract()[0]\n except:\n self.item[\"cengyongming\"] = '-'\n try:\n self.item[\n \"chengliriqi\"] = response.xpath('/html/body/div[5]/div[8]/div[1]/div[1]/table/tbody/tr[6]/td[2]/a/text()').extract()[0]\n except:\n self.item[\"chengliriqi\"] = '-'\n try:\n self.item[\n \"suoshuhangye\"] = response.xpath('/html/body/div[5]/div[8]/div[1]/div[1]/table/tbody/tr[7]/td[2]/a/text()').extract()[0]\n except:\n self.item[\"suoshuhangye\"] = '-'\n try:\n self.item[\n \"suoshugannian\"] = response.xpath('/html/body/div[5]/div[8]/div[1]/div[1]/table/tbody/tr[8]/td[2]/a/text()').extract()[0]\n except:\n self.item[\"suoshugannian\"] = '-'\n try:\n self.item[\n \"suoshudiyu\"] = response.xpath('/html/body/div[5]/div[8]/div[1]/div[1]/table/tbody/tr[9]/td[2]/a/text()').extract()[0]\n except:\n self.item[\"suoshudiyu\"] = '-'\n try:\n self.item[\n \"fadingdabiaoren\"] = response.xpath('/html/body/div[5]/div[8]/div[1]/div[1]/table/tbody/tr[10]/td[2]/text()').extract()[0]\n except:\n self.item[\"fadingdabiaoren\"] = '-'\n try:\n self.item[\n \"dulidongshi\"] = response.xpath('/html/body/div[5]/div[8]/div[1]/div[1]/table/tbody/tr[11]/td[2]/text()').extract()[0]\n except:\n self.item[\"dulidongshi\"] = '-'\n try:\n self.item[\n \"zixunfuwujigou\"] = response.xpath('/html/body/div[5]/div[8]/div[1]/div[1]/table/tbody/tr[12]/td[2]/text()').extract()[0]\n except:\n self.item[\"zixunfuwujigou\"] = '-'\n try:\n self.item[\n \"kuaijishiwusuo\"] = response.xpath('/html/body/div[5]/div[8]/div[1]/div[1]/table/tbody/tr[13]/td[2]/text()').extract()[0]\n except:\n self.item[\"kuaijishiwusuo\"] = '-'\n try:\n self.item[\n \"zhengquanshifudaibiao\"] = response.xpath('/html/body/div[5]/div[8]/div[1]/div[1]/table/tbody/tr[14]/td[2]/text()').extract()[0]\n except:\n self.item[\"zhengquanshifudaibiao\"] = '-'\n # 公司简介-证券信息\n try:\n self.item[\n \"faxingriqi\"] = response.xpath('/html/body/div[5]/div[8]/div[2]/div[1]/table/tbody/tr[1]/td[2]/text()').extract()[0]\n except:\n self.item[\"faxingriqi\"] = '-'\n\n try:\n self.item[\n \"shangshiriqi\"] = response.xpath('/html/body/div[5]/div[8]/div[2]/div[1]/table/tbody/tr[2]/td[2]/text()').extract()[0]\n except:\n self.item[\"shangshiriqi\"] = '-'\n try:\n self.item[\n \"shangshijiaoyisuo\"] = response.xpath('/html/body/div[5]/div[8]/div[2]/div[1]/table/tbody/tr[3]/td[2]/a/text()').extract()[0]\n except:\n self.item[\"shangshijiaoyisuo\"] = '-'\n try:\n self.item[\n \"zhengquanleixing\"] = response.xpath('/html/body/div[5]/div[8]/div[2]/div[1]/table/tbody/tr[4]/td[2]/text()').extract()[0]\n except:\n self.item[\"zhengquanleixing\"] = '-'\n try:\n 
self.item[\n \"liutongguben\"] = response.xpath('/html/body/div[5]/div[8]/div[2]/div[1]/table/tbody/tr[5]/td[2]/a/text()').extract()[0]\n except:\n self.item[\"liutongguben\"] = '-'\n try:\n self.item[\n \"zongguben\"] = response.xpath('/html/body/div[5]/div[8]/div[2]/div[1]/table/tbody/tr[6]/td[2]/a/text()').extract()[0]\n except:\n self.item[\"zongguben\"] = '-'\n try:\n self.item[\n \"zhuchengxiaoshang\"] = response.xpath('/html/body/div[5]/div[8]/div[2]/div[1]/table/tbody/tr[7]/td[2]/a/text()').extract()[0]\n except:\n self.item[\"zhuchengxiaoshang\"] = '-'\n try:\n self.item[\n \"faxingjia\"] = response.xpath('/html/body/div[5]/div[8]/div[2]/div[1]/table/tbody/tr[8]/td[2]/a/text()').extract()[0]\n except:\n self.item[\"faxingjia\"] = '-'\n try:\n self.item[\n \"shangshisourikaipanjia\"] = response.xpath('/html/body/div[5]/div[8]/div[2]/div[1]/table/tbody/tr[9]/td[2]/a/text()').extract()[0]\n except:\n self.item[\"shangshisourikaipanjia\"] = '-'\n try:\n self.item[\n \"shangshishourizhangdiefu\"] = response.xpath('/html/body/div[5]/div[8]/div[2]/div[1]/table/tbody/tr[10]/td[2]/text()').extract()[0]\n except:\n self.item[\"shangshishourizhangdiefu\"] = '-'\n try:\n self.item[\n \"shangshishourihuanshoulv\"] = response.xpath('/html/body/div[5]/div[8]/div[2]/div[1]/table/tbody/tr[11]/td[2]/text()').extract()[0]\n except:\n self.item[\"shangshishourihuanshoulv\"] = '-'\n try:\n self.item[\n \"tebiechulihetuishi\"] = response.xpath('/html/body/div[5]/div[8]/div[2]/div[1]/table/tbody/tr[12]/td[2]/text()').extract()[0]\n except:\n self.item[\"tebiechulihetuishi\"] = '-'\n try:\n self.item[\n \"faxingshiyinglv\"] = response.xpath('/html/body/div[5]/div[8]/div[2]/div[1]/table/tbody/tr[13]/td[2]/text()').extract()[0]\n except:\n self.item[\"faxingshiyinglv\"] = '-'\n try:\n self.item[\n \"zuixinshiyinglv\"] = response.xpath('/html/body/div[5]/div[8]/div[2]/div[1]/table/tbody/tr[14]/td[2]/a/text()').extract()[0]\n except:\n self.item[\"zuixinshiyinglv\"] = '-'\n # 公司简介-工商信息\n try:\n self.item[\n \"zhuceziben\"] = response.xpath('/html/body/div[5]/div[8]/div[1]/div[2]/table/tbody/tr[1]/td[2]/a/text()').extract()[0]\n except:\n self.item[\"zhuceziben\"] = '-'\n try:\n self.item[\n \"zhucedizhi\"] = response.xpath('/html/body/div[5]/div[8]/div[1]/div[2]/table/tbody/tr[2]/td[2]/text()').extract()[0]\n except:\n self.item[\"zhucedizhi\"] = '-'\n try:\n self.item[\n \"suodeisuilv\"] = response.xpath('/html/body/div[5]/div[8]/div[1]/div[2]/table/tbody/tr[3]/td[2]/a/text()').extract()[0]\n except:\n self.item[\"suodeisuilv\"] = '-'\n try:\n self.item[\n \"bangongdizhi\"] = response.xpath('/html/body/div[5]/div[8]/div[1]/div[2]/table/tbody/tr[4]/td[2]/text()').extract()[0]\n except:\n self.item[\"bangongdizhi\"] = '-'\n try:\n self.item[\n \"zhuyaochanpin\"] = response.xpath('/html/body/div[5]/div[8]/div[1]/div[2]/table/tbody/tr[5]/td[2]/text()').extract()[0]\n except:\n self.item[\"zhuyaochanpin\"] = '-'\n\n # 公司简介-联系方式\n try:\n self.item[\n \"lianxidianhua\"] = response.xpath('/html/body/div[5]/div[8]/div[2]/div[2]/table/tbody/tr[1]/td[2]/text()').extract()[0]\n except:\n self.item[\"lianxidianhua\"] = '-'\n try:\n self.item[\n \"gongsichuanzhen\"] = response.xpath('/html/body/div[5]/div[8]/div[2]/div[2]/table/tbody/tr[2]/td[2]/text()').extract()[0]\n except:\n self.item[\"gongsichuanzhen\"] = '-'\n try:\n self.item[\n \"dianziyouxiang\"] = response.xpath('/html/body/div[5]/div[8]/div[2]/div[2]/table/tbody/tr[3]/td[2]/a/text()').extract()[0]\n except:\n self.item[\"dianziyouxiang\"] = '-'\n try:\n 
self.item[\n \"gongsiwangzhi\"] = response.xpath('/html/body/div[5]/div[8]/div[2]/div[2]/table/tbody/tr[4]/td[2]/a/text()').extract()[0]\n except:\n self.item[\"gongsiwangzhi\"] = '-'\n try:\n self.item[\n \"lianxiren\"] = response.xpath('/html/body/div[5]/div[8]/div[2]/div[2]/table/tbody/tr[5]/td[2]/text()').extract()[0]\n except:\n self.item[\"lianxiren\"] = '-'\n try:\n self.item[\n \"youzhengbianma\"] = response.xpath('/html/body/div[5]/div[8]/div[2]/div[2]/table/tbody/tr[6]/td[2]/text()').extract()[0]\n except:\n self.item[\"youzhengbianma\"] = '-'\n try:\n self.item[\"jingyingfanwei\"] = response.xpath('/html/body/div[5]/div[8]/div[1]/div[3]/p/text()').extract()[0]\n except:\n self.item[\"jingyingfanwei\"] = '-'\n try:\n self.item[\"gongsijianjie\"] = response.xpath('/html/body/div[5]/div[8]/div[2]/div[3]/p/text()').extract()[0]\n except:\n self.item[\"gongsijianjie\"] = '-'\n\n for i in self.item:\n try:\n self.item[i] = self.item[i].strip()\n\n except Exception:\n print \"BaseInfo error\"\n pass\n\n Hexun.Save_Base_Info(self.item)\n\n print \"* BaseInfo *\"\n\n @classmethod\n def Gaoguan_Info(self, response, id):\n self.item = {}\n self.item[\"company_code\"] = id\n\n gaoguan_table = response.xpath('//*[@id=\"gkaTable\"]/table/tbody/tr')\n for gaoguan in gaoguan_table:\n try:\n # 高管成员\n self.item[\"dongjiangao\"] = gaoguan.xpath('td[1]/a/text()').extract()[0]\n self.item[\"gaoguanzhiwu\"] = gaoguan.xpath('td[2]/text()').extract()[0]\n self.item[\"renzhiriqi\"] = gaoguan.xpath('td[3]/text()').extract()[0]\n self.item[\"lizhiriqi\"] = gaoguan.xpath('td[4]/text()').extract()[0]\n self.item[\"xueli\"] = gaoguan.xpath('td[5]/text()').extract()[0]\n self.item[\"nianxin\"] = gaoguan.xpath('td[6]/text()').extract()[0]\n self.item[\"chiguzonge\"] = gaoguan.xpath('td[7]/text()').extract()[0]\n self.item[\"chigushuliang\"] = gaoguan.xpath('td[8]/text()').extract()[0]\n\n Hexun.Save_Gaoguan_Info(self.item)\n print u'高管保存成功'\n except:\n pass\n\n\n\n @classmethod\n def Dongshi_Info(self, response, id):\n self.item = {}\n self.item[\"company_code\"] = id\n\n dongshi_table = response.xpath('//*[@id=\"gkbTable\"]/table/tbody/tr')\n for dongshi in dongshi_table:\n try:\n # 董事会成员\n self.item[\"dongjiangao\"] = dongshi.xpath('td[1]/a/text()').extract()[0]\n self.item[\"gaoguanzhiwu\"] = dongshi.xpath('td[2]/text()').extract()[0]\n self.item[\"renzhiriqi\"] = dongshi.xpath('td[3]/text()').extract()[0]\n self.item[\"lizhiriqi\"] = dongshi.xpath('td[4]/text()').extract()[0]\n self.item[\"xueli\"] = dongshi.xpath('td[5]/text()').extract()[0]\n self.item[\"nianxin\"] = dongshi.xpath('td[6]/text()').extract()[0]\n self.item[\"chiguzonge\"] = dongshi.xpath('td[7]/text()').extract()[0]\n self.item[\"chigushuliang\"] = dongshi.xpath('td[8]/text()').extract()[0]\n\n Hexun.Save_Dongshihui_Info(self.item)\n print u'董事保存成功'\n except:\n pass\n\n @classmethod\n def Jianshi_Info(self, response, id):\n self.item = {}\n self.item[\"company_code\"] = id\n\n jianshi_table = response.xpath('//*[@id=\"gkcTable\"]/table/tbody/tr')\n for jianshi in jianshi_table:\n try:\n # 监事会成员\n self.item[\"dongjiangao\"] = jianshi.xpath('td[1]/a/text()').extract()[0]\n self.item[\"gaoguanzhiwu\"] = jianshi.xpath('td[2]/text()').extract()[0]\n self.item[\"renzhiriqi\"] = jianshi.xpath('td[3]/text()').extract()[0]\n self.item[\"lizhiriqi\"] = jianshi.xpath('td[4]/text()').extract()[0]\n self.item[\"xueli\"] = jianshi.xpath('td[5]/text()').extract()[0]\n self.item[\"nianxin\"] = jianshi.xpath('td[6]/text()').extract()[0]\n 
self.item[\"chiguzonge\"] = jianshi.xpath('td[7]/text()').extract()[0]\n self.item[\"chigushuliang\"] = jianshi.xpath('td[8]/text()').extract()[0]\n\n Hexun.Save_Jianshihui_Info(self.item)\n print u'监事保存成功'\n except:\n pass\n\n\n @classmethod\n def Fenhong_Info(self, response, id):\n self.item = {}\n self.item[\"company_code\"] = id\n\n Fenhong_table = response.xpath('//*[@id=\"ldkplaTable\"]/table/tbody/tr')\n\n for jianshi in Fenhong_table:\n try:\n # 高管-监事会成员\n self.item[\"gonggaoshijian\"] = jianshi.xpath('td[1]/text()').extract()[0]\n self.item[\"kuaijiniandu\"] = jianshi.xpath('td[2]/text()').extract()[0]\n self.item[\"songgu\"] = jianshi.xpath('td[3]/text()').extract()[0]\n self.item[\"paixi\"] = jianshi.xpath('td[4]/text()').extract()[0]\n self.item[\"guquandengjiri\"] = jianshi.xpath('td[5]/text()').extract()[0]\n self.item[\"guquanchuxiri\"] = jianshi.xpath('td[6]/text()').extract()[0]\n self.item[\"honggushangshiri\"] = jianshi.xpath('td[7]/text()').extract()[0]\n self.item[\"shifoshisi\"] = jianshi.xpath('td[8]/text()').extract()[0]\n self.item[\"xiangqing\"] = jianshi.xpath('td[9]/a/@href').extract()[0]\n\n if self.item[\"gonggaoshijian\"] == u\"公告时间\":\n pass\n else:\n Hexun.Save_Fenhong_Info(self.item)\n print u\"分红基本信息保存完毕...\"\n except Exception as e:\n pass\n\n @classmethod\n def Fenhong_Zhuanzeng_Info(self, response, id):\n self.item = {}\n self.item[\"company_code\"] = id\n Zhuanzeng_table = response.xpath('//*[@id=\"ldkplbTable\"]/table/tbody/tr')\n for jianshi in Zhuanzeng_table:\n # 分红、转增股本\n try:\n self.item[\"gonggaoshijian\"] = jianshi.xpath('td[1]/text()').extract()[0]\n self.item[\"zhuanzeng\"] = jianshi.xpath('td[2]/text()').extract()[0]\n self.item[\"chuquanchuxiri\"] = jianshi.xpath('td[3]/text()').extract()[0]\n self.item[\"chuquandengjiri\"] = jianshi.xpath('td[4]/text()').extract()[0]\n #self.item[\"guquandengjiri\"] = jianshi.xpath('td[5]/text()').extract()[0]\n self.item[\"zhuanzenggushangshiri\"] = jianshi.xpath('td[5]/text()').extract()[0]\n self.item[\"tongqisonggu\"] = jianshi.xpath('td[6]/text()').extract()[0]\n self.item[\"fanganjianjie\"] = jianshi.xpath('td[7]/text()').extract()[0]\n self.item[\"shifoshisi\"] = jianshi.xpath('td[8]/text()').extract()[0]\n self.item[\"xiangqing\"] = jianshi.xpath('td[9]/a/@href').extract()[0]\n\n if self.item[\"gonggaoshijian\"] == u\"公告时间\":\n pass\n else:\n Hexun.Save_Fenhong_Zhuanzeng_Info(self.item)\n print u\"分红-转增股本-信息保存完毕...\"\n except:\n pass\n\n @classmethod\n def Fenhong_Peigu_Info(self, response, id):\n self.item = {}\n self.item[\"company_code\"] = id\n\n jianshi_table = response.xpath('//*[@id=\"ldkplcTable\"]/table/tbody/tr')\n for jianshi in jianshi_table:\n # 分红、配 股\n try:\n self.item[\"gonggaoshijian\"] = jianshi.xpath('td[1]/text()').extract()[0]\n self.item[\"peigufangan\"] = jianshi.xpath('td[2]/text()').extract()[0]\n self.item[\"peigujia\"] = jianshi.xpath('td[3]/text()').extract()[0]\n self.item[\"jizhunguben\"] = jianshi.xpath('td[4]/text()').extract()[0]\n self.item[\"chuquanri\"] = jianshi.xpath('td[5]/text()').extract()[0]\n self.item[\"guquandengjiri\"] = jianshi.xpath('td[6]/text()').extract()[0]\n self.item[\"jiaokuanqishiri\"] = jianshi.xpath('td[7]/text()').extract()[0]\n self.item[\"jiaokuanzhongzhiri\"] = jianshi.xpath('td[8]/text()').extract()[0]\n self.item[\"peigushangshiri\"] = jianshi.xpath('td[9]/text()').extract()[0]\n self.item[\"mujizijin\"] = jianshi.xpath('td[10]/text()').extract()[0]\n self.item[\"xiangqing\"] = jianshi.xpath('td[11]/a/@href').extract()[0]\n\n if 
self.item[\"gonggaoshijian\"] == u\"公告时间\":\n pass\n else:\n Hexun.Save_Fenhong_Peigu_Info(self.item)\n print u\"分红-配股-信息保存完毕...\"\n except:\n pass\n\n\n #\n @classmethod\n def Fenhong_Huigou_Info(self, response, id):\n self.item = {}\n self.item[\"company_code\"] = id\n\n jianshi_table = response.xpath('//*[@id=\"hklTable\"]/table/tbody/tr')\n for jianshi in jianshi_table:\n # 分红、回购\n try:\n self.item[\"gonggaoshijian\"] = jianshi.xpath('td[1]/text()').extract()[0]\n self.item[\"huigouzanzonggubenbili\"] = jianshi.xpath('td[2]/text()').extract()[0]\n self.item[\"huigougushu\"] = jianshi.xpath('td[3]/text()').extract()[0]\n self.item[\"nihuigoujiage\"] = jianshi.xpath('td[4]/text()').extract()[0]\n self.item[\"gonggaoqianrigujia\"] = jianshi.xpath('td[5]/text()').extract()[0]\n self.item[\"goumaizuigaojia\"] = jianshi.xpath('td[6]/text()').extract()[0]\n self.item[\"goumaizuidijia\"] = jianshi.xpath('td[7]/text()').extract()[0]\n self.item[\"huigouzongjine\"] = jianshi.xpath('td[8]/text()').extract()[0]\n self.item[\"shifoshisi\"] = jianshi.xpath('td[9]/text()').extract()[0]\n self.item[\"xiangqing\"] = jianshi.xpath('td[10]/a/@href').extract()[0]\n\n if self.item[\"gonggaoshijian\"] == u\"公告时间\":\n pass\n else:\n Hexun.Save_Fenhong_Peigu_Info(self.item)\n print u\"分红融资-回购-信息保存完毕...\"\n except:\n pass\n\n\n\n\n @classmethod\n def Showrufenbu_Info(self, response, id):\n\n self.item = {}\n self.item[\"company_code\"] = id\n\n jianshi_table = response.xpath('//*[@id=\"histDealTablea\"]/table/tbody/tr')\n for jianshi in jianshi_table:\n\n\n # print jianshi.extract()\n # print \"*\" * 30\n\n # 收入分布\n try:\n self.item[\"leibiemingcheng\"] = jianshi.xpath('td[2]/text()').extract()[0]\n self.item[\"yinyeshouru\"] = jianshi.xpath('td[3]/text()').extract()[0]\n self.item[\"zhanyinyeshourubili\"] = jianshi.xpath('td[4]/text()').extract()[0]\n self.item[\"yinyechengben\"] = jianshi.xpath('td[5]/text()').extract()[0]\n self.item[\"zhanchengbenbili\"] = jianshi.xpath('td[6]/text()').extract()[0]\n self.item[\"yingyelirun\"] = jianshi.xpath('td[7]/text()').extract()[0]\n self.item[\"zhanlirunbili\"] = jianshi.xpath('td[8]/text()').extract()[0]\n self.item[\"maolilv\"] = jianshi.xpath('td[9]/text()').extract()[0]\n\n if self.item[\"leibiemingcheng\"] == u\"类别名称\":\n pass\n else:\n Hexun.Showrufenbu_Info(self.item)\n print u\"收入分布-信息保存完毕...\"\n except Exception as e:\n print \"& \"* 30\n print e\n print \"& \" * 30\n pass\n\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.6602150797843933, "alphanum_fraction": 0.6623656153678894, "avg_line_length": 24.83333396911621, "blob_id": "bbc320fc4e8ca07abc72c9c944279f91bf6f54df", "content_id": "952d5d72d19b00d07e792dabc5589f62787ab64a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 465, "license_type": "no_license", "max_line_length": 51, "num_lines": 18, "path": "/myqiubai/QiuBaiSpider/qiushibaike/items.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# Define here the models for your scraped items\n#\n# See documentation in:\n# http://doc.scrapy.org/en/latest/topics/items.html\n\nimport scrapy\nclass QiushibaikeItem(scrapy.Item):\n user_image = scrapy.Field()\n user_name = scrapy.Field()\n content = scrapy.Field()\n thumb = scrapy.Field()\n video_image = scrapy.Field()\n video = scrapy.Field()\n laugh = scrapy.Field()\n coments = scrapy.Field()\n played = scrapy.Field()\n" }, { "alpha_fraction": 0.5344827771186829, "alphanum_fraction": 0.6168582439422607, 
"avg_line_length": 25.100000381469727, "blob_id": "d23ed8f97185d19b696a3325c53cd30dc94bb2fe", "content_id": "fcaa7452ea0e7fed94ef5735bb2815bd62886aec", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 522, "license_type": "no_license", "max_line_length": 120, "num_lines": 20, "path": "/monster/orm/webapps/migrations/0002_auto_20170619_0904.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.2 on 2017-06-19 09:04\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('webapps', '0001_initial'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='news_real_content',\n name='content',\n field=models.TextField(default='nil', max_length=1999, verbose_name='\\u65b0\\u95fb\\u5177\\u4f53\\u5185\\u5bb9'),\n ),\n ]\n" }, { "alpha_fraction": 0.6682561039924622, "alphanum_fraction": 0.6995912790298462, "avg_line_length": 46.35483932495117, "blob_id": "48ab444e326dcc29bfeb6015b86d6672c7b34c5f", "content_id": "19e95b83eeb15002fbcfb44981ad9770fea8eae3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1696, "license_type": "no_license", "max_line_length": 93, "num_lines": 31, "path": "/monster/orm/webapps/models.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models\n\n# Create your models here.\n\n\n\nclass Day_News(models.Model):\n # 每日新闻地址, 按天算的\n url = models.CharField(max_length=255, verbose_name=u\"每日新闻真实URL\")\n md5 = models.CharField(max_length=255, verbose_name=u\"每日新闻真实URL的MD5\")\n\nclass Daily_News(models.Model):\n # 每日新闻的真实URL, 每天新闻里的每条新闻\n url = models.CharField(max_length=255, verbose_name=u\"每条新闻的真实URL\")\n md5 = models.CharField(max_length=255, verbose_name=u\"每条新闻真实URL的MD5\")\n\nclass News_Real_Content(models.Model):\n # 每条新闻的具体内容\n # news = models.ForeignKey(Daily_News, related_name='Day_News')\n url = models.CharField(max_length=255, verbose_name=u\"网址\", default='nil')\n title = models.CharField(max_length=255, verbose_name=u\"新闻标题\", default='nil')\n content = models.TextField(max_length=1999, verbose_name=u\"新闻具体内容\", default='nil')\n content_time = models.CharField(max_length=255, verbose_name=u\"新闻时间\", default='nil')\n content_from = models.CharField(max_length=255, verbose_name=u\"新闻来源\", default='nil')\n content_type = models.CharField(max_length=255, verbose_name=u\"新闻类型\", default='文字新闻')\n content_web = models.CharField(max_length=255, verbose_name=u\"新闻爬取来源\", default='来源')\n save_time = models.CharField(max_length=255, verbose_name=u\"记录爬取时间\", default='nil')\n content_html = models.TextField(max_length=1999, default='nil', verbose_name=u\"新闻具体内容源码\")\n" }, { "alpha_fraction": 0.501165509223938, "alphanum_fraction": 0.6229603886604309, "avg_line_length": 56.20000076293945, "blob_id": "0ec54100bdb71f069d27b4dceb64501ee29d4613", "content_id": "f87423a9307a6905496f2acb578b305e30b1d1c6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1716, "license_type": "no_license", "max_line_length": 149, "num_lines": 30, "path": "/szsj/dj_sz/hexun/migrations/0005_showrufenbu_info.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11 on 2017-04-12 01:58\nfrom 
__future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('hexun', '0004_auto_20170411_0958'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Showrufenbu_Info',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('company_code', models.CharField(default='-', max_length=255, verbose_name='\\u516c\\u53f8\\u80a1\\u7968\\u4ee3\\u7801')),\n ('leibiemingcheng', models.CharField(default='-', max_length=255, verbose_name='\\u7c7b\\u522b\\u540d\\u79f0')),\n ('yinyeshouru', models.CharField(default='-', max_length=255, verbose_name='\\u8425\\u4e1a\\u6536\\u5165(\\u4e07\\u5143)')),\n ('zhanyinyeshourubili', models.CharField(default='-', max_length=255, verbose_name='\\u5360\\u8425\\u4e1a\\u6536\\u5165\\u6bd4\\u4f8b(%)')),\n ('yinyechengben', models.CharField(default='-', max_length=255, verbose_name='\\u8425\\u4e1a\\u6210\\u672c(\\u4e07\\u5143)')),\n ('zhanchengbenbili', models.CharField(default='-', max_length=255, verbose_name='\\u5360\\u6210\\u672c\\u6bd4\\u4f8b')),\n ('yingyelirun', models.CharField(default='-', max_length=255, verbose_name='\\u8425\\u4e1a\\u5229\\u6da6(\\u4e07\\u5143)')),\n ('zhanlirunbili', models.CharField(default='-', max_length=255, verbose_name='\\u5360\\u5229\\u6da6\\u6bd4\\u4f8b')),\n ('maolilv', models.CharField(default='-', max_length=255, verbose_name='\\u6bdb\\u5229\\u7387(%)')),\n ],\n ),\n ]\n" }, { "alpha_fraction": 0.5359281301498413, "alphanum_fraction": 0.559880256652832, "avg_line_length": 21.33333396911621, "blob_id": "7938dae55a06077c77a2514ee41f0bd0c5105681", "content_id": "780f85e2c78adcaa01935ccfec20f09ca5c183d4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 334, "license_type": "no_license", "max_line_length": 47, "num_lines": 15, "path": "/baixihecom/monster/templates/templates.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n#coding:utf-8\n\"\"\"\n Author: 'Aric'\n Purpose: ''\n Created: '2015/7/14'\n\"\"\"\n\ntext_info = '''<xml>\n <ToUserName><![CDATA[%s]]></ToUserName>\n <FromUserName><![CDATA[%s]]></FromUserName>\n <CreateTime>%s</CreateTime>\n <MsgType><![CDATA[%s]]></MsgType>\n <Content><![CDATA[%s]]></Content>\n </xml>'''" }, { "alpha_fraction": 0.5890932083129883, "alphanum_fraction": 0.6074825525283813, "avg_line_length": 34.0444450378418, "blob_id": "273f1b9005a008d1fac85beb420048d39f0b21f6", "content_id": "e00f737e2ad44728ce60d27c48e9324156247e28", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1577, "license_type": "no_license", "max_line_length": 130, "num_lines": 45, "path": "/szsj/dj_sz/hightech/migrations/0004_auto_20170227_0226.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.5 on 2017-02-27 02:26\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('hightech', '0003_unit_annual_promoters_and_reports_investment_infor_unit_annual_reports_base_infor_unit_annual_report'),\n ]\n\n operations = [\n migrations.RenameModel(\n old_name='Unit_annual_Promoters_and_reports_investment_Infor',\n new_name='Unit_annual_Promoters_and_reports_investment_Info',\n ),\n migrations.RenameModel(\n old_name='Unit_annual_reports_Base_Infor',\n 
new_name='Unit_annual_reports_Base_Info',\n ),\n migrations.RenameModel(\n old_name='Unit_annual_reports_Website_Infor',\n new_name='Unit_annual_reports_Website_Info',\n ),\n migrations.RenameModel(\n old_name='Unit_Base_Info_Changed_Information',\n new_name='Unit_Base_Info_Changed_Info',\n ),\n migrations.RenameModel(\n old_name='Unit_Base_Info_Shareholder_Information',\n new_name='Unit_Base_Info_Shareholder_Info',\n ),\n migrations.RenameField(\n model_name='unit_annual_reports_base_info',\n old_name='investment_information',\n new_name='investment_Info',\n ),\n migrations.AlterField(\n model_name='hightechinfo',\n name='number',\n field=models.IntegerField(max_length=255, verbose_name='\\u5e8f\\u53f7'),\n ),\n ]\n" }, { "alpha_fraction": 0.5366336703300476, "alphanum_fraction": 0.6099010109901428, "avg_line_length": 24.25, "blob_id": "a1f02159e4c70dc37c79c8723da943a1e50aedb5", "content_id": "55e6928aeaf7568303234f0e3c48a94bd9713716", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 505, "license_type": "no_license", "max_line_length": 118, "num_lines": 20, "path": "/qiubai_CrawlSpider/frontend/qiubaifrontend/spiders/migrations/0002_auto_20160520_0841.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.9.6 on 2016-05-20 08:41\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('spiders', '0001_initial'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='qiushi',\n name='url0',\n field=models.CharField(blank=True, default=None, max_length=255, verbose_name='\\u89c6\\u9891\\u5185\\u5bb9'),\n ),\n ]\n" }, { "alpha_fraction": 0.4975999891757965, "alphanum_fraction": 0.5504000186920166, "avg_line_length": 22.148147583007812, "blob_id": "1461f2d6ded24dd41f8a336f8a4b633b32ac22e6", "content_id": "b0fb0babbd861912e89b3b4e7ed6c4f10381aceb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 625, "license_type": "no_license", "max_line_length": 48, "num_lines": 27, "path": "/szsj/dj_sz/hightech/migrations/0005_auto_20170227_0637.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.5 on 2017-02-27 06:37\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('hightech', '0004_auto_20170227_0226'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='unit_base_info',\n name='address',\n ),\n migrations.RemoveField(\n model_name='unit_base_info',\n name='email',\n ),\n migrations.RemoveField(\n model_name='unit_base_info',\n name='website',\n ),\n ]\n" }, { "alpha_fraction": 0.5194552540779114, "alphanum_fraction": 0.5924124717712402, "avg_line_length": 48.74193572998047, "blob_id": "ee62d5d143de007016d89f38639420184c16617f", "content_id": "c5713b31ed17da7758dcc4f688ba75338e68bfce", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3084, "license_type": "no_license", "max_line_length": 131, "num_lines": 62, "path": "/qiubai_CrawlSpider/frontend/qiubaifrontend/spiders/migrations/0001_initial.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.9.6 on 2016-05-20 08:11\nfrom __future__ import unicode_literals\n\nfrom django.db import 
migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='Comment',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('comment', models.CharField(max_length=255, verbose_name='\\u8bc4\\u8bba\\u5185\\u5bb9')),\n ],\n ),\n migrations.CreateModel(\n name='QiuShi',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('page_url', models.CharField(max_length=255, verbose_name='\\u7f51\\u7ad9\\u5730\\u5740')),\n ('user', models.CharField(max_length=255, verbose_name='\\u7528\\u6237\\u6635\\u79f0')),\n ('content', models.CharField(max_length=255, verbose_name='\\u6587\\u5b57\\u5185\\u5bb9')),\n ('type', models.CharField(max_length=255, verbose_name='\\u7c7b\\u578b')),\n ('url', models.CharField(blank=True, max_length=255, verbose_name='\\u56fe\\u7247\\u5185\\u5bb9')),\n ('url0', models.CharField(blank=True, max_length=255, verbose_name='\\u89c6\\u9891\\u5185\\u5bb9')),\n ('smiling', models.IntegerField(verbose_name='\\u7b11\\u8138\\u6570\\u91cf')),\n ('comment_count', models.IntegerField(verbose_name='\\u8bc4\\u8bba\\u6570\\u91cf')),\n ],\n ),\n migrations.CreateModel(\n name='User',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=255, verbose_name='\\u7528\\u6237\\u6635\\u79f0')),\n ('fans', models.IntegerField(verbose_name='\\u7c89\\u4e1d\\u6570\\u91cf')),\n ('follow', models.IntegerField(verbose_name='\\u5173\\u6ce8\\u6570\\u91cf')),\n ('comment', models.IntegerField(verbose_name='\\u8bc4\\u8bba\\u6570\\u91cf')),\n ('marriage', models.CharField(max_length=255, verbose_name='\\u5a5a\\u59fb\\u72b6\\u6001')),\n ('occupation', models.CharField(max_length=255, verbose_name='\\u804c\\u4e1a')),\n ('constellation', models.CharField(max_length=255, verbose_name='\\u661f\\u5ea7')),\n ('age', models.IntegerField(verbose_name='\\u7cd7\\u9f84')),\n ],\n ),\n migrations.AddField(\n model_name='comment',\n name='qiushi',\n field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='CommentQishi', to='spiders.QiuShi'),\n ),\n migrations.AddField(\n model_name='comment',\n name='user',\n field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='CommentUser', to='spiders.User'),\n ),\n ]\n" }, { "alpha_fraction": 0.6948052048683167, "alphanum_fraction": 0.701298713684082, "avg_line_length": 18.25, "blob_id": "06022e6f698f43eb0960626378d0a3665ea27e9f", "content_id": "abe6c5c1b2fcc039f6f85fcc4d1e06ec53d6a803", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 154, "license_type": "no_license", "max_line_length": 39, "num_lines": 8, "path": "/monster/orm/webapps/apps.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.apps import AppConfig\n\n\nclass WebappsConfig(AppConfig):\n name = 'webapps'\n" }, { "alpha_fraction": 0.5672684907913208, "alphanum_fraction": 0.5716365575790405, "avg_line_length": 32.00961685180664, "blob_id": "c1cf44dfe80e53c8826f11bf541b12fc1a534c5a", "content_id": "910a800db8166c3da115a240ce94022db33c22b2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3694, "license_type": 
"no_license", "max_line_length": 143, "num_lines": 104, "path": "/hr/spider/hr/spiders/lg.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport scrapy\nfrom selenium import webdriver\n# from selenium.webdriver.common.keys import Keys\n# from selenium.webdriver import DesiredCapabilities\nfrom scrapy.selector import Selector\nimport time\n\nfrom hr.utils.lagou.Save_2_DB import lagou_DB\nfrom hr.utils.lagou.PageAnalysis import lagou_Analysis\n\n\nclass LgSpider(scrapy.Spider):\n name = \"lg\"\n allowed_domains = [\"lagou.com\"]\n start_urls = ['http://www.lagou.com/']\n\n # 初始化,这里要调用无头浏览器\n def __init__(self):\n self.driver = webdriver.Chrome()\n # id,如果中间爬取失败,可以设置继续爬取值重新爬取数据,默认从0开始\n id = 0\n self.units = lagou_DB.Search_ID(id)\n self.item = {}\n\n def parse(self, response):\n\n # 打开网站\n self.driver.get(response.url)\n\n print u\"你有5秒钟来选择地区, 推荐地区-全国\"\n time.sleep(5)\n\n\n\n self.driver.find_element_by_class_name(\"search_input\").clear()\n time.sleep(2)\n self.driver.find_element_by_class_name(\"search_input\").send_keys(u'百度')\n time.sleep(2)\n self.driver.find_element_by_id(\"search_button\").click()\n\n time.sleep(2)\n # 从数据中拿取数据,组装 URL\n for unit in self.units:\n print u\"拉钩人才招聘网站 search id: \" + str(unit.id)\n print \"*\" * 30\n\n self.driver.current_window_handle\n\n self.driver.find_element_by_id('keyword').clear()\n self.driver.find_element_by_id('keyword').send_keys(unit.searching_company)\n self.driver.find_element_by_id('submit').click()\n time.sleep(2)\n self.driver.find_element_by_id('tab_pos').click()\n time.sleep(2)\n\n self.driver.current_window_handle\n\n aa = self.driver.page_source\n response = Selector(text=aa)\n\n try:\n # 如果有相关工作岗位, 立即保存起来\n self.get_postion_url(response, unit.searching_company)\n\n # 如果有第二页相关工作岗位,立即点击第二页,然后继续保存下来\n try:\n self.driver.find_element_by_css_selector('#s_position_list>div.item_con_pager>div>span.pager_next.pager_next_disabled')\n self.aa = False\n except:\n self.aa = True\n\n\n while self.aa:\n\n self.driver.find_element_by_class_name('pager_next').click()\n time.sleep(2)\n\n self.driver.current_window_handle\n aa = self.driver.page_source\n response = Selector(text=aa)\n self.get_postion_url(response, unit.searching_company)\n try:\n self.driver.find_element_by_css_selector('#s_position_list>div.item_con_pager>div>span.pager_next.pager_next_disabled')\n self.aa = False\n except:\n self.aa = True\n\n except:\n print \"error occours ... 
No such postion\"\n\n # lagou_Analysis.Analysis_Position_Info(response, unit.searching_company)\n\n\n\n def get_postion_url(self, response, searching_company):\n\n postion_table = response.xpath('//*[@id=\"s_position_list\"]/ul/li')\n for postion in postion_table:\n self.item[\"url\"] = postion.xpath('div/div/div/a[@class=\"position_link\"]/@href').extract()[0]\n self.item[\"searching_company\"] = searching_company\n lagou_DB.Save_Position_URL(self.item)\n\n print searching_company + u\": 职位保存完毕\"\n\n" }, { "alpha_fraction": 0.692307710647583, "alphanum_fraction": 0.7252747416496277, "avg_line_length": 5.5, "blob_id": "886760b86055da32498af05942ad2c20ac859e01", "content_id": "5ea07f970764cf22e6203e16fab0e3b40c134a32", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 147, "license_type": "no_license", "max_line_length": 20, "num_lines": 14, "path": "/zhihu/README.md", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# zhihu\n这是一个爬虫代码\n\n\n\n访问密钥 ID\nAKIARBBMNKA7C3QEUDM5 \n私有访问密钥\n4XwPrUGXxJ3KG6Vkm/WJO4ealrZS5dat0I+dvte8\n\n访问密钥 ID\nAKIARBBMNKA7L4AF5AEP\n私有访问密钥\nO7DxwzsuJo4Vgydy3kF3FDVAGq+tPGsLKlMwRqQj\n" }, { "alpha_fraction": 0.4897959232330322, "alphanum_fraction": 0.5714285969734192, "avg_line_length": 13.142857551574707, "blob_id": "8a1e3ac7ac20c123fafea5e64598ec13bdbe3390", "content_id": "8439d1a39d34506947d78b3ef35563c34472302b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 98, "license_type": "no_license", "max_line_length": 22, "num_lines": 7, "path": "/baixihecom/monster/utils/__init__.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n#coding:utf-8\n\"\"\"\n Author: 'Aric'\n Purpose: ''\n Created: '2015/7/14'\n\"\"\"" }, { "alpha_fraction": 0.5557184815406799, "alphanum_fraction": 0.5591397881507874, "avg_line_length": 32.01612854003906, "blob_id": "11d13643b37fd480378e6da0a57cf2383fd61b67", "content_id": "682ea1131d49e6504ea8cd084885e561c4bff7a7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2136, "license_type": "no_license", "max_line_length": 88, "num_lines": 62, "path": "/uumnt/uumnt/spiders/images.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport scrapy\nfrom scrapy.http import Request\nfrom uumnt import items\n\nclass ExampleSpider(scrapy.Spider):\n name = \"imgs\"\n allowed_domains = [\"uumnt.com\"]\n start_urls = (\n 'http://www.uumnt.com/',\n )\n\n Base_URL = 'http://www.uumnt.com'\n\n def parse(self, response):\n # 获取栏目连接\n base_tab = response.xpath('//div[@class=\"tags_list\"]/dl/dt/a/@href').extract()\n for i in base_tab:\n url = self.Base_URL+i\n try:\n yield Request(url, callback=self.image_tabs)\n except Exception as e:\n print e\n\n def image_tabs(self, response):\n #获取栏目列表里的图片链接\n urls = response.xpath('//div[@class=\"wf-main\"]/div/a/@href').extract()\n for i in urls:\n yield Request(i, callback=self.get_image_url)\n\n #获取翻页链接\n pages = response.xpath('//div[@class=\"page\"]/ul/li/a/@href').extract()\n for i in pages:\n url = self.Base_URL+i\n try:\n yield Request(url, callback=self.image_tabs)\n except Exception, e:\n print e\n\n def get_image_url(self, response):\n mysplite = response.url.split('/')\n base_fenye_url = response.url.split(mysplite[len(mysplite)-1])\n base_fenye_url = base_fenye_url[0]\n\n # 获取图片页面的翻页图片链接\n urls = 
response.xpath('//div[@id=\"fenye\"]/li/a/@href').extract()\n for i in urls:\n real_image_page_url = base_fenye_url+i\n yield Request(real_image_page_url, callback=self.get_image_url)\n\n # 获取图片真实地址\n Base_image_url = \"http://img.uumnt.com\"\n image_url = response.xpath('//div[@class=\"bbox\"]/a/img/@src').extract()\n image_title = response.xpath('//div[@class=\"bbt\"]/h2/strong/a/text()').extract()\n\n if Base_image_url not in image_url[0]:\n image_url[0] = Base_image_url+image_url[0]\n item = items.ImagesItem()\n item[\"image_url\"] = image_url\n item[\"image_title\"] = image_title\n\n yield item" }, { "alpha_fraction": 0.5492818355560303, "alphanum_fraction": 0.5532441735267639, "avg_line_length": 37.11320877075195, "blob_id": "804fb65628eb7b1d160c4998b523e2e4fef227b4", "content_id": "066fc2e03ed1bde16dfbde92da63174354520864", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2065, "license_type": "no_license", "max_line_length": 103, "num_lines": 53, "path": "/irole/irolespider/irolespider/spiders/cos8.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport scrapy\nfrom scrapy.selector import Selector \nfrom scrapy.http import Request\nfrom irolespider import settings\nfrom irolespider.items import Cos8Item\n\nfrom irolespider.utils import Spider_Django\n\n\nclass MycosSpider(scrapy.Spider):\n name = \"mycos\"\n allowed_domains = [\"cosplay8.com\"]\n start_urls = (\n 'http://www.cosplay8.com/pic/chinacos/',\n 'http://www.cosplay8.com/pic/worldcos/',\n 'http://www.cosplay8.com/pic/cospic/', \n )\n def parse(self, response):\n sel = Selector(response)\n #获取图片详细页面\n img_page_url = sel.xpath('//div[@class=\"pagew center hauto pic_list\"]/ul/li/a/@href').extract()\n for i in img_page_url:\n img_page_real_url = settings.base_url + i\n yield Request(img_page_real_url, callback=self.Get_Imag_URL)\n # 获取图片list 页面\n page_url = sel.xpath('//div[@class=\"pagebox center mbottom yahei\"]/a/@href').extract()\n for relative_url in page_url:\n page_real_url = response.url + relative_url\n yield Request(page_real_url, callback=self.parse)\n # ----------------------------------------------------------------------\n def Get_Imag_URL(self, response):\n\n aa = Spider_Django.django_sql()\n if aa.is_crawled(response.url):\n \"\"\"获取图片的真实地址\"\"\"\n\n sel = Selector(response)\n img_url = sel.xpath('//img[@id=\"bigimg\"]/@src').extract()\n context = sel.xpath('//div[@class=\"p_box hauto tcenter\"]/h1/text()').extract()\n items = []\n item = Cos8Item()\n for relative_img_url in img_url:\n img_real_url = settings.base_url + relative_img_url\n item[\"img_base_url\"] = [img_real_url]\n item[\"html_base_url\"] = response.url\n for aa in context:\n item[\"img_content\"] = [aa]\n items.append(item)\n return items\n else:\n print response.url + \" is crawled !\"\n pass" }, { "alpha_fraction": 0.525896430015564, "alphanum_fraction": 0.613545835018158, "avg_line_length": 24.100000381469727, "blob_id": "cf7fc5a5980b5f99cf79262295b67706cfa2741f", "content_id": "7f96019eb9ee819f4f68d23ab6287f475125d381", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 502, "license_type": "no_license", "max_line_length": 87, "num_lines": 20, "path": "/szsj/dj_sz/hightech/migrations/0009_auto_20170307_0232.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.5 on 2017-03-07 02:32\nfrom __future__ import 
unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('hightech', '0008_auto_20170307_0232'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='idc_base_info',\n name='Satisfaction',\n field=models.CharField(max_length=1024, verbose_name='\\u6ee1\\u610f\\u5ea6'),\n ),\n ]\n" }, { "alpha_fraction": 0.6888741850852966, "alphanum_fraction": 0.7231788039207458, "avg_line_length": 49.89864730834961, "blob_id": "7059d5e6cca3074c7df63caa17454354e31d3ad1", "content_id": "8cdd7c63f8e7d3466b6965359b08e3166655510e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8510, "license_type": "no_license", "max_line_length": 107, "num_lines": 148, "path": "/szsj/dj_sz/hightech/models.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom __future__ import unicode_literals\n\nfrom django.db import models\n\n# Create your models here.\n\n\nclass HightechInfo(models.Model):\n # 企业信息\n number = models.IntegerField(verbose_name=u\"序号\") # 这个做主键\n KeyID = models.CharField(max_length=255, verbose_name=u\"证书编号\")\n Unit_name = models.CharField(max_length=255, verbose_name=u\"单位名称\")\n address = models.CharField(max_length=255, verbose_name=u\"地址\")\n Subordinate_Domain = models.CharField(max_length=255, verbose_name=u\"所属领域\")\n type = models.CharField(max_length=255, verbose_name=u\"高企类别\")\n\n\n\n\nclass IDC_Base_Info(models.Model):\n # 企业信息\n url = models.CharField(max_length=255, verbose_name=u\"爬取网址\")\n name = models.CharField(max_length=255, verbose_name=u\"IDC名字\")\n company = models.CharField(max_length=255, verbose_name=u\"公司名字\")\n zone = models.CharField(max_length=255, verbose_name=u\"地区\")\n address = models.CharField(max_length=255, verbose_name=u\"公司地址\")\n phone_number = models.CharField(max_length=255, verbose_name=u\"联系电话\")\n website = models.CharField(max_length=255, verbose_name=u\"网址\")\n Main_business = models.CharField(max_length=1024, verbose_name=u\"主营业务\")\n Satisfaction = models.CharField(max_length=1024, verbose_name=u\"满意度\")\n QQ = models.CharField(max_length=255, verbose_name=u\"qq\", default=\"null\")\n\n\n\n\n\n\n\n\nclass Unit_Base_Info(models.Model):\n # 企业详细信息-1\n\n searched_name = models.CharField(max_length=255, verbose_name=u\"查询到的名字\", default='-')\n searching_name = models.CharField(max_length=255, verbose_name=u\"需要查询的名字\", default='-')\n\n phone_nunber = models.CharField(max_length=255, verbose_name=u\"电话号码\")\n # email = models.CharField(max_length=255, verbose_name=u\"邮箱\")\n # website = models.CharField(max_length=255, verbose_name=u\"网址\")\n # address = models.CharField(max_length=255, verbose_name=u\"地址\")\n # 企业工商信息\n code = models.CharField(max_length=255, verbose_name=u\"统一社会信用代码\")\n Registration_number = models.CharField(max_length=255, verbose_name=u\"注册号\")\n Organization_code = models.CharField(max_length=255, verbose_name=u\"组织机构代码\")\n Operating_state = models.CharField(max_length=255, verbose_name=u\"经营状态\")\n Legal_representative = models.CharField(max_length=255, verbose_name=u\"法定代表人\")\n registered_capital = models.CharField(max_length=255, verbose_name=u\"注册资本\")\n Company_type = models.CharField(max_length=255, verbose_name=u\"公司类型\")\n date_of_establishment = models.CharField(max_length=255, verbose_name=u\"成立日期\")\n Operating_period = models.CharField(max_length=255, verbose_name=u\"营业期限\")\n registration_authority = models.CharField(max_length=255, 
verbose_name=u\"登记机关\")\n Date_of_issue = models.CharField(max_length=255, verbose_name=u\"发照日期\")\n company_size = models.CharField(max_length=255, verbose_name=u\"公司规模\")\n Subordinate_industry = models.CharField(max_length=255, verbose_name=u\"所属行业\")\n English_name = models.CharField(max_length=255, verbose_name=u\"英文名\")\n Name_used_Before = models.CharField(max_length=255, verbose_name=u\"曾用名\")\n Enterprise_address = models.CharField(max_length=255, verbose_name=u\"企业地址\")\n Business_scope = models.CharField(max_length=255, verbose_name=u\"经营范围\")\n\n\nclass Unit_Base_Info_Shareholder_Info(models.Model):\n # 股东信息\n searched_name = models.CharField(max_length=255, verbose_name=u\"查询到的名字\", default='-')\n searching_name = models.CharField(max_length=255, verbose_name=u\"需要查询的名字\", default='-')\n Shareholder = models.CharField(max_length=255, verbose_name=u\"股东\")\n Shareholding_ratio = models.CharField(max_length=255, verbose_name=u\"持股比例\")\n Subscribed_capital_contribution = models.CharField(max_length=255, verbose_name=u\"认缴出资额\")\n Subscription_Date = models.CharField(max_length=255, verbose_name=u\"认缴出资日期\")\n Shareholder_type = models.CharField(max_length=255, verbose_name=u\"股东类型\")\n\n\nclass Unit_Base_Info_Changed_Info(models.Model):\n # 变更信息\n searched_name = models.CharField(max_length=255, verbose_name=u\"查询到的名字\", default='-')\n searching_name = models.CharField(max_length=255, verbose_name=u\"需要查询的名字\", default='-')\n Change_date = models.CharField(max_length=255, verbose_name=u\"变更日期\")\n Change_item = models.CharField(max_length=255, verbose_name=u\"变更项目\")\n Before_change = models.CharField(max_length=255, verbose_name=u\"变更前\")\n After_change = models.CharField(max_length=255, verbose_name=u\"变更后\")\n\nclass Unit_annual_reports_Base_Info(models.Model):\n # 企业年报-企业基本信息\n searched_name = models.CharField(max_length=255, verbose_name=u\"查询到的名字\", default='-')\n searching_name = models.CharField(max_length=255, verbose_name=u\"需要查询的名字\", default='-')\n Registration_number = models.CharField(max_length=255, verbose_name=u\"注册号\")\n Business_state = models.CharField(max_length=255, verbose_name=u\"企业经营状态\")\n Enterprise_telephone = models.CharField(max_length=255, verbose_name=u\"企业联系电话\")\n Email = models.CharField(max_length=255, verbose_name=u\"电子邮箱\")\n Postcode = models.CharField(max_length=255, verbose_name=u\"邮政编码\")\n number_of_people_engaged = models.CharField(max_length=255, verbose_name=u\"从业人数\")\n residence = models.CharField(max_length=255, verbose_name=u\"住所\")\n transfer_of_shareholder_equity = models.CharField(max_length=255, verbose_name=u\"有限责任公司本年度是否发生股东股权转让\")\n investment_Info = models.CharField(max_length=255, verbose_name=u\"企业是否有投资信息或购买其他公司股权\")\n\n\nclass Unit_annual_reports_Website_Info(models.Model):\n # 企业年报-网站或网店信息\n searched_name = models.CharField(max_length=255, verbose_name=u\"查询到的名字\", default='-')\n searching_name = models.CharField(max_length=255, verbose_name=u\"需要查询的名字\", default='-')\n\n Web_Type = models.CharField(max_length=255, verbose_name=u\"类型\")\n Web_Name = models.CharField(max_length=255, verbose_name=u\"名称\")\n Web_Site = models.CharField(max_length=255, verbose_name=u\"网址\")\n\n\nclass Unit_annual_Promoters_and_reports_investment_Info(models.Model):\n # 企业年报-发起人及出资信息\n searched_name = models.CharField(max_length=255, verbose_name=u\"查询到的名字\", default='-')\n searching_name = models.CharField(max_length=255, verbose_name=u\"需要查询的名字\", default='-')\n Sponsor = models.CharField(max_length=255, verbose_name=u\"发起人\")\n 
Subscribed_capital_contribution = models.CharField(max_length=255, verbose_name=u\"认缴出资额(万元)\")\n Time_of_subscription = models.CharField(max_length=255, verbose_name=u\"认缴出资时间\")\n Subscribed_capital_contribution = models.CharField(max_length=255, verbose_name=u\"认缴出资方式\")\n Paid_in_capital_contribution = models.CharField(max_length=255, verbose_name=u\"实缴出资额(万元)\")\n Investment_time = models.CharField(max_length=255, verbose_name=u\"出资时间\")\n Investment_method = models.CharField(max_length=255, verbose_name=u\"出资方式\")\n\n\n\n\n\nclass Itjuzi_Company_Info(models.Model):\n # 企业年报-发起人及出资信息\n company_allname = models.CharField(max_length=999, verbose_name=u\"企业全名\")\n company_name = models.CharField(max_length=999, verbose_name=u\"企业名\")\n\n\n\nclass Gao_Company_Info(models.Model):\n # 企业年报-发起人及出资信息\n phone_nunber = models.CharField(max_length=255, verbose_name=u\"电话号码\")\n Enterprise_address = models.CharField(max_length=255, verbose_name=u\"企业地址\")\n company_name = models.CharField(max_length=999, verbose_name=u\"企业全名\")\n Legal_representative = models.CharField(max_length=255, verbose_name=u\"法定代表人\")\n Business_scope = models.CharField(max_length=255, verbose_name=u\"经营范围\")\n status = models.CharField(max_length=255, verbose_name=u\"状态\")\n category = models.CharField(max_length=255, verbose_name=u\"分类\")\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.5186046361923218, "alphanum_fraction": 0.5779069662094116, "avg_line_length": 27.66666603088379, "blob_id": "93a9a3cb7e246cf940c5d764d5d3324ce8a6623a", "content_id": "2301ce19698b4d53d04b8fd5de5559655457179c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 860, "license_type": "no_license", "max_line_length": 92, "num_lines": 30, "path": "/szsj/dj_sz/hightech/migrations/0007_auto_20170307_0228.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.5 on 2017-03-07 02:28\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('hightech', '0006_idc_base_info'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='hightechinfo',\n name='number',\n field=models.IntegerField(verbose_name='\\u5e8f\\u53f7'),\n ),\n migrations.AlterField(\n model_name='idc_base_info',\n name='name',\n field=models.CharField(max_length=255, verbose_name='IDC\\u540d\\u5b57'),\n ),\n migrations.AlterField(\n model_name='idc_base_info',\n name='url',\n field=models.CharField(max_length=255, verbose_name='\\u722c\\u53d6\\u7f51\\u5740'),\n ),\n ]\n" }, { "alpha_fraction": 0.5388046503067017, "alphanum_fraction": 0.5423728823661804, "avg_line_length": 33, "blob_id": "1d5423876fad92d7e3c7e10bfea8b2f613879637", "content_id": "29957714877553333b14c6e5caf0203a008775d2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1141, "license_type": "no_license", "max_line_length": 75, "num_lines": 33, "path": "/uumnt/uumnt/pipelines.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# Define your item pipelines here\n#\n# Don't forget to add your pipeline to the ITEM_PIPELINES setting\n# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html\n\nfrom scrapy.pipelines.images import ImagesPipeline\nfrom scrapy.http import Request\n\nclass ImagesPipeline(ImagesPipeline):\n\n title = \"\"\n def get_media_requests(self, item, info):\n\n 
for user_image in item[\"image_url\"]:\n yield Request(user_image)\n self.title = item[\"image_title\"]\n\n def item_completed(self, results, item, info):\n image_paths = [[x['path'] for ok, x in results if ok]]\n if not image_paths:\n raise DropItem(\"Item contains no images\")\n return\n\n # 修改保存图片的文件名...\n #----------------------------------------------------------------------\n def file_path(self, request, response=None, info=None):\n url = request.url\n image_guid = url.split('/')[-1]\n ttle = self.title[0].encode('gbk').split('(')[0]\n return '%s/%s.jpg' % (ttle, image_guid)\n #----------------------------------------------------------------------" }, { "alpha_fraction": 0.6091954112052917, "alphanum_fraction": 0.6103448271751404, "avg_line_length": 21.28205108642578, "blob_id": "9ed53a177786db32af87a87e56ffaba746e1d0b6", "content_id": "c847fdd656eeeec9a3df3acbdc2d88acfa397194", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 870, "license_type": "no_license", "max_line_length": 59, "num_lines": 39, "path": "/qiubai_CrawlSpider/spiders/qiubai/pipelines.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# from scrapy.contrib.pipeline.images import ImagesPipeline\n# from scrapy.http import Request\n# from scrapy.exceptions import DropItem\nfrom qiubai.QiubaiORM import SaveToDB\n\n\n\nclass QiuShiPipeline(object):\n\n\n def process_item(self, item, spider):\n if 'type' not in item:\n return item\n else:\n qiushi = SaveToDB.QiushiORM()\n qiushi.Saveqiushi(item)\n # self.content.insert(dict(item))\n return item\n\n def spider_closed(self, spider):\n pass\n\n\n\nclass UserPipeline(object):\n def process_item(self, item, spider):\n if 'follow' not in item:\n return item\n else:\n qiushi = SaveToDB.QiushiORM()\n qiushi.SaveUser(item)\n\n # self.content.insert(dict(item))\n return item\n\n def spider_closed(self, spider):\n pass\n\n" }, { "alpha_fraction": 0.5026881694793701, "alphanum_fraction": 0.6105991005897522, "avg_line_length": 54.40425491333008, "blob_id": "6c2d96ea3252d428f718af11cf9c2453f7e27438", "content_id": "29b3300add6c31defb0898f45287e53ef7535eaa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2604, "license_type": "no_license", "max_line_length": 148, "num_lines": 47, "path": "/monster/orm/webapps/migrations/0001_initial.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.2 on 2017-06-19 08:56\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='Daily_News',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('url', models.CharField(max_length=255, verbose_name='\\u6bcf\\u6761\\u65b0\\u95fb\\u7684\\u771f\\u5b9eURL')),\n ('md5', models.CharField(max_length=255, verbose_name='\\u6bcf\\u6761\\u65b0\\u95fb\\u771f\\u5b9eURL\\u7684MD5')),\n ],\n ),\n migrations.CreateModel(\n name='Day_News',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('url', models.CharField(max_length=255, verbose_name='\\u6bcf\\u65e5\\u65b0\\u95fb\\u771f\\u5b9eURL')),\n ('md5', models.CharField(max_length=255, 
verbose_name='\\u6bcf\\u65e5\\u65b0\\u95fb\\u771f\\u5b9eURL\\u7684MD5')),\n ],\n ),\n migrations.CreateModel(\n name='News_Real_Content',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('url', models.CharField(default='nil', max_length=255, verbose_name='\\u7f51\\u5740')),\n ('title', models.CharField(default='nil', max_length=255, verbose_name='\\u65b0\\u95fb\\u6807\\u9898')),\n ('content', models.CharField(default='nil', max_length=255, verbose_name='\\u65b0\\u95fb\\u5177\\u4f53\\u5185\\u5bb9')),\n ('content_time', models.CharField(default='nil', max_length=255, verbose_name='\\u65b0\\u95fb\\u65f6\\u95f4')),\n ('content_from', models.CharField(default='nil', max_length=255, verbose_name='\\u65b0\\u95fb\\u6765\\u6e90')),\n ('content_type', models.CharField(default='\\u6587\\u5b57\\u65b0\\u95fb', max_length=255, verbose_name='\\u65b0\\u95fb\\u7c7b\\u578b')),\n ('content_web', models.CharField(default='\\u6765\\u6e90', max_length=255, verbose_name='\\u65b0\\u95fb\\u722c\\u53d6\\u6765\\u6e90')),\n ('save_time', models.CharField(default='nil', max_length=255, verbose_name='\\u8bb0\\u5f55\\u722c\\u53d6\\u65f6\\u95f4')),\n ('content_html', models.TextField(default='nil', max_length=1999, verbose_name='\\u65b0\\u95fb\\u5177\\u4f53\\u5185\\u5bb9\\u6e90\\u7801')),\n ],\n ),\n ]\n" }, { "alpha_fraction": 0.5202205777168274, "alphanum_fraction": 0.5275735259056091, "avg_line_length": 35.80555725097656, "blob_id": "df4cd000a0d0fd7bfcd0e8b878cf2b4e7d97faf4", "content_id": "276aa74f057b46031b2ad432ab4297bb045b2610", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2832, "license_type": "no_license", "max_line_length": 114, "num_lines": 72, "path": "/myqiubai/QiuBaiSpider/qiushibaike/spiders/qiubai.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\nimport scrapy\r\nfrom scrapy.http import Request\r\n\r\nfrom qiushibaike import items\r\nclass QiuBaiSpider(scrapy.Spider):\r\n name = \"qiubai\"\r\n allowed_domains = [\"qiushibaike.com\"]\r\n start_urls = (\r\n 'http://www.qiushibaike.com/',\r\n )\r\n base_url = \"http://www.qiushibaike.com\"\r\n def parse(self, response):\r\n\r\n # 解析页面超级连接,到详细内容展示页面\r\n xiaohua_url = response.xpath('//div[@class=\"stats\"]/span[@class=\"stats-comments\"]/a/@href').extract()\r\n for i in xiaohua_url:\r\n url = self.base_url + i\r\n yield Request(url, callback=self.get_per_xiaohua)\r\n # 获取下一页连接地址\r\n try:\r\n next_page = response.xpath('//div[@class=\"pageto\"]/a[@class=\"next\"]/@href').extract()\r\n except Exception, e:\r\n pass\r\n if len(next_page) == 0:\r\n pass\r\n else:\r\n next_page = self.base_url+next_page[0]\r\n yield Request(next_page, callback=self.parse)\r\n\r\n def get_per_xiaohua(self, response):\r\n # 获取用户信息\r\n user_image = response.xpath('//div[@class=\"article block untagged noline mb15\"]/div/a/img/@src').extract()\r\n user_name = response.xpath('//div[@class=\"article block untagged noline mb15\"]/div/a/text()').extract()\r\n\r\n # 获取笑话信息\r\n content = response.xpath('//div[@class=\"content\"]/text()').extract()\r\n\r\n # 获取图片\r\n thumb = response.xpath('//div[@class=\"thumb\"]/img/@src').extract()\r\n\r\n # 获取Video\r\n video_image = response.xpath('//div[@class=\"video_holder\"]/video/@poster').extract()\r\n video = response.xpath('//div[@class=\"video_holder\"]/video/source/@src').extract()\r\n # 获取评论数量和好笑数量\r\n stats_vote = 
response.xpath('//div[@class=\"stats\"]/span/i[@class=\"number\"]/text()').extract()\r\n item = items.QiushibaikeItem()\r\n if len(thumb) == 0:\r\n thumb = \"NN\"\r\n if len(video) == 0:\r\n video = \"NN\"\r\n if len(user_name) == 0:\r\n user_name = \"NN\"\r\n if len(user_image) == 0:\r\n user_image = \"NN\"\r\n if len(video_image) == 0:\r\n video_image = \"NN\"\r\n item[\"user_image\"] = user_image\r\n item[\"user_name\"] = user_name[1].replace(\"\\n\",\" \")\r\n item[\"content\"] = content[0].replace(\"\\n\",\" \")\r\n item[\"thumb\"] = thumb\r\n item[\"video_image\"] = video_image[0]\r\n item[\"video\"] = video[0]\r\n item[\"laugh\"] = stats_vote[0]\r\n item[\"coments\"] = stats_vote[1]\r\n if item[\"video\"] != \"N\":\r\n item[\"played\"] = stats_vote[2]\r\n else:\r\n item[\"played\"] = \"0\"\r\n # print item[\"video_image\"]\r\n # print item[\"video\"]\r\n yield item" }, { "alpha_fraction": 0.5041782855987549, "alphanum_fraction": 0.6065459847450256, "avg_line_length": 46.86666488647461, "blob_id": "f9bdd059c25276d5095ac3435a01c0193d835d4a", "content_id": "c253ecea62d832082cb4e8887a93fc0930cff42b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1436, "license_type": "no_license", "max_line_length": 114, "num_lines": 30, "path": "/szsj/dj_sz/hightech/migrations/0006_idc_base_info.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.5 on 2017-03-07 02:27\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('hightech', '0005_auto_20170227_0637'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='IDC_Base_Info',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('url', models.IntegerField(max_length=255, verbose_name='\\u722c\\u53d6\\u7f51\\u5740')),\n ('name', models.IntegerField(max_length=255, verbose_name='IDC\\u540d\\u5b57')),\n ('company', models.CharField(max_length=255, verbose_name='\\u516c\\u53f8\\u540d\\u5b57')),\n ('zone', models.CharField(max_length=255, verbose_name='\\u5730\\u533a')),\n ('address', models.CharField(max_length=255, verbose_name='\\u516c\\u53f8\\u5730\\u5740')),\n ('phone_number', models.CharField(max_length=255, verbose_name='\\u8054\\u7cfb\\u7535\\u8bdd')),\n ('website', models.CharField(max_length=255, verbose_name='\\u7f51\\u5740')),\n ('Main_business', models.CharField(max_length=255, verbose_name='\\u4e3b\\u8425\\u4e1a\\u52a1')),\n ('Satisfaction', models.CharField(max_length=255, verbose_name='\\u6ee1\\u610f\\u5ea6')),\n ],\n ),\n ]\n" }, { "alpha_fraction": 0.5314285755157471, "alphanum_fraction": 0.5392857193946838, "avg_line_length": 31.581396102905273, "blob_id": "3667398bfbe506bdf72030d28f78e20bfce3c0ac", "content_id": "0a86b110c030d671df08c4960299e3c34e1d0fca", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1400, "license_type": "no_license", "max_line_length": 407, "num_lines": 43, "path": "/myqiubai/QiuBaiSpider/qiushibaike/utils/SQL.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n#coding:utf-8\n\"\"\"\n Author: 'Aric'\n Purpose: ''\n Created: '2015/7/24'\n\"\"\"\nfrom qiushibaike import settings\nimport MySQLdb\n\nclass SQL_Conn():\n\n def __init__(self):\n try:\n self.connection = MySQLdb.connect( host = settings.DB_HOST , user = settings.DB_USER, 
passwd = settings.DB_PASSWD, db = settings.DB_NAME, charset=\"utf8\")\n except Exception, e:\n print e\n\n def save(self, item):\n cursor = self.connection.cursor()\n\n sql = \"insert into qiubai_qiushi (user_image, user_name, content, thumb, video_image, video, laugh, coments, played) values (\\'\" + item[\"user_image\"][0] + \"\\', \\'\" + item[\"user_name\"] + \"\\', \\'\" + item[\"content\"] + \"\\', \\'\" + item[\"thumb\"][0] + \"\\', \\'\" + item[\"video_image\"] + \"\\', \\'\" + item[\"video\"] + \"\\', \\'\" + item[\"laugh\"] + \"\\', \\'\" + item[\"coments\"] + \"\\', \\'\" + item[\"played\"] + \"\\')\"\n try:\n cursor.execute(sql)\n self.connection.commit()\n except Exception, e:\n print e\n self.connection.rollback()\n print \"insert into sql :\" + item[\"user_name\"]\n cursor.close()\n\n def test(self):\n cursor = self.connection.cursor()\n cursor.execute('select * from qiubai_qiushi')\n\n def __del__(self):\n self.connection.close()\n\n\nif __name__ == '__main__':\n # print sys.path\n aa = SQL_Conn()\n aa.test()" }, { "alpha_fraction": 0.6067880988121033, "alphanum_fraction": 0.610927164554596, "avg_line_length": 33.514286041259766, "blob_id": "4eadc1aaa1ae37d5e53a379305ee077fe5a6a094", "content_id": "5591f3d7c0559347a68dd38b87fcb4ca6d81355f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3624, "license_type": "no_license", "max_line_length": 96, "num_lines": 105, "path": "/irole/irolespider/irolespider/middleware/splash.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "import re\n\nfrom scrapy import signals\n# from scrapy.http import Request\nfrom scrapy.exceptions import NotConfigured\nimport w3lib.url\n\n_matches = lambda url, regexs: any((r.search(url) for r in regexs))\n\nclass SplashMiddleware(object):\n\n url = 'http://localhost:8050/render.html'\n wait = 2\n url_pass = ()\n url_block = ()\n\n _settings = [\n 'endpoint',\n 'wait',\n 'images',\n 'js',\n 'filters',\n 'viewport',\n ]\n\n def __init__(self, crawler):\n self.crwlr_settings = crawler.settings\n self.crwlr_stats = crawler.stats\n\n @classmethod\n def from_crawler(cls, crawler):\n if not crawler.settings.getbool('SPLASH_ENABLED'):\n raise NotConfigured\n o = cls(crawler)\n crawler.signals.connect(o.open_spider, signal=signals.spider_opened)\n return o\n\n def open_spider(self, spider):\n for k in self._settings:\n setattr(self, k, self._get_setting_value(spider,\n self.crwlr_settings, k))\n # check URL filters\n url_pass = self._get_setting_value(spider, self.crwlr_settings, 'url_pass')\n if url_pass:\n self.url_pass = [re.compile(x) for x in url_pass]\n url_block = self._get_setting_value(spider, self.crwlr_settings, 'url_block')\n if url_block:\n self.url_block = [re.compile(x) for x in url_block]\n\n def _get_setting_value(self, spider, settings, k):\n o = getattr(self, k, None)\n s = settings.get('SPLASH_' + k.upper(), o)\n return getattr(spider, 'splash_' + k, s)\n\n def _needs_wrapping(self, request):\n # already wrapped\n if request.meta.get(\"splashed_url\", False):\n return False\n\n # force wrap or not\n use_splash = request.meta.get(\"use_splash\", None)\n if use_splash is not None:\n return use_splash == True\n\n # check URL regexes\n if not self.url_pass and not self.url_block:\n return False\n if self.url_pass and not _matches(request.url, self.url_pass):\n return False\n if self.url_block and _matches(request.url, self.url_block):\n return False\n\n return True\n\n def process_request(self, request, spider):\n if 
self._needs_wrapping(request):\n self.crwlr_stats.inc_value('splash/wrapped', spider=spider)\n return self._wrap_url(request)\n\n def process_response(self, request, response, spider):\n if request.meta.get('splashed_url', False):\n self.crwlr_stats.inc_value('splash/unwrapped', spider=spider)\n return self._unwrap_url(request, response)\n else:\n return response\n\n def _wrap_url(self, request):\n wrapped = w3lib.url.add_or_replace_parameter(self.endpoint, 'url', request.url)\n\n # pass options\n wrapped = w3lib.url.add_or_replace_parameter(wrapped, 'wait', self.wait)\n if self.viewport:\n wrapped = w3lib.url.add_or_replace_parameter(wrapped, 'viewport', self.viewport)\n wrapped = w3lib.url.add_or_replace_parameter(wrapped, 'images', 1 if self.images else 0)\n if self.js:\n wrapped = w3lib.url.add_or_replace_parameter(wrapped, 'js', self.js)\n if self.filters:\n wrapped = w3lib.url.add_or_replace_parameter(wrapped, 'filters', self.filters)\n\n return request.replace(url=wrapped, meta={\"splashed_url\": request.url})\n\n def _unwrap_url(self, request, response):\n unwrapped = w3lib.url.url_query_parameter(request.url, 'url')\n response = response.replace(url=unwrapped)\n return response\n" }, { "alpha_fraction": 0.49896126985549927, "alphanum_fraction": 0.5182247161865234, "avg_line_length": 45.05217361450195, "blob_id": "48b2d576237fbb3a91aab0b7b36ce6e439350a64", "content_id": "b2a2d66657fd36be513821b31cad310737a4ec1a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5343, "license_type": "no_license", "max_line_length": 161, "num_lines": 115, "path": "/szsj/sp_sz/szsti/spiders/idc.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport scrapy\nfrom scrapy.http import Request\nfrom szsti.utils.Save2DB import DjORM\n\nclass SzSpider(scrapy.Spider):\n name = \"idc\"\n allowed_domains = [\"dian.idcquan.com\"]\n start_urls = ['http://dian.idcquan.com/item.php?act=list&catid=4']\n # start_urls = ['http://dian.idcquan.com/item.php?act=detail&id=1264']\n # start_urls = [\"http://dian.idcquan.com/item.php?act=detail&id=1320\"]\n # start_urls = [\"http://dian.idcquan.com/item.php?act=detail&id=631\"]\n\n\n def parse(self, response):\n\n baseurl = \"http://dian.idcquan.com\"\n indexs = response.xpath('//*[@id=\"px_list2\"]/dl/dd[1]/a/@href').extract()\n # 获取页面 详细信息,然后到一个解析方法去解析\n for index in indexs:\n url = baseurl + index\n yield Request(url=url, callback=self.parse_detail)\n\n # 获取分页\n pages = response.xpath('//*[@id=\"page\"]/p/a/@href').extract()\n for page in pages:\n url = baseurl + page\n # print url\n yield Request(url=url, callback=self.parse)\n\n pass\n\n\n def parse_detail(self, response):\n # def parse(self, response):\n item = {}\n # url\n item[\"url\"] = response.url\n try:\n # name\n item[\"name\"] = response.xpath('//*[@id=\"gongs_name\"]/text()').extract()[0].strip()\n # company\n item[\"company\"] = response.xpath('//p[@class=\"quanming\"]/text()').extract()[0].strip() # //*[@id=\"erp_left\"]/div[1]/div/div/p[3]\n # zone\n item[\"zone\"] = response.xpath('//*[@id=\"erp_left\"]/div[2]/div[1]/dl/dt/p[1]/span/text()').extract()[0].strip()\n # address\n item[\"address\"] = response.xpath('//*[@id=\"erp_left\"]/div[2]/div[1]/dl/dt/p[2]/span/text()').extract()[0].strip()\n # phone number\n item[\"phone_number\"] = response.xpath('//*[@id=\"erp_left\"]/div[2]/div[1]/dl/dt/p[3]/i/text()').extract()[0].strip()\n # website\n item[\"website\"] = 
response.xpath('//*[@id=\"erp_left\"]/div[2]/div[1]/dl/dt/p[4]/em/a/@href').extract()[0].strip()\n # main bussiness\n item[\"Main_business\"] = response.xpath('//*[@id=\"erp_left\"]/div[2]/div[1]/dl/dt/p[5]/span/text()').extract()[0].strip()\n # Satisfaction\n item[\"Satisfaction\"] = response.xpath('//*[@id=\"erp_left\"]/div[2]/div[1]/dl/dt/p[9]/text()').extract()[0].strip()\n\n\n # QQ\n try:\n qqoneline = response.xpath('//*[@id=\"erp_left\"]/div[2]/div[1]/dl/dt/p[7]/a/@href').extract()[0].strip()\n item[\"qq\"] = qqoneline.split(\"=\")[2].split(\"&\")[0]\n except Exception:\n item[\"qq\"] = \"null\"\n\n except Exception:\n # name\n item[\"name\"] = response.xpath('//table[@class=\"custom_field\"]/tr[3]/td[2]/text()').extract()[0].strip()\n # company\n item[\"company\"] = response.xpath('//table[@class=\"custom_field\"]/tr[3]/td[2]/text()').extract()[0].strip() # //*[@id=\"erp_left\"]/div[1]/div/div/p[3]\n # zone\n try:\n item[\"zone\"] = response.xpath('//table[@class=\"custom_field\"]/tr[6]/td[2]/a[2]/text()').extract()[0].strip()\n except Exception:\n item[\"zone\"] = response.xpath('//table[@class=\"custom_field\"]/tr[5]/td[2]/a[2]/text()').extract()[0].strip()\n # # address\n try:\n item[\"address\"] = response.xpath('//table[@class=\"custom_field\"]/tr[7]/td[2]/text()').extract()[0].strip()\n except Exception:\n item[\"address\"] = response.xpath('//table[@class=\"custom_field\"]/tr[6]/td[2]/text()').extract()[0].strip()\n # phone number\n try:\n item[\"phone_number\"] = response.xpath('//table[@class=\"custom_field\"]/tr[9]/td[2]/text()').extract()[0].strip()\n except Exception:\n item[\"phone_number\"] = response.xpath('//table[@class=\"custom_field\"]/tr[8]/td[2]/text()').extract()[0].strip()\n # website\n try:\n item[\"website\"] = response.xpath('//table[@class=\"custom_field\"]/tr[11]/td[2]/a/@href').extract()[0].strip()\n except Exception:\n item[\"website\"] = response.xpath('//table[@class=\"custom_field\"]/tr[10]/td[2]/a/@href').extract()[0].strip()\n\n\n # main bussiness\n try:\n item[\"Main_business\"] = response.xpath('//table[@class=\"custom_field\"]/tr[8]/td[2]').extract()[0].strip()\n except Exception:\n item[\"Main_business\"] = response.xpath('//table[@class=\"custom_field\"]/tr[7]/td[2]').extract()[0].strip()\n\n # Satisfaction\n try:\n item[\"Satisfaction\"] = response.xpath('//*[@id=\"subject_impress\"]').extract()[0].strip()\n except Exception:\n item[\"Satisfaction\"] = response.xpath('//*[@id=\"subject_impress\"]').extract()[0].strip()\n\n # QQ\n try:\n qqoneline = response.xpath('//table[@class=\"custom_field\"]/tr[10]/td[2]/a/@href').extract()[0].strip()\n item[\"qq\"] = qqoneline.split(\"=\")[2].split(\"&\")[0]\n except Exception:\n item[\"qq\"] = \"null\"\n\n\n print item[\"qq\"]\n print \"*\" * 30\n\n DjORM.save_IDC_Base_Info(item)" }, { "alpha_fraction": 0.5420689582824707, "alphanum_fraction": 0.6068965792655945, "avg_line_length": 28, "blob_id": "2f3f829948bd68e9174028c3d08f5d8030da5632", "content_id": "4e211fe21663a6e7301a5b97d08eada5832d7a6c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 725, "license_type": "no_license", "max_line_length": 92, "num_lines": 25, "path": "/szsj/dj_sz/hightech/migrations/0013_auto_20170316_1018.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.5 on 2017-03-16 10:18\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass 
Migration(migrations.Migration):\n\n dependencies = [\n ('hightech', '0012_itjuzi_company_info'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='itjuzi_company_info',\n name='company_allname',\n field=models.CharField(max_length=999, verbose_name='\\u4f01\\u4e1a\\u5168\\u540d'),\n ),\n migrations.AlterField(\n model_name='itjuzi_company_info',\n name='company_name',\n field=models.CharField(max_length=999, verbose_name='\\u4f01\\u4e1a\\u540d'),\n ),\n ]\n" }, { "alpha_fraction": 0.5387930870056152, "alphanum_fraction": 0.5445402264595032, "avg_line_length": 34.69230651855469, "blob_id": "b16e7d1b6ecb5ddc0dba79ed6feb899d3332e0d9", "content_id": "873796d525aefba484796f055e2e599bec9d98dd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1430, "license_type": "no_license", "max_line_length": 102, "num_lines": 39, "path": "/baixihecom/monster/utils/Do_Monster.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n#coding:utf-8\n\"\"\"\n Author: 'Aric'\n Purpose: '接受用户发来的消息,并且返回相应的信息'\n Created: '2015/7/14'\n\"\"\"\n# from tempates import templates as temp\nimport time\nclass Message_Session(object):\n def __init__(self, data):\n self.msgType = data[\"msgType\"]\n self.content = data[\"content\"]\n self.FromUser = data[\"FromUser\"]\n self.ToUser = data[\"toUserName\"]\n if self.msgType == \"text\":\n pass\n if self.msgType == \"image\":\n print \"this is a image method\"\n if self.msgType == \"voice\":\n print \"this is a voice method\"\n if self.msgType == \"video\":\n print \"this is a video method\"\n if self.msgType == \"shortvideo\":\n print \"this is a shortvideo method\"\n if self.msgType == \"location\":\n print \"this is a location method\"\n if self.msgType == \"link\":\n print \"this is a link method\"\n\n def Text_Msg_Response(self, content):\n text_info = '''<xml>\n <ToUserName><![CDATA[%s]]></ToUserName>\n <FromUserName><![CDATA[%s]]></FromUserName>\n <CreateTime>%s</CreateTime>\n <MsgType><![CDATA[%s]]></MsgType>\n <Content><![CDATA[%s]]></Content>\n </xml>''' % (self.FromUser, self.ToUser, str(int(time.time())), self.msgType, content)\n return text_info\n" }, { "alpha_fraction": 0.5151098966598511, "alphanum_fraction": 0.5242673754692078, "avg_line_length": 33.68254089355469, "blob_id": "0d213b66947c9f7478b1ca03f36575affa763b72", "content_id": "37cb83a96d56bfb59a81f249be93f1f42983cea2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2230, "license_type": "no_license", "max_line_length": 87, "num_lines": 63, "path": "/irole/irolespider/irolespider/spiders/17173.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport scrapy\nfrom scrapy.selector import Selector \nfrom scrapy.http import Request\nfrom irolespider import settings\nfrom irolespider.items import Cos8Item\n\nfrom irolespider.utils import Spider_Django\n\n\nclass MycosSpider(scrapy.Spider):\n name = \"17173\"\n # allowed_domains = [\"cosplay8.com\"]\n # start_urls = (\n # 'http://news.17173.com/gameview/cos/',\n # )\n urls = \"http://news.17173.com/gameview/cos/\"\n def start_requests(self):\n yield scrapy.Request(self.urls, callback=self.parse,\n meta={\"use_splash\": True})\n\n def parse(self, response):\n sel = Selector(response)\n #获取图片详细页面\n img_page_url = sel.xpath('//div[@class=\"yxdy\"]/ul/li/a/@href').extract()\n for i in img_page_url:\n # print i\n yield Request(i, callback=self.Get_Imag_URL)\n # 
获取图片list 页面\n        page_url = sel.xpath('//div[@class=\"paginationdg\"]/ul/li/a/@href').extract()\n        for relative_url in page_url:\n            page_real_url = response.url + relative_url\n            # print page_real_url\n            yield Request(page_real_url, callback=self.parse)\n    # ----------------------------------------------------------------------\n\n    def Get_Imag_URL(self, response):\n\n        aa = Spider_Django.django_sql()\n        if aa.is_crawled(response.url):\n            \"\"\"获取图片的真实地址\"\"\"\n\n            sel = Selector(response)\n            img_url = sel.xpath('//p[@align=\"center\"]/a/img/@src').extract()\n            context = sel.xpath('//h1[@class=\"gb-final-tit-article\"]/text()').extract()\n            items = []\n            for relative_img_url in img_url:\n                # build one item per image URL instead of reusing and re-appending a single item\n                item = Cos8Item()\n                item[\"img_base_url\"] = [relative_img_url]\n                item[\"html_base_url\"] = response.url\n\n                print \"=====================\"\n                print relative_img_url\n                print response.url\n\n                if context:\n                    item[\"img_content\"] = [context[0]]\n                items.append(item)\n            return items\n        else:\n            print response.url + \" is crawled !\"\n            pass" }, { "alpha_fraction": 0.5344827771186829, "alphanum_fraction": 0.6068965792655945, "avg_line_length": 34.272727966308594, "blob_id": "69634f51a7fc36a414025d3fdc3bbaa4f105d74c", "content_id": "4ae7fc2809d563dcbccbaedfe8e6009392166313", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 754, "license_type": "no_license", "max_line_length": 114, "num_lines": 22, "path": "/irole/cnirole/migrations/0001_initial.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n    dependencies = [\n    ]\n\n    operations = [\n        migrations.CreateModel(\n            name='cosplay8dotcom',\n            fields=[\n                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n                ('base_html_url', models.CharField(max_length=255, verbose_name='\\u7f51\\u9875\\u5730\\u5740')),\n                ('base_image_url', models.CharField(max_length=255, verbose_name='\\u56fe\\u7247\\u5730\\u5740')),\n                ('base_image_content', models.CharField(max_length=255, verbose_name='\\u6807\\u9898')),\n            ],\n        ),\n    ]\n" }, { "alpha_fraction": 0.5417057275772095, "alphanum_fraction": 0.5754451751708984, "avg_line_length": 29.485713958740234, "blob_id": "8c416354b43c36002035b3f92d88fe526f9f31dc", "content_id": "e740c07b9fb8d66e8a91eeaee84eb081524f5fed", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1075, "license_type": "no_license", "max_line_length": 132, "num_lines": 35, "path": "/mikufan/frontend/webapps/migrations/0003_auto_20161213_1114.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.4 on 2016-12-13 11:14\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n    dependencies = [\n        ('webapps', '0002_auto_20161203_0709'),\n    ]\n\n    operations = [\n        migrations.CreateModel(\n            name='Category',\n            fields=[\n                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n                ('category', models.CharField(default='游戏', max_length=255, verbose_name='分类')),\n            ],\n        ),\n        migrations.RemoveField(\n            model_name='coser',\n            name='category',\n        ),\n        migrations.DeleteModel(\n            name='Coser_Category',\n        ),\n        migrations.AddField(\n            
model_name='category',\n name='coser',\n field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='Coser_Category', to='webapps.Coser'),\n ),\n ]\n" }, { "alpha_fraction": 0.6631355881690979, "alphanum_fraction": 0.6970338821411133, "avg_line_length": 35.38461685180664, "blob_id": "185073d1b56e43dda74060c6720e5dbfcf3c54ab", "content_id": "a0cbabfb3f3ac8c8e6cebf1c96cc25837f9437ba", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 512, "license_type": "no_license", "max_line_length": 85, "num_lines": 13, "path": "/szmap/szmap/webapp/models.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom django.db import models\n\n# Create your models here.\n\n\nclass szmap(models.Model):\n zone = models.CharField(max_length=255, verbose_name=u\"区域名称 \")\n zhishu = models.CharField(max_length=255, verbose_name=u\"交通指数 \")\n chesu = models.CharField(max_length=255, verbose_name=u\"平均车速 \")\n dengji = models.CharField(max_length=255, verbose_name=u\"拥堵等级 \")\n addtime = models.CharField(max_length=255, verbose_name=u\"添加时间 \", default='Null')" }, { "alpha_fraction": 0.6509566307067871, "alphanum_fraction": 0.6686887741088867, "avg_line_length": 29.18309783935547, "blob_id": "fd36c3f468a632dd0a7c98e542294beba8d0bc03", "content_id": "bd0c33614160ff00f9e6eb93a1294f5fccf57115", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2331, "license_type": "no_license", "max_line_length": 104, "num_lines": 71, "path": "/mikufan/frontend/webapps/models.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom django.db import models\nfrom datetime import datetime\n\nimport django\n# Create your models here.\nfrom django.utils.safestring import mark_safe\n#\n\n\n\nclass Coser(models.Model):\n u\"\"\" Coser 整片文章 \"\"\"\n # 分类\n title = models.CharField(max_length=255, verbose_name=u\"标题\", blank=False, default=u'NUll')\n content = models.TextField(max_length=255, verbose_name=u\"正文\", blank=False, default=u'NUll')\n\n istop = models.BooleanField(default=False, verbose_name=u\"置顶\")\n come_from = models.CharField(max_length=255, verbose_name=u\"来源网站\", default=u\"Mikufan\")\n topimage = models.CharField(max_length=255, verbose_name=u\"封面图片\", default=u\"null\")\n addtime = models.DateTimeField(default=django.utils.timezone.now, blank=True, verbose_name=u\"添加时间\")\n\n\n def topimage_tag(self):\n result = '<img src=\"%s\", width=200px />' % self.topimage\n return mark_safe(result)\n\n topimage_tag.allow_tags = True\n topimage_tag.short_description = u\"图片预览\"\n\n\n\nclass Category(models.Model):\n u\"\"\" 图片分类 比如: 游戏角色 ,漫画角色 \"\"\"\n # 分类\n coser = models.ForeignKey(Coser, related_name=u\"Coser_Category\")\n category = models.CharField(max_length=255, verbose_name=u\"分类\", default=u'游戏')\n\n\nclass Images(models.Model):\n u\"\"\" Coser 图片,一个Coser可能有多个图片 \"\"\"\n # 分类\n coser = models.ForeignKey(Coser, related_name=u\"Coser_Photo\")\n relate_url = models.CharField(max_length=255, verbose_name=u\"图片地址\", default=u'NUll')\n real_url = models.CharField(max_length=255, verbose_name=u\"真实图片地址\", default=u'NUll')\n\n\n\n def relate_url_tag(self):\n result1 = '<img src=\"%s\", width=200px />' % self.relate_url\n return mark_safe(result1)\n\n def real_url_tag(self):\n result2 = '<img src=\"%s\", width=200px />' % self.real_url\n return mark_safe(result2)\n\n relate_url_tag.allow_tags = True\n 
relate_url_tag.short_description = u\"相对图片预览\"\n\n real_url_tag.allow_tags = True\n real_url_tag.short_description = u\"原始图片预览\"\n\n\n\n\n\nclass Ads(models.Model):\n u\"\"\"广告相关\"\"\"\n ad = models.CharField(max_length=255, verbose_name=u\"广告内容\")\n isshow = models.BooleanField(default=False)\n" }, { "alpha_fraction": 0.5421994924545288, "alphanum_fraction": 0.5481671094894409, "avg_line_length": 35.90322494506836, "blob_id": "3d7638927ee0b9586803a739f16a279113a2f749", "content_id": "3d823a2fca710ef9b073218c2b1a249b9a88f7ec", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1223, "license_type": "no_license", "max_line_length": 107, "num_lines": 31, "path": "/myqiubai/QiuBaiSpider/qiushibaike/spiders/qiushi.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\nimport scrapy\r\nfrom scrapy.http import Request\r\n\r\nclass QiuBaiSpider(scrapy.Spider):\r\n name = \"qiushi\"\r\n allowed_domains = [\"qiushibaike.com\"]\r\n start_urls = (\r\n 'http://www.qiushibaike.com/',\r\n )\r\n base_url = \"http://www.qiushibaike.com\"\r\n def parse(self, response):\r\n # 获取用户信息\r\n user_image = response.xpath('//div[@class=\"article block untagged mb15\"]/div/a/img/@src').extract()\r\n user_name = response.xpath('//div[@class=\"article block untagged mb15\"]/div/a/text()').extract()\r\n # 获取笑话信息\r\n content = response.xpath('//div[@class=\"content\"]/text()').extract()\r\n # 获取图片\r\n thumb = response.xpath('//div[@class=\"thumb\"]/a/img/@src').extract()\r\n # 获取下一页连接地址\r\n try:\r\n next_page = response.xpath('//div[@class=\"pageto\"]/a[@class=\"next\"]/@href').extract()\r\n except Exception, e:\r\n pass\r\n if len(next_page) == 0:\r\n pass\r\n else:\r\n next_page = self.base_url+next_page[0]\r\n for i in content:\r\n print i.encode(\"gbk\")\r\n yield Request(next_page, callback=self.parse)" }, { "alpha_fraction": 0.5, "alphanum_fraction": 0.569343090057373, "avg_line_length": 26.399999618530273, "blob_id": "7c88148e90b4ce4efafb331d53cdb61ecb4eb1b2", "content_id": "b3ec0a2090609d82bf5c276b5babb62932d0c468", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 822, "license_type": "no_license", "max_line_length": 92, "num_lines": 30, "path": "/qiubai_CrawlSpider/frontend/qiubaifrontend/spiders/migrations/0005_auto_20160520_0921.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.9.6 on 2016-05-20 09:21\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('spiders', '0004_auto_20160520_0912'),\n ]\n\n operations = [\n migrations.RenameField(\n model_name='qiushi',\n old_name='content',\n new_name='contents',\n ),\n migrations.AlterField(\n model_name='comment',\n name='qiushiURL',\n field=models.CharField(max_length=255, verbose_name='\\u7cd7\\u4e8bURL'),\n ),\n migrations.AlterField(\n model_name='comment',\n name='user',\n field=models.CharField(max_length=255, verbose_name='\\u7528\\u6237\\u6635\\u79f0'),\n ),\n ]\n" }, { "alpha_fraction": 0.5206286907196045, "alphanum_fraction": 0.6149312257766724, "avg_line_length": 24.450000762939453, "blob_id": "f76a4b1175aefd148a0f281ab05d59b98a2784ec", "content_id": "696e681cdb4caef3540c7e3ac8284ef0afe68f78", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 509, "license_type": "no_license", 
"max_line_length": 93, "num_lines": 20, "path": "/szsj/dj_sz/hightech/migrations/0008_auto_20170307_0232.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.5 on 2017-03-07 02:32\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('hightech', '0007_auto_20170307_0228'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='idc_base_info',\n name='Main_business',\n field=models.CharField(max_length=1024, verbose_name='\\u4e3b\\u8425\\u4e1a\\u52a1'),\n ),\n ]\n" }, { "alpha_fraction": 0.4948717951774597, "alphanum_fraction": 0.5899408459663391, "avg_line_length": 52.9361686706543, "blob_id": "34dc2fe11c471d4a05771c3c2a6614e05a4c0874", "content_id": "04846ef3d9e696f575805fb6bdc6a0a4928257d0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5070, "license_type": "no_license", "max_line_length": 136, "num_lines": 94, "path": "/szsj/dj_sz/hexun/migrations/0003_auto_20170411_0152.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11 on 2017-04-11 01:52\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('hexun', '0002_auto_20170410_0754'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Dongshihui_Info',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('company_code', models.CharField(default='-', max_length=255, verbose_name='\\u516c\\u53f8\\u80a1\\u7968\\u4ee3\\u7801')),\n ('dongjiangao', models.CharField(default='-', max_length=255, verbose_name='\\u8463\\u76d1\\u9ad8\\u59d3\\u540d')),\n ('gaoguanzhiwu', models.CharField(default='-', max_length=255, verbose_name='\\u9ad8\\u7ba1\\u804c\\u52a1')),\n ('renzhiriqi', models.CharField(default='-', max_length=255, verbose_name='\\u4efb\\u804c\\u65e5\\u671f')),\n ('lizhiriqi', models.CharField(default='-', max_length=255, verbose_name='\\u79bb\\u804c\\u65e5\\u671f')),\n ('xueli', models.CharField(default='-', max_length=255, verbose_name='\\u5b66\\u5386')),\n ('nianxin', models.CharField(default='-', max_length=255, verbose_name='\\u5e74\\u85aa(\\u4e07\\u5143)')),\n ('chiguzonge', models.CharField(default='-', max_length=255, verbose_name='\\u6301\\u80a1\\u603b\\u989d(\\u4e07\\u5143)')),\n ('chigushuliang', models.CharField(default='-', max_length=255, verbose_name='\\u6301\\u80a1\\u6570\\u91cf(\\u4e07\\u80a1)')),\n ],\n ),\n migrations.CreateModel(\n name='Jianshihui_Info',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('company_code', models.CharField(default='-', max_length=255, verbose_name='\\u516c\\u53f8\\u80a1\\u7968\\u4ee3\\u7801')),\n ('dongjiangao', models.CharField(default='-', max_length=255, verbose_name='\\u8463\\u76d1\\u9ad8\\u59d3\\u540d')),\n ('gaoguanzhiwu', models.CharField(default='-', max_length=255, verbose_name='\\u9ad8\\u7ba1\\u804c\\u52a1')),\n ('renzhiriqi', models.CharField(default='-', max_length=255, verbose_name='\\u4efb\\u804c\\u65e5\\u671f')),\n ('lizhiriqi', models.CharField(default='-', max_length=255, verbose_name='\\u79bb\\u804c\\u65e5\\u671f')),\n ('xueli', models.CharField(default='-', max_length=255, verbose_name='\\u5b66\\u5386')),\n ('nianxin', 
models.CharField(default='-', max_length=255, verbose_name='\\u5e74\\u85aa(\\u4e07\\u5143)')),\n ('chiguzonge', models.CharField(default='-', max_length=255, verbose_name='\\u6301\\u80a1\\u603b\\u989d(\\u4e07\\u5143)')),\n ('chigushuliang', models.CharField(default='-', max_length=255, verbose_name='\\u6301\\u80a1\\u6570\\u91cf(\\u4e07\\u80a1)')),\n ],\n ),\n migrations.RemoveField(\n model_name='gaoguan_info',\n name='name',\n ),\n migrations.AddField(\n model_name='gaoguan_info',\n name='chigushuliang',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u6301\\u80a1\\u6570\\u91cf(\\u4e07\\u80a1)'),\n ),\n migrations.AddField(\n model_name='gaoguan_info',\n name='chiguzonge',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u6301\\u80a1\\u603b\\u989d(\\u4e07\\u5143)'),\n ),\n migrations.AddField(\n model_name='gaoguan_info',\n name='company_code',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u516c\\u53f8\\u80a1\\u7968\\u4ee3\\u7801'),\n ),\n migrations.AddField(\n model_name='gaoguan_info',\n name='dongjiangao',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u8463\\u76d1\\u9ad8\\u59d3\\u540d'),\n ),\n migrations.AddField(\n model_name='gaoguan_info',\n name='gaoguanzhiwu',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u9ad8\\u7ba1\\u804c\\u52a1'),\n ),\n migrations.AddField(\n model_name='gaoguan_info',\n name='lizhiriqi',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u79bb\\u804c\\u65e5\\u671f'),\n ),\n migrations.AddField(\n model_name='gaoguan_info',\n name='nianxin',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u5e74\\u85aa(\\u4e07\\u5143)'),\n ),\n migrations.AddField(\n model_name='gaoguan_info',\n name='renzhiriqi',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u4efb\\u804c\\u65e5\\u671f'),\n ),\n migrations.AddField(\n model_name='gaoguan_info',\n name='xueli',\n field=models.CharField(default='-', max_length=255, verbose_name='\\u5b66\\u5386'),\n ),\n ]\n" }, { "alpha_fraction": 0.7242380380630493, "alphanum_fraction": 0.737300455570221, "avg_line_length": 30.363636016845703, "blob_id": "937ccc33d04802cdc4869d3f3e8bf04fcb9c3e8c", "content_id": "8425c41b5ac39c9664e0dae8780b8da6bba21ae7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 689, "license_type": "no_license", "max_line_length": 71, "num_lines": 22, "path": "/irole/cnirole/views.py", "repo_name": "leearic/spider", "src_encoding": "UTF-8", "text": "from django.shortcuts import render\nfrom django.shortcuts import render_to_response\n\n# Create your views here.\nfrom django.template import RequestContext\nfrom cnirole.models import cosplay8dotcom\nfrom django.core.paginator import Paginator, EmptyPage,PageNotAnInteger\n\n\n\n\ndef index(request, pages=1):\n cosplay8 = cosplay8dotcom.objects.all()\n paginator = Paginator(cosplay8,40)\n page = pages #request.GET.get('page')\n try:\n contacts = paginator .page(page)\n except PageNotAnInteger:\n contacts = paginator .page(1)\n except EmptyPage:\n contacts = paginator .page(paginator.num_pages)\n return render_to_response('index.html', {'cosplay8': contacts})" } ]
103
BalalRaza/PythonModuleInstallerScript
https://github.com/BalalRaza/PythonModuleInstallerScript
14f0ade154646ca07660fd09460804fa9aab761d
ed481c0ecca684ee9757c5a6e0ef093f4cb13cc4
3f76808b2e12ea1c00d48e91ac0f5526b43a3a8a
refs/heads/master
2021-01-16T19:37:39.853484
2017-08-13T15:34:19
2017-08-13T15:34:19
100,185,537
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.8159722089767456, "alphanum_fraction": 0.8229166865348816, "avg_line_length": 143, "blob_id": "cdf1e9759518b3e226a43f8315324bd7fab74058", "content_id": "8b46454f4de39b57e74a7636b456a7406a3cf6c2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 288, "license_type": "no_license", "max_line_length": 257, "num_lines": 2, "path": "/README.md", "repo_name": "BalalRaza/PythonModuleInstallerScript", "src_encoding": "UTF-8", "text": "# PythonModuleInstallerScript\nThis script written in Python 3.4 will install python modules written in a JSON file under dependencies. It takes the filename as input and outputs \"success\" if all modules get installed successfully. Else prints the list of modules that failed to download.\n" }, { "alpha_fraction": 0.5388888716697693, "alphanum_fraction": 0.5402777791023254, "avg_line_length": 23.214284896850586, "blob_id": "064bfcd3873b17c02b9f5246183afcba57d69b21", "content_id": "e509d70b92648b03dc37501c2c68a259625a9502", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 720, "license_type": "no_license", "max_line_length": 64, "num_lines": 28, "path": "/InstallerTool.py", "repo_name": "BalalRaza/PythonModuleInstallerScript", "src_encoding": "UTF-8", "text": "import json\r\nimport pip\r\n\r\ndef install(package):\r\n return pip.main(['install', package])\r\n\r\nfileName = input('Enter the JSON file name without extension: ')\r\nfileName += '.json'\r\nwith open(fileName) as f:\r\n data = json.load(f)\r\n\r\nif 'dependencies' in data:\r\n flag = False\r\n failed = []\r\n for r in data['dependencies']:\r\n pckg = str(r) + '==' + str(data['dependencies'][r])\r\n rc = install(pckg)\r\n if rc != 0:\r\n flag = True\r\n failed.append(str(r))\r\n if flag:\r\n print('List of modules that failed to download:')\r\n for s in failed:\r\n print(s)\r\n else:\r\n print('Success')\r\nelse:\r\n print(\"No dependencies found\")\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n" } ]
2
ElementumOrg/service.lt2http
https://github.com/ElementumOrg/service.lt2http
08a606aa46782c85818d6519e68ab4c76e653c5f
b5fd56498a43165b13042dd74dd865a254ddd9f4
53c815cbfa5aa2f9441e382983721673b5f3ed90
refs/heads/master
2023-07-04T12:25:37.190065
2023-06-22T11:49:51
2023-06-22T11:49:51
353,385,408
3
1
null
null
null
null
null
[ { "alpha_fraction": 0.6940509676933289, "alphanum_fraction": 0.7053824067115784, "avg_line_length": 12.074073791503906, "blob_id": "93ced0370fb5b2367ea4764731cce3b65d0bb209", "content_id": "5b01a5163fd5b0b2716f5c1b0798b9ad33833e72", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 353, "license_type": "no_license", "max_line_length": 43, "num_lines": 27, "path": "/release.sh", "repo_name": "ElementumOrg/service.lt2http", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\nset -e\n\nTAG=$(git describe --tags)\n\ngit checkout master\n\nrm -rf service.lt2http\n\nsudo -S true\n\n# Install Python dependencies\npip3 install -r requirements.txt\n\n# Run Python linting\npython3 -m flake8\n./scripts/xgettext.sh\n\n# Compile zip artifacts\nmake\n\n# Run artifact uploads if we are on the tag\nif [[ $TAG != *-* ]]\nthen\n make upload\nfi\n" }, { "alpha_fraction": 0.5332409739494324, "alphanum_fraction": 0.5394737124443054, "avg_line_length": 21.920635223388672, "blob_id": "5e1ddd41aed6536471e9107c72c1c9c54a795854", "content_id": "8c5ec1fb7ad2beadcb3c923a25025378389d2032", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1444, "license_type": "no_license", "max_line_length": 63, "num_lines": 63, "path": "/resources/site-packages/lt2http/monitor.py", "repo_name": "ElementumOrg/service.lt2http", "src_encoding": "UTF-8", "text": "import os\nimport xbmc\nimport threading\n\nfrom xbmc import Monitor\nfrom six.moves import urllib_request\n\nfrom lt2http.daemon import shutdown, update\nfrom lt2http.config import init, LT2HTTP_HOST\n\n\nclass Lt2HttpMonitor(Monitor):\n def __init__(self):\n self._closing = threading.Event()\n self._reboot = False\n Monitor.__init__(self)\n\n def __str__(self):\n return 'Lt2HttpMonitor'\n\n @property\n def closing(self):\n return self._closing\n\n def restart(self):\n try:\n init()\n urllib_request.urlopen(\"%s/restart\" % LT2HTTP_HOST)\n except:\n pass\n\n def reboot(self, val=None):\n if val is not None:\n self._reboot = val\n\n return self._reboot\n\n def onAbortRequested(self):\n # Only when closing Kodi\n if self.abortRequested():\n xbmc.executebuiltin(\"Dialog.Close(all, true)\")\n shutdown()\n try:\n self._closing.set()\n self._closing.clear()\n except SystemExit as e:\n if e.code != 0:\n os._exit(0)\n pass\n\n def onSettingsChanged(self):\n try:\n init()\n update()\n except:\n pass\n\n def onNotification(self, sender, method, data):\n if method and \"lt2http_restart\" in method:\n self.restart()\n self.reboot(True)\n\n return\n" }, { "alpha_fraction": 0.7264800071716309, "alphanum_fraction": 0.7434602975845337, "avg_line_length": 40.075469970703125, "blob_id": "07fe97cf0c72f30f49830465459470b44c5f2555", "content_id": "14c1a12fb16714aeb47f5852028983df11fe03b8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2179, "license_type": "no_license", "max_line_length": 209, "num_lines": 53, "path": "/README.md", "repo_name": "ElementumOrg/service.lt2http", "src_encoding": "UTF-8", "text": "\n[![Build Status](https://travis-ci.org/ElementumOrg/service.lt2http.svg?branch=master)](https://travis-ci.org/ElementumOrg/service.lt2http)\n[![Join the chat at https://gitter.im/ElementumApp/Lobby](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/ElementumApp/Lobby)\n\n\nSupport\n----------\n- Source is open, so you can fork everything.\n- Any participation in development is welcome.\n\n\nWhat it 
is\n----------\nservice.lt2http is a Kodi addon wrapper for the [lt2http](https://github.com/ElementumOrg/lt2http/) application.\nThere is no Kodi interface yet; the addon acts as a lt2http service starter and updates the settings according to the Kodi addon settings.\n\nI would appreciate it if someone added simple browsing functionality in Python, to be able to list active torrents, add a file, start/stop/pause/delete a torrent, etc.\n\nSupported platforms\n-------------------\n- Windows 32/64 bits\n- Linux 32/64 bits\n- Linux ARM (armv6, armv7, armv8/arm64)\n- OS X 64 bits\n- Android ARM (4.4.x, and later), x86, x64, ARM, ARM64\n\nMinimum supported Kodi version: 16 (Jarvis)\n\nDownload\n--------\nSee the [Releases](http://elementum.surge.sh/) page. **Do NOT use the `Download ZIP` button on this page.**\n\n\nInstallation\n------------\n- Go to Settings > Service settings > Control and **enable both Application control options**\n- Restart Kodi if one or both options were not enabled\n- Install lt2http like any other add-on\n\nBuild\n-----\nservice.lt2http contains only the Python scripts needed to run the lt2http application from Kodi and stop it gracefully.\nWhen a new version is installed, the proper binary is downloaded from GitHub.\n\nRelease\n-------\n\nReleases are done by the `release.sh` script, which gets the precompiled binaries, collects everything into a module zip file and uploads it as a GitHub release.\n\nHow it works\n------------\nlt2http is a torrent downloading application with a REST web-server in front and the ability to stream downloaded data to a media player or downloader. It supports downloading to memory, without using the hard disk.\n\nThe BitTorrent streaming engine is very resilient (or at least it's designed to be). It's built on top of the [libtorrent](https://github.com/arvidn/libtorrent) package.\n\n" }, { "alpha_fraction": 0.6048387289047241, "alphanum_fraction": 0.6135236024856567, "avg_line_length": 24.58730125427246, "blob_id": "4021d9311ba90c2e4e7b47a4ba22ae85d48a8d69", "content_id": "02187f7c4c7452fe95b8c4599595136a74d39391", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 1612, "license_type": "no_license", "max_line_length": 136, "num_lines": 63, "path": "/Makefile", "repo_name": "ElementumOrg/service.lt2http", "src_encoding": "UTF-8", "text": "NAME = service.lt2http\nGIT = git\nGIT_VERSION = $(shell $(GIT) describe --always)\nGIT_USER = ElementumOrg\nGIT_REPOSITORY = service.lt2http\nVERSION = $(shell sed -ne \"s/.*version=\\\"\\([0-9a-z\\.\\-]*\\)\\\"\\sprovider.*/\\1/p\" addon.xml)\nZIP_SUFFIX = zip\nZIP_FILE = $(NAME)-$(VERSION).$(ZIP_SUFFIX)\n\nall: clean zip\n\n.PHONY: zip\n\n# $(ARCHS):\n# \t$(MAKE) clean_arch [email protected]\n# \t$(MAKE) zip ARCHS=$@ [email protected]\n\n$(ZIP_FILE):\n\tgit archive --format zip --prefix $(NAME)/ --output $(ZIP_FILE) HEAD\n\t# mkdir -p $(NAME)/resources/bin\n\t# for arch in $(ARCHS); do \\\n\t# \tcp -r `pwd`/$(DEV)/resources/bin/$$arch $(NAME)/resources/bin/$$arch; \\\n\t# \tzip -9 -r -g $(ZIP_FILE) $(NAME)/resources/bin/$$arch; \\\n\t# done\n\t# rm -rf $(NAME)\n\nzip: $(ZIP_FILE)\n\n# zipfiles: addon.xml\n# \tfor arch in $(ARCHS); do \\\n# \t\t$(MAKE) $$arch; \\\n# \tdone\n\nupload:\n\t$(eval EXISTS := $(shell github-release info --user $(GIT_USER) --repo $(GIT_REPOSITORY) --tag v$(VERSION) 1>&2 2>/dev/null; echo $$?))\nifneq ($(EXISTS),1)\n\tgithub-release release \\\n\t\t--user $(GIT_USER) \\\n\t\t--repo $(GIT_REPOSITORY) \\\n\t\t--tag v$(VERSION) \\\n\t\t--name \"$(VERSION)\" 
\\\n\t\t--description \"$(VERSION)\"\nendif\n\n\tgithub-release upload \\\n\t\t--user $(GIT_USER) \\\n\t\t--repo $(GIT_REPOSITORY) \\\n\t\t--replace \\\n\t\t--tag v$(VERSION) \\\n\t\t--file $(NAME)-$(VERSION).zip \\\n\t\t--name $(NAME)-$(VERSION).zip\n\nclean:\n\trm -f $(ZIP_FILE)\n\trm -rf $(NAME)\n\nbinaries:\n\trm -rf service.lt2http\n\twget https://github.com/ElementumOrg/lt2http-binaries/archive/master.zip && \\\n\tunzip master.zip && \\\n\tmv lt2http-binaries-master/* resources/bin/ && \\\n\trm -rf lt2http-binaries-master && \\\n\trm master.zip\n" }, { "alpha_fraction": 0.6424116492271423, "alphanum_fraction": 0.6507276296615601, "avg_line_length": 29.0625, "blob_id": "5693d85031260321d1b3edff7e5bf445e5e33bb0", "content_id": "c73d51a949cee3e2ee7234475c9e9d5eb4286ec0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 481, "license_type": "no_license", "max_line_length": 56, "num_lines": 16, "path": "/resources/site-packages/lt2http/kodiutils.py", "repo_name": "ElementumOrg/service.lt2http", "src_encoding": "UTF-8", "text": "import xbmc\n\ndef jsonrpc(**kwargs):\n \"\"\"Perform JSONRPC calls\"\"\"\n from json import dumps, loads\n if kwargs.get('id') is None:\n kwargs.update(id=0)\n if kwargs.get('jsonrpc') is None:\n kwargs.update(jsonrpc='2.0')\n return loads(xbmc.executeJSONRPC(dumps(kwargs)))\n\ndef to_unicode(text, encoding='utf-8', errors='strict'):\n \"\"\"Force text to unicode\"\"\"\n if isinstance(text, bytes):\n return text.decode(encoding, errors=errors)\n return text\n" }, { "alpha_fraction": 0.5954631567001343, "alphanum_fraction": 0.6066334247589111, "avg_line_length": 30.97252655029297, "blob_id": "2e23083196c302e10b9197b07eda4a85c11b662c", "content_id": "74f93adc258a253098029362ac436f977d72ab66", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5819, "license_type": "no_license", "max_line_length": 113, "num_lines": 182, "path": "/resources/site-packages/lt2http/util.py", "repo_name": "ElementumOrg/service.lt2http", "src_encoding": "UTF-8", "text": "import os\nimport platform\nimport re\nimport requests\n\nfrom kodi_six import xbmc, xbmcgui, xbmcaddon\nfrom kodi_six.utils import py2_encode\n\nfrom six.moves import urllib_parse\n\nfrom lt2http.logger import log\nfrom lt2http.osarch import PLATFORM\nfrom lt2http.addon import ADDON, ADDON_NAME, ADDON_ICON\n\ntry:\n import simplejson as json\nexcept ImportError:\n import json\n\ndef translatePath(path):\n try:\n from xbmcvfs import translatePath\n except ImportError:\n from xbmc import translatePath\n\n return translatePath(path)\n\ndef notify(message, header=ADDON_NAME, time=5000, image=ADDON_ICON):\n sound = ADDON.getSetting('do_not_disturb') == 'false'\n dialog = xbmcgui.Dialog()\n return dialog.notification(getLocalizedLabel(header), getLocalizedLabel(message), toUtf8(image), time, sound)\n\ndef dialog_ok(message, header=ADDON_NAME):\n dialog = xbmcgui.Dialog()\n return dialog.ok(getLocalizedLabel(header), getLocalizedLabel(message))\n\ndef getLocalizedText(text):\n try:\n return re.sub(r'LOCALIZE\\[(\\d+)\\]', getLocalizedStringMatch, text)\n except:\n return text\n\ndef getLocalizedLabel(label):\n try:\n if \"LOCALIZE\" not in label:\n return py2_encode(label)\n if \";;\" not in label and label.endswith(']'):\n return py2_encode(getLocalizedString(int(label[9:-1])))\n else:\n parts = label.split(\";;\")\n translation = getLocalizedString(int(parts[0][9:14]))\n for i, part in enumerate(parts[1:]):\n if 
part[0:8] == \"LOCALIZE\":\n parts[i + 1] = getLocalizedString(int(part[9:14]))\n else:\n parts[i + 1] = py2_encode(parts[i + 1])\n\n return py2_encode(translation % tuple(parts[1:]), 'utf-8', 'ignore')\n except Exception as e:\n log.error(\"Cannot decode the label: %s, Error: %s\" % (label, e))\n return label\n\ndef getLocalizedStringMatch(match):\n try:\n return ADDON.getLocalizedString(int(match.group(1)))\n except:\n return match.group(1)\n\ndef getLocalizedString(stringId):\n try:\n return py2_encode(ADDON.getLocalizedString(stringId), 'utf-8', 'ignore')\n except:\n return stringId\n\ndef getLt2HttpLocalizedString(stringId):\n LT2HTTP_ADDON = xbmcaddon.Addon('service.lt2http')\n try:\n return py2_encode(LT2HTTP_ADDON.getLocalizedString(stringId), 'utf-8', 'ignore')\n except:\n return stringId\n\ndef toUtf8(string):\n try:\n if isinstance(string, bytes):\n string = string.decode(\"utf-8\")\n return py2_encode(string)\n except:\n return string\n\ndef system_information():\n build = xbmc.getInfoLabel(\"System.BuildVersion\")\n log.info(\"System information: %(os)s_%(arch)s %(version)s\" % PLATFORM)\n log.info(\"Kodi build version: %s\" % build)\n log.info(\"OS type: %s\" % platform.system())\n log.info(\"uname: %s\" % repr(platform.uname()))\n return PLATFORM\n\ndef getShortPath(path):\n if PLATFORM[\"os\"] == \"windows\":\n return getWindowsShortPath(path)\n return path\n\ndef getWindowsShortPath(path):\n try:\n import ctypes\n import ctypes.wintypes\n\n ctypes.windll.kernel32.GetShortPathNameW.argtypes = [\n ctypes.wintypes.LPCWSTR, # lpszLongPath\n ctypes.wintypes.LPWSTR, # lpszShortPath\n ctypes.wintypes.DWORD # cchBuffer\n ]\n ctypes.windll.kernel32.GetShortPathNameW.restype = ctypes.wintypes.DWORD\n\n buf = ctypes.create_unicode_buffer(1024) # adjust buffer size, if necessary\n ctypes.windll.kernel32.GetShortPathNameW(path, buf, len(buf))\n\n return buf.value\n except:\n return path\n\ndef download_github_folder(repo, folder, destination):\n contents_url = \"https://api.github.com/repos/%s/contents/%s\" % (repo, folder)\n log.info(\"Downloading repo content for folder: %s, to folder: %s\" % (contents_url, destination))\n try:\n with requests.get(contents_url) as r:\n lists = json.loads(r.content, parse_int=str)\n\n downloaded = 0\n for i in lists:\n if 'download_url' not in i:\n continue\n\n dest = os.path.join(destination, urllib_parse.unquote(i['download_url'].rsplit('/', 1)[1]))\n log.info(\"Downloading file '%s' to '%s'\" % (i['download_url'], dest))\n\n with requests.get(i['download_url'], stream=True) as rd:\n rd.raise_for_status()\n with open(dest, 'wb') as f:\n for chunk in rd.iter_content(chunk_size=8192):\n f.write(chunk)\n downloaded += 1\n return downloaded > 0\n except Exception as e:\n log.error(\"Could not get list of files from github: %s\" % e)\n raise\n\ndef download_current_version(repo):\n contents_url = \"https://api.github.com/repos/%s/git/refs/tags\" % (repo)\n log.debug(\"Downloading repo releases: %s\" % (contents_url))\n try:\n with requests.get(contents_url) as r:\n lists = json.loads(r.content, parse_int=str)\n\n ref = lists[-1][\"ref\"]\n if ref.find('/'):\n return ref.rsplit('/', 1)[1]\n\n return \"\"\n except Exception as e:\n log.error(\"Could not get list of tags from github: %s\" % e)\n raise\n\n\ndef read_current_version(dest_dir):\n p = os.path.join(dest_dir, \"version\")\n if os.path.exists(p):\n try:\n with open(p, 'r') as file:\n return file.read().replace('\\n', '')\n except:\n pass\n return \"\"\n\ndef write_current_version(dest_dir, 
version):\n p = os.path.join(dest_dir, \"version\")\n try:\n with open(p, 'w') as file:\n file.write(version)\n file.close()\n except:\n pass\n" }, { "alpha_fraction": 0.5713513493537903, "alphanum_fraction": 0.5816216468811035, "avg_line_length": 23.02597427368164, "blob_id": "e997997f08a681b3fa8a247dfdb1d5c36b6e2f77", "content_id": "b94b33711da04bd20e73a64b1eb05d91c4f424c2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1850, "license_type": "no_license", "max_line_length": 106, "num_lines": 77, "path": "/resources/site-packages/lt2http/config.py", "repo_name": "ElementumOrg/service.lt2http", "src_encoding": "UTF-8", "text": "import os\n\nfrom xml.etree import ElementTree\n\nfrom lt2http.addon import ADDON, ADDON_PATH\nfrom lt2http.util import translatePath\n\nLT2HTTP_HOST = \"http://127.0.0.1:65225\"\n\nget_setting = ADDON.getSetting\n\ndef init():\n global LT2HTTP_HOST\n\n try:\n LT2HTTP_HOST = \"http://\" + ADDON.getSetting(\"remote_host\") + \":\" + ADDON.getSetting(\"remote_port\")\n except:\n pass\n\ndef get_all_settings_spec():\n data = ElementTree.parse(os.path.join(ADDON_PATH, \"resources\", \"settings.xml\"))\n for element in data.findall(\"*/setting\"):\n yield dict(element.attrib)\n\ndef get_setting_by_spec(spec):\n t = spec[\"type\"]\n if t in (\"number\", \"enum\"):\n handle = get_int_setting\n elif t == \"slider\":\n # May be 'int', 'float' or 'percent'\n if spec.get(\"option\") == \"int\":\n handle = get_int_setting\n else:\n handle = get_float_setting\n elif t == \"bool\":\n handle = get_boolean_setting\n else:\n handle = get_setting\n return handle(spec[\"id\"])\n\ndef get_boolean_setting(setting):\n return get_setting(setting) == \"true\"\n\n\ndef get_int_setting(setting):\n try:\n return int(get_setting(setting))\n except:\n return 0\n\n\ndef get_float_setting(setting):\n try:\n return float(get_setting(setting))\n except:\n return 0\n\ndef get_current_settings():\n specs = get_all_settings_spec()\n res = {}\n\n for spec in specs:\n if 'id' not in spec:\n continue\n\n res[spec[\"id\"]] = get_setting_by_spec(spec)\n if spec[\"id\"].endswith(\"_path\"):\n res[spec[\"id\"]] = translatePath(res[spec[\"id\"]])\n elif spec[\"id\"].endswith(\"_interfaces\"):\n res[spec[\"id\"]] = [res[spec[\"id\"]]]\n elif spec[\"id\"] == 'download_storage':\n res[spec[\"id\"]] = res[spec[\"id\"]] + 1\n\n return res\n\n\ninit()\n" }, { "alpha_fraction": 0.6442167162895203, "alphanum_fraction": 0.664714515209198, "avg_line_length": 20.34375, "blob_id": "124019223be11db1d08519a6a3373ec62b24bde6", "content_id": "962f5f2d3091dd1b08fa34a3e3805a94aee649b2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 683, "license_type": "no_license", "max_line_length": 81, "num_lines": 32, "path": "/resources/site-packages/lt2http/service.py", "repo_name": "ElementumOrg/service.lt2http", "src_encoding": "UTF-8", "text": "import threading\nfrom lt2http.logger import log\nfrom lt2http.monitor import Lt2HttpMonitor\nfrom lt2http.daemon import lt2http_thread\n\nimport xbmc\n\ndef run():\n # Make sure the XBMC jsonrpc server is started.\n xbmc.startServer(xbmc.SERVER_JSONRPCSERVER, True)\n\n # Make the monitor\n monitor = Lt2HttpMonitor()\n\n threads = [\n threading.Thread(target=lt2http_thread, args=[monitor]) # lt2http thread\n ]\n\n for t in threads:\n t.daemon = True\n t.start()\n\n # XBMC loop\n while not monitor.abortRequested():\n xbmc.sleep(1000)\n\n try:\n monitor.onAbortRequested()\n 
except:\n pass\n\n log.info(\"lt2http: exiting lt2http\")\n" } ]
8
shivamtiwari841797/DjangoJEEPredictor
https://github.com/shivamtiwari841797/DjangoJEEPredictor
ca4dee22ad55f6a87af54b2023ffec8ff3d81d61
ad9cce1031fa25260604d814f0fbf7adad5f8602
088a28305d0c89c77b7a3bf5d09c4cb545be115c
refs/heads/master
2022-09-25T03:27:07.624198
2020-05-31T10:52:05
2020-05-31T10:52:05
268,029,484
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7684210538864136, "alphanum_fraction": 0.7684210538864136, "avg_line_length": 18, "blob_id": "ed16311ca5e47537efee9d7609b7c3056af4f8eb", "content_id": "ebfae07daccf341980a03357c327a1e2e09d6c80", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 95, "license_type": "no_license", "max_line_length": 34, "num_lines": 5, "path": "/workingapp/apps.py", "repo_name": "shivamtiwari841797/DjangoJEEPredictor", "src_encoding": "UTF-8", "text": "from django.apps import AppConfig\n\n\nclass WorkingappConfig(AppConfig):\n name = 'workingapp'\n" }, { "alpha_fraction": 0.699999988079071, "alphanum_fraction": 0.699999988079071, "avg_line_length": 22.100000381469727, "blob_id": "79d188215ad02d92deb1addddcf1ed48683a0a0e", "content_id": "d1de088155d194372205a00a130f03bf9e92de6b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 230, "license_type": "no_license", "max_line_length": 45, "num_lines": 10, "path": "/workingapp/urls.py", "repo_name": "shivamtiwari841797/DjangoJEEPredictor", "src_encoding": "UTF-8", "text": "from django.urls import path\nfrom . import views\n\nurlpatterns = [\n\tpath('',views.home,name='home'),\n\tpath('result',views.result,name='result'),\n\tpath('plot',views.plot,name='plot'),\n\tpath('plotalt',views.plotalt,name='plotalt')\n\n]" }, { "alpha_fraction": 0.618100643157959, "alphanum_fraction": 0.6401515007019043, "avg_line_length": 31.004329681396484, "blob_id": "a418359279d4836e6bd29dd6ece29da0608427d1", "content_id": "6010c961367f07b6e0d131058a3de95bafc5cb73", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7392, "license_type": "no_license", "max_line_length": 393, "num_lines": 231, "path": "/workingapp/views.py", "repo_name": "shivamtiwari841797/DjangoJEEPredictor", "src_encoding": "UTF-8", "text": "import scipy.stats as stats\nimport math\nfrom django.shortcuts import render\nfrom django.http import HttpResponse\nfrom .train import train_model, Data\nfrom IPython.display import HTML\nimport matplotlib.pyplot as plt, mpld3\nimport matplotlib\nimport pandas as pd\nimport numpy as np\nimport os\nimport statistics as st\nimport scipy.stats\nimport json\n\n\n# Create your views here.\ndef home(request):\n\treturn render(request,'index1.html')\n\ndef result(request):\n\trank = request.POST['rank']\n\tcaste = request.POST['caste']\n\tif rank == '' :\n\t\treturn HttpResponse(\"Fill the rank field\")\n\tif caste == 'select' :\n\t\treturn HttpResponse(\"choose the correct caste\")\n\tif(int(rank)<1):\n\t\treturn HttpResponse(\"Incorrect Rank Entered\")\n\tdf=train_model.getmodel()\n\tl=[caste]\n\t#df=df.sort_values(['Category','Closing_Rank'])\n\tdf=df[df['Category'].isin(l)]\n\t#df=df[(df['Closing_Rank']>=int(rank)) & (df['Opening_Rank']<=int(rank))]\n\tdf=df[df['Opening_Rank']>=int(rank)]\n\t#df=df[['College','Stream']]\n\t#df.set_index('College', inplace=True)\n\tdf=df.sort_values('Stream')\n\tdf.reset_index(drop=True, inplace=True)\n\tif(df.shape[0]==0):\n\t\treturn HttpResponse(\"Sorry.. 
You can get no college with this category rank\")\n\t#return HttpResponse('<style>.dataframe{position:absolute;top:40px;left:20%;}table {font-family: arial, sans-serif;border-collapse: collapse;width: 50%;}td{onclick:'templates/plot.html';border: 1px solid #dddddd;text-align: left;padding: 8px;}th{border: 1px solid #dddddd;text-align: left;padding: 8px;}tr:nth-child(even) {background-color: #dddddd;}</style>You can get :\\n\\n\\n'+df.to_html())\n\t\n\tdat=Data()\n\tdataList=dat.getdata()\n\tdf16=dataList[0]\n\tdf17=dataList[1]\n\tdf18=dataList[2]\n\n\tcolleges=df16.College.unique()\n\tStreams=df16.Stream.unique()\n\tCategories=df16.Category.unique()\n\tylist=[df16,df17,df18]\n\n\tfor col in colleges:\n\t\tfor stream in Streams:\n\t\t\tfor cat in Categories:\n\t\t\t\tl=[col,stream,cat]\n\t\t\t\trankList=[]\n\t\t\t\tfor yearData in ylist:\n\t\t\t\t\trow=yearData[yearData['College'].isin(l)]\n\t\t\t\t\trow=row[row['Stream'].isin(l)]\n\t\t\t\t\trow=row[row['Category'].isin(l)]\n\t\t\t\t\ttry:\n\t\t\t\t\t\trankList.append(row.iloc[0].Opening_Rank)\n\t\t\t\t\t\trankList.append(row.iloc[0].Closing_Rank)\n\t\t\t\t\texcept:\n\t\t\t\t\t\tpass\n\t\t\t\tif(len(rankList)>0):\n\t\t\t\t\tmu = st.mean(rankList)\n\t\t\t\t\tsigma = st.stdev(rankList)\n\t\t\t\t\tvar = float(sigma)**2\n\t\t\t\t\tdenom = (2*math.pi*var)**.5\n\t\t\t\t\tnum = math.exp(-(float(rank)-float(mu))**2/(2*var))\n\t\t\t\t\tp=num/denom\n\t\t\t\t\tp=p*100\n\t\t\t\t\tp=round(p,3)\n\n\n\t\t\t\t\t#p=NormalDist(mu, sigma).pdf(rank)\n\t\t\t\t\t#p=scipy.stats.norm(mu, sigma).pdf(rank)\n\t\t\t\t\ttry:\n\t\t\t\t\t\tind=df[(df['College'] == col ) & (df['Category'] == cat) & (df['Stream'] == stream)].head().index\n\t\t\t\t\t\tind=ind[0]\n\t\t\t\t\t\tdf.loc[ind,'Probability']=p\n\t\t\t\t\texcept:\n\t\t\t\t\t\tpass\n\n\tdf=df[['College','Stream','Probability']]\n\tdf=df.sort_values(['Probability'],ascending=False)\n\tdf=df[:10]\n\treturn render(request,'table.html',{'df':df.values.tolist(),'cat':caste})\n\t#return HttpResponse(df.to_html())\n\ndef plot(request):\n\n\tif request.method == 'POST' and request.is_ajax():\n\t #name = request.POST.get('name')\n\t BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n\t clg=request.POST.get('clg')\n\t stream=request.POST.get('stream')\n\t cat=request.POST.get('cat')\n\t dat=Data()\n\t dataList=dat.getdata()\n\t df16=dataList[0]\n\t df17=dataList[1]\n\t df18=dataList[2]\n\t l=[clg,stream,cat]\n\t ylist=[df16,df17,df18]\n\t orankList=[]\n\t crankList=[]\n\n\t for yearData in ylist:\n\t \trow=yearData[yearData['College'].isin(l)]\n\t \trow=row[row['Stream'].isin(l)]\n\t \trow=row[row['Category'].isin(l)]\n\t \torankList.append(row.iloc[0].Opening_Rank)\n\t \tcrankList.append(row.iloc[0].Closing_Rank)\n\n\t y_pos=['2016','2017','2018']\n\n\t matplotlib.use(\"Agg\")\n\n\t ops = pd.Series(orankList)\n\t plt.rcParams[\"figure.figsize\"] = (5,3)\n\t plt.bar(y_pos,orankList)\n\t plt.xlabel('Year')\n\t plt.ylabel('Rank')\n\t figo, axo = plt.subplots()\n\t ops.plot.bar()\n\t ob = mpld3.fig_to_html(figo)\n\t plt.savefig(BASE_DIR+r'/static/plots/ob_'+clg+'_'+stream+'_'+cat+'.png')\n\t \n\n\t cs = pd.Series(crankList)\n\t plt.rcParams[\"figure.figsize\"] = (5,3)\n\t plt.bar(y_pos,crankList)\n\t plt.xlabel('Year')\n\t plt.ylabel('Rank')\n\t figc, axc = plt.subplots()\n\t cs.plot.bar()\n\t cb = mpld3.fig_to_html(figc)\n\t plt.savefig(BASE_DIR+r'/static/plots/cb_'+clg+'_'+stream+'_'+cat+'.png')\n\t \n\n\t mu = st.mean(orankList)\n\t plt.rcParams[\"figure.figsize\"] = (5,3)\n\t sigma = 
st.stdev(orankList)\n\t x = np.linspace(mu - 3*sigma, mu + 3*sigma, 100)\n\t fig1, ax1 = plt.subplots()\n\t ax1.plot(x, stats.norm.pdf(x, mu, sigma))\n\t on= mpld3.fig_to_html(fig1)\n\t plt.savefig(BASE_DIR+r'/static/plots/on_'+clg+'_'+stream+'_'+cat+'.png')\n\t \n\n\t cmu = st.mean(crankList)\n\t plt.rcParams[\"figure.figsize\"] = (5,3)\n\t csigma = st.stdev(crankList)\n\t cx = np.linspace(cmu - 3*csigma, cmu + 3*csigma, 100)\n\t fig2, ax2 = plt.subplots()\n\t ax2.plot(cx, stats.norm.pdf(cx, cmu, csigma))\n\t cn= mpld3.fig_to_html(fig2)\n\t plt.savefig(BASE_DIR+r'/static/plots/cn_'+clg+'_'+stream+'_'+cat+'.png')\n\t \n\t #return HttpResponse(json.dumps({'clg':clg,'stream':stream,'cat':cat,'ob':ob,'cb':cb,'on':on,'cn':cn,'mu':mu,'sigma':sigma}), content_type=\"application/json\")\n\t return render(request,'Plotalt.html',{'clg':clg,'stream':stream,'cat':cat,'ob':ob,'cb':cb,'on':on,'cn':cn,'mu':mu,'sigma':sigma})\n\telse :\n\t\treturn render_to_response('table.html', locals())\n\n\t#return render_to_response('table.html',{'clg':clg,'stream':stream,'cat':cat,'ob':ob,'cb':cb,'on':on,'cn':cn,'mu':mu,'sigma':sigma})\n\t#return render(request,'.html',{'clg':clg,'stream':stream,'cat':cat,'ob':ob,'cb':cb,'on':on,'cn':cn,'mu':mu,'sigma':sigma})\n\ndef plotalt(request):\n\n\t\tclg=request.GET['clg']\n\t\tstream=request.GET['stream']\n\t\tcat=request.GET['cat']\n\t\tdat=Data()\n\t\tdataList=dat.getdata()\n\t\tdf16=dataList[0]\n\t\tdf17=dataList[1]\n\t\tdf18=dataList[2]\n\t\tl=[clg,stream,cat]\n\t\tylist=[df16,df17,df18]\n\t\torankList=[]\n\t\tcrankList=[]\n\n\t\tfor yearData in ylist:\n\t\t\trow=yearData[yearData['College'].isin(l)]\n\t\t\trow=row[row['Stream'].isin(l)]\n\t\t\trow=row[row['Category'].isin(l)]\n\t\t\torankList.append(row.iloc[0].Opening_Rank)\n\t\t\tcrankList.append(row.iloc[0].Closing_Rank)\n\n\t\tplt.rcParams[\"figure.figsize\"] = (5,3)\n\t\ty_pos=['2016','2017','2018']\n\t\tops = pd.Series(orankList)\n\t\tplt.bar(y_pos,orankList)\n\t\tplt.xlabel('Year')\n\t\tplt.ylabel('Rank')\n\t\tfigo, axo = plt.subplots()\n\t\tops.plot.bar()\n\t\tob = mpld3.fig_to_html(figo)\n\t\t\n\n\t\tcs = pd.Series(crankList)\n\t\tplt.bar(y_pos,crankList)\n\t\tplt.xlabel('Year')\n\t\tplt.ylabel('Rank')\n\t\tfigc, axc = plt.subplots()\n\t\tcs.plot.bar()\n\t\tcb = mpld3.fig_to_html(figc)\n\t\t\n\n\t\tmu = st.mean(orankList)\n\t\tsigma = st.stdev(orankList)\n\t\tx = np.linspace(mu - 3*sigma, mu + 3*sigma, 100)\n\t\tfig1, ax1 = plt.subplots()\n\t\tax1.plot(x, stats.norm.pdf(x, mu, sigma))\n\t\ton= mpld3.fig_to_html(fig1)\n\t\t\n\n\t\tcmu = st.mean(crankList)\n\t\tcsigma = st.stdev(crankList)\n\t\tcx = np.linspace(cmu - 3*csigma, cmu + 3*csigma, 100)\n\t\tfig2, ax2 = plt.subplots()\n\t\tax2.plot(cx, stats.norm.pdf(cx, cmu, csigma))\n\t\tcn= mpld3.fig_to_html(fig2)\n\t\t\n\t\t#return HttpResponse(json.dumps({'clg':clg,'stream':stream,'cat':cat,'ob':ob,'cb':cb,'on':on,'cn':cn,'mu':mu,'sigma':sigma}), content_type=\"application/json\")\n\t\treturn render(request,'Plot.html',{'clg':clg,'stream':stream,'cat':cat,'ob':ob,'cb':cb,'on':on,'cn':cn,'mu':mu,'sigma':sigma})" }, { "alpha_fraction": 0.5264996886253357, "alphanum_fraction": 0.5538730621337891, "avg_line_length": 26.238094329833984, "blob_id": "4c2b487530c2b0ae7bd6179b5dd8f9776a51b1bb", "content_id": "2c6d6cd10e0aa08046ed4eeee8704c30e7d38bdb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1717, "license_type": "no_license", "max_line_length": 80, "num_lines": 63, "path": "/workingapp/train.py", 
"repo_name": "shivamtiwari841797/DjangoJEEPredictor", "src_encoding": "UTF-8", "text": "import pandas as pd\nimport os\n\nBASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n\ndf16=pd.read_csv(BASE_DIR+'/dataset/2016.csv').sort_values(['College','Stream'])\ndf17=pd.read_csv(BASE_DIR+'/dataset/2017.csv').sort_values(['College','Stream'])\ndf18=pd.read_csv(BASE_DIR+'/dataset/2018.csv').sort_values(['College','Stream'])\ndflist=[df16,df17,df18]\n\ncolleges=df16.College.unique()\n#print(colleges)\n\nStreams=df16.Stream.unique()\n#print(Streams)\n\nCategories=df16.Category.unique()\n#print(Categories)\n\nres=[]\nfor clg in colleges:\n for stream in Streams:\n for cat in Categories:\n l=[clg,stream,cat]\n sumc=0\n sumo=0\n for df in dflist:\n row=df[df['College'].isin(l)]\n row=row[row['Stream'].isin(l)]\n row=row[row['Category'].isin(l)]\n try:\n sumo+=row.iloc[0].Opening_Rank\n sumc+=row.iloc[0].Closing_Rank\n except:\n pass\n avgo=sumo//3\n avgc=sumc//3\n if(avgo!=0 and avgc!=0):\n l.append(avgo)\n l.append(avgc)\n res.append(l)\n\nresult=pd.DataFrame(res)\nresult.columns=['College','Stream','Category','Opening_Rank','Closing_Rank']\nresult.to_csv(BASE_DIR+r'/ResultSet/result.csv')\n#print(result)\n\nclass train_model:\n df=[]\n def __init__(self):\n df=result\n def getmodel():\n try:\n df.insert(3,'Probability',[0 for i in range(df.shape[0])])\n except:\n pass\n return df\n \nclass Data:\n def __init__(self):\n self.df=[df16,df17,df18]\n def getdata(self):\n return self.df\n\n" } ]
4
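The admission-probability step in the views.py above hand-computes a normal density: for each (College, Stream, Category) it takes the mean and sample standard deviation of the historical opening/closing ranks and evaluates num/denom at the student's rank. A minimal cross-check sketch, assuming scipy is available (rank_density and ranks are illustrative names, not from the repo):

import math
import scipy.stats as stats

def rank_density(rank, ranks):
    # Mean and sample variance, matching statistics.mean / statistics.stdev.
    mu = sum(ranks) / len(ranks)
    var = sum((r - mu) ** 2 for r in ranks) / (len(ranks) - 1)
    # The hand-rolled num/denom expression from views.py ...
    manual = math.exp(-(rank - mu) ** 2 / (2 * var)) / math.sqrt(2 * math.pi * var)
    # ... agrees with the library normal PDF.
    assert math.isclose(manual, stats.norm(mu, math.sqrt(var)).pdf(rank))
    return manual

views.py then multiplies this value by 100 and rounds it into the Probability column; since a density is not a probability, the displayed numbers are best read as relative scores for ranking colleges.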
unique1o1/WeatherInfo
https://github.com/unique1o1/WeatherInfo
79e9f81c2095621b5d4e251cc2aefc31b89a55fe
fd8787416d75ec394b75f95cb405bd7fd9f0f1fa
0ae663cedaa11211075bde539c8f37204f0e5898
refs/heads/master
2021-01-11T22:05:13.050391
2017-05-05T18:09:49
2017-05-05T18:09:49
78,920,394
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7113820910453796, "alphanum_fraction": 0.7195122241973877, "avg_line_length": 18.639999389648438, "blob_id": "ff853e80af891ec80d3640477becf5ec0ed835fc", "content_id": "6a9da7abc7a945ff4c61a2019d22a54c6e229585", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 492, "license_type": "no_license", "max_line_length": 118, "num_lines": 25, "path": "/README.md", "repo_name": "unique1o1/WeatherInfo", "src_encoding": "UTF-8", "text": "# WeatherInfo\nA super simple python module to retrieve Weather Info about a certain location (eg. New York, Boston, Kathmandu etc..)\n\n\n**Installation:**\n\n\nLinux:\n\n\n\t\tgit clone https://github.com/unique1o1/WeatherInfo\n\t\tsudo cp -R WeatherInfo /usr/lib/python3.5/ \n\n**How to use:**\n\n\n\t\tfrom WeatherInfo import *\n\n\n*Call the constructor to send the location you want and put it in a variable.*\n\n\n\t\tweather = get_weather('new york')\n\t\tweather_info = weather.returnvalue() \n\t\tprint(weather_info) \n" }, { "alpha_fraction": 0.5501307845115662, "alphanum_fraction": 0.5527462959289551, "avg_line_length": 22.40816307067871, "blob_id": "2b245a3219309b199faca782e8e031b3770ad3c5", "content_id": "44e49fa4f7b7304bd2e8d34d1aaa3a69631bb883", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1147, "license_type": "no_license", "max_line_length": 92, "num_lines": 49, "path": "/__init__.py", "repo_name": "unique1o1/WeatherInfo", "src_encoding": "UTF-8", "text": "import urllib.request\n#import urllib.parse\n\nimport re\n\nclass get_weather():\n def __init__(self,city_name):\n #x = re.search(' ', city_name)\n #city_name=input(\"enter the location\")\n \n x=re.search(' ',city_name)\n if(x != None):\n\n pos = x.start()\n\n city_name = city_name[0:pos] + '-' + city_name[pos+1:]\n\n\n url = 'http://www.weather-forecast.com/locations/' + city_name + '/forecasts/latest'\n try:\n\n datas = urllib.request.urlopen(url).read()\n data = datas.decode('utf-8')\n except IOError as e:\n print('error reading :',e)\n #print(data)\n\n starting_text = re.search('span class=\"phrase\">', data)\n start = starting_text.end()\n\n\n ending_text = re.search('</span></span></span></p><div class=\"forecast-cont\">',data)\n end = ending_text.start()\n\n self.WeatherReport = data[start:end]\n self.WeatherReport=re.sub('&deg;',' degree ', self.WeatherReport)\n \n def returnvalue(self):\n return self.WeatherReport\n\n\n\n\n\ndef main():\n \n weather=get_weather()\n\nif __name__==\"__main__\":main()\n" } ]
2
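The get_weather constructor above hyphenates only the first space in the city name (re.search returns the first match), so two-word cities work but a three-word city like 'new york city' would produce a malformed slug. A hedged sketch of a helper that hyphenates every word (slugify_city is an illustrative name, not part of the module, and it assumes weather-forecast.com keeps hyphen-separated location slugs):

def slugify_city(city_name):
    # Join every whitespace-separated word with hyphens,
    # not just the first pair of words.
    return '-'.join(city_name.split())

assert slugify_city('new york city') == 'new-york-city'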
anusha246/Caching-Proxy-Server
https://github.com/anusha246/Caching-Proxy-Server
7d2a8fea00392be82a1232345f13799b33b651d4
bf945d3f79c91f33a0fde91b10fcdc15dba0a59e
58755919e55e04ec51ae21ddb4d1ace5f83dd471
refs/heads/main
2023-03-02T06:53:16.500093
2021-02-01T08:22:30
2021-02-01T08:22:30
332,614,272
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.38558462262153625, "alphanum_fraction": 0.39967966079711914, "avg_line_length": 44.02404022216797, "blob_id": "e1bd764a58b62bc4219ace84b62bc6f970989562", "content_id": "7800370a8607a2082036fcb42d99c77e358c4b7e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9365, "license_type": "no_license", "max_line_length": 141, "num_lines": 208, "path": "/proxy.py", "repo_name": "anusha246/Caching-Proxy-Server", "src_encoding": "UTF-8", "text": "'''\nCode inspiration and snippets from:\nhttps://pymotw.com/3/socket/tcp.html\nhttps://stackoverflow.com/questions/32792333/\n python-socket-module-connecting-to-an-http-proxy-then-performing-a-get-request\n'''\n\nimport sys, os, time, socket, select\n\ndef proxy(expiry):\n\n response_body1 = None\n response_body2 = None\n body_tag = None\n content_length = None\n \n # Server code from https://pymotw.com/3/socket/tcp.html below\n # Create a TCP/IP socket\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n\n # Bind the socket to the port\n server_address = ('localhost', 8888)\n \n sock.bind(server_address)\n\n # Listen for incoming connections\n sock.listen(1)\n\n while True:\n # Wait for a connection\n connection, client_address = sock.accept()\n \n try:\n \n header = connection.recv(1000)\n \n if header:\n\n #Get header's requested url and split into host and relative_url\n first_line = header.split(b'\\n')[0]\n url = first_line.split(b' ')[1]\n host = url.split(b'/')[1]\n \n relative_url = b''\n for i in range(2, len(url.split(b'/'))):\n relative_url += b'/' + url.split(b'/')[i]\n\n if relative_url == b'':\n relative_url = b'/'\n \n\n #Use host and relative_url to create a filename\n filename = host.decode('utf-8') + ' ' \\\n + relative_url.decode('utf-8').replace('/', ',') \\\n + '.bin'\n\n is_expired = True\n \n #Check if a cached file is expired, if it exists\n try:\n is_expired = time.time() - os.path.getmtime(filename) >= expiry\n \n except:\n pass #File does not exist\n\n if not is_expired:\n #Send the cached filename contents to browser client\n with open(filename, mode='rb') as file:\n connection.sendall(file.read())\n \n else: \n try: #Try connecting to web server\n\n #Create a socket and connect to host web server \n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n s.connect((host.decode('utf-8'), 80))\n\n #Replace header's localhost with host web server \n header = header.replace(url, relative_url)\n header = header.replace(b'localhost:8888', host)\n \n #Change header encoding to identity and send it\n encoding = header.split(b'Accept-Encoding: ')[1].split(b'\\r\\n')[0]\n header = header.replace(encoding, b'identity') \n s.sendall(header)\n \n \n #Receive response from the web server and set is_html\n response = s.recv(65565)\n\n response_header = response.split(b'\\r\\n\\r\\n')[0]\n\n content_type = response_header.split(b'Content-Type: ')[1].split(b'\\r\\n')[0]\n is_html = b'text/html' in content_type\n \n\n try:\n #Get content length, then get content until we reach that length\n byte_content_length = response_header.split(b'Content-Length: ')[1].split(b'\\r\\n')[0]\n content_length = int(byte_content_length.decode('utf-8'))\n\n content = response.split(b'\\r\\n\\r\\n')[1]\n while len(content) != content_length:\n \n content += s.recv(65565)\n\n #Combine pieces of response\n response = response_header + b'\\r\\n\\r\\n' + content \n\n \n \n except: #HTTP code 304 or Content-Length not specified\n \n\n if b'200 OK' in response:\n\n #Loop responses 
from web server to get full repsonse\n while True:\n incoming = s.recv(65565)\n if len(incoming) == 0:\n break;\n \n response += incoming\n \n finally:\n\n if is_html:\n #Split and parse html body to add yellow box \n pre_body = response.split(b'<body')[0]\n post_body = response.split(b'<body')[1]\n in_body = post_body.split(b'>')[0]\n body_tag = b'<body' + in_body + b'>'\n \n response_body1 = response.split(body_tag)[0]\n response_body2 = response.split(body_tag)[1]\n\n #Get time for yellow box\n timestamp = time.time()\n fresh_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(timestamp))\n fresh_time = bytes(fresh_time, encoding='utf8')\n\n\n if content_length:\n #Add length of yellow box to response Content-Length\n content_length += 251\n response_body1 = response_body1.replace(b'Content-Length: ' +\n byte_content_length,\n b'Content-Length: ' +\n bytes(str(content_length),encoding='utf8'))\n \n #Add yellow box fresh version to html body\n fresh_response = response_body1 + body_tag + b'<p style=\"z-index:9999; position:fixed; top:20px; left:20px; \\\n width:200px; height:100px; background-color:yellow; padding:10px; font-weight:bold;\">FRESH VERSION \\\n AT: ' + fresh_time + b'</p>' + response_body2\n \n \n connection.sendall(fresh_response)\n else:\n connection.sendall(response)\n \n \n #Write the response to file\n with open(filename, mode='wb') as file:\n if is_html:\n #Get cached file creation time\n cache_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))\n cache_time = bytes(cache_time, encoding='utf8')\n\n if content_length:\n #Add length of yellow box to response Content-Length\n response_body1 = response_body1.replace(b'Content-Length: ' +\n bytes(str(content_length),encoding='utf8'),\n b'Content-Length: ' +\n bytes(str(content_length + 12),encoding='utf8'))\n\n #Add yellow box cached version to cache\n response = response_body1 + body_tag + b'<p style=\"z-index:9999; position:fixed; top:20px; left:20px; \\\n width:200px; height:100px; background-color:yellow; padding:10px; font-weight:bold;\">CACHED VERSION AS \\\n OF: '+ cache_time + b'</p>' + response_body2\n \n \n file.write(response)\n \n \n s.close()\n\n \n #Except block below from\n #https://stackoverflow.com/questions/32792333/\n #python-socket-module-connecting-to-an-http-proxy-then-performing-a-get-request\n except socket.error as m:\n s.close()\n sys.exit(1)\n\n \n \n \n else:\n pass #No header receieved\n\n finally:\n # Clean up the connection\n connection.close()\n\n\nif __name__ == \"__main__\":\n expiry = int(sys.argv[1])\n proxy(expiry)\n" }, { "alpha_fraction": 0.4163780212402344, "alphanum_fraction": 0.4401286542415619, "avg_line_length": 43.911109924316406, "blob_id": "b9dc5126702219f66fc5f6145551994000d24217", "content_id": "a46d18471714337e800ec19a1deb0a2f287e7299", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8084, "license_type": "no_license", "max_line_length": 362, "num_lines": 180, "path": "/a1/proxy.py", "repo_name": "anusha246/Caching-Proxy-Server", "src_encoding": "UTF-8", "text": "'''\nCode inspiration and snippets from:\nhttps://pymotw.com/3/socket/tcp.html\nhttps://stackoverflow.com/questions/32792333/\n python-socket-module-connecting-to-an-http-proxy-then-performing-a-get-request\n'''\n\nimport sys, os, time, socket, select\n\ndef proxy(expiry):\n \n initial_response = True\n response_body1 = None\n response_body2 = None\n \n #yellow_box = 
b'\\x8cK\\x0e\\x830\\x0cD\\xaf\\x12\\xb1G\\xfd\\xec\\x9aP\\xee\\x02\\xd8I\\xacZq\\x04\\xae\\x128}\\xd3\\xd2Y\\xbd\\x91\\xde\\xcc\\x90\\xcd\\xa6;\\xe3\\xb3;zJ\\x80\\xd5>Z\\x9c\\xc9\\xb2\\x91\\x92$\\xeb\\xa9\"8\\xa3\\x92\\xed\\xfd\\x9a\\xab3\\x8c^\\xffX\\x0846\\xfe\\x95\\x88\\x14\\xa2\\xda\\xdb\\xd9\\xe6iy\\x85U\\xde\\t\\xfaEXV\\xbb#\\xb3\\x94v<\\x01P\\n\\xcd\\xfbj^\\x92\\xf6\\xe5\\\\\\xce\\xc2\\xe0\\xbaQ#\\x1a\\xc5\\xaa'\n \n # Server code from https://pymotw.com/3/socket/tcp.html below\n # Create a TCP/IP socket\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n\n # Bind the socket to the port\n server_address = ('localhost', 8888)\n print('starting up on {} port {}'.format(*server_address))\n sock.bind(server_address)\n\n # Listen for incoming connections\n sock.listen(1)\n\n while True:\n # Wait for a connection\n print('waiting for a connection')\n connection, client_address = sock.accept()\n try:\n print('connection from', client_address)\n \n header = connection.recv(1000)\n print('received {!r}'.format(header))\n if header:\n\n first_line = header.split(b'\\n')[0]\n url = first_line.split(b' ')[1]\n \n host = url.split(b'/')[1]\n \n relative_url = b''\n for i in range(2, len(url.split(b'/'))):\n relative_url += b'/' + url.split(b'/')[i]\n\n if relative_url == b'':\n relative_url = b'/'\n\n filename = host.decode('utf-8') + ' ' \\\n + relative_url.decode('utf-8').replace('/', ',') \\\n + '.bin'\n print('Filename: {}'.format(filename))\n\n is_expired = True\n \n #Check if a file is expired, if it exists\n try:\n is_expired = time.time() - os.path.getmtime(filename) >= expiry\n print('Elapsed time is {}, expiry is {}'.format(\\\n time.time() - os.path.getmtime(filename), expiry))\n except:\n print('')\n print('File does not exist')\n\n if not is_expired:\n with open(filename, mode='rb') as file:\n connection.sendall(file.read())\n print('File read successful')\n \n else: \n try: #Try connecting to web server\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n\n print(host)\n print(host.decode('utf-8'))\n s.connect((host.decode('utf-8'), 80))\n\n header = header.replace(url, relative_url)\n header = header.replace(b'localhost:8888', host)\n print(header)\n \n encoding = header.split(b'Accept-Encoding: ')[1].split(b'\\r\\n')[0]\n header = header.replace(encoding, b'identity') \n s.sendall(header)\n print('Header sent successfully')\n \n\n response = s.recv(65565)\n\n response_header = response.split(b'\\r\\n\\r\\n')[0]\n\n print(response)\n\n try:\n byte_content_length = response_header.split(b'Content-Length: ')[1].split(b'\\r\\n')[0]\n content_length = int(byte_content_length.decode('utf-8'))\n\n content = response.split(b'\\r\\n\\r\\n')[1]\n while len(content) != content_length:\n print('Content length: {}, needed len: {}'.format(len(content), content_length))\n content += s.recv(65565)\n\n response = response_header + b'\\r\\n\\r\\n' + content #this is the full response now\n\n print(response)\n \n if initial_response:\n response_body1 = response.split(b'<body>')[0]\n response_body2 = response.split(b'<body>')[1]\n \n timestamp = time.time()\n fresh_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(timestamp))\n fresh_time = bytes(fresh_time, encoding='utf8')\n fresh_response = response_body1 + b'<body>' + b'<p style=\"z-index:9999; position:fixed; top:20px; left:20px; \\\n width:200px; height:100px; background-color:yellow; padding:10px; font-weight:bold;\">FRESH VERSION \\\n AT: ' + fresh_time + b'</p>' + response_body2\n connection.sendall(fresh_response)\n 
else:\n connection.sendall(response)\n print('Sent response to client')\n \n except: #HTTP code 304 or Content-Length not specified\n print('HTTP code 304 or Content-Length not specified')\n\n if b'200 OK' in response:\n\n while True:\n incoming = s.recv(65565)\n if len(incoming) == 0:\n break;\n print (incoming)\n print('Receieved incoming data from web server')\n response += incoming\n\n connection.sendall(response)\n \n finally:\n \n #Write the response to file\n with open(filename, mode='wb') as file:\n if initial_response:\n cache_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))\n cache_time = bytes(cache_time, encoding='utf8')\n response = response_body1 + b'<body>' + b'<p style=\"z-index:9999; position:fixed; top:20px; left:20px; \\\n width:200px; height:100px; background-color:yellow; padding:10px; font-weight:bold;\">CACHED VERSION AS \\\n OF: '+ cache_time + b'</p>' + response_body2\n initial_response = False\n print('Response: {}'.format(response))\n file.write(response)\n print('Response written successfully')\n \n s.close()\n\n \n #Except block below from\n #https://stackoverflow.com/questions/32792333/\n #python-socket-module-connecting-to-an-http-proxy-then-performing-a-get-request\n except socket.error as m:\n print (str(m))\n s.close()\n sys.exit(1)\n\n \n \n \n else:\n print('no data from', client_address)\n\n finally:\n # Clean up the connection\n connection.close()\n\n\nif __name__ == \"__main__\":\n expiry = int(sys.argv[1])\n proxy(expiry)\n" } ]
2
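proxy.py above chooses between serving from cache and refetching with a single rule: the cached file is fresh while its modification time is younger than the expiry given on the command line, and a missing file counts as expired. A standalone sketch of that predicate under the same mtime convention (is_fresh, cache_path and max_age_s are illustrative names):

import os
import time

def is_fresh(cache_path, max_age_s):
    # Fresh while the cache file is younger than max_age_s seconds.
    # A missing file raises OSError, which we treat as expired,
    # i.e. fetch from the origin server and rewrite the cache.
    try:
        return (time.time() - os.path.getmtime(cache_path)) < max_age_s
    except OSError:
        return False

This mirrors the try/except around os.path.getmtime in proxy.py, where a failed lookup simply leaves is_expired set to True.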
MadJedi/passwordshuffler
https://github.com/MadJedi/passwordshuffler
97d58c3b4b00c4f21165d9e571dbc57c89beb0a8
6787072e73867853508803b92a6727e7aa6cc632
fda6d719cec2e2ded264e9db0dd393a341e902c0
refs/heads/main
2023-06-14T07:10:42.077421
2021-06-28T16:30:27
2021-06-28T16:30:27
381,027,574
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6246334314346313, "alphanum_fraction": 0.662756621837616, "avg_line_length": 20.600000381469727, "blob_id": "d906451c0453f45b822ce2fd085c14f2ad0d5393", "content_id": "2d6b28993b492e34e7684409b60944ae5d4771cb", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 427, "license_type": "permissive", "max_line_length": 54, "num_lines": 15, "path": "/password shuffler.py", "repo_name": "MadJedi/passwordshuffler", "src_encoding": "UTF-8", "text": "import random\r\na = '123456789'\r\nb = 'qwertyuiopasdfghjklzxcvbnm'\r\nf = '@#$%&?!*'\r\nc = b.upper()\r\n#соединяем все строки\r\nd = a+b+c+f\r\n#преобразуем в список\r\nls = list(d)\r\n#мешаем\r\nrandom.shuffle(ls)\r\n#извлекаем 12 произвольных значений\r\ntotl = ''.join([random.choice(ls) for x in range(12)])\r\nprint(totl)\r\ninput(\"Нажми ENTER чтобы выйти\")\r\n\r\n" }, { "alpha_fraction": 0.7200000286102295, "alphanum_fraction": 0.7200000286102295, "avg_line_length": 11.5, "blob_id": "49de7ab0abc8941d64428483907bac2686bbda9f", "content_id": "7abed069f894e74b4e863b68f02c98c8e9b4b5e1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 25, "license_type": "permissive", "max_line_length": 18, "num_lines": 2, "path": "/README.md", "repo_name": "MadJedi/passwordshuffler", "src_encoding": "UTF-8", "text": "# passwordshuffler\nso...\n" } ]
2
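password shuffler.py above shuffles its character pool and then draws 12 characters with random.choice; the shuffle adds nothing because random.choice already picks uniformly, and the random module is not designed for secrets. A sketch of the same generator on top of the standard secrets module (make_password and POOL are illustrative names; note string.digits also includes '0', which the script's pool omits):

import secrets
import string

# Digits, both letter cases, and the script's symbol set.
POOL = string.digits + string.ascii_letters + '@#$%&?!*'

def make_password(length=12):
    # Each position is an independent, uniform draw from a CSPRNG.
    return ''.join(secrets.choice(POOL) for _ in range(length))

print(make_password())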
ivan-doro/Ventilation-Project
https://github.com/ivan-doro/Ventilation-Project
782e106ffb4ea0a659e0c7663441505b3885e92c
fae4058b073e8d715f45e2183baece960b96f2f5
689ebf28885345d44a7950763ac1670c9e22d92b
refs/heads/master
2020-05-14T09:33:21.883480
2019-04-16T18:04:02
2019-04-16T18:04:02
181,742,263
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7448349595069885, "alphanum_fraction": 0.7850818634033203, "avg_line_length": 48.05263137817383, "blob_id": "be42cee049d01f5438a935078352ba07ee4b0617", "content_id": "91a5579ef370370fe99fb14e1c202ee3eaff9777", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3727, "license_type": "no_license", "max_line_length": 104, "num_lines": 76, "path": "/Configuration.py", "repo_name": "ivan-doro/Ventilation-Project", "src_encoding": "UTF-8", "text": "# All the variables, that might be changed by user\n \n# Calibration offset values for all senaors, for Temperature and Humidity\ncalibrationTempOffset_DHT_IN = 0.0\ncalibrationRHumOffset_DHT_IN = 0.0\ncalibrationTempOffset_DHT_OUT = 0.0\ncalibrationRHumOffset_DHT_OUT = 0.0\ncalibrationTempOffset_WT_IN = 0.0\ncalibrationRHumOffset_WT_IN = 0.0\ncalibrationTempOffset_WT_OUT = 0.0\ncalibrationRHumOffset_WT_OUT = 0.0\n \n# Reasonable ranges of sensor values\nallowedMaxTemp = 40.0\nallowedMinTemp = -35.0\nallowedMaxHum = 100.0\nallowedMinHum = 0.0\n \n# Messages of possible errors in program \nerrorsMessages = []\nerrorsMessages.append(\"\") # Error#0\n# Messages of DHT Internal errors\nerrorsMessages.append(\"Error#1 Problem getting value from Internal DHT, out of oder.\")\nerrorsMessages.append(\"Error#2 Internal DHT returns NULL values, out of order.\")\nerrorsMessages.append(\"Error#3 Internal DHT calibrated temperature, out of normal range, out of order.\")\nerrorsMessages.append(\"Error#4 Internal DHT calibrated humidity, out of normal range, out of order.\")\n# Messages of DHT External errors\nerrorsMessages.append(\"Error#5 Problem getting value from External DHT, out of oder.\")\nerrorsMessages.append(\"Error#6 External DHT returns NULL values, out of order.\")\nerrorsMessages.append(\"Error#7 External DHT calibrated temperature, out of normal range, out of order.\")\nerrorsMessages.append(\"Error#8 External DHT calibrated humidity, out of normal range, out of order.\")\n# Messages of WT Internal errors\nerrorsMessages.append(\"Error#9 Problem getting value from Internal WT, out of oder.\")\nerrorsMessages.append(\"Error#10 Internal WT returns NULL values, out of order.\")\nerrorsMessages.append(\"Error#11 Internal WT calibrated temperature, out of normal range, out of order.\")\nerrorsMessages.append(\"Error#12 Internal WT calibrated humidity, out of normal range, out of order.\")\nerrorsMessages.append(\"Error#13 Internal WT values are outdated, out of order.\")\nerrorsMessages.append(\"Error#14 Problem getting time from Internal WT, out of order.\")\n# Messages of WT External errors)\nerrorsMessages.append(\"Error#15 Problem getting value from External WT, out of oder.\")\nerrorsMessages.append(\"Error#16 External WT returns NULL values, out of order.\")\nerrorsMessages.append(\"Error#17 External WT calibrated temperature, out of normal range, out of order.\")\nerrorsMessages.append(\"Error#18 External WT calibrated humidity, out of normal range, out of order.\")\nerrorsMessages.append(\"Error#19 External WT values are outdated, out of order.\")\nerrorsMessages.append(\"Error#20 Problem getting time from External WT, out of order.\")\n# Messages of errors that make ventilation control impossible\nerrorsMessages.append(\"Error#21 Both inside sensors are out of order.\")\nerrorsMessages.append(\"Error#22 Both outside sensors are out of order.\")\n#E-mail sending errors\nerrorsMessages.append(\"Error#23 Problem with e-mail sending\")\nerrorsMessages.append(\"Error#24 
Problem with critical error e-mail sending\")\n \n# Wireless Tag sensors uuids\ngarageInternalUUID = \"47d8a2d4-12c1-4b0a-859e-662527b434c0\"\nsaunaExternalUUID = \"763fa6c3-88ce-43a0-917d-37c144809147\"\n \n# Interval of sensor not respondibg that considered as sensor out of range\nWT_TimeCorrection = 2 # 2 = 2 hours\noutOfRangeTime = 14400 #14400 seconds = 4 hours\n \n# Email addresses\nemailSubject = \"Losevo ventilation robot\"\ndeveloperEmail = \"[email protected]\"\nuserDadEmail = \"[email protected]\"\nsenderEmail = \"[email protected]\"\nsenderPassword = \"wfL-Aw6-Wrg-kPo\"\n \n# Period of e-mail sending\nemailSendingInterval = 1 # 28800 = 8 hours; 1 = 1 day\n\n# Threshold - difference between internal and external absolute humidity\n# when it makes sence to ventilate\nthreshold = 1.1\n \n# Polling interval\nsensorsPollingInterval = 1800 # 1800 = 30 min" }, { "alpha_fraction": 0.6389124989509583, "alphanum_fraction": 0.6580289006233215, "avg_line_length": 36.380950927734375, "blob_id": "03b902a67bf62490b4a8a6ece5831568f7944549", "content_id": "329e25560ed7c2453f38f7abf0eb8567ccf378f3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2354, "license_type": "no_license", "max_line_length": 83, "num_lines": 63, "path": "/WT_API.py", "repo_name": "ivan-doro/Ventilation-Project", "src_encoding": "UTF-8", "text": "# Wireless Tag humidity and temperature sensors API\n \nimport requests\nimport json\nfrom decimal import Decimal\nimport Configuration as conf\nfrom datetime import datetime\nfrom datetime import timedelta\n \n_BASEURL = \"https://my.wirelesstag.net\"\n \n_SIGNIN = _BASEURL + \"/ethAccount.asmx/SignIn\"\n_ISSIGNED = _BASEURL + \"/ethAccount.asmx/IsSignedIn\"\n_GETTAGLIST = _BASEURL + \"/ethClient.asmx/GetTagList\"\n_GETTEMPDATA = _BASEURL + \"/ethLogShared.asmx/GetLatestTemperatureRawDataByUUID\"\n \n_HEADERS = { \"content-type\": \"application/json; charset=utf-8\" }\n \nroundDecimals = 5\noutOfRangeTime = conf.outOfRangeTime\n \nclass WirelessTagData:\n def getTemperature(uuid):\n data = {\"uuid\": uuid}\n r = requests.post(_GETTEMPDATA, headers=_HEADERS, data=json.dumps(data))\n parsedResponse = r.json()\n roundedTemp = round(float(parsedResponse[\"d\"][\"temp_degC\"]), roundDecimals)\n return roundedTemp\n \n def getHumidity(uuid):\n data = {\"uuid\": uuid}\n r = requests.post(_GETTEMPDATA, headers=_HEADERS, data=json.dumps(data))\n parsedResponse = r.json()\n return parsedResponse[\"d\"][\"cap\"]\n \n def getBatteryVolt(self, uuid):\n data = {\"uuid\": uuid}\n r = requests.post(_GETTEMPDATA, headers=_HEADERS, data=json.dumps(data))\n parsedResponse = r.json()\n return parsedResponse[\"d\"][\"battery_volts\"]\n \n def outOfRange(uuid):\n sensorOutOfRange = False\n data = {\"uuid\": uuid}\n r = requests.post(_GETTEMPDATA, headers=_HEADERS, data=json.dumps(data))\n parsedResponse = r.json()\n sensorTime = parsedResponse[\"d\"][\"time\"]\n year = int(sensorTime[:-21])\n month = int(sensorTime[5:-18])\n day = int(sensorTime[8:-15])\n hours = int(sensorTime[11:-12])# + conf.WT_TimeCorrection\n minutes = int(sensorTime[14:-9])\n seconds = int(sensorTime[17:-6])\n time = datetime(year, month, day, hours, minutes, seconds)\n timeCor = timedelta(hours = conf.WT_TimeCorrection)\n time = time + timeCor\n deltaT = datetime.now() - time\n if (deltaT.days != 0 or deltaT.seconds > conf.outOfRangeTime):\n sensorOutOfRange = True\n return sensorOutOfRange\n## return 
parsedResponse\n##print(WirelessTagData.outOfRange(\"763fa6c3-88ce-43a0-917d-37c144809147\"))\n##print(datetime.now().time())" }, { "alpha_fraction": 0.5815450549125671, "alphanum_fraction": 0.6244634985923767, "avg_line_length": 23.526315689086914, "blob_id": "24ceaa57952a5434a875e94bf1b6ea206558e1bd", "content_id": "2ec6fd1507d5083063b3129a250e0a04e8944fed", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 466, "license_type": "no_license", "max_line_length": 69, "num_lines": 19, "path": "/DHT22_lib.py", "repo_name": "ivan-doro/Ventilation-Project", "src_encoding": "UTF-8", "text": "# DHT22, directly connected humidity and temperature sensors, library\n \nimport sys\nimport RPi.GPIO as GPIO\nimport time\nimport Adafruit_DHT\nclass DHT22:\n def getTemp(pin):\n #t =358\n Rh,t = Adafruit_DHT.read_retry(Adafruit_DHT.DHT22, pin)\n return(t)\n \n def getHum(pin):\n #Rh = 345\n Rh,t = Adafruit_DHT.read_retry(Adafruit_DHT.DHT22, pin)\n return(Rh)\n \n \n#print('Temp={0:0.1f}*C Humidity={1:0.1f}%'.format(t, Rh))\n" }, { "alpha_fraction": 0.6308788657188416, "alphanum_fraction": 0.6337292194366455, "avg_line_length": 62.787879943847656, "blob_id": "669522c329b573a74142238922393c34deddd63b", "content_id": "7e60a2e684d88f7de946cd17a3de3474882adc7f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2105, "license_type": "no_license", "max_line_length": 206, "num_lines": 33, "path": "/EmailSender.py", "repo_name": "ivan-doro/Ventilation-Project", "src_encoding": "UTF-8", "text": "# Part responsible for sending E-mails with information\n \nimport smtplib\nimport sys\nfrom Support import Support\nimport Configuration as conf\nfrom datetime import datetime\n \nclass Send:\n def sendShortMessage(msg):\n header = \"To: \" + conf.developerEmail + \"\\n\" + \"From: \" + conf.senderEmail + \"\\n\" + \"Subject: \" + conf.emailSubject\n server = smtplib.SMTP(\"smtp.gmail.com\", 587)\n server.starttls()\n server.login(conf.senderEmail, conf.senderPassword)\n server.sendmail(conf.senderEmail, conf.developerEmail, header + \"\\n\\n\" + msg)\n## server.sendmail(conf.senderEmail, conf.userDadEmail, header + \"\\n\\n\" + msg)\n server.quit()\n def sendStatsReport(cycleCounter, lastSentTime, workingTime, RHum_IN, Temp_IN, AHum_IN, RHum_OUT, Temp_OUT, AHum_OUT, message, ventMessage):\n msg = \"Working period - from \" + Support.timeCorrection(lastSentTime) + \" till \" + Support.timeCorrection(datetime.now()) + \" (for \" + Support.timeCorrection(datetime.now() - lastSentTime) + \" )\\n\"\n msg = msg + \"During this period fan worked for \" + Support.timeCorrection(workingTime) + \"\\n\\n\"\n msg = msg + \"Inside: \\nR. humidity \" + str(RHum_IN) + \"%\" + \", Temperature \" + str(Temp_IN) + \", A. humidity \" + str(AHum_IN) + \"\\n\\n\"\n msg = msg + \"Outside: \\nR. humidity \" + str(RHum_OUT) + \"%\" + \", Temperature \" + str(Temp_OUT) + \", A. 
humidity \" + str(AHum_OUT) + \"\\n\\n\"\n msg = msg + \"During the period program has passed \" + str(cycleCounter) + \" cycles\\n\\n\"\n msg = msg + \"Data about sensors errors \\n\" + message + \"\\n\"\n msg = msg + \"Fan operation data \\n\" + ventMessage\n header = \"To: \" + conf.developerEmail + \"\\n\" + \"From: \" + conf.senderEmail + \"\\n\" + \"Subject: \" + conf.emailSubject\n# print(msg)\n server = smtplib.SMTP('smtp.gmail.com', 587)\n server.starttls()\n server.login(conf.senderEmail, conf.senderPassword)\n server.sendmail(conf.senderEmail, conf.developerEmail, header + \"\\n\\n\" + msg)\n## server.sendmail(conf.senderEmail, conf.userDadEmail, msg)\n server.quit()\n" }, { "alpha_fraction": 0.8514851331710815, "alphanum_fraction": 0.8514851331710815, "avg_line_length": 101, "blob_id": "9c5c7ef220aab62e92386a57ee352122989e8ab5", "content_id": "c6688d77208a22e1a0de1c26124cb518dd9cc95f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 101, "license_type": "no_license", "max_line_length": 101, "num_lines": 1, "path": "/README.md", "repo_name": "ivan-doro/Ventilation-Project", "src_encoding": "UTF-8", "text": "This is code raspberry robot that controlls humidity in garage to keep instruments in good conditions" }, { "alpha_fraction": 0.531870424747467, "alphanum_fraction": 0.5705329179763794, "avg_line_length": 35.846153259277344, "blob_id": "6dc2c975e8381f7a0c491e85d5342d4124f7fac5", "content_id": "4f415d365e86f789c85931e8ffcbb376f3356459", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 957, "license_type": "no_license", "max_line_length": 117, "num_lines": 26, "path": "/Support.py", "repo_name": "ivan-doro/Ventilation-Project", "src_encoding": "UTF-8", "text": "# All the support function the VentillationController programm may need\nimport Configuration as conf\nclass Support:\n #Absolute Humidity calculation\n def getAHum (temp, RHum):\n Pow = pow(2.71828, 17.67*temp/(temp+234.5))\n AHum = (6.112*Pow*RHum*2.1674)/(temp+273.15)\n AHum = round(AHum, 2)\n return(AHum)\n \n def messageFormation (errorsCounters):\n message = \"\"\n for i in range (0, len(errorsCounters)):\n if(errorsCounters[i] != 0):\n message = message + conf.errorsMessages[i] + \" Error occured \" + str(errorsCounters[i]) + \" times\\n\"\n if(message == \"\"):\n message = \"During the period all sensors functioned correctly\"\n return message \n \n def timeCorrection (time):\n time = str(time)\n time2 = \"\"\n for i in range (0, len(time)):\n if (i < len(time) - 7):\n time2 = time2 + time[i]\n return time2" }, { "alpha_fraction": 0.6134259104728699, "alphanum_fraction": 0.625, "avg_line_length": 17.826086044311523, "blob_id": "2675bea0218c48ee8ab7834978b805242ebdbac2", "content_id": "9a280a98082d9e9af64b670b6dbf7a522d1fe9e2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 432, "license_type": "no_license", "max_line_length": 37, "num_lines": 23, "path": "/Lamp_Test.py", "repo_name": "ivan-doro/Ventilation-Project", "src_encoding": "UTF-8", "text": "import RPi.GPIO as GPIO\nimport time\nfrom datetime import datetime\nimport smtplib\n\n##str = \"dghdyrhjhjr:fhdhh:zxdfhgxgh\"\n##str = str.replace(\":\", '-')\n##print(str)\n\nGPIO.setmode(GPIO.BCM)\nGPIO.setup(6, GPIO.OUT)\n\ntry:\n while True:\n GPIO.output(6, GPIO.HIGH)\n time.sleep(3)\n #GPIO.output(6, GPIO.LOW)\n time.sleep(3)\n \n \nexcept KeyboardInterrupt:\n print(\"Turning off\")\n 
GPIO.cleanup()" }, { "alpha_fraction": 0.6146603226661682, "alphanum_fraction": 0.6311898231506348, "avg_line_length": 42.71341323852539, "blob_id": "7a99e936484920ebf260ef43ccd22932937dff65", "content_id": "2ac38e7c2562e498dbbcfc65e29fe563fe85f00d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 14338, "license_type": "no_license", "max_line_length": 284, "num_lines": 328, "path": "/VentilationController.py", "repo_name": "ivan-doro/Ventilation-Project", "src_encoding": "UTF-8", "text": "# Main program that controls ventilation\nfrom EmailSender import Send\n\n# Imports from standard packages\nimport RPi.GPIO as GPIO\nimport sys\nimport os\nimport traceback\nimport logging\nimport time\nimport math\nfrom datetime import datetime\n\n# Path to Adafruit_DHT library (used in DHT22_lib)\nsys.path.insert(0, '/home/pi/.local/lib/python3.5/site-packages')\n\n# Imports from my packages\nimport Configuration as conf\nfrom EmailSender import Send\nfrom WT_API import WirelessTagData\nfrom DHT22_lib import DHT22\nfrom Support import Support\nfrom array import array\n\n# Initiation of GPIO ports numeration\nGPIO.setmode(GPIO.BCM)\n \n# Pins for directly connected humidity/temperature sensors\nDHT22_INSIDE_PIN = 10\nDHT22_OUTSIDE_PIN = 9\n \n# Pin for the ventilation fan power relay\nVent_PIN = 6\nGPIO.setup(Vent_PIN, GPIO.OUT)\n \n# Resetting sensors state\nworkingDHT_IN = True\nworkingDHT_OUT = True\nworkingWT_IN = True\nworkingWT_OUT = True\nworkingSensorsIN = True\nworkingSensorsOUT = True\n \n# Sensors error counters array\nerrorsCounters = []\nerrorsCounters.append(0) # Error#0 (not used for now)\n \n# Counters of DHT Internal errors\nerrorsCounters.append(0) # Error#1 Problem getting value from Internal DHT, out of oder\nerrorsCounters.append(0) # Error#2 Internal DHT returns NULL values, out of order \nerrorsCounters.append(0) # Error#3 Internal DHT calibrated temperature, out of normal range, out of order\nerrorsCounters.append(0) # Error#4 Internal DHT calibrated humidity, out of normal range, out of order\n \n# Counters of DHT External errors\nerrorsCounters.append(0) # Error#5 Problem getting value from External DHT, out of oder\nerrorsCounters.append(0) # Error#6 External DHT returns NULL values, out of order\nerrorsCounters.append(0) # Error#7 External DHT calibrated temperature, out of normal range, out of order\nerrorsCounters.append(0) # Error#8 External DHT calibrated humidity, out of normal range, out of order\n \n# Counters of WT Internal errors\nerrorsCounters.append(0) # Error#9 Problem contacting Internal WT server, out of oder\nerrorsCounters.append(0) # Error#10 Internal WT returns NULL values, out of order\nerrorsCounters.append(0) # Error#11 Internal WT calibrated temperature, out of normal range, out of order\nerrorsCounters.append(0) # Error#12 Internal WT calibrated humidity, out of normal range, out of order\nerrorsCounters.append(0) # Error#13 Internal WT values are outdated, out of order\nerrorsCounters.append(0) # Error#14 Problem getting time from Internal WT, out of order\n \n# Counters of WT External errors\nerrorsCounters.append(0) # Error#15 Problem contacting External WT server, out of oder\nerrorsCounters.append(0) # Error#16 External WT returns NULL values, out of order\nerrorsCounters.append(0) # Error#17 External WT calibrated temperature, out of normal range, out of order\nerrorsCounters.append(0) # Error#18 External WT calibrated humidity, out of normal range, out of order\nerrorsCounters.append(0) # 
Error#19 External WT values are outdated, out of order\nerrorsCounters.append(0) # Error#20 Problem getting time from External WT, out of order\n\n \n# Messages of errors that make ventilation control impossible\nerrorsCounters.append(0) # Error#21 Both inside sensors are out of order\nerrorsCounters.append(0) # Error#22 Both outside sensors are out of order\n\n# E-mail sending errors\nerrorsCounters.append(0) #Error#23 Problem with e-mail sending\nerrorsCounters.append(0) #Error#24 Problem with critical error e-mail sending\n \n# Fan motor state after program start\nworking = False\n \n#?vvv\nt= datetime.now() #Temporary\ntotalWorkingTime = datetime.now() - t\nstartTime = datetime.now()\n#?vvv\nlastSentTime = datetime.now()\n \n# 10 seconds fan ON test\nGPIO.output(Vent_PIN, GPIO.HIGH)\ntime.sleep(10)\nGPIO.output(Vent_PIN, GPIO.LOW)\n \n# Sending first e-mail after program start\n##try:\nSend.sendShortMessage(\"Ventilation control program started at: \" + Support.timeCorrection(datetime.now()))\n##except:\n##    errorsCounters[23] = errorsCounters[23] + 1\n\nmessage = \"\"\nventMessage = \"\"\ncycleCounter = 0\n \n# Main cycle\nwhile True:\n    try:\n        # Data from DHT22 inside sensor and its correctness\n        try:\n            Temp_DHT_IN = DHT22.getTemp(DHT22_INSIDE_PIN)\n            RHum_DHT_IN = DHT22.getHum(DHT22_INSIDE_PIN)\n            print(str(Temp_DHT_IN) + \" C; \" + str(RHum_DHT_IN) + \"% - DHT IN\")\n        except:\n            workingDHT_IN = False\n            errorsCounters[1] = errorsCounters[1] + 1\n        if (workingDHT_IN and (Temp_DHT_IN is None or RHum_DHT_IN is None)):\n            workingDHT_IN = False\n            errorsCounters[2] = errorsCounters[2] + 1\n        elif (workingDHT_IN):\n            Temp_DHT_IN = Temp_DHT_IN + conf.calibrationTempOffset_DHT_IN \n            RHum_DHT_IN = RHum_DHT_IN + conf.calibrationRHumOffset_DHT_IN\n        if (workingDHT_IN and (Temp_DHT_IN > conf.allowedMaxTemp or Temp_DHT_IN < conf.allowedMinTemp)):\n            workingDHT_IN = False\n            errorsCounters[3] = errorsCounters[3] + 1\n        if (workingDHT_IN and (RHum_DHT_IN > conf.allowedMaxHum or RHum_DHT_IN < conf.allowedMinHum)):\n            workingDHT_IN = False\n            errorsCounters[4] = errorsCounters[4] + 1\n \n        # Data from DHT22 outside sensor and its correctness\n        try:\n            Temp_DHT_OUT = DHT22.getTemp(DHT22_OUTSIDE_PIN) \n            RHum_DHT_OUT = DHT22.getHum(DHT22_OUTSIDE_PIN)\n            print(str(Temp_DHT_OUT) + \" C; \" + str(RHum_DHT_OUT) + \"% - DHT OUT\")\n        except:\n            workingDHT_OUT = False\n            errorsCounters[5] = errorsCounters[5] + 1\n        if (workingDHT_OUT and (Temp_DHT_OUT is None or RHum_DHT_OUT is None)):\n            workingDHT_OUT = False\n            errorsCounters[6] = errorsCounters[6] + 1\n        elif (workingDHT_OUT):\n            Temp_DHT_OUT = Temp_DHT_OUT + conf.calibrationTempOffset_DHT_OUT \n            RHum_DHT_OUT = RHum_DHT_OUT + conf.calibrationRHumOffset_DHT_OUT\n        if (workingDHT_OUT and (Temp_DHT_OUT > conf.allowedMaxTemp or Temp_DHT_OUT < conf.allowedMinTemp)):\n            workingDHT_OUT = False\n            errorsCounters[7] = errorsCounters[7] + 1\n        if (workingDHT_OUT and (RHum_DHT_OUT > conf.allowedMaxHum or RHum_DHT_OUT < conf.allowedMinHum)):\n            workingDHT_OUT = False\n            errorsCounters[8] = errorsCounters[8] + 1\n \n        # Data from WirelessTag inside sensor and its correctness\n        try:\n            Temp_WT_IN = WirelessTagData.getTemperature(conf.garageInternalUUID)\n            RHum_WT_IN = WirelessTagData.getHumidity(conf.garageInternalUUID)\n            print(str(Temp_WT_IN) + \" C; \" + str(RHum_WT_IN) + \"% - WT IN\")\n        except:\n            workingWT_IN = False\n            errorsCounters[9] = errorsCounters[9] + 1\n \n        if (workingWT_IN and (Temp_WT_IN is None or RHum_WT_IN is None)):\n            workingWT_IN = False\n            errorsCounters[10] = errorsCounters[10] + 1\n        elif 
(workingWT_IN):\n Temp_WT_IN = Temp_WT_IN + conf.calibrationTempOffset_WT_IN\n RHum_WT_IN = RHum_WT_IN + conf.calibrationRHumOffset_WT_IN\n if (workingWT_IN and (Temp_WT_IN > conf.allowedMaxTemp or Temp_WT_IN < conf.allowedMinTemp)):\n workingWT_IN = False\n errorsCounters[11] = errorsCounters[11] + 1\n if (workingWT_IN and (RHum_WT_IN > conf.allowedMaxHum or RHum_WT_IN < conf.allowedMinHum)):\n workingWT_IN = False\n errorsCounters[12] = errorsCounters[12] + 1\n try:\n if (workingWT_IN and WirelessTagData.outOfRange(conf.garageInternalUUID)):\n workingWT_IN = False\n errorsCounters[13] = errorsCounters[13] + 1\n except:\n workingWT_IN = False\n errorsCounters[14] = errorsCounters[14] + 1\n \n \n # Data from WirelessTag outside sensor and its correctness\n try:\n Temp_WT_OUT = WirelessTagData.getTemperature(conf.saunaExternalUUID)\n RHum_WT_OUT = WirelessTagData.getHumidity(conf.saunaExternalUUID)\n print(str(Temp_WT_OUT)+ \" C \" + str(RHum_WT_OUT) + \"% - WT OUT\")\n except:\n workingWT_OUT = False \n errorsCounters[15] = errorsCounters[15] + 1\n if (workingWT_OUT and (Temp_WT_OUT is None or RHum_WT_OUT is None)):\n workingWT_OUT = False\n errorsCounters[16] = errorsCounters[16] + 1\n elif (workingWT_OUT):\n Temp_WT_OUT = Temp_WT_OUT + conf.calibrationTempOffset_WT_OUT\n RHum_WT_OUT = RHum_WT_OUT + conf.calibrationRHumOffset_WT_OUT\n if(workingWT_OUT and (Temp_WT_OUT > conf.allowedMaxTemp or Temp_WT_OUT < conf.allowedMinTemp)):\n workingWT_OUT = False\n errorsCounters[17] = errorsCounters[17] + 1\n if (workingWT_OUT and (RHum_WT_OUT > conf.allowedMaxHum or RHum_WT_OUT < conf.allowedMinHum)):\n workingWT_OUT = False\n errorsCounters[18] = errorsCounters[18] + 1\n## try:\n if (workingWT_OUT and WirelessTagData.outOfRange(conf.saunaExternalUUID)):\n workingWT_OUT = False\n errorsCounters[19] = errorsCounters[19] + 1\n## except:\n## workingWT_OUT = False\n## errorsCounters[20] = errorsCounters[20] + 1\n \n # Defining internal sensors average data\n if ((workingDHT_IN == False and workingWT_IN == False)):\n errorsCounters[21] = errorsCounters[21] + 1\n GPIO.output(Vent_PIN, GPIO.LOW)\n workingSensorsIN = False\n RHum_IN = \"Impossible to define\"\n Temp_IN = \"Impossible to define\"\n AHum_IN = \"Impossible to define\"\n elif (workingDHT_IN == False):\n Temp_IN = round(Temp_WT_IN, 2)\n RHum_IN = round(RHum_WT_IN, 2)\n elif (workingWT_IN == False):\n Temp_IN = round(Temp_DHT_IN, 2)\n RHum_IN = round(RHum_DHT_IN, 2)\n else:\n Temp_IN = round((Temp_DHT_IN + Temp_WT_IN)/2, 2)\n RHum_IN = round((RHum_DHT_IN + RHum_WT_IN)/2, 2)\n # Defining external sensors average data \n if (workingDHT_OUT == False and workingWT_OUT == False):\n errorsCounters[22] = errorsCounters[22] + 1\n GPIO.output(Vent_PIN, GPIO.LOW)\n workingSensorsOUT = False\n RHum_OUT = \"Impossible to define\"\n Temp_OUT = \"Impossible to define\"\n AHum_OUT = \"Impossible to define\"\n elif (workingDHT_OUT == False):\n Temp_OUT = round(Temp_WT_OUT, 2)\n RHum_OUT = round(RHum_WT_OUT, 2)\n elif (workingWT_OUT == False):\n Temp_OUT = round(Temp_DHT_OUT, 2)\n RHum_OUT = round(RHum_DHT_OUT, 2)\n else:\n Temp_OUT = round((Temp_DHT_OUT + Temp_WT_OUT)/2, 2)\n RHum_OUT = round((RHum_DHT_OUT + RHum_WT_OUT)/2, 2)\n \n # Calculating absolute humidity internal and external\n if (workingSensorsIN):\n AHum_IN = Support.getAHum(Temp_IN, RHum_IN)\n if (workingSensorsOUT):\n AHum_OUT = Support.getAHum(Temp_OUT, RHum_OUT)\n \n message = Support.messageFormation(errorsCounters)\n \n # Ventilation decision\n if (workingSensorsIN and workingSensorsOUT and 
AHum_IN/AHum_OUT > conf.threshold):\n #Turning ON\n if (working == False):\n GPIO.output(Vent_PIN, GPIO.HIGH)\n working = True\n startTime = datetime.now()\n ventMessage = ventMessage + \"Ventilation turning ON at \" + Support.timeCorrection(datetime.now()) + \", with absolute humidity inside \" + str(AHum_IN) + \" and absolute humidity outside \" + str(AHum_OUT) + \"\\n\"\n \n else:\n # Turning OFF\n if (working):\n GPIO.output(Vent_PIN, GPIO.LOW)\n working = False\n workingT = datetime.now() - startTime\n ventMessage = ventMessage + \"Ventilation turning OFF at \" + Support.timeCorrection(datetime.now()) + \", after \" + Support.timeCorrection(workingT) + \" of working, with absolute humidity inside \" + str(AHum_IN) + \" and absolute humidity outside \" + str(AHum_OUT) + \"\\n\"\n totalWorkingTime = totalWorkingTime + workingT\n \n if (ventMessage == \"\"):\n ventMessage = \"Fan did not change working mode during the period\"\n cycleCounter = cycleCounter + 1 # Ventilation control of this cycle finished\n print(cycleCounter)\n## print(errorsCounters)\n # Time to send e-mail with report?\n timeFromLastEmail = datetime.now() - lastSentTime\n if (timeFromLastEmail.days >= conf.emailSendingInterval):\n if (working == True):\n totalWorkingTime = totalWorkingTime + datetime.now() - startTime\n startTime = datetime.now()\n try:\n Send.sendStatsReport(cycleCounter, lastSentTime, totalWorkingTime, RHum_IN, Temp_IN, AHum_IN, RHum_OUT, Temp_OUT, AHum_OUT, message, ventMessage)\n \n # reset of all counters\n message = \"\"\n ventMessage = \"\"\n t = datetime.now()\n totalWorkingTime = datetime.now() - t\n lastSentTime = datetime.now()\n cycleCounter = 0\n for i in range(0, len(errorsCounters)):\n errorsCounters[i] = 0\n except:\n errorsCounters[23] = errorsCounters[23] + 1\n workingDHT_IN = True\n workingDHT_OUT = True\n workingWT_IN = True\n workingWT_OUT = True\n workingSensorsIN = True\n workingSensorsOUT = True\n \n time.sleep(conf.sensorsPollingInterval)\n \n## WHEN DEBUGGING\n## v\n except KeyboardInterrupt:\n print(\"Turning off\")\n GPIO.output(Vent_PIN, GPIO.LOW)\n GPIO.cleanup()\n## ^ \n## except Exception as e:\n## print(e)\n## logging.error(traceback.format_exc())\n## print (e.__doc__)\n## try:\n## Send.sendShortMessage(\"FATAL ERROR \" + str(e) + \"\\n\" + str(e.__doc__) + \"\\nRebooting Raspberry\")\n## except:\n## errorsCounters[24] = errorsCounters[24] + 1\n## GPIO.output(Vent_PIN, GPIO.LOW)\n## GPIO.cleanup()\n## time.sleep(100)\n## print(\"reboot\")\n## #os.system(\"sudo reboot\")\n## break\n" } ]
8
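The ventilation decision above hinges on Support.getAHum(Temp, RHum); the Support module itself is not part of this snapshot. A minimal sketch of what such a helper could compute, assuming the common Magnus approximation for saturation vapour pressure (the real Support.getAHum may differ):

import math

def getAHum(temp_c, rhum_pct):
    # Hypothetical stand-in for Support.getAHum: absolute humidity in g/m^3.
    # Saturation vapour pressure in hPa via the Magnus approximation (assumed).
    svp = 6.112 * math.exp(17.67 * temp_c / (temp_c + 243.5))
    vap = svp * rhum_pct / 100.0  # actual vapour pressure in hPa
    return 216.7 * vap / (273.15 + temp_c)

With a helper of this form, the AHum_IN/AHum_OUT > conf.threshold test compares grams of water per cubic metre of air rather than relative humidity, which is what makes the inside/outside comparison meaningful across different temperatures.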
BraunAvi/wos
https://github.com/BraunAvi/wos
0f36fd68fcc75fd0e87f4dd03aac139b964cfe52
530db559c75d8b9a8522bd953c26d2117e5046ae
d1828c1a602f4ebe28acfccad5fa8b092a273a06
refs/heads/master
2021-01-19T03:48:42.604530
2017-04-06T12:06:57
2017-04-06T12:06:57
87,334,225
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.8314606547355652, "alphanum_fraction": 0.8314606547355652, "avg_line_length": 43.5, "blob_id": "6d2aac1cf99029ac53ddc2972ffc1f6292b3af79", "content_id": "9e66750929f6ae296593564c0502d5194c9be46b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 89, "license_type": "no_license", "max_line_length": 73, "num_lines": 2, "path": "/README.md", "repo_name": "BraunAvi/wos", "src_encoding": "UTF-8", "text": "# WOS_Scraping\nThis project scraping WOS for an independent analysis of citation indexes\n" }, { "alpha_fraction": 0.7530562281608582, "alphanum_fraction": 0.7530562281608582, "avg_line_length": 33.16666793823242, "blob_id": "8ea9db9321795e09d964ceaa4ab12e6e3c04083e", "content_id": "dcb362c9b481c0d2b8341268636160270f80d319", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 409, "license_type": "no_license", "max_line_length": 69, "num_lines": 12, "path": "/testing.py", "repo_name": "BraunAvi/wos", "src_encoding": "UTF-8", "text": "from WebPage import WOSResultsPage, WebPage\nWOSPage=WOSResultsPage('')\nfileName=\"fullText.txt\" # utility text file; saving raw data\nWOSPage.collectdata(fileName=fileName) # collect data into data frame\nprint WOSPage.df.__sizeof__()\nprint WOSPage.df.head()\nprint WOSPage.df.tail()\nfileNameW='DF.txt'\nWOSPage.df.to_csv(fileNameW, sep='\\t')\n# dffile=open('DF.txt','w')\n# dffile.write(WOSPage.df)\n# dffile.close()" }, { "alpha_fraction": 0.6864244937896729, "alphanum_fraction": 0.7007648348808289, "avg_line_length": 37.01818084716797, "blob_id": "00468bd1c413cd51bf148e73c34fee2316564cb6", "content_id": "7a1fadd3e12456c205cca50354c58d7adb1713b6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2092, "license_type": "no_license", "max_line_length": 162, "num_lines": 55, "path": "/openWebPage_i.py", "repo_name": "BraunAvi/wos", "src_encoding": "UTF-8", "text": "# Search parameters:\n# (WC=physics) AND LANGUAGE: (English) AND DOCUMENT TYPES: (Article) Indexes=SCI-EXPANDED Timespan=1970\nfrom selenium import webdriver\nfrom selenium.webdriver.firefox.firefox_binary import FirefoxBinary\nfrom selenium.webdriver.common.keys import Keys\nfrom time import sleep\n\n# resource: https://automatetheboringstuff.com/chapter11/\n\n\ndef openpageQuery(url='http://google.com', page=1,browser=webdriver.Firefox(firefox_binary=FirefoxBinary(r'C:\\Program Files (x86)\\Mozilla Firefox\\firefox.exe'))):\n \"\"\"\n Open a given and returns its webdriver handle\n :param url:\n :param page:\n :param browser:\n :return: browser\n \"\"\"\n\n # pageN=page\n url_org=url\n # a new url url has to be acquire for new search\n # url_org=url_org.replace(\"page=1\", \"page=\"+str(pageN),1)\n #open Web of knowledge\n binary = FirefoxBinary(r'C:\\Program Files (x86)\\Mozilla Firefox\\firefox.exe')\n browser = webdriver.Firefox(firefox_binary=binary)\n browser.get(url_org)\n sleep(2)\n print 'loaded Query '\n return browser\n\n\ndef openpagePapers(firstPaper=1,lastPaper=10,browser=webdriver.Firefox(firefox_binary=FirefoxBinary(r'C:\\Program Files (x86)\\Mozilla Firefox\\firefox.exe'))):\n \"\"\"\n 'print' to page a batch of papers; usually 500, but can be less for last page\n returns the url of the 5000-papers print\n \"\"\"\n # open print form and print to screen\n elem = browser.find_element_by_name('formatForPrint') # Find the search box\n elem.send_keys('' + Keys.RETURN)\n elem = 
browser.find_element_by_name('fields_selection') # Find the search box\n elem.send_keys('f' + Keys.RETURN)\n elem = browser.find_element_by_id('markFrom') # Find the search box\n elem.click()\n elem.send_keys(firstPaper)\n elem = browser.find_element_by_id('markTo') # Find the search box\n elem.click()\n elem.send_keys(lastPaper)\n elem.send_keys(Keys.ENTER)\n sleep(0.5)\n browser.switch_to.window(browser.window_handles[1])\n sleep(7)\n url_to_scrape = browser.current_url\n # print url_to_scrape\n return url_to_scrape\n\n" }, { "alpha_fraction": 0.6625235676765442, "alphanum_fraction": 0.6986817121505737, "avg_line_length": 41.11111068725586, "blob_id": "a6dbb97afceacd88374fea5d35e01a3d96423c37", "content_id": "c9b641f1cf7ae9eb73980415a2de7aadb9ba3de6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2655, "license_type": "no_license", "max_line_length": 157, "num_lines": 63, "path": "/mainFile_V2.py", "repo_name": "BraunAvi/wos", "src_encoding": "UTF-8", "text": "from openWebPage_i import openpageQuery, openpagePapers\nfrom BS_Quesry_V2 import getValuesfromPrint\nfrom time import sleep\nimport pandas as pd\nimport time\nimport math\n\n\n####### Initialize data scraping: ##########\nurl='https://apps.webofknowledge.com/summary.do?product=WOS&doc=1&qid=1&SID=Q2O3LWWGZpqKLmLqHEw&search_mode=AdvancedSearch&update_back2search_link_param=yes'\n\ntotal_N_Papers=30799 # manually taken from query\nfirst500page=1\n\nprint 'total_N_Papers is', total_N_Papers\ntotal_N_Pages=total_N_Papers/500 # maximum 500 papers can be printed in one go\nprint 'total_Number of Pages WITH 500 papers is',total_N_Pages\nPapersInLastPage=total_N_Papers-total_N_Pages*500\nprint 'number of papers in last page is:', PapersInLastPage\nstart = time.time()\n\nbrowser=openpageQuery(url=url) # open the WOS search URL; return the 'webdriver' handle\nprint(time.time() - start)\n\ndb=pd.DataFrame([])\n# We can 'print' the results in batched of 500, and view them in pages of 50 resutls.\n#\nfor p in range(first500page,total_N_Pages+2): # collecting data from all pages500 , but not from the last one\n print p, 'x 500'\n firstPaper=(1+(p-1)*500)\n print 'first paper number is: ', firstPaper\n lastPaper=(500+(p-1)*500)\n if p == total_N_Pages + 1:\n lastPaper = PapersInLastPage + (p - 1) * 500\n print 'last paper number is: ', lastPaper\n browser.switch_to.window(browser.window_handles[0]) # move to next 50\n\n url_to_scrape = openpagePapers(browser=browser, firstPaper=firstPaper, lastPaper=lastPaper) # \\\n # 'prints' a summary of 500 papers; returns the url of the print page\n\n if len(url_to_scrape[:])<20: # unsuccessful reading of url\n print 'collecting url again'\n sleep(5)\n url_to_scrape = openpagePapers(browser=browser, firstPaper=firstPaper, lastPaper=lastPaper) # open 500 papers\n\n total_N50_Pages=int(math.ceil((lastPaper-firstPaper)/50)+1)# number of 50papers pages in the 500papers page\n for i in range(2,total_N50_Pages+1):\n papers_df=getValuesfromPrint(url_to_scrape) # get data from 50 papers\n db = db.append(papers_df)\n elem = browser.find_element_by_link_text(str(i)) # move to next 50\n elem.click()\n print 'switching to:', p, ' ' , i, 'x 50s'\n sleep(5)\n browser.switch_to.window(browser.window_handles[1]) # move to next 50\n url_to_scrape = browser.current_url\n papers_df=getValuesfromPrint(url_to_scrape) # save to DB the last 5- papers\n db = db.append(papers_df)\n # print 'db:', db\n db.to_csv('db.csv')\n browser.close()\n # print papers_df\nprint 'loaded 
all', str(total_N_Papers)\nprint(time.time() - start)\n\n\n" }, { "alpha_fraction": 0.6032042503356934, "alphanum_fraction": 0.6198931932449341, "avg_line_length": 40.60555648803711, "blob_id": "600925b735cb4959d739212bf0fe85333a1e1dda", "content_id": "7cbdccf68f710e6517447de1b925f490c52c6ba3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7490, "license_type": "no_license", "max_line_length": 129, "num_lines": 180, "path": "/WebPage.py", "repo_name": "BraunAvi/wos", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver.firefox.firefox_binary import FirefoxBinary\nfrom bs4 import BeautifulSoup\nimport requests\nfrom selenium.webdriver.common.keys import Keys\nfrom time import sleep, time\nimport math as m\n\n\nclass WebPage(object):\n \"\"\" Thie class represent WebPage objects\n\n Attributes:\n url: url of WebPage instance\n browser: the browser handle (if using Selenium for browsing)\n fulltext: the full text ead by BS\n Methods:\n Openurl: open the current url\n urlreadBS: read the text of the url using BS\n\n \"\"\"\n def __init__(self, url='http://google.com', browser=''):\n self.url = url\n self.browser = browser\n self.fulltext = []\n\n def open_url(self):\n \"\"\"\n Open a WebPage using Selenium library given and returns its webdriver handle\n return: browser\n \"\"\"\n binary = FirefoxBinary(r'C:\\Program Files (x86)\\Mozilla Firefox\\firefox.exe')\n self.browser = webdriver.Firefox(firefox_binary=binary)\n self.browser.get(self.url)\n # print 'loaded Selenium page:', self.url\n # return self.browser\n\n def read_url_BS4(self,url=''):\n \"\"\"\n Open a WebPage using BS4 library given and returns its webdriver handle\n # return: self.fulltext (full text from url(\n \"\"\"\n fulltext=''\n try: r = requests.get(url)\n except:\n print \"couldn't find URL:\", url\n\n soup = BeautifulSoup(r.text,\"lxml\")\n for string in soup.stripped_strings:\n # fulltext+=repr(string)\n fulltext+=(string+'\\n')\n self.fulltext=fulltext\n print 'loaded BS page:', url\n return self.fulltext\n\n\nclass WOSResultsPage(WebPage):\n \"\"\"\n This is a WebPage class used specificly for the WOS results webpage\n The 'PrintPapers' method 'prints' (navigates to) the papers page and return the url of te 500Papers web-page\n \"\"\"\n def __init__(self, url='http://google.com', browser=''):\n self.url = url\n self.browser = browser\n self.fulltext = []\n self.url2scrape = ''\n self.df = []\n\n def PrintX50Papers(self, firstPaper=1, lastPaper=500,url=''):\n \"\"\"\n 'print' to page a batch of papers; usually 500, but can be less for last page\n returns the url of the 5000-papers print\n \"\"\"\n s_time=time()\n browser=self.browser\n old_url = self.browser.current_url\n # open print form and print to screen\n elem = browser.find_element_by_name('formatForPrint') # Find the search box\n elem.send_keys('' + Keys.RETURN)\n elem = browser.find_element_by_name('fields_selection') # Find the search box\n elem.send_keys('f' + Keys.RETURN)\n elem = browser.find_element_by_id('markFrom') # Find the search box\n elem.click()\n elem.send_keys(firstPaper)\n elem = browser.find_element_by_id('markTo') # Find the search box\n elem.click()\n elem.send_keys(lastPaper)\n elem.send_keys(Keys.ENTER)\n self.url2scrape = browser.current_url\n # print 'url before handle switch is:',self.url2scrape\n # print 'self.url:', self.url\n browser.switch_to.window(browser.window_handles[1]) #handle 1 is the 50 papers results\n print 'waiting for 
url...',\n while self.url == self.url2scrape or self.url2scrape=='about:blank':\n sleep(0.5)\n self.url2scrape = self.browser.current_url\n # print 'url during handle switch is:', self.url2scrape, len(self.url2scrape)\n sleep(0.2)\n print 'waited for url_50x ',time()-s_time ,'sec'\n self.url2scrape = browser.current_url\n # print 'url after handle switch is:', self.url2scrape\n # print self.url==self.url2scrape\n self.url2scrape = browser.current_url\n # print 'url_to_scrape:',self.url2scrape\n return self.url2scrape\n\n def GoTo50Papers(self, firstPaper=1, lastPaper=500,pageNumber=1):\n \"\"\"\n 'naviagte' to a 50=papers web=page returns the url of the 5000-papers print\n \"\"\"\n old_url=self.url2scrape\n elem = self.browser.find_element_by_link_text(str(pageNumber)) # move to next 50\n elem.click()\n # print 'switching to:', 'page', ' ' , pageNumber, 'x 50s'\n self.url2scrape = self.browser.current_url\n s_time=time()\n while self.url2scrape ==old_url:\n sleep(0.5)\n self.url2scrape = self.browser.current_url\n print 'waited for url _',time()-s_time ,'sec'\n return self.url2scrape\n\n\n\n def parse_data(self,fileName):\n \"\"\"\n This function collect the data from the 'fooltext' variable and organize it in a df array\n :return: df # dataframe with the collected data\n \"\"\"\n import pandas as pd\n attributes = ['Saving records','Title:', 'Author(s):', 'Source:', 'Volume:', 'Issue:', 'Pages:', 'Published:',\n 'Times Cited in Web of Science Core Collection:', 'Cited Reference Count:', 'Accession Number:']\n df = pd.DataFrame(columns=attributes)\n counter = 0\n flag=0\n\n f=open(fileName,'r')\n # f=fileName\n for line in f:\n # print line\n for s in attributes: # check if 'save flag' is 1; if 1 -> save the data at the relevant column\n if flag == s:\n df.set_value(counter, flag, line[0:-1])\n flag = line[0:-1] if (line in s for s in attributes) else flag # check if line is a relevant attribute.\n if flag == 'Title:':\n counter += 1 # increment paper counter\n # rename columns:\n attributes_nl = [s[0:-1] for s in attributes]\n df.columns = attributes_nl\n df = df.rename(columns={'Times Cited in Web of Science Core Collection': 'Times Cited'})\n self.df = df\n return self.df\n\ndef collect_data_from_print(WOSPage,text_file_h,firstPaper=1,lastPaper=500):\n \"\"\"\n The 'collect_data_from_print function' saves the raw text of the papers\n :param firstPaper: number of first paper\n :param lastPaper: number of last paper.\n :return: ()\n \"\"\"\n # print 'first paper number is: ', firstPaper\n # print 'last paper number is: ', lastPaper\n\n no_of_50X_pages=int(m.ceil((lastPaper-firstPaper+1)/50.)) # high range of number of pages\n print 'no_of_50X_pages=',no_of_50X_pages\n WOSPage.PrintX50Papers(firstPaper=firstPaper,lastPaper=lastPaper,url='') # print X papers; and return the URL of the new page\n for i in range (2,no_of_50X_pages+1): #start at 2 since the first page is already loaded\n print i-1,\n WOSPage.read_url_BS4(WOSPage.url2scrape) # print the text fo the 50-papers webpage; return self.fulltext\n sleep(1)\n text_file_h.write('Saving records\\n' + str(int(firstPaper + (i - 2) * 50)) + '\\n')\n text_file_h.write(WOSPage.fulltext.encode('utf-8')) # extract the data and save to file\n WOSPage.GoTo50Papers(pageNumber=i) # go to page i with 50 papers in it\n WOSPage.read_url_BS4(WOSPage.url2scrape) # print the text fo the 50-papers webpage; return self.fulltext\n sleep(1)\n try:text_file_h.write('Saving records\\n' + str(int(firstPaper + (i - 1) * 50)) + '\\n')\n # fails for last partial 
page with less than 50 entries\n # TODO: fix that\n except: pass\n text_file_h.write(WOSPage.fulltext.encode('utf-8')) # extract the data and save to file\n\n" }, { "alpha_fraction": 0.5208651423454285, "alphanum_fraction": 0.5282442569732666, "avg_line_length": 37.135921478271484, "blob_id": "ce2d735e08e82b5ab7269a1b03c571f2fb2e529f", "content_id": "0542fb9d2fd57f9bde4f4a1bcb12f3046f83cf0d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3930, "license_type": "no_license", "max_line_length": 132, "num_lines": 103, "path": "/BS_Quesry_V2.py", "repo_name": "BraunAvi/wos", "src_encoding": "UTF-8", "text": "# https://www.crummy.com/software/BeautifulSoup/bs3/documentation.html#Navigating the Parse Tree\n\ndef getValuesfromPrint(url='http://google.com'):\n import requests\n from bs4 import BeautifulSoup\n import pandas as pd\n import re\n from time import sleep\n\n url_to_scrape=url\n print \"URL to Scrap:\", url_to_scrape\n\n try:\n r = requests.get(url_to_scrape)\n except:\n sleep(5)\n r = requests.get(url_to_scrape)\n\n soup = BeautifulSoup(r.text,\"lxml\")\n\n tables = soup.findChildren('table',attrs={'bordercolor': None, 'class': None}) # ignore inner tables of 'funding'\n # print url_to_scrape\n print \"tabls:\", len(tables)\n tables=tables[2:-2] # clean irrelevant tables\n # print \"relevant tables:\", len(tables)\n\n papers_df = pd.DataFrame([])\n count=0 # counter for papers in page\n\n boldsList = [] # list for collecting 'bold' items (authors)\n for bold_tag in soup.find_all('b', text=re.compile('Author')):\n boldsList.append(bold_tag.next_sibling)\n Authors=([(boldsList[i][1:]) for i in range(0,len(boldsList))])\n print Authors\n print ('No, of Authors=',len(Authors))\n\n AuthList=[]\n for jj in range(0,len(tables)):\n flagAuthor=0 # set to 1 if authors have been already found (solve 'group authors' conflict\n papers_attr = []\n my_table = tables[jj]\n rows = my_table.findChildren(['tr'])\n for row in rows:\n cellsV = row.findChildren('value')\n cellsB = row.findChildren('b')\n founddata = [0]\n # print cellsB\n if len(cellsB)==1:\n for cell in cellsB:\n value=cell.string\n print value\n if value == 'Author(s):':\n print 'Found author'\n # commentSoup = BeautifulSoup(value,'lxml')\n # comment = commentSoup.find(text=re.compile(\"nice\"))\n # AuthrsValue='r'\n else:\n if (value == 'Group Author(s)') and flagAuthor==0:\n AuthrsValue = 's'\n\n if len(cellsB)>2: # build indexes list for available attributes\n founddata = []\n for cell in cellsB:\n value = cell.string\n if value=='Source:':\n founddata.append(0)\n if value=='Volume:':\n founddata.append(1)\n if value=='Issue:':\n founddata.append(2)\n if value=='Pages:':\n founddata.append(3)\n if value == 'DOI:':\n founddata.append(4)\n if 'Publi' in value:\n founddata.append(5)\n\n sublist=[]\n for cell in cellsV:\n value = cell.string\n sublist.append(value) # value of attributes found for THIS paper\n # print sublist\n # print len(cellsV) # up tp 6 for journal information ; 1 for all others\n journalInfoList = sublist\n if len(cellsV)>1:\n journalInfoList=['']*6\n try:\n map(journalInfoList.__setitem__, founddata, sublist)# map the avaliable attributes to the correct position in DB\n except:\n print founddata\n # print journalInfoList\n [papers_attr.append(i) for i in journalInfoList]\n papers_df[count] = pd.Series(papers_attr[0:10])\n count += 1\n papers_df=papers_df.T\n 
papers_df.columns=['Title','Journal','Volume','Issue','Pages','DOI','Published','Cited_Web','Cited','Citing']\n print ('Len authors=',len(Authors))\n #\n print len (Authors)\n papers_df['Authors']=Authors\n print len(papers_df['Authors'])\n # print papers_df\n return papers_df\n\n\n" }, { "alpha_fraction": 0.6388888955116272, "alphanum_fraction": 0.670634925365448, "avg_line_length": 50.25423812866211, "blob_id": "0cbab2a0dab2d2fbd2917f1f4ba6d6cdf1d606f4", "content_id": "7b18ed5169035d448947661597c4c35f82602570", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3024, "license_type": "no_license", "max_line_length": 159, "num_lines": 59, "path": "/mainFile_years.py", "repo_name": "BraunAvi/wos", "src_encoding": "UTF-8", "text": "from WebPage import *\nimport math as m\nimport time\nimport os\n\nN = 500 # number of papers in batch (500 for run; 50, or ,ore for testting )\nyears=[1971]\ntotal_N_Papers_years = [26421, 26832,25759,26558]\n# 27462\nurlsWOS =[\n 'https://apps.webofknowledge.com/summary.do?product=WOS&doc=1&qid=1&SID=T2N1dyBPOh3OszCV2wi&search_mode=AdvancedSearch&update_back2search_link_param=yes',\n 'https://apps.webofknowledge.com/summary.do?product=WOS&doc=1&qid=3&SID=T2DnHQuMjC1akZGnO63&search_mode=AdvancedSearch&update_back2search_link_param=yes',\n 'https://apps.webofknowledge.com/summary.do?product=WOS&doc=1&qid=5&SID=T2DnHQuMjC1akZGnO63&search_mode=AdvancedSearch&update_back2search_link_param=yes',\n 'https://apps.webofknowledge.com/summary.do?product=WOS&doc=1&qid=7&SID=T2DnHQuMjC1akZGnO63&search_mode=AdvancedSearch&update_back2search_link_param=yes5',\n ]\n\n\n\nfor ind, year in enumerate(years):\n txtfileName = \"fullText\"+str(year)+\".txt\" # utility text file; saving raw data\n text_file_h = open(txtfileName, \"w+\")\n urlWOS=urlsWOS[ind]\n total_N_Papers=total_N_Papers_years[ind]\n # manually taken from query\n print 'Year:', year, '; total_N_Papers is', total_N_Papers\n total_N_Pages = int(m.ceil(total_N_Papers / (N))) # maximum 500 papers can be printed in one go\n print 'total_Number of Pages WITH' , N, 'papers is', total_N_Pages\n PapersInLastPage = total_N_Papers - total_N_Pages * N\n print 'number of papers in last page is:', PapersInLastPage\n start = time.time()\n\n WOSPage = WOSResultsPage(urlWOS) # initiate WOS page class on the search url\n WOSPage.open_url() # open the url of the main-results file\n\n for p in range(1, total_N_Pages + 1): # collecting data from all pages500 , but not from the last one\n print p, 'x ', N\n firstPaper = (1 + (p - 1) * N)\n print 'first paper number is: ', firstPaper\n lastPaper = (N + (p - 1) * N)\n if p == total_N_Pages + 1:\n lastPaper = PapersInLastPage + (p - 1) * N\n print 'last paper number is: ', lastPaper\n collect_data_from_print(WOSPage, text_file_h, firstPaper,\n lastPaper=firstPaper + N - 1) # collect 500 papers to txt file\n WOSPage.browser.switch_to.window(WOSPage.browser.window_handles[0]) # move back to main search window\n if p==(total_N_Pages) and PapersInLastPage>0:\n print 'another', PapersInLastPage, \"and we're done...\"\n collect_data_from_print(WOSPage, text_file_h, firstPaper=firstPaper+N,\n lastPaper=firstPaper + N + PapersInLastPage - 1) # collect 500 papers to txt file\n os.chdir('C:\\\\Users\\\\abraun\\\\Box Sync\\\\Projects\\\\SciPapers\\\\WOS_Scraping')\n time.sleep(20)\n # time.sleep(4) # allowing time for writing\n WOSPage.parse_data(fileName=txtfileName) # collect data into data frame\n time.sleep(10)\n 
WOSPage.df.to_csv('parsed'+str(year)+'.txt')\n time.sleep(10)\n text_file_h.close()\n\n print (time.time() - start)\n" }, { "alpha_fraction": 0.6531078815460205, "alphanum_fraction": 0.6809871792793274, "avg_line_length": 47.07692337036133, "blob_id": "5d0f01df3a4c2221dc3a04c5b790b60f3eb4b680", "content_id": "679a84645808468d7d8335a4b65a748c167ae83b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4376, "license_type": "no_license", "max_line_length": 163, "num_lines": 91, "path": "/mainFile.py", "repo_name": "BraunAvi/wos", "src_encoding": "UTF-8", "text": "from WebPage import *\nimport math as m\nimport time\nimport os\n\n\ntxtfileName = \"fullText1977.txt\" # utility text file; saving raw data\ntext_file_h = open(txtfileName, \"w+\")\nN = 500 # number of papers in batch (500 for run; 50, or ,ore for testting )\n\nurlWOS = 'https://apps.webofknowledge.com/summary.do?product=WOS&doc=1&qid=2&SID=S2WJUa48YiBqAW1tipW&search_mode=AdvancedSearch&update_back2search_link_param=yes'\n\ntotal_N_Papers = 30043\n# manually taken from query\nprint 'total_N_Papers is', total_N_Papers\ntotal_N_Pages = int(m.ceil(total_N_Papers / (N))) # maximum 500 papers can be printed in one go\nprint 'total_Number of Pages WITH' , N, 'papers is', total_N_Pages\nPapersInLastPage = total_N_Papers - total_N_Pages * N\nprint 'number of papers in last page is:', PapersInLastPage\nstart = time.time()\n\nWOSPage = WOSResultsPage(urlWOS) # initiate WOS page class on the search url\nWOSPage.open_url() # open the url of the main-results file\n\nfor p in range(1, total_N_Pages + 1): # collecting data from all pages500 , but not from the last one\n print p, 'x ', N\n firstPaper = (1 + (p - 1) * N)\n print 'first paper number is: ', firstPaper\n lastPaper = (N + (p - 1) * N)\n if p == total_N_Pages + 1:\n lastPaper = PapersInLastPage + (p - 1) * N\n print 'last paper number is: ', lastPaper\n collect_data_from_print(WOSPage, text_file_h, firstPaper,\n lastPaper=firstPaper + N - 1) # collect 500 papers to txt file\n WOSPage.browser.switch_to.window(WOSPage.browser.window_handles[0]) # move back to main search window\n if p==(total_N_Pages) and PapersInLastPage>0:\n print 'another', PapersInLastPage, \"and we're done...\"\n collect_data_from_print(WOSPage, text_file_h, firstPaper=firstPaper+N,\n lastPaper=firstPaper + N + PapersInLastPage - 1) # collect 500 papers to txt file\nos.chdir('C:\\\\Users\\\\abraun\\\\Box Sync\\\\Projects\\\\SciPapers\\\\WOS_Scraping')\ntime.sleep(20)\n# time.sleep(4) # allowing time for writing\nWOSPage.parse_data(fileName=txtfileName) # collect data into data frame\ntime.sleep(10)\nWOSPage.df.to_csv('parsed1977.txt')\ntime.sleep(10)\ntext_file_h.close()\n\nprint (time.time() - start)\n\n# --------------------- 1978 ---------------------------*\ntxtfileName = \"fullText1978.txt\" # utility text file; saving raw data\ntext_file_h = open(txtfileName, \"w+\")\nN = 500 # number of papers in batch (500 for run; 50, or ,ore for testting )\n\nurlWOS ='https://apps.webofknowledge.com/summary.do?product=WOS&doc=1&qid=125&SID=S2WJUa48YiBqAW1tipW&search_mode=AdvancedSearch&update_back2search_link_param=yes'\ntotal_N_Papers = 30798\n# manually taken from query\nprint 'total_N_Papers is', total_N_Papers\ntotal_N_Pages = int(m.ceil(total_N_Papers / (N))) # maximum 500 papers can be printed in one go\nprint 'total_Number of Pages WITH' , N, 'papers is', total_N_Pages\nPapersInLastPage = total_N_Papers - total_N_Pages * N\nprint 'number of papers in last page 
is:', PapersInLastPage\nstart = time.time()\n\nWOSPage = WOSResultsPage(urlWOS) # initiate WOS page class on the search url\nWOSPage.open_url() # open the url of the main-results file\n\nfor p in range(1, total_N_Pages + 1): # collecting data from all pages500 , but not from the last one\n print p, 'x ', N\n firstPaper = (1 + (p - 1) * N)\n print 'first paper number is: ', firstPaper\n lastPaper = (N + (p - 1) * N)\n if p == total_N_Pages + 1:\n lastPaper = PapersInLastPage + (p - 1) * N\n print 'last paper number is: ', lastPaper\n collect_data_from_print(WOSPage, text_file_h, firstPaper,\n lastPaper=firstPaper + N - 1) # collect 500 papers to txt file\n WOSPage.browser.switch_to.window(WOSPage.browser.window_handles[0]) # move back to main search window\n if p==(total_N_Pages) and PapersInLastPage>0:\n print 'another', PapersInLastPage, \"and we're done...\"\n collect_data_from_print(WOSPage, text_file_h, firstPaper=firstPaper+N,\n lastPaper=firstPaper +N+ PapersInLastPage - 1) # collect 500 papers to txt file\nos.chdir('C:\\\\Users\\\\abraun\\\\Box Sync\\\\Projects\\\\SciPapers\\\\WOS_Scraping')\ntime.sleep(20)\n# time.sleep(4) # allowing time for writing\nWOSPage.parse_data(fileName=txtfileName) # collect data into data frame\ntime.sleep(10)\nWOSPage.df.to_csv('parsed1978.txt')\ntime.sleep(10)\ntext_file_h.close()\n\n" } ]
8
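Both PrintX50Papers and GoTo50Papers in WebPage.py above busy-wait on browser.current_url with no upper bound, so a page that never loads hangs the scrape. A hedged consolidation sketch (not code from the repository) of a single timeout-guarded helper they could share:

from time import sleep, time

def wait_for_url_change(browser, old_url, timeout=30, poll=0.5):
    # Poll the Selenium driver until the page URL differs from old_url;
    # unlike the loops above, give up after `timeout` seconds.
    t0 = time()
    while browser.current_url == old_url or browser.current_url == 'about:blank':
        if time() - t0 > timeout:
            raise RuntimeError('URL did not change within %s s' % timeout)
        sleep(poll)
    return browser.current_url

Routing both wait loops through one helper would also make the retry logic in mainFile_V2.py (the len(url_to_scrape) < 20 check) unnecessary in most cases, since a failed navigation would surface as an exception instead of a short URL.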
kjcharles-98/Analysing-the-Implications-of-Adversarial-Training-for-the-Robustness-of-Models-in-NLP
https://github.com/kjcharles-98/Analysing-the-Implications-of-Adversarial-Training-for-the-Robustness-of-Models-in-NLP
46476a56093918ff1075fa08ea4d1f705781bfc3
76e791f4da95fff73c67f659a2a7370d1b953cbf
78616ae24152b281087f956a233db88d6d7c2221
refs/heads/main
2023-07-30T18:03:48.314213
2021-09-27T08:28:32
2021-09-27T08:28:32
410,796,211
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5025423765182495, "alphanum_fraction": 0.7514830231666565, "avg_line_length": 24.376344680786133, "blob_id": "598ef2f0e425780aef8bf5ac5c6d3df3ccde9c27", "content_id": "624da0281db87b788e5f16191ae44f860f488014", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 4720, "license_type": "permissive", "max_line_length": 55, "num_lines": 186, "path": "/freelb/requirements.txt", "repo_name": "kjcharles-98/Analysing-the-Implications-of-Adversarial-Training-for-the-Robustness-of-Models-in-NLP", "src_encoding": "UTF-8", "text": "# This file may be used to create an environment using:\n# $ conda create --name <env> --file <this file>\n# platform: linux-64\n_libgcc_mutex=0.1=main\n_openmp_mutex=4.5=1_gnu\nabsl-py=0.13.0=pypi_0\napex=0.1=pypi_0\nastunparse=1.6.3=pypi_0\nattrs=21.2.0=pypi_0\nbert-score=0.3.10=pypi_0\nblas=1.0=mkl\nboto3=1.18.16=pypi_0\nbotocore=1.21.16=pypi_0\nbpemb=0.3.3=pypi_0\nbzip2=1.0.8=h7f98852_4\nca-certificates=2021.5.30=ha878542_0\ncachetools=4.2.2=pypi_0\ncertifi=2021.5.30=py38h06a4308_0\ncffi=1.14.6=py38ha65f79e_0\ncharset-normalizer=2.0.4=pypi_0\nclick=8.0.1=pypi_0\ncloudpickle=1.6.0=pypi_0\ncomet-ml=3.14.0=pypi_0\nconfigobj=5.0.6=pypi_0\ncudatoolkit=10.2.89=h8f6ccaa_8\ncycler=0.10.0=pypi_0\ndatasets=1.11.0=pypi_0\ndeprecated=1.2.12=pypi_0\ndill=0.3.4=pypi_0\ndocopt=0.6.2=pypi_0\ndulwich=0.20.24=pypi_0\neditdistance=0.5.3=pypi_0\neverett=2.0.0=pypi_0\nffmpeg=4.3=hf484d3e_0\nfilelock=3.0.12=pypi_0\nflair=0.8.0.post1=pypi_0\nflatbuffers=1.12=pypi_0\nfreetype=2.10.4=h0708190_1\nfsspec=2021.7.0=pypi_0\nftfy=6.0.3=pypi_0\nfuture=0.18.2=py38h578d9bd_3\ngast=0.4.0=pypi_0\ngdown=3.12.2=pypi_0\ngensim=3.8.3=pypi_0\ngmp=6.2.1=h58526e2_0\ngnutls=3.6.13=h85f3911_1\ngoogle-auth=1.34.0=pypi_0\ngoogle-auth-oauthlib=0.4.5=pypi_0\ngoogle-pasta=0.2.0=pypi_0\ngrpcio=1.34.1=pypi_0\nh5py=3.1.0=pypi_0\nhuggingface-hub=0.0.12=pypi_0\nhyperopt=0.2.5=pypi_0\nidna=3.2=pypi_0\nimportlib-metadata=3.10.1=pypi_0\nintel-openmp=2021.3.0=h06a4308_3350\njanome=0.4.1=pypi_0\njmespath=0.10.0=pypi_0\njoblib=1.0.1=pyhd8ed1ab_0\njpeg=9b=h024ee3a_2\njsonschema=3.2.0=pypi_0\nkeras-nightly=2.5.0.dev2021032900=pypi_0\nkeras-preprocessing=1.1.2=pypi_0\nkiwisolver=1.3.1=pypi_0\nkonoha=4.6.5=pypi_0\nlame=3.100=h7f98852_1001\nlangdetect=1.0.9=pypi_0\nlanguage-tool-python=2.5.5=pypi_0\nlcms2=2.12=h3be6417_0\nld_impl_linux-64=2.36.1=hea4e1c9_2\nlemminflect=0.2.2=pypi_0\nlibblas=3.9.0=11_linux64_mkl\nlibcblas=3.9.0=11_linux64_mkl\nlibffi=3.3=he6710b0_2\nlibgcc-ng=9.3.0=h5101ec6_17\nlibgfortran-ng=11.1.0=h69a702a_8\nlibgfortran5=11.1.0=h6c583b3_8\nlibgomp=9.3.0=h5101ec6_17\nlibiconv=1.16=h516909a_0\nliblapack=3.9.0=11_linux64_mkl\nlibpng=1.6.37=h21135ba_2\nlibprotobuf=3.16.0=h780b84a_0\nlibstdcxx-ng=9.3.0=hd4cf53a_17\nlibtiff=4.2.0=h85742a9_0\nlibuv=1.42.0=h7f98852_0\nlibwebp-base=1.2.0=h7f98852_2\nlru-dict=1.1.7=pypi_0\nlxml=4.6.3=pypi_0\nlz4-c=1.9.3=h9c3ff4c_1\nmarkdown=3.3.4=pypi_0\nmatplotlib=3.4.3=pypi_0\nmkl=2021.3.0=h06a4308_520\nmkl-service=2.4.0=py38h497a2fe_0\nmkl_fft=1.3.0=py38h42c9631_2\nmkl_random=1.2.2=py38h1abd341_0\nmore-itertools=8.8.0=pypi_0\nmpld3=0.3=pypi_0\nmultiprocess=0.70.12.2=pypi_0\nncurses=6.2=he6710b0_1\nnettle=3.6=he412f7d_0\nnetworkx=2.6.2=pypi_0\nninja=1.10.2=h4bd325d_0\nnltk=3.6.2=pypi_0\nnum2words=0.5.10=pypi_0\nnumpy=1.19.5=pypi_0\nnumpy-base=1.20.3=py38h74d4b33_0\nnvidia-ml-py3=7.352.0=pypi_0\noauthlib=3.1.1=pypi_0\nolefile=0.46=pyh9f0ad1d_1\nopenh264=2.1.1=h780b84a_0\nopenjpeg=2.4.0=hb52868f_1\nopenss
l=1.1.1k=h27cfd23_0\nopt-einsum=3.3.0=pypi_0\noverrides=3.1.0=pypi_0\npackaging=21.0=pypi_0\npandas=1.3.1=pypi_0\npillow=8.3.1=py38h2c7a002_0\npip=21.1.3=py38h06a4308_0\nprotobuf=3.16.0=py38h709712a_0\npyarrow=5.0.0=pypi_0\npyasn1=0.4.8=pypi_0\npyasn1-modules=0.2.8=pypi_0\npycparser=2.20=pyh9f0ad1d_2\npyparsing=2.4.7=pypi_0\npyrsistent=0.18.0=pypi_0\npysocks=1.7.1=pypi_0\npython=3.8.10=h12debd9_8\npython-dateutil=2.8.2=pypi_0\npython_abi=3.8=2_cp38\npytorch=1.9.0=cpu_py38hfb3baa6_1\npytz=2021.1=pypi_0\npyyaml=5.4.1=pypi_0\nreadline=8.1=h27cfd23_0\nregex=2021.8.3=pypi_0\nrequests=2.26.0=pypi_0\nrequests-oauthlib=1.3.0=pypi_0\nrequests-toolbelt=0.9.1=pypi_0\nrsa=4.7.2=pypi_0\ns3transfer=0.5.0=pypi_0\nsacremoses=0.0.45=pypi_0\nscikit-learn=0.24.2=py38hdc147b9_0\nscipy=1.6.3=py38h7b17777_0\nsegtok=1.5.10=pypi_0\nsemantic-version=2.8.5=pypi_0\nsentencepiece=0.1.95=pypi_0\nsetuptools=52.0.0=py38h06a4308_0\nsix=1.15.0=pypi_0\nsleef=3.5.1=h7f98852_1\nsmart-open=5.1.0=pypi_0\nsqlite=3.36.0=hc218d9a_0\nsqlitedict=1.7.0=pypi_0\ntabulate=0.8.9=pypi_0\ntensorboard=2.6.0=pypi_0\ntensorboard-data-server=0.6.1=pypi_0\ntensorboard-plugin-wit=1.8.0=pypi_0\ntensorboardx=2.4=pyhd8ed1ab_0\ntensorflow=2.5.1=pypi_0\ntensorflow-estimator=2.5.0=pypi_0\ntensorflow-hub=0.12.0=pypi_0\ntensorflow-text=2.5.0=pypi_0\ntermcolor=1.1.0=pypi_0\nterminaltables=3.1.0=pypi_0\ntextattack=0.3.3=pypi_0\nthreadpoolctl=2.2.0=pyh8a188c0_0\ntk=8.6.10=hbc83047_0\ntokenizers=0.10.3=pypi_0\ntorch=1.7.1=pypi_0\ntorchaudio=0.9.0=py38\ntorchvision=0.10.0=py38_cu102\ntqdm=4.49.0=pypi_0\ntransformers=4.9.2=pypi_0\ntyping-extensions=3.7.4.3=pypi_0\ntyping_extensions=3.10.0.0=pyha770c72_0\nurllib3=1.26.6=pypi_0\nwcwidth=0.2.5=pypi_0\nwebsocket-client=1.1.1=pypi_0\nwerkzeug=2.0.1=pypi_0\nwheel=0.36.2=pyhd3eb1b0_0\nword2number=1.1=pypi_0\nwrapt=1.12.1=pypi_0\nwurlitzer=2.1.1=pypi_0\nxxhash=2.0.2=pypi_0\nxz=5.2.5=h7b6447c_0\nzipp=3.5.0=pypi_0\nzlib=1.2.11=h7b6447c_3\nzstd=1.4.9=ha95c52a_0\n" }, { "alpha_fraction": 0.8421052694320679, "alphanum_fraction": 0.8421052694320679, "avg_line_length": 65.5, "blob_id": "f54ccb5e1de1b2f94217d16ecccd418d01933e03", "content_id": "9f858f32d16945f4c30756c1ef3608b822a468d4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 133, "license_type": "no_license", "max_line_length": 88, "num_lines": 2, "path": "/README.md", "repo_name": "kjcharles-98/Analysing-the-Implications-of-Adversarial-Training-for-the-Robustness-of-Models-in-NLP", "src_encoding": "UTF-8", "text": "# Analysing-the-Implications-of-Adversarial-Training-for-the-Robustness-of-Models-in-NLP\nUCL MSc machine learning Individual project\n" }, { "alpha_fraction": 0.7060439586639404, "alphanum_fraction": 0.7060439586639404, "avg_line_length": 35.5, "blob_id": "7ee4914fb3b690ce022336539ce8ec7eede3a955", "content_id": "c908f462164ca0b132fb313ed24ccc73e1d067ba", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 376, "license_type": "permissive", "max_line_length": 69, "num_lines": 10, "path": "/freelb/selftest.py", "repo_name": "kjcharles-98/Analysing-the-Implications-of-Adversarial-Training-for-the-Robustness-of-Models-in-NLP", "src_encoding": "UTF-8", "text": "import os\n\nprint('Parent directory path')\nprint(os.path.abspath(os.path.dirname(os.getcwd())))\nprint(os.path.abspath(os.path.join(os.path.dirname(__file__), 
\"..\")))\nprint(os.path.abspath(os.path.dirname(os.path.dirname(__file__))))\nprint(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))\n\nprint(os.path.exists('/home'))\nprint(os.path.exists('/data/cheng/checkpoints'))" }, { "alpha_fraction": 0.6721649765968323, "alphanum_fraction": 0.7072165012359619, "avg_line_length": 28.42424201965332, "blob_id": "6d7ee9e4c0e8f1edbdf29732baf54b7a77ac8469", "content_id": "aeebeb70caa172e5aa962e9a7b48004fbde582b4", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 970, "license_type": "permissive", "max_line_length": 169, "num_lines": 33, "path": "/freelb/my_model.py", "repo_name": "kjcharles-98/Analysing-the-Implications-of-Adversarial-Training-for-the-Robustness-of-Models-in-NLP", "src_encoding": "UTF-8", "text": "import torch\nimport textattack\nimport transformers\nfrom transformers import (AlbertConfig, AlbertForSequenceClassification, AlbertTokenizer)\n\ndirmodel = '/data/cheng/checkpoints/FreeLB-albert-xxlarge-v2-RTE-alr1-amag6e-1-anm0.2-as3-sl512-lr1e-5-bs4-gas1-hdp0.1-adp0-ts20935-ws1256-wd1e-2-seed42/checkpoint-best'\n\n\nconfig_class, model_class, tokenizer_class = AlbertConfig, AlbertForSequenceClassification, AlbertTokenizer\n\nconfig = config_class.from_pretrained(\n dirmodel,\n num_labels=2,\n finetuning_task='rte',\n cache_dir=None,\n attention_probs_dropout_prob=0,\n hidden_dropout_prob=0.1\n )\n \ntokenizer = tokenizer_class.from_pretrained(\n dirmodel,\n do_lower_case=True,\n cache_dir=None,\n )\n\nmodel = model_class.from_pretrained(\n dirmodel,\n from_tf=False,\n config=config,\n cache_dir=None,\n )\n\nmodel = textattack.models.wrappers.HuggingFaceModelWrapper(model, tokenizer)" }, { "alpha_fraction": 0.5868867635726929, "alphanum_fraction": 0.6185235977172852, "avg_line_length": 40.88461685180664, "blob_id": "06b6d6a6dd906fb5baa3f65742ccca2c1f59ae3c", "content_id": "dad8cd4f0563ddf101d5472eb3fa1bb14cabec0c", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 2181, "license_type": "permissive", "max_line_length": 179, "num_lines": 52, "path": "/freelb/launch/myrun_glue.sh", "repo_name": "kjcharles-98/Analysing-the-Implications-of-Adversarial-Training-for-the-Robustness-of-Models-in-NLP", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\n\nfunction runexp {\n\nexport GLUE_DIR=/data/cheng/GLUE\nexport TASK_NAME=${1}\n\ngpu=${2} # The GPU you want to use\nmname=${3} # Model name\nalr=${4} # Step size of gradient ascent\namag=${5} # Magnitude of initial (adversarial?) perturbation\nanorm=${6} # Maximum norm of adversarial perturbation\nasteps=${7} # Number of gradient ascent steps for the adversary\nlr=${8} # Learning rate for model parameters\nbsize=${9} # Batch size\ngas=${10} # Gradient accumulation. 
bsize * gas = effective batch size\nseqlen=512 # Maximum sequence length\nhdp=${11} # Hidden layer dropouts for ALBERT\nadp=${12} # Attention dropouts for ALBERT\nts=${13} # Number of training steps (counted as parameter updates)\nws=${14} # Learning rate warm-up steps\nseed=${15} # Seed for randomness\nwd=${16} # Weight decay\n\nexpname=FreeLB-${mname}-${TASK_NAME}-alr${alr}-amag${amag}-anm${anorm}-as${asteps}-sl${seqlen}-lr${lr}-bs${bsize}-gas${gas}-hdp${hdp}-adp${adp}-ts${ts}-ws${ws}-wd${wd}-seed${seed}\n\nnohup python examples/run_glue_freelb.py \\\n --model_type albert \\\n --model_name_or_path ${mname} \\\n --task_name $TASK_NAME \\\n --do_train \\\n --do_eval \\\n --do_lower_case \\\n --data_dir $GLUE_DIR/$TASK_NAME \\\n --max_seq_length ${seqlen} \\\n --per_gpu_train_batch_size ${bsize} --gradient_accumulation_steps ${gas} \\\n --learning_rate ${lr} --weight_decay ${wd} \\\n --gpu ${gpu} \\\n --output_dir /data/cheng/checkpoints/${expname}/ \\\n --hidden_dropout_prob ${hdp} --attention_probs_dropout_prob ${adp} \\\n --adv-lr ${alr} --adv-init-mag ${amag} --adv-max-norm ${anorm} --adv-steps ${asteps} \\\n --expname ${expname} --evaluate_during_training \\\n --max_steps ${ts} --warmup_steps ${ws} --seed ${seed} \\\n --logging_steps 100 --save_steps 100 \\\n --fp16 \\\n --comet \\\n --overwrite_output_dir > /data/cheng/tmp/sst_lr5_step10_anm0.log 2>&1\n}\n\n\n# runexp TASK_NAME gpu model_name adv_lr adv_mag anorm asteps lr bsize grad_accu hdp adp ts ws seed wd\nrunexp SST-2 0 albert-xxlarge-v2 5 6e-1 0 10 1e-5 4 1 0.1 0 20935 1256 42 1e-2\n\n\n\n" } ]
5
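The launcher above passes --adv-lr, --adv-init-mag, --adv-max-norm and --adv-steps to examples/run_glue_freelb.py, which is not included in this snapshot. As a rough, simplified illustration of what those flags control (the FreeLB inner loop of Zhu et al.; the paper's per-example L2 normalization and projection are reduced here to a global norm and an element-wise clamp, so this is a sketch, not the repository's implementation):

import torch

def freelb_inner_loop(model, embeds, labels, adv_lr, adv_init_mag, adv_steps, adv_max_norm):
    # Sketch of FreeLB: perturb the detached input embeddings, take adv_steps
    # gradient-ascent steps on the perturbation, and let the model's parameter
    # gradients accumulate across all steps ("free" adversarial training).
    delta = torch.zeros_like(embeds).uniform_(-adv_init_mag, adv_init_mag)
    delta.requires_grad_()
    for _ in range(adv_steps):
        out = model(inputs_embeds=embeds + delta, labels=labels)
        loss = out.loss / adv_steps      # average the K ascent losses
        loss.backward()                  # accumulates into params and delta
        g = delta.grad.detach()
        delta = (delta + adv_lr * g / (g.norm() + 1e-12)).detach()
        if adv_max_norm > 0:
            delta = delta.clamp(-adv_max_norm, adv_max_norm)  # crude projection
        delta.requires_grad_()
    # the caller then runs optimizer.step() / optimizer.zero_grad() as usual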
dmbelo/udacity.carnd.behavioral-cloning
https://github.com/dmbelo/udacity.carnd.behavioral-cloning
68186951c70628718778cab01f876aca87ca9e78
7e42f3ef3d69e43bad6485c417ae38a68c857ce5
448ded777a6c2cac3f4201befc8ba619ac087a49
refs/heads/master
2021-01-13T04:03:41.569698
2017-02-06T07:07:08
2017-02-06T07:07:08
77,947,569
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.66715008020401, "alphanum_fraction": 0.6860043406486511, "avg_line_length": 20.21538543701172, "blob_id": "d121c22a6eaca6999782b1b4e962aac44d7c7970", "content_id": "0bea0c36a194c4177a547f40357387a2799d7192", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 1379, "license_type": "no_license", "max_line_length": 139, "num_lines": 65, "path": "/server.sh", "repo_name": "dmbelo/udacity.carnd.behavioral-cloning", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\n# aws ec2 stop-instances --instance-ids 'i-0d67c40ac879ecb9c'\n\ninstance_id=i-0d67c40ac879ecb9c\n\nfunction server_usage\n{\n echo \"usage: server (start|stop|push|fetch|ssh)\"\n}\n\nfunction server_start\n{\n echo \"Starting...\"\n aws ec2 start-instances --instance-ids $instance_id\n}\n\nfunction server_stop\n{\n echo \"Stopping...\"\n aws ec2 stop-instances --instance-ids $instance_id\n}\n\nfunction server_ssh\n{\n echo \"Starting ssh session...\"\n ip_address=$(aws ec2 describe-instances --instance-ids $instance_id --output text --query 'Reservations[*].Instances[*].PublicIpAddress')\n ssh carnd@$ip_address\n}\n\nfunction server_push\n{\n echo \"Pushing...\"\n ip_address=$(aws ec2 describe-instances --instance-ids $instance_id --output text --query 'Reservations[*].Instances[*].PublicIpAddress')\n rsync -avL --exclude-from 'exclude-list.txt' . carnd@$ip_address:carnd.behavioral-cloning/\n}\n\nfunction server_fetch\n{\n echo \"Fetching...\"\n ip_address=$(aws ec2 describe-instances --instance-ids $instance_id --output text --query 'Reservations[*].Instances[*].PublicIpAddress')\n rsync -avuL --exclude-from 'exclude-list.txt' carnd@$ip_address:carnd.behavioral-cloning/ .\n}\n\n\ncase $1 in\n start)\n server_start\n ;;\n stop)\n server_stop\n ;;\n push)\n server_push\n ;;\n fetch)\n server_fetch\n ;;\n ssh)\n server_ssh\n ;;\n *)\n server_usage\n exit 1\nesac\n" }, { "alpha_fraction": 0.555027186870575, "alphanum_fraction": 0.578396737575531, "avg_line_length": 31.139738082885742, "blob_id": "3c063974dd96f7f3bbe85046ab732c8e7d642d7a", "content_id": "a385e086b28ac914e8d3ecbcaec38cc0a14e0756", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7360, "license_type": "no_license", "max_line_length": 79, "num_lines": 229, "path": "/utils.py", "repo_name": "dmbelo/udacity.carnd.behavioral-cloning", "src_encoding": "UTF-8", "text": "import pandas as pd\nimport numpy as np\nimport cv2\n\n\ndef plot(df):\n \"\"\"Plotting helper to visualize the udacity data set\"\"\"\n import matplotlib.pyplot as plt\n ax1 = plt.subplot(411)\n plt.plot(df.steering)\n plt.xlabel('Index')\n plt.ylabel('Steering')\n\n plt.subplot(412, sharex=ax1)\n plt.plot(df.throttle)\n plt.xlabel('Index')\n plt.ylabel('Throttle')\n\n plt.subplot(413, sharex=ax1)\n plt.plot(df.brake)\n plt.xlabel('Index')\n plt.ylabel('Brake')\n\n plt.subplot(414, sharex=ax1)\n plt.plot(df.speed)\n plt.xlabel('Index')\n plt.ylabel('Speed')\n\n plt.show(block=False)\n\n\ndef play(df):\n \"\"\"Helper to visualize the images in the udacity data set in a stream\"\"\"\n for i, file in enumerate(df.image_file):\n img = cv2.imread('data/' + file, 0)\n\n idx = df.index[i]\n cv2.putText(img,\n 'index = {:d}'.format(idx),\n (20, 20),\n cv2.FONT_HERSHEY_SIMPLEX,\n 0.35,\n (255, 180, 180))\n\n cv2.putText(img,\n 'speed = {:.2f}'.format(df.loc[idx, 'speed']),\n (20, 35),\n cv2.FONT_HERSHEY_SIMPLEX,\n 0.35,\n (255, 180, 180))\n\n cv2.putText(img,\n 'steer = 
{:.2f}'.format(df.loc[idx, 'steering']),\n (20, 50),\n cv2.FONT_HERSHEY_SIMPLEX,\n 0.35,\n (255, 180, 180))\n\n cv2.imshow('image', img)\n if cv2.waitKey(33) == 27:\n cv2.destroyAllWindows()\n break\n elif cv2.waitKey(33) == 32:\n while True:\n if cv2.waitKey(33) == 32:\n break\n\n cv2.destroyAllWindows()\n\n\ndef down_sample_zeros(df, zeros):\n \"\"\"\n Down-sample the zero-steer data\n\n Retain only a random sample of zeros in pandas.DataFrame df and remove rest\n \"\"\"\n\n b_zero_steer = df.steering == 0\n idx = b_zero_steer.index[b_zero_steer].values\n np.random.shuffle(idx)\n idx_remove_zeros = idx[zeros:]\n return df.drop(idx_remove_zeros, axis=0)\n\n\ndef filter_01(df):\n \"\"\"Remove front, middle and ending sections of the udacity data set\"\"\"\n b_ends = (df.index < 80) | (df.index > 7790)\n b_middle = (df.index > 3400) & (df.index < 4600)\n return df.loc[(~b_ends) & (~b_middle), :].copy()\n\n\ndef collapse(df, steer_offset):\n \"\"\"Collapse center, left and right images into one and add steer offset\"\"\"\n c = ['center', 'steering', 'throttle', 'brake', 'speed']\n df_center = df.loc[:, c].copy()\n df_center.rename(columns={'center': 'image_file'}, inplace=True)\n\n c = ['left', 'steering', 'throttle', 'brake', 'speed']\n df_left = df.loc[:, c].copy()\n df_left.rename(columns={'left': 'image_file'}, inplace=True)\n df_left.steering += steer_offset\n\n c = ['right', 'steering', 'throttle', 'brake', 'speed']\n df_right = df.loc[:, c].copy()\n df_right.rename(columns={'right': 'image_file'}, inplace=True)\n df_right.steering -= steer_offset\n\n return pd.concat([df_center, df_left, df_right])\n\n\ndef make_df(file_name):\n \"\"\"\n Make the post-processed data set\n\n Make the pandas.DataFrame from a csv file and perform all pre-processing\n tasks including filtering, adding steering offset, collapsing and down\n sampling the zero-steer data.\n \"\"\"\n df = pd.read_csv(file_name)\n df_filtered = filter_01(df)\n df_collapsed = collapse(df_filtered, 0.2)\n df_down_sampled = down_sample_zeros(df_collapsed, 500)\n df_down_sampled.index = range(df_down_sampled.shape[0])\n return df_down_sampled\n\n\ndef augment_brightness(image_in):\n \"\"\"Randomly chane the brightness of an image\"\"\"\n image_out = cv2.cvtColor(image_in, cv2.COLOR_RGB2HSV)\n random_bright = .25 + np.random.uniform()\n image_out[:, :, 2] = image_out[:, :, 2] * random_bright\n image_out = cv2.cvtColor(image_out, cv2.COLOR_HSV2RGB)\n return image_out\n\n\ndef process_image(img):\n \"\"\"Crop and scale the original Udacity data set image\"\"\"\n crop = img[50:-24, :, :]\n return cv2.resize(crop, (160, 43), interpolation=cv2.INTER_AREA)\n\n\ndef idx_init(idx, shuffle):\n \"\"\"Initialize i_start and idx for a complete pass of training data\"\"\"\n if shuffle:\n np.random.shuffle(idx)\n i_start = 0\n return i_start\n\n\ndef image_data_generator(df, batch_size=32, shuffle=False):\n \"\"\"\n Generator for to be used by keras' fit_generator method.\n\n Generate a batch and indefinitely loop through the training data. 
Real-time\n data augmentation is also handled here.\n \"\"\"\n\n idx = df.index.copy().values\n n_images = df.shape[0]\n\n sample_image = cv2.imread('data/' + df.image_file.iloc[0].strip())\n image_shape = process_image(sample_image).shape\n\n i_start = idx_init(idx, shuffle)\n\n while True:\n # Calculated the ending index for this batch\n i_end = np.min([i_start + batch_size, n_images])\n # Calculate the number of samples in this batch (could be less than\n # batchsize when approaching the end of the training set)\n n = i_end-i_start\n # Initialize the batch image and label arrays\n batch_image_size = np.concatenate([[n], image_shape])\n batch_image = np.zeros(batch_image_size, dtype=np.uint8)\n batch_steer = np.zeros([i_end - i_start, 1])\n # For each sample in this batch\n for j, k in enumerate(range(i_start, i_end)):\n # Add gaussian noise to the steering values\n steer = df.loc[idx[k], 'steering'] + np.random.randn() * 0.02\n img = cv2.imread('data/' + df.loc[idx[k], 'image_file'].strip())\n # Convert to RGB\n img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n img = augment_brightness(img)\n # Flip a coin to decide wether to flip\n b_flip = np.random.random_integers(0, 1)\n if b_flip:\n img = cv2.flip(img, 1)\n steer = -steer\n\n batch_image[j] = process_image(img)\n batch_steer[j] = steer\n yield batch_image, batch_steer\n\n if i_end == n_images: # End of the training set, re-initialize\n i_start = idx_init(idx, shuffle)\n else:\n i_start = i_end\n\n\ndef validation_set_generator(df, batch_size=32):\n\n idx = df.index.copy().values\n n_images = df.shape[0]\n\n sample_image = cv2.imread('data/' + df.image_file.iloc[0].strip())\n image_shape = process_image(sample_image).shape\n\n i_start = 0\n\n while True:\n i_end = np.min([i_start + batch_size, n_images])\n n = i_end-i_start\n batch_image_size = np.concatenate([[n], image_shape])\n batch_image = np.zeros(batch_image_size, dtype=np.uint8)\n batch_steer = np.zeros([i_end - i_start, 1])\n # For each sample in this batch\n for j, k in enumerate(range(i_start, i_end)):\n steer = df.loc[idx[k], 'steering']\n img = cv2.imread('data/' + df.loc[idx[k], 'image_file'].strip())\n # Convert to RGB\n img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n batch_image[j] = process_image(img)\n batch_steer[j] = steer\n yield batch_image, batch_steer\n\n if i_end == n_images: # End of the set, re-initialize\n i_start = 0\n else:\n i_start = i_end\n" }, { "alpha_fraction": 0.7977138161659241, "alphanum_fraction": 0.808759331703186, "avg_line_length": 128.76666259765625, "blob_id": "4f8b97f0404b5759326d6066b48bbac8f65b5c6a", "content_id": "aa6da5031aba6e2ace39e4a0ec048a305ddc1e5c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 7786, "license_type": "no_license", "max_line_length": 659, "num_lines": 60, "path": "/README.md", "repo_name": "dmbelo/udacity.carnd.behavioral-cloning", "src_encoding": "UTF-8", "text": "# udacity.carnd.behavioral-cloning\n\n# Sample Usage\n\nTo run this model first launch the Udacity Self Driving Car Nanodegree Simulator and enter \"Autonomous Mode\". 
Once the simulator is live, launch the trained model by running `python drive.py model.json` and watch as it takes control of the vehicle!\n\n# Model Architecture Overview\n\nThe implemented model architecture is largely based on the NVIDIA CNN model: [End to End Learning for Self-Driving Cars](https://arxiv.org/pdf/1604.07316v1.pdf).\n\n![Model Architecture](etc/ModelArchitecture.png)\n\nThe model uses 5 convolutional layers followed by 4 fully connected layers. All layers feature a rectifier linear unit (ReLU) activation layer on their output which introduce non-linearities into the model.\n\nFurthermore, all convolutional layers include some form of down sampling: a stride greater than 1 is used for the first 3 layers, and all layers use a valid padding. These value have been chosen to improve the training speed while balancing minimal impact on performance observed on the simulator.\n\nDropouts layers are added on top of the original NVIDIA model, specifically after each convolutional and fully connected layers. Dropouts are typically included to minimize over-fitting. A drop out rate of 0.3 (30% of the input nodes are ignored) was chosen for the dropout layers following the convolutional layers, and a 0.5 rate was chosen for the dropout layers following the fully connected layers. These rates were chosen empirically. A higher dropout rate results in a more that is more robust and less prone to over fitting but this comes at the expense of learning time or even convergence.\n\nIn total the model has 8,002,167 parameters which are learned through the training process.\n\n# Training Data\n\nThe training data used for the model was derived from the data set provided by Udacity. This data set consists of 8036 images of simulator driving. Along with the images, their respective steering, braking control and vehicle velocity values were provided. Finally, left and right camera angle perspectives were also included to compliment each center image (for a total of 24,108 images).\n\nThe process of generating the training data from this set is the critical step in the learning approach. The approach is divided into two steps: pre-process and real-time augmentation.\n\n## Pre-process\n\nThe pre-process step primarily deals with removing images that were deemed unnecessary or misleading. The data set contains initial and final sections which are primarily ramping up and slowing down and not actually representative of normal driving conditions. This can be seen in the erratic steering, and speed trace shown below. Around 3500 images in, there is also another erratic driving period which is not smooth or representative of normal driving. For these reasons, the three sections of data, highlighted in red below, are removed from the data set.\n\n![Vehicle Data](etc/vehicle_signals.png)\n\nThe original data set is also highly biased towards straight line driving. This is shown below in the left histogram. By selecting a random set of 500 images from the subset of zero-steer images, the distribution was made to be much more representative of a normal distribution, shown on the right histogram. This results in a more balanced data set which does not bias the model toward a propensity to drive straight as much as the original set.\n\n![Histogram](etc/hist.png)\n\nThe final pre-process step was to assign a steering angle to the left and right camera angle perspectives and use those images in the training set. 
Besides tripling the amount of data available it also provides essential data to 'show' the model to recover from driving close to the edge of the road without actually driving close to the edge. To synthesize the steering labels for these images, an offset of 0.2 was added/subtracted to the center steer value and assigned to the left and right images. This value was chosen arbitrarily and tuned in various model training iterations.\n\nThe final set of images in the data set consists of 10,725 (the original set was 24,108). Also of note is that while the original images are of size 320x160, these were cropped and scaled down to a size of 43x160. Cropping was down in such a way that the sky in a scene from the top of the image was removed and the hood of the car from the bottom was also removed. In both instance, no gain from including those sections of the image are expected so they are effectively wasted pixels. Scaling was performed as a means of compressing the problems and increasing runtime and learning speed.\n\n## Real-time Augmentation\n\nThe real-time augmentation step consists of various operations performed on images and labels during the training phase. This was implemented in a python generator which is used to load augmented images and labels into memory *one batch a time*. The augmentation process is performed on each batch which itself is randomly sampled from the entire training set. The operations consist of:\n* Brightness - the brightness of an image was modified by scaling it with a multiplier sampled from a uniform distribution. The idea is to make the model learn the required features for the task regardless of whether an image is dark or light.\n* Horizontal flip - each image was horizontally (left/right) flipped at random by performing a coin toss. The sign on the steering angle was also flipped. The motivation is to increase the number of images to learn from by realizing that if the scene is horizontally flipped, the features to be learned should be the same.\n* Steering variation - each steering label has an offset added to it that is sampled from a normal distribution of a *small* standard deviation. This effectively increases the number of training samples to learn. The justification for this is that the steering command from a driver is not a deterministic process and has some randomness in it. Therefore, it is to be expected that two hypothetically identical scenes may have distinct, albeit similar values. By adding some gaussian noise to the data we can grow the number of training samples while incorporate while 'showing' the model that there is uncertainty to be expected of the steering angle values.\n\nSince these augmentation techniques are themselves random and changing every during every batch pass, we are in essence introducing mechanism that minimizes over-fitting. Like drop-outs which randomly shut-off nodes in a forward-pass of the model, these augmentation techniques randomly add noise and modifications to the images which prevent the model from over-fit to a specific set of images.\n\nWith these modifications the dataset is increased to 25,000 images from 10,725.\n\n## Validation data\n\nA set of 2500 images are set aside randomly at the beginning of the training process to assess the performance of the model after each epoch. 
This is done in order to provide a measure of over-fitting by comparing the error of the model on the training data with the error on the validation set.\n\n## Hyperparameter Tuning\n\nThe initial value of the learning rate for the Adam optimizer was chosen from experience to be 1e-4. From there the rate was eventually increased to 1e-3, which resulted in faster learning. Usually this is a compromise between learning speed and final accuracy, but the impact on accuracy was not measurable in this case.\n\nThe number of epochs was chosen to be 10 in order to give the optimization enough time for the validation loss to 'plateau'. The model was saved each time the validation loss improved and, as such, a sufficiently large number of epochs allows for comparing various models not only by their validation losses but also by visually inspecting their driving ability in the simulator.\n" }, { "alpha_fraction": 0.5811923146247864, "alphanum_fraction": 0.6106261014938354, "avg_line_length": 32.40833282470703, "blob_id": "0f5bda3a5704a91ce2223890fe1a184403d35611", "content_id": "dde258a2ff8fcdfa453aa9dc9fb438731ae9514a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4009, "license_type": "no_license", "max_line_length": 78, "num_lines": 120, "path": "/model.py", "repo_name": "dmbelo/udacity.carnd.behavioral-cloning", "src_encoding": "UTF-8", "text": "import numpy as np\nimport cv2\nfrom keras.models import Sequential\nfrom keras.layers import Dense, Convolution2D, Dropout\nfrom keras.layers.core import Flatten, Activation, Lambda\nfrom keras.optimizers import Adam\nfrom keras.callbacks import ModelCheckpoint\nfrom utils import image_data_generator, validation_set_generator\nfrom utils import process_image, make_df\n\n\nsample_image = cv2.imread('data/IMG/left_2016_12_01_13_37_41_968.jpg')\nimage_shape = process_image(sample_image).shape\nprint('Image Shape:', image_shape)\n\n\ndef nvidia():\n    \"\"\"Implementation of the NVIDIA model\"\"\"\n    model = Sequential()\n    model.add(Lambda(lambda x: x/127.5 - 1.0, input_shape=image_shape))\n    model.add(Convolution2D(\n        nb_filter=24, nb_row=5, nb_col=5,\n        subsample=(2, 2),\n        border_mode='valid',\n        init='he_normal'))\n    model.add(Activation('relu'))\n    model.add(Convolution2D(\n        nb_filter=36, nb_row=5, nb_col=5,\n        subsample=(1, 2),\n        border_mode='valid',\n        init='he_normal'))\n    model.add(Activation('relu'))\n    model.add(Convolution2D(\n        nb_filter=48, nb_row=5, nb_col=5,\n        subsample=(1, 2),\n        border_mode='valid',\n        init='he_normal'))\n    model.add(Dropout(0.3))\n    model.add(Activation('relu'))\n    model.add(Convolution2D(\n        nb_filter=64, nb_row=3, nb_col=3,\n        border_mode='valid',\n        init='he_normal'))\n    model.add(Dropout(0.3))\n    model.add(Activation('relu'))\n    model.add(Convolution2D(\n        nb_filter=64, nb_row=3, nb_col=3,\n        border_mode='valid',\n        init='he_normal'))\n    model.add(Dropout(0.3))\n    model.add(Activation('relu'))\n    model.add(Flatten())\n    model.add(Dense(1164, init='he_normal'))\n    model.add(Dropout(0.5))\n    model.add(Activation('relu'))\n    model.add(Dense(100, init='he_normal'))\n    model.add(Dropout(0.5))\n    model.add(Activation('relu'))\n    model.add(Dense(50, init='he_normal'))\n    model.add(Dropout(0.5))\n    model.add(Activation('relu'))\n    model.add(Dense(10, init='he_normal'))\n    model.add(Dropout(0.5))\n    model.add(Activation('relu'))\n    model.add(Dense(1, init='he_normal'))\n\n    model.summary()\n\n    return model\n\n\ndef train(model, df, n_epochs=1, batch_size=250):\n    \"\"\"Model training function\"\"\"\n    
model.compile(\n        loss='mean_squared_error',\n        optimizer=Adam(lr=1e-3))\n\n    n_validation_samples = 2500\n    idx = np.arange(df.shape[0], dtype=np.uint)\n    np.random.shuffle(idx)\n    # Any indices before this index belong to the training set\n    df_train = df.iloc[idx[:-n_validation_samples]].copy()\n    df_valid = df.iloc[idx[-n_validation_samples:]].copy()\n    print('Number of total samples:', df.shape[0])\n    print('Number of training samples:', df_train.shape[0])\n    print('Number of validation samples:', df_valid.shape[0])\n\n    filepath = 'weights-improvement-{epoch:02d}-{val_loss:.4f}.hdf5'\n    checkpoint = ModelCheckpoint(filepath,\n                                 monitor='val_loss',\n                                 verbose=1,\n                                 save_best_only=True,\n                                 mode='min')\n    callbacks_list = [checkpoint]\n\n    # use the batch_size argument rather than a hard-coded value\n    model.fit_generator(\n        generator=image_data_generator(df=df_train,\n                                       batch_size=batch_size,\n                                       shuffle=True),\n        samples_per_epoch=30000,\n        validation_data=validation_set_generator(df=df_valid,\n                                                 batch_size=batch_size),\n        nb_val_samples=n_validation_samples,\n        nb_epoch=n_epochs,\n        callbacks=callbacks_list,\n        verbose=1)\n    print('Training done')\n\n    # serialize model to JSON\n    model_json = model.to_json()\n    with open(\"model.json\", \"w\") as json_file:\n        json_file.write(model_json)\n    # serialize weights to HDF5\n    model.save_weights(\"model.h5\")\n    print(\"Saved model to disk\")\n\nif __name__ == \"__main__\":\n    \"\"\"Run the training\"\"\"\n    model = nvidia()\n    df = make_df('data/driving_log.csv')\n    train(model=model, df=df, n_epochs=10)\n" } ]
4
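The model.py listing in the record above imports `process_image` and `image_data_generator` from a `utils` module that is not included in this record. Purely as an illustration of the pre-processing and real-time augmentation the README describes, those two helpers might look like the following sketch; the crop bounds, brightness range, noise scale, and DataFrame column names are assumptions, not the author's actual utils.py.

```python
import cv2
import numpy as np


def process_image(img):
    """Crop away the sky and hood, then downscale to 43x160 (sketch)."""
    cropped = img[60:146, :, :]            # row bounds are an assumption
    return cv2.resize(cropped, (160, 43))  # cv2.resize takes (width, height)


def image_data_generator(df, batch_size=250, shuffle=True):
    """Yield freshly augmented (images, angles) batches forever (sketch)."""
    while True:
        batch = df.sample(batch_size) if shuffle else df.head(batch_size)
        images, angles = [], []
        for _, row in batch.iterrows():
            img = process_image(cv2.imread(row['image']))  # column names assumed
            angle = row['steering']
            # Brightness: scale the V channel by a uniform random multiplier.
            hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV).astype(np.float32)
            hsv[:, :, 2] = np.clip(hsv[:, :, 2] * np.random.uniform(0.4, 1.2),
                                   0, 255)
            img = cv2.cvtColor(hsv.astype(np.uint8), cv2.COLOR_HSV2BGR)
            # Horizontal flip: coin toss; flip the steering sign with the image.
            if np.random.rand() < 0.5:
                img, angle = cv2.flip(img, 1), -angle
            # Steering variation: small Gaussian jitter on the label.
            angles.append(angle + np.random.normal(0.0, 0.02))
            images.append(img)
        yield np.array(images), np.array(angles)
```

The left/right camera images from the pre-process step would enter the same pipeline, with 0.2 added to or subtracted from the recorded center steering value before the row reaches the DataFrame.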
lwnadams/Rock-Paper-Scissors
https://github.com/lwnadams/Rock-Paper-Scissors
8bd76ab8741cb2dc996c7b5706a92e3cb44289cb
36f62f70ad168848a59b7fbfbbed382edd1b0331
ab06ed1cbd91e364ca4255c2ebf3d1941870f2e4
refs/heads/master
2020-12-04T03:26:33.713761
2020-01-03T13:04:30
2020-01-03T13:04:30
231,590,258
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5692307949066162, "alphanum_fraction": 0.5692307949066162, "avg_line_length": 27.66666603088379, "blob_id": "ddb1b1c4a63fa1c21631c47c325b377b9d56adca", "content_id": "7cc829a02216c8b93c7833439a0652a4527ab3a9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1430, "license_type": "no_license", "max_line_length": 77, "num_lines": 48, "path": "/Rockpaperscissors.py", "repo_name": "lwnadams/Rock-Paper-Scissors", "src_encoding": "UTF-8", "text": "import random\r\n\r\n\r\n\r\nuser = input(\"Enter Rock, Paper or Scissors: \")\r\n#valididity =\r\n\r\nwhile user not in [\"Rock\", \"rock\", \"Paper\", \"paper\", \"Scissors\", \"scissors\"]:\r\n user = input(\"Invalid entry, enter Rock, Paper or Scissors: \")\r\n\r\n\r\noptions = [\"Rock\", \"Paper\", \"Scissors\"]\r\ncomputer = random.choice(options)\r\n\r\n\r\nprint(computer)\r\n\r\ndef play_game(computer, win_status):\r\n if win_status == \"won\":\r\n print(\"Congratulations! Computer chooses \" +computer+ \", you win!\")\r\n elif win_status == \"lost\":\r\n print(\"Computer chose \" +computer+ \", you lost!\")\r\n elif win_status == \"draw\":\r\n print(\"Computer chose \"+computer+\", its a draw!\")\r\n\r\nif computer == \"Rock\":\r\n if user == \"rock\" or \"Rock\":\r\n play_game(computer, \"draw\")\r\n elif user == \"Scissors\" or \"scissors\":\r\n play_game(computer, \"lost\")\r\n elif user == \"Paper\" or \"paper\":\r\n play_game(computer, \"won\")\r\n\r\nif computer == \"Paper\":\r\n if user == \"rock\" or \"Rock\":\r\n play_game(computer, \"lost\")\r\n elif user == \"Scissors\" or \"scissors\":\r\n play_game(computer, \"won\")\r\n elif user == \"Paper\" or \"paper\":\r\n play_game(computer, \"draw\")\r\n\r\nif computer == \"Scissors\":\r\n if user == \"rock\" or \"Rock\":\r\n play_game(computer, \"won\")\r\n elif user == \"Scissors\" or \"scissors\":\r\n play_game(computer, \"draw\")\r\n elif user == \"Paper\" or \"paper\":\r\n play_game(computer, \"lost\")\r\n\r\n " } ]
1
sam-hunt/spam-filter
https://github.com/sam-hunt/spam-filter
a9d5b2b30f9b4b00149ad91bba4f231be629a7dd
7e458653bc421ebec1813a595e8896920c6046c4
eeec8bb01b478f8cba1eafac53e80150d921c90c
refs/heads/master
2021-03-30T18:30:50.142901
2016-12-10T00:41:35
2016-12-10T00:41:35
73,359,953
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.564523458480835, "alphanum_fraction": 0.5982671976089478, "avg_line_length": 41, "blob_id": "0449d62de004082fad573dd5fd885d41ef3eb24e", "content_id": "400399b861442ac13c82c358ccd2c8b55f25c8f7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2193, "license_type": "no_license", "max_line_length": 152, "num_lines": 51, "path": "/src/spam_filter_pcn.py", "repo_name": "sam-hunt/spam-filter", "src_encoding": "UTF-8", "text": "'''\r\nCreated on 12/09/2015\r\n\r\n@author: Sam Hunt\r\n'''\r\n\r\nimport numpy as np\r\nimport pcn\r\nimport save_perceptron as sp\r\n\r\nemails = np.loadtxt('../res/emails_proc.data', delimiter=\",\")\r\nprint \"../res/emails_proc.data loaded.\"\r\n\r\n#will still compile if number of inputs changes \r\nnInputs = np.shape(emails)[1]\r\n\r\n#Keep track of the overall highest success rate NN \r\nhighest_overall_sr, best_nn = 0, None\r\n#Keep track of the highest success rate NN with 0 false positives\r\nhighest_sr_nofp, best_nn_nofp = 0, None\r\n \r\n#Train 100 NNs with various learning rates, numbers of training iterations, and shuffle subsets\r\nfor learning_rate in [0.1,0.25,0.5,0.75,1]:\r\n for training_iterations in [50,200,500,1000]:\r\n for shuffle_iterations in xrange(0,5):\r\n #shuffle the email data-set records\r\n np.random.shuffle(emails)\r\n \r\n #use the first half for training, second half for testing \r\n trainin = emails[0:300,:nInputs-1]\r\n testin = emails[300:600,:nInputs-1]\r\n traintgt = emails[0:300,nInputs-1:nInputs]\r\n testtgt = emails[300:600,nInputs-1:nInputs]\r\n\r\n #create a new empty perceptron and train it\r\n p = None\r\n p = pcn.pcn(trainin, traintgt)\r\n p.pcntrain(trainin, traintgt, learning_rate, training_iterations)\r\n \r\n #test the trained perceptron and store the resulting confusion matrix and success rate\r\n cm, sr = p.confmat(testin, testtgt)\r\n \r\n print \"LR: \", learning_rate, \" TI: \", training_iterations, \" SI: \", shuffle_iterations, \" SR: \", int(sr*100), \"% FP: \", cm[0][1]\r\n \r\n #commit to disk and log the networks and with the highest success rates\r\n if (sr > highest_overall_sr):\r\n sp.save(p, \"highest_overall_sr_pcn\", cm, sr, learning_rate, training_iterations, 0)\r\n highest_overall_sr, best_nn = sr, p\r\n if ((cm[0][1] == 0) and sr > highest_sr_nofp):\r\n sp.save(p, \"highest_sr_no_false_positives_pcn\", cm, sr, learning_rate, training_iterations, 0)\r\n highest_sr_nofp, best_nn_nofp = sr, p\r\n" }, { "alpha_fraction": 0.655410885810852, "alphanum_fraction": 0.6611065864562988, "avg_line_length": 36.88888931274414, "blob_id": "9c4e112339877a1fd6f2fde5a355c6a7db8a9387", "content_id": "a2516c1ab204890fcaf8a452a126ec19acdd8cdc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2458, "license_type": "no_license", "max_line_length": 99, "num_lines": 63, "path": "/src/create_dataset_pcn.py", "repo_name": "sam-hunt/spam-filter", "src_encoding": "UTF-8", "text": "'''\r\nCreated on 13/09/2015\r\n\r\n@author: Sam Hunt\r\n'''\r\nimport glob\r\nimport remove_stopwords\r\n\r\ndef parse_all():\r\n '''Apply the remove_stopswords.parse() function to each email and produce the *.txt.out file'''\r\n for each_email in glob.glob(\"../res/emails/*.txt\"):\r\n remove_stopwords.parse(each_email)\r\n\r\ndef load_strings(path_pattern):\r\n '''Returns the contents of all files matching the path pattern, as a list of strings'''\r\n contents = []\r\n for each_file in 
glob.glob(path_pattern):\r\n with open(each_file, \"r\") as current_file:\r\n contents.append(current_file.read())\r\n return contents\r\n\r\n# Ensure all emails are parsed with latest version of stopwords.txt\r\nparse_all()\r\n\r\n# Load all of the parsed emails into RAM as arrays of words\r\n# Want to keep these separate so we can append the right class variable later \r\nall_messages = [each_message.split() for each_message in load_strings(\"../res/emails/*-*.txt.out\")]\r\nall_spams = [each_message.split() for each_message in load_strings(\"../res/emails/spm*.txt.out\")]\r\n\r\n# Get a set of all of the unique words in all of the emails \r\n# These will be the inputs to perceptron so we must have an ordered list without duplicates\r\nall_words = set([])\r\nfor each_message in all_messages:\r\n all_words = all_words | set(each_message) \r\nfor each_spam in all_spams:\r\n all_words = all_words | set(each_spam)\r\n\r\n# Order the set of all unique words\r\n# Should sort these as there is no defined order in converting sets to lists, \r\n# And this may differ across python implementations causing issues for pickled\r\n# Perceptron instances?\r\nall_words = list(all_words)\r\nall_words.sort()\r\nprint \"Total number of perceptron inputs: \" + str(len(all_words))\r\n\r\n# Save the ordered list of inputs for the perceptron\r\nwith open('../res/inputs.names',\"w\") as fid:\r\n for each_word in all_words:\r\n fid.write(each_word)\r\n fid.write(\",\")\r\n\r\n# Generate the data set with a record for each message\r\nwith open('../res/emails_proc.data', \"w\") as fid:\r\n for each_message in all_messages:\r\n message_words = set(each_message)\r\n for each_input in all_words:\r\n fid.write(\"1,\" if each_input in message_words else \"0,\")\r\n fid.write(\"0\\n\")\r\n for each_spam in all_spams:\r\n spam_words = set(each_spam)\r\n for each_input in all_words:\r\n fid.write(\"1,\" if each_input in spam_words else \"0,\")\r\n fid.write(\"1\\n\")\r\n " }, { "alpha_fraction": 0.5559748411178589, "alphanum_fraction": 0.5903564095497131, "avg_line_length": 39.155174255371094, "blob_id": "4a32bf080442fb68bd4b69cb61e23cd9503eb51f", "content_id": "f550748fc1428027e07f61ca44231b15dc2be3a9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2385, "license_type": "no_license", "max_line_length": 113, "num_lines": 58, "path": "/src/spam_filter_mlp.py", "repo_name": "sam-hunt/spam-filter", "src_encoding": "UTF-8", "text": "'''\r\nCreated on 13/09/2015\r\n\r\n@author: Sam Hunt\r\n'''\r\n\r\nimport numpy as np\r\nimport pylab as pl\r\nimport mlp\r\nimport save_perceptron as sp\r\n\r\nemails = np.loadtxt('../res/emails_proc.data', delimiter=\",\")\r\nprint \"../res/emails_proc.data loaded.\"\r\n\r\n#will still compile if number of inputs changes \r\nnInputs = np.shape(emails)[1]\r\n\r\n#Keep track of the overall highest success rate NN \r\nhighest_overall_sr = 0\r\n#Keep track of the highest success rate NN with 0 false positives\r\nhighest_sr_nofp = 0\r\n\r\n#Train 125 NNs with various learning rates, numbers of training iterations, and shuffle subsets\r\nfor learning_rate in [0.1]:\r\n for hidden_nodes in [1, 2, 3]:\r\n #for iterations in [50, 100, 200, 300]:\r\n for shuffle_iterations in xrange(0,10):\r\n #shuffle the email data-set records\r\n np.random.shuffle(emails)\r\n \r\n #use the first half for training, second half for testing \r\n trainin = emails[0:200,:nInputs-1]\r\n validin = emails[200:400,:nInputs-1]\r\n testin = emails[400:600,:nInputs-1]\r\n 
traintgt = emails[0:200,nInputs-1:nInputs]\r\n validtgt = emails[200:400,nInputs-1:nInputs]\r\n testtgt = emails[400:600,nInputs-1:nInputs]\r\n\r\n #create a new empty mlp and train it\r\n p = None\r\n p = mlp.mlp(trainin, traintgt, hidden_nodes)\r\n p.earlystopping(trainin, traintgt, validin, validtgt, learning_rate)\r\n #p.mlptrain(trainin, traintgt, learning_rate, iterations)\r\n \r\n #test the trained perceptron and store the resulting confusion matrix and success rate\r\n cm, sr = p.confmat(testin, testtgt)\r\n \r\n print \"LR: \", learning_rate, \" HN: \", hidden_nodes, \" SR: \", int(sr*100), \"% FP: \", cm[0][1]\r\n pl.plot_date(hidden_nodes, sr)\r\n \r\n #commit to disk and log the networks and with the highest success rates\r\n if (sr > highest_overall_sr):\r\n sp.save(p, \"highest_overall_sr_mlp\", cm, sr, learning_rate, 0, hidden_nodes)\r\n highest_overall_sr, best_nn = sr, p\r\n if ((cm[0][1] == 0) and sr > highest_sr_nofp):\r\n sp.save(p, \"highest_sr_no_false_positives_mlp\", cm, sr, learning_rate, 0, hidden_nodes)\r\n highest_sr_nofp, best_nn_nofp = sr, p\r\npl.show()" }, { "alpha_fraction": 0.7826520204544067, "alphanum_fraction": 0.7826520204544067, "avg_line_length": 53.72222137451172, "blob_id": "67369b424d3ea2e7c4092929f54dcefa64a195fb", "content_id": "67cb40d0adcecb08044eab3f4720f727b557e74a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1003, "license_type": "no_license", "max_line_length": 137, "num_lines": 18, "path": "/README.md", "repo_name": "sam-hunt/spam-filter", "src_encoding": "UTF-8", "text": "# spam-filter\r\n\r\nA simple spam filter using neural networks.\r\n\r\n![image](/res/Untitled.png?raw=true \"Visualisation of MLP accuracy as number of hidden nodes increases.\")\r\n\r\nThe algorithm uses two supervised machine learning algorithms:\r\n - the single-layer-perceptron (SLP) and\r\n - the multi-layer-perception (MLP)\r\n\r\nto generate black-box neural networks which can accurately classify email texts as spam or not spam, based on a set of sample emails \r\nwhich have already been classified.\r\n\r\nBitsets were generated for each word found, after applying the porter stemmer algorithm to group words with the same meaning. 
\r\nThese were then used to train multiple networks by partitioning the data into training, testing, and validation sets, \r\nand then ranking network instances based on the number of correct classifications, false positives, and false negatives.\r\n\r\nA more in-depth explanation, as well as sample resultsets, can be found in the [readme](/doc/README.pdf) file created for the assignment.\r\n" }, { "alpha_fraction": 0.6072980165481567, "alphanum_fraction": 0.6185925006866455, "avg_line_length": 37.36666488647461, "blob_id": "2daef8f6611ebae97f08ff0fa2eb5bd3467e04c7", "content_id": "517c70646e5ac8eb80883583c6b20a23924c2ff8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1151, "license_type": "no_license", "max_line_length": 99, "num_lines": 30, "path": "/src/remove_stopwords.py", "repo_name": "sam-hunt/spam-filter", "src_encoding": "UTF-8", "text": "\nfrom nltk.stem.porter import PorterStemmer\n\ndef parse(filename):\n\n punctuation= ['.',',','?',':',';','\\n']\n \n #make stopwords a set for faster membership lookup later (using a large stopwords list)\n stopwords = set(open('../res/stopwords.txt', 'r').read().split())\n textwords = open(filename, 'r').read()\n \n #my extra couple of rules to reduce number of unnecessary inputs\n punctuation.extend([\"'\", '\"', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9'])\n textwords = textwords.replace('-', ' ')\n textwords = textwords.replace('!', ' ! ')\n \n textwords = textwords.translate(None,''.join(punctuation))\n textwords = [t.strip() for t in textwords.split()]\n \n #porters algorithm inserted in here\n filteredtext = [parse.ps.stem_word(t.lower()) for t in textwords if t.lower() not in stopwords]\n \n print([t for t in filteredtext])\n fid = open('../res/emails-clean/'+filename+'.out',\"w\")\n for i in filteredtext:\n fid.write(i)\n fid.write(\" \")\n fid.close()\n\n#static class instance so a new instance is not constructed each time parse is called (600 times)\nparse.ps = PorterStemmer()" }, { "alpha_fraction": 0.5653923749923706, "alphanum_fraction": 0.5814889073371887, "avg_line_length": 43.181819915771484, "blob_id": "b125cedcafe5e6cd7fddb6dc1f2d44b692648cb6", "content_id": "452d9780e39087cd3cb6f726f53dc45931978e5f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 994, "license_type": "no_license", "max_line_length": 108, "num_lines": 22, "path": "/src/save_perceptron.py", "repo_name": "sam-hunt/spam-filter", "src_encoding": "UTF-8", "text": "'''\r\nCreated on 13/09/2015\r\n\r\n@author: Sam Hunt\r\n'''\r\n\r\nimport pickle\r\n\r\ndef save(p_instance, filename, confmat, success_rate, learning_rate, training_iterations=0, hidden_nodes=0):\r\n '''save the serialised perceptron and log its settings/output'''\r\n with open(\"../out/\" + filename + \".p\", 'wb') as handle:\r\n pickle.dump(p_instance, handle)\r\n with open(\"../out/\" + filename + \".txt\", 'w') as handle:\r\n if (hidden_nodes>0):\r\n handle.write(\"hidden nodes: \" + str(hidden_nodes) + '\\n')\r\n if (training_iterations>0):\r\n handle.write(\"training iterations: \" + str(training_iterations) + '\\n')\r\n handle.write(\"\\nlearning rate: \" + str(learning_rate) + '\\n')\r\n handle.write(\"\\nconfusion matrix: \\n\" + str(confmat) + '\\n')\r\n handle.write(\"\\nsuccess rate: \" + str(success_rate) + '\\n')\r\n handle.write(\"\\nfalse positives: \" + str(confmat[0][1]) + '\\n')\r\n handle.write(\"\\nfalse negatives: \" + str(confmat[1][0]) + '\\n')\r\n" } ]
6
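The spam-filter record above ships training scripts but no helper for classifying a fresh message against the saved vocabulary. As a sketch of the bitset encoding its README describes, a new email (already stemmed and stopword-filtered by `remove_stopwords.parse`) could be vectorized like this; the helper name is made up, and the file layout is inferred from `create_dataset_pcn.py`, which writes `inputs.names` as a trailing-comma-separated list.

```python
import numpy as np


def vectorize_message(parsed_words, names_path='../res/inputs.names'):
    """Encode a stemmed, stopword-filtered word list as the 0/1 input
    vector expected by the trained perceptrons (illustrative only)."""
    with open(names_path) as fid:
        # drop the empty string produced by the trailing comma
        all_words = [w for w in fid.read().split(',') if w]
    present = set(parsed_words)
    return np.array([1 if w in present else 0 for w in all_words])
```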
Sulam-Group/ml-ista
https://github.com/Sulam-Group/ml-ista
982757cece8edc5b4739021458191e90a0bbf192
b2d397219e432fbc2696ae8ce10fea0a92b46b25
af97a5ae37837c873fc2aca7323c7e23e91f0b22
refs/heads/master
2022-07-25T09:21:21.597677
2018-11-20T14:13:52
2018-11-20T14:13:52
262,859,575
2
1
null
null
null
null
null
[ { "alpha_fraction": 0.7008797526359558, "alphanum_fraction": 0.7653958797454834, "avg_line_length": 112.66666412353516, "blob_id": "211afc75146f9d75c57e20956ce1167d4915eee3", "content_id": "fa6c85542e07e13abf9a91ef3617d19ee8b7204d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 341, "license_type": "no_license", "max_line_length": 297, "num_lines": 3, "path": "/README.md", "repo_name": "Sulam-Group/ml-ista", "src_encoding": "UTF-8", "text": "## Multi Layer ISTA and FISTA for CNNs ##\n\nDemo for Multi-Layer ISTA and Multi-Layer FISTA algorithms for convolutional neural networks, as described in [J. Sulam, A. Aberdam, A. Beck, M. Elad, (2018). On Multi-Layer Basis Pursuit, Efficient Algorithms and Convolutional Neural Networks. arXiv preprint:1806.00701](https://arxiv.org/abs/1806.00701)\n" }, { "alpha_fraction": 0.5087180137634277, "alphanum_fraction": 0.5838773846626282, "avg_line_length": 53.21041488647461, "blob_id": "0c1119387422bd7bd6ccab3826bd8b12f565258c", "content_id": "faab5c3c289154754d86ad51e4ff60d4f7484569", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 26038, "license_type": "no_license", "max_line_length": 224, "num_lines": 480, "path": "/CIFAR/Models.py", "repo_name": "Sulam-Group/ml-ista", "src_encoding": "UTF-8", "text": "import torch\nimport torch.nn as nn\nfrom torch.autograd import Variable\nimport torch.utils.data as Data\nimport torch.nn.functional as F\nimport torchvision\nimport matplotlib.pyplot as plt\nfrom matplotlib import cm\nimport numpy as np\n\nimport pdb\n\n##################################################\n\n#### MultiLayer ISTA NET ####\n\n##################################################\n\n\nclass ML_ISTA(nn.Module):\n def __init__(self,T):\n super(ML_ISTA, self).__init__()\n \n self.T = T\n \n # Convolutional Filters\n self.W1 = nn.Parameter(torch.randn(32,3,4,4), requires_grad=True); self.strd1 = 2; \n self.W2 = nn.Parameter(torch.randn(64,32,4,4), requires_grad=True); self.strd2 = 2; \n self.W3 = nn.Parameter(torch.randn(128,64,4,4), requires_grad=True); self.strd3 = 2;\n self.W4 = nn.Parameter(torch.randn(256,128,3,3), requires_grad=True); self.strd4 = 1;\n self.W5 = nn.Parameter(torch.randn(512,256,3,3), requires_grad=True); self.strd5 = 1;\n self.W6 = nn.Parameter(torch.randn(512,512,3,3), requires_grad=True); self.strd6 = 1;\n \n self.c1 = nn.Parameter(torch.ones(1,1,1,1), requires_grad=True)\n self.c2 = nn.Parameter(torch.ones(1,1,1,1), requires_grad=True)\n self.c3 = nn.Parameter(torch.ones(1,1,1,1), requires_grad=True)\n \n # Biases / Thresholds\n self.b1 = nn.Parameter(torch.zeros(1,32,1,1), requires_grad=True)\n self.b2 = nn.Parameter(torch.zeros(1,64,1,1), requires_grad=True)\n self.b3 = nn.Parameter(torch.zeros(1,128,1,1), requires_grad=True)\n self.b4 = nn.Parameter(torch.zeros(1,256,1,1), requires_grad=True)\n self.b5 = nn.Parameter(torch.zeros(1,512,1,1), requires_grad=True)\n self.b6 = nn.Parameter(torch.zeros(1,512,1,1), requires_grad=True)\n \n # Classifier\n self.Wclass = nn.Linear(512, 10)\n \n # Initialization\n self.W1.data = .1/np.sqrt(3*16) * self.W1.data\n self.W2.data = .1/np.sqrt(32*16) * self.W2.data\n self.W3.data = .1/np.sqrt(64*16) * self.W3.data\n self.W4.data = 1/np.sqrt(128*9) * self.W4.data\n self.W5.data = 1/np.sqrt(256*9) * self.W5.data\n self.W6.data = 1/np.sqrt(512*9) * self.W6.data\n \n def forward(self, x):\n \n # Encoding\n gamma1 = F.relu(self.c1 * F.conv2d(x,self.W1, stride = 
self.strd1,padding=1) + self.b1)\n gamma2 = F.relu(self.c2 * F.conv2d(gamma1,self.W2, stride = self.strd2,padding=1) + self.b2)\n gamma3 = F.relu(self.c3 * F.conv2d(gamma2,self.W3, stride = self.strd3,padding=1) + self.b3)\n \n \n for _ in range(self.T):\n \n # backward computation\n gamma2 = F.conv_transpose2d(gamma3,self.W3, stride=self.strd3,padding = 1)\n gamma1 = F.conv_transpose2d(gamma2,self.W2, stride=self.strd2,padding = 1)\n \n # forward computation\n gamma1 = F.relu( (gamma1 - self.c1 * F.conv2d( F.conv_transpose2d(gamma1,self.W1, stride = self.strd1,padding=1) - x ,self.W1, stride = self.strd1,padding=1)) + self.b1)\n gamma2 = F.relu( (gamma2 - self.c2 * F.conv2d( F.conv_transpose2d(gamma2,self.W2, stride = self.strd2,padding=1) - gamma1, self.W2, stride = self.strd2,padding=1)) + self.b2) \n gamma3 = F.relu( (gamma3 - self.c3 * F.conv2d( F.conv_transpose2d(gamma3,self.W3, stride = self.strd3,padding=1) - gamma2, self.W3, stride = self.strd3,padding=1)) + self.b3) \n \n \n gamma4 = F.relu(F.conv2d(gamma3,self.W4, stride = self.strd4,padding=1) + self.b4)\n gamma5 = F.max_pool2d(F.relu(F.conv2d(gamma4,self.W5, stride = self.strd5,padding=1) + self.b5), kernel_size = 2, stride = 2)\n gamma6 = F.max_pool2d(F.relu(F.conv2d(gamma5,self.W6, stride = self.strd6,padding=1) + self.b6), kernel_size = 2, stride = 2)\n \n # classifier\n gammaGoal = gamma6\n gamma = gammaGoal.view(gammaGoal.shape[0],gammaGoal.shape[1]*gammaGoal.shape[2]*gammaGoal.shape[3])\n out = self.Wclass(gamma)\n out = F.log_softmax(out,dim = 1)\n \n return out\n \n\n\n \n##################################################\n\n#### MultiLayer FISTA NET ####\n\n##################################################\n\n\n\nclass ML_FISTA(nn.Module):\n def __init__(self,T):\n super(ML_FISTA, self).__init__()\n \n self.T = T\n \n # Convolutional Filters\n self.W1 = nn.Parameter(torch.randn(32,3,4,4), requires_grad=True); self.strd1 = 2; \n self.W2 = nn.Parameter(torch.randn(64,32,4,4), requires_grad=True); self.strd2 = 2; \n self.W3 = nn.Parameter(torch.randn(128,64,4,4), requires_grad=True); self.strd3 = 2;\n self.W4 = nn.Parameter(torch.randn(256,128,3,3), requires_grad=True); self.strd4 = 1;\n self.W5 = nn.Parameter(torch.randn(512,256,3,3), requires_grad=True); self.strd5 = 1;\n self.W6 = nn.Parameter(torch.randn(512,512,3,3), requires_grad=True); self.strd6 = 1;\n \n self.c1 = nn.Parameter(torch.ones(1,1,1,1), requires_grad=True)\n self.c2 = nn.Parameter(torch.ones(1,1,1,1), requires_grad=True)\n self.c3 = nn.Parameter(torch.ones(1,1,1,1), requires_grad=True)\n \n # Biases / Thresholds\n self.b1 = nn.Parameter(torch.zeros(1,32,1,1), requires_grad=True)\n self.b2 = nn.Parameter(torch.zeros(1,64,1,1), requires_grad=True)\n self.b3 = nn.Parameter(torch.zeros(1,128,1,1), requires_grad=True)\n self.b4 = nn.Parameter(torch.zeros(1,256,1,1), requires_grad=True)\n self.b5 = nn.Parameter(torch.zeros(1,512,1,1), requires_grad=True)\n self.b6 = nn.Parameter(torch.zeros(1,512,1,1), requires_grad=True)\n \n # Classifier\n self.Wclass = nn.Linear(512, 10)\n \n # Initialization\n self.W1.data = .1/np.sqrt(3*16) * self.W1.data\n self.W2.data = .1/np.sqrt(32*16) * self.W2.data\n self.W3.data = .1/np.sqrt(64*16) * self.W3.data\n self.W4.data = 1/np.sqrt(128*9) * self.W4.data\n self.W5.data = 1/np.sqrt(256*9) * self.W5.data\n self.W6.data = 1/np.sqrt(512*9) * self.W6.data\n \n def forward(self, x):\n \n t = 1\n t_prv = t\n\n # Encoding\n gamma1 = F.relu(self.c1 * F.conv2d(x,self.W1, stride = self.strd1,padding=1) + self.b1)\n gamma2 = 
F.relu(self.c2 * F.conv2d(gamma1,self.W2, stride = self.strd2,padding=1) + self.b2)\n gamma3 = F.relu(self.c3 * F.conv2d(gamma2,self.W3, stride = self.strd3,padding=1) + self.b3)\n gamma3_prv = gamma3\n \n for _ in range(self.T):\n \n t_prv = t\n t = float((1+np.sqrt(1+4*t_prv**2))/2) \n \n Z = gamma3 + (t_prv-1)/t * (gamma3 - gamma3_prv)\n gamma3_prv = gamma3\n\n # backward computation\n gamma2 = F.conv_transpose2d(Z,self.W3, stride=self.strd3,padding = 1)\n gamma1 = F.conv_transpose2d(gamma2,self.W2, stride=self.strd2,padding = 1)\n \n # forward computation\n gamma1 = F.relu( (gamma1 - self.c1 * F.conv2d( F.conv_transpose2d(gamma1,self.W1, stride = self.strd1,padding=1) - x ,self.W1, stride = self.strd1,padding=1)) + self.b1)\n gamma2 = F.relu( (gamma2 - self.c2 * F.conv2d( F.conv_transpose2d(gamma2,self.W2, stride = self.strd2,padding=1) - gamma1, self.W2, stride = self.strd2,padding=1)) + self.b2) \n gamma3 = F.relu( (Z - self.c3 * F.conv2d( F.conv_transpose2d(Z,self.W3, stride = self.strd3,padding=1) - gamma2, self.W3, stride = self.strd3,padding=1)) + self.b3) \n \n \n gamma4 = F.relu(F.conv2d(gamma3,self.W4, stride = self.strd4,padding=1) + self.b4)\n gamma5 = F.max_pool2d(F.relu(F.conv2d(gamma4,self.W5, stride = self.strd5,padding=1) + self.b5), kernel_size = 2, stride = 2)\n gamma6 = F.max_pool2d(F.relu(F.conv2d(gamma5,self.W6, stride = self.strd6,padding=1) + self.b6), kernel_size = 2, stride = 2)\n \n # classifier\n gammaGoal = gamma6\n gamma = gammaGoal.view(gammaGoal.shape[0],gammaGoal.shape[1]*gammaGoal.shape[2]*gammaGoal.shape[3])\n out = self.Wclass(gamma)\n out = F.log_softmax(out,dim = 1)\n \n return out\n \n \n##################################################\n\n#### ML-LISTA NET ####\n\n##################################################\n\n\nclass ML_LISTA_NET(nn.Module):\n def __init__(self,T):\n super(ML_LISTA_NET, self).__init__()\n \n self.T = T \n \n # Convolutional Filters\n self.W1 = nn.Parameter(torch.randn(32,3,4,4), requires_grad=True); self.strd1 = 2; \n self.W2 = nn.Parameter(torch.randn(64,32,4,4), requires_grad=True); self.strd2 = 2; \n self.W3 = nn.Parameter(torch.randn(128,64,4,4), requires_grad=True); self.strd3 = 2;\n self.W4 = nn.Parameter(torch.randn(256,128,3,3), requires_grad=True); self.strd4 = 1;\n self.W5 = nn.Parameter(torch.randn(512,256,3,3), requires_grad=True); self.strd5 = 1;\n self.W6 = nn.Parameter(torch.randn(512,512,3,3), requires_grad=True); self.strd6 = 1;\n \n self.B1 = nn.Parameter(torch.randn(32,3,4,4), requires_grad=True); \n self.B2 = nn.Parameter(torch.randn(64,32,4,4), requires_grad=True); \n self.B3 = nn.Parameter(torch.randn(128,64,4,4), requires_grad=True); \n \n # Biases / Thresholds\n self.b1 = nn.Parameter(torch.zeros(1,32,1,1), requires_grad=True)\n self.b2 = nn.Parameter(torch.zeros(1,64,1,1), requires_grad=True)\n self.b3 = nn.Parameter(torch.zeros(1,128,1,1), requires_grad=True)\n self.b4 = nn.Parameter(torch.zeros(1,256,1,1), requires_grad=True)\n self.b5 = nn.Parameter(torch.zeros(1,512,1,1), requires_grad=True)\n self.b6 = nn.Parameter(torch.zeros(1,512,1,1), requires_grad=True)\n \n # Classifier\n self.Wclass = nn.Linear(512, 10)\n \n # Initialization\n self.W1.data = .1/np.sqrt(3*16) * self.W1.data\n self.W2.data = .1/np.sqrt(32*16) * self.W2.data\n self.W3.data = .1/np.sqrt(64*16) * self.W3.data\n self.W4.data = 1/np.sqrt(128*9) * self.W4.data\n self.W5.data = 1/np.sqrt(256*9) * self.W5.data\n self.W6.data = 1/np.sqrt(512*9) * self.W6.data\n \n self.B1.data = .1/np.sqrt(3*16) * self.B1.data\n self.B2.data = 
.1/np.sqrt(32*16) * self.B2.data\n self.B3.data = .1/np.sqrt(64*16) * self.B3.data\n \n \n \n def forward(self, x):\n \n # Encoding\n gamma1 = F.relu(F.conv2d(x,self.B1, stride = self.strd1,padding=1) + self.b1) # first estimation\n gamma2 = F.relu(F.conv2d(gamma1,self.B2, stride = self.strd2,padding=1) + self.b2) \n gamma3 = F.relu(F.conv2d(gamma2,self.B3, stride = self.strd3,padding=1) + self.b3) \n \n for _ in range(self.T): \n \n gamma2 = F.conv_transpose2d(gamma3,self.B3, stride=self.strd3, padding = 1)\n gamma1 = F.conv_transpose2d(gamma2,self.B2, stride=self.strd2, padding = 1) \n \n # forward computation\n #pdb.set_trace()\n gamma1 = F.relu( gamma1 - F.conv2d(F.conv_transpose2d(gamma1,self.W1, stride = self.strd1,padding=1),self.W1, stride = self.strd1,padding=1) + F.conv2d( x ,self.B1, stride = self.strd1,padding=1) + self.b1 )\n gamma2 = F.relu( gamma2 - F.conv2d(F.conv_transpose2d(gamma2,self.W2, stride = self.strd2,padding=1),self.W2, stride = self.strd2,padding=1) + F.conv2d( gamma1 ,self.B2, stride = self.strd2,padding=1) + self.b2 )\n gamma3 = F.relu( gamma3 - F.conv2d(F.conv_transpose2d(gamma3,self.W3, stride = self.strd3,padding=1),self.W3, stride = self.strd3,padding=1) + F.conv2d( gamma2 ,self.B3, stride = self.strd3,padding=1) + self.b3 )\n \n \n gamma4 = F.relu(F.conv2d(gamma3,self.W4, stride = self.strd4,padding=1) + self.b4)\n gamma5 = F.max_pool2d(F.relu(F.conv2d(gamma4,self.W5, stride = self.strd5,padding=1) + self.b5), kernel_size = 2, stride = 2)\n gamma6 = F.max_pool2d(F.relu(F.conv2d(gamma5,self.W6, stride = self.strd6,padding=1) + self.b6), kernel_size = 2, stride = 2)\n \n # classifier\n gammaGoal = gamma6\n gamma = gammaGoal.view(gammaGoal.shape[0],gammaGoal.shape[1]*gammaGoal.shape[2]*gammaGoal.shape[3])\n out = self.Wclass(gamma)\n out = F.log_softmax(out,dim = 1)\n \n \n return out\n \n \n\n\n##################################################\n\n#### Layered BP NET ####\n\n##################################################\n\n\n\n\nclass LBP_NET(nn.Module):\n def __init__(self,T):\n super(LBP_NET, self).__init__()\n \n self.T = T\n \n # Convolutional Filters\n self.W1 = nn.Parameter(torch.randn(32,3,4,4), requires_grad=True); self.strd1 = 2; \n self.W2 = nn.Parameter(torch.randn(64,32,4,4), requires_grad=True); self.strd2 = 2; \n self.W3 = nn.Parameter(torch.randn(128,64,4,4), requires_grad=True); self.strd3 = 2;\n self.W4 = nn.Parameter(torch.randn(256,128,3,3), requires_grad=True); self.strd4 = 1;\n self.W5 = nn.Parameter(torch.randn(512,256,3,3), requires_grad=True); self.strd5 = 1;\n self.W6 = nn.Parameter(torch.randn(512,512,3,3), requires_grad=True); self.strd6 = 1;\n \n self.c1 = nn.Parameter(torch.ones(1,1,1,1), requires_grad=True)\n self.c2 = nn.Parameter(torch.ones(1,1,1,1), requires_grad=True)\n self.c3 = nn.Parameter(torch.ones(1,1,1,1), requires_grad=True)\n \n # Biases / Thresholds\n self.b1 = nn.Parameter(torch.zeros(1,32,1,1), requires_grad=True)\n self.b2 = nn.Parameter(torch.zeros(1,64,1,1), requires_grad=True)\n self.b3 = nn.Parameter(torch.zeros(1,128,1,1), requires_grad=True)\n self.b4 = nn.Parameter(torch.zeros(1,256,1,1), requires_grad=True)\n self.b5 = nn.Parameter(torch.zeros(1,512,1,1), requires_grad=True)\n self.b6 = nn.Parameter(torch.zeros(1,512,1,1), requires_grad=True)\n \n # Classifier\n self.Wclass = nn.Linear(512, 10)\n \n # Initialization\n self.W1.data = .1/np.sqrt(3*16) * self.W1.data\n self.W2.data = .1/np.sqrt(32*16) * self.W2.data\n self.W3.data = .1/np.sqrt(64*16) * self.W3.data\n self.W4.data = 1/np.sqrt(128*9) * 
self.W4.data\n self.W5.data = 1/np.sqrt(256*9) * self.W5.data\n self.W6.data = 1/np.sqrt(512*9) * self.W6.data\n \n def forward(self, x):\n \n # Encoding\n if self.T==0:\n # just a CNN\n gamma1 = F.relu(self.c1 * F.conv2d(x,self.W1, stride = self.strd1,padding=1) + self.b1)\n gamma2 = F.relu(self.c2 * F.conv2d(gamma1,self.W2, stride = self.strd2,padding=1) + self.b2)\n gamma3 = F.relu(self.c3 * F.conv2d(gamma2,self.W3, stride = self.strd3,padding=1) + self.b3)\n else:\n gamma1 = F.relu(self.c1 * F.conv2d(x,self.W1, stride = self.strd1,padding=1) + self.b1)\n for _ in range(self.T):\n gamma1 = F.relu( (gamma1 - self.c1 * F.conv2d( F.conv_transpose2d(gamma1,self.W1, stride = self.strd1,padding=1) - x ,self.W1, stride = self.strd1,padding=1)) + self.b1)\n \n gamma2 = F.relu(self.c2 * F.conv2d(gamma1,self.W2, stride = self.strd2,padding=1) + self.b2)\n for _ in range(self.T):\n gamma2 = F.relu( (gamma2 - self.c2 * F.conv2d( F.conv_transpose2d(gamma2,self.W2, stride = self.strd2,padding=1) - gamma1, self.W2, stride = self.strd2,padding=1)) + self.b2) \n \n gamma3 = F.relu(self.c3 * F.conv2d(gamma2,self.W3, stride = self.strd3,padding=1) + self.b3)\n for _ in range(self.T):\n gamma3 = F.relu( (gamma3 - self.c3 * F.conv2d( F.conv_transpose2d(gamma3,self.W3, stride = self.strd3,padding=1) - gamma2, self.W3, stride = self.strd3,padding=1)) + self.b3) \n \n \n \n gamma4 = F.relu(F.conv2d(gamma3,self.W4, stride = self.strd4,padding=1) + self.b4)\n gamma5 = F.max_pool2d(F.relu(F.conv2d(gamma4,self.W5, stride = self.strd5,padding=1) + self.b5), kernel_size = 2, stride = 2)\n gamma6 = F.max_pool2d(F.relu(F.conv2d(gamma5,self.W6, stride = self.strd6,padding=1) + self.b6), kernel_size = 2, stride = 2)\n \n # classifier\n gammaGoal = gamma6\n gamma = gammaGoal.view(gammaGoal.shape[0],gammaGoal.shape[1]*gammaGoal.shape[2]*gammaGoal.shape[3])\n out = self.Wclass(gamma)\n out = F.log_softmax(out,dim = 1)\n \n return out\n\n\n\n\n##################################################\n\n#### All Free NET ####\n\n##################################################\n\n\nclass All_Free(nn.Module):\n def __init__(self):\n super(All_Free, self).__init__()\n m1 = 32\n m2 = 64\n m3 = 128\n \n # Convolutional Filters\n self.W1_1 = nn.Parameter(.1 /np.sqrt(3*16) * torch.randn(32,3,4,4), requires_grad=True)\n self.W1_2 = nn.Parameter(.1 /np.sqrt(3*16) * torch.randn(32,3,4,4), requires_grad=True)\n self.W1_3 = nn.Parameter(.1 /np.sqrt(3*16) * torch.randn(32,3,4,4), requires_grad=True)\n self.W1_4 = nn.Parameter(.1 /np.sqrt(3*16) * torch.randn(32,3,4,4), requires_grad=True)\n self.W1_5 = nn.Parameter(.1 /np.sqrt(3*16) * torch.randn(32,3,4,4), requires_grad=True)\n self.W1_6 = nn.Parameter(.1 /np.sqrt(3*16) * torch.randn(32,3,4,4), requires_grad=True)\n self.W1_7 = nn.Parameter(.1 /np.sqrt(3*16) * torch.randn(32,3,4,4), requires_grad=True)\n self.strd1 = 2;\n \n self.W2_1 = nn.Parameter(.1 /np.sqrt(m1*16) * torch.randn(64,32,4,4), requires_grad=True)\n self.W2_2 = nn.Parameter(.1 /np.sqrt(m1*16) * torch.randn(64,32,4,4), requires_grad=True)\n self.W2_3 = nn.Parameter(.1 /np.sqrt(m1*16) * torch.randn(64,32,4,4), requires_grad=True)\n self.W2_4 = nn.Parameter(.1 /np.sqrt(m1*16) * torch.randn(64,32,4,4), requires_grad=True)\n self.W2_5 = nn.Parameter(.1 /np.sqrt(m1*16) * torch.randn(64,32,4,4), requires_grad=True)\n self.W2_6 = nn.Parameter(.1 /np.sqrt(m1*16) * torch.randn(64,32,4,4), requires_grad=True)\n self.W2_7 = nn.Parameter(.1 /np.sqrt(m1*16) * torch.randn(64,32,4,4), requires_grad=True) \n self.strd2 = 2;\n \n self.W3_1 = 
nn.Parameter(.1 /np.sqrt(m2*16) * torch.randn(128,64,4,4), requires_grad=True)\n self.W3_2 = nn.Parameter(.1 /np.sqrt(m2*16) * torch.randn(128,64,4,4), requires_grad=True)\n self.W3_3 = nn.Parameter(.1 /np.sqrt(m2*16) * torch.randn(128,64,4,4), requires_grad=True)\n self.W3_4 = nn.Parameter(.1 /np.sqrt(m2*16) * torch.randn(128,64,4,4), requires_grad=True)\n self.W3_5 = nn.Parameter(.1 /np.sqrt(m2*16) * torch.randn(128,64,4,4), requires_grad=True)\n self.W3_6 = nn.Parameter(.1 /np.sqrt(m2*16) * torch.randn(128,64,4,4), requires_grad=True)\n self.W3_7 = nn.Parameter(.1 /np.sqrt(m2*16) * torch.randn(128,64,4,4), requires_grad=True)\n self.strd3 = 2\n \n # Biases / Thresholds\n self.b1_1 = nn.Parameter(torch.zeros(1,m1,1,1), requires_grad=True)\n self.b1_2 = nn.Parameter(torch.zeros(1,m1,1,1), requires_grad=True)\n self.b1_3 = nn.Parameter(torch.zeros(1,m1,1,1), requires_grad=True)\n self.b1_4 = nn.Parameter(torch.zeros(1,m1,1,1), requires_grad=True)\n self.b1_5 = nn.Parameter(torch.zeros(1,m1,1,1), requires_grad=True)\n self.b1_6 = nn.Parameter(torch.zeros(1,m1,1,1), requires_grad=True)\n self.b1_7 = nn.Parameter(torch.zeros(1,m1,1,1), requires_grad=True)\n \n self.b2_1 = nn.Parameter(torch.zeros(1,m2,1,1), requires_grad=True)\n self.b2_2 = nn.Parameter(torch.zeros(1,m2,1,1), requires_grad=True)\n self.b2_3 = nn.Parameter(torch.zeros(1,m2,1,1), requires_grad=True)\n self.b2_4 = nn.Parameter(torch.zeros(1,m2,1,1), requires_grad=True)\n self.b2_5 = nn.Parameter(torch.zeros(1,m2,1,1), requires_grad=True)\n self.b2_6 = nn.Parameter(torch.zeros(1,m2,1,1), requires_grad=True)\n self.b2_7 = nn.Parameter(torch.zeros(1,m2,1,1), requires_grad=True)\n \n self.b3_1 = nn.Parameter(torch.zeros(1,m3,1,1), requires_grad=True)\n self.b3_2 = nn.Parameter(torch.zeros(1,m3,1,1), requires_grad=True)\n self.b3_3 = nn.Parameter(torch.zeros(1,m3,1,1), requires_grad=True)\n self.b3_4 = nn.Parameter(torch.zeros(1,m3,1,1), requires_grad=True)\n self.b3_5 = nn.Parameter(torch.zeros(1,m3,1,1), requires_grad=True)\n self.b3_6 = nn.Parameter(torch.zeros(1,m3,1,1), requires_grad=True)\n self.b3_7 = nn.Parameter(torch.zeros(1,m3,1,1), requires_grad=True)\n \n self.W4 = nn.Parameter(torch.randn(256,128,3,3), requires_grad=True); self.strd4 = 1;\n self.W5 = nn.Parameter(torch.randn(512,256,3,3), requires_grad=True); self.strd5 = 1;\n self.W6 = nn.Parameter(torch.randn(512,512,3,3), requires_grad=True); self.strd6 = 1;\n \n self.W4.data = 1/np.sqrt(128*9) * self.W4.data\n self.W5.data = 1/np.sqrt(256*9) * self.W5.data\n self.W6.data = 1/np.sqrt(512*9) * self.W6.data\n \n self.b4 = nn.Parameter(torch.zeros(1,256,1,1), requires_grad=True)\n self.b5 = nn.Parameter(torch.zeros(1,512,1,1), requires_grad=True)\n self.b6 = nn.Parameter(torch.zeros(1,512,1,1), requires_grad=True)\n \n \n # Classifier\n self.Wclass = nn.Linear(512, 10)\n \n \n def forward(self, x):\n \n # iter 0\n gamma1 = F.relu(F.conv2d(x,self.W1_1, stride = self.strd1,padding=1) + self.b1_1) # first estimation\n gamma2 = F.relu(F.conv2d(gamma1,self.W2_1, stride = self.strd2,padding=1) + self.b2_1) \n gamma3 = F.relu(F.conv2d(gamma2,self.W3_1, stride = self.strd3,padding=1) + self.b3_1) \n \n # iter 1\n gamma1 = F.relu( (gamma1 - F.conv2d( F.conv_transpose2d(gamma1,self.W1_1, stride = self.strd1,padding=1) - x ,self.W1_2, stride = self.strd1,padding=1)) + self.b1_2)\n gamma2 = F.relu( (gamma2 - F.conv2d( F.conv_transpose2d(gamma2,self.W2_1, stride = self.strd2,padding=1) - gamma1, self.W2_2, stride = self.strd2,padding=1)) + self.b2_2) \n gamma3 = F.relu( (gamma3 - 
F.conv2d( F.conv_transpose2d(gamma3,self.W3_1, stride = self.strd3,padding=1) - gamma2, self.W3_2, stride = self.strd3,padding=1)) + self.b3_2) \n\n # iter 2\n gamma1 = F.relu( (gamma1 - F.conv2d( F.conv_transpose2d(gamma1,self.W1_2, stride = self.strd1,padding=1) - x ,self.W1_3, stride = self.strd1,padding=1)) + self.b1_3)\n gamma2 = F.relu( (gamma2 - F.conv2d( F.conv_transpose2d(gamma2,self.W2_2, stride = self.strd2,padding=1) - gamma1, self.W2_3, stride = self.strd2,padding=1)) + self.b2_3)\n gamma3 = F.relu( (gamma3 - F.conv2d( F.conv_transpose2d(gamma3,self.W3_2, stride = self.strd3,padding=1) - gamma2, self.W3_3, stride = self.strd3,padding=1)) + self.b3_3)\n\n # iter 3\n gamma1 = F.relu( (gamma1 - F.conv2d( F.conv_transpose2d(gamma1,self.W1_3, stride = self.strd1,padding=1) - x ,self.W1_4, stride = self.strd1,padding=1)) + self.b1_4)\n gamma2 = F.relu( (gamma2 - F.conv2d( F.conv_transpose2d(gamma2,self.W2_3, stride = self.strd2,padding=1) - gamma1, self.W2_4, stride = self.strd2,padding=1)) + self.b2_4) \n gamma3 = F.relu( (gamma3 - F.conv2d( F.conv_transpose2d(gamma3,self.W3_3, stride = self.strd3,padding=1) - gamma2, self.W3_4, stride = self.strd3,padding=1)) + self.b3_4) \n\n # iter 4\n gamma1 = F.relu( (gamma1 - F.conv2d( F.conv_transpose2d(gamma1,self.W1_4, stride = self.strd1,padding=1) - x ,self.W1_5, stride = self.strd1,padding=1)) + self.b1_5)\n gamma2 = F.relu( (gamma2 - F.conv2d( F.conv_transpose2d(gamma2,self.W2_4, stride = self.strd2,padding=1) - gamma1, self.W2_5, stride = self.strd2,padding=1)) + self.b2_5) \n gamma3 = F.relu( (gamma3 - F.conv2d( F.conv_transpose2d(gamma3,self.W3_4, stride = self.strd3,padding=1) - gamma2, self.W3_5, stride = self.strd3,padding=1)) + self.b3_5) \n\n # iter 5\n gamma1 = F.relu( (gamma1 - F.conv2d( F.conv_transpose2d(gamma1,self.W1_5, stride = self.strd1,padding=1) - x ,self.W1_6, stride = self.strd1,padding=1)) + self.b1_6)\n gamma2 = F.relu( (gamma2 - F.conv2d( F.conv_transpose2d(gamma2,self.W2_5, stride = self.strd2,padding=1) - gamma1, self.W2_6, stride = self.strd2,padding=1)) + self.b2_6) \n gamma3 = F.relu( (gamma3 - F.conv2d( F.conv_transpose2d(gamma3,self.W3_5, stride = self.strd3,padding=1) - gamma2, self.W3_6, stride = self.strd3,padding=1)) + self.b3_6) \n\n # iter 6\n gamma1 = F.relu( (gamma1 - F.conv2d( F.conv_transpose2d(gamma1,self.W1_6, stride = self.strd1,padding=1) - x ,self.W1_7, stride = self.strd1,padding=1)) + self.b1_7)\n gamma2 = F.relu( (gamma2 - F.conv2d( F.conv_transpose2d(gamma2,self.W2_6, stride = self.strd2,padding=1) - gamma1, self.W2_7, stride = self.strd2,padding=1)) + self.b2_7) \n gamma3 = F.relu( (gamma3 - F.conv2d( F.conv_transpose2d(gamma3,self.W3_6, stride = self.strd3,padding=1) - gamma2, self.W3_7, stride = self.strd3,padding=1)) + self.b3_7) \n \n \n gamma4 = F.relu(F.conv2d(gamma3,self.W4, stride = self.strd4,padding=1) + self.b4)\n gamma5 = F.max_pool2d(F.relu(F.conv2d(gamma4,self.W5, stride = self.strd5,padding=1) + self.b5), kernel_size = 2, stride = 2)\n gamma6 = F.max_pool2d(F.relu(F.conv2d(gamma5,self.W6, stride = self.strd6,padding=1) + self.b6), kernel_size = 2, stride = 2)\n \n # classifier\n gammaGoal = gamma6\n gamma = gammaGoal.view(gammaGoal.shape[0],gammaGoal.shape[1]*gammaGoal.shape[2]*gammaGoal.shape[3])\n out = self.Wclass(gamma)\n out = F.log_softmax(out,dim = 1) \n \n return out\n \n " } ]
2
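The ml-ista record above defines the unfolded networks in Models.py but includes no script showing how they are used. A minimal smoke test of the forward pass, relying only on the constructor and `forward` signatures visible above (the batch size and the unfolding depth `T` are arbitrary, and Models.py is assumed to be on the import path), could be:

```python
import torch
from Models import ML_ISTA

# T is the number of unfolded ML-ISTA iterations
model = ML_ISTA(T=3)

# a dummy batch of 8 CIFAR-10 images (3x32x32)
x = torch.randn(8, 3, 32, 32)
out = model(x)

# the classifier head returns log-softmax scores over 10 classes
print(out.shape)  # torch.Size([8, 10])
```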
Sangheetha/setupPractice
https://github.com/Sangheetha/setupPractice
5fb1a0d74e04b3728863fe156164955680cbb8d9
ef240ef2d60878ae69a249f7e02581ab07aacccb
99d3faeb729db0d515cf5dee7e3917f58d8ee252
refs/heads/master
2020-09-27T22:04:51.506574
2016-09-08T04:31:00
2016-09-08T04:31:00
67,361,597
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7657142877578735, "alphanum_fraction": 0.7828571200370789, "avg_line_length": 42.75, "blob_id": "64044214d94e85c3311498c57e3144572ad8b839", "content_id": "bf787b535f3ecc2fa6e85d40eaa420ffa278cdc4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 175, "license_type": "no_license", "max_line_length": 86, "num_lines": 4, "path": "/testFirebase/tryFirebase.py", "repo_name": "Sangheetha/setupPractice", "src_encoding": "UTF-8", "text": "from firebase import firebase\nfirebase = firebase.FirebaseApplication('https://testapp-1ff42.firebaseio.com/', None)\nresult = firebase.delete('/Users', 'mnaidu')\nprint result\n" }, { "alpha_fraction": 0.488095223903656, "alphanum_fraction": 0.7023809552192688, "avg_line_length": 14.272727012634277, "blob_id": "66df4b7281358f657423f81cda10a7a1254897d2", "content_id": "61944cb4ad3c30575ffa939286d495c025ec3060", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 168, "license_type": "no_license", "max_line_length": 20, "num_lines": 11, "path": "/requirements.txt", "repo_name": "Sangheetha/setupPractice", "src_encoding": "UTF-8", "text": "click==6.6\ndecorator==4.0.10\nFlask==0.11.1\nitsdangerous==0.24\nJinja2==2.8\nMarkupSafe==0.23\nply==3.9\npython-firebase==1.2\nrequests==2.11.1\nsix==1.10.0\nWerkzeug==0.11.11\n" } ]
2
NanoSoft774849/Nano
https://github.com/NanoSoft774849/Nano
4f9d7b9d538aa1f6920a2f74d6c1b14c8406cc90
36da05083574440787551f985a57e1cb093bfef9
1cca581eaa3cf822fdb6004c52dd7a7b7e19d725
refs/heads/main
2023-06-17T03:15:05.797296
2021-07-07T14:56:25
2021-07-07T14:56:25
383,465,746
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.8094038367271423, "alphanum_fraction": 0.8144416213035583, "avg_line_length": 118, "blob_id": "c80acbdfd4d8e5881526ccff5ff3c2dec8e71e58", "content_id": "f4694fcf11ecadaafca5dfeac82f3c6bc628dad5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1195, "license_type": "no_license", "max_line_length": 417, "num_lines": 10, "path": "/README.md", "repo_name": "NanoSoft774849/Nano", "src_encoding": "UTF-8", "text": "# Nano\n\n#I.\tAbstract:\nIn this Report I will talk about the problem of Railway sleepers shift and how we can solve this problem using Image Processing. Railway contains series of sleepers that connected together and these sleepers with time they may shift a little from their original position this problem will cause with time dangerous effects for the railway which leads to distortion in railway.\n\n#II.\tProblem definition\nThe problem in Railway the sleepers creep with time so this creeping process we can’t notice because it’s slow and the creep extension is too small in mm this slight shift may extend with time and cause a harm distortion in the Railway so we must avoid and monitor this slight difference in some way.\n#III.\tProblem Solution using Image Processing\n\nInstall devices every 500 meter and each device is embedded with Camera Module, 4G module, ARM processor and power management Module for low power consumption. The device is placed 10 meters far away from the Railway in tightly fixed location to ensure no slight motion in the device otherwise the results may not be accurate. The device should be calibrated and aligned directly toward the target region of interest. \n" }, { "alpha_fraction": 0.5815859436988831, "alphanum_fraction": 0.5979794263839722, "avg_line_length": 22.390697479248047, "blob_id": "b9112ab7e13cb2f33001574be9e8f2b744af69c7", "content_id": "50bf85824dc4855a62cc68e240ccea86818ce0a6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5246, "license_type": "no_license", "max_line_length": 116, "num_lines": 215, "path": "/main.cpp", "repo_name": "NanoSoft774849/Nano", "src_encoding": "UTF-8", "text": "// openCvx.cpp : Defines the entry point for the console application.\r\n//\r\n\r\n\r\n#include<conio.h>\r\n#include<opencv2\\highgui\\highgui.hpp>\r\n#include<opencv2/imgproc/imgproc.hpp>\r\n\r\n#include<iostream>\r\nusing namespace cv;\r\nusing namespace std;\r\n\r\n\r\n\r\nbool use_mask;\r\nMat img; Mat templ; Mat mask; Mat result;\r\nconst char* image_window = \"Source Image\";\r\nconst char* result_window = \"Result window\";\r\nint match_method;\r\nint max_Trackbar = 5;\r\nvoid MatchingMethod(int, void*);\r\nPoint ns_find_template(const char*, const char*);\r\n\r\ndouble ns_find_distance(Point p1, Point p2);\r\n\r\ndouble xdiff(Point p1, Point p2);\r\n\r\ndouble ydiff(Point p1, Point p2);\r\n\r\nstruct ns_mat\r\n{\r\n\tMat image;\r\n\tPoint maxLoc;\r\n};\r\n\r\n\r\n#define ns_max(x,y) ( x>=y?x:y)\r\n#define ns_min(x,y) (x<=y ?x:y)\r\n\r\nvoid jsonfmt(const char * key, int value, bool comma)\r\n{\r\n\t///const char* com=\r\n\tcout << \"\\\"\" << key << \"\\\":\" << value << (comma ? \",\" : \" \") << endl;\r\n}\r\n\r\nvoid jsonfmt(const char * key, double value, bool comma)\r\n{\r\n\t///const char* com=\r\n\tcout << \"\\\"\" << key << \"\\\":\" << value << (comma ? 
\",\" : \" \") << endl;\r\n}\r\nvoid jsonfmt(const char * key, const char * value, bool comma)\r\n{\r\n\t///const char* com=\r\n\tcout << \"\\\"\" << key << \"\\\":\" << \"\\\"\" << value << \"\\\"\" << (comma ? \",\" : \" \") << endl;\r\n}\r\n\r\nint main(int argc, char**argv)\r\n{\r\n\t/**Arg descriptions**/\r\n\t/*\r\n\r\n\targv[0]--appname\r\n\targv[1] -ref image\r\n\targv[2] -template image\r\n\targv[3]--image that you want to match\r\n\r\n\r\n\t*/\r\n\tif (argc < 4)\r\n\t{\r\n\t\treturn -1;\r\n\t}\r\n\r\n\ttry\r\n\t{\r\n\t\tconst char * ref_path = argv[1];\r\n\t\tconst char * temp_path = argv[2];\r\n\t\tconst char * img_path = argv[3];//image\r\n\t\t//template \r\n\r\n\t\tPoint ref_point = ns_find_template(ref_path, temp_path);\r\n\r\n\r\n\t\tPoint img_point = ns_find_template(img_path, temp_path);\r\n\r\n\t\tcout << \"{\" << endl;\r\n\t\tjsonfmt(\"ref_x\", ref_point.x, true);\r\n\t\tjsonfmt(\"ref_y\", ref_point.y, true);\r\n\t\tjsonfmt(\"img_x\", img_point.x, true);\r\n\t\tjsonfmt(\"img_y\", img_point.y, true);\r\n\t\tjsonfmt(\"xdiff\", xdiff(ref_point, img_point), true);\r\n\t\tjsonfmt(\"ydiff\", ydiff(ref_point, img_point), true);\r\n\t\tjsonfmt(\"distance\", ns_find_distance(ref_point, img_point), false);\r\n\r\n\t\tcout << \"}\" << endl;\r\n\t}\r\n\tcatch (Exception ex)\r\n\t{\r\n\t\treturn -1;\r\n\t}\r\n\r\n\r\n\r\n\t//cout << \"ref_Max_loc(x,y):(\" << ref_point.x << \",\" << ref_point.y << \")\" << endl;\r\n\t//\r\n\t//\r\n\t//cout << \"image_Max_loc(x,y):(\" << img_point.x << \",\" << img_point.y << \")\" << endl;\r\n\t//\r\n\t//\r\n\t//cout << \"distance(mm):\" << ns_find_distance(ref_point, img_point) << endl;\r\n\r\n\r\n\r\n\t//cout << \"argcount:\" << argc << endl;\r\n\t//cout << argv[0] << argv[1] << endl;\r\n\t//match_method = TM_CCORR_NORMED;\r\n\t//use_mask = false;\r\n\t// img = imread(img_path);\r\n\t// templ = imread(temp_path);\r\n\t//\r\n\t//if (!img.empty())\r\n\t//{\r\n\t//\t//imshow(\"mywindow\", img);\r\n\t//}\r\n\t//MatchingMethod(0, 0);\r\n\t//waitKey(0);\r\n\treturn 0;\r\n}\r\ndouble xdiff(Point p1, Point p2)\r\n{\r\n\tdouble xmax = ns_max(p1.x, p2.x);\r\n\tdouble xmin = ns_min(p1.x, p2.x);\r\n\treturn (xmax - xmin) * 100 / 294;\r\n}\r\ndouble ydiff(Point p1, Point p2)\r\n{\r\n\tdouble ymax = ns_max(p1.y, p2.y);\r\n\tdouble ymin = ns_min(p1.y, p2.y);\r\n\treturn (ymax - ymin) * 100 / 294;\r\n}\r\ndouble ns_find_distance(Point p1, Point p2)\r\n{\r\n\r\n\tdouble x = p1.x - p2.x;\r\n\tdouble y = p1.y - p2.y;\r\n\r\n\tdouble dist = (x*x) + (y*y);\r\n\r\n\r\n\treturn sqrt(dist) * 100 / 294;// distance in mm\r\n\r\n}\r\nPoint ns_find_template(const char * img_path, const char * temp_path)\r\n{\r\n\r\n\tMat img_display;\r\n\r\n\tMat im = imread(img_path);\r\n\tMat temp = imread(temp_path);\r\n\r\n\tim.copyTo(img_display);\r\n\tMat result;\r\n\tint result_cols = im.cols - temp.cols + 1;\r\n\tint result_rows = im.rows - temp.rows + 1;\r\n\r\n\tresult.create(result_rows, result_cols, CV_32FC1);\r\n\r\n\tint match_method = TM_CCORR_NORMED;\r\n\r\n\r\n\tmatchTemplate(im, temp, result, match_method);\r\n\r\n\tdouble minVal; double maxVal; Point minLoc; Point maxLoc;\r\n\tPoint matchLoc;\r\n\tminMaxLoc(result, &minVal, &maxVal, &minLoc, &maxLoc, Mat());\r\n\r\n\r\n\trectangle(img_display, matchLoc, Point(matchLoc.x + templ.cols, matchLoc.y + templ.rows), Scalar::all(0), 2, 8, 0);\r\n\treturn maxLoc;\r\n}\r\nvoid MatchingMethod(int, void*)\r\n{\r\n\tMat img_display;\r\n\timg.copyTo(img_display);\r\n\tint result_cols = img.cols - templ.cols + 1;\r\n\tint result_rows = 
img.rows - templ.rows + 1;\r\n\tresult.create(result_rows, result_cols, CV_32FC1);\r\n\tbool method_accepts_mask = (TM_SQDIFF == match_method || match_method == TM_CCORR_NORMED);\r\n\tif (use_mask && method_accepts_mask)\r\n\t{\r\n\t\t// pass the mask when the method supports it\r\n\t\tmatchTemplate(img, templ, result, match_method, mask);\r\n\t}\r\n\telse\r\n\t{\r\n\t\tmatchTemplate(img, templ, result, match_method);\r\n\t}\r\n\tnormalize(result, result, 0, 1, NORM_MINMAX, -1, Mat());\r\n\tdouble minVal; double maxVal; Point minLoc; Point maxLoc;\r\n\tPoint matchLoc;\r\n\tminMaxLoc(result, &minVal, &maxVal, &minLoc, &maxLoc, Mat());\r\n\tif (match_method == TM_SQDIFF || match_method == TM_SQDIFF_NORMED)\r\n\t{\r\n\t\tmatchLoc = minLoc;\r\n\t}\r\n\telse\r\n\t{\r\n\t\tmatchLoc = maxLoc;\r\n\t}\r\n\trectangle(img_display, matchLoc, Point(matchLoc.x + templ.cols, matchLoc.y + templ.rows), Scalar::all(0), 2, 8, 0);\r\n\trectangle(result, matchLoc, Point(matchLoc.x + templ.cols, matchLoc.y + templ.rows), Scalar::all(0), 2, 8, 0);\r\n\timshow(image_window, img_display);\r\n\tcout << \"MatchLoc:(x,y):(\" << matchLoc.x << \",\" << matchLoc.y << \")\" << endl;\r\n\treturn;\r\n}\r\n\r\n" }, { "alpha_fraction": 0.5660532712936401, "alphanum_fraction": 0.5814336538314819, "avg_line_length": 21.9407901763916, "blob_id": "d413ee65df5a13ad642fdfae8851a4ecb", "content_id": "7b010a37e001821684d4367bb3cdb7e396b5f98e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3641, "license_type": "no_license", "max_line_length": 75, "num_lines": 152, "path": "/main.py", "repo_name": "NanoSoft774849/Nano", "src_encoding": "UTF-8", "text": "import cv2 as cv\r\nimport numpy as np\r\nimport math\r\nimport os\r\nimport time\r\n\r\nimport cv2.data\r\n\r\n\r\n#Name : Adulbary\r\n#email : [email protected]\r\n#university of science and Technology of China (USTC)\r\n\r\n\r\n# pixels-to-mm conversion factor, found by experiment (294 px = 100 mm)\r\nmagic_constant = 100.0/294.0\r\n\r\n# convert an image to grayscale\r\ndef toGray(imgx):\r\n    return cv.cvtColor(imgx, cv.COLOR_BGR2GRAY)\r\n\r\n# locate the template inside an image\r\ndef ns_find_template(img_path, template_path):\r\n\r\n    img = cv.imread(img_path)\r\n    if img is None:\r\n        print(\"image is empty\")\r\n        return (0, (0, 0))\r\n    template = cv.imread(template_path)\r\n    if template is None:\r\n        print(\"template does not exist\")\r\n        return (0, (0, 0))\r\n\r\n    # matching method using normalized cross correlation\r\n    match_method = cv.TM_CCORR_NORMED\r\n    # apply template matching to find the location of the template in the image\r\n    xmatch = cv.matchTemplate(img, template, match_method)\r\n    minVal1, _maxVal1, minLoc, maxLoc = cv.minMaxLoc(xmatch)\r\n    print(\"maxlocation:\", maxLoc)\r\n    return (_maxVal1, maxLoc)\r\n\r\n# max\r\ndef ns_max(x, y):\r\n    return x if x >= y else y\r\n\r\n# min\r\ndef ns_min(x, y):\r\n    return x if x <= y else y\r\n\r\n\r\n# calc xdiff, ydiff and the Euclidean distance, all in mm\r\ndef calc_distance(p1, p2):\r\n\r\n    x0 = p1[0]\r\n    x1 = p2[0]\r\n    y0 = p1[1]\r\n    y1 = p2[1]\r\n    # horizontal difference\r\n    xdiff = abs(x1 - x0) * magic_constant  # distance in mm\r\n    # vertical difference\r\n    ydiff = abs(y1 - y0) * magic_constant  # distance in mm\r\n    # Euclidean difference\r\n    diff = math.sqrt((xdiff * xdiff) + (ydiff * ydiff))\r\n\r\n    return (xdiff, ydiff, diff)\r\n\r\n\r\ndef test():\r\n\r\n    ref_path = \"images/ref0.jpg\"\r\n    test_img = \"images/apptest.jpg\"\r\n    template_path = \"images/template.pgm\"\r\n\r\n    m, p1 = ns_find_template(test_img , 
template_path)\r\n m2, p2 = ns_find_template(ref_path, template_path)\r\n\r\n xdif,ydif , edif = calc_distance(p1, p2)\r\n print(\"horizontal shift(mm):\", xdif)\r\n print(\"vertical shift (mm):\", ydif)\r\n print(\"e shift(mm):\",edif)\r\n\r\n #ref = cv.imread(ref_path)\r\n\r\n #cv.imshow(\"ref image\",ref)\r\n #cv.waitKey(0)\r\ndef ProcessImages(ref_path , template_path , test_img):\r\n\r\n m, p1 = ns_find_template(test_img , template_path)\r\n m2, p2 = ns_find_template(ref_path, template_path)\r\n\r\n xdif,ydif , edif = calc_distance(p1, p2)\r\n print(\"horizontal shift(mm):\", xdif)\r\n print(\"vertical shift (mm):\", ydif)\r\n print(\"e shift(mm):\",edif)\r\n\r\n \r\n\r\nimport sys\r\nimport os \r\n\r\ndef isExists(str ):\r\n return os.path.exists(str)\r\n\r\nif __name__ ==\"__main__\":\r\n\r\n argc = len(sys.argv)\r\n #--def arguement used if you want to use default path for images.\r\n if(len(sys.argv)<4):\r\n print(\"Usage main.py ref_image template_img test_img [--def] \")\r\n exit()\r\n\r\n\r\n ref_img = sys.argv[1]\r\n template_img = sys.argv[2]\r\n test_img = sys.argv[3]\r\n _def = False;\r\n def_path =\"images/\"\r\n if( argc == 5):\r\n _def = sys.argv[4] =='--def';\r\n\r\n if ( _def ) :\r\n test_img = def_path+test_img\r\n ref_img = def_path +ref_img\r\n template_img = def_path+template_img\r\n\r\n if not os.path.exists(ref_img):\r\n print(\"ref image is not exist!\")\r\n exit()\r\n\r\n\r\n \r\n if not isExists(template_img):\r\n print( \" template image no exists \");\r\n exit()\r\n \r\n if( not isExists(test_img)):\r\n print(\"test image is not exist !\")\r\n exit()\r\n \r\n \r\n for arg in sys.argv:\r\n print(arg)\r\n\r\n # apply algorithm for images .\r\n ProcessImages(ref_img , template_img , test_img)\r\n \r\n \r\n\r\n\r\n print( \"done ...\")\r\n\r\n" }, { "alpha_fraction": 0.4585893154144287, "alphanum_fraction": 0.46241921186447144, "avg_line_length": 27.769596099853516, "blob_id": "9053cbe7867e4b3fdef5a84867d726c976184b44", "content_id": "04cbdc1a1f8d8eadb85c79b01ce44c016272610b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C#", "length_bytes": 25068, "license_type": "no_license", "max_line_length": 119, "num_lines": 842, "path": "/Server.cs", "repo_name": "NanoSoft774849/Nano", "src_encoding": "UTF-8", "text": "using System;\r\nusing System.Collections.Generic;\r\nusing System.Linq;\r\nusing System.Text;\r\nusing System.Threading.Tasks;\r\nusing System.Net.Sockets;\r\nusing System.IO;\r\nusing System.Net;\r\nusing System.Timers;\r\nusing System.Net.NetworkInformation;\r\n\r\nnamespace ns.networking\r\n{\r\n \r\n\r\n public static class NsDns\r\n {\r\n public static string GetHostName()\r\n {\r\n return Dns.GetHostName();\r\n }\r\n public static string GetHostByAddress(string Ip_port)\r\n {\r\n string ii = Ip_port;\r\n if (Ip_port.Contains(':'))\r\n ii = Ip_port.Split(':')[0];\r\n\r\n return Dns.GetHostByAddress(ii).HostName;\r\n }\r\n public static string[] GetInterfacesNames()\r\n {\r\n NetworkInterface[] ifaces = NetworkInterface.GetAllNetworkInterfaces();\r\n string[] _ifaces = new string[ifaces.Length];\r\n int i=0;\r\n foreach(NetworkInterface iface in ifaces)\r\n {\r\n // iface.NetworkInterfaceType==NetworkInterfaceType.\r\n _ifaces[i++] = string.Format(\"Name:{0} address:{1}\", iface.Name, iface.GetIPProperties().DnsAddresses);\r\n \r\n }\r\n return _ifaces;\r\n }\r\n public static string[] GetLocalHostInterfaces()\r\n {\r\n \r\n\r\n IPAddress[] addresses = Dns.GetHostAddresses(GetHostName());\r\n\r\n\r\n string[] _list = 
new string[addresses.Length];\r\n \r\n int i = 0;\r\n foreach (IPAddress address in addresses)\r\n {\r\n \r\n if (address.GetAddressBytes().Length == 4)\r\n _list[i++] = string.Format(\"{0}\", address);\r\n ;\r\n\r\n\r\n }\r\n return _list;\r\n\r\n\r\n }\r\n }\r\n\r\n public enum ClientType\r\n {\r\n Sensor=1,\r\n http=2,\r\n Websocket=3,\r\n }\r\n public class HandshakeProtocol\r\n {\r\n public static string send_dev_info_cmd = \"send device info\";\r\n public static string sleep_cmd = \"sleep\";\r\n public static string start_cmd = \"start\";\r\n public static string wake_up_cmd = \"wake up\";\r\n public static string send_again_cmd = \"send again\";\r\n public static string send_ref_img_cmd = \"send ref\";\r\n public static string snapshot_cmd = \"snapshot\";\r\n public static string stop_cmd = \"stop\";\r\n public static string ping_cmd = \"ping\";\r\n public bool dev_info_cmd_sent;\r\n public bool sleep_cmd_sent;\r\n public bool start_cmd_sent;\r\n public bool wake_up_cmd_sent;\r\n public bool dev_info_received;\r\n public bool send_again_cmd_sent;\r\n public bool send_snapshot_cmd_sent;\r\n public bool control_cmd_sent;\r\n public bool control_cmd_response_received;\r\n public string sent_cmd;\r\n public HandshakeProtocol()\r\n {\r\n this.dev_info_cmd_sent = false;\r\n this.dev_info_received = false;\r\n this.send_again_cmd_sent = false;\r\n this.sleep_cmd_sent = false;\r\n this.start_cmd_sent = false;\r\n this.wake_up_cmd_sent = false;\r\n this.send_snapshot_cmd_sent = false;\r\n this.control_cmd_sent = false;\r\n this.control_cmd_response_received = false;\r\n }\r\n }\r\n public class ReceiveFileProtocol\r\n {\r\n public string file_name;\r\n public int file_size;\r\n public BinaryWriter bw;\r\n public bool isInProgress;\r\n public bool isEnd;\r\n public int rx_written;\r\n public int written;\r\n public string client_id;\r\n public Client mClient;\r\n \r\n public delegate void _OnError(string error, string where);\r\n public delegate void _OnReceiveFileComplete(ReceiveFileProtocol rx);\r\n public delegate void _OnPacketError( ReceiveFileProtocol rfp);\r\n public _OnReceiveFileComplete OnReceiveFileComplete;\r\n public _OnReceiveFileComplete OnTimeout;\r\n \r\n public _OnError OnError;\r\n public Timer mTimer;\r\n public int rec_time_ms;\r\n private int timer_interval=100;//100ms;\r\n private double max_time=90;//120sec;//2mins // \r\n public bool TimerIsRunning;\r\n public ReceiveFileProtocol(string fn)\r\n {\r\n written = 0;\r\n this.bw = new BinaryWriter(File.Open(fn, FileMode.Create));\r\n this.file_name = fn;\r\n rec_time_ms = 0;\r\n mTimer = new Timer(timer_interval);\r\n \r\n mTimer.Elapsed += mTimer_Elapsed;\r\n \r\n mTimer.Start();\r\n TimerIsRunning = true;\r\n rx_written = 0;\r\n\r\n }\r\n public ReceiveFileProtocol(BinaryWriter bwx)\r\n {\r\n written = 0;\r\n this.bw = bwx;\r\n rx_written = 0;\r\n rec_time_ms = 0;\r\n mTimer = new Timer(timer_interval);\r\n mTimer.Elapsed += mTimer_Elapsed;\r\n mTimer.Start();\r\n TimerIsRunning = true;\r\n rx_written = 0;\r\n //this.file_name = fn;\r\n }\r\n\r\n private void mTimer_Elapsed(object sender, ElapsedEventArgs e)\r\n {\r\n this.rec_time_ms = this.rec_time_ms + 1;\r\n double tt = this.rec_time_ms*0.1;//in sec\r\n if(tt>=max_time)\r\n {\r\n this.isInProgress = false;\r\n this.isEnd = true;\r\n bw.Close();\r\n this.mTimer.Stop();\r\n\r\n TimerIsRunning = false ;\r\n this.OnTimeout(this);\r\n }\r\n }\r\n \r\n \r\n public ReceiveFileProtocol OnInProgressWrite(byte[] buffer, int len,_OnReceiveFileComplete handler )\r\n {\r\n if 
(!this.isInProgress) return this;\r\n try\r\n {\r\n bool x= (this.written+len)<this.file_size;\r\n if(len<1024 && x)\r\n {\r\n mClient.SendMsg(\"resend\");\r\n return this;\r\n }\r\n\r\n bw.Write(buffer, 0, len);\r\n\r\n this.written += len;\r\n mClient.SendMsg(\"ok\");\r\n if(this.written>=this.file_size)\r\n {\r\n bw.Close();\r\n this.isInProgress = false;\r\n this.isEnd = true;\r\n this.written = 0;\r\n this.mTimer.Stop();\r\n handler(this);\r\n mClient.SendMsg(\"ok\");\r\n return this;\r\n }\r\n \r\n \r\n\r\n return this;\r\n }\r\n catch(Exception ex)\r\n {\r\n OnError(ex.Message, \"OnInProgressWrite\");\r\n return this;\r\n }\r\n\r\n }\r\n public bool isReceivedSuccess()\r\n {\r\n var info = new FileInfo(file_name);\r\n return info.Length == file_size;\r\n }\r\n public ReceiveFileProtocol OnReceiveFileEnd()\r\n {\r\n\r\n this.isEnd = true;\r\n this.isInProgress = false;\r\n try\r\n {\r\n bw.Close();\r\n if (mTimer != null)\r\n {\r\n TimerIsRunning = false;\r\n mTimer.Stop();\r\n mTimer.Close();\r\n }\r\n }\r\n catch(Exception ex)\r\n {\r\n OnError(ex.Message, \"OnReceiveFileEnd\");\r\n }\r\n\r\n return this;\r\n }\r\n\r\n }\r\n\r\n public class ClientCollection\r\n {\r\n private List<Client> ClientList;\r\n\r\n public delegate void _foreach(Client c);\r\n public ClientCollection()\r\n {\r\n ClientList = new List<Client>();\r\n }\r\n public ClientCollection Add(Client client)\r\n {\r\n if (this.IsExist(client)) return this;\r\n\r\n ClientList.Add(client);\r\n return this;\r\n }\r\n public Client getClientByDeviceCode(string dev_code)\r\n {\r\n int len = ClientList.Count;\r\n int i = 0;\r\n for (i = 0; i < len; i++)\r\n {\r\n if (ClientList[i].DeviceCode == dev_code)\r\n {\r\n\r\n return ClientList[i];\r\n }\r\n }\r\n return null;\r\n }\r\n public Client getClientByDevId(int dev_id)\r\n {\r\n \r\n int len = ClientList.Count;\r\n int i = 0;\r\n for(i=0;i<len;i++)\r\n {\r\n if(ClientList[i].DeviceId==dev_id)\r\n {\r\n \r\n return ClientList[i]; \r\n }\r\n }\r\n return null;\r\n }\r\n public void ForEach(_foreach for_each)\r\n {\r\n int len = this.ClientList.Count;\r\n for (int i = 0; i < len; for_each(this.ClientList[i]), i++) ;\r\n }\r\n\r\n public Client getClientById(string id)\r\n {\r\n\r\n return this.ClientList.Find(c => c.ID == id);\r\n }\r\n public ClientCollection RemoveClientById(string id)\r\n {\r\n if (!this.IsExist(id)) return this;\r\n\r\n this.ClientList.Remove(getClientById(id));\r\n\r\n return this;\r\n }\r\n public Client this[string id]\r\n {\r\n get\r\n {\r\n if (this.IsExist(id))\r\n {\r\n return this.getClientById(id);\r\n }\r\n return null;\r\n\r\n }\r\n }\r\n\r\n public bool IsExist(string id)\r\n {\r\n return this.ClientList.Exists(c => c.ID == id);\r\n\r\n }\r\n\r\n public bool IsExist(Client client)\r\n {\r\n return ClientList.Exists(c => c.ID == client.ID);\r\n }\r\n }\r\n public class Client\r\n {\r\n public Socket ClientSocket;\r\n private int buffer_size = 1024;\r\n private int rx_bytes_count;\r\n public string ID;\r\n private bool is_connected;\r\n public int UiId;\r\n public int DeviceId;\r\n public string DeviceCode;\r\n public ReceiveFileProtocol recv_file_proto;\r\n public HandshakeProtocol Flags;\r\n public delegate void _OnCommandHandler(Client client, byte[] buffer, int len);\r\n public delegate void OnMessageArrived(string from, byte[] buffer, int len);\r\n public delegate void _OnTimerElapsed(Client c);\r\n public _OnTimerElapsed TimerRoutine;\r\n public _OnTimerElapsed OnReachMaxTry;\r\n public event _OnCommandHandler CommandHandler;\r\n public 
OnMessageArrived MessageHandler;\r\n public _OnCommandHandler OnHttpRequest;\r\n public Timer send_cmd_again_timer;\r\n public int send_cmd_interval=1*1000;//30 second\r\n public int max_try_count=3;\r\n public int try_count;\r\n public bool TimerIsRunning;\r\n public ClientType ClientType;\r\n public delegate void _OnDisconnected(string c);\r\n\r\n public _OnDisconnected OnDisConnected;\r\n public Client(string id, Socket c)\r\n {\r\n this.ClientSocket = c;\r\n this.ID = id;\r\n this.is_connected = true;\r\n rx_bytes_count = 0;\r\n this.Flags = new HandshakeProtocol();\r\n \r\n try_count = 0;\r\n send_cmd_again_timer = new Timer(this.send_cmd_interval);\r\n send_cmd_again_timer.Elapsed += _TimerRoutine;\r\n TimerIsRunning = false;\r\n this.ClientType = new ClientType();\r\n this.ClientType = ClientType.Sensor;\r\n }\r\n public Client(Socket s)\r\n {\r\n this.ClientSocket = s;\r\n this.ID = this.ClientSocket.RemoteEndPoint.ToString();\r\n this.is_connected = true;\r\n rx_bytes_count = 0;\r\n\r\n this.Flags = new HandshakeProtocol();\r\n try_count = 0;\r\n //send_cmd_again_timer = new Timer(this.send_cmd_interval);\r\n //send_cmd_again_timer.Elapsed += _TimerRoutine;\r\n TimerIsRunning = false;\r\n this.ClientType = new ClientType();\r\n this.ClientType = ClientType.Sensor;\r\n }\r\n // modified disable timers.\r\n public Client(string ip,int port=20001)\r\n {\r\n var endpoint = new IPEndPoint(IPAddress.Parse(ip), port);\r\n Socket socket = new Socket(endpoint.AddressFamily, SocketType.Stream, ProtocolType.Tcp);\r\n\r\n this.ClientSocket = Connect(socket, new IPAddress[] { IPAddress.Parse(ip) }, port).Result;\r\n\r\n this.is_connected = true;\r\n rx_bytes_count = 0;\r\n\r\n this.Flags = new HandshakeProtocol();\r\n try_count = 0;\r\n //send_cmd_again_timer = new Timer(this.send_cmd_interval);\r\n //send_cmd_again_timer.Elapsed += _TimerRoutine;\r\n TimerIsRunning = false;\r\n this.ClientType = new ClientType();\r\n this.ClientType = ClientType.Sensor;\r\n \r\n }\r\n private async Task<Socket> Connect(Socket sc,IPAddress[] address,int port)\r\n {\r\n try\r\n {\r\n await Task.Factory.FromAsync(\r\n new Func<IPAddress[], int, AsyncCallback, object, IAsyncResult>(sc.BeginConnect),\r\n new Action<IAsyncResult>(sc.EndConnect), address, port, null\r\n ).ConfigureAwait(false);\r\n return sc;\r\n }\r\n catch(Exception)\r\n {\r\n return null;\r\n }\r\n }\r\n private void _TimerRoutine(object sender,EventArgs args)\r\n {\r\n this.try_count = this.try_count + 1;\r\n if(this.try_count>=this.max_try_count)\r\n {\r\n StopTimer();\r\n this.try_count = 0;\r\n TimerIsRunning = false;\r\n this.OnReachMaxTry(this);\r\n }\r\n if(this.TimerRoutine!=null)\r\n {\r\n TimerIsRunning = true;\r\n this.TimerRoutine(this);\r\n }\r\n }\r\n \r\n public void StopTimer()\r\n {\r\n if(this.send_cmd_again_timer!=null)\r\n {\r\n this.send_cmd_again_timer.Stop();\r\n this.TimerIsRunning = false;\r\n this.try_count = 0;\r\n }\r\n }\r\n public void startTimer()\r\n {\r\n if (this.send_cmd_again_timer != null)\r\n {\r\n this.send_cmd_again_timer.Start();\r\n this.TimerIsRunning = true;\r\n this.try_count = 0;\r\n }\r\n\r\n }\r\n public int get_rx_bytes()\r\n {\r\n return this.rx_bytes_count;\r\n }\r\n public Client updateBytesCount(int len)\r\n {\r\n this.rx_bytes_count += len;\r\n return this;\r\n }\r\n public Client reset_rx_bytes()\r\n {\r\n this.rx_bytes_count = 0;\r\n return this;\r\n }\r\n public string getHostName()\r\n {\r\n //string hostname = \"\";\r\n\r\n //this.ClientSocket.RemoteEndPoint.Serialize().\r\n //Dns.\r\n 
string[] ip = this.ID.Split(':');\r\n\r\n return Dns.GetHostByAddress(ip[0]).HostName;\r\n }\r\n public void Start(OnMessageArrived handler, _OnDisconnected dishandler)\r\n {\r\n this.MessageHandler = handler;\r\n this.is_connected = true;\r\n this.OnDisConnected = dishandler;\r\n\r\n Task.Run(() => OnNewMsg(handler, dishandler));\r\n }\r\n public NetworkStream Stream\r\n {\r\n get\r\n {\r\n if (this.IsConnected)\r\n {\r\n return new NetworkStream(this.ClientSocket);\r\n }\r\n return null;\r\n }\r\n }\r\n public void Close()\r\n {\r\n //this.Stream.Close();\r\n if (this.IsConnected)\r\n {\r\n this.Stream.Close(3);\r\n\r\n this.ClientSocket.Close(3);\r\n }\r\n }\r\n\r\n public bool close(IAsyncResult res)\r\n {\r\n\r\n if (this.IsConnected && res.IsCompleted)\r\n {\r\n this.ClientSocket.Close();\r\n return true;\r\n }\r\n\r\n return false;\r\n }\r\n \r\n\r\n public void ControlCmdReceived()\r\n {\r\n this.Flags.control_cmd_sent = false;\r\n this.Flags.sent_cmd = \"\";\r\n }\r\n public bool SendLastCmd()\r\n {\r\n return this.SendCommand(this.Flags.sent_cmd);\r\n }\r\n public bool SendCommand(string cmd)\r\n {\r\n \r\n\r\n try\r\n {\r\n byte[] bytes = System.Text.Encoding.ASCII.GetBytes(cmd);\r\n int len = bytes.Length;\r\n if(this.ClientSocket!=null)\r\n {\r\n var st = new NetworkStream(this.ClientSocket);\r\n \r\n st.Write(bytes, 0, len);\r\n this.Flags.control_cmd_sent = true;\r\n this.Flags.sent_cmd = cmd;\r\n }\r\n return true;\r\n } \r\n catch(Exception)\r\n {\r\n \r\n return false;\r\n }\r\n\r\n\r\n \r\n }\r\n \r\n\r\n \r\n public void SendFile(string _path,Action<string> msg_out)\r\n {\r\n if(!File.Exists(_path)) {msg_out(string.Format(\"File {0} is Not Exist\",_path)); return;}\r\n \r\n try\r\n {\r\n //StreamReader reader=new StreamReader(_path);\r\n this.ClientSocket.SendFile(_path);\r\n \r\n\r\n }\r\n catch (Exception ex)\r\n {\r\n msg_out(\"Error in Send file function \"+ex.Message);\r\n }\r\n finally\r\n {\r\n msg_out(string.Format(\"File {0} Sent successfully!.\", _path));\r\n }\r\n \r\n\r\n }\r\n\r\n\r\n delegate void _DoJob(string fn);\r\n public IAsyncResult SendAsync(string fn,AsyncCallback _callback,object obj)\r\n {\r\n if (!File.Exists(fn)) return null;\r\n \r\n _DoJob job = ((s) =>\r\n {\r\n try\r\n {\r\n this.ClientSocket.SendFile(s);\r\n \r\n }\r\n catch (Exception)\r\n {\r\n return;\r\n }\r\n\r\n\r\n\r\n\r\n });\r\n\r\n\r\n return job.BeginInvoke(fn, _callback, obj);\r\n\r\n \r\n \r\n \r\n\r\n }\r\n public void SendFile(string fn,Action<string> onException,Action<Client> Final)\r\n {\r\n if (!File.Exists(fn)) return;\r\n try\r\n {\r\n this.ClientSocket.SendFile(fn);\r\n }\r\n catch(Exception ex)\r\n {\r\n onException(\"SendAsync:\" + ex.StackTrace);\r\n }\r\n finally\r\n {\r\n Final(this);\r\n }\r\n }\r\n public void Send(byte[] bytes)\r\n {\r\n try\r\n {\r\n \r\n this.ClientSocket.Send(bytes);\r\n \r\n }\r\n catch(Exception)\r\n {\r\n //if(this.MessageHandler!=null)\r\n //MessageHandler(this.ID, tobytes(\"error-->+\" + ex.StackTrace), 0);\r\n return;\r\n }\r\n \r\n }\r\n public void SendMsg(string msg)\r\n {\r\n Socket mClient = this.ClientSocket;\r\n\r\n try\r\n {\r\n byte[] bytes = Encoding.ASCII.GetBytes(msg);\r\n this.Send(bytes);\r\n }\r\n\r\n catch (Exception)\r\n {\r\n return;\r\n }\r\n \r\n \r\n \r\n\r\n \r\n }\r\n private async Task OnNewMsg(OnMessageArrived handler, _OnDisconnected dhandler)\r\n {\r\n\r\n var stream = new NetworkStream(this.ClientSocket);\r\n // string s = string.Format(\"Msg From {0} {1} \\t\\n \", 
this.ID,DateTime.Now.ToString(\"hh:mm\"));\r\n var buffer = new byte[this.buffer_size];\r\n do\r\n {\r\n // stream.A\r\n\r\n \r\n int len = buffer.Length;\r\n\r\n int byteRead = await stream.ReadAsync(buffer, 0, len).ConfigureAwait(false);\r\n if (byteRead == 0)\r\n {\r\n // handler(this.ID, tobytes(\" is Disconnected\"), 0);\r\n dhandler(this.ID);\r\n this.is_connected = false;\r\n break;\r\n }\r\n\r\n if (byteRead > 0) \r\n {\r\n //string msg = Encoding.ASCII.GetString(buffer, 0, byteRead).Trim();\r\n //bool is_http = msg.StartsWith(\"GET\") || msg.StartsWith(\"POST\");\r\n //is_http &= msg.Contains(\"HTTP\");\r\n //if(is_http)\r\n //{\r\n // this.ClientType = ClientType.http;\r\n // this.OnHttpRequest(this, buffer, byteRead);\r\n // \r\n //}\r\n if(this.Flags.control_cmd_sent)\r\n {\r\n this.CommandHandler(this, buffer, byteRead);\r\n \r\n }\r\n else\r\n {\r\n handler(this.ID, buffer, byteRead);\r\n \r\n }\r\n //byteRead = -1;\r\n \r\n }\r\n\r\n\r\n } while (true);\r\n }\r\n\r\n private byte[] tobytes(string msg)\r\n {\r\n return ASCIIEncoding.ASCII.GetBytes(msg);\r\n }\r\n\r\n public string HostName\r\n {\r\n get\r\n {\r\n if (this.IsConnected)\r\n {\r\n return this.ClientSocket.RemoteEndPoint.ToString();\r\n }\r\n return null;\r\n }\r\n }\r\n\r\n public bool IsConnected\r\n {\r\n get\r\n {\r\n if (this.ClientSocket.Connected && this.ClientSocket != null && is_connected) return true;\r\n return false;\r\n }\r\n\r\n set\r\n {\r\n is_connected = value;\r\n }\r\n }\r\n\r\n\r\n }\r\n public class tcpServer\r\n {\r\n\r\n public delegate void EventHandler(string msg);\r\n public delegate void OnRecv(string from, byte[] buffer, int len);\r\n public delegate void _OnNewClient(string client);\r\n\r\n public _OnNewClient OnNewClient;\r\n public ClientCollection myClients;\r\n public delegate void _OnClientDisConnect(string id);\r\n\r\n public event OnRecv OnMessageArriveEvent;\r\n\r\n public _OnClientDisConnect OnClientDisConnect;\r\n // public delegate void _OnConnection(Socket s);\r\n //public On\r\n public OnRecv mOnRecv;\r\n public EventHandler OnMsg;\r\n public EventHandler OnError;\r\n public Socket myServerSocket;\r\n public void Start(string ip, int port = 8090)\r\n {\r\n //var endpoint = new IPEndPoint(IPAddress.Loopback, port); //test\r\n var endpoint = new IPEndPoint(IPAddress.Parse(ip), port);\r\n Socket socket = new Socket(endpoint.AddressFamily, SocketType.Stream, ProtocolType.Tcp);\r\n myClients = new ClientCollection();\r\n socket.Bind(endpoint);\r\n socket.Listen(int.MaxValue-1);//128\r\n myServerSocket = socket;\r\n //this.mClient = null;\r\n Task.Run(() => Listen(socket));\r\n }\r\n public void Close()\r\n {\r\n try\r\n {\r\n myServerSocket.Close();\r\n }\r\n catch(Exception ex)\r\n {\r\n OnError(string.Format(\"Error @ Close Socket:{0}\",ex.Message));\r\n }\r\n }\r\n private async Task Listen(Socket msocket)\r\n {\r\n do\r\n {\r\n\r\n var client = await Task.Factory.FromAsync(\r\n new Func<AsyncCallback, object, IAsyncResult>(msocket.BeginAccept),\r\n new Func<IAsyncResult, Socket>(msocket.EndAccept), null).ConfigureAwait(false);\r\n\r\n // client.r\r\n\r\n Client mc = new Client(client);\r\n mc.Start((m, buffer, len) =>\r\n {\r\n //OnMessageArriveEvent(m, buffer, len);\r\n this.mOnRecv(m, buffer, len);\r\n }, (id) =>\r\n {\r\n //string m = string.Format(\"\\n{0} is DisConnected!\\n\", disconnect);\r\n this.OnClientDisConnect(id);\r\n // myClients.RemoveClientById(id);\r\n\r\n\r\n //OnMsg(m);\r\n });\r\n myClients.Add(mc);\r\n\r\n // OnMsg(\"Client Connected\" + 
client.RemoteEndPoint.ToString());\r\n OnNewClient(client.RemoteEndPoint.ToString());\r\n\r\n\r\n } while (true);\r\n\r\n }\r\n\r\n }\r\n\r\n \r\n}\r\n" } ]
4
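Illustration (not a dataset record): the NanoSoft774849/Nano sources above measure displacement by template matching. A minimal, self-contained sketch of that pipeline, cv.matchTemplate with TM_CCORR_NORMED followed by cv.minMaxLoc and the repository's empirical 100/294 pixel-to-millimetre scale, might look as follows; the image file names are hypothetical, not paths from the repository.

import math

import cv2 as cv

PX_TO_MM = 100.0 / 294.0  # empirical scale factor taken from the repo above

def locate(image_path, template_path):
    """Return the (x, y) pixel location of the best template match."""
    img = cv.imread(image_path)
    tmpl = cv.imread(template_path)
    if img is None or tmpl is None:
        raise FileNotFoundError("image or template could not be read")
    scores = cv.matchTemplate(img, tmpl, cv.TM_CCORR_NORMED)
    _min_val, _max_val, _min_loc, max_loc = cv.minMaxLoc(scores)
    return max_loc  # for TM_CCORR_NORMED the maximum is the best match

if __name__ == "__main__":
    ref = locate("ref.jpg", "template.png")       # hypothetical file names
    cur = locate("snapshot.jpg", "template.png")  # hypothetical file names
    dx_mm = abs(cur[0] - ref[0]) * PX_TO_MM
    dy_mm = abs(cur[1] - ref[1]) * PX_TO_MM
    print("shift (mm):", dx_mm, dy_mm, math.hypot(dx_mm, dy_mm))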
khaliliShoja/DynamicProgramming
https://github.com/khaliliShoja/DynamicProgramming
33aa8de93599fd062089c2226ccf5faa3957f4b8
817400e2ba8be5d7e99cd01a8d0cb86ac8380237
5e9ccaf442a5e41b61f080f9d05ec41ec11e601d
refs/heads/master
2021-08-18T23:29:41.676927
2017-11-24T06:23:35
2017-11-24T06:23:35
111,883,169
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5022222399711609, "alphanum_fraction": 0.5488888621330261, "avg_line_length": 14.928571701049805, "blob_id": "9ec9e45cd7dfb74f4b1b4b43f0974255c2384068", "content_id": "40ff78205daa10c1425dd923ef0b4bf5b6c83b73", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 450, "license_type": "no_license", "max_line_length": 179, "num_lines": 28, "path": "/Dynamic Programming/2-1.py", "repo_name": "khaliliShoja/DynamicProgramming", "src_encoding": "UTF-8", "text": "\n# coding: utf-8\n\n# Given a road of length n and pairs p[i] for i=1,2,3,... where p[i] is the price of a rod of length i. Find the maximum total revenue you can make by cutting and selling the rod.\n\n# In[4]:\n\n\ndef calmax(n,p):\n s=[0]*(n+1)\n s[1]=p[1]\n for i in range(2,n+1):\n maxx=-1\n for j in range(0,i):\n a=s[j]+p[i-j]\n if(a > maxx):\n maxx=a\n s[i]=maxx\n return s\n\n\n\n\np=[0,1,5,8,9,10]\n\nprint(calmax(5,p))\n\n\n# In[ ]:\n\n\n\n" }, { "alpha_fraction": 0.8620689511299133, "alphanum_fraction": 0.8620689511299133, "avg_line_length": 28, "blob_id": "af7b470f8ab55e61228bd51d3f9bc373b0c7e8df", "content_id": "9adba52811187a0ca1def317c4f3cbcde4f8b01c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 58, "license_type": "no_license", "max_line_length": 36, "num_lines": 2, "path": "/README.md", "repo_name": "khaliliShoja/DynamicProgramming", "src_encoding": "UTF-8", "text": "# DynamicProgramming\nsome problems in dynamic programming\n" }, { "alpha_fraction": 0.548872172832489, "alphanum_fraction": 0.5864661931991577, "avg_line_length": 32.125, "blob_id": "58a82491b12f9b6f28e46f0cc1f114a8abca890d", "content_id": "492ea1385e5326d68bba32ade9d2a0f2dceacb2e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1066, "license_type": "no_license", "max_line_length": 539, "num_lines": 32, "path": "/Dynamic Programming/6.py", "repo_name": "khaliliShoja/DynamicProgramming", "src_encoding": "UTF-8", "text": "\n# coding: utf-8\n\n# Given weights and values of n items, put these items in a knapsack of capacity W to get the maximum total value in the knapsack. In other words, given two integer arrays val[0..n-1] and wt[0..n-1] which represent values and weights associated with n items respectively. Also given an integer W which represents knapsack capacity, find out the maximum value subset of val[] such that sum of the weights of this subset is smaller than or equal to W. 
You cannot break an item, either pick the complete item, or don’t pick it (0-1 property).\n# \n\n# In[10]:\n\nv=[0,30,14,16,9]\nw=[0,6,3,4,2]\n#Total weight is 10\ntw=10\ndef maxVal(tw,v,w):\n m=[[0]*(tw+1) for i in range (0,len(v))]\n for i in range(0,len(v)):\n for j in range(0, tw+1):\n if(i==0 or j==0):\n m[i][j]=0\n else:\n if(j >= w[i]):\n if(m[i-1][j] > m[i-1][j-w[i]]+v[i]):\n m[i][j]=m[i-1][j]\n else:\n m[i][j]=m[i-1][j-w[i]]+v[i]\n else:\n m[i][j]=m[i-1][j]\n return m\nprint( maxVal(tw,v,w))\n \n \n\n\n# In[ ]:\n\n\n\n" }, { "alpha_fraction": 0.4829931855201721, "alphanum_fraction": 0.5408163070678711, "avg_line_length": 12.181818008422852, "blob_id": "a23c39bdb32a12d88e3ff3326b5852547f6c20e1", "content_id": "6e26a8a3cfe8af5c1be444e0e734abf956696e1a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 294, "license_type": "no_license", "max_line_length": 92, "num_lines": 22, "path": "/Dynamic Programming/1.py", "repo_name": "khaliliShoja/DynamicProgramming", "src_encoding": "UTF-8", "text": "\n# coding: utf-8\n\n# You can climb 1 or 2 stairs with one step. How many different ways can you climb n stairs?\n\n# In[1]:\n\ndef climbSt(n):\n s=[0]*(n+1)\n s[0]=0\n s[1]=1\n s[2]=2\n \n for i in range(3,n+1):\n s[i]=s[i-1]+s[i-2]\n #print(s)\n return s\n \ns=climbSt(4)\nprint(s)\n\n\n# In[ ]:\n\n\n\n" }, { "alpha_fraction": 0.5121412873268127, "alphanum_fraction": 0.556291401386261, "avg_line_length": 14.482758522033691, "blob_id": "adef51db812ea046797360e76647bec73ba6f8b0", "content_id": "a82c180338b3a808a30e14f207dedcd8bc6ed42d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 453, "license_type": "no_license", "max_line_length": 179, "num_lines": 29, "path": "/Dynamic Programming/2.py", "repo_name": "khaliliShoja/DynamicProgramming", "src_encoding": "UTF-8", "text": "\n# coding: utf-8\n\n# Given a road of length n and pairs p[i] for i=1,2,3,... where p[i] is the price of a rod of length i. Find the maximum total revenue you can make by cutting and selling the rod.\n\n# In[3]:\n\ndef calmax(n, p):\n if(n==0):\n return 0\n if(n==1):\n return p[1]\n \n maxx=-1\n \n for i in range(1,n+1):\n a=p[i]+calmax(n-i, p)\n if(a > maxx):\n maxx=a\n \n return maxx\n\n\n\np=[0,1,5,8,9,10]\n\nprint(calmax(5,p))\n\n\n# In[ ]:\n\n\n\n" }, { "alpha_fraction": 0.5814207792282104, "alphanum_fraction": 0.6174863576889038, "avg_line_length": 25.794116973876953, "blob_id": "33e3880c528803c8f25291364e46e142dc12548a", "content_id": "f57ebc8608f485c9a9169131f247c0a85de6dc5f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 915, "license_type": "no_license", "max_line_length": 338, "num_lines": 34, "path": "/Dynamic Programming/3-1.py", "repo_name": "khaliliShoja/DynamicProgramming", "src_encoding": "UTF-8", "text": "\n# coding: utf-8\n\n# You are a professional robber planning to rob houses along a street. 
Each house has a certain amount of money stashed, the only constraint stopping you from robbing each of them is that adjacent houses have security system connected and it will automatically contact the police if two adjacent houses were broken into on the same night.\n# Given a list of non-negative integers representing the amount of money of each house, determine the maximum amount of money you can rob tonight without alerting the police.\n\n# In[2]:\n\np=[0,1,2,3,4,5]\np=[0,0,10,0,0,10]\ndef calMax(n,p):\n s=[0]*(n+1)\n s[0]=0\n s[1]=p[1]\n \n for i in range(2,n+1):\n #maxx=-1\n if(i==2):\n if(s[i-1]>p[i]):\n s[i]=s[i-1]\n else:\n s[i]=p[i]\n else:\n if(s[i-1] > s[i-2]+p[i]):\n s[i]=s[i-1]\n else:\n s[i]=s[i-2]+p[i]\n return s\n \n \nprint(calMax(5,p))\n \n\n\n# In[ ]:\n\n\n\n" }, { "alpha_fraction": 0.523809552192688, "alphanum_fraction": 0.5515872836112976, "avg_line_length": 13.70588207244873, "blob_id": "74134b915dd4d00a89ccdf7c41321858f3309431", "content_id": "af303731353f893edfaa7e41b18ee927bdc5bea8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 504, "license_type": "no_license", "max_line_length": 192, "num_lines": 34, "path": "/Dynamic Programming/4.py", "repo_name": "khaliliShoja/DynamicProgramming", "src_encoding": "UTF-8", "text": "\n# coding: utf-8\n\n# Say you have an array for which the ith element is the price of a given stock on day i, if you were only permitted to complete at most one transaction, design for finding the maximum profit.\n# \n\n# In[8]:\n\n\n\n\nl=[7,1,5,3,6,4]\n\n\ndef calMax(n,l):\n s=[0]*n\n s[0]=0\n for i in range(1,n):\n maxx=s[i-1]\n for j in range(0,i):\n if(l[i]-l[j] >= maxx):\n s[i]=l[i]-l[j]\n maxx=s[i]\n else:\n s[i]=maxx\n return s\n\n\n\nprint(calMax(len(l),l))\n \n \n\n\n# In[ ]:\n\n\n\n" }, { "alpha_fraction": 0.5355850458145142, "alphanum_fraction": 0.5597105026245117, "avg_line_length": 16.934782028198242, "blob_id": "38f99cd16817a41991a12829dfa99ecc8b4c5298", "content_id": "1b5a66665e8fab654528e3916ed9537cd762741c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 829, "license_type": "no_license", "max_line_length": 149, "num_lines": 46, "path": "/Dynamic Programming/5.py", "repo_name": "khaliliShoja/DynamicProgramming", "src_encoding": "UTF-8", "text": "\n# coding: utf-8\n\n# You are given n pairs of numbers. In every pair, the first number is always smaller than the second number.\n# \n# Now, we define a pair (c, d) can follow another pair (a, b) if and only if b < c. Chain of pairs can be formed in this fashion.\n# \n# Given a set of pairs, find the length longest chain which can be formed. You needn't use up all the given pairs. 
You can select pairs in any order.\n# \n\n# In[9]:\n\n\nl=[[1,2],[2,3],[3,4]]\n\n\n\nl.sort(key=lambda x:x[1], reverse=True)\n#print(l)\n#print(l[0][0])\n\ndef calMax(l):\n \n s=[0]*len(l)\n s[0]=1\n \n for i in range(1,len(l)):\n \n maxx=1\n for j in range(0,i):\n \n if(l[j][0]> l[i][1]):\n s[i]=s[j]+1\n #print(s[i])\n if(maxx < s[i]):\n maxx=s[i]\n s[i]=maxx\n \n return s\n \n\n\nprint(calMax(l)) \n \n\n\n# In[ ]:\n\n\n\n" }, { "alpha_fraction": 0.4893617033958435, "alphanum_fraction": 0.5517241358757019, "avg_line_length": 20.571428298950195, "blob_id": "210251641dfe892b90a46ec98cb0b5d26179a08d", "content_id": "4ad956cef3725b681b6f6a49da41efcc03aeebf4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1363, "license_type": "no_license", "max_line_length": 338, "num_lines": 63, "path": "/Dynamic Programming/3.py", "repo_name": "khaliliShoja/DynamicProgramming", "src_encoding": "UTF-8", "text": "\n# coding: utf-8\n\n# You are a professional robber planning to rob houses along a street. Each house has a certain amount of money stashed, the only constraint stopping you from robbing each of them is that adjacent houses have security system connected and it will automatically contact the police if two adjacent houses were broken into on the same night.\n# Given a list of non-negative integers representing the amount of money of each house, determine the maximum amount of money you can rob tonight without alerting the police.\n\n# In[1]:\n\np=[0,1,2,3,4,5]\np=[0,0,10,0,0,10]\nl=[0,-1,-1,-1,-1,-1]\n\n\ndef calMax(l, p, s):\n n=len(p)-1\n if(-1 not in l):\n #print(s[0])\n return s[0]\n \n \n \n a1=0\n a2=0\n maxx=-9\n for i in range(1,n+1):\n maxx1=-9\n\n if(l[i]==-1):\n if(i!= n and l[i-1]!=1 and l[i+1]!=1):\n l1=l[:]\n l1[i]=1\n s1=s[:]\n s1[0]=s1[0]+p[i]\n #print(s1)\n a1=calMax(l1,p,s1)\n if(i==n and l[i-1]!=1):\n l1=l[:]\n l1[i]=1\n s1=s[:]\n s1[0]=s1[0]+p[i]\n #print(s1[0])\n a1=calMax(l1,p,s1)\n #print(a1)\n \n l2=l[:]\n l2[i]=0\n s2=s[:]\n a2=calMax(l2,p,s2)\n #print(a2)\n if(a1 > a2):\n maxx1=a1\n else:\n maxx1=a2\n if(maxx1>maxx):\n return maxx1\n else:\n return maxx\n\n\nprint(calMax(l,p,[0]))\n \n\n\n# In[ ]:\n\n\n\n" } ]
9
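Illustration (not a dataset record): the khaliliShoja/DynamicProgramming sources above state the rod-cutting recurrence s[i] = max over j of s[j] + p[i-j], that is, the best revenue for length i is the best first cut plus the best revenue for the remainder. A compact bottom-up sketch of that recurrence, checked against the repository's price table [0, 1, 5, 8, 9, 10], might look as follows.

def max_revenue(prices):
    """prices[i] is the price of a rod of length i; prices[0] must be 0."""
    n = len(prices) - 1
    best = [0] * (n + 1)
    for length in range(1, n + 1):
        # Try every length for the first piece sold; add the best value of the rest.
        best[length] = max(prices[cut] + best[length - cut]
                           for cut in range(1, length + 1))
    return best[n]

assert max_revenue([0, 1, 5, 8, 9, 10]) == 13  # best split of length 5 is 2 + 3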
ysakpal/FirstGitProject
https://github.com/ysakpal/FirstGitProject
81891fd5475a9d715b6ba9cb0376b64b727c4f59
29fd54acaad61bcffc2268f344e31817a964697b
345ff1faec43fb3df072e9a6a136d55525082b93
refs/heads/main
2023-07-15T21:29:14.914373
2021-08-31T07:13:35
2021-08-31T07:13:35
400,486,710
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6515151262283325, "alphanum_fraction": 0.6515151262283325, "avg_line_length": 30, "blob_id": "433a6b23b2510f532989fa04afca174a9749a359", "content_id": "e8f2fc0494705f5b28c50c2f61d6020e8410d029", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 66, "license_type": "no_license", "max_line_length": 36, "num_lines": 2, "path": "/fileyash.py", "repo_name": "ysakpal/FirstGitProject", "src_encoding": "UTF-8", "text": "\nprint(\"hello yash long time no see\")\nprint(\"is it all good \")\n\n\n\n" }, { "alpha_fraction": 0.7142857313156128, "alphanum_fraction": 0.7142857313156128, "avg_line_length": 24, "blob_id": "68e77dec8114b0188b722523c7965104240e35f8", "content_id": "1bea14b7876cde258bf695d823dc5b96b1ee5f80", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 49, "license_type": "no_license", "max_line_length": 25, "num_lines": 2, "path": "/grettings.py", "repo_name": "ysakpal/FirstGitProject", "src_encoding": "UTF-8", "text": "print(\"welcome to all\")\nprint(\"nice to meet you\")" }, { "alpha_fraction": 0.6296296119689941, "alphanum_fraction": 0.6296296119689941, "avg_line_length": 21.16666603088379, "blob_id": "e431c73171f0255add083152c067d5643be7dd60", "content_id": "306e17494eba9affd13d6f1a695ac882d6e2ad18", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 135, "license_type": "no_license", "max_line_length": 29, "num_lines": 6, "path": "/hello_world.py", "repo_name": "ysakpal/FirstGitProject", "src_encoding": "UTF-8", "text": "print(\"hello world\")\nprint(\"------------\")\nprint(\"good morning mumbai\")\n#all good as all\n#helth is welth\nprint(\"how are you doing\")\n\n\n" } ]
3
dada325/acme
https://github.com/dada325/acme
fd2ff3401cb242ab789e5a5801c0f4a4ffd64b99
546a47a0154b50145dd9ac3fb3ca57c62e69805f
072ef2dc8b5a88bbe814ec9b6a57b61683cfe0bc
refs/heads/master
2023-07-06T01:03:27.150959
2023-06-13T15:36:36
2023-06-13T15:37:14
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6644012331962585, "alphanum_fraction": 0.6693227291107178, "avg_line_length": 38.87850570678711, "blob_id": "40fc8d86487a2367b8e7310a177f82f2ec1a09e8", "content_id": "a5b14ea9c96397a6305f63234dbcc6da15a4b51a", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4267, "license_type": "permissive", "max_line_length": 80, "num_lines": 107, "path": "/acme/tf/networks/distributions.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Distributions, for use in acme/networks/distributional.py.\"\"\"\n\nfrom typing import Optional\nimport tensorflow as tf\nimport tensorflow_probability as tfp\n\ntfd = tfp.distributions\n\n\[email protected]_composite_tensor\nclass DiscreteValuedDistribution(tfd.Categorical):\n \"\"\"This is a generalization of a categorical distribution.\n\n The support for the DiscreteValued distribution can be any real valued range,\n whereas the categorical distribution has support [0, n_categories - 1] or\n [1, n_categories]. This generalization allows us to take the mean of the\n distribution over its support.\n \"\"\"\n\n def __init__(self,\n values: tf.Tensor,\n logits: Optional[tf.Tensor] = None,\n probs: Optional[tf.Tensor] = None,\n name: str = 'DiscreteValuedDistribution'):\n \"\"\"Initialization.\n\n Args:\n values: Values making up support of the distribution. Should have a shape\n compatible with logits.\n logits: An N-D Tensor, N >= 1, representing the log probabilities of a set\n of Categorical distributions. The first N - 1 dimensions index into a\n batch of independent distributions and the last dimension indexes into\n the classes.\n probs: An N-D Tensor, N >= 1, representing the probabilities of a set of\n Categorical distributions. The first N - 1 dimensions index into a batch\n of independent distributions and the last dimension represents a vector\n of probabilities for each class. 
Only one of logits or probs should be\n passed in.\n name: Name of the distribution object.\n \"\"\"\n self._values = tf.convert_to_tensor(values)\n shape_strings = [f'D{i}' for i, _ in enumerate(values.shape)]\n\n if logits is not None:\n logits = tf.convert_to_tensor(logits)\n tf.debugging.assert_shapes([(values, shape_strings),\n (logits, [..., *shape_strings])])\n if probs is not None:\n probs = tf.convert_to_tensor(probs)\n tf.debugging.assert_shapes([(values, shape_strings),\n (probs, [..., *shape_strings])])\n\n super().__init__(logits=logits, probs=probs, name=name)\n\n self._parameters = dict(values=values,\n logits=logits,\n probs=probs,\n name=name)\n\n @property\n def values(self) -> tf.Tensor:\n return self._values\n\n @classmethod\n def _parameter_properties(cls, dtype, num_classes=None):\n return dict(\n values=tfp.util.ParameterProperties(event_ndims=None),\n logits=tfp.util.ParameterProperties(\n event_ndims=lambda self: self.values.shape.rank),\n probs=tfp.util.ParameterProperties(\n event_ndims=lambda self: self.values.shape.rank,\n is_preferred=False))\n\n def _sample_n(self, n, seed=None) -> tf.Tensor:\n indices = super()._sample_n(n, seed=seed)\n return tf.gather(self.values, indices, axis=-1)\n\n def _mean(self) -> tf.Tensor:\n \"\"\"Overrides the Categorical mean by incorporating category values.\"\"\"\n return tf.reduce_sum(self.probs_parameter() * self.values, axis=-1)\n\n def _variance(self) -> tf.Tensor:\n \"\"\"Overrides the Categorical variance by incorporating category values.\"\"\"\n dist_squared = tf.square(tf.expand_dims(self.mean(), -1) - self.values)\n return tf.reduce_sum(self.probs_parameter() * dist_squared, axis=-1)\n\n def _event_shape(self):\n # Omit the atoms axis, to return just the shape of a single (i.e. unbatched)\n # sample value.\n return self._values.shape[:-1]\n\n def _event_shape_tensor(self):\n return tf.shape(self._values)[:-1]\n" }, { "alpha_fraction": 0.6380857229232788, "alphanum_fraction": 0.6438185572624207, "avg_line_length": 39.52525329589844, "blob_id": "2250ea01ea5612ff3cd58df4c9ee82f825021eba", "content_id": "336b10509fa98dbb8cf1fd808b6751a5b53fa4a4", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4012, "license_type": "permissive", "max_line_length": 78, "num_lines": 99, "path": "/acme/agents/jax/crr/losses.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Loss (weight) functions for CRR.\"\"\"\n\nfrom typing import Callable\n\nfrom acme import types\nfrom acme.agents.jax.crr.networks import CRRNetworks\nfrom acme.jax import networks as networks_lib\nimport jax.numpy as jnp\n\nPolicyLossCoeff = Callable[[\n CRRNetworks,\n networks_lib.Params,\n networks_lib.Params,\n types.Transition,\n networks_lib.PRNGKey,\n], jnp.ndarray]\n\n\ndef _compute_advantage(networks: CRRNetworks,\n policy_params: networks_lib.Params,\n critic_params: networks_lib.Params,\n transition: types.Transition,\n key: networks_lib.PRNGKey,\n num_action_samples: int = 4) -> jnp.ndarray:\n \"\"\"Returns the advantage for the transition.\"\"\"\n # Sample count actions.\n replicated_observation = jnp.broadcast_to(transition.observation,\n (num_action_samples,) +\n transition.observation.shape)\n dist_params = networks.policy_network.apply(policy_params,\n replicated_observation)\n actions = networks.sample(dist_params, key)\n # Compute the state-action values for the sampled actions.\n q_actions = networks.critic_network.apply(critic_params,\n replicated_observation, actions)\n # Take the mean as the state-value estimate. It is also possible to take the\n # maximum, aka CRR(max); see table 1 in CRR paper.\n q_estimate = jnp.mean(q_actions, axis=0)\n # Compute the advantage.\n q = networks.critic_network.apply(critic_params, transition.observation,\n transition.action)\n return q - q_estimate\n\n\ndef policy_loss_coeff_advantage_exp(\n networks: CRRNetworks,\n policy_params: networks_lib.Params,\n critic_params: networks_lib.Params,\n transition: types.Transition,\n key: networks_lib.PRNGKey,\n num_action_samples: int = 4,\n beta: float = 1.0,\n ratio_upper_bound: float = 20.0) -> jnp.ndarray:\n \"\"\"Exponential advantage weigting; see equation (4) in CRR paper.\"\"\"\n advantage = _compute_advantage(networks, policy_params, critic_params,\n transition, key, num_action_samples)\n return jnp.minimum(jnp.exp(advantage / beta), ratio_upper_bound)\n\n\ndef policy_loss_coeff_advantage_indicator(\n networks: CRRNetworks,\n policy_params: networks_lib.Params,\n critic_params: networks_lib.Params,\n transition: types.Transition,\n key: networks_lib.PRNGKey,\n num_action_samples: int = 4) -> jnp.ndarray:\n \"\"\"Indicator advantage weighting; see equation (3) in CRR paper.\"\"\"\n advantage = _compute_advantage(networks, policy_params, critic_params,\n transition, key, num_action_samples)\n return jnp.heaviside(advantage, 0.)\n\n\ndef policy_loss_coeff_constant(networks: CRRNetworks,\n policy_params: networks_lib.Params,\n critic_params: networks_lib.Params,\n transition: types.Transition,\n key: networks_lib.PRNGKey,\n value: float = 1.0) -> jnp.ndarray:\n \"\"\"Constant weights.\"\"\"\n del networks\n del policy_params\n del critic_params\n del transition\n del key\n return value # pytype: disable=bad-return-type # jax-ndarray\n" }, { "alpha_fraction": 0.7041089534759521, "alphanum_fraction": 
0.7083519697189331, "avg_line_length": 36.630252838134766, "blob_id": "f1ff765d78556c571a35a73405c95c9ecf88a4ec", "content_id": "772bb1fb6efdb83c56d2638a21c7da7b087ffaea", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4478, "license_type": "permissive", "max_line_length": 82, "num_lines": 119, "path": "/acme/jax/experiments/run_offline_experiment.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Runner used for executing local offline RL agents.\"\"\"\n\nimport acme\nfrom acme import specs\nfrom acme.jax.experiments import config\nfrom acme.tf import savers\nfrom acme.utils import counting\nimport jax\n\n\ndef run_offline_experiment(experiment: config.OfflineExperimentConfig,\n eval_every: int = 100,\n num_eval_episodes: int = 1):\n \"\"\"Runs a simple, single-threaded training loop using the default evaluators.\n\n It targets simplicity of the code and so only the basic features of the\n OfflineExperimentConfig are supported.\n\n Arguments:\n experiment: Definition and configuration of the agent to run.\n eval_every: After how many learner steps to perform evaluation.\n num_eval_episodes: How many evaluation episodes to execute at each\n evaluation step.\n \"\"\"\n\n key = jax.random.PRNGKey(experiment.seed)\n\n # Create the environment and get its spec.\n environment = experiment.environment_factory(experiment.seed)\n environment_spec = experiment.environment_spec or specs.make_environment_spec(\n environment)\n\n # Create the networks and policy.\n networks = experiment.network_factory(environment_spec)\n\n # Parent counter allows to share step counts between train and eval loops and\n # the learner, so that it is possible to plot for example evaluator's return\n # value as a function of the number of training episodes.\n parent_counter = counting.Counter(time_delta=0.)\n\n # Create the demonstrations dataset.\n dataset_key, key = jax.random.split(key)\n dataset = experiment.demonstration_dataset_factory(dataset_key)\n\n # Create the learner.\n learner_key, key = jax.random.split(key)\n learner = experiment.builder.make_learner(\n random_key=learner_key,\n networks=networks,\n dataset=dataset,\n logger_fn=experiment.logger_factory,\n environment_spec=environment_spec,\n counter=counting.Counter(parent_counter, prefix='learner', time_delta=0.))\n\n # Define the evaluation loop.\n eval_loop = None\n if num_eval_episodes > 0:\n # Create the evaluation actor and loop.\n eval_counter = counting.Counter(\n parent_counter, prefix='evaluator', time_delta=0.)\n eval_logger = experiment.logger_factory('evaluator',\n eval_counter.get_steps_key(), 0)\n eval_key, key = jax.random.split(key)\n eval_actor = experiment.builder.make_actor(\n random_key=eval_key,\n policy=experiment.builder.make_policy(networks, environment_spec, True),\n environment_spec=environment_spec,\n 
variable_source=learner)\n eval_loop = acme.EnvironmentLoop(\n environment,\n eval_actor,\n counter=eval_counter,\n logger=eval_logger,\n observers=experiment.observers)\n\n checkpointer = None\n if experiment.checkpointing is not None:\n checkpointing = experiment.checkpointing\n checkpointer = savers.Checkpointer(\n objects_to_save={'learner': learner, 'counter': parent_counter},\n time_delta_minutes=checkpointing.time_delta_minutes,\n directory=checkpointing.directory,\n add_uid=checkpointing.add_uid,\n max_to_keep=checkpointing.max_to_keep,\n keep_checkpoint_every_n_hours=checkpointing.keep_checkpoint_every_n_hours,\n checkpoint_ttl_seconds=checkpointing.checkpoint_ttl_seconds,\n )\n\n max_num_learner_steps = (\n experiment.max_num_learner_steps -\n parent_counter.get_counts().get('learner_steps', 0))\n\n # Run the training loop.\n if eval_loop:\n eval_loop.run(num_eval_episodes)\n steps = 0\n while steps < max_num_learner_steps:\n learner_steps = min(eval_every, max_num_learner_steps - steps)\n for _ in range(learner_steps):\n learner.step()\n if checkpointer is not None:\n checkpointer.save()\n if eval_loop:\n eval_loop.run(num_eval_episodes)\n steps += learner_steps\n" }, { "alpha_fraction": 0.6706500053405762, "alphanum_fraction": 0.6766250133514404, "avg_line_length": 39.610294342041016, "blob_id": "4626c41d0a25718f0633874edff2db7489268656", "content_id": "e359a39f77fc49d33335f82881220ab84f267f88", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5523, "license_type": "permissive", "max_line_length": 94, "num_lines": 136, "path": "/acme/agents/agent.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"The base agent interface.\"\"\"\n\nimport math\nfrom typing import List, Optional, Sequence\n\nfrom acme import core\nfrom acme import types\nimport dm_env\nimport numpy as np\nimport reverb\n\n\ndef _calculate_num_learner_steps(num_observations: int,\n min_observations: int,\n observations_per_step: float) -> int:\n \"\"\"Calculates the number of learner steps to do at step=num_observations.\"\"\"\n n = num_observations - min_observations\n if n < 0:\n # Do not do any learner steps until you have seen min_observations.\n return 0\n if observations_per_step > 1:\n # One batch every 1/obs_per_step observations, otherwise zero.\n return int(n % int(observations_per_step) == 0)\n else:\n # Always return 1/obs_per_step batches every observation.\n return int(1 / observations_per_step)\n\n\nclass Agent(core.Actor, core.VariableSource):\n \"\"\"Agent class which combines acting and learning.\n\n This provides an implementation of the `Actor` interface which acts and\n learns. 
It takes as input instances of both `acme.Actor` and `acme.Learner`\n classes, and implements the policy, observation, and update methods which\n defer to the underlying actor and learner.\n\n The only real logic implemented by this class is that it controls the number\n of observations to make before running a learner step. This is done by\n passing the number of `min_observations` to use and a ratio of\n `observations_per_step` := num_actor_actions / num_learner_steps.\n\n Note that the number of `observations_per_step` can also be in the range[0, 1]\n in order to allow the agent to take more than 1 learner step per action.\n \"\"\"\n\n def __init__(self, actor: core.Actor, learner: core.Learner,\n min_observations: Optional[int] = None,\n observations_per_step: Optional[float] = None,\n iterator: Optional[core.PrefetchingIterator] = None,\n replay_tables: Optional[List[reverb.Table]] = None):\n self._actor = actor\n self._learner = learner\n self._min_observations = min_observations\n self._observations_per_step = observations_per_step\n self._num_observations = 0\n self._iterator = iterator\n self._replay_tables = replay_tables\n self._batch_size_upper_bounds = [1_000_000_000] * len(\n replay_tables) if replay_tables else None\n\n def select_action(self, observation: types.NestedArray) -> types.NestedArray:\n return self._actor.select_action(observation)\n\n def observe_first(self, timestep: dm_env.TimeStep):\n self._actor.observe_first(timestep)\n\n def observe(self, action: types.NestedArray, next_timestep: dm_env.TimeStep):\n self._num_observations += 1\n self._actor.observe(action, next_timestep)\n\n def _has_data_for_training(self):\n if self._iterator.ready():\n return True\n for (table, batch_size) in zip(self._replay_tables,\n self._batch_size_upper_bounds):\n if not table.can_sample(batch_size):\n return False\n return True\n\n def update(self): # pytype: disable=signature-mismatch # overriding-parameter-count-checks\n if self._iterator:\n # Perform learner steps as long as iterator has data.\n update_actor = False\n while self._has_data_for_training():\n # Run learner steps (usually means gradient steps).\n total_batches = self._iterator.retrieved_elements()\n self._learner.step()\n current_batches = self._iterator.retrieved_elements() - total_batches\n assert current_batches == 1, (\n 'Learner step must retrieve exactly one element from the iterator'\n f' (retrieved {current_batches}). Otherwise agent can deadlock. 
'\n 'Example cause is that your chosen agent'\n 's Builder has a '\n '`make_learner` factory that prefetches the data but it '\n 'shouldn'\n 't.')\n self._batch_size_upper_bounds = [\n math.ceil(t.info.rate_limiter_info.sample_stats.completed /\n (total_batches + 1)) for t in self._replay_tables\n ]\n update_actor = True\n if update_actor:\n # Update the actor weights only when learner was updated.\n self._actor.update()\n return\n\n # If dataset is not provided, follback to the old logic.\n # TODO(stanczyk): Remove when not used.\n num_steps = _calculate_num_learner_steps(\n num_observations=self._num_observations,\n min_observations=self._min_observations,\n observations_per_step=self._observations_per_step,\n )\n for _ in range(num_steps):\n # Run learner steps (usually means gradient steps).\n self._learner.step()\n if num_steps > 0:\n # Update the actor weights when learner updates.\n self._actor.update()\n\n def get_variables(self, names: Sequence[str]) -> List[List[np.ndarray]]:\n return self._learner.get_variables(names)\n" }, { "alpha_fraction": 0.7112725377082825, "alphanum_fraction": 0.7178345322608948, "avg_line_length": 36.42982482910156, "blob_id": "63e1ffc5da628547fccca31d0773db690c681af3", "content_id": "13995077e2c382363b10fdc73b4246d4d11c08f7", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4267, "license_type": "permissive", "max_line_length": 119, "num_lines": 114, "path": "/acme/agents/jax/dqn/actor.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"DQN actor helpers.\"\"\"\n\nfrom typing import Callable, Sequence\n\nfrom acme.agents.jax import actor_core as actor_core_lib\nfrom acme.agents.jax.dqn import networks as dqn_networks\nfrom acme.jax import networks as networks_lib\nfrom acme.jax import utils\nimport chex\nimport jax\nimport jax.numpy as jnp\n\n\nEpsilon = float\nEpsilonPolicy = Callable[[\n networks_lib.Params, networks_lib.PRNGKey, networks_lib\n .Observation, Epsilon\n], networks_lib.Action]\n\n\[email protected](frozen=True, mappable_dataclass=False)\nclass EpsilonActorState:\n rng: networks_lib.PRNGKey\n epsilon: jnp.ndarray\n\n\nDQNPolicy = actor_core_lib.ActorCore[EpsilonActorState, None]\n\n\ndef alternating_epsilons_actor_core(policy_network: EpsilonPolicy,\n epsilons: Sequence[float]) -> DQNPolicy:\n \"\"\"Returns actor components for alternating epsilon exploration.\n\n Args:\n policy_network: A feedforward action selecting function.\n epsilons: epsilons to alternate per-episode for epsilon-greedy exploration.\n\n Returns:\n A feedforward policy.\n \"\"\"\n epsilons = jnp.array(epsilons)\n\n def apply_and_sample(params: networks_lib.Params,\n observation: networks_lib.Observation,\n state: EpsilonActorState):\n random_key, key = jax.random.split(state.rng)\n actions = policy_network(params, key, observation, state.epsilon) # pytype: 
disable=wrong-arg-types # jax-ndarray\n return (actions.astype(jnp.int32),\n EpsilonActorState(rng=random_key, epsilon=state.epsilon))\n\n def policy_init(random_key: networks_lib.PRNGKey):\n random_key, key = jax.random.split(random_key)\n epsilon = jax.random.choice(key, epsilons)\n return EpsilonActorState(rng=random_key, epsilon=epsilon)\n\n return actor_core_lib.ActorCore(\n init=policy_init, select_action=apply_and_sample,\n get_extras=lambda _: None)\n\n\ndef behavior_policy(networks: dqn_networks.DQNNetworks) -> EpsilonPolicy:\n \"\"\"A policy with parameterized epsilon-greedy exploration.\"\"\"\n\n def apply_and_sample(params: networks_lib.Params, key: networks_lib.PRNGKey,\n observation: networks_lib.Observation, epsilon: Epsilon\n ) -> networks_lib.Action:\n # TODO(b/161332815): Make JAX Actor work with batched or unbatched inputs.\n observation = utils.add_batch_dim(observation)\n action_values = networks.policy_network.apply(\n params, observation, is_training=False)\n action_values = utils.squeeze_batch_dim(action_values)\n return networks.sample_fn(action_values, key, epsilon)\n\n return apply_and_sample\n\n\ndef default_behavior_policy(networks: dqn_networks.DQNNetworks,\n epsilon: Epsilon) -> EpsilonPolicy:\n \"\"\"A policy with a fixed-epsilon epsilon-greedy exploration.\n\n DEPRECATED: use behavior_policy instead.\n Args:\n networks: DQN networks\n epsilon: sampling parameter that overrides the one in EpsilonPolicy\n Returns:\n epsilon-greedy behavior policy with fixed epsilon\n \"\"\"\n # TODO(lukstafi): remove this function and migrate its users.\n\n def apply_and_sample(params: networks_lib.Params, key: networks_lib.PRNGKey,\n observation: networks_lib.Observation, _: Epsilon\n ) -> networks_lib.Action:\n # TODO(b/161332815): Make JAX Actor work with batched or unbatched inputs.\n observation = utils.add_batch_dim(observation)\n action_values = networks.policy_network.apply(\n params, observation, is_training=False)\n action_values = utils.squeeze_batch_dim(action_values)\n return networks.sample_fn(action_values, key, epsilon)\n\n return apply_and_sample\n" }, { "alpha_fraction": 0.6717513203620911, "alphanum_fraction": 0.6739851832389832, "avg_line_length": 37.78217697143555, "blob_id": "0a3b5384f144e5a6eab9cd2f531bff31d50a9870", "content_id": "d90022fb05064690e33d3a2273c80393865b500e", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 15668, "license_type": "permissive", "max_line_length": 86, "num_lines": 404, "path": "/acme/jax/experiments/make_distributed_experiment.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Program definition for a distributed layout based on a builder.\"\"\"\n\nimport itertools\nimport math\nfrom typing import Any, List, Optional\n\nfrom acme import core\nfrom acme import environment_loop\nfrom acme import specs\nfrom acme.agents.jax import actor_core\nfrom acme.agents.jax import builders\nfrom acme.jax import inference_server as inference_server_lib\nfrom acme.jax import networks as networks_lib\nfrom acme.jax import savers\nfrom acme.jax import utils\nfrom acme.jax import variable_utils\nfrom acme.jax.experiments import config\nfrom acme.jax import snapshotter\nfrom acme.utils import counting\nfrom acme.utils import lp_utils\nimport jax\nimport launchpad as lp\nimport reverb\n\nActorId = int\nInferenceServer = inference_server_lib.InferenceServer[\n actor_core.SelectActionFn]\n\n\n\n\ndef make_distributed_experiment(\n experiment: config.ExperimentConfig[builders.Networks, Any, Any],\n num_actors: int,\n *,\n inference_server_config: Optional[\n inference_server_lib.InferenceServerConfig\n ] = None,\n num_learner_nodes: int = 1,\n num_actors_per_node: int = 1,\n num_inference_servers: int = 1,\n multiprocessing_colocate_actors: bool = False,\n multithreading_colocate_learner_and_reverb: bool = False,\n make_snapshot_models: Optional[\n config.SnapshotModelFactory[builders.Networks]\n ] = None,\n name: str = 'agent',\n program: Optional[lp.Program] = None,\n) -> lp.Program:\n \"\"\"Builds a Launchpad program for running the experiment.\n\n Args:\n experiment: configuration of the experiment.\n num_actors: number of actors to run.\n inference_server_config: If provided we will attempt to use\n `num_inference_servers` inference servers for selecting actions.\n There are two assumptions if this config is provided:\n 1) The experiment's policy is an `ActorCore` and a\n `TypeError` will be raised if not.\n 2) The `ActorCore`'s `select_action` method runs on\n unbatched observations.\n num_learner_nodes: number of learner nodes to run. When using multiple\n learner nodes, make sure the learner class does the appropriate pmap/pmean\n operations on the loss/gradients, respectively.\n num_actors_per_node: number of actors per one program node. Actors within\n one node are colocated in one or multiple processes depending on the value\n of multiprocessing_colocate_actors.\n num_inference_servers: number of inference servers to serve actors. (Only\n used if `inference_server_config` is provided.)\n multiprocessing_colocate_actors: whether to colocate actor nodes as\n subprocesses on a single machine. False by default, which means colocate\n within a single process.\n multithreading_colocate_learner_and_reverb: whether to colocate the learner\n and reverb nodes in one process. Not supported if the learner is spread\n across multiple nodes (num_learner_nodes > 1). 
False by default, which\n means no colocation.\n make_snapshot_models: a factory that defines what is saved in snapshots.\n name: name of the constructed program. Ignored if an existing program is\n passed.\n program: a program where agent nodes are added to. If None, a new program is\n created.\n\n Returns:\n The Launchpad program with all the nodes needed for running the experiment.\n \"\"\"\n\n if multithreading_colocate_learner_and_reverb and num_learner_nodes > 1:\n raise ValueError(\n 'Replay and learner colocation is not yet supported when the learner is'\n ' spread across multiple nodes (num_learner_nodes > 1). Please contact'\n ' Acme devs if this is a feature you want. Got:'\n '\\tmultithreading_colocate_learner_and_reverb='\n f'{multithreading_colocate_learner_and_reverb}'\n f'\\tnum_learner_nodes={num_learner_nodes}.')\n\n\n def build_replay():\n \"\"\"The replay storage.\"\"\"\n dummy_seed = 1\n spec = (\n experiment.environment_spec or\n specs.make_environment_spec(experiment.environment_factory(dummy_seed)))\n network = experiment.network_factory(spec)\n policy = config.make_policy(\n experiment=experiment,\n networks=network,\n environment_spec=spec,\n evaluation=False)\n return experiment.builder.make_replay_tables(spec, policy)\n\n def build_model_saver(variable_source: core.VariableSource):\n assert experiment.checkpointing\n environment = experiment.environment_factory(0)\n spec = specs.make_environment_spec(environment)\n networks = experiment.network_factory(spec)\n models = make_snapshot_models(networks, spec)\n # TODO(raveman): Decouple checkpointing and snapshotting configs.\n return snapshotter.JAXSnapshotter(\n variable_source=variable_source,\n models=models,\n path=experiment.checkpointing.directory,\n subdirectory='snapshots',\n add_uid=experiment.checkpointing.add_uid)\n\n def build_counter():\n counter = counting.Counter()\n if experiment.checkpointing:\n checkpointing = experiment.checkpointing\n counter = savers.CheckpointingRunner(\n counter,\n key='counter',\n subdirectory='counter',\n time_delta_minutes=checkpointing.time_delta_minutes,\n directory=checkpointing.directory,\n add_uid=checkpointing.add_uid,\n max_to_keep=checkpointing.max_to_keep,\n keep_checkpoint_every_n_hours=checkpointing.keep_checkpoint_every_n_hours,\n checkpoint_ttl_seconds=checkpointing.checkpoint_ttl_seconds,\n )\n return counter\n\n def build_learner(\n random_key: networks_lib.PRNGKey,\n replay: reverb.Client,\n counter: Optional[counting.Counter] = None,\n primary_learner: Optional[core.Learner] = None,\n ):\n \"\"\"The Learning part of the agent.\"\"\"\n\n dummy_seed = 1\n spec = (\n experiment.environment_spec or\n specs.make_environment_spec(experiment.environment_factory(dummy_seed)))\n\n # Creates the networks to optimize (online) and target networks.\n networks = experiment.network_factory(spec)\n\n iterator = experiment.builder.make_dataset_iterator(replay)\n # make_dataset_iterator is responsible for putting data onto appropriate\n # training devices, so here we apply prefetch, so that data is copied over\n # in the background.\n iterator = utils.prefetch(iterable=iterator, buffer_size=1)\n counter = counting.Counter(counter, 'learner')\n learner = experiment.builder.make_learner(random_key, networks, iterator,\n experiment.logger_factory, spec,\n replay, counter)\n\n if experiment.checkpointing:\n if primary_learner is None:\n checkpointing = experiment.checkpointing\n learner = savers.CheckpointingRunner(\n learner,\n key='learner',\n subdirectory='learner',\n 
time_delta_minutes=5,\n directory=checkpointing.directory,\n add_uid=checkpointing.add_uid,\n max_to_keep=checkpointing.max_to_keep,\n keep_checkpoint_every_n_hours=checkpointing.keep_checkpoint_every_n_hours,\n checkpoint_ttl_seconds=checkpointing.checkpoint_ttl_seconds,\n )\n else:\n learner.restore(primary_learner.save())\n # NOTE: This initially synchronizes secondary learner states with the\n # primary one. Further synchronization should be handled by the learner\n # properly doing a pmap/pmean on the loss/gradients, respectively.\n\n return learner\n\n def build_inference_server(\n inference_server_config: inference_server_lib.InferenceServerConfig,\n variable_source: core.VariableSource,\n ) -> InferenceServer:\n \"\"\"Builds an inference server for `ActorCore` policies.\"\"\"\n dummy_seed = 1\n spec = (\n experiment.environment_spec or\n specs.make_environment_spec(experiment.environment_factory(dummy_seed)))\n networks = experiment.network_factory(spec)\n policy = config.make_policy(\n experiment=experiment,\n networks=networks,\n environment_spec=spec,\n evaluation=False,\n )\n if not isinstance(policy, actor_core.ActorCore):\n raise TypeError(\n f'Using InferenceServer with policy of unsupported type:'\n f'{type(policy)}. InferenceServer only supports `ActorCore` policies.'\n )\n\n return InferenceServer(\n handler=jax.jit(\n jax.vmap(\n policy.select_action,\n in_axes=(None, 0, 0),\n # Note on in_axes: Params will not be batched. Only the\n # observations and actor state will be stacked along a new\n # leading axis by the inference server.\n ),),\n variable_source=variable_source,\n devices=jax.local_devices(),\n config=inference_server_config,\n )\n\n def build_actor(\n random_key: networks_lib.PRNGKey,\n replay: reverb.Client,\n variable_source: core.VariableSource,\n counter: counting.Counter,\n actor_id: ActorId,\n inference_server: Optional[InferenceServer],\n ) -> environment_loop.EnvironmentLoop:\n \"\"\"The actor process.\"\"\"\n environment_key, actor_key = jax.random.split(random_key)\n # Create environment and policy core.\n\n # Environments normally require uint32 as a seed.\n environment = experiment.environment_factory(\n utils.sample_uint32(environment_key))\n environment_spec = specs.make_environment_spec(environment)\n\n networks = experiment.network_factory(environment_spec)\n policy_network = config.make_policy(\n experiment=experiment,\n networks=networks,\n environment_spec=environment_spec,\n evaluation=False)\n if inference_server is not None:\n policy_network = actor_core.ActorCore(\n init=policy_network.init,\n select_action=inference_server.handler,\n get_extras=policy_network.get_extras,\n )\n variable_source = variable_utils.ReferenceVariableSource()\n\n adder = experiment.builder.make_adder(replay, environment_spec,\n policy_network)\n actor = experiment.builder.make_actor(actor_key, policy_network,\n environment_spec, variable_source,\n adder)\n\n # Create logger and counter.\n counter = counting.Counter(counter, 'actor')\n logger = experiment.logger_factory('actor', counter.get_steps_key(),\n actor_id)\n # Create the loop to connect environment and agent.\n return environment_loop.EnvironmentLoop(\n environment, actor, counter, logger, observers=experiment.observers)\n\n if not program:\n program = lp.Program(name=name)\n\n key = jax.random.PRNGKey(experiment.seed)\n\n checkpoint_time_delta_minutes: Optional[int] = (\n experiment.checkpointing.replay_checkpointing_time_delta_minutes\n if experiment.checkpointing else None)\n replay_node = 
lp.ReverbNode(\n build_replay, checkpoint_time_delta_minutes=checkpoint_time_delta_minutes)\n replay = replay_node.create_handle()\n\n counter = program.add_node(lp.CourierNode(build_counter), label='counter')\n\n if experiment.max_num_actor_steps is not None:\n program.add_node(\n lp.CourierNode(lp_utils.StepsLimiter, counter,\n experiment.max_num_actor_steps),\n label='counter')\n\n learner_key, key = jax.random.split(key)\n learner_node = lp.CourierNode(build_learner, learner_key, replay, counter)\n learner = learner_node.create_handle()\n variable_sources = [learner]\n\n if multithreading_colocate_learner_and_reverb:\n program.add_node(\n lp.MultiThreadingColocation([learner_node, replay_node]),\n label='learner')\n else:\n program.add_node(replay_node, label='replay')\n\n with program.group('learner'):\n program.add_node(learner_node)\n\n # Maybe create secondary learners, necessary when using multi-host\n # accelerators.\n # Warning! If you set num_learner_nodes > 1, make sure the learner class\n # does the appropriate pmap/pmean operations on the loss/gradients,\n # respectively.\n for _ in range(1, num_learner_nodes):\n learner_key, key = jax.random.split(key)\n variable_sources.append(\n program.add_node(\n lp.CourierNode(\n build_learner, learner_key, replay,\n primary_learner=learner)))\n # NOTE: Secondary learners are used to load-balance get_variables calls,\n # which is why they get added to the list of available variable sources.\n # NOTE: Only the primary learner checkpoints.\n # NOTE: Do not pass the counter to the secondary learners to avoid\n # double counting of learner steps.\n\n if inference_server_config is not None:\n num_actors_per_server = math.ceil(num_actors / num_inference_servers)\n with program.group('inference_server'):\n inference_nodes = []\n for _ in range(num_inference_servers):\n inference_nodes.append(\n program.add_node(\n lp.CourierNode(\n build_inference_server,\n inference_server_config,\n learner,\n courier_kwargs={'thread_pool_size': num_actors_per_server\n })))\n else:\n num_inference_servers = 1\n inference_nodes = [None]\n\n num_actor_nodes, remainder = divmod(num_actors, num_actors_per_node)\n num_actor_nodes += int(remainder > 0)\n\n\n with program.group('actor'):\n # Create all actor threads.\n *actor_keys, key = jax.random.split(key, num_actors + 1)\n\n # Create (maybe colocated) actor nodes.\n for node_id, variable_source, inference_node in zip(\n range(num_actor_nodes),\n itertools.cycle(variable_sources),\n itertools.cycle(inference_nodes),\n ):\n colocation_nodes = []\n\n first_actor_id = node_id * num_actors_per_node\n for actor_id in range(\n first_actor_id, min(first_actor_id + num_actors_per_node, num_actors)\n ):\n actor = lp.CourierNode(\n build_actor,\n actor_keys[actor_id],\n replay,\n variable_source,\n counter,\n actor_id,\n inference_node,\n )\n colocation_nodes.append(actor)\n\n if len(colocation_nodes) == 1:\n program.add_node(colocation_nodes[0])\n elif multiprocessing_colocate_actors:\n program.add_node(lp.MultiProcessingColocation(colocation_nodes))\n else:\n program.add_node(lp.MultiThreadingColocation(colocation_nodes))\n\n for evaluator in experiment.get_evaluator_factories():\n evaluator_key, key = jax.random.split(key)\n program.add_node(\n lp.CourierNode(evaluator, evaluator_key, learner, counter,\n experiment.builder.make_actor),\n label='evaluator')\n\n if make_snapshot_models and experiment.checkpointing:\n program.add_node(\n lp.CourierNode(build_model_saver, learner), label='model_saver')\n\n return 
program\n" }, { "alpha_fraction": 0.6491833925247192, "alphanum_fraction": 0.6642587780952454, "avg_line_length": 35.59770202636719, "blob_id": "df8da2e474a965699752b59cad8b3e8f7bb3e6ae", "content_id": "26c769fac03752682e7daa109572acf24164c2d4", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12736, "license_type": "permissive", "max_line_length": 102, "num_lines": 348, "path": "/acme/agents/jax/dqn/losses.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"DQN losses.\"\"\"\nimport dataclasses\nfrom typing import Tuple\n\nfrom acme import types\nfrom acme.agents.jax.dqn import learning_lib\nfrom acme.jax import networks as networks_lib\nimport chex\nimport jax\nimport jax.numpy as jnp\nimport reverb\nimport rlax\n\n\[email protected]\nclass PrioritizedDoubleQLearning(learning_lib.LossFn):\n \"\"\"Clipped double q learning with prioritization on TD error.\"\"\"\n discount: float = 0.99\n importance_sampling_exponent: float = 0.2\n max_abs_reward: float = 1.\n huber_loss_parameter: float = 1.\n\n def __call__(\n self,\n network: networks_lib.TypedFeedForwardNetwork,\n params: networks_lib.Params,\n target_params: networks_lib.Params,\n batch: reverb.ReplaySample,\n key: networks_lib.PRNGKey,\n ) -> Tuple[jax.Array, learning_lib.LossExtra]:\n \"\"\"Calculate a loss on a single batch of data.\"\"\"\n transitions: types.Transition = batch.data\n probs = batch.info.probability\n\n # Forward pass.\n key1, key2, key3 = jax.random.split(key, 3)\n q_tm1 = network.apply(\n params, transitions.observation, is_training=True, key=key1)\n q_t_value = network.apply(\n target_params, transitions.next_observation, is_training=True, key=key2)\n q_t_selector = network.apply(\n params, transitions.next_observation, is_training=True, key=key3)\n\n # Cast and clip rewards.\n d_t = (transitions.discount * self.discount).astype(jnp.float32)\n r_t = jnp.clip(transitions.reward, -self.max_abs_reward,\n self.max_abs_reward).astype(jnp.float32)\n\n # Compute double Q-learning n-step TD-error.\n batch_error = jax.vmap(rlax.double_q_learning)\n td_error = batch_error(q_tm1, transitions.action, r_t, d_t, q_t_value,\n q_t_selector)\n batch_loss = rlax.huber_loss(td_error, self.huber_loss_parameter)\n\n # Importance weighting.\n importance_weights = (1. 
/ probs).astype(jnp.float32)\n importance_weights **= self.importance_sampling_exponent\n importance_weights /= jnp.max(importance_weights)\n\n # Reweight.\n loss = jnp.mean(importance_weights * batch_loss) # []\n extra = learning_lib.LossExtra(\n metrics={}, reverb_priorities=jnp.abs(td_error).astype(jnp.float64))\n return loss, extra\n\n\[email protected]\nclass QrDqn(learning_lib.LossFn):\n \"\"\"Quantile Regression DQN.\n\n https://arxiv.org/abs/1710.10044\n \"\"\"\n num_atoms: int = 51\n huber_param: float = 1.0\n\n def __call__(\n self,\n network: networks_lib.TypedFeedForwardNetwork,\n params: networks_lib.Params,\n target_params: networks_lib.Params,\n batch: reverb.ReplaySample,\n key: networks_lib.PRNGKey,\n ) -> Tuple[jax.Array, learning_lib.LossExtra]:\n \"\"\"Calculate a loss on a single batch of data.\"\"\"\n transitions: types.Transition = batch.data\n key1, key2 = jax.random.split(key)\n _, dist_q_tm1 = network.apply(\n params, transitions.observation, is_training=True, key=key1)\n _, dist_q_target_t = network.apply(\n target_params, transitions.next_observation, is_training=True, key=key2)\n batch_size = len(transitions.observation)\n chex.assert_shape(\n dist_q_tm1, (\n batch_size,\n None,\n self.num_atoms,\n ),\n custom_message=f'Expected (batch_size, num_actions, num_atoms), got: {dist_q_tm1.shape}',\n include_default_message=True)\n chex.assert_shape(\n dist_q_target_t, (\n batch_size,\n None,\n self.num_atoms,\n ),\n custom_message=f'Expected (batch_size, num_actions, num_atoms), got: {dist_q_target_t.shape}',\n include_default_message=True)\n # Swap distribution and action dimension, since\n # rlax.quantile_q_learning expects it that way.\n dist_q_tm1 = jnp.swapaxes(dist_q_tm1, 1, 2)\n dist_q_target_t = jnp.swapaxes(dist_q_target_t, 1, 2)\n quantiles = (\n (jnp.arange(self.num_atoms, dtype=jnp.float32) + 0.5) / self.num_atoms)\n batch_quantile_q_learning = jax.vmap(\n rlax.quantile_q_learning, in_axes=(0, None, 0, 0, 0, 0, 0, None))\n losses = batch_quantile_q_learning(\n dist_q_tm1,\n quantiles,\n transitions.action,\n transitions.reward,\n transitions.discount,\n dist_q_target_t, # No double Q-learning here.\n dist_q_target_t,\n self.huber_param,\n )\n loss = jnp.mean(losses)\n chex.assert_shape(losses, (batch_size,))\n extra = learning_lib.LossExtra(metrics={'mean_loss': loss})\n return loss, extra\n\n\[email protected]\nclass PrioritizedCategoricalDoubleQLearning(learning_lib.LossFn):\n \"\"\"Categorical double q learning with prioritization on TD error.\"\"\"\n discount: float = 0.99\n importance_sampling_exponent: float = 0.2\n max_abs_reward: float = 1.\n\n def __call__(\n self,\n network: networks_lib.TypedFeedForwardNetwork,\n params: networks_lib.Params,\n target_params: networks_lib.Params,\n batch: reverb.ReplaySample,\n key: networks_lib.PRNGKey,\n ) -> Tuple[jax.Array, learning_lib.LossExtra]:\n \"\"\"Calculate a loss on a single batch of data.\"\"\"\n transitions: types.Transition = batch.data\n probs = batch.info.probability\n\n # Forward pass.\n key1, key2, key3 = jax.random.split(key, 3)\n _, logits_tm1, atoms_tm1 = network.apply(\n params, transitions.observation, is_training=True, key=key1)\n _, logits_t, atoms_t = network.apply(\n target_params, transitions.next_observation, is_training=True, key=key2)\n q_t_selector, _, _ = network.apply(\n params, transitions.next_observation, is_training=True, key=key3)\n\n # Cast and clip rewards.\n d_t = (transitions.discount * self.discount).astype(jnp.float32)\n r_t = jnp.clip(transitions.reward, 
-self.max_abs_reward,\n self.max_abs_reward).astype(jnp.float32)\n\n # Compute categorical double Q-learning loss.\n batch_loss_fn = jax.vmap(\n rlax.categorical_double_q_learning,\n in_axes=(None, 0, 0, 0, 0, None, 0, 0))\n batch_loss = batch_loss_fn(atoms_tm1, logits_tm1, transitions.action, r_t,\n d_t, atoms_t, logits_t, q_t_selector)\n\n # Importance weighting.\n importance_weights = (1. / probs).astype(jnp.float32)\n importance_weights **= self.importance_sampling_exponent\n importance_weights /= jnp.max(importance_weights)\n\n # Reweight.\n loss = jnp.mean(importance_weights * batch_loss) # []\n extra = learning_lib.LossExtra(\n metrics={}, reverb_priorities=jnp.abs(batch_loss).astype(jnp.float64))\n return loss, extra\n\n\[email protected]\nclass QLearning(learning_lib.LossFn):\n \"\"\"Deep q learning.\n\n This matches the original DQN loss: https://arxiv.org/abs/1312.5602.\n It differs in two aspects that improve it on the optimization side:\n - it uses Adam instead of RMSProp as an optimizer\n - it uses a square loss instead of the Huber one.\n \"\"\"\n discount: float = 0.99\n max_abs_reward: float = 1.\n\n def __call__(\n self,\n network: networks_lib.TypedFeedForwardNetwork,\n params: networks_lib.Params,\n target_params: networks_lib.Params,\n batch: reverb.ReplaySample,\n key: networks_lib.PRNGKey,\n ) -> Tuple[jax.Array, learning_lib.LossExtra]:\n \"\"\"Calculate a loss on a single batch of data.\"\"\"\n transitions: types.Transition = batch.data\n\n # Forward pass.\n key1, key2 = jax.random.split(key)\n q_tm1 = network.apply(\n params, transitions.observation, is_training=True, key=key1)\n q_t = network.apply(\n target_params, transitions.next_observation, is_training=True, key=key2)\n\n # Cast and clip rewards.\n d_t = (transitions.discount * self.discount).astype(jnp.float32)\n r_t = jnp.clip(transitions.reward, -self.max_abs_reward,\n self.max_abs_reward).astype(jnp.float32)\n\n # Compute Q-learning TD-error.\n batch_error = jax.vmap(rlax.q_learning)\n td_error = batch_error(q_tm1, transitions.action, r_t, d_t, q_t)\n batch_loss = jnp.square(td_error)\n\n loss = jnp.mean(batch_loss)\n extra = learning_lib.LossExtra(metrics={})\n return loss, extra\n\n\[email protected]\nclass RegularizedQLearning(learning_lib.LossFn):\n \"\"\"Regularized Q-learning.\n\n Implements DQNReg loss function: https://arxiv.org/abs/2101.03958.\n This is almost identical to QLearning except: 1) Adds a regularization term;\n 2) Uses vanilla TD error without Huber loss;
3) No reward clipping.\n \"\"\"\n discount: float = 0.99\n regularizer_coeff = 0.1\n\n def __call__(\n self,\n network: networks_lib.TypedFeedForwardNetwork,\n params: networks_lib.Params,\n target_params: networks_lib.Params,\n batch: reverb.ReplaySample,\n key: networks_lib.PRNGKey,\n ) -> Tuple[jax.Array, learning_lib.LossExtra]:\n \"\"\"Calculate a loss on a single batch of data.\"\"\"\n transitions: types.Transition = batch.data\n\n # Forward pass.\n key1, key2 = jax.random.split(key)\n q_tm1 = network.apply(\n params, transitions.observation, is_training=True, key=key1)\n q_t = network.apply(\n target_params, transitions.next_observation, is_training=True, key=key2)\n\n d_t = (transitions.discount * self.discount).astype(jnp.float32)\n\n # Compute Q-learning TD-error.\n batch_error = jax.vmap(rlax.q_learning)\n td_error = batch_error(\n q_tm1, transitions.action, transitions.reward, d_t, q_t)\n td_error = 0.5 * jnp.square(td_error)\n\n def select(qtm1, action):\n return qtm1[action]\n q_regularizer = jax.vmap(select)(q_tm1, transitions.action)\n\n loss = self.regularizer_coeff * jnp.mean(q_regularizer) + jnp.mean(td_error)\n extra = learning_lib.LossExtra(metrics={})\n return loss, extra\n\n\[email protected]\nclass MunchausenQLearning(learning_lib.LossFn):\n \"\"\"Munchausen q learning.\n\n Implements M-DQN: https://arxiv.org/abs/2007.14430.\n \"\"\"\n entropy_temperature: float = 0.03 # tau parameter\n munchausen_coefficient: float = 0.9 # alpha parameter\n clip_value_min: float = -1e3\n discount: float = 0.99\n max_abs_reward: float = 1.\n huber_loss_parameter: float = 1.\n\n def __call__(\n self,\n network: networks_lib.TypedFeedForwardNetwork,\n params: networks_lib.Params,\n target_params: networks_lib.Params,\n batch: reverb.ReplaySample,\n key: networks_lib.PRNGKey,\n ) -> Tuple[jax.Array, learning_lib.LossExtra]:\n \"\"\"Calculate a loss on a single batch of data.\"\"\"\n transitions: types.Transition = batch.data\n\n # Forward pass.\n key1, key2, key3 = jax.random.split(key, 3)\n q_online_s = network.apply(\n params, transitions.observation, is_training=True, key=key1)\n action_one_hot = jax.nn.one_hot(transitions.action, q_online_s.shape[-1])\n q_online_sa = jnp.sum(action_one_hot * q_online_s, axis=-1)\n q_target_s = network.apply(\n target_params, transitions.observation, is_training=True, key=key2)\n q_target_next = network.apply(\n target_params, transitions.next_observation, is_training=True, key=key3)\n\n # Cast and clip rewards.\n d_t = (transitions.discount * self.discount).astype(jnp.float32)\n r_t = jnp.clip(transitions.reward, -self.max_abs_reward,\n self.max_abs_reward).astype(jnp.float32)\n\n # Munchausen term : tau * log_pi(a|s)\n munchausen_term = self.entropy_temperature * jax.nn.log_softmax(\n q_target_s / self.entropy_temperature, axis=-1)\n munchausen_term_a = jnp.sum(action_one_hot * munchausen_term, axis=-1)\n munchausen_term_a = jnp.clip(munchausen_term_a,\n a_min=self.clip_value_min,\n a_max=0.)\n\n # Soft Bellman operator applied to q\n next_v = self.entropy_temperature * jax.nn.logsumexp(\n q_target_next / self.entropy_temperature, axis=-1)\n target_q = jax.lax.stop_gradient(r_t + self.munchausen_coefficient *\n munchausen_term_a + d_t * next_v)\n\n batch_loss = rlax.huber_loss(target_q - q_online_sa,\n self.huber_loss_parameter)\n loss = jnp.mean(batch_loss)\n\n extra = learning_lib.LossExtra(metrics={})\n return loss, extra\n" }, { "alpha_fraction": 0.6707111597061157, "alphanum_fraction": 0.6767778992652893, "avg_line_length": 36.84438705444336, 
"blob_id": "307d21899b8e113d58a3094f9dca939363520bbc", "content_id": "101777a8d0f531d605d80b3e2368dc03b9b3a5ea", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 14835, "license_type": "permissive", "max_line_length": 80, "num_lines": 392, "path": "/acme/wrappers/atari_wrapper.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Atari wrappers functionality for Python environments.\"\"\"\n\nimport abc\nfrom typing import Tuple, List, Optional, Sequence, Union\n\nfrom acme.wrappers import base\nfrom acme.wrappers import frame_stacking\n\nimport dm_env\nfrom dm_env import specs\nimport numpy as np\nfrom PIL import Image\n\nRGB_INDEX = 0 # Observation index holding the RGB data.\nLIVES_INDEX = 1 # Observation index holding the lives count.\nNUM_COLOR_CHANNELS = 3 # Number of color channels in RGB data.\n\n\nclass BaseAtariWrapper(abc.ABC, base.EnvironmentWrapper):\n \"\"\"Abstract base class for Atari wrappers.\n\n This assumes that the input environment is a dm_env.Environment instance in\n which observations are tuples whose first element is an RGB observation and\n the second element is the lives count.\n\n The wrapper itself performs the following modifications:\n\n 1. Soft-termination (setting discount to zero) on loss of life.\n 2. Action repeats.\n 3. Frame pooling for action repeats.\n 4. Conversion to grayscale and downscaling.\n 5. Reward clipping.\n 6. Observation stacking.\n\n The details of grayscale conversion, downscaling, and frame pooling are\n delegated to the concrete subclasses.\n\n This wrapper will raise an error if the underlying Atari environment does not:\n\n - Exposes RGB observations in interleaved format (shape `(H, W, C)`).\n - Expose zero-indexed actions.\n\n Note that this class does not expose a configurable rescale method (defaults\n to bilinear internally).\n\n This class also exposes an additional option `to_float` that doesn't feature\n in other wrappers, which rescales pixel values to floats in the range [0, 1].\n \"\"\"\n\n def __init__(self,\n environment: dm_env.Environment,\n *,\n max_abs_reward: Optional[float] = None,\n scale_dims: Optional[Tuple[int, int]] = (84, 84),\n action_repeats: int = 4,\n pooled_frames: int = 2,\n zero_discount_on_life_loss: bool = False,\n expose_lives_observation: bool = False,\n num_stacked_frames: int = 4,\n flatten_frame_stack: bool = False,\n max_episode_len: Optional[int] = None,\n to_float: bool = False,\n grayscaling: bool = True):\n \"\"\"Initializes a new AtariWrapper.\n\n Args:\n environment: An Atari environment.\n max_abs_reward: Maximum absolute reward value before clipping is applied.\n If set to `None` (default), no clipping is applied.\n scale_dims: Image size for the rescaling step after grayscaling, given as\n `(height, width)`. 
Set to `None` to disable resizing.\n action_repeats: Number of times to step the wrapped environment for each\n given action.\n pooled_frames: Number of observations to pool over. Set to 1 to disable\n frame pooling.\n zero_discount_on_life_loss: If `True`, sets the discount to zero when the\n number of lives decreases in the Atari environment.\n expose_lives_observation: If `False`, the `lives` part of the observation\n is discarded, otherwise it is kept as part of an observation tuple. This\n does not affect the `zero_discount_on_life_loss` feature. When enabled,\n the observation consists of a single pixel array, otherwise it is a\n tuple (pixel_array, lives).\n num_stacked_frames: Number of recent (pooled) observations to stack into\n the returned observation.\n flatten_frame_stack: Whether to flatten the stack of frames such that\n the channel (RGB) and stacking dimensions are merged.\n max_episode_len: Number of frames before truncating the episode. By default,\n there is no maximum length.\n to_float: If `True`, rescales RGB observations to floats in [0, 1].\n grayscaling: If `True`, returns a grayscale version of the observations. In\n this case, the observation is 3D (H, W, num_stacked_frames). If `False`,\n the observations are RGB and have shape (H, W, C, num_stacked_frames).\n\n Raises:\n ValueError: For various invalid inputs.\n \"\"\"\n if not 1 <= pooled_frames <= action_repeats:\n raise ValueError(\"pooled_frames ({}) must be between 1 and \"\n \"action_repeats ({}) inclusive\".format(\n pooled_frames, action_repeats))\n\n if zero_discount_on_life_loss:\n super().__init__(_ZeroDiscountOnLifeLoss(environment))\n else:\n super().__init__(environment)\n\n if not max_episode_len:\n max_episode_len = np.inf\n\n self._frame_stacker = frame_stacking.FrameStacker(\n num_frames=num_stacked_frames, flatten=flatten_frame_stack)\n self._action_repeats = action_repeats\n self._pooled_frames = pooled_frames\n self._scale_dims = scale_dims\n self._max_abs_reward = max_abs_reward or np.inf\n self._to_float = to_float\n self._expose_lives_observation = expose_lives_observation\n\n if scale_dims:\n self._height, self._width = scale_dims\n else:\n spec = environment.observation_spec()\n self._height, self._width = spec[RGB_INDEX].shape[:2]\n\n self._episode_len = 0\n self._max_episode_len = max_episode_len\n self._reset_next_step = True\n\n self._grayscaling = grayscaling\n\n # Based on underlying observation spec, decide whether lives are to be\n # included in output observations.\n observation_spec = self._environment.observation_spec()\n spec_names = [spec.name for spec in observation_spec]\n if \"lives\" in spec_names and spec_names.index(\"lives\") != 1:\n raise ValueError(\"`lives` observation needs to have index 1 in Atari.\")\n\n self._observation_spec = self._init_observation_spec()\n\n self._raw_observation = None\n\n def _init_observation_spec(self):\n \"\"\"Computes the observation spec for the pixel observations.\n\n Returns:\n An `Array` specification for the pixel observations.\n \"\"\"\n if self._to_float:\n pixels_dtype = float\n else:\n pixels_dtype = np.uint8\n\n if self._grayscaling:\n pixels_spec_shape = (self._height, self._width)\n pixels_spec_name = \"grayscale\"\n else:\n pixels_spec_shape = (self._height, self._width, NUM_COLOR_CHANNELS)\n pixels_spec_name = \"RGB\"\n\n pixel_spec = specs.Array(\n shape=pixels_spec_shape, dtype=pixels_dtype, name=pixels_spec_name)\n pixel_spec = self._frame_stacker.update_spec(pixel_spec)\n\n if self._expose_lives_observation:\n return
(pixel_spec,) + self._environment.observation_spec()[1:]\n return pixel_spec\n\n def reset(self) -> dm_env.TimeStep:\n \"\"\"Resets environment and provides the first timestep.\"\"\"\n self._reset_next_step = False\n self._episode_len = 0\n self._frame_stacker.reset()\n timestep = self._environment.reset()\n\n observation = self._observation_from_timestep_stack([timestep])\n\n return self._postprocess_observation(\n timestep._replace(observation=observation))\n\n def step(self, action: int) -> dm_env.TimeStep:\n \"\"\"Steps up to action_repeat times and returns a post-processed step.\"\"\"\n if self._reset_next_step:\n return self.reset()\n\n timestep_stack = []\n\n # Step on environment multiple times for each selected action.\n for _ in range(self._action_repeats):\n timestep = self._environment.step([np.array([action])])\n\n self._episode_len += 1\n if self._episode_len == self._max_episode_len:\n timestep = timestep._replace(step_type=dm_env.StepType.LAST)\n\n timestep_stack.append(timestep)\n\n if timestep.last():\n # Action repeat frames should not span episode boundaries. Also, no need\n # to pad with zero-valued observations as all the reductions in\n # _postprocess_observation work gracefully for any non-zero size of\n # timestep_stack.\n self._reset_next_step = True\n break\n\n # Determine a single step type. We let FIRST take priority over LAST, since\n # we think it's more likely algorithm code will be set up to deal with that,\n # due to environments supporting reset() (which emits a FIRST).\n # Note we'll never have LAST then FIRST in timestep_stack here.\n step_type = dm_env.StepType.MID\n for timestep in timestep_stack:\n if timestep.first():\n step_type = dm_env.StepType.FIRST\n break\n elif timestep.last():\n step_type = dm_env.StepType.LAST\n break\n\n if timestep_stack[0].first():\n # Update first timestep to have identity effect on reward and discount.\n timestep_stack[0] = timestep_stack[0]._replace(reward=0., discount=1.)\n\n # Sum reward over stack.\n reward = sum(timestep_t.reward for timestep_t in timestep_stack)\n\n # Multiply discount over stack (will either be 0. 
or 1.).\n discount = np.prod([timestep_t.discount for timestep_t in timestep_stack])\n\n observation = self._observation_from_timestep_stack(timestep_stack)\n\n timestep = dm_env.TimeStep(\n step_type=step_type,\n reward=reward,\n observation=observation,\n discount=discount)\n\n return self._postprocess_observation(timestep)\n\n @abc.abstractmethod\n def _preprocess_pixels(self, timestep_stack: List[dm_env.TimeStep]):\n \"\"\"Process Atari pixels.\"\"\"\n\n def _observation_from_timestep_stack(self,\n timestep_stack: List[dm_env.TimeStep]):\n \"\"\"Compute the observation for a stack of timesteps.\"\"\"\n self._raw_observation = timestep_stack[-1].observation[RGB_INDEX].copy()\n processed_pixels = self._preprocess_pixels(timestep_stack)\n\n if self._to_float:\n stacked_observation = self._frame_stacker.step(processed_pixels / 255.0)\n else:\n stacked_observation = self._frame_stacker.step(processed_pixels)\n\n # We use last timestep for lives only.\n observation = timestep_stack[-1].observation\n if self._expose_lives_observation:\n return (stacked_observation,) + observation[1:]\n\n return stacked_observation\n\n def _postprocess_observation(self,\n timestep: dm_env.TimeStep) -> dm_env.TimeStep:\n \"\"\"Observation processing applied after action repeat consolidation.\"\"\"\n\n if timestep.first():\n return dm_env.restart(timestep.observation)\n\n reward = np.clip(timestep.reward, -self._max_abs_reward,\n self._max_abs_reward)\n\n return timestep._replace(reward=reward)\n\n def action_spec(self) -> specs.DiscreteArray:\n raw_spec = self._environment.action_spec()[0]\n return specs.DiscreteArray(num_values=raw_spec.maximum.item() -\n raw_spec.minimum.item() + 1)\n\n def observation_spec(self) -> Union[specs.Array, Sequence[specs.Array]]:\n return self._observation_spec\n\n def reward_spec(self) -> specs.Array:\n return specs.Array(shape=(), dtype=float)\n\n @property\n def raw_observation(self) -> np.ndarray:\n \"\"\"Returns the raw observation, after any pooling has been applied.\"\"\"\n return self._raw_observation\n\n\nclass AtariWrapper(BaseAtariWrapper):\n \"\"\"Standard \"Nature Atari\" wrapper for Python environments.\n\n Before being fed to a neural network, Atari frames go through preprocessing,\n implemented in this wrapper. For historical reasons, the Dopamine library and\n Acme made different choices in how to apply that preprocessing. Three\n operations need to happen during it: images are transformed from RGB to\n grayscale, max-pooled on the time scale, and resized to 84x84.\n\n 1. The `standard` style (this one, matches previous acme versions):\n - does max pooling, then rgb -> grayscale\n - uses Pillow inter area interpolation for resizing\n 2. The `dopamine` style:\n - does rgb -> grayscale, then max pooling\n - uses opencv bilinear interpolation for resizing.\n\n This can change the behavior of RL agents on some games. The recommended\n setting is to use the standard style with this class. The Dopamine setting is\n available in `atari_wrapper_dopamine.py` for users who wish to compare\n agents across libraries.\n \"\"\"\n\n def _preprocess_pixels(self, timestep_stack: List[dm_env.TimeStep]):\n \"\"\"Preprocess Atari frames.\"\"\"\n # 1. Max pooling\n processed_pixels = np.max(\n np.stack([\n s.observation[RGB_INDEX]\n for s in timestep_stack[-self._pooled_frames:]\n ]),\n axis=0)\n\n # 2.
RGB to grayscale\n if self._grayscaling:\n processed_pixels = np.tensordot(processed_pixels,\n [0.299, 0.587, 1 - (0.299 + 0.587)],\n (-1, 0))\n\n # 3. Resize\n processed_pixels = processed_pixels.astype(np.uint8, copy=False)\n if self._scale_dims != processed_pixels.shape[:2]:\n processed_pixels = Image.fromarray(processed_pixels).resize(\n (self._width, self._height), Image.Resampling.BILINEAR)\n processed_pixels = np.array(processed_pixels, dtype=np.uint8)\n\n return processed_pixels\n\n\nclass _ZeroDiscountOnLifeLoss(base.EnvironmentWrapper):\n \"\"\"Implements soft-termination (zero discount) on life loss.\"\"\"\n\n def __init__(self, environment: dm_env.Environment):\n \"\"\"Initializes a new `_ZeroDiscountOnLifeLoss` wrapper.\n\n Args:\n environment: An Atari environment.\n\n Raises:\n ValueError: If the environment does not expose a lives observation.\n \"\"\"\n super().__init__(environment)\n self._reset_next_step = True\n self._last_num_lives = None\n\n def reset(self) -> dm_env.TimeStep:\n timestep = self._environment.reset()\n self._reset_next_step = False\n self._last_num_lives = timestep.observation[LIVES_INDEX]\n return timestep\n\n def step(self, action: int) -> dm_env.TimeStep:\n if self._reset_next_step:\n return self.reset()\n\n timestep = self._environment.step(action)\n lives = timestep.observation[LIVES_INDEX]\n\n is_life_loss = True\n # We have a life loss when:\n # The wrapped environment is in a regular (MID) transition.\n is_life_loss &= timestep.mid()\n # Lives have decreased since last time `step` was called.\n is_life_loss &= lives < self._last_num_lives\n\n self._last_num_lives = lives\n if is_life_loss:\n return timestep._replace(discount=0.0)\n return timestep\n" }, { "alpha_fraction": 0.6536585092544556, "alphanum_fraction": 0.6582167148590088, "avg_line_length": 39.7328987121582, "blob_id": "de1a49725c5597f435355e09b3285fa19d66730e", "content_id": "8dfd8c19a0ea937d984e0d5bf43ccb5d562909f6", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12505, "license_type": "permissive", "max_line_length": 93, "num_lines": 307, "path": "/acme/adders/reverb/transition.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Transition adders.\n\nThis implements an N-step transition adder which collapses trajectory sequences\ninto a single transition, simplifying to a simple transition adder when N=1.\n\"\"\"\n\nimport copy\nfrom typing import Optional, Tuple\n\nfrom acme import specs\nfrom acme import types\nfrom acme.adders.reverb import base\nfrom acme.adders.reverb import utils\nfrom acme.utils import tree_utils\n\nimport numpy as np\nimport reverb\nimport tree\n\n\nclass NStepTransitionAdder(base.ReverbAdder):\n \"\"\"An N-step transition adder.\n\n This will buffer a sequence of N timesteps in order to form a single N-step\n transition which is added to reverb for future retrieval.\n\n For N=1 the data added to replay will be a standard one-step transition which\n takes the form:\n\n (s_t, a_t, r_t, d_t, s_{t+1}, e_t)\n\n where:\n\n s_t = state observation at time t\n a_t = the action taken from s_t\n r_t = reward ensuing from action a_t\n d_t = environment discount ensuing from action a_t. This discount is\n applied to future rewards after r_t.\n e_t [Optional] = extra data that the agent persists in replay.\n\n For N greater than 1, transitions are of the form:\n\n (s_t, a_t, R_{t:t+n}, D_{t:t+n}, s_{t+N}, e_t),\n\n where:\n\n s_t = State (observation) at time t.\n a_t = Action taken from state s_t.\n g = the additional discount, used by the agent to discount future returns.\n R_{t:t+n} = N-step discounted return, i.e. accumulated over N rewards:\n R_{t:t+n} := r_t + g * d_t * r_{t+1} + ...\n + g^{n-1} * d_t * ... * d_{t+n-2} * r_{t+n-1}.\n D_{t:t+n}: N-step product of agent discounts g_i and environment\n \"discounts\" d_i.\n D_{t:t+n} := g^{n-1} * d_{t} * ... * d_{t+n-1},\n For most environments d_i is 1 for all steps except the last,\n i.e. it is the episode termination signal.\n s_{t+n}: The \"arrival\" state, i.e. the state at time t+n.\n e_t [Optional]: A nested structure of any 'extras' the user wishes to add.\n\n Notes:\n - At the beginning and end of episodes, shorter transitions are added.\n That is, at the beginning of the episode, it will add:\n (s_0 -> s_1), (s_0 -> s_2), ..., (s_0 -> s_n), (s_1 -> s_{n+1})\n\n And at the end of the episode, it will add:\n (s_{T-n+1} -> s_T), (s_{T-n+2} -> s_T), ... (s_{T-1} -> s_T).\n - We add the *first* `extra` of each transition, not the *last*, i.e.\n if extras are provided, we get e_t, not e_{t+n}.\n \"\"\"\n\n def __init__(\n self,\n client: reverb.Client,\n n_step: int,\n discount: float,\n *,\n priority_fns: Optional[base.PriorityFnMapping] = None,\n max_in_flight_items: int = 5,\n ):\n \"\"\"Creates an N-step transition adder.\n\n Args:\n client: A `reverb.Client` to send the data to replay through.\n n_step: The \"N\" in N-step transition. See the class docstring for the\n precise definition of what an N-step transition is. 
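As an illustration\n (following the formulas in the class docstring), with n_step=2 and agent\n discount g, the adder writes\n (s_t, a_t, r_t + g * d_t * r_{t+1}, g * d_t * d_{t+1}, s_{t+2}, e_t).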
`n_step` must be at\n least 1; when it is exactly 1, this reduces to the standard one-step\n transition, i.e. (s_t, a_t, r_t, d_t, s_t+1, e_t).\n discount: Discount factor to apply. This corresponds to the agent's\n discount in the class docstring.\n priority_fns: See docstring for BaseAdder.\n max_in_flight_items: The maximum number of items allowed to be \"in flight\"\n at the same time. See `block_until_num_items` in\n `reverb.TrajectoryWriter.flush` for more info.\n\n Raises:\n ValueError: If n_step is less than 1.\n \"\"\"\n # Makes the additional discount a float32, which means that it will be\n # upcast if rewards/discounts are float64 and left alone otherwise.\n self.n_step = n_step\n self._discount = tree.map_structure(np.float32, discount)\n self._first_idx = 0\n self._last_idx = 0\n\n super().__init__(\n client=client,\n max_sequence_length=n_step + 1,\n priority_fns=priority_fns,\n max_in_flight_items=max_in_flight_items)\n\n def add(self, *args, **kwargs):\n # Increment the indices for the start and end of the window for computing\n # n-step returns.\n if self._writer.episode_steps >= self.n_step:\n self._first_idx += 1\n self._last_idx += 1\n\n super().add(*args, **kwargs)\n\n def reset(self): # pytype: disable=signature-mismatch # overriding-parameter-count-checks\n super().reset()\n self._first_idx = 0\n self._last_idx = 0\n\n @property\n def _n_step(self) -> int:\n \"\"\"Effective n-step, which may vary at starts and ends of episodes.\"\"\"\n return self._last_idx - self._first_idx\n\n def _write(self):\n # Convenient getters for use in tree operations.\n get_first = lambda x: x[self._first_idx]\n get_last = lambda x: x[self._last_idx]\n # Note: this getter is meant to be used on a TrajectoryWriter.history to\n # obtain its numpy values.\n get_all_np = lambda x: x[self._first_idx:self._last_idx].numpy()\n\n # Get the state, action, next_state, as well as possibly extras for the\n # transition that is about to be written.\n history = self._writer.history\n s, a = tree.map_structure(get_first,\n (history['observation'], history['action']))\n s_ = tree.map_structure(get_last, history['observation'])\n\n # Maybe get extras to add to the transition later.\n if 'extras' in history:\n extras = tree.map_structure(get_first, history['extras'])\n\n # Note: at the beginning of an episode we will add the initial N-1\n # transitions (of size 1, 2, ...) and at the end of an episode (when\n # called from write_last) we will write the final transitions of size (N,\n # N-1, ...).
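For example, with N=3\n # this writes (s_{T-2} -> s_T) and then (s_{T-1} -> s_T).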
See the Note in the docstring.\n # Get numpy view of the steps to be fed into the priority functions.\n reward, discount = tree.map_structure(\n get_all_np, (history['reward'], history['discount']))\n\n # Compute discounted return and geometric discount over n steps.\n n_step_return, total_discount = self._compute_cumulative_quantities(\n reward, discount)\n\n # Append the computed n-step return and total discount.\n # Note: if this call to _write() is within a call to _write_last(), then\n # this is the only data being appended and so it is not a partial append.\n self._writer.append(\n dict(n_step_return=n_step_return, total_discount=total_discount),\n partial_step=self._writer.episode_steps <= self._last_idx)\n # This should be done immediately after self._writer.append so the history\n # includes the recently appended data.\n history = self._writer.history\n\n # Form the n-step transition by using the following:\n # the first observation and action in the buffer, along with the cumulative\n # reward and discount computed above.\n n_step_return, total_discount = tree.map_structure(\n lambda x: x[-1], (history['n_step_return'], history['total_discount']))\n transition = types.Transition(\n observation=s,\n action=a,\n reward=n_step_return,\n discount=total_discount,\n next_observation=s_,\n extras=(extras if 'extras' in history else ()))\n\n # Calculate the priority for this transition.\n table_priorities = utils.calculate_priorities(self._priority_fns,\n transition)\n\n # Insert the transition into replay along with its priority.\n for table, priority in table_priorities.items():\n self._writer.create_item(\n table=table, priority=priority, trajectory=transition)\n self._writer.flush(self._max_in_flight_items)\n\n def _write_last(self):\n # Write the remaining shorter transitions by alternating writing and\n # incrementing first_idx. Note that last_idx will no longer be incremented\n # once we're in this method's scope.\n self._first_idx += 1\n while self._first_idx < self._last_idx:\n self._write()\n self._first_idx += 1\n\n def _compute_cumulative_quantities(\n self, rewards: types.NestedArray, discounts: types.NestedArray\n ) -> Tuple[types.NestedArray, types.NestedArray]:\n\n # Give the same tree structure to the n-step return accumulator,\n # n-step discount accumulator, and self.discount, so that they can be\n # iterated in parallel using tree.map_structure.\n rewards, discounts, self_discount = tree_utils.broadcast_structures(\n rewards, discounts, self._discount)\n flat_rewards = tree.flatten(rewards)\n flat_discounts = tree.flatten(discounts)\n flat_self_discount = tree.flatten(self_discount)\n\n # Copy total_discount as it is otherwise read-only.\n total_discount = [np.copy(a[0]) for a in flat_discounts]\n\n # Broadcast n_step_return to have the broadcasted shape of\n # reward * discount.\n n_step_return = [\n np.copy(np.broadcast_to(r[0],\n np.broadcast(r[0], d).shape))\n for r, d in zip(flat_rewards, total_discount)\n ]\n\n # NOTE: total_discount will have one less self_discount applied to it than\n # the value of self._n_step. This is so that when the learner/update uses\n # an additional discount we don't apply it twice.
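(For example,\n # with self._n_step == 2 the loop below ends with total_discount equal to\n # g * d_t * d_{t+1}, i.e. D_{t:t+n} from the class docstring.)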
Inside the following loop\n # we will apply this right before summing up the n_step_return.\n for i in range(1, self._n_step):\n for nsr, td, r, d, sd in zip(n_step_return, total_discount, flat_rewards,\n flat_discounts, flat_self_discount):\n # Equivalent to: `total_discount *= self._discount`.\n td *= sd\n # Equivalent to: `n_step_return += reward[i] * total_discount`.\n nsr += r[i] * td\n # Equivalent to: `total_discount *= discount[i]`.\n td *= d[i]\n\n n_step_return = tree.unflatten_as(rewards, n_step_return)\n total_discount = tree.unflatten_as(rewards, total_discount)\n return n_step_return, total_discount\n\n # TODO(bshahr): make this into a standalone method. Class methods should be\n # used as alternative constructors or when modifying some global state,\n # neither of which is done here.\n @classmethod\n def signature(cls,\n environment_spec: specs.EnvironmentSpec,\n extras_spec: types.NestedSpec = ()):\n\n # This function currently assumes that self._discount is a scalar.\n # If it ever becomes a nested structure and/or a np.ndarray, this method\n # will need to know its structure / shape. This is because the signature\n # discount shape is the environment's discount shape and this adder's\n # discount shape broadcasted together. Also, the reward shape is this\n # signature discount shape broadcasted together with the environment\n # reward shape. As long as self._discount is a scalar, it will not affect\n # either the signature discount shape nor the signature reward shape, so we\n # can ignore it.\n\n rewards_spec, step_discounts_spec = tree_utils.broadcast_structures(\n environment_spec.rewards, environment_spec.discounts)\n rewards_spec = tree.map_structure(_broadcast_specs, rewards_spec,\n step_discounts_spec)\n step_discounts_spec = tree.map_structure(copy.deepcopy, step_discounts_spec)\n\n transition_spec = types.Transition(\n environment_spec.observations,\n environment_spec.actions,\n rewards_spec,\n step_discounts_spec,\n environment_spec.observations, # next_observation\n extras_spec)\n\n return tree.map_structure_with_path(base.spec_like_to_tensor_spec,\n transition_spec)\n\n\ndef _broadcast_specs(*args: specs.Array) -> specs.Array:\n \"\"\"Like np.broadcast, but for specs.Array.\n\n Args:\n *args: one or more specs.Array instances.\n\n Returns:\n A specs.Array with the broadcasted shape and dtype of the specs in *args.\n \"\"\"\n bc_info = np.broadcast(*tuple(a.generate_value() for a in args))\n dtype = np.result_type(*tuple(a.dtype for a in args))\n return specs.Array(shape=bc_info.shape, dtype=dtype)\n" }, { "alpha_fraction": 0.6391577124595642, "alphanum_fraction": 0.6448924541473389, "avg_line_length": 39.434783935546875, "blob_id": "cb063126769a4dd9ba5c6430e4ba4948d6008dc9", "content_id": "8ac7b475ab087942b523c822f9e34d6fccfe580f", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11160, "license_type": "permissive", "max_line_length": 128, "num_lines": 276, "path": "/acme/agents/jax/r2d2/learning.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"R2D2 learner implementation.\"\"\"\n\nimport functools\nimport time\nfrom typing import Dict, Iterator, List, NamedTuple, Optional, Tuple\n\nfrom absl import logging\nimport acme\nfrom acme.adders import reverb as adders\nfrom acme.agents.jax.r2d2 import networks as r2d2_networks\nfrom acme.jax import networks as networks_lib\nfrom acme.jax import utils\nfrom acme.utils import async_utils\nfrom acme.utils import counting\nfrom acme.utils import loggers\nimport jax\nimport jax.numpy as jnp\nimport optax\nimport reverb\nimport rlax\nimport tree\n\n_PMAP_AXIS_NAME = 'data'\n# This type allows splitting a sample between the host and device, which avoids\n# putting item keys (uint64) on device for the purposes of priority updating.\nR2D2ReplaySample = utils.PrefetchingSplit\n\n\nclass TrainingState(NamedTuple):\n \"\"\"Holds the agent's training state.\"\"\"\n params: networks_lib.Params\n target_params: networks_lib.Params\n opt_state: optax.OptState\n steps: int\n random_key: networks_lib.PRNGKey\n\n\nclass R2D2Learner(acme.Learner):\n \"\"\"R2D2 learner.\"\"\"\n\n def __init__(self,\n networks: r2d2_networks.R2D2Networks,\n batch_size: int,\n random_key: networks_lib.PRNGKey,\n burn_in_length: int,\n discount: float,\n importance_sampling_exponent: float,\n max_priority_weight: float,\n target_update_period: int,\n iterator: Iterator[R2D2ReplaySample],\n optimizer: optax.GradientTransformation,\n bootstrap_n: int = 5,\n tx_pair: rlax.TxPair = rlax.SIGNED_HYPERBOLIC_PAIR,\n clip_rewards: bool = False,\n max_abs_reward: float = 1.,\n use_core_state: bool = True,\n prefetch_size: int = 2,\n replay_client: Optional[reverb.Client] = None,\n counter: Optional[counting.Counter] = None,\n logger: Optional[loggers.Logger] = None):\n \"\"\"Initializes the learner.\"\"\"\n\n def loss(\n params: networks_lib.Params,\n target_params: networks_lib.Params,\n key_grad: networks_lib.PRNGKey,\n sample: reverb.ReplaySample\n ) -> Tuple[jnp.ndarray, Tuple[jnp.ndarray, jnp.ndarray]]:\n \"\"\"Computes mean transformed N-step loss for a batch of sequences.\"\"\"\n\n # Get core state & warm it up on observations for a burn-in period.\n if use_core_state:\n # Replay core state.\n # NOTE: We may need to recover the type of the hk.LSTMState if the user\n # specifies a dynamically unrolled RNN as it will strictly enforce the\n # match between input/output state types.\n online_state = utils.maybe_recover_lstm_type(\n sample.data.extras.get('core_state'))\n else:\n key_grad, initial_state_rng = jax.random.split(key_grad)\n online_state = networks.init_recurrent_state(initial_state_rng,\n batch_size)\n target_state = online_state\n\n # Convert sample data to sequence-major format [T, B, ...].\n data = utils.batch_to_sequence(sample.data)\n\n # Maybe burn the core state in.\n if burn_in_length:\n burn_obs = jax.tree_map(lambda x: x[:burn_in_length], data.observation)\n key_grad, key1, key2 = jax.random.split(key_grad, 3)\n _, online_state = 
networks.unroll(params, key1, burn_obs, online_state)\n _, target_state = networks.unroll(target_params, key2, burn_obs,\n target_state)\n\n # Only get data to learn on from after the end of the burn in period.\n data = jax.tree_map(lambda seq: seq[burn_in_length:], data)\n\n # Unroll on sequences to get online and target Q-Values.\n key1, key2 = jax.random.split(key_grad)\n online_q, _ = networks.unroll(params, key1, data.observation,\n online_state)\n target_q, _ = networks.unroll(target_params, key2, data.observation,\n target_state)\n\n # Get value-selector actions from online Q-values for double Q-learning.\n selector_actions = jnp.argmax(online_q, axis=-1)\n # Preprocess discounts & rewards.\n discounts = (data.discount * discount).astype(online_q.dtype)\n rewards = data.reward\n if clip_rewards:\n rewards = jnp.clip(rewards, -max_abs_reward, max_abs_reward)\n rewards = rewards.astype(online_q.dtype)\n\n # Get N-step transformed TD error and loss.\n batch_td_error_fn = jax.vmap(\n functools.partial(\n rlax.transformed_n_step_q_learning,\n n=bootstrap_n,\n tx_pair=tx_pair),\n in_axes=1,\n out_axes=1)\n batch_td_error = batch_td_error_fn(\n online_q[:-1],\n data.action[:-1],\n target_q[1:],\n selector_actions[1:],\n rewards[:-1],\n discounts[:-1])\n batch_loss = 0.5 * jnp.square(batch_td_error).sum(axis=0)\n\n # Importance weighting.\n probs = sample.info.probability\n importance_weights = (1. / (probs + 1e-6)).astype(online_q.dtype)\n importance_weights **= importance_sampling_exponent\n importance_weights /= jnp.max(importance_weights)\n mean_loss = jnp.mean(importance_weights * batch_loss)\n\n # Calculate priorities as a mixture of max and mean sequence errors.\n abs_td_error = jnp.abs(batch_td_error).astype(online_q.dtype)\n max_priority = max_priority_weight * jnp.max(abs_td_error, axis=0)\n mean_priority = (1 - max_priority_weight) * jnp.mean(abs_td_error, axis=0)\n priorities = (max_priority + mean_priority)\n\n return mean_loss, priorities\n\n def sgd_step(\n state: TrainingState,\n samples: reverb.ReplaySample\n ) -> Tuple[TrainingState, jnp.ndarray, Dict[str, jnp.ndarray]]:\n \"\"\"Performs an update step, averaging over pmap replicas.\"\"\"\n\n # Compute loss and gradients.\n grad_fn = jax.value_and_grad(loss, has_aux=True)\n key, key_grad = jax.random.split(state.random_key)\n (loss_value, priorities), gradients = grad_fn(state.params,\n state.target_params,\n key_grad,\n samples)\n\n # Average gradients over pmap replicas before optimizer update.\n gradients = jax.lax.pmean(gradients, _PMAP_AXIS_NAME)\n\n # Apply optimizer updates.\n updates, new_opt_state = optimizer.update(gradients, state.opt_state)\n new_params = optax.apply_updates(state.params, updates)\n\n # Periodically update target networks.\n steps = state.steps + 1\n target_params = optax.periodic_update(new_params, state.target_params, # pytype: disable=wrong-arg-types # numpy-scalars\n steps, self._target_update_period)\n\n new_state = TrainingState(\n params=new_params,\n target_params=target_params,\n opt_state=new_opt_state,\n steps=steps,\n random_key=key)\n return new_state, priorities, {'loss': loss_value}\n\n def update_priorities(\n keys_and_priorities: Tuple[jnp.ndarray, jnp.ndarray]):\n keys, priorities = keys_and_priorities\n keys, priorities = tree.map_structure(\n # Fetch array and combine device and batch dimensions.\n lambda x: utils.fetch_devicearray(x).reshape((-1,) + x.shape[2:]),\n (keys, priorities))\n replay_client.mutate_priorities( # pytype: disable=attribute-error\n 
table=adders.DEFAULT_PRIORITY_TABLE,\n updates=dict(zip(keys, priorities)))\n\n # Internalise components, hyperparameters, logger, counter, and methods.\n self._iterator = iterator\n self._replay_client = replay_client\n self._target_update_period = target_update_period\n self._counter = counter or counting.Counter()\n self._logger = logger or loggers.make_default_logger(\n 'learner',\n asynchronous=True,\n serialize_fn=utils.fetch_devicearray,\n time_delta=1.,\n steps_key=self._counter.get_steps_key())\n\n self._sgd_step = jax.pmap(sgd_step, axis_name=_PMAP_AXIS_NAME)\n self._async_priority_updater = async_utils.AsyncExecutor(update_priorities)\n\n # Initialise and internalise training state (parameters/optimiser state).\n random_key, key_init = jax.random.split(random_key)\n initial_params = networks.init(key_init)\n opt_state = optimizer.init(initial_params)\n\n # Log how many parameters the network has.\n sizes = tree.map_structure(jnp.size, initial_params)\n logging.info('Total number of params: %d',\n sum(tree.flatten(sizes.values())))\n\n state = TrainingState(\n params=initial_params,\n target_params=initial_params,\n opt_state=opt_state,\n steps=jnp.array(0),\n random_key=random_key)\n # Replicate parameters.\n self._state = utils.replicate_in_all_devices(state)\n\n def step(self):\n prefetching_split = next(self._iterator)\n # The split_sample method passed to utils.sharded_prefetch specifies what\n # parts of the objects returned by the original iterator are kept in the\n # host and what parts are prefetched on-device.\n # In this case the host property of the prefetching split contains only the\n # replay keys and the device property is the prefetched full original\n # sample.\n keys = prefetching_split.host\n samples: reverb.ReplaySample = prefetching_split.device\n\n # Do a batch of SGD.\n start = time.time()\n self._state, priorities, metrics = self._sgd_step(self._state, samples)\n # Take metrics from first replica.\n metrics = utils.get_from_first_device(metrics)\n # Update our counts and record it.\n counts = self._counter.increment(steps=1, time_elapsed=time.time() - start)\n\n # Update priorities in replay.\n if self._replay_client:\n self._async_priority_updater.put((keys, priorities))\n\n # Attempt to write logs.\n self._logger.write({**metrics, **counts})\n\n def get_variables(self, names: List[str]) -> List[networks_lib.Params]:\n del names # There's only one available set of params in this agent.\n # Return first replica of parameters.\n return utils.get_from_first_device([self._state.params])\n\n def save(self) -> TrainingState:\n # Serialize only the first replica of parameters and optimizer state.\n return utils.get_from_first_device(self._state)\n\n def restore(self, state: TrainingState):\n self._state = utils.replicate_in_all_devices(state)\n" }, { "alpha_fraction": 0.6996942758560181, "alphanum_fraction": 0.7049031853675842, "avg_line_length": 35.64315414428711, "blob_id": "9f02a60dec88ed29d299a97a6766c5715e9353b3", "content_id": "0faea932f7d96d5ec8ddf4754a2276f1595da2a8", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8831, "license_type": "permissive", "max_line_length": 125, "num_lines": 241, "path": "/acme/agents/jax/mpo/categorical_mpo.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Implements the MPO loss for a discrete (categorical) policy.\n\nThe MPO loss uses CategoricalMPOParams, which can be initialized using\ninit_params, to track the temperature and the dual variables.\n\nTensor shapes are annotated, where helpful, as follows:\n B: batch size,\n D: dimensionality of the action space.\n\"\"\"\n\nfrom typing import NamedTuple, Tuple\n\nimport distrax\nimport jax\nimport jax.numpy as jnp\n\n_MPO_FLOAT_EPSILON = 1e-8\n_MIN_LOG_TEMPERATURE = -18.0\n_MIN_LOG_ALPHA = -18.0\n\nDType = type(jnp.float32) # _ScalarMeta, a private type.\n\n\nclass CategoricalMPOParams(NamedTuple):\n \"\"\"NamedTuple to store trainable loss parameters.\"\"\"\n log_temperature: jnp.ndarray\n log_alpha: jnp.ndarray\n\n\nclass CategoricalMPOStats(NamedTuple):\n \"\"\"NamedTuple to store loss statistics.\"\"\"\n dual_alpha: float\n dual_temperature: float\n\n loss_e_step: float\n loss_m_step: float\n loss_dual: float\n\n loss_policy: float\n loss_alpha: float\n loss_temperature: float\n\n kl_q_rel: float\n kl_mean_rel: float\n\n q_min: float\n q_max: float\n\n entropy_online: float\n entropy_target: float\n\n\nclass CategoricalMPO:\n \"\"\"MPO loss for a categorical policy (Abdolmaleki et al., 2018).\n\n (Abdolmaleki et al., 2018): https://arxiv.org/pdf/1812.02256.pdf\n \"\"\"\n\n def __init__(self,\n epsilon: float,\n epsilon_policy: float,\n init_log_temperature: float,\n init_log_alpha: float):\n \"\"\"Initializes the MPO loss for discrete (categorical) policies.\n\n Args:\n epsilon: KL constraint on the non-parametric auxiliary policy, the one\n associated with the dual variable called temperature.\n epsilon_policy: KL constraint on the categorical policy, the one\n associated with the dual variable called alpha.\n init_log_temperature: initial value for the temperature in log-space, note\n a softplus (rather than an exp) will be used to transform this.\n init_log_alpha: initial value for alpha in log-space. 
Note that a softplus\n (rather than an exp) will be used to transform this.\n \"\"\"\n\n # MPO constraint thresholds.\n self._epsilon = epsilon\n self._epsilon_policy = epsilon_policy\n\n # Initial values for the constraints' dual variables.\n self._init_log_temperature = init_log_temperature\n self._init_log_alpha = init_log_alpha\n\n def init_params(self, action_dim: int, dtype: DType = jnp.float32):\n \"\"\"Creates an initial set of parameters.\"\"\"\n del action_dim # Unused.\n return CategoricalMPOParams(\n log_temperature=jnp.full([1], self._init_log_temperature, dtype=dtype),\n log_alpha=jnp.full([1], self._init_log_alpha, dtype=dtype))\n\n def __call__(\n self,\n params: CategoricalMPOParams,\n online_action_distribution: distrax.Categorical,\n target_action_distribution: distrax.Categorical,\n actions: jnp.ndarray, # Unused.\n q_values: jnp.ndarray, # Shape [D, B].\n ) -> Tuple[jnp.ndarray, CategoricalMPOStats]:\n \"\"\"Computes the MPO loss for a categorical policy.\n\n Args:\n params: parameters tracking the temperature and the dual variables.\n online_action_distribution: online distribution returned by the online\n policy network; expects batch_dims of [B] and event_dims of [D].\n target_action_distribution: target distribution returned by the target\n policy network; expects same shapes as online distribution.\n actions: Unused.\n q_values: Q-values associated with every action; expects shape [D, B].\n\n Returns:\n Loss, combining the policy loss, KL penalty, and dual losses required to\n adapt the dual variables.\n Stats, for diagnostics and tracking performance.\n \"\"\"\n\n q_values = jnp.transpose(q_values) # [D, B] --> [B, D].\n\n # Transform dual variables from log-space.\n # Note: using softplus instead of exponential for numerical stability.\n temperature = get_temperature_from_params(params)\n alpha = jax.nn.softplus(params.log_alpha) + _MPO_FLOAT_EPSILON\n\n # Compute the E-step logits and the temperature loss, used to adapt the\n # tempering of Q-values.\n logits_e_step, loss_temperature = compute_weights_and_temperature_loss( # pytype: disable=wrong-arg-types # jax-ndarray\n q_values=q_values, logits=target_action_distribution.logits,\n epsilon=self._epsilon, temperature=temperature)\n action_distribution_e_step = distrax.Categorical(logits=logits_e_step)\n\n # Only needed for diagnostics: Compute estimated actualized KL between the\n # non-parametric and current target policies.\n kl_nonparametric = action_distribution_e_step.kl_divergence(\n target_action_distribution)\n\n # Compute the policy loss.\n loss_policy = action_distribution_e_step.cross_entropy(\n online_action_distribution)\n loss_policy = jnp.mean(loss_policy)\n\n # Compute the regularization.\n kl = target_action_distribution.kl_divergence(online_action_distribution)\n mean_kl = jnp.mean(kl, axis=0)\n loss_kl = jax.lax.stop_gradient(alpha) * mean_kl\n\n # Compute the dual loss.\n loss_alpha = alpha * (self._epsilon_policy - jax.lax.stop_gradient(mean_kl))\n\n # Combine losses.\n loss_dual = loss_alpha + loss_temperature\n loss = loss_policy + loss_kl + loss_dual\n\n # Create statistics.\n stats = CategoricalMPOStats(\n # Dual Variables.\n dual_alpha=jnp.mean(alpha),\n dual_temperature=jnp.mean(temperature),\n # Losses.\n loss_e_step=loss_policy,\n loss_m_step=loss_kl,\n loss_dual=loss_dual,\n loss_policy=jnp.mean(loss),\n loss_alpha=jnp.mean(loss_alpha),\n loss_temperature=jnp.mean(loss_temperature),\n # KL measurements.\n kl_q_rel=jnp.mean(kl_nonparametric) / self._epsilon,\n kl_mean_rel=mean_kl / 
self._epsilon_policy,\n # Q measurements.\n q_min=jnp.mean(jnp.min(q_values, axis=0)),\n q_max=jnp.mean(jnp.max(q_values, axis=0)),\n entropy_online=jnp.mean(online_action_distribution.entropy()),\n entropy_target=jnp.mean(target_action_distribution.entropy())\n )\n\n return loss, stats\n\n\ndef compute_weights_and_temperature_loss(\n q_values: jnp.ndarray,\n logits: jnp.ndarray,\n epsilon: float,\n temperature: jnp.ndarray,\n) -> Tuple[jnp.ndarray, jnp.ndarray]:\n \"\"\"Computes normalized importance weights for the policy optimization.\n\n Args:\n q_values: Q-values associated with the actions sampled from the target\n policy; expected shape [B, D].\n logits: Parameters to the categorical distribution with respect to which the\n expectations are going to be computed.\n epsilon: Desired constraint on the KL between the target and non-parametric\n policies.\n temperature: Scalar used to temper the Q-values before computing normalized\n importance weights from them. This is really the Lagrange dual variable in\n the constrained optimization problem, the solution of which is the\n non-parametric policy targeted by the policy loss.\n\n Returns:\n Normalized importance weights, used for policy optimization.\n Temperature loss, used to adapt the temperature.\n \"\"\"\n\n # Temper the given Q-values using the current temperature.\n tempered_q_values = jax.lax.stop_gradient(q_values) / temperature\n\n # Compute the E-step normalized logits.\n unnormalized_logits = tempered_q_values + jax.nn.log_softmax(logits, axis=-1)\n logits_e_step = jax.nn.log_softmax(unnormalized_logits, axis=-1)\n\n # Compute the temperature loss (dual of the E-step optimization problem).\n # Note that the log normalizer will be the same for all actions, so we choose\n # only the first one.\n log_normalizer = unnormalized_logits[:, 0] - logits_e_step[:, 0]\n loss_temperature = temperature * (epsilon + jnp.mean(log_normalizer))\n\n return logits_e_step, loss_temperature\n\n\ndef clip_categorical_mpo_params(\n params: CategoricalMPOParams) -> CategoricalMPOParams:\n return params._replace(\n log_temperature=jnp.maximum(_MIN_LOG_TEMPERATURE, params.log_temperature),\n log_alpha=jnp.maximum(_MIN_LOG_ALPHA, params.log_alpha))\n\n\ndef get_temperature_from_params(params: CategoricalMPOParams) -> float:\n return jax.nn.softplus(params.log_temperature) + _MPO_FLOAT_EPSILON\n" }, { "alpha_fraction": 0.6429853439331055, "alphanum_fraction": 0.6494095325469971, "avg_line_length": 43.66244888305664, "blob_id": "ccfdb68c8bb695c60e74804e0e55d9a8b243208c", "content_id": "35e292d82193c46321367640841c3f1f29dcf280", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10585, "license_type": "permissive", "max_line_length": 128, "num_lines": 237, "path": "/acme/agents/jax/mpo/rollout_loss.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"MPO learner implementation. With MoG/not and continuous/discrete policies.\"\"\"\n\nfrom typing import Tuple\n\nfrom acme import types\nfrom acme.adders import reverb as adders\nfrom acme.agents.jax.mpo import categorical_mpo as discrete_losses\nfrom acme.agents.jax.mpo import networks as mpo_networks\nfrom acme.agents.jax.mpo import types as mpo_types\nfrom acme.agents.jax.mpo import utils as mpo_utils\nfrom acme.jax import networks as network_lib\nimport chex\nimport jax\nimport jax.numpy as jnp\nimport rlax\n\n\ndef softmax_cross_entropy(\n logits: chex.Array, target_probs: chex.Array) -> chex.Array:\n \"\"\"Compute cross entropy loss between logits and target probabilities.\"\"\"\n chex.assert_equal_shape([target_probs, logits])\n return -jnp.sum(target_probs * jax.nn.log_softmax(logits), axis=-1)\n\n\ndef top1_accuracy_tiebreak(\n logits: chex.Array,\n targets: chex.Array,\n *,\n rng: chex.PRNGKey,\n eps: float = 1e-6) -> chex.Array:\n \"\"\"Compute the top-1 accuracy with an argmax of targets (random tie-break).\"\"\"\n noise = jax.random.uniform(rng, shape=targets.shape,\n minval=-eps, maxval=eps)\n acc = jnp.argmax(logits, axis=-1) == jnp.argmax(targets + noise, axis=-1)\n return jnp.mean(acc)\n\n\nclass RolloutLoss:\n \"\"\"A MuZero/Muesli-style loss on the rollouts of the dynamics model.\"\"\"\n\n def __init__(\n self,\n dynamics_model: mpo_networks.UnrollableNetwork,\n model_rollout_length: int,\n loss_scales: mpo_types.LossScalesConfig,\n distributional_loss_fn: mpo_types.DistributionalLossFn,\n ):\n self._dynamics_model = dynamics_model\n self._model_rollout_length = model_rollout_length\n self._loss_scales = loss_scales\n self._distributional_loss_fn = distributional_loss_fn\n\n def _rolling_window(self, x: chex.Array, axis: int = 0) -> chex.Array:\n \"\"\"A convenient tree-mapped and configured call to rolling window.\n\n Stacks R = T - K + 1 action slices of length K = model_rollout_length from\n tensor x: [..., 0:K; ...; T-K:T, ...].\n\n Args:\n x: The tensor to select rolling slices from (along specified axis), with\n shape [..., T, ...] such that T = x.shape[axis].\n axis: The axis to slice from (defaults to 0).\n\n Returns:\n A tensor containing the stacked slices [0:K, ... T-K:T] from an axis of x\n with shape [..., K, R, ...] 
for input shape [..., T, ...].\n \"\"\"\n def rw(y):\n return mpo_utils.rolling_window(\n y, window=self._model_rollout_length, axis=axis, time_major=True)\n\n return mpo_utils.tree_map_distribution(rw, x)\n\n def _compute_model_rollout_predictions(\n self, params: mpo_networks.MPONetworkParams,\n state_embeddings: types.NestedArray,\n action_sequence: types.NestedArray) -> mpo_types.ModelOutputs:\n \"\"\"Roll out the dynamics model for each embedding state.\"\"\"\n assert self._model_rollout_length > 0\n # Stack the R=T-K+1 action slices of length K: [0:K; ...; T-K:T]; [K, R].\n rollout_actions = self._rolling_window(action_sequence)\n\n # Create batch of root states (embeddings) s_t for t \\in {0, ..., R}.\n num_rollouts = action_sequence.shape[0] - self._model_rollout_length + 1\n root_state = self._dynamics_model.initial_state_fn(\n params.dynamics_model_initial_state, state_embeddings[:num_rollouts])\n # TODO(abef): randomly choose (fewer?) root unroll states, as in Muesli?\n\n # Roll out K steps forward in time for each root embedding; [K, R, ...].\n # For example, policy_rollout[k, t] is the step-k prediction starting from\n # state s_t (and same for value_rollout and reward_rollout). Thus, for\n # valid values of k, t, and i, policy_rollout[k, t] and\n # policy_rollout[k-i, t+i] share the same target.\n (policy_rollout, value_rollout, reward_rollout,\n embedding_rollout), _ = self._dynamics_model.unroll(\n params.dynamics_model, rollout_actions, root_state)\n # TODO(abef): try using the same params for both the root & rollout heads.\n\n chex.assert_shape([rollout_actions, embedding_rollout],\n (self._model_rollout_length, num_rollouts, ...))\n\n # Create the outputs but drop the rollout that uses action a_{T-1} (and\n # thus contains state s_T) for the policy, value, and embedding because we\n # don't have targets for s_T (but we do know them for the final reward).\n # Also drop the rollout with s_{T-1} for the value because we don't have\n # targets for that either.\n return mpo_types.ModelOutputs(\n policy=policy_rollout[:, :-1], # [K, R-1, ...]\n value=value_rollout[:, :-2], # [K, R-2, ...]\n reward=reward_rollout, # [K, R, ...]\n embedding=embedding_rollout[:, :-1]) # [K, R-1, ...]\n\n def __call__(\n self,\n params: mpo_networks.MPONetworkParams,\n dual_params: mpo_types.DualParams,\n sequence: adders.Step,\n state_embeddings: types.NestedArray,\n targets: mpo_types.LossTargets,\n key: network_lib.PRNGKey,\n ) -> Tuple[jnp.ndarray, mpo_types.LogDict]:\n\n num_rollouts = sequence.reward.shape[0] - self._model_rollout_length + 1\n indices = jnp.arange(num_rollouts)\n\n # Create rollout predictions.\n rollout = self._compute_model_rollout_predictions(\n params=params, state_embeddings=state_embeddings,\n action_sequence=sequence.action)\n\n # Create rollout target tensors. The rollouts will not contain the policy\n # and value at t=0 because they start after taking the first action in\n # the sequence, so drop those when creating the targets. 
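(As a purely illustrative check of these shapes, with a hypothetical sequence length T = 10 and K = 3 model-rollout steps: R = T - K + 1 = 8 rollouts, so the value targets built below cover R - 2 = 6 root states.) 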
They will contain\n # the reward at t=0, however, because of how the sequences are stored.\n # Rollout target shapes:\n # - value: [N, Z, T-2] -> [N, Z, K, R-2],\n # - reward: [T] -> [K, R].\n value_targets = self._rolling_window(targets.value[..., 1:], axis=-1)\n reward_targets = self._rolling_window(targets.reward)[None, None, ...]\n\n # Define the value and reward rollout loss functions.\n def value_loss_fn(root_idx) -> jnp.ndarray:\n return self._distributional_loss_fn(\n rollout.value[:, root_idx], # [K, R-2, ...]\n value_targets[..., root_idx]) # [..., K, R-2]\n\n def reward_loss_fn(root_idx) -> jnp.ndarray:\n return self._distributional_loss_fn(\n rollout.reward[:, root_idx], # [K, R, ...]\n reward_targets[..., root_idx]) # [..., K, R]\n\n # Reward and value losses.\n critic_loss = jnp.mean(jax.vmap(value_loss_fn)(indices[:-2]))\n reward_loss = jnp.mean(jax.vmap(reward_loss_fn)(indices))\n\n # Define the MPO policy rollout loss.\n mpo_policy_loss = 0\n if self._loss_scales.rollout.policy:\n # Rollout target shapes:\n # - policy: [T-1, ...] -> [K, R-1, ...],\n # - q_improvement: [N, T-1] -> [N, K, R-1].\n policy_targets = self._rolling_window(targets.policy[1:])\n q_improvement = self._rolling_window(targets.q_improvement[:, 1:], axis=1)\n\n def policy_loss_fn(root_idx) -> jnp.ndarray:\n chex.assert_shape((rollout.policy.logits, policy_targets.logits), # pytype: disable=attribute-error # numpy-scalars\n (self._model_rollout_length, num_rollouts - 1, None))\n chex.assert_shape(q_improvement,\n (None, self._model_rollout_length, num_rollouts - 1))\n # Compute MPO's E-step unnormalized logits.\n temperature = discrete_losses.get_temperature_from_params(dual_params)\n policy_target_probs = jax.nn.softmax(\n jnp.transpose(q_improvement[..., root_idx]) / temperature +\n jax.nn.log_softmax(policy_targets[:, root_idx].logits, axis=-1)) # pytype: disable=attribute-error # numpy-scalars\n return softmax_cross_entropy(rollout.policy[:, root_idx].logits, # pytype: disable=bad-return-type # numpy-scalars\n jax.lax.stop_gradient(policy_target_probs))\n\n # Compute the MPO loss and add it to the overall rollout policy loss.\n mpo_policy_loss = jax.vmap(policy_loss_fn)(indices[:-1])\n mpo_policy_loss = jnp.mean(mpo_policy_loss)\n\n # Define the BC policy rollout loss (only supported for discrete policies).\n bc_policy_loss, bc_policy_acc = 0, 0\n if self._loss_scales.rollout.bc_policy:\n num_actions = rollout.policy.logits.shape[-1] # A\n bc_targets = self._rolling_window( # [T-1, A] -> [K, R-1, A]\n rlax.one_hot(sequence.action[1:], num_actions))\n\n def bc_policy_loss_fn(root_idx) -> Tuple[jnp.ndarray, jnp.ndarray]:\n \"\"\"Self-behavior-cloning loss (cross entropy on rollout actions).\"\"\"\n chex.assert_shape(\n (rollout.policy.logits, bc_targets),\n (self._model_rollout_length, num_rollouts - 1, num_actions))\n loss = softmax_cross_entropy(rollout.policy.logits[:, root_idx],\n bc_targets[:, root_idx])\n top1_accuracy = top1_accuracy_tiebreak(\n rollout.policy.logits[:, root_idx],\n bc_targets[:, root_idx],\n rng=key)\n return loss, top1_accuracy # pytype: disable=bad-return-type # numpy-scalars\n\n # Compute each rollout loss by vmapping over the rollouts.\n bc_policy_loss, bc_policy_acc = jax.vmap(bc_policy_loss_fn)(indices[:-1])\n bc_policy_loss = jnp.mean(bc_policy_loss)\n bc_policy_acc = jnp.mean(bc_policy_acc)\n\n # Combine losses.\n loss = (\n self._loss_scales.rollout.policy * mpo_policy_loss +\n self._loss_scales.rollout.bc_policy * bc_policy_loss +\n self._loss_scales.critic * 
self._loss_scales.rollout.critic *\n critic_loss + self._loss_scales.rollout.reward * reward_loss)\n\n logging_dict = {\n 'rollout_critic_loss': critic_loss,\n 'rollout_reward_loss': reward_loss,\n 'rollout_policy_loss': mpo_policy_loss,\n 'rollout_bc_policy_loss': bc_policy_loss,\n 'rollout_bc_accuracy': bc_policy_acc,\n 'rollout_loss': loss,\n }\n\n return loss, logging_dict # pytype: disable=bad-return-type # jax-ndarray\n" }, { "alpha_fraction": 0.6876360177993774, "alphanum_fraction": 0.6914606094360352, "avg_line_length": 31.967391967773438, "blob_id": "c1e691e8f980e270247b1274505d7cb51da41a1e", "content_id": "c0efd19a6531f93448ab3840f11122fefacb7ba9", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 15165, "license_type": "permissive", "max_line_length": 80, "num_lines": 460, "path": "/acme/tf/savers.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Utility classes for saving model checkpoints and snapshots.\"\"\"\n\nimport abc\nimport datetime\nimport os\nimport pickle\nimport time\nfrom typing import Mapping, Optional, Union\n\nfrom absl import logging\nfrom acme import core\nfrom acme.utils import signals\nfrom acme.utils import paths\nimport sonnet as snt\nimport tensorflow as tf\nimport tensorflow_probability as tfp\nimport tree\n\nfrom tensorflow.python.saved_model import revived_types\n\nPythonState = tf.train.experimental.PythonState\nCheckpointable = Union[tf.Module, tf.Variable, PythonState]\n\n_DEFAULT_CHECKPOINT_TTL = int(datetime.timedelta(days=5).total_seconds())\n_DEFAULT_SNAPSHOT_TTL = int(datetime.timedelta(days=90).total_seconds())\n\n\nclass TFSaveable(abc.ABC):\n \"\"\"An interface for objects that expose their checkpointable TF state.\"\"\"\n\n @property\n @abc.abstractmethod\n def state(self) -> Mapping[str, Checkpointable]:\n \"\"\"Returns TensorFlow checkpointable state.\"\"\"\n\n\nclass Checkpointer:\n \"\"\"Convenience class for periodically checkpointing.\n\n This can be used to checkpoint any object with trackable state (e.g.\n tensorflow variables or modules); see tf.train.Checkpoint for\n details. Objects inheriting from tf.train.experimental.PythonState can also\n be checkpointed.\n\n Typically people use Checkpointer to make sure that they can correctly recover\n from a machine going down during learning. 
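(Note, from the constructor below: a Checkpointer calls restore() as soon as it is built, when checkpointing is enabled, so the latest existing checkpoint, if any, is reloaded automatically.) 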
For more permanent storage of self-\n contained \"networks\" see the Snapshotter object.\n\n Usage example:\n\n ```python\n model = snt.Linear(10)\n checkpointer = Checkpointer(objects_to_save={'model': model})\n\n for _ in range(100):\n # ...\n checkpointer.save()\n ```\n \"\"\"\n\n def __init__(\n self,\n objects_to_save: Mapping[str, Union[Checkpointable, core.Saveable]],\n *,\n directory: str = '~/acme/',\n subdirectory: str = 'default',\n time_delta_minutes: float = 10.0,\n enable_checkpointing: bool = True,\n add_uid: bool = True,\n max_to_keep: int = 1,\n checkpoint_ttl_seconds: Optional[int] = _DEFAULT_CHECKPOINT_TTL,\n keep_checkpoint_every_n_hours: Optional[int] = None,\n ):\n \"\"\"Builds the saver object.\n\n Args:\n objects_to_save: Mapping specifying what to checkpoint.\n directory: Which directory to put the checkpoint in.\n subdirectory: Sub-directory to use (e.g. if multiple checkpoints are being\n saved).\n time_delta_minutes: How often to save the checkpoint, in minutes.\n enable_checkpointing: whether to checkpoint or not.\n add_uid: If True adds a UID to the checkpoint path, see\n `paths.get_unique_id()` for how this UID is generated.\n max_to_keep: The maximum number of checkpoints to keep.\n checkpoint_ttl_seconds: TTL (time to live) in seconds for checkpoints.\n keep_checkpoint_every_n_hours: keep_checkpoint_every_n_hours passed to\n tf.train.CheckpointManager.\n \"\"\"\n\n # Convert `Saveable` objects to TF `Checkpointable` first, if necessary.\n def to_ckptable(x: Union[Checkpointable, core.Saveable]) -> Checkpointable:\n if isinstance(x, core.Saveable):\n return SaveableAdapter(x)\n return x\n\n objects_to_save = {k: to_ckptable(v) for k, v in objects_to_save.items()}\n\n self._time_delta_minutes = time_delta_minutes\n self._last_saved = 0.\n self._enable_checkpointing = enable_checkpointing\n self._checkpoint_manager = None\n\n if enable_checkpointing:\n # Checkpoint object that handles saving/restoring.\n self._checkpoint = tf.train.Checkpoint(**objects_to_save)\n self._checkpoint_dir = paths.process_path(\n directory,\n 'checkpoints',\n subdirectory,\n ttl_seconds=checkpoint_ttl_seconds,\n backups=False,\n add_uid=add_uid)\n\n # Create a manager to maintain different checkpoints.\n self._checkpoint_manager = tf.train.CheckpointManager(\n self._checkpoint,\n directory=self._checkpoint_dir,\n max_to_keep=max_to_keep,\n keep_checkpoint_every_n_hours=keep_checkpoint_every_n_hours)\n\n self.restore()\n\n def save(self, force: bool = False) -> bool:\n \"\"\"Save the checkpoint if it's the appropriate time, otherwise no-ops.\n\n Args:\n force: Whether to force a save regardless of time elapsed since last save.\n\n Returns:\n A boolean indicating if a save event happened.\n \"\"\"\n if not self._enable_checkpointing:\n return False\n\n if (not force and\n time.time() - self._last_saved < 60 * self._time_delta_minutes):\n return False\n\n # Save any checkpoints.\n logging.info('Saving checkpoint: %s', self._checkpoint_manager.directory)\n self._checkpoint_manager.save()\n self._last_saved = time.time()\n\n return True\n\n def restore(self):\n # Restore from the most recent checkpoint (if it exists).\n checkpoint_to_restore = self._checkpoint_manager.latest_checkpoint\n logging.info('Attempting to restore checkpoint: %s',\n checkpoint_to_restore)\n self._checkpoint.restore(checkpoint_to_restore)\n\n @property\n def directory(self):\n return self._checkpoint_manager.directory\n\n\nclass CheckpointingRunner(core.Worker):\n \"\"\"Wrap an object and expose a run method 
which checkpoints periodically.\n\n This internally creates a Checkpointer around the `wrapped` object and exposes\n all of the methods of `wrapped`. Additionally, any `**kwargs` passed to the\n runner are forwarded to the internal Checkpointer.\n \"\"\"\n\n def __init__(\n self,\n wrapped: Union[Checkpointable, core.Saveable, TFSaveable],\n key: str = 'wrapped',\n *,\n time_delta_minutes: int = 30,\n **kwargs,\n ):\n\n if isinstance(wrapped, TFSaveable):\n # If the object to be wrapped exposes its TF State, checkpoint that.\n objects_to_save = wrapped.state\n else:\n # Otherwise checkpoint the wrapped object itself.\n objects_to_save = wrapped\n\n self._wrapped = wrapped\n self._time_delta_minutes = time_delta_minutes\n self._checkpointer = Checkpointer(\n objects_to_save={key: objects_to_save},\n time_delta_minutes=time_delta_minutes,\n **kwargs)\n\n # Handle preemption signal. Note that this must happen in the main thread.\n def _signal_handler(self):\n logging.info('Caught SIGTERM: forcing a checkpoint save.')\n self._checkpointer.save(force=True)\n\n def step(self):\n if isinstance(self._wrapped, core.Learner):\n # Learners have a step() method, so alternate between that and ckpt call.\n self._wrapped.step()\n self._checkpointer.save()\n else:\n # Wrapped object doesn't have a run method; set our run method to ckpt.\n self.checkpoint()\n\n def run(self):\n \"\"\"Runs the checkpointer.\"\"\"\n with signals.runtime_terminator(self._signal_handler):\n while True:\n self.step()\n\n def __dir__(self):\n return dir(self._wrapped) + ['get_directory']\n\n # TODO(b/195915583): Throw when wrapped object has get_directory() method.\n def __getattr__(self, name):\n if name == 'get_directory':\n return self.get_directory\n return getattr(self._wrapped, name)\n\n def checkpoint(self):\n self._checkpointer.save()\n # Do not sleep for a long period of time to avoid LaunchPad program\n # termination hangs (time.sleep is not interruptible).\n for _ in range(self._time_delta_minutes * 60):\n time.sleep(1)\n\n def get_directory(self):\n return self._checkpointer.directory\n\n\nclass Snapshotter:\n \"\"\"Convenience class for periodically snapshotting.\n\n Objects which can be snapshotted are limited to Sonnet or tensorflow Modules\n which implement a __call__ method. This will save the module's graph and\n variables such that they can be loaded later using `tf.saved_model.load`. See\n https://www.tensorflow.org/guide/saved_model for more details.\n\n The Snapshotter is typically used to save infrequent permanent self-contained\n snapshots which can be loaded later for inspection. 
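(To reload a snapshot saved under the hypothetical name 'model' used in the example below, something like loaded = tf.saved_model.load(os.path.join(snapshotter.directory, 'model')) should work.) 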
For frequent saving of\n model parameters in order to guard against pre-emption of the learning process\n see the Checkpointer class.\n\n Usage example:\n\n ```python\n model = snt.Linear(10)\n snapshotter = Snapshotter(objects_to_save={'model': model})\n\n for _ in range(100):\n # ...\n snapshotter.save()\n ```\n \"\"\"\n\n def __init__(\n self,\n objects_to_save: Mapping[str, snt.Module],\n *,\n directory: str = '~/acme/',\n time_delta_minutes: float = 30.0,\n snapshot_ttl_seconds: int = _DEFAULT_SNAPSHOT_TTL,\n ):\n \"\"\"Builds the saver object.\n\n Args:\n objects_to_save: Mapping specifying what to snapshot.\n directory: Which directory to put the snapshot in.\n time_delta_minutes: How often to save the snapshot, in minutes.\n snapshot_ttl_seconds: TTL (time to live) in seconds for snapshots.\n \"\"\"\n objects_to_save = objects_to_save or {}\n\n self._time_delta_minutes = time_delta_minutes\n self._last_saved = 0.\n self._snapshots = {}\n\n # Save the base directory path so we can refer to it if needed.\n self.directory = paths.process_path(\n directory, 'snapshots', ttl_seconds=snapshot_ttl_seconds)\n\n # Save a dictionary mapping paths to snapshot capable models.\n for name, module in objects_to_save.items():\n path = os.path.join(self.directory, name)\n self._snapshots[path] = make_snapshot(module)\n\n def save(self, force: bool = False) -> bool:\n \"\"\"Snapshots if it's the appropriate time, otherwise no-ops.\n\n Args:\n force: If True, save new snapshot no matter how long it's been since the\n last one.\n\n Returns:\n A boolean indicating if a save event happened.\n \"\"\"\n seconds_since_last = time.time() - self._last_saved\n if (self._snapshots and\n (force or seconds_since_last >= 60 * self._time_delta_minutes)):\n # Save any snapshots.\n for path, snapshot in self._snapshots.items():\n tf.saved_model.save(snapshot, path)\n\n # Record the time we finished saving.\n self._last_saved = time.time()\n\n return True\n\n return False\n\n\nclass Snapshot(tf.Module):\n \"\"\"Thin wrapper which allows the module to be saved.\"\"\"\n\n def __init__(self):\n super().__init__()\n self._module = None\n self._variables = None\n self._trainable_variables = None\n\n @tf.function\n def __call__(self, *args, **kwargs):\n return self._module(*args, **kwargs)\n\n @property\n def submodules(self):\n return [self._module]\n\n @property\n def variables(self):\n return self._variables\n\n @property\n def trainable_variables(self):\n return self._trainable_variables\n\n\n# Registers the Snapshot object above such that when it is restored by\n# tf.saved_model.load it will be restored as a Snapshot. 
This is important\n# because it allows us to expose the __call__, and *_variables properties.\nrevived_types.register_revived_type(\n 'acme_snapshot',\n lambda obj: isinstance(obj, Snapshot),\n versions=[\n revived_types.VersionedTypeRegistration(\n object_factory=lambda proto: Snapshot(),\n version=1,\n min_producer_version=1,\n min_consumer_version=1,\n setter=setattr,\n )\n ])\n\n\ndef make_snapshot(module: snt.Module):\n \"\"\"Create a thin wrapper around a module to make it snapshottable.\"\"\"\n # Get the input signature as long as it has been created.\n input_signature = _get_input_signature(module)\n if input_signature is None:\n raise ValueError(\n ('module instance \"{}\" has no input_signature attribute, '\n 'which is required for snapshotting; run '\n 'create_variables to add this annotation.').format(module.name))\n\n # This function will return the object as a composite tensor if it is a\n # distribution and will otherwise return it with no changes.\n def as_composite(obj):\n if isinstance(obj, tfp.distributions.Distribution):\n return tfp.experimental.as_composite(obj)\n else:\n return obj\n\n # Replace any distributions returned by the module with composite tensors and\n # wrap it up in tf.function so we can process it properly.\n @tf.function\n def wrapped_module(*args, **kwargs):\n return tree.map_structure(as_composite, module(*args, **kwargs))\n\n # pylint: disable=protected-access\n snapshot = Snapshot()\n snapshot._module = wrapped_module\n snapshot._variables = module.variables\n snapshot._trainable_variables = module.trainable_variables\n # pylint: enable=protected-access\n\n # Make sure the snapshot has the proper input signature.\n snapshot.__call__.get_concrete_function(*input_signature)\n\n # If we are an RNN also save the initial-state generating function.\n if isinstance(module, snt.RNNCore):\n snapshot.initial_state = tf.function(module.initial_state)\n snapshot.initial_state.get_concrete_function(\n tf.TensorSpec(shape=(), dtype=tf.int32))\n\n return snapshot\n\n\ndef _get_input_signature(module: snt.Module) -> Optional[tf.TensorSpec]:\n \"\"\"Get module input signature.\n\n Works even if the module with signature is wrapped into snt.Sequential or\n snt.DeepRNN.\n\n Args:\n module: the module which input signature we need to get. The module has to\n either have input_signature itself (i.e. 
you have to run create_variables\n on the module), or it has to be a module (with input_signature) wrapped in\n (one or multiple) snt.Sequential or snt.DeepRNNs.\n\n Returns:\n Input signature of the module or None if it's not available.\n \"\"\"\n if hasattr(module, '_input_signature'):\n return module._input_signature # pylint: disable=protected-access\n\n if isinstance(module, snt.Sequential):\n first_layer = module._layers[0] # pylint: disable=protected-access\n return _get_input_signature(first_layer)\n\n if isinstance(module, snt.DeepRNN):\n first_layer = module._layers[0] # pylint: disable=protected-access\n input_signature = _get_input_signature(first_layer)\n\n # Wrapping a module in DeepRNN changes its state shape, so we need to bring\n # it up to date.\n state = module.initial_state(1)\n input_signature[-1] = tree.map_structure(\n lambda t: tf.TensorSpec((None,) + t.shape[1:], t.dtype), state)\n\n return input_signature\n\n return None\n\n\nclass SaveableAdapter(tf.train.experimental.PythonState):\n \"\"\"Adapter which allows `Saveable` object to be checkpointed by TensorFlow.\"\"\"\n\n def __init__(self, object_to_save: core.Saveable):\n self._object_to_save = object_to_save\n\n def serialize(self):\n state = self._object_to_save.save()\n return pickle.dumps(state)\n\n def deserialize(self, pickled: bytes):\n state = pickle.loads(pickled)\n self._object_to_save.restore(state)\n" }, { "alpha_fraction": 0.6631337404251099, "alphanum_fraction": 0.6663024425506592, "avg_line_length": 37.779659271240234, "blob_id": "f0f30b8b2f853adb1a4c7517d37847cb574c7423", "content_id": "83f88723ffb862ed69e8a9726d7c5f37423df1e8", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9152, "license_type": "permissive", "max_line_length": 80, "num_lines": 236, "path": "/acme/agents/jax/ail/losses.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"AIL discriminator losses.\"\"\"\n\nimport functools\nfrom typing import Callable, Dict, Optional, Tuple\n\nfrom acme import types\nfrom acme.jax import networks as networks_lib\nimport jax\nimport jax.numpy as jnp\nimport tensorflow_probability as tfp\nimport tree\n\ntfp = tfp.experimental.substrates.jax\ntfd = tfp.distributions\n\n# The loss is a function taking the discriminator, its state, the demo\n# transition and the replay buffer transitions.\n# It returns the loss as a float and a debug dictionary with the new state.\nState = networks_lib.Params\nDiscriminatorOutput = Tuple[networks_lib.Logits, State]\nDiscriminatorFn = Callable[[State, types.Transition], DiscriminatorOutput]\nMetrics = Dict[str, float]\nLossOutput = Tuple[float, Tuple[Metrics, State]]\nLoss = Callable[[\n DiscriminatorFn, State, types.Transition, types.Transition, networks_lib\n .PRNGKey\n], LossOutput]\n\n\ndef _binary_cross_entropy_loss(logit: jnp.ndarray,\n label: jnp.ndarray) -> jnp.ndarray:\n return label * jax.nn.softplus(-logit) + (1 - label) * jax.nn.softplus(logit)\n\n\[email protected]\ndef _weighted_average(x: jnp.ndarray, y: jnp.ndarray,\n lambdas: jnp.ndarray) -> jnp.ndarray:\n return lambdas * x + (1. - lambdas) * y\n\n\ndef _label_data(\n rb_transitions: types.Transition,\n demonstration_transitions: types.Transition, mixup_alpha: Optional[float],\n key: networks_lib.PRNGKey) -> Tuple[types.Transition, jnp.ndarray]:\n \"\"\"Create a tuple data, labels by concatenating the rb and dem transitions.\"\"\"\n data = tree.map_structure(lambda x, y: jnp.concatenate([x, y]),\n rb_transitions, demonstration_transitions)\n labels = jnp.concatenate([\n jnp.zeros(rb_transitions.reward.shape),\n jnp.ones(demonstration_transitions.reward.shape)\n ])\n\n if mixup_alpha is not None:\n lambda_key, mixup_key = jax.random.split(key)\n\n lambdas = tfd.Beta(mixup_alpha, mixup_alpha).sample(\n len(labels), seed=lambda_key)\n\n shuffled_data = tree.map_structure(\n lambda x: jax.random.permutation(key=mixup_key, x=x), data)\n shuffled_labels = jax.random.permutation(key=mixup_key, x=labels)\n\n data = tree.map_structure(lambda x, y: _weighted_average(x, y, lambdas),\n data, shuffled_data)\n labels = _weighted_average(labels, shuffled_labels, lambdas)\n\n return data, labels\n\n\ndef _logit_bernoulli_entropy(logits: networks_lib.Logits) -> jnp.ndarray:\n return (1. 
- jax.nn.sigmoid(logits)) * logits - jax.nn.log_sigmoid(logits)\n\n\ndef gail_loss(entropy_coefficient: float = 0.,\n mixup_alpha: Optional[float] = None) -> Loss:\n \"\"\"Computes the standard GAIL loss.\"\"\"\n\n def loss_fn(\n discriminator_fn: DiscriminatorFn,\n discriminator_state: State,\n demo_transitions: types.Transition, rb_transitions: types.Transition,\n rng_key: networks_lib.PRNGKey) -> LossOutput:\n\n data, labels = _label_data(\n rb_transitions=rb_transitions,\n demonstration_transitions=demo_transitions,\n mixup_alpha=mixup_alpha,\n key=rng_key)\n logits, discriminator_state = discriminator_fn(discriminator_state, data)\n\n classification_loss = jnp.mean(_binary_cross_entropy_loss(logits, labels))\n\n entropy = jnp.mean(_logit_bernoulli_entropy(logits))\n entropy_loss = -entropy_coefficient * entropy\n\n total_loss = classification_loss + entropy_loss\n\n metrics = {\n 'total_loss': total_loss,\n 'entropy_loss': entropy_loss,\n 'classification_loss': classification_loss\n }\n return total_loss, (metrics, discriminator_state)\n\n return loss_fn\n\n\ndef pugail_loss(positive_class_prior: float,\n entropy_coefficient: float,\n pugail_beta: Optional[float] = None) -> Loss:\n \"\"\"Computes the PUGAIL loss (https://arxiv.org/pdf/1911.00459.pdf).\"\"\"\n\n def loss_fn(\n discriminator_fn: DiscriminatorFn,\n discriminator_state: State,\n demo_transitions: types.Transition, rb_transitions: types.Transition,\n rng_key: networks_lib.PRNGKey) -> LossOutput:\n del rng_key\n\n demo_logits, discriminator_state = discriminator_fn(discriminator_state,\n demo_transitions)\n rb_logits, discriminator_state = discriminator_fn(discriminator_state,\n rb_transitions)\n\n # Quick Maths:\n # output = logit(D) = ln(D) - ln(1-D)\n # -softplus(-output) = ln(D)\n # softplus(output) = -ln(1-D)\n\n # prior * -ln(D(expert))\n positive_loss = positive_class_prior * -jax.nn.log_sigmoid(demo_logits)\n # -ln(1 - D(policy)) - prior * -ln(1 - D(expert))\n negative_loss = jax.nn.softplus(\n rb_logits) - positive_class_prior * jax.nn.softplus(demo_logits)\n if pugail_beta is not None:\n negative_loss = jnp.clip(negative_loss, a_min=-1. 
* pugail_beta)\n\n classification_loss = jnp.mean(positive_loss + negative_loss)\n\n entropy = jnp.mean(\n _logit_bernoulli_entropy(jnp.concatenate([demo_logits, rb_logits])))\n entropy_loss = -entropy_coefficient * entropy\n\n total_loss = classification_loss + entropy_loss\n\n metrics = {\n 'total_loss': total_loss,\n 'positive_loss': jnp.mean(positive_loss),\n 'negative_loss': jnp.mean(negative_loss),\n 'demo_logits': jnp.mean(demo_logits),\n 'rb_logits': jnp.mean(rb_logits),\n 'entropy_loss': entropy_loss,\n 'classification_loss': classification_loss\n }\n return total_loss, (metrics, discriminator_state)\n\n return loss_fn\n\n\ndef _make_gradient_penalty_data(rb_transitions: types.Transition,\n demonstration_transitions: types.Transition,\n key: networks_lib.PRNGKey) -> types.Transition:\n lambdas = tfd.Uniform().sample(len(rb_transitions.reward), seed=key)\n return tree.map_structure(lambda x, y: _weighted_average(x, y, lambdas),\n rb_transitions, demonstration_transitions)\n\n\[email protected](jax.vmap, in_axes=(0, None, None))\ndef _compute_gradient_penalty(gradient_penalty_data: types.Transition,\n discriminator_fn: Callable[[types.Transition],\n float],\n gradient_penalty_target: float) -> float:\n \"\"\"Computes a penalty based on the gradient norm on the data.\"\"\"\n # The input should not be batched.\n assert not gradient_penalty_data.reward.shape\n discriminator_gradient_fn = jax.grad(discriminator_fn)\n gradients = discriminator_gradient_fn(gradient_penalty_data)\n gradients = tree.map_structure(lambda x: x.flatten(), gradients)\n gradients = jnp.concatenate([gradients.observation, gradients.action,\n gradients.next_observation])\n gradient_norms = jnp.linalg.norm(gradients + 1e-8)\n k = gradient_penalty_target * jnp.ones_like(gradient_norms)\n return jnp.mean(jnp.square(gradient_norms - k))\n\n\ndef add_gradient_penalty(base_loss: Loss,\n gradient_penalty_coefficient: float,\n gradient_penalty_target: float) -> Loss:\n \"\"\"Adds a gradient penalty to the base_loss.\"\"\"\n\n if not gradient_penalty_coefficient:\n return base_loss\n\n def loss_fn(discriminator_fn: DiscriminatorFn,\n discriminator_state: State,\n demo_transitions: types.Transition,\n rb_transitions: types.Transition,\n rng_key: networks_lib.PRNGKey) -> LossOutput:\n super_key, gradient_penalty_key = jax.random.split(rng_key)\n\n partial_loss, (losses, discriminator_state) = base_loss(\n discriminator_fn, discriminator_state, demo_transitions, rb_transitions,\n super_key)\n\n gradient_penalty_data = _make_gradient_penalty_data(\n rb_transitions=rb_transitions,\n demonstration_transitions=demo_transitions,\n key=gradient_penalty_key)\n def apply_discriminator_fn(transitions: types.Transition) -> float:\n logits, _ = discriminator_fn(discriminator_state, transitions)\n return logits # pytype: disable=bad-return-type # jax-ndarray\n gradient_penalty = gradient_penalty_coefficient * jnp.mean(\n _compute_gradient_penalty(gradient_penalty_data, apply_discriminator_fn,\n gradient_penalty_target))\n\n losses['gradient_penalty'] = gradient_penalty\n total_loss = partial_loss + gradient_penalty\n losses['total_loss'] = total_loss\n\n return total_loss, (losses, discriminator_state)\n\n return loss_fn\n" }, { "alpha_fraction": 0.6654690504074097, "alphanum_fraction": 0.6689648628234863, "avg_line_length": 38.45594024658203, "blob_id": "f6ec90aa3963f4fb216a6db48458ecf334964a98", "content_id": "b1b897225d526bff1f2620704404a3997d5e8b0b", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, 
"language": "Python", "length_bytes": 10298, "license_type": "permissive", "max_line_length": 125, "num_lines": 261, "path": "/acme/agents/jax/d4pg/learning.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"D4PG learner implementation.\"\"\"\n\nimport time\nfrom typing import Dict, Iterator, List, NamedTuple, Optional, Tuple\n\nimport acme\nfrom acme import types\nfrom acme.jax import networks as networks_lib\nfrom acme.jax import utils\nfrom acme.utils import counting\nfrom acme.utils import loggers\nimport jax\nimport jax.numpy as jnp\nimport optax\nimport reverb\nimport rlax\n\n_PMAP_AXIS_NAME = 'data'\n\n\nclass TrainingState(NamedTuple):\n \"\"\"Contains training state for the learner.\"\"\"\n policy_params: networks_lib.Params\n target_policy_params: networks_lib.Params\n critic_params: networks_lib.Params\n target_critic_params: networks_lib.Params\n policy_opt_state: optax.OptState\n critic_opt_state: optax.OptState\n steps: int\n\n\nclass D4PGLearner(acme.Learner):\n \"\"\"D4PG learner.\n\n This is the learning component of a D4PG agent. IE it takes a dataset as input\n and implements update functionality to learn from this dataset.\n \"\"\"\n\n _state: TrainingState\n\n def __init__(self,\n policy_network: networks_lib.FeedForwardNetwork,\n critic_network: networks_lib.FeedForwardNetwork,\n random_key: networks_lib.PRNGKey,\n discount: float,\n target_update_period: int,\n iterator: Iterator[reverb.ReplaySample],\n policy_optimizer: Optional[optax.GradientTransformation] = None,\n critic_optimizer: Optional[optax.GradientTransformation] = None,\n clipping: bool = True,\n counter: Optional[counting.Counter] = None,\n logger: Optional[loggers.Logger] = None,\n jit: bool = True,\n num_sgd_steps_per_step: int = 1):\n\n def critic_mean(\n critic_params: networks_lib.Params,\n observation: types.NestedArray,\n action: types.NestedArray,\n ) -> jnp.ndarray:\n # We add batch dimension to make sure batch concat in critic_network\n # works correctly.\n observation = utils.add_batch_dim(observation)\n action = utils.add_batch_dim(action)\n # Computes the mean action-value estimate.\n logits, atoms = critic_network.apply(critic_params, observation, action)\n logits = utils.squeeze_batch_dim(logits)\n probabilities = jax.nn.softmax(logits)\n return jnp.sum(probabilities * atoms, axis=-1)\n\n def policy_loss(\n policy_params: networks_lib.Params,\n critic_params: networks_lib.Params,\n o_t: types.NestedArray,\n ) -> jnp.ndarray:\n # Computes the discrete policy gradient loss.\n dpg_a_t = policy_network.apply(policy_params, o_t)\n grad_critic = jax.vmap(\n jax.grad(critic_mean, argnums=2), in_axes=(None, 0, 0))\n dq_da = grad_critic(critic_params, o_t, dpg_a_t)\n dqda_clipping = 1. 
if clipping else None\n batch_dpg_learning = jax.vmap(rlax.dpg_loss, in_axes=(0, 0, None))\n loss = batch_dpg_learning(dpg_a_t, dq_da, dqda_clipping)\n return jnp.mean(loss)\n\n def critic_loss(\n critic_params: networks_lib.Params,\n state: TrainingState,\n transition: types.Transition,\n ):\n # Computes the distributional critic loss.\n q_tm1, atoms_tm1 = critic_network.apply(critic_params,\n transition.observation,\n transition.action)\n a = policy_network.apply(state.target_policy_params,\n transition.next_observation)\n q_t, atoms_t = critic_network.apply(state.target_critic_params,\n transition.next_observation, a)\n batch_td_learning = jax.vmap(\n rlax.categorical_td_learning, in_axes=(None, 0, 0, 0, None, 0))\n loss = batch_td_learning(atoms_tm1, q_tm1, transition.reward,\n discount * transition.discount, atoms_t, q_t)\n return jnp.mean(loss)\n\n def sgd_step(\n state: TrainingState,\n transitions: types.Transition,\n ) -> Tuple[TrainingState, Dict[str, jnp.ndarray]]:\n\n # TODO(jaslanides): Use a shared forward pass for efficiency.\n policy_loss_and_grad = jax.value_and_grad(policy_loss)\n critic_loss_and_grad = jax.value_and_grad(critic_loss)\n\n # Compute losses and their gradients.\n policy_loss_value, policy_gradients = policy_loss_and_grad(\n state.policy_params, state.critic_params,\n transitions.next_observation)\n critic_loss_value, critic_gradients = critic_loss_and_grad(\n state.critic_params, state, transitions)\n\n # Average over all devices.\n policy_loss_value, policy_gradients = jax.lax.pmean(\n (policy_loss_value, policy_gradients), _PMAP_AXIS_NAME)\n critic_loss_value, critic_gradients = jax.lax.pmean(\n (critic_loss_value, critic_gradients), _PMAP_AXIS_NAME)\n\n # Get optimizer updates and state.\n policy_updates, policy_opt_state = policy_optimizer.update( # pytype: disable=attribute-error\n policy_gradients, state.policy_opt_state)\n critic_updates, critic_opt_state = critic_optimizer.update( # pytype: disable=attribute-error\n critic_gradients, state.critic_opt_state)\n\n # Apply optimizer updates to parameters.\n policy_params = optax.apply_updates(state.policy_params, policy_updates)\n critic_params = optax.apply_updates(state.critic_params, critic_updates)\n\n steps = state.steps + 1\n\n # Periodically update target networks.\n target_policy_params, target_critic_params = optax.periodic_update( # pytype: disable=wrong-arg-types # numpy-scalars\n (policy_params, critic_params),\n (state.target_policy_params, state.target_critic_params), steps,\n self._target_update_period)\n\n new_state = TrainingState(\n policy_params=policy_params,\n critic_params=critic_params,\n target_policy_params=target_policy_params,\n target_critic_params=target_critic_params,\n policy_opt_state=policy_opt_state,\n critic_opt_state=critic_opt_state,\n steps=steps,\n )\n\n metrics = {\n 'policy_loss': policy_loss_value,\n 'critic_loss': critic_loss_value,\n }\n\n return new_state, metrics\n\n # General learner book-keeping and loggers.\n self._counter = counter or counting.Counter()\n self._logger = logger or loggers.make_default_logger(\n 'learner',\n asynchronous=True,\n serialize_fn=utils.fetch_devicearray,\n steps_key=self._counter.get_steps_key())\n\n # Necessary to track when to update target networks.\n self._target_update_period = target_update_period\n\n # Create prefetching dataset iterator.\n self._iterator = iterator\n\n # Maybe use the JIT compiler.\n sgd_step = utils.process_multiple_batches(sgd_step, num_sgd_steps_per_step)\n self._sgd_step = (\n jax.pmap(sgd_step, 
_PMAP_AXIS_NAME, devices=jax.devices())\n if jit else sgd_step)\n\n # Create the network parameters and copy into the target network parameters.\n key_policy, key_critic = jax.random.split(random_key)\n initial_policy_params = policy_network.init(key_policy)\n initial_critic_params = critic_network.init(key_critic)\n initial_target_policy_params = initial_policy_params\n initial_target_critic_params = initial_critic_params\n\n # Create optimizers if they aren't given.\n critic_optimizer = critic_optimizer or optax.adam(1e-4)\n policy_optimizer = policy_optimizer or optax.adam(1e-4)\n\n # Initialize optimizers.\n initial_policy_opt_state = policy_optimizer.init(initial_policy_params) # pytype: disable=attribute-error\n initial_critic_opt_state = critic_optimizer.init(initial_critic_params) # pytype: disable=attribute-error\n\n # Create the initial state and replicate it in all devices.\n self._state = utils.replicate_in_all_devices(\n TrainingState(\n policy_params=initial_policy_params,\n target_policy_params=initial_target_policy_params,\n critic_params=initial_critic_params,\n target_critic_params=initial_target_critic_params,\n policy_opt_state=initial_policy_opt_state,\n critic_opt_state=initial_critic_opt_state,\n steps=0,\n ))\n\n # Do not record timestamps until after the first learning step is done.\n # This is to avoid including the time it takes for actors to come online and\n # fill the replay buffer.\n self._timestamp = None\n\n def step(self):\n # Sample from replay and pack the data in a Transition.\n sample = next(self._iterator)\n transitions = types.Transition(*sample.data)\n\n self._state, metrics = self._sgd_step(self._state, transitions)\n\n # Take the metrics from the first device, since they've been pmeaned over\n # all devices and are therefore identical.\n metrics = utils.get_from_first_device(metrics)\n\n # Compute elapsed time.\n timestamp = time.time()\n elapsed_time = timestamp - self._timestamp if self._timestamp else 0\n self._timestamp = timestamp\n\n # Increment counts and record the current time\n counts = self._counter.increment(steps=1, walltime=elapsed_time)\n\n # Attempts to write the logs.\n self._logger.write({**metrics, **counts})\n\n def get_variables(self, names: List[str]) -> List[networks_lib.Params]:\n variables = {\n 'policy': self._state.target_policy_params,\n 'critic': self._state.target_critic_params,\n }\n return utils.get_from_first_device([variables[name] for name in names])\n\n def save(self) -> TrainingState:\n return utils.get_from_first_device(self._state)\n\n def restore(self, state: TrainingState):\n self._state = utils.replicate_in_all_devices(state)\n" }, { "alpha_fraction": 0.6540287137031555, "alphanum_fraction": 0.6596134305000305, "avg_line_length": 42.147254943847656, "blob_id": "bc0935c34cb78804698eea2a20c1bff1df78ca22", "content_id": "681b0e63f005442a18210e1fdef23e1e8f62a96b", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 32231, "license_type": "permissive", "max_line_length": 157, "num_lines": 747, "path": "/acme/agents/jax/mpo/learning.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"MPO learner implementation. With MoG/not and continuous/discrete policies.\"\"\"\n\nimport dataclasses\nimport functools\nimport time\nfrom typing import Any, Dict, Iterator, List, NamedTuple, Optional, Sequence, Tuple, Union\n\nfrom absl import logging\nimport acme\nfrom acme import specs\nfrom acme import types\nfrom acme.adders import reverb as adders\nfrom acme.agents.jax.mpo import categorical_mpo as discrete_losses\nfrom acme.agents.jax.mpo import networks as mpo_networks\nfrom acme.agents.jax.mpo import rollout_loss\nfrom acme.agents.jax.mpo import types as mpo_types\nfrom acme.agents.jax.mpo import utils as mpo_utils\nfrom acme.jax import networks as network_lib\nfrom acme.jax import types as jax_types\nfrom acme.jax import utils\nimport acme.jax.losses.mpo as continuous_losses\nfrom acme.utils import counting\nfrom acme.utils import loggers\nimport chex\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nimport optax\nimport reverb\nimport rlax\nimport tree\n\n_PMAP_AXIS_NAME = 'data'\nCriticType = mpo_types.CriticType\n\n\nclass TrainingState(NamedTuple):\n \"\"\"Contains training state for the learner.\"\"\"\n params: mpo_networks.MPONetworkParams\n target_params: mpo_networks.MPONetworkParams\n dual_params: mpo_types.DualParams\n opt_state: optax.OptState\n dual_opt_state: optax.OptState\n steps: int\n random_key: jax_types.PRNGKey\n\n\ndef softmax_cross_entropy(\n logits: chex.Array, target_probs: chex.Array) -> chex.Array:\n \"\"\"Compute cross entropy loss between logits and target probabilities.\"\"\"\n chex.assert_equal_shape([target_probs, logits])\n return -jnp.sum(target_probs * jax.nn.log_softmax(logits), axis=-1)\n\n\ndef top1_accuracy_tiebreak(logits: chex.Array,\n targets: chex.Array,\n *,\n rng: jax_types.PRNGKey,\n eps: float = 1e-6) -> chex.Array:\n \"\"\"Compute the top-1 accuracy with an argmax of targets (random tie-break).\"\"\"\n noise = jax.random.uniform(rng, shape=targets.shape,\n minval=-eps, maxval=eps)\n acc = jnp.argmax(logits, axis=-1) == jnp.argmax(targets + noise, axis=-1)\n return jnp.mean(acc)\n\n\nclass MPOLearner(acme.Learner):\n \"\"\"MPO learner (discrete or continuous, distributional or not).\"\"\"\n\n _state: TrainingState\n\n def __init__( # pytype: disable=annotation-type-mismatch # numpy-scalars\n self,\n critic_type: CriticType,\n discrete_policy: bool,\n environment_spec: specs.EnvironmentSpec,\n networks: mpo_networks.MPONetworks,\n random_key: jax_types.PRNGKey,\n discount: float,\n num_samples: int,\n iterator: Iterator[reverb.ReplaySample],\n experience_type: mpo_types.ExperienceType,\n loss_scales: mpo_types.LossScalesConfig,\n target_update_period: Optional[int] = 100,\n target_update_rate: Optional[float] = None,\n sgd_steps_per_learner_step: int = 20,\n policy_eval_stochastic: bool = True,\n policy_eval_num_val_samples: int = 128,\n policy_loss_config: Optional[mpo_types.PolicyLossConfig] = None,\n use_online_policy_to_bootstrap: bool = False,\n 
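      # The two flags below feed into target computation: use_stale_state
      # initializes recurrent cores from the actor's recorded core_state
      # (see _compute_predictions/_compute_targets) instead of a fresh
      # initial state, and use_retrace switches the critic targets to
      # Retrace-style off-policy corrected returns.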
use_stale_state: bool = False,\n use_retrace: bool = False,\n retrace_lambda: float = 0.95,\n model_rollout_length: int = 0,\n optimizer: Optional[optax.GradientTransformation] = None,\n learning_rate: Optional[Union[float, optax.Schedule]] = None,\n dual_optimizer: Optional[optax.GradientTransformation] = None,\n grad_norm_clip: float = 40.0,\n reward_clip: float = np.float32('inf'),\n value_tx_pair: rlax.TxPair = rlax.IDENTITY_PAIR,\n counter: Optional[counting.Counter] = None,\n logger: Optional[loggers.Logger] = None,\n devices: Optional[Sequence[jax.Device]] = None,\n ):\n self._critic_type = critic_type\n self._discrete_policy = discrete_policy\n\n process_id = jax.process_index()\n local_devices = jax.local_devices()\n self._devices = devices or local_devices\n logging.info('Learner process id: %s. Devices passed: %s', process_id,\n devices)\n logging.info('Learner process id: %s. Local devices from JAX API: %s',\n process_id, local_devices)\n self._local_devices = [d for d in self._devices if d in local_devices]\n\n # Store networks.\n self._networks = networks\n\n # General learner book-keeping and loggers.\n self._counter = counter or counting.Counter()\n self._logger = logger\n\n # Other learner parameters.\n self._discount = discount\n self._num_samples = num_samples\n self._sgd_steps_per_learner_step = sgd_steps_per_learner_step\n\n self._policy_eval_stochastic = policy_eval_stochastic\n self._policy_eval_num_val_samples = policy_eval_num_val_samples\n\n self._reward_clip_range = sorted([-reward_clip, reward_clip])\n self._tx_pair = value_tx_pair\n self._loss_scales = loss_scales\n self._use_online_policy_to_bootstrap = use_online_policy_to_bootstrap\n self._model_rollout_length = model_rollout_length\n\n self._use_retrace = use_retrace\n self._retrace_lambda = retrace_lambda\n if use_retrace and critic_type == CriticType.MIXTURE_OF_GAUSSIANS:\n logging.warning(\n 'Warning! 
Retrace has not been tested with the MoG critic.')\n self._use_stale_state = use_stale_state\n\n self._experience_type = experience_type\n if isinstance(self._experience_type, mpo_types.FromTransitions):\n # Each n=5-step transition will be converted to a length 2 sequence before\n # being passed to the loss, so we do n=1 step bootstrapping on the\n # resulting sequence to get n=5-step bootstrapping as intended.\n self._n_step_for_sequence_bootstrap = 1\n self._td_lambda = 1.0\n elif isinstance(self._experience_type, mpo_types.FromSequences):\n self._n_step_for_sequence_bootstrap = self._experience_type.n_step\n self._td_lambda = self._experience_type.td_lambda\n\n # Necessary to track when to update target networks.\n self._target_update_period = target_update_period\n self._target_update_rate = target_update_rate\n # Assert one and only one of target update period or rate is defined.\n if ((target_update_period and target_update_rate) or\n (target_update_period is None and target_update_rate is None)):\n raise ValueError(\n 'Exactly one of target_update_{period|rate} must be set.'\n f' Received target_update_period={target_update_period} and'\n f' target_update_rate={target_update_rate}.')\n\n # Create policy loss.\n if self._discrete_policy:\n policy_loss_config = (\n policy_loss_config or mpo_types.CategoricalPolicyLossConfig())\n self._policy_loss_module = discrete_losses.CategoricalMPO(\n **dataclasses.asdict(policy_loss_config))\n else:\n policy_loss_config = (\n policy_loss_config or mpo_types.GaussianPolicyLossConfig())\n self._policy_loss_module = continuous_losses.MPO(\n **dataclasses.asdict(policy_loss_config))\n\n self._policy_loss_module.__call__ = jax.named_call(\n self._policy_loss_module.__call__, name='policy_loss')\n\n # Create the dynamics model rollout loss.\n if model_rollout_length > 0:\n if not discrete_policy and (self._loss_scales.rollout.policy or\n self._loss_scales.rollout.bc_policy):\n raise ValueError('Policy rollout losses are only supported in the '\n 'discrete policy case.')\n self._model_rollout_loss_fn = rollout_loss.RolloutLoss(\n dynamics_model=networks.dynamics_model,\n model_rollout_length=model_rollout_length,\n loss_scales=loss_scales,\n distributional_loss_fn=self._distributional_loss)\n\n # Create optimizers if they aren't given.\n self._optimizer = optimizer or _get_default_optimizer(1e-4, grad_norm_clip)\n self._dual_optimizer = dual_optimizer or _get_default_optimizer(\n 1e-2, grad_norm_clip)\n self._lr_schedule = learning_rate if callable(learning_rate) else None\n\n self._action_spec = environment_spec.actions\n\n # Initialize random key for the rest of training.\n random_key, key = jax.random.split(random_key)\n\n # Initialize network parameters, ignoring the dummy initial state.\n network_params, _ = mpo_networks.init_params(\n self._networks,\n environment_spec,\n key,\n add_batch_dim=True,\n dynamics_rollout_length=self._model_rollout_length)\n\n # Get action dims (unused in the discrete case).\n dummy_action = utils.zeros_like(environment_spec.actions)\n dummy_action_concat = utils.batch_concat(dummy_action, num_batch_dims=0)\n\n if isinstance(self._policy_loss_module, discrete_losses.CategoricalMPO):\n self._dual_clip_fn = discrete_losses.clip_categorical_mpo_params\n elif isinstance(self._policy_loss_module, continuous_losses.MPO):\n is_constraining = self._policy_loss_module.per_dim_constraining\n self._dual_clip_fn = lambda dp: continuous_losses.clip_mpo_params( # pylint: disable=g-long-lambda # pytype: disable=wrong-arg-types # 
numpy-scalars\n dp,\n per_dim_constraining=is_constraining)\n\n # Create dual parameters. In the discrete case, the action dim is unused.\n dual_params = self._policy_loss_module.init_params(\n action_dim=dummy_action_concat.shape[-1], dtype=jnp.float32)\n\n # Initialize optimizers.\n opt_state = self._optimizer.init(network_params)\n dual_opt_state = self._dual_optimizer.init(dual_params)\n\n # Initialise training state (parameters and optimiser state).\n state = TrainingState(\n params=network_params,\n target_params=network_params,\n dual_params=dual_params,\n opt_state=opt_state,\n dual_opt_state=dual_opt_state,\n steps=0,\n random_key=random_key,\n )\n self._state = utils.replicate_in_all_devices(state, self._local_devices)\n\n # Log how many parameters the network has.\n sizes = tree.map_structure(jnp.size, network_params)._asdict()\n num_params_by_component_str = ' | '.join(\n [f'{key}: {sum(tree.flatten(size))}' for key, size in sizes.items()])\n logging.info('Number of params by network component: %s',\n num_params_by_component_str)\n logging.info('Total number of params: %d',\n sum(tree.flatten(sizes.values())))\n\n # Combine multiple SGD steps and pmap across devices.\n sgd_steps = utils.process_multiple_batches(self._sgd_step,\n self._sgd_steps_per_learner_step)\n self._sgd_steps = jax.pmap(\n sgd_steps, axis_name=_PMAP_AXIS_NAME, devices=self._devices)\n\n self._iterator = iterator\n\n # Do not record timestamps until after the first learning step is done.\n # This is to avoid including the time it takes for actors to come online and\n # fill the replay buffer.\n self._timestamp = None\n self._current_step = 0\n\n def _distributional_loss(self, prediction: mpo_types.DistributionLike,\n target: chex.Array):\n \"\"\"Compute the critic loss given the prediction and target.\"\"\"\n # TODO(abef): break this function into separate functions for each critic.\n chex.assert_rank(target, 3) # [N, Z, T] except for Categorical is [1, T, L]\n if self._critic_type == CriticType.MIXTURE_OF_GAUSSIANS:\n # Sample-based cross-entropy loss.\n loss = -prediction.log_prob(target[..., jnp.newaxis])\n loss = jnp.mean(loss, axis=[0, 1]) # [T]\n elif self._critic_type == CriticType.NONDISTRIBUTIONAL:\n # TD error.\n prediction = prediction.squeeze(axis=-1) # [T]\n loss = 0.5 * jnp.square(target - prediction)\n chex.assert_equal_shape([target, loss]) # Check broadcasting.\n elif self._critic_type == mpo_types.CriticType.CATEGORICAL_2HOT:\n # Cross-entropy loss (two-hot categorical).\n target = jnp.mean(target, axis=(0, 1)) # [N, Z, T] -> [T]\n # TODO(abef): Compute target differently? 
(e.g., do mean cross ent.).\n target_probs = rlax.transform_to_2hot( # [T, L]\n target,\n min_value=prediction.values.min(),\n max_value=prediction.values.max(),\n num_bins=prediction.logits.shape[-1])\n logits = jnp.squeeze(prediction.logits, axis=1) # [T, L]\n chex.assert_equal_shape([target_probs, logits])\n loss = jax.vmap(rlax.categorical_cross_entropy)(target_probs, logits)\n elif self._critic_type == mpo_types.CriticType.CATEGORICAL:\n loss = jax.vmap(rlax.categorical_cross_entropy)(jnp.squeeze(\n target, axis=0), jnp.squeeze(prediction.logits, axis=1))\n return jnp.mean(loss) # [T] -> []\n\n def _compute_predictions(self, params: mpo_networks.MPONetworkParams,\n sequence: adders.Step) -> mpo_types.ModelOutputs:\n \"\"\"Compute model predictions at observed and rolled out states.\"\"\"\n\n # Initialize the core states, possibly to the recorded stale state.\n if self._use_stale_state:\n initial_state = utils.maybe_recover_lstm_type(\n sequence.extras['core_state'])\n initial_state = tree.map_structure(lambda x: x[0], initial_state)\n else:\n initial_state = self._networks.torso.initial_state_fn(\n params.torso_initial_state, None)\n\n # Unroll the online core network. Note that this may pass the embeddings\n # unchanged if, say, the core is an hk.IdentityCore.\n state_embedding, _ = self._networks.torso_unroll( # [T, ...]\n params, sequence.observation, initial_state)\n\n # Compute the root policy and critic outputs; [T, ...] and [T-1, ...].\n policy = self._networks.policy_head_apply(params, state_embedding)\n q_value = self._networks.critic_head_apply(\n params, state_embedding[:-1], sequence.action[:-1])\n\n return mpo_types.ModelOutputs(\n policy=policy, # [T, ...]\n q_value=q_value, # [T-1, ...]\n reward=None,\n embedding=state_embedding) # [T, ...]\n\n def _compute_targets(\n self,\n target_params: mpo_networks.MPONetworkParams,\n dual_params: mpo_types.DualParams,\n sequence: adders.Step,\n online_policy: types.NestedArray, # TODO(abef): remove this.\n key: jax_types.PRNGKey) -> mpo_types.LossTargets:\n \"\"\"Compute the targets needed to train the agent.\"\"\"\n\n # Initialize the core states, possibly to the recorded stale state.\n if self._use_stale_state:\n initial_state = utils.maybe_recover_lstm_type(\n sequence.extras['core_state'])\n initial_state = tree.map_structure(lambda x: x[0], initial_state)\n else:\n initial_state = self._networks.torso.initial_state_fn(\n target_params.torso_initial_state, None)\n\n # Unroll the target core network. 
Note that this may pass the embeddings\n # unchanged if, say, the core is an hk.IdentityCore.\n target_state_embedding, _ = self._networks.torso_unroll(\n target_params, sequence.observation, initial_state) # [T, ...]\n\n # Compute the action distribution from target policy network.\n target_policy = self._networks.policy_head_apply(\n target_params, target_state_embedding) # [T, ...]\n\n # Maybe reward clip.\n clipped_reward = jnp.clip(sequence.reward, *self._reward_clip_range) # [T]\n # TODO(abef): when to clip rewards, if at all, if learning dynamics model?\n\n @jax.named_call\n @jax.vmap\n def critic_mean_fn(action_: jnp.ndarray) -> jnp.ndarray:\n \"\"\"Compute mean of target critic distribution.\"\"\"\n critic_output = self._networks.critic_head_apply(\n target_params, target_state_embedding, action_)\n if self._critic_type != CriticType.NONDISTRIBUTIONAL:\n critic_output = critic_output.mean()\n return critic_output\n\n @jax.named_call\n @jax.vmap\n def critic_sample_fn(action_: jnp.ndarray,\n seed_: jnp.ndarray) -> jnp.ndarray:\n \"\"\"Sample from the target critic distribution.\"\"\"\n z_distribution = self._networks.critic_head_apply(\n target_params, target_state_embedding, action_)\n z_samples = z_distribution.sample(\n self._policy_eval_num_val_samples, seed=seed_)\n return z_samples # [Z, T, 1]\n\n if self._discrete_policy:\n # Use all actions to improve policy (no sampling); N = num_actions.\n a_improvement = jnp.arange(self._action_spec.num_values) # [N]\n seq_len = target_state_embedding.shape[0] # T\n a_improvement = jnp.tile(a_improvement[..., None], [1, seq_len]) # [N, T]\n else:\n # Sample actions to improve policy; [N=num_samples, T].\n a_improvement = target_policy.sample(self._num_samples, seed=key)\n\n # TODO(abef): use model to get q_improvement = r + gamma*V?\n\n # Compute the mean Q-values used in policy improvement; [N, T].\n q_improvement = critic_mean_fn(a_improvement).squeeze(axis=-1)\n\n # Policy to use for policy evaluation and bootstrapping.\n if self._use_online_policy_to_bootstrap:\n policy_to_evaluate = online_policy\n chex.assert_equal(online_policy.batch_shape, target_policy.batch_shape)\n else:\n policy_to_evaluate = target_policy\n\n # Action(s) to use for policy evaluation; shape [N, T].\n if self._policy_eval_stochastic:\n a_evaluation = policy_to_evaluate.sample(self._num_samples, seed=key)\n else:\n a_evaluation = policy_to_evaluate.mode()\n a_evaluation = jnp.expand_dims(a_evaluation, axis=0) # [N=1, T]\n\n # TODO(abef): policy_eval_stochastic=False makes our targets more \"greedy\"\n\n # Add a stopgrad in case we use the online policy for evaluation.\n a_evaluation = jax.lax.stop_gradient(a_evaluation)\n\n if self._critic_type == CriticType.MIXTURE_OF_GAUSSIANS:\n # Produce Z return samples for every N action sample; [N, Z, T, 1].\n seeds = jax.random.split(key, num=a_evaluation.shape[0])\n z_samples = critic_sample_fn(a_evaluation, seeds)\n else:\n normalized_weights = 1. 
/ a_evaluation.shape[0]\n z_samples = critic_mean_fn(a_evaluation) # [N, T, 1]\n\n # When policy_eval_stochastic == True, this corresponds to expected SARSA.\n # Otherwise, normalized_weights = 1.0 and N = 1 so the sum is a no-op.\n z_samples = jnp.sum(normalized_weights * z_samples, axis=0, keepdims=True)\n z_samples = jnp.expand_dims(z_samples, axis=1) # [N, Z=1, T, 1]\n\n # Slice to t = 1...T and transform into raw reward space; [N, Z, T].\n z_samples_itx = self._tx_pair.apply_inv(z_samples.squeeze(axis=-1))\n\n # Compute the value estimate by averaging the sampled returns in the raw\n # reward space; shape [N=1, Z=1, T].\n value_target_itx = jnp.mean(z_samples_itx, axis=(0, 1), keepdims=True)\n\n if self._use_retrace:\n # Warning! Retrace has not been tested with the MoG critic.\n log_rhos = (\n target_policy.log_prob(sequence.action) - sequence.extras['log_prob'])\n\n # Compute Q-values; expand and squeeze because critic_mean_fn is vmapped.\n q_t = critic_mean_fn(jnp.expand_dims(sequence.action, axis=0)).squeeze(0)\n q_t = q_t.squeeze(-1) # Also squeeze trailing scalar dimension; [T].\n\n # Compute retrace targets.\n # These targets use the rewards and discounts as in normal TD-learning but\n # they use a mix of bootstrapped values V(s') and Q(s', a'), weighing the\n # latter based on how likely a' is under the current policy (s' and a' are\n # samples from replay).\n # See [Munos et al., 2016](https://arxiv.org/abs/1606.02647) for more.\n q_value_target_itx = rlax.general_off_policy_returns_from_q_and_v(\n q_t=self._tx_pair.apply_inv(q_t[1:-1]),\n v_t=jnp.squeeze(value_target_itx, axis=(0, 1))[1:],\n r_t=clipped_reward[:-1],\n discount_t=self._discount * sequence.discount[:-1],\n c_t=self._retrace_lambda * jnp.minimum(1.0, jnp.exp(log_rhos[1:-1])))\n\n # Expand dims to the expected [N=1, Z=1, T-1].\n q_value_target_itx = jnp.expand_dims(q_value_target_itx, axis=(0, 1))\n else:\n # Compute bootstrap target from sequences. vmap return computation across\n # N action and Z return samples; shape [N, Z, T-1].\n n_step_return_fn = functools.partial(\n rlax.n_step_bootstrapped_returns,\n r_t=clipped_reward[:-1],\n discount_t=self._discount * sequence.discount[:-1],\n n=self._n_step_for_sequence_bootstrap,\n lambda_t=self._td_lambda)\n n_step_return_vfn = jax.vmap(jax.vmap(n_step_return_fn))\n q_value_target_itx = n_step_return_vfn(v_t=z_samples_itx[..., 1:])\n\n # Transform back to the canonical space and stop gradients.\n q_value_target = jax.lax.stop_gradient(\n self._tx_pair.apply(q_value_target_itx))\n reward_target = jax.lax.stop_gradient(self._tx_pair.apply(clipped_reward))\n value_target = jax.lax.stop_gradient(self._tx_pair.apply(value_target_itx))\n\n if self._critic_type == mpo_types.CriticType.CATEGORICAL:\n\n @jax.vmap\n def get_logits_and_values(\n action: jnp.ndarray) -> Tuple[jnp.ndarray, jnp.ndarray]:\n critic_output = self._networks.critic_head_apply(\n target_params, target_state_embedding[1:], action)\n return critic_output.logits, critic_output.values\n\n z_t_logits, z_t_values = get_logits_and_values(a_evaluation[:, 1:])\n z_t_logits = jnp.squeeze(z_t_logits, axis=2) # [N, T-1, L]\n z_t_values = z_t_values[0] # Values are identical at each N; [L].\n\n gamma = self._discount * sequence.discount[:-1, None] # [T-1, 1]\n r_t = clipped_reward[:-1, None] # [T-1, 1]\n atoms_itx = self._tx_pair.apply_inv(z_t_values)[None, ...] 
# [1, L]\n z_target_atoms = self._tx_pair.apply(r_t + gamma * atoms_itx) # [T-1, L]\n # Note: this is n=1-step TD unless using experience=FromTransitions(n>1).\n z_target_probs = jax.nn.softmax(z_t_logits) # [N, T-1, L]\n z_target_atoms = jax.lax.broadcast(\n z_target_atoms, z_target_probs.shape[:1]) # [N, T-1, L]\n project_fn = functools.partial(\n rlax.categorical_l2_project, z_q=z_t_values)\n z_target = jax.vmap(jax.vmap(project_fn))(z_target_atoms, z_target_probs)\n z_target = jnp.mean(z_target, axis=0)\n q_value_target = jax.lax.stop_gradient(z_target[None, ...]) # [1, T-1, L]\n # TODO(abef): make q_v_target shape align with expected [N, Z, T-1] shape?\n\n targets = mpo_types.LossTargets(\n policy=target_policy, # [T, ...]\n a_improvement=a_improvement, # [N, T]\n q_improvement=q_improvement, # [N, T]\n q_value=q_value_target, # [N, Z, T-1] ([1, T-1, L] for CATEGORICAL)\n value=value_target[..., :-1], # [N=1, Z=1, T-1]\n reward=reward_target, # [T]\n embedding=target_state_embedding) # [T, ...]\n\n return targets\n\n def _loss_fn(\n self,\n params: mpo_networks.MPONetworkParams,\n dual_params: mpo_types.DualParams,\n # TODO(bshahr): clean up types: Step is not a great type for sequences.\n sequence: adders.Step,\n target_params: mpo_networks.MPONetworkParams,\n key: jax_types.PRNGKey) -> Tuple[jnp.ndarray, mpo_types.LogDict]:\n # Compute the model predictions at the root and for the rollouts.\n predictions = self._compute_predictions(params=params, sequence=sequence)\n\n # Compute the targets to use for the losses.\n targets = self._compute_targets(\n target_params=target_params,\n dual_params=dual_params,\n sequence=sequence,\n online_policy=predictions.policy,\n key=key)\n\n # TODO(abef): mask policy loss at terminal states or use uniform targets\n # is_terminal = sequence.discount == 0.\n\n # Compute MPO policy loss on each state in the sequence.\n policy_loss, policy_stats = self._policy_loss_module(\n params=dual_params,\n online_action_distribution=predictions.policy, # [T, ...].\n target_action_distribution=targets.policy, # [T, ...].\n actions=targets.a_improvement, # Unused in discrete case; [N, T].\n q_values=targets.q_improvement) # [N, T]\n\n # Compute the critic loss on the states in the sequence.\n critic_loss = self._distributional_loss(\n prediction=predictions.q_value, # [T-1, 1, ...]\n target=targets.q_value) # [N, Z, T-1]\n\n loss = (self._loss_scales.policy * policy_loss +\n self._loss_scales.critic * critic_loss)\n loss_logging_dict = {\n 'loss': loss,\n 'root_policy_loss': policy_loss,\n 'root_critic_loss': critic_loss,\n 'policy_loss': policy_loss,\n 'critic_loss': critic_loss,\n }\n\n # Append MPO statistics.\n loss_logging_dict.update(\n {f'policy/root/{k}': v for k, v in policy_stats._asdict().items()})\n\n # Compute rollout losses.\n if self._model_rollout_length > 0:\n model_rollout_loss, rollout_logs = self._model_rollout_loss_fn(\n params, dual_params, sequence, predictions.embedding, targets, key)\n loss += model_rollout_loss\n loss_logging_dict.update(rollout_logs)\n loss_logging_dict.update({\n 'policy_loss': policy_loss + rollout_logs['rollout_policy_loss'],\n 'critic_loss': critic_loss + rollout_logs['rollout_critic_loss'],\n 'loss': loss})\n\n return loss, loss_logging_dict\n\n def _sgd_step(\n self,\n state: TrainingState,\n transitions: Union[types.Transition, adders.Step],\n ) -> Tuple[TrainingState, Dict[str, Any]]:\n \"\"\"Perform one parameter update step.\"\"\"\n\n if isinstance(transitions, types.Transition):\n sequences = 
mpo_utils.make_sequences_from_transitions(transitions)\n if self._model_rollout_length > 0:\n raise ValueError('model rollouts not yet supported from transitions')\n else:\n sequences = transitions\n\n # Get next random_key and `batch_size` keys.\n batch_size = sequences.reward.shape[0]\n keys = jax.random.split(state.random_key, num=batch_size+1)\n random_key, keys = keys[0], keys[1:]\n\n # Vmap over the batch dimension when learning from sequences.\n loss_vfn = jax.vmap(self._loss_fn, in_axes=(None, None, 0, None, 0))\n safe_mean = lambda x: jnp.mean(x) if x is not None else x\n # TODO(bshahr): Consider cleaning this up via acme.tree_utils.tree_map.\n loss_fn = lambda *a, **k: tree.map_structure(safe_mean, loss_vfn(*a, **k))\n\n loss_and_grad = jax.value_and_grad(loss_fn, argnums=(0, 1), has_aux=True)\n\n # Compute the loss and gradient.\n (_, loss_log_dict), all_gradients = loss_and_grad(\n state.params, state.dual_params, sequences, state.target_params, keys)\n\n # Average gradients across replicas.\n gradients, dual_gradients = jax.lax.pmean(all_gradients, _PMAP_AXIS_NAME)\n\n # Compute gradient norms before clipping.\n gradients_norm = optax.global_norm(gradients)\n dual_gradients_norm = optax.global_norm(dual_gradients)\n\n # Get optimizer updates and state.\n updates, opt_state = self._optimizer.update(\n gradients, state.opt_state, state.params)\n dual_updates, dual_opt_state = self._dual_optimizer.update(\n dual_gradients, state.dual_opt_state, state.dual_params)\n\n # Apply optimizer updates to parameters.\n params = optax.apply_updates(state.params, updates)\n dual_params = optax.apply_updates(state.dual_params, dual_updates)\n\n # Clip dual params at some minimum value.\n dual_params = self._dual_clip_fn(dual_params)\n\n steps = state.steps + 1\n\n # Periodically update target networks.\n if self._target_update_period:\n target_params = optax.periodic_update(params, state.target_params, steps, # pytype: disable=wrong-arg-types # numpy-scalars\n self._target_update_period)\n elif self._target_update_rate:\n target_params = optax.incremental_update(params, state.target_params,\n self._target_update_rate)\n\n new_state = TrainingState( # pytype: disable=wrong-arg-types # numpy-scalars\n params=params,\n target_params=target_params,\n dual_params=dual_params,\n opt_state=opt_state,\n dual_opt_state=dual_opt_state,\n steps=steps,\n random_key=random_key,\n )\n\n # Log the metrics from this learner step.\n metrics = {f'loss/{k}': v for k, v in loss_log_dict.items()}\n\n metrics.update({\n 'opt/grad_norm': gradients_norm,\n 'opt/param_norm': optax.global_norm(params)})\n if callable(self._lr_schedule):\n metrics['opt/learning_rate'] = self._lr_schedule(state.steps) # pylint: disable=not-callable\n\n dual_metrics = {\n 'opt/dual_grad_norm': dual_gradients_norm,\n 'opt/dual_param_norm': optax.global_norm(dual_params),\n 'params/dual/log_temperature_avg': dual_params.log_temperature}\n if isinstance(dual_params, continuous_losses.MPOParams):\n dual_metrics.update({\n 'params/dual/log_alpha_mean_avg': dual_params.log_alpha_mean,\n 'params/dual/log_alpha_stddev_avg': dual_params.log_alpha_stddev})\n if dual_params.log_penalty_temperature is not None:\n dual_metrics['params/dual/log_penalty_temp_mean'] = (\n dual_params.log_penalty_temperature)\n elif isinstance(dual_params, discrete_losses.CategoricalMPOParams):\n dual_metrics['params/dual/log_alpha_avg'] = dual_params.log_alpha\n metrics.update(jax.tree_map(jnp.mean, dual_metrics))\n\n return new_state, metrics\n\n def step(self):\n 
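    # Data flow for one call, as implemented below: next(self._iterator)
    # yields a reverb.ReplaySample whose payload is unpacked into either a
    # types.Transition or an adders.Step minibatch depending on
    # self._experience_type; the pmapped self._sgd_steps then performs
    # self._sgd_steps_per_learner_step SGD updates before metrics are logged.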
\"\"\"Perform one learner step, which in general does multiple SGD steps.\"\"\"\n with jax.profiler.StepTraceAnnotation('step', step_num=self._current_step):\n # Get data from replay (dropping extras if any). Note there is no\n # extra data here because we do not insert any into Reverb.\n sample = next(self._iterator)\n if isinstance(self._experience_type, mpo_types.FromTransitions):\n minibatch = types.Transition(*sample.data)\n elif isinstance(self._experience_type, mpo_types.FromSequences):\n minibatch = adders.Step(*sample.data)\n\n self._state, metrics = self._sgd_steps(self._state, minibatch)\n self._current_step, metrics = mpo_utils.get_from_first_device(\n (self._state.steps, metrics))\n\n # Compute elapsed time.\n timestamp = time.time()\n elapsed_time = timestamp - self._timestamp if self._timestamp else 0\n self._timestamp = timestamp\n\n # Increment counts and record the current time\n counts = self._counter.increment(\n steps=self._sgd_steps_per_learner_step, walltime=elapsed_time)\n\n if elapsed_time > 0:\n metrics['steps_per_second'] = (\n self._sgd_steps_per_learner_step / elapsed_time)\n else:\n metrics['steps_per_second'] = 0.\n\n # Attempts to write the logs.\n if self._logger:\n self._logger.write({**metrics, **counts})\n\n def get_variables(self, names: List[str]) -> network_lib.Params:\n params = mpo_utils.get_from_first_device(self._state.target_params)\n\n variables = {\n 'policy_head': params.policy_head,\n 'critic_head': params.critic_head,\n 'torso': params.torso,\n 'network': params,\n 'policy': params._replace(critic_head={}),\n 'critic': params._replace(policy_head={}),\n }\n return [variables[name] for name in names]\n\n def save(self) -> TrainingState:\n return jax.tree_map(mpo_utils.get_from_first_device, self._state)\n\n def restore(self, state: TrainingState):\n self._state = utils.replicate_in_all_devices(state, self._local_devices)\n\n\ndef _get_default_optimizer(\n learning_rate: float,\n max_grad_norm: Optional[float] = None) -> optax.GradientTransformation:\n optimizer = optax.adam(learning_rate)\n if max_grad_norm and max_grad_norm > 0:\n optimizer = optax.chain(optax.clip_by_global_norm(max_grad_norm), optimizer)\n return optimizer\n" }, { "alpha_fraction": 0.6586897373199463, "alphanum_fraction": 0.662932813167572, "avg_line_length": 38.81081008911133, "blob_id": "718ef55659941335edeaf9d7235e0d4243186363", "content_id": "3bf4f5c7efde46fa143d8f4e8b881e2288e55ed2", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11784, "license_type": "permissive", "max_line_length": 93, "num_lines": 296, "path": "/acme/adders/reverb/sequence.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Sequence adders.\n\nThis implements adders which add sequences or partial trajectories.\n\"\"\"\n\nimport enum\nimport operator\nfrom typing import Iterable, Optional\n\nfrom acme import specs\nfrom acme import types\nfrom acme.adders.reverb import base\nfrom acme.adders.reverb import utils\n\nimport numpy as np\nimport reverb\nimport tensorflow as tf\nimport tree\n\n\nclass EndBehavior(enum.Enum):\n \"\"\"Class to enumerate available options for writing behavior at episode ends.\n\n Example:\n\n sequence_length = 3\n period = 2\n\n Episode steps (digits) and writing events (W):\n\n 1 2 3 4 5 6\n W W\n\n First two sequences:\n\n 1 2 3\n . . 3 4 5\n\n Written sequences for the different end of episode behaviors:\n Here are the last written sequences for each end of episode behavior:\n\n WRITE . . . 4 5 6\n CONTINUE . . . . 5 6 F\n ZERO_PAD . . . . 5 6 0\n TRUNCATE . . . . 5 6\n\n Key:\n F: First step of the next episode\n 0: Zero-filled Step\n \"\"\"\n WRITE = 'write_buffer'\n CONTINUE = 'continue_to_next_episode'\n ZERO_PAD = 'zero_pad_til_next_write'\n TRUNCATE = 'write_truncated_buffer'\n\n\nclass SequenceAdder(base.ReverbAdder):\n \"\"\"An adder which adds sequences of fixed length.\"\"\"\n\n def __init__(\n self,\n client: reverb.Client,\n sequence_length: int,\n period: int,\n *,\n delta_encoded: bool = False,\n priority_fns: Optional[base.PriorityFnMapping] = None,\n max_in_flight_items: Optional[int] = 2,\n end_of_episode_behavior: Optional[EndBehavior] = None,\n # Deprecated kwargs.\n chunk_length: Optional[int] = None,\n pad_end_of_episode: Optional[bool] = None,\n break_end_of_episode: Optional[bool] = None,\n validate_items: bool = True,\n ):\n \"\"\"Makes a SequenceAdder instance.\n\n Args:\n client: See docstring for BaseAdder.\n sequence_length: The fixed length of sequences we wish to add.\n period: The period with which we add sequences. If less than\n sequence_length, overlapping sequences are added. If equal to\n sequence_length, sequences are exactly non-overlapping.\n delta_encoded: If `True` (False by default) enables delta encoding, see\n `Client` for more information.\n priority_fns: See docstring for BaseAdder.\n max_in_flight_items: The maximum number of items allowed to be \"in flight\"\n at the same time. See `block_until_num_items` in\n `reverb.TrajectoryWriter.flush` for more info.\n end_of_episode_behavior: Determines how sequences at the end of the\n episode are handled (default `EndOfEpisodeBehavior.ZERO_PAD`). See\n the docstring for `EndOfEpisodeBehavior` for more information.\n chunk_length: Deprecated and unused.\n pad_end_of_episode: If True (default) then upon end of episode the current\n sequence will be padded (with observations, actions, etc... whose values\n are 0) until its length is `sequence_length`. 
If False then the last\n        sequence in the episode may have length less than `sequence_length`.\n      break_end_of_episode: If False (True by default), sequences are not\n        broken on env reset. In this case 'pad_end_of_episode' is not used.\n      validate_items: Whether to validate items against the table signature\n        before they are sent to the server. This requires the table signature\n        to be fetched from the server and cached locally.\n    \"\"\"\n    del chunk_length\n    super().__init__(\n        client=client,\n        # We need an additional space in the buffer for the partial step the\n        # base.ReverbAdder will add with the next observation.\n        max_sequence_length=sequence_length+1,\n        delta_encoded=delta_encoded,\n        priority_fns=priority_fns,\n        max_in_flight_items=max_in_flight_items,\n        validate_items=validate_items)\n\n    if pad_end_of_episode and not break_end_of_episode:\n      raise ValueError(\n          'Can\\'t set pad_end_of_episode=True and break_end_of_episode=False at'\n          ' the same time, since those behaviors are incompatible.')\n\n    self._period = period\n    self._sequence_length = sequence_length\n\n    if end_of_episode_behavior and (pad_end_of_episode is not None or\n                                    break_end_of_episode is not None):\n      raise ValueError(\n          'Using end_of_episode_behavior and either '\n          'pad_end_of_episode or break_end_of_episode is not permitted. '\n          'Please use only end_of_episode_behavior instead.')\n\n    # Set pad_end_of_episode and break_end_of_episode to default values.\n    if end_of_episode_behavior is None and pad_end_of_episode is None:\n      pad_end_of_episode = True\n    if end_of_episode_behavior is None and break_end_of_episode is None:\n      break_end_of_episode = True\n\n    self._end_of_episode_behavior = EndBehavior.ZERO_PAD\n    if pad_end_of_episode is not None or break_end_of_episode is not None:\n      if not break_end_of_episode:\n        self._end_of_episode_behavior = EndBehavior.CONTINUE\n      elif break_end_of_episode and pad_end_of_episode:\n        self._end_of_episode_behavior = EndBehavior.ZERO_PAD\n      elif break_end_of_episode and not pad_end_of_episode:\n        self._end_of_episode_behavior = EndBehavior.TRUNCATE\n      else:\n        raise ValueError(\n            'Reached an unexpected configuration of the SequenceAdder '\n            f'with break_end_of_episode={break_end_of_episode} '\n            f'and pad_end_of_episode={pad_end_of_episode}.')\n    elif isinstance(end_of_episode_behavior, EndBehavior):\n      self._end_of_episode_behavior = end_of_episode_behavior\n    else:\n      raise ValueError('end_of_episode_behavior must be an instance of '\n                       f'EndBehavior, received {end_of_episode_behavior}.')\n\n  def reset(self):  # pytype: disable=signature-mismatch  # overriding-parameter-count-checks\n    \"\"\"Resets the adder's buffer.\"\"\"\n    # If we do not write on end of episode, we should not reset the writer.\n    if self._end_of_episode_behavior is EndBehavior.CONTINUE:\n      return\n\n    super().reset()\n\n  def _write(self):\n    self._maybe_create_item(self._sequence_length)\n\n  def _write_last(self):\n    # Maybe determine the delta to the next time we would write a sequence.\n    if self._end_of_episode_behavior in (EndBehavior.TRUNCATE,\n                                         EndBehavior.ZERO_PAD):\n      delta = self._sequence_length - self._writer.episode_steps\n      if delta < 0:\n        delta = (self._period + delta) % self._period\n\n    # Handle various end-of-episode cases.\n    if self._end_of_episode_behavior is EndBehavior.CONTINUE:\n      self._maybe_create_item(self._sequence_length, end_of_episode=True)\n\n    elif self._end_of_episode_behavior is EndBehavior.WRITE:\n      # Drop episodes that are too short.\n      if self._writer.episode_steps < self._sequence_length:\n        return\n      
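      # The episode has at least sequence_length steps here, so force-write
      # the trailing sequence even if it does not fall on a period boundary
      # (force=True bypasses the period check in _maybe_create_item).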
self._maybe_create_item(\n self._sequence_length, end_of_episode=True, force=True)\n\n elif self._end_of_episode_behavior is EndBehavior.TRUNCATE:\n self._maybe_create_item(\n self._sequence_length - delta,\n end_of_episode=True,\n force=True)\n\n elif self._end_of_episode_behavior is EndBehavior.ZERO_PAD:\n zero_step = tree.map_structure(lambda x: np.zeros_like(x[-2].numpy()),\n self._writer.history)\n for _ in range(delta):\n self._writer.append(zero_step)\n\n self._maybe_create_item(\n self._sequence_length, end_of_episode=True, force=True)\n else:\n raise ValueError(\n f'Unhandled end of episode behavior: {self._end_of_episode_behavior}.'\n ' This should never happen, please contact Acme dev team.')\n\n def _maybe_create_item(self,\n sequence_length: int,\n *,\n end_of_episode: bool = False,\n force: bool = False):\n\n # Check conditions under which a new item is created.\n first_write = self._writer.episode_steps == sequence_length\n # NOTE(bshahr): the following line assumes that the only way sequence_length\n # is less than self._sequence_length, is if the episode is shorter than\n # self._sequence_length.\n period_reached = (\n self._writer.episode_steps > self._sequence_length and\n ((self._writer.episode_steps - self._sequence_length) % self._period\n == 0))\n\n if not first_write and not period_reached and not force:\n return\n\n # TODO(b/183945808): will need to change to adhere to the new protocol.\n if not end_of_episode:\n get_traj = operator.itemgetter(slice(-sequence_length - 1, -1))\n else:\n get_traj = operator.itemgetter(slice(-sequence_length, None))\n\n history = self._writer.history\n trajectory = base.Trajectory(**tree.map_structure(get_traj, history))\n\n # Compute priorities for the buffer.\n table_priorities = utils.calculate_priorities(self._priority_fns,\n trajectory)\n\n # Create a prioritized item for each table.\n for table_name, priority in table_priorities.items():\n self._writer.create_item(table_name, priority, trajectory)\n self._writer.flush(self._max_in_flight_items)\n\n # TODO(bshahr): make this into a standalone method. Class methods should be\n # used as alternative constructors or when modifying some global state,\n # neither of which is done here.\n @classmethod\n def signature(cls, environment_spec: specs.EnvironmentSpec,\n extras_spec: types.NestedSpec = (),\n sequence_length: Optional[int] = None):\n \"\"\"This is a helper method for generating signatures for Reverb tables.\n\n Signatures are useful for validating data types and shapes, see Reverb's\n documentation for details on how they are used.\n\n Args:\n environment_spec: A `specs.EnvironmentSpec` whose fields are nested\n structures with leaf nodes that have `.shape` and `.dtype` attributes.\n This should come from the environment that will be used to generate\n the data inserted into the Reverb table.\n extras_spec: A nested structure with leaf nodes that have `.shape` and\n `.dtype` attributes. 
The structure (and shapes/dtypes) of this must\n be the same as the `extras` passed into `ReverbAdder.add`.\n sequence_length: An optional integer representing the expected length of\n sequences that will be added to replay.\n\n Returns:\n A `Trajectory` whose leaf nodes are `tf.TensorSpec` objects.\n \"\"\"\n\n def add_time_dim(paths: Iterable[str], spec: tf.TensorSpec):\n return tf.TensorSpec(shape=(sequence_length, *spec.shape),\n dtype=spec.dtype,\n name='/'.join(str(p) for p in paths))\n\n trajectory_env_spec, trajectory_extras_spec = tree.map_structure_with_path(\n add_time_dim, (environment_spec, extras_spec))\n\n spec_step = base.Trajectory(\n *trajectory_env_spec,\n start_of_episode=tf.TensorSpec(\n shape=(sequence_length,), dtype=tf.bool, name='start_of_episode'),\n extras=trajectory_extras_spec)\n\n return spec_step\n" }, { "alpha_fraction": 0.6221274733543396, "alphanum_fraction": 0.6323146224021912, "avg_line_length": 36.02631759643555, "blob_id": "a24ba62837bd399b138d5d4e7c1a7f9a6700e91b", "content_id": "dba61033d72399e221e3231a141dccfff834fda1", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4221, "license_type": "permissive", "max_line_length": 106, "num_lines": 114, "path": "/acme/jax/losses/impala.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Loss function for IMPALA (Espeholt et al., 2018) [1].\n\n[1] https://arxiv.org/abs/1802.01561\n\"\"\"\n\nfrom typing import Callable, Mapping, Tuple\n\nfrom acme.agents.jax.impala import types\nfrom acme.jax import utils\nimport haiku as hk\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nimport reverb\nimport rlax\nimport tree\n\n\ndef impala_loss(\n unroll_fn: types.PolicyValueFn,\n *,\n discount: float,\n max_abs_reward: float = np.inf,\n baseline_cost: float = 1.0,\n entropy_cost: float = 0.0,\n) -> Callable[[hk.Params, reverb.ReplaySample], jax.Array]:\n \"\"\"Builds the standard entropy-regularised IMPALA loss function.\n\n Args:\n unroll_fn: A `hk.Transformed` object containing a callable which maps\n (params, observations_sequence, initial_state) -> ((logits, value), state)\n discount: The standard geometric discount rate to apply.\n max_abs_reward: Optional symmetric reward clipping to apply.\n baseline_cost: Weighting of the critic loss relative to the policy loss.\n entropy_cost: Weighting of the entropy regulariser relative to policy loss.\n\n Returns:\n A loss function with signature (params, data) -> (loss_scalar, metrics).\n \"\"\"\n\n def loss_fn(\n params: hk.Params,\n sample: reverb.ReplaySample,\n ) -> Tuple[jax.Array, Mapping[str, jax.Array]]:\n \"\"\"Batched, entropy-regularised actor-critic loss with V-trace.\"\"\"\n\n # Extract the data.\n data = sample.data\n observations, actions, rewards, discounts, extra = (data.observation,\n data.action,\n data.reward,\n data.discount,\n 
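                                                        # extras holds actor-side products recorded at acting time:
                                                        # the behaviour logits and the recurrent core_state used below.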
data.extras)\n initial_state = tree.map_structure(lambda s: s[0], extra['core_state'])\n behaviour_logits = extra['logits']\n\n # Apply reward clipping.\n rewards = jnp.clip(rewards, -max_abs_reward, max_abs_reward)\n\n # Unroll current policy over observations.\n (logits, values), _ = unroll_fn(params, observations, initial_state)\n\n # Compute importance sampling weights: current policy / behavior policy.\n rhos = rlax.categorical_importance_sampling_ratios(logits[:-1],\n behaviour_logits[:-1],\n actions[:-1])\n\n # Critic loss.\n vtrace_returns = rlax.vtrace_td_error_and_advantage(\n v_tm1=values[:-1],\n v_t=values[1:],\n r_t=rewards[:-1],\n discount_t=discounts[:-1] * discount,\n rho_tm1=rhos)\n critic_loss = jnp.square(vtrace_returns.errors)\n\n # Policy gradient loss.\n policy_gradient_loss = rlax.policy_gradient_loss(\n logits_t=logits[:-1],\n a_t=actions[:-1],\n adv_t=vtrace_returns.pg_advantage,\n w_t=jnp.ones_like(rewards[:-1]))\n\n # Entropy regulariser.\n entropy_loss = rlax.entropy_loss(logits[:-1], jnp.ones_like(rewards[:-1]))\n\n # Combine weighted sum of actor & critic losses, averaged over the sequence.\n mean_loss = jnp.mean(policy_gradient_loss + baseline_cost * critic_loss +\n entropy_cost * entropy_loss) # []\n\n metrics = {\n 'policy_loss': jnp.mean(policy_gradient_loss),\n 'critic_loss': jnp.mean(baseline_cost * critic_loss),\n 'entropy_loss': jnp.mean(entropy_cost * entropy_loss),\n 'entropy': jnp.mean(entropy_loss),\n }\n\n return mean_loss, metrics\n\n return utils.mapreduce(loss_fn, in_axes=(None, 0)) # pytype: disable=bad-return-type # jax-devicearray\n" }, { "alpha_fraction": 0.6832639575004578, "alphanum_fraction": 0.6895065307617188, "avg_line_length": 37.890174865722656, "blob_id": "16d24d95f00866161a32dfc45b1880725228da52", "content_id": "3097cdeae53e221c35e5ae3e81560e05a4fee0cc", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13456, "license_type": "permissive", "max_line_length": 102, "num_lines": 346, "path": "/acme/agents/jax/mpo/networks.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"MoG-MPO network definitions.\"\"\"\n\nimport dataclasses\nfrom typing import Callable, NamedTuple, Optional, Sequence, Tuple, Union\n\nfrom acme import specs\nfrom acme.agents.jax.mpo import types\nfrom acme.jax import networks as networks_lib\nfrom acme.jax import utils\nimport chex\nimport haiku as hk\nimport haiku.initializers as hk_init\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nimport tensorflow_probability.substrates.jax as tfp\n\ntfd = tfp.distributions\nDistributionOrArray = Union[tfd.Distribution, jnp.ndarray]\n\n\nclass MPONetworkParams(NamedTuple):\n policy_head: Optional[hk.Params] = None\n critic_head: Optional[hk.Params] = None\n torso: Optional[hk.Params] = None\n torso_initial_state: Optional[hk.Params] = None\n dynamics_model: Union[hk.Params, Tuple[()]] = ()\n dynamics_model_initial_state: Union[hk.Params, Tuple[()]] = ()\n\n\[email protected]\nclass UnrollableNetwork:\n \"\"\"Network that can unroll over an input sequence.\"\"\"\n init: Callable[[networks_lib.PRNGKey, types.Observation, hk.LSTMState],\n hk.Params]\n apply: Callable[[hk.Params, types.Observation, hk.LSTMState],\n Tuple[jnp.ndarray, hk.LSTMState]]\n unroll: Callable[[hk.Params, types.Observation, hk.LSTMState],\n Tuple[jnp.ndarray, hk.LSTMState]]\n initial_state_fn_init: Callable[[networks_lib.PRNGKey, Optional[int]],\n hk.Params]\n initial_state_fn: Callable[[hk.Params, Optional[int]], hk.LSTMState]\n\n\[email protected]\nclass MPONetworks:\n \"\"\"Network for the MPO agent.\"\"\"\n policy_head: Optional[hk.Transformed] = None\n critic_head: Optional[hk.Transformed] = None\n torso: Optional[UnrollableNetwork] = None\n dynamics_model: Optional[UnrollableNetwork] = None\n\n def policy_head_apply(self, params: MPONetworkParams,\n obs_embedding: types.ObservationEmbedding):\n return self.policy_head.apply(params.policy_head, obs_embedding)\n\n def critic_head_apply(self, params: MPONetworkParams,\n obs_embedding: types.ObservationEmbedding,\n actions: types.Action):\n return self.critic_head.apply(params.critic_head, obs_embedding, actions)\n\n def torso_unroll(self, params: MPONetworkParams,\n observations: types.Observation, state: hk.LSTMState):\n return self.torso.unroll(params.torso, observations, state)\n\n def dynamics_model_unroll(self, params: MPONetworkParams,\n actions: types.Action, state: hk.LSTMState):\n return self.dynamics_model.unroll(params.dynamics_model, actions, state)\n\n\ndef init_params(\n networks: MPONetworks,\n spec: specs.EnvironmentSpec,\n random_key: types.RNGKey,\n add_batch_dim: bool = False,\n dynamics_rollout_length: int = 0,\n) -> Tuple[MPONetworkParams, hk.LSTMState]:\n \"\"\"Initialize the parameters of a MPO network.\"\"\"\n\n rng_keys = jax.random.split(random_key, 6)\n\n # Create a dummy observation/action to initialize network parameters.\n observations, actions = utils.zeros_like((spec.observations, spec.actions))\n\n # Add batch dimensions if necessary by the scope that is 
calling this init.\n if add_batch_dim:\n observations, actions = utils.add_batch_dim((observations, actions))\n\n # Initialize the state torso parameters and create a dummy core state.\n batch_size = 1 if add_batch_dim else None\n params_torso_initial_state = networks.torso.initial_state_fn_init(\n rng_keys[0], batch_size)\n state = networks.torso.initial_state_fn(\n params_torso_initial_state, batch_size)\n\n # Initialize the core and unroll one step to create a dummy core output.\n # The input to the core is the current action and the next observation.\n params_torso = networks.torso.init(rng_keys[1], observations, state)\n embeddings, _ = networks.torso.apply(params_torso, observations, state)\n\n # Initialize the policy and critic heads by passing in the dummy embedding.\n params_policy_head, params_critic_head = {}, {} # Cannot be None for BIT.\n if networks.policy_head:\n params_policy_head = networks.policy_head.init(rng_keys[2], embeddings)\n if networks.critic_head:\n params_critic_head = networks.critic_head.init(rng_keys[3], embeddings,\n actions)\n\n # Initialize the recurrent dynamics model if it exists.\n if networks.dynamics_model and dynamics_rollout_length > 0:\n params_dynamics_initial_state = networks.dynamics_model.initial_state_fn_init(\n rng_keys[4], embeddings)\n dynamics_state = networks.dynamics_model.initial_state_fn(\n params_dynamics_initial_state, embeddings)\n params_dynamics = networks.dynamics_model.init(\n rng_keys[5], actions, dynamics_state)\n else:\n params_dynamics_initial_state = ()\n params_dynamics = ()\n\n params = MPONetworkParams(\n policy_head=params_policy_head,\n critic_head=params_critic_head,\n torso=params_torso,\n torso_initial_state=params_torso_initial_state,\n dynamics_model=params_dynamics,\n dynamics_model_initial_state=params_dynamics_initial_state)\n\n return params, state\n\n\ndef make_unrollable_network(\n make_core_module: Callable[[], hk.RNNCore] = hk.IdentityCore,\n make_feedforward_module: Optional[Callable[[], hk.SupportsCall]] = None,\n make_initial_state_fn: Optional[Callable[[], hk.SupportsCall]] = None,\n) -> UnrollableNetwork:\n \"\"\"Produces an UnrollableNetwork and a state initializing hk.Transformed.\"\"\"\n\n def default_initial_state_fn(batch_size: Optional[int] = None) -> jnp.ndarray:\n return make_core_module().initial_state(batch_size)\n\n def _apply_core_fn(observation: types.Observation,\n state: jnp.ndarray) -> Tuple[jnp.ndarray, jnp.ndarray]:\n if make_feedforward_module:\n observation = make_feedforward_module()(observation)\n return make_core_module()(observation, state)\n\n def _unroll_core_fn(observation: types.Observation,\n state: jnp.ndarray) -> Tuple[jnp.ndarray, jnp.ndarray]:\n if make_feedforward_module:\n observation = make_feedforward_module()(observation)\n return hk.dynamic_unroll(make_core_module(), observation, state)\n\n if make_initial_state_fn:\n initial_state_fn = make_initial_state_fn()\n else:\n initial_state_fn = default_initial_state_fn\n\n # Transform module functions into pure functions.\n hk_initial_state_fn = hk.without_apply_rng(hk.transform(initial_state_fn))\n apply_core = hk.without_apply_rng(hk.transform(_apply_core_fn))\n unroll_core = hk.without_apply_rng(hk.transform(_unroll_core_fn))\n\n # Pack all core network pure functions into a single convenient container.\n return UnrollableNetwork(\n init=apply_core.init,\n apply=apply_core.apply,\n unroll=unroll_core.apply,\n initial_state_fn_init=hk_initial_state_fn.init,\n initial_state_fn=hk_initial_state_fn.apply)\n\n\ndef 
make_control_networks(\n environment_spec: specs.EnvironmentSpec,\n *,\n with_recurrence: bool = False,\n policy_layer_sizes: Sequence[int] = (256, 256, 256),\n critic_layer_sizes: Sequence[int] = (512, 512, 256),\n policy_init_scale: float = 0.7,\n critic_type: types.CriticType = types.CriticType.MIXTURE_OF_GAUSSIANS,\n mog_init_scale: float = 1e-3, # Used by MoG critic.\n mog_num_components: int = 5, # Used by MoG critic.\n categorical_num_bins: int = 51, # Used by CATEGORICAL* critics.\n vmin: float = -150., # Used by CATEGORICAL* critics.\n vmax: float = 150., # Used by CATEGORICAL* critics.\n) -> MPONetworks:\n \"\"\"Creates MPONetworks to be used DM Control suite tasks.\"\"\"\n\n # Unpack the environment spec to get appropriate shapes, dtypes, etc.\n num_dimensions = np.prod(environment_spec.actions.shape, dtype=int)\n\n # Factory to create the core hk.Module. Must be a factory as the module must\n # be initialized within a hk.transform scope.\n if with_recurrence:\n make_core_module = lambda: GRUWithSkip(16)\n else:\n make_core_module = hk.IdentityCore\n\n def policy_fn(observation: types.NestedArray) -> tfd.Distribution:\n embedding = networks_lib.LayerNormMLP(\n policy_layer_sizes, activate_final=True)(\n observation)\n return networks_lib.MultivariateNormalDiagHead(\n num_dimensions, init_scale=policy_init_scale)(\n embedding)\n\n def critic_fn(observation: types.NestedArray,\n action: types.NestedArray) -> DistributionOrArray:\n # Action is clipped to avoid critic extrapolations outside the spec range.\n clipped_action = networks_lib.ClipToSpec(environment_spec.actions)(action)\n inputs = jnp.concatenate([observation, clipped_action], axis=-1)\n embedding = networks_lib.LayerNormMLP(\n critic_layer_sizes, activate_final=True)(\n inputs)\n\n if critic_type == types.CriticType.MIXTURE_OF_GAUSSIANS:\n return networks_lib.GaussianMixture(\n num_dimensions=1,\n num_components=mog_num_components,\n multivariate=False,\n init_scale=mog_init_scale,\n append_singleton_event_dim=False,\n reinterpreted_batch_ndims=0)(\n embedding)\n elif critic_type in (types.CriticType.CATEGORICAL,\n types.CriticType.CATEGORICAL_2HOT):\n return networks_lib.CategoricalCriticHead(\n num_bins=categorical_num_bins, vmin=vmin, vmax=vmax)(\n embedding)\n else:\n return hk.Linear(\n output_size=1, w_init=hk_init.TruncatedNormal(0.01))(\n embedding)\n\n # Create unrollable torso.\n torso = make_unrollable_network(make_core_module=make_core_module)\n\n # Create MPONetworks to add functionality required by the agent.\n return MPONetworks(\n policy_head=hk.without_apply_rng(hk.transform(policy_fn)),\n critic_head=hk.without_apply_rng(hk.transform(critic_fn)),\n torso=torso)\n\n\ndef add_batch(nest, batch_size: Optional[int]):\n \"\"\"Adds a batch dimension at axis 0 to the leaves of a nested structure.\"\"\"\n broadcast = lambda x: jnp.broadcast_to(x, (batch_size,) + x.shape)\n return jax.tree_map(broadcast, nest)\n\n\ndef w_init_identity(shape: Sequence[int], dtype) -> jnp.ndarray:\n chex.assert_equal(len(shape), 2)\n chex.assert_equal(shape[0], shape[1])\n return jnp.eye(shape[0], dtype=dtype)\n\n\nclass IdentityRNN(hk.RNNCore):\n r\"\"\"Basic fully-connected RNN core with identity initialization.\n\n Given :math:`x_t` and the previous hidden state :math:`h_{t-1}` the\n core computes\n .. 
math::\n h_t = \\operatorname{ReLU}(w_i x_t + b_i + w_h h_{t-1} + b_h)\n The output is equal to the new state, :math:`h_t`.\n\n Initialized using the strategy described in:\n https://arxiv.org/pdf/1504.00941.pdf\n \"\"\"\n\n def __init__(self,\n hidden_size: int,\n hidden_scale: float = 1e-2,\n name: Optional[str] = None):\n \"\"\"Constructs a vanilla RNN core.\n\n Args:\n hidden_size: Hidden layer size.\n hidden_scale: Scalar multiplying the hidden-to-hidden matmul.\n name: Name of the module.\n \"\"\"\n super().__init__(name=name)\n self._initial_state = jnp.zeros([hidden_size])\n self._hidden_scale = hidden_scale\n self._input_to_hidden = hk.Linear(hidden_size)\n self._hidden_to_hidden = hk.Linear(\n hidden_size, with_bias=True, w_init=w_init_identity)\n\n def __call__(self, inputs: jnp.ndarray, prev_state: jnp.ndarray):\n out = jax.nn.relu(\n self._input_to_hidden(inputs) +\n self._hidden_scale * self._hidden_to_hidden(prev_state))\n return out, out\n\n def initial_state(self, batch_size: Optional[int]):\n state = self._initial_state\n if batch_size is not None:\n state = add_batch(state, batch_size)\n return state\n\n\nclass GRU(hk.GRU):\n \"\"\"GRU with an identity initialization.\"\"\"\n\n def __init__(self, hidden_size: int, name: Optional[str] = None):\n\n def b_init(unused_size: Sequence[int], dtype) -> jnp.ndarray:\n \"\"\"Initializes the biases so the GRU ignores the state and acts as a tanh.\"\"\"\n return jnp.concatenate([\n +2 * jnp.ones([hidden_size], dtype=dtype),\n -2 * jnp.ones([hidden_size], dtype=dtype),\n jnp.zeros([hidden_size], dtype=dtype)\n ])\n\n super().__init__(hidden_size=hidden_size, b_init=b_init, name=name)\n\n\nclass GRUWithSkip(hk.GRU):\n \"\"\"GRU with a skip-connection from input to output.\"\"\"\n\n def __call__(self, inputs: jnp.ndarray, prev_state: jnp.ndarray):\n outputs, state = super().__call__(inputs, prev_state)\n outputs = jnp.concatenate([inputs, outputs], axis=-1)\n return outputs, state\n\n\nclass Conv2DLSTMWithSkip(hk.Conv2DLSTM):\n \"\"\"Conv2DLSTM with a skip-connection from input to output.\"\"\"\n\n def __call__(self, inputs: jnp.ndarray, state: jnp.ndarray):\n outputs, state = super().__call__(inputs, state) # pytype: disable=wrong-arg-types # jax-ndarray\n outputs = jnp.concatenate([inputs, outputs], axis=-1)\n return outputs, state\n" }, { "alpha_fraction": 0.7200704216957092, "alphanum_fraction": 0.7266725301742554, "avg_line_length": 30.123287200927734, "blob_id": "861d991830229a4020637aa15100dbd10a2549c8", "content_id": "90bc2eeb6e01a714ec73197f0273cf3fe3b767fd", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4544, "license_type": "permissive", "max_line_length": 80, "num_lines": 146, "path": "/examples/baselines/imitation/run_iqlearn.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Example running IQLearn on continuous control tasks.\n\nThis handles the online imitation setting.\"\"\"\n\nfrom typing import Callable, Iterator\n\nfrom absl import flags\nfrom acme import specs\nfrom acme import types\nfrom acme.agents.jax import actor_core as actor_core_lib\nfrom acme.agents.jax import iq_learn\nfrom acme.datasets import tfds\nimport helpers\nfrom absl import app\nfrom acme.jax import experiments\nfrom acme.jax import types as jax_types\nfrom acme.utils import lp_utils\nimport dm_env\nimport jax\nimport launchpad as lp\n\nFLAGS = flags.FLAGS\n\nflags.DEFINE_bool(\n 'run_distributed',\n True,\n (\n 'Should an agent be executed in a distributed '\n 'way. If False, will run single-threaded.'\n ),\n)\nflags.DEFINE_string('env_name', 'HalfCheetah-v2', 'What environment to run.')\nflags.DEFINE_integer('seed', 0, 'Random seed.')\nflags.DEFINE_integer('num_steps', 1_000_000, 'Number of env steps to run.')\nflags.DEFINE_integer('eval_every', 50_000, 'How often (in env steps) to run evaluation.')\nflags.DEFINE_integer(\n 'num_demonstrations', 11, 'Number of demonstration trajectories.'\n)\nflags.DEFINE_integer('evaluation_episodes', 10, 'Evaluation episodes.')\n\n\ndef _make_environment_factory(env_name: str) -> jax_types.EnvironmentFactory:\n \"\"\"Returns the environment factory for the given environment.\"\"\"\n\n def environment_factory(seed: int) -> dm_env.Environment:\n del seed\n return helpers.make_environment(task=env_name)\n\n return environment_factory\n\n\ndef _make_demonstration_dataset_factory(\n dataset_name: str,\n environment_spec: specs.EnvironmentSpec,\n num_demonstrations: int,\n random_key: jax_types.PRNGKey,\n) -> Callable[[jax_types.PRNGKey], Iterator[types.Transition]]:\n \"\"\"Returns the demonstration dataset factory for the given dataset.\"\"\"\n\n def demonstration_dataset_factory(\n batch_size: int,\n ) -> Iterator[types.Transition]:\n \"\"\"Returns an iterator of demonstration samples.\"\"\"\n transitions_iterator = tfds.get_tfds_dataset(\n dataset_name, num_episodes=num_demonstrations, env_spec=environment_spec\n )\n return tfds.JaxInMemoryRandomSampleIterator(\n transitions_iterator, key=random_key, batch_size=batch_size\n )\n\n return demonstration_dataset_factory\n\n\ndef build_experiment_config() -> (\n experiments.ExperimentConfig[\n iq_learn.IQLearnNetworks,\n actor_core_lib.ActorCore,\n iq_learn.IQLearnSample,\n ]\n):\n \"\"\"Returns a configuration for IQLearn experiments.\"\"\"\n\n # Create an environment, grab the spec, and use it to create networks.\n env_name = FLAGS.env_name\n environment_factory = _make_environment_factory(env_name)\n\n dummy_seed = 1\n environment = environment_factory(dummy_seed)\n environment_spec = specs.make_environment_spec(environment)\n\n # Create demonstrations function.\n dataset_name = helpers.get_dataset_name(env_name)\n make_demonstrations = _make_demonstration_dataset_factory(\n dataset_name,\n environment_spec,\n FLAGS.num_demonstrations,\n 
jax.random.PRNGKey(FLAGS.seed),\n )\n\n # Construct the agent\n iq_learn_config = iq_learn.IQLearnConfig(alpha=1.0)\n iq_learn_builder = iq_learn.IQLearnBuilder(\n config=iq_learn_config, make_demonstrations=make_demonstrations\n )\n\n return experiments.ExperimentConfig(\n builder=iq_learn_builder,\n environment_factory=environment_factory,\n network_factory=iq_learn.make_networks,\n seed=FLAGS.seed,\n max_num_actor_steps=FLAGS.num_steps,\n )\n\n\ndef main(_):\n config = build_experiment_config()\n if FLAGS.run_distributed:\n program = experiments.make_distributed_experiment(\n experiment=config, num_actors=4\n )\n lp.launch(program, xm_resources=lp_utils.make_xm_docker_resources(program))\n else:\n experiments.run_experiment(\n experiment=config,\n eval_every=FLAGS.eval_every,\n num_eval_episodes=FLAGS.evaluation_episodes,\n )\n\n\nif __name__ == '__main__':\n app.run(main)\n" }, { "alpha_fraction": 0.6167004704475403, "alphanum_fraction": 0.6274827718734741, "avg_line_length": 36.492401123046875, "blob_id": "74175bc8d4b293768580f92c84f808e6f793d4b5", "content_id": "33d49e08cf4d521d0e4493f59bd0c3d6f7658e89", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12335, "license_type": "permissive", "max_line_length": 83, "num_lines": 329, "path": "/acme/agents/jax/value_dice/learning.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"ValueDice learner implementation.\"\"\"\n\nimport functools\nimport time\nfrom typing import Any, Dict, Iterator, List, Mapping, NamedTuple, Optional, Tuple\n\nimport acme\nfrom acme import types\nfrom acme.agents.jax.value_dice import networks as value_dice_networks\nfrom acme.jax import networks as networks_lib\nfrom acme.jax import utils\nfrom acme.utils import counting\nfrom acme.utils import loggers\nimport jax\nimport jax.numpy as jnp\nimport optax\nimport reverb\n\n\nclass TrainingState(NamedTuple):\n \"\"\"Contains training state for the learner.\"\"\"\n policy_optimizer_state: optax.OptState\n policy_params: networks_lib.Params\n nu_optimizer_state: optax.OptState\n nu_params: networks_lib.Params\n key: jnp.ndarray\n steps: int\n\n\ndef _orthogonal_regularization_loss(params: networks_lib.Params):\n \"\"\"Orthogonal regularization.\n\n See equation (3) in https://arxiv.org/abs/1809.11096.\n\n Args:\n params: Dictionary of parameters to apply regularization to.\n\n Returns:\n A regularization loss term.\n \"\"\"\n reg_loss = 0\n for key in params:\n if isinstance(params[key], Mapping):\n reg_loss += _orthogonal_regularization_loss(params[key])\n continue\n variable = params[key]\n assert len(variable.shape) in [1, 2, 4]\n if len(variable.shape) == 1:\n # This is a bias so do not apply regularization.\n continue\n if len(variable.shape) == 4:\n # CNN kernel: flatten the spatial dimensions before the Gram product.\n variable = jnp.reshape(variable, (-1, variable.shape[-1]))\n prod = 
jnp.matmul(jnp.transpose(variable), variable)\n reg_loss += jnp.sum(jnp.square(prod * (1 - jnp.eye(prod.shape[0]))))\n return reg_loss\n\n\nclass ValueDiceLearner(acme.Learner):\n \"\"\"ValueDice learner.\"\"\"\n\n _state: TrainingState\n\n def __init__(self,\n networks: value_dice_networks.ValueDiceNetworks,\n policy_optimizer: optax.GradientTransformation,\n nu_optimizer: optax.GradientTransformation,\n discount: float,\n rng: jnp.ndarray,\n iterator_replay: Iterator[reverb.ReplaySample],\n iterator_demonstrations: Iterator[types.Transition],\n alpha: float = 0.05,\n policy_reg_scale: float = 1e-4,\n nu_reg_scale: float = 10.0,\n num_sgd_steps_per_step: int = 1,\n counter: Optional[counting.Counter] = None,\n logger: Optional[loggers.Logger] = None):\n\n rng, policy_key, nu_key = jax.random.split(rng, 3)\n policy_init_params = networks.policy_network.init(policy_key)\n policy_optimizer_state = policy_optimizer.init(policy_init_params)\n\n nu_init_params = networks.nu_network.init(nu_key)\n nu_optimizer_state = nu_optimizer.init(nu_init_params)\n\n def compute_losses(\n policy_params: networks_lib.Params,\n nu_params: networks_lib.Params,\n key: jnp.ndarray,\n replay_o_tm1: types.NestedArray,\n replay_a_tm1: types.NestedArray,\n replay_o_t: types.NestedArray,\n demo_o_tm1: types.NestedArray,\n demo_a_tm1: types.NestedArray,\n demo_o_t: types.NestedArray,\n ) -> jnp.ndarray:\n # TODO(damienv, hussenot): what to do with the discounts?\n\n def policy(obs, key):\n dist_params = networks.policy_network.apply(policy_params, obs)\n return networks.sample(dist_params, key)\n\n key1, key2, key3, key4 = jax.random.split(key, 4)\n\n # Predicted actions.\n demo_o_t0 = demo_o_tm1\n policy_demo_a_t0 = policy(demo_o_t0, key1)\n policy_demo_a_t = policy(demo_o_t, key2)\n policy_replay_a_t = policy(replay_o_t, key3)\n\n replay_a_tm1 = networks.encode_action(replay_a_tm1)\n demo_a_tm1 = networks.encode_action(demo_a_tm1)\n policy_demo_a_t0 = networks.encode_action(policy_demo_a_t0)\n policy_demo_a_t = networks.encode_action(policy_demo_a_t)\n policy_replay_a_t = networks.encode_action(policy_replay_a_t)\n\n # \"Value function\" nu over the expert states.\n nu_demo_t0 = networks.nu_network.apply(nu_params, demo_o_t0,\n policy_demo_a_t0)\n nu_demo_tm1 = networks.nu_network.apply(nu_params, demo_o_tm1, demo_a_tm1)\n nu_demo_t = networks.nu_network.apply(nu_params, demo_o_t,\n policy_demo_a_t)\n nu_demo_diff = nu_demo_tm1 - discount * nu_demo_t\n\n # \"Value function\" nu over the replay buffer states.\n nu_replay_tm1 = networks.nu_network.apply(nu_params, replay_o_tm1,\n replay_a_tm1)\n nu_replay_t = networks.nu_network.apply(nu_params, replay_o_t,\n policy_replay_a_t)\n nu_replay_diff = nu_replay_tm1 - discount * nu_replay_t\n\n # Linear part of the loss.\n linear_loss_demo = jnp.mean(nu_demo_t0 * (1.0 - discount))\n linear_loss_rb = jnp.mean(nu_replay_diff)\n linear_loss = (linear_loss_demo * (1 - alpha) + linear_loss_rb * alpha)\n\n # Non-linear part of the loss.\n nu_replay_demo_diff = jnp.concatenate([nu_demo_diff, nu_replay_diff],\n axis=0)\n replay_demo_weights = jnp.concatenate([\n jnp.ones_like(nu_demo_diff) * (1 - alpha),\n jnp.ones_like(nu_replay_diff) * alpha\n ],\n axis=0)\n replay_demo_weights /= jnp.mean(replay_demo_weights)\n non_linear_loss = jnp.sum(\n jax.lax.stop_gradient(\n utils.weighted_softmax(nu_replay_demo_diff, replay_demo_weights,\n axis=0)) *\n nu_replay_demo_diff)\n\n # Final loss.\n loss = (non_linear_loss - linear_loss)\n\n # Regularized policy loss.\n if policy_reg_scale > 
0.:\n policy_reg = _orthogonal_regularization_loss(policy_params)\n else:\n policy_reg = 0.\n\n # Gradient penalty on nu.\n if nu_reg_scale > 0.0:\n batch_size = demo_o_tm1.shape[0]\n c = jax.random.uniform(key4, shape=(batch_size,))\n shape_o = [\n dim if i == 0 else 1 for i, dim in enumerate(replay_o_tm1.shape)\n ]\n shape_a = [\n dim if i == 0 else 1 for i, dim in enumerate(replay_a_tm1.shape)\n ]\n c_o = jnp.reshape(c, shape_o)\n c_a = jnp.reshape(c, shape_a)\n mixed_o_tm1 = c_o * demo_o_tm1 + (1 - c_o) * replay_o_tm1\n mixed_a_tm1 = c_a * demo_a_tm1 + (1 - c_a) * replay_a_tm1\n mixed_o_t = c_o * demo_o_t + (1 - c_o) * replay_o_t\n mixed_policy_a_t = c_a * policy_demo_a_t + (1 - c_a) * policy_replay_a_t\n mixed_o = jnp.concatenate([mixed_o_tm1, mixed_o_t], axis=0)\n mixed_a = jnp.concatenate([mixed_a_tm1, mixed_policy_a_t], axis=0)\n\n def sum_nu(o, a):\n return jnp.sum(networks.nu_network.apply(nu_params, o, a))\n\n nu_grad_o_fn = jax.grad(sum_nu, argnums=0)\n nu_grad_a_fn = jax.grad(sum_nu, argnums=1)\n nu_grad_o = nu_grad_o_fn(mixed_o, mixed_a)\n nu_grad_a = nu_grad_a_fn(mixed_o, mixed_a)\n nu_grad = jnp.concatenate([\n jnp.reshape(nu_grad_o, [batch_size, -1]),\n jnp.reshape(nu_grad_a, [batch_size, -1])], axis=-1)\n # TODO(damienv, hussenot): check for the need of eps\n # (like in the original value dice code).\n nu_grad_penalty = jnp.mean(\n jnp.square(\n jnp.linalg.norm(nu_grad + 1e-8, axis=-1, keepdims=True) - 1))\n else:\n nu_grad_penalty = 0.0\n\n policy_loss = -loss + policy_reg_scale * policy_reg\n nu_loss = loss + nu_reg_scale * nu_grad_penalty\n\n return policy_loss, nu_loss # pytype: disable=bad-return-type # jax-ndarray\n\n def sgd_step(\n state: TrainingState,\n data: Tuple[types.Transition, types.Transition]\n ) -> Tuple[TrainingState, Dict[str, jnp.ndarray]]:\n replay_transitions, demo_transitions = data\n key, key_loss = jax.random.split(state.key)\n compute_losses_with_input = functools.partial(\n compute_losses,\n replay_o_tm1=replay_transitions.observation,\n replay_a_tm1=replay_transitions.action,\n replay_o_t=replay_transitions.next_observation,\n demo_o_tm1=demo_transitions.observation,\n demo_a_tm1=demo_transitions.action,\n demo_o_t=demo_transitions.next_observation,\n key=key_loss)\n (policy_loss_value, nu_loss_value), vjpfun = jax.vjp(\n compute_losses_with_input,\n state.policy_params, state.nu_params)\n policy_gradients, _ = vjpfun((1.0, 0.0))\n _, nu_gradients = vjpfun((0.0, 1.0))\n\n # Update optimizers.\n policy_update, policy_optimizer_state = policy_optimizer.update(\n policy_gradients, state.policy_optimizer_state)\n policy_params = optax.apply_updates(state.policy_params, policy_update)\n\n nu_update, nu_optimizer_state = nu_optimizer.update(\n nu_gradients, state.nu_optimizer_state)\n nu_params = optax.apply_updates(state.nu_params, nu_update)\n\n new_state = TrainingState(\n policy_optimizer_state=policy_optimizer_state,\n policy_params=policy_params,\n nu_optimizer_state=nu_optimizer_state,\n nu_params=nu_params,\n key=key,\n steps=state.steps + 1,\n )\n\n metrics = {\n 'policy_loss': policy_loss_value,\n 'nu_loss': nu_loss_value,\n }\n\n return new_state, metrics\n\n # General learner book-keeping and loggers.\n self._counter = counter or counting.Counter()\n self._logger = logger or loggers.make_default_logger(\n 'learner',\n asynchronous=True,\n serialize_fn=utils.fetch_devicearray,\n steps_key=self._counter.get_steps_key())\n\n # Iterator on demonstration transitions.\n self._iterator_demonstrations = iterator_demonstrations\n 
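# Iterator on replay samples generated by the actors.\n 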
self._iterator_replay = iterator_replay\n\n self._sgd_step = jax.jit(utils.process_multiple_batches(\n sgd_step, num_sgd_steps_per_step))\n\n # Create initial state.\n self._state = TrainingState(\n policy_optimizer_state=policy_optimizer_state,\n policy_params=policy_init_params,\n nu_optimizer_state=nu_optimizer_state,\n nu_params=nu_init_params,\n key=rng,\n steps=0,\n )\n\n # Do not record timestamps until after the first learning step is done.\n # This is to avoid including the time it takes for actors to come online and\n # fill the replay buffer.\n self._timestamp = None\n\n def step(self):\n # Get data from replay (dropping extras if any). Note there is no\n # extra data here because we do not insert any into Reverb.\n # TODO(raveman): Add a support for offline training, where we do not consume\n # data from the replay buffer.\n sample = next(self._iterator_replay)\n replay_transitions = types.Transition(*sample.data)\n\n # Get a batch of Transitions from the demonstration.\n demonstration_transitions = next(self._iterator_demonstrations)\n\n self._state, metrics = self._sgd_step(\n self._state, (replay_transitions, demonstration_transitions))\n\n # Compute elapsed time.\n timestamp = time.time()\n elapsed_time = timestamp - self._timestamp if self._timestamp else 0\n self._timestamp = timestamp\n\n # Increment counts and record the current time\n counts = self._counter.increment(steps=1, walltime=elapsed_time)\n\n # Attempts to write the logs.\n self._logger.write({**metrics, **counts})\n\n def get_variables(self, names: List[str]) -> List[Any]:\n variables = {\n 'policy': self._state.policy_params,\n 'nu': self._state.nu_params,\n }\n return [variables[name] for name in names]\n\n def save(self) -> TrainingState:\n return self._state\n\n def restore(self, state: TrainingState):\n self._state = state\n" }, { "alpha_fraction": 0.6750937700271606, "alphanum_fraction": 0.6895402073860168, "avg_line_length": 39.90340805053711, "blob_id": "35abcbdd34f3482f644480350b42c52e49e12662", "content_id": "3e1d70ab313821f203432d4c0a387a7c69b17434", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7199, "license_type": "permissive", "max_line_length": 101, "num_lines": 176, "path": "/acme/agents/jax/mpo/config.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Defines the available MPO configuration options.\"\"\"\n\nimport dataclasses\nfrom typing import Callable, Optional, Union\n\nfrom acme import types\nfrom acme.agents.jax.mpo import types as mpo_types\nimport numpy as np\nimport rlax\n\n\[email protected]\nclass MPOConfig:\n \"\"\"MPO agent configuration.\"\"\"\n\n batch_size: int = 256 # Total batch size across all learner devices.\n discount: float = 0.99\n discrete_policy: bool = False\n\n # Specification of the type of experience the learner will consume.\n experience_type: mpo_types.ExperienceType = mpo_types.FromTransitions(\n n_step=5)\n num_stacked_observations: int = 1\n # Optional data-augmentation transformation for observations.\n observation_transform: Optional[Callable[[types.NestedTensor],\n types.NestedTensor]] = None\n\n # Specification of replay, e.g., min/max size, pure or mixed.\n # NOTE: When replay_fraction = 1.0, this reverts to pure replay and the online\n # queue is not created.\n replay_fraction: float = 1.0 # Fraction of replay data (vs online) per batch.\n samples_per_insert: Optional[float] = 32.0\n min_replay_size: int = 1_000\n max_replay_size: int = 1_000_000\n online_queue_capacity: int = 0 # If not set, will use 4 * online_batch_size.\n\n # Critic training configuration.\n critic_type: mpo_types.CriticType = mpo_types.CriticType.MIXTURE_OF_GAUSSIANS\n value_tx_pair: rlax.TxPair = rlax.IDENTITY_PAIR\n use_retrace: bool = False\n retrace_lambda: float = 0.95\n reward_clip: float = np.float32('inf') # pytype: disable=annotation-type-mismatch # numpy-scalars\n use_online_policy_to_bootstrap: bool = False\n use_stale_state: bool = False\n\n # Policy training configuration.\n num_samples: int = 20 # Number of MPO action samples.\n policy_loss_config: Optional[mpo_types.PolicyLossConfig] = None\n policy_eval_stochastic: bool = True\n policy_eval_num_val_samples: int = 128\n\n # Optimizer configuration.\n learning_rate: Union[float, Callable[[int], float]] = 1e-4\n dual_learning_rate: Union[float, Callable[[int], float]] = 1e-2\n grad_norm_clip: float = 40.\n adam_b1: float = 0.9\n adam_b2: float = 0.999\n weight_decay: float = 0.0\n use_cosine_lr_decay: bool = False\n cosine_lr_decay_warmup_steps: int = 3000\n\n # Set the target update period or rate depending on whether you want a\n # periodic or incremental (exponential weighted average) target update.\n # Exactly one must be specified (not None).\n target_update_period: Optional[int] = 100\n target_update_rate: Optional[float] = None\n variable_update_period: int = 1000\n\n # Configuring the mixture of policy and critic losses.\n policy_loss_scale: float = 1.0\n critic_loss_scale: float = 1.0\n\n # Optional roll-out loss configuration (off by default).\n model_rollout_length: int = 0\n rollout_policy_loss_scale: float = 1.0\n rollout_bc_policy_loss_scale: float = 1.0\n rollout_critic_loss_scale: float = 1.0\n rollout_reward_loss_scale: float = 1.0\n\n jit_learner: bool = True\n\n def 
__post_init__(self):\n if ((self.target_update_period and self.target_update_rate) or\n (self.target_update_period is None and\n self.target_update_rate is None)):\n raise ValueError(\n 'Exactly one of target_update_{period|rate} must be set.'\n f' Received target_update_period={self.target_update_period} and'\n f' target_update_rate={self.target_update_rate}.')\n\n online_batch_size = int(self.batch_size * (1. - self.replay_fraction))\n if not self.online_queue_capacity:\n # Note: larger capacities mean the online data is more \"stale\". This seems\n # a reasonable default for now.\n self.online_queue_capacity = int(4 * online_batch_size)\n self.online_queue_capacity = max(self.online_queue_capacity,\n online_batch_size + 1)\n\n if self.samples_per_insert is not None and self.replay_fraction < 1:\n raise ValueError(\n 'Cannot set samples_per_insert when using a mixed replay (i.e when '\n '0 < replay_fraction < 1). Received:\\n'\n f'\\tsamples_per_insert={self.samples_per_insert} and\\n'\n f'\\treplay_fraction={self.replay_fraction}.')\n\n if (0 < self.replay_fraction < 1 and\n self.min_replay_size > self.online_queue_capacity):\n raise ValueError('When mixing replay with an online queue, min replay '\n 'size must not be larger than the queue capacity.')\n\n if (isinstance(self.experience_type, mpo_types.FromTransitions) and\n self.num_stacked_observations > 1):\n raise ValueError(\n 'Agent-side frame-stacking is currently only supported when learning '\n 'from sequences. Consider environment-side frame-stacking instead.')\n\n if self.critic_type == mpo_types.CriticType.CATEGORICAL:\n if self.model_rollout_length > 0:\n raise ValueError(\n 'Model rollouts are not supported for the Categorical critic')\n if not isinstance(self.experience_type, mpo_types.FromTransitions):\n raise ValueError(\n 'Categorical critic only supports experience_type=FromTransitions')\n if self.use_retrace:\n raise ValueError('retrace is not supported for the Categorical critic')\n\n if self.model_rollout_length > 0 and not self.discrete_policy:\n if (self.rollout_policy_loss_scale or self.rollout_bc_policy_loss_scale):\n raise ValueError('Policy rollout losses are only supported in the '\n 'discrete policy case.')\n\n\ndef _compute_spi_from_replay_fraction(replay_fraction: float) -> float:\n \"\"\"Computes an estimated samples_per_insert from a replay_fraction.\n\n Assumes actors simultaneously add to both the queue and replay in a mixed\n replay setup. 
Since the online queue sets samples_per_insert = 1, then the\n total SPI can be calculated as:\n\n SPI = B / O = O / (1 - f) / O = 1 / (1 - f).\n\n Key:\n B: total batch size\n O: online batch size\n f: replay fraction.\n\n For example, replay_fraction = 0.75 gives SPI = 1 / (1 - 0.75) = 4, i.e.\n each inserted item is sampled four times on average.\n\n Args:\n replay_fraction: fraction of a batch size taken from replay (as opposed to\n the queue of online experience) in a mixed replay setting.\n\n Returns:\n An estimate of the samples_per_insert value to produce comparable runs in\n the pure replay setting.\n \"\"\"\n return 1 / (1 - replay_fraction)\n\n\ndef _compute_num_inserts_per_actor_step(samples_per_insert: float,\n batch_size: int,\n sequence_period: int = 1) -> float:\n \"\"\"Estimates the number of inserts per actor step.\"\"\"\n return sequence_period * batch_size / samples_per_insert\n" }, { "alpha_fraction": 0.7079513669013977, "alphanum_fraction": 0.7174929976463318, "avg_line_length": 36.11805725097656, "blob_id": "813c76b1f9be46933618f070110d7ad20cd17cc3", "content_id": "6684278eb806d157de6f71ab335892db8fc25725", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5345, "license_type": "permissive", "max_line_length": 97, "num_lines": 144, "path": "/examples/offline/run_offline_td3_jax.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"An example offline TD3 running on locomotion datasets (mujoco) from D4rl.\"\"\"\n\nfrom absl import app\nfrom absl import flags\nimport acme\nfrom acme import specs\nfrom acme.agents.jax import actor_core as actor_core_lib\nfrom acme.agents.jax import actors\nfrom acme.agents.jax import td3\nfrom acme.datasets import tfds\nfrom acme.examples.offline import helpers as gym_helpers\nfrom acme.jax import variable_utils\nfrom acme.types import Transition\nfrom acme.utils import loggers\nimport haiku as hk\nimport jax\nimport optax\nimport reverb\nimport rlds\nimport tensorflow as tf\nimport tree\n\n# Agent flags\nflags.DEFINE_integer('batch_size', 64, 'Batch size.')\nflags.DEFINE_integer('evaluate_every', 20, 'Evaluation period.')\nflags.DEFINE_integer('evaluation_episodes', 10, 'Evaluation episodes.')\nflags.DEFINE_integer(\n 'num_demonstrations', 10,\n 'Number of demonstration episodes to load from the dataset. 
If None, loads the full dataset.'\n)\nflags.DEFINE_integer('seed', 0, 'Random seed for learner and evaluator.')\n# TD3 specific flags.\nflags.DEFINE_float('discount', 0.99, 'Discount.')\nflags.DEFINE_float('policy_learning_rate', 3e-4, 'Policy learning rate.')\nflags.DEFINE_float('critic_learning_rate', 3e-4, 'Critic learning rate.')\nflags.DEFINE_float('bc_alpha', 2.5,\n 'Add a bc regularization term to the policy loss. '\n 'If set to None, TD3 is run without bc regularization.')\nflags.DEFINE_bool(\n 'use_sarsa_target', True,\n 'Compute on-policy target using iterator actions rather than sampled '\n 'actions.'\n)\n# Environment flags.\nflags.DEFINE_string('env_name', 'HalfCheetah-v2',\n 'Gym mujoco environment name.')\nflags.DEFINE_string(\n 'dataset_name', 'd4rl_mujoco_halfcheetah/v2-medium',\n 'D4rl dataset name. Can be any locomotion dataset from '\n 'https://www.tensorflow.org/datasets/catalog/overview#d4rl.')\n\nFLAGS = flags.FLAGS\n\n\ndef _add_next_action_extras(double_transitions: Transition\n ) -> reverb.ReplaySample:\n # As TD3 is online by default, it expects an iterator over replay samples.\n info = tree.map_structure(lambda dtype: tf.ones([], dtype),\n reverb.SampleInfo.tf_dtypes())\n return reverb.ReplaySample(\n info=info,\n data=Transition(\n observation=double_transitions.observation[0],\n action=double_transitions.action[0],\n reward=double_transitions.reward[0],\n discount=double_transitions.discount[0],\n next_observation=double_transitions.next_observation[0],\n extras={'next_action': double_transitions.action[1]}))\n\n\ndef main(_):\n key = jax.random.PRNGKey(FLAGS.seed)\n key_demonstrations, key_learner = jax.random.split(key, 2)\n\n # Create an environment and grab the spec.\n environment = gym_helpers.make_environment(task=FLAGS.env_name)\n environment_spec = specs.make_environment_spec(environment)\n\n # Get a demonstrations dataset with next_actions extra.\n transitions = tfds.get_tfds_dataset(\n FLAGS.dataset_name, FLAGS.num_demonstrations)\n double_transitions = rlds.transformations.batch(\n transitions, size=2, shift=1, drop_remainder=True)\n transitions = double_transitions.map(_add_next_action_extras)\n demonstrations = tfds.JaxInMemoryRandomSampleIterator(\n transitions, key=key_demonstrations, batch_size=FLAGS.batch_size)\n\n # Create the networks to optimize.\n networks = td3.make_networks(environment_spec)\n\n # Create the learner.\n learner = td3.TD3Learner(\n networks=networks,\n random_key=key_learner,\n discount=FLAGS.discount,\n iterator=demonstrations,\n policy_optimizer=optax.adam(FLAGS.policy_learning_rate),\n critic_optimizer=optax.adam(FLAGS.critic_learning_rate),\n twin_critic_optimizer=optax.adam(FLAGS.critic_learning_rate),\n use_sarsa_target=FLAGS.use_sarsa_target,\n bc_alpha=FLAGS.bc_alpha,\n num_sgd_steps_per_step=1)\n\n def evaluator_network(\n params: hk.Params, key: jax.Array, observation: jax.Array\n ) -> jax.Array:\n del key\n return networks.policy_network.apply(params, observation)\n\n actor_core = actor_core_lib.batched_feed_forward_to_actor_core(\n evaluator_network)\n variable_client = variable_utils.VariableClient(\n learner, 'policy', device='cpu')\n evaluator = actors.GenericActor(\n actor_core, key, variable_client, backend='cpu')\n\n eval_loop = acme.EnvironmentLoop(\n environment=environment,\n actor=evaluator,\n logger=loggers.TerminalLogger('evaluation', time_delta=0.))\n\n # Run the environment loop.\n while True:\n for _ in range(FLAGS.evaluate_every):\n learner.step()\n 
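# Evaluate the current policy after each block of learner steps.\n 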
eval_loop.run(FLAGS.evaluation_episodes)\n\n\nif __name__ == '__main__':\n app.run(main)\n" }, { "alpha_fraction": 0.6596663594245911, "alphanum_fraction": 0.6643092632293701, "avg_line_length": 36.49595260620117, "blob_id": "4e882ec7f5d2e0188c715984b4606cd27c2479dd", "content_id": "c16945927f66095473512d485b472e5f36b0a7f3", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 18523, "license_type": "permissive", "max_line_length": 88, "num_lines": 494, "path": "/acme/jax/networks/distributional.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Haiku modules that output tfd.Distributions.\"\"\"\n\nfrom typing import Any, List, Optional, Sequence, Union, Callable\n\nimport chex\nimport haiku as hk\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nimport tensorflow_probability as tf_tfp\nimport tensorflow_probability.substrates.jax as tfp\n\nhk_init = hk.initializers\ntfd = tfp.distributions\n_MIN_SCALE = 1e-4\nInitializer = hk.initializers.Initializer\n\n\nclass CategoricalHead(hk.Module):\n \"\"\"Module that produces a categorical distribution with the given number of values.\"\"\"\n\n def __init__(\n self,\n num_values: Union[int, List[int]],\n dtype: Optional[Any] = jnp.int32,\n w_init: Optional[Initializer] = None,\n name: Optional[str] = None,\n ):\n super().__init__(name=name)\n self._dtype = dtype\n self._logit_shape = num_values\n self._linear = hk.Linear(np.prod(num_values), w_init=w_init)\n\n def __call__(self, inputs: jnp.ndarray) -> tfd.Distribution:\n logits = self._linear(inputs)\n if not isinstance(self._logit_shape, int):\n logits = hk.Reshape(self._logit_shape)(logits)\n return tfd.Categorical(logits=logits, dtype=self._dtype)\n\n\nclass GaussianMixture(hk.Module):\n \"\"\"Module that outputs a Gaussian Mixture Distribution.\"\"\"\n\n def __init__(self,\n num_dimensions: int,\n num_components: int,\n multivariate: bool,\n init_scale: Optional[float] = None,\n append_singleton_event_dim: bool = False,\n reinterpreted_batch_ndims: Optional[int] = None,\n transformation_fn: Optional[Callable[[tfd.Distribution],\n tfd.Distribution]] = None,\n name: str = 'GaussianMixture'):\n \"\"\"Initialization.\n\n Args:\n num_dimensions: dimensionality of the output distribution.\n num_components: number of mixture components.\n multivariate: whether the resulting distribution is multivariate or not.\n init_scale: the initial scale for the Gaussian mixture components.\n append_singleton_event_dim: (univariate only) Whether to add an extra\n singleton dimension to the event shape.\n reinterpreted_batch_ndims: (univariate only) Number of batch dimensions to\n reinterpret as event dimensions.\n transformation_fn: Distribution transform such as TanhTransformation\n applied to individual components.\n name: name of the module passed to the hk.Module parent class.\n \"\"\"\n 
super().__init__(name=name)\n\n self._num_dimensions = num_dimensions\n self._num_components = num_components\n self._multivariate = multivariate\n self._append_singleton_event_dim = append_singleton_event_dim\n self._reinterpreted_batch_ndims = reinterpreted_batch_ndims\n\n if init_scale is not None:\n self._scale_factor = init_scale / jax.nn.softplus(0.)\n else:\n self._scale_factor = 1.0 # Corresponds to init_scale = softplus(0).\n\n self._transformation_fn = transformation_fn\n\n def __call__(self,\n inputs: jnp.ndarray,\n low_noise_policy: bool = False) -> tfd.Distribution:\n \"\"\"Runs the network on the given inputs.\n\n Args:\n inputs: hidden activations of the policy network body.\n low_noise_policy: whether to set vanishingly small scales for each\n component. If this flag is set to True, the policy is effectively run\n without Gaussian noise.\n\n Returns:\n Mixture Gaussian distribution.\n \"\"\"\n\n # Define the weight initializer.\n w_init = hk.initializers.VarianceScaling(scale=1e-5)\n\n # Create a layer that outputs the unnormalized log-weights.\n if self._multivariate:\n logits_size = self._num_components\n else:\n logits_size = self._num_dimensions * self._num_components\n logit_layer = hk.Linear(logits_size, w_init=w_init)\n\n # Create two layers that output a location and a scale, respectively, for\n # each dimension and each component.\n loc_layer = hk.Linear(\n self._num_dimensions * self._num_components, w_init=w_init)\n scale_layer = hk.Linear(\n self._num_dimensions * self._num_components, w_init=w_init)\n\n # Compute logits, locs, and scales if necessary.\n logits = logit_layer(inputs)\n locs = loc_layer(inputs)\n\n # When a low_noise_policy is requested, set the scales to their minimum value.\n if low_noise_policy:\n scales = jnp.full(locs.shape, _MIN_SCALE)\n else:\n scales = scale_layer(inputs)\n scales = self._scale_factor * jax.nn.softplus(scales) + _MIN_SCALE\n\n if self._multivariate:\n components_class = tfd.MultivariateNormalDiag\n shape = [-1, self._num_components, self._num_dimensions] # [B, C, D]\n # In this case, no need to reshape logits as they are in the correct shape\n # already, namely [batch_size, num_components].\n else:\n components_class = tfd.Normal\n shape = [-1, self._num_dimensions, self._num_components] # [B, D, C]\n if self._append_singleton_event_dim:\n shape.insert(2, 1) # [B, D, 1, C]\n logits = logits.reshape(shape)\n\n # Reshape the mixture's location and scale parameters appropriately.\n locs = locs.reshape(shape)\n scales = scales.reshape(shape)\n\n if self._multivariate:\n components_distribution = components_class(loc=locs, scale_diag=scales)\n else:\n components_distribution = components_class(loc=locs, scale=scales)\n\n # Transform the component distributions in the mixture.\n if self._transformation_fn:\n components_distribution = self._transformation_fn(components_distribution)\n\n # Create the mixture distribution.\n distribution = tfd.MixtureSameFamily(\n mixture_distribution=tfd.Categorical(logits=logits),\n components_distribution=components_distribution)\n\n if not self._multivariate:\n distribution = tfd.Independent(\n distribution,\n reinterpreted_batch_ndims=self._reinterpreted_batch_ndims)\n\n return distribution\n\n\nclass TanhTransformedDistribution(tfd.TransformedDistribution):\n \"\"\"Distribution followed by tanh.\"\"\"\n\n def __init__(self, distribution, threshold=.999, validate_args=False):\n \"\"\"Initialize the distribution.\n\n Args:\n distribution: The distribution to transform.\n threshold: Clipping value 
of the action when computing the logprob.\n validate_args: Passed to super class.\n \"\"\"\n super().__init__(\n distribution=distribution,\n bijector=tfp.bijectors.Tanh(),\n validate_args=validate_args)\n # Computes the log of the average probability distribution outside the\n # clipping range, i.e. on the interval [-inf, -atanh(threshold)] for\n # log_prob_left and [atanh(threshold), inf] for log_prob_right.\n self._threshold = threshold\n inverse_threshold = self.bijector.inverse(threshold)\n # average(pdf) = p/epsilon\n # So log(average(pdf)) = log(p) - log(epsilon)\n log_epsilon = jnp.log(1. - threshold)\n # Those 2 values are differentiable w.r.t. model parameters, such that the\n # gradient is defined everywhere.\n self._log_prob_left = self.distribution.log_cdf(\n -inverse_threshold) - log_epsilon\n self._log_prob_right = self.distribution.log_survival_function(\n inverse_threshold) - log_epsilon\n\n def log_prob(self, event):\n # Without this clip there would be NaNs in the inner jnp.where and that\n # causes issues for some reason.\n event = jnp.clip(event, -self._threshold, self._threshold)\n # The inverse image of {threshold} is the interval [atanh(threshold), inf]\n # which has a probability of \"log_prob_right\" under the given distribution.\n return jnp.where(\n event <= -self._threshold, self._log_prob_left,\n jnp.where(event >= self._threshold, self._log_prob_right,\n super().log_prob(event)))\n\n def mode(self):\n return self.bijector.forward(self.distribution.mode())\n\n def entropy(self, seed=None):\n # We return an estimation using a single sample of the log_det_jacobian.\n # We can still do some backpropagation with this estimate.\n return self.distribution.entropy() + self.bijector.forward_log_det_jacobian(\n self.distribution.sample(seed=seed), event_ndims=0)\n\n @classmethod\n def _parameter_properties(cls, dtype: Optional[Any], num_classes=None):\n td_properties = super()._parameter_properties(dtype,\n num_classes=num_classes)\n del td_properties['bijector']\n return td_properties\n\n\nclass NormalTanhDistribution(hk.Module):\n \"\"\"Module that produces a TanhTransformedDistribution distribution.\"\"\"\n\n def __init__(self,\n num_dimensions: int,\n min_scale: float = 1e-3,\n w_init: hk_init.Initializer = hk_init.VarianceScaling(\n 1.0, 'fan_in', 'uniform'),\n b_init: hk_init.Initializer = hk_init.Constant(0.)):\n \"\"\"Initialization.\n\n 
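Produces a diagonal Normal whose location and softplus-transformed scale\n come from separate linear layers, squashed through a tanh bijector.\n\n 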
Args:\n num_dimensions: Number of dimensions of MVN distribution.\n init_scale: Initial standard deviation.\n min_scale: Minimum standard deviation.\n w_init: Initialization for linear layer weights.\n b_init: Initialization for linear layer biases.\n \"\"\"\n super().__init__(name='MultivariateNormalDiagHead')\n self._min_scale = min_scale\n self._init_scale = init_scale\n self._loc_layer = hk.Linear(num_dimensions, w_init=w_init, b_init=b_init)\n self._scale_layer = hk.Linear(num_dimensions, w_init=w_init, b_init=b_init)\n\n def __call__(self, inputs: jnp.ndarray) -> tfd.Distribution:\n loc = self._loc_layer(inputs)\n scale = jax.nn.softplus(self._scale_layer(inputs))\n scale *= self._init_scale / jax.nn.softplus(0.)\n scale += self._min_scale\n return tfd.MultivariateNormalDiag(loc=loc, scale_diag=scale)\n\n\nclass CategoricalValueHead(hk.Module):\n \"\"\"Network head that produces a categorical distribution and value.\"\"\"\n\n def __init__(\n self,\n num_values: int,\n name: Optional[str] = None,\n ):\n super().__init__(name=name)\n self._logit_layer = hk.Linear(num_values)\n self._value_layer = hk.Linear(1)\n\n def __call__(self, inputs: jnp.ndarray):\n logits = self._logit_layer(inputs)\n value = jnp.squeeze(self._value_layer(inputs), axis=-1)\n return (tfd.Categorical(logits=logits), value)\n\n\nclass DiscreteValued(hk.Module):\n \"\"\"C51-style head.\n\n For each action, it produces the logits for a discrete distribution over\n atoms. Therefore, the returned logits represents several distributions, one\n for each action.\n \"\"\"\n\n def __init__(\n self,\n num_actions: int,\n head_units: int = 512,\n num_atoms: int = 51,\n v_min: float = -1.0,\n v_max: float = 1.0,\n ):\n super().__init__('DiscreteValued')\n self._num_actions = num_actions\n self._num_atoms = num_atoms\n self._atoms = jnp.linspace(v_min, v_max, self._num_atoms)\n self._network = hk.nets.MLP([head_units, num_actions * num_atoms])\n\n def __call__(self, inputs: jnp.ndarray):\n q_logits = self._network(inputs)\n q_logits = jnp.reshape(q_logits, (-1, self._num_actions, self._num_atoms))\n q_dist = jax.nn.softmax(q_logits)\n q_values = jnp.sum(q_dist * self._atoms, axis=2)\n q_values = jax.lax.stop_gradient(q_values)\n return q_values, q_logits, self._atoms\n\n\nclass CategoricalCriticHead(hk.Module):\n \"\"\"Critic head that uses a categorical to represent action values.\"\"\"\n\n def __init__(self,\n num_bins: int = 601,\n vmax: Optional[float] = None,\n vmin: Optional[float] = None,\n w_init: hk_init.Initializer = hk_init.VarianceScaling(1e-5)):\n super().__init__(name='categorical_critic_head')\n vmax = vmax if vmax is not None else 0.5 * (num_bins - 1)\n vmin = vmin if vmin is not None else -1.0 * vmax\n\n self._head = DiscreteValuedTfpHead(\n vmin=vmin,\n vmax=vmax,\n logits_shape=(1,),\n num_atoms=num_bins,\n w_init=w_init)\n\n def __call__(self, embedding: chex.Array) -> tfd.Distribution:\n output = self._head(embedding)\n return output\n\n\nclass DiscreteValuedTfpHead(hk.Module):\n \"\"\"Represents a parameterized discrete valued distribution.\n\n The returned distribution is essentially a `tfd.Categorical` that knows its\n support and thus can compute the mean value.\n \"\"\"\n\n def __init__(self,\n vmin: float,\n vmax: float,\n num_atoms: int,\n logits_shape: Optional[Sequence[int]] = None,\n w_init: Optional[Initializer] = None,\n b_init: Optional[Initializer] = None):\n \"\"\"Initialization.\n\n If vmin and vmax have shape S, this will store the category values as a\n Tensor of shape (S*, num_atoms).\n\n 
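For example, scalar vmin and vmax with num_atoms=51 give a support of\n shape (51,).\n\n 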
Args:\n vmin: Minimum of the value range.\n vmax: Maximum of the value range.\n num_atoms: Number of discrete values (atoms) in the support.\n logits_shape: The shape of the logits, excluding batch and num_atoms\n dimensions.\n w_init: Initialization for linear layer weights.\n b_init: Initialization for linear layer biases.\n \"\"\"\n super().__init__(name='DiscreteValuedHead')\n self._values = np.linspace(vmin, vmax, num=num_atoms, axis=-1)\n if not logits_shape:\n logits_shape = ()\n self._logits_shape = logits_shape + (num_atoms,)\n self._w_init = w_init\n self._b_init = b_init\n\n def __call__(self, inputs: chex.Array) -> tfd.Distribution:\n net = hk.Linear(\n np.prod(self._logits_shape), w_init=self._w_init, b_init=self._b_init)\n logits = net(inputs)\n logits = hk.Reshape(self._logits_shape, preserve_dims=1)(logits)\n return DiscreteValuedTfpDistribution(values=self._values, logits=logits)\n\n\n@tf_tfp.experimental.auto_composite_tensor\nclass DiscreteValuedTfpDistribution(tfd.Categorical):\n \"\"\"This is a generalization of a categorical distribution.\n\n The support for the DiscreteValued distribution can be any real valued range,\n whereas the categorical distribution has support [0, n_categories - 1] or\n [1, n_categories]. This generalization allows us to take the mean of the\n distribution over its support.\n \"\"\"\n\n def __init__(self,\n values: chex.Array,\n logits: Optional[chex.Array] = None,\n probs: Optional[chex.Array] = None,\n name: str = 'DiscreteValuedDistribution'):\n \"\"\"Initialization.\n\n Args:\n values: Values making up support of the distribution. Should have a shape\n compatible with logits.\n logits: An N-D Tensor, N >= 1, representing the log probabilities of a set\n of Categorical distributions. The first N - 1 dimensions index into a\n batch of independent distributions and the last dimension indexes into\n the classes.\n probs: An N-D Tensor, N >= 1, representing the probabilities of a set of\n Categorical distributions. The first N - 1 dimensions index into a batch\n of independent distributions and the last dimension represents a vector\n of probabilities for each class. 
Only one of logits or probs should be\n passed in.\n name: Name of the distribution object.\n \"\"\"\n parameters = dict(locals())\n self._values = np.asarray(values)\n\n if logits is not None:\n logits = jnp.asarray(logits)\n chex.assert_shape(logits, (..., *self._values.shape))\n\n if probs is not None:\n probs = jnp.asarray(probs)\n chex.assert_shape(probs, (..., *self._values.shape))\n\n super().__init__(logits=logits, probs=probs, name=name)\n\n self._parameters = parameters\n\n @property\n def values(self):\n return self._values\n\n @classmethod\n def _parameter_properties(cls, dtype, num_classes=None):\n return dict(\n values=tfp.util.ParameterProperties(\n event_ndims=None,\n shape_fn=lambda shape: (num_classes,),\n specifies_shape=True),\n logits=tfp.util.ParameterProperties(event_ndims=1),\n probs=tfp.util.ParameterProperties(event_ndims=1, is_preferred=False))\n\n def _sample_n(self, key: chex.PRNGKey, n: int) -> chex.Array:\n indices = super()._sample_n(key=key, n=n)\n return jnp.take_along_axis(self._values, indices, axis=-1)\n\n def mean(self) -> chex.Array:\n \"\"\"Overrides the Categorical mean by incorporating category values.\"\"\"\n return jnp.sum(self.probs_parameter() * self._values, axis=-1)\n\n def variance(self) -> chex.Array:\n \"\"\"Overrides the Categorical variance by incorporating category values.\"\"\"\n dist_squared = jnp.square(jnp.expand_dims(self.mean(), -1) - self._values)\n return jnp.sum(self.probs_parameter() * dist_squared, axis=-1)\n\n def _event_shape(self):\n return jnp.zeros((), dtype=jnp.int32)\n\n def _event_shape_tensor(self):\n return []\n" }, { "alpha_fraction": 0.6329599022865295, "alphanum_fraction": 0.6380350589752197, "avg_line_length": 39.320465087890625, "blob_id": "8112e5a8fef91a82c791b2f035d03d9e5cf036ec", "content_id": "101f249b9be79523ee8d87514f5439579d6d717a", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 20886, "license_type": "permissive", "max_line_length": 89, "num_lines": 518, "path": "/acme/agents/jax/ppo/learning.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Learner for the PPO agent.\"\"\"\n\nfrom typing import Dict, Iterator, List, NamedTuple, Optional, Tuple\n\nimport acme\nfrom acme import types\nfrom acme.agents.jax.ppo import networks\nfrom acme.agents.jax.ppo import normalization\nfrom acme.jax import networks as networks_lib\nfrom acme.jax.utils import get_from_first_device\nfrom acme.utils import counting\nfrom acme.utils import loggers\nimport jax\nimport jax.numpy as jnp\nimport optax\nimport reverb\nimport rlax\n\n\nPPOParams = networks.PPOParams\n\n\nclass Batch(NamedTuple):\n \"\"\"A batch of data; all shapes are expected to be [B, ...].\"\"\"\n observations: types.NestedArray\n actions: jnp.ndarray\n advantages: jnp.ndarray\n\n # Target value estimate used to bootstrap the value function.\n target_values: jnp.ndarray\n\n # Value estimate and action log-prob at behavior time.\n behavior_values: jnp.ndarray\n behavior_log_probs: jnp.ndarray\n\n\nclass TrainingState(NamedTuple):\n \"\"\"Training state for the PPO learner.\"\"\"\n params: PPOParams\n opt_state: optax.OptState\n random_key: networks_lib.PRNGKey\n\n # Optional counter used for exponential moving average zero debiasing\n # Using float32 as it covers a larger range than int32. 
If using int64 we\n # would need to do jax_enable_x64.\n ema_counter: Optional[jnp.float32] = None\n\n # Optional parameter for maintaining a running estimate of the scale of\n # advantage estimates\n biased_advantage_scale: Optional[networks_lib.Params] = None\n advantage_scale: Optional[networks_lib.Params] = None\n\n # Optional parameter for maintaining a running estimate of the mean and\n # standard deviation of value estimates\n biased_value_first_moment: Optional[networks_lib.Params] = None\n biased_value_second_moment: Optional[networks_lib.Params] = None\n value_mean: Optional[networks_lib.Params] = None\n value_std: Optional[networks_lib.Params] = None\n\n # Optional parameters for observation normalization\n obs_normalization_params: Optional[normalization.NormalizationParams] = None\n\n\nclass PPOLearner(acme.Learner):\n \"\"\"Learner for PPO.\"\"\"\n\n def __init__(\n self,\n ppo_networks: networks.PPONetworks,\n iterator: Iterator[reverb.ReplaySample],\n optimizer: optax.GradientTransformation,\n random_key: networks_lib.PRNGKey,\n ppo_clipping_epsilon: float = 0.2,\n normalize_advantage: bool = True,\n normalize_value: bool = False,\n normalization_ema_tau: float = 0.995,\n clip_value: bool = False,\n value_clipping_epsilon: float = 0.2,\n max_abs_reward: Optional[float] = None,\n gae_lambda: float = 0.95,\n discount: float = 0.99,\n entropy_cost: float = 0.,\n value_cost: float = 1.,\n num_epochs: int = 4,\n num_minibatches: int = 1,\n counter: Optional[counting.Counter] = None,\n logger: Optional[loggers.Logger] = None,\n log_global_norm_metrics: bool = False,\n metrics_logging_period: int = 100,\n pmap_axis_name: str = 'devices',\n obs_normalization_fns: Optional[normalization.NormalizationFns] = None,\n ):\n self.local_learner_devices = jax.local_devices()\n self.num_local_learner_devices = jax.local_device_count()\n self.learner_devices = jax.devices()\n self.num_epochs = num_epochs\n self.num_minibatches = num_minibatches\n self.metrics_logging_period = metrics_logging_period\n self._num_full_update_steps = 0\n self._iterator = iterator\n\n normalize_obs = obs_normalization_fns is not None\n if normalize_obs:\n assert obs_normalization_fns is not None\n\n # Set up logging/counting.\n self._counter = counter or counting.Counter()\n self._logger = logger or loggers.make_default_logger('learner')\n\n def ppo_loss(\n params: networks_lib.Params,\n observations: networks_lib.Observation,\n actions: networks_lib.Action,\n advantages: jnp.ndarray,\n target_values: networks_lib.Value,\n behavior_values: networks_lib.Value,\n behavior_log_probs: networks_lib.LogProb,\n value_mean: jnp.ndarray,\n value_std: jnp.ndarray,\n key: networks_lib.PRNGKey,\n ) -> Tuple[jnp.ndarray, Dict[str, jnp.ndarray]]:\n \"\"\"PPO loss for the policy and the critic.\"\"\"\n distribution_params, values = ppo_networks.network.apply(\n params, observations)\n if normalize_value:\n # values = values * jnp.fmax(value_std, 1e-6) + value_mean\n target_values = (target_values - value_mean) / jnp.fmax(value_std, 1e-6)\n policy_log_probs = ppo_networks.log_prob(distribution_params, actions)\n key, sub_key = jax.random.split(key)\n policy_entropies = ppo_networks.entropy(distribution_params, sub_key)\n\n # Compute the policy losses\n rhos = jnp.exp(policy_log_probs - behavior_log_probs)\n clipped_ppo_policy_loss = rlax.clipped_surrogate_pg_loss(\n rhos, advantages, ppo_clipping_epsilon)\n policy_entropy_loss = -jnp.mean(policy_entropies)\n total_policy_loss = (\n clipped_ppo_policy_loss + entropy_cost * 
policy_entropy_loss)\n\n # Compute the critic losses\n unclipped_value_loss = (values - target_values)**2\n\n if clip_value:\n # Clip values to reduce variability during critic training.\n clipped_values = behavior_values + jnp.clip(values - behavior_values,\n -value_clipping_epsilon,\n value_clipping_epsilon)\n clipped_value_error = target_values - clipped_values\n clipped_value_loss = clipped_value_error ** 2\n value_loss = jnp.mean(jnp.fmax(unclipped_value_loss,\n clipped_value_loss))\n else:\n # For Mujoco envs clipping hurts a lot. Evidenced by Figure 43 in\n # https://arxiv.org/pdf/2006.05990.pdf\n value_loss = jnp.mean(unclipped_value_loss)\n\n total_ppo_loss = total_policy_loss + value_cost * value_loss\n return total_ppo_loss, { # pytype: disable=bad-return-type # numpy-scalars\n 'loss_total': total_ppo_loss,\n 'loss_policy_total': total_policy_loss,\n 'loss_policy_pg': clipped_ppo_policy_loss,\n 'loss_policy_entropy': policy_entropy_loss,\n 'loss_critic': value_loss,\n }\n\n ppo_loss_grad = jax.grad(ppo_loss, has_aux=True)\n\n def sgd_step(state: TrainingState, minibatch: Batch):\n observations = minibatch.observations\n actions = minibatch.actions\n advantages = minibatch.advantages\n target_values = minibatch.target_values\n behavior_values = minibatch.behavior_values\n behavior_log_probs = minibatch.behavior_log_probs\n key, sub_key = jax.random.split(state.random_key)\n\n loss_grad, metrics = ppo_loss_grad(\n state.params.model_params,\n observations,\n actions,\n advantages,\n target_values,\n behavior_values,\n behavior_log_probs,\n state.value_mean,\n state.value_std,\n sub_key,\n )\n\n # Apply updates\n loss_grad = jax.lax.pmean(loss_grad, axis_name=pmap_axis_name)\n updates, opt_state = optimizer.update(loss_grad, state.opt_state)\n model_params = optax.apply_updates(state.params.model_params, updates)\n params = PPOParams(\n model_params=model_params,\n num_sgd_steps=state.params.num_sgd_steps + 1)\n\n if log_global_norm_metrics:\n metrics['norm_grad'] = optax.global_norm(loss_grad)\n metrics['norm_updates'] = optax.global_norm(updates)\n\n state = state._replace(params=params, opt_state=opt_state, random_key=key)\n\n return state, metrics\n\n def epoch_update(\n carry: Tuple[TrainingState, Batch],\n unused_t: Tuple[()],\n ):\n state, carry_batch = carry\n\n # Shuffling into minibatches\n batch_size = carry_batch.advantages.shape[0]\n key, sub_key = jax.random.split(state.random_key)\n # TODO(kamyar) For efficiency could use same permutation for all epochs\n permuted_batch = jax.tree_util.tree_map(\n lambda x: jax.random.permutation( # pylint: disable=g-long-lambda\n sub_key,\n x,\n axis=0,\n independent=False),\n carry_batch)\n state = state._replace(random_key=key)\n minibatches = jax.tree_util.tree_map(\n lambda x: jnp.reshape( # pylint: disable=g-long-lambda\n x,\n [ # pylint: disable=g-long-lambda\n num_minibatches, batch_size // num_minibatches\n ] + list(x.shape[1:])),\n permuted_batch)\n\n # Scan over the minibatches\n state, metrics = jax.lax.scan(\n sgd_step, state, minibatches, length=num_minibatches)\n metrics = jax.tree_util.tree_map(jnp.mean, metrics)\n\n return (state, carry_batch), metrics\n\n vmapped_network_apply = jax.vmap(\n ppo_networks.network.apply, in_axes=(None, 0), out_axes=0)\n\n def single_device_update(\n state: TrainingState,\n trajectories: types.NestedArray,\n ):\n params_num_sgd_steps_before_update = state.params.num_sgd_steps\n\n # Update the EMA counter and obtain the zero debiasing multiplier\n if normalize_advantage or 
normalize_value:\n ema_counter = state.ema_counter + 1\n state = state._replace(ema_counter=ema_counter)\n zero_debias = 1. / (1. - jnp.power(normalization_ema_tau, ema_counter))\n\n # Extract the data.\n data = trajectories.data\n observations, actions, rewards, termination, extra = (data.observation,\n data.action,\n data.reward,\n data.discount,\n data.extras)\n\n if normalize_obs:\n obs_norm_params = obs_normalization_fns.update(\n state.obs_normalization_params, observations, pmap_axis_name)\n state = state._replace(obs_normalization_params=obs_norm_params)\n observations = obs_normalization_fns.normalize(\n observations, state.obs_normalization_params)\n\n if max_abs_reward is not None:\n # Apply reward clipping.\n rewards = jnp.clip(rewards, -1. * max_abs_reward, max_abs_reward)\n discounts = termination * discount\n behavior_log_probs = extra['log_prob']\n _, behavior_values = vmapped_network_apply(state.params.model_params,\n observations)\n\n if normalize_value:\n batch_value_first_moment = jnp.mean(behavior_values)\n batch_value_second_moment = jnp.mean(behavior_values**2)\n batch_value_first_moment, batch_value_second_moment = jax.lax.pmean(\n (batch_value_first_moment, batch_value_second_moment),\n axis_name=pmap_axis_name)\n\n biased_value_first_moment = (\n normalization_ema_tau * state.biased_value_first_moment +\n (1. - normalization_ema_tau) * batch_value_first_moment)\n biased_value_second_moment = (\n normalization_ema_tau * state.biased_value_second_moment +\n (1. - normalization_ema_tau) * batch_value_second_moment)\n\n value_mean = biased_value_first_moment * zero_debias\n value_second_moment = biased_value_second_moment * zero_debias\n value_std = jnp.sqrt(jax.nn.relu(value_second_moment - value_mean**2))\n\n state = state._replace(\n biased_value_first_moment=biased_value_first_moment,\n biased_value_second_moment=biased_value_second_moment,\n value_mean=value_mean,\n value_std=value_std,\n )\n\n behavior_values = behavior_values * jnp.fmax(state.value_std,\n 1e-6) + state.value_mean\n\n behavior_values = jax.lax.stop_gradient(behavior_values)\n\n # Compute GAE using rlax\n vmapped_rlax_truncated_generalized_advantage_estimation = jax.vmap(\n rlax.truncated_generalized_advantage_estimation,\n in_axes=(0, 0, None, 0))\n advantages = vmapped_rlax_truncated_generalized_advantage_estimation(\n rewards[:, :-1], discounts[:, :-1], gae_lambda, behavior_values)\n advantages = jax.lax.stop_gradient(advantages)\n target_values = behavior_values[:, :-1] + advantages\n target_values = jax.lax.stop_gradient(target_values)\n\n # Exclude the last step - it was only used for bootstrapping.\n # The shape is [num_sequences, num_steps, ..]\n (observations, actions, behavior_log_probs, behavior_values) = (\n jax.tree_util.tree_map(\n lambda x: x[:, :-1],\n (observations, actions, behavior_log_probs, behavior_values),\n )\n )\n\n # Shuffle the data and break into minibatches\n batch_size = advantages.shape[0] * advantages.shape[1]\n batch = Batch(\n observations=observations,\n actions=actions,\n advantages=advantages,\n target_values=target_values,\n behavior_values=behavior_values,\n behavior_log_probs=behavior_log_probs)\n batch = jax.tree_util.tree_map(\n lambda x: jnp.reshape(x, [batch_size] + list(x.shape[2:])), batch)\n\n if normalize_advantage:\n batch_advantage_scale = jnp.mean(jnp.abs(batch.advantages))\n batch_advantage_scale = jax.lax.pmean(batch_advantage_scale,\n pmap_axis_name)\n\n # update the running statistics\n biased_advantage_scale = (\n normalization_ema_tau * 
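# [Editor's aside -- not part of the original file] The advantage computation
# above calls rlax.truncated_generalized_advantage_estimation. For readers,
# this is an equivalent plain-Python recursion -- a sketch under the usual GAE
# definition, not the library's implementation:
import numpy as np

def gae_sketch(rewards, discounts, lambda_, values):
  # `values` has T+1 entries; the extra last value is the bootstrap.
  advantages = np.zeros(len(rewards))
  acc = 0.0
  for t in reversed(range(len(rewards))):
    delta = rewards[t] + discounts[t] * values[t + 1] - values[t]
    acc = delta + discounts[t] * lambda_ * acc
    advantages[t] = acc
  return advantages

print(gae_sketch(np.ones(3), 0.99 * np.ones(3), 0.95, np.zeros(4)))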
state.biased_advantage_scale +\n (1. - normalization_ema_tau) * batch_advantage_scale)\n advantage_scale = biased_advantage_scale * zero_debias\n state = state._replace(\n biased_advantage_scale=biased_advantage_scale,\n advantage_scale=advantage_scale)\n\n # scale the advantages\n scaled_advantages = batch.advantages / jnp.fmax(state.advantage_scale,\n 1e-6)\n batch = batch._replace(advantages=scaled_advantages)\n\n # Scan desired number of epoch updates\n (state, _), metrics = jax.lax.scan(\n epoch_update, (state, batch), (), length=num_epochs)\n metrics = jax.tree_util.tree_map(jnp.mean, metrics)\n\n if normalize_advantage:\n metrics['advantage_scale'] = state.advantage_scale\n\n if normalize_value:\n metrics['value_mean'] = value_mean\n metrics['value_std'] = value_std\n\n delta_params_sgd_steps = (\n data.extras['params_num_sgd_steps'][:, 0] -\n params_num_sgd_steps_before_update)\n metrics['delta_params_sgd_steps_min'] = jnp.min(delta_params_sgd_steps)\n metrics['delta_params_sgd_steps_max'] = jnp.max(delta_params_sgd_steps)\n metrics['delta_params_sgd_steps_mean'] = jnp.mean(delta_params_sgd_steps)\n metrics['delta_params_sgd_steps_std'] = jnp.std(delta_params_sgd_steps)\n\n return state, metrics\n\n pmapped_update_step = jax.pmap(\n single_device_update,\n axis_name=pmap_axis_name,\n devices=self.learner_devices)\n\n def full_update_step(\n state: TrainingState,\n trajectories: types.NestedArray,\n ):\n state, metrics = pmapped_update_step(state, trajectories)\n return state, metrics\n\n self._full_update_step = full_update_step\n\n def make_initial_state(key: networks_lib.PRNGKey) -> TrainingState:\n \"\"\"Initialises the training state (parameters and optimiser state).\"\"\"\n all_keys = jax.random.split(key, num=self.num_local_learner_devices + 1)\n key_init, key_state = all_keys[0], all_keys[1:]\n key_state = [key_state[i] for i in range(self.num_local_learner_devices)]\n key_state = jax.device_put_sharded(key_state, self.local_learner_devices)\n\n initial_params = ppo_networks.network.init(key_init)\n initial_opt_state = optimizer.init(initial_params)\n # Using float32 as it covers a larger range than int32. 
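# [Editor's aside -- not part of the original file] A standalone check of the
# zero-debiasing used for the EMA statistics above: for a constant signal x,
# dividing the biased EMA by (1 - tau**t) recovers x exactly at every step,
# which is the point of the `zero_debias` multiplier. Values are illustrative.
tau_ex, x_ex = 0.995, 3.0
biased_ex = 0.0
for t_ex in range(1, 6):
  biased_ex = tau_ex * biased_ex + (1.0 - tau_ex) * x_ex
  print(t_ex, biased_ex / (1.0 - tau_ex**t_ex))  # prints 3.0 each time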
If using int64 we\n # would need to do jax_enable_x64.\n params_num_sgd_steps = jnp.zeros(shape=(), dtype=jnp.float32)\n\n initial_params = jax.device_put_replicated(initial_params,\n self.local_learner_devices)\n initial_opt_state = jax.device_put_replicated(initial_opt_state,\n self.local_learner_devices)\n params_num_sgd_steps = jax.device_put_replicated(\n params_num_sgd_steps, self.local_learner_devices)\n\n ema_counter = jnp.float32(0)\n ema_counter = jax.device_put_replicated(ema_counter,\n self.local_learner_devices)\n\n init_state = TrainingState(\n params=PPOParams(\n model_params=initial_params, num_sgd_steps=params_num_sgd_steps),\n opt_state=initial_opt_state,\n random_key=key_state,\n ema_counter=ema_counter,\n )\n\n if normalize_advantage:\n biased_advantage_scale = jax.device_put_replicated(\n jnp.zeros([]), self.local_learner_devices)\n advantage_scale = jax.device_put_replicated(\n jnp.zeros([]), self.local_learner_devices)\n\n init_state = init_state._replace(\n biased_advantage_scale=biased_advantage_scale,\n advantage_scale=advantage_scale)\n\n if normalize_value:\n biased_value_first_moment = jax.device_put_replicated(\n jnp.zeros([]), self.local_learner_devices)\n value_mean = biased_value_first_moment\n\n biased_value_second_moment = jax.device_put_replicated(\n jnp.zeros([]), self.local_learner_devices)\n value_second_moment = biased_value_second_moment\n value_std = jnp.sqrt(jax.nn.relu(value_second_moment - value_mean**2))\n\n init_state = init_state._replace(\n biased_value_first_moment=biased_value_first_moment,\n biased_value_second_moment=biased_value_second_moment,\n value_mean=value_mean,\n value_std=value_std)\n\n if normalize_obs:\n obs_norm_params = obs_normalization_fns.init() # pytype: disable=attribute-error\n obs_norm_params = jax.device_put_replicated(obs_norm_params,\n self.local_learner_devices)\n init_state = init_state._replace(\n obs_normalization_params=obs_norm_params)\n\n return init_state\n\n # Initialise training state (parameters and optimizer state).\n self._state = make_initial_state(random_key)\n self._cached_state = get_from_first_device(self._state, as_numpy=True)\n\n def step(self):\n \"\"\"Does a learner step and logs the results.\n\n One learner step consists of (possibly multiple) epochs of PPO updates on\n a batch of NxT steps collected by the actors.\n \"\"\"\n sample = next(self._iterator)\n self._state, results = self._full_update_step(self._state, sample)\n self._cached_state = get_from_first_device(self._state, as_numpy=True)\n\n # Update our counts and record it.\n counts = self._counter.increment(steps=self.num_epochs *\n self.num_minibatches)\n\n # Snapshot and attempt to write logs.\n if self._num_full_update_steps % self.metrics_logging_period == 0:\n results = jax.tree_util.tree_map(jnp.mean, results)\n self._logger.write({**results, **counts})\n\n self._num_full_update_steps += 1\n\n def get_variables(self, names: List[str]) -> List[networks_lib.Params]:\n variables = self._cached_state\n return [getattr(variables, name) for name in names]\n\n def save(self) -> TrainingState:\n return self._cached_state\n\n def restore(self, state: TrainingState):\n # TODO(kamyar) Should the random_key come from self._state instead?\n random_key = state.random_key\n random_key = jax.random.split(\n random_key, num=self.num_local_learner_devices)\n random_key = jax.device_put_sharded(\n [random_key[i] for i in range(self.num_local_learner_devices)],\n self.local_learner_devices)\n\n state = jax.device_put_replicated(state, 
self.local_learner_devices)\n state = state._replace(random_key=random_key)\n self._state = state\n self._cached_state = get_from_first_device(self._state, as_numpy=True)\n" }, { "alpha_fraction": 0.6790501475334167, "alphanum_fraction": 0.6808443069458008, "avg_line_length": 38.31535339355469, "blob_id": "3df491933be22038d7f443e1d88994fc874fbfd3", "content_id": "9dc7c4a5e3de36eecc306613b63d58ed0640a0cf", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9475, "license_type": "permissive", "max_line_length": 131, "num_lines": 241, "path": "/acme/agents/jax/ppo/builder.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"PPO Builder.\"\"\"\nfrom typing import Iterator, List, Optional\n\nfrom acme import adders\nfrom acme import core\nfrom acme import specs\nfrom acme.adders import reverb as adders_reverb\nfrom acme.agents.jax import actor_core as actor_core_lib\nfrom acme.agents.jax import actors\nfrom acme.agents.jax import builders\nfrom acme.agents.jax.ppo import config as ppo_config\nfrom acme.agents.jax.ppo import learning\nfrom acme.agents.jax.ppo import networks as ppo_networks\nfrom acme.agents.jax.ppo import normalization\nfrom acme.jax import networks as networks_lib\nfrom acme.jax import utils\nfrom acme.jax import variable_utils\nfrom acme.utils import counting\nfrom acme.utils import loggers\nimport jax\nimport numpy as np\nimport optax\nimport reverb\n\n\nclass PPOBuilder(\n builders.ActorLearnerBuilder[ppo_networks.PPONetworks,\n actor_core_lib.FeedForwardPolicyWithExtra,\n reverb.ReplaySample]):\n \"\"\"PPO Builder.\"\"\"\n\n def __init__(\n self,\n config: ppo_config.PPOConfig,\n ):\n \"\"\"Creates PPO builder.\"\"\"\n self._config = config\n\n # An extra step is used for bootstrapping when computing advantages.\n self._sequence_length = config.unroll_length + 1\n\n def make_replay_tables(\n self,\n environment_spec: specs.EnvironmentSpec,\n policy: actor_core_lib.FeedForwardPolicyWithExtra,\n ) -> List[reverb.Table]:\n \"\"\"Creates reverb tables for the algorithm.\"\"\"\n del policy\n # params_num_sgd_steps is used to track how old the actor parameters are\n extra_spec = {\n 'log_prob': np.ones(shape=(), dtype=np.float32),\n 'params_num_sgd_steps': np.ones(shape=(), dtype=np.float32),\n }\n signature = adders_reverb.SequenceAdder.signature(\n environment_spec, extra_spec, sequence_length=self._sequence_length)\n return [\n reverb.Table.queue(\n name=self._config.replay_table_name,\n max_size=self._config.batch_size,\n signature=signature)\n ]\n\n def make_dataset_iterator(\n self, replay_client: reverb.Client) -> Iterator[reverb.ReplaySample]:\n \"\"\"Creates a dataset.\n\n The iterator batch size is computed as follows:\n\n Let:\n B := learner batch size (config.batch_size)\n H := number of hosts (jax.process_count())\n D := number of local devices per host\n\n 
The Reverb iterator will load batches of size B // (H * D). After wrapping\n the iterator with utils.multi_device_put, this will result in an iterable\n that provides B // H samples per item, with B // (H * D) samples placed on\n each local device. In a multi-host setup, each host has its own learner\n node and builds its own instance of the iterator. This will result\n in a total batch size of H * (B // H) == B being consumed per learner\n step (since the learner is pmapped across all devices). Note that\n jax.device_count() returns the total number of devices across hosts,\n i.e. H * D.\n\n Args:\n replay_client: the reverb replay client\n\n Returns:\n A replay buffer iterator to be used by the local devices.\n \"\"\"\n iterator_batch_size, ragged = divmod(self._config.batch_size,\n jax.device_count())\n if ragged:\n raise ValueError(\n 'Learner batch size must be divisible by total number of devices!')\n\n # We don't use datasets.make_reverb_dataset() here to avoid interleaving\n # and prefetching, that doesn't work well with can_sample() check on update.\n # NOTE: Value for max_in_flight_samples_per_worker comes from a\n # recommendation here: https://git.io/JYzXB\n dataset = reverb.TrajectoryDataset.from_table_signature(\n server_address=replay_client.server_address,\n table=self._config.replay_table_name,\n max_in_flight_samples_per_worker=(\n 2 * self._config.batch_size // jax.process_count()\n ),\n )\n dataset = dataset.batch(iterator_batch_size, drop_remainder=True)\n dataset = dataset.as_numpy_iterator()\n return utils.multi_device_put(iterable=dataset, devices=jax.local_devices())\n\n def make_adder(\n self,\n replay_client: reverb.Client,\n environment_spec: Optional[specs.EnvironmentSpec],\n policy: Optional[actor_core_lib.FeedForwardPolicyWithExtra],\n ) -> Optional[adders.Adder]:\n \"\"\"Creates an adder which handles observations.\"\"\"\n del environment_spec, policy\n # Note that the last transition in the sequence is used for bootstrapping\n # only and is ignored otherwise. 
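# [Editor's aside -- not part of the original file] A worked instance, with
# made-up values, of the batch-size arithmetic documented in
# make_dataset_iterator above:
B_ex, H_ex, D_ex = 1024, 2, 4          # batch size, hosts, local devices/host
per_device_ex = B_ex // (H_ex * D_ex)  # 128 samples placed on each device
per_host_item_ex = B_ex // H_ex        # 512 samples per iterator item per host
print(per_device_ex, per_host_item_ex, H_ex * per_host_item_ex)  # total == B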
So we need to make sure that sequences\n # overlap on one transition, thus \"-1\" in the period length computation.\n return adders_reverb.SequenceAdder(\n client=replay_client,\n priority_fns={self._config.replay_table_name: None},\n period=self._sequence_length - 1,\n sequence_length=self._sequence_length,\n )\n\n def make_learner(\n self,\n random_key: networks_lib.PRNGKey,\n networks: ppo_networks.PPONetworks,\n dataset: Iterator[reverb.ReplaySample],\n logger_fn: loggers.LoggerFactory,\n environment_spec: specs.EnvironmentSpec,\n replay_client: Optional[reverb.Client] = None,\n counter: Optional[counting.Counter] = None,\n ) -> core.Learner:\n del replay_client\n\n if callable(self._config.learning_rate):\n optimizer = optax.chain(\n optax.clip_by_global_norm(self._config.max_gradient_norm),\n optax.scale_by_adam(eps=self._config.adam_epsilon),\n optax.scale_by_schedule(self._config.learning_rate), optax.scale(-1)) # pytype: disable=wrong-arg-types # numpy-scalars\n else:\n optimizer = optax.chain(\n optax.clip_by_global_norm(self._config.max_gradient_norm),\n optax.scale_by_adam(eps=self._config.adam_epsilon),\n optax.scale(-self._config.learning_rate))\n\n obs_normalization_fns = None\n if self._config.obs_normalization_fns_factory is not None:\n obs_normalization_fns = self._config.obs_normalization_fns_factory(\n environment_spec.observations)\n\n return learning.PPOLearner(\n ppo_networks=networks,\n iterator=dataset,\n discount=self._config.discount,\n entropy_cost=self._config.entropy_cost,\n value_cost=self._config.value_cost,\n ppo_clipping_epsilon=self._config.ppo_clipping_epsilon,\n normalize_advantage=self._config.normalize_advantage,\n normalize_value=self._config.normalize_value,\n normalization_ema_tau=self._config.normalization_ema_tau,\n clip_value=self._config.clip_value,\n value_clipping_epsilon=self._config.value_clipping_epsilon,\n max_abs_reward=self._config.max_abs_reward,\n gae_lambda=self._config.gae_lambda,\n counter=counter,\n random_key=random_key,\n optimizer=optimizer,\n num_epochs=self._config.num_epochs,\n num_minibatches=self._config.num_minibatches,\n logger=logger_fn('learner'),\n log_global_norm_metrics=self._config.log_global_norm_metrics,\n metrics_logging_period=self._config.metrics_logging_period,\n pmap_axis_name=self._config.pmap_axis_name,\n obs_normalization_fns=obs_normalization_fns,\n )\n\n def make_actor(\n self,\n random_key: networks_lib.PRNGKey,\n policy: actor_core_lib.FeedForwardPolicyWithExtra,\n environment_spec: specs.EnvironmentSpec,\n variable_source: Optional[core.VariableSource] = None,\n adder: Optional[adders.Adder] = None,\n ) -> core.Actor:\n assert variable_source is not None\n actor_core = actor_core_lib.batched_feed_forward_with_extras_to_actor_core(\n policy)\n if self._config.obs_normalization_fns_factory is not None:\n variable_client = variable_utils.VariableClient(\n variable_source, ['params', 'obs_normalization_params'],\n device='cpu',\n update_period=self._config.variable_update_period)\n obs_normalization_fns = self._config.obs_normalization_fns_factory(\n environment_spec.observations)\n actor = normalization.NormalizedGenericActor(\n actor_core,\n obs_normalization_fns,\n random_key,\n variable_client,\n adder,\n jit=True,\n backend='cpu',\n per_episode_update=False,\n )\n else:\n variable_client = variable_utils.VariableClient(\n variable_source,\n 'params',\n device='cpu',\n update_period=self._config.variable_update_period)\n actor = actors.GenericActor(\n actor_core, random_key, variable_client, adder, 
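# [Editor's aside -- not part of the original file] A quick numeric check of
# the overlap described in make_adder above: with period = sequence_length - 1,
# consecutive sequences share exactly one transition, which is the one used
# for bootstrapping. The unroll length below is an assumed example value.
unroll_length_ex = 10
sequence_length_ex = unroll_length_ex + 1   # 11, as set up in __init__
period_ex = sequence_length_ex - 1          # 10
first_ex = list(range(0, sequence_length_ex))                       # steps 0..10
second_ex = list(range(period_ex, period_ex + sequence_length_ex))  # steps 10..20
print(set(first_ex) & set(second_ex))       # {10}: one shared step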
backend='cpu')\n return actor\n\n def make_policy(\n self,\n networks: ppo_networks.PPONetworks,\n environment_spec: specs.EnvironmentSpec,\n evaluation: bool = False) -> actor_core_lib.FeedForwardPolicyWithExtra:\n del environment_spec\n return ppo_networks.make_inference_fn(networks, evaluation)\n" }, { "alpha_fraction": 0.8074866533279419, "alphanum_fraction": 0.8122400641441345, "avg_line_length": 50, "blob_id": "00a104a6b6ac016d6de23932a1c7905aff9fe029", "content_id": "4185e6345daa376fbf29d8135a62790e602ae333", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1683, "license_type": "permissive", "max_line_length": 75, "num_lines": 33, "path": "/acme/agents/jax/ppo/__init__.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"PPO agent.\"\"\"\n\nfrom acme.agents.jax.ppo.builder import PPOBuilder\nfrom acme.agents.jax.ppo.config import PPOConfig\nfrom acme.agents.jax.ppo.learning import PPOLearner\nfrom acme.agents.jax.ppo.networks import EntropyFn\nfrom acme.agents.jax.ppo.networks import make_categorical_ppo_networks\nfrom acme.agents.jax.ppo.networks import make_continuous_networks\nfrom acme.agents.jax.ppo.networks import make_discrete_networks\nfrom acme.agents.jax.ppo.networks import make_inference_fn\nfrom acme.agents.jax.ppo.networks import make_mvn_diag_ppo_networks\nfrom acme.agents.jax.ppo.networks import make_networks\nfrom acme.agents.jax.ppo.networks import make_ppo_networks\nfrom acme.agents.jax.ppo.networks import make_tanh_normal_ppo_networks\nfrom acme.agents.jax.ppo.networks import PPONetworks\nfrom acme.agents.jax.ppo.normalization import build_ema_mean_std_normalizer\nfrom acme.agents.jax.ppo.normalization import build_mean_std_normalizer\nfrom acme.agents.jax.ppo.normalization import NormalizationFns\nfrom acme.agents.jax.ppo.normalization import NormalizedGenericActor\n" }, { "alpha_fraction": 0.6418772339820862, "alphanum_fraction": 0.6465634107589722, "avg_line_length": 35.90225601196289, "blob_id": "235aee3a7ca2b70ada09f61a8133cfb67794197c", "content_id": "38eab6c72176325eaf9257153be870759054bed7", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 14724, "license_type": "permissive", "max_line_length": 84, "num_lines": 399, "path": "/acme/agents/jax/ail/networks.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Network definitions for the AIL agent.\n\nAIRL network architecture follows https://arxiv.org/pdf/1710.11248.pdf.\n\"\"\"\nimport dataclasses\nimport functools\nfrom typing import Any, Callable, Generic, Iterable, Optional\n\nfrom acme import specs\nfrom acme import types\nfrom acme.jax import networks as networks_lib\nfrom acme.jax import utils\nfrom acme.jax.imitation_learning_types import DirectRLNetworks\nimport haiku as hk\nimport jax\nfrom jax import numpy as jnp\nimport numpy as np\n\n# Function from discriminator logit to imitation reward.\nImitationRewardFn = Callable[[networks_lib.Logits], jnp.ndarray]\nState = networks_lib.Params\n\n\[email protected]\nclass AILNetworks(Generic[DirectRLNetworks]):\n \"\"\"AIL networks data class.\n\n Attributes:\n discriminator_network: Network which takes as input:\n (observations, actions, next_observations, direct_rl_params)\n to return the logit of the discriminator.\n If the discriminator does not need direct_rl_params you can pass ().\n imitation_reward_fn: Function from logit of the discriminator to imitation\n reward.\n direct_rl_networks: Networks of the direct RL algorithm.\n \"\"\"\n discriminator_network: networks_lib.FeedForwardNetwork\n imitation_reward_fn: ImitationRewardFn\n direct_rl_networks: DirectRLNetworks\n\n\ndef compute_ail_reward(discriminator_params: networks_lib.Params,\n discriminator_state: State,\n policy_params: Optional[networks_lib.Params],\n transitions: types.Transition,\n networks: AILNetworks) -> jnp.ndarray:\n \"\"\"Computes the AIL reward for a given transition.\n\n Args:\n discriminator_params: Parameters of the discriminator network.\n discriminator_state: State of the discriminator network.\n policy_params: Parameters of the direct RL policy.\n transitions: Transitions to compute the reward for.\n networks: AIL networks.\n\n Returns:\n The rewards as an ndarray.\n \"\"\"\n logits, _ = networks.discriminator_network.apply(\n discriminator_params,\n policy_params,\n discriminator_state,\n transitions,\n is_training=False,\n rng=None)\n return networks.imitation_reward_fn(logits)\n\n\nclass SpectralNormalizedLinear(hk.Module):\n \"\"\"SpectralNormalizedLinear module.\n\n This is a Linear layer with an upper-bounded Lipschitz constant. It is used in iResNet.\n\n Reference:\n Behrmann et al. Invertible Residual Networks. ICML 2019.\n https://arxiv.org/pdf/1811.00995.pdf\n \"\"\"\n\n def __init__(\n self,\n output_size: int,\n lipschitz_coeff: float,\n with_bias: bool = True,\n w_init: Optional[hk.initializers.Initializer] = None,\n b_init: Optional[hk.initializers.Initializer] = None,\n name: Optional[str] = None,\n ):\n \"\"\"Constructs the SpectralNormalizedLinear module.\n\n Args:\n output_size: Output dimensionality.\n lipschitz_coeff: Spectral normalization coefficient.\n with_bias: Whether to add a bias to the output.\n w_init: Optional initializer for weights. 
By default, uses random values\n from truncated normal, with stddev ``1 / sqrt(fan_in)``. See\n https://arxiv.org/abs/1502.03167v3.\n b_init: Optional initializer for bias. By default, zero.\n name: Name of the module.\n \"\"\"\n super().__init__(name=name)\n self.input_size = None\n self.output_size = output_size\n self.with_bias = with_bias\n self.w_init = w_init\n self.b_init = b_init or jnp.zeros\n self.lipschitz_coeff = lipschitz_coeff\n self.num_iterations = 100\n self.eps = 1e-6\n\n def get_normalized_weights(self,\n weights: jnp.ndarray,\n renormalize: bool = False) -> jnp.ndarray:\n\n def _l2_normalize(x, axis=None, eps=1e-12):\n return x * jax.lax.rsqrt((x * x).sum(axis=axis, keepdims=True) + eps)\n\n output_size = self.output_size\n dtype = weights.dtype\n assert output_size == weights.shape[-1]\n sigma = hk.get_state('sigma', (), init=jnp.ones)\n if renormalize:\n # Power iterations to compute spectral norm V*W*U^T.\n u = hk.get_state(\n 'u', (1, output_size), dtype, init=hk.initializers.RandomNormal())\n for _ in range(self.num_iterations):\n v = _l2_normalize(jnp.matmul(u, weights.transpose()), eps=self.eps)\n u = _l2_normalize(jnp.matmul(v, weights), eps=self.eps)\n u = jax.lax.stop_gradient(u)\n v = jax.lax.stop_gradient(v)\n sigma = jnp.matmul(jnp.matmul(v, weights), jnp.transpose(u))[0, 0]\n hk.set_state('u', u)\n hk.set_state('v', v)\n hk.set_state('sigma', sigma)\n factor = jnp.maximum(1, sigma / self.lipschitz_coeff)\n return weights / factor\n\n def __call__(self, inputs: jnp.ndarray) -> jnp.ndarray:\n \"\"\"Computes a linear transform of the input.\"\"\"\n if not inputs.shape:\n raise ValueError('Input must not be scalar.')\n\n input_size = self.input_size = inputs.shape[-1]\n output_size = self.output_size\n dtype = inputs.dtype\n\n w_init = self.w_init\n if w_init is None:\n stddev = 1. / np.sqrt(self.input_size)\n w_init = hk.initializers.TruncatedNormal(stddev=stddev)\n w = hk.get_parameter('w', [input_size, output_size], dtype, init=w_init)\n w = self.get_normalized_weights(w, renormalize=True)\n\n out = jnp.dot(inputs, w)\n\n if self.with_bias:\n b = hk.get_parameter('b', [self.output_size], dtype, init=self.b_init)\n b = jnp.broadcast_to(b, out.shape)\n out = out + b\n\n return out\n\n\nclass DiscriminatorMLP(hk.Module):\n \"\"\"A multi-layer perceptron module.\"\"\"\n\n def __init__(\n self,\n hidden_layer_sizes: Iterable[int],\n w_init: Optional[hk.initializers.Initializer] = None,\n b_init: Optional[hk.initializers.Initializer] = None,\n with_bias: bool = True,\n activation: Callable[[jnp.ndarray], jnp.ndarray] = jax.nn.relu,\n input_dropout_rate: float = 0.,\n hidden_dropout_rate: float = 0.,\n spectral_normalization_lipschitz_coeff: Optional[float] = None,\n name: Optional[str] = None\n ):\n \"\"\"Constructs an MLP.\n\n Args:\n hidden_layer_sizes: Hidden layer sizes.\n w_init: Initializer for :class:`~haiku.Linear` weights.\n b_init: Initializer for :class:`~haiku.Linear` bias. Must be ``None`` if\n ``with_bias=False``.\n with_bias: Whether or not to apply a bias in each layer.\n activation: Activation function to apply between :class:`~haiku.Linear`\n layers. 
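# [Editor's aside -- not part of the original file] A NumPy sketch of the
# power iteration performed in get_normalized_weights above: estimate the
# largest singular value sigma of the weight matrix, then divide the weights
# by max(1, sigma / lipschitz_coeff). Shapes and values are assumptions.
import numpy as np

rng = np.random.default_rng(0)
weights_ex = rng.normal(size=(8, 4))   # assumed [input, output] shape
u_ex = rng.normal(size=(1, 4))
for _ in range(100):
  v_ex = u_ex @ weights_ex.T
  v_ex /= np.linalg.norm(v_ex) + 1e-12
  u_ex = v_ex @ weights_ex
  u_ex /= np.linalg.norm(u_ex) + 1e-12
sigma_ex = (v_ex @ weights_ex @ u_ex.T).item()
lipschitz_coeff_ex = 0.9
normalized_ex = weights_ex / max(1.0, sigma_ex / lipschitz_coeff_ex)
# The largest singular value of `normalized_ex` is now at most ~0.9.
print(sigma_ex, np.linalg.svd(normalized_ex, compute_uv=False)[0])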
Defaults to ReLU.\n input_dropout_rate: Dropout on the input.\n hidden_dropout_rate: Dropout on the hidden layer outputs.\n spectral_normalization_lipschitz_coeff: If not None, the network will have\n spectral normalization with the given constant.\n name: Optional name for this module.\n\n Raises:\n ValueError: If ``with_bias`` is ``False`` and ``b_init`` is not ``None``.\n \"\"\"\n if not with_bias and b_init is not None:\n raise ValueError('When with_bias=False b_init must not be set.')\n\n super().__init__(name=name)\n self._activation = activation\n self._input_dropout_rate = input_dropout_rate\n self._hidden_dropout_rate = hidden_dropout_rate\n layer_sizes = list(hidden_layer_sizes) + [1]\n\n if spectral_normalization_lipschitz_coeff is not None:\n layer_lipschitz_coeff = np.power(spectral_normalization_lipschitz_coeff,\n 1. / len(layer_sizes))\n layer_module = functools.partial(\n SpectralNormalizedLinear,\n lipschitz_coeff=layer_lipschitz_coeff,\n w_init=w_init,\n b_init=b_init,\n with_bias=with_bias)\n else:\n layer_module = functools.partial(\n hk.Linear,\n w_init=w_init,\n b_init=b_init,\n with_bias=with_bias)\n\n layers = []\n for index, output_size in enumerate(layer_sizes):\n layers.append(\n layer_module(output_size=output_size, name=f'linear_{index}'))\n self._layers = tuple(layers)\n\n def __call__(\n self,\n inputs: jnp.ndarray,\n is_training: bool,\n rng: Optional[networks_lib.PRNGKey],\n ) -> networks_lib.Logits:\n rng = hk.PRNGSequence(rng) if rng is not None else None\n\n out = inputs\n for i, layer in enumerate(self._layers):\n if is_training:\n dropout_rate = (\n self._input_dropout_rate if i == 0 else self._hidden_dropout_rate)\n out = hk.dropout(next(rng), dropout_rate, out)\n out = layer(out)\n if i < len(self._layers) - 1:\n out = self._activation(out)\n\n return out\n\n\nclass DiscriminatorModule(hk.Module):\n \"\"\"Discriminator module that concatenates its inputs.\"\"\"\n\n def __init__(self,\n environment_spec: specs.EnvironmentSpec,\n use_action: bool,\n use_next_obs: bool,\n network_core: Callable[..., Any],\n observation_embedding: Callable[[networks_lib.Observation],\n jnp.ndarray] = lambda x: x,\n name='discriminator'):\n super().__init__(name=name)\n self._use_action = use_action\n self._environment_spec = environment_spec\n self._use_next_obs = use_next_obs\n self._network_core = network_core\n self._observation_embedding = observation_embedding\n\n def __call__(self, observations: networks_lib.Observation,\n actions: networks_lib.Action,\n next_observations: networks_lib.Observation, is_training: bool,\n rng: networks_lib.PRNGKey) -> networks_lib.Logits:\n observations = self._observation_embedding(observations)\n if self._use_next_obs:\n next_observations = self._observation_embedding(next_observations)\n data = jnp.concatenate([observations, next_observations], axis=-1)\n else:\n data = observations\n if self._use_action:\n action_spec = self._environment_spec.actions\n if isinstance(action_spec, specs.DiscreteArray):\n actions = jax.nn.one_hot(actions,\n action_spec.num_values)\n data = jnp.concatenate([data, actions], axis=-1)\n output = self._network_core(data, is_training, rng)\n output = jnp.squeeze(output, axis=-1)\n return output\n\n\nclass AIRLModule(hk.Module):\n \"\"\"AIRL Module.\"\"\"\n\n def __init__(self,\n environment_spec: specs.EnvironmentSpec,\n use_action: bool,\n use_next_obs: bool,\n discount: float,\n g_core: Callable[..., Any],\n h_core: Callable[..., Any],\n observation_embedding: Callable[[networks_lib.Observation],\n 
jnp.ndarray] = lambda x: x,\n name='airl'):\n super().__init__(name=name)\n self._environment_spec = environment_spec\n self._use_action = use_action\n self._use_next_obs = use_next_obs\n self._discount = discount\n self._g_core = g_core\n self._h_core = h_core\n self._observation_embedding = observation_embedding\n\n def __call__(self, observations: networks_lib.Observation,\n actions: networks_lib.Action,\n next_observations: networks_lib.Observation,\n is_training: bool,\n rng: networks_lib.PRNGKey) -> networks_lib.Logits:\n g_output = DiscriminatorModule(\n environment_spec=self._environment_spec,\n use_action=self._use_action,\n use_next_obs=self._use_next_obs,\n network_core=self._g_core,\n observation_embedding=self._observation_embedding,\n name='airl_g')(observations, actions, next_observations, is_training,\n rng)\n h_module = DiscriminatorModule(\n environment_spec=self._environment_spec,\n use_action=False,\n use_next_obs=False,\n network_core=self._h_core,\n observation_embedding=self._observation_embedding,\n name='airl_h')\n return (g_output + self._discount * h_module(next_observations, (),\n (), is_training, rng) -\n h_module(observations, (), (), is_training, rng))\n\n\n# TODO(eorsini): Manipulate FeedForwardNetworks instead of transforms to\n# increase compatibility with Flax.\ndef make_discriminator(\n environment_spec: specs.EnvironmentSpec,\n discriminator_transformed: hk.TransformedWithState,\n logpi_fn: Optional[Callable[\n [networks_lib.Params, networks_lib.Observation, networks_lib.Action],\n jnp.ndarray]] = None\n) -> networks_lib.FeedForwardNetwork:\n \"\"\"Creates the discriminator network.\n\n Args:\n environment_spec: Environment spec\n discriminator_transformed: Haiku transformed of the discriminator.\n logpi_fn: If the policy logpi function is provided, its output will be\n removed from the discriminator logit.\n\n Returns:\n The network.\n \"\"\"\n\n def apply_fn(params: hk.Params,\n policy_params: networks_lib.Params,\n state: hk.State,\n transitions: types.Transition,\n is_training: bool,\n rng: networks_lib.PRNGKey) -> networks_lib.Logits:\n output, state = discriminator_transformed.apply(\n params, state, transitions.observation, transitions.action,\n transitions.next_observation, is_training, rng)\n if logpi_fn is not None:\n logpi = logpi_fn(policy_params, transitions.observation,\n transitions.action)\n\n # Quick Maths:\n # D = exp(output)/(exp(output) + pi(a|s))\n # logit(D) = log(D/(1-D)) = log(exp(output)/pi(a|s))\n # logit(D) = output - logpi\n return output - logpi, state # pytype: disable=bad-return-type # jax-ndarray\n return output, state # pytype: disable=bad-return-type # jax-ndarray\n\n dummy_obs = utils.zeros_like(environment_spec.observations)\n dummy_obs = utils.add_batch_dim(dummy_obs)\n dummy_actions = utils.zeros_like(environment_spec.actions)\n dummy_actions = utils.add_batch_dim(dummy_actions)\n\n return networks_lib.FeedForwardNetwork(\n # pylint: disable=g-long-lambda\n init=lambda rng: discriminator_transformed.init(\n rng, dummy_obs, dummy_actions, dummy_obs, False, rng),\n apply=apply_fn)\n" }, { "alpha_fraction": 0.7057682275772095, "alphanum_fraction": 0.7078849673271179, "avg_line_length": 35.108280181884766, "blob_id": "c23bd523cd9cb0f9fb8f30c68e913d1ea06c35de", "content_id": "40599eb2d99f9a9d3bc70a6950795ebd7a7f5b74", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5669, "license_type": "permissive", "max_line_length": 81, "num_lines": 157, "path": 
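# [Editor's aside -- not part of the original file] A numeric sanity check of
# the "Quick Maths" identity used in make_discriminator above: with
# D = exp(f) / (exp(f) + pi(a|s)), logit(D) = log(D / (1 - D)) = f - log pi.
# The values of f and logpi below are made up for illustration.
import numpy as np

f_ex, logpi_ex = 1.3, -0.7
d_ex = np.exp(f_ex) / (np.exp(f_ex) + np.exp(logpi_ex))
print(np.log(d_ex / (1.0 - d_ex)), f_ex - logpi_ex)  # equal up to float error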
"/acme/jax/experiments/make_distributed_offline_experiment.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Program definition for a distributed layout for an offline RL experiment.\"\"\"\n\nfrom typing import Any, Optional\n\nfrom acme import core\nfrom acme import specs\nfrom acme.agents.jax import builders\nfrom acme.jax import networks as networks_lib\nfrom acme.jax import savers\nfrom acme.jax import utils\nfrom acme.jax.experiments import config\nfrom acme.jax import snapshotter\nfrom acme.utils import counting\nfrom acme.utils import lp_utils\nimport jax\nimport launchpad as lp\n\n\ndef make_distributed_offline_experiment(\n experiment: config.OfflineExperimentConfig[builders.Networks, Any, Any],\n *,\n make_snapshot_models: Optional[config.SnapshotModelFactory[\n builders.Networks]] = None,\n name: str = 'agent',\n program: Optional[lp.Program] = None) -> lp.Program:\n \"\"\"Builds a Launchpad program for running the experiment.\n\n Args:\n experiment: configuration for the experiment.\n make_snapshot_models: a factory that defines what is saved in snapshots.\n name: name of the constructed program. Ignored if an existing program is\n passed.\n program: a program where agent nodes are added to. 
If None, a new program is\n created.\n\n Returns:\n The Launchpad program with all the nodes needed for running the experiment.\n \"\"\"\n\n def build_model_saver(variable_source: core.VariableSource):\n assert experiment.checkpointing\n environment = experiment.environment_factory(0)\n spec = specs.make_environment_spec(environment)\n networks = experiment.network_factory(spec)\n models = make_snapshot_models(networks, spec)\n # TODO(raveman): Decouple checkpointing and snapshotting configs.\n return snapshotter.JAXSnapshotter(\n variable_source=variable_source,\n models=models,\n path=experiment.checkpointing.directory,\n add_uid=experiment.checkpointing.add_uid)\n\n def build_counter():\n counter = counting.Counter()\n if experiment.checkpointing:\n counter = savers.CheckpointingRunner(\n counter,\n key='counter',\n subdirectory='counter',\n time_delta_minutes=experiment.checkpointing.time_delta_minutes,\n directory=experiment.checkpointing.directory,\n add_uid=experiment.checkpointing.add_uid,\n max_to_keep=experiment.checkpointing.max_to_keep,\n checkpoint_ttl_seconds=experiment.checkpointing.checkpoint_ttl_seconds,\n )\n return counter\n\n def build_learner(\n random_key: networks_lib.PRNGKey,\n counter: Optional[counting.Counter] = None,\n ):\n \"\"\"The Learning part of the agent.\"\"\"\n\n dummy_seed = 1\n spec = (\n experiment.environment_spec or\n specs.make_environment_spec(experiment.environment_factory(dummy_seed)))\n\n # Creates the networks to optimize (online) and target networks.\n networks = experiment.network_factory(spec)\n\n dataset_key, random_key = jax.random.split(random_key)\n iterator = experiment.demonstration_dataset_factory(dataset_key)\n # make_demonstrations is responsible for putting data onto appropriate\n # training devices, so here we apply prefetch, so that data is copied over\n # in the background.\n iterator = utils.prefetch(iterable=iterator, buffer_size=1)\n counter = counting.Counter(counter, 'learner')\n learner = experiment.builder.make_learner(\n random_key=random_key,\n networks=networks,\n dataset=iterator,\n logger_fn=experiment.logger_factory,\n environment_spec=spec,\n counter=counter)\n\n if experiment.checkpointing:\n learner = savers.CheckpointingRunner(\n learner,\n key='learner',\n subdirectory='learner',\n time_delta_minutes=5,\n directory=experiment.checkpointing.directory,\n add_uid=experiment.checkpointing.add_uid,\n max_to_keep=experiment.checkpointing.max_to_keep,\n checkpoint_ttl_seconds=experiment.checkpointing.checkpoint_ttl_seconds,\n )\n\n return learner\n\n if not program:\n program = lp.Program(name=name)\n\n key = jax.random.PRNGKey(experiment.seed)\n\n counter = program.add_node(lp.CourierNode(build_counter), label='counter')\n\n if experiment.max_num_learner_steps is not None:\n program.add_node(\n lp.CourierNode(\n lp_utils.StepsLimiter,\n counter,\n experiment.max_num_learner_steps,\n steps_key='learner_steps'),\n label='counter')\n\n learner_key, key = jax.random.split(key)\n learner_node = lp.CourierNode(build_learner, learner_key, counter)\n learner = learner_node.create_handle()\n program.add_node(learner_node, label='learner')\n\n for evaluator in experiment.get_evaluator_factories():\n evaluator_key, key = jax.random.split(key)\n program.add_node(\n lp.CourierNode(evaluator, evaluator_key, learner, counter,\n experiment.builder.make_actor),\n label='evaluator')\n\n if make_snapshot_models and experiment.checkpointing:\n program.add_node(lp.CourierNode(build_model_saver, learner),\n label='model_saver')\n\n 
return program\n" }, { "alpha_fraction": 0.7186122536659241, "alphanum_fraction": 0.7242997288703918, "avg_line_length": 35.06666564941406, "blob_id": "2760f8fb8ef7baa738e728f6e89832e890e4f4dc", "content_id": "ccbb2b29c619aeaa510f74924ed701ff4a50bcfe", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7033, "license_type": "permissive", "max_line_length": 94, "num_lines": 195, "path": "/examples/baselines/imitation/run_bc.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Example running BC on continuous control tasks.\n\nThe network structure consists in a 3-layer MLP with ReLU activation\nand dropout.\n\"\"\"\n\nfrom typing import Callable, Iterator, Tuple\n\nfrom absl import flags\nfrom acme import specs\nfrom acme import types\nfrom acme.agents.jax import actor_core as actor_core_lib\nfrom acme.agents.jax import bc\nfrom acme.datasets import tfds\nimport helpers\nfrom absl import app\nfrom acme.jax import experiments\nfrom acme.jax import types as jax_types\nfrom acme.jax import utils\nfrom acme.utils import lp_utils\nimport dm_env\nimport haiku as hk\nimport launchpad as lp\nimport numpy as np\n\nFLAGS = flags.FLAGS\n\nflags.DEFINE_bool(\n 'run_distributed', True, 'Should an agent be executed in a distributed '\n 'way. 
If False, will run single-threaded.')\n# Agent flags\nflags.DEFINE_string('env_name', 'HalfCheetah-v2', 'What environment to run')\nflags.DEFINE_integer('num_demonstrations', 11,\n 'Number of demonstration trajectories.')\nflags.DEFINE_integer('num_bc_steps', 100_000, 'Number of bc learning steps.')\nflags.DEFINE_integer('num_steps', 0, 'Number of environment steps.')\nflags.DEFINE_integer('batch_size', 64, 'Batch size.')\nflags.DEFINE_float('learning_rate', 1e-4, 'Optimizer learning rate.')\nflags.DEFINE_float('dropout_rate', 0.1, 'Dropout rate of bc network.')\nflags.DEFINE_integer('num_layers', 3, 'Num layers of bc network.')\nflags.DEFINE_integer('num_units', 256, 'Num units of bc network layers.')\nflags.DEFINE_integer('eval_every', 5000, 'Evaluation period.')\nflags.DEFINE_integer('evaluation_episodes', 10, 'Evaluation episodes.')\nflags.DEFINE_integer('seed', 0, 'Random seed for learner and evaluator.')\n\n\ndef _make_demonstration_dataset_factory(\n dataset_name: str, num_demonstrations: int,\n environment_spec: specs.EnvironmentSpec, batch_size: int\n) -> Callable[[jax_types.PRNGKey], Iterator[types.Transition]]:\n \"\"\"Returns the demonstration dataset factory for the given dataset.\"\"\"\n\n def demonstration_dataset_factory(\n random_key: jax_types.PRNGKey) -> Iterator[types.Transition]:\n \"\"\"Returns an iterator of demonstration samples.\"\"\"\n\n transitions_iterator = tfds.get_tfds_dataset(\n dataset_name, num_demonstrations, env_spec=environment_spec)\n return tfds.JaxInMemoryRandomSampleIterator(\n transitions_iterator, key=random_key, batch_size=batch_size)\n\n return demonstration_dataset_factory\n\n\ndef _make_environment_factory(env_name: str) -> jax_types.EnvironmentFactory:\n \"\"\"Returns the environment factory for the given environment.\"\"\"\n\n def environment_factory(seed: int) -> dm_env.Environment:\n del seed\n return helpers.make_environment(task=env_name)\n\n return environment_factory\n\n\ndef _make_network_factory(\n shift: Tuple[np.float64], scale: Tuple[np.float64], num_layers: int,\n num_units: int,\n dropout_rate: float) -> Callable[[specs.EnvironmentSpec], bc.BCNetworks]:\n \"\"\"Returns the factory of networks to be used by the agent.\n\n Args:\n shift: Shift of the observations in demonstrations.\n scale: Scale of the observations in demonstrations.\n num_layers: Number of layers of the BC network.\n num_units: Number of units of the BC network.\n dropout_rate: Dropout rate of the BC network.\n\n Returns:\n Network factory.\n \"\"\"\n\n def network_factory(spec: specs.EnvironmentSpec) -> bc.BCNetworks:\n \"\"\"Creates the network used by the agent.\"\"\"\n\n action_spec = spec.actions\n num_dimensions = np.prod(action_spec.shape, dtype=int)\n\n def actor_fn(obs, is_training=False, key=None):\n obs += shift\n obs *= scale\n hidden_layers = [num_units] * num_layers\n mlp = hk.Sequential([\n hk.nets.MLP(hidden_layers + [num_dimensions]),\n ])\n if is_training:\n return mlp(obs, dropout_rate=dropout_rate, rng=key)\n else:\n return mlp(obs)\n\n policy = hk.without_apply_rng(hk.transform(actor_fn))\n\n # Create dummy observations to create network parameters.\n dummy_obs = utils.zeros_like(spec.observations)\n dummy_obs = utils.add_batch_dim(dummy_obs)\n\n policy_network = bc.BCPolicyNetwork(lambda key: policy.init(key, dummy_obs),\n policy.apply)\n\n return bc.BCNetworks(policy_network=policy_network)\n\n return network_factory\n\n\ndef build_experiment_config() -> experiments.OfflineExperimentConfig[\n bc.BCNetworks, 
actor_core_lib.FeedForwardPolicy, types.Transition]:\n \"\"\"Returns a config for BC experiments.\"\"\"\n\n # Create an environment, grab the spec, and use it to create networks.\n environment = helpers.make_environment(task=FLAGS.env_name)\n environment_spec = specs.make_environment_spec(environment)\n\n # Define the demonstrations factory.\n dataset_name = helpers.get_dataset_name(FLAGS.env_name)\n demonstration_dataset_factory = _make_demonstration_dataset_factory(\n dataset_name, FLAGS.num_demonstrations, environment_spec,\n FLAGS.batch_size)\n\n # Load the demonstrations to compute the stats.\n dataset = tfds.get_tfds_dataset(\n dataset_name, FLAGS.num_demonstrations, env_spec=environment_spec)\n shift, scale = helpers.get_observation_stats(dataset)\n\n # Define the network factory.\n network_factory = _make_network_factory( # pytype: disable=wrong-arg-types # numpy-scalars\n shift=shift,\n scale=scale,\n num_layers=FLAGS.num_layers,\n num_units=FLAGS.num_units,\n dropout_rate=FLAGS.dropout_rate)\n\n # Create the BC builder.\n bc_config = bc.BCConfig(learning_rate=FLAGS.learning_rate)\n bc_builder = bc.BCBuilder(bc_config, loss_fn=bc.mse())\n\n environment_factory = _make_environment_factory(FLAGS.env_name)\n\n return experiments.OfflineExperimentConfig(\n builder=bc_builder,\n network_factory=network_factory,\n demonstration_dataset_factory=demonstration_dataset_factory,\n environment_factory=environment_factory,\n max_num_learner_steps=FLAGS.num_bc_steps,\n seed=FLAGS.seed,\n environment_spec=environment_spec,\n )\n\n\ndef main(_):\n config = build_experiment_config()\n if FLAGS.run_distributed:\n program = experiments.make_distributed_offline_experiment(experiment=config)\n lp.launch(program, xm_resources=lp_utils.make_xm_docker_resources(program))\n else:\n experiments.run_offline_experiment(\n experiment=config,\n eval_every=FLAGS.eval_every,\n num_eval_episodes=FLAGS.evaluation_episodes)\n\n\nif __name__ == '__main__':\n app.run(main)\n" }, { "alpha_fraction": 0.6902424693107605, "alphanum_fraction": 0.6949335932731628, "avg_line_length": 38.03098678588867, "blob_id": "e5bc61990382ff49390185f024e31d4dd0f19235", "content_id": "6f6688e41d58a0d36069436962cba90a49a1c55c", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13856, "license_type": "permissive", "max_line_length": 105, "num_lines": 355, "path": "/acme/jax/running_statistics.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Utility functions to compute running statistics.\"\"\"\n\nimport dataclasses\nfrom typing import Any, Optional, Tuple, Union\n\nfrom acme import types\nfrom acme.utils import tree_utils\nimport chex\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nimport tree\n\n\nPath = Tuple[Any, ...]\n\"\"\"Path in a nested structure.\n\n A path is a tuple of indices (normally strings for maps and integers for\n arrays and tuples) that uniquely identifies a subtree in the nested structure.\n See\n https://tree.readthedocs.io/en/latest/api.html#tree.map_structure_with_path\n for more details.\n\"\"\"\n\n\ndef _is_prefix(a: Path, b: Path) -> bool:\n \"\"\"Returns whether `a` is a prefix of `b`.\"\"\"\n return b[:len(a)] == a\n\n\ndef _zeros_like(nest: types.Nest, dtype=None) -> types.NestedArray:\n return jax.tree_map(lambda x: jnp.zeros(x.shape, dtype or x.dtype), nest)\n\n\ndef _ones_like(nest: types.Nest, dtype=None) -> types.NestedArray:\n return jax.tree_map(lambda x: jnp.ones(x.shape, dtype or x.dtype), nest)\n\n\[email protected](frozen=True)\nclass NestedMeanStd:\n \"\"\"A container for running statistics (mean, std) of possibly nested data.\"\"\"\n mean: types.NestedArray\n std: types.NestedArray\n\n\[email protected](frozen=True)\nclass RunningStatisticsState(NestedMeanStd):\n \"\"\"Full state of running statistics computation.\"\"\"\n count: Union[int, jnp.ndarray]\n summed_variance: types.NestedArray\n\n\[email protected](frozen=True)\nclass NestStatisticsConfig:\n \"\"\"Specifies how to compute statistics for Nests with the same structure.\n\n Attributes:\n paths: A sequence of Nest paths to compute statistics for. If there is a\n collision between paths (one is a prefix of the other), the shorter path\n takes precedence.\n \"\"\"\n paths: Tuple[Path, ...] 
= ((),)\n\n\ndef _is_path_included(config: NestStatisticsConfig, path: Path) -> bool:\n \"\"\"Returns whether the path is included in the config.\"\"\"\n # A path is included in the config if it corresponds to a tree node that\n # belongs to a subtree rooted at the node corresponding to some path in\n # the config.\n return any(_is_prefix(config_path, path) for config_path in config.paths)\n\n\ndef init_state(nest: types.Nest) -> RunningStatisticsState:\n \"\"\"Initializes the running statistics for the given nested structure.\"\"\"\n dtype = jnp.float64 if jax.config.jax_enable_x64 else jnp.float32\n\n return RunningStatisticsState( # pytype: disable=wrong-arg-types # jax-ndarray\n count=0.,\n mean=_zeros_like(nest, dtype=dtype),\n summed_variance=_zeros_like(nest, dtype=dtype),\n # Initialize with ones to make sure normalization works correctly\n # in the initial state.\n std=_ones_like(nest, dtype=dtype))\n\n\ndef _validate_batch_shapes(batch: types.NestedArray,\n reference_sample: types.NestedArray,\n batch_dims: Tuple[int, ...]) -> None:\n \"\"\"Verifies shapes of the batch leaves against the reference sample.\n\n Checks that batch dimensions are the same in all leaves in the batch.\n Checks that non-batch dimensions for all leaves in the batch are the same\n as in the reference sample.\n\n Arguments:\n batch: the nested batch of data to be verified.\n reference_sample: the nested array to check non-batch dimensions.\n batch_dims: a Tuple of indices of batch dimensions in the batch shape.\n\n Returns:\n None.\n \"\"\"\n def validate_node_shape(reference_sample: jnp.ndarray,\n batch: jnp.ndarray) -> None:\n expected_shape = batch_dims + reference_sample.shape\n assert batch.shape == expected_shape, f'{batch.shape} != {expected_shape}'\n\n tree_utils.fast_map_structure(validate_node_shape, reference_sample, batch)\n\n\ndef update(state: RunningStatisticsState,\n batch: types.NestedArray,\n *,\n config: NestStatisticsConfig = NestStatisticsConfig(),\n weights: Optional[jnp.ndarray] = None,\n std_min_value: float = 1e-6,\n std_max_value: float = 1e6,\n pmap_axis_name: Optional[str] = None,\n validate_shapes: bool = True) -> RunningStatisticsState:\n \"\"\"Updates the running statistics with the given batch of data.\n\n Note: data batch and state elements (mean, etc.) must have the same structure.\n\n Note: by default will use int32 for counts and float32 for accumulated\n variance. This results in an integer overflow after 2^31 data points and\n degrading precision after 2^24 batch updates or even earlier if variance\n updates have large dynamic range.\n To improve precision, consider setting jax_enable_x64 to True, see\n https://jax.readthedocs.io/en/latest/notebooks/Common_Gotchas_in_JAX.html#double-64bit-precision\n\n Arguments:\n state: The running statistics before the update.\n batch: The data to be used to update the running statistics.\n config: The config that specifies which leaves of the nested structure\n should the running statistics be computed for.\n weights: Weights of the batch data. Should match the batch dimensions.\n Passing a weight of 2. should be equivalent to updating on the\n corresponding data point twice.\n std_min_value: Minimum value for the standard deviation.\n std_max_value: Maximum value for the standard deviation.\n pmap_axis_name: Name of the pmapped axis, if any.\n validate_shapes: If true, the shapes of all leaves of the batch will be\n validated. Enabled by default. 
Doesn't impact performance when jitted.\n\n Returns:\n Updated running statistics.\n \"\"\"\n # We require exactly the same structure to avoid issues when flattened\n # batch and state have different order of elements.\n tree.assert_same_structure(batch, state.mean)\n batch_shape = tree.flatten(batch)[0].shape\n # We assume the batch dimensions always go first.\n batch_dims = batch_shape[:len(batch_shape) - tree.flatten(state.mean)[0].ndim]\n batch_axis = range(len(batch_dims))\n if weights is None:\n step_increment = np.prod(batch_dims)\n else:\n step_increment = jnp.sum(weights)\n if pmap_axis_name is not None:\n step_increment = jax.lax.psum(step_increment, axis_name=pmap_axis_name)\n count = state.count + step_increment\n\n # Validation is important. If the shapes don't match exactly, but are\n # compatible, arrays will be silently broadcasted resulting in incorrect\n # statistics.\n if validate_shapes:\n if weights is not None:\n if weights.shape != batch_dims:\n raise ValueError(f'{weights.shape} != {batch_dims}')\n _validate_batch_shapes(batch, state.mean, batch_dims)\n\n def _compute_node_statistics(\n path: Path, mean: jnp.ndarray, summed_variance: jnp.ndarray,\n batch: jnp.ndarray) -> Tuple[jnp.ndarray, jnp.ndarray]:\n assert isinstance(mean, jnp.ndarray), type(mean)\n assert isinstance(summed_variance, jnp.ndarray), type(summed_variance)\n if not _is_path_included(config, path):\n # Return unchanged.\n return mean, summed_variance\n # The mean and the sum of past variances are updated with Welford's\n # algorithm using batches (see https://stackoverflow.com/q/56402955).\n diff_to_old_mean = batch - mean\n if weights is not None:\n expanded_weights = jnp.reshape(\n weights,\n list(weights.shape) + [1] * (batch.ndim - weights.ndim))\n diff_to_old_mean = diff_to_old_mean * expanded_weights\n mean_update = jnp.sum(diff_to_old_mean, axis=batch_axis) / count\n if pmap_axis_name is not None:\n mean_update = jax.lax.psum(\n mean_update, axis_name=pmap_axis_name)\n mean = mean + mean_update\n\n diff_to_new_mean = batch - mean\n variance_update = diff_to_old_mean * diff_to_new_mean\n variance_update = jnp.sum(variance_update, axis=batch_axis)\n if pmap_axis_name is not None:\n variance_update = jax.lax.psum(variance_update, axis_name=pmap_axis_name)\n summed_variance = summed_variance + variance_update\n return mean, summed_variance\n\n updated_stats = tree_utils.fast_map_structure_with_path(\n _compute_node_statistics, state.mean, state.summed_variance, batch)\n # map_structure_up_to is slow, so shortcut if we know the input is not\n # structured.\n if isinstance(state.mean, jnp.ndarray):\n mean, summed_variance = updated_stats\n else:\n # Reshape the updated stats from `nest(mean, summed_variance)` to\n # `nest(mean), nest(summed_variance)`.\n mean, summed_variance = [\n tree.map_structure_up_to(\n state.mean, lambda s, i=idx: s[i], updated_stats)\n for idx in range(2)\n ]\n\n def compute_std(path: Path, summed_variance: jnp.ndarray,\n std: jnp.ndarray) -> jnp.ndarray:\n assert isinstance(summed_variance, jnp.ndarray)\n if not _is_path_included(config, path):\n return std\n # Summed variance can get negative due to rounding errors.\n summed_variance = jnp.maximum(summed_variance, 0)\n std = jnp.sqrt(summed_variance / count)\n std = jnp.clip(std, std_min_value, std_max_value)\n return std\n\n std = tree_utils.fast_map_structure_with_path(compute_std, summed_variance,\n state.std)\n\n return RunningStatisticsState(\n count=count, mean=mean, summed_variance=summed_variance, 
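# [Editor's aside -- not part of the original file] A self-contained NumPy
# rendering of the batched Welford update implemented in
# _compute_node_statistics above, for the unweighted scalar-leaf case. The
# data and the two-chunk split are assumptions for illustration.
import numpy as np

def welford_update_sketch(count, mean, summed_variance, batch):
  count = count + batch.size
  diff_to_old_mean = batch - mean
  mean = mean + diff_to_old_mean.sum() / count
  summed_variance = summed_variance + (diff_to_old_mean * (batch - mean)).sum()
  return count, mean, summed_variance

count_ex, mean_ex, summed_variance_ex = 0, 0.0, 0.0
data_ex = np.arange(10.0)
for chunk_ex in np.split(data_ex, 2):
  count_ex, mean_ex, summed_variance_ex = welford_update_sketch(
      count_ex, mean_ex, summed_variance_ex, chunk_ex)
# Matches data_ex.mean() and data_ex.std():
print(mean_ex, np.sqrt(summed_variance_ex / count_ex))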
std=std)\n\n\ndef normalize(batch: types.NestedArray,\n mean_std: NestedMeanStd,\n max_abs_value: Optional[float] = None) -> types.NestedArray:\n \"\"\"Normalizes data using running statistics.\"\"\"\n\n def normalize_leaf(data: jnp.ndarray, mean: jnp.ndarray,\n std: jnp.ndarray) -> jnp.ndarray:\n # Only normalize inexact types.\n if not jnp.issubdtype(data.dtype, jnp.inexact):\n return data\n data = (data - mean) / std\n if max_abs_value is not None:\n # TODO(b/124318564): remove pylint directive\n data = jnp.clip(data, -max_abs_value, +max_abs_value) # pylint: disable=invalid-unary-operand-type\n return data\n\n return tree_utils.fast_map_structure(normalize_leaf, batch, mean_std.mean,\n mean_std.std)\n\n\ndef denormalize(batch: types.NestedArray,\n mean_std: NestedMeanStd) -> types.NestedArray:\n \"\"\"Denormalizes values in a nested structure using the given mean/std.\n\n Only values of inexact types are denormalized.\n See https://numpy.org/doc/stable/_images/dtype-hierarchy.png for Numpy type\n hierarchy.\n\n Args:\n batch: a nested structure containing batch of data.\n mean_std: mean and standard deviation used for denormalization.\n\n Returns:\n Nested structure with denormalized values.\n \"\"\"\n\n def denormalize_leaf(data: jnp.ndarray, mean: jnp.ndarray,\n std: jnp.ndarray) -> jnp.ndarray:\n # Only denormalize inexact types.\n if not np.issubdtype(data.dtype, np.inexact):\n return data\n return data * std + mean\n\n return tree_utils.fast_map_structure(denormalize_leaf, batch, mean_std.mean,\n mean_std.std)\n
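\n\n# Usage sketch (illustrative only, hence kept as a comment): init_state\n# accepts any nest whose leaves expose .shape, e.g. arrays or specs; the\n# length-2 float vector below is an assumed placeholder, and state doubles\n# as the mean_std argument since it carries both mean and std.\n#\n# state = init_state(jnp.zeros((2,), jnp.float32))\n# for batch in batches: # each batch shaped [batch_size, 2]\n# state = update(state, batch)\n# normalized = normalize(batch, state) # per leaf: (batch - mean) / std\n# restored = denormalize(normalized, state)\n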
\n\[email protected](frozen=True)\nclass NestClippingConfig:\n \"\"\"Specifies how to clip Nests with the same structure.\n\n Attributes:\n path_map: A map that specifies how to clip values in Nests with the same\n structure. Keys correspond to paths in the nest. Values are maximum\n absolute values to use for clipping. If there is a collision between paths\n (one path is a prefix of the other), the behavior is undefined.\n \"\"\"\n path_map: Tuple[Tuple[Path, float], ...] = ()\n\n\ndef get_clip_config_for_path(config: NestClippingConfig,\n path: Path) -> NestClippingConfig:\n \"\"\"Returns the config for a subtree from the leaf defined by the path.\"\"\"\n # Start with an empty config.\n path_map = []\n for map_path, max_abs_value in config.path_map:\n if _is_prefix(map_path, path):\n return NestClippingConfig(path_map=(((), max_abs_value),))\n if _is_prefix(path, map_path):\n path_map.append((map_path[len(path):], max_abs_value))\n return NestClippingConfig(path_map=tuple(path_map))\n\n\ndef clip(batch: types.NestedArray,\n clipping_config: NestClippingConfig) -> types.NestedArray:\n \"\"\"Clips the batch.\"\"\"\n\n def max_abs_value_for_path(path: Path, x: jnp.ndarray) -> Optional[float]:\n del x # Unused, needed by interface.\n return next((max_abs_value\n for clipping_path, max_abs_value in clipping_config.path_map\n if _is_prefix(clipping_path, path)), None)\n\n max_abs_values = tree_utils.fast_map_structure_with_path(\n max_abs_value_for_path, batch)\n\n def clip_leaf(data: jnp.ndarray,\n max_abs_value: Optional[float]) -> jnp.ndarray:\n if max_abs_value is not None:\n # TODO(b/124318564): remove pylint directive\n data = jnp.clip(data, -max_abs_value, +max_abs_value) # pylint: disable=invalid-unary-operand-type\n return data\n\n return tree_utils.fast_map_structure(clip_leaf, batch, max_abs_values)\n\n\[email protected](frozen=True)\nclass NestNormalizationConfig:\n \"\"\"Specifies how to normalize Nests with the same structure.\n\n Attributes:\n stats_config: A config that defines how to compute running statistics to be\n used for normalization.\n clip_config: A config that defines how to clip normalized values.\n \"\"\"\n stats_config: NestStatisticsConfig = NestStatisticsConfig()\n clip_config: NestClippingConfig = NestClippingConfig()\n" }, { "alpha_fraction": 0.6954489350318909, "alphanum_fraction": 0.7043050527572632, "avg_line_length": 30.511627197265625, "blob_id": "b3523fff2b1c4c2c962da4bd984663b2fff268d1", "content_id": "ca8fc83eec57ae344f66486a5ad1f6d7dd3a9d2e", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4065, "license_type": "permissive", "max_line_length": 86, "num_lines": 129, "path": "/examples/baselines/rl_discrete/run_muzero.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Example running MuZero on discrete control tasks.\"\"\"\n\nimport datetime\nimport math\n\nfrom absl import flags\nfrom acme import specs\nfrom acme.agents.jax import muzero\nimport helpers\nfrom absl import app\nfrom acme.jax import experiments\nfrom acme.jax import inference_server as inference_server_lib\nfrom acme.utils import lp_utils\nimport dm_env\nimport launchpad as lp\n\n\nENV_NAME = flags.DEFINE_string('env_name', 'Pong', 'What environment to run')\nSEED = flags.DEFINE_integer('seed', 0, 'Random seed.')\nNUM_STEPS = flags.DEFINE_integer(\n 'num_steps', 2_000_000, 'Number of env steps to run.'\n)\nNUM_LEARNERS = flags.DEFINE_integer('num_learners', 1, 'Number of learners.')\nNUM_ACTORS = flags.DEFINE_integer('num_actors', 4, 'Number of actors.')\nNUM_ACTORS_PER_NODE = flags.DEFINE_integer(\n 'num_actors_per_node',\n 2,\n 'Number of colocated actors',\n)\nRUN_DISTRIBUTED = flags.DEFINE_bool(\n 'run_distributed', True, 'Should an agent be executed in a distributed '\n 'way. If False, will run single-threaded.',)\n\n\ndef build_experiment_config() -> experiments.ExperimentConfig:\n \"\"\"Builds DQN experiment config which can be executed in different ways.\"\"\"\n env_name = ENV_NAME.value\n muzero_config = muzero.MZConfig()\n\n def env_factory(seed: int) -> dm_env.Environment:\n del seed\n return helpers.make_atari_environment(\n level=env_name,\n sticky_actions=True,\n zero_discount_on_life_loss=True,\n num_stacked_frames=1,\n grayscaling=False,\n to_float=False,\n )\n\n def network_factory(\n spec: specs.EnvironmentSpec,\n ) -> muzero.MzNetworks:\n return muzero.make_network(\n spec,\n stack_size=muzero_config.stack_size,\n )\n\n # Construct the builder.\n env_spec = specs.make_environment_spec(env_factory(SEED.value))\n extra_spec = {\n muzero.POLICY_PROBS_KEY: specs.Array(\n shape=(env_spec.actions.num_values,), dtype='float32'\n ),\n muzero.RAW_VALUES_KEY: specs.Array(shape=(), dtype='float32'),\n }\n muzero_builder = muzero.MzBuilder( # pytype: disable=wrong-arg-types # jax-ndarray\n muzero_config,\n extra_spec,\n )\n\n checkpointing_config = experiments.CheckpointingConfig(\n replay_checkpointing_time_delta_minutes=20,\n time_delta_minutes=1,\n )\n return experiments.ExperimentConfig(\n builder=muzero_builder,\n environment_factory=env_factory,\n network_factory=network_factory,\n seed=SEED.value,\n max_num_actor_steps=NUM_STEPS.value,\n checkpointing=checkpointing_config,\n )\n\n\ndef main(_):\n experiment_config = build_experiment_config()\n\n if not RUN_DISTRIBUTED.value:\n raise NotImplementedError('Single threaded experiment not supported.')\n\n inference_server_config = inference_server_lib.InferenceServerConfig(\n batch_size=64,\n update_period=400,\n timeout=datetime.timedelta(\n seconds=1,\n ),\n )\n num_inference_servers = math.ceil(\n NUM_ACTORS.value / (128 * NUM_ACTORS_PER_NODE.value),\n )\n\n program = experiments.make_distributed_experiment(\n experiment=experiment_config,\n 
num_actors=NUM_ACTORS.value,\n num_learner_nodes=NUM_LEARNERS.value,\n num_actors_per_node=NUM_ACTORS_PER_NODE.value,\n num_inference_servers=num_inference_servers,\n inference_server_config=inference_server_config,\n )\n lp.launch(program, xm_resources=lp_utils.make_xm_docker_resources(program,),)\n\n\nif __name__ == '__main__':\n app.run(main)\n" }, { "alpha_fraction": 0.6904168128967285, "alphanum_fraction": 0.7025293707847595, "avg_line_length": 35.45454406738281, "blob_id": "a8538a34ec96b8c15c1228e775141a72fc045b7f", "content_id": "cac6e278c85cd005e8fae5296db494e631b224d3", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2807, "license_type": "permissive", "max_line_length": 80, "num_lines": 77, "path": "/acme/agents/jax/bve/losses.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Behavior Value Estimation loss.\"\"\"\nimport dataclasses\nfrom typing import Tuple\n\nfrom acme import types\nfrom acme.agents.jax import dqn\nfrom acme.jax import networks as networks_lib\nimport jax\nimport jax.numpy as jnp\nimport reverb\nimport rlax\n\n\[email protected]\nclass BVELoss(dqn.LossFn):\n \"\"\"This loss implements TD-loss to estimate behavior value.\n\n This loss function uses the next action to learn with the SARSA tuples.\n It is intended to be used with dqn.SGDLearner. 
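For example, an\n  illustrative wiring (not the full builder setup) is\n  dqn.SGDLearner(..., loss_fn=BVELoss()), fed with transitions that carry\n  extras['next_action']. 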
The method was proposed\n  in \"Regularized Behavior Value Estimation\" by Gulcehre et al. to overcome\n  the extrapolation error in the offline RL setting:\n  https://arxiv.org/abs/2103.09575\n  \"\"\"\n  discount: float = 0.99\n  max_abs_reward: float = 1.\n  huber_loss_parameter: float = 1.\n\n  def __call__(\n      self,\n      network: networks_lib.TypedFeedForwardNetwork,\n      params: networks_lib.Params,\n      target_params: networks_lib.Params,\n      batch: reverb.ReplaySample,\n      key: networks_lib.PRNGKey,\n  ) -> Tuple[jax.Array, dqn.LossExtra]:\n    \"\"\"Calculates a loss on a single batch of data.\"\"\"\n    transitions: types.Transition = batch.data\n\n    # Forward pass.\n    key1, key2 = jax.random.split(key)\n    q_tm1 = network.apply(\n        params, transitions.observation, is_training=True, key=key1)\n    q_t_value = network.apply(\n        target_params, transitions.next_observation, is_training=True, key=key2)\n\n    # Cast and clip rewards.\n    d_t = (transitions.discount * self.discount).astype(jnp.float32)\n    r_t = jnp.clip(transitions.reward, -self.max_abs_reward,\n                   self.max_abs_reward).astype(jnp.float32)\n\n    # Compute the one-step SARSA TD-error.\n    batch_error = jax.vmap(rlax.sarsa)\n    next_action = transitions.extras['next_action']\n    td_error = batch_error(q_tm1, transitions.action, r_t, d_t, q_t_value,\n                           next_action)\n    batch_loss = rlax.huber_loss(td_error, self.huber_loss_parameter)\n\n    # Average:\n    loss = jnp.mean(batch_loss)  # []\n    metrics = {'td_error': td_error, 'batch_loss': batch_loss}\n    return loss, dqn.LossExtra(\n        metrics=metrics,\n        reverb_priorities=jnp.abs(td_error).astype(jnp.float64))\n" }, { "alpha_fraction": 0.6897338628768921, "alphanum_fraction": 0.7030418515205383, "avg_line_length": 33.605262756347656, "blob_id": "a0eee3378e6d26252166ed18dfc95f3e598cc55e", "content_id": "ea737ad86da2c4696cce3e9ad89b14604caecb43", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2630, "license_type": "permissive", "max_line_length": 77, "num_lines": 76, "path": "/acme/agents/jax/ail/rewards.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"AIL logits to AIL reward.\"\"\"\nfrom typing import Optional\n\nfrom acme.agents.jax.ail import networks as ail_networks\nfrom acme.jax import networks as networks_lib\nimport jax\nimport jax.numpy as jnp\n\n\ndef fairl_reward(\n max_reward_magnitude: Optional[float] = None\n) -> ail_networks.ImitationRewardFn:\n \"\"\"The FAIRL reward function (https://arxiv.org/pdf/1911.02256.pdf).\n\n Args:\n max_reward_magnitude: Clipping value for the reward.\n\n Returns:\n The function from logit to imitation reward.\n \"\"\"\n\n def imitation_reward(logits: networks_lib.Logits) -> float:\n rewards = jnp.exp(jnp.clip(logits, a_max=20.)) * -logits\n if max_reward_magnitude is not None:\n # pylint: disable=invalid-unary-operand-type\n rewards = jnp.clip(\n rewards, a_min=-max_reward_magnitude, a_max=max_reward_magnitude)\n return rewards # pytype: disable=bad-return-type # jax-types\n\n return imitation_reward # pytype: disable=bad-return-type # jax-ndarray\n\n\ndef gail_reward(\n reward_balance: float = .5,\n max_reward_magnitude: Optional[float] = None\n) -> ail_networks.ImitationRewardFn:\n \"\"\"GAIL reward function (https://arxiv.org/pdf/1606.03476.pdf).\n\n Args:\n reward_balance: 1 means log(D) reward, 0 means -log(1-D) and other values\n mean an average of the two.\n max_reward_magnitude: Clipping value for the reward.\n\n Returns:\n The function from logit to imitation reward.\n \"\"\"\n\n def imitation_reward(logits: networks_lib.Logits) -> float:\n # Quick Maths:\n # logits = ln(D) - ln(1-D)\n # -softplus(-logits) = ln(D)\n # softplus(logits) = -ln(1-D)\n rewards = (\n reward_balance * -jax.nn.softplus(-logits) +\n (1 - reward_balance) * jax.nn.softplus(logits))\n if max_reward_magnitude is not None:\n # pylint: disable=invalid-unary-operand-type\n rewards = jnp.clip(\n rewards, a_min=-max_reward_magnitude, a_max=max_reward_magnitude)\n return rewards\n\n return imitation_reward # pytype: disable=bad-return-type # jax-ndarray\n" }, { "alpha_fraction": 0.7109023332595825, "alphanum_fraction": 0.715664803981781, "avg_line_length": 36.17224884033203, "blob_id": "43055a6a71ab86f7654f50e2d0e34248f70327af", "content_id": "6e961ce42c5d2b5bd89baa0515c5993b6a72fe68", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7769, "license_type": "permissive", "max_line_length": 116, "num_lines": 209, "path": "/examples/multiagent/multigrid/helpers.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Helpers for multigrid environment.\"\"\"\n\nimport functools\nfrom typing import Any, Dict, NamedTuple, Sequence\n\nfrom acme import specs\nfrom acme.agents.jax import ppo\nfrom acme.agents.jax.multiagent.decentralized import factories\nfrom acme.jax import networks as networks_lib\nfrom acme.jax import utils as acme_jax_utils\nfrom acme.multiagent import types as ma_types\nimport haiku as hk\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nimport tensorflow_probability\n\ntfp = tensorflow_probability.substrates.jax\ntfd = tfp.distributions\n\n\nclass CategoricalParams(NamedTuple):\n \"\"\"Parameters for a categorical distribution.\"\"\"\n logits: jnp.ndarray\n\n\ndef multigrid_obs_preproc(obs: Dict[str, Any],\n conv_filters: int = 8,\n conv_kernel: int = 3,\n scalar_fc: int = 5,\n scalar_name: str = 'direction',\n scalar_dim: int = 4) -> jnp.ndarray:\n \"\"\"Conducts preprocessing on 'multigrid' environment dict observations.\n\n The preprocessing applied here is similar to those in:\n https://github.com/google-research/google-research/blob/master/social_rl/multiagent_tfagents/multigrid_networks.py\n\n Args:\n obs: multigrid observation dict, which can include observation inputs such\n as 'image', 'position', and a custom additional observation (defined by\n scalar_name).\n conv_filters: Number of convolution filters.\n conv_kernel: Size of the convolution kernel.\n scalar_fc: Number of neurons in the fully connected layer processing the\n scalar input.\n scalar_name: a special observation key, which is set to\n `direction` in most multigrid environments (and can be overridden here if\n otherwise).\n scalar_dim: Highest possible value for the scalar input. 
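(Strictly, this is the\n      number of distinct values, i.e. the one-hot depth num_classes, so inputs\n      range from 0 to scalar_dim - 1.) 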
Used to convert to\n one-hot representation.\n\n Returns:\n out: output observation.\n \"\"\"\n\n def _cast_and_scale(x, scale_by=10.0):\n if isinstance(x, jnp.ndarray):\n x = x.astype(jnp.float32)\n return x / scale_by\n\n outputs = []\n\n if 'image' in obs.keys():\n image_preproc = hk.Sequential([\n _cast_and_scale,\n hk.Conv2D(output_channels=conv_filters, kernel_shape=conv_kernel),\n jax.nn.relu,\n hk.Flatten()\n ])\n outputs.append(image_preproc(obs['image']))\n\n if 'position' in obs.keys():\n position_preproc = hk.Sequential([_cast_and_scale, hk.Linear(scalar_fc)])\n outputs.append(position_preproc(obs['position']))\n\n if scalar_name in obs.keys():\n direction_preproc = hk.Sequential([\n functools.partial(jax.nn.one_hot, num_classes=scalar_dim),\n hk.Flatten(),\n hk.Linear(scalar_fc)\n ])\n outputs.append(direction_preproc(obs[scalar_name]))\n\n out = jnp.concatenate(outputs, axis=-1)\n return out\n\n\ndef make_multigrid_dqn_networks(\n environment_spec: specs.EnvironmentSpec) -> networks_lib.FeedForwardNetwork:\n \"\"\"Returns DQN networks used by the agent in the multigrid environment.\"\"\"\n # Check that multigrid environment is defined with discrete actions, 0-indexed\n assert np.issubdtype(environment_spec.actions.dtype, np.integer), (\n 'Expected multigrid environment to have discrete actions with int dtype'\n f' but environment_spec.actions.dtype == {environment_spec.actions.dtype}'\n )\n assert environment_spec.actions.minimum == 0, (\n 'Expected multigrid environment to have 0-indexed action indices, but'\n f' environment_spec.actions.minimum == {environment_spec.actions.minimum}'\n )\n num_actions = environment_spec.actions.maximum + 1\n\n def network(inputs):\n model = hk.Sequential([\n hk.Flatten(),\n hk.nets.MLP([50, 50, num_actions]),\n ])\n processed_inputs = multigrid_obs_preproc(inputs)\n return model(processed_inputs)\n\n network_hk = hk.without_apply_rng(hk.transform(network))\n dummy_obs = acme_jax_utils.add_batch_dim(\n acme_jax_utils.zeros_like(environment_spec.observations))\n\n return networks_lib.FeedForwardNetwork(\n init=lambda rng: network_hk.init(rng, dummy_obs), apply=network_hk.apply)\n\n\ndef make_multigrid_ppo_networks(\n environment_spec: specs.EnvironmentSpec,\n hidden_layer_sizes: Sequence[int] = (64, 64),\n) -> ppo.PPONetworks:\n \"\"\"Returns PPO networks used by the agent in the multigrid environments.\"\"\"\n\n # Check that multigrid environment is defined with discrete actions, 0-indexed\n assert np.issubdtype(environment_spec.actions.dtype, np.integer), (\n 'Expected multigrid environment to have discrete actions with int dtype'\n f' but environment_spec.actions.dtype == {environment_spec.actions.dtype}'\n )\n assert environment_spec.actions.minimum == 0, (\n 'Expected multigrid environment to have 0-indexed action indices, but'\n f' environment_spec.actions.minimum == {environment_spec.actions.minimum}'\n )\n num_actions = environment_spec.actions.maximum + 1\n\n def forward_fn(inputs):\n processed_inputs = multigrid_obs_preproc(inputs)\n trunk = hk.nets.MLP(hidden_layer_sizes, activation=jnp.tanh)\n h = trunk(processed_inputs)\n logits = hk.Linear(num_actions)(h)\n values = hk.Linear(1)(h)\n values = jnp.squeeze(values, axis=-1)\n return (CategoricalParams(logits=logits), values)\n\n # Transform into pure functions.\n forward_fn = hk.without_apply_rng(hk.transform(forward_fn))\n\n dummy_obs = acme_jax_utils.zeros_like(environment_spec.observations)\n dummy_obs = acme_jax_utils.add_batch_dim(dummy_obs) # Dummy 'sequence' dim.\n network = 
networks_lib.FeedForwardNetwork(\n lambda rng: forward_fn.init(rng, dummy_obs), forward_fn.apply)\n return make_categorical_ppo_networks(network) # pylint:disable=undefined-variable\n\n\ndef make_categorical_ppo_networks(\n network: networks_lib.FeedForwardNetwork) -> ppo.PPONetworks:\n \"\"\"Constructs a PPONetworks for Categorical Policy from FeedForwardNetwork.\n\n Args:\n network: a transformed Haiku network (or equivalent in other libraries) that\n takes in observations and returns the action distribution and value.\n\n Returns:\n A PPONetworks instance with pure functions wrapping the input network.\n \"\"\"\n\n def log_prob(params: CategoricalParams, action):\n return tfd.Categorical(logits=params.logits).log_prob(action)\n\n def entropy(params: CategoricalParams, key: networks_lib.PRNGKey):\n del key\n return tfd.Categorical(logits=params.logits).entropy()\n\n def sample(params: CategoricalParams, key: networks_lib.PRNGKey):\n return tfd.Categorical(logits=params.logits).sample(seed=key)\n\n def sample_eval(params: CategoricalParams, key: networks_lib.PRNGKey):\n del key\n return tfd.Categorical(logits=params.logits).mode()\n\n return ppo.PPONetworks(\n network=network,\n log_prob=log_prob,\n entropy=entropy,\n sample=sample,\n sample_eval=sample_eval)\n\n\ndef init_default_multigrid_network(\n agent_type: str,\n agent_spec: specs.EnvironmentSpec) -> ma_types.Networks:\n \"\"\"Returns default networks for multigrid environment.\"\"\"\n if agent_type == factories.DefaultSupportedAgent.PPO:\n return make_multigrid_ppo_networks(agent_spec)\n else:\n raise ValueError(f'Unsupported agent type: {agent_type}.')\n" }, { "alpha_fraction": 0.6802870631217957, "alphanum_fraction": 0.6828336715698242, "avg_line_length": 38.09049606323242, "blob_id": "27b069ba68bc0e34f2c4599a87f03d480b4226c7", "content_id": "af860d9cc0e076a7bc3f98f1b256dcac4ddb3cc2", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8639, "license_type": "permissive", "max_line_length": 128, "num_lines": 221, "path": "/acme/agents/jax/dqn/learning_lib.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"SgdLearner takes steps of SGD on a LossFn.\"\"\"\n\nimport functools\nimport time\nfrom typing import Dict, Iterator, List, NamedTuple, Optional, Tuple\n\nimport acme\nfrom acme.adders import reverb as adders\nfrom acme.jax import networks as networks_lib\nfrom acme.jax import utils\nfrom acme.utils import async_utils\nfrom acme.utils import counting\nfrom acme.utils import loggers\nimport jax\nimport jax.numpy as jnp\nimport optax\nimport reverb\nimport tree\nimport typing_extensions\n\n\n# The pmap axis name. 
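It must match the axis_name\n# passed to jax.pmap and to the jax.lax.pmean/psum collectives below. 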
Data means data parallelization.\nPMAP_AXIS_NAME = 'data'\n\n\nclass ReverbUpdate(NamedTuple):\n \"\"\"Tuple for updating reverb priority information.\"\"\"\n keys: jnp.ndarray\n priorities: jnp.ndarray\n\n\nclass LossExtra(NamedTuple):\n \"\"\"Extra information that is returned along with loss value.\"\"\"\n metrics: Dict[str, jax.Array]\n # New optional updated priorities for the samples.\n reverb_priorities: Optional[jax.Array] = None\n\n\nclass LossFn(typing_extensions.Protocol):\n \"\"\"A LossFn calculates a loss on a single batch of data.\"\"\"\n\n def __call__(\n self,\n network: networks_lib.TypedFeedForwardNetwork,\n params: networks_lib.Params,\n target_params: networks_lib.Params,\n batch: reverb.ReplaySample,\n key: networks_lib.PRNGKey,\n ) -> Tuple[jax.Array, LossExtra]:\n \"\"\"Calculates a loss on a single batch of data.\"\"\"\n\n\nclass TrainingState(NamedTuple):\n \"\"\"Holds the agent's training state.\"\"\"\n params: networks_lib.Params\n target_params: networks_lib.Params\n opt_state: optax.OptState\n steps: int\n rng_key: networks_lib.PRNGKey\n\n\nclass SGDLearner(acme.Learner):\n \"\"\"An Acme learner based around SGD on batches.\n\n This learner currently supports optional prioritized replay and assumes a\n TrainingState as described above.\n \"\"\"\n\n def __init__(self,\n network: networks_lib.TypedFeedForwardNetwork,\n loss_fn: LossFn,\n optimizer: optax.GradientTransformation,\n data_iterator: Iterator[utils.PrefetchingSplit],\n target_update_period: int,\n random_key: networks_lib.PRNGKey,\n replay_client: Optional[reverb.Client] = None,\n replay_table_name: str = adders.DEFAULT_PRIORITY_TABLE,\n counter: Optional[counting.Counter] = None,\n logger: Optional[loggers.Logger] = None,\n num_sgd_steps_per_step: int = 1):\n \"\"\"Initialize the SGD learner.\"\"\"\n self.network = network\n\n # Internalize the loss_fn with network.\n self._loss = jax.jit(functools.partial(loss_fn, self.network))\n\n # SGD performs the loss, optimizer update and periodic target net update.\n def sgd_step(state: TrainingState,\n batch: reverb.ReplaySample) -> Tuple[TrainingState, LossExtra]:\n next_rng_key, rng_key = jax.random.split(state.rng_key)\n # Implements one SGD step of the loss and updates training state\n (loss, extra), grads = jax.value_and_grad(\n self._loss, has_aux=True)(state.params, state.target_params, batch,\n rng_key)\n\n loss = jax.lax.pmean(loss, axis_name=PMAP_AXIS_NAME)\n # Average gradients over pmap replicas before optimizer update.\n grads = jax.lax.pmean(grads, axis_name=PMAP_AXIS_NAME)\n # Apply the optimizer updates\n updates, new_opt_state = optimizer.update(grads, state.opt_state)\n new_params = optax.apply_updates(state.params, updates)\n\n extra.metrics.update({'total_loss': loss})\n\n # Periodically update target networks.\n steps = state.steps + 1\n target_params = optax.periodic_update(new_params, state.target_params, # pytype: disable=wrong-arg-types # numpy-scalars\n steps, target_update_period)\n\n new_training_state = TrainingState(\n new_params, target_params, new_opt_state, steps, next_rng_key)\n return new_training_state, extra\n\n def postprocess_aux(extra: LossExtra) -> LossExtra:\n reverb_priorities = jax.tree_util.tree_map(\n lambda a: jnp.reshape(a, (-1, *a.shape[2:])), extra.reverb_priorities)\n return extra._replace(\n metrics=jax.tree_util.tree_map(jnp.mean, extra.metrics),\n reverb_priorities=reverb_priorities)\n\n self._num_sgd_steps_per_step = num_sgd_steps_per_step\n sgd_step = utils.process_multiple_batches(sgd_step, 
num_sgd_steps_per_step,\n postprocess_aux)\n self._sgd_step = jax.pmap(\n sgd_step, axis_name=PMAP_AXIS_NAME, devices=jax.devices())\n\n # Internalise agent components\n self._data_iterator = data_iterator\n self._target_update_period = target_update_period\n self._counter = counter or counting.Counter()\n self._logger = logger or loggers.TerminalLogger('learner', time_delta=1.)\n\n # Do not record timestamps until after the first learning step is done.\n # This is to avoid including the time it takes for actors to come online and\n # fill the replay buffer.\n self._timestamp = None\n\n # Initialize the network parameters\n key_params, key_target, key_state = jax.random.split(random_key, 3)\n initial_params = self.network.init(key_params)\n initial_target_params = self.network.init(key_target)\n state = TrainingState(\n params=initial_params,\n target_params=initial_target_params,\n opt_state=optimizer.init(initial_params),\n steps=0,\n rng_key=key_state,\n )\n self._state = utils.replicate_in_all_devices(state, jax.local_devices())\n\n # Update replay priorities\n def update_priorities(reverb_update: ReverbUpdate) -> None:\n if replay_client is None:\n return\n keys, priorities = tree.map_structure(\n # Fetch array and combine device and batch dimensions.\n lambda x: utils.fetch_devicearray(x).reshape((-1,) + x.shape[2:]),\n (reverb_update.keys, reverb_update.priorities))\n replay_client.mutate_priorities(\n table=replay_table_name,\n updates=dict(zip(keys, priorities)))\n self._replay_client = replay_client\n self._async_priority_updater = async_utils.AsyncExecutor(update_priorities)\n\n self._current_step = 0\n\n def step(self):\n \"\"\"Takes one SGD step on the learner.\"\"\"\n with jax.profiler.StepTraceAnnotation('step', step_num=self._current_step):\n prefetching_split = next(self._data_iterator)\n # In this case the host property of the prefetching split contains only\n # replay keys and the device property is the prefetched full original\n # sample. 
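These keys identify the sampled\n      # items in the replay table and are used below only for priority updates. 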
Key is on host since it's uint64 type.\n reverb_keys = prefetching_split.host\n batch: reverb.ReplaySample = prefetching_split.device\n\n self._state, extra = self._sgd_step(self._state, batch)\n # Compute elapsed time.\n timestamp = time.time()\n elapsed = timestamp - self._timestamp if self._timestamp else 0\n self._timestamp = timestamp\n\n if self._replay_client and extra.reverb_priorities is not None:\n reverb_update = ReverbUpdate(reverb_keys, extra.reverb_priorities)\n self._async_priority_updater.put(reverb_update)\n\n steps_per_sec = (self._num_sgd_steps_per_step / elapsed) if elapsed else 0\n self._current_step, metrics = utils.get_from_first_device(\n (self._state.steps, extra.metrics))\n metrics['steps_per_second'] = steps_per_sec\n\n # Update our counts and record it.\n result = self._counter.increment(\n steps=self._num_sgd_steps_per_step, walltime=elapsed)\n result.update(metrics)\n self._logger.write(result)\n\n def get_variables(self, names: List[str]) -> List[networks_lib.Params]:\n # Return first replica of parameters.\n return utils.get_from_first_device([self._state.params])\n\n def save(self) -> TrainingState:\n # Serialize only the first replica of parameters and optimizer state.\n return utils.get_from_first_device(self._state)\n\n def restore(self, state: TrainingState):\n self._state = utils.replicate_in_all_devices(state, jax.local_devices())\n" }, { "alpha_fraction": 0.7122235894203186, "alphanum_fraction": 0.7192874550819397, "avg_line_length": 32.224491119384766, "blob_id": "62ebd16d9c0403a4a3db9135dfe5d21f5f71158f", "content_id": "dc21b23a0eb91fad31cfbaa62f56a42b0f019143", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3256, "license_type": "permissive", "max_line_length": 77, "num_lines": 98, "path": "/examples/offline/run_bc_jax.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"An example BC running on BSuite.\"\"\"\n\nfrom absl import app\nfrom absl import flags\nimport acme\nfrom acme import specs\nfrom acme.agents.jax import actor_core as actor_core_lib\nfrom acme.agents.jax import actors\nfrom acme.agents.jax import bc\nfrom acme.examples.offline import bc_utils\nfrom acme.jax import utils\nfrom acme.jax import variable_utils\nfrom acme.utils import loggers\nimport haiku as hk\nimport jax\nimport optax\nimport rlax\n\n# Agent flags\nflags.DEFINE_float('learning_rate', 1e-3, 'Learning rate.')\nflags.DEFINE_integer('batch_size', 64, 'Batch size.')\nflags.DEFINE_float('evaluation_epsilon', 0.,\n 'Epsilon for the epsilon greedy in the evaluation agent.')\nflags.DEFINE_integer('evaluate_every', 20, 'Evaluation period.')\nflags.DEFINE_integer('evaluation_episodes', 10, 'Evaluation episodes.')\nflags.DEFINE_integer('seed', 0, 'Random seed for learner and evaluator.')\n\nFLAGS = flags.FLAGS\n\n\ndef main(_):\n # Create an environment and grab the spec.\n environment = bc_utils.make_environment()\n environment_spec = specs.make_environment_spec(environment)\n\n # Unwrap the environment to get the demonstrations.\n dataset = bc_utils.make_demonstrations(environment.environment,\n FLAGS.batch_size)\n dataset = dataset.as_numpy_iterator()\n\n # Create the networks to optimize.\n bc_networks = bc_utils.make_network(environment_spec)\n\n key = jax.random.PRNGKey(FLAGS.seed)\n key, key1 = jax.random.split(key, 2)\n\n loss_fn = bc.logp()\n\n learner = bc.BCLearner(\n networks=bc_networks,\n random_key=key1,\n loss_fn=loss_fn,\n optimizer=optax.adam(FLAGS.learning_rate),\n prefetching_iterator=utils.sharded_prefetch(dataset),\n num_sgd_steps_per_step=1)\n\n def evaluator_network(\n params: hk.Params, key: jax.Array, observation: jax.Array\n ) -> jax.Array:\n dist_params = bc_networks.policy_network.apply(params, observation)\n return rlax.epsilon_greedy(FLAGS.evaluation_epsilon).sample(\n key, dist_params)\n\n actor_core = actor_core_lib.batched_feed_forward_to_actor_core(\n evaluator_network)\n variable_client = variable_utils.VariableClient(\n learner, 'policy', device='cpu')\n evaluator = actors.GenericActor(\n actor_core, key, variable_client, backend='cpu')\n\n eval_loop = acme.EnvironmentLoop(\n environment=environment,\n actor=evaluator,\n logger=loggers.TerminalLogger('evaluation', time_delta=0.))\n\n # Run the environment loop.\n while True:\n for _ in range(FLAGS.evaluate_every):\n learner.step()\n eval_loop.run(FLAGS.evaluation_episodes)\n\n\nif __name__ == '__main__':\n app.run(main)\n" }, { "alpha_fraction": 0.714950680732727, "alphanum_fraction": 0.7197914719581604, "avg_line_length": 32.77987289428711, "blob_id": "2bf6d1dbfee2108d32945277662e03e0304ffa22", "content_id": "c6825f518448fba3851cd5714c177aa43c3ac7c0", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5371, "license_type": "permissive", "max_line_length": 
94, "num_lines": 159, "path": "/acme/jax/networks/base.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Base interfaces for networks.\"\"\"\n\nimport dataclasses\nfrom typing import Callable, Optional, Tuple\n\nfrom acme import specs\nfrom acme import types\nfrom acme.jax import types as jax_types\nfrom acme.jax import utils as jax_utils\nimport haiku as hk\nimport jax.numpy as jnp\nfrom typing_extensions import Protocol\n\n# This definition is deprecated. Use jax_types.PRNGKey directly instead.\n# TODO(sinopalnikov): migrate all users and remove this definition.\nPRNGKey = jax_types.PRNGKey\n\n# Commonly-used types.\nBatchSize = int\nObservation = types.NestedArray\nAction = types.NestedArray\nParams = types.NestedArray\nNetworkOutput = types.NestedArray\nQValues = jnp.ndarray\nLogits = jnp.ndarray\nLogProb = jnp.ndarray\nValue = jnp.ndarray\nRecurrentState = types.NestedArray\nEntropy = jnp.ndarray\n\n# Commonly-used function/network signatures.\nQNetwork = Callable[[Observation], QValues]\nLSTMOutputs = Tuple[Tuple[Logits, Value], hk.LSTMState]\nPolicyValueRNN = Callable[[Observation, hk.LSTMState], LSTMOutputs]\nRecurrentQNetwork = Callable[[Observation, hk.LSTMState],\n Tuple[QValues, hk.LSTMState]]\nSampleFn = Callable[[NetworkOutput, PRNGKey], Action]\nLogProbFn = Callable[[NetworkOutput, Action], LogProb]\n\n\[email protected]\nclass FeedForwardNetwork:\n \"\"\"Holds a pair of pure functions defining a feed-forward network.\n\n Attributes:\n init: A pure function: ``params = init(rng, *a, **k)``\n apply: A pure function: ``out = apply(params, rng, *a, **k)``\n \"\"\"\n # Initializes and returns the networks parameters.\n init: Callable[..., Params]\n # Computes and returns the outputs of a forward pass.\n apply: Callable[..., NetworkOutput]\n\n\nclass ApplyFn(Protocol):\n\n def __call__(self,\n params: Params,\n observation: Observation,\n *args,\n is_training: bool,\n key: Optional[PRNGKey] = None,\n **kwargs) -> NetworkOutput:\n ...\n\n\[email protected]\nclass TypedFeedForwardNetwork:\n \"\"\"FeedForwardNetwork with more specific types of the member functions.\n\n Attributes:\n init: A pure function. Initializes and returns the networks parameters.\n apply: A pure function. 
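Its signature is given by\n      ApplyFn above, taking an explicit is_training flag and an optional PRNG\n      key. 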
Computes and returns the outputs of a forward pass.\n \"\"\"\n init: Callable[[PRNGKey], Params]\n apply: ApplyFn\n\n\ndef non_stochastic_network_to_typed(\n network: FeedForwardNetwork) -> TypedFeedForwardNetwork:\n \"\"\"Converts non-stochastic FeedForwardNetwork to TypedFeedForwardNetwork.\n\n Non-stochastic network is the one that doesn't take a random key as an input\n for its `apply` method.\n\n Arguments:\n network: non-stochastic feed-forward network.\n\n Returns:\n corresponding TypedFeedForwardNetwork\n \"\"\"\n\n def apply(params: Params,\n observation: Observation,\n *args,\n is_training: bool,\n key: Optional[PRNGKey] = None,\n **kwargs) -> NetworkOutput:\n del is_training, key\n return network.apply(params, observation, *args, **kwargs)\n\n return TypedFeedForwardNetwork(init=network.init, apply=apply)\n\n\[email protected]\nclass UnrollableNetwork:\n \"\"\"Network that can unroll over an input sequence.\"\"\"\n init: Callable[[PRNGKey], Params]\n apply: Callable[[Params, PRNGKey, Observation, RecurrentState],\n Tuple[NetworkOutput, RecurrentState]]\n unroll: Callable[[Params, PRNGKey, Observation, RecurrentState],\n Tuple[NetworkOutput, RecurrentState]]\n init_recurrent_state: Callable[[PRNGKey, Optional[BatchSize]], RecurrentState]\n # TODO(b/244311990): Consider supporting parameterized and learnable initial\n # state functions.\n\n\ndef make_unrollable_network(\n environment_spec: specs.EnvironmentSpec,\n make_core_module: Callable[[], hk.RNNCore]) -> UnrollableNetwork:\n \"\"\"Builds an UnrollableNetwork from a hk.Module factory.\"\"\"\n\n dummy_observation = jax_utils.zeros_like(environment_spec.observations)\n\n def make_unrollable_network_functions():\n model = make_core_module()\n apply = model.__call__\n\n def init() -> Tuple[NetworkOutput, RecurrentState]:\n return model(dummy_observation, model.initial_state(None))\n\n return init, (apply, model.unroll, model.initial_state) # pytype: disable=attribute-error\n\n # Transform and unpack pure functions\n f = hk.multi_transform(make_unrollable_network_functions)\n apply, unroll, initial_state_fn = f.apply\n\n def init_recurrent_state(key: jax_types.PRNGKey,\n batch_size: Optional[int]) -> RecurrentState:\n # TODO(b/244311990): Consider supporting parameterized and learnable initial\n # state functions.\n no_params = None\n return initial_state_fn(no_params, key, batch_size)\n\n return UnrollableNetwork(f.init, apply, unroll, init_recurrent_state)\n" }, { "alpha_fraction": 0.7152312994003296, "alphanum_fraction": 0.7192177772521973, "avg_line_length": 33.71279525756836, "blob_id": "7c938544925b5ac0b9a52b532be532f1b2197d25", "content_id": "99b009a26949a3ec2800c9a13d7ff32c82d910ca", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13295, "license_type": "permissive", "max_line_length": 84, "num_lines": 383, "path": "/acme/agents/jax/ppo/networks.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#     http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"PPO network definitions.\"\"\"\n\nimport dataclasses\nfrom typing import Callable, NamedTuple, Optional, Sequence\n\nfrom acme import specs\nfrom acme.agents.jax import actor_core as actor_core_lib\nfrom acme.jax import networks as networks_lib\nfrom acme.jax import utils\nimport haiku as hk\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nimport tensorflow_probability\n\ntfp = tensorflow_probability.substrates.jax\ntfd = tfp.distributions\n\nEntropyFn = Callable[\n    [networks_lib.Params, networks_lib.PRNGKey], networks_lib.Entropy\n]\n\n\nclass MVNDiagParams(NamedTuple):\n  \"\"\"Parameters for a diagonal multi-variate normal distribution.\"\"\"\n  loc: jnp.ndarray\n  scale_diag: jnp.ndarray\n\n\nclass TanhNormalParams(NamedTuple):\n  \"\"\"Parameters for a tanh squashed diagonal MVN distribution.\"\"\"\n  loc: jnp.ndarray\n  scale: jnp.ndarray\n\n\nclass CategoricalParams(NamedTuple):\n  \"\"\"Parameters for a categorical distribution.\"\"\"\n  logits: jnp.ndarray\n\n\nclass PPOParams(NamedTuple):\n  model_params: networks_lib.Params\n  # Using float32 as it covers a larger range than int32. If using int64 we\n  # would need to do jax_enable_x64.\n  num_sgd_steps: jnp.float32\n\n\[email protected]\nclass PPONetworks:\n  \"\"\"Network and pure functions for the PPO agent.\n\n  If 'network' returns a tfd.Distribution, you can use make_ppo_networks() to\n  create this object properly.\n  If one is building this object manually, one is free to make the 'network'\n  object return anything that is later passed as input to the\n  log_prob/entropy/sample functions to perform the corresponding computations.\n  An example scenario where you would want to do this is when\n  tfd.Distribution does not play nicely with jax.vmap. 
Please refer to the\n make_continuous_networks() for an example where the network does not return a\n tfd.Distribution object.\n \"\"\"\n network: networks_lib.FeedForwardNetwork\n log_prob: networks_lib.LogProbFn\n entropy: EntropyFn\n sample: networks_lib.SampleFn\n sample_eval: Optional[networks_lib.SampleFn] = None\n\n\ndef make_inference_fn(\n ppo_networks: PPONetworks,\n evaluation: bool = False) -> actor_core_lib.FeedForwardPolicyWithExtra:\n \"\"\"Returns a function to be used for inference by a PPO actor.\"\"\"\n\n def inference(\n params: networks_lib.Params,\n key: networks_lib.PRNGKey,\n observations: networks_lib.Observation,\n ):\n dist_params, _ = ppo_networks.network.apply(params.model_params,\n observations)\n if evaluation and ppo_networks.sample_eval:\n actions = ppo_networks.sample_eval(dist_params, key)\n else:\n actions = ppo_networks.sample(dist_params, key)\n if evaluation:\n return actions, {}\n log_prob = ppo_networks.log_prob(dist_params, actions)\n extras = {\n 'log_prob': log_prob,\n # Add batch dimension.\n 'params_num_sgd_steps': params.num_sgd_steps[None, ...]\n }\n return actions, extras\n\n return inference\n\n\ndef make_networks(\n spec: specs.EnvironmentSpec, hidden_layer_sizes: Sequence[int] = (256, 256)\n) -> PPONetworks:\n if isinstance(spec.actions, specs.DiscreteArray):\n return make_discrete_networks(spec, hidden_layer_sizes)\n else:\n return make_continuous_networks(\n spec,\n policy_layer_sizes=hidden_layer_sizes,\n value_layer_sizes=hidden_layer_sizes)\n\n\ndef make_ppo_networks(network: networks_lib.FeedForwardNetwork) -> PPONetworks:\n \"\"\"Constructs a PPONetworks instance from the given FeedForwardNetwork.\n\n This method assumes that the network returns a tfd.Distribution. Sometimes it\n may be preferable to have networks that do not return tfd.Distribution\n objects, for example, due to tfd.Distribution not playing nice with jax.vmap.\n Please refer to the make_continuous_networks() for an example where the\n network does not return a tfd.Distribution object.\n\n Args:\n network: a transformed Haiku network that takes in observations and returns\n the action distribution and value.\n\n Returns:\n A PPONetworks instance with pure functions wrapping the input network.\n \"\"\"\n return PPONetworks(\n network=network,\n log_prob=lambda distribution, action: distribution.log_prob(action),\n entropy=lambda distribution, key=None: distribution.entropy(),\n sample=lambda distribution, key: distribution.sample(seed=key),\n sample_eval=lambda distribution, key: distribution.mode())\n\n\ndef make_mvn_diag_ppo_networks(\n network: networks_lib.FeedForwardNetwork) -> PPONetworks:\n \"\"\"Constructs a PPONetworks for MVN Diag policy from the FeedForwardNetwork.\n\n Args:\n network: a transformed Haiku network (or equivalent in other libraries) that\n takes in observations and returns the action distribution and value.\n\n Returns:\n A PPONetworks instance with pure functions wrapping the input network.\n \"\"\"\n\n def log_prob(params: MVNDiagParams, action):\n return tfd.MultivariateNormalDiag(\n loc=params.loc, scale_diag=params.scale_diag).log_prob(action)\n\n def entropy(\n params: MVNDiagParams, key: networks_lib.PRNGKey\n ) -> networks_lib.Entropy:\n del key\n return tfd.MultivariateNormalDiag(\n loc=params.loc, scale_diag=params.scale_diag).entropy()\n\n def sample(params: MVNDiagParams, key: networks_lib.PRNGKey):\n return tfd.MultivariateNormalDiag(\n loc=params.loc, scale_diag=params.scale_diag).sample(seed=key)\n\n def sample_eval(params: 
MVNDiagParams, key: networks_lib.PRNGKey):\n del key\n return tfd.MultivariateNormalDiag(\n loc=params.loc, scale_diag=params.scale_diag).mode()\n\n return PPONetworks(\n network=network,\n log_prob=log_prob,\n entropy=entropy,\n sample=sample,\n sample_eval=sample_eval)\n\n\ndef make_tanh_normal_ppo_networks(\n network: networks_lib.FeedForwardNetwork) -> PPONetworks:\n \"\"\"Constructs a PPONetworks for Tanh MVN Diag policy from the FeedForwardNetwork.\n\n Args:\n network: a transformed Haiku network (or equivalent in other libraries) that\n takes in observations and returns the action distribution and value.\n\n Returns:\n A PPONetworks instance with pure functions wrapping the input network.\n \"\"\"\n\n def build_distribution(params: TanhNormalParams):\n distribution = tfd.Normal(loc=params.loc, scale=params.scale)\n distribution = tfd.Independent(\n networks_lib.TanhTransformedDistribution(distribution),\n reinterpreted_batch_ndims=1)\n return distribution\n\n def log_prob(params: TanhNormalParams, action):\n distribution = build_distribution(params)\n return distribution.log_prob(action)\n\n def entropy(\n params: TanhNormalParams, key: networks_lib.PRNGKey\n ) -> networks_lib.Entropy:\n distribution = build_distribution(params)\n return distribution.entropy(seed=key)\n\n def sample(params: TanhNormalParams, key: networks_lib.PRNGKey):\n distribution = build_distribution(params)\n return distribution.sample(seed=key)\n\n def sample_eval(params: TanhNormalParams, key: networks_lib.PRNGKey):\n del key\n distribution = build_distribution(params)\n return distribution.mode()\n\n return PPONetworks(\n network=network,\n log_prob=log_prob,\n entropy=entropy,\n sample=sample,\n sample_eval=sample_eval)\n\n\ndef make_discrete_networks(\n environment_spec: specs.EnvironmentSpec,\n hidden_layer_sizes: Sequence[int] = (512,),\n use_conv: bool = True,\n) -> PPONetworks:\n \"\"\"Creates networks used by the agent for discrete action environments.\n\n Args:\n environment_spec: Environment spec used to define number of actions.\n hidden_layer_sizes: Network definition.\n use_conv: Whether to use a conv or MLP feature extractor.\n Returns:\n PPONetworks\n \"\"\"\n\n num_actions = environment_spec.actions.num_values\n\n def forward_fn(inputs):\n layers = []\n if use_conv:\n layers.extend([networks_lib.AtariTorso()])\n layers.extend([hk.nets.MLP(hidden_layer_sizes, activate_final=True)])\n trunk = hk.Sequential(layers)\n h = utils.batch_concat(inputs)\n h = trunk(h)\n logits = hk.Linear(num_actions)(h)\n values = hk.Linear(1)(h)\n values = jnp.squeeze(values, axis=-1)\n return (CategoricalParams(logits=logits), values)\n\n forward_fn = hk.without_apply_rng(hk.transform(forward_fn))\n dummy_obs = utils.zeros_like(environment_spec.observations)\n dummy_obs = utils.add_batch_dim(dummy_obs) # Dummy 'sequence' dim.\n network = networks_lib.FeedForwardNetwork(\n lambda rng: forward_fn.init(rng, dummy_obs), forward_fn.apply)\n # Create PPONetworks to add functionality required by the agent.\n return make_categorical_ppo_networks(network) # pylint:disable=undefined-variable\n\n\ndef make_categorical_ppo_networks(\n network: networks_lib.FeedForwardNetwork) -> PPONetworks:\n \"\"\"Constructs a PPONetworks for Categorical Policy from FeedForwardNetwork.\n\n Args:\n network: a transformed Haiku network (or equivalent in other libraries) that\n takes in observations and returns the action distribution and value.\n\n Returns:\n A PPONetworks instance with pure functions wrapping the input network.\n \"\"\"\n\n 
def log_prob(params: CategoricalParams, action):\n return tfd.Categorical(logits=params.logits).log_prob(action)\n\n def entropy(\n params: CategoricalParams, key: networks_lib.PRNGKey\n ) -> networks_lib.Entropy:\n del key\n return tfd.Categorical(logits=params.logits).entropy()\n\n def sample(params: CategoricalParams, key: networks_lib.PRNGKey):\n return tfd.Categorical(logits=params.logits).sample(seed=key)\n\n def sample_eval(params: CategoricalParams, key: networks_lib.PRNGKey):\n del key\n return tfd.Categorical(logits=params.logits).mode()\n\n return PPONetworks(\n network=network,\n log_prob=log_prob,\n entropy=entropy,\n sample=sample,\n sample_eval=sample_eval)\n\n\ndef make_continuous_networks(\n environment_spec: specs.EnvironmentSpec,\n policy_layer_sizes: Sequence[int] = (64, 64),\n value_layer_sizes: Sequence[int] = (64, 64),\n use_tanh_gaussian_policy: bool = True,\n) -> PPONetworks:\n \"\"\"Creates PPONetworks to be used for continuous action environments.\"\"\"\n\n # Get total number of action dimensions from action spec.\n num_dimensions = np.prod(environment_spec.actions.shape, dtype=int)\n\n def forward_fn(inputs: networks_lib.Observation):\n\n def _policy_network(obs: networks_lib.Observation):\n h = utils.batch_concat(obs)\n h = hk.nets.MLP(policy_layer_sizes, activate_final=True)(h)\n\n # tfd distributions have a weird bug in jax when vmapping is used, so the\n # safer implementation in general is for the policy network to output the\n # distribution parameters, and for the distribution to be constructed\n # in a method such as make_ppo_networks above\n if not use_tanh_gaussian_policy:\n # Following networks_lib.MultivariateNormalDiagHead\n init_scale = 0.3\n min_scale = 1e-6\n w_init = hk.initializers.VarianceScaling(1e-4)\n b_init = hk.initializers.Constant(0.)\n loc_layer = hk.Linear(num_dimensions, w_init=w_init, b_init=b_init)\n scale_layer = hk.Linear(num_dimensions, w_init=w_init, b_init=b_init)\n\n loc = loc_layer(h)\n scale = jax.nn.softplus(scale_layer(h))\n scale *= init_scale / jax.nn.softplus(0.)\n scale += min_scale\n\n return MVNDiagParams(loc=loc, scale_diag=scale)\n\n # Following networks_lib.NormalTanhDistribution\n min_scale = 1e-3\n w_init = hk.initializers.VarianceScaling(1.0, 'fan_in', 'uniform')\n b_init = hk.initializers.Constant(0.)\n loc_layer = hk.Linear(num_dimensions, w_init=w_init, b_init=b_init)\n scale_layer = hk.Linear(num_dimensions, w_init=w_init, b_init=b_init)\n\n loc = loc_layer(h)\n scale = scale_layer(h)\n scale = jax.nn.softplus(scale) + min_scale\n\n return TanhNormalParams(loc=loc, scale=scale)\n\n value_network = hk.Sequential([\n utils.batch_concat,\n hk.nets.MLP(value_layer_sizes, activate_final=True),\n hk.Linear(1),\n lambda x: jnp.squeeze(x, axis=-1)\n ])\n\n policy_output = _policy_network(inputs)\n value = value_network(inputs)\n return (policy_output, value)\n\n # Transform into pure functions.\n forward_fn = hk.without_apply_rng(hk.transform(forward_fn))\n\n dummy_obs = utils.zeros_like(environment_spec.observations)\n dummy_obs = utils.add_batch_dim(dummy_obs) # Dummy 'sequence' dim.\n network = networks_lib.FeedForwardNetwork(\n lambda rng: forward_fn.init(rng, dummy_obs), forward_fn.apply)\n\n # Create PPONetworks to add functionality required by the agent.\n\n if not use_tanh_gaussian_policy:\n return make_mvn_diag_ppo_networks(network)\n\n return make_tanh_normal_ppo_networks(network)\n" }, { "alpha_fraction": 0.6912938356399536, "alphanum_fraction": 0.6944496035575867, "avg_line_length": 33.98051834106445, 
"blob_id": "9a2f16c19dee5623c08ec9ee250ccc378cc7caee", "content_id": "513418499152a7f666ef3b45baf2060e2e3d8524", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5387, "license_type": "permissive", "max_line_length": 83, "num_lines": 154, "path": "/acme/jax/variable_utils.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Variable utilities for JAX.\"\"\"\n\nfrom concurrent import futures\nimport datetime\nimport time\nfrom typing import List, NamedTuple, Optional, Sequence, Union\n\nfrom acme import core\nfrom acme.jax import networks as network_types\nimport jax\n\n\nclass VariableReference(NamedTuple):\n variable_name: str\n\n\nclass ReferenceVariableSource(core.VariableSource):\n \"\"\"Variable source which returns references instead of values.\n\n This is passed to each actor when using a centralized inference server. The\n actor uses this special variable source to get references rather than values.\n These references are then passed to calls to the inference server, which will\n dereference them to obtain the value of the corresponding variables at\n inference time. This avoids passing around copies of variables from each\n actor to the inference server.\n \"\"\"\n\n def get_variables(self, names: Sequence[str]) -> List[VariableReference]:\n return [VariableReference(name) for name in names]\n\n\nclass VariableClient:\n \"\"\"A variable client for updating variables from a remote source.\"\"\"\n\n def __init__(\n self,\n client: core.VariableSource,\n key: Union[str, Sequence[str]],\n update_period: Union[int, datetime.timedelta] = 1,\n device: Optional[Union[str, jax.Device]] = None,\n ):\n \"\"\"Initializes the variable client.\n\n Args:\n client: A variable source from which we fetch variables.\n key: Which variables to request. When multiple keys are used, params\n property will return a list of params.\n update_period: Interval between fetches, specified as either (int) a\n number of calls to update() between actual fetches or (timedelta) a time\n interval that has to pass since the last fetch.\n device: The name of a JAX device to put variables on. 
If None (default),\n VariableClient won't put params on any device.\n \"\"\"\n self._update_period = update_period\n self._call_counter = 0\n self._last_call = time.time()\n self._client = client\n self._params: Sequence[network_types.Params] = None\n\n self._device = device\n if isinstance(self._device, str):\n self._device = jax.devices(device)[0]\n\n self._executor = futures.ThreadPoolExecutor(max_workers=1)\n\n if isinstance(key, str):\n key = [key]\n\n self._key = key\n self._request = lambda k=key: client.get_variables(k)\n self._future: Optional[futures.Future] = None # pylint: disable=g-bare-generic\n self._async_request = lambda: self._executor.submit(self._request)\n\n def update(self, wait: bool = False) -> None:\n \"\"\"Periodically updates the variables with the latest copy from the source.\n\n If wait is True, a blocking request is executed. Any active request will be\n cancelled.\n If wait is False, this method makes an asynchronous request for variables.\n\n Args:\n wait: Whether to execute asynchronous (False) or blocking updates (True).\n Defaults to False.\n \"\"\"\n # Track calls (we only update periodically).\n self._call_counter += 1\n\n # Return if it's not time to fetch another update.\n if isinstance(self._update_period, datetime.timedelta):\n if self._update_period.total_seconds() + self._last_call > time.time():\n return\n else:\n if self._call_counter < self._update_period:\n return\n\n if wait:\n if self._future is not None:\n if self._future.running():\n self._future.cancel()\n self._future = None\n self._call_counter = 0\n self._last_call = time.time()\n self.update_and_wait()\n return\n\n # Return early if we are still waiting for a previous request to come back.\n if self._future and not self._future.done():\n return\n\n # Get a future and add the copy function as a callback.\n self._call_counter = 0\n self._last_call = time.time()\n self._future = self._async_request()\n self._future.add_done_callback(lambda f: self._callback(f.result()))\n\n def update_and_wait(self):\n \"\"\"Immediately update and block until we get the result.\"\"\"\n self._callback(self._request())\n\n def _callback(self, params_list: List[network_types.Params]):\n if self._device and not isinstance(self._client, ReferenceVariableSource):\n # Move variables to a proper device.\n self._params = jax.device_put(params_list, self._device)\n else:\n self._params = params_list\n\n @property\n def device(self) -> Optional[jax.Device]:\n return self._device\n\n @property\n def params(self) -> Union[network_types.Params, List[network_types.Params]]:\n \"\"\"Returns the first params for one key, otherwise the whole params list.\"\"\"\n if self._params is None:\n self.update_and_wait()\n\n if len(self._params) == 1:\n return self._params[0]\n else:\n return self._params\n" }, { "alpha_fraction": 0.6428245306015015, "alphanum_fraction": 0.6485283970832825, "avg_line_length": 33.2421875, "blob_id": "c35cdfb723ff085e6c72ffb454d2c48f76e17ae5", "content_id": "4a35a2f63a0bdd765d8dd456cbf3d86319120f0b", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8766, "license_type": "permissive", "max_line_length": 79, "num_lines": 256, "path": "/acme/wrappers/video.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Environment wrappers which record videos.\n\nThe code used to generate animations in this wrapper is based on that used in\nthe `dm_control/tutorial.ipynb` file.\n\"\"\"\n\nimport os.path\nimport tempfile\nfrom typing import Callable, Optional, Sequence, Tuple, Union\n\nfrom acme.utils import paths\nfrom acme.wrappers import base\nimport dm_env\n\nimport matplotlib\nmatplotlib.use('Agg') # Switch to headless 'Agg' to inhibit figure rendering.\nimport matplotlib.animation as anim # pylint: disable=g-import-not-at-top\nimport matplotlib.pyplot as plt\nimport numpy as np\n\n# Internal imports.\n# Make sure you have FFMpeg configured.\n\ndef make_animation(\n frames: Sequence[np.ndarray], frame_rate: float,\n figsize: Optional[Union[float, Tuple[int, int]]]) -> anim.Animation:\n \"\"\"Generates a matplotlib animation from a stack of frames.\"\"\"\n\n # Set animation characteristics.\n if figsize is None:\n height, width, _ = frames[0].shape\n elif isinstance(figsize, tuple):\n height, width = figsize\n else:\n diagonal = figsize\n height, width, _ = frames[0].shape\n scale_factor = diagonal / np.sqrt(height**2 + width**2)\n width *= scale_factor\n height *= scale_factor\n\n dpi = 70\n interval = int(round(1e3 / frame_rate)) # Time (in ms) between frames.\n\n # Create and configure the figure.\n fig, ax = plt.subplots(1, 1, figsize=(width / dpi, height / dpi), dpi=dpi)\n ax.set_axis_off()\n ax.set_aspect('equal')\n ax.set_position([0, 0, 1, 1])\n\n # Initialize the first frame.\n im = ax.imshow(frames[0])\n\n # Create the function that will modify the frame, creating an animation.\n def update(frame):\n im.set_data(frame)\n return [im]\n\n return anim.FuncAnimation(\n fig=fig,\n func=update,\n frames=frames,\n interval=interval,\n blit=True,\n repeat=False)\n\n\nclass VideoWrapper(base.EnvironmentWrapper):\n \"\"\"Wrapper which creates and records videos from generated observations.\n\n This will limit itself to recording once every `record_every` episodes and\n videos will be recorded to the directory `path` + '/<unique id>/videos' where\n `path` defaults to '~/acme'. 
Users can specify the size of the screen by\n passing either a tuple giving height and width or a float giving the size\n of the diagonal.\n \"\"\"\n\n def __init__(\n self,\n environment: dm_env.Environment,\n *,\n path: str = '~/acme',\n filename: str = '',\n process_path: Callable[[str, str], str] = paths.process_path,\n record_every: int = 100,\n frame_rate: int = 30,\n figsize: Optional[Union[float, Tuple[int, int]]] = None,\n to_html: bool = True,\n ):\n super(VideoWrapper, self).__init__(environment)\n self._path = process_path(path, 'videos')\n self._filename = filename\n self._record_every = record_every\n self._frame_rate = frame_rate\n self._frames = []\n self._counter = 0\n self._figsize = figsize\n self._to_html = to_html\n\n def _render_frame(self, observation):\n \"\"\"Renders a frame from the given environment observation.\"\"\"\n return observation\n\n def _write_frames(self):\n \"\"\"Writes frames to video.\"\"\"\n if self._counter % self._record_every == 0:\n animation = make_animation(self._frames, self._frame_rate, self._figsize)\n path_without_extension = os.path.join(\n self._path, f'{self._filename}_{self._counter:04d}'\n )\n if self._to_html:\n path = path_without_extension + '.html'\n video = animation.to_html5_video()\n with open(path, 'w') as f:\n f.write(video)\n else:\n path = path_without_extension + '.m4v'\n # Animation.save can save only locally. Save first and copy using\n # gfile.\n with tempfile.TemporaryDirectory() as tmp_dir:\n tmp_path = os.path.join(tmp_dir, 'temp.m4v')\n animation.save(tmp_path)\n with open(path, 'wb') as f:\n with open(tmp_path, 'rb') as g:\n f.write(g.read())\n\n # Clear the frame buffer whether a video was generated or not.\n self._frames = []\n\n def _append_frame(self, observation):\n \"\"\"Appends a frame to the sequence of frames.\"\"\"\n if self._counter % self._record_every == 0:\n self._frames.append(self._render_frame(observation))\n\n def step(self, action) -> dm_env.TimeStep:\n timestep = self.environment.step(action)\n self._append_frame(timestep.observation)\n return timestep\n\n def reset(self) -> dm_env.TimeStep:\n # If the frame buffer is nonempty, flush it and record video\n if self._frames:\n self._write_frames()\n self._counter += 1\n timestep = self.environment.reset()\n self._append_frame(timestep.observation)\n return timestep\n\n def make_html_animation(self):\n if self._frames:\n return make_animation(self._frames, self._frame_rate,\n self._figsize).to_html5_video()\n else:\n raise ValueError('make_html_animation should be called after running a '\n 'trajectory and before calling reset().')\n\n def close(self):\n if self._frames:\n self._write_frames()\n self._frames = []\n self.environment.close()\n\n\nclass MujocoVideoWrapper(VideoWrapper):\n \"\"\"VideoWrapper which generates videos from a mujoco physics object.\n\n This passes its keyword arguments into the parent `VideoWrapper` class (refer\n here for any default arguments).\n \"\"\"\n\n # Note that since we can be given a wrapped mujoco environment we can't give\n # the type as dm_control.Environment.\n\n def __init__(self,\n environment: dm_env.Environment,\n *,\n frame_rate: Optional[int] = None,\n camera_id: Optional[int] = 0,\n height: int = 240,\n width: int = 320,\n playback_speed: float = 1.,\n **kwargs):\n\n # Check that we have a mujoco environment (or a wrapper thereof).\n if not hasattr(environment, 'physics'):\n raise ValueError('MujocoVideoWrapper expects an environment which '\n 'exposes a physics attribute corresponding to a MuJoCo '\n 
'physics engine')\n\n # Compute frame rate if not set.\n if frame_rate is None:\n try:\n control_timestep = getattr(environment, 'control_timestep')()\n except AttributeError as e:\n raise AttributeError('MujocoVideoWrapper expects an environment which '\n 'exposes a control_timestep method, like '\n 'dm_control environments, or frame_rate '\n 'to be specified.') from e\n frame_rate = int(round(playback_speed / control_timestep))\n\n super().__init__(environment, frame_rate=frame_rate, **kwargs)\n self._camera_id = camera_id\n self._height = height\n self._width = width\n\n def _render_frame(self, unused_observation):\n del unused_observation\n\n # We've checked above that this attribute should exist. Pytype won't like\n # it if we just try and do self.environment.physics, so we use the slightly\n # grosser version below.\n physics = getattr(self.environment, 'physics')\n\n if self._camera_id is not None:\n frame = physics.render(\n camera_id=self._camera_id, height=self._height, width=self._width)\n else:\n # If camera_id is None, we create a minimal canvas that will accommodate\n # physics.model.ncam frames, and render all of them on a grid.\n num_cameras = physics.model.ncam\n num_columns = int(np.ceil(np.sqrt(num_cameras)))\n num_rows = int(np.ceil(float(num_cameras)/num_columns))\n height = self._height\n width = self._width\n\n # Make a black canvas.\n frame = np.zeros((num_rows*height, num_columns*width, 3), dtype=np.uint8)\n\n for col in range(num_columns):\n for row in range(num_rows):\n\n camera_id = row*num_columns + col\n\n if camera_id >= num_cameras:\n break\n\n subframe = physics.render(\n camera_id=camera_id, height=height, width=width)\n\n # Place the frame in the appropriate rectangle on the pixel canvas.\n frame[row*height:(row+1)*height, col*width:(col+1)*width] = subframe\n\n return frame\n" }, { "alpha_fraction": 0.6776096224784851, "alphanum_fraction": 0.6907804012298584, "avg_line_length": 28.575580596923828, "blob_id": "de7cf7988546fff21b9e9ef0a34c0b2d0450333a", "content_id": "f14377ab509aa2e673cc33cef40d57cfe683b5bb", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5087, "license_type": "permissive", "max_line_length": 82, "num_lines": 172, "path": "/setup.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Install script for setuptools.\"\"\"\n\nimport datetime\nfrom importlib import util as import_util\nimport os\nimport sys\n\nfrom setuptools import find_packages\nfrom setuptools import setup\nimport setuptools.command.build_py\nimport setuptools.command.develop\n\nspec = import_util.spec_from_file_location('_metadata', 'acme/_metadata.py')\n_metadata = import_util.module_from_spec(spec)\nspec.loader.exec_module(_metadata)\n\n# TODO(b/184148890): Add a release flag\n\n\n# Any particular version of reverb needs to be pinned against a particular\n# version of TF due to how it is built. While the versions below should be the\n# most recent stable versions of each library we'll be explicit just make make\n# sure this constraint is upheld.\n\ntensorflow = [\n 'tensorflow==2.8.0',\n 'tensorflow_probability==0.15.0',\n 'tensorflow_datasets==4.6.0',\n 'dm-reverb==0.7.2',\n 'dm-launchpad==0.5.2',\n]\n\ncore_requirements = [\n 'absl-py',\n 'dm-env',\n 'dm-tree',\n 'numpy',\n 'pillow',\n 'typing-extensions',\n]\n\njax_requirements = [\n 'jax>=0.4.3',\n 'chex',\n 'dm-haiku',\n 'flax',\n 'optax',\n 'rlax',\n] + tensorflow\n\ntf_requirements = [\n 'dm-sonnet',\n 'trfl',\n] + tensorflow\n\ntesting_requirements = [\n 'pytype==2021.8.11', # TODO(b/206926677): update to new version.\n 'pytest-xdist',\n]\n\nenvs_requirements = [\n 'atari-py',\n 'bsuite',\n 'dm-control',\n 'gym==0.25.0',\n 'gym[atari]',\n 'pygame==2.1.0',\n 'rlds',\n]\n\n\ndef generate_requirements_file(path=None):\n \"\"\"Generates requirements.txt file with the Acme's dependencies.\n\n It is used by Launchpad GCP runtime to generate Acme requirements to be\n installed inside the docker image. Acme itself is not installed from pypi,\n but instead sources are copied over to reflect any local changes made to\n the codebase.\n\n Args:\n path: path to the requirements.txt file to generate.\n \"\"\"\n if not path:\n path = os.path.join(os.path.dirname(__file__), 'acme/requirements.txt')\n with open(path, 'w') as f:\n for package in set(core_requirements + jax_requirements + tf_requirements +\n envs_requirements):\n f.write(f'{package}\\n')\n\n\nlong_description = \"\"\"Acme is a library of reinforcement learning (RL) agents\nand agent building blocks. Acme strives to expose simple, efficient,\nand readable agents, that serve both as reference implementations of popular\nalgorithms and as strong baselines, while still providing enough flexibility\nto do novel research. 
The design of Acme also attempts to provide multiple\npoints of entry to the RL problem at differing levels of complexity.\n\nFor more information see [github repository](https://github.com/deepmind/acme).\"\"\"\n\n# Get the version from metadata.\nversion = _metadata.__version__\n\n# If we're releasing a nightly/dev version append to the version string.\nif '--nightly' in sys.argv:\n sys.argv.remove('--nightly')\n version += '.dev' + datetime.datetime.now().strftime('%Y%m%d')\n\n\nclass BuildPy(setuptools.command.build_py.build_py):\n\n def run(self):\n generate_requirements_file()\n setuptools.command.build_py.build_py.run(self)\n\n\nclass Develop(setuptools.command.develop.develop):\n\n def run(self):\n generate_requirements_file()\n setuptools.command.develop.develop.run(self)\n\ncmdclass = {\n 'build_py': BuildPy,\n 'develop': Develop,\n}\n\nsetup(\n name='dm-acme',\n version=version,\n cmdclass=cmdclass,\n description='A Python library for Reinforcement Learning.',\n long_description=long_description,\n long_description_content_type='text/markdown',\n author='DeepMind',\n license='Apache License, Version 2.0',\n keywords='reinforcement-learning python machine learning',\n packages=find_packages(),\n package_data={'': ['requirements.txt']},\n include_package_data=True,\n install_requires=core_requirements,\n extras_require={\n 'jax': jax_requirements,\n 'tf': tf_requirements,\n 'testing': testing_requirements,\n 'envs': envs_requirements,\n },\n classifiers=[\n 'Development Status :: 3 - Alpha',\n 'Environment :: Console',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: Apache Software License',\n 'Operating System :: POSIX :: Linux',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9',\n 'Topic :: Scientific/Engineering :: Artificial Intelligence',\n ],\n)\n" }, { "alpha_fraction": 0.7169151902198792, "alphanum_fraction": 0.7243708968162537, "avg_line_length": 36.64912414550781, "blob_id": "cf04aa619fbbe2a58a804e3d955fb91007e9d7c1", "content_id": "15482933cfc5b8b7bd4d799dc5e92e2332b05512", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4292, "license_type": "permissive", "max_line_length": 97, "num_lines": 114, "path": "/examples/offline/run_cql_jax.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"An example CQL running on locomotion datasets (mujoco) from D4rl.\"\"\"\n\nfrom absl import app\nfrom absl import flags\nimport acme\nfrom acme import specs\nfrom acme.agents.jax import actor_core as actor_core_lib\nfrom acme.agents.jax import actors\nfrom acme.agents.jax import cql\nfrom acme.datasets import tfds\nfrom acme.examples.offline import helpers as gym_helpers\nfrom acme.jax import variable_utils\nfrom acme.utils import loggers\nimport haiku as hk\nimport jax\nimport optax\n\n# Agent flags\nflags.DEFINE_integer('batch_size', 64, 'Batch size.')\nflags.DEFINE_integer('evaluate_every', 20, 'Evaluation period.')\nflags.DEFINE_integer('evaluation_episodes', 10, 'Evaluation episodes.')\nflags.DEFINE_integer(\n 'num_demonstrations', 10,\n 'Number of demonstration episodes to load from the dataset. If None, loads the full dataset.'\n)\nflags.DEFINE_integer('seed', 0, 'Random seed for learner and evaluator.')\n# CQL specific flags.\nflags.DEFINE_float('policy_learning_rate', 3e-5, 'Policy learning rate.')\nflags.DEFINE_float('critic_learning_rate', 3e-4, 'Critic learning rate.')\nflags.DEFINE_float('fixed_cql_coefficient', None,\n 'Fixed CQL coefficient. If None, an adaptive one is used.')\nflags.DEFINE_float('cql_lagrange_threshold', 10.,\n 'Lagrange threshold for the adaptive CQL coefficient.')\n# Environment flags.\nflags.DEFINE_string('env_name', 'HalfCheetah-v2',\n 'Gym mujoco environment name.')\nflags.DEFINE_string(\n 'dataset_name', 'd4rl_mujoco_halfcheetah/v2-medium',\n 'D4rl dataset name. 
Can be any locomotion dataset from '\n 'https://www.tensorflow.org/datasets/catalog/overview#d4rl.')\n\nFLAGS = flags.FLAGS\n\n\ndef main(_):\n key = jax.random.PRNGKey(FLAGS.seed)\n key_demonstrations, key_learner = jax.random.split(key, 2)\n\n # Create an environment and grab the spec.\n environment = gym_helpers.make_environment(task=FLAGS.env_name)\n environment_spec = specs.make_environment_spec(environment)\n\n # Get a demonstrations dataset.\n transitions_iterator = tfds.get_tfds_dataset(FLAGS.dataset_name,\n FLAGS.num_demonstrations)\n demonstrations = tfds.JaxInMemoryRandomSampleIterator(\n transitions_iterator, key=key_demonstrations, batch_size=FLAGS.batch_size)\n\n # Create the networks to optimize.\n networks = cql.make_networks(environment_spec)\n\n # Create the learner.\n learner = cql.CQLLearner(\n batch_size=FLAGS.batch_size,\n networks=networks,\n random_key=key_learner,\n policy_optimizer=optax.adam(FLAGS.policy_learning_rate),\n critic_optimizer=optax.adam(FLAGS.critic_learning_rate),\n fixed_cql_coefficient=FLAGS.fixed_cql_coefficient,\n cql_lagrange_threshold=FLAGS.cql_lagrange_threshold,\n demonstrations=demonstrations,\n num_sgd_steps_per_step=1)\n\n def evaluator_network(\n params: hk.Params, key: jax.Array, observation: jax.Array\n ) -> jax.Array:\n dist_params = networks.policy_network.apply(params, observation)\n return networks.sample_eval(dist_params, key)\n\n actor_core = actor_core_lib.batched_feed_forward_to_actor_core(\n evaluator_network)\n variable_client = variable_utils.VariableClient(\n learner, 'policy', device='cpu')\n evaluator = actors.GenericActor(\n actor_core, key, variable_client, backend='cpu')\n\n eval_loop = acme.EnvironmentLoop(\n environment=environment,\n actor=evaluator,\n logger=loggers.TerminalLogger('evaluation', time_delta=0.))\n\n # Run the environment loop.\n while True:\n for _ in range(FLAGS.evaluate_every):\n learner.step()\n eval_loop.run(FLAGS.evaluation_episodes)\n\n\nif __name__ == '__main__':\n app.run(main)\n" }, { "alpha_fraction": 0.6590976715087891, "alphanum_fraction": 0.6626830101013184, "avg_line_length": 36.60674285888672, "blob_id": "2afee94ac0d9e8a3330362368c52183552811efc", "content_id": "d21ae72ee50783ddf75be0347d006bd3a523f18a", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3347, "license_type": "permissive", "max_line_length": 80, "num_lines": 89, "path": "/acme/agents/jax/mpo/acting.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Acting logic for the MPO agent.\"\"\"\n\nfrom typing import Mapping, NamedTuple, Tuple, Union\n\nfrom acme.agents.jax import actor_core as actor_core_lib\nfrom acme.agents.jax.mpo import networks\nfrom acme.agents.jax.mpo import types\nfrom acme.jax import types as jax_types\nimport haiku as hk\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\n\n\nclass ActorState(NamedTuple):\n key: jax_types.PRNGKey\n core_state: hk.LSTMState\n prev_core_state: hk.LSTMState\n log_prob: Union[jnp.ndarray, Tuple[()]] = ()\n\n\ndef make_actor_core(mpo_networks: networks.MPONetworks,\n stochastic: bool = True,\n store_core_state: bool = False,\n store_log_prob: bool = True) -> actor_core_lib.ActorCore:\n \"\"\"Returns a MPO ActorCore from the MPONetworks.\"\"\"\n\n def init(key: jax_types.PRNGKey) -> ActorState:\n next_key, key = jax.random.split(key, 2)\n batch_size = None\n params_initial_state = mpo_networks.torso.initial_state_fn_init(\n key, batch_size)\n core_state = mpo_networks.torso.initial_state_fn(params_initial_state,\n batch_size)\n return ActorState(\n key=next_key,\n core_state=core_state,\n prev_core_state=core_state,\n log_prob=np.zeros(shape=(), dtype=np.float32) if store_log_prob else ())\n\n def select_action(params: networks.MPONetworkParams,\n observations: types.Observation,\n state: ActorState) -> Tuple[types.Action, ActorState]:\n\n next_key, key = jax.random.split(state.key, 2)\n\n # Embed observations and apply stateful core (e.g. 
recurrent, transformer).\n embeddings, core_state = mpo_networks.torso.apply(params.torso,\n observations,\n state.core_state)\n\n # Get the action distribution for these observations.\n policy = mpo_networks.policy_head_apply(params, embeddings)\n actions = policy.sample(seed=key) if stochastic else policy.mode()\n\n return actions, ActorState(\n key=next_key,\n core_state=core_state,\n prev_core_state=state.core_state,\n # Compute log-probabilities for use in off-policy correction schemes.\n log_prob=policy.log_prob(actions) if store_log_prob else ())\n\n def get_extras(state: ActorState) -> Mapping[str, jnp.ndarray]:\n extras = {}\n\n if store_core_state:\n extras['core_state'] = state.prev_core_state\n\n if store_log_prob:\n extras['log_prob'] = state.log_prob\n\n return extras # pytype: disable=bad-return-type # jax-ndarray\n\n return actor_core_lib.ActorCore(\n init=init, select_action=select_action, get_extras=get_extras)\n" }, { "alpha_fraction": 0.6932852864265442, "alphanum_fraction": 0.696509838104248, "avg_line_length": 36.12676239013672, "blob_id": "ee93b3217a62b20d6da60602c0fb9683cc3dbebb", "content_id": "9e4f27a684b07e6b9bec90876cd27d01bb6f0374", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5272, "license_type": "permissive", "max_line_length": 80, "num_lines": 142, "path": "/acme/jax/inference_server.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Defines Inference Server class used for centralised inference.\"\"\"\n\nimport dataclasses\nimport datetime\nimport threading\nfrom typing import Any, Callable, Generic, Optional, Sequence, TypeVar\nimport acme\nfrom acme.jax import variable_utils\nimport jax\nimport launchpad as lp\n\n\[email protected]\nclass InferenceServerConfig:\n \"\"\"Configuration options for centralised inference.\n\n Attributes:\n batch_size: How many elements to batch together per single inference call.\n Auto-computed when not specified.\n update_period: Frequency of updating variables from the variable source.\n It is passed to VariableClient. 
Auto-computed when not specified.\n timeout: Time after which incomplete batch is executed (batch is padded,\n so there batch handler is always called with batch_size elements).\n By default timeout is effectively disabled (set to 30 days).\n \"\"\"\n batch_size: Optional[int] = None\n update_period: Optional[int] = None\n timeout: datetime.timedelta = datetime.timedelta(days=30)\n\n\nInferenceServerHandler = TypeVar('InferenceServerHandler')\n\n\nclass InferenceServer(Generic[InferenceServerHandler]):\n \"\"\"Centralised, batched inference server.\"\"\"\n\n def __init__(\n self,\n handler: InferenceServerHandler,\n variable_source: acme.VariableSource,\n devices: Sequence[jax.Device],\n config: InferenceServerConfig,\n ):\n \"\"\"Constructs an inference server object.\n\n Args:\n handler: A callable or a mapping of callables to be exposed\n through the inference server.\n variable_source: Source of variables\n devices: Devices used for executing handlers. All devices are used in\n parallel.\n config: Inference Server configuration.\n \"\"\"\n self._variable_source = variable_source\n self._variable_client = None\n self._keys = []\n self._devices = devices\n self._config = config\n self._call_cnt = 0\n self._device_params = [None] * len(self._devices)\n self._device_params_ids = [None] * len(self._devices)\n self._mutex = threading.Lock()\n self._handler = jax.tree_map(self._build_handler, handler, is_leaf=callable)\n\n @property\n def handler(self) -> InferenceServerHandler:\n return self._handler\n\n def _dereference_params(self, arg):\n \"\"\"Replaces VariableReferences with their corresponding param values.\"\"\"\n\n if not isinstance(arg, variable_utils.VariableReference):\n # All arguments but VariableReference are returned without modifications.\n return arg\n\n # Due to batching dimension we take the first element.\n variable_name = arg.variable_name[0]\n\n if variable_name not in self._keys:\n # Create a new VariableClient which also serves new variables.\n self._keys.append(variable_name)\n self._variable_client = variable_utils.VariableClient(\n client=self._variable_source,\n key=self._keys,\n update_period=self._config.update_period)\n\n params = self._variable_client.params\n device_idx = self._call_cnt % len(self._devices)\n # Select device via round robin, and update its params if they changed.\n if self._device_params_ids[device_idx] != id(params):\n self._device_params_ids[device_idx] = id(params)\n self._device_params[device_idx] = jax.device_put(\n params, self._devices[device_idx])\n\n # Return the params that are located on the chosen device.\n device_params = self._device_params[device_idx]\n if len(self._keys) == 1:\n return device_params\n return device_params[self._keys.index(variable_name)]\n\n def _build_handler(self, handler: Callable[..., Any]) -> Callable[..., Any]:\n \"\"\"Builds a batched handler for a given callable handler and its name.\"\"\"\n\n def dereference_params_and_call_handler(*args, **kwargs):\n with self._mutex:\n # Dereference args corresponding to params, leaving others unchanged.\n args_with_dereferenced_params = [\n self._dereference_params(arg) for arg in args\n ]\n kwargs_with_dereferenced_params = {\n key: self._dereference_params(value)\n for key, value in kwargs.items()\n }\n self._call_cnt += 1\n\n # Maybe update params, depending on client configuration.\n if self._variable_client is not None:\n self._variable_client.update()\n\n return handler(*args_with_dereferenced_params,\n **kwargs_with_dereferenced_params)\n\n return 
lp.batched_handler(\n batch_size=self._config.batch_size,\n timeout=self._config.timeout,\n pad_batch=True,\n max_parallelism=2 * len(self._devices))(\n dereference_params_and_call_handler)\n" }, { "alpha_fraction": 0.670543372631073, "alphanum_fraction": 0.6752345561981201, "avg_line_length": 37.90494155883789, "blob_id": "5dad1db3fa5992d818b60d213b11eb68d2aa79d0", "content_id": "5502cfca667939af3cc90c359f46c40625490e9d", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10232, "license_type": "permissive", "max_line_length": 125, "num_lines": 263, "path": "/acme/agents/jax/crr/learning.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"CRR learner implementation.\"\"\"\n\nimport time\nfrom typing import Dict, Iterator, List, NamedTuple, Optional, Tuple\n\nimport acme\nfrom acme import types\nfrom acme.agents.jax.crr.losses import PolicyLossCoeff\nfrom acme.agents.jax.crr.networks import CRRNetworks\nfrom acme.jax import networks as networks_lib\nfrom acme.jax import utils\nfrom acme.utils import counting\nfrom acme.utils import loggers\nimport jax\nimport jax.numpy as jnp\nimport optax\n\n\nclass TrainingState(NamedTuple):\n \"\"\"Contains training state for the learner.\"\"\"\n policy_params: networks_lib.Params\n target_policy_params: networks_lib.Params\n critic_params: networks_lib.Params\n target_critic_params: networks_lib.Params\n policy_opt_state: optax.OptState\n critic_opt_state: optax.OptState\n steps: int\n key: networks_lib.PRNGKey\n\n\nclass CRRLearner(acme.Learner):\n \"\"\"Critic Regularized Regression (CRR) learner.\n\n This is the learning component of a CRR agent as described in\n https://arxiv.org/abs/2006.15134.\n \"\"\"\n\n _state: TrainingState\n\n def __init__(self,\n networks: CRRNetworks,\n random_key: networks_lib.PRNGKey,\n discount: float,\n target_update_period: int,\n policy_loss_coeff_fn: PolicyLossCoeff,\n iterator: Iterator[types.Transition],\n policy_optimizer: optax.GradientTransformation,\n critic_optimizer: optax.GradientTransformation,\n counter: Optional[counting.Counter] = None,\n logger: Optional[loggers.Logger] = None,\n grad_updates_per_batch: int = 1,\n use_sarsa_target: bool = False):\n \"\"\"Initializes the CRR learner.\n\n Args:\n networks: CRR networks.\n random_key: a key for random number generation.\n discount: discount to use for TD updates.\n target_update_period: period to update target's parameters.\n policy_loss_coeff_fn: set the loss function for the policy.\n iterator: an iterator over training data.\n policy_optimizer: the policy optimizer.\n critic_optimizer: the Q-function optimizer.\n counter: counter object used to keep track of steps.\n logger: logger object to be used by learner.\n grad_updates_per_batch: how many gradient updates given a sampled batch.\n use_sarsa_target: compute on-policy target using iterator's 
actions rather\n than sampled actions.\n Useful for 1-step offline RL (https://arxiv.org/pdf/2106.08909.pdf).\n When set to `True`, `target_policy_params` are unused.\n \"\"\"\n\n critic_network = networks.critic_network\n policy_network = networks.policy_network\n\n def policy_loss(\n policy_params: networks_lib.Params,\n critic_params: networks_lib.Params,\n transition: types.Transition,\n key: networks_lib.PRNGKey,\n ) -> jnp.ndarray:\n # Compute the loss coefficients.\n coeff = policy_loss_coeff_fn(networks, policy_params, critic_params,\n transition, key)\n coeff = jax.lax.stop_gradient(coeff)\n # Return the weighted loss.\n dist_params = policy_network.apply(policy_params, transition.observation)\n logp_action = networks.log_prob(dist_params, transition.action)\n # Make sure there is no broadcasting.\n logp_action *= coeff.flatten()\n assert len(logp_action.shape) == 1\n return -jnp.mean(logp_action)\n\n def critic_loss(\n critic_params: networks_lib.Params,\n target_policy_params: networks_lib.Params,\n target_critic_params: networks_lib.Params,\n transition: types.Transition,\n key: networks_lib.PRNGKey,\n ):\n # Sample the next action.\n if use_sarsa_target:\n # TODO(b/222674779): use N-steps Trajectories to get the next actions.\n assert 'next_action' in transition.extras, (\n 'next actions should be given as extras for one step RL.')\n next_action = transition.extras['next_action']\n else:\n next_dist_params = policy_network.apply(target_policy_params,\n transition.next_observation)\n next_action = networks.sample(next_dist_params, key)\n # Calculate the value of the next state and action.\n next_q = critic_network.apply(target_critic_params,\n transition.next_observation, next_action)\n target_q = transition.reward + transition.discount * discount * next_q\n target_q = jax.lax.stop_gradient(target_q)\n\n q = critic_network.apply(critic_params, transition.observation,\n transition.action)\n q_error = q - target_q\n # Loss is MSE scaled by 0.5, so the gradient is equal to the TD error.\n # TODO(sertan): Replace with a distributional critic. 
CRR paper states\n # that this may perform better.\n return 0.5 * jnp.mean(jnp.square(q_error))\n\n policy_loss_and_grad = jax.value_and_grad(policy_loss)\n critic_loss_and_grad = jax.value_and_grad(critic_loss)\n\n def sgd_step(\n state: TrainingState,\n transitions: types.Transition,\n ) -> Tuple[TrainingState, Dict[str, jnp.ndarray]]:\n\n key, key_policy, key_critic = jax.random.split(state.key, 3)\n\n # Compute losses and their gradients.\n policy_loss_value, policy_gradients = policy_loss_and_grad(\n state.policy_params, state.critic_params, transitions, key_policy)\n critic_loss_value, critic_gradients = critic_loss_and_grad(\n state.critic_params, state.target_policy_params,\n state.target_critic_params, transitions, key_critic)\n\n # Get optimizer updates and state.\n policy_updates, policy_opt_state = policy_optimizer.update(\n policy_gradients, state.policy_opt_state)\n critic_updates, critic_opt_state = critic_optimizer.update(\n critic_gradients, state.critic_opt_state)\n\n # Apply optimizer updates to parameters.\n policy_params = optax.apply_updates(state.policy_params, policy_updates)\n critic_params = optax.apply_updates(state.critic_params, critic_updates)\n\n steps = state.steps + 1\n\n # Periodically update target networks.\n target_policy_params, target_critic_params = optax.periodic_update( # pytype: disable=wrong-arg-types # numpy-scalars\n (policy_params, critic_params),\n (state.target_policy_params, state.target_critic_params), steps,\n target_update_period)\n\n new_state = TrainingState(\n policy_params=policy_params,\n target_policy_params=target_policy_params,\n critic_params=critic_params,\n target_critic_params=target_critic_params,\n policy_opt_state=policy_opt_state,\n critic_opt_state=critic_opt_state,\n steps=steps,\n key=key,\n )\n\n metrics = {\n 'policy_loss': policy_loss_value,\n 'critic_loss': critic_loss_value,\n }\n\n return new_state, metrics\n\n sgd_step = utils.process_multiple_batches(sgd_step, grad_updates_per_batch)\n self._sgd_step = jax.jit(sgd_step)\n\n # General learner book-keeping and loggers.\n self._counter = counter or counting.Counter()\n self._logger = logger or loggers.make_default_logger(\n 'learner',\n asynchronous=True,\n serialize_fn=utils.fetch_devicearray,\n steps_key=self._counter.get_steps_key())\n\n # Create prefetching dataset iterator.\n self._iterator = iterator\n\n # Create the network parameters and copy into the target network parameters.\n key, key_policy, key_critic = jax.random.split(random_key, 3)\n initial_policy_params = policy_network.init(key_policy)\n initial_critic_params = critic_network.init(key_critic)\n initial_target_policy_params = initial_policy_params\n initial_target_critic_params = initial_critic_params\n\n # Initialize optimizers.\n initial_policy_opt_state = policy_optimizer.init(initial_policy_params)\n initial_critic_opt_state = critic_optimizer.init(initial_critic_params)\n\n # Create initial state.\n self._state = TrainingState(\n policy_params=initial_policy_params,\n target_policy_params=initial_target_policy_params,\n critic_params=initial_critic_params,\n target_critic_params=initial_target_critic_params,\n policy_opt_state=initial_policy_opt_state,\n critic_opt_state=initial_critic_opt_state,\n steps=0,\n key=key,\n )\n\n # Do not record timestamps until after the first learning step is done.\n # This is to avoid including the time it takes for actors to come online and\n # fill the replay buffer.\n self._timestamp = None\n\n def step(self):\n transitions = next(self._iterator)\n\n 
self._state, metrics = self._sgd_step(self._state, transitions)\n\n # Compute elapsed time.\n timestamp = time.time()\n elapsed_time = timestamp - self._timestamp if self._timestamp else 0\n self._timestamp = timestamp\n\n # Increment counts and record the current time\n counts = self._counter.increment(steps=1, walltime=elapsed_time)\n\n # Attempts to write the logs.\n self._logger.write({**metrics, **counts})\n\n def get_variables(self, names: List[str]) -> List[networks_lib.Params]:\n # We only expose the variables for the learned policy and critic. The target\n # policy and critic are internal details.\n variables = {\n 'policy': self._state.target_policy_params,\n 'critic': self._state.target_critic_params,\n }\n return [variables[name] for name in names]\n\n def save(self) -> TrainingState:\n return self._state\n\n def restore(self, state: TrainingState):\n self._state = state\n" }, { "alpha_fraction": 0.6876891255378723, "alphanum_fraction": 0.691819965839386, "avg_line_length": 33.98991775512695, "blob_id": "86e1f122d16b19b6fab0e4e13e79de522f575522", "content_id": "60ed8aaf398d3043b0a2d9f9503dedd4d3fb0071", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 20819, "license_type": "permissive", "max_line_length": 95, "num_lines": 595, "path": "/acme/jax/utils.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Utilities for JAX.\"\"\"\n\nimport functools\nimport itertools\nimport queue\nimport threading\nfrom typing import Callable, Iterable, Iterator, NamedTuple, Optional, Sequence, Tuple, TypeVar\n\nfrom absl import logging\nfrom acme import core\nfrom acme import types\nfrom acme.jax import types as jax_types\nimport haiku as hk\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nimport reverb\nimport tree\n\n\nF = TypeVar('F', bound=Callable)\nN = TypeVar('N', bound=types.NestedArray)\nT = TypeVar('T')\n\n\nNUM_PREFETCH_THREADS = 1\n\n\ndef add_batch_dim(values: types.Nest) -> types.NestedArray:\n return jax.tree_map(lambda x: jnp.expand_dims(x, axis=0), values)\n\n\ndef _flatten(x: jnp.ndarray, num_batch_dims: int) -> jnp.ndarray:\n \"\"\"Flattens the input, preserving the first ``num_batch_dims`` dimensions.\n\n If the input has fewer than ``num_batch_dims`` dimensions, it is returned\n unchanged.\n If the input has exactly ``num_batch_dims`` dimensions, an extra dimension\n is added. 
This is needed to handle batched scalars.\n\n Arguments:\n x: the input array to flatten.\n num_batch_dims: number of dimensions to preserve.\n\n Returns:\n flattened input.\n \"\"\"\n # TODO(b/173492429): consider throwing an error instead.\n if x.ndim < num_batch_dims:\n return x\n return jnp.reshape(x, list(x.shape[:num_batch_dims]) + [-1])\n\n\ndef batch_concat(\n values: types.NestedArray,\n num_batch_dims: int = 1,\n) -> jnp.ndarray:\n \"\"\"Flatten and concatenate nested array structure, keeping batch dims.\"\"\"\n flatten_fn = lambda x: _flatten(x, num_batch_dims)\n flat_leaves = tree.map_structure(flatten_fn, values)\n return jnp.concatenate(tree.flatten(flat_leaves), axis=-1)\n\n\ndef zeros_like(nest: types.Nest, dtype=None) -> types.NestedArray:\n return jax.tree_map(lambda x: jnp.zeros(x.shape, dtype or x.dtype), nest)\n\n\ndef ones_like(nest: types.Nest, dtype=None) -> types.NestedArray:\n return jax.tree_map(lambda x: jnp.ones(x.shape, dtype or x.dtype), nest)\n\n\ndef squeeze_batch_dim(nest: types.Nest) -> types.NestedArray:\n return jax.tree_map(lambda x: jnp.squeeze(x, axis=0), nest)\n\n\ndef to_numpy_squeeze(values: types.Nest) -> types.NestedArray:\n \"\"\"Converts to numpy and squeezes out dummy batch dimension.\"\"\"\n return jax.tree_map(lambda x: np.asarray(x).squeeze(axis=0), values)\n\n\ndef to_numpy(values: types.Nest) -> types.NestedArray:\n return jax.tree_map(np.asarray, values)\n\n\ndef fetch_devicearray(values: types.Nest) -> types.Nest:\n \"\"\"Fetches and converts any DeviceArrays to np.ndarrays.\"\"\"\n return tree.map_structure(_fetch_devicearray, values)\n\n\ndef _fetch_devicearray(x):\n if isinstance(x, jax.Array):\n return np.asarray(x)\n return x\n\n\ndef batch_to_sequence(values: types.Nest) -> types.NestedArray:\n return jax.tree_map(\n lambda x: jnp.transpose(x, axes=(1, 0, *range(2, len(x.shape)))), values)\n\n\ndef tile_array(array: jnp.ndarray, multiple: int) -> jnp.ndarray:\n \"\"\"Tiles `multiple` copies of `array` along a new leading axis.\"\"\"\n return jnp.stack([array] * multiple)\n\n\ndef tile_nested(inputs: types.Nest, multiple: int) -> types.Nest:\n \"\"\"Tiles tensors in a nested structure along a new leading axis.\"\"\"\n tile = functools.partial(tile_array, multiple=multiple)\n return jax.tree_map(tile, inputs)\n\n\ndef maybe_recover_lstm_type(state: types.NestedArray) -> types.NestedArray:\n \"\"\"Recovers the type hk.LSTMState if LSTMState is in the type name.\n\n When the recurrent state of recurrent neural networks (RNN) is deserialized,\n for example when it is sampled from replay, it is sometimes repacked in a type\n that is identical to the source type but not the correct type itself. 
When\n using this state as the initial state in an hk.dynamic_unroll, this will\n cause hk.dynamic_unroll to raise an error as it requires its input and output\n states to be identical.\n\n Args:\n state: a nested structure of arrays representing the state of an RNN.\n\n Returns:\n Either the state unchanged if it is anything but an LSTMState, otherwise\n returns the state arrays properly contained in an hk.LSTMState.\n \"\"\"\n return hk.LSTMState(*state) if type(state).__name__ == 'LSTMState' else state\n\n\ndef prefetch(\n iterable: Iterable[T],\n buffer_size: int = 5,\n device: Optional[jax.Device] = None,\n num_threads: int = NUM_PREFETCH_THREADS,\n) -> core.PrefetchingIterator[T]:\n \"\"\"Returns prefetching iterator with additional 'ready' method.\"\"\"\n\n return PrefetchIterator(iterable, buffer_size, device, num_threads)\n\n\nclass PrefetchingSplit(NamedTuple):\n host: types.NestedArray\n device: types.NestedArray\n\n\n_SplitFunction = Callable[[types.NestedArray], PrefetchingSplit]\n\n\ndef keep_key_on_host(sample: reverb.ReplaySample) -> PrefetchingSplit:\n \"\"\"Returns PrefetchingSplit which keeps uint64 reverb key on the host.\n\n We want to avoid truncation of the uint64 reverb key by JAX.\n\n Args:\n sample: a sample from a Reverb replay buffer.\n\n Returns:\n PrefetchingSplit with device having the reverb sample, and key on host.\n \"\"\"\n return PrefetchingSplit(host=sample.info.key, device=sample)\n\n\ndef device_put(\n iterable: Iterable[types.NestedArray],\n device: jax.Device,\n split_fn: Optional[_SplitFunction] = None,\n):\n \"\"\"Returns iterator that samples an item and places it on the device.\"\"\"\n\n return PutToDevicesIterable(\n iterable=iterable,\n pmapped_user=False,\n devices=[device],\n split_fn=split_fn)\n\n\ndef multi_device_put(\n iterable: Iterable[types.NestedArray],\n devices: Sequence[jax.Device],\n split_fn: Optional[_SplitFunction] = None,\n):\n \"\"\"Returns iterator that, per device, samples an item and places on device.\"\"\"\n\n return PutToDevicesIterable(\n iterable=iterable, pmapped_user=True, devices=devices, split_fn=split_fn)\n\n\nclass PutToDevicesIterable(Iterable[types.NestedArray]):\n \"\"\"Per device, samples an item from iterator and places on device.\n\n if pmapped_user:\n Items from the resulting generator are intended to be used in a pmapped\n function. Every element is a ShardedDeviceArray or (nested) Python container\n thereof. A single next() call to this iterator results in len(devices)\n calls to the underlying iterator. The returned items are put one on each\n device.\n if not pmapped_user:\n Places a sample from the iterator on the given device.\n\n Yields:\n If no split_fn is specified:\n DeviceArray/ShardedDeviceArray or (nested) Python container thereof\n representing the elements of shards stacked together, with each shard\n backed by physical device memory specified by the corresponding entry in\n devices.\n\n If split_fn is specified:\n PrefetchingSplit where the .host element is a stacked numpy array or\n (nested) Python contained thereof. The .device element is a\n DeviceArray/ShardedDeviceArray or (nested) Python container thereof.\n\n Raises:\n StopIteration: if there are not enough items left in the iterator to place\n one sample on each device.\n Any error thrown by the iterable_function. 
Note this is not raised inside\n the producer, but after it finishes executing.\n \"\"\"\n\n def __init__(\n self,\n iterable: Iterable[types.NestedArray],\n pmapped_user: bool,\n devices: Sequence[jax.Device],\n split_fn: Optional[_SplitFunction] = None,\n ):\n \"\"\"Constructs PutToDevicesIterable.\n\n Args:\n iterable: A python iterable. This is used to build the python prefetcher.\n Note that each iterable should only be passed to this function once as\n iterables aren't thread safe.\n pmapped_user: whether the user of data from this iterator is implemented\n using pmapping.\n devices: Devices used for prefecthing.\n split_fn: Optional function applied to every element from the iterable to\n split the parts of it that will be kept in the host and the parts that\n will sent to the device.\n\n Raises:\n ValueError: If devices list is empty, or if pmapped_use=False and more\n than 1 device is provided.\n \"\"\"\n self.num_devices = len(devices)\n if self.num_devices == 0:\n raise ValueError('At least one device must be specified.')\n if (not pmapped_user) and (self.num_devices != 1):\n raise ValueError('User is not implemented with pmapping but len(devices) '\n f'= {len(devices)} is not equal to 1! Devices given are:'\n f'\\n{devices}')\n\n self.iterable = iterable\n self.pmapped_user = pmapped_user\n self.split_fn = split_fn\n self.devices = devices\n self.iterator = iter(self.iterable)\n\n def __iter__(self) -> Iterator[types.NestedArray]:\n # It is important to structure the Iterable like this, because in\n # JustPrefetchIterator we must build a new iterable for each thread.\n # This is crucial if working with tensorflow datasets because tf.Graph\n # objects are thread local.\n self.iterator = iter(self.iterable)\n return self\n\n def __next__(self) -> types.NestedArray:\n try:\n if not self.pmapped_user:\n item = next(self.iterator)\n if self.split_fn is None:\n return jax.device_put(item, self.devices[0])\n item_split = self.split_fn(item)\n return PrefetchingSplit(\n host=item_split.host,\n device=jax.device_put(item_split.device, self.devices[0]))\n\n items = itertools.islice(self.iterator, self.num_devices)\n items = tuple(items)\n if len(items) < self.num_devices:\n raise StopIteration\n if self.split_fn is None:\n return jax.device_put_sharded(tuple(items), self.devices)\n else:\n # ((host: x1, device: y1), ..., (host: xN, device: yN)).\n items_split = (self.split_fn(item) for item in items)\n # (host: (x1, ..., xN), device: (y1, ..., yN)).\n split = tree.map_structure_up_to(\n PrefetchingSplit(None, None), lambda *x: x, *items_split)\n\n return PrefetchingSplit(\n host=np.stack(split.host),\n device=jax.device_put_sharded(split.device, self.devices))\n\n except StopIteration:\n raise\n\n except Exception: # pylint: disable=broad-except\n logging.exception('Error for %s', self.iterable)\n raise\n\n\ndef sharded_prefetch(\n iterable: Iterable[types.NestedArray],\n buffer_size: int = 5,\n num_threads: int = 1,\n split_fn: Optional[_SplitFunction] = None,\n devices: Optional[Sequence[jax.Device]] = None,\n) -> core.PrefetchingIterator:\n \"\"\"Performs sharded prefetching from an iterable in separate threads.\n\n Elements from the resulting generator are intended to be used in a jax.pmap\n call. Every element is a sharded prefetched array with an additional replica\n dimension and corresponds to jax.local_device_count() elements from the\n original iterable.\n\n Args:\n iterable: A python iterable. 
This is used to build the python prefetcher.\n Note that each iterable should only be passed to this function once as\n iterables aren't thread safe.\n buffer_size (int): Number of elements to keep in the prefetch buffer.\n num_threads (int): Number of threads.\n split_fn: Optional function applied to every element from the iterable to\n split the parts of it that will be kept in the host and the parts that\n will sent to the device.\n devices: Devices used for prefecthing. Optional, jax.local_devices() by\n default.\n\n Returns:\n Prefetched elements from the original iterable with additional replica\n dimension.\n Raises:\n ValueError if the buffer_size <= 1.\n Any error thrown by the iterable_function. Note this is not raised inside\n the producer, but after it finishes executing.\n \"\"\"\n\n devices = devices or jax.local_devices()\n\n iterable = PutToDevicesIterable(\n iterable=iterable, pmapped_user=True, devices=devices, split_fn=split_fn)\n\n return prefetch(iterable, buffer_size, device=None, num_threads=num_threads)\n\n\ndef replicate_in_all_devices(\n nest: N, devices: Optional[Sequence[jax.Device]] = None\n) -> N:\n \"\"\"Replicate array nest in all available devices.\"\"\"\n devices = devices or jax.local_devices()\n return jax.device_put_sharded([nest] * len(devices), devices)\n\n\ndef get_from_first_device(nest: N, as_numpy: bool = True) -> N:\n \"\"\"Gets the first array of a nest of `jax.Array`s.\n\n Args:\n nest: A nest of `jax.Array`s.\n as_numpy: If `True` then each `DeviceArray` that is retrieved is transformed\n (and copied if not on the host machine) into a `np.ndarray`.\n\n Returns:\n The first array of a nest of `jax.Array`s. Note that if\n `as_numpy=False` then the array will be a `DeviceArray` (which will live on\n the same device as the sharded device array). 
If `as_numpy=True` then the\n array will be copied to the host machine and converted into a `np.ndarray`.\n \"\"\"\n zeroth_nest = jax.tree_map(lambda x: x[0], nest)\n return jax.device_get(zeroth_nest) if as_numpy else zeroth_nest\n\n\ndef mapreduce(\n f: F,\n reduce_fn: Optional[Callable[[jax.Array], jax.Array]] = None,\n **vmap_kwargs,\n) -> F:\n \"\"\"A simple decorator that transforms `f` into (`reduce_fn` o vmap o f).\n\n By default, we vmap over axis 0, and the `reduce_fn` is jnp.mean over axis 0.\n Note that the call signature of `f` is invariant under this transformation.\n\n If, for example, f has shape signature [H, W] -> [N], then mapreduce(f)\n (with the default arguments) will have shape signature [B, H, W] -> [N].\n\n Args:\n f: A pure function over examples.\n reduce_fn: A pure function that reduces DeviceArrays -> DeviceArrays.\n **vmap_kwargs: Keyword arguments to forward to `jax.vmap`.\n\n Returns:\n g: A pure function over batches of examples.\n \"\"\"\n\n if reduce_fn is None:\n reduce_fn = lambda x: jnp.mean(x, axis=0)\n\n vmapped_f = jax.vmap(f, **vmap_kwargs)\n\n def g(*args, **kwargs):\n return jax.tree_map(reduce_fn, vmapped_f(*args, **kwargs))\n\n return g\n\n\n_TrainingState = TypeVar('_TrainingState')\n_TrainingData = TypeVar('_TrainingData')\n_TrainingAux = TypeVar('_TrainingAux')\n\n\n# TODO(b/192806089): migrate all callers to process_many_batches and remove this\n# method.\ndef process_multiple_batches(\n process_one_batch: Callable[[_TrainingState, _TrainingData],\n Tuple[_TrainingState, _TrainingAux]],\n num_batches: int,\n postprocess_aux: Optional[Callable[[_TrainingAux], _TrainingAux]] = None\n) -> Callable[[_TrainingState, _TrainingData], Tuple[_TrainingState,\n _TrainingAux]]:\n \"\"\"Makes 'process_one_batch' process multiple batches at once.\n\n Args:\n process_one_batch: a function that takes 'state' and 'data', and returns\n 'new_state' and 'aux' (for example 'metrics').\n num_batches: how many batches to process at once\n postprocess_aux: how to merge the extra information, defaults to taking the\n mean.\n\n Returns:\n A function with the same interface as 'process_one_batch' which processes\n multiple batches at once.\n \"\"\"\n assert num_batches >= 1\n if num_batches == 1:\n if not postprocess_aux:\n return process_one_batch\n def _process_one_batch(state, data):\n state, aux = process_one_batch(state, data)\n return state, postprocess_aux(aux)\n return _process_one_batch\n\n if postprocess_aux is None:\n postprocess_aux = lambda x: jax.tree_map(jnp.mean, x)\n\n def _process_multiple_batches(state, data):\n data = jax.tree_map(\n lambda a: jnp.reshape(a, (num_batches, -1, *a.shape[1:])), data)\n\n state, aux = jax.lax.scan(\n process_one_batch, state, data, length=num_batches)\n return state, postprocess_aux(aux)\n\n return _process_multiple_batches\n\n\ndef process_many_batches(\n process_one_batch: Callable[[_TrainingState, _TrainingData],\n jax_types.TrainingStepOutput[_TrainingState]],\n num_batches: int,\n postprocess_aux: Optional[Callable[[jax_types.TrainingMetrics],\n jax_types.TrainingMetrics]] = None\n) -> Callable[[_TrainingState, _TrainingData],\n jax_types.TrainingStepOutput[_TrainingState]]:\n \"\"\"The version of 'process_multiple_batches' with stronger typing.\"\"\"\n\n def _process_one_batch(\n state: _TrainingState,\n data: _TrainingData) -> Tuple[_TrainingState, jax_types.TrainingMetrics]:\n result = process_one_batch(state, data)\n return result.state, result.metrics\n\n func = process_multiple_batches(_process_one_batch, 
num_batches,\n postprocess_aux)\n\n def _process_many_batches(\n state: _TrainingState,\n data: _TrainingData) -> jax_types.TrainingStepOutput[_TrainingState]:\n state, aux = func(state, data)\n return jax_types.TrainingStepOutput(state, aux)\n\n return _process_many_batches\n\n\ndef weighted_softmax(x: jnp.ndarray, weights: jnp.ndarray, axis: int = 0):\n x = x - jnp.max(x, axis=axis)\n return weights * jnp.exp(x) / jnp.sum(weights * jnp.exp(x),\n axis=axis, keepdims=True)\n\n\ndef sample_uint32(random_key: jax_types.PRNGKey) -> int:\n \"\"\"Returns an integer uniformly distributed in 0..2^32-1.\"\"\"\n iinfo = jnp.iinfo(jnp.int32)\n # randint only accepts int32 values as min and max.\n jax_random = jax.random.randint(\n random_key, shape=(), minval=iinfo.min, maxval=iinfo.max, dtype=jnp.int32)\n return np.uint32(jax_random).item()\n\n\nclass PrefetchIterator(core.PrefetchingIterator):\n \"\"\"Performs prefetching from an iterable in separate threads.\n\n Its interface is additionally extended with `ready` method which tells whether\n there is any data waiting for processing and a `retrieved_elements` method\n specifying number of elements retrieved from the iterator.\n\n Yields:\n Prefetched elements from the original iterable.\n\n Raises:\n ValueError: if the buffer_size < 1.\n StopIteration: If the iterable contains no more items.\n Any error thrown by the iterable_function. Note this is not raised inside\n the producer, but after it finishes executing.\n \"\"\"\n\n def __init__(\n self,\n iterable: Iterable[types.NestedArray],\n buffer_size: int = 5,\n device: Optional[jax.Device] = None,\n num_threads: int = NUM_PREFETCH_THREADS,\n ):\n \"\"\"Constructs PrefetchIterator.\n\n Args:\n iterable: A python iterable. This is used to build the python prefetcher.\n Note that each iterable should only be passed to this function once as\n iterables aren't thread safe.\n buffer_size (int): Number of elements to keep in the prefetch buffer.\n device (deprecated): Optionally place items from the iterable on the given\n device. If None, the items are returns as given by the iterable. This\n argument is deprecated and the recommended usage is to wrap the\n iterables using utils.device_put or utils.multi_device_put before using\n utils.prefetch.\n num_threads (int): Number of threads.\n \"\"\"\n\n if buffer_size < 1:\n raise ValueError('the buffer_size should be >= 1')\n self.buffer = queue.Queue(maxsize=buffer_size)\n self.producer_error = []\n self.end = object()\n self.iterable = iterable\n self.device = device\n self.count = 0\n\n # Start producer threads.\n for _ in range(num_threads):\n threading.Thread(target=self.producer, daemon=True).start()\n\n def producer(self):\n \"\"\"Enqueues items from `iterable` on a given thread.\"\"\"\n try:\n # Build a new iterable for each thread. 
This is crucial if working with\n # tensorflow datasets because tf.Graph objects are thread local.\n for item in self.iterable:\n if self.device:\n jax.device_put(item, self.device)\n self.buffer.put(item)\n except Exception as e: # pylint: disable=broad-except\n logging.exception('Error in producer thread for %s', self.iterable)\n self.producer_error.append(e)\n finally:\n self.buffer.put(self.end)\n\n def __iter__(self):\n return self\n\n def ready(self):\n return not self.buffer.empty()\n\n def retrieved_elements(self):\n return self.count\n\n def __next__(self):\n value = self.buffer.get()\n if value is self.end:\n if self.producer_error:\n raise self.producer_error[0] from self.producer_error[0]\n raise StopIteration\n self.count += 1\n return value\n" }, { "alpha_fraction": 0.6658329367637634, "alphanum_fraction": 0.6743371486663818, "avg_line_length": 30.234375, "blob_id": "d463f737ae6cb0b8a8161ad27c870447a92651c8", "content_id": "7e6f72bd688204b2295b869bf8f5145310a49f81", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5997, "license_type": "permissive", "max_line_length": 80, "num_lines": 192, "path": "/acme/utils/tree_utils.py", "repo_name": "dada325/acme", "src_encoding": "UTF-8", "text": "# Copyright 2018 DeepMind Technologies Limited. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Tensor framework-agnostic utilities for manipulating nested structures.\"\"\"\n\nfrom typing import Sequence, List, TypeVar, Any\n\nimport numpy as np\nimport tree\n\nElementType = TypeVar('ElementType')\n\n\ndef fast_map_structure(func, *structure):\n \"\"\"Faster map_structure implementation which skips some error checking.\"\"\"\n flat_structure = (tree.flatten(s) for s in structure)\n entries = zip(*flat_structure)\n # Arbitrarily choose one of the structures of the original sequence (the last)\n # to match the structure for the flattened sequence.\n return tree.unflatten_as(structure[-1], [func(*x) for x in entries])\n\n\ndef fast_map_structure_with_path(func, *structure):\n \"\"\"Faster map_structure_with_path implementation.\"\"\"\n head_entries_with_path = tree.flatten_with_path(structure[0])\n if len(structure) > 1:\n tail_entries = (tree.flatten(s) for s in structure[1:])\n entries_with_path = [\n e[0] + e[1:] for e in zip(head_entries_with_path, *tail_entries)\n ]\n else:\n entries_with_path = head_entries_with_path\n # Arbitrarily choose one of the structures of the original sequence (the last)\n # to match the structure for the flattened sequence.\n return tree.unflatten_as(structure[-1], [func(*x) for x in entries_with_path])\n\n\ndef stack_sequence_fields(sequence: Sequence[ElementType]) -> ElementType:\n \"\"\"Stacks a list of identically nested objects.\n\n This takes a sequence of identically nested objects and returns a single\n nested object whose ith leaf is a stacked numpy array of the corresponding\n ith leaf from each element of the sequence.\n\n For example, if `sequence` 
is:\n\n ```python\n [{\n 'action': np.array([1.0]),\n 'observation': (np.array([0.0, 1.0, 2.0]),),\n 'reward': 1.0\n }, {\n 'action': np.array([0.5]),\n 'observation': (np.array([1.0, 2.0, 3.0]),),\n 'reward': 0.0\n }, {\n 'action': np.array([0.3]),1\n 'observation': (np.array([2.0, 3.0, 4.0]),),\n 'reward': 0.5\n }]\n ```\n\n Then this function will return:\n\n ```python\n {\n 'action': np.array([....]) # array shape = [3 x 1]\n 'observation': (np.array([...]),) # array shape = [3 x 3]\n 'reward': np.array([...]) # array shape = [3]\n }\n ```\n\n Note that the 'observation' entry in the above example has two levels of\n nesting, i.e it is a tuple of arrays.\n\n Args:\n sequence: a list of identically nested objects.\n\n Returns:\n A nested object with numpy.\n\n Raises:\n ValueError: If `sequence` is an empty sequence.\n \"\"\"\n # Handle empty input sequences.\n if not sequence:\n raise ValueError('Input sequence must not be empty')\n\n # Default to asarray when arrays don't have the same shape to be compatible\n # with old behaviour.\n try:\n return fast_map_structure(lambda *values: np.stack(values), *sequence)\n except ValueError:\n return fast_map_structure(lambda *values: np.asarray(values, dtype=object),\n *sequence)\n\n\ndef unstack_sequence_fields(struct: ElementType,\n batch_size: int) -> List[ElementType]:\n \"\"\"Converts a struct of batched arrays to a list of structs.\n\n This is effectively the inverse of `stack_sequence_fields`.\n\n Args:\n struct: An (arbitrarily nested) structure of arrays.\n batch_size: The length of the leading dimension of each array in the struct.\n This is assumed to be static and known.\n\n Returns:\n A list of structs with the same structure as `struct`, where each leaf node\n is an unbatched element of the original leaf node.\n \"\"\"\n\n return [\n tree.map_structure(lambda s, i=i: s[i], struct) for i in range(batch_size)\n ]\n\n\ndef broadcast_structures(*args: Any) -> Any:\n \"\"\"Returns versions of the arguments that give them the same nested structure.\n\n Any nested items in *args must have the same structure.\n\n Any non-nested item will be replaced with a nested version that shares that\n structure. 
The leaves will all be references to the same original non-nested\n item.\n\n If all *args are nested, or all *args are non-nested, this function will\n return *args unchanged.\n\n Example:\n ```\n a = ('a', 'b')\n b = 'c'\n tree_a, tree_b = broadcast_structure(a, b)\n tree_a\n > ('a', 'b')\n tree_b\n > ('c', 'c')\n ```\n\n Args:\n *args: A Sequence of nested or non-nested items.\n\n Returns:\n `*args`, except with all items sharing the same nest structure.\n \"\"\"\n if not args:\n return\n\n reference_tree = None\n for arg in args:\n if tree.is_nested(arg):\n reference_tree = arg\n break\n\n # If reference_tree is None then none of args are nested and we can skip over\n # the rest of this function, which would be a no-op.\n if reference_tree is None:\n return args\n\n def mirror_structure(value, reference_tree):\n if tree.is_nested(value):\n # Use check_types=True so that the types of the trees we construct aren't\n # dependent on our arbitrary choice of which nested arg to use as the\n # reference_tree.\n tree.assert_same_structure(value, reference_tree, check_types=True)\n return value\n else:\n return tree.map_structure(lambda _: value, reference_tree)\n\n return tuple(mirror_structure(arg, reference_tree) for arg in args)\n\n\ndef tree_map(f):\n \"\"\"Transforms `f` into a tree-mapped version.\"\"\"\n\n def mapped_f(*structures):\n return tree.map_structure(f, *structures)\n\n return mapped_f\n" } ]
48
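The record above ends with acme's `tree_utils.py`, whose `stack_sequence_fields` and `unstack_sequence_fields` batch and un-batch identically nested structures. A minimal round-trip sketch of that idea, assuming only `numpy` and `dm-tree` are installed; the `steps` data is hypothetical, shaped after the docstring's own example:

```python
# Round-trip sketch of stack_sequence_fields / unstack_sequence_fields from
# acme/utils/tree_utils.py above. Assumes numpy and dm-tree are installed;
# `steps` is hypothetical example data, not from the repo.
import numpy as np
import tree

steps = [
    {'action': np.array([1.0]), 'reward': 1.0},
    {'action': np.array([0.5]), 'reward': 0.0},
]

# Stack: a list of nests becomes one nest whose leaves gain a leading batch axis.
batched = tree.map_structure(lambda *leaves: np.stack(leaves), *steps)
assert batched['action'].shape == (2, 1)
assert batched['reward'].shape == (2,)

# Unstack: the inverse, indexing every leaf along the leading axis.
unbatched = [tree.map_structure(lambda leaf, i=i: leaf[i], batched)
             for i in range(2)]
assert np.allclose(unbatched[0]['action'], steps[0]['action'])
```

Stacking with `np.stack` requires every leaf to share a shape across the sequence; the library version above falls back to `np.asarray(values, dtype=object)` when the shapes differ.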
arjunsinghy96/practice
https://github.com/arjunsinghy96/practice
85fa69f982d1740b68b5ad1eb5abec9a19ad1d4f
48c4d43ab1d3e715481cda3bef4d008eebd3098a
47da1d5fe93c06bb0dda11365519b7c46be6b087
refs/heads/master
2021-09-06T01:02:37.467346
2018-02-01T05:19:13
2018-02-01T05:19:13
114,698,861
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.48564496636390686, "alphanum_fraction": 0.5002021789550781, "avg_line_length": 19.957626342773438, "blob_id": "87f7637a151e6e352d3c0a33eb9bf71c136118ee", "content_id": "07779e0ffb8baa0afa35a7f3709fd43610a3de65", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 2473, "license_type": "permissive", "max_line_length": 71, "num_lines": 118, "path": "/c/vector/vector.c", "repo_name": "arjunsinghy96/practice", "src_encoding": "UTF-8", "text": "/* Copyright (c) 2017 arjunsinghy96\n * * A primary implementation of dynamic array/vector\n */\n\n#include<stdio.h>\n#include<stdlib.h>\n#include\"exception.c\"\n\n#define INIT_SIZE 16\n\ntypedef struct vectors{\n int *arr;\n int size;\n int capacity;\n}vector;\n\nint is_empty(vector * v){\n return v->size > 0;\n}\n\nvoid _shrink_vector(vector *v){\n v->capacity /= 2;\n v->arr = realloc(v->arr, v->capacity * sizeof(int));\n}\n\nvoid _extend_vector(vector *v){\n v->arr = realloc(v->arr, 2*v->capacity *sizeof(int));\n v->capacity *= 2;\n}\n\nvoid append(vector *v, int num){\n if(v->arr == NULL){\n printf(\"You need to initialize the vector first\");\n return;\n }\n if(v->size == v->capacity){\n _extend_vector(v);\n }\n *(v->arr + v->size) = num;\n v->size += 1;\n}\n\nint pop(vector *v) {\n if(v->size == 0){\n Exception excpt = new_exception(41);\n printf(\"Error %d: %s\\n\", excpt.exp_no, excpt.exp_name);\n exit(-1);\n }\n v->size -= 1;\n int ret_val = *(v->arr + v->size);\n if(v->size < v->capacity/4){\n _shrink_vector(v);\n }\n return ret_val;\n}\n\nint get(vector *v, int position){\n if(position < v->size){\n return *(v->arr + position);\n }\n else {\n Exception err = new_exception(42);\n printf(\"Error %d: %s %d\\n\",err.exp_no, err.exp_name, position);\n exit(-1);\n }\n}\n\nvoid set(vector *v, int position, int num){\n if(position < v->size){\n *(v->arr + position) = num;\n }\n else {\n Exception err = new_exception(1);\n printf(\"%s %d\\n\", err.exp_name, position);\n exit(-1);\n }\n}\n\nvoid insert_at(vector *v, int position, int num){\n int temp, temp2=num, i;\n if(v->size == v->capacity){\n _extend_vector(v);\n }\n if(position < v->size){\n for(i=position; i< v->size; i++){\n temp = *(v->arr + i);\n *(v->arr + i) = temp2;\n temp2 = temp;\n }\n *(v->arr + v->size) = temp2;\n v->size += 1;\n }\n}\n\nvoid delete(vector * v, int index) {\n int i;\n if(v->size > index){\n for(i=index; i<v->size - 1;i++) {\n *(v->arr + i) = *(v->arr + i + 1);\n }\n v->size -= 1;\n }\n}\n\nint find(vector *v, int value){\n int i;\n for(i=0; i<v->size; i++){\n if(*(v->arr + i) == value){\n return i;\n }\n }\n}\n\nvoid init_vector(vector *v){\n v->arr = malloc(INIT_SIZE* sizeof(int));\n v->size = 0;\n v->capacity = 16;\n}\n" }, { "alpha_fraction": 0.6662908792495728, "alphanum_fraction": 0.6730552315711975, "avg_line_length": 24.342857360839844, "blob_id": "1adf57f1d229eac3e613a82c2da24b4f43df28ae", "content_id": "dd05b7c95edb495f7d1c69def27c6eb75de4413a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 887, "license_type": "permissive", "max_line_length": 85, "num_lines": 35, "path": "/python/img-download/img-download.py", "repo_name": "arjunsinghy96/practice", "src_encoding": "UTF-8", "text": "\"\"\"\nPurpose: 1. Download images from a list of urls save in urllist.txt and save\n them in ./images folder.\n 2. 
Make a zip file named imageArchive.zip with those images.\n\"\"\"\nfrom __future__ import print_function\n\nimport requests\nimport os\nfrom zipfile import ZipFile\n\ndef get_filename(url):\n\t\"\"\"\n\tExtracts filename from the url\n\treturn the last path\n\t\"\"\"\n\treturn url.split('/')[-1]\n\ntry:\n\tos.mkdir('images')\nexcept OSError:\n\tpass\n\nwith open('urllist.txt', 'r') as listfile, ZipFile('imageArchive.zip', 'a') as myzip:\n\tfor line in listfile:\n\t\timage_url = line.strip('\\n')\n\t\tfilename = get_filename(image_url)\n\t\tprint(\"Downloading\", filename, \"...\")\n\t\timg = requests.get(image_url, stream=True)\n\t\tif img.status_code == 200:\n\t\t\twith open('images/' + filename, 'wb') as f:\n\t\t\t\tfor chunk in img:\n\t\t\t\t\tf.write(chunk)\n\n\t\tmyzip.write(os.path.join('./images', filename), filename)\n" }, { "alpha_fraction": 0.7857142686843872, "alphanum_fraction": 0.7857142686843872, "avg_line_length": 20, "blob_id": "d2df9b0ba58e4e48be4082fd78053c6a81b77d57", "content_id": "90bd8a2c4ec2118ee3171d55a02a4651b240f6f6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 42, "license_type": "permissive", "max_line_length": 30, "num_lines": 2, "path": "/README.md", "repo_name": "arjunsinghy96/practice", "src_encoding": "UTF-8", "text": "# practice\nA library for random practice.\n" }, { "alpha_fraction": 0.32993084192276, "alphanum_fraction": 0.33980903029441833, "avg_line_length": 23.103174209594727, "blob_id": "04d12504d56c26edecb551b34766979944e8c2e0", "content_id": "cc2bfcd0f3e59ae3d81a0def48a2ba2a05b17829", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3037, "license_type": "permissive", "max_line_length": 81, "num_lines": 126, "path": "/cpp/custom_vector/my_vector.cpp", "repo_name": "arjunsinghy96/practice", "src_encoding": "UTF-8", "text": "#include<iostream>\n#include<stdlib.h>\n\nusing namespace std;\n\nclass Vector{\n private:\n int * arr;\n int v_size, v_capacity;\n\n void _extend_vector(){\n this->v_capacity *=2;\n this-> arr = (int *)realloc(this->arr, this->v_capacity*sizeof(int));\n }\n\n void _shrink_vector(){\n this->v_capacity /=2;\n this->arr = (int *)realloc(this->arr, this->v_capacity*sizeof(int));\n }\n\n public:\n Vector(){\n this->arr = new int[16];\n this->v_size = 0;\n this->v_capacity = 16;\n }\n\n int size(){\n return v_size;\n }\n\n int capacity(){\n return v_capacity;\n }\n\n void print(){\n if(v_size == 0){\n cout<< \"[]\" << endl;\n return;\n }\n cout << \"[\";\n for(int i =0; i<v_size-1; i++){\n cout << *(arr + i) << \", \";\n }\n cout << *(arr + v_size-1)<< \"]\"<<endl;\n }\n\n bool is_empty(){\n return v_size == 0;\n }\n\n int at(int index){\n if(index < v_size){\n return *(arr + index);\n }\n cout << \"IndexOutOfBounds\\n\" << endl;\n exit(-1);\n }\n\n void push(int value){\n if(v_size == v_capacity){\n _extend_vector();\n }\n *(arr + v_size) = value;\n v_size += 1;\n }\n\n void insert(int index, int value){\n int temp, temp2=value , i;\n if(v_size == v_capacity){\n _extend_vector();\n }\n if(index < v_size){\n for(i=index; i<v_size; i++){\n temp = *(arr + i);\n *(arr + i) = temp2;\n temp2 = temp;\n }\n *(arr + v_size) = temp2;\n v_size +=1;\n }\n }\n\n int pop(){\n if(v_size == 0){\n cout << \"ArrayEmptyError\\n\";\n exit(-1);\n }\n v_size -=1;\n int ret_val = *(arr + v_size);\n if(v_size < v_capacity/4){\n _shrink_vector();\n }\n return ret_val;\n }\n\n void del(int index){\n if(v_size > index){\n for(int 
i=index; i< v_size -1; i++) {\n *(arr + i) = *(arr + i + 1);\n }\n v_size -= 1;\n }\n else {\n cout << \"IndexOutOfBounds\\n\";\n exit(-1);\n }\n }\n\n int find(int value){\n for(int i=0; i< v_size; i++) {\n if(*(arr + i) == value) {\n return i;\n }\n }\n return -1;\n }\n\n int& operator[] (int index) {\n if(index > v_size) {\n cout << \"IndexOutOfBounds\\n\";\n exit(-1);\n }\n return arr[index];\n }\n};\n" }, { "alpha_fraction": 0.5532646179199219, "alphanum_fraction": 0.5756013989448547, "avg_line_length": 19.785715103149414, "blob_id": "7d92c80bfa155045766d17e3567d50103684a4f2", "content_id": "edfecdc98ce1e6c756dc34bff3580e18c03c12e5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 582, "license_type": "permissive", "max_line_length": 57, "num_lines": 28, "path": "/c/vector/exception.c", "repo_name": "arjunsinghy96/practice", "src_encoding": "UTF-8", "text": "/* Copyright (c) 2017 arjunsinghy96\n *\n * My little exception implementation for the vectors\n */\n\n#include<stdio.h>\n#include<string.h>\n\ntypedef struct err{\n int exp_no;\n char exp_name[100];\n}Exception;\n\nException new_exception(int num){\n Exception expt;\n expt.exp_no = num;\n switch(num){\n case 41:\n strcpy(expt.exp_name, \"Empty vector\");\n break;\n case 42:\n strcpy(expt.exp_name, \"Index out of bounds\");\n break;\n default:\n strcpy(expt.exp_name, \"Unknown error\");\n }\n return expt;\n}\n" }, { "alpha_fraction": 0.4403669834136963, "alphanum_fraction": 0.4610091745853424, "avg_line_length": 15.769230842590332, "blob_id": "ab81d927e8b6f30b7a1b9b293f52556c0386bd94", "content_id": "671e09377b4601f31040c2e197ba1b4b574d3e0c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 436, "license_type": "permissive", "max_line_length": 39, "num_lines": 26, "path": "/c/vector/test.c", "repo_name": "arjunsinghy96/practice", "src_encoding": "UTF-8", "text": "#include<stdio.h>\n#include<assert.h>\n#include\"vector.c\"\n\nvoid print(vector x) {\n int i;\n printf(\"[\");\n for(i=0;i < x.size - 1; i++){\n printf(\"%d, \", get(&x, i));\n }\n printf(\"%d]\\n\", get(&x, x.size-1));\n}\n\nvoid main(){\n int i;\n vector x;\n init_vector(&x);\n append(&x, 10);\n print(x);\n insert_at(&x, 0, 9);\n print(x);\n i = pop(&x);\n assert(i == 10);\n printf(\"%d\\n\", i);\n print(x);\n}\n" }, { "alpha_fraction": 0.44331982731819153, "alphanum_fraction": 0.49190282821655273, "avg_line_length": 17.296297073364258, "blob_id": "f15a7265b1c997a4e647b56af338968b6aa5441e", "content_id": "1c492ad11080563883fc300af7426808399be25e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 494, "license_type": "permissive", "max_line_length": 31, "num_lines": 27, "path": "/cpp/custom_vector/test.cpp", "repo_name": "arjunsinghy96/practice", "src_encoding": "UTF-8", "text": "#include<iostream>\n#include<assert.h>\n#include\"my_vector.cpp\"\n\nusing namespace std;\n\nint main(){\n Vector v;\n assert(v.size() == 0);\n assert(v.capacity() == 16);\n for(int i=0;i<20; i++){\n v.push(2*i);\n }\n assert(v.size() == 20);\n assert(v.capacity() == 32);\n v.print();\n v.del(10);\n assert(v.size() == 19);\n v.insert(5, 100);\n v.print();\n for(int i=0; i<17; i++){\n v.pop();\n }\n v.print();\n assert(v.capacity() == 8);\n return 0;\n}\n" } ]
7
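The practice record above implements the same dynamic array twice, in C (`c/vector/vector.c`) and C++ (`cpp/custom_vector/my_vector.cpp`): capacity starts at 16, doubles when the array fills, and halves once occupancy falls below a quarter. A hedged Python sketch of just that resize policy; the class and method names are illustrative, not taken from the repo:

```python
# Grow/shrink policy sketch mirroring vector.c / my_vector.cpp above:
# double on full, halve below quarter occupancy. Illustrative names only.
class Vec:
    def __init__(self, capacity=16):
        self._buf = [None] * capacity   # backing storage
        self._size = 0                  # live element count

    def push(self, value):
        if self._size == len(self._buf):         # full: double capacity
            self._buf.extend([None] * len(self._buf))
        self._buf[self._size] = value
        self._size += 1

    def pop(self):
        if self._size == 0:
            raise IndexError('pop from empty vector')
        self._size -= 1
        value = self._buf[self._size]
        if self._size < len(self._buf) // 4:     # sparse: halve capacity
            self._buf = self._buf[:max(1, len(self._buf) // 2)]
        return value

v = Vec()
for i in range(20):
    v.push(i)
assert len(v._buf) == 32     # grew 16 -> 32 on the 17th push
assert v.pop() == 19
```

Shrinking only below one quarter, rather than one half, leaves slack so that alternating push/pop near a capacity boundary does not reallocate on every call; both the C and C++ versions above use these same thresholds.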
oun1982/gitrepo
https://github.com/oun1982/gitrepo
89c267b61e2dd555d049a8296b4949a7f1639621
73e4b5798f8e0ed805a7ee49d1e54cef2b7b51c1
41a9fcc8df963ffb3c58b196d0d5c8053cb08509
refs/heads/master
2021-05-09T04:00:33.667791
2019-02-24T04:52:56
2019-02-24T04:52:56
119,258,440
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6756756901741028, "alphanum_fraction": 0.6756756901741028, "avg_line_length": 18, "blob_id": "f35ae54e6cc79778aa4de65ad30d125583b6e0a1", "content_id": "31b9e321d882140e4c66c6717d430d8880f88551", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 37, "license_type": "no_license", "max_line_length": 20, "num_lines": 2, "path": "/Person/__init__.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "from . import person\n#print(__name__)" }, { "alpha_fraction": 0.458119660615921, "alphanum_fraction": 0.5219373106956482, "avg_line_length": 23.38888931274414, "blob_id": "592967447cefea2308078ff4687acd33f0909c26", "content_id": "813c13bfcae94980942d7e031226e6db8c384b60", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1755, "license_type": "no_license", "max_line_length": 96, "num_lines": 72, "path": "/test_auto.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "import pyautogui as pg\nimport webbrowser as web\nimport time\n\n''''\nurl = 'http://192.168.1.205/dcallcenter'\nweb.open(url)\ntime.sleep(1)\n#pg.typewrite('oun1982', interval=0.25)\n#pg.press('tab')\n#pg.typewrite('Cisco@1982', interval=0.25)_\n#pg.press('enter')\npg.press(['tab', 'tab', 'tab'])\ntime.sleep(3)\npg.typewrite('osdadmin', interval=0.25)\npg.press('tab')\npg.typewrite('osd_SIP4321!', interval=0.25)\npg.press('enter')\ntime.sleep(2)\npg.press(['tab', 'tab', 'tab', 'tab', 'tab', 'tab', 'tab', 'tab', 'tab'])\npg.press('enter') \n'''\n#print(pg.position())\n#pg.moveTo(735,390)\n#pg.click()\n#time.sleep(2)\n#pg.press(['tab', 'tab'])\n#time.sleep(2)\n#pg.moveTo(735,367)\n\nwith open(\"D:\\Oun1982\\OSD_Work\\CUSTOMER\\CENTER AUTO LEASE\\POC E1 Cisco\\mobile.txt\",\"r\") as file:\n data = file.read()\n num = data.split(\"\\n\")\n x = 368\n y = 0\n z = 620\n sc = -50\n for i in num:\n print(x,y,z,sc)\n\n if y > 9:\n pg.scroll(sc)\n time.sleep(0.5)\n pg.moveTo(735, z)\n pg.click()\n time.sleep(1)\n pg.press(['tab', 'tab'])\n pg.typewrite(i, interval=0.05)\n pg.press('tab')\n pg.typewrite('1', interval=0.05)\n time.sleep(1)\n pg.moveTo(736, 322)\n pg.click()\n z = z + 3\n sc = sc - 50\n time.sleep(2)\n pg.moveTo(735, x)\n pg.click()\n time.sleep(1)\n pg.press(['tab', 'tab'])\n pg.typewrite(i, interval=0.1)\n pg.press('tab')\n pg.typewrite('1', interval=0.1)\n time.sleep(1)\n pg.moveTo(736, 322)\n pg.click()\n x = x + 30\n y = y + 1\n time.sleep(1)\n\n#for num in range(len(file.readlines())):7\n# print(file[0])" }, { "alpha_fraction": 0.49738219380378723, "alphanum_fraction": 0.5654450058937073, "avg_line_length": 22.75, "blob_id": "25ebfd75d1e9e4eccd2f5a9481c8cb6a72274eb9", "content_id": "e7cb33d3cc768670e06e0c68fc1e9c07b6c129b0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 191, "license_type": "no_license", "max_line_length": 42, "num_lines": 8, "path": "/test1.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "#__author__ = 'oun1982'\nage = int(input(\"Please enter number : \"))\nprint (age)\nif (age >= 0 and age <= 99) :\n print (age,\" between 0 - 99 \")\nelse :\n print (\"Not between 0 - 99\")\nprint\n\n" }, { "alpha_fraction": 0.46699875593185425, "alphanum_fraction": 0.5080946683883667, "avg_line_length": 22.647058486938477, "blob_id": "f088d5becdf6d0146b32dfb585e2b09159f063cf", "content_id": "6a99e8a658215ce0c4324e0fa7bd12475dd8d83e", "detected_licenses": [], "is_generated": 
false, "is_vendor": false, "language": "Python", "length_bytes": 803, "license_type": "no_license", "max_line_length": 96, "num_lines": 34, "path": "/pygui_asterisk.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "import pyautogui as pg\nimport webbrowser as web\nimport time\n\nwith open(\"D:\\Oun1982\\OSD_Work\\CUSTOMER\\CENTER AUTO LEASE\\POC E1 Cisco\\mobile.txt\",\"r\") as file:\n data = file.read()\n num = data.split(\"\\n\")\n x = 4\n for i in num:\n print(x)\n pg.moveTo(326, 205)\n pg.click()\n time.sleep(1)\n for y in range(x):\n pg.press('tab')\n time.sleep(0.025)\n\n pg.press('enter')\n time.sleep(1)\n pg.press(['tab', 'tab'])\n pg.typewrite(i, interval=0.01)\n pg.press('tab')\n pg.typewrite('1', interval=0.01)\n time.sleep(1)\n pg.press('tab')\n pg.press('enter')\n if x > 18:\n x = 19\n else:\n x = x + 1\n\n\n#for num in range(len(file.readlines())):7\n# print(file[0])" }, { "alpha_fraction": 0.7428571581840515, "alphanum_fraction": 0.7428571581840515, "avg_line_length": 22.66666603088379, "blob_id": "b199e081f5d62ff5e6dcd3b1ac5d589c1cbe20e7", "content_id": "3cf9c894ab9408a9f5cd7be882d30540f859bb9b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 70, "license_type": "no_license", "max_line_length": 37, "num_lines": 3, "path": "/Asterisk-Pandas.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "import pandas as pd\ncsvdf = pd.read_csv('auto-agent.csv')\nprint(csvdf)" }, { "alpha_fraction": 0.602150559425354, "alphanum_fraction": 0.6236559152603149, "avg_line_length": 25.714284896850586, "blob_id": "366a8f28f847480efedeef843148bb9bcaabcefc", "content_id": "9d8b0b08d03a55d1bac957f0387498ad664063e6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 186, "license_type": "no_license", "max_line_length": 61, "num_lines": 7, "path": "/Arg_Except.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\ndef tmp_covert(var):\n try:\n return int(var)\n except ValueError as err:\n print (\"Argument doesn't contain number \\n\",err.args)\ntmp_covert(\"aaa\")" }, { "alpha_fraction": 0.660347580909729, "alphanum_fraction": 0.6761453151702881, "avg_line_length": 26.565217971801758, "blob_id": "7db04b39787fe58e6b30640dce74d88cab58f0ab", "content_id": "a8d8103bf6073bd53952075af8ca94d5dd89ed9a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 633, "license_type": "no_license", "max_line_length": 77, "num_lines": 23, "path": "/ma/py_ssh.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "import paramiko\nimport socket\n\nssh = paramiko.SSHClient()\nssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\ntry:\n ssh.connect('192.168.1.205', username='root', password='osd_SIP4321!')\nexcept paramiko.SSHException:\n print(\"Connection Failed\")\n quit()\n#ssh.exec_command(\"su\")\n#stdin,stdout,stderr = ssh.exec_command(\"ansible -m ping all\")\nname = stdin,stdout,stderr = ssh.exec_command(\"hostname\")\nfor hname in stdout.readlines():\n hostname = hname\n\nstdin,stdout,stderr = ssh.exec_command(\"df -h\")\nres = ''\nfor line in stdout.readlines():\n res += line\n #print(line.strip())\n\nssh.close()" }, { "alpha_fraction": 0.4652840495109558, "alphanum_fraction": 0.5374211072921753, "avg_line_length": 19.924528121948242, "blob_id": "152ff555d31467febe3612547edf0518408f1b5d", "content_id": 
"9695efd3bb9ea7bad42bdc392bdd6f6dd2107fa6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1109, "license_type": "no_license", "max_line_length": 58, "num_lines": 53, "path": "/test2.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\n'''\nimport time\nstart_time = time.time()\nnumber = int(input(\"enter your number : \"))\nfor i in range(1,1000001):\n print(\"%d * %d = %d\" %(number,i,(number * i)))\nprint(\"---%s seconds ---\"%(time.time() - start_time))\n\ntup1 = (12,13,14,15.6)\ntup2 = ('Asterisk','linux',1982)\nprint (tup1)\nprint (tup2)\n\ndict1={'oun1982':'1','pongsakon':'2'}\nprint (dict1)\n'''\n\n'''\nvar = 100\nif var < 200:\n print (\"Expression value is less than 200\" )\n if var == 150:\n print (\"which is 150\")\n elif var == 100:\n print (\"which is 100\")\n elif var == 50:\n print (\"which is 50\")\nelif var < 50:\n print (\"Expression value is less than less than 50\" )\nelse:\n print (\"Could not find true expression\")\nprint ('Good Bye')\n\nstrr = 'Pongsakon Tongsook'\nfor name in strr:\n print (name)\n\nfor row in range(1,10):\n for col in range(1,10):\n prod = row * col\n if prod < 10:\n print (' ',end = '')\n print(row * col,' ',end =' ')\n print()\n'''\n\ncombs = []\nfor x in [1,2,3]:\n for y in [7,8,9]:\n if x != y:\n combs.append((x,y))\nprint(combs)\n" }, { "alpha_fraction": 0.8461538553237915, "alphanum_fraction": 0.8461538553237915, "avg_line_length": 25.5, "blob_id": "b280605b6e86f5199b3e165ab3d59d0652930527", "content_id": "da1c308720309c837eefea395014531e0c3ee81f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 52, "license_type": "no_license", "max_line_length": 31, "num_lines": 2, "path": "/readme.md", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "Hello GIT From gitmac directory\nThis edit by gitrepo" }, { "alpha_fraction": 0.6503496766090393, "alphanum_fraction": 0.6806526780128479, "avg_line_length": 20.399999618530273, "blob_id": "e665e49f07fb3019d665fd29b2112771b0f8cffa", "content_id": "f98ab8cbf29c9c5eec00f57d78ea957b5da4e211", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 429, "license_type": "no_license", "max_line_length": 39, "num_lines": 20, "path": "/Server.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\nfrom socket import *\nfrom time import ctime\n\nHOST = 'localhost'\nPORT = 5000\nBUFFER_SIZE = 1024\nADDRESS = (HOST,PORT)\n\nserver = socket(AF_INET,SOCK_STREAM)\nserver.bind(ADDRESS)\nserver.listen(5)\n\nwhile True:\n print('waiting for connection...')\n client, address = server.accept()\n print('Connection from : ',address)\n data_byte = str.encode(ctime())\n client.send(data_byte)\n client.close()\n\n" }, { "alpha_fraction": 0.707317054271698, "alphanum_fraction": 0.7398374080657959, "avg_line_length": 23.799999237060547, "blob_id": "6abb1b19c757c8b7ed68e6501a7ae9cce785b5f3", "content_id": "1f9749b2884d89bad1cc56d40dc77c8e22e1d158", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 123, "license_type": "no_license", "max_line_length": 42, "num_lines": 5, "path": "/GUI2.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\nfrom tkinter import Label\nwidget = Label(None,text='GUI HelloWorld')\nwidget.pack()\nwidget.mainloop()" }, { "alpha_fraction": 
0.5750331878662109, "alphanum_fraction": 0.5962815284729004, "avg_line_length": 27.923076629638672, "blob_id": "4bd4fe017465e1f5ef18fef1e19d32fd35e38326", "content_id": "8d00a2bc2d06fbf4651c9548fc94d30bc3367be0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 753, "license_type": "no_license", "max_line_length": 65, "num_lines": 26, "path": "/Thread_Test.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "from threading import Thread\nimport time\n__author__ = 'oun1982'\nclass Thread_Test(Thread):\n def __init__(self,threadID,name,counter):\n Thread.__init__(self)\n self.threadID = threadID\n self.name = name\n self.counter = counter\n def printTime(self,threadName,delay,counter):\n while counter:\n time.sleep(delay)\n print(\"%s : %s\"%(threadName,time.ctime(time.time())))\n print(time.strftime(\"%Y-%m-%d %H:%M:%S\"))\n counter -=1\n def run(self):\n print(\"Start \"+self.name)\n self.printTime(self.name,self.counter,5)\n print(\"Exiting \"+self.name)\n\n\nthread1 = Thread_Test(1,\"Thread-1\",1)\nthread2 = Thread_Test(2,\"Thread-2\",2)\n\nthread1.start()\nthread2.start()\n\n" }, { "alpha_fraction": 0.6034985184669495, "alphanum_fraction": 0.6413994431495667, "avg_line_length": 26.399999618530273, "blob_id": "28a127c698c5726d24ede2b19f3ab58e829d4efb", "content_id": "7d3c88d1dc0083167fa37d7950c00463012773c1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 686, "license_type": "no_license", "max_line_length": 56, "num_lines": 25, "path": "/Employee.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\nclass Employee:\n empCount = 0\n\n def __init__(self,name,salary):\n self.name = name\n self.salary = salary\n Employee.empCount += 1\n\n def displayCount(self):\n print(\"Total Employee = %d\" %Employee.empCount)\n\n def displayEmployee(self):\n print(\"Name :\",self.name,\",salary:\",self.salary)\n\nemp1 = Employee(\"Oun1982\",30000)\nemp2 = Employee(\"Jang\",40000)\nemp1.displayEmployee()\nemp2.displayEmployee()\nemp1.displayCount()\nprint(emp1.name)\nprint(\"Employee.__name__:\",Employee.__name__)\nprint(\"Employee.__module__:\",Employee.__module__)\nprint(\"Employee.__doc__:\",Employee.__doc__)\nprint(\"Employee.__dict__:\",Employee.__dict__)\n\n" }, { "alpha_fraction": 0.4946666657924652, "alphanum_fraction": 0.5406666398048401, "avg_line_length": 32.33333206176758, "blob_id": "215bee7f2445c2dfcaee814f699fe1f1e8dbdf1d", "content_id": "dc4c4efdb3ba1ca4151c057a4d6bbd97ae7bf5ca", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1500, "license_type": "no_license", "max_line_length": 129, "num_lines": 45, "path": "/newmon.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "import paramiko, socket\nfrom fpdf import FPDF\nclass Mainmon:\n def __init__(self, host):\n ip = host\n res = ' '\n ssh = paramiko.SSHClient()\n ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n try:\n ssh.connect(ip, username='root', password='osd_SIP4321!')\n except paramiko.SSHException:\n print(\"Connection Failed\")\n quit()\n\n name = stdin,stdout,stderr = ssh.exec_command(\"hostname\")\n for hname in stdout.readlines():\n hostname = hname\n\n stdin,stdout,stderr = ssh.exec_command(\"df -h\")\n\n for line in stdout.readlines():\n res += line\n ssh.close()\n self.showReport(res, hostname)\n\n def showReport(self, res, hostname):\n df = res.split(' ')\n list1 = 
[e for e in df if e]\n df_res = ('Name: Total %s | Avaliable %s | Used %s | Used in Percent %s'%(list1[7] ,list1[8] ,list1[9], list1[10]))\n symbol = '-' * 200\n pcent = int((list1[10]).replace('%',''))\n\n pdf = FPDF()\n pdf.add_page()\n pdf.set_font(\"Arial\", size=12)\n if pcent > 90:\n pdf.set_text_color(254, 0, 0)\n pdf.cell(100, 10, txt=symbol, ln=1, align=\"C\")\n pdf.cell(200, 10, txt=hostname, ln=1, align=\"C\")\n pdf.cell(120, 10, txt=df_res, ln=1, align=\"C\")\n pdf.cell(100, 10, txt=symbol, ln=1, align=\"C\")\n pdf.output(\"df.pdf\")\n\nmon = Mainmon('192.168.1.205')\nmon = Mainmon('192.168.1.220')\n" }, { "alpha_fraction": 0.534426212310791, "alphanum_fraction": 0.6262295246124268, "avg_line_length": 16.91176414489746, "blob_id": "933981040dbbe769b0149caf51a55af31fa11894", "content_id": "8ac0471de997a96309ba3200b0e8ca69660e3e34", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 610, "license_type": "no_license", "max_line_length": 45, "num_lines": 34, "path": "/01.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\n\nlst1 = [0,1,2,3,4]\nlst2 = [5,6,7,8,9]\nstr1 = \"Python language\"\nprint(\"Hello %s \"%str1)\nprint(\"%0.6s\"%str1)\nprint(\"str1[0:6] =\",str1[0:6])\nprint(lst1[3])\nprint(lst1[-2])\nprint(type(lst1))\nprint(type(str1))\nprint(len(lst1))\nprint(lst1+lst2)\nprint(lst1*3)\nprint(3 in lst1)\nfor x in lst1:\n print(x)\n\nsetCar = {\"toyota\",\"honda\",\"mazda\",\"benz\"}\nprint(setCar)\nprint('toyota' in setCar)\nprint(10**5)\n\nvar1 = 100\nif var1:\n print(\"1 - Got a true expression value \")\n print(var1)\nvar2 = 0\nif var2:\n print(\"1 - Got a true expression value \")\n print(var2)\n pass\nprint(\"Good Bye\")\n\n" }, { "alpha_fraction": 0.679186224937439, "alphanum_fraction": 0.7261345982551575, "avg_line_length": 31, "blob_id": "925beb63f6dac4f0fb2d2b2a0ca04a2bce9510db", "content_id": "9c5f57f92dcfb00c6a48a8cd1782fdce9af064da", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 639, "license_type": "no_license", "max_line_length": 74, "num_lines": 20, "path": "/CalArea.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\n'''\nimport CalAreaRectangle\nprint(\"Area of Recangle : \", CalAreaRectangle.rectangle(3, 4))\nprint(\"Area of Squre :\", CalAreaRectangle.squre(10, 10))\nprint(\"Area of Parallelogram :\", CalAreaRectangle.parallelogram(1.5, 4.5))\nprint(\"Area of Trapzoid :\",CalAreaRectangle.trapezoid(5, 2.5))\n'''\nimport sys\n\nfrom CalArea import CalAreaRectangle\nfrom CalArea.CalAreaRectangle import *\n\nprint(\"Area of Recangle : \", rectangle(3, 4))\nprint(\"Area of Squre :\", squre(10, 10))\nprint(\"Area of Parallelogram :\", parallelogram(1.5, 4.5))\nprint(\"Area of Trapzoid :\", trapezoid(5, 2.5))\nprint (dir(CalAreaRectangle))\n\nprint(sys.path)" }, { "alpha_fraction": 0.4680851101875305, "alphanum_fraction": 0.5957446694374084, "avg_line_length": 22.5, "blob_id": "289ac32fdd0a564628d4e09d8d3a0d8a4d3a7403", "content_id": "c6eed999c56a9e9a6eb19d26ec36d17db966f102", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 47, "license_type": "no_license", "max_line_length": 23, "num_lines": 2, "path": "/hello.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\nprint (sum(range(1,3)))\n" }, { "alpha_fraction": 0.44690266251564026, "alphanum_fraction": 
0.4690265357494354, "avg_line_length": 16.461538314819336, "blob_id": "789322b6d669c1ddbaf1546a302ec7dc33ca0478", "content_id": "d8c4ccc4eac404300808f8b19370207d99b6cf5c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 226, "license_type": "no_license", "max_line_length": 43, "num_lines": 13, "path": "/pywebapp/webapp.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "from flask import Flask\n\napp = Flask('beagle')\n\[email protected]('/')\ndef hello():\n return '<center>' \\\n '<h1 style=\"font-size:100px\">' \\\n 'Hello World' \\\n '</center>' \\\n '</h1>'\n\napp.run()" }, { "alpha_fraction": 0.3913043439388275, "alphanum_fraction": 0.5652173757553101, "avg_line_length": 22, "blob_id": "0384a3a20f9aebb2e7c2efc305f83cee366ea3f5", "content_id": "46e0b04464a7616f5e077a75113ea4d7abd31974", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 23, "license_type": "no_license", "max_line_length": 22, "num_lines": 1, "path": "/CalArea/__init__.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\n" }, { "alpha_fraction": 0.6964656710624695, "alphanum_fraction": 0.7297297120094299, "avg_line_length": 20.909090042114258, "blob_id": "1c7b7104c52c2dbfaa5edba5467c8c13af57f725", "content_id": "b96baf58851ecde291bb65fc891e24e04d6e1c39", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 481, "license_type": "no_license", "max_line_length": 41, "num_lines": 22, "path": "/CalArea/CalAreaRectangle.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\n\ndef rectangle (width,height):\n return width * height\n\ndef squre(width1,width2):\n return width1 * width2\n\ndef parallelogram(height,base):\n return height * base\n\ndef trapezoid(sumifpararell,height):\n return 0.5 * sumifpararell * height\n\ndef rhomboid(mulofdiagonal):\n return 0.5 * mulofdiagonal\n\ndef RectangularKite(mulofdiagonal):\n return 0.5 * mulofdiagonal\n\ndef AnyRectangular(diagonal,sumofbranch):\n return 0.5 * diagonal * sumofbranch" }, { "alpha_fraction": 0.5554770231246948, "alphanum_fraction": 0.5901060104370117, "avg_line_length": 33.10843276977539, "blob_id": "f58b8d46450ac44612f7f19d77fb0f557fb3815a", "content_id": "6077c6e1f81b3e863a072b25faca4062d932bf4d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2830, "license_type": "no_license", "max_line_length": 127, "num_lines": 83, "path": "/QT_login.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "import sys, socket\nfrom PyQt5.QtWidgets import QApplication, QWidget, \\\n QMainWindow,QPushButton, QMessageBox, QLineEdit, QLabel, QComboBox\nfrom PyQt5.QtCore import pyqtSlot\nfrom PyQt5.QtGui import QIcon\nfrom gitrepo.QT_main import *\nfrom PyQt5.QtCore import QSize, QRect\n\nclass App(QMainWindow):\n def __init__(self):\n super().__init__()\n self.title = 'Asterisk DialPlan'\n self.left = 500\n self.top = 200\n self.setFixedSize(400, 300)\n self.width = 400\n self.height = 300\n self.initUI()\n self.setMinimumSize(QSize(640, 140))\n\n def initUI(self):\n hostname = socket.gethostname()\n ip = socket.gethostbyname(hostname)\n self.setWindowTitle(self.title)\n self.setGeometry(self.left, self.top, self.width, self.height)\n self.statusBar().showMessage(ip)\n\n self.textbox_user = QLineEdit(self)\n self.textbox_user.move(100, 60)\n 
self.textbox_user.resize(200, 25)\n self.textbox_pass = QLineEdit(self)\n self.textbox_pass.setEchoMode(QLineEdit.Password)\n self.textbox_pass.move(100, 90)\n self.textbox_pass.resize(200, 25)\n self.textbox_host = QLineEdit(self)\n self.textbox_host.move(100, 120)\n self.textbox_host.resize(200, 25)\n\n lb_user = QLabel('User: ',self)\n lb_user.move(60,60)\n lb_pass = QLabel('Password: ', self)\n lb_pass.move(37, 90)\n lb_host = QLabel('Host: ', self)\n lb_host.move(61, 120)\n lb_eg = QLabel('e.g. 192.168.1.1 ', self)\n lb_eg.move(100, 150)\n\n submit = QPushButton('Submit', self)\n submit.setToolTip('submit button')\n submit.move(100, 180)\n submit.clicked.connect(self.on_click_submit)\n reset = QPushButton('Reset', self)\n reset.setToolTip('Clear data in dialog')\n reset.move(200, 180)\n reset.clicked.connect(self.on_click_reset)\n self.show()\n\n @pyqtSlot()\n\n def on_click_reset(self):\n self.textbox_user.setText(\"\")\n self.textbox_pass.setText(\"\")\n self.textbox_host.setText(\"\")\n\n def on_click_submit(self):\n if self.textbox_user.text() == 'admin' and self.textbox_pass.text() == 'admin' and self.textbox_host.text() != \"\":\n textboxOK = self.textbox_user.text()\n QMessageBox.question(self, 'Asterisk Dialplan',' Login Success '\n , QMessageBox.Ok,QMessageBox.Cancel)\n self.textbox_user.setText(\"\")\n self.hide()\n main = AppMain()\n main.show()\n\n else:\n QMessageBox.warning(self, 'Login Error ', ' Invalid Value !!!!! '\n ,QMessageBox.Ok)\n\n\nif __name__ == '__main__':\n app = QApplication(sys.argv)\n ex = App()\n sys.exit(app.exec_())" }, { "alpha_fraction": 0.709039568901062, "alphanum_fraction": 0.7203390002250671, "avg_line_length": 24.35714340209961, "blob_id": "65cf5a977473c2dcc270ffd0b2841612e1f71fe2", "content_id": "5d987c1b46d6de65f0ffd5bc5d8d5345f674864b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 354, "license_type": "no_license", "max_line_length": 90, "num_lines": 14, "path": "/CalArea/Db.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\n\nimport mysql.connector\n\n\nconn = mysql.connector.connect(user='root',password='',host='localhost',database='beagle')\nmycursor = conn.cursor()\nmycursor.execute(\"SELECT * FROM agents\")\nrow = mycursor.fetchone()\nwhile row is not None:\n print(row)\n row = mycursor.fetchone()\nprint(\"Number of Rows :\",mycursor.rowcount)\nconn.close" }, { "alpha_fraction": 0.5948936343193054, "alphanum_fraction": 0.6434042453765869, "avg_line_length": 27.634145736694336, "blob_id": "408b5dbe498b17631a190e8d5a8c3bebdace2a58", "content_id": "87b519a8b0ec7361af3ed56017235c5604c58f49", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1175, "license_type": "no_license", "max_line_length": 78, "num_lines": 41, "path": "/paramiko2.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "import paramiko\nimport socket\nfrom fpdf import FPDF\n\nssh = paramiko.SSHClient()\nssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\ntry:\n ssh.connect('192.168.1.205', username='root', password='osd_SIP4321!')\nexcept paramiko.SSHException:\n print(\"Connection Failed\")\n quit()\n#ssh.exec_command(\"su\")\n#stdin,stdout,stderr = ssh.exec_command(\"ansible -m ping all\")\nname = stdin,stdout,stderr = ssh.exec_command(\"hostname\")\nfor hname in stdout.readlines():\n hostname = hname\n\nstdin,stdout,stderr = ssh.exec_command(\"df -h\")\nres = ''\nfor line in 
stdout.readlines():\n res += line\n #print(line.strip())\n\nssh.close()\n\ndf = res.split(' ')\nlist1 = [e for e in df if e]\nprint(list1[10])\n\ndf_res = ('Name: Total %s | Avariable %s | Used %s | Used in Percent %s'\n %(list1[7] ,list1[8] ,list1[9], list1[10]))\nsymbol = '-' * 200\npdf = FPDF()\npdf.add_page()\npdf.set_font(\"Arial\", size=12)\npdf.set_text_color(254,0,0)\npdf.cell(100, 10, txt=symbol, ln=1, align=\"C\")\npdf.cell(200, 10, txt=hostname, ln=1, align=\"C\")\npdf.cell(120, 10, txt=df_res, ln=1, align=\"C\")\npdf.cell(100, 10, txt=symbol, ln=1, align=\"C\")\npdf.output(\"df.pdf\")\n\n" }, { "alpha_fraction": 0.5559566617012024, "alphanum_fraction": 0.5740072131156921, "avg_line_length": 17.33333396911621, "blob_id": "1d58b5d8b9eeb4c1fa15ac59b3bad4a626b37e88", "content_id": "bc83b8ad6944f1536cbe2efeac73b6c08c249b2f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 277, "license_type": "no_license", "max_line_length": 30, "num_lines": 15, "path": "/TestTry.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\nimport sys\ntry:\n f = open('num.txt')\n s = f.read()\n i = int(s.strip())\n print(i)\nexcept IOError:\n print(\"IO Error\")\nexcept ValueError:\n print(\"N0 valid interger\")\nexcept:\n print(\"Unexcepted error\")\n#try:\n# fh = open(\"test\",\"w\")\n\n\n" }, { "alpha_fraction": 0.487574964761734, "alphanum_fraction": 0.5029991269111633, "avg_line_length": 45.68000030517578, "blob_id": "931a90d3fb5d607bb4de5956454e1f7ef101fcb0", "content_id": "3fa70c7bcaba262b36f720093e45ae65d48b907c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1167, "license_type": "no_license", "max_line_length": 117, "num_lines": 25, "path": "/ping.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "import time\nimport subprocess\nlan_host = \"192.168.66.254\"\nwan_host = \"google.co.th\"\n#ping = call([\"host\",shell=True])\n#ping = call([\"ping\", host , \"-c\", \"2\"])\ndef ping_func(host):\n datetime = time.strftime(\"-------------------------- %Y%m%d_%H%M --------------------------\")\n #date_out = subprocess.Popen([\"date\"], stdout=subprocess.PIPE).communicate()\n #res_out = str(date_out)\n #ping_out = subprocess.Popen([\"/bin/ping\", host , \"-c\", \"3\"], stdout=subprocess.PIPE).communicate()\n ping_out = subprocess.Popen(\"/bin/ping \" + host + \" -c 15\", shell=True, stdout=subprocess.PIPE).communicate()\n res_ping = str(ping_out).split(\"\\\\n\")\n file_op = open(\"ping.log\", \"a\")\n file_op.write(datetime + \"\\n\")\n file_op.write(\" PING \" + host + \"\\n\")\n #file_op.write(res_ping[1] + \"\\n\")\n #file_op.write(res_ping[2] + \"\\n\")\n #file_op.write(res_ping[3] + \"\\n\")\n for i in range(len(res_ping)):\n file_op.write(res_ping[i] + \"\\n\")\n file_op.write(\"########################### end ping #######################\\n\\n\")\n\nping_func(lan_host)\nping_func(wan_host)\n" }, { "alpha_fraction": 0.692307710647583, "alphanum_fraction": 0.7342657446861267, "avg_line_length": 19.428571701049805, "blob_id": "ad1f6c700cda31d117d99c787ebca59d08071d52", "content_id": "14aaff681669489ca2680000903e5f42094bc1f3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 286, "license_type": "no_license", "max_line_length": 36, "num_lines": 14, "path": "/Client.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\nfrom 
socket import *\n\nHOST = 'localhost'\nPORT = 5000\nBUFFER_SIZE = 1024\nADDRESS = (HOST,PORT)\n\nserver = socket(AF_INET,SOCK_STREAM)\nserver.connect(ADDRESS)\ndata_byte = server.recv(BUFFER_SIZE)\ndayAndTime = bytes.decode(data_byte)\nprint(dayAndTime)\nserver.close()\n" }, { "alpha_fraction": 0.5381165742874146, "alphanum_fraction": 0.5762332081794739, "avg_line_length": 23.83333396911621, "blob_id": "62b67e46a94b4b6380c61ba22ba4c0119100e576", "content_id": "361c18583adc9b2a17451e0b2d007c371dafa040", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 446, "license_type": "no_license", "max_line_length": 72, "num_lines": 18, "path": "/02.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\nname = input(\"Enter goods name : \")\nprice = float(input(\"Enter price of %s : \"%name))\nprint(price)\nif price >= 500:\n temp = price * 0.03\n price = price - temp\nelse:\n VAT = price * 0.07\n price = price + VAT\nprint(\"The price of %s (inc VAT 7%%) is %.2f %s :\"%(name,price,\"Bath.\"))\n\nnum = int(input(\"Put your number : \"))\nif num % 2 != 0:\n print(num,\"is Odd.\")\nelse:\n print(num,\"is Even\")\nprint(\"Good Bye!\")" }, { "alpha_fraction": 0.5849056839942932, "alphanum_fraction": 0.6037735939025879, "avg_line_length": 27.909090042114258, "blob_id": "6151cbf7c166b671732723c439bff70d79d5a60b", "content_id": "9682cfc09569b86fabdf158a2814a0258c5957c5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1272, "license_type": "no_license", "max_line_length": 77, "num_lines": 44, "path": "/Car.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\nclass Car:\n #Attricutes\n color = \"No brand yet\"\n brand = \"No brand yet\"\n number_of_seats = 4\n number_of_wheels = 4\n maxSpeed = 0\n regis_number = 0\n\n def __init__(self,color,brand,number_of_seats,number_of_wheels,maxSpeed):\n self.color = color\n self.brand = brand\n self.number_of_seats = number_of_seats\n self.number_of_wheels = number_of_wheels\n self.maxSpeed = maxSpeed\n #self.regis_number +=1\n Car.regis_number +=1\n def serColor(self,x):\n self.color = x\n\n def setBrand(self,x):\n self.brand = x\n\n def setNumberOfSeates(self,x):\n self.number_of_seats = x\n\n def setNumberOfWheels(self,x):\n self.number_of_wheels = x\n\n def setMaxSpeed(self,x):\n self.maxSpeed = x\n\n def printData(self):\n print(\"The color of car is : \",self.color)\n print(\"The car brand : \",self.brand)\n print(\"The number of seats :\",self.number_of_seats)\n print(\"The number of Wheels :\",self.number_of_wheels)\n print(\"The maximum speed is :\",self.maxSpeed,\"km/h\")\n print(\"Register number : \",self.regis_number)\ncar1 = Car('blue','ToYoTa',4,4,150)\ncar1.printData()\ncar2 = Car('green','Honda',4,4,120)\ncar2.printData()\n" }, { "alpha_fraction": 0.7142857313156128, "alphanum_fraction": 0.7142857313156128, "avg_line_length": 14, "blob_id": "808ee5d6ee041d5c04447cf9b9b109b49655f065", "content_id": "7328ea344db100b4414fe8bd5a5fcb1176209d9d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 14, "license_type": "no_license", "max_line_length": 14, "num_lines": 1, "path": "/test_git.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "print('Cicso')" }, { "alpha_fraction": 0.6035182476043701, "alphanum_fraction": 0.617050051689148, "avg_line_length": 16.619047164916992, "blob_id": 
"26259309ad7f707f64fa084a05eb897b67be3812", "content_id": "eb348937d6d926847f31156d130028aa72fe1cb2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 739, "license_type": "no_license", "max_line_length": 39, "num_lines": 42, "path": "/Book.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\nclass Book:\n bookCount = 0\n def __init__(self):\n Book.bookCount += 1\n\n def setBookName(self,name):\n self.name = name\n\n def getBookName(self):\n print(\"Book Name : \",self.name)\nclass OtherBook(Book):\n def showYear(self):\n print(\"This Year 2016\")\nclass Apple:\n def printApple(self):\n print(\"Apple\")\n\nobj = OtherBook()\nobj.setBookName(\"Asterisk\")\nobj.getBookName()\nobj.showYear()\n\nif issubclass(OtherBook,Book):\n print(\"True\")\nelse:\n print(\"False\")\n\nif isinstance(obj,OtherBook):\n print(\"True\")\nelse:\n print(\"False\")\n\nif isinstance(obj,Book):\n print(\"true\")\nelse:\n print(\"False\")\n\nif isinstance(obj,Apple):\n print(\"True\")\nelse:\n print(\"False\")" }, { "alpha_fraction": 0.7053139805793762, "alphanum_fraction": 0.7246376872062683, "avg_line_length": 33.66666793823242, "blob_id": "0bdb6f2a0a36f3255eaab305dfd798d8426cb040", "content_id": "9ea3956d3ebe2a9f9a427af5519cb7464db7efe7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 207, "license_type": "no_license", "max_line_length": 72, "num_lines": 6, "path": "/ma/ma-dcall.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "from docxtpl import DocxTemplate\n\ndoc =DocxTemplate(\"dcall-ma-server.docx\")\nname = {'name': 'Trade4145', 'model': 'Dell PowerEdge', 'sn': 'ABCDEFG'}\ndoc.render(name)\ndoc.save('dcall-ma-server-complete.docx')" }, { "alpha_fraction": 0.5792163610458374, "alphanum_fraction": 0.5945485234260559, "avg_line_length": 25.590909957885742, "blob_id": "a5af38018e4a312a630b872bdc08cc2efb689846", "content_id": "d683f86e883f7da532ee0e2bb20a3052fc37569e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 587, "license_type": "no_license", "max_line_length": 82, "num_lines": 22, "path": "/06.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\nclass Car:\n color = \"No brand yet\"\n brand = \"No brand yet\"\n number_of_seats = 4\n number_of_wheels = 4\n maxSpeed = 0\n registration_number = 0\n\n def __init__(self, color, brand, number_of_seats, number_of_wheels, maxSpeed):\n self.color = color\n self.brand = brand\n self.number_of_seats = number_of_seats\n self.number_of_wheels = number_of_wheels\n self.maxSpeed = maxSpeed\n self.registration_number += 1\n\n def setColor(self, x):\n self.color = x\n\n def setBrand(self, x):\n self.color = x\n\n\n" }, { "alpha_fraction": 0.4819079041481018, "alphanum_fraction": 0.5263158082962036, "avg_line_length": 17.42424201965332, "blob_id": "0832a5a7b82e473180cbfe30d2779d7d9084bf21", "content_id": "4cce17327f4d3a557cdf661b26a99f32345f59b7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 608, "license_type": "no_license", "max_line_length": 43, "num_lines": 33, "path": "/03.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\nscore = float(input(\"Enter your score : \"))\nmsg = \"Your score is : \"\nif score >= 80:\n print(msg + \"A\")\nelif score >= 75:\n print(msg + \"B+\")\nelif score >= 
70:\n print(msg + \"B\" )\nelif score >= 60:\n print(msg + \"C+\")\nelif score >= 55:\n print(msg + \"C\" )\nelif score >= 50:\n print(msg + \"D+\")\nelse:\n print(msg + \"F\")\nprint(\"GoodBye\")\n\ncount = 0\nwhile (count <= 9):\n print(\"The count is : \",count)\n count += 1\nprint(\"Good Luck\")\n\nfor i in range(5,10):\n print(\"i = \",i)\n\nfor i in range(100):\n print(\"i = \",i)\n\nfor k in range(20):\n print(\"k =\",k,end = ' ')\n" }, { "alpha_fraction": 0.6194690465927124, "alphanum_fraction": 0.6991150379180908, "avg_line_length": 21.600000381469727, "blob_id": "7f718553e8555391b7c805c33d63c078bd22eb54", "content_id": "770297f1e9ced915979e6bb6892f6b5e4a85713c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 113, "license_type": "no_license", "max_line_length": 30, "num_lines": 5, "path": "/Calen.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\nimport calendar\ncalen = calendar.month(2016,4)\nprint(\"Here is Calendar : \")\nprint (calen)\n" }, { "alpha_fraction": 0.5405405163764954, "alphanum_fraction": 0.5675675868988037, "avg_line_length": 20.14285659790039, "blob_id": "91015339d5385a43b17af97fd72bce9f241c9932", "content_id": "9959375b6ca3da6b3426232b556cfb6528d7e5b3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 148, "license_type": "no_license", "max_line_length": 40, "num_lines": 7, "path": "/RegEx.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\nimport re\nif re.search(\"cat\",\"A cat is eat fish\"):\n print(\"Have a Cat\")\nelse:\n print(\"Haven't Cat\")\nprint(r\"\\\\\\\\!@_!\")\n" }, { "alpha_fraction": 0.5309168696403503, "alphanum_fraction": 0.5543709993362427, "avg_line_length": 22.450000762939453, "blob_id": "5c9278a2c28624ab5fe999c009c51e0e935b44e7", "content_id": "b9a9de0a6319cd7b74c633bb1e02ff7e1f82699d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 469, "license_type": "no_license", "max_line_length": 63, "num_lines": 20, "path": "/func_1.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\n\nfrom CalArea import CalAreaRectangle\n\n\ndef printMe(str):\n \"This prints a passed string into this function\"\n print (str)\n return\nprint ('----------------------')\nprintMe(\"My Name is pongsakon\")\nprint ('----------------------')\n\ndef areaShow(width,heigth):\n return width * heigth\nprint ('----------------------')\nprint (areaShow(12,10))\nprint ('----------------------')\n\nprint (\"Area of Rectangle :\", CalAreaRectangle.rectangle(20,6))\n" }, { "alpha_fraction": 0.6145510673522949, "alphanum_fraction": 0.6207430362701416, "avg_line_length": 24.760000228881836, "blob_id": "cb2aee32edfaf7cc00c8d4fa18ff4f6b6684d16f", "content_id": "50e848b0f764146b0210a22684270818babc11b0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 646, "license_type": "no_license", "max_line_length": 61, "num_lines": 25, "path": "/TryExcept.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\n'''\ntry:\n fh = open(\"myfile\", \"w\")\n fh.write(\"This is my file exception handling!!!\")\nexcept IOError:\n print(\"Error : Can\\'t find file ore read data\")\nelse:\n print(\"Write content in the file successfully\")\n fh.close()\n\nwhile True:\n try:\n n = int(input(\"Please enter an 
interger :\"))\n break\n except ValueError:\n print(\"No valid value! Please try again...\")\nprint(\"Great ,you sucessfully enter an integer!\")\n'''\ndef temp_convert(var):\n try:\n return int(var)\n except ValueError as Args:\n print(\"Argument doesn't contain number\\n\", Args.args)\ntemp_convert(\"xyz\")\n\n\n" }, { "alpha_fraction": 0.6508525013923645, "alphanum_fraction": 0.6538176536560059, "avg_line_length": 26.5510196685791, "blob_id": "fe5074dca7208d62af8f4dd33ba1914fcffcab13", "content_id": "afffad483f0356bec8d7a09288c7a3db6d1398ee", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1349, "license_type": "no_license", "max_line_length": 88, "num_lines": 49, "path": "/openCD.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "# coding: UTF-8\nimport platform\nimport os\nimport ctypes\n\n# windows\nif platform.system() == 'Windows':\n\t# need 'u' before \"\", if you are using UTF-8. if not you don't need to put it.\n\tctypes.windll.WINMM.mciSendStringW(u\"set cdaudio door open\", None, 0, None)\n\t#ctypes.windll.WINMM.mciSendStringW(u\"set cdaudio door closed\", None, 0, None)\n\tprint(\"Windows worked\\n\")\n# OSX\nelif platform.system() == 'Darwin':\n\tos.system(\"drutil tray open\")\n\t#os.system(\"drutil tray closed\")\n\tprint(\"Darwin worked\\n\")\n\n# Linux\nelif platform.system() == 'Linux':\n\tos.system(\"eject cdrom\")\n\t#os.system(\"eject -t cdrom\")\n\tprint(\"Linux worked\\n\")\n\n# NetBSD\n# thank you to the man who adviced me how to eject on NetBSD\nelif platform.system() == 'NetBSD':\n\t#you must be su\n\tos.system(\"eject cd\")\n\tprint(\"NetBSD worked\\n\")\n\n\n#######################################################\n# Operation under this comment has not been confirmed #\n#######################################################\n\n# FreeBSD\nelif platform.system() == 'FreeBSD':\n#you can use cdcontrol without typing su password. 
but do it on your own responsibility.\n#visudo /usr/local/etc/sudoers\n#username ALL=(ALL) NOPASSWD: /usr/sbin/cdcontrol\n\tos.system(\"sudo cdcontrol eject\")\n\t#os.system(\"sudo cdcontrol close\")\n\tprint(\"FreeBSD worked\\n\")\n\n\nelse:\n\tprint(\"OS Unsupported\\n\")\n# if needed\n#\tprint \"UIIIIIIIN\"" }, { "alpha_fraction": 0.6091954112052917, "alphanum_fraction": 0.647988498210907, "avg_line_length": 25.653846740722656, "blob_id": "607e5bb44a26057d212e2a896d6750272b21bab2", "content_id": "f386b0512a70998df4e074095a4279624465af6a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 696, "license_type": "no_license", "max_line_length": 136, "num_lines": 26, "path": "/Gui.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\n\nimport tkinter\nfrom tkinter import *\nroot = tkinter.Tk()\n'''\nfor r in range(5):\n for c in range(5):\n tkinter.Label(root,text='Row[%s]/Col[%s]'%(r,c),borderwidth = 5)\\\n\n .grid(row=r,column=c)\n'''\nCheckVar1 = StringVar()\nCheckVar2 = IntVar()\n\ndef checkCallBack():\n C1.select()\n C2.toggle()\n print(CheckVar1.get())\n print(CheckVar2.get())\n\nC1 = Checkbutton(root,text = \"Music\",variable = CheckVar1,onvalue = \"on\",offvalue = \"off\",height = 5,width = 20,command = checkCallBack)\nC2 = Checkbutton(root,text = \"Video\",variable = CheckVar2,onvalue = 1,offvalue = 0,height = 5,width = 20,command = checkCallBack)\nC1.pack()\nC2.pack()\nroot.mainloop()\n\n\n\n" }, { "alpha_fraction": 0.6538461446762085, "alphanum_fraction": 0.692307710647583, "avg_line_length": 22.22222137451172, "blob_id": "ed81c8feb1abf01081d968d326786e152e1cf222", "content_id": "e2f2374a879ed4fee08f76322b34e36113f603ef", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 208, "license_type": "no_license", "max_line_length": 55, "num_lines": 9, "path": "/TestCal.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\n'''\nimport CalArea\nprint(\"Area of squre :\",CalArea.squre(5, 5))\n'''\n'''\nfrom CalArea import CalAreaRectangle, CalAreaTriangle\nprint(\"Area of square :\", CalAreaRectangle.squre(4, 4))\n'''" }, { "alpha_fraction": 0.6131687164306641, "alphanum_fraction": 0.6351165771484375, "avg_line_length": 24.928571701049805, "blob_id": "2d706b3d86d2ff9a68cd9d15d33d0858666c5c3a", "content_id": "fdfcc7f119ed3a6e37f29ae4b979c2c6b0f2f208", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 729, "license_type": "no_license", "max_line_length": 46, "num_lines": 28, "path": "/telnet_mikrotik.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "import getpass\nimport sys\nimport telnetlib\n\nbot = telnetlib.Telnet(\"192.168.1.244\", 23)\nuser = b'osdadmin'# user to login\npassword = b'internet@osd' #password to login\nbot.read_until(b'Username:')\nbot.write(user + b'\\r\\n')\nbot.read_until(b'Password')\nbot.write(password + b'\\r\\n')\n#bot.write(('enable').encode('ascii'))\n#bot.write(('OSD@R-SL5').encode('ascii'))\n#bot.write((\"\\r\\n\").encode('ascii'))\nbot.write(b'terminal length 0' + b'\\r\\n')\nbot.write(b'sh ver | inc IOS | mem' + b'\\r\\n')\nbot.write(b'sh ip int br' + b'\\r\\n')\nbot.write(b'exit' + b'\\r\\n')\nres = bot.read_all()\nstr_res = (str(res))\nsp_res = (str_res.split('\\\\r\\\\n'))\n#print(sp_res)\nfor i in sp_res:\n print(i)\n\n#print(sp_res)\n#print(sp_res[3])\n#print(sp_res[4])\n\n\n\n" }, { 
"alpha_fraction": 0.5715277791023254, "alphanum_fraction": 0.5868055820465088, "avg_line_length": 33.30952453613281, "blob_id": "a3e6996e8b2690bf2967f2639a0b61ec48a33313", "content_id": "7b592e18623531652de0278c542836daf9ba12a9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1440, "license_type": "no_license", "max_line_length": 127, "num_lines": 42, "path": "/QT_main.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "import sys, socket\nfrom PyQt5.QtWidgets import QApplication, QWidget, \\\n QMainWindow,QPushButton, QMessageBox, QLineEdit, QLabel\nfrom PyQt5.QtCore import pyqtSlot\nfrom PyQt5.QtGui import QIcon\n\nclass AppMain(QMainWindow):\n def __init__(self):\n super().__init__()\n self.title = 'Asterisk DialPlan'\n self.left = 100\n self.top = 60\n self.width = 1200\n self.height = 600\n self.setFixedSize(1200, 600)\n self.initUI()\n\n def initUI(self):\n hostname = socket.gethostname()\n ip = socket.gethostbyname(hostname)\n self.setWindowTitle(self.title)\n self.setGeometry(self.left, self.top, self.width, self.height)\n self.statusBar().showMessage(ip)\n self.show()\n\n @pyqtSlot()\n def on_click_submit(self):\n if self.textbox_user.text() == 'admin' and self.textbox_pass.text() == 'password' and self.textbox_host.text() != \"\":\n textboxOK = self.textbox_user.text()\n QMessageBox.question(self, 'Asterisk Dialplan',' Login Success '\n , QMessageBox.Ok,QMessageBox.Cancel)\n self.textbox_user.setText(\"\")\n self.hide()\n\n else:\n QMessageBox.warning(self, 'Login Error ', ' Invalid Value !!!!! '\n ,QMessageBox.Ok)\n\nif __name__ == '__main__':\n app = QApplication(sys.argv)\n ex = AppMain()\n sys.exit(app.exec_())" }, { "alpha_fraction": 0.5589005351066589, "alphanum_fraction": 0.5706806182861328, "avg_line_length": 29.600000381469727, "blob_id": "fe4727b6d018ad30271bdf42a0a171da3431bb5c", "content_id": "1c404f6a8a5acfc6d0b46791e848cfba271ccf54", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 764, "license_type": "no_license", "max_line_length": 95, "num_lines": 25, "path": "/QT_ssh.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "import paramiko\nclass QT_ssh:\n def qt_ssh(self):\n ssh = paramiko.SSHClient()\n ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n try:\n ssh.connect('192.168.1.50', username='root', password='osd_SIP4321!')\n except paramiko.SSHException:\n print(\"Connection Failed\")\n quit()\n #ssh.exec_command(\"su\")\n #stdin,stdout,stderr = ssh.exec_command(\"ansible -m ping all\")\n stdin,stdout,stderr = ssh.exec_command(\"/bin/cat /etc/asterisk/extensions_custom.conf\")\n\n for line in stdout.readlines():\n list_ssh = []\n list_ssh = line.strip(\"\\n\")\n print(list_ssh)\n\n #print(line.strip(line))\n\n ssh.close()\n\nres = QT_ssh()\nres.qt_ssh()" }, { "alpha_fraction": 0.3667711615562439, "alphanum_fraction": 0.38349008560180664, "avg_line_length": 26.314285278320312, "blob_id": "29c453c8db622ffac4412eec33ee802303cfd3c5", "content_id": "6bc2f8ed6c5f9877cc0746abba3afb2a29e21fef", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1037, "license_type": "no_license", "max_line_length": 60, "num_lines": 35, "path": "/File_test.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\n'''\ntry:\n print(\"ไม่สามารถเปิดแฟ้มได้ :\",err.args)\nelse:\n print(\"ปิดแฟ้มเรียบร้อยแล้ว\")\n f.closed\n'''\nfilePath = 
\"install Zabbix.txt\"\ntry:\n f = open(filePath)\n print(\"-------------------- STEP 1 -------------------\")\n str = f.read()\n print(\"-------------------- STEP 1 -------------------\")\n print(str)\n print(\"-------------------- STEP 2 -------------------\")\n str2 = f.readline(15)\n print(\"-------------------- STEP 2 -------------------\")\n print (str2)\n print(\"-------------------- STEP 2 -------------------\")\n\n #Test Readline method\n print(\"-------------------- STEP 3 -------------------\")\n f = open(filePath)\n while 1:\n line = f.readline()\n if len(line):\n print(line)\n else:break\nexcept IOError as err:\n print(\"Cannot open file\")\nelse:\n print(\"This file close\")\n print(\"-------------------- STEP 3 -------------------\")\n f.close()\n\n" }, { "alpha_fraction": 0.6394051909446716, "alphanum_fraction": 0.6524163484573364, "avg_line_length": 18.88888931274414, "blob_id": "085588b4dbd6161180fc41f2a2c442fbae078304", "content_id": "97b7ba5b46912ef98555d9223d691f200608f0bd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 538, "license_type": "no_license", "max_line_length": 41, "num_lines": 27, "path": "/05.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\n'''\nf = open(\"myfile.txt\",\"r+\")\n\nprint(\"nmae of file :\",f.name)\nprint(\"Close or not :\",f.closed)\nprint(\"Opening mode :\",f.mode)\nprint(f.readlines())\n'''\nimport os\nimport linecache\nfPath = \"myfile.txt\"\nfor line in range(5):\n print(linecache.getline(fPath, line))\nlinecache.clearcache()\n\nwordTemp = []\nwordCount = 0\nfile = open(\"myfile.txt\", 'r+')\nfor line in file:\n for word in line.split():\n wordTemp.append(word)\n wordCount = wordCount + 1\nprint(wordTemp)\nprint(wordCount)\n\nprint(os.getcwd())\n\n" }, { "alpha_fraction": 0.7197802066802979, "alphanum_fraction": 0.7454212307929993, "avg_line_length": 25, "blob_id": "d02aff134ca68a8e324d5a3be8ff554d05e5f3d3", "content_id": "d3e1a85e80ea4565ad0e2dddb9b828deb76231d2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 546, "license_type": "no_license", "max_line_length": 52, "num_lines": 21, "path": "/ami_events.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "from asterisk.ami import AMIClient\nfrom asterisk.ami import EventListener\nimport time\n\nclass RegistryEventListener(EventListener):\n\n def on_Registry(event, **kwargs):\n print('Registry Event', event)\n\nclass AllEventListener(EventListener):\n\n def on_event(event, **kwargs):\n print('Event', event)\n\nclient = AMIClient(address='192.168.66.104')\nclient.login(username='beagle', secret='password')\n#client.add_event_listener(RegistryEventListener())\n#client.add_event_listener(AllEventListener())\n\nclient.logoff()\ntime.sleep(100)\n" }, { "alpha_fraction": 0.6283186078071594, "alphanum_fraction": 0.6696165204048157, "avg_line_length": 47.57143020629883, "blob_id": "a8f7e7e817a3aed6c22382d9a01244d8fb9191ac", "content_id": "552c074c5a53bc9f401d40b71cd66989eed81169", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 339, "license_type": "no_license", "max_line_length": 68, "num_lines": 7, "path": "/picnicTable.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "def printPicnic(itemDict, leftWidth, rightWidth):\n print('PICNIC ITEM'.center(leftWidth + rightWidth, '-'))\n for k, v in itemDict.items():\n print(k.ljust(leftWidth, 
'.') + str(v).rjust(rightWidth))\npinicItem = {'sandwich': 4, 'apple': 12, 'cups': 4, 'cookies': 8000}\nprintPicnic(pinicItem, 12, 5)\nprintPicnic(pinicItem, 20, 6)" }, { "alpha_fraction": 0.6073619723320007, "alphanum_fraction": 0.650306761264801, "avg_line_length": 22.428571701049805, "blob_id": "556d6506bb67424c552f8783eb1ddddf27f2884f", "content_id": "6cd6996227bfa5d4a4195352cb9c9a5684ecfcab", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 163, "license_type": "no_license", "max_line_length": 55, "num_lines": 7, "path": "/Person/manager.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "import Person as Ps\nbob = Ps.person.Person('Bob Smith', 42, 30000, 'software')\nprint(bob.lastName())\nprint(__name__)\nprint(__package__)\nprint(__file__)\nprint(__doc__)" }, { "alpha_fraction": 0.45045965909957886, "alphanum_fraction": 0.4759959280490875, "avg_line_length": 30.580644607543945, "blob_id": "273f4685294f7acee0ac94189336780f6fd48b15", "content_id": "04c0cf0f385c18ac4ef67f58020a088816af7f82", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 979, "license_type": "no_license", "max_line_length": 69, "num_lines": 31, "path": "/Overload_Test.py", "repo_name": "oun1982/gitrepo", "src_encoding": "UTF-8", "text": "__author__ = 'oun1982'\nclass Car:\n def __init__(self,*args):\n if(len(args) == 0):\n print(\"No parameter No Overloading\")\n else:\n print(\"There are %d argument overloading : \"%(len(args)))\n lists = []\n for i in args:\n lists.append(i)\n print(\"Constructor overloading argument:\",lists)\n\n def printX(*args):\n if (len(args) == 0):\n print(\"No Overloading\")\n else:\n print(\"There are %d argument overloading\"%(len(args)-1))\n lists = []\n for i in args:\n lists.append(i)\n print(\"Constructor overloading argument:\",lists[1:])\ncar1 = Car()\nprint(\"-------------------------\")\n#car2 = Car(\"Toyota\")\n#car3 = Car(\"Toyota\",\"Honda\",\"BMW\",\"BENZ\")\ncar1.printX()\nprint(\"---------car1-1-------------\")\ncar1.printX(5)\nprint(\"---------car1-2-------------\")\ncar1.printX(5,6.5,-5,\"Overloading\")\nprint(\"---------car1-3-------------\")\n" } ]
49
theeomm/learn-py
https://github.com/theeomm/learn-py
853bc8d7450ca3da8a4a66981460d514080162fc
a0ef3afa2e38c33991ab124cd2909bf2f744b583
e7c23c0bc0beec6fee5bd2a65f16a3542b5800b5
refs/heads/main
2023-07-26T10:35:27.343694
2021-09-14T07:13:53
2021-09-14T07:13:53
406260753
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6433120965957642, "alphanum_fraction": 0.6433120965957642, "avg_line_length": 25.16666603088379, "blob_id": "9c12b3eac1386c381206506e3f368c1405de1328", "content_id": "88f42deba3e08f1a422b0add7f2e5d0ae3a09a07", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 157, "license_type": "no_license", "max_line_length": 59, "num_lines": 6, "path": "/conditions.py", "repo_name": "theeomm/learn-py", "src_encoding": "UTF-8", "text": "name = input(\"What's your name? \")\n\nif name.startswith(\"Doug\"):\n print(f\"Hello {name}\")\nelse:\n print(f\"Ops, sorry {name}, I thought this was Douglas\")\n" }, { "alpha_fraction": 0.7851239442825317, "alphanum_fraction": 0.7851239442825317, "avg_line_length": 59.5, "blob_id": "b79f47c74849c7d1d0eba0f3c584e5c022c0f450", "content_id": "5724f0b4772fb8a53139cda499e2635578e636d9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 242, "license_type": "no_license", "max_line_length": 113, "num_lines": 4, "path": "/README.md", "repo_name": "theeomm/learn-py", "src_encoding": "UTF-8", "text": "# Python Programming Bootcamp\n\nThis repo has the code I wrote while taking Derek Banas' Python Programming Bootcamp course on Udemy.\nThe code in this repo may not be the same as the code in the course because sometimes I like to do things my way.\n" } ]
2
wasit7/book_pae
https://github.com/wasit7/book_pae
1815c3a8b9080c44775fcb5567bed65d3d794b19
c53cca3342593a2769f398db9bf969515d3de117
0fc6e9da40341ba3487c1e2a9c3774b93c49a454
refs/heads/master
2021-01-15T15:33:04.825530
2016-06-30T09:16:41
2016-06-30T09:16:41
39626642
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7265193462371826, "alphanum_fraction": 0.7451657652854919, "avg_line_length": 33.5, "blob_id": "0add9ef9676e413277baf7205239dcdc676e720c", "content_id": "b767e582030a7090ff60646527d49a975f5866d1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1448, "license_type": "permissive", "max_line_length": 71, "num_lines": 42, "path": "/book/django/project/mywebpage/models.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "from django.db import models\nfrom django.contrib.auth.models import User\n\n# Create your models here.\n#unique -> If True, this field must be unique throughout the table.\nclass Student(models.Model):\n\tfirstname = models.CharField(max_length=30)\n\tlastname = models.CharField(max_length=20)\n\tstd_id = models.IntegerField(primary_key=True, unique=True)\n\tusername = models.ForeignKey(User)\n\temail = models.EmailField(max_length=255)\n\t#birthdate = models.DateField()\n\tsch_gpa = models.FloatField()\n\tprovince_id = models.CharField(max_length=30)\n\tadmit_year = models.CharField(max_length=8)\n\tdef __unicode__(self):\n\t\treturn str(self.std_id)\n\nclass Subject(models.Model):\n\tsub_id = models.CharField(max_length=5, primary_key=True, unique=True)\n\tsub_name = models.CharField(max_length=40)\n\tdescription = models.CharField(max_length=500)\n\tcredit = models.IntegerField()\n\tdef __unicode__(self):\n\t\treturn str(self.sub_id)\n\nclass Enrollment(models.Model):\n\tstd_id = models.ForeignKey(Student)\n\tsub_id = models.ForeignKey(Subject)\n\tgrade = models.CharField(max_length=1)\n\tterm = models.IntegerField()\n\tyear = models.IntegerField()\n\tdef __unicode__(self):\n\t\treturn str(self.std_id)\n\nclass Factor(models.Model):\n\tsub_id = models.CharField(max_length=5, primary_key=True, unique=True)\n\tsubfac_1 = models.CharField(max_length=50)\n\tsubfac_2 = models.CharField(max_length=50)\n\tsubfac_3 = models.CharField(max_length=50)\n\tdef __unicode__(self):\n\t\treturn str(self.sub_id)" }, { "alpha_fraction": 0.48552656173706055, "alphanum_fraction": 0.513052761554718, "avg_line_length": 28.32291603088379, "blob_id": "1210512cc8c938253a6acbdd0a1a216552b380d2", "content_id": "be09124881466ce482a65cc58bf37234a3463b64", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5631, "license_type": "permissive", "max_line_length": 99, "num_lines": 192, "path": "/pae/forcast/src/compare_traingData_accuracy.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "## -*- coding: utf-8 -*-\n#\"\"\"\n#Created on Tue May 10 17:19:14 2016\n#\n#@author: Methinee\n#\"\"\"\nimport pandas as pd\nimport numpy as np\nimport pickle\nimport xlwt\nimport matplotlib.pyplot as plt\n\n#\n#\n#\n##----------------------Traning Data With Merging-----------------------\n##df_file = pd.read_csv('../data/df_more20.csv',delimiter=\",\", skip_blank_lines = True, \n## error_bad_lines=False)\n#df_acc = pd.read_csv('../src/compare_accuracy.csv',delimiter=\",\", skip_blank_lines = True, \n# error_bad_lines=False)\n# \n#df_train = pd.read_csv('../src/compare_trainingData.csv',delimiter=\",\", skip_blank_lines = True, \n# error_bad_lines=False)\n# \n#df_class = pd.read_csv('../src/compare_class.csv',delimiter=\",\", skip_blank_lines = True, \n# error_bad_lines=False)\n#\ndf_all = pd.read_csv('../src/compare_all.csv',delimiter=\",\", skip_blank_lines = True, \n 
error_bad_lines=False)\n\nheaders=list(df_all.columns.values)\nsubjects = []\n#Create dictionary of list subjects\nfor sub in df_all[headers[0]]:\n subjects.append(sub)\n\nclasses = []\n#Create dictionary of list subjects\nfor c in df_all[headers[1]]:\n classes.append(c)\n\n\naccuracy = []\n#Create dictionary of list subjects\nfor acc in df_all[headers[2]]:\n accuracy.append(acc)\n \ntrain = []\n#Create dictionary of list subjects\nfor tr in df_all[headers[3]]:\n train.append(tr) \n\n\n#n_groups = 15\n#\n#std_men = (3, 5, 2, 3, 3,3, 5, 2, 3, 3,3, 5, 2, 3, 3)\n#\n#std_women = (3, 5, 2, 3, 3,3, 5, 2, 3, 3,3, 5, 2, 3, 3)\n#\n#\n#index = np.arange(n_groups)\n#bar_width = 0.20\n#\n#opacity = 0.7\n#error_config = {'ecolor': '0.3'}\n\n#rects1 = plt.bar(index,df_all[headers[1]][:15], bar_width,\n# alpha=opacity,\n# color='b',\n# \n# error_kw=error_config,\n# label='#Class')\n#\n#rects2 = plt.bar(index + bar_width, df_all[headers[3]][:15], bar_width,\n# alpha=opacity,\n# color='r',\n# yerr=std_women,\n# error_kw=error_config,\n# label='#Training Data')\n# \n#rects3 = plt.bar(index + bar_width+bar_width, df_all[headers[2]][:15], bar_width,\n# alpha=opacity,\n# color='g',\n# yerr=std_women,\n# error_kw=error_config,\n# label='#Accuracy')\n# \n#\n#for rect in rects1:\n# height = rect.get_height()\n# plt.text(rect.get_x()+rect.get_width()/2., 1.05*height, '%d'%int(height),\n# ha='center', va='bottom')\n# \n#for rect in rects2:\n# height = rect.get_height()\n# plt.text(rect.get_x()+rect.get_width()/2., 1.05*height, '%d'%int(height),\n# ha='center', va='bottom')\n# \n#for rect in rects3:\n# height = rect.get_height()\n# plt.text(rect.get_x()+rect.get_width()/2., 1.05*height, '%d'%int(height),\n# ha='center', va='bottom')\n#\n#\n#plt.xlabel('Subject')\n#plt.ylabel('Scores')\n#plt.title('Scores of #Traing Data and #Class')\n#plt.xticks(index + bar_width, subjects[:15])\n#plt.legend()\n#\n#plt.tight_layout()\n#plt.show()\n#\n\n#//////////////////////////////////////////////////////////////////////////////////////////////////\n#rects1 = plt.bar(index,df_all[headers[1]][95:110], bar_width,\n# alpha=opacity,\n# color='b',\n# \n# error_kw=error_config,\n# label='#Class')\n#\n#rects2 = plt.bar(index + bar_width, df_all[headers[3]][95:110], bar_width,\n# alpha=opacity,\n# color='r',\n# yerr=std_women,\n# error_kw=error_config,\n# label='#Training Data')\n# \n#rects3 = plt.bar(index + bar_width+bar_width, df_all[headers[2]][95:110], bar_width,\n# alpha=opacity,\n# color='g',\n# yerr=std_women,\n# error_kw=error_config,\n# label='#Accuracy')\n# \n#\n#for rect in rects1:\n# height = rect.get_height()\n# plt.text(rect.get_x()+rect.get_width()/2., 1.02*height, '%d'%int(height),\n# ha='center', va='bottom')\n# \n#for rect in rects2:\n# height = rect.get_height()\n# plt.text(rect.get_x()+rect.get_width()/2., 1.02*height, '%d'%int(height),\n# ha='center', va='bottom')\n# \n#for rect in rects3:\n# height = rect.get_height()\n# plt.text(rect.get_x()+rect.get_width()/2., 1.02*height, '%d'%int(height),\n# ha='center', va='bottom')\n#\n#\n#plt.xlabel('Subject')\n#plt.ylabel('Scores')\n##plt.title('Scores of #Traing Data and #Class')\n#plt.xticks(index + bar_width, subjects[:15])\n#plt.legend()\n#\n#plt.tight_layout()\n#plt.show()\n\n#///////////////////////////////////////////////////////////////////////////////////////\n\ncolumn = 6\nrow = 300\nz = np.zeros((row,column))\nfor d in xrange(len(train)):\n for i in xrange(row):\n for j in xrange(column):\n if j == classes[d] and i == train[d]:\n z[i][j] = accuracy[d]\n print \" 
hello\"\n \nprint z\n\n\n#dx, dy = 0.15, 0.05\ny, x = np.mgrid[slice(0, 300),slice(0, 6)]\n#plt.subplot()\n#plt.pcolormesh(x, y, z,shading='gouraud', cmap='jet')\nplt.pcolor(x, y, z, cmap='jet')\n#plt.title('pcolor')\n#plt.xticks(np.arange(min(x), max(x), 1.50))\n## set the limits of the plot to the limits of the data\n#plt.colorbar()\n#plt.axis([x.min(), x.max(), y.min(), y.max()])\n\n\nplt.yticks(np.arange(0, 300, 50.0),label=\"#Training Data\")\nplt.xticks(np.arange(0, 6, 1.0),label='#Class')\nplt.colorbar()\n\n" }, { "alpha_fraction": 0.7106339335441589, "alphanum_fraction": 0.7137014269828796, "avg_line_length": 50.52631759643555, "blob_id": "1d228c3bfb380b10f496993940e53ee1e70bfcdb", "content_id": "c896bb3e8c6ba500fac72914047b8b1dcaf5d34e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 978, "license_type": "permissive", "max_line_length": 88, "num_lines": 19, "path": "/pae/book_edit_bootstrap/django/project/mywebpage/urls.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "from django.conf.urls import url\nfrom . import views\n\nurlpatterns = [\n\turl(r'^showprofile/$', views.showprofile, name='showprofile'),\n\turl(r'^home/$', views.home, name='home'),\n\turl(r'^homeD3/$', views.homeD3, name='homeD3'),\n\turl(r'^addprofile/$', views.addprofile, name='addprofile'),\n\turl(r'^editprofile/$', views.editprofile, name='editprofile'),\n\turl(r'^predict/$', views.predict, name='predict'),\n\turl(r'^userprofile/$', views.userprofile, name='userprofile'),\n\turl(r'^test/$', views.test, name='test'),\n\turl(r'^jsonSubject.json$', views.jsonSubject, name='jsonSubject'), #get data\n\turl(r'^jsonEnrollment.json$', views.jsonEnrollment, name='jsonEnrollment'),\n\turl(r'^jsonStudent.json$', views.jsonStudent, name='jsonStudent'),\n\turl(r'^jsonProvience.json$', views.jsonProvience, name='jsonProvience'),\n\turl(r'^coordinate_home.json$', views.coordinate_home, name='coordinate_home'),\n\turl(r'^coordinate_predict.json$', views.coordinate_predict, name='coordinate_predict'),\n]" }, { "alpha_fraction": 0.7315789461135864, "alphanum_fraction": 0.7315789461135864, "avg_line_length": 30.83333396911621, "blob_id": "4287d5df95cd2ded63edbb569038bfd8e04f63a9", "content_id": "2e6ed7d9c3967aaa3db588d3f7b8c0607305f4e7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 190, "license_type": "permissive", "max_line_length": 55, "num_lines": 6, "path": "/pae/book_edit_bootstrap/django/testproject/login/views.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "from django.http import HttpResponse\nfrom datetime import datetime\ndef home(request):\n\tnow = datetime.now()\n\thtml = \"<html><body>It is now %s.</body></home>\" % now\n\treturn HttpResponse(html)" }, { "alpha_fraction": 0.5394737124443054, "alphanum_fraction": 0.640350878238678, "avg_line_length": 15.357142448425293, "blob_id": "911c49ef95f82bf803f1d401c255101c5b895681", "content_id": "3166d9b761cbf91aacfe1c98bba150a8939337e3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 228, "license_type": "permissive", "max_line_length": 68, "num_lines": 14, "path": "/pae/forcast/src/csv/CS_table_No5_E.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Sep 09 16:19:48 2015\n\n@author: Administrator\n\"\"\"\n\nimport pandas as pd\ndata = 
pd.read_csv('D:\\\\project\\\\forcast\\\\data\\\\CS_table_No5_E.csv')\n\nlist = ['cs101','cs102','el171']\n\n\nprint data" }, { "alpha_fraction": 0.5620052814483643, "alphanum_fraction": 0.6121371984481812, "avg_line_length": 28.230770111083984, "blob_id": "c4d13811649f359cae3904f74cae00b534d543b7", "content_id": "79c73139a08c4ec8890b543465844df0a6070c78", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 379, "license_type": "permissive", "max_line_length": 94, "num_lines": 13, "path": "/pae/forcast/src/create_dfmore20_dropNanResult.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Sun May 29 20:49:37 2016\n\n@author: Methinee\n\"\"\"\nimport pandas as pd\n\ndf_file = pd.read_csv('../data/df_dropSub_less20.csv',delimiter=\",\", skip_blank_lines = True, \n error_bad_lines=False)\n \ndrop_naResult = df_file[df_file['4RESULT'] != 0]\ndrop_naResult.to_csv('../data'+'/df_dropSub_less20_dropNaResult.csv')" }, { "alpha_fraction": 0.37078651785850525, "alphanum_fraction": 0.42119649052619934, "avg_line_length": 32.93814468383789, "blob_id": "490dd8856daea51a1eeffb4dabb81d39434b97a1", "content_id": "8a9f0b3e1ea487b586f6c6bef06fdddd95fe9c22", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3293, "license_type": "permissive", "max_line_length": 109, "num_lines": 97, "path": "/pae/forcast/src/gen_dataset_allsub.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Feb 25 23:15:03 2016\n\n@author: Methinee\n\"\"\"\n\nimport pandas as pd\nimport numpy as np\nimport pickle\nimport os \nfrom collections import defaultdict\n\n\ndf_file = pd.read_excel('../data/transform_sort.xlsx')\nheaders=list(df_file.columns.values)\n\n#replace grade with integer and noplus in each grade\n#{'':0, 'U#':1, 'U':1, 'S#':2, 'S':2, 'W':3, 'F':4, 'D':5, 'D+':5, 'C':6, 'C+':6, 'B':7, 'B+':7, 'A':8}\ndf_file = df_file.fillna(0)\ndf_file = df_file.replace(['A', 'B+', 'B', 'C+', 'C' , 'D+' , 'D' , 'F' , 'W' , 'S' , 'S#' , 'U' , 'U#'], \n [8, 7, 7, 6 , 6, 5, 5, 4, 3, 2, 2, 1, 1])\n \n\nfor i in xrange(5,208):\n #Create folder to keep datasets \n newpath = r'D:\\project\\forcast\\src\\train%s' %(headers[i])\n if not os.path.exists(newpath):\n os.makedirs(newpath)\n \n #filter just cs213 in column #3\n cs = df_file.loc[df_file['3COURSEID'] == headers[i]]\n count = len(cs)\n\n #select row from 20% \n a = count/5\n a = int(a)\n a = a\n\n dataset = defaultdict(list) \n df_matrix = df_file.as_matrix()\n cs = df_matrix[df_matrix[:,3]==headers[i]]\n for j in range(0,5):\n if(count%5) == 0: \n dataset_cs = cs[a*j:a*(j+1),:]\n dataset[j] = dataset_cs \n \n else:\n if(count%5) == 1:\n dataset[0] = cs[a*j:a*(j+1)+1,:]\n dataset[1] = cs[a*j+1:a*(j+1)+1,:]\n dataset[2] = cs[a*j+1:a*(j+1)+1,:]\n dataset[3] = cs[a*j+1:a*(j+1)+1,:] \n dataset[4] = cs[a*j+1:a*(j+1)+1,:]\n \n elif(count%5) == 2:\n dataset[0] = cs[a*j:a*(j+1)+1,:]\n dataset[1] = cs[a*j+1:a*(j+1)+2,:]\n dataset[2] = cs[a*j+2:a*(j+1)+2,:]\n dataset[3] = cs[a*j+2:a*(j+1)+2,:] \n dataset[4] = cs[a*j+2:a*(j+1)+2,:]\n \n elif(count%5) == 3:\n dataset[0] = cs[a*j:a*(j+1)+1,:]\n dataset[1] = cs[a*j+1:a*(j+1)+2,:]\n dataset[2] = cs[a*j+2:a*(j+1)+3,:]\n dataset[3] = cs[a*j+3:a*(j+1)+3,:] \n dataset[4] = cs[a*j+3:a*(j+1)+3,:]\n \n elif(count%5) == 4:\n dataset[0] = cs[a*j:a*(j+1)+1,:]\n dataset[1] = cs[a*j+1:a*(j+1)+2,:]\n dataset[2] = 
cs[a*j+2:a*(j+1)+3,:]\n dataset[3] = cs[a*j+3:a*(j+1)+4,:] \n dataset[4] = cs[a*j+4:a*(j+1)+4,:]\n \n print \"dataset0%d\"%(j)\n print dataset[j]\n #print \"dataset_213\",dataset_213\n \n L = \"L%02d\"%(j)\n I = \"I%02d\"%(j)\n L = dataset[j][:,4]\n #L = L.astype(np.int64, copy=False)\n I = dataset[j][:,5:]#L.shape\n #I = I.astype(np.int64, copy=False)\n \n #save pickle file\n# f = \"train%s/dataset%02d.pic\"%(headers[i],j)\n# print f\n# with open(f, 'wb') as pickleFile:\n# theta_dim=1\n# clmax = 14\n# theta_range = I.shape[1]\n# pickle.dump((clmax,theta_dim,theta_range,len(L),L,I,None), pickleFile, pickle.HIGHEST_PROTOCOL) \n# \n# print \"-------------------------------------------------------\"\n\n" }, { "alpha_fraction": 0.516370415687561, "alphanum_fraction": 0.5416277050971985, "avg_line_length": 29.304964065551758, "blob_id": "1e1103c5614494525fecb87ad99c6653889a9f98", "content_id": "cca9df1250b54d13d99797002efa0411b97bbc9b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4276, "license_type": "permissive", "max_line_length": 106, "num_lines": 141, "path": "/pae/forcast/src/transform.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Tue Feb 02 15:59:05 2016\n\n@author: Administrator\n\"\"\"\n\nimport pandas as pd\nimport numpy as np\nfrom collections import defaultdict\n\ndf_file = pd.read_csv('../data/CS_table_No2_No4_new.csv',delimiter=\";\", skip_blank_lines = True, \n error_bad_lines=False)\n\ndf = {'0STUDENTID':df_file['STUDENTID'],'1ACADYEAR':df_file['ACADYEAR'],\n '2SEMESTER':df_file['SEMESTER'],'3COURSEID':df_file['COURSEID'],'4RESULT':df_file['GRADE']}\n\nheaders=list(df_file.columns.values)\nsubjects = {'courseId':[]}\nstudents = {'0studentId':[]}\ncountSub = 0\ncountStd = 0\ncountEachSub = 0\ncountEachSubSort = 0\ncountEachStd = 0\ncountEachYear = 0\ncountEachTerm = 0\nkey_sub_sort = defaultdict(list)\nkey_sub = defaultdict(list)\nkey_std = defaultdict(list)\nkey_year = defaultdict(list)\nkey_term = defaultdict(list)\n\n#Create dictionary of list subjects\nfor sub in df_file[headers[4]]:\n if sub not in subjects['courseId']:\n subjects['courseId'].append(sub)\n countSub = countSub+1\n\n#Create dictionary of list students(key of student)\nfor std in df_file[headers[0]]:\n if std not in students['0studentId']:\n students['0studentId'].append(std)\n countStd = countStd+1\n\n#Create column with all subjects\ni = 5\nfor i in subjects[\"courseId\"]:\n #print i\n df[i] = np.empty(len(df['0STUDENTID']))\n df[i][:]=np.NAN\n\n#key of student\nfor eachStd in df_file[headers[0]]:\n countEachStd = countEachStd+1\n key_std[countEachStd] = eachStd\n\n#Key of year\nfor eachYear in df_file[headers[1]]:\n countEachYear = countEachYear+1\n key_year[countEachYear] = eachYear\n\n#Key of month\nfor eachTerm in df_file[headers[2]]:\n countEachTerm = countEachTerm+1\n key_term[countEachTerm] = eachTerm\n\n#Key of all subjects(1-)\nfor eachSub in df_file[headers[4]]:\n countEachSub = countEachSub+1\n key_sub[countEachSub] = eachSub\n\n\n#Key of sorted subjects(1-203)\nsubjects['courseId'].sort()\nfor eachSubSort in subjects[\"courseId\"]:\n countEachSubSort = countEachSubSort+1\n key_sub_sort[countEachSubSort] = eachSubSort\n \ndf_a = pd.DataFrame(df)\n\n#Add all before grade\nstart_record = 0\nmark = 0 \nchange = 0\nfor i in range(0,31343):\n count = 0\n if key_std[i+1] == key_std[i+2]:\n if key_year[i+1] == key_year[i+2]:\n if key_term[i+1] < key_term[i+2]:\n for j 
in range(0,mark+1):\n for find in range(1,204):\n if key_sub[i+1-j] == key_sub_sort[find]:\n print \"yeah\"\n df_a.loc[i+1,key_sub_sort[find]] = df_a.loc[i-j,'4RESULT']\n else:\n print \"--\"\n count = count + 1\n print count\n change = 1\n \n elif key_term[i+1] == key_term[i+2]:\n print \"condition term\"\n if key_term[i+1] != key_term[start_record+1] or key_year[i+1] != key_year[start_record+1]:\n for j in range(change,mark+1):\n for find in range(1,204):\n if key_sub[i+1-j] == key_sub_sort[find]:\n print \"yeah\"\n df_a.loc[i+1,key_sub_sort[find]] = df_a.loc[i-j,'4RESULT']\n else:\n print \"--\"\n count = count + 1\n print count\n change = change+1\n else:\n for j in range(0,mark+1):\n for find in range(1,204):\n if key_sub[i+1-j] == key_sub_sort[find]:\n print \"yeah\"\n df_a.loc[i+1,key_sub_sort[find]] = df_a.loc[i-j,'4RESULT']\n else:\n print \"--\"\n count = count + 1\n print count\n change = 1\n \n else:\n print \"condition student\"\n start_record = i+1\n mark = -1\n change = 0\n \n mark = mark+1\nprint \"change is\",change\nprint \"mark is\",mark\nprint \"i is\",i\nprint \"start is \",start_record \n\nwriter = pd.ExcelWriter(\"transform.xlsx\")\npd.DataFrame(df_a).to_excel(writer,\"grade\")\nwriter.save()\n\n\n\n" }, { "alpha_fraction": 0.6348958611488342, "alphanum_fraction": 0.6784722208976746, "avg_line_length": 27.379310607910156, "blob_id": "b3749464ce0a5ed9d92db8d2ed1b692cc5b901c1", "content_id": "b745aa22923e5d430b1d271c6113d37088fdf2cc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5760, "license_type": "permissive", "max_line_length": 130, "num_lines": 203, "path": "/pae/forcast/src/convert_sub_home_name_tojson.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Jun 01 21:37:20 2016\n\n@author: Administrator\n\"\"\"\n\nimport pandas as pd\nimport pandas.io.sql as pd_sql\nimport sqlite3 as sql\n\ndf_file_all = pd.read_csv('../data/CS_table_No2_No4_new.csv',delimiter=\";\", skip_blank_lines = True, \n error_bad_lines=False,encoding='utf8')\n \ndf_file_less = pd.read_csv('../data/df_dropSub_less20.csv',delimiter=\",\", skip_blank_lines = True, \n error_bad_lines=False,encoding='utf8')\n \ndf_file_all = df_file_all.drop(['STUDENTID','ACADYEAR','CAMPUSID','SEMESTER','CURRIC','CAMPUSNAME','SECTIONGROUP','GRADE'],axis=1)\n\nsubjects = []\nnames = []\ncountSub = 0\nsubjects = []\nlink = []\nout={}\nsources=[]\ntargets=[]\n\n\nfor sub in df_file_less['3COURSEID']:\n if sub not in subjects:\n subjects.append(sub)\n countSub = countSub+1\nsubjects.sort()\n\n\ndf_db = df_file_all[df_file_all[\"COURSEID\"].isin(subjects)]\ndf_db = df_db.drop_duplicates(['COURSEID'], take_last=True) \ndf_db = df_db.sort(['COURSEID']) \n\n#Create lis of names subject\nfor name in df_db['COURSENAME']:\n names.append(name)\n \n\n\nsubjects_home = []\nnode = []\n\ncs = 'CS'\ntu = 'TU'\nel = 'EL'\n\nfor index in xrange(0,111):\n s = subjects[index]\n n = names[index] \n if cs in s:\n subjects_home.append(s)\n node.append({\"id\":s,\"name\":n})\n elif tu in s:\n subjects_home.append(s)\n node.append({\"id\":s,\"name\":n})\n elif el in s:\n subjects_home.append(s)\n 
node.append({\"id\":s,\"name\":n})\n\nsubjects_home.remove(\"CS105\")\nnode.pop(2)\n\nsubjects_home.remove(\"CS115\")\nnode.pop(3)\n\nsubjects_home.remove(\"CS211\")\nnode.pop(3)\n\nsubjects_home.remove(\"CS215\")\nnode.pop(5)\n\nsubjects_home.remove(\"CS231\")\nnode.pop(7)\n\nsubjects_home.remove(\"CS300\")\nnode.pop(18)\n\n\nsubjects_home.append('CS112')\nnode.append({\"id\":'CS112',\"name\":'Introduction to Object-Oriented Programming'})\n\nsubjects_home.append('CS327')\nnode.append({\"id\":'CS327',\"name\":'Digital Logic Design'})\n\nsubjects_home.append('CS328')\nnode.append({\"id\":'CS328',\"name\":'Compiler Construction'})\n\nsubjects_home.append('CS357')\nnode.append({\"id\":'CS357',\"name\":'Electronic Business'})\n\nsubjects_home.append('CS358')\nnode.append({\"id\":'CS358',\"name\":'Computer Simulation and Forecasting Techniques in Business'})\n\nsubjects_home.append('CS359')\nnode.append({\"id\":'CS359',\"name\":'Document Indexing and Retrieval'})\n\nsubjects_home.append('CS389')\nnode.append({\"id\":'CS389',\"name\":'Software Architecture'})\n\nsubjects_home.append('CS406')\nnode.append({\"id\":'CS406',\"name\":'Selected Topics in Advance Sofware Engineering Technology'})\n\nsubjects_home.append('CS428')\nnode.append({\"id\":'CS428',\"name\":'Principles of Multiprocessors Programming'})\n\nsubjects_home.append('CS439')\nnode.append({\"id\":'CS439',\"name\":'Selected Topics in Programming Languages'})\n\nsubjects_home.append('CS447')\nnode.append({\"id\":'CS447',\"name\":'Operating Systems II'})\n\nsubjects_home.append('CS448')\nnode.append({\"id\":'CS448',\"name\":'Software systems for advanced distributed computing'})\n\nsubjects_home.append('CS458')\nnode.append({\"id\":'CS458',\"name\":'Information Systems for Entrepreneur Management'})\n\nsubjects_home.append('CS469')\nnode.append({\"id\":'CS469',\"name\":'Selected Topics in Artificial Intelligent Systems'})\n\nsubjects_home.append('CS479')\nnode.append({\"id\":'CS479',\"name\":'Selected Topics in Computer Interface and Multimedia'})\n\nsubjects_home.append('CS496')\nnode.append({\"id\":'CS496',\"name\":'Rendering II'})\n\nsubjects_home.append('CS497')\nnode.append({\"id\":'CS497',\"name\":'Real-time Graphics'})\n\nsubjects_home.append('CS499')\nnode.append({\"id\":'CS499',\"name\":'Selected Topics in Computer Graphics'})\n\nsubjects_home.append('TH161')\nnode.append({\"id\":'TH161',\"name\":'Thai Usage'})\n\nsubjects_home.append('PY228')\nnode.append({\"id\":'PY228',\"name\":'Psychology Of Interpersonal Relations'})\n\nsubjects_home.append('BA291')\nnode.append({\"id\":'BA291',\"name\":'Introduction Of Business'})\n\nsubjects_home.append('EC210')\nnode.append({\"id\":'EC210',\"name\":'Introductory Economics'})\n\nsubjects_home.append('HO201')\nnode.append({\"id\":'HO201',\"name\":'Principles Of Management'})\n\nsubjects_home.append('MA211')\nnode.append({\"id\":'MA211',\"name\":'Calculus 1'})\n\nsubjects_home.append('SC135')\nnode.append({\"id\":'SC135',\"name\":'General Physics'})\n\nsubjects_home.append('SC185')\nnode.append({\"id\":'SC185',\"name\":'General Physics Laboratory'})\n\nsubjects_home.append('SC123')\nnode.append({\"id\":'SC123',\"name\":'Fundamental Chemistry'})\n\nsubjects_home.append('SC173')\nnode.append({\"id\":'SC173',\"name\":'Fundamental Chemistry Laboratory'})\n\nsubjects_home.append('MA212')\nnode.append({\"id\":'MA212',\"name\":'Calculus 2'})\n\nsubjects_home.append('MA332')\nnode.append({\"id\":'MA332',\"name\":'Linear 
Algebra'})\n\nsubjects_home.append('ST216')\nnode.append({\"id\":'ST216',\"name\":'Statistics For Social Science Students 1'})\n\nsubjects_home.sort()\nnode.sort()\n\n## Find index of source and target from book/graph1.gv \ndf_st = pd.read_csv('../data/source-target_home.csv',delimiter=\";\", skip_blank_lines = True, \n error_bad_lines=False)\nheaders_st=list(df_st.columns.values)\ndf_st = df_st.dropna()\n\nfor source in df_st[headers_st[0]]:\n #print \"source is %s, index is %d\"%(source,subjects_db.index(source))\n sources.append(subjects_home.index(source))\n \nfor target in df_st[headers_st[1]]:\n #print \"target is %s, index is %d\"%(target,subjects_db.index(target))\n targets.append(subjects_home.index(target))\n \nfor i in xrange(0,82): #In Bachelor has 83 links\n link.append({\"source\":sources[i],\"target\":targets[i],\"type\": \"licensing\"})\n \nout[\"node\"]=node\nout[\"link\"]=link\n\nwith open(\"subjects_name.json\",\"w\") as outfile:\n json.dump(out,outfile,sort_keys=True, indent=4, separators=(',',': '))" }, { "alpha_fraction": 0.7307189702987671, "alphanum_fraction": 0.7307189702987671, "avg_line_length": 32.21739196777344, "blob_id": "68a7ada338c5794b90d202f1eef6e13387828114", "content_id": "5fb784040fbf3d55a86fd7c7d22bedaf28ce259d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 765, "license_type": "permissive", "max_line_length": 105, "num_lines": 23, "path": "/pae/book_edit_bootstrap/django/project/mywebpage/admin.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "from django.contrib import admin\nfrom .models import Student, Subject, Enrollment\n\n# Register your models here\nclass StudentAdmin(admin.ModelAdmin):\n\tlist_display = ('username','std_id','firstname','lastname','email','province_id','sch_gpa','admit_year')\n\t#list_display = ('username','std_id','province_id','sch_gpa','admit_year')\n\nclass SubjectAdmin(admin.ModelAdmin):\n\tlist_display = ('sub_id','sub_name','description','credit')\n\t#def sub_name(self,obj):\n\t#\treturn obj.sub_name\n\nclass EnrollmentAdmin(admin.ModelAdmin):\n\tlist_display = ('std_id','sub_id','grade','term','year')\n\n\t#def std_id(self,obj):\n\t\t#return obj.std_id\n\t\t\n\nadmin.site.register(Student,StudentAdmin)\nadmin.site.register(Subject, SubjectAdmin)\nadmin.site.register(Enrollment, EnrollmentAdmin)\n\n" }, { "alpha_fraction": 0.48730963468551636, "alphanum_fraction": 0.5215736031532288, "avg_line_length": 22.909090042114258, "blob_id": "03c2afb5f23a0929a64d7685f004f0e1579df534", "content_id": "5101d76bca97e86ef8f5c526960cea7353a672a0", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 788, "license_type": "permissive", "max_line_length": 74, "num_lines": 33, "path": "/pae/book_edit_bootstrap/graphviz/convert_ext_to_json.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Jan 20 11:04:32 2016\n\n@author: BOOK\n\"\"\"\nimport json\nout={}\nnode=[]\nedges=[]\nwith open(\"graph_reY.plain-ext\") as f:\n content = f.readlines()\nfor n in content[1:]:\n line = n.split()\n \n if line[0] == \"node\":\n #print line[1:4]\n node.append({\"COURSE_ID\":line[1],\"Y\":line[2],\"X\":line[3]})\n elif line[0] == \"edge\":\n linepath = []\n for i in xrange(int(line[3])):\n x = line[4+(i*2)]\n y = line[5+(i*2)]\n mydict = {\"x\":x,\"y\":y}\n linepath.append(mydict)\n edges.append(linepath) 
\nout[\"node\"]=node\nout[\"edges\"]=edges \n \nprint out\n\nwith open(\"coordinate.json\",\"w\") as outfile:\n json.dump(out,outfile,sort_keys=True, indent=4, separators=(',',': '))" }, { "alpha_fraction": 0.7749999761581421, "alphanum_fraction": 0.7749999761581421, "avg_line_length": 23.200000762939453, "blob_id": "40669b3b8a8fa43d2e417fb08369d217f89b2bb7", "content_id": "563582b15d77da5eb881db3a8f1e008b1e1bff8f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 120, "license_type": "permissive", "max_line_length": 37, "num_lines": 5, "path": "/book/django/test/testapp/views.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "from django.shortcuts import render\n\n# Create your views here.\ndef homep(request):\n\treturn render(request, 'homep.html')" }, { "alpha_fraction": 0.609375, "alphanum_fraction": 0.6450892686843872, "avg_line_length": 20.380952835083008, "blob_id": "eb3d548abd9f3c095cdc61cb725d10576d70c1c7", "content_id": "0de7a8ebb9d3ebfb4d43ec064d68525ca76929cd", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 448, "license_type": "permissive", "max_line_length": 44, "num_lines": 21, "path": "/pae/forcast/src/sqlite/subject_sqlite.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Feb 18 21:51:57 2016\n\n@author: Administrator\n\"\"\"\nimport csv, sqlite3\nimport pandas as pd\n\nimport sqlite3\n\nconn = sqlite3.connect('subject.sqlite')\nprint \"Opened database successfully\";\n\nconn.execute('''CREATE TABLE Subject\n (sub_id INT PRIMARY KEY NOT NULL,\n sub_name TEXT NOT NULL,\n credit INT NOT NULL);''')\nprint \"Table created successfully\";\n\nconn.close()" }, { "alpha_fraction": 0.5863874554634094, "alphanum_fraction": 0.6596858501434326, "avg_line_length": 16.454545974731445, "blob_id": "6ea390ebcdb6301d1ae48cf8b1de5ab63afda2b5", "content_id": "4d705719e3aa04e83811167f66c93f925b11a8e3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 191, "license_type": "permissive", "max_line_length": 66, "num_lines": 11, "path": "/pae/forcast/src/csv/CS_table_No3.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Sep 09 14:51:02 2015\n\n@author: Administrator\n\"\"\"\n\nimport pandas as pd\ndata = pd.read_csv('D:\\\\project\\\\forcast\\\\data\\\\CS_table_No3.csv')\n\nprint data" }, { "alpha_fraction": 0.832335352897644, "alphanum_fraction": 0.832335352897644, "avg_line_length": 27, "blob_id": "7d70224c52dd8e186d55e94a2fefacfacd3928bc", "content_id": "5d9a8ebac4050c757031e8094ae9a8299a987412", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 167, "license_type": "permissive", "max_line_length": 43, "num_lines": 6, "path": "/book/django/webapp/login/admin.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "from django.contrib import admin\nfrom login.models import Users,UsersStudent\n\n# Register your models here.\nadmin.site.register(Users)\nadmin.site.register(UsersStudent)" }, { "alpha_fraction": 0.5972445607185364, "alphanum_fraction": 0.6548794507980347, "avg_line_length": 21.75916290283203, "blob_id": "63d48cdc6a24376f8390f26f300ea1a978652fe2", "content_id": "8c8c5e6d9397d5b49afbd9326eb98a2b3a69b7b4", "detected_licenses": [ "MIT" ], "is_generated": false, 
"is_vendor": false, "language": "Python", "length_bytes": 4355, "license_type": "permissive", "max_line_length": 94, "num_lines": 191, "path": "/pae/forcast/src/convert_sub_home_tojson.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed May 25 00:18:25 2016\n\n@author: Methinee\n\"\"\"\nimport pandas as pd\nimport json\nfrom collections import defaultdict\ncountEachSubSort = 0\nkey_sub_sort = defaultdict(list)\n\nsubjects = []\ncountSub = 0\nlink = []\nout={}\nsources=[]\ntargets=[]\n\ndf_file = pd.read_csv('../data/df_dropSub_less20.csv',delimiter=\",\", skip_blank_lines = True, \n error_bad_lines=False,encoding='utf8')\nheaders=list(df_file.columns.values)\n\nfor sub in df_file['3COURSEID']:\n if sub not in subjects: \n subjects.append(sub)\n# print \"%s, index is %d\"%(sub,subjects.index(sub))\n countSub = countSub+1\n\nsubjects.sort() \n\n#///////For making Bachelor Graph (just subject has in Cirriculum)//////////////////\ncs = 'CS'\ntu = 'TU'\nel = 'EL'\n#print \"\\n\".join(s for s in subjects if cs in s)\n\nsubjects_home = []\nnode = []\nfor s in subjects:\n if cs in s:\n print s\n subjects_home.append(s)\n node.append({\"name\":s})\n elif tu in s:\n print s\n subjects_home.append(s)\n node.append({\"name\":s})\n elif el in s:\n print s\n subjects_home.append(s)\n node.append({\"name\":s})\n\n \nsubjects_home.remove(\"CS105\")\nnode.remove({\"name\":'CS105'})\n\nsubjects_home.remove(\"CS115\")\nnode.remove({\"name\":'CS115'})\n\nsubjects_home.remove(\"CS211\")\nnode.remove({\"name\":'CS211'})\n\nsubjects_home.remove(\"CS215\")\nnode.remove({\"name\":'CS215'})\n\nsubjects_home.remove(\"CS231\")\nnode.remove({\"name\":'CS231'})\n\nsubjects_home.remove(\"CS300\")\nnode.remove({\"name\":'CS300'})\n\n\nsubjects_home.append('CS112')\nnode.append({\"name\":'CS112'})\n\nsubjects_home.append('CS327')\nnode.append({\"name\":'CS327'})\n\nsubjects_home.append('CS328')\nnode.append({\"name\":'CS328'})\n\nsubjects_home.append('CS357')\nnode.append({\"name\":'CS357'})\n\nsubjects_home.append('CS358')\nnode.append({\"name\":'CS358'})\n\nsubjects_home.append('CS359')\nnode.append({\"name\":'CS359'})\n\nsubjects_home.append('CS389')\nnode.append({\"name\":'CS389'})\n\nsubjects_home.append('CS406')\nnode.append({\"name\":'CS406'})\n\nsubjects_home.append('CS428')\nnode.append({\"name\":'CS428'})\n\nsubjects_home.append('CS439')\nnode.append({\"name\":'CS439'})\n\nsubjects_home.append('CS447')\nnode.append({\"name\":'CS447'})\n\nsubjects_home.append('CS448')\nnode.append({\"name\":'CS448'})\n\nsubjects_home.append('CS458')\nnode.append({\"name\":'CS458'})\n\nsubjects_home.append('CS469')\nnode.append({\"name\":'CS469'})\n\nsubjects_home.append('CS479')\nnode.append({\"name\":'CS479'})\n\nsubjects_home.append('CS496')\nnode.append({\"name\":'CS496'})\n\nsubjects_home.append('CS497')\nnode.append({\"name\":'CS497'})\n\nsubjects_home.append('CS499')\nnode.append({\"name\":'CS499'})\n\nsubjects_home.append('TH161')\nnode.append({\"name\":'TH161'})\n\nsubjects_home.append('PY228')\nnode.append({\"name\":'PY228'})\n\nsubjects_home.append('BA291')\nnode.append({\"name\":'BA291'})\n\nsubjects_home.append('EC210')\nnode.append({\"name\":'EC210'})\n\nsubjects_home.append('HO201')\nnode.append({\"name\":'HO201'})\n\nsubjects_home.append('MA211')\nnode.append({\"name\":'MA211'})\n\nsubjects_home.append('SC135')\nnode.append({\"name\":'SC135'})\n\nsubjects_home.append('SC185')\nnode.append({\"name\":'SC185'})\n\nsubjects_home.append('SC123')\nnode.app
end({\"name\":'SC123'})\n\nsubjects_home.append('SC173')\nnode.append({\"name\":'SC173'})\n\nsubjects_home.append('MA212')\nnode.append({\"name\":'MA212'})\n\nsubjects_home.append('MA332')\nnode.append({\"name\":'MA332'})\n\nsubjects_home.append('ST216')\nnode.append({\"name\":'ST216'})\n\nsubjects_home.sort()\nnode.sort()\n\n## Find index of source and target from book/graph1.gv \ndf_st = pd.read_csv('../data/source-target_home.csv',delimiter=\";\", skip_blank_lines = True, \n error_bad_lines=False)\nheaders_st=list(df_st.columns.values)\ndf_st = df_st.dropna()\n\nfor source in df_st[headers_st[0]]:\n #print \"source is %s, index is %d\"%(source,subjects_db.index(source))\n sources.append(subjects_home.index(source))\n \nfor target in df_st[headers_st[1]]:\n #print \"target is %s, index is %d\"%(target,subjects_db.index(target))\n targets.append(subjects_home.index(target))\n \nfor i in xrange(0,82): #In Bachelor has 83 links\n link.append({\"source\":sources[i],\"target\":targets[i],\"type\": \"licensing\"})\n \nout[\"node\"]=node\nout[\"link\"]=link\n\nwith open(\"subjects_cc.json\",\"w\") as outfile:\n json.dump(out,outfile,sort_keys=True, indent=4, separators=(',',': '))\n " }, { "alpha_fraction": 0.7279411554336548, "alphanum_fraction": 0.7279411554336548, "avg_line_length": 21.83333396911621, "blob_id": "d66ec3f8b7311240db586e8e90aa35fe500ca842", "content_id": "34117f6b2f230f6e38392c966b6bf7ae42f76ec5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 136, "license_type": "permissive", "max_line_length": 36, "num_lines": 6, "path": "/pae/book_edit_bootstrap/django/project/mywebpage/forms.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "\"\"\"from django import forms\nfrom .models import Student, Subject\n\nclass SubjectForm(forms.ModelForm):\n\tmodel = Subject\n\tfields = ('')\"\"\"" }, { "alpha_fraction": 0.567161500453949, "alphanum_fraction": 0.5736510157585144, "avg_line_length": 37.395904541015625, "blob_id": "5412187d1cf213d17bddd4953cb4c04490f24e24", "content_id": "7cbad7d77f709d2661fbb70aa959304254f3f626", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11249, "license_type": "permissive", "max_line_length": 265, "num_lines": 293, "path": "/book/django/project/mywebpage/views.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "from django.shortcuts import render, redirect\nfrom django.http import HttpResponse, HttpResponseRedirect\nfrom django.http import JsonResponse\nfrom django.core.urlresolvers import reverse\nfrom .models import Subject, Enrollment, Student\nfrom django.contrib.auth import logout as auth_logout\nfrom django.contrib.auth.models import User\nfrom django.contrib.staticfiles.templatetags.staticfiles import static\nimport sys\nimport json\nimport pickle\nimport numpy as np\n# Create your views here.\n#y = []\n\ndef home(request):\n if request.method == 'GET':\n if 'username' not in request.session:\n return render(request, 'pleaselogin.html')\n else:\n username = User.objects.get(username=request.session['username'])\n student = Student.objects.filter(username__username=username)\n if not student:\n #url = reverse('userprofile' ,kwargs={'': ''})\n return HttpResponseRedirect(\"../userprofile\")\n else:\n return render(request,'home.html')\n\ndef homegraph(request):\n if request.method == 'GET':\n if 'username' not in request.session:\n return render(request, 'pleaselogin.html')\n else:\n 
return render(request, 'homegraph.html')\n\n\ndef showprofile(request):\n if request.method == 'GET':\n if 'username' not in request.session:\n return render(request, 'pleaselogin.html')\n else:\n return render(request, 'showprofile.html')\n\ndef addprofile(request):\n if request.method == 'GET':\n if 'username' not in request.session:\n return render(request, 'pleaselogin.html')\n else:\n return render(request, 'addprofile.html')\n if request.method == 'POST':\n enroll = json.loads(request.body) #jsEnrolled is python Object or python dictionary #101\n #body={\"CS101\": {\"sub_id\": \"CS101\", \"grade\": \"A\", \"term\": \"2\", \"year\": \"2555\"}}\n \n if not enroll:\n print \"enroll\", enroll\n \n enrolled_old = Enrollment.objects.all() #2 cs101 cs102\n enrolled_oldData = { str(i.sub_id) : { 'sub_id': str(i.sub_id),'term': str(i.term), 'year': str(i.year) , 'grade': i.grade} for i in enrolled_old }\n\n for k in enrolled_oldData: #cs101, cs102\n if k in enroll:\n pass\n else:\n #delete\n delete_enroll = Enrollment.objects.filter(sub_id=k)\n delete_enroll.delete()\n\n\n for key,value in enroll.iteritems():\n std_id = str(Student.objects.get(username__username=request.session['username']).std_id)\n\n #update\n if Enrollment.objects.filter(std_id__std_id = std_id ,sub_id__sub_id=key ).exists():\n record = Enrollment.objects.get(std_id__std_id=std_id, sub_id__sub_id=key ) \n record.grade = value['grade']\n record.term = value['term']\n record.year = value['year']\n record.save()\n\n #add\n elif Enrollment.objects.filter(std_id__std_id = std_id ,sub_id__sub_id=key ).exists() is False:\n fk_std_id = Student.objects.get(std_id=std_id)\n fk_sub_id = Subject.objects.get(sub_id=key)\n new_enroll = Enrollment.objects.create(std_id=fk_std_id, sub_id=fk_sub_id, grade=value['grade'], term=value['term'], year=value['year'])\n new_enroll.save()\n\n return HttpResponse(\"OK\")\n\ndef sortLabel(request):\n subject = Subject.objects.values('sub_id').order_by('sub_id') #sort by subid\n sList = [ i for i in subject ]\n subjectList =[]\n for i in range(len(sList)):\n v = sList[i]\n obj = v['sub_id']\n subjectList.append(obj)\n #print subjectList #[u'AT316, u'AT326, .... 
, u'TU154'] 110\n\n std_id = str(Student.objects.get(username__username=request.session['username']).std_id)\n enroll = Enrollment.objects.filter(std_id__std_id = std_id).order_by('sub_id').values('sub_id','grade')\n #print >> sys.stderr, enroll #type(enroll) \n\n enList = [i for i in enroll] #[{'sub_id':u'AT316', 'grade': u'B'}] 3\n enrollList = []\n for i in range(len(enList)):\n v = enList[i]\n obj = v['sub_id']\n enrollList.append(obj) \n\n label = []\n for i,n in enumerate(subjectList):\n if n in enrollList:\n label.append(n)\n else:\n label.append(0)\n\n for i in enList:\n #print type(i) #print i['sub_id']\n if i['sub_id'] in label:\n sub = i['sub_id']\n rep = i['grade']\n\n if rep == 'A':\n label[label.index(sub)] = 8\n elif rep == 'B+':\n label[label.index(sub)] = 7\n elif rep == 'B':\n label[label.index(sub)] = 7\n elif rep == 'C+':\n label[label.index(sub)] = 6\n elif rep == 'C':\n label[label.index(sub)] = 6\n elif rep == 'D+':\n label[label.index(sub)] = 5\n elif rep == 'D':\n label[label.index(sub)] = 5\n elif rep == 'F':\n label[label.index(sub)] = 4\n elif rep == 'W':\n label[label.index(sub)] = 3\n elif rep == 'S':\n label[label.index(sub)] = 2\n elif rep == 'S#':\n label[label.index(sub)] = 2\n elif rep == 'U':\n label[label.index(sub)] = 1\n elif rep == 'U#':\n label[label.index(sub)] = 1 \n\n classify(label)\n return HttpResponse(\"yyyyyyyyyyyyyyyyyyy\")\n\ndef predict(request):\n if request.method == 'GET':\n if 'username' not in request.session:\n return render(request, 'pleaselogin.html')\n else:\n sortLabel(request)\n return render(request, 'predict.html')\n\ndef classify(X):\n #print X\n subject = Subject.objects.values('sub_id').order_by('sub_id') #sort by subid\n sList = [ i for i in subject ]\n subjectList =[]\n for i in range(len(sList)):\n v = sList[i]\n obj = v['sub_id']\n subjectList.append(obj)\n\n y = []\n probability =[]\n subenrolled = []\n for i in range(0,110):\n if X[i] == 0:\n subject = subjectList[i]\n f = \"tree/tree%s.pic\"%subject\n with open(f, 'rb') as pickleFile:\n clf2 = pickle.load(pickleFile)\n clf2.predict(X)\n Grade=['A', 'B', 'C' , 'D' , 'F' , 'W' , 'S' , 'U' ,'na']\n grade_predicted = Grade[::-1][clf2.predict(X)]\n prob = \"%.02f\"%np.max(clf2.predict_proba(X))\n #print \"prediction of %s: \"%subject,grade_predicted\n probability.append(prob)\n y.append(grade_predicted)\n elif X[i] != 0: \n subject = subjectList[i]\n subenrolled.append(subject)\n Grade=['A', 'B', 'C' , 'D' , 'F' , 'W' , 'S' , 'U' ,'na']\n grade_truth=Grade[::-1][X[i]]\n prob = \"-\"\n #print \"grade %s has already is \"%subject,grade_truth\n probability.append(prob)\n y.append(grade_truth)\n #print \"list of all grade predicted is %s\"%y \n #print subenrolled\n\n with open('coordinate_predict.json') as f:\n myfile = json.load(f)\n all_subject = myfile['node']\n for i,k in enumerate(all_subject):\n subject = all_subject[i]\n subject['grade'] = y[i]\n subject['prob'] = probability[i]\n if subject['name'] in subenrolled:\n subject['type'] = \"enrolled\"\n #print myfile\n\n with open('j.json','w+') as f:\n json.dump(myfile, f)\n f.close()\n\n return HttpResponse(\"OK\")\n\ndef userprofile(request):\n if request.method == 'GET':\n if 'username' not in request.session:\n return render(request, 'pleaselogin.html')\n else:\n return render(request, 'userprofile.html')\n\n if request.method == 'POST':\n userprofile = json.loads(request.body)\n\n for key,value in userprofile.iteritems():\n #print key\n username = str(User.objects.get(username=request.session['username']))\n #print 
username\n #update\n if Student.objects.filter(username__username=username).exists() :\n record = Student.objects.get(username__username=username)\n record.firstname = value['firstname']\n record.lastname = value['lastname']\n record.std_id = record.std_id\n record.email = value['email']\n record.sch_gpa = value['sch_gpa']\n record.admit_year = value['admit_year']\n record.province_id = value['province_id']\n record.save()\n\n #add\n elif Student.objects.filter(username__username=username).exists() is False:\n fk_username = User.objects.get(username=username)\n new_student = Student.objects.create(username=fk_username,firstname=value['firstname'],lastname=value['lastname'],std_id=value['std_id'],email=value['email'],sch_gpa=value['sch_gpa'],admit_year= value['admit_year'], province_id=value['province_id'])\n new_student.save()\n\n return render(request, 'userprofile.html')\n\ndef jsonProvience(request):\n subject = Subject.objects.all().order_by('sub_id')\n return HttpResponse(subject)\n\ndef jsonSubject(request):\n subjectID = Subject.objects.all()\n subjectIDdata = { i.sub_id : {'sub_name' : i.sub_name} for i in subjectID }\n return JsonResponse(subjectIDdata)\n\ndef jsonEnrollment(request):\n std_id = str(Student.objects.get(username__username=request.session['username']).std_id)\n enrollmentID = Enrollment.objects.filter(std_id=std_id)\n #enrollmentID = Enrollment.objects.filter(std_id__username__username=request.session['username'])\n enrollmentData = { i.sub_id.sub_id : { 'sub_id': i.sub_id.sub_id,'term': str(i.term), 'year': str(i.year) , 'grade': i.grade} for i in enrollmentID }\n return JsonResponse(enrollmentData)\n\ndef jsonStudent(request):\n username = User.objects.get(username=request.session['username'])\n student = Student.objects.filter(username__username=username)\n #studentData = {i.username.username :{'std_id': i.std_id, 'firstname'i.firstname, 'lastname': i.lastname, 'email': i.email, 'province':i.province, 'gpa':i.gpa, 'admityear':i.admityear} for i in student}\n studentData = { 'user' :{'std_id': str(i.std_id), 'firstname': i.firstname, 'lastname': i.lastname,'email': i.email, 'sch_gpa': str(i.sch_gpa),'admit_year': str(i.admit_year), 'province_id':i.province_id } for i in student}\n return JsonResponse(studentData)\n\ndef coordinate_home(request):\n with open('coordinate_home.json') as f:\n myfile = json.load(f)\n #print myfile1\n\n return JsonResponse({'myfile':myfile})\n\ndef coordinate_predict(request):\n with open('j.json') as f:\n myfile = json.load(f)\n #print myfile\n return JsonResponse({'myfile':myfile})\n\ndef test(request):\n return render(request, 'test.html')\n\ndef testcoor(request):\n with open('test2.json') as f:\n myfile = json.load(f)\n print myfile\n return JsonResponse({'myfile':myfile})" }, { "alpha_fraction": 0.6051175594329834, "alphanum_fraction": 0.6390041708946228, "avg_line_length": 28.489795684814453, "blob_id": "3fa22b4877da59f391a533921117a182bfa2a8e8", "content_id": "dc483c94913859de783214d825f6f0d64fe11d11", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1446, "license_type": "permissive", "max_line_length": 96, "num_lines": 49, "path": "/pae/forcast/src/$RP6EMGT.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri May 13 22:59:53 2016\n\n@author: Methinee\n\"\"\"\nimport pandas as pd\nimport numpy as np\nimport pickle\n\ndf_file = pd.read_csv('../data/df_sub_more20_merge.csv',delimiter=\",\", 
skip_blank_lines = True, \n                      error_bad_lines=False)\nheaders=list(df_file.columns.values)\nsubjects = []\ncountSub = 0\n#Create list of subjects\nfor sub in df_file['3COURSEID']:\n    if sub not in subjects:\n        subjects.append(sub)\n        countSub = countSub+1\n# Function definition is here\ndef classify(X):\n    with open('tree/treeCS213.pic', 'rb') as pickleFile:\n        clf2 = pickle.load(pickleFile)\n        clf2.predict(X)\n        Grade=['A', 'B', 'C' , 'D' , 'F' , 'W' , 'S' , 'U' ,'na']\n        grade_predicted = Grade[::-1][clf2.predict(X)]\n        print \"prediction: \",grade_predicted \n    \n    return\n\n#Example1: Create label X from Pae's Transcript.. result of CS213 should be \"C\"\n#df_labelX = pd.read_csv('../data/test_labelX.csv',delimiter=\",\", skip_blank_lines = True, \n#                      error_bad_lines=False)\n#B = df_labelX.as_matrix()\n#X = B[:,6:209] #get all subjects without term,year,province,schGpa\n\n\n#Example2: Create label X from first record of csv only cs213.. result of CS213 should be \"C\"\n\nsubject = 'CS213'\nprint subject \ndf_sub = df_file[df_file['3COURSEID'] == subject]\ndf_sub = df_sub.iloc[np.random.permutation(len(df_sub))]\n\nA = df_sub.as_matrix()\nX = A[0,6:209]\n\nclassify(X)\n\n" }, { "alpha_fraction": 0.6528089642524719, "alphanum_fraction": 0.7033708095550537, "avg_line_length": 34.63999938964844, "blob_id": "fd3b09c5fb55800dbaa04f653dcc0402da96c57f", "content_id": "01cbda6b2aca3a0584eef9c12ac06184f9724c60", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 890, "license_type": "permissive", "max_line_length": 79, "num_lines": 25, "path": "/pae/forcast/save_excel.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Apr 13 23:36:10 2016\n\n@author: Administrator\n\"\"\"\n\nimport xlwt \n\nbook = xlwt.Workbook(encoding=\"utf-8\") \n\nsheet1 = book.add_sheet(\"Python Sheet 1\") \nsheet2 = book.add_sheet(\"Python Sheet 2\") \nsheet3 = book.add_sheet(\"Python Sheet 3\") \n\nsheet1.write(0, 0, \"This is the First Cell of the First Sheet\") \nsheet2.write(0, 0, \"This is the First Cell of the Second Sheet\") \nsheet3.write(0, 0, \"This is the First Cell of the Third Sheet\") \nsheet2.write(1, 10, \"This is written to the Second Sheet\") \nsheet3.write(0, 2, \"This is part of a list of information in the Third Sheet\") \nsheet3.write(1, 2, \"This is part of a list of information in the Third Sheet\") \nsheet3.write(2, 2, \"This is part of a list of information in the Third Sheet\") \nsheet3.write(3, 2, \"This is part of a list of information in the Third Sheet\") \n\nbook.save(\"python_spreadsheet.xls\")" }, { "alpha_fraction": 0.31752943992614746, "alphanum_fraction": 0.3458658754825592, "avg_line_length": 22.591304779052734, "blob_id": "9a46144cb6522098f2096392ca0a27a93d318490", "content_id": "a7056a6c7a2f868511779b79112560a334db3f35", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 43407, "license_type": "permissive", "max_line_length": 265, "num_lines": 1840, "path": "/pae/book_edit_bootstrap/django/project/mywebpage/views.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "from django.shortcuts import render, redirect\nfrom django.http import HttpResponse\nfrom django.http import JsonResponse\nfrom django.core.urlresolvers import reverse\nfrom .models import Subject, Enrollment, Student\nfrom django.contrib.auth import logout as auth_logout\nfrom django.contrib.auth.models import 
User\nimport sys\nimport json\n# Create your views here.\n\ndef home(request):\n username = User.objects.get(username=request.session['username'])\n student = Student.objects.filter(username__username=username)\n if not student:\n return render(request,'userprofile.html')\n else:\n return render(request,'home.html')\n\ndef homeD3(request):\n username = User.objects.get(username=request.session['username'])\n student = Student.objects.filter(username__username=username)\n if not student:\n return render(request,'userprofile.html')\n else:\n return render(request,'homeD3.html')\n\ndef showprofile(request):\n return render(request,'showprofile.html')\n\ndef addprofile(request):\n\n if request.method == 'GET':\n return render(request,'addprofile.html')\n if request.method == 'POST':\n enroll = json.loads(request.body) #jsEnrolled is python Object or python dictionary #101\n #body={\"CS101\": {\"sub_id\": \"CS101\", \"grade\": \"A\", \"term\": \"2\", \"year\": \"2555\"}}\n enrolled_old = Enrollment.objects.all() #2 cs101 cs102\n enrolled_oldData = { str(i.sub_id) : { 'sub_id': str(i.sub_id),'term': str(i.term), 'year': str(i.year) , 'grade': i.grade} for i in enrolled_old }\n\n for k in enrolled_oldData: #cs101, cs102\n if k in enroll:\n pass\n else:\n #delete\n delete_enroll = Enrollment.objects.filter(sub_id=k)\n delete_enroll.delete()\n\n\n for key,value in enroll.iteritems():\n std_id = str(Student.objects.get(username__username=request.session['username']).std_id)\n\n #update\n if Enrollment.objects.filter(std_id__std_id = std_id ,sub_id__sub_id=key ).exists():\n record = Enrollment.objects.get(std_id__std_id=std_id, sub_id__sub_id=key ) \n record.grade = value['grade']\n record.term = value['term']\n record.year = value['year']\n record.save()\n\n #add\n elif Enrollment.objects.filter(std_id__std_id = std_id ,sub_id__sub_id=key ).exists() is False:\n fk_std_id = Student.objects.get(std_id=std_id)\n fk_sub_id = Subject.objects.get(sub_id=key)\n #print >> sys.stderr, fk_sub_id\n new_enroll = Enrollment.objects.create(std_id=fk_std_id, sub_id=fk_sub_id, grade=value['grade'], term=value['term'], year=value['year'])\n new_enroll.save()\n\n return HttpResponse(\"OK\")\n\ndef editprofile(request):\n return render(request,'editprofile.html')\n\ndef predict(request):\n std_id = str(Student.objects.get(username__username=request.session['username']).std_id)\n enroll = Enrollment.objects.filter(std_id__std_id = std_id)\n \n return render(request, 'predict.html')\n\ndef userprofile(request):\n if request.method == 'GET':\n return render(request, 'userprofile.html')\n\n if request.method == 'POST':\n userprofile = json.loads(request.body)\n\n for key,value in userprofile.iteritems():\n username = str(User.objects.get(username=request.session['username']))\n\n\n #update\n if Student.objects.filter(username__username=username).exists():\n record = Student.objects.get(username__username=username)\n #str except int\n record.firstname = value['firstname']\n record.lastname = value['lastname']\n #integer 10\n record.std_id = value['std_id']\n # @\n record.email = value['email']\n #float .00\n record.sch_gpa = value['sch_gpa']\n record.admit_year = value['admit_year']\n record.province_id = value['province_id']\n #print >> sys.stderr, value['firstname']\n record.save()\n\n #check field none\n\n #add\n elif Student.objects.filter(username__username=username).exists() is False:\n fk_username = User.objects.get(username=username)\n\n #else:\n new_student = 
Student.objects.create(username=fk_username,firstname=value['firstname'],lastname=value['lastname'],std_id=value['std_id'],email=value['email'],sch_gpa=value['sch_gpa'],admit_year= value['admit_year'], province_id=value['province_id'])\n new_student.save()\n\n return render(request, 'userprofile.html')\n\ndef jsonProvience(request):\n province = {'10':{'name':'10-bangkok'},'11':{'name':'11-nonthaburi'}}\n return JsonResponse(province)\n\ndef jsonSubject(request):\n subjectID = Subject.objects.all()\n\n subjectIDdata = { i.sub_id : {'sub_name' : i.sub_name} for i in subjectID } \n return JsonResponse(subjectIDdata)\n\ndef jsonEnrollment(request):\n std_id = str(Student.objects.get(username__username=request.session['username']).std_id)\n enrollmentID = Enrollment.objects.filter(std_id=std_id)\n #enrollmentID = Enrollment.objects.filter(std_id__username__username=request.session['username'])\n enrollmentData = { i.sub_id.sub_id : { 'sub_id': i.sub_id.sub_id,'term': str(i.term), 'year': str(i.year) , 'grade': i.grade} for i in enrollmentID }\n return JsonResponse(enrollmentData)\n\ndef jsonStudent(request):\n username = User.objects.get(username=request.session['username'])\n student = Student.objects.filter(username__username=username)\n #studentData = {i.username.username :{'std_id': i.std_id, 'firstname'i.firstname, 'lastname': i.lastname, 'email': i.email, 'province':i.province, 'gpa':i.gpa, 'admityear':i.admityear} for i in student}\n studentData = { 'user' :{'std_id': str(i.std_id), 'firstname': i.firstname, 'lastname': i.lastname,'email': i.email, 'sch_gpa': str(i.sch_gpa),'admit_year': str(i.admit_year), 'province_id':i.province_id } for i in student}\n return JsonResponse(studentData)\n\ndef test(request):\n return render(request,'test.html')\n\n\ndef coordinate_home(request):\n myfile1 = {\n \"link\": [\n {\n \"source\": 87,\n \"target\": 44,\n \"type\": \"licensing\"\n },\n {\n \"source\": 1,\n \"target\": 3,\n \"type\": \"licensing\"\n },\n {\n \"source\": 2,\n \"target\": 3,\n \"type\": \"licensing\"\n },\n {\n \"source\": 3,\n \"target\": 5,\n \"type\": \"licensing\"\n },\n {\n \"source\": 3,\n \"target\": 7,\n \"type\": \"licensing\"\n },\n {\n \"source\": 3,\n \"target\": 8,\n \"type\": \"licensing\"\n },\n {\n \"source\": 3,\n \"target\": 12,\n \"type\": \"licensing\"\n },\n {\n \"source\": 5,\n \"target\": 9,\n \"type\": \"licensing\"\n },\n {\n \"source\": 5,\n \"target\": 13,\n \"type\": \"licensing\"\n },\n {\n \"source\": 5,\n \"target\": 22,\n \"type\": \"licensing\"\n },\n {\n \"source\": 5,\n \"target\": 27,\n \"type\": \"licensing\"\n },\n {\n \"source\": 5,\n \"target\": 34,\n \"type\": \"licensing\"\n },\n {\n \"source\": 5,\n \"target\": 38,\n \"type\": \"licensing\"\n },\n {\n \"source\": 5,\n \"target\": 44,\n \"type\": \"licensing\"\n },\n {\n \"source\": 7,\n \"target\": 23,\n \"type\": \"licensing\"\n },\n {\n \"source\": 7,\n \"target\": 59,\n \"type\": \"licensing\"\n },\n {\n \"source\": 8,\n \"target\": 24,\n \"type\": \"licensing\"\n },\n {\n \"source\": 8,\n \"target\": 25,\n \"type\": \"licensing\"\n },\n {\n \"source\": 8,\n \"target\": 27,\n \"type\": \"licensing\"\n },\n {\n \"source\": 8,\n \"target\": 55,\n \"type\": \"licensing\"\n },\n {\n \"source\": 8,\n \"target\": 58,\n \"type\": \"licensing\"\n },\n {\n \"source\": 9,\n \"target\": 11,\n \"type\": \"licensing\"\n },\n {\n \"source\": 9,\n \"target\": 30,\n \"type\": \"licensing\"\n },\n {\n \"source\": 9,\n \"target\": 33,\n \"type\": \"licensing\"\n },\n {\n \"source\": 9,\n \"target\": 
64,\n \"type\": \"licensing\"\n },\n {\n \"source\": 9,\n \"target\": 65,\n \"type\": \"licensing\"\n },\n {\n \"source\": 9,\n \"target\": 67,\n \"type\": \"licensing\"\n },\n {\n \"source\": 10,\n \"target\": 37,\n \"type\": \"licensing\"\n },\n {\n \"source\": 12,\n \"target\": 14,\n \"type\": \"licensing\"\n },\n {\n \"source\": 12,\n \"target\": 16,\n \"type\": \"licensing\"\n },\n {\n \"source\": 12,\n \"target\": 37,\n \"type\": \"licensing\"\n },\n {\n \"source\": 12,\n \"target\": 39,\n \"type\": \"licensing\"\n },\n {\n \"source\": 12,\n \"target\": 51,\n \"type\": \"licensing\"\n },\n {\n \"source\": 12,\n \"target\": 52,\n \"type\": \"licensing\"\n },\n {\n \"source\": 12,\n \"target\": 53,\n \"type\": \"licensing\"\n },\n {\n \"source\": 12,\n \"target\": 71,\n \"type\": \"licensing\"\n },\n {\n \"source\": 12,\n \"target\": 74,\n \"type\": \"licensing\"\n },\n {\n \"source\": 11,\n \"target\": 15,\n \"type\": \"licensing\"\n },\n {\n \"source\": 11,\n \"target\": 42,\n \"type\": \"licensing\"\n },\n {\n \"source\": 15,\n \"target\": 43,\n \"type\": \"licensing\"\n },\n {\n \"source\": 16,\n \"target\": 40,\n \"type\": \"licensing\"\n },\n {\n \"source\": 16,\n \"target\": 72,\n \"type\": \"licensing\"\n },\n {\n \"source\": 0,\n \"target\": 66,\n \"type\": \"licensing\"\n },\n {\n \"source\": 17,\n \"target\": 44,\n \"type\": \"licensing\"\n },\n {\n \"source\": 17,\n \"target\": 45,\n \"type\": \"licensing\"\n },\n {\n \"source\": 17,\n \"target\": 19,\n \"type\": \"licensing\"\n },\n {\n \"source\": 18,\n \"target\": 45,\n \"type\": \"licensing\"\n },\n {\n \"source\": 20,\n \"target\": 49,\n \"type\": \"licensing\"\n },\n {\n \"source\": 23,\n \"target\": 26,\n \"type\": \"licensing\"\n },\n {\n \"source\": 27,\n \"target\": 28,\n \"type\": \"licensing\"\n },\n {\n \"source\": 27,\n \"target\": 29,\n \"type\": \"licensing\"\n },\n {\n \"source\": 27,\n \"target\": 55,\n \"type\": \"licensing\"\n },\n {\n \"source\": 27,\n \"target\": 56,\n \"type\": \"licensing\"\n },\n {\n \"source\": 27,\n \"target\": 57,\n \"type\": \"licensing\"\n },\n {\n \"source\": 27,\n \"target\": 61,\n \"type\": \"licensing\"\n },\n {\n \"source\": 28,\n \"target\": 41,\n \"type\": \"licensing\"\n },\n {\n \"source\": 28,\n \"target\": 60,\n \"type\": \"licensing\"\n },\n {\n \"source\": 28,\n \"target\": 63,\n \"type\": \"licensing\"\n },\n {\n \"source\": 29,\n \"target\": 62,\n \"type\": \"licensing\"\n },\n {\n \"source\": 30,\n \"target\": 31,\n \"type\": \"licensing\"\n },\n {\n \"source\": 30,\n \"target\": 32,\n \"type\": \"licensing\"\n },\n {\n \"source\": 30,\n \"target\": 66,\n \"type\": \"licensing\"\n },\n {\n \"source\": 34,\n \"target\": 35,\n \"type\": \"licensing\"\n },\n {\n \"source\": 34,\n \"target\": 36,\n \"type\": \"licensing\"\n },\n {\n \"source\": 34,\n \"target\": 69,\n \"type\": \"licensing\"\n },\n {\n \"source\": 37,\n \"target\": 70,\n \"type\": \"licensing\"\n },\n {\n \"source\": 38,\n \"target\": 70,\n \"type\": \"licensing\"\n },\n {\n \"source\": 39,\n \"target\": 72,\n \"type\": \"licensing\"\n },\n {\n \"source\": 39,\n \"target\": 73,\n \"type\": \"licensing\"\n },\n {\n \"source\": 44,\n \"target\": 19,\n \"type\": \"licensing\"\n },\n {\n \"source\": 44,\n \"target\": 46,\n \"type\": \"licensing\"\n },\n {\n \"source\": 44,\n \"target\": 47,\n \"type\": \"licensing\"\n },\n {\n \"source\": 45,\n \"target\": 47,\n \"type\": \"licensing\"\n },\n {\n \"source\": 46,\n \"target\": 75,\n \"type\": \"licensing\"\n },\n {\n \"source\": 47,\n \"target\": 48,\n 
\"type\": \"licensing\"\n },\n {\n \"source\": 47,\n \"target\": 76,\n \"type\": \"licensing\"\n },\n {\n \"source\": 47,\n \"target\": 77,\n \"type\": \"licensing\"\n },\n {\n \"source\": 49,\n \"target\": 50,\n \"type\": \"licensing\"\n },\n {\n \"source\": 64,\n \"target\": 67,\n \"type\": \"licensing\"\n },\n {\n \"source\": 75,\n \"target\": 76,\n \"type\": \"licensing\"\n },\n {\n \"source\": 84,\n \"target\": 64,\n \"type\": \"licensing\"\n },\n {\n \"source\": 0,\n \"target\": 66,\n \"type\": \"licensing\"\n }\n ],\n \"node\": [\n {\n \"id\": \"BA291\",\n \"name\": \"Introduction Of Business\",\n \"type\": \"general\"\n },\n {\n \"id\": \"CS101\",\n \"name\": \"DISCRETE STRUCTURES\",\n \"type\": \"force\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS102\",\n \"name\": \"COMPUTER PROGRAMMING FUNDAMENTALS\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS111\",\n \"name\": \"OBJECT-ORIENTED PROGRAMMING\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS112\",\n \"name\": \"Introduction to Object-Oriented Programming\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS213\",\n \"name\": \"DATA STRUCTURES\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS214\",\n \"name\": \"SOCIAL AND PROFESSIONAL ETHICS\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS222\",\n \"name\": \"PROGRAMMING LANGUAGES AND PARADIGMS\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS223\",\n \"name\": \"COMPUTER ORGANIZATION AND ARCHITECTURE\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS251\",\n \"name\": \"DATABASE SYSTEMS 1\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS261\",\n \"name\": \"HUMAN INFORMATION PROCESSING\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS281\",\n \"name\": \"OBJECT-ORIENTED ANALYSIS AND DESIGN\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS284\",\n \"name\": \"INTRODUCTION TO SOFTWARE ENGINEERING\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS285\",\n \"name\": \"PRACTICES AND PATTERNS IN OBJECT-ORIENTED PROGRAMMING\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS286\",\n \"name\": \"SOFTWARE PROCESS AND QUALITY ASSURANCE\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS288\",\n \"name\": \"COMPONENT-BASED SOFTWARE DEVELOPMENT\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS289\",\n \"name\": \"SOFTWARE PROCESS AND QUALITY ASSURANCE\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS295\",\n \"name\": \"MATHEMATICS FOR COMPUTER GRAPHICS\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS296\",\n \"name\": \"ART AND DESIGN FOUNDATIONS\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS297\",\n \"name\": \"FUNDAMENTAL TECHNIQUES IN COMPUTER GRAPHICS USING API\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS301\",\n \"name\": \"COMPUTER SCIENCE PROJECT PROPOSAL AND PRESENTATION\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS302\",\n \"name\": \"COMPUTER SECURITY\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS311\",\n \"name\": \"DESIGN AND ANALYSIS OF ALGORITHMS\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS314\",\n \"name\": \"THEORY OF COMPUTATION\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS326\",\n \"name\": \"EMBEDDED SYSTEMS DESIGN\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS327\",\n \"name\": \"Digital Logic Design\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS328\",\n \"name\": \"Compiler Construction\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS341\",\n \"name\": \"OPERATING SYSTEMS I\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS342\",\n \"name\": \"NET-CENTRIC COMPUTING 1\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS348\",\n \"name\": \"INTRODUCTION TO CLUSTER 
COMPUTING AND DISTRIBUTED COMPUTING\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS356\",\n \"name\": \"COMPUTER APPLICATIONS IN BUSINESS\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS357\",\n \"name\": \"Electronic Business\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS358\",\n \"name\": \"Computer Simulation and Forecasting Techniques in Business\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS359\",\n \"name\": \"Document Indexing and Retrieval\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS365\",\n \"name\": \"ARTIFICIAL INTELLIGENT SYSTEMS\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS366\",\n \"name\": \"FUNDAMENTAL OF NATURAL LANGUAGE PROCESSING\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS367\",\n \"name\": \"KNOWLEDGE REPRESENTATION AND REASONING\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS374\",\n \"name\": \"HUMAN COMPUTER INTERACTION\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS377\",\n \"name\": \"DIGITAL IMAGE PROCESSING\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS385\",\n \"name\": \"SOFTWARE REQUIREMENT SPECIFICATION AND MANAGEMENT\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS386\",\n \"name\": \"SOFTWARE CONFIGURATION MANAGEMENT\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS387\",\n \"name\": \"WEB APPLICATION AND ENTERPRISE PROGRAMS\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS388\",\n \"name\": \"SOFTWARE CONSTRUCTION AND EVOLUTION\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS389\",\n \"name\": \"Software Architecture\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS395\",\n \"name\": \"COMPUTER GRAPHICS\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS396\",\n \"name\": \"COMPUTER GRAPHICS MODELING\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS397\",\n \"name\": \"RENDERING 1\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS398\",\n \"name\": \"COMPUTER ANIMATION\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS399\",\n \"name\": \"THREE DIMENSIONAL INDUSTRY CONCEPTS AND PRACTICES\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS401\",\n \"name\": \"SPECIAL PROJECTS 1\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS402\",\n \"name\": \"SPECIAL PROJECTS 2\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS406\",\n \"name\": \"Selected Topics in Advance Sofware Engineering Technology\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS407\",\n \"name\": \"SEMINAR IN SOFTWARE ENGINEERING\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS408\",\n \"name\": \"SEMINAR IN SYSTEM ENGINEERING\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS409\",\n \"name\": \"SELECTED TOPICS IN COMPUTER SCIENCE\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS426\",\n \"name\": \"PARALLEL ALGORITHM DESIGNS\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS427\",\n \"name\": \"INTRODUCTION TO PARALLEL COMPUTING\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS428\",\n \"name\": \"Principles of Multiprocessors Programming\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS429\",\n \"name\": \"SELECTED TOPICS IN COMPUTER ARCHITECTURE\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS439\",\n \"name\": \"Selected Topics in Programming Languages\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS446\",\n \"name\": \"NET-CENTRIC COMPUTING 2\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS447\",\n \"name\": \"Operating Systems II\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS448\",\n \"name\": \"Software systems for advanced distributed computing\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS449\",\n \"name\": \"SELECTED TOPICS IN NET-CENTRIC COMPUTING\",\n \"type\": \"force\"\n },\n {\n 
\"id\": \"CS456\",\n \"name\": \"MANAGEMENT INFORMATION SYSTEMS\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS457\",\n \"name\": \"DATABASE SYSTEMS 2\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS458\",\n \"name\": \"Information Systems for Entrepreneur Management\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS459\",\n \"name\": \"SELECTED TOPICS IN INFORMATION SYSTEMS\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS467\",\n \"name\": \"MACHINE LEARNING\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS469\",\n \"name\": \"Selected Topics in Artificial Intelligent Systems\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS479\",\n \"name\": \"Selected Topics in Computer Interface and Multimedia\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS486\",\n \"name\": \"SOFTWARE VALIDATION AND VERIFICATION\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS487\",\n \"name\": \"SOFTWARE PROJECT MANAGEMENT\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS488\",\n \"name\": \"FORMAL METHODS\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS489\",\n \"name\": \"SELECTED TOPICS IN SOFTWARE ENGINEERING\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS496\",\n \"name\": \"Rendering II\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS497\",\n \"name\": \"Real-time Graphics\",\n \"type\": \"force\"\n },\n {\n \"id\": \"CS499\",\n \"name\": \"Selected Topics in Computer Graphics\",\n \"type\": \"force\"\n },\n {\n \"id\": \"EC210\",\n \"name\": \"Introductory Economics\",\n \"type\": \"general\"\n },\n {\n \"id\": \"EL070\",\n \"name\": \"ENGLISH COURSE 1\",\n \"type\": \"general\"\n },\n {\n \"id\": \"EL171\",\n \"name\": \"ENGLISH COURSE 2\",\n \"type\": \"general\"\n },\n {\n \"id\": \"EL172\",\n \"name\": \"ENGLISH COURSE 3\",\n \"type\": \"general\"\n },\n {\n \"id\": \"EL295\",\n \"name\": \"ACADEMIC ENGLISH 1\",\n \"type\": \"general\"\n },\n {\n \"id\": \"EL395\",\n \"name\": \"ACADEMIC ENGLISH 2\",\n \"type\": \"force\"\n },\n {\n \"id\": \"HO201\",\n \"name\": \"Principles Of Management\",\n \"type\": \"general\"\n },\n {\n \"id\": \"MA211\",\n \"name\": \"Calculus 1\",\n \"type\": \"force\"\n },\n {\n \"id\": \"MA212\",\n \"name\": \"Calculus 2\",\n \"type\": \"force\"\n },\n {\n \"id\": \"MA332\",\n \"name\": \"Linear Algebra\",\n \"type\": \"force\"\n },\n {\n \"id\": \"PY228\",\n \"name\": \"Psychology Of Interpersonal Relations\",\n \"type\": \"general\"\n },\n {\n \"id\": \"SC123\",\n \"name\": \"Fundamental Chemistry\",\n \"type\": \"force\"\n },\n {\n \"id\": \"SC135\",\n \"name\": \"General Physics\",\n \"type\": \"force\"\n },\n {\n \"id\": \"SC173\",\n \"name\": \"Fundamental Chemistry Laboratory\",\n \"type\": \"force\"\n },\n {\n \"id\": \"SC185\",\n \"name\": \"General Physics Laboratory\",\n \"type\": \"force\"\n },\n {\n \"id\": \"ST216\",\n \"name\": \"Statistics For Social Science Students 1\",\n \"type\": \"force\"\n },\n {\n \"id\": \"TH161\",\n \"name\": \"Thai Usage\",\n \"type\": \"general\"\n },\n {\n \"id\": \"TU100\",\n \"name\": \"CIVIC EDUCATION\",\n \"type\": \"general\"\n },\n {\n \"id\": \"TU110\",\n \"name\": \"INTEGRATED HUMANITIES\",\n \"type\": \"general\"\n },\n {\n \"id\": \"TU120\",\n \"name\": \"INTEGRATED SOCIAL SCIENCES\",\n \"type\": \"general\"\n },\n {\n \"id\": \"TU122\",\n \"name\": \"LAW IN EVERYDAY LIFE\",\n \"type\": \"general\"\n },\n {\n \"id\": \"TU130\",\n \"name\": \"INTEGRATED SCIENCES AND TECHNOLOGY\",\n \"type\": \"general\"\n },\n {\n \"id\": \"TU154\",\n \"name\": \"FOUNDATION OF MATHEMATICS\",\n \"type\": \"general\"\n }\n ]\n}\n \n\n return 
JsonResponse({'myfile1':myfile1})\n\n\n\n\n\n\n\n\n\ndef coordinate_predict(request):\n myfile = {\n \"link\": [\n {\n \"source\": 7,\n \"target\": 10,\n \"type\": \"licensing\"\n },\n {\n \"source\": 8,\n \"target\": 10,\n \"type\": \"licensing\"\n },\n {\n \"source\": 10,\n \"target\": 13,\n \"type\": \"licensing\"\n },\n {\n \"source\": 10,\n \"target\": 16,\n \"type\": \"licensing\"\n },\n {\n \"source\": 10,\n \"target\": 17,\n \"type\": \"licensing\"\n },\n {\n \"source\": 10,\n \"target\": 21,\n \"type\": \"licensing\"\n },\n {\n \"source\": 13,\n \"target\": 18,\n \"type\": \"licensing\"\n },\n {\n \"source\": 13,\n \"target\": 22,\n \"type\": \"licensing\"\n },\n {\n \"source\": 13,\n \"target\": 32,\n \"type\": \"licensing\"\n },\n {\n \"source\": 13,\n \"target\": 35,\n \"type\": \"licensing\"\n },\n {\n \"source\": 13,\n \"target\": 39,\n \"type\": \"licensing\"\n },\n {\n \"source\": 13,\n \"target\": 43,\n \"type\": \"licensing\"\n },\n {\n \"source\": 13,\n \"target\": 48,\n \"type\": \"licensing\"\n },\n {\n \"source\": 16,\n \"target\": 33,\n \"type\": \"licensing\"\n },\n {\n \"source\": 17,\n \"target\": 34,\n \"type\": \"licensing\"\n },\n {\n \"source\": 17,\n \"target\": 35,\n \"type\": \"licensing\"\n },\n {\n \"source\": 17,\n \"target\": 58,\n \"type\": \"licensing\"\n },\n {\n \"source\": 17,\n \"target\": 60,\n \"type\": \"licensing\"\n },\n {\n \"source\": 18,\n \"target\": 20,\n \"type\": \"licensing\"\n },\n {\n \"source\": 18,\n \"target\": 38,\n \"type\": \"licensing\"\n },\n {\n \"source\": 18,\n \"target\": 63,\n \"type\": \"licensing\"\n },\n {\n \"source\": 18,\n \"target\": 64,\n \"type\": \"licensing\"\n },\n {\n \"source\": 18,\n \"target\": 65,\n \"type\": \"licensing\"\n },\n {\n \"source\": 19,\n \"target\": 42,\n \"type\": \"licensing\"\n },\n {\n \"source\": 21,\n \"target\": 23,\n \"type\": \"licensing\"\n },\n {\n \"source\": 21,\n \"target\": 25,\n \"type\": \"licensing\"\n },\n {\n \"source\": 21,\n \"target\": 42,\n \"type\": \"licensing\"\n },\n {\n \"source\": 21,\n \"target\": 44,\n \"type\": \"licensing\"\n },\n {\n \"source\": 21,\n \"target\": 55,\n \"type\": \"licensing\"\n },\n {\n \"source\": 21,\n \"target\": 56,\n \"type\": \"licensing\"\n },\n {\n \"source\": 21,\n \"target\": 67,\n \"type\": \"licensing\"\n },\n {\n \"source\": 21,\n \"target\": 70,\n \"type\": \"licensing\"\n },\n {\n \"source\": 20,\n \"target\": 24,\n \"type\": \"licensing\"\n },\n {\n \"source\": 20,\n \"target\": 47,\n \"type\": \"licensing\"\n },\n {\n \"source\": 25,\n \"target\": 45,\n \"type\": \"licensing\"\n },\n {\n \"source\": 25,\n \"target\": 68,\n \"type\": \"licensing\"\n },\n {\n \"source\": 26,\n \"target\": 48,\n \"type\": \"licensing\"\n },\n {\n \"source\": 26,\n \"target\": 49,\n \"type\": \"licensing\"\n },\n {\n \"source\": 26,\n \"target\": 28,\n \"type\": \"licensing\"\n },\n {\n \"source\": 27,\n \"target\": 49,\n \"type\": \"licensing\"\n },\n {\n \"source\": 30,\n \"target\": 53,\n \"type\": \"licensing\"\n },\n {\n \"source\": 83,\n \"target\": 48,\n \"type\": \"licensing\"\n },\n {\n \"source\": 35,\n \"target\": 36,\n \"type\": \"licensing\"\n },\n {\n \"source\": 35,\n \"target\": 37,\n \"type\": \"licensing\"\n },\n {\n \"source\": 35,\n \"target\": 58,\n \"type\": \"licensing\"\n },\n {\n \"source\": 35,\n \"target\": 59,\n \"type\": \"licensing\"\n },\n {\n \"source\": 36,\n \"target\": 46,\n \"type\": \"licensing\"\n },\n {\n \"source\": 36,\n \"target\": 61,\n \"type\": \"licensing\"\n },\n {\n \"source\": 36,\n \"target\": 
62,\n \"type\": \"licensing\"\n },\n {\n \"source\": 39,\n \"target\": 66,\n \"type\": \"licensing\"\n },\n {\n \"source\": 44,\n \"target\": 68,\n \"type\": \"licensing\"\n },\n {\n \"source\": 44,\n \"target\": 69,\n \"type\": \"licensing\"\n },\n {\n \"source\": 48,\n \"target\": 28,\n \"type\": \"licensing\"\n },\n {\n \"source\": 48,\n \"target\": 50,\n \"type\": \"licensing\"\n },\n {\n \"source\": 48,\n \"target\": 51,\n \"type\": \"licensing\"\n },\n {\n \"source\": 49,\n \"target\": 51,\n \"type\": \"licensing\"\n },\n {\n \"source\": 51,\n \"target\": 52,\n \"type\": \"licensing\"\n },\n {\n \"source\": 39,\n \"target\": 40,\n \"type\": \"licensing\"\n },\n {\n \"source\": 39,\n \"target\": 41,\n \"type\": \"licensing\"\n },\n {\n \"source\": 53,\n \"target\": 54,\n \"type\": \"licensing\"\n },\n {\n \"source\": 63,\n \"target\": 65,\n \"type\": \"licensing\"\n },\n {\n \"source\": 77,\n \"target\": 63,\n \"type\": \"licensing\"\n }\n ],\n \"node\": [\n {\n \"name\": \"AT316\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"AT326\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"BA291\",\n \"type\": \"general\"\n },\n {\n \"name\": \"CJ315\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"CJ316\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"CJ317\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"CJ321\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"CS101\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS102\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS105\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS111\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS115\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS211\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS213\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS214\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS215\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS222\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS223\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS251\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS261\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS281\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS284\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS285\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS286\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS288\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS289\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS295\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS296\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS297\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS300\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS301\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS302\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS311\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS314\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS326\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS341\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS342\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS348\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS356\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS365\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS366\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS367\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS374\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS377\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS385\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS386\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS387\",\n \"type\": \"force\"\n },\n {\n \"name\": 
\"CS388\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS395\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS396\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS397\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS398\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS399\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS401\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS402\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS407\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS408\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS409\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS426\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS427\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS429\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS446\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS449\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS456\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS457\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS459\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS467\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS486\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS487\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS488\",\n \"type\": \"force\"\n },\n {\n \"name\": \"CS489\",\n \"type\": \"force\"\n },\n {\n \"name\": \"EL070\",\n \"type\": \"general\"\n },\n {\n \"name\": \"EL171\",\n \"type\": \"general\"\n },\n {\n \"name\": \"EL172\",\n \"type\": \"general\"\n },\n {\n \"name\": \"EL295\",\n \"type\": \"general\"\n },\n {\n \"name\": \"EL395\",\n \"type\": \"force\"\n },\n {\n \"name\": \"ES356\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"HO201\",\n \"type\": \"general\"\n },\n {\n \"name\": \"HR201\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"LA209\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"MA211\",\n \"type\": \"force\"\n },\n {\n \"name\": \"MA212\",\n \"type\": \"force\"\n },\n {\n \"name\": \"MA216\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"MA332\",\n \"type\": \"force\"\n },\n {\n \"name\": \"MW313\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"MW314\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"NS132\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"PY228\",\n \"type\": \"general\"\n },\n {\n \"name\": \"SC123\",\n \"type\": \"force\"\n },\n {\n \"name\": \"SC135\",\n \"type\": \"force\"\n },\n {\n \"name\": \"SC173\",\n \"type\": \"force\"\n },\n {\n \"name\": \"SC185\",\n \"type\": \"force\"\n },\n {\n \"name\": \"SO201\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"ST216\",\n \"type\": \"force\"\n },\n {\n \"name\": \"SW111\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"SW212\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"SW213\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"SW221\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"SW335\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"SW365\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"SW475\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"SW478\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"TA395\",\n \"type\": \"freedom\"\n },\n {\n \"name\": \"TH161\",\n \"type\": \"general\"\n },\n {\n \"name\": \"TU100\",\n \"type\": \"general\"\n },\n {\n \"name\": \"TU110\",\n \"type\": \"general\"\n },\n {\n \"name\": \"TU120\",\n \"type\": \"general\"\n },\n {\n \"name\": \"TU122\",\n \"type\": \"general\"\n },\n {\n \"name\": \"TU130\",\n \"type\": \"general\"\n },\n {\n \"name\": \"TU154\",\n \"type\": \"general\"\n }\n ]\n}\n return JsonResponse({'myfile':myfile})" }, { "alpha_fraction": 0.6906779408454895, 
"alphanum_fraction": 0.7217513918876648, "avg_line_length": 34.45000076293945, "blob_id": "0a75cbd1af997d66c7281db7088bdf3a9f33db6e", "content_id": "fe781c48bbe3c995257c349b52954a93a6285dba", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 708, "license_type": "permissive", "max_line_length": 89, "num_lines": 20, "path": "/pae/book_edit_bootstrap/django/webapp/login/models.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "from django.db import models\n\n# Create your models here.\nclass Users(models.Model):\n\tusername = models.CharField(max_length=100)\n\tpassword = models.CharField(max_length=10)\n\tfirstname = models.CharField(max_length=100)\n\tlastname = models.CharField(max_length=100)\n\n\tdef __unicode__(self):\n\t\treturn self.username + \",\" + self.password + \",\" + self.firstname + \",\" + self.lastname\n\nclass UsersStudent(models.Model):\n\tusername = models.CharField(max_length=100)\n\tpassword = models.CharField(max_length=10)\n\tfirstname = models.CharField(max_length=100)\n\tlastname = models.CharField(max_length=100)\n\n\tdef __unicode__(self):\n\t\treturn self.username + \",\" + self.password + \",\" + self.firstname + \",\" + self.lastname" }, { "alpha_fraction": 0.5848624110221863, "alphanum_fraction": 0.646789014339447, "avg_line_length": 19.809524536132812, "blob_id": "cad0665b8be103bcf64c0d935578cfd4fa759153", "content_id": "d17e081dfacdba92dd738c431947287108d309f3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 436, "license_type": "permissive", "max_line_length": 75, "num_lines": 21, "path": "/pae/test_sqllite.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri Nov 20 12:05:39 2015\n\n@author: Wasit\n\"\"\"\n\nimport pandas as pd\nimport pandas.io.sql as pd_sql\nimport sqlite3 as sql\n\ncon = sql.connect(\"test.db\")\ndf = pd.DataFrame({'TestData': [1, 2, 3, 4, 5, 6, 7, 8, 9]}, dtype='float')\npd_sql.write_frame(df, \"test_table2\", con)\ncon.close()\n\n\ncon = sql.connect(\"test.db\")\ndf2 = pd.read_sql_query(\"SELECT * from test_table2\", con)\nprint df2.head()\ncon.close()" }, { "alpha_fraction": 0.5529820322990417, "alphanum_fraction": 0.6056745648384094, "avg_line_length": 40.119049072265625, "blob_id": "a3fba48c7ab95172c9715d59cff9d5c00602ebe7", "content_id": "f61ecd6fb492c8fa11e2ad2d800208a8430ad397", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3454, "license_type": "permissive", "max_line_length": 111, "num_lines": 84, "path": "/pae/final_code/src/create_dfmore20.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Feb 25 23:15:03 2016\n\n@author: Methinee\n\"\"\"\n\nimport pandas as pd\nimport numpy as np\nimport pickle\nimport os \nimport matplotlib\nmatplotlib.style.use('ggplot')\nfrom collections import defaultdict\n\n##////////////////Create dfmore20 without merging(schoolGpa,province)/////////////\n#df_file = pd.read_excel('../src/transform.xlsx')\n#headers=list(df_file.columns.values)\n#\n##replace grade with integer and noplus in each grade\n##{'':0, 'U#':1, 'U':1, 'S#':2, 'S':2, 'W':3, 'F':4, 'D':5, 'D+':5, 'C':6, 'C+':6, 'B':7, 'B+':7, 'A':8}\n#df_file = df_file.fillna(0)\n#df_file = df_file.replace(['A', 'B+', 'B', 'C+', 'C' , 'D+' , 'D' , 'F' , 'W' , 'S' , 'S#' , 'U' , 'U#'], \n# [8, 7, 7, 6 , 
6, 5, 5, 4, 3, 2, 2, 1, 1])\n \n#Select subjects that have >=20 student enrollments \ncount_courseId = df_file[\"3COURSEID\"].value_counts() \nmore20 = count_courseId[count_courseId[:]>=20]\nless20 = count_courseId[count_courseId[:]<20]\ndf_more20 = df_file[df_file[\"3COURSEID\"].isin(more20.index)]\ndf_more20.to_csv('../data'+'/df_more20.csv') #create new dataframe (>=20 enrollment)\ndf_less20 = df_file[~df_file[\"3COURSEID\"].isin(more20.index)]\ndf_less20.to_csv('../data'+'/df_less20.csv') #create new dataframe (<20 enrollment)\n\n#Create a new csv file per subject (>=20), rows in random order \nfor m in more20.index:\n    dfx=df_more20[df_more20[\"3COURSEID\"].isin([m])]\n    dfx=dfx.iloc[np.random.permutation(len(dfx))]\n    dfx.to_csv('../data/df_sub_more20_merge'+\"/df_%s.csv\"%m)\n\nmore20.plot(kind='bar')\n\n#Create new Dataframe (drop columns of subjects with fewer than 20 enrollments)\nsubjects = []\ncountSub = 0\nfor sub in df_less20['3COURSEID']:\n    if sub not in subjects:\n        subjects.append(sub)\n        countSub = countSub+1\nfor drop in subjects:\n    df_file = df_file.drop([drop],axis=1)\ndf_file = df_file[df_file[\"3COURSEID\"].isin(more20.index)]\n#df_file.to_csv('../data'+'/df_dropSub_less20.csv') #create new dataframe (>=20 enrollment)\n" }, { "alpha_fraction": 0.735981285572052, "alphanum_fraction": 0.735981285572052, "avg_line_length": 35.86206817626953, "blob_id": "5073a4784eb4f1bc2c3815800bbe0b6f7dfcf6e7", "content_id": "70b16030a3c81a11a27411b6937aada7edf56dad", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2140, "license_type": "permissive", "max_line_length": 122, "num_lines": 58, "path": "/pae/book_edit_bootstrap/django/project/authen/views.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "from django.shortcuts import render, render_to_response, HttpResponse\nfrom django.contrib.auth import authenticate\nfrom django.contrib.auth import login as auth_login\nfrom django.contrib.auth import logout as auth_logout\nfrom django.core.urlresolvers import reverse\nfrom django.template import 
RequestContext\nfrom django.contrib.auth.models import User\nfrom django.shortcuts import redirect\nimport sys, json\nfrom mywebpage.models import Student, Enrollment, Subject\n\n# Create your views here.\ndef login(request):\n\tif request.POST:\n\t\tusername = request.POST.get('username')\n\t\tpassword = request.POST.get('password')\n\t\tuser = authenticate(username=username, password=password) #verify (check username&password against a database of users)\n\t\tif user is not None:\n\t\t\tif user.is_active:\t\t\t\t\n\t\t\t\tauth_login(request, user)\n\t\t\t\trequest.session['username'] = username\n\t\t\t\treturn redirect(reverse('home'))\n\t\t\t\t#return render_to_response('registration.html',context_instance = RequestContext(request))\n\t\t\telse:\n\t\t\t\tstate = \"Disabled account\"\n\t\telse:\n\t\t\tstate = \"Invalid login\"\n\t\treturn render(request,'login.html',{'state':state})\n\treturn render(request,'login.html',{'state':\"Please login\"})\n\ndef logout(request):\n\tif request.GET:\n\t\t#reverse('/authen/logout')\n\t\tprint >> sys.stderr, \"logout\"\n\t\tif 'username' in request.session:\n\t\t\tdel request.session['username']\t\t\t\t#delete username left from session\n\t\tauth_logout(request)\n\treturn render(request,'login.html',{'state':\"Please login\"})\n\n\ndef registration(request):\n\tif request.method == 'GET':\n\t\treturn render(request,'registration.html')\n\tif request.method == 'POST':\n\t\tuser = json.loads(request.body)\n\t\tuname = user['uname']\n\t\tpassword = user['password']\n\t\tcfpassword = user['cfpassword']\n\t\tif password == cfpassword:\n\t\t\t#print >> sys.stderr, user\n\t\t\tif not User.objects.filter(username=uname).exists():\n\t\t\t\tcreateUser = User.objects.create_user(username=uname, password=password)\n\t\t\t\tcreateUser.save()\n\t\t\t\treturn HttpResponse(\"OK\")\n\t\t\telif User.objects.filter(username=uname).exists():\n\t\t\t\treturn HttpResponse(\"user already exists\")\n\t\telse:\n\t\t\treturn HttpResponse(\"password is not valid\")\n\n\n" }, { "alpha_fraction": 0.5301204919815063, "alphanum_fraction": 0.5927711129188538, "avg_line_length": 22.11111068725586, "blob_id": "9c1054c30a253e421a3e96c01b5d9de6083e735a", "content_id": "ad97538256d5866102cc0a789db24701c3bed7e9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 415, "license_type": "permissive", "max_line_length": 99, "num_lines": 18, "path": "/pae/forcast/src/testclassify_cs213.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Apr 07 14:09:20 2016\n\n@author: Administrator\n\"\"\"\nimport pandas as pd\nimport numpy as np\ndf_file = pd.read_csv('../data/df_m20/df_m20/df_CS341.csv',delimiter=\",\", skip_blank_lines = True, \n                 error_bad_lines=False)\n \n\nA = df_file.as_matrix()\n\nL = A[:,3]\nL = L.astype(np.int64, copy=False)\nI = A[:,5:]#L.shape\nI = I.astype(np.int64, copy=False)" }, { "alpha_fraction": 0.5882722735404968, "alphanum_fraction": 0.6123560070991516, "avg_line_length": 31.70547866821289, "blob_id": "db02b61a24747e77db52bc912418486fc8aad3cb", "content_id": "da573347885fbbcd7c119917d16c95a0047c268e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4775, "license_type": "permissive", "max_line_length": 107, "num_lines": 146, "path": "/pae/forcast/src/feature_eachSub_dropNaResult.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 
-*-\n\"\"\"\nCreated on Tue May 10 17:19:14 2016\n\n@author: Methinee\n\"\"\"\nimport pandas as pd\nimport numpy as np\nimport pickle\nimport xlwt\nimport matplotlib.pyplot as plt\nfrom sklearn.datasets import make_classification\nfrom sklearn.ensemble import RandomForestClassifier\nfrom sklearn.cross_validation import cross_val_score\nfrom sklearn.cross_validation import train_test_split\nbook = xlwt.Workbook(encoding=\"utf-8\")\nsheet1 = book.add_sheet(\"Precission Only Subject\")\n\n\n\n#----------------------Traning Data With Merging-----------------------\n#df_file = pd.read_csv('../data/df_more20.csv',delimiter=\",\", skip_blank_lines = True, \n# error_bad_lines=False)\ndf_file = pd.read_csv('../data/df_dropSub_less20_dropNaResult.csv',delimiter=\",\", skip_blank_lines = True, \n error_bad_lines=False)\ndf_file = df_file.drop('Unnamed: 0',axis=1)\ndf_file = df_file.fillna(0)\ndf_file = df_file.replace(['A', 'B+', 'B', 'C+', 'C' , 'D+' , 'D' , 'F' , 'W' , 'S' , 'S#' , 'U' , 'U#'], \n [8, 7, 7, 6 , 6, 5, 5, 4, 3, 2, 2, 1, 1])\n \ncount_courseId = df_file[\"3COURSEID\"].value_counts() \nmore20 = count_courseId\n\nheaders=list(df_file.columns.values)\nsubjects = []\ncountSub = 0\n#Create dictionary of list subjects\nfor sub in df_file[headers[1]]:\n if sub not in subjects:\n subjects.append(sub)\n countSub = countSub+1\n#Get subject that more 20 enrollment\ncount = 0\nsubjects.sort()\nprecision_rf={}\ndf_precision = more20.drop('CS231').copy()\n\nlist_allsub = df_file.columns[4:]\nallSubject_df = pd.DataFrame(columns=[subjects],index=[list_allsub])\ntop10_df = pd.DataFrame(columns=[subjects])\n\nsubjects.remove('CS231')\n\nfor subject in subjects:\n #Create new Dataframe\n \n print subject \n df_sub = df_file[df_file['3COURSEID'] == subject]\n df_sub = df_sub.iloc[np.random.permutation(len(df_sub))]\n count_enrollment = df_sub['3COURSEID'].value_counts()\n #print \"Number of %s enrollment: %s\"%(subject,count_enrollment)\n\n A = df_sub.as_matrix()\n X = A[:,6:116]\n X = X.astype(np.int64, copy=False)\n y = A[:,2]\n y = y.astype(np.int64, copy=False)\n\n # Split the data into a training set and a test set\n X_train, X_test, y_train, y_test = train_test_split(X, y,test_size=0.2,random_state=0)\n\n #Training data\n forest = RandomForestClassifier(n_estimators=10, max_depth=None, \n min_samples_split=1, random_state=None, max_features=None)\n clf = forest.fit(X, y)\n scores = cross_val_score(clf, X, y, cv=5)\n print scores\n print \"Random Forest Cross Validation of %s: %s\"%(subject,scores.mean())\n precision_rf[subject] = scores.mean()\n df_precision.loc[subject]=precision_rf[subject]\n \n \n row_cm=[]\n Grade = ['A', 'B', 'C' , 'D' , 'F' , 'W' , 'S' , 'U' ,'na']\n row = []\n for cls in y_train:\n if cls not in row:\n row.append(cls)\n row.sort()\n #print row\n\n \n for i in xrange(len(row)):\n Grade = ['A', 'B', 'C' , 'D' , 'F' , 'W' , 'S' , 'U' ,'na']\n grade = Grade[::-1][row[i]]\n print grade\n row_cm.append(grade)\n print row_cm\n print len(row_cm)\n #print row_cm[1] \n \n \n sheet1.write(count, 0, subject)\n# sheet1.write(count,3, scores.mean())\n sheet1.write(count,2,len(y_train))\n# sheet1.write(count,1, scores.mean()*100)\n sheet1.write(count,1,len(row_cm))\n book.save(\"RF_crossvalidation_dropNaResault.xls\")\n count = count+1\n \n #print all subjects\n #save trees to pickle file\n# f = \"tree_drop/tree%s.pic\"%subject\n# with open(f, 'wb') as pickleFile:\n# pickle.dump(clf, pickleFile, pickle.HIGHEST_PROTOCOL)\n \n #///////////////Find Importance Feature importances with forests 
of trees/////////////////// \n importances = forest.feature_importances_\n std = np.std([tree.feature_importances_ for tree in forest.estimators_],\n axis=0)\n indices = np.argsort(importances)[::-1]\n list_grade = df_file.columns[6:117]\n # Print the feature ranking\n print(\"Feature ranking:\")\n\n for f in range(X.shape[1]):\n print(\"%d. feature %s (%f)\" % (f + 1, list_grade[indices[f]], importances[indices[f]]))\n allSubject_df.loc[list_grade[indices[f]],subject] = importances[indices[f]]\n \n top10 = list_grade[indices][:10]\n print str(top10)\n for i in range(1,11):\n top10_df.loc[i,subject] = str(top10[i-1])\n print \"-----------------------------------\"\n\n \n\ndf_precision.plot(kind='bar')\ndf_precision.sort(ascending=False)\ndf_precision.plot(kind=\"bar\")\n\n\nwriter = pd.ExcelWriter(\"feature_eachSub_dropNaResult.xlsx\")\npd.DataFrame(allSubject_df).to_excel(writer,\"all_feature\")\npd.DataFrame(top10_df).to_excel(writer,\"top10_feature\")\nwriter.save()\n" }, { "alpha_fraction": 0.6170084476470947, "alphanum_fraction": 0.6411339044570923, "avg_line_length": 27.947368621826172, "blob_id": "61bdf7f67ed01e3c1e1770a08da3d4c8761a9367", "content_id": "050a4f684fcc2226893af06bd49803bcfce82736", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1658, "license_type": "permissive", "max_line_length": 83, "num_lines": 57, "path": "/pae/forcast/plotgraph_cs213.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Apr 14 18:21:40 2016\n\n@author: Administrator\n\"\"\"\n\nimport pandas as pd\nimport numpy as np\nfrom sklearn.ensemble import RandomForestClassifier\nfrom sklearn.cross_validation import cross_val_score\nfrom matplotlib import pyplot as plt\n\n#----------------------Traning Data-----------------------\ndf_file = pd.read_csv('data/df_more20.csv',delimiter=\",\", skip_blank_lines = True, \n error_bad_lines=False)\nheaders=list(df_file.columns.values)\nsubjects = {'courseId':[]}\ncountSub = 0\n#Create dictionary of list subjects\nfor sub in df_file[headers[4]]:\n if sub not in subjects['courseId']:\n subjects['courseId'].append(sub)\n countSub = countSub+1\n#Get subject that more 20 enrollment\ncount = 0\nsubjects[\"courseId\"].sort()\n \ndf_sub = df_file[df_file['3COURSEID']=='TU154']\ndf_sub = df_sub.iloc[np.random.permutation(len(df_sub))]\n\nA = df_sub.as_matrix()\nX = A[:,6:]\nX = X.astype(np.int64, copy=False)\ny = A[:,5]\ny = y.astype(np.int64, copy=False)\n\n#Training data\nclf_rf = RandomForestClassifier(n_estimators=10, max_depth=None, \n min_samples_split=1, random_state=None, max_features=None)\nscores = cross_val_score(clf_rf, X, y, cv=5)\nclf = clf_rf.fit(X,y)\nprint scores\nprint \"Random Forest Cross Validation: %s\"%scores.mean()\nprint \"-----------------------------------\"\n\n#i=0\n#actual=np.array(y)\n#predicted=np.zeros(len(y))\n#n = 9\n#cm = np.zeros((n,n))\n#for i in xrange(len(y)):\n# predicted[i]=clf.predict(X)[i]\n# #print \"actual grade:%d predicted grade:%d\"%(actual[i],predict[i]) \n# cm[predicted[i],actual[i]] +=1 \n#plt.hist(actual-predicted,50)\n#print cm\n\n\n\n \n" }, { "alpha_fraction": 0.6439267992973328, "alphanum_fraction": 0.6705490946769714, "avg_line_length": 29.100000381469727, "blob_id": "e3d65ecf22d249e9d7f6d204873df2aeb340a34f", "content_id": "00c8fc671a759ebeadad4d58b1a46dcf99f0fea4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 601, 
"license_type": "permissive", "max_line_length": 111, "num_lines": 20, "path": "/pae/forcast/src/csv/merge_table1_2.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed May 04 11:34:19 2016\n\n@author: Methinee\n\"\"\"\nimport pandas as pd\n\ndf_first = pd.read_excel('../../src/transform.xlsx')\n\ndf_second = pd.read_csv('../../data/CS_table_No1.csv',delimiter=\";\", skip_blank_lines = True, \n error_bad_lines=False)\ndf_second = df_second.drop(['ADMITYEAR','STUDENTSTATUS','STUDENTSTATUSNAME','PROVINCENAME','BIRTHDATE'],axis=1)\n\nresult = pd.merge(df_first, df_second, on=['0STUDENTID'])\n\n\nwriter = pd.ExcelWriter(\"../../data/transform_merge.xlsx\")\npd.DataFrame(result).to_excel(writer,\"schoolGpa&province\")\nwriter.save()" }, { "alpha_fraction": 0.581615149974823, "alphanum_fraction": 0.6108247637748718, "avg_line_length": 32.28571319580078, "blob_id": "c1e8406cbbd5015f1023adf68025d1252c2f0a7f", "content_id": "48e56a5f368883ae572c0de3e38e1805c3f3c44e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1164, "license_type": "permissive", "max_line_length": 130, "num_lines": 35, "path": "/book/django/project/add_subject_order.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri Feb 19 16:55:54 2016\n\n@author: Administrator\n\"\"\"\n\nimport pandas as pd\nimport pandas.io.sql as pd_sql\nimport sqlite3 as sql\n\ndf_file_all = pd.read_csv('CS_table_No2_No4_new.csv',delimiter=\";\", skip_blank_lines = True, \n error_bad_lines=False,encoding='utf8')\n \ndf_file_less = pd.read_csv('df_dropSub_less20.csv',delimiter=\",\", skip_blank_lines = True, \n error_bad_lines=False,encoding='utf8')\n \ndf_file_all = df_file_all.drop(['STUDENTID','ACADYEAR','CAMPUSID','SEMESTER','CURRIC','CAMPUSNAME','SECTIONGROUP','GRADE'],axis=1)\n\nsubjects = []\ncountSub = 0\nfor sub in df_file_less['3COURSEID']:\n if sub not in subjects:\n subjects.append(sub)\n countSub = countSub+1\nsubjects.sort()\n\ndf_db = df_file_all[df_file_all[\"sub_id\"].isin(subjects)]\ndf_db = df_db.drop_duplicates(['sub_id'], take_last=True) \ndf_db = df_db.sort(['sub_id'])\n \ncon = sql.connect(\"db.sqlite3\")\n#df = pd.DataFrame({'TestData': [1, 2, 3, 4, 5, 6, 7, 8, 9]}, dtype='float')\npd_sql.to_sql(df_db, \"mywebpage_subject\", con, index=False)\ncon.close()" }, { "alpha_fraction": 0.6578013896942139, "alphanum_fraction": 0.716312050819397, "avg_line_length": 28.736841201782227, "blob_id": "92a3e1bf5008b3fcd7213503058d8e856277f5b3", "content_id": "475f154bf2276606c3e6292345cf21199fb68644", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 564, "license_type": "permissive", "max_line_length": 94, "num_lines": 19, "path": "/pae/forcast/src/histrogram_table_No1.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Sat Nov 28 21:51:46 2015\n\n@author: Methinee\n\"\"\"\n\n#http://pandas.pydata.org/pandas-docs/stable/visualization.html\nimport pandas as pd\n\nxl = pd.ExcelFile('../data/CS_table_No1.xls')\n#xl.sheet_names # see all sheet names\ntable1=xl.parse(xl.sheet_names[0],index_col='STUDENTID') # read a specific sheet to DataFrame\n#table1.convert_objects(convert_numeric=True)\ntable1.SCHOOLGPA[1:].plot() # found higher gpa in 470000\n#table1.SCHOOLGPA[1:].plot(logy=True,legend=True) # found lower gpa in 360000\n\n\n# 
edit in xls already!!" }, { "alpha_fraction": 0.5803108811378479, "alphanum_fraction": 0.6528497338294983, "avg_line_length": 16.454545974731445, "blob_id": "8712a8010e2d29a7a7214ed4f988253e5eda2a28", "content_id": "01b0174a4a5fac57824effe933f473b1eb0d3d65", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 193, "license_type": "permissive", "max_line_length": 66, "num_lines": 11, "path": "/pae/forcast/src/csv/CS_table_No1.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Sep 09 13:26:05 2015\n\n@author: Administrator\n\"\"\"\n\nimport pandas as pd\ndata = pd.read_csv('D:\\\\project\\\\forcast\\\\data\\\\CS_table_No1.csv')\n\nprint data\n\n" }, { "alpha_fraction": 0.6250756978988647, "alphanum_fraction": 0.6480920910835266, "avg_line_length": 27.877193450927734, "blob_id": "140db753e5372883a116bb1c63dd4ac432bd441e", "content_id": "8fbf6743f408a4c396182920eb2808d8eaf3a7ed", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1651, "license_type": "permissive", "max_line_length": 96, "num_lines": 57, "path": "/pae/forcast/src/convert_allsub_tojson.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri Apr 22 23:20:29 2016\n\n@author: Methinee\n\"\"\"\nimport pandas as pd\nimport json\nfrom collections import defaultdict\ncountEachSubSort = 0\nkey_sub_sort = defaultdict(list)\n\nsubjects = []\ncountSub = 0\nnode = []\nlink= []\nout={}\nsources=[]\ntargets=[]\n\ndf_file = pd.read_csv('../data/df_dropSub_less20.csv',delimiter=\",\", skip_blank_lines = True, \n error_bad_lines=False,encoding='utf8')\nheaders=list(df_file.columns.values)\n\nfor sub in df_file['3COURSEID']:\n if sub not in subjects: \n subjects.append(sub)\n# print \"%s, index is %d\"%(sub,subjects.index(sub))\n countSub = countSub+1\n node.append({\"name\":sub})\nsubjects.remove('CS231')\nnode.remove({\"name\":'CS231'})\nsubjects.sort() \nnode.sort()\n \n# Find index of source and target from book/graph1.gv \ndf_st = pd.read_csv('../data/source-target_predict.csv',delimiter=\";\", skip_blank_lines = True, \n error_bad_lines=False)\nheaders_st=list(df_st.columns.values)\ndf_st = df_st.dropna()\n\nfor source in df_st[headers_st[0]]:\n #print \"source is %s, index is %d\"%(source,subjects_db.index(source))\n sources.append(subjects.index(source))\n \nfor target in df_st[headers_st[1]]:\n #print \"target is %s, index is %d\"%(target,subjects_db.index(target))\n targets.append(subjects.index(target))\n \nfor i in xrange(0,62): #In Bachelor has 70 links\n link.append({\"source\":sources[i],\"target\":targets[i],\"type\": \"licensing\"})\n \nout[\"node\"]=node\nout[\"link\"]=link\n\n#with open(\"subjects_111.json\",\"w\") as outfile:\n# json.dump(out,outfile,sort_keys=True, indent=4, separators=(',',': '))\n\n " }, { "alpha_fraction": 0.6421310901641846, "alphanum_fraction": 0.66316157579422, "avg_line_length": 32.16279220581055, "blob_id": "41734117052d9420e5e18b1701d816afcadfa899", "content_id": "134edd5b835bb5938a5ad49a852521215353ca0d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2853, "license_type": "permissive", "max_line_length": 96, "num_lines": 86, "path": "/pae/forcast/src/tree_classify.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed 
Apr 13 16:02:16 2016\n\n@author: Methinee\n\"\"\"\n\nimport pandas as pd\nimport numpy as np\nimport pickle\nimport xlwt\nimport matplotlib.pyplot as plt\nfrom sklearn.datasets import make_classification\nfrom sklearn.ensemble import RandomForestClassifier\nfrom sklearn.cross_validation import cross_val_score\nbook = xlwt.Workbook(encoding=\"utf-8\")\nsheet1 = book.add_sheet(\"Precision Without Merge\")\nsheet2 = book.add_sheet(\"Precision With Merge\")\n\n\n#----------------------Training Data With Merging-----------------------\n#df_file = pd.read_csv('../data/df_more20.csv',delimiter=\",\", skip_blank_lines = True, \n#                      error_bad_lines=False)\ndf_file = pd.read_csv('../data/df_sub_more20_merge.csv',delimiter=\",\", skip_blank_lines = True, \n                      error_bad_lines=False)\n \ncount_courseId = df_file[\"3COURSEID\"].value_counts() \nmore20 = count_courseId\n\nheaders=list(df_file.columns.values)\nsubjects = {'courseId':[]}\ncountSub = 0\n#Create dictionary of list subjects\nfor sub in df_file[headers[1]]:\n    if sub not in subjects['courseId']:\n        subjects['courseId'].append(sub)\n        countSub = countSub+1\n#Get subject that more 20 enrollment\ncount = 0\nsubjects[\"courseId\"].sort()\nprecision_rf={}\ndf_precision = more20.copy()\n\nsubject = 'CS213'\nprint subject \ndf_sub = df_file[df_file['3COURSEID'] == subject]\ndf_sub = df_sub.iloc[np.random.permutation(len(df_sub))]\ncount_enrollment = df_sub['3COURSEID'].value_counts()\n#print \"Number of %s enrollment: %s\"%(subject,count_enrollment)\n\nA = df_sub.as_matrix()\nX = A[:,6:209]\nX = X.astype(np.int64, copy=False)\ny = A[:,2]\ny = y.astype(np.int64, copy=False)\n\n#Training data\nforest = RandomForestClassifier(n_estimators=10, max_depth=None, \n                            min_samples_split=1, random_state=None, max_features=None)\nclf = forest.fit(X, y)\nscores = cross_val_score(clf, X, y, cv=5)\nprint scores\nprint \"Random Forest Cross Validation of %s: %s\"%(subject,scores.mean())\nprecision_rf[subject] = scores.mean()\ndf_precision.loc[subject]=precision_rf[subject]\nprint \"-----------------------------------\"\n\n#print all subjects\n#save trees to pickle file\nf = \"tree/tree%s.pic\"%subject\nwith open(f, 'wb') as pickleFile:\n    pickle.dump(clf, pickleFile, pickle.HIGHEST_PROTOCOL)\n\n#///////////////////Classify with pickle without retrain(Model persistence) \nwith open('tree/treeCS213.pic', 'rb') as pickleFile:\n    clf2 = pickle.load(pickleFile)\n    \ndf_labelX = pd.read_csv('../data/test_labelX.csv',delimiter=\",\", skip_blank_lines = True, \n                      error_bad_lines=False)\n\nB = df_labelX.as_matrix()\nX = B[:,6:209] #get all subject without term,year,province,schGpa\npredictions = clf2.predict(X)\nGrade=['A', 'B', 'C' , 'D' , 'F' , 'W' , 'S' , 'U' ,'na']\n# map each predicted class index to its grade label\ngrade_predicted = [Grade[::-1][g] for g in predictions]\nprint \"prediction of %s:\"%subject,grade_predicted\n\n" }, { "alpha_fraction": 0.530386745929718, "alphanum_fraction": 0.5782688856124878, "avg_line_length": 26.200000762939453, "blob_id": "55c8cf079a8f3d95ad2e7f4f607c145086291477", "content_id": "de3d1b395efdaddd1f74b27dc2f38f16e8ddf811", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 543, "license_type": "permissive", "max_line_length": 97, "num_lines": 20, "path": "/pae/forcast/src/csv/CS_table_No2_No4_new_sqlite.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri Nov 20 11:56:48 2015\n\n@author: Administrator\n\"\"\"\n\n\nimport pandas as pd\nimport pandas.io.sql as pd_sql\nimport sqlite3 as 
sql\n\ndf_file = pd.read_csv('../data/CS_table_No2_No4_new.csv',delimiter=\";\", skip_blank_lines = True, \n error_bad_lines=False,encoding='utf8')\n \n \ncon = sql.connect(\"project.sqlite\")\n#df = pd.DataFrame({'TestData': [1, 2, 3, 4, 5, 6, 7, 8, 9]}, dtype='float')\npd_sql.to_sql(df_file, \"registration\", con)\ncon.close()" }, { "alpha_fraction": 0.5799458026885986, "alphanum_fraction": 0.6178861856460571, "avg_line_length": 32.59090805053711, "blob_id": "828db2a3154304b87c8e4aa357cf3253cf8bac2c", "content_id": "31bcabdaf65c4400e5ae950f859c294a82e9dcdf", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 738, "license_type": "permissive", "max_line_length": 122, "num_lines": 22, "path": "/book/django/project/add_subject.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri Feb 19 16:55:54 2016\n\n@author: Administrator\n\"\"\"\n\nimport pandas as pd\nimport pandas.io.sql as pd_sql\nimport sqlite3 as sql\n\ndf_file = pd.read_csv('CS_table_No2_No4_new.csv',delimiter=\";\", skip_blank_lines = True, \n error_bad_lines=False,encoding='utf8')\ndf_file = df_file.drop(['STUDENTID','ACADYEAR','CAMPUSID','SEMESTER','CURRIC','CAMPUSNAME','SECTIONGROUP','GRADE'],axis=1)\n\ndf_dropDup = df_file.drop_duplicates(['sub_id'], take_last=True)\n \n \ncon = sql.connect(\"db.sqlite3\")\n#df = pd.DataFrame({'TestData': [1, 2, 3, 4, 5, 6, 7, 8, 9]}, dtype='float')\npd_sql.to_sql(df_dropDup, \"mywebpage_subject\", con, index=False)\ncon.close()" }, { "alpha_fraction": 0.5764074921607971, "alphanum_fraction": 0.5973190069198608, "avg_line_length": 26.397058486938477, "blob_id": "fc3cafc48dd4c72b68656ed452407cae0d6a5f4d", "content_id": "88583326473334eaad35fedd67120c38b4be71f4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1865, "license_type": "permissive", "max_line_length": 92, "num_lines": 68, "path": "/pae/forcast/src/csv/CS_table_No2_No4.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Sep 09 14:51:02 2015\n\n@author: Methinee\n\"\"\"\n\nimport pandas as pd\nimport numpy as np\nfrom collections import defaultdict\nfrom astropy.table import Table, Column\n\ndf = pd.read_csv('../data/CS_table_No2_No4_new.csv',delimiter=\";\", skip_blank_lines = True, \n error_bad_lines=False)\n \nheaders=list(df.columns.values)\nsubjects = {'courseId':[]}\nstudents = {'studentId':[]}\nyears = [52,53,54,55,56]\nsemester = [1,2]\nkey_sub = defaultdict(list)\nkey_std = defaultdict(list)\nkey=[]\n\ncountSub = 0\ncountStd = 0\n\n#Create dictionary of list subjects\nfor sub in df[headers[4]]:\n if sub not in subjects['courseId']:\n subjects['courseId'].append(sub)\n countSub = countSub+1\n for keyCol in subjects['courseId']:\n key_sub[countSub] = keyCol \n#print subjects[\"courseId\"]\n#print \"number of subjects are \",countSub\nprint \"-----------------------------------------------\"\nprint key_sub\nprint \"-----------------------------------------------\"\n\n#Create dictionary of list students\nfor std in df[headers[0]]:\n if std not in students['studentId']:\n students['studentId'].append(std)\n countStd = countStd+1\n# for keyRow in students['studentId']:\n# for y in years: \n# students['studentId'].append(y)\n \n#print students['studentId']\n#print \"number of students are \",countStd \nprint \"-----------------------------------------------\"\n\n\n\n#create 
table row are stdId+years+semester, column is key of subjects\ncolumn = key_sub\nt = Table(column , names=(subjects['courseId']))\n\nfirstCol = students\nt = Table(firstCol, names=(firstCol))\nprint t\n \n \n\"\"\"table_No2_No4_out = pd.DataFrame(subjects) \nwriter = pd.ExcelWriter(\"table_No2_No4_fomat.xlsx\")\ntable_No2_No4_out.to_excel(writer,\"grade\")\nwriter.save()\"\"\"\n\n\n" }, { "alpha_fraction": 0.5520535111427307, "alphanum_fraction": 0.5787965655326843, "avg_line_length": 27.324323654174805, "blob_id": "3636653427619e8d780c314c12b428c9faf13325", "content_id": "64d12b2eb8ff0582fabd58f9acff85850848a5d5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1047, "license_type": "permissive", "max_line_length": 97, "num_lines": 37, "path": "/pae/forcast/src/sqlite/student_sqlite.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Feb 18 22:32:19 2016\n\n@author: Administrator\n\"\"\"\n\n#import sqlite3\n#\n#conn = sqlite3.connect('student.sqlite')\n#print \"Opened database successfully\";\n#\n#conn.execute('''CREATE TABLE Student\n# (Sub_id NOT NULL,\n# Sub_name TEXT NOT NULL,\n# description TEXT,\n# credit INT);''')\n#print \"Table created successfully\";\n#\n#conn.close()\n\nimport pandas as pd\nimport pandas.io.sql as pd_sql\nimport sqlite3 as sql\n\n\ndf_file = pd.read_csv('../data/CS_table_No2_No4_new.csv',delimiter=\";\", skip_blank_lines = True, \n error_bad_lines=False,encoding='utf8')\n \ndf = {'Sub_id':df_file['COURSEID'],'Sub_name':df_file['COURSENAME'],'credit':df_file['CREDIT']}\n \ndf_a = pd.DataFrame(df)\n \ncon = sql.connect(\"student.sqlite\")\n#df = pd.DataFrame({'TestData': [1, 2, 3, 4, 5, 6, 7, 8, 9]}, dtype='float')\npd_sql.write_frame(df_a, name='Student',if_exists=\"append\", con=con)\ncon.close()" }, { "alpha_fraction": 0.546875, "alphanum_fraction": 0.5803571343421936, "avg_line_length": 20.380952835083008, "blob_id": "10e57baa7ffb072a7249e286ffc3691955e3d761", "content_id": "554c41749739218748405c82e7f8d155e6e43ee5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 448, "license_type": "permissive", "max_line_length": 44, "num_lines": 21, "path": "/pae/forcast/src/sqlite/testCreate.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Feb 18 22:16:43 2016\n\n@author: Administrator\n\"\"\"\n\nimport sqlite3\n\nconn = sqlite3.connect('enrollment.sqlite')\nprint \"Opened database successfully\";\n\nconn.execute('''CREATE TABLE Enrollment\n (std_id INT PRIMARY KEY NOT NULL,\n sub_id TEXT ,\n grade INT ,\n term INT,\n year INT);''')\nprint \"Table created successfully\";\n\nconn.close()" }, { "alpha_fraction": 0.5235361456871033, "alphanum_fraction": 0.5235361456871033, "avg_line_length": 58.7931022644043, "blob_id": "06c731b03232889e1e8011dccccd0251ab1e229f", "content_id": "099cc308df266de13cc31291908bfe69320c121a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1742, "license_type": "permissive", "max_line_length": 144, "num_lines": 29, "path": "/book/django/webapp/login/views.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "\nfrom django.shortcuts import render_to_response\nfrom django.contrib.auth import authenticate, login\nfrom django.core.context_processors import csrf\nfrom django.template import 
RequestContext\n\n\ndef login_user(request):\n state = \"Please log in below...\"\n username = password = '' #set username&password is empty string\n if request.POST: #request method POST after click submit btn \n username = request.POST.get('username')\n password = request.POST.get('password')\n\n user = authenticate(username=username, password=password) #authen with DB\n \n if user is not None: #find user\n if user.is_active:\n login(request, user)\n state = \"You're successfully logged in!\" #user & pass correct\n else:\n state = \"Your account is not active, please contact the site admin.\" #maybe username expired\n else:\n state = \"Your username and/or password were incorrect.\" #user & pass incorrect\n\n csrfContext = RequestContext(request) \n #return render_to_response('login.html',{'state':state, 'username': username}, csrfContext) #called html file and sent state&username value\n #sent csrfContext\n #return render_to_response('index.html')\n return render_to_response('login.html',{'state':state, 'username': username}, csrfContext)\n\n\n\n " }, { "alpha_fraction": 0.5171717405319214, "alphanum_fraction": 0.5717171430587769, "avg_line_length": 26.38888931274414, "blob_id": "4a329ae34e3c0f05d8d18f15f632e7ec34d38572", "content_id": "d2dfaf63dff892034cf3f7adbf7524c59556a200", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 990, "license_type": "permissive", "max_line_length": 106, "num_lines": 36, "path": "/pae/forcast/src/sqlite/test_filter213.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Feb 11 15:00:30 2016\n\n@author: Methinee\n\"\"\"\n\n\nimport pandas as pd\nimport numpy as np\nimport pickle\n\ndf_file = pd.read_excel('../src/transform.xlsx')\nheaders=list(df_file.columns.values)\n\ndf_file = df_file.fillna(0)\ndf_file = df_file.replace(['A', 'B+', 'B', 'C+', 'C' , 'D+' , 'D' , 'F' , 'W' , 'S' , 'S#' , 'U' , 'U#'], \n [13, 12, 11, 10 , 9, 8, 7, 6, 5, 4, 3, 2, 1])\n \nA = df_file.as_matrix()\ntemp = A[A[:,3]=='CS213']\nL = temp[:,4]\nL = L.astype(np.int64, copy=False)\nI = temp[:,5:]#L.shape\nI = I.astype(np.int64, copy=False)\n\n#S = df_file.as_matrix(columns=['4RESULT'])\n#I = S[A[:,3]=='CS213']\n#T = df_file.as_matrix(columns=['3COURSEID','4RESULT']\n\nwith open('train/dataset00.pic', 'wb') as pickleFile:\n #write label and feature vector\n theta_dim=1\n clmax = 14\n theta_range = I.shape[1]\n pickle.dump((clmax,theta_dim,theta_range,len(L),L,I,None), pickleFile, pickle.HIGHEST_PROTOCOL)\n " }, { "alpha_fraction": 0.5400843620300293, "alphanum_fraction": 0.5766525864601135, "avg_line_length": 29.95652198791504, "blob_id": "e5da8feda70c92f51c1eb247c65eb33049295a4f", "content_id": "72d5932440e19e593b5c93850781025864f194f3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 711, "license_type": "permissive", "max_line_length": 97, "num_lines": 23, "path": "/pae/forcast/src/create_sub_dataframe.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri Feb 19 17:38:25 2016\n\n@author: Administrator\n\"\"\"\n\nimport pandas as pd\nimport pandas.io.sql as pd_sql\nimport sqlite3 as sql\n\ndf_file = pd.read_csv('../data/CS_table_No2_No4_new.csv',delimiter=\";\", skip_blank_lines = True, \n error_bad_lines=False,encoding='utf8')\n \ndf = {'Sub_id':df_file['COURSEID'],'Sub_name':df_file['COURSENAME'],\n 
'credit':df_file['CREDIT']}\n \ndf_a = pd.DataFrame(df)\n \ncon = sql.connect(\"subject.sqlite\")\n#df = pd.DataFrame({'TestData': [1, 2, 3, 4, 5, 6, 7, 8, 9]}, dtype='float')\npd_sql.write_frame(df_a, name='Subject',if_exists=\"append\", con=con)\ncon.close()" }, { "alpha_fraction": 0.7368420958518982, "alphanum_fraction": 0.7368420958518982, "avg_line_length": 9, "blob_id": "1d03302b02692abcf9a47b7191b38c4bc80829d7", "content_id": "e9d77e2e084b4fe0771eb69f0c0e6d005b8efc16", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 19, "license_type": "permissive", "max_line_length": 10, "num_lines": 2, "path": "/README.md", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# book_pae\nHello me" }, { "alpha_fraction": 0.5351681709289551, "alphanum_fraction": 0.5932721495628357, "avg_line_length": 20, "blob_id": "c42683380db96755a8e830bf223c0468e183b68c", "content_id": "717a3668c842ea6e919c0298983583b74ef751e5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 327, "license_type": "permissive", "max_line_length": 80, "num_lines": 15, "path": "/pae/forcast/src/dataset_train_cs213.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Tue Mar 29 22:44:14 2016\n\n@author: Methinee\n\"\"\"\nimport pandas as pd\nimport numpy as np\nimport pickle\n\nfor i in range(0,5):\n f = open(\"train/dataset%02d.pic\"%(i), 'rb') # 'rb' for reading binary file\n mydict = \"mydict%02d\"%(i)\n mydict = pickle.load(f)\n f.close() " }, { "alpha_fraction": 0.5854922533035278, "alphanum_fraction": 0.6580311059951782, "avg_line_length": 16.636363983154297, "blob_id": "bfb07b26e18d0f0dd24a93a29617fc497f8f08a4", "content_id": "483ab0fb6142733e48d60b14221a5ad55e1bf999", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 193, "license_type": "permissive", "max_line_length": 68, "num_lines": 11, "path": "/pae/forcast/src/csv/CS_table_No5_A.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Sep 09 15:12:07 2015\n\n@author: Administrator\n\"\"\"\n\nimport pandas as pd\ndata = pd.read_csv('D:\\\\project\\\\forcast\\\\data\\\\CS_table_No5_A.csv')\n\nprint data" }, { "alpha_fraction": 0.5602836608886719, "alphanum_fraction": 0.588652491569519, "avg_line_length": 28.939393997192383, "blob_id": "4e2b9c49ab690246388fdb12f20c777732597107", "content_id": "c192942b8e8554952450ad9201ce98075bb6734e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 987, "license_type": "permissive", "max_line_length": 97, "num_lines": 33, "path": "/pae/final_code/src/create_sub_dataframe_uptodate.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Fri Feb 19 22:12:25 2016\n\n@author: Methinee\n\"\"\"\n\nimport pandas as pd\nimport numpy as np\nimport pandas.io.sql as pd_sql\nimport sqlite3 as sql\n\n\ndf_file = pd.read_csv('../data/CS_table_No2_No4_new.csv',delimiter=\";\", skip_blank_lines = True, \n error_bad_lines=False,encoding='utf8')\n \n \ndf = {'sub_id':df_file['COURSEID'],'sub_name':df_file['COURSENAME'],\n 'credit':df_file['CREDIT']}\n \ndf = pd.DataFrame(df) \ndf_a = df.drop_duplicates('sub_id')\ndf_a = df_a[df_a['credit'] != 0]\n \nwriter = 
pd.ExcelWriter(\"create_uptodate.xlsx\")\npd.DataFrame(df_a).to_excel(writer,\"grade\")\nwriter.save() \n \n \ncon = sql.connect(\"D:/project/GitHub/book_pae/book/django/project/db.sqlite3\")\n#df = pd.DataFrame({'TestData': [1, 2, 3, 4, 5, 6, 7, 8, 9]}, dtype='float')\npd_sql.write_frame(df_a, name='mywebpage_subject',if_exists=\"append\", con=con)\ncon.close()" }, { "alpha_fraction": 0.6583514213562012, "alphanum_fraction": 0.6800433993339539, "avg_line_length": 30.827587127685547, "blob_id": "e39a08afe234ca051c24d74f9cd40284ce0b36e0", "content_id": "575f9f16403f5a361fcbffc8bc036a2c18601feb", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 922, "license_type": "permissive", "max_line_length": 118, "num_lines": 29, "path": "/pae/forcast/src/sqlite/test_sqlite.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Sun Nov 15 14:32:26 2015\n\n@author: Administrator\n\"\"\"\n\nimport csv, sqlite3\nimport pandas as pd\n\n#Create table and import csv file in sqlite3 database\n#con = sqlite3.connect(\"../sqlite/test\")\n#cur = con.cursor()\n#cur.execute(\"CREATE TABLE 'testSqlite1' ('policyID', 'statecode', 'county', 'eq_site_limit');\")\n#with open('../sqlite/test.csv','rb') as fin:\n# dr = csv.DictReader(fin) # comma is default delimiter\n# to_db = [(i['policyID'], i['statecode'], i['county'], i['eq_site_limit']) for i in dr]\n#cur.executemany(\"INSERT INTO 'testSqlite1' (policyID, statecode, county, eq_site_limit) VALUES (?, ?, ?, ?);\", to_db)\n#con.commit()\n\n\n\n# Read sqlite query results into a pandas DataFrame\ncon = sqlite3.connect(\"../sqlite/test\")\ndf = pd.read_sql_query(\"SELECT eq_site_limit from testSqlite1\", con)\n# verify that result of SQL query is stored in the dataframe\nprint df\n\ncon.close()" }, { "alpha_fraction": 0.5846154093742371, "alphanum_fraction": 0.656410276889801, "avg_line_length": 16.81818199157715, "blob_id": "b5c7d13d450b2b642d26f5f3f073f08b0e8fa5b8", "content_id": "8b37a85f0e33ca8294f024117698d2b43f3d0569", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 195, "license_type": "permissive", "max_line_length": 70, "num_lines": 11, "path": "/pae/forcast/src/csv/CS_table_No5_B_C.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Sep 09 15:24:34 2015\n\n@author: Administrator\n\"\"\"\n\nimport pandas as pd\ndata = pd.read_csv('D:\\\\project\\\\forcast\\\\data\\\\CS_table_No5_B_C.csv')\n\nprint data" }, { "alpha_fraction": 0.36072424054145813, "alphanum_fraction": 0.42513927817344666, "avg_line_length": 36.31168746948242, "blob_id": "a5e147a3e8dd01eaffeb7fa67eeb79d1d8aea5f3", "content_id": "32268a9e9c874bf26f544d62b77dea1fc4bead2b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2872, "license_type": "permissive", "max_line_length": 84, "num_lines": 77, "path": "/book/graphviz/json_dict.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Jan 20 11:04:32 2016\n\n@author: BOOK\n\"\"\"\nimport json\nout={}\nnode=[]\nedges=[]\nwith open(\"graph1.plain-ext\") as f:\n content = f.readlines()\nfor n in content[1:]:\n line = n.split()\n \n if line[0] == \"node\":\n #print line[1:4]\n node.append({\"COURSE_ID\":line[1],\"Y\":line[2],\"X\":line[3]})\n elif line[0] == \"edge\":\n 
linepath = []\n for i in xrange(int(line[3])):\n x = line[4+(i*2)]\n y = line[5+(i*2)]\n mydict = {\"x\":x,\"y\":y}\n linepath.append(mydict)\n edges.append(linepath) \nout[\"node\"]=node\nout[\"edges\"]=edges \n #co_x = {\"x\":x}\n #co_y = {\"y\":y}\n #print co_path\n #out.append({\"head\":line[1],\"tail\":line[2],\"x\":co_x,\"y\":co_y})\n #out.append({\"x\":x,\"y\":y})\n #out.append({\"x\":x ,\"y\":y})\n #count = line[3] #count is number of control point\n #print line\n #for n in count:\n #out.append({\"x\":line[4],\"y\":line[5]})\n #out.append({\"head\":line[1],\"tail\":line[2],\"x\":line[4],\"y\":line[5]})\n #if line[3] == \"4\":\n #for n in line[4:12]:\n #intCo = [map(int, n) for n in line[4]]\n #intCo = float(line[4])\n #n = range(line[4],line[12],2)\n #n = line[4:12]\n #coordinate = [line]\n #out.append({\"x\"'{0}'.format(line)})\n \n #print 'x{0} y{1}'.format(line[0:2])\n #elif line[3] == \"7\":\n #l = line[4:18]\n #print l\n #elif line[3] == \"10\":\n #l = line[4:24]\n #print l\n #out.append({\"count\":line[3],\"x\":line[4:12]})\n #for n in line[4:12]:\n #out.append({\"x\":line[4],\"y\":line[5]})\n \n #if line[3] == \"4\":\n #out.append({\"x\":line[4],\"y\":line[5],\"x\":line[6],\"y\":line[7],\n # \"x\":line[8],\"y\":line[9],\"x\":line[10],\"y\":line[11]})\n #elif line[3] == \"7\":\n #out.append({\"x1\":line[4],\"y1\":line[5],\"x2\":line[6],\"y2\":line[7],\n # \"x3\":line[8],\"y3\":line[9],\"x4\":line[10],\"y4\":line[11],\n # \"x5\":line[12],\"y5\":line[13],\"x6\":line[14],\"y6\":line[15],\n # \"x7\":line[16],\"y7\":line[17]})\n #elif line[3] == \"10\":\n #out.append({\"x1\":line[4],\"y1\":line[5],\"x2\":line[6],\"y2\":line[7],\n # \"x3\":line[8],\"y3\":line[9],\"x4\":line[10],\"y4\":line[11],\n # \"x5\":line[12],\"y5\":line[13],\"x6\":line[14],\"y6\":line[15],\n # \"x7\":line[16],\"y7\":line[17],\"x8\":line[18],\"y8\":line[19],\n # \"x9\":line[20],\"y9\":line[21],\"x10\":line[22],\"y10\":line[23]})\nprint out\n\nwith open(\"test.json\",\"w\") as outfile:\n json.dump(out,outfile,sort_keys=True, indent=4, separators=(',',': '))" }, { "alpha_fraction": 0.6178033947944641, "alphanum_fraction": 0.641718327999115, "avg_line_length": 25.541175842285156, "blob_id": "921ecc764785a83c5bb2b01bab4881006d0aaa5f", "content_id": "6a60f87e5192835c35381b70bd08dd64180b101d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2258, "license_type": "permissive", "max_line_length": 97, "num_lines": 85, "path": "/pae/forcast/src/csv/CS_table_No2_No4_new.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Sat Nov 14 16:06:17 2015\n\n@author: Methinee\n\"\"\"\n\nimport pandas as pd\nimport numpy as np\nfrom collections import defaultdict\n\ndf_file = pd.read_csv('../data/CS_table_No2_No4_new.csv',delimiter=\";\", skip_blank_lines = True, \n error_bad_lines=False)\n \nheaders=list(df_file.columns.values)\nsubjects = {'courseId':[]}\nstudents = {'0studentId':[]}\nyears = [52,53,54,55,56]\nterm = [1,2]\nkey_sub = defaultdict(list)\nkey_std = defaultdict(list)\nkey=[]\n\ncountSub = 0\ncountStd = 0\ncountEachSub = 0\ncountEachStd = 0\n\n#Create dictionary of list subjects\nfor sub in df_file[headers[4]]:\n if sub not in subjects['courseId']:\n subjects['courseId'].append(sub)\n countSub = countSub+1\n#print subjects[\"courseId\"]\n#print \"number of subjects are \",countSub\nprint \"-----------------------------------------------\"\n#print key_sub\nprint 
\"-----------------------------------------------\"\n\n#Create dictionary of list students\nfor std in df_file[headers[0]]:\n if std not in students['0studentId']:\n students['0studentId'].append(std)\n countStd = countStd+1\n\n#print students['0studentId']\n#print \"number of students are \",countStd \nprint \"-----------------------------------------------\"\n\nsubjects['courseId'].sort()\n\n#Loop for giving all subject in dictionary len is num of students\nfor eachSub in subjects[\"courseId\"]:\n #print eachSub\n countEachSub = countEachSub+1\n key_sub[countEachSub] = eachSub\n#print key_sub\n \nfor eachStd in students['0studentId']:\n #print eachSub\n countEachStd = countEachStd+1\n key_std[countEachStd] = eachStd\n\n#Create column with all subjects\ni = 1\nfor i in subjects[\"courseId\"]:\n #print i\n students[i] = np.empty(len(students['0studentId']))\n students[i][:]=np.NAN\n\nstudents['year'] = np.empty(len(students['0studentId']))\nstudents['term'] = np.empty(len(students['0studentId']))\nstudents['1CourseId'] = np.empty(len(students['0studentId']))\ndf_students = pd.DataFrame(students)\n\n\n#Add grade into column subject\nfor record in df_file.values:\n student_grade = record[10]\nprint df_file[headers[10]]\n\n\nwriter = pd.ExcelWriter(\"table_No2_No4_new.xlsx\")\npd.DataFrame(students).to_excel(writer,\"grade\")\nwriter.save()\n\n\n" }, { "alpha_fraction": 0.7142857313156128, "alphanum_fraction": 0.7142857313156128, "avg_line_length": 49.894737243652344, "blob_id": "3ac6d4b33cf71a231099c24dfb36b7daa6a89334", "content_id": "aad04541a2cf2b24546041d95dbfcc477ea4574a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 966, "license_type": "permissive", "max_line_length": 83, "num_lines": 19, "path": "/book/django/project/mywebpage/urls.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "from django.conf.urls import url\nfrom . 
import views\n\nurlpatterns = [\n\turl(r'^showprofile/$', views.showprofile, name='showprofile'),\n\turl(r'^home/$', views.home, name='home'),\n\turl(r'^homegraph/$', views.homegraph, name='homegraph'),\n\turl(r'^addprofile/$', views.addprofile, name='addprofile'),\n\turl(r'^predict/$', views.predict, name='predict'),\n\turl(r'^userprofile/$', views.userprofile, name='userprofile'),\n\turl(r'^jsonSubject.json$', views.jsonSubject, name='jsonSubject'), #get data\n\turl(r'^jsonEnrollment.json$', views.jsonEnrollment, name='jsonEnrollment'),\n\turl(r'^jsonStudent.json$', views.jsonStudent, name='jsonStudent'),\n\turl(r'^jsonProvience.json$', views.jsonProvience, name='jsonProvience'),\n\turl(r'^coordinate_predict$', views.coordinate_predict, name='coordinate_predict'),\n\turl(r'^coordinate_home$', views.coordinate_home, name='coordinate_home'),\n\turl(r'^test$', views.test, name='test'),\n\turl(r'^testcoor$', views.testcoor, name='testcoor'),\n]" }, { "alpha_fraction": 0.46773120760917664, "alphanum_fraction": 0.528276801109314, "avg_line_length": 26.814815521240234, "blob_id": "003f1645befd77e8a526b66b81db1b8c2153344a", "content_id": "41a794e16ff7b42e46a0594fba4bfd4b47922c4d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1503, "license_type": "permissive", "max_line_length": 106, "num_lines": 54, "path": "/pae/final_code/src/dataset213.py", "repo_name": "wasit7/book_pae", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Feb 25 23:15:03 2016\n\n@author: Methinee\n\"\"\"\n\nimport pandas as pd\nimport numpy as np\nimport pickle\nfrom collections import defaultdict\n\ndf_file = pd.read_excel('../src/transform.xlsx')\nheaders=list(df_file.columns.values)\n\ndf_file = df_file.fillna(0)\ndf_file = df_file.replace(['A', 'B+', 'B', 'C+', 'C' , 'D+' , 'D' , 'F' , 'W' , 'S' , 'S#' , 'U' , 'U#'], \n [13, 12, 11, 10 , 9, 8, 7, 6, 5, 4, 3, 2, 1])\n \n#filter just cs213 in column #3\ncs213 = df_file.loc[df_file['3COURSEID'] == 'CS213']\ncount = len(cs213)\n\n#select row from 20% \na = 0.2*count\na = int(a)\na = a\n\ndataset = defaultdict(list) \ndf_matrix = df_file.as_matrix()\ncs213 = df_matrix[df_matrix[:,3]=='CS213']\nfor i in range(0,5):\n dataset_213 = cs213[a*i:a*(i+1),:]\n dataset[i] = dataset_213 \n print \"dataset0%d\"%(i)\n print dataset[i]\n #print \"dataset_213\",dataset_213\n L = \"L%02d\"%(i)\n I = \"I%02d\"%(i)\n L = dataset[i][:,4]\n L = L.astype(np.int64, copy=False)\n I = dataset[i][:,5:]#L.shape\n I = I.astype(np.int64, copy=False)\n\n #save pickle file\n f = \"train/dataset%02d.pic\"%(i)\n print f\n with open(f, 'wb') as pickleFile:\n theta_dim=1\n clmax = 14\n theta_range = I.shape[1]\n pickle.dump((clmax,theta_dim,theta_range,len(L),L,I,None), pickleFile, pickle.HIGHEST_PROTOCOL) \n \n print \"-------------------------------------------------------\"\n\n" } ]
52
jgontrum/histogramPostprocessor
https://github.com/jgontrum/histogramPostprocessor
8ecd21734064e62f4c41173208bf0255c9e49bc2
a4a39e23a32354ef557a6e23b14d064f4f2ccced
e8c5b720e05e0eb046d239f6b0e284831b6c70c6
refs/heads/master
2021-01-20T00:56:41.813034
2015-01-04T16:53:31
2015-01-04T16:53:31
28,779,924
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6995223760604858, "alphanum_fraction": 0.7134172916412354, "avg_line_length": 31, "blob_id": "7abe8c7bb2a3990aba332119c318dae7581af78f", "content_id": "0ff1d88ed80b82dadf313d8a162cbdcbf33b5992", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2303, "license_type": "no_license", "max_line_length": 132, "num_lines": 72, "path": "/Datatools.py", "repo_name": "jgontrum/histogramPostprocessor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# !/usr/bin/env python\n__author__ = 'Johannes Gontrum <[email protected]>'\n\nfrom operator import itemgetter\nimport IntervalFinder\nimport pytz\nfrom datetime import datetime,timedelta\nimport calendar\n\ndeTimezone = pytz.timezone(\"Europe/Berlin\")\n\n\"\"\"\nLet list1 and list2 be two lists of tuples. This reduce function\nwill merge the lists by summing up the second values of the tuples.\n\"\"\"\ndef reduceLists(list1, list2):\n    returnList = list()\n    mergedList = sorted(list1 + list2, key=itemgetter(0))\n\n    index = 0\n\n    while index < len(mergedList):\n        endIndex = IntervalFinder.getEndOfCurrentInterval(mergedList, index)\n\n        sum = 0\n        key = mergedList[index][0]\n        for i in range(index, endIndex):\n            sum += mergedList[i][1]\n\n        returnList.append((key, sum))\n\n        index = endIndex\n\n    return returnList\n\ndef sumTupleList(tupleList):\n    ret = 0\n    for key, value in tupleList:\n        ret += value\n    # return the accumulated sum over all tuple values\n    return ret\n\n\"\"\"\nTakes the UNIX timestamp of the current day and returns\nthe timestamp of the next day at 0:00 o'clock.\n\"\"\"\ndef getNextDayTimestamp(currentday):\n    dt = getGermanTimestamp(currentday)\n    midnight = dt.replace(hour=0, minute=0, second=0, microsecond=0)\n    tomorrow = midnight + timedelta(days=1)\n    # Convert it to a timestamp and back again to avoid problems with DST\n    ts = calendar.timegm(tomorrow.timetuple())\n    nextday = getGermanTimestamp(ts).replace(hour=0)\n    return calendar.timegm(nextday.utctimetuple())\n\n\n\"\"\"\nLike getNextDayTimestamp, but returns the timestamp of\nthe first day of the next month at 0:00 o'clock.\n\"\"\"\ndef getNextMonthTimestamp(currentmonth):\n    dt = getGermanTimestamp(currentmonth)\n    midnight = dt.replace(hour=0, minute=0, second=0, microsecond=0)\n    nextday = midnight.replace(day=calendar.monthrange(midnight.year, midnight.month)[1]).astimezone(deTimezone) + timedelta(days=1)\n    # Convert it to a timestamp and back again to avoid problems with DST\n    ts = calendar.timegm(nextday.timetuple())\n    nextday = getGermanTimestamp(ts).replace(hour=0)\n    return calendar.timegm(nextday.utctimetuple())\n\n\"\"\" Returns a datetime object from a UNIX timestamp that is interpreted in the right timezone \"\"\"\ndef getGermanTimestamp(timestamp):\n    return datetime.fromtimestamp(timestamp, tz=deTimezone)" }, { "alpha_fraction": 0.5271317958831787, "alphanum_fraction": 0.5348837375640869, "avg_line_length": 28.788461685180664, "blob_id": "305955eacea48c1700b4d7ae2dd22fdd3b148f25", "content_id": "d37990b506e50dbfc5261ea1217fd705b7033cf8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1548, "license_type": "no_license", "max_line_length": 140, "num_lines": 52, "path": "/LaTeXOutput.py", "repo_name": "jgontrum/histogramPostprocessor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# !/usr/bin/env python\n__author__ = 'Johannes Gontrum <[email protected]>'\n\nimport Datatools\n\ndef saveMonthlyHistogram(data, caption, file):\n    __saveHistogram(data,caption,file, \"%F\")\n\ndef 
saveDaylyHistogram (data, caption, file):\n __saveHistogram(data, caption, file, \"%F\")\n\ndef __saveHistogram(data, caption, file, xcaption):\n outfile = open(file, 'w')\n\n static = \"\"\"\\\\documentclass[11pt]{{article}}\n\\\\usepackage[T1]{{fontenc}}\n\\\\usepackage{{pgfplots}}\n\\\\usetikzlibrary{{pgfplots.dateplot}}\n\\\\usepgfplotslibrary{{dateplot}}\n\\\\pgfplotsset{{compat=newest}}\n\n\\\\begin{{document}}\n \\\\begin{{figure}}\n \\\\begin{{center}}\n \\\\begin{{tikzpicture}}\n \\\\begin{{axis}}[\n width=0.9\\\\columnwidth,\n height=0.3\\\\textheight,\n date coordinates in=x,\n xticklabel style= {{rotate=90,anchor=near xticklabel}},\n xticklabel=\\\\day.\\\\month.\\\\year,\n scaled x ticks=true\n ]\n\n \\\\addplot[ybar] coordinates {{\n{0}\n }};\n \\\\end{{axis}}\n \\\\end{{tikzpicture}}\n \\\\end{{center}}\n \\\\caption{{{1}}}\n \\\\end{{figure}}\n\\\\end{{document}}\"\"\"\n\n\n # Data\n formatedData = \"\"\n for month, value in data:\n formatedData += \" (\" + str(Datatools.getGermanTimestamp(month).strftime(xcaption)) + \",\" + str(value) + \")\\n\"\n # Last static part\n outfile.write(static.format(formatedData, caption))" }, { "alpha_fraction": 0.5784726142883301, "alphanum_fraction": 0.5805772542953491, "avg_line_length": 33.288658142089844, "blob_id": "aa4326854ebfc7f67a1c786d1489ee52be482652", "content_id": "b4b25e39184927242631b6bff5bdb89b091f5481", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3326, "license_type": "no_license", "max_line_length": 115, "num_lines": 97, "path": "/SignatureReader.py", "repo_name": "jgontrum/histogramPostprocessor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# !/usr/bin/env python\n__author__ = 'Johannes Gontrum <[email protected]>'\n\nclass Signature(object):\n def __init__(self):\n \"\"\"\n Maps IDs to Objects and Objects to IDs.\n :return:\n \"\"\"\n self.idToObject = {}\n self.objectToID = {}\n self.counter = 0\n\n def fromFile(self, filename):\n for line in open(filename, 'r'):\n id, keyword = line.split(',')\n keyword = keyword.rstrip() # remove linebreak\n self.idToObject[int(id)] = keyword\n self.objectToID[keyword] = int(id)\n self.counter = max(self.counter, int(id))\n\n def addObject(self, object):\n id = self.resolveObject(object)\n if id == None:\n self.idToObject[self.counter] = object\n self.objectToID[object] = self.counter\n self.counter += 1\n return self.counter - 1\n else:\n return id\n\n def resolveID(self, id):\n if id in self.idToObject:\n return self.idToObject[id]\n else:\n return None\n\n def resolveObject(self, object):\n if object in self.objectToID:\n return self.objectToID[object]\n else:\n return None\n\n def __str__ (self):\n ret = \"\"\n for i in range(self.counter):\n if i in self.idToObject:\n ret += str(i) + \"\\t <-> \" + self.resolveID(i) + \"\\n\"\n return ret[:-1]\n\nclass KeywordHierarchy(object):\n def __init__(self, filename, keywordSignature):\n \"\"\"\n Displays the hierarchy of the keywords, as described in BMBF_Arbeitspapier_Keywords_Energiewende.pdf, pp.70\n :param filename:\n :return:\n \"\"\"\n self.dimensionSignature = Signature()\n self.subdimensionSignature = Signature()\n\n self.dimensions = {} # Maps a dimension to a list of subdimensions\n self.subdimensions = {} # Maps a subdimension to a list of keyword IDs\n\n for line in open(filename, 'r'):\n dimension, subdimension, keyword = line.split(',')\n\n dimensionID = self.dimensionSignature.addObject(dimension)\n subdimensionID = 
self.subdimensionSignature.addObject(subdimension)\n keyword = keyword.rstrip() # remove linebreak\n\n # Subdimension -> Keyword\n if subdimensionID in self.subdimensions:\n self.subdimensions[subdimensionID].add(keywordSignature.resolveObject(keyword))\n else:\n self.subdimensions[subdimensionID] = set([keywordSignature.resolveObject(keyword)])\n\n # Dimension -> Subdimensions\n if dimensionID in self.dimensions:\n self.dimensions[dimensionID].add(subdimensionID)\n else:\n self.dimensions[dimensionID] = set([subdimensionID])\n\n def getKeywordsForDimension(self, dimensionID):\n if dimensionID in self.dimensions:\n ret = []\n for subdimensionID in self.dimensions[dimensionID]:\n ret += self.getKeywordsForSubdimension(subdimensionID)\n return ret\n else:\n return []\n\n def getKeywordsForSubdimension(self, subdimensionID):\n if subdimensionID in self.subdimensions:\n return self.subdimensions[subdimensionID]\n else:\n return []\n" }, { "alpha_fraction": 0.6116828322410583, "alphanum_fraction": 0.6168280839920044, "avg_line_length": 33.41666793823242, "blob_id": "227ad2193da86d4df75bbacf8410e2ff562af88b", "content_id": "daa0aa79b320d2bd3238e0a67448580681b28a9e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3304, "license_type": "no_license", "max_line_length": 92, "num_lines": 96, "path": "/Console.py", "repo_name": "jgontrum/histogramPostprocessor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# !/usr/bin/env python\n__author__ = 'Johannes Gontrum <[email protected]>'\n\nimport sys\nimport Histogram\nimport LaTeXOutput\nimport Analysis\nfrom SignatureReader import Signature, KeywordHierarchy\n\nif len(sys.argv) != 5:\n print \"Syntax: mia_result.csv signature.csv hierachy.csv [month/day]\"\n sys.exit(1)\n\n# Create Histogram.\nprint \"Preparing data... 
\"\nhistogram = None\nif sys.argv[4] == 'month':\n histogram = Histogram.getMonthlyHistogram(sys.argv[1])\nelif sys.argv[4] == 'day':\n histogram = Histogram.getDaylyHistogram(sys.argv[1])\nelse:\n print \"Choose between 'month' and 'day'!\"\n sys.exit(1)\n\n# Read in the signature\nkeywordSignature = Signature()\nkeywordSignature.fromFile(sys.argv[2])\n\n# Read in hierarchy\nkeywordHierarchy = KeywordHierarchy(sys.argv[3], keywordSignature)\n\nprint \"Enter a command [help/exit]:\"\n# Console\ninput = None\nwhile input != \"exit\":\n input = raw_input()\n if input == \"help\":\n print \"\"\"Commands:\n help This help\n exit Leave the program\n show keywords Show IDs of the keywords\n show subdimensions Show IDs of the subdimensions\n show dimensions Show IDs of the dimensions\n create general [.tex] [caption] Creates an overall histogram in the tex file.\n create [k/s/d] [.tex] [caption] [keywords/subdimensions/dimensions]\"\"\"\n if input == \"show keywords\" or input == \"show k\":\n print keywordSignature\n if input == \"show subdimensions\" or input == \"show s\":\n print keywordHierarchy.subdimensionSignature\n if input == \"show dimensions\" or input == \"show d\":\n print keywordHierarchy.dimensionSignature\n if input.startswith(\"create\"):\n line = input.split()\n if len(line) < 4:\n print \"Error: Not enough arguments given.\"\n continue\n mode = line[1]\n texfile = line[2]\n caption = line[3]\n\n # Catch general case\n if mode == \"general\" or mode == \"g\":\n histogram_data = Analysis.generalHistogram(histogram)\n if sys.argv[4] == 'month':\n LaTeXOutput.saveMonthlyHistogram(histogram_data, caption, texfile)\n else:\n LaTeXOutput.saveDaylyHistogram(histogram_data, caption, texfile)\n print \"Histogram created.\"\n continue\n\n # More specific cases:\n ids = []\n for id in line[4].split(','):\n ids.append(int(id))\n\n # Find keywords\n keywords = []\n if mode == \"k\":\n keywords = ids\n if mode == \"s\":\n for subdimensionID in ids:\n keywords.append(keywordHierarchy.getKeywordsForSubdimension(subdimensionID))\n if mode == \"d\":\n for dimensionID in ids:\n keywords += list(keywordHierarchy.getKeywordsForDimension(dimensionID))\n\n # Processing the keywords\n print \"Creating a histogram for keywords \" , keywords\n histogram_data = Analysis.specificHistogram(histogram, keywords)\n if sys.argv[4] == 'month':\n LaTeXOutput.saveMonthlyHistogram(histogram_data, caption, texfile)\n else:\n LaTeXOutput.saveDaylyHistogram(histogram_data, caption, texfile)\n print \"Histogram created.\"\n continue\n" }, { "alpha_fraction": 0.6095890402793884, "alphanum_fraction": 0.611872136592865, "avg_line_length": 25.57575798034668, "blob_id": "1ff994e2764ba5cd73e41cd17cc46d3bb1126e84", "content_id": "d14d47aae0608703a99f1a483ab3d1eccb94f506", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 876, "license_type": "no_license", "max_line_length": 69, "num_lines": 33, "path": "/Analysis.py", "repo_name": "jgontrum/histogramPostprocessor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# !/usr/bin/env python\n__author__ = 'Johannes Gontrum <[email protected]>'\n\nimport Datatools\n\ndef generalHistogram(data):\n \"\"\"\n Creates a histogram for all words.\n :param data: Sorted tuples of timestamp and a list of occurrences\n :return: List of tuples of timestamp and count\n \"\"\"\n ret = []\n for time, lst in data:\n ret.append((time, Datatools.sumTupleList(lst)))\n return ret\n\n\ndef specificHistogram(data, words):\n \"\"\"\n 
Creates a histogram for given words.\n :param data: Sorted tuples of timestamp and a list of occurrences\n :param words: Collection of word ids to track\n :return: List of tuples of timestamp and count\n \"\"\"\n ret = []\n for time, lst in data:\n score = 0\n for key, value in lst:\n if key in words:\n score += value\n ret.append((time, score))\n return ret" }, { "alpha_fraction": 0.5880979895591736, "alphanum_fraction": 0.6015169024467468, "avg_line_length": 27.58333396911621, "blob_id": "3c8e43ecd51a9a6f98d56680858fd37122b72311", "content_id": "4cc10e10216efa862a1301dc7a4fd578bd29368a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1714, "license_type": "no_license", "max_line_length": 77, "num_lines": 60, "path": "/CSVReader.py", "repo_name": "jgontrum/histogramPostprocessor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# !/usr/bin/env python\n__author__ = 'Johannes Gontrum <[email protected]>'\n\nimport re\nfrom operator import itemgetter\n\"\"\"\nReads a CSV that is generated by request to MIA.\nReturns a sorted list of triples of timestamp, publisher and bag of tuples.\n\"\"\"\ndef readCSV(path):\n timestampPublisherDataTuples = list()\n firstline = True\n\n for line in open(path, 'r'):\n # Skip first line in file that describes the layout of the columns\n if firstline:\n firstline = False\n continue\n\n fields = line.split(\";\")\n # Assumption: Three columns: Publisher, Date, Bag of Tuples\n publisher = __removeQuotation(__removeEndline(fields[0]))\n timestamp = int(__removeQuotation(__removeEndline(fields[1]))) / 1000\n bag = __parseBag(__removeQuotation(__removeEndline(fields[2])))\n\n insert = (timestamp, publisher, bag)\n timestampPublisherDataTuples.append(insert)\n\n return sorted(timestampPublisherDataTuples, key=itemgetter(0))\n\ndef __removeEndline(word):\n ret = word\n if word[-1] == \"\\n\":\n ret = ret[:-1]\n return ret\n\ndef __removeQuotation(word):\n ret = word\n if word[0] == '\"':\n ret = ret[1:]\n if word[-1] == '\"':\n ret = ret[0:-1]\n return ret\n\n# Returns a list of tuples\ndef __parseBag(bag):\n returnList = list()\n # Remove curly brackets\n currentBag = bag[1:-1]\n\n # split with regex\n for item in re.split(\"\\),\\(|\\(|\\)\", currentBag):\n if len(item) > 0:\n # item is styled like '3,5' (w/o the quotation marks)\n tupleRaw = item.split(\",\")\n tuple = (int(tupleRaw[0]), int(tupleRaw[1]))\n returnList.append(tuple)\n\n return returnList" }, { "alpha_fraction": 0.5514626502990723, "alphanum_fraction": 0.5601300001144409, "avg_line_length": 22.564102172851562, "blob_id": "91316c6e72615a73553fb31d673b51e4ed921b4c", "content_id": "785bbf913c8da6fddf667725160199ca18f35112", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 923, "license_type": "no_license", "max_line_length": 62, "num_lines": 39, "path": "/IntervalFinder.py", "repo_name": "jgontrum/histogramPostprocessor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# !/usr/bin/env python\n__author__ = 'Johannes Gontrum <[email protected]>'\n\n\"\"\"\nTODO: docu\nReturns a tuple of the first index and the last index in data.\n\"\"\"\ndef getIntervalByTimestamp(data, begin, end):\n beginIndex = -1\n endIndex = -1\n\n for i in range(len(data)):\n timestamp = data[i][0]\n\n # still looking for the start index\n if beginIndex < 0:\n if timestamp >= begin:\n beginIndex = i\n # start index found, now searching the end index\n else:\n if timestamp >= end:\n endIndex = 
i\n return (beginIndex, endIndex)\n\n # No end index found: Set it to last index\n endIndex = len(data) - 1\n return (beginIndex, endIndex)\n\n\n\ndef getEndOfCurrentInterval(data, index):\n key = data[index][0]\n\n for i in range(index, len(data)):\n if data[i][0] != key:\n return i\n\n return len(data)\n\n\n\n\n" }, { "alpha_fraction": 0.6853856444358826, "alphanum_fraction": 0.6874154210090637, "avg_line_length": 30.46808433532715, "blob_id": "c77972131b47b22815320895f3faca69fbd716d1", "content_id": "a06271ebae4e613abf00348c1ce67883b0a11aff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1478, "license_type": "no_license", "max_line_length": 91, "num_lines": 47, "path": "/Histogram.py", "repo_name": "jgontrum/histogramPostprocessor", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# !/usr/bin/env python\n__author__ = 'Johannes Gontrum <[email protected]>'\n\nimport CSVReader\nimport Datatools\nimport Analysis\n\nsortedData = None\n\ndef __sort(sortedData, thresholdFunction):\n \"\"\"\n Takes read in data from CSVReader.readCSV and merges it (defined by the fiven function)\n :param sortedData: output of CSVReader.readCSV\n :param thresholdFunction: Returns the next day/month/... for a given timestamp.\n :return: Sorted tuples of timestamp and a list of occurrences\n \"\"\"\n mergedData = []\n currentList = []\n\n begin = sortedData[0][0]\n threshold = thresholdFunction(begin)\n\n # Sort and collect the data by month\n for timestamp, publisher, data in sortedData:\n if timestamp >= threshold:\n mergedData.append((begin, currentList))\n currentList = list()\n begin = threshold\n threshold = thresholdFunction(begin)\n currentList = Datatools.reduceLists(currentList, data)\n\n # Append the last month\n mergedData.append((begin, currentList))\n return mergedData\n\ndef getMonthlyHistogram(filename):\n global sortedData\n if sortedData == None:\n sortedData = CSVReader.readCSV(filename)\n return __sort(sortedData, lambda x: Datatools.getNextMonthTimestamp(x))\n\ndef getDaylyHistogram(filename):\n global sortedData\n if sortedData == None:\n sortedData = CSVReader.readCSV(filename)\n return __sort(sortedData, lambda x: Datatools.getNextDayTimestamp(x))" } ]
8
hitcher24x/learning
https://github.com/hitcher24x/learning
88b692727d59eef0737de5e3c2b5660c842a7362
9aec20bf3cbeefe268cdafff62d491de391970a2
6b7426a57a865c6c0dc45da1062230130eadf5dc
refs/heads/master
2021-05-08T13:02:48.297401
2017-11-12T19:43:56
2017-11-12T19:43:56
119,997,417
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7504521012306213, "alphanum_fraction": 0.7522603869438171, "avg_line_length": 28.72222137451172, "blob_id": "28898419a7cd7a0aae344c818433101df6436542", "content_id": "4a4babcdd32139e08dd682f3e5ae8bd8070cdf52", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 553, "license_type": "permissive", "max_line_length": 65, "num_lines": 18, "path": "/Main.py", "repo_name": "hitcher24x/learning", "src_encoding": "UTF-8", "text": "from Baseball import Baseball, Cnp\r\nimport configparser\r\nimport time\r\n\r\n# reads configuration\r\nconfiguration=configparser.ConfigParser()\r\nconfiguration.read(\"parameters.cfg\")\r\n\r\n# parsing the parameters:\r\nbegin=int(configuration.get(\"Parameters\",\"begin\"))\r\nend=int(configuration.get(\"Parameters\",\"end\"))\r\nthreshold=int(configuration.get(\"Parameters\",\"threshold\"))\r\n\r\n# Runs the program with a timer\r\nstart= time.time()\r\nbaseball=Baseball(begin,end,threshold)\r\nbaseball.start_program()\r\nprint (\"Program Ran in\", round(time.time() - start,3), \"seconds\")\r\n" }, { "alpha_fraction": 0.7136824131011963, "alphanum_fraction": 0.7407094836235046, "avg_line_length": 29.33333396911621, "blob_id": "aa791e0c9396484e1cecf1a622b4ed6e8b082ccb", "content_id": "a8058a2bf8572ccef89fd665f15c41f4ff2730c3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1184, "license_type": "permissive", "max_line_length": 175, "num_lines": 39, "path": "/README.md", "repo_name": "hitcher24x/learning", "src_encoding": "UTF-8", "text": "Baseball counting program (python3)\n-----------------------------------------------.\n\n--------Description and Architecture :\n\nThis program downloads files of Major League Baseball data, parses it and returns a list of triples of teams for which at least 50 players have played for all three teams.\n\nThe files are :\n\n-Baseball.py \n-Main.py\n-parameters.cfg\n-requirements.txt\n-Readme.md\n-Test.py\n-Simulated_data_2000_2000.csv\n-Complexity.pdf\n\n* Baseball class handles the program\n\n* Main runs the program with the parameters given\n\n* requirements contains the library pandas\n\n* parameters.cfg contains the parameters : starting year for the study (default=1871), the last year (default=2014), and the Threshold of players in each triplet (default= 50)\n\n* Test of the program is made with unittest\n\n* Simulated_data_2000_2000.csv is created manually and contains 12 lines corresponding to football data (famous players with their corresponding teams)\nThe Test file needs it.\n\n* Complexity.pdf provides a Spatial and Time complexity analysis.\n\n\n---------Run the program:\n\n-Install dependancies : 'pip install -r requirements.txt'\n-Set parameters in parameters.cfg \n-Run 'python3 Main.py'\n\n" }, { "alpha_fraction": 0.5197679400444031, "alphanum_fraction": 0.5305113792419434, "avg_line_length": 40.55555725097656, "blob_id": "e4fea720dba2eeb7fafa0508717faba5a707ac98", "content_id": "1286663f1c4bd7f479615b1b12441f1129225326", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4654, "license_type": "permissive", "max_line_length": 132, "num_lines": 108, "path": "/Baseball.py", "repo_name": "hitcher24x/learning", "src_encoding": "UTF-8", "text": "import pandas as pd\r\n\r\ndef Cnp(n,p, l=None, res=None):\r\n \"\"\" calculates p-uplets among n elements and returns a list of the p-uplets \"\"\"\r\n \r\n if l is 
None: l=[]\r\n if res is None: res=[]\r\n if p==0:\r\n res.append(l)\r\n return res \r\n if n==0:\r\n return res\r\n else:\r\n l1=list(l)\r\n l1.append(n)\r\n Cnp(n-1, p-1, l1, res)\r\n Cnp(n-1, p, l, res)\r\n return res\r\n\r\n\r\nclass Baseball:\r\n def __init__(self,begin,end,threshold):\r\n \r\n self.begin=begin # first year considered for the program\r\n self.end=end # last year considered for the program\r\n self.Threshold=threshold # ex : 50\r\n self.url=\"https://s3.amazonaws.com/dd-interview-data/data_scientist/baseball/appearances/%d/%d-0,000\"\r\n self.liste= pd.DataFrame({'Player_ID': [],'Team': []}) # initialise dataframe\r\n self.dico={} # dictionary of players (keys) and teams associated to each (items)\r\n self.triples={} # dictionary of 3 teams paired (keys) and the number of players associated (items)\r\n\r\n def reader(self):\r\n \"\"\" downloads data from the url \"\"\"\r\n \r\n for i in range (self.begin,self.end+1):\r\n urlbis = self.url % (i,i)\r\n data=pd.read_csv(urlbis,names=[\"Year\",\"Team\",\"League\",\"Player_ID\",\"Player_Name\",\"Total_games_played\",\"Games started\",\r\n \"Games in which player batted\",\"Games in which player appeared on defense\",\r\n \"Games as pitcher\",\"Games as catcher\",\"Games as firstbaseman\",\"Games as secondbaseman\",\r\n \"Games as thirdbaseman\",\"Games as shortstop\",\"Games as leftfielder\",\"Games as centerfielder\",\r\n \"Games as right fielder\",\"Games as outfielder\",\"Games as designated hitter\",\r\n \"Games as pinch hitter\",\"Games as pinch runner\"],usecols=['Player_ID','Team'])\r\n\r\n self.liste=pd.concat([self.liste,data[['Player_ID','Team']]])\r\n\r\n def playerTeams(self):\r\n \"\"\" crosses the dataframe: provides the teams of each player; we use numpy array for faster computations \"\"\"\r\n \r\n for row in self.liste.values:\r\n if row[0] in self.dico:\r\n self.dico[row[0]]=self.dico[row[0]]+\" \"+row[1]\r\n else:\r\n self.dico[row[0]]=row[1]\r\n\r\n def cleaner(self):\r\n \"\"\" puts the teams of a player into a list with no repetition \"\"\"\r\n \"\"\" it also removes players who have played for less than 3 teams \"\"\"\r\n \r\n for player in list(self.dico.keys()): # makes a copy of the keys so that we can modify dico\r\n self.dico[player]=list(set(self.dico[player].split()))\r\n if len(self.dico[player])< 3:\r\n self.dico.pop(player)\r\n\r\n \r\n\r\n def calculator(self):\r\n \"\"\" calculates the number of players that have played for any triple of teams\"\"\"\r\n\r\n for player in self.dico.keys():\r\n cnp=Cnp(len(self.dico[player]),3) # takes every possible choice of \"3 teams among n teams\"\r\n for l in cnp:\r\n\r\n # we sort the triplet to preserve its uniqueness in the counting\r\n triplet=sorted([self.dico[player][l[0]-1],self.dico[player][l[1]-1],self.dico[player][l[2]-1]])\r\n \r\n name=\"%s,%s,%s\"%(triplet[0],triplet[1],triplet[2])\r\n if name in self.triples:\r\n self.triples[name]+=1\r\n else:\r\n self.triples[name]=1\r\n \r\n \r\n def start_program(self):\r\n \"\"\" runs the program \"\"\"\r\n print(\"Parameters : \")\r\n print(\"Begin : %d\" % self.begin)\r\n print(\"End : %d\" % self.end)\r\n print(\"Threshold : %d\" % self.Threshold)\r\n print(\" \")\r\n print(\"Starting Step 1/3 :\")\r\n print(\"Downloading data...\")\r\n self.reader()\r\n print(\"Step 1/3 finished correctly.\")\r\n print(\"Starting Step 2/3 :\")\r\n print(\"Parsing and Cleaning data for each Player...\")\r\n self.playerTeams()\r\n self.cleaner()\r\n print(\"Step 2/3 finished correctly.\")\r\n print(\"Starting Step 
3/3 :\")\r\n print(\"Creating Triplets of Teams and Counting Players...\")\r\n self.calculator()\r\n print(\"Step 3/3 finished correctly.\")\r\n print(\"The Triplets are : \")\r\n Final=[]\r\n for name in self.triples:\r\n if self.triples[name]>=self.Threshold:\r\n print(name)\r\n Final+=[name]\r\n\r\n \r\n \r\n \r\n\r\n\r\n\r\n\r\n \r\n" }, { "alpha_fraction": 0.6172360181808472, "alphanum_fraction": 0.625, "avg_line_length": 32.81081008911133, "blob_id": "3cf7cc7383d19c743bfa946f4b460fa140764f6b", "content_id": "26d8c8c4c73b7501c26a0edfbef3b047c1d09534", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1288, "license_type": "permissive", "max_line_length": 90, "num_lines": 37, "path": "/Test.py", "repo_name": "hitcher24x/learning", "src_encoding": "UTF-8", "text": "import unittest\r\nfrom Baseball import Baseball, Cnp\r\n\r\nclass TestBaseball(unittest.TestCase):\r\n \r\n def test_Baseball(self):\r\n \"\"\" evaluates if the program runs correctly and returns the appropriate results\"\"\"\r\n\r\n\r\n \"\"\"\r\nWe created manually a simulated file, Simulated_data.csv, containing football data:\r\n\r\n- Ronaldo has played for : RealMadrid, InterMilan, ACMilan, Paris\r\n- Figo has played for : RealMadrid, InterMilan, Barcelona, Paris\r\n- Ibrahimovich has played for : Barcelona, InterMilan, ACMilan, Paris\r\n\r\nwith a Threshold of 2 players, we should obtain the following Triplets:\r\n \"\"\"\r\n simulated=['InterMilan,Paris,RealMadrid','Barcelona,InterMilan,Paris',\r\n 'ACMilan,InterMilan,Paris']\r\n print(\"Tests The Program : \")\r\n baseball=Baseball(2000,2000,2)\r\n baseball.url=\"Simulated_data_%d_%d.csv\"\r\n baseball.reader()\r\n baseball.playerTeams()\r\n baseball.cleaner()\r\n baseball.calculator()\r\n compiled=[]\r\n for name in baseball.triples:\r\n if baseball.triples[name]>= baseball.Threshold:\r\n compiled+=[name]\r\n self.assertEqual(set(compiled),set(simulated))\r\n \r\n\r\n \r\nif __name__ == '__main__':\r\n unittest.main()\r\n" } ]
4
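The Baseball.py record above builds its triples with the recursive Cnp helper and then sorts each 3-team tuple by hand before counting. The same counting step can be written with itertools.combinations; this is a sketch under the assumption that dico maps each player to a de-duplicated team list, as produced by playerTeams() and cleaner() above (count_triples itself is not part of the repo):

```python
from collections import Counter
from itertools import combinations

def count_triples(dico, threshold=50):
    """Count players per 3-team combination and keep those >= threshold."""
    triples = Counter()
    for teams in dico.values():
        # sorting once gives every 3-subset a canonical order,
        # so no per-triple sort is needed inside the loop
        for triple in combinations(sorted(teams), 3):
            triples[triple] += 1
    return [t for t, n in triples.items() if n >= threshold]
```

The work per player is still C(n, 3) triples, but the recursive index bookkeeping of Cnp disappears.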
Poet-LiBai/DataAnalysis
https://github.com/Poet-LiBai/DataAnalysis
c8600ba12f5d3da4e2f54b8f79ea6c1047735e07
01716ca6c785644bb0337997e45e08e995ee743e
c9b2b7680ed7e833b35aa1eaaff3a31cc3fc444d
refs/heads/main
2022-12-29T05:21:46.632050
2020-10-12T11:09:39
2020-10-12T11:09:39
303,365,861
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5097651481628418, "alphanum_fraction": 0.5515451431274414, "avg_line_length": 13.380783081054688, "blob_id": "3338b66cdc99ae502744283a782e5d21b82c5bb3", "content_id": "14d6325d4dadadcce2f8134d39ec9f3051d78049", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4857, "license_type": "no_license", "max_line_length": 77, "num_lines": 281, "path": "/LaTex.py", "repo_name": "Poet-LiBai/DataAnalysis", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding: utf-8\n\n# In[9]:\n\n\nfrom IPython.display import Latex\n\n\n# In[10]:\n\n\n# 数学公式的前后要加上 $ 或 \\( 和 \\)\nLatex(r\"$f(x) = 3x + 7$\")\n\n\n# In[11]:\n\n\n# 普通字符在数学公式中含义一样,除了 # $ % & ~ _ ^ \\ { }\n# 若要在数学环境中表示这些符号# $ % & _ { },\n# 需要分别表示为\\# \\$ \\% \\& \\_ \\{ \\},即在个字符前加上\\\n# 上标和下标\n# 用 ^ 来表示上标,用 _ 来表示下标\n# 如果有多个字符做上标或下标,要用{}括起来\nLatex(r\"$\\sum_{i=1}^n a_i=0$\")\n\n\n# In[16]:\n\n\nLatex(r\"$f(x)=x^{x^x}$\")\n\n\n# In[13]:\n\n\n# 希腊字母\n# 大写希腊字母\nLatex(r\"$\\Gamma \\Delta \\Theta \\Lambda \\Xi \\Pi \\Sigma \\Upsilon$\")\n\n\n# In[14]:\n\n\nLatex(r\"$\\Phi \\Psi \\Omega$\")\n\n\n# In[15]:\n\n\n# 小写希腊字母\nLatex(r\"$\\alpha \\beta \\gamma \\delta \\epsilon \\varepsilon \\zeta \\eta$\")\n\n\n# In[17]:\n\n\nLatex(r\"$\\theta \\vartheta \\iota \\kappa \\lambda \\mu \\nu \\xi$\")\n\n\n# In[18]:\n\n\nLatex(r\"$o \\pi \\varpi \\rho \\varrho \\sigma \\varsigma \\tau$\")\n\n\n# In[19]:\n\n\nLatex(r\"$\\upsilon \\phi \\varphi \\chi \\psi \\omega$\")\n\n\n# In[20]:\n\n\n# 大尺寸运算符\nLatex(r\"$\\sum \\prod \\coprod \\int \\iint \\iiint \\oint$\")\n\n\n# In[21]:\n\n\nLatex(r\"$ \\bigvee \\bigwedge \\bigoplus \\bigotimes \\bigodot \\biguplus$\")\n\n\n# In[22]:\n\n\n# 箭头\nLatex(r\"$\\leftarrow \\rightarrow \\Leftarrow \\Rightarrow \\uparrow \\downarrow$\")\n\n\n# In[23]:\n\n\nLatex(r\"$\\nearrow \\searrow \\swarrow \\nwarrow \\leadsto$\")\n\n\n# In[24]:\n\n\nLatex(r\"$ \\iff \\rightleftharpoons$\")\n\n\n# In[25]:\n\n\n# 在公式中插入文本可以通过 \\mbox{text} 在公式中添加text\nLatex(r\"$\\mbox{对任意的$x>0$}, \\mbox{有 }f(x)>0. $\")\n\n\n# In[27]:\n\n\n# 分数及开方\n# \\frac{numerator}{denominator} \\sqrt{expression_r_r_r}表示开平方,\n# \\sqrt[n]{expression_r_r_r} 表示开 n 次方\nLatex(r\"$\\frac{7x+5}{1+y^2} \\sqrt{x^2+y^2} \\sqrt[n]{x^n+y^n}$\")\n\n\n# In[28]:\n\n\n# 省略号(3个点)\n# \\ldots 表示跟文本底线对齐的省略号;\\cdots 表示跟文本中线对齐的省略号\nLatex(r\"$ f(x_1,x_x,\\ldots,x_n)=x_1^2+x_2^2+\\cdots+x_n^2 $\")\n\n\n# In[29]:\n\n\n# 省略号(3个点)\n# \\ldots 表示跟文本底线对齐的省略号;\\cdots 表示跟文本中线对齐的省略号\nLatex(r\"$ f(x_1,x_x,\\ldots,x_n)=x_1^2+x_2^2+\\cdots+x_n^2 $\")\n\n\n# In[30]:\n\n\n# 省略号(3个点)\n# \\ldots 表示跟文本底线对齐的省略号;\\cdots 表示跟文本中线对齐的省略号\nLatex(r\"$ f(x_1,x_x,\\ldots,x_n)=x_1^2+x_2^2+\\cdots+x_n^2 $\")\n\n\n# In[31]:\n\n\n# 括号和分隔符\n#() 和 [ ] 和 | 对应于自己;\n#{} 对应于 \\{ \\};\n#|| 对应于 \\|。\n#当要显示大号的括号或分隔符时,要对应用 \\left 和 \\right\nLatex(r\"$f(x,y,z)=3y^2z\\left(3+\\frac{7x+5}{1+y^2}\\right). $\")\n\n\n# In[32]:\n\n\n# \\left. 和 \\right. 只用与匹配,本身是不显示的\nLatex(r\"$\\left. 
\\frac{du}{dx} \\right|_{x=0}.$\")\n\n\n# In[33]:\n\n\n# 多行的数学公式\n# 其中&是对其点,表示在此对齐。\n# *使latex不自动显示序号,如果想让latex自动标上序号,则把*去掉\nLatex(r\"\"\"\\begin{eqnarray*}\n\\cos 2\\theta & = & \\cos^2 \\theta - \\sin^2 \\theta \\\\\n& = & 2 \\cos^2 \\theta - 1.\n\\end{eqnarray*}\"\"\")\n\n\n# In[35]:\n\n\n#矩阵\n# c表示向中对齐,l表示向左对齐,r表示向右对齐\nLatex(r\"\"\"The \\emph{characteristic polynomial} $\\chi(\\lambda)$ of the\n$3 \\times 3$~matrix\n\\[ \\left( \\begin{array}{ccc}\na & b & c \\\\\nd & e & f \\\\\ng & h & i \\end{array} \\right)\\]\nis given by the formula\n\\[ \\chi(\\lambda) = \\left| \\begin{array}{ccc}\n\\lambda - a & -b & -c \\\\\n-d & \\lambda - e & -f \\\\\n-g & -h & \\lambda - i \\end{array} \\right|.\\]\"\"\")\n\n\n# In[36]:\n\n\n# 导数、极限、求和、积分\nLatex(r\"$\\frac{du}{dt} and \\frac{d^2 u}{dx^2}$\")\n\n\n# In[37]:\n\n\n# 偏导数\nLatex(r\"\"\"\\[ \\frac{\\partial u}{\\partial t}\n= h^2 \\left( \\frac{\\partial^2 u}{\\partial x^2}\n+ \\frac{\\partial^2 u}{\\partial y^2}\n+ \\frac{\\partial^2 u}{\\partial z^2}\\right)\\]\"\"\")\n\n\n# In[38]:\n\n\nLatex(r\"$\\lim_{x \\to +\\infty}, \\inf_{x > s}$\")\n\n\n# In[39]:\n\n\n# 极限\nLatex(r\"\\[ \\lim_{x \\to 0} \\frac{3x^2 +7x^3}{x^2 +5x^4} = 3.\\]\")\n\n\n# In[40]:\n\n\n# 求和\nLatex(r\"\\[ \\sum_{k=1}^n k^2 = \\frac{1}{2} n (n+1).\\]\")\n\n\n# In[41]:\n\n\n# 积分\n# To obtain the correct appearance one \n# should put extra space before the d, using \\,\nLatex(r\"\\[ \\int_a^b f(x)\\,dx.\\]\")\n\n\n# In[42]:\n\n\nLatex(r\"\\[ \\int_0^{+\\infty} x^n e^{-x} \\,dx = n!.\\]\")\n\n\n# In[43]:\n\n\nLatex(r\"\\[ \\int \\cos \\theta \\,d\\theta = \\sin \\theta.\\]\")\n\n\n# In[44]:\n\n\nLatex(r\"\"\"\\[ \\int_{x^2 + y^2 \\leq R^2} f(x,y)\\,dx\\,dy\n= \\int_{\\theta=0}^{2\\pi} \\int_{r=0}^R\nf(r\\cos\\theta,r\\sin\\theta) r\\,dr\\,d\\theta.\\]\"\"\")\n\n\n# In[45]:\n\n\nLatex(r\"\\[ \\int_0^R \\frac{2x\\,dx}{1+x^2} = \\log(1+R^2).\\]\")\n\n\n# In[46]:\n\n\n# The way to improve the appearance of \n# of the integral is to use the control sequence \\! \n# to remove a thin strip of unwanted space\nLatex(r\"\\[ \\int_0^1 \\! \\int_0^1 x^2 y^2\\,dx\\,dy.\\]\")\n\n\n# In[47]:\n\n\nLatex(r\"\\[ \\int_0^1 \\int_0^1 x^2 y^2\\,dx\\,dy.\\]\")\n\n\n# In[ ]:\n\n\n\n\n" } ]
1
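LaTex.py above pushes every snippet through IPython.display.Latex, which only renders when the string carries its own $...$ or \[...\] delimiters. IPython also ships a Math display class that supplies display-math delimiters itself; a minimal sketch, not part of the repo:

```python
from IPython.display import Math, display

# Math wraps the raw expression in display-math mode,
# so no $ or \[ \] delimiters are needed in the string
display(Math(r"\int_0^1 x^2\,dx = \frac{1}{3}"))
```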
svalabs/training-as-code
https://github.com/svalabs/training-as-code
dce9a383b8b76398a4c7806785cc5b5a210bbc59
dbcbdbe056e8bd88ccb082501bc3fff08962a1c8
a3e76477f682a1b4a50276bda623cb27b9b9291c
refs/heads/master
2023-02-28T00:05:43.383675
2021-01-28T11:07:32
2021-01-28T11:07:32
289,033,327
14
1
null
null
null
null
null
[ { "alpha_fraction": 0.675268828868866, "alphanum_fraction": 0.6877419352531433, "avg_line_length": 14.604026794433594, "blob_id": "65990e148cc3809167ce97b2df1bbacd20b31795", "content_id": "2760410163ddb6fe4d1be93ced2956c73ba117f6", "detected_licenses": [ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2333, "license_type": "permissive", "max_line_length": 429, "num_lines": 149, "path": "/01-content.md", "repo_name": "svalabs/training-as-code", "src_encoding": "UTF-8", "text": "class: center, middle\n\n# {{ title }}\n\n## Untertitel\n\n???\n\nNotes for the _first_ slide!\n\n---\n\n{{ agenda }}\n\n---\n\n## Whoami\n\n![:img Paula Pinkepank, 35%](assets/imgs/doge.jpg)\n\n- Detlef Doge\n- Zertifizierter Entschleunigungsbeauftragter\n\n---\n\n## Skills\n\n- Twitter Bootstrap\n- jQuery less\n- GruntJS\n- JSHint\n- JSLint\n- markdown\n- sass\n- jade\n- coffeescript\n\n---\n\n# Motivation\n\nLorem ipsum dolor sit amet, consectetur adipisici elit, sed eiusmod tempor incidunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquid ex ea commodi consequat. Quis aute iure reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint obcaecat cupiditat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\n\n---\n\n# Thema A\n\n## Untertitel\n\n---\n\n# Thema B\n\nHier folgt eine tolle Einführung\n\n---\n\n# Ergänzungen\n\nWeitere Folien vermitteln weitere Zusammenhänge und Informationen..red[*]\n\n.footnote[.red.bold[*] Fußnoten auch.]\n\n--\n\n## Noch mehr Informationen\n\nReicht dann jetzt auch mal.\n\n---\n\n# Code-Beispiel\n\nBeispielhafte Implementation\n\n```js\n import React, { useState } from 'react';\n function Example() {\n const [count, setCount] = useState(0);\n return (\n <div>\n* <p>You clicked {count} times</p>\n* <button onClick={() => setCount(count + 1)}>\n Click me\n </button></div>\n );\n }\n```\n\n(Man beachte die hervorgehobenen Zeilen 6 und 7)\n\n---\n\n## Beispiel in Bash\n\n```bash\n# Check what Linux we are on\nuname -a\n```\n\n---\n\n## Code-Beispiel\n\nElliot Alderson wäre stolz.\n\n![:img Hacker](assets/imgs/hacker.gif)\n\n---\n\n## Katzenbild\n\n![:img Katzenbild](assets/imgs/cat_hacker.gif)\n\n---\n\n# Lab 01: Aufgabe\n\nBeschreibung der Aufgabe\n\n- Akzeptanzkriterium 1\n- Akzeptanzkriterium 2\n- Akzeptanzkriterium 3\n\n---\n\n![:img Lösung, 100%](assets/labs/lab01_01.png)\n\n---\n\n![:img Lösung, 100%](assets/labs/lab01_02.png)\n\n---\n\n# Linkliste\n\n- Ein tolles Projekt: [[klick!]](https://github.com/stdevel/katprep/)\n- Tolles Produkt-Benutzerhandbuch: [[klick!]](https://access.redhat.com/documentation/en-us/red_hat_satellite/6.5/)\n- Tolleres Online-Forum: [[klick!]](https://community.icinga.com/)\n\n---\n\nclass: center, middle\n\n## Danke für die Aufmerksamkeit\n\n### (jetzt bitte wieder aufwachen)\n\n![:img Obama Micdrop, 65%](assets/imgs/obama_micdrop.gif)\n" }, { "alpha_fraction": 0.7194244861602783, "alphanum_fraction": 0.7194244861602783, "avg_line_length": 18.85714340209961, "blob_id": "63bd8d8a292f565404252e3c3e495920ccf639f6", "content_id": "d8e4b166985dd411475bf79f4e6029d2288474e7", "detected_licenses": [ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 139, "license_type": "permissive", "max_line_length": 70, "num_lines": 7, 
"path": "/Terraform/README.md", "repo_name": "svalabs/training-as-code", "src_encoding": "UTF-8", "text": "# Terraform\n\nThis folder contains provider-specific code.\n\n## TODO\n\n- Create a folder for your provider (*e.g. `Azure`*) and add your code\n" }, { "alpha_fraction": 0.575667679309845, "alphanum_fraction": 0.5964391827583313, "avg_line_length": 11.481481552124023, "blob_id": "bdb692cae5b443db1f73b60a4cde280472aaff07", "content_id": "fd043587c584baca7bb7c7a58164b18e4666d8f6", "detected_licenses": [ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 337, "license_type": "permissive", "max_line_length": 44, "num_lines": 27, "path": "/labs.md", "repo_name": "svalabs/training-as-code", "src_encoding": "UTF-8", "text": "# Demo training\n\n## Lab Guide\n\n---\n\n## Formatting\n\n| Format | Description |\n| ------ | ----------- |\n| **Note** | Important hint |\n| `command` | Shell output or command name |\n\n---\n\n## Lab 01: Demo\n\nTODO: Lab description\n\n1. Task 1\n2. Task 2\n3. Yet another task you need to fulfill\n\n```shell\n$ example command\nLorem ipsum doloret...\n```\n" }, { "alpha_fraction": 0.7344681620597839, "alphanum_fraction": 0.7429174780845642, "avg_line_length": 33.68965530395508, "blob_id": "02d881b7afcac2c29b636a4b63bb53c08082f4f5", "content_id": "6827eed1e521edf7b5d5737aea10a23ef980436e", "detected_licenses": [ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 8049, "license_type": "permissive", "max_line_length": 300, "num_lines": 232, "path": "/README.md", "repo_name": "svalabs/training-as-code", "src_encoding": "UTF-8", "text": "# training-as-code\n\nThis is a training template based on [remark.js (*v.0.15.0*)](https://github.com/gnab/remark). It is based on Markdown/JavaScript/HTML and offers the following advantages over conventional presentation software such as LibreOffice Impress and Microsoft PowerPoint:\n\n- It only requires a web browser\n- It also works on smartphones, tablets and other touch-optimized devices\n- It can be used with version control systems such as **Git** as content is ``text/plain``\n- You can use **any text editor** and operating system you like to create and display presentations\n- It's **free** (*and hip!*)\n\n## Automatic builds\n\nWhen used in GitLab all Markdown documents (``*.md``) will be automatically build into a HTML presentation and PDF document which can then be downloaded as job artifact. This allows distributing your slides easily.\n\nTo manually create/update the HTML presentation, run the ``build.py`` utility. 
It requires Python 3.\n\n### Handout\n\nA handout will be created from the file `handout.md`.\nIt has only basic styling applied to it.\n\n## Usage\n\nClone this repository and have a look at the [example training (`01-content.md`)](01-content.md).\nThe format is markdown, you can leverage an editor such as [VSCodium](https://github.com/VSCodium/vscodium) to edit the file and preview changes:\n\n![vscodium](screenshots/vscodium.jpg)\n![browser](screenshots/browser.jpg)\n\nCheck out the following websites for more information about Markdown:\n\n- [https://www.markdowntutorial.com/](https://www.markdowntutorial.com/)\n- [https://www.markdownguide.org/](https://www.markdownguide.org/)\n\nYou can also check out the [official remark.js website](https://remarkjs.com) for further examples.\n\n### Presenter View\n\nThe presenter should download the artifact from the _collect files_ pipeline job and extract it wherever (s)he pleases.\nThen open `presentation.html` with your browser.\n\nPress `?` to see a list of all available commands.\n\nPressing `p` will open a presenter view that shows the current slide, the next slide and speaker notes.\n\n### Multi-Screen Setup\n\nA common setup has one screen that presents to the audience and another that shows the information for the speaker.\n\nTo achieve this press `c` in the presentation in order to clone it.\nThe screens are linked and moving forward on the clone will move the other forward as well.\n\nYou can move the window to another screen and send it into fullscreen in most browsers with `F11`.\n\nBy then pressing `p` on the cloned presentation you will have your presenter view there.\n\n## Markdown macros\n\nremark.js uses some Markdown macros - e.g. for resizing images:\n\n```markdown\n![:img Paula Pinkepank, 25%](assets/imgs/ridley.jpg)\n```\n\nThe value before the colon is the alternative text shown in case the image cannot be loaded. The value after the comma describes the image width as a percentage.\n\nNew slides are defined by the following line:\n\n```markdown\n---\n```\n\nIn order to add presentation notes, use the following code before the next slide:\n\n```markdown\n???\n\nNotes\n```\n\nIncremental slides (*slides that add more information after a button press*) can be added with the following:\n\n```markdown\nThis is already visible\n\n--\n\nThis text will only be visible when going to the next element.\n\n```\n\n## Tweaks\n\n### Set title\n\nYou can simply change the title by altering the [``settings.ini``](settings.ini) file:\n\n```ini\n[meta]\ntitle=Zeit unproduktiv vergeuden leicht gemacht\n```\n\n### Change screen ratio\n\nBy default, the ratio **16:9** is selected, but you can also override this in [``settings.ini``](settings.ini) by changing the ``ratio`` line:\n\n```ini\n[layout]\nratio=4:3\n```\n\n### Set customer name as watermark\n\nBefore sending presentations to customers you can set their company name as watermark. 
To do so, edit [``settings.ini``](settings.ini):\n\n```ini\n[meta]\ntitle=Zielgruppenorientierte Memes in Präsentationen\ncustomer=Simone Giertz Ltd.\n```\n\nWhile the customer name could be removed from `presentation.html`, it can't be changed in the PDF version.\n\n# Deploying lab environment\n\n## Local\n\nEnsure meeting the following requirements:\n\n- Install [HashiCorp Vagrant](https://vagrantup.com)\n- Install a supported hypervisor, such as:\n - [Oracle VirtualBox](https://virtualbox.org)\n - [VMware Workstation Player](https://www.vmware.com/products/workstation-player/workstation-player-evaluation.html), [VMware Workstation Pro](https://www.vmware.com/products/workstation-pro.html) or [VMware Fusion](https://www.vmware.com/products/fusion.html)\n - libvirt/KVM\n - [Parallels Desktop](https://www.parallels.com/)\n - [Microsoft Hyper-V](https://docs.microsoft.com/en-us/virtualization/hyper-v-on-windows/)\n\nFor deploying the lab on a local machine, simply clone this repository to a host and switch to the `Vagrant` folder. Run the following command to create and configure the VM:\n\n```shell\n$ vagrant up\nBringing machine 'default' up with 'virtualbox' provider...\n==> default: Importing base box 'centos/8'...\n==> default: Matching MAC address for NAT networking...\n==> default: Checking if box 'centos/8' version '1905.1' is up to date...\n...\nPLAY RECAP *********************************************************************\ndefault : ok=5 changed=3 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0\n```\n\n## Cloud\n\nFor deploying the lab in the cloud, a provider-specific configuration needs to be created within the [`Terraform`](terraform/) folder.\n\nCreating the environment differs per provider, but mostly the following commands are necessary:\n\n```shell\n$ terraform init\n$ ssh-keygen -f clouduser\n$ cp azureuser ../../Ansible/files\n$ terraform plan -var-file=credentials-<name>.tfvars\n$ terraform apply -var-file=credentials-<name>.tfvars\n```\n\nTo configure the VMs using Ansible after creating the VMs, run the following commands:\n\n```shell\n$ ansible-playbook -i inventory.py ../../ansible/node.yml --flush-cache\n```\n\n# Testing\n\n## VM customization\n\nFor the VM customization, unit tests can be found in the [`Ansible`](ansible/) folder. To run them, execute the following command within the VM:\n\n```shell\n$ pytest-3 /vagrant/ansible/test_generic.py\n=== test session starts ===\nplatform linux -- Python 3.6.8, pytest-3.4.2, py-1.5.3, pluggy-0.6.0\nrootdir: /vagrant/ansible, inifile:\nplugins: testinfra-5.2.2\ncollected 3 items\ntest_generic.py ... [100%]\n\n=== 3 passed in 0.09 seconds ===\n```\n\nWhen using a cloud environment, you can also leverage Ansible from your Terraform configuration to run the tests:\n\n```shell\n$ py.test --connection=ansible --ansible-inventory=inventory.py ../../Ansible/test_generic.py --sudo\n```\n\nNote that this will require **dynamic inventory**.\n\n## Participant lab solution\n\nParticipants can easily check whether they succeeded a lab by running the following command:\n\n```shell\n$ lab 1\nChecking lab 1\n\n[1/2] Checking Good Task... Success!\n[2/2] Checking Bad Task... 
Failure!\n\nErrors in 1 task\n```\n\n`1` needs to be replaced with the lab number.\n\n# Automated validation of lab solutions\n\nAutomated testing of lab solutions can be performed by placing test definition, written in [commander](https://github.com/SimonBaeumer/commander) syntax, in the `labs/test` directory.\n\nWhen working with the example Vagrantfile in `vagrant`, the command `lab` becomes available inside the VM. Using it inside of the ``labs`` folder, which is placed inside the users home directory checks for either a single lab, or the whole suite can be performed. See ``lab -h`` for more information.\n\n# Pipeline configuration\n\nA [GitLab CI/CD](.gitlab-ci.yml) configuration is part of this repository. It executes the following tasks:\n\n1. Render HTML from Markdown files\n2. Build PDF presentation for sharing with participants\n3. Create handout PDF from `handout.md`\n4. Collect all the files necessary for a training (*presentations, additional content and handout*)\n\nIt should be easy to migrate this logic to other pipelines such as **GitHub Actions**.\n\n# Further notes\n\nSee also the official [remark.js README](README-remarkjs.md) for more details.\n" }, { "alpha_fraction": 0.7284946441650391, "alphanum_fraction": 0.7822580933570862, "avg_line_length": 30, "blob_id": "b7495e1925edeaccbb9e1564853e82a80291016c", "content_id": "d2d7c30e8e6555601ea2866b2fc81a55b622353d", "detected_licenses": [ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 744, "license_type": "permissive", "max_line_length": 99, "num_lines": 24, "path": "/CHANGELOG.md", "repo_name": "svalabs/training-as-code", "src_encoding": "UTF-8", "text": "# Release 1.4, 21.08.2020\n\n- title slide uses the title from config file\n- added automatic agenda generation based on first-level-headings\n- add linter configurations\n- added lab tests for participants based on [Commander](https://github.com/commander-cli/commander)\n- added Ansible lab configuration\n- added lab configuration unit tests\n\n# Release 1.3, 22.06.2020\n\n- added footnotes and in-line syntax highlightning\n- fixed smaller issues (updated example, changed font size, simplified pipeline)\n- fixed some bugs while building the presentation\n- implemented handout feature\n- implemented watermark function (for customer presentations)\n\n# Release 1.1, 09.03.2020\n\n- implemented basic SVA theme\n\n# Release 1.0, 20.02.2020\n\n- initial release\n" }, { "alpha_fraction": 0.7230769395828247, "alphanum_fraction": 0.7692307829856873, "avg_line_length": 10, "blob_id": "8facddd77177160712b12839f17521ae7d3a8a16", "content_id": "a7a1c46f276308b2b62e8f0ad60871c3b255ac3b", "detected_licenses": [ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 66, "license_type": "permissive", "max_line_length": 27, "num_lines": 6, "path": "/settings.ini", "repo_name": "svalabs/training-as-code", "src_encoding": "UTF-8", "text": "[meta]\ntitle=SVA Demo-Präsentation\ncustomer=\n\n[layout]\nratio=16:9" }, { "alpha_fraction": 0.625634491443634, "alphanum_fraction": 0.6269035339355469, "avg_line_length": 20.88888931274414, "blob_id": "952c65eebfd9b9f37b2e5fb19fcec32848da6542", "content_id": "ef4a5f6cc551400152aff5546582e8d93aec3307", "detected_licenses": [ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ], "is_generated": false, "is_vendor": false, "language": 
"Python", "length_bytes": 788, "license_type": "permissive", "max_line_length": 58, "num_lines": 36, "path": "/Ansible/test_generic.py", "repo_name": "svalabs/training-as-code", "src_encoding": "UTF-8", "text": "\"\"\"\nDeployment unit tests\nGeneric tests\n\"\"\"\nimport os\nimport testinfra.utils.ansible_runner\n\n\ndef test_software(host):\n \"\"\"\n Check whether required software packages are installed\n \"\"\"\n packages = [\n \"vim-common\",\n \"bind-utils\",\n \"epel-release\",\n \"python3-pytest\"\n ]\n for pkg in packages:\n _pkg = host.package(pkg)\n assert _pkg.is_installed\n\ndef test_firewall(host):\n \"\"\"\n Check whether firewall is disabled\n \"\"\"\n firewalld = host.service(\"firewalld\")\n assert firewalld.is_enabled == False\n assert firewalld.is_running == False\n\ndef test_selinux(host):\n \"\"\"\n Check whether SELinux is in Permissive mode\n \"\"\"\n sebool = host.run(\"getenforce\")\n assert sebool.stdout.lower().strip() == \"permissive\"\n" }, { "alpha_fraction": 0.6678403615951538, "alphanum_fraction": 0.6889671087265015, "avg_line_length": 41.599998474121094, "blob_id": "16803e2211e61c0d07a04538f25fc8c7a96e9f45", "content_id": "3bae787b7b8b6a8ed934659c1594462f339d5ea6", "detected_licenses": [ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ], "is_generated": false, "is_vendor": true, "language": "Ruby", "length_bytes": 852, "license_type": "permissive", "max_line_length": 127, "num_lines": 20, "path": "/Vagrant/Vagrantfile", "repo_name": "svalabs/training-as-code", "src_encoding": "UTF-8", "text": "# -*- mode: ruby -*-\n# vi: set ft=ruby :\n\nVagrant.configure(\"2\") do |config|\n config.vm.network \"private_network\", ip: \"192.168.24.99\"\n config.vm.box = \"centos/8\"\n # copy Lab tools\n config.vm.provision \"file\", source: \"../Ansible\", destination: \"/vagrant/ansible\"\n config.vm.provision \"file\", source: \"../labs\", destination: \"/home/vagrant/labs\"\n config.vm.provision \"file\", source: \"../tools/lab\", destination: \"/tmp/lab\"\n config.vm.provision \"shell\", privileged: true, inline: <<-SHELL\n mv /tmp/lab /usr/local/bin/lab\n curl -Lo /usr/local/bin/commander https://github.com/commander-cli/commander/releases/download/v2.2.0/commander-linux-amd64\n yum install -y python3\n chmod +x /usr/local/bin/commander /usr/local/bin/lab\n SHELL\n config.vm.provision \"ansible_local\" do |ansible|\n ansible.playbook = \"ansible/lab.yml\"\n end\nend\n" }, { "alpha_fraction": 0.5637149214744568, "alphanum_fraction": 0.582073450088501, "avg_line_length": 24.254545211791992, "blob_id": "7368be4f511a8f4702ffb55ec7795a949b5353c6", "content_id": "bd49725c07101dbe16d7c3e3bd5552c2b37549b0", "detected_licenses": [ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2778, "license_type": "permissive", "max_line_length": 88, "num_lines": 110, "path": "/tools/lab", "repo_name": "svalabs/training-as-code", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\nimport os\nimport re\nimport subprocess\nimport argparse\nimport sys\n\nTEST_PATH = ''\n\n\ndef get_labs():\n labs = set()\n lab_pattern = re.compile(r'lab-(\\d+)-')\n for f in get_test_files():\n result = lab_pattern.search(f)\n\n if not result:\n continue\n\n lab = int(result.group(1))\n labs.add(lab)\n\n if not labs:\n print(f\"No lab tests found\")\n exit(1)\n\n return list(labs)\n\n\ndef get_lab_test(lab):\n tasks = []\n for f in get_test_files():\n if 
f.startswith(f'lab-{lab}-'):\n            tasks.append(os.path.join(TEST_PATH, f))\n    return tasks\n\n\ndef get_test_files():\n    try:\n        files = os.listdir(TEST_PATH)\n    except FileNotFoundError:\n        print(\"Test folder not found. Are you in the labs directory?\")\n        exit(1)\n    else:\n        return files\n\n\ndef run_single_check(lab):\n    tasks = get_lab_test(lab)\n\n    print(f\"Checking lab {lab}\\n\")\n\n    if not tasks:\n        print(f\"No tests defined for lab {lab}\")\n        exit(1)\n\n    lab_task_pattern = re.compile(fr'lab-{lab}-task-(\\d+)-(.+)\\.(?:yaml|yml)')\n    success_counter = 0\n\n    for task in tasks:\n        task_match = lab_task_pattern.search(task)\n\n        if not task_match:\n            print(f\"Invalid task test name {task}. Please contact your instructor.\")\n            exit(1)\n\n        task_num = task_match.group(1)\n        task_name = task_match.group(2)\n\n        print(f\"[{task_num}/{len(tasks)}] Checking {task_name}...\", end=\" \")\n\n        try:\n            output = subprocess.check_output(['commander', 'test', task])\n        except subprocess.CalledProcessError:\n            print(\"\\033[1;31mFailure!\\033[0m\")\n            # TODO: Give some reasons here\n            # Question is how much of the commander output to use\n        else:\n            print(\"\\033[1;32mSuccess! \\033[0m\")\n            success_counter += 1\n\n    if success_counter == len(tasks):\n        print(\"\\033[1;32mAll tasks correct! \\033[0m\\n\")\n    else:\n        print(\n            f\"\\n\\033[1;31mErrors in {len(tasks) - success_counter} \"\n            f\"{'tasks' if len(tasks) - success_counter > 1 else 'task'}\\033[0m\\n\")\n\n\ndef run_full_check():\n    labs = get_labs()\n    for lab in labs:\n        run_single_check(lab)\n\n\nif __name__ == '__main__':\n    parser = argparse.ArgumentParser(description='check lab solutions for correctness.')\n\n    parser.add_argument('check', nargs='?', default=None)\n    parser.add_argument('--workdir', required=False, default=\"/home/vagrant/labs\")\n    parser.add_argument('--testpath', required=False, default=\"./tests\")\n    args = parser.parse_args()\n\n    TEST_PATH = args.testpath\n    os.chdir(args.workdir)\n\n    if args.check:\n        run_single_check(args.check)\n    else:\n        run_full_check()\n" }, { "alpha_fraction": 0.6119873523712158, "alphanum_fraction": 0.6182965040206909, "avg_line_length": 9.22580623626709, "blob_id": "4e023995d11418b35f8c2605fb1720bcef53c9a2", "content_id": "b61fb52db51cff9ecad4976abd3dcb0a93ea9fc2", "detected_licenses": [ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 318, "license_type": "permissive", "max_line_length": 30, "num_lines": 31, "path": "/handout.md", "repo_name": "svalabs/training-as-code", "src_encoding": "UTF-8", "text": "# Handout\n\nDies ist ein Beispiel-Handout.\n\n## Überschrift 2\n\n* Eine Liste\n* mit\n* vielen Punkten\n\n### Weitere Auszeichnungen\n\nHier geht _kursiver Text_,\n\nes gibt **fetten Text**\n\nund `monospace Text`.\n\n### Code-Beispiel\n\n```Python\nimport json\n\nnull = None\n\nd = {\n    \"abc\": \"def\",\n    \"foo\": 1,\n    \"bar\": null\n}\n```\n" }, { "alpha_fraction": 0.6021654009819031, "alphanum_fraction": 0.6049297451972961, "avg_line_length": 26.150602340698242, "blob_id": "ba5259ea4623ac4e8e211f3992324abdba86406a", "content_id": "8bd214b02ff1df4141fd112a526d89399265f007", "detected_licenses": [ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4341, "license_type": "permissive", "max_line_length": 90, "num_lines": 166, "path": "/build.py", "repo_name": "svalabs/training-as-code", "src_encoding": "UTF-8", 
"text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nThis script builds the presentation by filling the boilerplate with content\n\"\"\"\n\nimport codecs\nimport configparser\nimport glob\nimport re\nfrom itertools import zip_longest\nfrom math import ceil\n\n\nIGNORED_DOCS = [\n \"CHANGELOG.md\",\n \"README.md\",\n \"README-remarkjs.md\",\n \"handout.md\",\n \"labs.md\"\n]\n\"\"\"\nIGNORED_DOCS: documents that will never be imported\n\"\"\"\n\nSLIDE_DIVIDER = '\\n---\\n'\nPOINTS_PER_AGENDA_SLIDE = 9\nAGENDA_TEMPLATE = \"\"\"\n# Agenda {counter}\n{points}\n\n\"\"\"\n\n\ndef main():\n \"\"\"\n Main function, starts the logic based on parameters.\n \"\"\"\n config = read_config('settings.ini')\n slides = render_slides(read_slides(), config)\n\n template = read_template('template.html')\n\n rendered_template = render_metadata(template, config)\n rendered_template = rendered_template.replace('{{ content }}', slides)\n\n write_file(\"presentation.html\", rendered_template)\n\n\ndef read_config(filename):\n config = configparser.ConfigParser()\n config.read(filename, encoding='utf-8')\n return config\n\n\ndef read_template(filename):\n with open(filename) as file_:\n return file_.read()\n\n\ndef read_slides():\n slides = []\n for file in sorted(glob.iglob(\"*.md\")):\n if file not in IGNORED_DOCS:\n with open(file, 'r', encoding=\"utf-8\") as slide_file:\n content = slide_file.read()\n\n slides.extend(\n [slide.strip() for slide in content.split(SLIDE_DIVIDER)]\n )\n\n if not slides:\n raise RuntimeError(\"No slides loaded. \"\n \"Please add some slides or adjust IGNORED_DOCS.\")\n\n return slides\n\n\ndef render_slides(slides, config):\n agenda = create_agenda(slides)\n print(\"On our agenda: {}\".format(', '.join(agenda)))\n rendered_agenda = render_agenda(agenda)\n\n combined_slides = SLIDE_DIVIDER.join(slides)\n\n rendered_slides = render_metadata(combined_slides, config)\n rendered_slides = rendered_slides.replace('{{ agenda }}', rendered_agenda)\n\n return rendered_slides\n\n\ndef create_agenda(slides):\n agenda = []\n for slide in slides[1:]: # ignore title slide\n title = get_title(slide)\n if not title:\n continue\n\n if title not in agenda:\n agenda.append(title)\n\n return agenda\n\n\ndef get_title(slide):\n match = re.match(r'^(class: .*\\n+){0,1}#\\s+(?P<title>.*)$', slide, flags=re.MULTILINE)\n if match:\n title = match.group('title').strip()\n return title\n\n\ndef render_agenda(agenda):\n if not agenda:\n # Avoid having an empty slide.\n return (\"Unable to detect agenda. 
\"\n \"Please add at least one first-level heading (`# Title`) \"\n \"or remove the `{{ agenda }}` tag from your slides.\")\n\n slide_count = ceil(len(agenda) / POINTS_PER_AGENDA_SLIDE)\n\n filled_agenda = []\n for index, agenda_points in enumerate(chunks(agenda, POINTS_PER_AGENDA_SLIDE)):\n if slide_count < 2:\n count = ''\n else:\n count = '{index}/{count}'.format(index=index + 1,\n count=slide_count)\n\n topics = ['- %s' % t for t in agenda_points if t is not None]\n points = '\\n'.join(topics)\n\n filled_agenda.append(AGENDA_TEMPLATE.format(counter=count,\n points=points))\n\n return SLIDE_DIVIDER.join(filled_agenda)\n\n\ndef chunks(iterable, count):\n \"Collect data into fixed-length chunks or blocks\"\n # chunks('ABCDEFG', 3) --> ABC DEF Gxx\"\n args = [iter(iterable)] * count\n return zip_longest(*args)\n\n\ndef render_metadata(slides, metadata):\n rendered = slides.replace('{{ title }}', metadata['meta']['title'])\n customer = metadata['meta'].get('customer', '')\n rendered = rendered.replace('{{ customer }}', customer)\n rendered = rendered.replace('{{ ratio }}', metadata['layout']['ratio'])\n\n if customer:\n slideFormat = f\"%current% | %total% - KOPIE: {customer}\"\n else:\n slideFormat = \"%current% | %total%\"\n\n rendered = rendered.replace('{{ slideNumberFormat }}', slideFormat)\n\n return rendered\n\n\ndef write_file(filename, content):\n with codecs.open(filename, \"w\", \"utf-8\") as file_:\n file_.write(content)\n\nif __name__ == \"__main__\":\n main()\n" } ]
11
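build.py above derives the agenda from first-level headings (the CHANGELOG entry "automatic agenda generation based on first-level-headings" refers to this). A self-contained illustration of that extraction, reusing the regex from get_title; the sample slides here are made up:

```python
import re

def get_title(slide):
    """Return the '# Heading' of a slide, or None (regex as in build.py)."""
    m = re.match(r'^(class: .*\n+){0,1}#\s+(?P<title>.*)$', slide, flags=re.MULTILINE)
    return m.group('title').strip() if m else None

slides = ["# Demo training", "# Motivation\ntext", "## Subsection only", "# Thema A"]
agenda = []
for slide in slides[1:]:          # the title slide is skipped
    title = get_title(slide)
    if title and title not in agenda:
        agenda.append(title)
print(agenda)                     # ['Motivation', 'Thema A']
```

Second-level headings ("## ...") never match the regex, which is why only first-level headings show up on the generated agenda slides.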
rahulvashistha/pythonprojects
https://github.com/rahulvashistha/pythonprojects
c9af34bef14809d4f32eea00a2ba9e2831fdfcb5
229b1717bd373f749ebc41a39fbec3c90bd68fa2
004965706683557d35d95e1bf56118d2b2cb1bab
refs/heads/main
2023-07-02T07:08:25.466987
2021-08-11T11:17:38
2021-08-11T11:17:38
394,606,425
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.5088719725608826, "alphanum_fraction": 0.515842854976654, "avg_line_length": 28.346153259277344, "blob_id": "975e3f314176e139aeeb490021a811ed786ec49f", "content_id": "7c942f793133c4bc91b10379650b9bc5958f0190", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1578, "license_type": "no_license", "max_line_length": 75, "num_lines": 52, "path": "/Rock Paper Scissors.py", "repo_name": "rahulvashistha/pythonprojects", "src_encoding": "UTF-8", "text": "import random\r\n\r\nplayer_name = input(\"Enter Player Name: \\n\")\r\nchoices = [\"Rock\", \"Paper\", \"Scissors\"]\r\n\r\nprint(\"Welcome,\",player_name,\"Make your choice\")\r\nplayer = False\r\ncpu_score = 0\r\nplayer_score = 0\r\n\r\nwhile True:\r\n player = input(\"Rock, Paper or Scissors? (End to Quit)\\n\").capitalize()\r\n computer = random.choice(choices)\r\n#Rules and Cases of the game\r\n#Case 1\r\n if player == computer:\r\n print(\"Tie!\")\r\n elif player == \"Rock\":\r\n if computer == \"Paper\":\r\n print(\"You Lose!\", computer, \"covers\", player)\r\n cpu_score+=1\r\n else:\r\n print(\"You Win!\", player, \"smashes\", computer)\r\n player_score+=1\r\n#Case 2 \r\n elif player == \"Paper\":\r\n if computer == \"Scissors\":\r\n print(\"You Lose!\", computer, \"cuts\", player)\r\n cpu_score+=1\r\n else:\r\n print(\"You Win!\", player, \"covers\", computer)\r\n player_score+=1\r\n#Case 3\r\n elif player == \"Scissors\":\r\n if computer == \"Rock\":\r\n print(\"You Lose!\", computer, \"smashes\", player)\r\n cpu_score+=1\r\n else:\r\n print(\"You Win!\", player, \"cuts\", computer)\r\n player_score+=1\r\n elif player == 'End':\r\n#Show the final score and result\r\n print(\"Final Score\")\r\n print(f\"CPU : {cpu_score}\")\r\n print(f\"You : {player_score}\")\r\n if player_score > cpu_score:\r\n print(\"YOU WIN!\")\r\n elif cpu_score > player_score:\r\n print(\"YOU LOSE!\")\r\n else:\r\n print(\"TIED!\")\r\n break\r\n" }, { "alpha_fraction": 0.5659824013710022, "alphanum_fraction": 0.5967742204666138, "avg_line_length": 31.317073822021484, "blob_id": "c0c3c7e1b59a0a174dd8469bec1a5f351ee81f82", "content_id": "58391ec4608ddc8e0f94941200f13b6c647c81db", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1364, "license_type": "no_license", "max_line_length": 89, "num_lines": 41, "path": "/BMI Calc.py", "repo_name": "rahulvashistha/pythonprojects", "src_encoding": "UTF-8", "text": "#Function to take height and its unit and convert it into meter stand.\r\ndef convert_H():\r\n H_unit = int(input(\"Height: Select your Unit\\n 1. centimeter 2. meter 3. feet \\n\"))\r\n Height = float(input(\"Enter you Height: \\n\"))\r\n if (H_unit == 1):\r\n return (Height/100)\r\n elif (H_unit == 2):\r\n return Height\r\n elif (H_unit == 3):\r\n return (round(Height/3.2808399,3))\r\n else: print(\"Wrong Input...\")\r\n#Call func for Height\r\nHeight = convert_H()\r\n#Function to take weight and its unit and convert it into kg stand.\r\ndef convert_W():\r\n W_unit = int(input(\"Weight: Select your Unit\\n 1. kilograms 2. 
pounds/lbs \\n\"))\r\n Weight = float(input(\"Enter you Weight: \\n\"))\r\n if (W_unit == 1):\r\n return Weight\r\n elif (W_unit == 2):\r\n return (round(Weight/2.20462262,3))\r\n else: print(\"Wrong Input...\")\r\n#Call func for Weight\r\nWeight = convert_W()\r\n#Calc BMI and print\r\nBMI = Weight/(Height * Height)\r\nprint(\"Your Body Mass Index is: \", BMI)\r\n#print condition acc to BMI\r\nif(BMI>0):\r\n if(BMI<=16):\r\n print(\"You're Severely Underweight\")\r\n elif(BMI<=18.5):\r\n print(\"You're Underweight\")\r\n elif(BMI<=25):\r\n print(\"You're Healthy\")\r\n elif(BMI<=30):\r\n print(\"You're Overweight\")\r\n else:\r\n print(\"You're Severely Overweight\")\r\nelse:\r\n print(\"Enter Valid Details\")" }, { "alpha_fraction": 0.6743515729904175, "alphanum_fraction": 0.698847234249115, "avg_line_length": 26.66666603088379, "blob_id": "3554aed6ede6759ef2e5dfa9e0a66a25ee72d083", "content_id": "ac1a9f44d96217bfbb5a7af163e17717d2a4287b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 694, "license_type": "no_license", "max_line_length": 88, "num_lines": 24, "path": "/Digital Clock.py", "repo_name": "rahulvashistha/pythonprojects", "src_encoding": "UTF-8", "text": "from tkinter import Label, Tk\r\nimport time\r\nfrom datetime import datetime\r\n#define title, window size, resize\r\napp_window = Tk()\r\napp_window.title(\"Digita Clock\")\r\napp_window.geometry(\"420x180\")\r\napp_window.resizable(1,1)\r\n#define font type, size, background, foreground\r\ntext_font = (\"Boulder\", 70, 'bold')\r\nbackground = \"black\"\r\nforeground = \"white\"\r\nborder_width = 35\r\n\r\nlabel = Label(app_window, font=text_font, bg=background, fg=foreground, bd=border_width)\r\nlabel.grid(row=0, column=1)\r\n#clock function\r\ndef digital_clock():\r\n time_live = time.strftime(\"%H:%M:%S\")\r\n label.config(text=time_live)\r\n label.after(200, digital_clock)\r\n\r\ndigital_clock()\r\napp_window.mainloop()\r\n\r\n\r\n\r\n" }, { "alpha_fraction": 0.5714285969734192, "alphanum_fraction": 0.5863717794418335, "avg_line_length": 31.5, "blob_id": "9dca191a47d516da5d30ab49f29ce502f220e421", "content_id": "1ae4284a50d81028357930cde12554ddad7ade93", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1673, "license_type": "no_license", "max_line_length": 63, "num_lines": 50, "path": "/Alarm Clock.py", "repo_name": "rahulvashistha/pythonprojects", "src_encoding": "UTF-8", "text": "from datetime import datetime\r\nfrom playsound import playsound\r\n#Print current time\r\nprint(\"Current Time: \", datetime.now())\r\n#Validation Function\r\ndef validate_time(alarm_time):\r\n if len(alarm_time) != 11:\r\n return \"Invalid time format! Please try again...\"\r\n else:\r\n if int(alarm_time[0:2]) > 12:\r\n return \"Invalid HOUR format! Please try again...\"\r\n elif int(alarm_time[3:5]) > 59:\r\n return \"Invalid MINUTE format! Please try again...\"\r\n elif int(alarm_time[6:8]) > 59:\r\n return \"Invalid SECOND format! 
Please try again...\"\r\n else:\r\n return \"ok\"\r\nwhile True:\r\n#Take input and validate it\r\n alarm_time = input(\"Enter Alarm Time: 'HH:MM:SS AM/PM' \\n\")\r\n\r\n validate = validate_time(alarm_time)\r\n if validate != \"ok\":\r\n print(validate)\r\n else:\r\n print(\"Setting up Alarm...\") \r\n break \r\n\r\n#Slice and assign the input string\r\nalarm_hour = alarm_time[0:2]\r\nalarm_minute = alarm_time[3:5]\r\nalarm_second = alarm_time[6:8]\r\nalarm_period = alarm_time[9:11].upper()\r\n\r\nwhile True:\r\n#Assign the current time values\r\n now = datetime.now()\r\n current_hour = now.strftime(\"%I\")\r\n current_minute = now.strftime(\"%M\")\r\n current_second = now.strftime(\"%S\")\r\n current_period = now.strftime(\"%p\") \r\n#Check the current and alarm time\r\n if(alarm_period == current_period):\r\n if(alarm_hour == current_hour):\r\n if(alarm_minute == current_minute):\r\n if(alarm_second == current_second):\r\n#Display msg and play sound\r\n print(\"Wake Up!\")\r\n playsound('aud1.mp3')\r\n break" }, { "alpha_fraction": 0.7909516096115112, "alphanum_fraction": 0.797191858291626, "avg_line_length": 79, "blob_id": "fe3bfe86ce7b323b2437832995e309e0f097dca7", "content_id": "24900d68a88be0a9625c3c03842a18fd46265426", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 641, "license_type": "no_license", "max_line_length": 112, "num_lines": 8, "path": "/README.md", "repo_name": "rahulvashistha/pythonprojects", "src_encoding": "UTF-8", "text": "# pythonprojects\nThis repo contains some of the python projects I made for learning and developing my python and coding skills.\nThey are simple programs with comments explaining the steps and methods that I followed during the coding phase.\n\n1. Alarm Clock: set a time and the program will play a sound at that time. (datetime and playsound module)\n2. BMI Calculator: add weight and height in any provided format to calculate the bmi and see the result.\n3. Digital Clock: a digital clock providing the current time using Tkinter. (Tkinter, datetime and time module)\n4. Rock Paper Scissors: simple game against computer, results at the end. \n" } ]
5
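Alarm Clock.py above validates the alarm string by slicing fixed character positions; datetime.strptime can perform the same check in one call. A sketch of a drop-in replacement for validate_time, assuming the same 'HH:MM:SS AM/PM' prompt (this variant is not in the repo):

```python
from datetime import datetime

def validate_time(alarm_time):
    """Return 'ok' if alarm_time parses as 12-hour 'HH:MM:SS AM/PM'."""
    try:
        # %I = 12-hour clock, %M/%S = minutes/seconds, %p = AM/PM;
        # strptime raises ValueError on any malformed field
        datetime.strptime(alarm_time.strip(), "%I:%M:%S %p")
        return "ok"
    except ValueError:
        return "Invalid time format! Please try again..."
```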
sanaltsk/codepath
https://github.com/sanaltsk/codepath
d7bfae962b8e4be6da7a6f2fb973403051712a89
a6fc36e64fc26c146103870ac44c928e53c92775
34c13025bcaa63e7d45a390a463f701f3e674476
refs/heads/master
2020-03-16T03:38:51.702680
2018-05-07T17:13:49
2018-05-07T17:13:49
132,492,082
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5287958383560181, "alphanum_fraction": 0.5340313911437988, "avg_line_length": 22.875, "blob_id": "878f5dcc51cb109d4a6fdf0bd6f233b4409bf966", "content_id": "fcb1d283afa357dc6a018b6e76de531c965a3e5a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 191, "license_type": "no_license", "max_line_length": 34, "num_lines": 8, "path": "/level3_kthsmallest_in_array.py", "repo_name": "sanaltsk/codepath", "src_encoding": "UTF-8", "text": "class Solution:\n # @param A : tuple of integers\n # @param B : integer\n # @return an integer\n def kthsmallest(self, A, B):\n A=list(A)\n A.sort()\n return A[B-1]\n" }, { "alpha_fraction": 0.4659574329853058, "alphanum_fraction": 0.48510637879371643, "avg_line_length": 28.375, "blob_id": "0ec8f0a3519bbc22a6f1c2d5ec46b53f23f06f0f", "content_id": "0dfdb631bb411ee74068d43953a85e033e4bfd51", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 470, "license_type": "no_license", "max_line_length": 76, "num_lines": 16, "path": "/level2_prettyprint.py", "repo_name": "sanaltsk/codepath", "src_encoding": "UTF-8", "text": "class Solution:\n # @param A : integer\n # @return a list of list of integers\n def prettyPrint(self, A):\n a=[]\n maxn=2*A-1\n mid = A-1\n for i in range(0,maxn):\n for j in range(0,maxn):\n a.append([i,j])\n \n new_matrix=[]\n for j in a:\n n = max(abs(j[0]-mid),abs(j[1]-mid))+1\n new_matrix.append(n)\n return [new_matrix[i:i+maxn] for i in range(0,len(new_matrix),maxn)]\n" }, { "alpha_fraction": 0.6974790096282959, "alphanum_fraction": 0.6974790096282959, "avg_line_length": 22.799999237060547, "blob_id": "c7452d01291a35ca37624f0588229053f1f0d5aa", "content_id": "f905d9389b24fe55ebd79fd73033dbf1c5d1a007", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 119, "license_type": "no_license", "max_line_length": 104, "num_lines": 5, "path": "/README.md", "repo_name": "sanaltsk/codepath", "src_encoding": "UTF-8", "text": "# codepath\n\n\n\n<img src='https://i.imgur.com/MLUjFTR.gif' title='Video Walkthrough' width='' alt='Video Walkthrough' />\n" }, { "alpha_fraction": 0.3839285671710968, "alphanum_fraction": 0.4017857015132904, "avg_line_length": 23.94444465637207, "blob_id": "cca494c77aba538f965a0f43e644db72fabfbeee", "content_id": "61b6640905c9cf098676574e0f32827a76bc7c88", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 448, "license_type": "no_license", "max_line_length": 36, "num_lines": 18, "path": "/level5_longest_seq.py", "repo_name": "sanaltsk/codepath", "src_encoding": "UTF-8", "text": "class Solution:\n # @param A : tuple of integers\n # @return an integer\n def longestConsecutive(self, A):\n A=list(A)\n A.sort()\n max=1\n count = 1\n for i in range(1,len(A)):\n if(A[i]-A[i-1]==1):\n count+=1\n elif (A[i]==A[i-1]):\n continue\n else:\n count=1;\n if (count > max):\n max = count\n return max" }, { "alpha_fraction": 0.39240506291389465, "alphanum_fraction": 0.40506330132484436, "avg_line_length": 27.285715103149414, "blob_id": "98ce9468e8a69d4d5a73c31660add0ed525c06ce", "content_id": "31d285d647657f705aba43837025a8e462311dcc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 395, "license_type": "no_license", "max_line_length": 39, "num_lines": 14, "path": "/level4_nextgreater.py", "repo_name": "sanaltsk/codepath", "src_encoding": "UTF-8", "text": 
"class Solution:\n # @param A : list of integers\n # @return a list of integers\n def nextGreater(self, A):\n x=[]\n for i in range(0,len(A)):\n for j in range(i+1,len(A)):\n if A[i]<A[j]:\n x.append(A[j])\n break\n elif j==len(A)-1:\n x.append(-1)\n x.append(-1)\n return x" } ]
5
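The nextGreater solution above compares every later element for each index, which is O(n^2). The standard monotonic-stack pass computes the same answer in O(n); a sketch, separate from the repo's Solution class:

```python
def next_greater(A):
    """For each A[i], the first greater element to its right, else -1."""
    res = [-1] * len(A)
    stack = []                      # indices still waiting for a greater value
    for j, val in enumerate(A):
        while stack and A[stack[-1]] < val:
            res[stack.pop()] = val  # val is the first greater element seen
        stack.append(j)
    return res

print(next_greater([4, 5, 2, 10]))  # [5, 10, 10, -1]
```

Each index is pushed and popped at most once, hence the linear bound.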
arjunfzk/Innovate_AI_workshop
https://github.com/arjunfzk/Innovate_AI_workshop
c9518829d9c5182c1efeb2dec638b4d13c548a5f
a46c21fa95b0ea9b1f7d9ce6683c319b69dfae3d
a0049e5e0cc074fa3cf7b1c68330c4f767ad8e1f
refs/heads/master
2021-07-05T16:29:55.042037
2020-08-22T18:06:56
2020-08-22T18:06:56
144,022,866
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6805555820465088, "alphanum_fraction": 0.6875, "avg_line_length": 14.222222328186035, "blob_id": "9a389b3b867997cae3737e41a6de011360311d7a", "content_id": "d11aac82e5a85503567a827fef12a10161639bbe", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 144, "license_type": "permissive", "max_line_length": 41, "num_lines": 9, "path": "/lyrics.py", "repo_name": "arjunfzk/Innovate_AI_workshop", "src_encoding": "UTF-8", "text": "import requests\r\nimport json\r\n\r\n\r\nx=\"https://api.lyrics.ovh/v1/adele/hello\"\r\nr=requests.get(x)\r\n\r\njson_data=r.json()\r\nprint(json_data['lyrics'])" }, { "alpha_fraction": 0.6090047359466553, "alphanum_fraction": 0.6540284156799316, "avg_line_length": 27.034482955932617, "blob_id": "c3f7356f3dc191f057f04b5dac5ef655d26f8674", "content_id": "187ae41ba4836a8afba7dbec644a225a3b95b3fc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 844, "license_type": "permissive", "max_line_length": 73, "num_lines": 29, "path": "/eye_detection/eye[windows].py", "repo_name": "arjunfzk/Innovate_AI_workshop", "src_encoding": "UTF-8", "text": "import pyautogui\r\nimport cv2 \r\nimport numpy as np\r\nface_cascade=cv2.CascadeClassifier('haarcascade_frontalface_default.xml')\r\neye_cascade=cv2.CascadeClassifier('haarcascade_eye.xml')\r\n\t\r\ncap=cv2.VideoCapture(0)\r\n\t\r\nwhile True:\r\n\tret,img=cap.read()\r\n\tgray=cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\r\n\tfaces=face_cascade.detectMultiScale(gray,1.1 ,5)#1.5,5 1.3\r\n\tfor (x,y,w,h) in faces:\r\n\t\tcv2.rectangle(img, (x,y), (x+w, y+h), (255,0,0), 2)\r\n\t\troi_gray=gray[y:y+h, x:x+w] \r\n\t\troi_color=img[y:y+h, x:x+w] \r\n\t\teyes=eye_cascade.detectMultiScale(roi_gray)\r\n\t\tprint(len(eyes)) \r\n\t\tif len(eyes)<2:\r\n\t\t\tpass\r\n\t\t\tpyautogui.press(\"space\")\r\n\t\tfor(ex,ey,ew,eh) in eyes:\r\n\t\t\tcv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(0,255,0),2)\r\n\tcv2.imshow('img',img)\r\n\tk=cv2.waitKey(30) & 0xff\r\n\tif k==27:\r\n\t\tbreak\r\ncap.release()\r\ncap.destroyAllWindows()\r\n\r\n" }, { "alpha_fraction": 0.6661631464958191, "alphanum_fraction": 0.7296072244644165, "avg_line_length": 29, "blob_id": "3e1eac8c6bec018da7a6cee5eb46473121a6fca9", "content_id": "200caa47f476a528de3401e819fe7aa65a514500", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 662, "license_type": "permissive", "max_line_length": 64, "num_lines": 22, "path": "/automation.py", "repo_name": "arjunfzk/Innovate_AI_workshop", "src_encoding": "UTF-8", "text": "\nfrom selenium import webdriver\nimport time\nimport random\nbrowser = webdriver.Firefox()\nname_list = open(\"male.txt\", \"r\")\nname = name_list.read()\nname = name.split()\n\nbrowser.get('file:///home/arjun/Desktop/Workshop/index.html')\nelem = browser.find_element_by_id('inputName')\nelem2 = browser.find_element_by_id('inputEmail')\nelem22 = browser.find_element_by_id('inputContact')\n\nelem3=browser.find_element_by_id('btnClick')\nfor i in range(1,100):\n\telem.send_keys(name[i])\n\ttime.sleep(0.1)\n\telem2.send_keys(name[i]+str(random.randint(1,101))+'gmail.com')\n\ttime.sleep(0.1)\n\telem22.send_keys(random.randint(9339827257,9976567459))\t\n\ttime.sleep(0.2)\t\n\telem3.click()\n\n" }, { "alpha_fraction": 0.7292817831039429, "alphanum_fraction": 0.7348066568374634, "avg_line_length": 24.14285659790039, "blob_id": "5594c993125625d881cec8e17abc14462c4baf9e", 
"content_id": "ed1c9946bc8adffcaa621e4efa2f7e30154108bb", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 181, "license_type": "permissive", "max_line_length": 64, "num_lines": 7, "path": "/instantanswers.py", "repo_name": "arjunfzk/Innovate_AI_workshop", "src_encoding": "UTF-8", "text": "import requests\r\nimport json\r\nx=\"http://api.duckduckgo.com/?q=chandigarh&format=json&pretty=1\"\r\nr=requests.get(x)\r\njson_data=r.json()\r\nprint(json_data['Abstract'])\r\nprint(json_data)" }, { "alpha_fraction": 0.7971311211585999, "alphanum_fraction": 0.8053278923034668, "avg_line_length": 43.3636360168457, "blob_id": "8a038477a5e0154065c32c207162ef3152919cf4", "content_id": "c39eb1b01d42c783f029f4ec44037e27b446d6c5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 488, "license_type": "permissive", "max_line_length": 94, "num_lines": 11, "path": "/object detection/image1.py", "repo_name": "arjunfzk/Innovate_AI_workshop", "src_encoding": "UTF-8", "text": "from imageai.Prediction import ImagePrediction\nimport os\nexecution_path = os.getcwd()\nprint(execution_path)\nprediction = ImagePrediction()\nprediction.setModelTypeAsResNet()\nprediction.setModelPath(execution_path+\"/resnet50_weights_tf_dim_ordering_tf_kernels.h5\")\nprediction.loadModel()\npredictions, percentage_probabilities = prediction.predictImage(\"sample.jpeg\", result_count=5)\nfor index in range(len(predictions)):\n\tprint(predictions[index] , \" : \" , percentage_probabilities[index])\n" } ]
5
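In eye_detection/eye[windows].py above, pyautogui.press("space") fires on every frame in which fewer than two eyes are detected, so a single blink can emit a burst of key presses. A minimal cooldown sketch; the one-second interval is an assumption, not something from the repo:

```python
import time
import pyautogui

COOLDOWN = 1.0   # assumed minimum seconds between presses; tune as needed
last_press = 0.0

def press_once():
    """Press space at most once per COOLDOWN window."""
    global last_press
    now = time.time()
    if now - last_press >= COOLDOWN:
        pyautogui.press("space")
        last_press = now
```

Calling press_once() where the loop currently calls pyautogui.press directly keeps one blink from toggling playback several times.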
kingking888/douyin_new
https://github.com/kingking888/douyin_new
f67b97c9342b3ba0f05cdc16512f494077f41683
214e9bdea92bd9c8b31f98a6f31dadd595f10f54
3f151cf425c023c14ac6b02d6b37e1009bdd5a0b
refs/heads/master
2021-05-20T08:24:01.090721
2020-03-23T03:56:15
2020-03-23T03:56:15
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5166826248168945, "alphanum_fraction": 0.5290564894676208, "avg_line_length": 34.28385543823242, "blob_id": "7b947d5f106cd41243edce7b6fb2b1551462dc20", "content_id": "30ec699d12265cde95670fee5f46e9ad2428a911", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 14457, "license_type": "no_license", "max_line_length": 138, "num_lines": 384, "path": "/douyin_data.py", "repo_name": "kingking888/douyin_new", "src_encoding": "UTF-8", "text": "#按时更新id 开启mitmdump监听 开启按键 入库 全部数据入库 上传\nimport douyin_mongo\nimport datetime\nimport requests\nimport json\nimport time\nimport re\ndb = douyin_mongo.douyin_mongo()\n\ndef reptile_t():\n now = int(time.time()) # 1533952277\n timeArray = time.localtime(now)\n otherStyleTime = time.strftime(\"%Y-%m-%d %H:%M:%S\", timeArray)\n return otherStyleTime\n\ndef reptile_d():\n now = int(time.time()) # 1533952277\n timeArray = time.localtime(now)\n otherStyleTime = time.strftime(\"%Y-%m-%d\", timeArray)\n return otherStyleTime\n\ndef write_json(data):\n with open(r\"E:\\douyin_day\\{0}抖音数据.txt\".format(reptile_d()),\"w\",encoding=\"utf-8\") as f:\n f.write(data)\ndef down_txt(data):\n with open(\"down_txt.txt\",\"a\",encoding=\"utf-8\") as f:\n data = str(reptile_t()) + \"账户数据下载数量down:1\" + \",%s\"%data + \"\\n\"\n f.write(data)\n\ndef down_error(data):\n with open(\"down_error.txt\", \"a\", encoding=\"utf-8\") as f:\n data = str(reptile_t()) + \"账户数据没值\" + \",%s\" % data + \"\\n\"\n f.write(data)\n\n# 获取前1天或N天的日期,beforeOfDay=1:前1天;beforeOfDay=N:前N天\ndef getDate(beforeOfDay):\n today = datetime.datetime.now()\n # 计算偏移量\n offset = datetime.timedelta(days=-beforeOfDay)\n # 获取想要的日期的时间\n re_date = (today + offset).strftime('%Y-%m-%d') # %Y-%m-%d %H:%M:%S\n\n return re_date\ndef reptile_d():\n now = int(time.time()) # 1533952277\n timeArray = time.localtime(now)\n otherStyleTime = time.strftime(\"%Y-%m-%d\", timeArray)\n return otherStyleTime\n\ndef get_data():\n date = datetime.datetime.now()\n strDate = date.strftime(\"%Y-%m-%d\")\n print(\"时间\",strDate)\n\n url = \"http://218.241.201.165:30001/Logic/AccountInfoForWeb.ashx?FromDate=%s&ToDate=%s&PlatformID=5&AuditStatus=5\"%(strDate,strDate)\n try:\n response = requests.get(url).text\n except Exception as e:\n print(e)\n response = requests.get(url).text\n data_list = json.loads(response)[\"data\"]\n i = 0\n for data in data_list:\n if data[\"ThirdPartyPlatformName\"] == \"抖音\":\n print(data)\n id = str(data[\"ID\"]).strip()\n accountname = str(data[\"AccountName\"].strip())\n accountid = str(data[\"AccountID\"].strip())\n accountplatform = str(data[\"AccountPlatform\"].strip())\n thirdpartyplatformname = str(data[\"ThirdPartyPlatformName\"].strip())\n medianame = str(data[\"MediaName\"]).strip()\n\n if accountid:\n db.save_douyin_urls(strDate,id,accountname,accountid,accountplatform,thirdpartyplatformname,medianame)\n i += 1\n print(i)\n#日志txt\ndef log_txt(message):\n with open(\"log_txt.txt\",\"a\",encoding=\"utf-8\") as f:\n data = str(message) + \"\\n\"\n f.write(data)\n\ndef log_error_txt(message):\n with open(\"log_error_txt.txt\", \"a\", encoding=\"utf-8\") as f:\n data = str(message) + \"\\n\"\n f.write(data)\n#上传抖音账户数据\ndef push_user_data(strDate):\n # date = datetime.datetime.now()\n # strDate = date.strftime(\"%Y-%m-%d\")\n # strDate = \"2019-10-08\"\n id_count = db.get_douyin_count(strDate)\n # id的总数\n dataCount = id_count\n print(\"id数量:\",dataCount)\n rows = db.get_user_data(strDate)\n rows_list = []\n for row in rows:\n 
rows_list.append(row)\n #userdata 数据总数\n userdata_count = len(rows_list)\n print(\"用户数据数量\",userdata_count)\n data_list = []\n for i in rows_list:\n print(i)\n AccountName = i[\"douyin_unique_id\"] if i['douyin_id'] == \"0\" else i['douyin_id']\n PlatformName = \"抖音\"\n TPAccountName = i[\"nickname\"]\n TPAccountUrl = i[\"share_url\"] if \"share_url\" in i.keys() else \"\"\n TPAccountFollowersCount = i[\"followers_count\"] #粉丝数\n TPAccountPlayCount = -2\n TPAccountLikeCount = i[\"total_favorited\"]\n TPAccountPostCount = i[\"aweme_count\"] #作品数\n TPAccountRepostCount = -2\n TPAccountCommentCount = -2\n LastUpdateTime = strDate\n AuthenticationInfo = i[\"verify\"] #认证信息\n IntroductionInfo = (i[\"signature\"]).replace('\"','') #简介\n CreateTime = strDate\n RemarkInfo = \"\"\n DataPeriodID = \"1\"\n IsFailure = \"0\"\n ID = i[\"id\"]\n TPAccountFavorite = i[\"like_count\"] #喜欢数\n TPAccountFollowing = i[\"following_count\"] #关注数\n TPAccountAction = i[\"dongtai_count\"]\n dict_data = {\n \"AccountName\":AccountName,\n \"PlatformName\":PlatformName,\n \"TPAccountName\" : TPAccountName,\n \"TPAccountUrl\" : TPAccountUrl,\n \"TPAccountFollowersCount\" : TPAccountFollowersCount, # 粉丝数\n \"TPAccountPlayCount\" : TPAccountPlayCount,\n \"TPAccountLikeCount\" : TPAccountLikeCount,\n \"TPAccountPostCount\" : TPAccountPostCount, # 作品数\n \"TPAccountRepostCount\" : TPAccountRepostCount,\n \"TPAccountCommentCount\" : TPAccountCommentCount,\n \"LastUpdateTime\" : LastUpdateTime,\n \"AuthenticationInfo\" : AuthenticationInfo, # 认证信息\n \"IntroductionInfo\" : IntroductionInfo, # 简介\n \"CreateTime\" : CreateTime,\n \"RemarkInfo\" : RemarkInfo,\n \"DataPeriodID\" : DataPeriodID,\n \"IsFailure\" : IsFailure,\n \"ID\":ID,\n \"TPAccountFavorite\" : TPAccountFavorite, # 喜欢数\n \"TPAccountFollowing\" : TPAccountFollowing, # 关注数\n \"TPAccountAction\" : TPAccountAction\n }\n if dict_data not in data_list:\n data_list.append(dict_data)\n\n\n\n\n print(\"去重后\",len(data_list))\n #上传接口\n serverUrl = \"http://218.241.201.165:30001/Logic/MetaDataFromWeb.ashx\"\n exceptionCount = 0\n data_json = json.dumps({'userid': 19, 'datatype': 5, 'dataCount': dataCount,\n 'exceptionCount': exceptionCount,'data':data_list}, ensure_ascii=False)\n headers = {'Content-type': 'application/json'}\n\n try:\n response = requests.post(serverUrl, json=data_json, headers=headers)\n print(\"************************************************************\")\n print(response)\n print(response.text)\n dictinfo = json.loads(response.text)\n if dictinfo and dictinfo.get('code') == 0:\n print('上传成功%d条抖音账号数据,其中全部记录%d条,采集记录%d条,异常账号记录%d条' % (userdata_count,dataCount, userdata_count, exceptionCount))\n date_data = reptile_t()\n message = date_data + '上传成功%d条抖音账号数据,其中全部记录%d条,采集记录%d条,异常账号记录%d条' % (userdata_count,dataCount, userdata_count, exceptionCount)\n message1 = date_data + str(response.text)\n log_txt(message)\n log_txt(message1)\n\n\n else:\n print('上传抖音账号数据失败:%s' % response.text)\n date_data = reptile_t()\n message = date_data + '上传抖音账号数据失败:%s' % response.text\n log_txt(message)\n\n\n except Exception as ex:\n print(ex)\n date_data = reptile_t()\n message = date_data + \"抖音账号数据\" + str(ex)\n log_error_txt(message)\n#上传内容数据\ndef push_content_data(strDate):\n # date = datetime.datetime.now()\n # strDate = date.strftime(\"%Y-%m-%d\")\n # strDate = \"2019-10-08\"\n content_count = db.get_content_count(strDate)\n # 作品的总数\n dataCount = content_count\n print(\"作品数量:\", dataCount)\n rows = db.get_content_data(strDate)\n rows_list = []\n for row in rows:\n 
rows_list.append(row)\n # userdata 数据总数\n userdata_count = len(rows_list)\n print(\"作品数据数量\", userdata_count)\n data_list = []\n for j,i in enumerate(rows_list):\n # print(i)\n ID = i[\"id\"]\n if ID != 0:\n ArticleType = \"视频\"\n #做替换 a.replace('\"','')\n i[\"desc\"] = (i[\"desc\"].replace(\"~\", \"\")).replace('\"','')\n Title = i[\"desc\"]\n ReadNum = -2\n CommentNum = i[\"comment_count\"]\n PlayNum = i[\"play_count\"]\n LikeNum = i[\"digg_count\"]\n ArticleCreateTime = i[\"create_time\"]\n # 判断时间 发布时间是前一天的\n pattern = re.compile('(.*-.*-.*) ')\n yesterday_o = pattern.findall(ArticleCreateTime)[0]\n #只上传昨天的数据\n if yesterday_o == getDate(1):\n UpdateTime = strDate\n IsFailure = \"0\"\n RemarkInfo = \"\"\n ArticleText = \"\"\n ArticleUrl = i[\"share_url\"]\n VoiceLength = i[\"duration\"]\n Comments = \"\"\n ArticleID = i[\"aweme_id\"]\n ShareNum = i[\"share_count\"]\n DownLoadNum = i[\"download_count\"]\n\n dict_data = {\n \"ID\": ID,\n \"ArticleType\": ArticleType,\n \"Title\": Title,\n \"ReadNum\":ReadNum,\n \"CommentNum\": CommentNum,\n \"PlayNum\": PlayNum,\n \"LikeNum\": LikeNum,\n \"ArticleCreateTime\": ArticleCreateTime,\n \"UpdateTime\": UpdateTime,\n \"IsFailure\": IsFailure,\n \"RemarkInfo\": RemarkInfo,\n \"ArticleText\": ArticleText,\n \"ArticleUrl\": ArticleUrl,\n \"VoiceLength\": VoiceLength,\n \"Comments\": Comments,\n \"ArticleID\": ArticleID,\n \"ShareNum\": ShareNum,\n \"DownLoadNum\":DownLoadNum\n }\n print(dict_data)\n #存入每日视频的url\n video_url = dict_data[\"ArticleUrl\"].strip(\"https://\")\n db.save_video_url(dict_data[\"ID\"],video_url,dict_data[\"ArticleCreateTime\"],dict_data[\"ArticleID\"],dict_data[\"UpdateTime\"])\n data_list.append(dict_data)\n # if dict_data not in data_list:\n # print(j, dict_data)\n # data_list.append(dict_data)\n print(len(data_list))\n # 去重\n\n\n\n\n\n\n\n yesterday_count = len(data_list)\n print(yesterday_count)\n # 上传接口\n serverUrl = \"http://218.241.201.165:30001/Logic/MetaDataFromWeb.ashx\"\n exceptionCount = 0\n data_json = json.dumps({'userid': 19, 'datatype': 51, 'dataCount': dataCount,\n 'exceptionCount': exceptionCount, 'data': data_list}, ensure_ascii=False)\n write_json(data_json)\n headers = {'Content-type': 'application/json'}\n # print('上传成功%d条抖音作品数据,其中全部记录%d条,采集记录%d条,异常账号记录%d条' % (\n # dataCount,dataCount,dataCount,exceptionCount))\n\n try:\n response = requests.post(serverUrl, json=data_json, headers=headers)\n print(response)\n print(response.text)\n dictinfo = json.loads(response.text)\n if dictinfo and dictinfo.get('code') == 0:\n print('上传成功%d条抖音作品数据,其中全部记录%d条,采集记录%d条,异常账号记录%d条' % (\n yesterday_count, yesterday_count, yesterday_count, exceptionCount))\n date_data = reptile_t()\n message = date_data + '上传成功%d条抖音作品数据,其中全部记录%d条,采集记录%d条,异常账号记录%d条' % (\n yesterday_count, yesterday_count, yesterday_count, exceptionCount)\n message1 = date_data + str(response.text)\n log_txt(message)\n log_txt(message1)\n\n\n\n else:\n print('上传抖音作品数据失败:%s' % response.text)\n date_data = reptile_t()\n message = date_data + '上传抖音作品数据失败:%s' % response.text\n log_txt(message)\n\n\n except Exception as ex:\n print(ex)\n\n date_data = reptile_t()\n print(ex)\n message = date_data + \"抖音内容数据\" +str(ex)\n log_error_txt(message)\n\n\n\n\nif __name__ == '__main__':\n\n while True:\n date = datetime.datetime.now()\n\n strDate = date.strftime(\"%Y-%m-%d\")\n strtime = date.strftime(\"%Y-%m-%d %H:%M:%S\")\n time.sleep(10)\n print(strtime + \"正常循环\")\n #00:07:30 \"%H:%M:%S\"\n if date.strftime(\"%H:%M\") == \"00:01\":\n print(\"等待1分中\")\n time.sleep(60)\n 
get_data()\n\n\n\n\n elif date.strftime(\"%H:%M\") == \"10:03\": # %H:%M:%S\n # strDate = \"2020-02-13\"\n print(\"&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&\")\n time.sleep(60)\n\n result = db.get_douyinid_down(strDate)\n time.sleep(60)\n if result > 600:\n print(\"***************************************\")\n #将账户数据的数量写入 down_txt(str(result))\n\n print(result)\n\n rows = db.get_douyinid_notdown(strDate)\n list_rows = []\n for row in rows:\n list_rows.append(row)\n print(len(list_rows))\n for i, row in enumerate(list_rows):\n nickname = row[\"accountname\"]\n douyin_id = row[\"accountid\"]\n id = row[\"id\"]\n # reptile_time = reptile_t()\n # reptile_date = reptile_d()\n reptile_time = \"\"\n reptile_date = strDate\n print(row)\n # break\n\n #\n message_one = id + \" \" + nickname + \" \" + douyin_id\n down_error(message_one)\n db.save_noexist_userData(id, nickname, douyin_id, \"\", \"\", \"\",\n -1, -1, -1, -1,\n -1, -1, reptile_time, reptile_date,\"\")\n\n try:\n\n\n try:\n push_user_data(strDate)\n except Exception as e:\n push_user_data(strDate)\n push_content_data(strDate)\n except Exception as e:\n print(e)\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.4762237071990967, "alphanum_fraction": 0.4894779622554779, "avg_line_length": 44.02617645263672, "blob_id": "8d7d371c6fdf021e60a6ba23535817e62445431a", "content_id": "213a119d06ddc64f0ee8e3313834128f5a7cbb9a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8877, "license_type": "no_license", "max_line_length": 126, "num_lines": 191, "path": "/day_all_dongtai_one.py", "repo_name": "kingking888/douyin_new", "src_encoding": "UTF-8", "text": "import json\nimport time\nimport re\n\nimport douyin_mongo\ndb = douyin_mongo.douyin_mongo()\n\ndef write_date(publish_dates):\n\n with open(r\"c:\\Users\\wenxin\\Nox_share\\Other\\anjian00.txt\",\"w\",encoding=\"utf-8\") as f:\n f.write(publish_dates)\n\n\n\n#发布时间 时间戳转换\ndef publish_time(timeStamp):\n timeArray = time.localtime(timeStamp)\n date_time = time.strftime(\"%Y-%m-%d %H:%M:%S\", timeArray)\n return date_time\n\n#当前时间戳转换\ndef reptile_t():\n now = int(time.time()) # 1533952277\n timeArray = time.localtime(now)\n otherStyleTime = time.strftime(\"%Y-%m-%d %H:%M:%S\", timeArray)\n return otherStyleTime\n\n\ndef reptile_d():\n now = int(time.time()) # 1533952277\n timeArray = time.localtime(now)\n otherStyleTime = time.strftime(\"%Y-%m-%d\", timeArray)\n return otherStyleTime\n\ndef write_error(message):\n with open(\"mitmdu_error.txt\",\"a\",encoding=\"utf-8\") as f:\n f.write(message)\n\ndef write_user(message):\n with open(\"userurl.txt\", \"a\", encoding=\"utf-8\") as f:\n f.write(message)\n\ndef write_content(message):\n with open(\"contenturl.txt\", \"a\", encoding=\"utf-8\") as f:\n f.write(message)\n#必须的格式\ndef response(flow):\n #通过抓包软件获取请求的接口\n # url1 = \"https://aweme.snssdk.com/aweme/v1/user/?sec_user_id=\" #账号信息链接\n # url2 = \"https://aweme.snssdk.com/aweme/v1/aweme/post/?max_cursor=0&sec_user_id=\" #前二十条信息链接\n #https://aweme-hl.snssdk.com/aweme/v1/user/?sec_user_id 新账号\n #https://aweme-hl.snssdk.com/aweme/v1/forward/list/?user_id\n # https://aweme-hl.snssdk.com/aweme/v1/forward/list/?user_id\n # https://aweme-hl.snssdk.com/aweme/v1/user/?user_id=\n try:\n url_list = [\"https://aweme-hl.snssdk.com/aweme/v1/user/?sec_user_id\",\n \"https://aweme-eagle-hl.snssdk.com/aweme/v1/user/?sec_user_id=\",\n \"https://aweme-hl.snssdk.com/aweme/v1/forward/list/?user_id\"\n ]\n # for i,url in 
enumerate(url_list):\n # if i == 0:\n\n if \"/aweme/v1/user/\" in flow.request.url:\n\n #数据的解析\n # with open(\"user.txt\", \"a\",encoding=\"utf-8\") as f:\n print(flow.response.text)\n data = json.loads(flow.response.text)\n nickname = data[\"user\"][\"nickname\"]#分享id\n douyin_id = data[\"user\"][\"short_id\"]#抖音号 short_id 或unique_id\n douyin_unique_id = data[\"user\"][\"unique_id\"]\n verify = data[\"user\"][\"enterprise_verify_reason\"]\n signature = data[\"user\"][\"signature\"]\n total_favorited = data[\"user\"][\"total_favorited\"] #获赞数\n following_count = data[\"user\"][\"following_count\"] # 关注数\n followers_count = data[\"user\"][\"mplatform_followers_count\"] # 粉丝数\n aweme_count = data[\"user\"][\"aweme_count\"] # 作品数\n dongtai_count = data[\"user\"][\"dongtai_count\"] # 动态数\n like_count = data[\"user\"][\"favoriting_count\"] # 喜欢数\n uid = data[\"user\"][\"uid\"]\n share_url = data[\"user\"][\"share_info\"][\"share_url\"]\n reptile_time = reptile_t()\n reptile_date = reptile_d()\n # print(\"账号信息:\",douyin_info)\n #nickname,douyin_id,douyin_unique_id,verify,signature,\n # total_favorited,following_count,followers_count,aweme_count,dongtai_count,\n # like_count,reptile_time,reptile_date\n print(\"***************************************************\")\n print(nickname,douyin_id,douyin_unique_id,verify,signature,\n total_favorited,following_count,followers_count,aweme_count,dongtai_count,\n like_count,reptile_time,reptile_date,share_url)\n message = {\n \"nickname\":nickname,\n \"userurl\":flow.request.url,\n \"douyin_id\":douyin_id,\n \"douyin_unique_id\": douyin_unique_id,\n \"uid\": uid\n\n }\n #获取账号链接\n # message = str(message) + \"\\n\"\n # write_user(message)\n\n # n = [nickname, douyin_id, douyin_unique_id, verify, signature,\n # total_favorited, following_count, followers_count, aweme_count, dongtai_count,\n # like_count, reptile_time, reptile_date]\n # with open(\"ww.txt\",\"a\",encoding=\"utf-8\") as f:\n # data = str(n) + \"\\n\"\n # f.write(data)\n #save_userData\n\n\n if nickname:\n result = db.save_userData(nickname,douyin_id,douyin_unique_id,verify,signature,\n total_favorited,following_count,followers_count,aweme_count,dongtai_count,\n like_count,reptile_time,reptile_date,share_url)\n if result:\n db.updateurl(nickname,douyin_id,douyin_unique_id,reptile_date)\n\n\n\n elif flow.request.url.startswith(url_list[2]):\n # 数据的解析\n # with open(\"production.txt\", \"a\", encoding=\"utf-8\") as f:\n #获取动态链接\n # message = str(flow.request.url) + \"\\n\"\n # write_content(message)\n #获取时间\n print(\"1111111111111111111111111111111111111111111111111111111111111111111\")\n print(\"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\")\n print(\"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\")\n print(\"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\")\n print(\"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\")\n print(flow.response.text)\n Dongtai_List = json.loads(flow.response.text)[\"dongtai_list\"]\n pattern = re.compile('(.*-.*-.*) ')\n publish_times = publish_time(Dongtai_List[-1][\"aweme\"][\"create_time\"])\n publish_dates = pattern.findall(publish_times)[0]\n print(publish_dates)\n write_date(publish_dates)\n\n\n\n for user in Dongtai_List:\n nickname = user[\"aweme\"][\"author\"][\"nickname\"] # 昵称\n desc = user[\"aweme\"][\"desc\"] # 描述\n aweme_id = user[\"aweme\"][\"aweme_id\"]\n create_time = publish_time(user[\"aweme\"][\"create_time\"])\n try:\n signature = 
(user[\"aweme\"][\"author\"][\"signature\"]).replace(\"🧡\",\"\").replace(\"�\",\"\").replace(\"�\",\"\") # 官方描述\n except Exception as e:\n signature = \"\"\n print(\"signature$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$🥰\")\n print(signature)\n # try:\n # short_id = user[\"aweme\"][\"author\"][\"short_id\"]\n # except Exception as e:\n # short_id = \"\"\n # print(\"short_id\",short_id)\n comment_count = user[\"aweme\"][\"statistics\"][\"comment_count\"] # 评论数\n digg_count = user[\"aweme\"][\"statistics\"][\"digg_count\"] # 点赞数\n download_count = user[\"aweme\"][\"statistics\"][\"download_count\"] # 下载数\n play_count = user[\"aweme\"][\"statistics\"][\"play_count\"] # 不知\n share_count = user[\"aweme\"][\"statistics\"][\"share_count\"] # 分享数\n share_url = user[\"aweme\"][\"share_info\"][\"share_url\"]\n # 时长\n try:\n duration = user[\"aweme\"][\"music\"][\"duration\"]\n except Exception as e:\n duration = 0\n\n reptile_time = reptile_t()\n reptile_date = reptile_d()\n print(\"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\")\n print(\"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\")\n print(\"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\")\n print(\"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\")\n print(\"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\")\n print(\"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\")\n print(\"%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\")\n\n # print(nickname,desc,aweme_id,create_time,signature,comment_count,digg_count,\n # download_count,play_count,share_count,duration,share_url,reptile_time,reptile_date)\n if nickname:\n db.sava_Content(nickname,desc,aweme_id,create_time,signature,comment_count,digg_count,\n download_count,play_count,share_count,duration,share_url,reptile_time,reptile_date)#,short_id\n\n\n except Exception as e:\n message = str(reptile_t()) + \"出现错误\" + str(e) + \"\\n\"\n write_error(message)\n\n" }, { "alpha_fraction": 0.4966740608215332, "alphanum_fraction": 0.532802939414978, "avg_line_length": 36.57843017578125, "blob_id": "4a220c465c2f37b75e42b7abcd892b6ba86309ea", "content_id": "c1c841be15be5e9cc943d1e24d0fe7d9ec297d87", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8167, "license_type": "no_license", "max_line_length": 427, "num_lines": 204, "path": "/write_excel.py", "repo_name": "kingking888/douyin_new", "src_encoding": "UTF-8", "text": "# 数据到导出\n#coding:utf-8\nimport os\nimport xlrd\nimport xlwt\nimport re\nimport douyin_mongo\ndb = douyin_mongo.douyin_mongo()\nexportExcelPath = r\"E:\\pc_project\\douyin_new\"\n\ndef WriteSheetRow(sheet, rowValueList, rowIndex, isBold):\n i = 0\n style = xlwt.easyxf('font: bold 1')\n # style = xlwt.easyxf('font: bold 0, color red;')#红色字体\n style2 = xlwt.easyxf('pattern: pattern solid, fore_colour yellow; font: bold on;') # 设置Excel单元格的背景色为黄色,字体为粗体\n for svalue in rowValueList:\n if isBold:\n sheet.write(rowIndex, i, svalue, style2)\n else:\n sheet.write(rowIndex, i, svalue)\n i = i + 1\ndef export(exportExcelPath):\n fileName = \"抖音账号补采数据\" + \".xls\"\n fileName = os.path.join(exportExcelPath, fileName)\n result = False\n dict_id = {\"央视国家记忆\": \"guojiajiyi\", \"CCTV4\": \"902971025\", \"CCTV中华医药\": \"1980133383\", \"CCTV中国缘\": \"cctvwgrzzg\", \"CCTV4《记住乡愁》\": \"cctvjzxc\", \"央视一套\": \"helloCCTV1\", \"央视新闻\": \"cctvnews\", \"央视社会与法\": \"1115087566\", \"警察特训营\": \"876728104\", \"CCTV法律讲堂\": \"cctv12fljt\", 
\"CCTV-12方方圆圆\": \"CCTV12_666\", \"CCTV热线12\": \"1058272902\", \"CCTV热话\": \"1838202415\", \"CCTV12融屏剧阵\": \"1535538212\", \"法讲生活\": \"cctvfljtlife\", \"道道侠\": \"1714686323\", \"央视夜线\": \"CCTV_YEXIAN\"}\n\n if not os.path.exists(fileName):\n\n android_headList = ['title', 'video_id', 'publish_time','user_name','comment_count','download_count','like_count','share_count','video_duration','share_url','reptile_time']\n\n android_row_list = []\n strDate = \"2020-02-27\"\n rows = db.get_content_data(strDate)\n num_list_one = [\"01\", \"02\", \"03\",\"04\"]\n num_list_two = [\"01\",\"02\",\"03\",\"04\",\"05\"]\n # data_dict = {\n # \"nickname\": nickname,\n # \"desc\": desc,\n # \"aweme_id\": aweme_id,\n # \"create_time\": create_time,\n # \"signature\": signature,\n # \"comment_count\": comment_count,\n # \"digg_count\": digg_count,\n # \"download_count\": download_count,\n # \"play_count\": play_count,\n # \"share_count\": share_count,\n # \"duration\": duration,\n # \"reptile_time\": reptile_time,\n # \"reptile_date\": reptile_date\n # }\n for r in rows:\n\n col_list = []\n\n ab = r[\"create_time\"]\n nm = re.compile('(.*)-(.*)-(.*) .*')\n num = nm.findall(ab)[0]\n print(num[0])\n print(num[1])\n print(num[2])\n\n # if num[0] == \"2019\" and num[1] == \"11\":\n # if (num[2]) in num_list:\n\n if num[0] == \"2020\":\n if num[1] == \"01\" and int(num[2]) > 19:\n col_list.append(r[\"desc\"])\n col_list.append(r[\"aweme_id\"])\n col_list.append(r[\"create_time\"])\n col_list.append(r[\"nickname\"])\n\n\n\n\n\n # col_list.append(strDate)'video_duration','share_url','tag','platform','reptile_time\n\n col_list.append(r[\"comment_count\"])\n col_list.append(r[\"download_count\"])\n col_list.append(r[\"digg_count\"])\n\n col_list.append(r[\"share_count\"])\n\n col_list.append(r[\"duration\"])\n col_list.append(r[\"share_url\"])\n col_list.append(r[\"reptile_time\"])\n\n\n\n android_row_list.append(col_list)\n elif num[1] == \"02\" and int(num[2]) < 21:\n col_list.append(r[\"desc\"])\n col_list.append(r[\"aweme_id\"])\n col_list.append(r[\"create_time\"])\n col_list.append(r[\"nickname\"])\n\n # col_list.append(strDate)'video_duration','share_url','tag','platform','reptile_time\n\n col_list.append(r[\"comment_count\"])\n col_list.append(r[\"download_count\"])\n col_list.append(r[\"digg_count\"])\n\n col_list.append(r[\"share_count\"])\n\n col_list.append(r[\"duration\"])\n col_list.append(r[\"share_url\"])\n col_list.append(r[\"reptile_time\"])\n\n\n\n android_row_list.append(col_list)\n print(len(android_row_list))\n\n wbk = xlwt.Workbook()\n sheet1 = wbk.add_sheet('抖音数据', cell_overwrite_ok=True)\n\n rowIndex = 0\n WriteSheetRow(sheet1, android_headList, rowIndex, True)\n for lst in android_row_list:\n rowIndex += 1\n WriteSheetRow(sheet1, lst, rowIndex, False)\n wbk.save(fileName)\n result = True\n # return result\n\ndef export_user(exportExcelPath):\n fileName = \"抖音账号补采账号数据\" + \".xls\"\n fileName = os.path.join(exportExcelPath, fileName)\n result = False\n dict_id = {\"央视国家记忆\": \"guojiajiyi\", \"CCTV4\": \"902971025\", \"CCTV中华医药\": \"1980133383\", \"CCTV中国缘\": \"cctvwgrzzg\", \"CCTV4《记住乡愁》\": \"cctvjzxc\", \"央视一套\": \"helloCCTV1\", \"央视新闻\": \"cctvnews\", \"央视社会与法\": \"1115087566\", \"警察特训营\": \"876728104\", \"法律讲堂\": \"cctv12fljt\", \"CCTV-12方方圆圆\": \"CCTV12_666\", \"CCTV热线12\": \"1058272902\", \"CCTV热话\": \"1838202415\", \"CCTV12融屏剧阵\": \"1535538212\", \"法讲生活\": \"cctvfljtlife\", \"道道侠\": \"1714686323\", \"央视夜线\": \"CCTV_YEXIAN\"}\n\n if not os.path.exists(fileName):\n \"\"\"\n nickname = 
data[\"user\"][\"nickname\"]#分享id\n douyin_id = data[\"user\"][\"short_id\"]#抖音号 short_id 或unique_id\n douyin_unique_id = data[\"user\"][\"unique_id\"]\n verify = data[\"user\"][\"enterprise_verify_reason\"]\n signature = data[\"user\"][\"signature\"]\n total_favorited = data[\"user\"][\"total_favorited\"] #获赞数\n following_count = data[\"user\"][\"following_count\"] # 关注数\n followers_count = data[\"user\"][\"mplatform_followers_count\"] # 粉丝数\n aweme_count = data[\"user\"][\"aweme_count\"] # 作品数\n dongtai_count = data[\"user\"][\"dongtai_count\"] # 动态数\n like_count = data[\"user\"][\"favoriting_count\"] # 喜欢数\n \"\"\"\n\n android_headList = ['账号名称','认证', '简介','获赞数','关注数', '粉丝数', '作品数','喜欢数','爬取时间']\n\n android_row_list = []\n strDate = \"2020-02-27\"\n rows = db.get_user_data(strDate)\n # with open(r\"E:\\pc_project\\douyin_guanjian_bu\\guanjian_bu.txt\",\"r\",encoding=\"utf-8\") as f:\n # reads = f.readlines()\n for r in rows:\n # r = eval(i)\n\n\n\n\n\n col_list = []\n # ArticleCreateTime = r[\"create_time\"]\n # pattern = re.compile('.*-(.*)-.* ')\n # yesterday_o = pattern.findall(ArticleCreateTime)[0]\n # print(yesterday_o)\n # if yesterday_o in [\"07\", \"08\", \"09\"]:\n # print(\"来了\", r)\n\n col_list.append(r[\"nickname\"])\n #col_list.append(dict_id[r[\"nickname\"]])\n col_list.append(r[\"verify\"])\n col_list.append(r[\"signature\"])\n\n # col_list.append(strDate)\n\n col_list.append(r[\"total_favorited\"])\n col_list.append(r[\"following_count\"])\n col_list.append(r[\"followers_count\"])\n col_list.append(r[\"aweme_count\"])\n\n col_list.append(r[\"like_count\"])\n col_list.append(r[\"reptile_time\"])\n\n\n android_row_list.append(col_list)\n print(len(android_row_list))\n\n wbk = xlwt.Workbook()\n sheet1 = wbk.add_sheet('抖音账号数据', cell_overwrite_ok=True)\n\n rowIndex = 0\n WriteSheetRow(sheet1, android_headList, rowIndex, True)\n for lst in android_row_list:\n rowIndex += 1\n WriteSheetRow(sheet1, lst, rowIndex, False)\n wbk.save(fileName)\n result = True\n return result\n#生成作品数据excel\nexport(exportExcelPath)\n\n#生成账号数据excel\n# export_user(exportExcelPath)\n\n" }, { "alpha_fraction": 0.5190551280975342, "alphanum_fraction": 0.5297102332115173, "avg_line_length": 45.390052795410156, "blob_id": "acbb1fe4643b5ad43e30c545a9ef4379f5fe32bb", "content_id": "a7f5432a3ef0901ee7222a74aa7eac2f37e4caa2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 18002, "license_type": "no_license", "max_line_length": 164, "num_lines": 382, "path": "/douyin_mongo.py", "repo_name": "kingking888/douyin_new", "src_encoding": "UTF-8", "text": "#coding =utf-8\nimport os\nfrom pymongo import MongoClient\n\nimport re\n# import trsData\nimport datetime\n\n#获取当时时间\ndef get_date():\n date = datetime.datetime.now()\n strDate = date.strftime(\"%Y-%m-%d\")\n return strDate\n\nclass douyin_mongo:\n\n # 日志函数\n # def WriteLog(self,message):\n # fileName = os.path.join(os.getcwd(), \"log.txt\")\n # message = \"\\n\" + datetime.datetime.now().strftime(\"%Y-%m-%d %H:%M:%S\") + \":\" + message\n # with open(fileName, 'a') as f:\n # f.write(message)\n # def __init__(self, connstr='mongodb://127.0.0.1:27017/', mongodbName='douyin_%s'%(get_date())):\n # client = MongoClient(connstr)\n # self.db = client[mongodbName]\n #douyin_2020-02-18 #douyin_2020-02-27 # 'douyin_2020-02-18' #douyin_0121-0220\n def __init__(self, connstr='mongodb://127.0.0.1:27017/', mongodbName='douyin_2020-02-end'):\n client = MongoClient(connstr)\n self.db = client[mongodbName]\n\n def 
save_douyin_urls(self,strDate,id,accountname,accountid,accountplatform,thirdpartyplatformname,medianame):\n #count = self.db.douyinid.find({'id': id, 'date': strDate,'accountid':accountid,\"accountname\":accountname,\"medianame\":medianame}).count()\n\n count = self.db.douyinid.find({ 'date': strDate,'accountid':accountid}).count()\n if count == 0:\n self.db.douyinid.insert({'id': id, 'date': strDate,'accountid':accountid,\n \"accountname\":accountname,\"medianame\":medianame,\"accountplatform\":accountplatform,\"thirdpartyplatformname\":thirdpartyplatformname,'isdown':0,'isget':0})\n else:\n with open(\"%s_chongfu.txt\"%(get_date()),\"a\",encoding=\"utf-8\") as f:\n data = \"抖音id重复 %s\"%(accountid) + \"\\n\"\n f.write(data)\n #存入每天的vdeo_url\n def save_video_url(self,id,video_url,publish_time,video_id,reptile_time):\n count = self.db.video_url.find({'video_id': video_id, 'publish_time': publish_time}).count()\n if count == 0:\n self.db.video_url.insert({'id':id, 'video_url': video_url, 'publish_time': publish_time,\n \"video_id\": video_id, \"reptile_time\": reptile_time})\n\n\n #获取抖音原始数据的数量\n def get_douyin_count(self,strDate):\n id_count = self.db.douyinid.count({\"date\": strDate})\n return id_count\n #已经请求的数量\n def get_user_count(self):\n user_count = self.db.user_data.count({\"isget\": 1})\n return user_count\n\n #获取content数量\n def get_content_count(self,strDate):\n content_count = self.db.content_data.count({\"reptile_date\": strDate})\n return content_count\n\n\n\n\n #获取user库里的账号数据\n def get_user_data(self,strDate):\n where = {\"reptile_date\": strDate,\"isdata\":1}\n rows = self.db.user_data.find(where, {\"id\": 1, \"nickname\": 1, \"douyin_id\": 1, \"douyin_unique_id\": 1,\n \"verify\": 1, \"signature\": 1, \"total_favorited\": 1,\n \"following_count\": 1, \"followers_count\": 1,\n \"aweme_count\": 1,\n \"dongtai_count\": 1, \"like_count\": 1, \"reptile_time\": 1,\n \"reptile_date\": 1,\"isdata\":1,\"share_url\":1})\n return rows\n\n def get_content_data(self,strDate):\n where = {\"reptile_date\": strDate}\n rows = self.db.content_data.find(where, {\"id\":1,\"nickname\":1,\"desc\":1,\"aweme_id\":1,\"create_time\":1,\n \"signature\":1,\"comment_count\":1,\"digg_count\":1,\"share_url\":1,\n \"download_count\":1,\"play_count\":1,\"share_count\":1,\"duration\":1,\n \"reptile_time\":1,\"reptile_date\":1})\n return rows\n\n\n\n #获取库里的抖音id isget:0\n def get_douyin_id(self,strDate):\n where = {\"isget\":0,\"date\":strDate}\n rows = self.db.douyinid.find(where,{\"accountid\":1,\"id\":1})\n return rows\n\n\n # 获取库里的抖音id isget:0\n # 已经请求的数量\n\n def get_douyinid_down(self,strDate):\n user_count = self.db.douyinid.count({\"isdown\": 1,\"date\":strDate})\n return user_count\n\n def get_douyinid_notdown(self, strDate):\n where = {\"isdown\": 0, \"date\": strDate}\n rows = self.db.douyinid.find(where, {\"accountid\": 1,\"accountname\":1, \"id\": 1,\"_id\":0})\n return rows\n\n #更新id 只要获取了就标识\n def updateid(self,douyin_id):\n mdict = {'isget': 1}\n\n self.db.douyinid.update({\"accountid\": douyin_id}, {\"$set\": mdict})\n\n\n\n\n def updateurl(self,nickname,douyin_id,douyin_unique_id,reptile_date):\n mdict = {'isdown': 1}\n count1 = self.db.douyinid.find({\"accountid\":douyin_id,\"date\":reptile_date}).count()\n count2 = self.db.douyinid.find({\"accountid\": douyin_unique_id,\"date\":reptile_date}).count()\n count3 = self.db.douyinid.find({\"accountname\": nickname,\"date\":reptile_date}).count()\n if count1:\n 
self.db.douyinid.update({\"accountid\":douyin_id,\"date\":reptile_date},{\"$set\":mdict})\n elif count2:\n self.db.douyinid.update({\"accountid\": douyin_unique_id,\"date\":reptile_date}, {\"$set\": mdict})\n elif count3:\n self.db.douyinid.update({\"accountname\": nickname,\"date\":reptile_date}, {\"$set\": mdict})\n # else:\n # #当数据库的数据总数和链接总数一致时再推出\n #\n # message = \"%s 可能不是目标数据 抖音id 有误\" % (nickname)\n # message = \"\\n\" + datetime.datetime.now().strftime(\"%Y-%m-%d %H:%M:%S\") + \":\" + message\n # with open(\"error.txt\", \"a\", encoding=\"utf-8\") as f:\n # data = message\n # f.write(data)\n #\n\n\n\n\n\n\n def save_userData(self, nickname,douyin_id,douyin_unique_id,verify,signature,\n total_favorited,following_count,followers_count,aweme_count,dongtai_count,\n like_count,reptile_time,reptile_date,share_url):\n count = self.db.user_data.find({\"nickname\":nickname,\"douyin_id\":douyin_id,\"douyin_unique_id\":douyin_unique_id,\"reptile_date\":reptile_date}).count()\n if count == 0:\n\n\n # \"\"\"\n # where = {\"isdown\":0,\"date\":strDate}\n # rows = self.db.douyinid.find(where,{\"accountid\":1,\"id\":1})\n # return rows\n\n # \"\"\"\n # rows_o = []\n # rows_t = []\n # for row in rows_one:\n # rows_o.append(row)\n # for row in rows_two:\n # rows_t.append(row)\n # if len(rows_o)>0:\n # print(\"11\")\n # for row in rows_o:\n # print(row)\n # return 1\n # elif len(rows_t)>0:\n # print(\"22\")\n # for row in rows_t:\n # print(row)\n # return 1\n\n\n # \"\"\"\n # # \"\"\"\n #技术\n where_one = {\"accountid\": douyin_id,\"date\":reptile_date}\n rows_one = self.db.douyinid.find(where_one,{\"id\": 1})\n where_two = {\"accountid\":douyin_unique_id,\"date\":reptile_date}\n rows_two = self.db.douyinid.find(where_two,{\"id\":1})\n where_three = {\"accountname\": nickname,\"date\":reptile_date}\n rows_three = self.db.douyinid.find(where_three, {\"id\": 1})\n rows_o = []\n rows_t = []\n rows_th = []\n for row in rows_one:\n rows_o.append(row)\n for row in rows_two:\n rows_t.append(row)\n for row in rows_three:\n rows_th.append(row)\n\n if len(rows_o) > 0:\n for row in rows_o:\n id = row[\"id\"]\n self.db.user_data.insert(\n {\"id\": id, \"nickname\": nickname, \"douyin_id\": douyin_id, \"douyin_unique_id\": douyin_unique_id,\n \"verify\": verify, \"signature\": signature, \"total_favorited\": total_favorited,\n \"following_count\": following_count, \"followers_count\": followers_count,\n \"aweme_count\": aweme_count,\n \"dongtai_count\": dongtai_count, \"like_count\": like_count, \"reptile_time\": reptile_time,\n \"reptile_date\": reptile_date,\"isdata\":1,\"share_url\":share_url})\n return 1\n elif len(rows_t) > 0:\n for row in rows_t:\n id = row[\"id\"]\n self.db.user_data.insert(\n {\"id\": id, \"nickname\": nickname, \"douyin_id\": douyin_id, \"douyin_unique_id\": douyin_unique_id,\n \"verify\": verify, \"signature\": signature, \"total_favorited\": total_favorited,\n \"following_count\": following_count, \"followers_count\": followers_count,\n \"aweme_count\": aweme_count,\n \"dongtai_count\": dongtai_count, \"like_count\": like_count, \"reptile_time\": reptile_time,\n \"reptile_date\": reptile_date,\"isdata\":1,\"share_url\":share_url})\n return 1\n\n\n\n elif len(rows_th) > 0:\n for row in rows_th:\n id = row[\"id\"]\n self.db.user_data.insert(\n {\"id\": id, \"nickname\": nickname, \"douyin_id\": douyin_id, \"douyin_unique_id\": douyin_unique_id,\n \"verify\": verify, \"signature\": signature, \"total_favorited\": total_favorited,\n \"following_count\": following_count, 
\"followers_count\": followers_count,\n \"aweme_count\": aweme_count,\n \"dongtai_count\": dongtai_count, \"like_count\": like_count, \"reptile_time\": reptile_time,\n \"reptile_date\": reptile_date, \"isdata\": 1,\"share_url\":share_url})\n return 1\n else:\n self.db.user_data.insert(\n {\"id\": 0, \"nickname\": nickname, \"douyin_id\": douyin_id, \"douyin_unique_id\": douyin_unique_id,\n \"verify\": verify, \"signature\": signature, \"total_favorited\": total_favorited,\n \"following_count\": following_count, \"followers_count\": followers_count,\n \"aweme_count\": aweme_count,\n \"dongtai_count\": dongtai_count, \"like_count\": like_count, \"reptile_time\": reptile_time,\n \"reptile_date\": reptile_date, \"isdata\": 0,\"share_url\":share_url})\n\n def save_noexist_userData(self, id,nickname, douyin_id, douyin_unique_id, verify, signature,\n total_favorited, following_count, followers_count, aweme_count, dongtai_count,\n like_count, reptile_time, reptile_date,share_url):\n count = self.db.user_data.find(\n {\"nickname\": nickname, \"douyin_id\": douyin_id, \"douyin_unique_id\": douyin_unique_id,\n \"reptile_date\": reptile_date}).count()\n if count == 0:\n self.db.user_data.insert(\n {\"id\": id, \"nickname\": nickname, \"douyin_id\": douyin_id, \"douyin_unique_id\": douyin_unique_id,\n \"verify\": verify, \"signature\": signature, \"total_favorited\": total_favorited,\n \"following_count\": following_count, \"followers_count\": followers_count,\n \"aweme_count\": aweme_count,\n \"dongtai_count\": dongtai_count, \"like_count\": like_count, \"reptile_time\": reptile_time,\n \"reptile_date\": reptile_date, \"isdata\": 1,\"share_url\":share_url})\n\n\n\n\n\n\n\n\n\n\n def sava_Content(self,nickname,desc,aweme_id,create_time,signature,comment_count,digg_count,\n download_count,play_count,share_count,duration,share_url,reptile_time,reptile_date):#,short_id\n count = self.db.content_data.find(\n {\"nickname\": nickname, \"aweme_id\": aweme_id,\"reptile_date\": reptile_date}).count()\n if count == 0:\n where = {\"accountname\": nickname,\"date\":reptile_date}\n # where_one = {\"accountid\": short_id, \"date\": reptile_date}\n rows = self.db.douyinid.find(where,{\"id\": 1})\n # rows_one = self.db.douyinid.find(where_one, {\"id\": 1})\n\n ro = []\n for o in rows:\n ro.append(o)\n\n # r_list = []\n # for r in rows_one:\n # r_list.append(r)\n\n if len(ro):\n for row in ro:\n id = row[\"id\"]\n\n self.db.content_data.insert({\"id\":id,\"nickname\":nickname,\"desc\":desc,\"aweme_id\":aweme_id,\"create_time\":create_time,\n \"signature\":signature,\"comment_count\":comment_count,\"digg_count\":digg_count,\n \"download_count\":download_count,\"play_count\":play_count,\"share_count\":share_count,\"duration\":duration,\n \"share_url\":share_url,\"reptile_time\":reptile_time,\"reptile_date\":reptile_date})\n\n # elif len(r_list):\n # for row in r_list:\n # id = row[\"id\"]\n #\n # self.db.content_data.insert(\n # {\"id\": id, \"nickname\": nickname, \"desc\": desc, \"aweme_id\": aweme_id, \"create_time\": create_time,\n # \"signature\": signature, \"comment_count\": comment_count, \"digg_count\": digg_count,\n # \"download_count\": download_count, \"play_count\": play_count, \"share_count\": share_count,\n # \"duration\": duration,\n # \"share_url\": share_url, \"reptile_time\": reptile_time, \"reptile_date\": reptile_date})\n\n\n else:\n self.db.content_data.insert(\n {\"id\": 0, \"nickname\": nickname, \"desc\": desc, \"aweme_id\": aweme_id, \"create_time\": create_time,\n \"signature\": signature, 
\"comment_count\": comment_count, \"digg_count\": digg_count,\n \"download_count\": download_count, \"play_count\": play_count, \"share_count\": share_count,\n \"duration\": duration,\n \"share_url\": share_url, \"reptile_time\": reptile_time, \"reptile_date\": reptile_date}\n )\n\n\n\n def sava_video_Content(self, nickname, desc, aweme_id, create_time, signature, comment_count, digg_count,\n download_count, play_count, share_count, duration, share_url, reptile_time, reptile_date):\n count = self.db.video_content_data.find(\n {\"nickname\": nickname, \"aweme_id\": aweme_id, \"reptile_date\": reptile_date}).count()\n if count == 0:\n where = {\"accountname\": nickname, \"date\": reptile_date}\n rows = self.db.douyinid.find(where, {\"id\": 1})\n if rows:\n for row in rows:\n id = row[\"id\"]\n\n self.db.video_content_data.insert(\n {\"id\": id, \"nickname\": nickname, \"desc\": desc, \"aweme_id\": aweme_id, \"create_time\": create_time,\n \"signature\": signature, \"comment_count\": comment_count, \"digg_count\": digg_count,\n \"download_count\": download_count, \"play_count\": play_count, \"share_count\": share_count,\n \"duration\": duration,\n \"share_url\": share_url, \"reptile_time\": reptile_time, \"reptile_date\": reptile_date}\n )\n\n def sava_video_seven_Content(self, nickname, desc, aweme_id, create_time, signature, comment_count, digg_count,\n download_count, play_count, share_count, duration, share_url, reptile_time, reptile_date):\n count = self.db.detail_data.find(\n {\"nickname\": nickname, \"aweme_id\": aweme_id, \"reptile_date\": reptile_date}).count()\n if count == 0:\n where = {\"accountname\": nickname, \"date\": reptile_date}\n rows = self.db.douyinid.find(where, {\"id\": 1})\n if rows:\n for row in rows:\n id = row[\"id\"]\n\n self.db.detail_data.insert(\n {\"id\": id, \"nickname\": nickname, \"desc\": desc, \"aweme_id\": aweme_id, \"create_time\": create_time,\n \"signature\": signature, \"comment_count\": comment_count, \"digg_count\": digg_count,\n \"download_count\": download_count, \"play_count\": play_count, \"share_count\": share_count,\n \"duration\": duration,\n \"share_url\": share_url, \"reptile_time\": reptile_time, \"reptile_date\": reptile_date}\n )\n\n\n\n\n\n def getDate(self,beforeOfDay):\n today = datetime.datetime.now()\n # 计算偏移量\n offset = datetime.timedelta(days=-beforeOfDay)\n # 获取想要的日期的时间\n re_date = (today + offset).strftime('%Y-%m-%d') # %Y-%m-%d %H:%M:%S\n\n return re_date\n\n def get_qimai_Urls(self, strDate, iosOrAndroid):\n where = {'isdown': 0, \"date\": strDate}\n if iosOrAndroid:\n where[\"iosOrAndroid\"] = iosOrAndroid\n\n rows = self.db.qimaiurl08.find(where,\n {'_id': 0, 'category': 1, 'evalue_object': 1, 'id': 1, \"netname\": 1, \"neturl\": 1,\n 'iosOrAndroid': 1, 'IsFailure': 1, 'RemarkInfo': 1})\n return rows\n\n\n def getAndroidData(self,strDate):\n rows = self.db.android08.find({\"date\": strDate},{'_id':0,'id': 1, 'netname': 1, 'link':1,'app_market': 1,\n 'add_down_count': 1, 'total_down_count': 1,'ModifiedName':1,'IsFailure':1,'RemarkInfoInfo':1})\n return rows\n\n def getIOSData(self,strDate):\n rows = self.db.ios08.find({\"date\": strDate}, {'_id': 0, 'id': 1, 'netname': 1, 'link': 1,\n 'total_down_count': 1, 'ModifiedName': 1,'IsFailure':1,'RemarkInfo':1\n })\n return rows\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n" } ]
4
emreozdemir0770/pythonexceltopluislem
https://github.com/emreozdemir0770/pythonexceltopluislem
f5c11088d779871d66102b9f6afe86e7bb1bbca1
de72a4304e0de4223370c9de7c90711d1e4686fb
18b46da0c42c08a037f689df56abf5c5d8d7f009
refs/heads/master
2020-06-22T15:18:30.332905
2019-07-19T08:42:34
2019-07-19T08:42:34
197,736,102
2
0
null
null
null
null
null
[ { "alpha_fraction": 0.46101364493370056, "alphanum_fraction": 0.48830410838127136, "avg_line_length": 22.4761905670166, "blob_id": "f3bb033dfb05dfea5b371140421ce740019ffe2c", "content_id": "2c511c5429d8b08135106e2f765d27b78cdac173", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1029, "license_type": "no_license", "max_line_length": 110, "num_lines": 42, "path": "/hesap2.py", "repo_name": "emreozdemir0770/pythonexceltopluislem", "src_encoding": "UTF-8", "text": "import os\r\nimport xlrd \r\n\r\n\r\n\r\ntoplam=0\r\nisim=\"\"\r\ndeger=\"\"\r\nklasor = \"./dosya\"\r\nfor i in os.listdir(klasor):\r\n dosya = os.path.join(klasor,i)\r\n if os.path.isdir(dosya):\r\n print ('Klasör => ',i)\r\n elif os.path.isfile(dosya): \r\n loc=\"dosya/\"+i\r\n wb = xlrd.open_workbook(loc) \r\n sheet = wb.sheet_by_index(0)\r\n print(sheet.cell_value(2,1))\r\n \r\n deger=sheet.cell_value(2,1)\r\n deger2=sheet.cell_value(1,1)\r\n deger3=sheet.cell_value(2,2)\r\n deger4=sheet.cell_value(1,2)\r\n \r\n \r\n x = []\r\n x.append([loc])\r\n \r\n y=[]\r\n y.append(deger)\r\n deger=int(deger)\r\n deger2=int(deger2)\r\n deger3=int(deger3)\r\n deger4=int(deger4)\r\n \r\n print (x)\r\n isim+=\"dosya adi:\"+i+\"deger:\"+\"===\"+str(deger)+\"--\"+str(deger2)+\"--\"+str(deger3)+\"--\"+str(deger4)+\"\\n\"\r\nfile1 = open(\"dosyalar.txt\",\"w\") \r\nL = [isim] \r\nfile1.write(\"-----------------\") \r\nfile1.writelines(L) \r\nfile1.close()" }, { "alpha_fraction": 0.533088207244873, "alphanum_fraction": 0.5698529481887817, "avg_line_length": 14, "blob_id": "abd50c611858f681b4bfa75ea3f6e17897f32724", "content_id": "fc450ddac87443b4b867fb48f80ef30102ee96cb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 272, "license_type": "no_license", "max_line_length": 29, "num_lines": 16, "path": "/hesap.py", "repo_name": "emreozdemir0770/pythonexceltopluislem", "src_encoding": "UTF-8", "text": "import xlrd \r\n\r\n\r\n \r\nloc = (\"notlar.xlsx\") \r\nloc2 = (\"notlar2.xlsx\") \r\n \r\nwb = xlrd.open_workbook(loc) \r\nwb2 = xlrd.open_workbook(loc)\r\n\r\nsheet = wb.sheet_by_index(0)\r\n \r\n# For row 0 and column 0 \r\nsheet.cell_value(2,1) \r\n\r\nprint(sheet.cell_value(2,1))\r\n\r\n\r\n \r\n \r\n" } ]
2
mazent/SC635A_TEST
https://github.com/mazent/SC635A_TEST
367b6808c992d5a2c39743f754b4465448cfb63c
6c028d84167111633aaec652ec1369a4c1b276d5
9e1c75f4e6a4633c8653fe78ec98e9b472dfce3b
refs/heads/master
2020-03-25T04:39:16.436093
2018-09-19T14:00:07
2018-09-19T14:00:07
143,406,366
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5423406362533569, "alphanum_fraction": 0.5528068542480469, "avg_line_length": 21.36170196533203, "blob_id": "1e422416aef0f8016a178faee46057c8932d0cf7", "content_id": "8f201d762a520f1e8b625e4911bce716a532298c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1051, "license_type": "no_license", "max_line_length": 81, "num_lines": 47, "path": "/test/build/decifra.py", "repo_name": "mazent/SC635A_TEST", "src_encoding": "UTF-8", "text": "from __future__ import print_function\n\nimport sys\nimport cidec\nimport chiavi\n\n\"\"\"\n Verifica del file di aggiornamento\n\"\"\"\n\nclass problema(Exception):\n\n def __init__(self, msg):\n Exception.__init__(self)\n self.msg = msg\n\n def __str__(self):\n return self.msg\n\nif __name__ == '__main__':\n try:\n if len(sys.argv) != 3:\n raise problema('passare il nome del file cifrato e quello in chiaro')\n\n agg = None\n with open(sys.argv[1], 'rb') as pt:\n agg = pt.read()\n\n dim = len(agg)\n if dim < 16 + 32:\n raise problema('troppo piccolo')\n\n cifrato = agg[:dim-32]\n firma = agg[dim-32:]\n\n if not cidec.verifica(cifrato, firma, chiavi.KEY_MAC):\n raise problema('firma sbagliata')\n\n chiaro = cidec.decifra(cifrato, chiavi.KEY_CIF)\n if chiaro is None:\n raise problema('cifratura sbagliata')\n\n with open(sys.argv[2], 'wb') as pt:\n pt.write(chiaro)\n\n except problema as err:\n print(err)\n" }, { "alpha_fraction": 0.7475082874298096, "alphanum_fraction": 0.7674418687820435, "avg_line_length": 24.08333396911621, "blob_id": "691bd890e6d4ee0c0c58efc700b414329dec1fdb", "content_id": "a2289dc6f7f20144366c3645b5a6249c29cd0f76", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 301, "license_type": "no_license", "max_line_length": 76, "num_lines": 12, "path": "/README.md", "repo_name": "mazent/SC635A_TEST", "src_encoding": "UTF-8", "text": "# SC635A_TEST\n\nprogramma per la validazione delle schede\n\nContiene il sottoprogetto bsp, usare: git clone --recursive\n\nPer aggiornare bsp:\n1. git submodule update --remote bsp\n1. git commit -am 'aggiornato bsp'\n1. 
git push\n\nVedi [qui](https://www.atlassian.com/blog/git/git-submodules-workflows-tips)\n" }, { "alpha_fraction": 0.7408906817436218, "alphanum_fraction": 0.7408906817436218, "avg_line_length": 21.363636016845703, "blob_id": "bc317eb934f51e428dd0f8ad0217404d681b4ba6", "content_id": "cbde0e7c65fc1e9165e9a88080d2f647332540e0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 247, "license_type": "no_license", "max_line_length": 104, "num_lines": 11, "path": "/test/main/component.mk", "repo_name": "mazent/SC635A_TEST", "src_encoding": "UTF-8", "text": "#\n# \"main\" pseudo-component makefile.\n#\n# (Uses default behaviour of compiling all source files in directory, adding 'include' to include path.)\n\nCOMPONENT_EXTRA_CLEAN := versione.h\n\ntest.o: versione.h\n\nversione.h:\n\t$(COMPONENT_PATH)/versione.sh\n\n" }, { "alpha_fraction": 0.5172264575958252, "alphanum_fraction": 0.5521126985549927, "avg_line_length": 17.241106033325195, "blob_id": "cc229c446ea52d5281a14a8379e125d9f44f674a", "content_id": "0ffde7cee9efdd727193d252cc43e15809848eda", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 4615, "license_type": "no_license", "max_line_length": 71, "num_lines": 253, "path": "/test/main/comandi.c", "repo_name": "mazent/SC635A_TEST", "src_encoding": "UTF-8", "text": "#include \"spc.h\"\n#include \"prod.h\"\n#include \"cavo.h\"\n#include \"mobd.h\"\n#include \"led.h\"\n#include \"rid.h\"\n#include \"phy.h\"\n#include \"aggiorna.h\"\n\nextern int cntTst ;\nextern const uint32_t VERSIONE ;\nextern const char * DATA ;\n\n\n#define CMD_ECO\t\t((SPC_CMD) 0x0000)\n\n#define CMD_CODP_L\t((SPC_CMD) 0x0100)\n#define CMD_CODP_S\t((SPC_CMD) 0x0101)\n#define CMD_CODS_L\t((SPC_CMD) 0x0102)\n#define CMD_CODS_S\t((SPC_CMD) 0x0103)\n\n#define CMD_TST_Z \t((SPC_CMD) 0x0200)\n#define CMD_TST_L \t((SPC_CMD) 0x0201)\n#define CMD_CRJ_I \t((SPC_CMD) 0x0202)\n#define CMD_MOBD \t((SPC_CMD) 0x0203)\n#define CMD_ETH \t((SPC_CMD) 0x0204)\n#define CMD_LED \t((SPC_CMD) 0x0205)\n#define CMD_RID_I \t((SPC_CMD) 0x0206)\n#define CMD_RID_T \t((SPC_CMD) 0x0207)\n#define CMD_RID_E \t((SPC_CMD) 0x0208)\n#define CMD_PHYRST\t((SPC_CMD) 0x0209)\n\n#define CMD_AGG_I\t((SPC_CMD) 0x0300)\n#define CMD_AGG_D\t((SPC_CMD) 0x0301)\n#define CMD_AGG_F\t((SPC_CMD) 0x0302)\n#define CMD_AGG_V\t((SPC_CMD) 0x0303)\n#define CMD_AGG_DC\t((SPC_CMD) 0x0304)\n\n// Sala di lettura\nstatic union {\n\tPROD_PSN psn ;\n\tPROD_BSN bsn ;\n} sdl ;\n\n\nvoid esegui(RX_SPC * rx, TX_SPC * tx)\n{\n\tSPC_CMD cmd ;\n\tuint8_t * dati = rx->rx + sizeof(SPC_CMD) ;\n\tint dim = rx->dimRx - sizeof(SPC_CMD) ;\n\n\tmemcpy(&cmd, rx->rx, sizeof(SPC_CMD)) ;\n\n\tswitch (cmd) {\n\tcase CMD_ECO:\n\t\tSPC_resp(tx, cmd, dati, dim) ;\n\t\tbreak ;\n\n\tcase CMD_CODP_L:\n\t\tif (0 == dim) {\n\t\t\tif ( PROD_read_product(&sdl.psn) )\n\t\t\t\tSPC_resp(tx, cmd, sdl.psn.psn, sdl.psn.len) ;\n\t\t\telse\n\t\t\t\tSPC_err(tx, cmd) ;\n\t\t}\n\t\telse\n\t\t\tSPC_err(tx, cmd) ;\n\t\tbreak ;\n\tcase CMD_CODP_S:\n\t\tif (0 == dim)\n\t\t\tSPC_err(tx, cmd) ;\n\t\telse if (dim >= PRODUCT_SERIAL_NUMBER_DIM)\n\t\t\tSPC_err(tx, cmd) ;\n\t\telse {\n\t\t\tdati[dim] = 0 ;\n\t\t\tif ( PROD_write_product((char *) dati) )\n\t\t\t\tSPC_resp(tx, cmd, NULL, 0) ;\n\t\t\telse\n\t\t\t\tSPC_err(tx, cmd) ;\n\t\t}\n\t\tbreak ;\n\n\tcase CMD_CODS_L:\n\t\tif (0 == dim) {\n\t\t\tif ( PROD_read_board(&sdl.bsn) )\n\t\t\t\tSPC_resp(tx, cmd, sdl.bsn.bsn, sdl.bsn.len) ;\n\t\t\telse\n\t\t\t\tSPC_err(tx, cmd) 
;\n\t\t}\n\t\telse\n\t\t\tSPC_err(tx, cmd) ;\n\t\tbreak ;\n\tcase CMD_CODS_S:\n\t\tif (0 == dim)\n\t\t\tSPC_err(tx, cmd) ;\n\t\telse if (dim >= BOARD_SERIAL_NUMBER_DIM)\n\t\t\tSPC_err(tx, cmd) ;\n\t\telse {\n\t\t\tdati[dim] = 0 ;\n\t\t\tif ( PROD_write_board((char *) dati) )\n\t\t\t\tSPC_resp(tx, cmd, NULL, 0) ;\n\t\t\telse\n\t\t\t\tSPC_err(tx, cmd) ;\n\t\t}\n\t\tbreak ;\n\n\tcase CMD_TST_Z:\n\t\tif (0 == dim) {\n\t\t\tcntTst = 0 ;\n\t\t\tSPC_resp(tx, cmd, NULL, 0) ;\n\t\t}\n\t\telse\n\t\t\tSPC_err(tx, cmd) ;\n\t\tbreak ;\n\tcase CMD_TST_L:\n\t\tif (0 == dim)\n\t\t\tSPC_resp(tx, cmd, &cntTst, 1) ;\n\t\telse\n\t\t\tSPC_err(tx, cmd) ;\n\t\tbreak ;\n\n\tcase CMD_CRJ_I:\n\t\tif (0 == dim) {\n\t\t\tbool x = CRJ_in() ;\n\t\t\tSPC_resp(tx, cmd, &x, 1) ;\n\t\t}\n\t\telse\n\t\t\tSPC_err(tx, cmd) ;\n\t\tbreak ;\n\n\tcase CMD_MOBD:\n\t\tif (1 == dim) {\n\t\t\tMOBD_mobd_eth(0 != dati[0]) ;\n\t\t\tSPC_resp(tx, cmd, NULL, 0) ;\n\t\t}\n\t\telse\n\t\t\tSPC_err(tx, cmd) ;\n\t\tbreak ;\n\tcase CMD_ETH:\n\t\tif (1 == dim) {\n\t\t\tMOBD_eth_esp32(0 != dati[0]) ;\n\t\t\tSPC_resp(tx, cmd, NULL, 0) ;\n\t\t}\n\t\telse\n\t\t\tSPC_err(tx, cmd) ;\n\t\tbreak ;\n\n\tcase CMD_LED:\n\t\tif (1 == dim) {\n\t\t\tLED_rosso(0 != dati[0]) ;\n\t\t\tSPC_resp(tx, cmd, NULL, 0) ;\n\t\t}\n\t\telse\n\t\t\tSPC_err(tx, cmd) ;\n\t\tbreak ;\n\n\tcase CMD_RID_I:\n\t\tif (0 == dim) {\n\t\t\tif ( RID_start() )\n\t\t\t\tSPC_resp(tx, cmd, NULL, 0) ;\n\t\t\telse\n\t\t\t\tSPC_err(tx, cmd) ;\n\t\t}\n\t\telse\n\t\t\tSPC_err(tx, cmd) ;\n\t\tbreak ;\n\tcase CMD_RID_T:\n\t\tif (0 == dim) {\n\t\t\tRID_stop() ;\n\t\t\tSPC_resp(tx, cmd, NULL, 0) ;\n\t\t}\n\t\telse\n\t\t\tSPC_err(tx, cmd) ;\n\t\tbreak ;\n\tcase CMD_RID_E:\n\t\tif (0 == dim) {\n\t\t\tbool doip ;\n\t\t\tif ( RID_doip(&doip) )\n\t\t\t\tSPC_resp(tx, cmd, &doip, 1) ;\n\t\t\telse\n\t\t\t\tSPC_err(tx, cmd) ;\n\t\t}\n\t\telse\n\t\t\tSPC_err(tx, cmd) ;\n\t\tbreak ;\n\n\tcase CMD_PHYRST:\n\t\tif (1 == dim) {\n\t\t\tSPC_resp(tx, cmd, NULL, 0) ;\n\n\t\t\tPHY_reset(dati[0]) ;\n\t\t}\n\t\telse\n\t\t\tSPC_err(tx, cmd) ;\n\t\tbreak ;\n\n\tcase CMD_AGG_I:\n\t\tif (sizeof(uint32_t) == dim) {\n\t\t\tuint32_t bdim ;\n\n\t\t\tSPC_resp(tx, cmd, NULL, 0) ;\n\n\t\t\tmemcpy(&bdim, dati, dim) ;\n\t\t\tAGG_beg(bdim) ;\n\t\t}\n\t\telse\n\t\t\tSPC_err(tx, cmd) ;\n\t\tbreak ;\n\tcase CMD_AGG_D:\n\t\tif (dim > sizeof(uint32_t)) {\n\t\t\tuint32_t ofs ;\n\n\t\t\tmemcpy(&ofs, dati, sizeof(uint32_t)) ;\n\t\t\tif ( AGG_dat(dati + sizeof(uint32_t), dim - sizeof(uint32_t), ofs) )\n\t\t\t\tSPC_resp(tx, cmd, NULL, 0) ;\n\t\t\telse\n\t\t\t\tSPC_err(tx, cmd) ;\n\t\t}\n\t\telse\n\t\t\tSPC_err(tx, cmd) ;\n\t\tbreak ;\n\tcase CMD_AGG_F:\n\t\tif (0 == dim) {\n\t\t\tif ( AGG_end() )\n\t\t\t\tSPC_resp(tx, cmd, NULL, 0) ;\n\t\t\telse\n\t\t\t\tSPC_err(tx, cmd) ;\n\t\t}\n\t\telse\n\t\t\tSPC_err(tx, cmd) ;\n\t\tbreak ;\n\n\tcase CMD_AGG_V:\n\t\tif (0 == dim)\n\t\t\tSPC_resp(tx, cmd, &VERSIONE, sizeof(VERSIONE)) ;\n\t\telse\n\t\t\tSPC_err(tx, cmd) ;\n\t\tbreak ;\n\tcase CMD_AGG_DC:\n\t\tif (0 == dim) {\n\t\t\tint d = strlen(DATA) ;\n\n\t\t\tSPC_resp(tx, cmd, DATA, d) ;\n\t\t}\n\t\telse\n\t\t\tSPC_err(tx, cmd) ;\n\t\tbreak ;\n\n\tdefault:\n\t\tSPC_unk(tx, cmd) ;\n\t\tbreak ;\n\t}\n}\n" }, { "alpha_fraction": 0.5630630850791931, "alphanum_fraction": 0.5729729533195496, "avg_line_length": 23.130434036254883, "blob_id": "b85d441fb9d45f47392630839b642674f4e82b72", "content_id": "c6f421beae4f0de7cf5ba7a1566ec110796bbfb7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1110, 
"license_type": "no_license", "max_line_length": 83, "num_lines": 46, "path": "/test/build/cifra.py", "repo_name": "mazent/SC635A_TEST", "src_encoding": "UTF-8", "text": "from __future__ import print_function\n\nimport os\nimport sys\nimport cidec\nimport chiavi\n\n\"\"\"\n Preparazione del file per l'aggiornamento:\n 1) si aggiunge una intestazione che porta la dimensione a multiplo di 16\n 2) si cifra con un iv\n 3) si firma (iv + cifrato)\n 4) il file per l'aggiornamento e' la concatenazione: iv || cifrato || firma\n\"\"\"\n\nclass problema(Exception):\n\n def __init__(self, msg):\n Exception.__init__(self)\n self.msg = msg\n\n def __str__(self):\n return self.msg\n\n\nif __name__ == '__main__':\n try:\n if len(sys.argv) != 3:\n raise problema('passare il nome del file in chiaro e quello cifrato')\n\n chiaro = None\n with open(sys.argv[1], 'rb') as pt:\n chiaro = pt.read()\n\n iv = os.urandom(16)\n cifrato = cidec.cifra(chiaro, iv, chiavi.KEY_CIF)\n\n firma = cidec.firma(iv + cifrato, chiavi.KEY_MAC)\n\n with open(sys.argv[2], 'wb') as agg:\n agg.write(iv)\n agg.write(cifrato)\n agg.write(firma)\n\n except problema as err:\n print(err)\n" }, { "alpha_fraction": 0.5518626570701599, "alphanum_fraction": 0.5788897275924683, "avg_line_length": 25.843137741088867, "blob_id": "678aeb656ee1d337e443fbe4dd6c7299031d51f5", "content_id": "fe3ff0fea6f7468cc6518abd5154662cbce3ee1a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2738, "license_type": "no_license", "max_line_length": 89, "num_lines": 102, "path": "/test/build/cidec.py", "repo_name": "mazent/SC635A_TEST", "src_encoding": "UTF-8", "text": "import sys\nfrom cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes\nfrom cryptography.hazmat.primitives import hashes, hmac\nfrom cryptography.hazmat.backends import default_backend\n\ndef _cifra_3(ptext, iv, key):\n if isinstance(ptext, str):\n ptext = bytes(ptext.encode('ascii'))\n\n TESTA = b'208 - SC635'\n mancano = (len(ptext) + len(TESTA)) % 16\n if mancano:\n mancano = 16 - mancano\n testa = b'208 *'\n for _ in range(mancano):\n testa = testa + b'*'\n testa = testa + b' SC635'\n ptext = testa + ptext\n else:\n ptext = TESTA + ptext\n\n cipher = Cipher(algorithms.AES(bytes(key)), modes.CBC(iv), backend=default_backend())\n encryptor = cipher.encryptor()\n\n return encryptor.update(ptext) + encryptor.finalize()\n\ndef _cifra_2(ptext, iv, key):\n TESTA = '208 - SC635'\n mancano = (len(ptext) + len(TESTA)) % 16\n if mancano:\n mancano = 16 - mancano\n testa = '208 *'\n for _ in range(mancano):\n testa = testa + '*'\n testa = testa + ' SC635'\n ptext = testa + ptext\n else:\n ptext = TESTA + ptext\n\n cipher = Cipher(algorithms.AES(bytes(key)), modes.CBC(iv), backend=default_backend())\n encryptor = cipher.encryptor()\n\n return encryptor.update(ptext) + encryptor.finalize()\n\n\ndef cifra(ptext, iv, key):\n if sys.version_info.major == 3:\n return _cifra_3(ptext, iv, key)\n else:\n return _cifra_2(ptext, iv, key)\n\ndef _elimina_3(dec):\n if dec.startswith(b'208 '):\n dec = dec[4:]\n while dec[0] == 42:\n dec = dec[1:]\n if dec.startswith(b' SC635'):\n return dec[6:]\n else:\n return None\n else:\n return None\n\ndef _elimina_2(dec):\n if dec.startswith('208 '):\n dec = dec[4:]\n while dec[0] == '*':\n dec = dec[1:]\n if dec.startswith(' SC635'):\n return dec[6:]\n else:\n return None\n else:\n return None\n\ndef decifra(ctext, key):\n iv = ctext[:16]\n ctext = ctext[16:]\n\n cipher = 
Cipher(algorithms.AES(bytes(key)), modes.CBC(iv), backend=default_backend())\n decryptor = cipher.decryptor()\n dec = decryptor.update(ctext) + decryptor.finalize()\n\n if sys.version_info.major == 3:\n return _elimina_3(dec)\n else:\n return _elimina_2(dec)\n\n\ndef firma(cosa, key):\n h = hmac.HMAC(bytes(key), hashes.SHA256(), backend=default_backend())\n h.update(bytes(cosa))\n return h.finalize()\n\ndef verifica(cosa, mac, key):\n h = hmac.HMAC(bytes(key), hashes.SHA256(), backend=default_backend())\n h.update(bytes(cosa))\n try:\n h.verify(mac)\n return True\n except:\n return False\n" }, { "alpha_fraction": 0.5977304577827454, "alphanum_fraction": 0.6091940999031067, "avg_line_length": 26.242902755737305, "blob_id": "9bfde00474ca20e106d217d2bb957679865647c7", "content_id": "a22f589d3f31c5e9ba166efb313bfcbcbc1b3219", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 8636, "license_type": "no_license", "max_line_length": 120, "num_lines": 317, "path": "/test/main/test.c", "repo_name": "mazent/SC635A_TEST", "src_encoding": "UTF-8", "text": "#include \"prod.h\"\n#include \"spc.h\"\n#include \"tasto.h\"\n#include \"ap.h\"\n#include \"gestore.h\"\n#include \"uspc.h\"\n#include \"cavo.h\"\n#include \"mobd.h\"\n#include \"led.h\"\n#include \"rid.h\"\n#include \"phy.h\"\n#include \"versione.h\"\n\n#include \"driver/gpio.h\"\n\n#include \"esp_log.h\"\n#include \"esp_event_loop.h\"\n#include \"nvs_flash.h\"\n\nextern void esegui(RX_SPC *, TX_SPC *) ;\n\nstatic const char * TAG = \"test\";\n\n#ifdef NDEBUG\nconst uint32_t VERSIONE = (1 << 24) + VER ;\n#else\nconst uint32_t VERSIONE = VER ;\n#endif\nconst char * DATA = __DATE__ ;\n\n// memory pool for the messages\nosPoolDef(pbcid, NUM_BUFFER, UN_BUFFER) ;\nstatic osPoolId pbcid = NULL ;\n\n// message queue\nosMessageQDef(comes, 2 * NUM_BUFFER, UN_BUFFER *) ;\nstatic osMessageQId comes = NULL ;\n\t// special message codes\n#define MSG_TASTO\t\t0x90B56557\n#define MSG_CAVO\t\t0xCA8AB86D\n#define MSG_RIDE\t\t0xA74C0DE7\n#define MSG_ARIMB\t\t0x5876FD23\n\nint cntTst = 0 ;\n\n#define ANTIRIMBALZO\t50\n\nstatic void antirimb(void * v)\n{\n\tUNUSED(v) ;\n\n\tCHECK_IT(osOK == osMessagePut(comes, MSG_ARIMB, 0)) ;\n}\n\nosTimerDef(timArimb, antirimb) ;\nstatic osTimerId timArimb = NULL ;\n\n\nstatic void tasto(void)\n{\n\tCHECK_IT(osOK == osMessagePut(comes, MSG_TASTO, 0)) ;\n}\n\nstatic void cavo(void)\n{\n\tCHECK_IT(osOK == osMessagePut(comes, MSG_CAVO, 0)) ;\n}\n\nstatic void rid(void)\n{\n\tCHECK_IT(osOK == osMessagePut(comes, MSG_RIDE, 0)) ;\n}\n\nstatic void gst_conn(const char * ip)\n{\n\tUNUSED(ip) ;\n\n\tESP_LOGI(TAG, \"connected %s\", ip) ;\n}\n\nstatic void gst_msg(UN_BUFFER * msg)\n{\n\tCHECK_IT(osOK == osMessagePut(comes, (uint32_t) msg, 0)) ;\n}\n\nstatic void gst_scon(void)\n{\n\tESP_LOGI(TAG, \"disconnected\") ;\n}\n\nstatic S_GST_CFG gstcb = {\n\t.conn = gst_conn, \n\t.msg = gst_msg,\n\t.scon = gst_scon\n} ;\n\nstatic S_USPC_CFG ucfg = {\n\t.msg = gst_msg\n} ;\n\nstatic esp_err_t event_handler(void *ctx, system_event_t *event)\n{\n\tUNUSED(ctx) ;\n\n\tswitch (event->event_id) {\n case SYSTEM_EVENT_WIFI_READY: /**< ESP32 WiFi ready */\n \tESP_LOGI(TAG, \"SYSTEM_EVENT_WIFI_READY\");\n \tbreak ;\n case SYSTEM_EVENT_SCAN_DONE: /**< ESP32 finish scanning AP */\n \tESP_LOGI(TAG, \"SYSTEM_EVENT_SCAN_DONE\");\n \tbreak ;\n case SYSTEM_EVENT_STA_START: /**< ESP32 station start */\n\t\tESP_LOGI(TAG, \"SYSTEM_EVENT_STA_START\");\n\t\tbreak;\n case SYSTEM_EVENT_STA_STOP: /**< ESP32 station stop */\n \tESP_LOGI(TAG, 
\"SYSTEM_EVENT_STA_STOP\");\n \tbreak ;\n case SYSTEM_EVENT_STA_CONNECTED: /**< ESP32 station connected to AP */\n \tESP_LOGI(TAG, \"SYSTEM_EVENT_STA_CONNECTED\");\n \tbreak ;\n case SYSTEM_EVENT_STA_DISCONNECTED: /**< ESP32 station disconnected from AP */\n\t\tESP_LOGI(TAG, \"SYSTEM_EVENT_STA_DISCONNECTED\");\n\t\tbreak;\n case SYSTEM_EVENT_STA_AUTHMODE_CHANGE: /**< the auth mode of AP connected by ESP32 station changed */\n \tESP_LOGI(TAG, \"SYSTEM_EVENT_STA_AUTHMODE_CHANGE\");\n \tbreak ;\n case SYSTEM_EVENT_STA_GOT_IP: /**< ESP32 station got IP from connected AP */\n\t\tESP_LOGI(TAG, \"SYSTEM_EVENT_STA_GOT_IP\");\n\t\tbreak ;\n case SYSTEM_EVENT_STA_LOST_IP: /**< ESP32 station lost IP and the IP is reset to 0 */\n \tESP_LOGI(TAG, \"SYSTEM_EVENT_STA_LOST_IP\");\n \tbreak ;\n case SYSTEM_EVENT_STA_WPS_ER_SUCCESS: /**< ESP32 station wps succeeds in enrollee mode */\n \tESP_LOGI(TAG, \"SYSTEM_EVENT_STA_WPS_ER_SUCCESS\");\n \tbreak ;\n case SYSTEM_EVENT_STA_WPS_ER_FAILED: /**< ESP32 station wps fails in enrollee mode */\n \tESP_LOGI(TAG, \"SYSTEM_EVENT_STA_WPS_ER_FAILED\");\n \tbreak ;\n case SYSTEM_EVENT_STA_WPS_ER_TIMEOUT: /**< ESP32 station wps timeout in enrollee mode */\n \tESP_LOGI(TAG, \"SYSTEM_EVENT_STA_WPS_ER_TIMEOUT\");\n \tbreak ;\n case SYSTEM_EVENT_STA_WPS_ER_PIN: /**< ESP32 station wps pin code in enrollee mode */\n \tESP_LOGI(TAG, \"SYSTEM_EVENT_STA_WPS_ER_PIN\");\n \tbreak ;\n case SYSTEM_EVENT_AP_START: /**< ESP32 soft-AP start */\n \tESP_LOGI(TAG, \"SYSTEM_EVENT_AP_START\");\n \tAP_evn(AP_EVN_START, &event->event_info) ;\n \tCHECK_IT( GST_beg(&gstcb) ) ;\n \tbreak ;\n case SYSTEM_EVENT_AP_STOP: /**< ESP32 soft-AP stop */\n \tESP_LOGI(TAG, \"SYSTEM_EVENT_AP_STOP\");\n \tAP_evn(AP_EVN_STOP, &event->event_info) ;\n \tGST_end() ;\n \tbreak ;\n case SYSTEM_EVENT_AP_STACONNECTED: /**< a station connected to ESP32 soft-AP */\n \tESP_LOGI(TAG, \"SYSTEM_EVENT_AP_STACONNECTED\");\n \tAP_evn(AP_EVN_STACONNECTED, &event->event_info) ;\n \tbreak ;\n case SYSTEM_EVENT_AP_STADISCONNECTED: /**< a station disconnected from ESP32 soft-AP */\n \tESP_LOGI(TAG, \"SYSTEM_EVENT_AP_STADISCONNECTED\");\n \tAP_evn(AP_EVN_STADISCONNECTED, &event->event_info) ;\n \tbreak ;\n case SYSTEM_EVENT_AP_STAIPASSIGNED: /**< ESP32 soft-AP assign an IP to a connected station */\n \tESP_LOGI(TAG, \"SYSTEM_EVENT_AP_STAIPASSIGNED\");\n \tAP_evn(AP_EVN_STAIPASSIGNED, &event->event_info) ;\n \tbreak ;\n case SYSTEM_EVENT_AP_PROBEREQRECVED: /**< Receive probe request packet in soft-AP interface */\n \tESP_LOGI(TAG, \"SYSTEM_EVENT_AP_PROBEREQRECVED\");\n \tbreak ;\n case SYSTEM_EVENT_GOT_IP6: /**< ESP32 station or ap or ethernet interface v6IP addr is preferred */\n \tESP_LOGI(TAG, \"SYSTEM_EVENT_GOT_IP6\");\n \tbreak ;\n case SYSTEM_EVENT_ETH_START: /**< ESP32 ethernet start */\n \tESP_LOGI(TAG, \"SYSTEM_EVENT_ETH_START\");\n \tbreak ;\n case SYSTEM_EVENT_ETH_STOP: /**< ESP32 ethernet stop */\n \tESP_LOGI(TAG, \"SYSTEM_EVENT_ETH_STOP\");\n \tbreak ;\n case SYSTEM_EVENT_ETH_CONNECTED: /**< ESP32 ethernet phy link up */\n \tESP_LOGI(TAG, \"SYSTEM_EVENT_ETH_CONNECTED\");\n \tbreak ;\n case SYSTEM_EVENT_ETH_DISCONNECTED: /**< ESP32 ethernet phy link down */\n \tESP_LOGI(TAG, \"SYSTEM_EVENT_ETH_DISCONNECTED\");\n \tbreak ;\n case SYSTEM_EVENT_ETH_GOT_IP: /**< ESP32 ethernet got IP from connected AP */\n \tESP_LOGI(TAG, \"SYSTEM_EVENT_ETH_GOT_IP\");\n \tbreak ;\n\n\tdefault:\n\t\tESP_LOGE(TAG, \"? 
evento %d %p ?\", event->event_id, &event->event_info) ;\n\t\tbreak;\n\t}\n\n\treturn ESP_OK;\n}\n\nstatic RX_SPC rxSock = {\n\t.DIM_RX = DIM_BUFFER\n} ;\nstatic TX_SPC txSock = {\n\t.DIM_TX = DIM_BUFFER,\n\t.ftx = GST_tx\n} ;\n\nstatic RX_SPC rxUart = {\n\t.DIM_RX = DIM_BUFFER\n} ;\nstatic TX_SPC txUart = {\n\t.DIM_TX = DIM_BUFFER,\n\t.ftx = USPC_tx\n} ;\n\nvoid app_main()\n{\n esp_log_level_set(\"*\", ESP_LOG_INFO) ;\n\n\t// questa la fanno sempre\n\tesp_err_t ret = nvs_flash_init();\n\tif (ret == ESP_ERR_NVS_NO_FREE_PAGES) {\n\t\tESP_ERROR_CHECK( nvs_flash_erase() );\n\t\tESP_ERROR_CHECK( nvs_flash_init() );\n\t}\n\n\t// Scambio messaggi\n\tpbcid = osPoolCreate(osPool(pbcid)) ;\n\tassert(pbcid) ;\n\tgstcb.mp = pbcid ;\n\tucfg.mp = pbcid ;\n\n\tcomes = osMessageCreate(osMessageQ(comes), NULL) ;\n\tassert(comes) ;\n\n\t// Varie\n gpio_install_isr_service(0) ;\n\n tcpip_adapter_init();\n\n ESP_ERROR_CHECK(esp_event_loop_init(event_handler, NULL));\n\n timArimb = osTimerCreate(osTimer(timArimb), osTimerOnce, NULL) ;\n assert(timArimb) ;\n\n // Scheda\n CHECK_IT( TST_beg(tasto) ) ;\n CHECK_IT( CRJ_beg(cavo) ) ;\n CHECK_IT( MOBD_beg() ) ;\n CHECK_IT( LED_beg() ) ;\n CHECK_IT( RID_beg(rid) ) ;\n CHECK_IT( PHY_beg() ) ;\n\n // Comunicazione\n CHECK_IT( SPC_ini_rx(&rxSock) ) ;\n CHECK_IT( SPC_ini_tx(&txSock) ) ;\n\n CHECK_IT( SPC_ini_rx(&rxUart) ) ;\n CHECK_IT( SPC_ini_tx(&txUart) ) ;\n \t// uart\n CHECK_IT( USPC_open(&ucfg) ) ;\n\t\t// ap\n\tS_AP sap = {\n\t\t.ssid = \"SC635\",\n\t\t.max_connection = 1,\n\t\t.auth = AUTH_OPEN\n\t} ;\n\tCHECK_IT( AP_beg(&sap) ) ;\n\n#ifdef NDEBUG\n\tESP_LOGI(TAG, \"vers %d\", VER) ;\n#else\n\tESP_LOGI(TAG, \"vers %d (dbg)\", VER) ;\n#endif\n\tESP_LOGI(TAG, \"data %s\", DATA) ;\n\n\t// Eseguo i comandi\n\twhile (true) {\n\t\tosEvent event = osMessageGet(comes, osWaitForever) ;\n\t\tassert(osEventMessage == event.status) ;\n\n\t\tif (osEventMessage == event.status) {\n\t\t\tswitch (event.value.v) {\n\t\t\tcase MSG_TASTO:\n\t\t\t\t++cntTst ;\n\t\t\t\tESP_LOGI(TAG, \"tasto premuto\") ;\n\t\t\t\tbreak ;\n\t\t\tcase MSG_CAVO:\n\t\t\t\tCHECK_IT(osOK == osTimerStart(timArimb, ANTIRIMBALZO)) ;\n\t\t\t\tbreak ;\n\t\t\tcase MSG_ARIMB:\n\t\t\t\tif (CRJ_in())\n\t\t\t\t\tESP_LOGI(TAG, \"cavo RJ inserito\") ;\n\t\t\t\telse\n\t\t\t\t\tESP_LOGI(TAG, \"cavo RJ estratto\") ;\n\t\t\t\tbreak ;\n\t\t\tcase MSG_RIDE:\n\t\t\t\tESP_LOGI(TAG, \"fine rilevazione diagnosi\") ;\n\t\t\t\tbreak ;\n\t\t\tdefault: {\n\t\t\t\t\t// Comando\n\t\t\t\t\tUN_BUFFER * msg = (UN_BUFFER *) event.value.p ;\n\t\t\t\t\tRX_SPC * prx = &rxUart ;\n\t\t\t\t\tTX_SPC * ptx = &txUart ;\n\n\t\t\t\t\tif (SOCKET == msg->orig) {\n\t\t\t\t\t\tprx = &rxSock ;\n\t\t\t\t\t\tptx = &txSock ;\n\t\t\t\t\t}\n\n\t\t\t\t\tif ( SPC_esamina(prx, msg) )\n\t\t\t\t\t\tesegui(prx, ptx) ;\n\n\t\t\t\t\tCHECK_IT(osOK == osPoolFree(pbcid, msg)) ;\n\t\t\t\t}\n\t\t\t\tbreak ;\n\t\t\t}\n\t\t}\n\t}\n}\n" } ]
7
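The cifra.py/cidec.py pair in the mazent/SC635A_TEST record above implements an encrypt-then-MAC update format: the shipped blob is iv || ciphertext || HMAC-SHA256 tag, with the '208 - SC635' header padding the plaintext to an AES block multiple. A minimal round-trip sketch of that layout follows, under Python 3; the 16-byte demo keys are hypothetical stand-ins for the real chiavi.KEY_CIF / chiavi.KEY_MAC values, which are not part of the record.

import os
import cidec

KEY_CIF = bytes(range(16))  # hypothetical cipher key (the real one lives in chiavi.py)
KEY_MAC = bytes(range(16))  # hypothetical MAC key

iv = os.urandom(16)
ct = cidec.cifra('firmware payload', iv, KEY_CIF)
blob = iv + ct + cidec.firma(iv + ct, KEY_MAC)

# Verify before decrypting: the last 32 bytes are the HMAC-SHA256 tag
# computed over iv || ciphertext, exactly as cifra.py writes it.
body, tag = blob[:-32], blob[-32:]
assert cidec.verifica(body, tag, KEY_MAC)
print(cidec.decifra(body, KEY_CIF))  # header stripped by _elimina_3, payload returned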
azfar-imtiaz/Meetups-Django-project
https://github.com/azfar-imtiaz/Meetups-Django-project
a1c1f42ab30e711812993c57e60d60ec0f374ff3
8fb8cbd87d3b16bc209c48d014bff447f3701606
9766a9d4b924a66cea000c85c3430aad5a6d9ce0
refs/heads/master
2023-07-05T09:53:42.446679
2021-08-09T11:37:47
2021-08-09T11:37:47
393,417,093
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6544461846351624, "alphanum_fraction": 0.6544461846351624, "avg_line_length": 24.65999984741211, "blob_id": "baa5d9ae964eb3d0b4a71a57d42a267a992a3185", "content_id": "ccca1396b72a6c4f874af21deb1a6ba2f9ffa5ad", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1282, "license_type": "no_license", "max_line_length": 80, "num_lines": 50, "path": "/meetups/views.py", "repo_name": "azfar-imtiaz/Meetups-Django-project", "src_encoding": "UTF-8", "text": "from django.shortcuts import render\n# from django.http import HttpResponse\n\nfrom .models import Meetup\n\n# Create your views here.\n\ndef index(request):\n\t# return HttpResponse('Hello world!')\n\t'''\n\t\tNOTE: Important: The path to the HTML template is\n\t\tfrom INSIDE the templates folder. So it isn't \n\t\t\"templates/meetups/index.html\", but rather, it is\n\t\t\"meetups/index.html\"\n\t'''\n\t# meetups = [\n\t# \t{\n\t# \t\t'title': 'A first meetup', \n\t# \t\t'location': 'New York', \n\t# \t\t'slug': 'a-first-meetup'\n\t# \t},\n\t# \t{\n\t# \t\t'title': 'A second meetup', \n\t# \t\t'location': 'San Fransisco', \n\t# \t\t'slug': 'a-second-meeting'\n\t# \t},\n\t# ]\n\tmeetups = Meetup.objects.all()\n\treturn render(request, 'meetups/index.html', {\n\t\t'show_meetups': True,\n\t\t'meetups': meetups\n\t})\n\ndef meetup_details(request, meetup_slug):\n\t# selected_meetup = {\n\t# \t'title': 'A first meetup',\n\t# \t'description': \"This is the first meeting\"\n\t# }\n\ttry:\n\t\tselected_meetup = Meetup.objects.get(slug=meetup_slug)\n\t\treturn render(request, \n\t\t\t'meetups/meetup-details.html', \n\t\t\t{\n\t\t\t\t'meetup_found': True,\n\t\t\t\t'title': selected_meetup.title,\n\t\t\t\t'description': selected_meetup.description,\n\t\t\t\t'image': selected_meetup.image.url\n\t\t\t})\n\texcept Exception as e:\n\t\treturn render(request, 'meetups/meetup-details.html', {'meetup_found': False})" }, { "alpha_fraction": 0.7462235689163208, "alphanum_fraction": 0.7552869915962219, "avg_line_length": 24.538461685180664, "blob_id": "12fdb7dbcea73737158f55441e346b7bb7e992ec", "content_id": "aaac881ac39684765796eefa61a87722dd5351ed", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 331, "license_type": "no_license", "max_line_length": 64, "num_lines": 13, "path": "/meetups/models.py", "repo_name": "azfar-imtiaz/Meetups-Django-project", "src_encoding": "UTF-8", "text": "from django.db import models\n\n# Create your models here.\n\n\n# NOTE: Please ensure that 'meetups' was added to INSTALLED_APPS\n# in settings.py!\n\nclass Meetup(models.Model):\n\ttitle = models.CharField(max_length=200)\n\tslug = models.SlugField(unique=True)\n\tdescription = models.TextField()\n\timage = models.ImageField(upload_to='images')" }, { "alpha_fraction": 0.7328947186470032, "alphanum_fraction": 0.7434210777282715, "avg_line_length": 35.238094329833984, "blob_id": "8dd0e94640f460cd47ee85f2b3e1447b761bc71a", "content_id": "7a4d487fafd4c52c2370592a731c81e5e040a916", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 760, "license_type": "no_license", "max_line_length": 80, "num_lines": 21, "path": "/meetups/urls.py", "repo_name": "azfar-imtiaz/Meetups-Django-project", "src_encoding": "UTF-8", "text": "from django.urls import path\n\nfrom . 
import views\n\n# NOTE: this variable must always be named urlpatterns, and it must be a list\n# This list contains a mapping from routes to their corresponding functions\n# \tthat generate the appropriate view\n\n'''\n\tNOTE: For any URL that you add here, it is a good idea to add a slash\n\tafter it. This ensures that the URL is hit whether it is followed by \n\ta slash or not. For example:\n\tpath('meetups/', views.index) means that \"localhost:7000/meetups\" and \n\t\"localhost:7000/meetups/\" will both hit the views.index function\n'''\n\nurlpatterns = [\n\tpath('meetups/', views.index, name='all-meetups'),\n\t# specifying the type as slug isn't necessary\n\tpath('meetups/<slug:meetup_slug>', views.meetup_details, name='meetup-details')\n]" } ]
3
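The meetup_details view in the record above wraps Meetup.objects.get in a broad try/except and renders a not-found variant of the same template. A common Django alternative, sketched here with the record's own model and template names, is the get_object_or_404 shortcut; note it answers a missing slug with an HTTP 404 instead of rendering meetup-details.html with meetup_found=False, so it is a behavioral trade-off rather than a drop-in replacement.

from django.shortcuts import render, get_object_or_404
from .models import Meetup

def meetup_details(request, meetup_slug):
    # Raises Http404 when no Meetup matches the slug, replacing the
    # broad `except Exception` branch of the original view.
    selected_meetup = get_object_or_404(Meetup, slug=meetup_slug)
    return render(request, 'meetups/meetup-details.html', {
        'meetup_found': True,
        'title': selected_meetup.title,
        'description': selected_meetup.description,
        'image': selected_meetup.image.url,
    })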
awhiteman625/Minecraft_Repo_6th_AAW
https://github.com/awhiteman625/Minecraft_Repo_6th_AAW
6b3d27374b973e58350238d2eb2426cf1a847366
4e5473f2cf750f3dbe9ef0d0d1e331dd7b8b3bc2
a196f2c4c18761baca497fb90719a6a60bb5a618
refs/heads/master
2021-09-09T11:34:56.343971
2018-03-15T18:05:17
2018-03-15T18:05:17
111,600,050
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5269624590873718, "alphanum_fraction": 0.5549488067626953, "avg_line_length": 18.02597427368164, "blob_id": "faa132967ffd75449a6475cac7f01e93e11a0a3f", "content_id": "f193c136c8758fecd0ef519db14ecb06113d5dbe", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1465, "license_type": "no_license", "max_line_length": 38, "num_lines": 77, "path": "/fastet robot AROUND!.py", "repo_name": "awhiteman625/Minecraft_Repo_6th_AAW", "src_encoding": "UTF-8", "text": "import RPi.GPIO as GPIO\nimport time\nimport curses\n\nscreen = curses.initscr()\ncurses.noecho()\ncurses.cbreak()\nscreen.keypad(True)\n\nGPIO.setmode(GPIO.BCM)\nGPIO.setwarnings(False)\nGPIO.setup(7, GPIO.OUT)\nGPIO.setup(8, GPIO.OUT)\nGPIO.setup(9, GPIO.OUT)\nGPIO.setup(10, GPIO.OUT)\n\nMAf = 8\nMAb = 7\nMBf = 10\nMBb = 9\n\ndef stop():\n GPIO.output(MAf, 0)\n GPIO.output(MAb, 0)\n GPIO.output(MBf, 0)\n GPIO.output(MBb, 0)\n\ndef forward():\n GPIO.output(MAf, 1)\n GPIO.output(MAb, 0)\n GPIO.output(MBf, 1)\n GPIO.output(MBb, 0)\n\ndef back():\n GPIO.output(MAf, 0)\n GPIO.output(MAb, 2)\n GPIO.output(MBf, 0)\n GPIO.output(MBb, 2)\n\ndef left():\n GPIO.output(MAf, 0)\n GPIO.output(MAb, 2)\n GPIO.output(MBf, 2)\n GPIO.output(MBb, 0)\n\ndef right():\n GPIO.output(MAf, 2)\n GPIO.output(MAb, 0)\n GPIO.output(MBf, 0)\n GPIO.output(MBb, 2)\n\ntry:\n while True:\n char = screen.getch()\n if char == ord('q'):\n break\n elif char == curses.KEY_UP:\n forward()\n time.sleep(0.1)\n elif char == curses.KEY_DOWN:\n back()\n time.sleep(0.1)\n elif char == curses.KEY_LEFT:\n left()\n time.sleep(0.1)\n elif char == curses.KEY_RIGHT:\n right()\n time.sleep(0.1)\n elif char == 10:\n stop()\n \nfinally:\n curses.nocbreak()\n screen.keypad(0)\n curses.echo()\n curses.endwin()\n GPIO.cleanup()\n" }, { "alpha_fraction": 0.6654757857322693, "alphanum_fraction": 0.6895095705986023, "avg_line_length": 23.173229217529297, "blob_id": "1bcf25801818122a533aa221a41e8423119f9e5e", "content_id": "0c963f3a059497edbd4b05e263eb9880680407d2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3079, "license_type": "no_license", "max_line_length": 78, "num_lines": 127, "path": "/auto-robot pi.py", "repo_name": "awhiteman625/Minecraft_Repo_6th_AAW", "src_encoding": "UTF-8", "text": "\"\"\" this program will use the GPIO pins to run the motors of the robot pi,\nand the ultrasonic sensor to sense distance. 
this will run and make the robot\nautomatic.\ncreated by Andy Whiteman, 3-14-2018\n\"\"\"\n\n#import all libraries needed\nimport RPi.GPIO as GPIO\nimport time\n\n#set GPIO library mode and warnings\nGPIO.setmode(GPIO.BCM)\nGPIO.setwarnings(False)\n\n#sets variables used for the ultrasonic sensor and for controlling motor speed\npT = 17\npE = 18\nHowNear = 15.0\nfrequency = 20\ndutyCycleA = 50\ndutyCycleB = 37\n\n#sets up GPIO pins for use in program\nGPIO.setup(pT, GPIO.OUT)\nGPIO.setup(pE, GPIO.IN)\nGPIO.setup(7, GPIO.OUT)\nGPIO.setup(8, GPIO.OUT)\nGPIO.setup(9, GPIO.OUT)\nGPIO.setup(10, GPIO.OUT)\n\n#changes frequency of the motors (how fast the electric signal is sent)\nMAf = GPIO.PWM(8, frequency)\nMAb = GPIO.PWM(7, frequency)\nMBf = GPIO.PWM(10, frequency)\nMBb = GPIO.PWM(9, frequency)\n\n#changes the duty cycle (speed of motors) to start at 0\nMAf.start(0)\nMAb.start(0)\nMBf.start(0)\nMBb.start(0)\n#function to stop motors\ndef stop():\n MAf.ChangeDutyCycle(0)\n MAb.ChangeDutyCycle(0)\n MBf.ChangeDutyCycle(0)\n MBb.ChangeDutyCycle(0)\n\n#function to move motors forward\ndef forward():\n MAf.ChangeDutyCycle(dutyCycleA)\n MAb.ChangeDutyCycle(0)\n MBf.ChangeDutyCycle(dutyCycleB)\n MBb.ChangeDutyCycle(0)\n\n#function to move motors backward\ndef back():\n MAf.ChangeDutyCycle(0)\n MAb.ChangeDutyCycle(dutyCycleA)\n MBf.ChangeDutyCycle(0)\n MBb.ChangeDutyCycle(dutyCycleB)\n\n#function to move motors left\ndef left():\n MAf.ChangeDutyCycle(0)\n MAb.ChangeDutyCycle(dutyCycleA)\n MBf.ChangeDutyCycle(dutyCycleB)\n MBb.ChangeDutyCycle(0)\n\n#function to move motors right\ndef right():\n MAf.ChangeDutyCycle(dutyCycleA)\n MAb.ChangeDutyCycle(0)\n MBf.ChangeDutyCycle(0)\n MBb.ChangeDutyCycle(dutyCycleB)\n\n#function to sense if it is near an object\ndef IsNearObject(localHowNear):\n Distance = measure()\n if Distance < localHowNear:\n return True\n else:\n return False\n\n#function to measure distance\ndef measure():\n GPIO.output(pT, True)\n time.sleep(0.00001)\n GPIO.output(pT, False)\n startTime = time.time()\n stopTime = startTime\n while GPIO.input(pE) == 0:\n startTime = time.time()\n stopTime = startTime\n while GPIO.input(pE) == 1:\n stopTime = time.time()\n if stopTime - startTime >= 0.04:\n stopTime = startTime\n break\n totalTime = stopTime - startTime\n dist = totalTime * 34300\n dist = dist / 2\n return dist\n\n#function to avoid obstacle\ndef AvoidObstacle():\n back()\n time.sleep(0.5)\n stop()\n right()\n time.sleep(0.75)\n stop()\n\n#try portion of loop will infinitely move forward until it senses an object\n#if it does sense an object it will avoid it\ntry:\n GPIO.output(pT, False)\n time.sleep(0.1)\n while True:\n forward()\n time.sleep(0.1)\n if IsNearObject(HowNear):\n stop()\n AvoidObstacle()\n#will shut down the program and turn off all motors\nexcept KeyboardInterrupt:\n GPIO.cleanup()\n \n" }, { "alpha_fraction": 0.6222222447395325, "alphanum_fraction": 0.644444465637207, "avg_line_length": 14, "blob_id": "b1bc1e8dd99bb781fe8509678ebcccaf54b54f25", "content_id": "d3e29d59c10b4fb04c8d80ab0b9c81deda56e16f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 225, "license_type": "no_license", "max_line_length": 36, "num_lines": 15, "path": "/melon funtion.py", "repo_name": "awhiteman625/Minecraft_Repo_6th_AAW", "src_encoding": "UTF-8", "text": "from mcpi.minecraft import Minecraft\nmc = Minecraft.create()\nimport time\n\ndef melon():\n x, y, z = mc.player.getTilePos()\n mc.setBlock(x, y - 1, z, 103)\n 
time.sleep(2)\n\nmelon()\nmelon()\nmelon()\nmelon()\nmelon()\nmelon()\n" }, { "alpha_fraction": 0.6420664191246033, "alphanum_fraction": 0.660516619682312, "avg_line_length": 24.636363983154297, "blob_id": "f66c02e3bfdb1ffeb7528be5f6d1980f39e50273", "content_id": "9c4450fec1a4d147c24be51f61b2d612612d612a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 271, "license_type": "no_license", "max_line_length": 70, "num_lines": 11, "path": "/pyramid.py", "repo_name": "awhiteman625/Minecraft_Repo_6th_AAW", "src_encoding": "UTF-8", "text": "from mcpi.minecraft import Minecraft\nmc = Minecraft.create()\nheight = 10\nlevels = range(height)\nlevels = reversed(levels)\nx, y, z = mc.player.getTilePos()\ny += height\n\nfor level in levels:\n mc.setBlocks(x - level, y, z - level, x + level, y, z + level, 24)\n y += 1\n" }, { "alpha_fraction": 0.6718286871910095, "alphanum_fraction": 0.6866557002067566, "avg_line_length": 26.098215103149414, "blob_id": "5b39eeb4c06c8befff970df4877dd3e06e0f6e0e", "content_id": "95bc145b9527f2baaf9e7caf3a9f99f726cffce5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3035, "license_type": "no_license", "max_line_length": 81, "num_lines": 112, "path": "/Keyboard Robot Pi.py", "repo_name": "awhiteman625/Minecraft_Repo_6th_AAW", "src_encoding": "UTF-8", "text": "\"\"\"this program uses the GPIO pins to run the motors of the robot pi, and the\ncurses library to register keystrokes. Based on the direction of the arrows, the\nrobot will move in that direction. if you press enter, or OK on the wireless\nkeyboard, it will stop the motors. if you press 'q', it will shut down the program\n\"\"\"\n\n#import all libraries needed\nimport RPi.GPIO as GPIO\nimport time\nimport curses\n\n#set curses library up for our use\nscreen = curses.initscr()\ncurses.noecho()\ncurses.cbreak()\nscreen.keypad(True)\n\n#set GPIO library and sets pins for our use\nGPIO.setmode(GPIO.BCM)\nGPIO.setwarnings(False)\nGPIO.setup(7, GPIO.OUT)\nGPIO.setup(8, GPIO.OUT)\nGPIO.setup(9, GPIO.OUT)\nGPIO.setup(10, GPIO.OUT)\n\n#sets three variables (frequency and the duty cycles) to better control robot speed\nfrequency = 20\ndutyCycleA = 50\ndutyCycleB = 37\n\n#sets 4 variables to control each motor going forward and back with\n#a specific frequency\nMAf = GPIO.PWM(8, frequency)\nMAb = GPIO.PWM(7, frequency)\nMBf = GPIO.PWM(10, frequency)\nMBb = GPIO.PWM(9, frequency)\n\n#starts software with duty cycle of 0\nMAf.start(0)\nMAb.start(0)\nMBf.start(0)\nMBb.start(0)\n\n#function to stop motors\ndef stop():\n MAf.ChangeDutyCycle(0)\n MAb.ChangeDutyCycle(0)\n MBf.ChangeDutyCycle(0)\n MBb.ChangeDutyCycle(0)\n\n#function to push both motors forward\ndef forward():\n MAf.ChangeDutyCycle(dutyCycleA)\n MAb.ChangeDutyCycle(0)\n MBf.ChangeDutyCycle(dutyCycleB)\n MBb.ChangeDutyCycle(0)\n\n#function to push both motors backwards\ndef back():\n MAf.ChangeDutyCycle(0)\n MAb.ChangeDutyCycle(dutyCycleA)\n MBf.ChangeDutyCycle(0)\n MBb.ChangeDutyCycle(dutyCycleB)\n \n#function to push one motor forward and one backward\n#causing it to turn left\ndef left():\n MAf.ChangeDutyCycle(0)\n MAb.ChangeDutyCycle(dutyCycleA)\n MBf.ChangeDutyCycle(dutyCycleB)\n MBb.ChangeDutyCycle(0)\n\n#function to push one motor forward and one backward\n #causing it to turn right\ndef right():\n MAf.ChangeDutyCycle(dutyCycleA)\n MAb.ChangeDutyCycle(0)\n MBf.ChangeDutyCycle(0)\n MBb.ChangeDutyCycle(dutyCycleB)\n\n#a try-finally loop that will try the top 
portion until it raises an exception\n #and then continue on to the bottom portion\ntry:\n #an infinite while loop used to call specific functions when a\n #specific key is pressed. will break if you press \"q\"\n while True:\n char = screen.getch()\n if char == ord('q'):\n break\n elif char == curses.KEY_UP:\n forward()\n time.sleep(0.1)\n elif char == curses.KEY_DOWN:\n back()\n time.sleep(0.1)\n elif char == curses.KEY_LEFT:\n left()\n time.sleep(0.1)\n elif char == curses.KEY_RIGHT:\n right()\n time.sleep(0.1)\n elif char == 10:\n stop()\n \nfinally:\n #this portion of the try-finally loop will clean up the\n #GPIO pins and set curses back to normal\n curses.nocbreak()\n screen.keypad(0)\n curses.echo()\n curses.endwin()\n GPIO.cleanup()\n" }, { "alpha_fraction": 0.6652892827987671, "alphanum_fraction": 0.7231404781341553, "avg_line_length": 19.16666603088379, "blob_id": "77ccd9bf347fd438ce1b243b2aee58c62d577f91", "content_id": "4b58b2be6718de59825f4f30585e77d87cc2fda3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 242, "license_type": "no_license", "max_line_length": 24, "num_lines": 12, "path": "/EMERGANCY ROBOT Pi SHUTDOWN.py", "repo_name": "awhiteman625/Minecraft_Repo_6th_AAW", "src_encoding": "UTF-8", "text": "import RPi.GPIO as GPIO\n\nGPIO.setmode(GPIO.BCM)\nGPIO.setwarnings(False)\nGPIO.setup(7, GPIO.OUT)\nGPIO.setup(8, GPIO.OUT)\nGPIO.setup(9, GPIO.OUT)\nGPIO.setup(10, GPIO.OUT)\nGPIO.output(7, 0)\nGPIO.output(8, 0)\nGPIO.output(9, 0)\nGPIO.output(10, 0)\n" }, { "alpha_fraction": 0.560606062412262, "alphanum_fraction": 0.6565656661987305, "avg_line_length": 27.285715103149414, "blob_id": "da355fb8e8973c09768f964ada5dcc4b124ab23c", "content_id": "abfe07822c223fa24bac39b80d3d16815800e252", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 198, "license_type": "no_license", "max_line_length": 59, "num_lines": 7, "path": "/MEGA ULTRA SUPREME DELETE THE WORLD!!!!!!!!!!!!!!!!!!!!.py", "repo_name": "awhiteman625/Minecraft_Repo_6th_AAW", "src_encoding": "UTF-8", "text": "from mcpi.minecraft import Minecraft\nmc = Minecraft.create()\nimport time\nwhile True:\n x, y, z = mc.player.getPos()\n mc.setBlocks(x-127, y-63, z-127, x+127, y+63, z+127, 0)\n time.sleep(0.1)\n" }, { "alpha_fraction": 0.699999988079071, "alphanum_fraction": 0.7049999833106995, "avg_line_length": 21.22222137451172, "blob_id": "9ba1847aec3bebea6eaf6c20fc04af7a287b872d", "content_id": "78fc528acf90e52b3b69c0f81e36181e7f11b1b7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 200, "license_type": "no_license", "max_line_length": 57, "num_lines": 9, "path": "/Forest.py", "repo_name": "awhiteman625/Minecraft_Repo_6th_AAW", "src_encoding": "UTF-8", "text": "from mcpi.minecraft import Minecraft\nmc = Minecraft.create()\n\ndef growTree(x, y, z):\n #TODO: write function to create a tree at the specified coordinates\n pass\n\nx, y, z = mc.player.getTilePos()\n\ngrowTree(x + 1, y, z)\n" }, { "alpha_fraction": 0.8108108043670654, "alphanum_fraction": 0.8243243098258972, "avg_line_length": 36, "blob_id": "ca491306b560710c2e81885ee35f3fa091c40208", "content_id": "aab104c53cad631c8f7443c392fb2ac71a98954d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 74, "license_type": "no_license", "max_line_length": 48, "num_lines": 2, "path": "/README.md", "repo_name": "awhiteman625/Minecraft_Repo_6th_AAW", "src_encoding": "UTF-8", 
"text": "# Minecraft_Repo_6th_AAW\nRepository to hold Minecraft Haking Python files\n" }, { "alpha_fraction": 0.6006006002426147, "alphanum_fraction": 0.642642617225647, "avg_line_length": 14.857142448425293, "blob_id": "34e313e021fa761b553aa21ba000f1068fc467ac", "content_id": "928c4816f10feee7b8b1e0f9a3469f3ea4fb70e2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 333, "license_type": "no_license", "max_line_length": 36, "num_lines": 21, "path": "/blockID.py", "repo_name": "awhiteman625/Minecraft_Repo_6th_AAW", "src_encoding": "UTF-8", "text": "from mcpi.minecraft import Minecraft\nmc = Minecraft.create()\n\ndef melon():\n return 103\ndef wool():\n return 35\ndef lava():\n return 11\ndef water():\n return 9\ndef TNT():\n return 46\ndef flower():\n return 37\ndef diamondBlock():\n return 57\n\nblock = melon()\nx, y, z = mc.player.getTilePos()\nmc.setBlock(x, y, z, block)\n" }, { "alpha_fraction": 0.6042780876159668, "alphanum_fraction": 0.6470588445663452, "avg_line_length": 25.714284896850586, "blob_id": "ccb4bf81323cf86ce05f57f84fa4ba9f5f74c5df", "content_id": "1e1d54f3068be5628b9563ee3f6387da18ffbd36", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 187, "license_type": "no_license", "max_line_length": 42, "num_lines": 7, "path": "/stairs.py", "repo_name": "awhiteman625/Minecraft_Repo_6th_AAW", "src_encoding": "UTF-8", "text": "from mcpi.minecraft import Minecraft\nmc = Minecraft.create()\nx, y, z = mc.player.getTilePos()\nstep = 0\nfor step in range(0, 100):\n mc.setBlock(x + step, y + step, z, 53)\n step += 1\n" }, { "alpha_fraction": 0.5859375, "alphanum_fraction": 0.6067708134651184, "avg_line_length": 24.600000381469727, "blob_id": "76ed84887127bf52c49172a0d1e2e8ff6983ff99", "content_id": "90c6dc695e2befaf99e4343a390460c206e18bea", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 384, "license_type": "no_license", "max_line_length": 83, "num_lines": 15, "path": "/diamond survay.py", "repo_name": "awhiteman625/Minecraft_Repo_6th_AAW", "src_encoding": "UTF-8", "text": "from mcpi.minecraft import Minecraft\nmc = Minecraft.create()\n\ndepth = 0\nx, y, z = mc.player.getTilePos()\nfor error in range(1, 51):\n depth += 1\n block = mc.getBlock(x, y, z)\n if block == 56:\n mc.postToChat(\"There is a diamond ore \" + str(depth) + \" blocks below you\")\n break\n else:\n y -= 1\nelse:\n mc.postToChat(\"There is no diamond ore below you\")\n" }, { "alpha_fraction": 0.663551390171051, "alphanum_fraction": 0.6869158744812012, "avg_line_length": 25.75, "blob_id": "2a0cd4520a09d903b8525d453ce41bd79dae5b4c", "content_id": "e4bf28ea2862f801f5f5c7aff5609d77573c83c1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 214, "license_type": "no_license", "max_line_length": 45, "num_lines": 8, "path": "/magic wand.py", "repo_name": "awhiteman625/Minecraft_Repo_6th_AAW", "src_encoding": "UTF-8", "text": "from mcpi.minecraft import Minecraft\nmc = Minecraft.create()\nimport time\ntime.sleep(60)\nhits = mc.events.pollBlockHits()\nfor hit in hits:\n x, y, z = hit.pos.x, hit.pos.y, hit.pos.z\n mc.setBlock(x, y, z, 103)\n" }, { "alpha_fraction": 0.4153645932674408, "alphanum_fraction": 0.51953125, "avg_line_length": 33.90909194946289, "blob_id": "943ad22209cf7c974d7e98c6b2bd4721a0c2f69e", "content_id": "38bef0bb1ef75e34ea21141095e31706ce381e0d", "detected_licenses": [], 
"is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 768, "license_type": "no_license", "max_line_length": 76, "num_lines": 22, "path": "/pillars.py", "repo_name": "awhiteman625/Minecraft_Repo_6th_AAW", "src_encoding": "UTF-8", "text": "from mcpi.minecraft import Minecraft\nmc = Minecraft.create()\ndef setPillar(x, y, z, height):\n mc.setBlocks(x - 1, y + height, z - 1, x + 1, y + height, z + 1, 155, 1)\n mc.setBlock(x - 1, y + height - 1, z, 156, 12)\n mc.setBlock(x + 1, y + height - 1, z, 156, 13)\n mc.setBlock(x, y + height - 1, z + 1, 156, 15)\n mc.setBlock(x, y + height - 1, z - 1, 156, 14)\n mc.setBlocks(x - 1, y, z - 1, x + 1, y, z + 1, 155, 1)\n mc.setBlock(x - 1, y + 1, z, 156, 0)\n mc.setBlock(x + 1, y + 1, z, 156, 1)\n mc.setBlock(x, y + 1, z + 1, 156, 3)\n mc.setBlock(x, y + 1, z - 1, 156, 2)\n mc.setBlocks(x, y, z, x, y + height, z, 155, 2)\n\nx, y, z = mc.player.getTilePos()\nx += 2\n\nfor pillar in range(1, 21):\n setPillar(x, y, z, 15)\n x += 5\n z += 5\n" } ]
14
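The measure() routine in auto-robot pi.py above converts the ultrasonic echo time with dist = totalTime * 34300 / 2, i.e. the speed of sound in cm/s halved for the round trip. Factored into a pure helper it can be checked without GPIO hardware; the function name below is illustrative and not part of the record.

def echo_to_cm(elapsed_s, speed_of_sound_cm_s=34300.0):
    # The pulse travels to the obstacle and back, so halve the distance.
    return elapsed_s * speed_of_sound_cm_s / 2.0

# A 1 ms echo corresponds to about 17.15 cm; HowNear = 15.0 therefore
# triggers AvoidObstacle() for echoes shorter than roughly 0.875 ms.
assert abs(echo_to_cm(0.001) - 17.15) < 1e-9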
Flynston/SHA-1
https://github.com/Flynston/SHA-1
ea997c78d268a6b43a3357b7b68ea95da617d0e9
a93acb8c7d70655c7cc956d4bbf6c6e73df6a007
e4d3a5c7942d9cae3928fa47e6385367292d84e3
refs/heads/master
2016-09-07T19:17:32.578201
2014-07-25T10:09:58
2014-07-25T10:09:58
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.4190560281276703, "alphanum_fraction": 0.48213496804237366, "avg_line_length": 23.630434036254883, "blob_id": "cc675ad67bc245bed7b0912ad242b85cb2c533b3", "content_id": "efd49f67ded0e27760ab05a61cb507ee56da8616", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2267, "license_type": "no_license", "max_line_length": 85, "num_lines": 92, "path": "/src/sha1.py", "repo_name": "Flynston/SHA-1", "src_encoding": "UTF-8", "text": "\ndef a8(n,k):#take k bits\n return n^(n>>k)<<k\n\n#SHA-1\ndef f(x,y,z,t):\n if t < 20:\n return (x&y)|(~x&z)\n elif t < 40:\n return x^y^z\n elif t < 60:\n return (x&y)|(x&z)|(y&z)\n else:\n return x^y^z\n\ndef K(t):\n if t < 20:\n return 0x5A827999\n elif t < 40:\n return 0x6ED9EBA1\n elif t < 60:\n return 0x8F1BBCDC\n else:\n return 0xCA62C1D6\n\ndef cycleLeftShift(n,k): #left shift 32-bit number n to k bits\n delta = 32-k\n return a8(n,delta)<<k|(n>>delta)\n# return (n<<k)|(n>>(32-k)) it is just for 32-bit mod\n\ndef genW(currentBlock): #generate array of w values for current block\n w = []\n for t in range(16):\n w.append(currentBlock&0xffffffff);\n currentBlock>>=32 \n w=w[::-1]\n for t in range(16,80):\n w.append(cycleLeftShift(w[t-3]^w[t-8]^w[t-14]^w[t-16],1)&0xffffffff) \n return w; \n\ndef genBlocks(s): #generate array of 512-bit blocks\n h = genBitString(s)\n delta = len(s)*8 % 512\n if (delta != 0):\n h=h<<1|1;\n if(delta < 447): # fill by 0 bits\n h<<=447-delta\n h=h<<64|len(s*8)\n else: h<<=512-delta\n print(\"{0:b}\".format(h)) \n blocks = []\n while (h > 0):\n blocks.append(a8(h,512))\n h>>=512\n return blocks[::-1] \n \ndef genBitString(s): #generate equal bit string; 1 symbol compare to ASCII 8-bit code\n h = 0;\n for ch in s:\n h = h<<8|ord(ch) \n return h\n\ndef a19(s): #SHA-1\n A = 0x67452301 #initialization\n B = 0xEFCDAB89\n C = 0x98BADCFE\n D = 0x10325476\n E = 0xC3D2E1F0\n \n blocks = genBlocks(s);\n for block in blocks:\n a = A\n b = B\n c = C\n d = D\n e = E\n w = genW(block)\n for t in range(80):\n tmp = (cycleLeftShift(a,5) + f(b,c,d,t) + e + w[t] + K(t))&0xffffffff\n e = d\n d = c\n c = cycleLeftShift(b,30)\n b = a\n a = tmp\n A=(A+a)&0xffffffff\n B=(B+b)&0xffffffff\n C=(C+c)&0xffffffff\n D=(D+d)&0xffffffff\n E=(E+e)&0xffffffff\n return [hex(A), hex(B), hex(C), hex(D), hex(E)] \n \nif __name__ == '__main__':\n print(a19(\"sha\"))\n" }, { "alpha_fraction": 0.7267441749572754, "alphanum_fraction": 0.75, "avg_line_length": 18.11111068725586, "blob_id": "2b8301d2c0e8b5a16ce1cfbed400ed8215814c06", "content_id": "0126a88b538644531ef11197c683e2faee4ec38b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 172, "license_type": "no_license", "max_line_length": 43, "num_lines": 9, "path": "/README.md", "repo_name": "Flynston/SHA-1", "src_encoding": "UTF-8", "text": "SHA-1\n=====\n\nSHA-1 algorithm\n\ntheory materials:\nhttp://life-prog.ru/view_teorinfo.php?id=11\nhttp://solutionmes.wikidot.com/crypto-sha\nhttp://kriptografea.narod.ru/Sha.html\n" } ]
2
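The a19() function in src/sha1.py above returns the five 32-bit chaining words as hex strings rather than a single digest. A quick cross-check against the standard library, sketched for Python 3 and assuming src/ is importable as sha1, joins the words (zero-padded, since hex() drops leading zeros) and compares with hashlib; any padding mistake in genBlocks would surface as a mismatch.

import hashlib
from sha1 import a19  # assumes src/ is on sys.path

words = a19("sha")                               # five hex strings, e.g. ['0x...', ...]
digest = "".join(w[2:].zfill(8) for w in words)  # strip '0x', restore leading zeros
assert digest == hashlib.sha1(b"sha").hexdigest()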
gauravinstasafe/com.instasafev2
https://github.com/gauravinstasafe/com.instasafev2
7b0b129d9007ccd450bf6a9b4f96df2351670fcd
38cef732ab665cf9cfc6175862afba5220065143
3f1e29b27587b97595306360f92c8251089c725f
refs/heads/master
2021-05-10T08:38:07.311462
2018-06-28T20:12:39
2018-06-28T20:12:39
118,897,552
0
2
null
null
null
null
null
[ { "alpha_fraction": 0.8034350872039795, "alphanum_fraction": 0.8034350872039795, "avg_line_length": 56.94444274902344, "blob_id": "f0edaa236c7c2cf636de12e18fd53a2c070953f6", "content_id": "aaeb7149855b10453db1628308c8753fa1028575", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 1048, "license_type": "permissive", "max_line_length": 151, "num_lines": 18, "path": "/test/tests/compatability/testng/before_and_after/README.rst", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "Explores how the \"before and after\" annotations from TestNG work.\n\nTest methods inside of a class with a @BeforeClass or @BeforeMethod annotation\nhave a relationship similar to dependsOn, in that if the annotated methods fail\nthen the other methods are skipped.\n\n@AfterClass and @AfterMethod run after all methods in a class, and after each\nmethod respectively. However, the relationship is different than dependsOn in\nthat if the method fails the @AfterMethod and @AfterClass methods will still\nrun. However, @AfterMethod and @AfterClass methods both have a dependsOn\nrelationship with @BeforeClass and even @BeforeMethod, in that if one of those\nfail both the methods and the \"after\" methods will not run.\n\nAdditionally if @BeforeMethod runs successfully the first time then\n@AfterMethod will run, but if the same @BeforeMethod runs again and fails @AfterMethod will not run though @AfterClass still will run. @AfterClass then\nseems to only run if a test method executes.\n\nIn the absense of test methods none of these decorators run at all.\n\n\n\n\n\n" }, { "alpha_fraction": 0.8074324131011963, "alphanum_fraction": 0.8175675868988037, "avg_line_length": 36.125, "blob_id": "1e4c72c0211c5d798d059a9dfcf5bdb99a204a4d", "content_id": "492ec0c5ad0fd838329c825fa2111fac57bb3779", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 296, "license_type": "no_license", "max_line_length": 61, "num_lines": 8, "path": "/com.instasafev2/src/tests/test_suite_demo.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "import unittest\nfrom tests.login_tests import LoginTests\n# Get all tests from the test classes\ntc1 = unittest.TestLoader().loadTestsFromTestCase(LoginTests)\n# Create a test suite combining all test classes\nsmokeTest = unittest.TestSuite([tc1])\n\nunittest.TextTestRunner(verbosity=2).run(smokeTest)" }, { "alpha_fraction": 0.6370106935501099, "alphanum_fraction": 0.6389877200126648, "avg_line_length": 27.75, "blob_id": "3bd0a8893b647406c75b794621358ff204ba3528", "content_id": "3d8c66976e0b1c2c44ea85287bf607bb44bb9120", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2529, "license_type": "permissive", "max_line_length": 75, "num_lines": 88, "path": "/test/examples/example3/tests/test_entanglement.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "import unittest\nimport mymodule\nfrom proboscis.asserts import assert_equal\nfrom proboscis.asserts import assert_false\nfrom proboscis.asserts import assert_true\n#from proboscis import before_class\nfrom proboscis import test\n\nservice_config = {\n \"user_name\":\"bob\",\n \"pass_word\":\"pass_word\"\n}\n\n@test\nclass SetUp(object):\n\n @test(groups=['setup'])\n def create_database(self):\n \"\"\"Creates a local database.\"\"\"\n mymodule.create_database()\n 
assert_true(mymodule.tables_exist())\n #assert_true(False)\n\n @test(groups=['setup'])\n def start_web_server(self):\n \"\"\"Start up web server then issue a connect to make sure its up.\"\"\"\n mymodule.start_web_server()\n client = mymodule.ServiceClient(service_config)\n assert_true(client.service_is_up)\n\n\n@test(groups=['normal'])\nclass WhenConnectingAsAdmin(object):\n\n #TODO: before_class\n def __init__(self):\n self.client = mymodule.ServiceClient(service_config)\n\n @test(groups=[\"a\"], depends_on_groups=[\"setup\"])\n def test_has_credentials(self):\n \"\"\"Make sure the given client has ADMIN access.\"\"\"\n assert_equal(self.client.check_credentials,\n mymodule.ServiceClient.ADMIN)\n\n @test(groups=['c'], depends_on_groups=[\"setup\", 'b'])\n def test_change_profile_image(self):\n \"\"\"Test changing a client's profile image.\"\"\"\n assert_equal(\"default.jpg\", self.client.get_profile_image())\n self.client.set_profile_image(\"spam.jpg\")\n assert_equal(\"spam.jpg\", self.client.get_profile_image())\n\n\n# Add more tests in the service.tests group here, or in any other file.\n# Then when we're finished...\n\n@test\nclass SomethingElse(object):\n\n def __init__(self):\n self.bee = 1\n\n @test(groups=['d'], depends_on_groups=[\"c\"])\n def whatever(self):\n self.bee += 1\n assert_equal(self.bee, 3)\n\n @test(groups=['b'], depends_on_groups=[\"a\"])\n def something_else(self):\n self.bee += 1\n assert_equal(self.bee, 2)\n\n\n@test(depends_on=[WhenConnectingAsAdmin],\n depends_on_groups=['normal'], always_run=True)\nclass ShutDown(object):\n\n @test\n def test_stop_service(self):\n \"\"\"Shut down the web service.\"\"\"\n client = mymodule.ServiceClient(service_config)\n if client.service_is_up:\n mymodule.stop_web_server()\n assert_false(client.service_is_up())\n\n @test\n def test_destroy_database(self):\n \"\"\"Destroy the local database.\"\"\"\n mymodule.destroy_database()" }, { "alpha_fraction": 0.48478835821151733, "alphanum_fraction": 0.48974868655204773, "avg_line_length": 37.769229888916016, "blob_id": "a0e3cb55cc602df0a6a1a3f8fb9352ec14856637", "content_id": "709b5b9386171c99410326005a028b5f894ce5a8", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3024, "license_type": "permissive", "max_line_length": 80, "num_lines": 78, "path": "/test/tests/unit/test_core_with.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "import unittest\nfrom tests.unit.test_core import ProboscisRegistryTest\n\n\nclass TestClassMethodEntry(ProboscisRegistryTest):\n\n def test_method_im_class_points_to_class(self):\n from proboscis import test\n from proboscis.asserts import Check\n\n @test\n def func1():\n pass\n\n @test(groups=[\"top_level_class_group\"])\n class ExampleTest(object):\n @test\n def test_1(self):\n pass\n\n with Check() as check:\n for t in self.registry.tests:\n if t.home == ExampleTest:\n pass\n elif t.home == ExampleTest.test_1:\n if not t.is_child:\n check.fail(\"Test Entry did not mark method as such!\")\n else:\n check.true(ExampleTest in t.homes,\n \"Class was not stored in 'homes' property.\")\n check.true(ExampleTest.test_1 in t.homes,\n \"Method was not stored in 'homes' property.\")\n check.equal(t.method, ExampleTest.test_1)\n # Just make sure this doesn't blow up...\n repr(t)\n elif t.home == func1:\n check.is_none(t.method)\n\n\nclass TestClassContainsAndMethodProp(ProboscisRegistryTest):\n\n def test_contains_searches_methods(self):\n from proboscis 
import test\n from proboscis.asserts import Check\n\n class Unrelated(object):\n def func1(self):\n pass\n\n @test(groups=[\"top_level_class_group\"])\n class ExampleTest(object):\n @test\n def test_1(self):\n pass\n @test(groups=[\"test_2_group\"])\n def test_2(self):\n pass\n\n with Check() as check:\n for t in self.registry.tests:\n if t.home == ExampleTest:\n check.false(t.contains([\"bjkjd\"], []))\n check.true(t.contains([\"top_level_class_group\"], []))\n check.false(t.contains([], [Unrelated]))\n check.true(t.contains([\"test_2_group\"], []))\n check.true(t.contains([], [ExampleTest]))\n elif t.home == ExampleTest.test_1:\n check.false(t.contains([\"bjkjd\"], []))\n check.true(t.contains([\"top_level_class_group\"], []))\n check.false(t.contains([], [Unrelated]))\n check.false(t.contains([\"test_2_group\"], []))\n check.true(t.contains([], [ExampleTest]))\n elif t.home == ExampleTest.test_2:\n check.false(t.contains([\"bjkjd\"], []))\n check.true(t.contains([\"top_level_class_group\"], []))\n check.false(t.contains([], [Unrelated]))\n check.true(t.contains([\"test_2_group\"], []))\n check.true(t.contains([], [ExampleTest]))\n" }, { "alpha_fraction": 0.7010050415992737, "alphanum_fraction": 0.7072864174842834, "avg_line_length": 27.428571701049805, "blob_id": "b38f125933583996a28b196d9dcf4c56b0a96107", "content_id": "1579c361a2d60e06735d233fd431568c77d9e4f5", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 796, "license_type": "permissive", "max_line_length": 60, "num_lines": 28, "path": "/test/examples/unit/tests/unit.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "import unittest\nfrom proboscis.asserts import assert_equal\nfrom proboscis import test\n\nimport utils\n\n@test(groups=[\"unit\", \"numbers\"])\nclass TestIsNegative(unittest.TestCase):\n \"\"\"Confirm that utils.is_negative works correctly.\"\"\"\n\n def test_should_return_true_for_negative_numbers(self):\n self.assertTrue(utils.is_negative(-47))\n\n def test_should_return_false_for_positive_numbers(self):\n self.assertFalse(utils.is_negative(56))\n\n def test_should_return_false_for_zero(self):\n self.assertFalse(utils.is_negative(0))\n\n#rst-break\n\n@test(groups=[\"unit\", \"strings\"])\ndef test_reverse():\n \"\"\"Make sure our complex string reversal logic works.\"\"\"\n original = \"hello\"\n expected = \"olleh\"\n actual = utils.reverse(original)\n assert_equal(expected, actual)\n" }, { "alpha_fraction": 0.5530126094818115, "alphanum_fraction": 0.5618869662284851, "avg_line_length": 21.28125, "blob_id": "38bac4cf5826451ebc56607f8e399af8c0ac8213", "content_id": "b2e9d86257a77c8f7ed6db1e6499fbb8fb3fcebe", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2141, "license_type": "permissive", "max_line_length": 62, "num_lines": 96, "path": "/test/examples/example_factory/spam_api.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "\n\nGLOBAL_ID = 10\n\nclass User(object):\n\n def __init__(self, id, user_type):\n self.id = id\n self.user_type = user_type\n\n\nclass UserApi(object):\n\n resource = {}\n\n def __init__(self):\n self.resource[1] = User(1, 'admin')\n pass\n\n def create(self, user_type):\n global GLOBAL_ID\n user = User(GLOBAL_ID, user_type)\n self.resource[user.id] = user\n GLOBAL_ID += 1\n return user\n\n def delete(self, id):\n del self.resource[id]\n\n def get(self, id):\n return 
self.resource[id]\n\n\nclass SpamHttpException(RuntimeError):\n\n def __init__(self, status_code):\n self.status_code = status_code\n\n\nclass Spam(object):\n\n def __init__(self, owner_id=None):\n global GLOBAL_ID\n self.owner_id = owner_id\n self.id = GLOBAL_ID\n GLOBAL_ID += 1\n\n\nclass SpamApi(object):\n\n resources = {}\n\n def __init__(self, users, user_id):\n self.users = users\n self.user_id = user_id\n\n def create(self, owner=None):\n if owner:\n user_id = owner\n else:\n user_id = self.user_id\n user_type = self.users.get(user_id).user_type\n if user_type not in (\"normal\", \"admin\"):\n raise SpamHttpException(401)\n spam = Spam(owner_id = self.user_id)\n self.resources[spam.id] = spam\n return spam\n\n def delete(self, id):\n user_type = self.users.get(self.user_id).user_type\n if user_type not in (\"normal\", \"admin\"):\n raise SpamHttpException(401)\n del self.resources[id]\n\n def get(self, id):\n user_type = self.users.get(self.user_id).user_type\n if user_type not in (\"normal\", \"admin\", \"restricted\"):\n raise SpamHttpException(401)\n spam = self.resources[id]\n if spam.id != id:\n raise SpamHttpException(401)\n return spam\n\n\n\nclass Api(object):\n\n def __init__(self, user_id):\n self.user_id = user_id\n self.user = UserApi()\n self.spam = SpamApi(self.user, user_id)\n\n\ndef create_admin_api():\n return Api(1)\n\ndef create_api(user_id):\n return Api(user_id)\n" }, { "alpha_fraction": 0.5718817710876465, "alphanum_fraction": 0.5905827283859253, "avg_line_length": 34.868263244628906, "blob_id": "a4b81fef52783602835e7cd7c0e6d83aa2dc50d3", "content_id": "b51761f36fc53cc62275f812618a31c5c3ad06b7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5989, "license_type": "no_license", "max_line_length": 154, "num_lines": 167, "path": "/com.instasafev2/src/tests/login_tests.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "import logging\nimport unittest\nimport time\nfrom ddt import ddt, data, unpack\nimport pytest\n\nfrom pages.loginPage import LoginPage\nimport utilities.custom_logger as cl\nfrom utilities.read_data import getCSVData\nfrom utilities.teststatus import Status\n\n\[email protected](\"setUp\")\n@ddt\nclass LoginTests(unittest.TestCase):\n log = cl.customLogger(logging.DEBUG)\n \n result_set = {}\n _username = \"richard\"\n _password = \"Richard@999\"\n\n @pytest.fixture(autouse=True) \n def objectSetup(self, setUp):\n self.lp = LoginPage(self.driver)\n #self.ts = Status(self.driver)\n # print(\"print lp \"+str(self.lp))\n i=0\n @pytest.mark.run() \n @data(*getCSVData(\"C:/Users/user/workspace/com.instasafev2/usource/usernamePassword.csv\"))\n @unpack\n def test_t1invalidLogin(self, condition, username, password):\n self.i+=1\n self.log.info(\"*#\" * 20)\n self.log.info(\"ID \"+ str(self.i) +\" test_t1_Login with \" + condition + \" credential where username is \"+ username +\" and password is \" + password)\n self.log.info(\"*#\" * 20)\n time.sleep(3)\n self.lp.multiple_login(condition, username, password)\n''' if condition==\"valid\":\n result = self.lp.verifyLogin()\n print(\"valid\")\n if condition == \"invalid\" :\n result = not(self.lp.verifyLogin())\n print(\"Invalid\")\n else :\n print(\"blank entry in CSV file\")\n \n # self.result_set[str(i)+\"_\"+condition]=result\n i+=1\n #self.assert_Result()\n \n def assert_Result(self):\n assert self.result.values() == True \n''' \n''' def test_t1invalidLogin(self, condition, username, password):\n 
self.log.info(\"*#\" * 20)\n self.log.info(\"test_t1_invalidLogin when both field blank field\")\n self.log.info(\"*#\" * 20)\n #self.lp.logout()\n #time.sleep(3)\n self.lp.clickLoginButton()\n result1 = self.lp.verifyLoginFailed()\n assert result1 == False\n \n\n @pytest.mark.run(order=2)\n def test_t2invalidLogin(self):\n self.log.info(\"*#\" * 20)\n self.log.info(\"test_t3invalidLogin started\")\n self.log.info(\"*#\" * 20)\n self.log.info(\"test with blank username and valid password field\")\n self.lp.logout()\n self.lp.login(\"\", \"gaurav@123\")\n result3 = self.lp.verifyLoginFailed()\n assert result3 == False\n \n @pytest.mark.run(order=3)\n def test_t3invalidLogin(self):\n self.log.info(\"*#\" * 20)\n self.log.info(\"test_t3invalidLogin started\")\n self.log.info(\"*#\" * 20)\n self.log.info(\"test with blank username and invalid password field\")\n self.lp.logout()\n self.lp.login(\"\", \"invalid\")\n result3 = self.lp.verifyLoginFailed()\n assert result3 == False \n \n @pytest.mark.run(order=4)\n def test_t4invalidLogin(self):\n self.log.info(\"*#\" * 20)\n self.log.info(\"test_t3invalidLogin started\")\n self.log.info(\"*#\" * 20)\n self.log.info(\"test with invalid username and blank password field\")\n self.lp.logout()\n self.lp.login(\"invalid\", \"\")\n result3 = self.lp.verifyLoginFailed()\n assert result3 == False \n \n @pytest.mark.run(order=5)\n def test_t5invalidLogin(self):\n self.log.info(\"*#\" * 20)\n self.log.info(\"test_t3invalidLogin started\")\n self.log.info(\"*#\" * 20)\n self.log.info(\"test with invaid username and invalid password field\")\n self.lp.logout()\n self.lp.login(\"invalid\", \"invalid\")\n result3 = self.lp.verifyLoginFailed()\n assert result3 == False \n\n @pytest.mark.run(order=6)\n def test_t6invalidLogin(self):\n self.log.info(\"*#\" * 20)\n self.log.info(\"test_t3invalidLogin started\")\n self.log.info(\"*#\" * 20)\n self.log.info(\"test with invaid username and valid password field\")\n self.lp.logout()\n self.lp.login(\"invalid\", self._password)\n result3 = self.lp.verifyLoginFailed()\n assert result3 == False \n \n @pytest.mark.run(order=7)\n def test_t7invalidLogin(self):\n self.log.info(\"*#\" * 20)\n self.log.info(\"test_t2_invalidLogin started\")\n self.log.info(\"*#\" * 20)\n self.log.info(\"test with valid username and blank password field\")\n self.lp.logout()\n self.lp.login(self._username, \"\")\n result2 = self.lp.verifyLoginFailed()\n assert result2 == False\n \n @pytest.mark.run(order=8)\n def test_t8invalidLogin(self):\n self.log.info(\"*#\" * 20)\n self.log.info(\"test_t3invalidLogin started\")\n self.log.info(\"*#\" * 20)\n self.log.info(\"test with valid username and invalid password field\")\n self.lp.logout()\n self.lp.login(\"self._username\", \"invalid\")\n result3 = self.lp.verifyLoginFailed()\n assert result3 == False \n\n @pytest.mark.run(order=9)\n def test_t9invalidLogin(self):\n self.log.info(\"*#\" * 20)\n self.log.info(\"test_t4invalidLogin started\")\n self.log.info(\"*#\" * 20)\n self.log.info(\"test with invalid username and blank password field\")\n self.lp.logout()\n self.lp.login(\"[email protected]\", \"Abc@122\")\n result4 = self.lp.verifyLoginFailed()\n assert result4 == False \n \n @pytest.mark.run(order=5)\n def test_t5validLogin(self):\n self.log.info(\"*#\" * 20)\n self.log.info(\"test_t5invalidLogin started\")\n self.log.info(\"*#\" * 20)\n self.lp.logout() \n self.lp.login(\"gaurav\", \"gaurav@123\")\n time.sleep(3)\n result5 = self.lp.verifyLoginTitle()\n self.ts.mark(result5, \"Title 
Verification\")\n result6 = self.lp.verifyLoginSuccessful()\n print(\"Result5: \" + str(result5))\n print(\"Result6: \" + str(result6))\n self.ts.markFinal(\"test_t5validLogin\", result6, \"Login Verification\")\n '''" }, { "alpha_fraction": 0.5767761468887329, "alphanum_fraction": 0.5813598036766052, "avg_line_length": 28.76744270324707, "blob_id": "1d2382aa6169cdc5718bb2fafaaf999757238679", "content_id": "11b171fc66bf2ee6b5f4bdcd37d96fe781eb9b36", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1309, "license_type": "no_license", "max_line_length": 87, "num_lines": 43, "path": "/com.instasafev2/src/common/common.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "'''\nCreated on 12-Sep-2017\n\n@author: gaurav\n'''\nfrom selenium.common.exceptions import NoSuchElementException, \\\n NoAlertPresentException\n\n\nclass abc():\n \n def is_element_present(self, how, what):\n try: self.driver.find_element(by=how, value=what)\n except NoSuchElementException as e:\n return False\n return True\n \n def is_alert_present(self):\n try: self.driver.switch_to.alert()\n except NoAlertPresentException as e:\n return False\n return True \n\n def close_alert_and_get_its_text(self):\n try:\n alert = self.driver.switch_to.alert\n alert_text = alert.text\n if self.accept_next_alert:\n alert.accept()\n else:\n alert.dismiss()\n return alert_text\n finally: self.accept_next_alert = True\n \n \n def test_popup_windows(self):\n driver = self.driver\n driver.find_element_by_xpath(\"//a[contains(@href, 'smallPopup.html')]\").click()\n driver.switch_to.window(\"notes\")\n driver.find_element_by_name(\"FirstName\").clear()\n element = driver.find_element_by_name(\"FirstName\")\n element.send_keys(\"Hermione\")\n self.assertTrue(\"Hermione\", element.text) \n \n " }, { "alpha_fraction": 0.6795058250427246, "alphanum_fraction": 0.6882267594337463, "avg_line_length": 22.70689582824707, "blob_id": "88ade7265a2a54a02e3a17371830718ccfede757", "content_id": "0f0541bde5f788c7368addf69bd4de62ab94f403", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 1376, "license_type": "permissive", "max_line_length": 77, "num_lines": 58, "path": "/test/tox.ini", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "# content of: tox.ini , put in same dir as setup.py\n[tox]\nenvlist = jython, py26, py27, py33\n\n\n[testenv:jython]\ncommands =\n {envpython} run_unit_tests.py\n {envpython} run_tests.py\n\n[testenv:py26]\ndeps =\n coverage\n nose\n sphinx\ncommands =\n coverage erase\n coverage run -p --source=proboscis run_unit_tests.py\n coverage run -p --source=proboscis run_tests.py\n coverage combine\n coverage html -d {envtmpdir}/covhtml -i\n sphinx-build -b html {toxinidir}/docs/source {envtmpdir}/html\n\n\n[testenv:py27]\ndeps =\n coverage\n nose\n sphinx\ncommands =\n coverage erase\n coverage run -p --source=proboscis run_unit_tests.py\n coverage run -p --source=proboscis run_tests.py\n coverage combine\n coverage html -d {envtmpdir}/covhtml -i\n sphinx-build -b html {toxinidir}/docs/source {envtmpdir}/html\n\n\n[testenv:py33]\ndeps =\n coverage\n nose\n sphinx\ncommands =\n coverage erase\n coverage run -p --source=proboscis run_unit_tests.py\n coverage run -p --source=proboscis run_tests.py\n coverage combine\n coverage html -d {envtmpdir}/covhtml -i\n sphinx-build -b html {toxinidir}/docs/source {envtmpdir}/html\n\n\n#[testenv:ipy]\n## To make 
this work, IRON_PYTHON_PATH must be defined as an absolute path to\n## IronPython.\n#commands =\n# {env:IRON_PYTHON_PATH} run_unit_tests.py\n# {env:IRON_PYTHON_PATH} run_tests.py\n\n" }, { "alpha_fraction": 0.6678966879844666, "alphanum_fraction": 0.6763310432434082, "avg_line_length": 35.480770111083984, "blob_id": "42a3b50dfc2a9aa297513a549ae29778c9493b72", "content_id": "9fe15eac82e6fff9a21895b059553478c0f62e11", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1897, "license_type": "permissive", "max_line_length": 78, "num_lines": 52, "path": "/test/setup.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "# vim: tabstop=4 shiftwidth=4 softtabstop=4\n\n\n# Copyright 2010 United States Government as represented by the\n# Administrator of the National Aeronautics and Space Administration.\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nimport os\nfrom setuptools import setup\n\ndef read(fname):\n return open(os.path.join(os.path.dirname(__file__), fname)).read()\n\nsetup(\n name=\"proboscis\",\n version=\"1.2.6.0\",\n author='Rackspace',\n author_email='[email protected]',\n description=\"Extends Nose with certain TestNG like features.\",\n keywords=\"nose test testng\",\n long_description=\"Proboscis is a Python test framework that extends \"\n \"Python's built-in unittest module and Nose with \"\n \"features from TestNG.\",\n url='https://github.com/rackerlabs/python-proboscis',\n license='Apache',\n classifiers = [\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Information Technology',\n 'License :: OSI Approved :: Apache Software License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n ],\n py_modules=[],\n packages=['proboscis', 'proboscis.compatability'],\n scripts=[],\n tests_require=[\"nose\"],\n test_suite=\"nose.collector\"\n)\n" }, { "alpha_fraction": 0.5919345617294312, "alphanum_fraction": 0.5925942063331604, "avg_line_length": 37.4754638671875, "blob_id": "068c3d6d9f21bb37baf8bccad7a7aaca783fa1f4", "content_id": "35e294664845df9ccfc24552987d774866c2bcc2", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 22739, "license_type": "permissive", "max_line_length": 80, "num_lines": 591, "path": "/test/proboscis/case.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "# Copyright (c) 2011 Rackspace\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. 
You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\n\"\"\"Creates TestCases from a list of TestEntries.\n\nThis module mainly exists to translate Proboscis classes and concepts into the\nunittest equivalents.\n\n\"\"\"\n\nimport os\nimport pydoc\nimport types\nimport unittest\nimport sys\n\nfrom collections import deque\nfrom functools import wraps\n\nfrom proboscis import compatability\nfrom proboscis import dependencies\nfrom proboscis import SkipTest\nfrom proboscis.sorting import TestGraph\nfrom proboscis.core import TestMethodClassEntry\nfrom proboscis.decorators import DEFAULT_REGISTRY\n\n# This is here so Proboscis own test harness can change it while still calling\n# TestProgram normally. Its how the examples are tested.\nOVERRIDE_DEFAULT_STREAM = None\n\n\nclass TestPlan(object):\n \"\"\"Grabs information from the TestRegistry and creates a test plan.\"\"\"\n\n def __init__(self, groups, test_entries, factories):\n test_cases = self.create_cases(test_entries, factories)\n graph = TestGraph(groups, test_entries, test_cases)\n self.tests = graph.sort()\n\n @staticmethod\n def create_from_registry(registry):\n \"\"\"Returns a sorted TestPlan from a TestRegistry instance.\"\"\"\n return TestPlan(registry.groups, registry.tests, registry.factories)\n\n @staticmethod\n def create_cases_from_instance(factory, instance):\n if isinstance(instance, type):\n raise RuntimeError(\"Factory %s returned type %s (rather than an \"\n \"instance), which is not allowed.\" % (factory, instance))\n if isinstance(instance, types.MethodType):\n home = compatability.get_method_function(instance)\n elif isinstance(instance, types.FunctionType):\n home = instance\n else:\n home = type(instance)\n if issubclass(home, unittest.TestCase):\n raise RuntimeError(\"Factory %s returned a unittest.TestCase \"\n \"instance %s, which is not legal.\")\n try:\n entry = home._proboscis_entry_\n except AttributeError:\n raise RuntimeError(\"Factory method %s returned an instance %s \"\n \"which was not tagged as a Proboscis TestEntry.\" %\n (factory, instance))\n entry.mark_as_used_by_factory() # Don't iterate this since a\n # function is creating it.\n if entry.is_child:\n raise RuntimeError(\"Function %s, which exists as a bound method \"\n \"in a decorated class may not be returned from a factory.\" %\n instance)\n # There is potentially an issue in that a different Registry might\n # register an entry, and we could then read that in with a factory.\n # Later the entry would not be found in the dictionary of entries.\n if isinstance(instance, types.MethodType):\n try:\n state = TestMethodState(instance.im_self)\n except AttributeError:\n raise RuntimeError(\"Only bound methods may be returned from \"\n \"factories. 
%s is not bound.\" % instance)\n else:\n state = TestMethodState(entry, instance)\n return TestPlan._create_test_cases_for_entry(entry, state)\n\n @staticmethod\n def create_cases(test_entries, factories):\n tests = []\n entries = {}\n for factory in factories:\n list = factory()\n for item in list:\n cases = TestPlan.create_cases_from_instance(factory, item)\n tests += cases\n for entry in test_entries:\n if not entry.is_child and not entry.used_by_factory:\n test_cases = TestPlan._create_test_cases_for_entry(entry)\n entries[entry] = test_cases\n tests += test_cases\n return tests\n\n @staticmethod\n def _create_test_cases_for_entry(entry, state=None):\n \"\"\"Processes a test case entry.\"\"\"\n if not hasattr(entry, 'children'): # function or unittest.TestCase\n return [TestCase(entry)]\n state = state or TestMethodState(entry)\n cases = []\n for child_entry in entry.children:\n case = TestCase(child_entry, state=state)\n cases.append(case)\n return cases\n\n def create_test_suite(self, config, loader):\n \"\"\"Transforms the plan into a Nose test suite.\"\"\"\n creator = TestSuiteCreator(loader)\n if dependencies.use_nose:\n from nose.suite import ContextSuiteFactory\n suite = ContextSuiteFactory(config)([])\n else:\n suite = unittest.TestSuite()\n for case in self.tests:\n if case.entry.info.enabled and case.entry.home is not None:\n tests = creator.loadTestsFromTestEntry(case)\n for test in tests:\n suite.addTest(test)\n return suite\n\n def filter(self, group_names=None, classes=None, functions=None):\n \"\"\"Whittles down test list to those matching criteria.\"\"\"\n test_homes = []\n classes = classes or []\n functions = functions or []\n for cls in classes:\n test_homes.append(cls)\n for function in functions:\n test_homes.append(function)\n group_names = group_names or []\n filtered_list = []\n while self.tests:\n case = self.tests.pop()\n if case.entry.contains(group_names, test_homes):\n filtered_list.append(case)\n # Add any groups this depends on so they will run as well.\n for group_name in case.entry.info.depends_on_groups:\n if not group_name in group_names:\n group_names.append(group_name)\n for test_home in case.entry.info.depends_on:\n if not test_home in test_homes:\n test_homes.append(test_home)\n self.tests = list(reversed(filtered_list))\n\n\nclass TestCase(object):\n \"\"\"Represents an instance of a TestEntry.\n\n This class is also used to store status information, such as the dependent\n TestEntry objects (discovered when this test is sorted) and any failure\n in the dependencies of this test (used to raise SkipTest if needed).\n\n There may be multiple TestCase instances for each TestEntry instance.\n\n \"\"\"\n def __init__(self, entry, state=None):\n self.entry = entry\n self.dependents = [] # This is populated when we sort the tests.\n self.dependency_failure = None\n self.state = state\n\n def check_dependencies(self, test_self):\n \"\"\"If a dependency has failed, SkipTest is raised.\"\"\"\n if self.dependency_failure is not None and \\\n self.dependency_failure != self and not self.entry.info.always_run:\n home = self.dependency_failure.entry.home\n dependencies.skip_test(test_self, \"Failure in %s\" % home)\n\n def fail_test(self, dependency_failure=None):\n \"\"\"Called when this entry fails to notify dependents.\"\"\"\n if not dependency_failure:\n dependency_failure = self\n if not self.dependency_failure: # Do NOT overwrite the first cause\n self.dependency_failure = dependency_failure\n for dependent in self.dependents:\n if dependent.critical:\n 
dependent.case.fail_test(\n dependency_failure=dependency_failure)\n\n def write_doc(self, file):\n file.write(str(self.entry.home) + \"\\n\")\n doc = pydoc.getdoc(self.entry.home)\n if doc:\n file.write(doc + \"\\n\")\n for field in str(self.entry.info).split(', '):\n file.write(\"\\t\" + field + \"\\n\")\n\n def __repr__(self):\n return \"TestCase(\" + repr(self.entry.home) + \", \" + \\\n repr(self.entry.info) + \", \" + object.__repr__(self) + \")\"\n\n def __str__(self):\n return \"Home = \" + str(self.entry.home) + \", Info(\" + \\\n str(self.entry.info) + \")\"\n\n\nclass TestResultListener():\n \"\"\"Implements methods of TestResult to be informed of test failures.\"\"\"\n\n def __init__(self, chain_to_cls):\n self.chain_to_cls = chain_to_cls\n\n def addError(self, test, err):\n self.onError(test)\n self.chain_to_cls.addError(self, test, err)\n\n def addFailure(self, test, err):\n self.onError(test)\n self.chain_to_cls.addFailure(self, test, err)\n\n def addSkip(self, test, err):\n self.onError(test)\n self.chain_to_cls.addSkip(self, test, err)\n\n def onError(self, test):\n \"\"\"Notify a test entry and its dependents of failure.\"\"\"\n if dependencies.use_nose:\n root = test.test\n else:\n root = test\n if hasattr(root, \"__proboscis_case__\"):\n case = root.__proboscis_case__\n case.fail_test()\n\n\n\nclass TestResult(TestResultListener, dependencies.TextTestResult):\n \"\"\"Adds Proboscis skip on dependency failure functionality.\n\n Extends either Nose or unittest's TextTestResult class.\n If a program needs to use its own TestResult class it must inherit from\n this class and call \"onError\" at the start of both the addError and\n addFailure functions, passing the \"test\" parameter, to keep\n Proboscis's skip on depdendency failure functionality.\n\n \"\"\"\n\n # I had issues extending TextTestResult directly so resorted to this.\n\n def __init__(self, *args, **kwargs):\n TestResultListener.__init__(self, dependencies.TextTestResult)\n dependencies.TextTestResult.__init__(self, *args, **kwargs)\n\n\ndef test_runner_cls(wrapped_cls, cls_name):\n \"\"\"Creates a test runner class which uses Proboscis TestResult.\"\"\"\n new_dict = wrapped_cls.__dict__.copy()\n\n if dependencies.use_nose:\n def cb_make_result(self):\n return TestResult(self.stream, self.descriptions, self.verbosity,\n self.config)\n else:\n def cb_make_result(self):\n return TestResult(self.stream, self.descriptions, self.verbosity)\n new_dict[\"_makeResult\"] = cb_make_result\n return type(cls_name, (wrapped_cls,), new_dict)\n\n\ndef skippable_func(test_case, func):\n \"\"\"Gives free functions a Nose independent way of skipping a test.\n\n The unittest module TestCase class has a skipTest method, but to run it you\n need access to the TestCase class. 
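The TestResult docstring embedded just above spells out a contract for custom result classes: inherit from Proboscis's TestResult and call onError at the start of both addError and addFailure so that skip-on-dependency-failure keeps working. A minimal sketch under that contract follows; the class name LoggingTestResult and the print calls are illustrative additions, and only the placement of the onError calls comes from the documented contract:

```python
# Hypothetical subclass following the contract in the TestResult docstring.
from proboscis.case import TestResult

class LoggingTestResult(TestResult):
    """Illustrative custom result; only the onError() calls are required."""

    def addError(self, test, err):
        self.onError(test)  # notify dependents first, per the docstring
        print("ERROR in %s" % test)
        TestResult.addError(self, test, err)

    def addFailure(self, test, err):
        self.onError(test)  # same rule for failures
        print("FAILURE in %s" % test)
        TestResult.addFailure(self, test, err)
```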
This wraps the runTest method of the\n underlying unittest.TestCase subclass to invoke the skipTest method if it\n catches the SkipTest exception.\n\n \"\"\"\n s_func = None\n if dependencies.use_nose:\n s_func = func\n else:\n @wraps(func)\n def skip_capture_func():\n st = compatability.capture_exception(func, SkipTest)\n if st is not None:\n dependencies.skip_test(test_case, st.message)\n s_func = skip_capture_func\n\n @wraps(s_func)\n def testng_method_mistake_capture_func():\n compatability.capture_type_error(s_func)\n\n return testng_method_mistake_capture_func\n\n\nclass FunctionTest(unittest.FunctionTestCase):\n \"\"\"Wraps a single function as a test runnable by unittest / nose.\"\"\"\n\n def __init__(self, test_case):\n func = test_case.entry.home\n _old_setup = None\n if hasattr(func, 'setup'): # Don't destroy nose-style setup\n _old_setup = func.setup\n def cb_check(cb_self=None):\n test_case.check_dependencies(self)\n if _old_setup is not None:\n _old_setup()\n self.__proboscis_case__ = test_case\n sfunc = skippable_func(self, func)\n unittest.FunctionTestCase.__init__(self, testFunc=sfunc, setUp=cb_check)\n\n\nclass TestMethodState(object):\n \"\"\"Manages a test class instance used by one or more test methods.\"\"\"\n\n def __init__(self, entry, instance=None):\n self.entry = entry\n # This would be a simple \"isinstance\" but due to the reloading mania\n # needed for Proboscis's documentation tests it has to be a bit\n # weirder.\n if not str(type(self.entry)) == str(TestMethodClassEntry):\n raise RuntimeError(\"%s is not a TestMethodClassEntry but is a %s.\"\n % (self.entry, type(self.entry)))\n self.instance = instance\n\n def get_state(self):\n if not self.instance:\n self.instance = self.entry.home()\n return self.instance\n\n\nclass MethodTest(unittest.FunctionTestCase):\n \"\"\"Wraps a method as a test runnable by unittest.\"\"\"\n\n def __init__(self, test_case):\n assert test_case.state is not None\n #TODO: Figure out how to attach calls to BeforeMethod and BeforeClass,\n # AfterMethod and AfterClass. 
It should be easy enough to\n # just find them using the TestEntry parent of the test_case entry.\n def cb_check(cb_self=None):\n test_case.check_dependencies(self)\n @wraps(test_case.entry.home)\n def func(self=None): # Called by FunctionTestCase\n func = test_case.entry.home\n func(test_case.state.get_state())\n self.__proboscis_case__ = test_case\n sfunc = skippable_func(self, func)\n unittest.FunctionTestCase.__init__(self, testFunc=sfunc, setUp=cb_check)\n\n\ndef decorate_class(setUp_method=None, tearDown_method=None):\n \"\"\"Inserts method calls in the setUp / tearDown methods of a class.\"\"\"\n def return_method(cls):\n \"\"\"Returns decorated class.\"\"\"\n new_dict = cls.__dict__.copy()\n if setUp_method:\n if hasattr(cls, \"setUp\"):\n @wraps(setUp_method)\n def _setUp(self):\n setUp_method(self)\n cls.setUp(self)\n else:\n @wraps(setUp_method)\n def _setUp(self):\n setUp_method(self)\n new_dict[\"setUp\"] = _setUp\n if tearDown_method:\n if hasattr(cls, \"tearDown\"):\n @wraps(tearDown_method)\n def _tearDown(self):\n tearDown_method(self)\n cls.tearDown(self)\n else:\n @wraps(tearDown_method)\n def _tearDown(self):\n tearDown_method(self)\n new_dict[\"tearDown\"] = _tearDown\n return type(cls.__name__, (cls,), new_dict)\n return return_method\n\n\nclass TestSuiteCreator(object):\n \"\"\"Turns Proboscis test cases into elements to be run by unittest.\"\"\"\n\n def __init__(self, loader):\n self.loader = loader\n\n def loadTestsFromTestEntry(self, test_case):\n \"\"\"Wraps a test class in magic so it will skip on dependency failures.\n\n Decorates the testEntry class's setUp method to raise SkipTest if\n tests this test was dependent on failed or had errors.\n\n \"\"\"\n home = test_case.entry.home\n if home is None:\n return []\n if isinstance(home, type):\n return self.wrap_unittest_test_case_class(test_case)\n if isinstance(home, types.FunctionType):\n if home._proboscis_entry_.is_child:\n return self.wrap_method(test_case)\n else:\n return self.wrap_function(test_case)\n raise RuntimeError(\"Unknown test type: \" + str(type(home)))\n\n def wrap_function(self, test_case):\n return [FunctionTest(test_case)]\n\n def wrap_method(self, test_case):\n return [MethodTest(test_case)]\n\n def wrap_unittest_test_case_class(self, test_case):\n original_cls = test_case.entry.home\n def cb_check(cb_self):\n test_case.check_dependencies(cb_self)\n testCaseClass = decorate_class(setUp_method=cb_check)(original_cls)\n testCaseNames = self.loader.getTestCaseNames(testCaseClass)\n if not testCaseNames and hasattr(testCaseClass, 'runTest'):\n testCaseNames = ['runTest']\n suite = []\n if issubclass(original_cls, unittest.TestCase):\n for name in testCaseNames:\n test_instance = testCaseClass(name)\n setattr(test_instance, \"__proboscis_case__\", test_case)\n suite.append(test_instance)\n return suite\n\n\nclass TestProgram(dependencies.TestProgram):\n \"\"\"Use this to run Proboscis.\n\n Translates the Proboscis test registry into types used by Nose or unittest\n in order to run the program.\n\n Most arguments to this are simply passed to Nose or unittest's TestProgram\n class.\n\n For most cases using the default arguments works fine.\n\n :param registry: The test registry to use. If unset uses the default global\n registry.\n :param groups: A list of strings representing the groups of tests to run.\n The list is added to by parsing the argv argument. If unset\n then all groups are run.\n :param testLoader: The test loader. 
By default, its unittest.TestLoader.\n :param config: The config passed to Nose or unittest.TestProgram. The\n config determines things such as plugins or output streams,\n so it may be necessary to create this for advanced use\n cases.\n :param plugins: Nose plugins. Similar to config it may be necessary to\n set this in an advanced setup.\n :param env: By default is os.environ. This is used only by Nose.\n :param testRunner: By default Proboscis uses its own. If this is set\n however care must be taken to avoid breaking Proboscis's\n automatic skipping of tests on dependency failures.\n In particular, _makeResult must return a subclass of\n proboscis.TestResult which calls\n proboscis.TestResult.onError at the start of the\n addFailure and addError methods.\n :param stream: By default this is standard out.\n :param argv: By default this is sys.argv. Proboscis parses this for the\n --group argument.\n \"\"\"\n def __init__(self,\n registry=DEFAULT_REGISTRY,\n groups=None,\n testLoader=None,\n config=None,\n plugins=None,\n env=None,\n testRunner=None,\n stream=None,\n argv=None,\n *args, **kwargs):\n groups = groups or []\n argv = argv or sys.argv\n argv = self.extract_groups_from_argv(argv, groups)\n if \"suite\" in kwargs:\n raise ValueError(\"'suite' is not a valid argument, as Proboscis \" \\\n \"creates the suite.\")\n\n self.__loader = testLoader or unittest.TestLoader()\n\n if OVERRIDE_DEFAULT_STREAM:\n stream = OVERRIDE_DEFAULT_STREAM\n\n if env is None:\n env = os.environ\n if dependencies.use_nose and config is None:\n config = self.makeConfig(env, plugins)\n if not stream:\n stream = config.stream\n\n stream = stream or sys.stdout\n\n if testRunner is None:\n runner_cls = test_runner_cls(dependencies.TextTestRunner,\n \"ProboscisTestRunner\")\n if dependencies.use_nose:\n testRunner = runner_cls(stream,\n verbosity=3, # config.verbosity,\n config=config)\n else:\n testRunner = runner_cls(stream, verbosity=3)\n\n #registry.sort()\n self.plan = TestPlan.create_from_registry(registry)\n\n if len(groups) > 0:\n self.plan.filter(group_names=groups)\n self.cases = self.plan.tests\n if \"--show-plan\" in argv:\n self.__run = self.show_plan\n else:\n self.__suite = self.create_test_suite_from_entries(config,\n self.cases)\n def run():\n if dependencies.use_nose:\n dependencies.TestProgram.__init__(\n self,\n suite=self.__suite,\n config=config,\n env=env,\n plugins=plugins,\n testLoader=testLoader, # Pass arg, not what we create\n testRunner=testRunner,\n argv=argv,\n *args, **kwargs\n )\n else:\n dependencies.TestProgram.__init__(\n self,\n suite=self.__suite,\n config=config,\n testLoader=testLoader, # Pass arg, not what we create\n testRunner=testRunner,\n argv=argv,\n *args, **kwargs\n )\n self.__run = run\n\n def create_test_suite_from_entries(self, config, cases):\n \"\"\"Creates a suite runnable by unittest.\"\"\"\n return self.plan.create_test_suite(config, self.__loader)\n\n def extract_groups_from_argv(self, argv, groups):\n \"\"\"Given argv, records the \"--group\" options.\n\n :param argv: A list of arguments, such as sys.argv.\n :param groups: A list of strings for each group to run which is added\n to.\n\n Returns a copy of param argv with the --group options removed. 
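The TestProgram docstring and the extract_groups_from_argv helper above describe how --group=<name> options are pulled out of argv before the remaining arguments go to Nose or unittest. A minimal runner sketch of that flow; the tests.service_tests module name is a hypothetical placeholder, while run_and_exit and the --group/--show-plan flags come from the code above:

```python
# Hypothetical run_tests.py exercising the argv handling described above.
# Example invocation:  python run_tests.py --group=service.tests --show-plan
from proboscis import TestProgram

if __name__ == '__main__':
    import tests.service_tests  # hypothetical module registering @test entries
    TestProgram().run_and_exit()  # strips --group=... from sys.argv, then runs
```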
This is\n useful if argv needs to be passed to another program such as Nose.\n\n \"\"\"\n new_argv = [argv[0]]\n for arg in argv[1:]:\n if arg[:8] == \"--group=\":\n groups.append(arg[8:])\n else:\n new_argv.append(arg)\n return new_argv\n\n def run_and_exit(self):\n \"\"\"Calls unittest or Nose to run all tests.\n\n unittest will call sys.exit on completion.\n\n \"\"\"\n self.__run()\n\n def show_plan(self):\n \"\"\"Prints information on test entries and the order they will run.\"\"\"\n print(\" * * * Test Plan * * *\")\n for case in self.cases:\n case.write_doc(sys.stdout)\n\n @property\n def test_suite(self):\n return self.__suite\n" }, { "alpha_fraction": 0.7897648811340332, "alphanum_fraction": 0.795297384262085, "avg_line_length": 50.64285659790039, "blob_id": "798e594f9cf3c30d622720af0d135619bf92c8da", "content_id": "7c6c1ced84baec7e2dad6247139185eaf74de9b3", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 1446, "license_type": "permissive", "max_line_length": 79, "num_lines": 28, "path": "/test/docs/source/how.rst", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "How it works\n------------\n\nProboscis uses decorators similar to TestNG's annotations instead of naming\nconventions to mark classes and\nfunctions as tests. If a class is decorated with proboscis.test, a single\ninstance of the class is created and used to run all decorated methods, similar\nto TestNG. However, if the decorated class extends unittest.TestCase it is run\nusing the traditional rules.\n\nLike TestNG, Proboscis allows tests to be added to groups so they can be\norganized and run independently of the code layout (similar to tags in Nose).\nIt also\nlets tests cleanly and explicitly declare dependencies on other tests, opening\nthe door for functional and integration testing (if you are maintaining Python\ntests which use funny names like \"010_start\", \"020_connect\", etc. or find that\neverything breaks when you try to move a module to a different package, you're\nrunning tests like this now and need to be using a tool that supports them).\nOf course, Proboscis works fine for unit testing as well.\n\nProboscis also supports factory methods, which operate similar to those in\nTestNG.\n\nProboscis will use Nose instead of the unittest module if it is available;\notherwise, it uses only the core Python libraries so that it can also run on\nIron Python and Jython. Some Nose plugins work with Proboscis out of the box,\nwhile others may take some prodding or not work at all. 
Proboscis works only\nin Python 2, but Python 3 support is pending.\n" }, { "alpha_fraction": 0.5102040767669678, "alphanum_fraction": 0.7551020383834839, "avg_line_length": 23.5, "blob_id": "64ff539ae4fdd294e06202fd45d49dd81765dccd", "content_id": "d4b492f0bb970cb030bb090ae30ccf64e9363491", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 49, "license_type": "no_license", "max_line_length": 29, "num_lines": 2, "path": "/.metadata/.plugins/org.eclipse.pde.core/.cache/clean-cache.properties", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "#Cached timestamps\n#Sun Feb 25 21:35:33 IST 2018\n" }, { "alpha_fraction": 0.6783515214920044, "alphanum_fraction": 0.6792989373207092, "avg_line_length": 30.736841201782227, "blob_id": "1366cbe8af6fb31062b82d22e318797da4a4792e", "content_id": "ac54e615edd812e6876cdb39d9b5fa7bc1762d3f", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4222, "license_type": "permissive", "max_line_length": 79, "num_lines": 133, "path": "/test/examples/example2/tests/service_tests.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "\"\"\"\nUser service tests.\n\nThis is a test for a fictitious user web service which has rich client bindings\nwritten in Python.\n\nIt assumes we have an existing test database which we can run the web service\nagainst, using the function \"mymodule.start_web_server().\"\n\nAfter spinning up the service, the test creates a new user and tests various\nCRUD actions. Since its a test database, it is OK\nto leave old users in the system but we try to always delete them if possible\nat the end of the test.\n\n\"\"\"\n\nfrom datetime import datetime\nimport random\nimport types\nimport unittest\nimport mymodule\nfrom proboscis.asserts import assert_equal\nfrom proboscis.asserts import assert_false\nfrom proboscis.asserts import assert_raises\nfrom proboscis.asserts import assert_true\nfrom proboscis import SkipTest\nfrom proboscis import test\n\ndb_config = {\n \"url\": \"test.db.mycompany.com\",\n \"user\": \"service_admin\",\n \"password\": \"pass\"\n}\n\n\ntest_user = None\n\n\ndef generate_new_user_config():\n \"\"\"Constructs the dictionary needed to make a new user.\"\"\"\n new_user_config = {\n \"username\": \"TEST_%s_%s\" % (datetime.now(), random.randint(0, 256)),\n \"password\": \"password\",\n \"type\":\"normal\"\n }\n return new_user_config\n\n\n\n@test\ndef initialize_database():\n \"\"\"Creates a local database.\"\"\"\n mymodule.create_database()\n assert_true(mymodule.tables_exist())\n\n@test(depends_on=[initialize_database])\ndef initialize_web_server():\n \"\"\"Starts up the web service.\"\"\"\n mymodule.start_web_server()\n admin = mymodule.get_admin_client()\n assert_true(admin.service_is_up)\n\n\n@test(groups=[\"user\", \"service.tests\"],\n depends_on=[initialize_web_server])\ndef create_user():\n random.seed()\n global test_user\n test_user = None\n new_user_config = generate_new_user_config()\n admin = mymodule.get_admin_client()\n test_user = admin.create_user(new_user_config)\n assert_equal(test_user.username, new_user_config[\"username\"])\n assert_true(test_user.id is not None)\n assert_true(isinstance(test_user.id, int))\n\n\n@test(groups=[\"user\", \"user.tests\", \"service.tests\"],\n depends_on=[create_user])\ndef user_cant_connect_with_wrong_password():\n assert_raises(mymodule.UserNotFoundException, mymodule.login,\n 
{'username':test_user.username, 'password':'fdgggdsds'})\n\n\n@test(groups=[\"user\", \"user.tests\", \"service.tests\"],\n depends_on=[create_user])\nclass WhenConnectingAsANormalUser(unittest.TestCase):\n\n def setUp(self):\n self.client = mymodule.login({\n 'username':test_user.username, 'password':'password'})\n\n def test_auth_create(self):\n \"\"\"Make sure the given client cannot perform admin actions..\"\"\"\n self.assertRaises(mymodule.AuthException, self.client.create_user,\n generate_new_user_config())\n\n def test_auth_delete(self):\n \"\"\"Make sure the given client cannot perform admin actions..\"\"\"\n self.assertRaises(mymodule.AuthException, self.client.delete_user,\n test_user.id)\n\n def test_change_profile_image(self):\n \"\"\"Test changing a client's profile image.\"\"\"\n self.assertEquals(\"default.jpg\", self.client.get_profile_image())\n self.client.set_profile_image(\"spam.jpg\")\n self.assertEquals(\"spam.jpg\", self.client.get_profile_image())\n\n\n@test(groups=[\"user\", \"service.tests\"], depends_on_groups=[\"user.tests\"],\n always_run=True)\ndef delete_user():\n if test_user is None:\n raise SkipTest(\"User tests were never run.\")\n admin = mymodule.get_admin_client()\n admin.delete_user(test_user.id)\n assert_raises(mymodule.UserNotFoundException, mymodule.login,\n {'username':test_user.username, 'password':'password'})\n\n\n# Add more tests in the service.tests group here, or in any other file.\n# Then when we're finished...\n\n\n@test(groups=[\"service.shutdown\"], depends_on_groups=[\"service.tests\"],\n always_run=True)\ndef shut_down():\n \"\"\"Shut down the web service and destroys the database.\"\"\"\n admin = mymodule.get_admin_client()\n if admin.service_is_up:\n mymodule.stop_web_server()\n assert_false(admin.service_is_up())\n mymodule.destroy_database()\n\n" }, { "alpha_fraction": 0.7627118825912476, "alphanum_fraction": 0.7627118825912476, "avg_line_length": 10.800000190734863, "blob_id": "ebc2b916fa5c916460836cdadf515fcbda292c2c", "content_id": "b2538cdca14e9bddbdafcccf04b12929d6d13a5e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 59, "license_type": "no_license", "max_line_length": 27, "num_lines": 5, "path": "/com.safehats/src/test/java/com/safehats/base/constant.java", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "package com.safehats.base;\n\npublic interface constant {\n\n}\n" }, { "alpha_fraction": 0.6220028400421143, "alphanum_fraction": 0.6431593894958496, "avg_line_length": 32, "blob_id": "3d9e164a1338b2b5027e12905044bb595e317ac1", "content_id": "3bb202473a8d56d40782cada472a64a91fd5c169", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1418, "license_type": "no_license", "max_line_length": 96, "num_lines": 43, "path": "/com.instasafev2/src/tests/gateway_test.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "'''\nCreated on 28-Jun-2018\n\n@author: gaurav\n'''\n\nimport logging\nimport unittest\nimport pytest\nimport utilities.custom_logger as cl\nfrom pages.companyPortal.gateway_Page import GatewayPages\n\n\nclass Test_GatewayTests(unittest.TestCase):\n log = cl.customLogger(logging.DEBUG)\n \n @pytest.fixture(autouse=True)\n def objectSetup(self, universalSetUp):\n self.gp = GatewayPages(universalSetUp)\n \n @pytest.mark.run(order = 0)\n def test_t1AddGateway(self):\n self.log.info(\"*#\" * 20)\n self.log.info(\"test_t1 Add Gateway start\")\n 
self.log.info(\"*#\" * 20)\n self.log.info(\"navigate to Gateway page\")\n self.gp.navigate_to_gateway_Add_window() \n self.log.info(\"navigate to Gateway Add page\")\n self.gp.add_Single_Gateway(\"Gateway1\", \"Location1\", \"BackUpGateway\", \"20\", \"10.14.70.2\")\n self.log.info(\"controller add process complete now verify newly added controller\")\n result = self.gp.verify_gateway_entry(\"Gateway1\")\n assert result == True\n \n \n @pytest.mark.run( after = \"test_t1AddGateway\")\n def test_delete_gateway(self):\n self.log.info(\"*#\" * 20)\n self.log.info(\"test_delete_controller\")\n self.log.info(\"*#\" * 20) \n self.log.info(\"navigate to controller page\")\n self.gp.navigategatewayPage()\n result = self.gp.delete_Sigle_Gateway(\"test\") \n assert result == True" }, { "alpha_fraction": 0.6283028721809387, "alphanum_fraction": 0.6388128399848938, "avg_line_length": 33.29187774658203, "blob_id": "1d5203bab39a6e309b3de31e8bd23dd97cf388b8", "content_id": "78c2c5ff8513496d6b46da2080b40937efb7a7d2", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13511, "license_type": "permissive", "max_line_length": 79, "num_lines": 394, "path": "/test/tests/unit/test_sorting.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "# Copyright (c) 2011 Rackspace\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\n\"\"\"Tests the internal logic of the proboscis module.\"\"\"\n\nimport imp\nimport sys\nimport time\nimport unittest\n\n\nfrom proboscis.asserts import assert_raises\nfrom proboscis.asserts import assert_true\nfrom proboscis.asserts import assert_false\nfrom proboscis import compatability\nfrom proboscis import decorators\nfrom proboscis.decorators import expect_exception\nfrom proboscis.decorators import time_out\nfrom proboscis.decorators import TimeoutError\nfrom proboscis import ProboscisTestMethodClassNotDecorated\n\n# We can't import Proboscis classes here or Nose will try to run them as tests.\n\ndef assert_sort_order_is_correct(result):\n \"\"\"Tests the result of a sort.\"\"\"\n # TODO(tim.simpson): Clean this up, its kind of confusing.\n for i in range(len(result)):\n case = result[i]\n for d in case.entry.info.depends_on:\n for j in range(i, len(result)):\n if d is result[j].entry.home:\n return \"Invalid sort: \" + str(case) + \" appears \" + \\\n \" before \" + str(result[j]) + \" but depends on it.\"\n for d in case.entry.info.depends_on_groups:\n for j in range(i, len(result)):\n for g in result[j].groups:\n if d == g:\n return \"Invalid sort: \" + str(case) + \\\n \" depends on group \" + d + \", but \" \\\n \"appears before \" + str(result[j]) + \\\n \" which is itself in group \" + g + \".\"\n\n\n# Fake classes we use as Nodes\nclass N2(unittest.TestCase):\n pass\nclass N3(unittest.TestCase):\n pass\ndef N5():\n pass\ndef N7():\n pass\nclass N8(unittest.TestCase):\n pass\nclass N9(unittest.TestCase):\n pass\ndef N10():\n pass\nclass N11(unittest.TestCase):\n pass\n\n\ndef remove_entry(home):\n \"\"\"Proboscis fails if a class or function is registry twice.\n This prevents that.\"\"\"\n if hasattr(home, '_proboscis_entry_'):\n delattr(home, '_proboscis_entry_')\n\ndef remove_entries():\n for item in [N2, N3, N5, N7, N8, N9, N10, N11]:\n remove_entry(item)\n\nclass TestValidation(unittest.TestCase):\n\n def test_should_not_allow_group_to_depend_on_self(self):\n from proboscis import TestRegistry\n registry = TestRegistry()\n try:\n registry.register(N2, groups=[\"tests\"],\n depends_on_groups=[\"tests\"])\n self.fail(\"Expected runtime error.\")\n except RuntimeError:\n pass\n\n def test_should_not_allow_classes_to_depend_on_self(self):\n from proboscis import TestRegistry\n registry = TestRegistry()\n try:\n registry.register(N2, depends_on_classes=[N2])\n self.fail(\"Expected runtime error.\")\n except RuntimeError:\n pass\n\n\nclass TestTopologicalSort(unittest.TestCase):\n\n def setUp(self):\n remove_entries()\n\n def test_simple_sort(self):\n from proboscis.case import TestPlan\n from proboscis.sorting import TestGraph\n from proboscis import TestRegistry\n registry = TestRegistry()\n registry.register(N2, groups=[\"blah\"], depends_on_classes=[N11])\n registry.register(N3, depends_on_classes=[N11, N2])\n registry.register(N7, depends_on_groups=[\"blah\"])\n registry.register(N11)\n cases = TestPlan.create_cases(registry.tests, [])\n graph = TestGraph(registry.groups, registry.tests, cases)\n sorted_entries = graph.sort()\n result = list(case.entry.home for case in sorted_entries)\n expected = [N11, N2, N3, N7]\n self.assertEqual(4, len(result))\n self.assertEqual(N11, result[0])\n self.assertEqual(N2, result[1])\n self.assertTrue((result[2] == N3 and result[3] == N7) or \\\n (result[2] == N7 or result[3] == N2))\n\n def test_complex_sort(self):\n from 
proboscis.case import TestPlan\n from proboscis.sorting import TestGraph\n from proboscis import TestRegistry\n\n registry = TestRegistry()\n registry.register(N2, depends_on_classes=[N11])\n registry.register(N3)\n registry.register(N5)\n registry.register(N7)\n registry.register(N8, depends_on_classes=[N3])\n registry.register(N9, depends_on_classes=[N8, N11])\n registry.register(N10, depends_on_classes=[N3, N11])\n registry.register(N11, depends_on_classes=[N5, N7])\n cases = TestPlan.create_cases(registry.tests, [])\n graph = TestGraph(registry.groups, registry.tests, cases)\n result = graph.sort()\n self.assertEqual(8, len(result))\n msg = assert_sort_order_is_correct(result)\n self.assertEqual(None, msg)\n\n\n def test_do_not_allow_sneaky_cycle(self):\n from proboscis.case import TestPlan\n from proboscis.sorting import TestGraph\n from proboscis import TestRegistry\n\n registry = TestRegistry()\n registry.register(N2, depends_on_classes=[N11])\n registry.register(N3)\n registry.register(N5, depends_on_groups=[\"something\"])\n registry.register(N7)\n registry.register(N8, depends_on_classes=[N3])\n registry.register(N9, depends_on_classes=[N8, N11])\n registry.register(N10, depends_on_classes=[N3, N11])\n registry.register(N11, groups=[\"something\"],\n depends_on_classes=[N5, N7])\n cases = TestPlan.create_cases(registry.tests, [])\n graph = TestGraph(registry.groups, registry.tests, cases)\n\n re = compatability.capture_exception(graph.sort, RuntimeError)\n self.assertTrue(re is not None)\n self.assertTrue(isinstance(re, RuntimeError))\n self.assertTrue(str(re).find(\"Cycle found\") >= 0)\n\nclass TestModuleConversionToNodes(unittest.TestCase):\n\n def setUp(self):\n import proboscis\n from tests.unit import proboscis_example\n from proboscis.case import TestPlan\n from proboscis import TestRegistry\n\n old_default_registry = proboscis.decorators.DEFAULT_REGISTRY\n proboscis.decorators.DEFAULT_REGISTRY = TestRegistry()\n compatability.reload(proboscis_example)\n self.registry = proboscis.decorators.DEFAULT_REGISTRY\n proboscis.default_registry = old_default_registry\n self.plan = TestPlan.create_from_registry(self.registry)\n\n def test_should_load_correct_number_of_tests(self):\n self.assertEqual(5, len(self.plan.tests))\n\n def test_startup_must_be_first(self):\n from tests.unit.proboscis_example import StartUp\n self.assertEqual(StartUp, self.plan.tests[0].entry.home)\n\n def test_filter_with_one(self):\n self.plan.filter(group_names=[\"init\"])\n filtered = self.plan.tests\n self.assertEqual(1, len(filtered))\n from tests.unit.proboscis_example import StartUp\n self.assertEqual(StartUp, filtered[0].entry.home)\n\n def test_filter_should_keep_dependencies(self):\n self.plan.filter(group_names=[\"integration\"])\n filtered = self.plan.tests\n # Should include \"integration\" group and also \"init\" group since it\n # is a dependency.\n self.assertEqual(4, len(filtered))\n from tests.unit.proboscis_example import StartUp\n self.assertEqual(StartUp, filtered[0].entry.home)\n # All the other ones must be in the integration group\n for i in range(1, 4):\n self.assertEqual(\"integration\", filtered[i].entry.info.groups[0])\n\n def test_filter_with_classes(self):\n from tests.unit.proboscis_example import RandomTestOne\n self.plan.filter(classes=[RandomTestOne])\n filtered = self.plan.tests\n # Should include RandomTestOne, which depends on RandomTestZero,\n # which depends on init\n self.assertEqual(3, len(filtered))\n from tests.unit.proboscis_example import StartUp\n 
self.assertEqual(StartUp, filtered[0].entry.home)\n from tests.unit.proboscis_example import RandomTestZero\n self.assertEqual(RandomTestZero, filtered[1].entry.home)\n self.assertEqual(RandomTestOne, filtered[2].entry.home)\n\n\nif compatability.supports_time_out():\n\n @time_out(2)\n def lackadaisical_multiply(a, b):\n sum = 0\n for i in range(0, b):\n time.sleep(1)\n sum = sum + a\n return sum\n\n\n class TestTimeoutDecorator(unittest.TestCase):\n\n def test_should_not_time_out_before_time_exceeded(self):\n self.assertEqual(0, lackadaisical_multiply(4, 0))\n self.assertEqual(8, lackadaisical_multiply(8, 1))\n\n def test_should_timeout_if_time_exceeded(self):\n try:\n self.assertEqual(8 * 8, lackadaisical_multiply(8, 8))\n self.fail(\"time_out decorator did not work.\")\n except TimeoutError:\n pass\n\nclass MockCase(object):\n\n @expect_exception(TimeoutError)\n def broadly_exceptional_function(self):\n raise Exception()\n\n @expect_exception(TimeoutError)\n def exceptional_function(self):\n raise TimeoutError()\n\n @expect_exception(TimeoutError)\n def unexceptional_function(self):\n pass\n\n @expect_exception(Exception)\n def broadly_decorated_function(self):\n raise TimeoutError()\n\n\nclass TestExpectExceptionDecorator(unittest.TestCase):\n\n def test_should_fail_if_no_exception_occurs(self):\n case = MockCase()\n self.assertRaises(AssertionError, case.unexceptional_function)\n\n def test_should_fail_if_incorrect_exception_occurs(self):\n case = MockCase()\n # The original exception is raised unfiltered\n self.assertRaises(Exception, case.broadly_exceptional_function)\n\n def test_should_not_fail_if_exception_occurs(self):\n case = MockCase()\n case.exceptional_function()\n\n def test_should_not_fail_if_subclass_exception_occurs(self):\n case = MockCase()\n case.broadly_decorated_function()\n\n\nclass TestAssertRaises(unittest.TestCase):\n\n def test_should_fail_if_no_exception_occurs(self):\n def throw_time_out():\n pass\n self.assertRaises(AssertionError, assert_raises, TimeoutError,\n throw_time_out)\n\n def test_should_fail_if_incorrect_exception_occurs(self):\n def throw_time_out():\n raise Exception()\n self.assertRaises(AssertionError, assert_raises, TimeoutError,\n throw_time_out)\n\n def test_should_not_fail_if_exception_occurs(self):\n def throw_time_out():\n raise TimeoutError()\n assert_raises(TimeoutError, throw_time_out)\n\n def test_should_fail_if_subclass_exception_occurs(self):\n \"\"\"The subclass is not good enough for assert_raises.\"\"\"\n def throw_time_out():\n raise TimeoutError()\n self.assertRaises(AssertionError, assert_raises, Exception,\n throw_time_out)\n\n\nclass ProboscisRegistryTest(unittest.TestCase):\n\n def setUp(self):\n import proboscis\n from proboscis import TestRegistry\n self.old_default_registry = proboscis.decorators.DEFAULT_REGISTRY\n self.registry = TestRegistry()\n proboscis.decorators.DEFAULT_REGISTRY = self.registry\n\n def tearDown(self):\n import proboscis\n proboscis.decorators.DEFAULT_REGISTRY = self.old_default_registry\n\n\nclass TestMethodMarker(ProboscisRegistryTest):\n\n def test_should_mark_methods(self):\n import proboscis\n from proboscis import test\n\n class Example(object):\n\n def __init__(self):\n self.a = 5\n\n @test\n def something(self):\n \"\"\"This tests something.\"\"\"\n self.a = 55\n\n self.assertTrue(hasattr(Example.something, '_proboscis_entry_'))\n if sys.version_info < (3,0):\n self.assertTrue(hasattr(Example.something.im_func,\n '_proboscis_entry_'))\n\n\nclass 
TestClassLevelDecorators(unittest.TestCase):\n\n def setUp(self):\n pass\n\n def test_should_raise_error_if_missing_class_decorator(self):\n from proboscis import test\n\n class ExampleTest(object):\n @test\n def test_1(self):\n pass\n\n assert_raises(ProboscisTestMethodClassNotDecorated,\n compatability.capture_type_error, ExampleTest.test_1)\n\n\n def test_compatability_wrapper_should_not_hide_error(self):\n\n def test_1():\n raise RuntimeError()\n\n assert_raises(RuntimeError,\n compatability.capture_type_error, test_1)\n\n def test_compatability_wrapper_should_not_hide_TypeError(self):\n\n def test_1():\n raise TypeError()\n\n assert_raises(TypeError,\n compatability.capture_type_error, test_1)\n\n\n\nif __name__ == \"__main__\":\n unittest.TestProgram()\n" }, { "alpha_fraction": 0.680272102355957, "alphanum_fraction": 0.680272102355957, "avg_line_length": 23.5, "blob_id": "b058ce3d50b1f20dcb5582a0efcb6346c3df47c8", "content_id": "97bc2a37c0c386d086b46ae74a806855a0ca6b6f", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 147, "license_type": "permissive", "max_line_length": 33, "num_lines": 6, "path": "/test/tests/compatability/testng/before_and_after/src/python/run_tests.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "import BeforeAndAfter\nfrom proboscis import TestProgram\n\nif __name__ == '__main__':\n # Run Proboscis and exit.\n TestProgram().run_and_exit()\n" }, { "alpha_fraction": 0.6534653306007385, "alphanum_fraction": 0.7128713130950928, "avg_line_length": 9.100000381469727, "blob_id": "6247881bb2666061bce32bad236d6ef20727a74f", "content_id": "b668cc4de15f59497f3626a892abd03d8b2bf5cc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 101, "license_type": "no_license", "max_line_length": 23, "num_lines": 10, "path": "/com.instasafev2/src/common/myconstants.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "'''\nCreated on 19-Sep-2017\n\n@author: gaurav\n'''\n\n#instasafe admin portal\n\n#instasafe \nglobal BROWSER\n" }, { "alpha_fraction": 0.6178010702133179, "alphanum_fraction": 0.6178010702133179, "avg_line_length": 20.11111068725586, "blob_id": "857668d5c4c56a109f3a7fffafd360834cf12705", "content_id": "56948cc7d4d59e425e62d6de8f31c06d34b54688", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 191, "license_type": "permissive", "max_line_length": 37, "num_lines": 9, "path": "/test/examples/unit/run_tests.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "\ndef run_tests():\n from proboscis import TestProgram\n from tests import unit\n\n # Run Proboscis and exit.\n TestProgram().run_and_exit()\n\nif __name__ == '__main__':\n run_tests()\n" }, { "alpha_fraction": 0.6308420896530151, "alphanum_fraction": 0.6344681978225708, "avg_line_length": 43.50224304199219, "blob_id": "2c9ac55eaaf240237d863cac65d4275a8fde8a73", "content_id": "303402ccda850476228bac68eb3cf120b8906481", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9928, "license_type": "no_license", "max_line_length": 121, "num_lines": 223, "path": "/com.instasafev2/src/pages/companyPortal/gateway_Page.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "'''\nCreated on 28-Jun-2018\n\n@author: gaurav\n'''\nimport logging\nimport 
time\nfrom base.basepage import BasePage\nfrom base.selenium_driver import SeleniumDriver\nimport utilities.custom_logger as cl\n\n\n\nclass GatewayPages(BasePage,SeleniumDriver):\n\n log = cl.customLogger(logging.DEBUG)\n\n def __init__(self, driver):\n super().__init__(driver)\n self.driver = driver\n\n #Locator\n _gateways_menu = \".//span[contains(text(),'Gateways')]\" \n _Page_heading_text = \".//a[contains(text(),'Gateway')]\"\n _add_button = \".//button[contains(text(),'Add Gateway')]\"\n _bulk_add_button = \".//button[contains(text(),'Bulk')]\"\n _delete_button = \".//button[contains(text(),'Delete')]\"\n _select_dropdown=\".//select[@name='pageselect']\"\n _prev_button=\".//span[contains(text(),'Prev')]\"\n _next_button = \".//span[contains(text(),'Next')]\"\n _search_box = \".//input[@placeholder='Type keyword and enter to search']\"\n _checkBox_single = \".//a[contains(text(),'gateway')]/parent::*/..//input[@type='checkbox']\"\n _checkBox_selectAll = \".//input[@ng-model='selectedAll']\"\n _add_window_heading = \".//h2[contains(text(),'Add new gateway details to setup')]\"\n _name_field = \".//input[@name='name']\"\n _location_field = \".//input[@name='location']\"\n _backup_field = \".//input[@name='backup_name']\"\n _bandwith_field = \".//input[@name='bandwidth_limit']\"\n #div[x] x change with newly added network address text box. value of x started from 1\n _protected_network_field = \".//div[1][@ng-repeat='net in Gateway.gateway.networks']//input[@name='value']\"\n _add_more_button = \".//button[contains(text(),'Add more')]\"\n _save_button =\".//button[@type='submit']\"\n _reset_button = \".//span[contains(text(),'Reset')]\"\n _add_window_close_button = \".//i[@ng-click='closeThisDialog(0)']\"\n\n _gateway_entry = \".//a[contains(text(),'gateway')]\"\n _close_addwindow = \".//a[@class='close_slide ion-android-close']\"\n\n \n \n def clickAddButton(self):\n self.waitForElement(self._add_button, locatorType=\"Xpath\", pollFrequency=1) \n self.elementClick(self._add_button, locatorType=\"xpath\")\n \n def clickonBulkAddButton(self): \n self.waitForElement(self._bulk_add_button, locatorType=\"Xpath\",timeout=3, pollFrequency=1) \n self.elementClick(self._bulk_add_button, locatorType=\"xpath\")\n \n def verifyAddWindow(self,verificationText):\n heading = self.isElementPresent(verificationText, locatorType=\"Xpath\")\n self.log.info(\"Add window appear status is \" + str(heading))\n \n def enterGatewayName(self, gateway):\n self.sendKeys(gateway, self._name_field, locatorType=\"xpath\", element=\"gateway name\")\n \n def enterLocation(self, gateway):\n self.sendKeys(gateway, self._location_field, locatorType=\"xpath\", element=\"gateway name\")\n \n def enterBackupGatewayName(self, gateway):\n self.sendKeys(gateway, self._backup_field, locatorType=\"xpath\", element=\"gateway name\")\n \n def enterBandwithLimit(self, gateway):\n self.sendKeys(gateway, self._bandwith_field, locatorType=\"xpath\", element=\"gateway name\")\n \n def enterProtectedNetwork(self, gateway):\n self.sendKeys(gateway, self._protected_network_field, locatorType=\"xpath\", element=\"gateway name\")\n \n def clickAddMoreButton(self):\n self.elementClick(locator =self._add_more_button, locatorType=\"Xpath\") \n\n def clickResetButton(self):\n self.elementClick(locator =self._reset_button, locatorType=\"Xpath\")\n \n def clickSaveButton(self):\n self.elementClick(locator =self._save_button, locatorType=\"Xpath\") \n\n def closeAddWindow(self):\n self.elementClick(locator= self._add_window_close_button, 
locatorType=\"Xpath\", element=\"closeAddWindow\")\n \n def verify_gateway_entry(self, name):\n gateway_entry = self._gateway_entry.replace(\"gateway\", name)\n self.log.info(\"new Xpath of gateway :- \" + gateway_entry)\n self.waitForElement(locator= gateway_entry, locatorType=\"Xpath\", timeout=20, pollFrequency=1)\n result = self.isElementPresent(locator= gateway_entry, locatorType=\"Xpath\", element= \"none\")\n if (result == False):\n self.closeAddWindow()\n \n return result\n \n def clearFields(self):\n Field = self.getElement(locator=self._portNumber_field,locatorType=\"xpath\")\n Field.clear()\n \n def deleteAll(self,locator):\n self.elementClick(self._checkBox_selectAll, locatorType=\"Xpath\", element=\"checkBox\")\n self.elementClick(self._delete_button, locatorType=\"Xpath\", element=\"checkBox\")\n\n def update_xpath(self,old_xpath,text_to_update,name):\n new_xpath = old_xpath.replace(\"text_to_update\", name)\n self.log.info(\"Updated Xpath is as follow \" + new_xpath) \n return new_xpath\n \n def select_check_box(self,new_xpath,name):\n self.waitForElement(locator= new_xpath, locatorType=\"Xpath\", timeout=20, pollFrequency=1)\n #self.isElementPresent(locator= new_xpath, locatorType=\"Xpath\", element= \"name\"):\n self.elementClick(locator=new_xpath, locatorType=\"Xpath\", element=name)\n self.log.info(name+ \"related check box selected successfully\") \n \n def delete_entries(self,new_xpath):\n self.waitForElement(locator= new_xpath, locatorType=\"Xpath\", element= \"xpath of new entry\")\n self.elementClick(locator=self._delete_button, locatorType=\"Xpath\", element=\"delete button\") \n self.log.info(\"successfully clicked on delete button\") \n ##--------------------------------------------------------------------------------------------------------------------##\n \n def close_Whatfix_Windows(self):\n self.log.info(\"waiting for the element\")\n time.sleep(10)\n self.switch_to(locator=self._iframe,locatorType=\"Xpath\")\n self.log.info(\"switching frame now\") \n self.elementClick(locator=self._whatfix, locatorType=\"Xpath\", element=\"whatfix\")\n self.switch_to_default()\n\n def navigategatewayPage(self):\n time.sleep(1)\n '''\n self.waitForElement(self._gateways_gateways_menu, locatorType=\"Xpath\", pollFrequency=1) \n self.elementClick(self._gateways_gateways_menu, locatorType=\"Xpath\", element=\"gateway_&_gateway_button\")\n '''\n self.waitForElement(self._gateways_menu, locatorType=\"Xpath\", pollFrequency=1) \n self.elementClick(self._gateways_menu, locatorType=\"Xpath\", element = \"gateway_button\")\n self.verifyPageTitle(\"Gateways - SafeHats | MyInstaSafe\")\n\n def navigate_to_gateway_Add_window(self): \n self.navigategatewayPage()\n self.clickAddButton()\n time.sleep(1)\n self.verifyAddWindow(self._add_window_heading) \n \n def add_Single_Gateway(self, gatewayName=\"\", location=\"\", backUpGateway=\"\", bandwidth=\"\", protectedNetwork=\"\"): \n time.sleep(1)\n self.log.info(\"clear all data field\")\n #self.clearFields()\n self.log.info(\"Enter gateway name\")\n self.enterGatewayName(gatewayName)\n self.log.info(\"gateway name entered successfully\")\n self.enterLocation(location)\n self.log.info(\"gateway location entered successfully\")\n self.enterBackupGatewayName(backUpGateway)\n self.log.info(\"Backup Gateway Name entered successfully\")\n self.enterBandwithLimit(bandwidth)\n self.log.info(\"Band width entered successfully\")\n self.enterProtectedNetwork(protectedNetwork)\n self.log.info(\"Protected Network Address entered successfully\")\n 
self.clickSaveButton()\n self.log.info(\"Save button clicked successfully\")\n #self.closeAddWindow()\n #self.log.info(\"refresh gateway page\")\n #self.driver.refresh()\n self.log.info(\"reload page and wait up to 10 seconds\")\n self.driver.set_page_load_timeout(10)\n \n def addMultiplegateway(self, gatewayName=\"\", location=\"\", backUpGateway=\"\", bandwidth=\"\", protectedNetwork=\"\"):\n self.log.info(\"Enter gateway name\")\n self.enterGatewayName(gatewayName)\n self.log.info(\"gateway name entered successfully\")\n self.enterLocation(location)\n self.log.info(\"gateway location entered successfully\")\n self.enterBackupGatewayName(backUpGateway)\n self.log.info(\"Backup Gateway Name entered successfully\")\n self.enterBandwithLimit(bandwidth)\n self.log.info(\"Band width entered successfully\")\n self.enterProtectedNetwork(protectedNetwork)\n self.log.info(\"Protected Network Address entered successfully\")\n self.clickSaveButton()\n self.log.info(\"Save button clicked successfully\")\n self.log.info(\"reload page and wait up to 10 seconds\")\n self.driver.set_page_load_timeout(10)\n \n def delete_Single_Gateway(self, name):\n # change Xpath at run time\n new_xpath = self.update_xpath(self._gateway_entry, \"gateway\", name)\n # search what to delete\n value = self.verify_gateway_entry(name)\n if value:\n self.select_check_box(new_xpath, name)\n self.delete_entries(new_xpath)\n value1 = self.verify_gateway_entry(name)\n if value1 :\n self.log.info(\"Failed to delete (\"+name+\") gateway\")\n return False\n else :\n self.log.info(name+ \" gateway successfully deleted\")\n return True\n else :\n self.log.info(\"desired gateway \" + name + \" was not found\")\n return False\n # \n \n # select delete button\n # click on delete button\n \n \n \n # def deleteMultipleGateway(self):\n \n \n # def deleteAllGateways(self):\n \n\n\n\n # def single_Delete(self):\n\n\n\n\n" }, { "alpha_fraction": 0.6172704696655273, "alphanum_fraction": 0.6208117604255676, "avg_line_length": 36.408164978027344, "blob_id": "b67d3451a119efc5b4125782bfe532f2f7fe5abc", "content_id": "d779f38257da4834e2774abcd959a3af12a89bdf", "detected_licenses": [], "is_generated": false, "is_vendor": false, 
"language": "Python", "length_bytes": 3671, "license_type": "no_license", "max_line_length": 78, "num_lines": 98, "path": "/com.instasafev2/src/base/webdriverfactory.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "'''\nCreated on 11-Sep-2017\n\n@author: user\n'''\nimport os\nfrom symbol import parameters\n\nfrom selenium import webdriver\nfrom selenium.webdriver.common.desired_capabilities import DesiredCapabilities\nfrom selenium.webdriver.common.keys import Keys\nfrom selenium.webdriver.firefox.firefox_binary import FirefoxBinary\nimport utilities.custom_logger as cl\nimport logging \n\n\nclass WebDriverFactory():\n \n log = cl.customLogger(logging.DEBUG)\n \n def __init__(self, browser):\n self.browser = browser\n \n#logging.error(msg)\n def getWebDriverInstance(self,browser = 'firefox'): \n if browser == 'firefox' :\n driver = self.get_firefox()\n elif self.browser == 'chrome' : \n driver = self.get_chrome()\n elif browser == 'EI' :\n driver = self.get_ei()\n elif self.browser == 'opera' :\n driver = self.get_opera()\n else:\n driver = self.get_firefox()\n self.log.info(browser+ \"browser open\") \n driver.delete_all_cookies()\n self.log.info(\"All cookies clear\")\n driver.set_page_load_timeout(3)\n self.log.info(\"wait for 10 seconds\")\n driver.implicitly_wait(10)\n self.log.info(\"maximize browser window\")\n #driver.maximize_window()\n return driver\n \n def get_firefox(self):\n # Locate firFox from the default directory otherwise use FIREFOX_BIN #\n try:\n driver = webdriver.Firefox(\"D:\\\\geckodriver\")\n self.log.info(\"\")\n except Exception:\n my_local_firefox_bin = os.environ.get('FIREFOX_BIN')\n firefox_binary = FirefoxBinary(my_local_firefox_bin)\n driver = webdriver.Firefox(firefox_binary=firefox_binary)\n return driver\n \n def get_chrome(self):\n # Locate chrome from the default directory otherwise use FIREFOX_BIN #\n try:\n driver = webdriver.Chrome(\"D:\\\\software\\\\chromedriver.exe\")\n self.log.info(\"browser ---- Chrome\")\n except Exception:\n my_local_firefox_bin = os.environ.get('FIREFOX_BIN')\n firefox_binary = FirefoxBinary(my_local_firefox_bin)\n driver = webdriver.Firefox(firefox_binary=firefox_binary)\n return driver\n \n def get_ei(self):\n # Locate EI from the default directory otherwise use FIREFOX_BIN #\n try:\n capabilities = DesiredCapabilities.INTERNETEXPLORER\n capabilities.pop(\"platform\", None)\n capabilities.pop(\"version\", None)\n iepath=\"D:\\\\software\\\\IEDriverServer.exe\"\n driver = webdriver.Ie(iepath,capabilities = capabilities ) \n except Exception:\n my_local_firefox_bin = os.environ.get('FIREFOX_BIN')\n firefox_binary = FirefoxBinary(my_local_firefox_bin)\n driver = webdriver.Firefox(firefox_binary=firefox_binary)\n return driver\n def get_opera(self):\n # Locate opera from the default directory otherwise use FIREFOX_BIN #\n try:\n capabilities = DesiredCapabilities.OPERA\n capabilities.pop(\"platform\", None)\n capabilities.pop(\"version\", None)\n iepath=\"D:\\\\software\\\\operadriver_win32\\\\operadriver.exe\"\n driver = webdriver.Ie(iepath,capabilities = capabilities ) \n driver = webdriver.Firefox()\n except Exception:\n my_local_firefox_bin = os.environ.get('FIREFOX_BIN')\n firefox_binary = FirefoxBinary(my_local_firefox_bin)\n driver = webdriver.Firefox(firefox_binary=firefox_binary)\n return driver\n\n\ndef close_drivers(driver):\n driver.close()\n \n" }, { "alpha_fraction": 0.6740087866783142, "alphanum_fraction": 0.6740087866783142, "avg_line_length": 19.636363983154297, 
"blob_id": "f745ed197c9d0395fe6a86dc6c788642f6deebfc", "content_id": "2d4c73b5b3c3225eb42e7d0701e4acf2b3496d19", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 227, "license_type": "permissive", "max_line_length": 35, "num_lines": 11, "path": "/test/examples/example_factory/run_tests.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "from proboscis import register\nfrom proboscis import TestProgram\n\ndef run_tests():\n from tests import service_tests\n # Run Proboscis and exit.\n TestProgram().run_and_exit()\n\n\nif __name__ == '__main__':\n run_tests()\n" }, { "alpha_fraction": 0.6003989577293396, "alphanum_fraction": 0.6041666865348816, "avg_line_length": 36.599998474121094, "blob_id": "b6f5c13039c9accbdde53d48196f92d4dfd7f2bf", "content_id": "13313922ac901c270eb54c6d86aa0ba2bf0a5dbf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4512, "license_type": "no_license", "max_line_length": 154, "num_lines": 120, "path": "/com.instasafev2/src/pages/loginPage.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "import logging\nimport time\n\nfrom base.basepage import BasePage\nfrom base.selenium_driver import SeleniumDriver\nimport utilities.custom_logger as cl\n\n\nclass LoginPage(BasePage,SeleniumDriver):\n\n log = cl.customLogger(logging.DEBUG)\n\n def __init__(self, driver):\n super().__init__(driver)\n self.driver = driver\n \n\n # Locators\n # _login_link = \".//input[@class='form-control ng-pristine ng-valid ng-empty ng-touched']\"\n _username_Field = \".//input[@placeholder='Username']\"\n _password_field = \".//input[@placeholder='Password']\"\n _login_button = \".//button[@type='submit']\"\n \n result_list = []\n\n def clickUserName(self):\n self.elementClick(self._username_Field, locatorType=\"xpath\")\n\n def enterUsername(self, username):\n self.sendKeys(username, self._username_Field, locatorType=\"xpath\")\n \n def clickPassWord(self):\n self.elementClick(self._password_field, locatorType=\"xpath\")\n\n def enterPassword(self, password):\n self.sendKeys(password, self._password_field, locatorType=\"xpath\")\n\n def clickLoginButton(self):\n self.elementClick(self._login_button, locatorType=\"xpath\")\n\n def login(self, username=\"\", password=\"\"):\n self.clearFields()\n self.clickUserName()\n self.log.info(\"click on username field\")\n self.enterUsername(username)\n self.log.info(\"username enter successfully\")\n self.clickPassWord()\n self.log.info(\"click on Password field\")\n self.enterPassword(password)\n self.log.info(\"password enter successfully\")\n self.clickLoginButton()\n self.log.info(\"click on login button successfully\")\n \n \n \n def multiple_login(self, condition=\"\", username=\"\", password=\"\"):\n self.login(username,password)\n result1=True\n if condition=='valid':\n self.log.info(\"test 2\")\n result1 = self.verifyLoginFailed()\n print('valid')\n elif condition == 'invalid' :\n self.log.info(\"test 3\")\n res = self.verifyLoginFailed()\n self.log.info(\"test 3.1\")\n result1 = not(res)\n self.log.info(\"test 3.1\")\n print('invalid')\n else :\n print(\"blank entry in CSV file\") \n self.log.info(\"test 4\")\n self.result_list.append(result1)\n self.log.info(self.result_list[0])\n self.driver.refresh()\n \n def verifyLogin(self):\n self.waitForElement(\"//*[contains(text(), 'Dashboard')]\",\n locatorType=\"xpath\")\n result = 
self.isElementPresent(locator=\"//*[contains(text(), 'Dashboard')]\",\n locatorType=\"xpath\")\n return result\n \n\n def verifyLoginSuccessful(self):\n self.waitForElement(\"//*[contains(text(), 'Dashboard')]\",\n locatorType=\"xpath\")\n result = self.isElementPresent(locator=\"//*[contains(text(), 'Dashboard')]\",\n locatorType=\"xpath\")\n return result\n\n def verifyLoginFailed(self):\n result = self.isElementPresent(locator=\"//*[contains(text(), 'Dashboard')]\",\n locatorType=\"xpath\")\n return result\n\n def verifyLoginTitle(self):\n return self.verifyPageTitle(\"Dashboard - Mumbai | MyInstaSafe\")\n\n def logout(self):\n \n #self.nav.navigateToUserSettings()\n time.sleep(3)\n logoutDropDownLinkElement = self.waitForElement(locator=\".//a[@class='dropdown-toggle']\",\n locatorType=\"xpath\", pollFrequency=2)\n self.elementClick(element=logoutDropDownLinkElement)\n time.sleep(1)\n logoutLinkElement = self.waitForElement(locator=\".//*[@class='list-inline list-unstyled navitem nav navbar-nav']//*[contains(text(),'Sign out')]\",\n locatorType=\"xpath\", pollFrequency=2)\n\n self.elementClick(element=logoutLinkElement)\n\n def clearFields(self):\n self.waitForElement(locator=self._username_Field,locatorType=\"xpath\", pollFrequency=2)\n usernameField = self.getElement(locator=self._username_Field,locatorType=\"xpath\")\n usernameField.clear()\n self.log.info(\"Username field successfully clear\")\n passwordField = self.getElement(locator=self._password_field,locatorType=\"xpath\")\n self.log.info(\"Password field successfully clear\")\n passwordField.clear()\n" }, { "alpha_fraction": 0.7074949741363525, "alphanum_fraction": 0.7125165462493896, "avg_line_length": 29.44628143310547, "blob_id": "5d6f657159ba80f03235ce02e90a2ddfc36e7c2c", "content_id": "6cd73cb3780a7c0082f8fd44520ea778a74b8539", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 29475, "license_type": "no_license", "max_line_length": 197, "num_lines": 968, "path": "/sample/selenium_with_python.rst", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "====================\nSelenium with Python\n====================\n\n:Author: `Baiju Muthukadan <http://baijum81.livejournal.com/>`_\n:Email: baiju.m.mail AT gmail.com\n:Version: 0.3.2\n\n.. note::\n\n `This document has been submitted to Selenium\n <http://code.google.com/p/selenium/issues/detail?id=1930>`_ project\n to be included in the official documentation. The `format of this\n text is reStucturedText\n <https://raw.github.com/gist/1047207/selenium_with_python.rst>`_. I\n am looking forward to your feedback. Please send your feedback to:\n `baiju.m.mail AT gmail.com` or you can `comment at the bottom of\n this gist <https://gist.github.com/1047207#comments>`_.\n\n\nIntroduction\n------------\n\nSelenium Python bindings provide a convenient API to access Selenium\nWebDrivers like Firefox, Ie and Chrome. The current supported Python\nversions are Python 2.6 and Python 2.7. Python 3 is not yet\nsupported. Selenium server is a Java program. Java Runtime\nEnvironment (JRE) 1.6 is recommended to run Selenium server. This\narticle explain using Selenium 2 with WebDriver API. Selenium 1 API\nis not covered here.\n\nInstallation\n------------\n\n\nDownloading Selenium server\n~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\nYou can download Selenium server 2.x from the `download page of\nselenium website <http://seleniumhq.org/download/>`_. 
The file name\nshould be something like this:\n\`\`selenium-server-standalone-2.x.x.jar\`\`. You can always download the\nlatest 2.x version of Selenium server.\n\nIf Java Runtime Environment (JRE) is not installed in your system, you\ncan download the \`JRE from the Oracle website\n<http://www.oracle.com/technetwork/java/javase/downloads/index.html>\`_.\nIf you have root access in your system, you can also use your\noperating system instructions to install JRE.\n\n\nDownloading Python bindings for Selenium\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\nYou can download Python bindings for Selenium from the \`PyPI page for\nselenium package <http://pypi.python.org/pypi/selenium>\`_. It has a\ndependency on \`rdflib <http://pypi.python.org/pypi/rdflib>\`_, version\n3.1.x.\n\nYou can also use \`easy_install\n<http://python-distribute.org/distribute_setup.py>\`_ or \`pip\n<http://pypi.python.org/pypi/pip>\`_ to install the bindings::\n\n easy_install selenium\n\nor::\n\n pip install selenium\n\nYou may consider using \`virtualenv\n<http://pypi.python.org/pypi/virtualenv>\`_ to create isolated Python\nenvironments.\n\n\nUsing Buildout for installation\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\nIf you prefer to use \`Buildout <http://www.buildout.org>\`_ to install\nall the dependencies from a GNU/Linux machine, you can use this\nconfiguration::\n\n [buildout]\n file-server = http://fileserver.example.org\n parts =\n jre_download\n jre_install\n selenium_server_download\n selenium_py\n\n [jre_download]\n recipe = hexagonit.recipe.download\n url = ${buildout:file-server}/jre-6u26-linux-i586.bin\n download-only = true\n ignore-existing = true\n\n [jre_install]\n recipe = iw.recipe.cmd\n on_install = true\n on_update = true\n shell = bash\n cmds =\n chmod +x ${jre_download:location}/jre-6u26-linux-i586.bin\n cd ${buildout:directory};if [[ -e \"jre1.6.0_26\" ]]; then echo -n; else ${jre_download:location}/jre-6u26-linux-i586.bin; fi\n\n [selenium_server_download]\n recipe = hexagonit.recipe.download\n url = ${buildout:file-server}/selenium-server-standalone-2.1.0.jar\n download-only = true\n ignore-existing = true\n\n [selenium_py]\n recipe = z3c.recipe.scripts\n interpreter = python\n eggs = selenium\n\nWindows Buildout users can use these parts for automating installation\nof Selenium server and creating a script to run it::\n\n [buildout]\n file-server = http://fileserver.example.org\n parts =\n jre_download\n selenium_server_download\n selenium_server_script\n selenium_py\n\n [jre_download]\n recipe = hexagonit.recipe.download\n url = ${pkgserver:fullurl}/zepackages/jre-6u26-windows-i586.zip\n destination = ${buildout:directory}\n ignore-existing = true\n\n [selenium_server_download]\n recipe = hexagonit.recipe.download\n url = ${pkgserver:fullurl}/zepackages/selenium-server-standalone-2.1.0.jar\n download-only = true\n ignore-existing = true\n\n [selenium_server_script]\n recipe = collective.recipe.template\n input = inline:\n set PATH=%PATH%;${firefox_download:destination}\\\\firefox36\n ${jre_download:destination}\\\\jre6\\\\bin\\\\java.exe -jar ${selenium_server_download:location}\\\\selenium-server-standalone-2.1.0.jar -timeout 180 -port 4444 -forcedBrowserModeRestOfLine firefoxchrome\n output = ${buildout:bin-directory}/selenium-server.bat\n\n [selenium_py]\n recipe = z3c.recipe.scripts\n interpreter = python\n eggs = selenium\n\nThe \`jre-6u26-windows-i586.zip\` is not available from the Oracle site,\nso you can install JRE once, zip it, and then upload it to your\nfile server.\n\n\nRunning Selenium server\n-----------------------\n\nYou 
should have Java Runtime Environment (JRE) in the system. If the\n\`java\` command is available in the PATH (environment variable), you\ncan start the Selenium server using the command given below.\nReplace \`2.x.x\` with the actual version of the Selenium server you downloaded\nfrom the site. If JRE is installed as a non-root user and/or if it is\nnot available in the PATH (environment variable), you can type the\nrelative/absolute path to the \`java\` command, e.g.\n\`\`./jre1.6.0_26/bin/java\`\`::\n\n java -jar selenium-server-standalone-2.x.x.jar\n\nIn GNU/Linux, you can use the script given below to run it as a\ndaemon. You will be required to change the location of the \`java\` command\n(\`\`JAVA\`\` variable) and the Selenium server jar file (\`\`SELENIUM\`\`\nvariable).\n\n::\n\n #! /bin/bash\n\n ### BEGIN INIT INFO\n # Provides: selenium-server\n # Required-Start: $local_fs $remote_fs $network $named $time\n # Required-Stop: $local_fs $remote_fs $network $named $time\n # Should-Start: $syslog\n # Should-Stop: $syslog\n # Default-Start: 2 3 4 5\n # Default-Stop: 0 1 6\n # Short-Description: Starts selenium server for testing\n # Description: Selenium server for functional testing\n ### END INIT INFO\n\n . /lib/lsb/init-functions\n\n JAVA=java\n SELENIUM=/path/to/selenium-server-standalone-2.x.x.jar\n BMODE=firefoxchrome\n PORT=4444\n LOG=selenium.log\n PIDFILE=selenium.pid\n DISPLAY=:0.0\n\n pidof_server() {\n if [ -e \"$PIDFILE\" ]; then\n if pidofproc java | tr ' ' '\\n' | grep -w $(cat $PIDFILE); then\n return 0\n fi\n fi\n return 1\n }\n\n\n case $1 in\n start)\n DISPLAY=$DISPLAY $JAVA -jar $SELENIUM \\\n -timeout 180 -port $PORT -forcedBrowserModeRestOfLine $BMODE > $LOG &\n log_success_msg \"Starting Selenium server!\" \"selenium-server\"\n echo $! > $PIDFILE\n ;;\n stop)\n SELPID=\`cat $PIDFILE\` && kill $SELPID\n log_success_msg \"Stopping Selenium server!\" \"selenium-server\"\n ;;\n status)\n PID=$(pidof_server) || true\n if [ -n \"$PID\" ]; then\n echo \"Selenium Server is running (pid $PID).\"\n exit 0\n else\n echo \"Selenium Server is NOT running.\"\n exit 1\n fi\n ;;\n *)\n echo \"Usage: selenium-server.sh {start|stop|status}\"\n exit 1\n ;;\n esac\n\nBuildout users can use the \`collective.recipe.template\` recipe with the above\ntext as the template. You will be required to change the location of the\n\`java\` command (\`\`JAVA\`\` variable) and the Selenium server jar file\n(\`\`SELENIUM\`\` variable). 
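For instance, a minimal part along these lines could render the script (a\nsketch only; the part name and the \`\`input\`\` template path are assumptions,\nand the template file would hold the init script above)::\n\n [selenium_initd_script]\n recipe = collective.recipe.template\n input = templates/selenium-server.sh.in\n output = ${buildout:bin-directory}/selenium-server.sh\n\n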
If you are using the Buildout configuration\ngiven in the previous section, you can change the variables like\nthis::\n\n JAVA=${buildout:directory}/jre1.6.0_26/bin/java\n SELENIUM=${selenium_server_download:location}/selenium-server-standalone-2.1.0.jar\n\n\nUsing Selenium\n--------------\n\nIf you have installed Selenium server and Python bindings and are able to\nrun the server, you can start using it from Python like this.\n\n::\n\n from selenium import webdriver\n from selenium.webdriver.common.keys import Keys\n\n driver = webdriver.Firefox()\n driver.get(\"http://www.python.org\")\n assert \"Python\" in driver.title\n elem = driver.find_element_by_name(\"q\")\n elem.send_keys(\"selenium\")\n elem.send_keys(Keys.RETURN)\n assert \"Google\" in driver.title\n driver.close()\n\nThe above script can be saved into a file (e.g.\n\`python_org_search.py\`), then it can be run like this::\n\n python python_org_search.py\n\nThe \`python\` which you are running should have the \`selenium\` module\ninstalled.\n\nWalk through of the example\n~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\nThe \`selenium.webdriver\` module provides all the WebDriver\nimplementations. Currently supported WebDriver implementations are\nFirefox, Chrome, Ie and Remote. The \`Keys\` class provides keys on the\nkeyboard like RETURN, F1, ALT, etc.\n\n::\n\n from selenium import webdriver\n from selenium.webdriver.common.keys import Keys\n\nNext, the instance of Firefox WebDriver is created.\n\n::\n\n driver = webdriver.Firefox()\n\nThe \`driver.get\` method will navigate to a page given by the URL.\nWebDriver will wait until the page has fully loaded (that is, the\n\"onload\" event has fired) before returning control to your test or\nscript. It's worth noting that if your page uses a lot of AJAX on\nload then WebDriver may not know when it has completely loaded::\n\n driver.get(\"http://www.python.org\")\n\nThe next line is an assertion to confirm that the title has the word \"Python\"\nin it::\n\n assert \"Python\" in driver.title\n\nWebDriver offers a number of ways to find elements. One approach\nis to use the \`find_element_by_*\` methods. Commonly used\nmethods are \`find_element_by_id\`, \`find_element_by_name\`,\n\`find_element_by_xpath\`, \`find_element_by_link_text\`,\n\`find_element_by_partial_link_text\`, \`find_element_by_tag_name\`,\n\`find_element_by_class_name\`, \`find_element_by_css_selector\`::\n\n elem = driver.find_element_by_name(\"q\")\n\nNext we are sending keys; this is similar to entering keys using your\nkeyboard. Special keys can be sent using the \`Keys\` class imported from\n\`selenium.webdriver.common.keys\`::\n\n elem.send_keys(\"selenium\")\n elem.send_keys(Keys.RETURN)\n\nAfter submission of the page, you should land on the Google\nsite::\n\n assert \"Google\" in driver.title\n\nFinally, the browser window is closed. You can also call the \`quit\`\nmethod instead of \`close\`. The \`quit\` method will exit the entire browser,\nwhereas \`close\` will close one tab; but if it is the only tab, by default most\nbrowsers will exit entirely::\n\n driver.close()\n\n\nUsing Selenium to write tests\n-----------------------------\n\nSelenium will be used mostly for writing test cases. You can write\ntest cases using Python’s unittest module. Here is the modified\nexample which uses the unittest module. 
This is a test for python.org\nsearch functionality::\n\n import unittest\n from selenium import webdriver\n from selenium.webdriver.common.keys import Keys\n\n class PythonOrgSearch(unittest.TestCase):\n\n def setUp(self):\n self.driver = webdriver.Firefox()\n\n def test_search_in_python_org(self):\n driver = self.driver\n driver.get(\"http://www.python.org\")\n self.assertIn(\"Python\", driver.title)\n elem = driver.find_element_by_name(\"q\")\n elem.send_keys(\"selenium\")\n elem.send_keys(Keys.RETURN)\n self.assertIn(\"Google\", driver.title)\n\n def tearDown(self):\n self.driver.close()\n\n if __name__ == \"__main__\":\n unittest.main()\n\n\nYou can run the above test case from a shell like this::\n\n python test_python_org_search.py\n .\n ----------------------------------------------------------------------\n Ran 1 test in 15.566s\n\n OK\n\n\nWalk through of the example\n---------------------------\n\nInitially, all the basic modules required are imported. The \`unittest\n<http://docs.python.org/library/unittest.html>\`_ module is a built-in\nPython module based on Java's JUnit. This module provides the framework for\norganizing the test cases. The \`selenium.webdriver\` module provides\nall the WebDriver implementations. Currently supported WebDriver\nimplementations are Firefox, Chrome, Ie and Remote. The \`Keys\` class\nprovides keys on the keyboard like RETURN, F1, ALT, etc.\n\n::\n\n import unittest\n from selenium import webdriver\n from selenium.webdriver.common.keys import Keys\n\nThe test case class is inherited from \`unittest.TestCase\`.\nInheriting from the \`TestCase\` class is the way to tell the \`unittest\` module\nthat this is a test case::\n\n class PythonOrgSearch(unittest.TestCase):\n\n\nThe \`setUp\` method is part of initialization; this method will get called\nbefore every test function which you are going to write in this test\ncase class. Here you are creating the instance of Firefox WebDriver.\n\n::\n\n def setUp(self):\n self.driver = webdriver.Firefox()\n\nThis is the test case method. The first line inside this method\ncreates a local reference to the driver object created in the \`setUp\`\nmethod.\n\n::\n\n def test_search_in_python_org(self):\n driver = self.driver\n\nThe \`driver.get\` method will navigate to a page given by the URL.\nWebDriver will wait until the page has fully loaded (that is, the\n\"onload\" event has fired) before returning control to your test or\nscript. It's worth noting that if your page uses a lot of AJAX on\nload then WebDriver may not know when it has completely loaded::\n\n driver.get(\"http://www.python.org\")\n\nThe next line is an assertion to confirm that the title has the word \"Python\"\nin it::\n\n self.assertIn(\"Python\", driver.title)\n\nWebDriver offers a number of ways to find elements. One approach\nis to use the \`find_element_by_*\` methods. Commonly used\nmethods are \`find_element_by_id\`, \`find_element_by_name\`,\n\`find_element_by_xpath\`, \`find_element_by_link_text\`,\n\`find_element_by_partial_link_text\`, \`find_element_by_tag_name\`,\n\`find_element_by_class_name\`, \`find_element_by_css_selector\`::\n\n elem = driver.find_element_by_name(\"q\")\n\nNext we are sending keys; this is similar to entering keys using your\nkeyboard. Special keys can be sent using the \`Keys\` class imported from\n\`selenium.webdriver.common.keys\`::\n\n elem.send_keys(\"selenium\")\n elem.send_keys(Keys.RETURN)\n\nAfter submission of the page, you should land on the Google\nsite. 
You can confirm it by asserting \"Google\" in the title::\n\n self.assertIn(\"Google\", driver.title)\n\nThe \`tearDown\` method will get called after every test method. This\nis a place to do all cleanup actions. In the current method, the\nbrowser window is closed. You can also call the \`quit\` method instead of\n\`close\`. The \`quit\` method will exit the entire browser, whereas \`close\`\nwill close one tab; but if it is the only tab, by default most browsers\nwill exit entirely::\n\n def tearDown(self):\n self.driver.close()\n\nThe final lines are some boilerplate code to run the test suite::\n\n if __name__ == \"__main__\":\n unittest.main()\n\n\nNavigating\n----------\n\n.. warning::\n\n This section is a copy-paste from Java docs, so it requires some\n modification.\n\nThe first thing you'll want to do with WebDriver is navigate to a\npage. The normal way to do this is by calling \"get\":\n\n::\n\n driver.get(\"http://www.google.com\")\n\nWebDriver will wait until the page has fully loaded (that is, the\n\"onload\" event has fired) before returning control to your test or\nscript. It's worth noting that if your page uses a lot of AJAX on\nload then WebDriver may not know when it has completely loaded. If\nyou need to ensure such pages are fully loaded then you can use\n\"waits\".\n\n.. TODO: link to a section on explicit waits in WebDriver\n\n\nInteracting with the page\n~~~~~~~~~~~~~~~~~~~~~~~~~\n\nJust being able to go to places isn't terribly useful. What we'd\nreally like to do is to interact with the pages, or, more\nspecifically, the HTML elements within a page. First of all, we need\nto find one. WebDriver offers a number of ways to find elements. For\nexample, given an element defined as::\n\n <input type=\"text\" name=\"passwd\" id=\"passwd-id\" />\n\nyou could find it using any of::\n\n element = driver.find_element_by_id(\"passwd-id\")\n element = driver.find_element_by_name(\"passwd\")\n element = driver.find_element_by_xpath(\"//input[@id='passwd-id']\")\n\nYou can also look for a link by its text, but be careful! The text\nmust be an exact match! You should also be careful when using \`XPATH\nin WebDriver\`. If there's more than one element that matches the\nquery, then only the first will be returned. If nothing can be found,\na \`\`NoSuchElementException\`\` will be raised.\n\n.. TODO: Is this following paragraph correct ?\n\nWebDriver has an \"Object-based\" API; we represent all types of\nelements using the same interface. This means that although you may\nsee a lot of possible methods you could invoke when you hit your IDE's\nauto-complete key combination, not all of them will make sense or be\nvalid. Don't worry! WebDriver will attempt to do the Right Thing, and\nif you call a method that makes no sense (\"setSelected()\" on a \"meta\"\ntag, for example) an exception will be raised.\n\nSo, you've got an element. What can you do with it? First of all, you\nmay want to enter some text into a text field::\n\n element.send_keys(\"some text\")\n\nYou can simulate pressing the arrow keys by using the \"Keys\" class::\n\n element.send_keys(\" and some\", Keys.ARROW_DOWN)\n\nIt is possible to call \`send_keys\` on any element, which makes it\npossible to test keyboard shortcuts such as those used on Gmail. A\nside-effect of this is that typing something into a text field won't\nautomatically clear it. Instead, what you type will be appended to\nwhat's already there. You can easily clear the contents of a text\nfield or textarea with the \`clear\` method::\n\n element.clear()\n\n
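A quick aside on the \"waits\" mentioned earlier: when a page builds itself\nwith AJAX, you can poll for an element explicitly instead of sleeping. A\nminimal sketch (the element id here is an assumption)::\n\n from selenium.webdriver.support.ui import WebDriverWait\n\n # wait up to 10 seconds for the element to show up in the DOM\n element = WebDriverWait(driver, 10).until(\n lambda d: d.find_element_by_id(\"loaded-content\"))\n\n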
You can easily clear the contents of a text\nfield or textarea with `clear` method::\n\n element.clear();\n\n\nFilling in forms\n~~~~~~~~~~~~~~~~\n\nWe've already seen how to enter text into a textarea or text field,\nbut what about the other elements? You can \"toggle\" the state of\ncheckboxes, and you can use \"setSelected\" to set something like an\n`OPTION` tag selected. Dealing with `SELECT` tags isn't too bad::\n\n select = driver.find_element_by_xpath(\"//select\"))\n all_options = select.find_elements_by_tag_name(\"option\"))\n for option in all_options:\n print \"Value is: %s\" % option.getValue() #<- FIXME: API\n option.setSelected() #<- FIXME: API\n\nThis will find the first \"SELECT\" element on the page, and cycle\nthrough each of it's OPTIONs in turn, printing out their values, and\nselecting each in turn. As you can see, this isn't the most efficient\nway of dealing with SELECT elements . WebDriver's support classes\ninclude one called \"Select\", which provides useful methods for\ninteracting with these.\n\n::\n\n select = driver.find_element_by_xpath(\"//select\").select() #<- FIXME: API\n select.deselectAll() #<- FIXME: API\n select.selectByVisibleText(\"Edam\") #<- FIXME: API\n\nThis will deselect all OPTIONs from the first SELECT on the page, and\nthen select the OPTION with the displayed text of \"Edam\".\n\nOnce you've finished filling out the form, you probably want to submit\nit. One way to do this would be to find the \"submit\" button and click\nit::\n\n # Assume the button has the ID \"submit\" :)\n driver.find_element_by_id(\"submit\").click()\n\nAlternatively, WebDriver has the convenience method \"submit\" on every\nelement. If you call this on an element within a form, WebDriver will\nwalk up the DOM until it finds the enclosing form and then calls\nsubmit on that. If the element isn't in a form, then the\n``NoSuchElementException`` will be raised::\n\n element.submit();\n\n\nDrag and drop\n~~~~~~~~~~~~~\n\nYou can use drag and drop, either moving an element by a certain\namount, or on to another element::\n\n element = driver.find_element_by_name(\"source\")\n target = driver.find_element_by_name(\"target\")\n\n from selenium.webdriver import ActionChains\n action_chains = ActionChains(driver)\n action_chains.drag_and_drop(element, target);\n\n\nMoving between windows and frames\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\nIt's rare for a modern web application not to have any frames or to be\nconstrained to a single window. WebDriver supports moving between\nnamed windows using the \"switch_to_window\" method::\n\n driver.switch_to_window(\"windowName\")\n\nAll calls to ``driver`` will now be interpreted as being directed to\nthe particular window. But how do you know the window's name? Take a\nlook at the javascript or link that opened it::\n\n <a href=\"somewhere.html\" target=\"windowName\">Click here to open a new window</a>\n\nAlternatively, you can pass a \"window handle\" to the\n\"switch_to_window()\" method. Knowing this, it's possible to iterate\nover every open window like so::\n\n for handle in driver.window_handles:\n driver.switch_to_window(handle);\n\nYou can also swing from frame to frame (or into iframes)::\n\n driver.switch_to_frame(\"frameName\")\n\nIt's possible to access subframes by separating the path with a dot,\nand you can specify the frame by its index too. That is::\n\n driver.switch_to_frame(\"frameName.0.child\")\n\nwould go to the frame named \"child\" of the first subframe of the frame\ncalled \"frameName\". 
**All frames are evaluated as if from *top*.**\n\n\nPopup dialogs\n~~~~~~~~~~~~~\n\nSelenium WebDriver has built-in support for handling popup dialog\nboxes. After you've triggered an action that would open a popup, you\ncan access the alert with the following::\n\n alert = driver.switch_to_alert()\n\nThis will return the currently open alert object. With this object\nyou can now accept, dismiss, read its contents or even type into a\nprompt. This interface works equally well on alerts, confirms and\nprompts; a short sketch appears at the end of this walkthrough. Refer\nto the API documentation for more information.\n\n\nNavigation: history and location\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\nEarlier, we covered navigating to a page using the \"get\" command\n(\`\`driver.get(\"http://www.example.com\")\`\`). As you've seen, WebDriver\nhas a number of smaller, task-focused interfaces, and navigation is a\nuseful task. To navigate to a page, you can use the \`get\` method::\n\n driver.get(\"http://www.example.com\")\n\nTo move backwards and forwards in your browser's history::\n\n driver.forward()\n driver.back()\n\nPlease be aware that this functionality depends entirely on the\nunderlying driver. It's just possible that something unexpected may\nhappen when you call these methods if you're used to the behaviour of\none browser over another.\n\n\nCookies\n~~~~~~~\n\nBefore we move on, you may be interested in\nunderstanding how to use cookies. First of all, you need to be on the\ndomain that the cookie will be valid for:\n\n::\n\n # Go to the correct domain\n driver.get(\"http://www.example.com\")\n\n # Now set the cookie. This one's valid for the entire domain\n cookie = {\"name\": \"key\", \"value\": \"value\"}\n driver.add_cookie(cookie)\n\n # And now output all the available cookies for the current URL\n # (get_cookies returns a list of dictionaries)\n all_cookies = driver.get_cookies()\n for cookie in all_cookies:\n print \"%s -> %s\" % (cookie[\"name\"], cookie[\"value\"])\n\n\nNext, next steps!\n~~~~~~~~~~~~~~~~~\n\nThis has been a high-level walkthrough of WebDriver and some of its\nkey capabilities. 
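As promised, here is a short sketch of the popup-dialog interface described\nabove (a minimal example; it assumes the page has already triggered an alert\nor a confirm box)::\n\n alert = driver.switch_to_alert()\n print alert.text # read the message shown in the dialog\n alert.dismiss() # or alert.accept() to confirm it\n\n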
You may want to look at the `Test Design\nConsiderations` chapter to get some ideas about how you can reduce the\npain of maintaining your tests and how to make your code more modular.\n\n\nTest Design Considerations\n--------------------------\n\n\nAPI\n---\n\nThis chapter cover all the interfaces of Selenium WebDriver.\n\nExceptions\n~~~~~~~~~~\n\n**module:** selenium.common.exceptions\n\n\n- class WebDriverException(msg=None, screen=None, stacktrace=None)\n\n base: Exception\n\n\n- class ErrorInResponseException(response, msg)\n\n base: WebDriverException\n\n An error has occurred on the server side.\n\n This may happen when communicating with the firefox extension or the\n remote driver server.\n\n\n- class InvalidSwitchToTargetException(msg=None, screen=None, stacktrace=None)\n\n base: WebDriverException\n\n The frame or window target to be switched doesn't exist.\n\n\n- class NoSuchFrameException(msg=None, screen=None, stacktrace=None)\n\n base: InvalidSwitchToTargetException\n\n The frame target to be switched doesn't exist.\n\n- class NoSuchWindowException(msg=None, screen=None, stacktrace=None)\n\n base: InvalidSwitchToTargetException\n\n The window target to be switched doesn't exist.\n\n- class NoSuchElementException(msg=None, screen=None, stacktrace=None)\n\n base: WebDriverException\n\n The find_element_by_* methods can't find the element.\n\n\n- class NoSuchAttributeException(msg=None, screen=None, stacktrace=None)\n\n base: WebDriverException\n\n- class StaleElementReferenceException(msg=None, screen=None, stacktrace=None)\n\n base: WebDriverException\n\n Indicates that a reference to an element is now \"stale\" --- the\n element no longer appears on the DOM of the page.\n\n- class InvalidElementStateException(msg=None, screen=None, stacktrace=None)\n\n base: WebDriverException\n\n- class ElementNotVisibleException(msg=None, screen=None, stacktrace=None)\n\n base: InvalidElementStateException\n\n Thrown to indicate that although an element is present on the DOM,\n it is not visible, and so is not able to be interacted with.\n\n- class ElementNotSelectableException(msg=None, screen=None, stacktrace=None)\n\n base: InvalidElementStateException\n\n- class InvalidCookieDomainException(msg=None, screen=None, stacktrace=None)\n\n base: WebDriverException\n\n Thrown when attempting to add a cookie under a different domain\n than the current URL.\n\n- class UnableToSetCookieException(msg=None, screen=None, stacktrace=None)\n\n base: WebDriverException\n\n Thrown when a driver fails to set a cookie.\n\n- class RemoteDriverServerException(msg=None, screen=None, stacktrace=None)\n\n base: WebDriverException\n\n- class TimeoutException(msg=None, screen=None, stacktrace=None)\n\n Thrown when a command does not complete in enough time.\n\nAction Chains\n~~~~~~~~~~~~~\n\n**module:** selenium.webdriver.common.action_chains\n\n- class ActionChains(driver)\n\n *driver:* The WebDriver instance which performs user actions.\n\n Generate user actions. All actions are stored in the ActionChains\n object. Call perform() to fire stored actions.\n\n - perform()\n\n Performs all stored actions.\n\n - click(on_element=None)\n\n Clicks an element.\n\n *on_element:* The element to click. If None, clicks on current\n mouse position.\n\n - click_and_hold(on_element)\n\n Holds down the left mouse button on an element.\n\n *on_element:* The element to mouse down. 
If None, clicks on\n current mouse position.\n\n - context_click(on_element)\n\n Performs a context-click (right click) on an element.\n\n *on_element:* The element to context-click. If None, clicks on\n current mouse position.\n\n - double_click(on_element)\n\n Double-clicks an element.\n\n *on_element:* The element to double-click. If None, clicks on\n current mouse position.\n\n - drag_and_drop(source, target)\n\n Holds down the left mouse button on the source element, then moves\n to the target element and releases the mouse button.\n\n *source:* The element to mouse down.\n\n *target:* The element to mouse up.\n\n - key_down(key, element=None)\n\n Sends a key press only, without releasing it. Should only be used\n with modifier keys (Control, Alt and Shift).\n\n *key:* The modifier key to send. Values are defined in Keys class.\n\n *element:* The element to send keys. If None, sends a key to the\n current focused element.\n\n\n - key_up(key, element=None)\n\n Releases a modifier key.\n\n *key:* The modifier key to send. Values are defined in Keys class.\n\n *element:* The element to send keys. If None, sends a key to the\n current focused element.\n\n - move_by_offset(xoffset, yoffset)\n\n Moving the mouse to an offset from the current mouse position.\n\n *xoffset:* X offset to move to.\n *yoffset:* Y offset to move to.\n\n - move_to_element(to_element)\n\n Moving the mouse to the middle of an element.\n\n *to_element:* The element to move to.\n\n - move_to_element_with_offset(to_element, xoffset, yoffset)\n\n Move the mouse by an offset of the specified element.\n Offsets are relative to the top-left corner of the element.\n\n *to_element:* The element to move to.\n *xoffset:* X offset to move to.\n *yoffset:* Y offset to move to.\n\n - release(on_element)\n\n Releasing a held mouse button.\n\n *on_element:* The element to mouse up.\n\n - send_keys(\`*keys_to_send\`)\n\n Sends keys to the current focused element.\n\n *keys_to_send:* The keys to send.\n\n - send_keys_to_element(element, \`*keys_to_send\`)\n\n Sends keys to an element.\n\n *element:* The element to send keys.\n *keys_to_send:* The keys to send.\n\n\nResources\n---------\n\n- Blog post explaining how to use headless X for running Selenium\n tests:\n http://coreygoldberg.blogspot.com/2011/06/python-headless-selenium-webdriver.html\n\n- Jenkins plugin for headless Selenium tests:\n https://wiki.jenkins-ci.org/display/JENKINS/Xvnc+Plugin\n\n\nFrequently asked questions\n--------------------------\n\n\nHow to use ChromeDriver?\n~~~~~~~~~~~~~~~~~~~~~~~~~\n\nDownload the latest \`chromedriver from the download page\n<http://code.google.com/p/chromium/downloads/list>\`_. Unzip the\nfile::\n\n unzip chromedriver_linux32_x.x.x.x.zip\n\nYou should see a \`\`chromedriver\`\` executable. Now you can create an instance of\nChrome WebDriver like this::\n\n driver = webdriver.Chrome(executable_path=\"/path/to/chromedriver\")\n\nThe rest of the example should work as given in the other\ndocumentation.\n\n\nConclusion\n----------\n\nSelenium Python bindings provide a simple API to automate all kinds\nof functional/acceptance tests using Selenium WebDriver.
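As a closing example, the ActionChains interface documented above can be\ndriven like this (a sketch; the element ids are assumptions)::\n\n from selenium.webdriver import ActionChains\n\n menu = driver.find_element_by_id(\"menu\")\n item = driver.find_element_by_id(\"menu-item\")\n ActionChains(driver).move_to_element(menu).click(item).perform()\n\nThrough the Python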
Though Python\nAPI you can access all functionalities in an intuitive way.\n\n" }, { "alpha_fraction": 0.6354421377182007, "alphanum_fraction": 0.6462775468826294, "avg_line_length": 36.63999938964844, "blob_id": "46277e95c32313b8a31f97271c825cea4d7f5154", "content_id": "5c43cf91498e782e982a96fa7bfcf6478f60939f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2861, "license_type": "no_license", "max_line_length": 107, "num_lines": 75, "path": "/com.instasafev2/src/tests/controller_tests.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "import logging\nimport unittest\n\nfrom ddt import ddt, data, unpack\nimport pytest\n\nfrom pages.companyPortal.controller_Page import ControllerPages\nimport utilities.custom_logger as cl\nfrom utilities.read_data import getCSVData\n#from utilities.teststatus import Status\n\n\n#from pages.companyPortal.controller_Page import ControllerPage\n#@pytest.mark.usefixtures(\"oneTimeSetUp\", \"setUp\")\[email protected](\"universalSetUp\", \"setUp\")\n@ddt\nclass Test_ControllerTests(unittest.TestCase):\n log = cl.customLogger(logging.DEBUG)\n \n @pytest.fixture(autouse=True)\n def objectSetup(self, universalSetUp):\n self.cp = ControllerPages(universalSetUp)\n # self.cp = ControllerPages(self.driver)\n # self.ts = Status(self.driver)\n \n \n \n #to handle whatfix #\n \"\"\" \n @pytest.mark.run(order = 0)\n def test_close_whatfix_window(self): \n self.log.info(\"try to close whatfix_window\")\n self.cp.close_Whatfix_Windows()\n self.log.info(\"close_whatfix_window\") \n \"\"\" \n # @pytest.mark.run(after = \"test_close_whatfix_window\")\n # def test_open_add_window(self):\n # self.log.info(\"test_open_add_window\")\n # self.cp.navigate_to_Controller_Add_window() \n \n @pytest.mark.run(order = 0)\n def test_t1AddController(self):\n self.log.info(\"*#\" * 20)\n self.log.info(\"test_t1Addcontroller start\")\n self.log.info(\"*#\" * 20)\n self.log.info(\"navigate to controller page\")\n self.cp.navigate_to_Controller_Add_window() \n self.log.info(\"navigate to controller Add page\")\n self.cp.add_Single_Controller(\"isademo\", \"test\", \"TCP\", \"1470\", \"10.14.70.0\", \"27\")\n self.log.info(\"controller add process complete now verify newly added controller\")\n result = self.cp.verify_Controller_entry(\"test\")\n assert result == True\n\n \n \n @pytest.mark.run( after = \"test_t1AddController\")\n def test_delete_controller(self):\n self.log.info(\"*#\" * 20)\n self.log.info(\"test_delete_controller\")\n self.log.info(\"*#\" * 20) \n self.log.info(\"navigate to controller page\")\n self.cp.navigateControllerPage()\n result = self.cp.delete_Sigle_Controller(\"test\") \n assert result == True\n \n'''\n #@pytest.mark.run(after='test_open_add_window') \n @data(*getCSVData(\"C:/Users/user/workspace/com.instasafev2/usource/controllerlist.csv\"))\n @unpack\n def test_AddMultipleControllers(self, cloudServer, name, protocol, port, internalNetwork, netmaskBits):\n self.log.info(\"*#\" * 20)\n self.log.info(\"test_AddMultiplecontroller start\")\n self.log.info(\"*#\" * 20)\n self.cp.addMultipleController(cloudServer, name, protocol, port, internalNetwork, netmaskBits)\n result = self.cp.verifyAddController(\"test\")''' \n \n \n " }, { "alpha_fraction": 0.5384615659713745, "alphanum_fraction": 0.5384615659713745, "avg_line_length": 23, "blob_id": "a1624bcdc2016ebedf868e8370795c8a3e1ab8bd", "content_id": "80a6c6736040e46533c0f708e2cb29e8740e51ed", "detected_licenses": [ 
"Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 26, "license_type": "permissive", "max_line_length": 23, "num_lines": 1, "path": "/test/examples/example_factory/__init__.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "__author__ = 'tsimpson'\n " }, { "alpha_fraction": 0.577531635761261, "alphanum_fraction": 0.5838607549667358, "avg_line_length": 17.05714225769043, "blob_id": "84b3d5e4538be7d87759c60e542c81b39eb43958", "content_id": "e81cebd1ac69dd4146da4c9c7699e52c7e871ab6", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 632, "license_type": "permissive", "max_line_length": 59, "num_lines": 35, "path": "/test/tests/compatability/testng/before_and_after/src/java/BeforeAndAfterSuccess.java", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "import org.testng.annotations.*;\n\n@Test(groups= {\"BeforeAndAfter\"})\npublic class BeforeAndAfterSuccess extends BeforeAndAfter {\n\n @BeforeClass\n public void beforeEverything() {\n println(\"@BeforeClass\");\n }\n\n @BeforeMethod\n public void setUp() {\n println(\"@BeforeMethod\");\n }\n\n @Test\n public void method1() {\n println(\"@Test 1\");\n }\n\n @Test\n public void method2() {\n println(\"@Test 2\");\n }\n\n @AfterClass\n public void afterEverything() {\n println(\"@AfterClass\");\n }\n\n @AfterMethod\n public void tearDown() {\n println(\"@AfterMethod\");\n }\n}\n" }, { "alpha_fraction": 0.5759075880050659, "alphanum_fraction": 0.5759075880050659, "avg_line_length": 27.85714340209961, "blob_id": "0132d623a19aebaed9f8aa2f20125c93e9c63009", "content_id": "be9e5ae493b0d040d5f7ceef467837f0ad628af1", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 606, "license_type": "permissive", "max_line_length": 57, "num_lines": 21, "path": "/test/examples/example1/run_tests.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "from proboscis import register\nfrom proboscis import TestProgram\n\ndef run_tests():\n from tests import service_tests\n\n # Now create some groups of groups.\n register(groups=[\"fast\"], depends_on_groups=[\"unit\"])\n register(groups=[\"integration\"],\n depends_on_groups=[\"service.initialize\",\n \"service.tests\",\n \"service.shutdown\"])\n register(groups=[\"slow\"],\n depends_on_groups=[\"fast\", \"integration\"])\n\n # Run Proboscis and exit.\n TestProgram().run_and_exit()\n\n\nif __name__ == '__main__':\n run_tests()\n" }, { "alpha_fraction": 0.6358209252357483, "alphanum_fraction": 0.6383795142173767, "avg_line_length": 35.06153869628906, "blob_id": "f4e7a9e107cfcbeb7f6c7047ce985d212a077004", "content_id": "a94076298b11e00268873929e3e4b454c8e030da", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2345, "license_type": "no_license", "max_line_length": 96, "num_lines": 65, "path": "/com.instasafev2/src/pages/old_new_password.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "'''\nCreated on 15-Jun-2018\n\n@author: Gaurav\n'''\nimport logging\n\nfrom base.selenium_driver import SeleniumDriver\nimport utilities.custom_logger as cl\n\n\nclass Old_reset_new_password(SeleniumDriver):\n \n log = cl.customLogger(logging.DEBUG)\n def __init__(self, driver):\n super().__init__(driver)\n self.driver = driver\n\n _old_Password_Field = 
\".//input[@placeholder='Old Password']\"\n _new_Password_Field = \".//input[@placeholder='new Password']\"\n _signIn_Button = \".//button[contains(text(),'Sign In')]\"\n \n def click_On_Old_Password_Field(self):\n self.elementClick(self._old_Password_Field, locatorType=\"xpath\")\n\n def enter_Old_Password(self, username):\n self.sendKeys(username, self._old_Password_Field, locatorType=\"xpath\")\n \n def click_On_New_Password_Field(self):\n self.elementClick(self._new_Password_Field, locatorType=\"xpath\")\n\n def enter_New_Password(self, password):\n self.sendKeys(password, self._new_Password_Field, locatorType=\"xpath\")\n\n def click_SignIn_Button(self):\n self.elementClick(self._signIn_Button, locatorType=\"xpath\")\n\n def change_password(self, username=\"\", password=\"\"):\n self.clearFields()\n self.click_On_Old_Password_Field()\n self.enter_Old_Password(username)\n self.click_On_New_Password_Field()\n self.enter_New_Password(password)\n self.click_SignIn_Button()\n\n def verifyLoginSuccessful(self):\n self.waitForElement(\"//*[contains(text(), 'Dashboard')]\",\n locatorType=\"xpath\")\n result = self.isElementPresent(locator=\"//*[contains(text(), 'Dashboard')]\",\n locatorType=\"xpath\")\n return result\n\n def verifyLoginFailed(self):\n result = self.isElementPresent(locator=\"//*[contains(text(), 'Dashboard')]\",\n locatorType=\"xpath\")\n return result\n\n def verifyLoginTitle(self):\n return self.verifyPageTitle(\"Dashboard - Mumbai | MyInstaSafe\")\n\n def clearFields(self):\n oldPasswordField = self.getElement(locator=self._old_Password_Field,locatorType=\"xpath\")\n oldPasswordField.clear()\n newPasswordField = self.getElement(locator=self._old_Password_Field,locatorType=\"xpath\")\n newPasswordField.clear()\n\n" }, { "alpha_fraction": 0.6778338551521301, "alphanum_fraction": 0.6842588186264038, "avg_line_length": 28.849315643310547, "blob_id": "39f9493ab519deedb1144da6298f3c674289c855", "content_id": "73bf20cec60aa9124acdcbacd43397641bafaff0", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 4358, "license_type": "permissive", "max_line_length": 101, "num_lines": 146, "path": "/test/README.rst", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "Proboscis\n================\n\nProboscis is a Python test framework that extends Python's built-in unittest\nmodule and `Nose`_ with features from `TestNG`_.\n\n.. _Nose: http://readthedocs.org/docs/nose/en/latest/\n\n.. _TestNG: http://testng.org/doc/index.html\n\n`Click here to read the full docs`_.\n\n.. _`Click here to read the full docs`: http://packages.python.org/proboscis/\n\n\nFeatures\n--------\n\n- Uses decorators instead of naming conventions.\n\n- Allows for TestNG style test methods, in which a class is initialized once,\n as an alternative to using class fields (see the example below).\n\n- Allows for explicit `test dependencies`_ and skipping of dependent tests\n on failures.\n\n- Runs xUnit style clases if desired or needed for backwards compatability.\n\n- Uses Nose if available (but doesn't require it), and works with many of its\n plugins.\n\n- Runs in `IronPython`_ and `Jython`_ (although if you're targetting the JVM\n you should consider using TestNG instead)!\n\n.. _`test dependencies`: http://beust.com/weblog/2004/08/18/using-annotation-inheritance-for-testing/\n.. _IronPython: http://ironpython.net/\n.. 
_Jython: http://www.jython.org/\n\n\n\nUpdates\n-------\n\nVersion 1.2.6.0\n~~~~~~~~~~~~~~~\n\n- Proboscis now works with Python 3!\n\nVersion 1.2.5.3\n~~~~~~~~~~~~~~~\n\n- Fixed bug in runs_after_groups inheritance.\n- Allow \"import *\" from proboscis asserts.\n\nVersion 1.2.5.2\n~~~~~~~~~~~~~~~\n\n- Fixed a bug that prevented some Nose plugins from working.\n\nVersion 1.2.5.1\n~~~~~~~~~~~~~~~\n\n- Implemented test decorator property \"runs_after\", which affects only the\n order of test runs. If a test noted by \"runs_after\" fails, the test method\n or class targeted by the decorator will *not* be skipped. If a group is run,\n tests which are listed in \"runs_after\" will not implicitly be run as well.\n- Added 'fail' method to Checker class.\n- Using tox discovered some issues with Jython compatibility.\n\nVersion 1.2.4\n~~~~~~~~~~~~~\n\n- Added a missing parameter to a format string error message.\n- Fixed bug where the enabled property was not being inherited by class methods.\n- Added a Check class to allow testing multiple assertions in a with block.\n\n\nExample\n-------\n\nThis example tests an external web service by creating an admin user and\nupdating the profile picture.\n\n::\n\n @test(groups=[\"service.initialization\"])\n def make_sure_service_is_up():\n # No point in proceeding if the service isn't responding.\n assert_true(service_module.ping(service_config))\n\n\n @test(groups=[\"service.tests\"], depends_on_groups=[\"service.initialization\"])\n class AdminTest(object):\n\n @before_class\n def create_admin_user(self):\n self.client = service_module.ServiceClient(service_config)\n self.admin = self.client.create_admin_user(\"boss\")\n\n @test\n def check_for_defaults(self):\n assert_equals(\"default.jpg\", self.admin.get_profile_image())\n\n @test(depends_on=check_for_defaults)\n def change_picture(self):\n self.admin.set_profile_image(\"spam.jpg\")\n assert_equals(\"spam.jpg\", self.admin.get_profile_image())\n\n # Put other tests against admin user here...\n\n @after_class\n def destroy_admin_user(self):\n self.client.delete_user(self.admin)\n\n\n\nHere, the variable \"admin\" is created only once, similar to TestNG.\n\nIf the xUnit style is preferred or needed for backwards compatibility, the\nfollowing code will create the admin variable once for each test function:\n\n::\n\n @test(groups=[\"service.tests\"], depends_on_groups=[\"service.initialization\"])\n class AdminTest(unittest.TestCase):\n\n def setUp(self):\n self.client = service_module.ServiceClient(service_config)\n self.admin = self.client.create_admin_user(\"boss\")\n\n def test_change_picture(self):\n assert_equals(\"default.jpg\", self.admin.get_profile_image())\n self.admin.set_profile_image(\"spam.jpg\")\n assert_equals(\"spam.jpg\", self.admin.get_profile_image())\n\n # Put other tests against admin user here...\n\n def tearDown(self):\n self.client.delete_user(self.admin)\n\nThough this version of AdminTest runs like an xUnit test, it still runs after\nthe \"service.initialization\" group.\n\nFor more info see the \`full docs\`_.\n\n.. 
_`full docs`: http://packages.python.org/proboscis/\n" }, { "alpha_fraction": 0.6430723071098328, "alphanum_fraction": 0.6487199068069458, "avg_line_length": 29.88372039794922, "blob_id": "6700e51a8778f0c186b50a00d1b6273ab9628486", "content_id": "b17b40e6aace66b8812f38a9c355fd04921d660d", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2656, "license_type": "permissive", "max_line_length": 79, "num_lines": 86, "path": "/test/examples/example_factory/tests/service_tests.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "\"\"\"\nUser service tests.\n\nThis is a test for a fictitious user web service which has rich client bindings\nwritten in Python.\n\nIt assumes we have an existing test database which we can run the web service\nagainst, using the function \"mymodule.start_web_server().\"\n\nAfter spinning up the service, the test creates a new user and tests various\nCRUD actions. Since its a test database, it is OK\nto leave old users in the system but we try to always delete them if possible\nat the end of the test.\n\n\"\"\"\n\nfrom proboscis import after_class\nfrom proboscis import before_class\nfrom proboscis import factory\nfrom proboscis import test\nfrom proboscis.asserts import assert_equal\nfrom proboscis.asserts import assert_is_none\n\nfrom spam_api import create_admin_api\nfrom spam_api import create_api\nfrom spam_api import SpamHttpException\n\n\n@test\nclass UserPermissionsTest(object):\n\n def __init__(self, config):\n self.user_type = config['user_type']\n self.create = config['create'] or None\n self.delete = config['delete'] or None\n self.read = config['read'] or None\n\n @before_class\n def create_user(self):\n self.admin_api = create_admin_api()\n user = self.admin_api.user.create(self.user_type)\n self.user_id = user.id\n self.api = create_api(self.user_id)\n\n @test\n def test_create(self):\n try:\n self.spam = self.api.spam.create()\n assert_is_none(self.create)\n except SpamHttpException as she:\n self.spam = self.admin_api.spam.create()\n assert_equal(she.status_code, self.create)\n\n @test(depends_on=[test_create])\n def test_read(self):\n try:\n spam = self.api.spam.get(self.spam.id)\n assert_is_none(self.read)\n assert_equal(spam, self.spam)\n except SpamHttpException as she:\n assert_equal(she.status_code, self.read)\n\n @test(depends_on=[test_create, test_read])\n def test_delete(self):\n try:\n self.api.spam.delete(self.spam.id)\n assert_is_none(self.delete)\n except SpamHttpException as she:\n assert_equal(she.status_code, self.delete)\n\n @after_class\n def delete_user(self):\n self.admin_api.user.delete(self.user_id)\n\n\n@factory\ndef generate_user_tests():\n user_configs = [\n { 'user_type': \"anonymous\",\n 'create':401, 'read':401, 'delete': 401 },\n { 'user_type': \"restricted\",\n 'create':401, 'read':None, 'delete': 401 },\n { 'user_type': \"normal\",\n 'create':None, 'read':None, 'delete': None }\n ]\n return [UserPermissionsTest(config) for config in user_configs]\n" }, { "alpha_fraction": 0.8129870295524597, "alphanum_fraction": 0.8207792043685913, "avg_line_length": 28.69230842590332, "blob_id": "e91f5420dcdc14f66a8792630e69de262c5b9b7f", "content_id": "ae83f2e474e03f66be0bcde92b47f0daf5361994", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 385, "license_type": "no_license", "max_line_length": 79, "num_lines": 13, "path": "/python/src/tests/test_suite_demo.py", "repo_name": 
"gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "import unittest\n\nfrom tests.courses.register_courses_csv_data import RegisterCoursesCSVDataTests\nfrom tests.home.login_tests import LoginTests\n\n\n# Get all tests from the test classes\ntc1 = unittest.TestLoader().loadTestsFromTestCase(Test_Login)\n\n# Create a test suite combining all test classes\nsmokeTest = unittest.TestSuite([tc1])\n\nunittest.TextTestRunner(verbosity=2).run(smokeTest)" }, { "alpha_fraction": 0.5703030824661255, "alphanum_fraction": 0.5739716291427612, "avg_line_length": 32.32264709472656, "blob_id": "beea00fa150d026c6537c18be0359fed9a79c2a8", "content_id": "6f55e118c70e8b6b3332e66c7bdd4c0d5b6bfcdf", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 16628, "license_type": "permissive", "max_line_length": 84, "num_lines": 499, "path": "/test/run_tests.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "\"\"\"\n\nThis mess runs the documentation examples to make sure they actually work.\nIt isn't easy because it has to use Python's reload mechanics since Proboscis\nisn't currently designed to be run multiple times.\n\nThe tests run here can be run more easily from the command line by entering\ninto the various subdirectories of tests/examples and running the following\ncommands in Linux (its basically the same in Windows with the usual changes):\n\n Python:\n PYTHONPATH=../../../ python run_tests.py\n Jython:\n JYTHONPATH=../../../ jython run_tests.py\n\n\nThese are basically the higher order tests for Proboscis. Some unit tests\nare in tests/proboscis_test.py.\n\n\"\"\"\nimport os\nimport sys\n\nfrom proboscis.asserts import assert_equal\nfrom os.path import join\n\nimport proboscis\nfrom proboscis.compatability import capture_exception\nfrom proboscis.compatability import is_jython\nfrom proboscis.compatability import reload\n\n\nCAN_USE_WITH = not is_jython()\n\n\ndef fake_exit(*args, **kwargs):\n pass\n\ndef make_dirs(dir):\n if not os.path.exists(dir):\n os.makedirs(dir)\n\n\ndef reload_proboscis():\n \"\"\"\n Reloading Proboscis like this causes problems- for instance,\n exceptions aren't caught because the exception to be caught is a\n an earlier version of the reloaded doppleganger that is thrown.\n \"\"\"\n reload(proboscis)\n def new_cap_ex(body_func, except_type):\n e = capture_exception(body_func, Exception)\n if e:\n if (str(type(e)) == str(except_type)):\n return e\n raise e\n return None\n proboscis.compatability.capture_exception = new_cap_ex\n\n\nclass FailureLines(object):\n \"\"\"Tallies expected and actual occurrences of lines in output.\"\"\"\n\n def __init__(self, source_file, lines):\n self.source_file = source_file\n self.failures = {}\n for line in lines:\n self.add_expected(line)\n\n def add_actual(self, line):\n self.get(line)[\"actual\"] += 1\n\n def add_expected(self, line):\n self.get(line)[\"expected\"] += 1\n\n def assert_all(self):\n for key in self.failures.keys():\n self.assert_line(key)\n\n def assert_line(self, line):\n info = self.get(line)\n assert_equal(str(info[\"expected\"]).strip(),\n str(info[\"actual\"]).strip(),\n \"In %s, expected to see failure for \\\"%s\\\" %d time(s) but saw \"\n \"it %d time(s). 
File: %s \"\n \"Additional info for this test: %s\" %\n (self.source_file, line, info[\"expected\"], info[\"actual\"],\n self.source_file, str(self.failures)))\n\n def get(self, line):\n if line not in self.failures:\n self.failures[line] = {\"expected\":0, \"actual\":0}\n return self.failures[line]\n\n\ndef assert_failures_in_file(source_file, expected_failures):\n \"\"\"Checks the output of Proboscis run for expected failures.\"\"\"\n failures = FailureLines(source_file, expected_failures)\n # Iterate the output, find all lines of text with the words FAIL or ERROR\n # and add them to a collection of \"actual\" failures that can be checked\n # against expected failures.\n # 2.7 seems to put the important parts on the next line, while 2.6 has it\n # on the same line.\n # I think Nose may also use this first format.\n if is_jython() or sys.version_info < (2, 7) \\\n or proboscis.dependencies.use_nose:\n for line in open(source_file, 'r'):\n if \"FAIL: \" in line:\n failures.add_actual(line[6:].strip())\n elif \"ERROR: \" in line:\n failures.add_actual(line[7:].strip())\n else:\n error_next = False\n for line in open(source_file, 'r'):\n if error_next:\n failures.add_actual(line.strip())\n error_next = False\n if \"ERROR: \" in line or \"FAIL: \" in line:\n error_next = True\n failures.assert_all()\n\n\ndef create_rst(block_type, source_file, rst_file):\n \"\"\"Converts Python files into a .rst files in the docs/build directory.\"\"\"\n print(source_file + \" ---> \" + rst_file)\n if not os.path.exists(source_file):\n raise ValueError(\"File %s not found.\" % source_file)\n make_dirs(os.path.dirname(rst_file))\n output = open(rst_file, 'w')\n try:\n def code_block():\n output.write(\".. code-block:: \" + block_type + \"\\n\\n\")\n code_block()\n for line in open(source_file, 'r'):\n if line.strip() == \"#rst-break\":\n code_block()\n else:\n output.write(\" \" + line)\n finally:\n output.close()\n\nclass ExampleRunner(object):\n \"\"\"Runs an example folder as if Python was executed from that directory.\n\n Also converts all source code into .rst files which can be easily consumed\n by the docs. In order to act as if Python was executed from a different\n directory, it has to do nasty things to the path. 
In order to run through\n Proboscis (and especially unittest.TestProgram, which Proboscis and Nose\n call) multiple times it has to muck with the modules dictionary.\n\n \"\"\"\n\n def __init__(self, root, test):\n \"\"\"\n Runs an example, which contains a base_directory relative to\n tests/examples, a list of source files in that directory (for .rst\n conversion) and a series of elements describing the different ways to\n \"run\" the example (this is to show examples of how to invoke Proboscis\n in the docs).\n \"\"\"\n self.test = test\n self.base_directory = join(root, \"docs\", \"build\", \"examples\",\n test.base_directory)\n self.rst_directory = join(self.base_directory, \"source\")\n self.src_directory = join(root, \"examples\")\n\n self.create_rst_from_source()\n for (index, run_info) in enumerate(test.runs):\n self.run(run_info, index)\n\n def alter_argv(self, args):\n while(len(sys.argv) > 0):\n del sys.argv[0]\n if len(self.test.source_files) > 0:\n sys.argv.append(self.test.source_files[0])\n for arg in args:\n sys.argv.append(arg)\n reload_proboscis()\n\n def create_rst_from_source(self):\n \"\"\"\n Copies the source files from a directory to .rst equivalents in another\n directory.\n \"\"\"\n make_dirs(self.rst_directory)\n for file_name in self.test.source_files:\n source_rel_path = join(self.test.base_directory, file_name)\n source_file = join(self.src_directory, source_rel_path)\n rst_file = join(self.rst_directory, file_name)\n create_rst(\"python\", source_file, rst_file)\n\n def restore_modules(self):\n \"\"\"Necessary to get the decorators to register tests again.\"\"\"\n current_module_names = sys.modules.keys()\n delete_list = []\n for name in current_module_names:\n if name not in self.module_names:\n delete_list.append(name)\n for name in delete_list:\n del sys.modules[name]\n\n def run(self, run_info, index):\n \"\"\"Manipulates various global variables before running a test.\n\n Of course it would be nicer to not use global variables and Proboscis\n even has facilities for this, but since the examples are written to use\n globals (for example most large test suites would use the default test\n registry via the @test decorator) it's better to make the examples\n simpler at the expense of needing this code.\n\n Captures stdout, changes the sys argv list to match the \"args\" field\n of the run information, then runs a test before asserting that the\n output was as expected. 
It mucks with proboscis's default registry (a global\n variable provided for convenience that is used by the examples) and the\n Python module dictionary.\n \"\"\"\n output_file_name = \"output%d\" % index\n self.alter_argv(run_info[\"args\"])\n output_directory = join(self.base_directory, \"output\")\n output_file = join(output_directory, output_file_name + \".txt\")\n make_dirs(output_directory)\n output = open(output_file, 'w')\n\n # Disable unittest's habit of murdering program on invocation.\n old_sys_exit = sys.exit\n sys.exit = fake_exit\n # Redirect standard out.\n old_std_out = sys.stdout\n sys.stdout = output\n\n # Pretend we're running this from a shell.\n #output.write(\"$ python run_tests.py \" + str(run_info[\"args\"]) + \"\\n\\n\")\n fake_sh_output = \"$ python\"\n for arg in sys.argv:\n fake_sh_output = fake_sh_output + \" \" + arg\n print(fake_sh_output + \"\\n\\n\")\n proboscis.case.OVERRIDE_DEFAULT_STREAM = output\n # Run the actual test, raise error if necessary.\n try:\n proboscis.decorators.DEFAULT_REGISTRY.reset()\n self.store_modules()\n self.test.run(index)\n finally:\n output.close()\n sys.stdout = old_std_out\n sys.exit = old_sys_exit\n\n failures = run_info[\"failures\"]\n if sys.version_info < (2, 7) and not proboscis.dependencies.use_nose:\n failures += run_info[\"skips\"]\n assert_failures_in_file(output_file, failures)\n rst_file = join(output_directory, output_file_name + \".rst\")\n create_rst(\"bash\", output_file, rst_file)\n\n self.restore_modules()\n\n def store_modules(self):\n self.module_names = list(sys.modules.keys())\n\n\nclass UnitTestExample(object):\n\n base_directory=\"unit\"\n\n runs = [\n {\n \"args\":[],\n \"failures\":[],\n \"skips\":[]\n },\n {\n \"args\":[\"--group=strings\"],\n \"failures\":[],\n \"skips\":[]\n },\n {\n \"args\":[\"--show-plan\"],\n \"failures\":[],\n \"skips\":[]\n },\n {\n \"args\":[\"--verbosity=4\"],\n \"failures\":[],\n \"skips\":[]\n }\n ]\n\n source_files = [\"run_tests.py\",\n join(\"tests\", \"unit.py\")]\n\n def run(self, index):\n from examples import unit\n sys.path.append(unit.__path__[0])\n from examples.unit import run_tests as unit_run\n reload(unit_run) # Reload to force a new Proboscis\n unit_run.run_tests()\n\n\nclass Example1(object):\n\n base_directory=\"example1\"\n\n runs = [\n {\n \"args\":[],\n \"failures\":[],\n \"skips\":[\"Delete the user.\"],\n },\n {\n \"args\":[],\n \"failures\":[\n \"Creates a local database and starts up the web service.\"],\n \"skips\":[\n \"proboscis.case.FunctionTest (create_user)\",\n \"proboscis.case.FunctionTest (user_cant_connect_with_wrong_password)\",\n \"Make sure the given client cannot perform admin actions..\",\n \"Make sure the given client cannot perform admin actions..\",\n \"Test changing a client's profile image.\",\n \"Delete the user.\",\n ]\n }\n ]\n\n source_files = [\"run_tests.py\",\n join(\"tests\", \"service_tests.py\"),\n join(\"tests\", \"unit_test.py\")]\n\n def run(self, index):\n from examples import example1\n sys.path = example1.__path__ + sys.path\n\n if (index == 1):\n # Change the code during this run to show what happens when the\n # code is busted. 
This is for an unhappy path example in the docs.\n import mymodule\n mymodule.start_web_server = mymodule.bad_start_web_server\n from examples.example1 import run_tests as example1_run\n example1_run.run_tests()\n\n\nclass Example2(Example1):\n\n base_directory=\"example2\"\n\n runs = [\n {\n \"args\":[],\n \"failures\":[],\n \"skips\":[]\n },\n {\n \"args\":[],\n \"failures\":[\n \"Starts up the web service.\"],\n \"skips\":[\"proboscis.case.FunctionTest (create_user)\",\n \"proboscis.case.FunctionTest (user_cant_connect_with_wrong_password)\",\n \"Make sure the given client cannot perform admin actions..\",\n \"Make sure the given client cannot perform admin actions..\",\n \"Test changing a client's profile image.\",\n \"proboscis.case.FunctionTest (delete_user)\"\n ]\n }\n ]\n\n source_files = [\"run_tests.py\",\n join(\"tests\", \"service_tests.py\")]\n\n def run(self, index):\n from examples import example2\n sys.path = example2.__path__ + sys.path\n if (index == 1):\n import mymodule\n mymodule.start_web_server = mymodule.bad_start_web_server\n from examples.example2 import run_tests as example2_run\n example2_run.run_tests()\n\n\nclass Example3(Example1):\n\n base_directory=\"example3\"\n\n runs = [\n {\n \"args\":[],\n \"failures\":[],\n \"skips\":[]\n },\n {\n \"args\":[],\n \"failures\":[\n \"Create a user.\"],\n \"skips\":[\"Test changing a client's profile image.\",\n \"proboscis.case.MethodTest (delete_user)\",\n \"Make sure the given client cannot perform admin actions..\",\n \"Make sure the given client cannot perform admin actions..\",\n \"proboscis.case.MethodTest (cant_login_with_wrong_password)\",\n \"proboscis.case.MethodTest (successful_login)\"]\n }\n ]\n\n source_files = [\"run_tests.py\",\n join(\"tests\", \"service_tests.py\")]\n\n def run(self, index):\n from examples import example3\n sys.path = example3.__path__ + sys.path\n\n if index == 1:\n def return_nadda(*args):\n return None\n import mymodule\n mymodule.UserServiceClient.create_user = return_nadda\n from examples.example3 import run_tests as example3_run\n example3_run.run_tests()\n\n\nclass Example4(Example1):\n\n base_directory=\"example4\"\n\n runs = [\n {\n \"args\":[],\n \"failures\":[],\n \"skips\":[]\n },\n {\n \"args\":[],\n \"failures\":[\n \"Create a user.\",\n \"Create a user.\"],\n \"skips\":[\"Test changing a client's profile image.\",\n \"Test changing a client's profile image.\",\n \"proboscis.case.MethodTest (delete_user)\",\n \"proboscis.case.MethodTest (delete_user)\",\n \"Make sure the given client cannot perform admin actions..\",\n \"Make sure the given client cannot perform admin actions..\",\n \"Make sure the given client cannot perform admin actions..\",\n \"Make sure the given client cannot perform admin actions..\",\n \"proboscis.case.MethodTest (successful_login)\",\n \"proboscis.case.MethodTest (successful_login)\"]\n }\n ]\n\n source_files = [\"run_tests.py\",\n join(\"tests\", \"service_tests.py\")]\n\n def run(self, index):\n from examples import example4\n sys.path = example4.__path__ + sys.path\n\n if index == 1:\n def return_nadda(*args):\n return None\n import mymodule\n mymodule.UserServiceClient.create_user = return_nadda\n from examples.example4 import run_tests as example4_run\n example4_run.run_tests()\n\n\nclass ExampleF(Example1):\n\n base_directory=\"example_factory\"\n\n runs = [\n {\n \"args\":[],\n \"failures\":[],\n \"skips\":[]\n }\n ]\n\n source_files = [\"run_tests.py\",\n join(\"tests\", \"service_tests.py\")]\n\n def run(self, index):\n from examples 
import example_factory\n sys.path.append(example_factory.__path__[0])\n import spam_api\n from examples.example_factory import run_tests as exampleF_run\n exampleF_run.run_tests()\n\n\ndef run_all(root=\".\"):\n if not os.path.exists(join(root, \"docs\")) or \\\n not os.path.exists(join(root, \"proboscis\", \"decorators.py\")):\n raise ValueError(\"Please invoke this from the root of proboscis's \"\n \"source.\")\n ExampleRunner(root, UnitTestExample())\n ExampleRunner(root, Example1())\n ExampleRunner(root, Example2())\n ExampleRunner(root, Example3())\n ExampleRunner(root, Example4())\n if CAN_USE_WITH:\n ExampleRunner(root, ExampleF())\n\n\nif __name__ == '__main__':\n run_all()\n" }, { "alpha_fraction": 0.6016260385513306, "alphanum_fraction": 0.6178861856460571, "avg_line_length": 19.33333396911621, "blob_id": "286d610f5acad9b751928f5d213d4623cdc8a7f1", "content_id": "b0589efb7ce83ef5fca0b54b5a4b8c4b6372ef2a", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 123, "license_type": "permissive", "max_line_length": 28, "num_lines": 6, "path": "/test/examples/unit/utils.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "\ndef is_negative(number):\n return number < 0\n\ndef reverse(string):\n \"\"\"Reverses a string.\"\"\"\n return string[::-1]\n" }, { "alpha_fraction": 0.7567567825317383, "alphanum_fraction": 0.7567567825317383, "avg_line_length": 11.333333015441895, "blob_id": "8480c0b7e822f93fef430d52e353a08a4efa9d3a", "content_id": "6f060e0ed7ba15c03a55dd65abffb529483ae2f0", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 37, "license_type": "permissive", "max_line_length": 16, "num_lines": 3, "path": "/test/.coveragerc", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "[run]\nbranch = True\nsource=proboscis\n" }, { "alpha_fraction": 0.7640767097473145, "alphanum_fraction": 0.7662896513938904, "avg_line_length": 39.869346618652344, "blob_id": "2c0673e7201281a24f5d1921fb53bb1911d4d16b", "content_id": "b6fb386fffe35a9c2cf215b86a968213f3d86f20", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 8134, "license_type": "permissive", "max_line_length": 80, "num_lines": 199, "path": "/test/docs/source/tutorial.rst", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "Tutorial\n=================\n\nWriting Unit Tests\n------------------\n\nProboscis runs imported test functions or classes decorated with the\nproboscis.test decorator. Decorated classes extending unittest.TestCase run\nexactly like they do in Nose / unittest.\n\nThis means traditional Python unit test classes can run as-is in Proboscis\nprovided they are decorated.\n\nFor example:\n\n.. include:: ../build/examples/unit/source/tests/unit.py\n :end-line: 20\n\nYou can also attach the proboscis.test decorator to functions to run them\nby themselves:\n\n.. include:: ../build/examples/unit/source/tests/unit.py\n :start-line: 20\n\nUnlike Nose, Proboscis requires that all test modules be imported directly in\ncode, so using it requires you to write a start-up script like the following:\n\n.. include:: ../build/examples/unit/source/run_tests.py\n\nAssuming this is named something like \"run_test.py\" you can run it like so:\n\n.. 
include:: ../build/examples/unit/output/output0.rst\n\n\nTestProgram.run_and_exit() expects to be used in scripts like this and takes\ncommand line arguments into account (Note: it's called \"run_and_exit()\"\nbecause to run the tests it calls Nose which then calls unittest, which calls\nsys.exit() on completion and forces the program to exit).\n\nNormally, all tests are run, but we can use the \"--group\" command line\nparameter to run only a certain group (and the groups it depends on)\ninstead:\n\n.. include:: ../build/examples/unit/output/output1.rst\n\nIf you want to run multiple specific groups, use the \"--group\"\nparameter more than once.\n\nYou can also use the \"--show-plan\" argument to get a preview of how Proboscis\nwill run the tests:\n\n.. include:: ../build/examples/unit/output/output2.rst\n\nUnused arguments get passed along to Nose or the unittest module, which means\nit's possible to run some plugins designed for them. However,\nProboscis is by nature invasive and intentionally and unintentionally breaks\ncertain features of Nose and unittest (such as test discovery) so your\nmileage may vary.\n\nWriting Higher Level Tests\n--------------------------\n\nProboscis is more useful for higher level tests which may have dependencies on\neach other or need to run in a guaranteed order.\n\nNose can order tests lexically but the effect is\ndifficult to maintain, especially when working with multiple modules.\nAdditionally, if one test performs some sort of initialization to produce a\nstate required by other tests and fails, the dependent tests run despite\nhaving no chance of succeeding. These additional failures pollute the results\nmaking the true problem harder to see.\n\nIn Proboscis, if one test depends on another which fails, the\ndependent test raises Nose's SkipTest or calls unittest's skipTest()\nautomatically, making it easier to track down the real problem. If neither\nfeature is available (as is the case with Python 2.5), it simply raises an\nassertion with a message beginning with the word \"SKIPPED.\"\n\nThe following example shows how to write a test with dependencies to test a\nfictitious web service that stores user profiles. The service allows admin\nusers to create and delete users and allows users to edit a profile picture.\n\n.. include:: ../build/examples/example1/source/tests/service_tests.py\n\nOur initialization code runs in three phases: first, we create the database,\nsecond, we start the web service (assuming it's some kind of daemon we can\nrun programmatically) and third we create a new user. The function\n\"initialize_database_and_server\" is in the group\n\"service.initialization\", while the function \"create_user\" is in the group\n\"user.initialization\". Note that the \"create_user\" depends on\n\"initialize_database_and_server\", so Proboscis guarantees it runs after.\n\nThe meat of the test is where we run some operations against the user. These\nclasses and functions are marked as depending on the \"user.initialization\"\ngroup and so run later.\n\nThe tests which clean everything up depend on the groups \"user.tests\" and\n\"service.tests\" respectively. We also set the \"always_run\" property to true so\nthat if a test in the group they depend on fails they will still run. 
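\n\nFor instance, a cleanup test marked this way might look like the following\n(a minimal sketch; the group names and the admin_client helper are\nillustrative, not taken verbatim from the example files):\n\n.. code-block:: python\n\n @test(groups=[\"user.cleanup\"], depends_on_groups=[\"user.tests\"],\n always_run=True)\n def delete_user():\n if test_user is None:\n raise SkipTest(\"User was never created.\")\n admin_client.delete_user(test_user)\n\n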
Since\nthe \"delete_user\" test function could run even when the \"create_user\" test\nfunction fails to even make a user, we add some code to check the status of the\nglobal \"test_user\" object and skip it if it was never set.\n\nWhen we run the run_test.py script, we see everything is ordered correctly:\n\n.. include:: ../build/examples/example1/output/output0.rst\n\nIn some frameworks initialization code is run as part of a \"fixture\", or\nsomething else which is a bit different than a test, but in Proboscis our\ninitialization code is a test itself and can be covered with assertions.\n\nLet's say there's an error and the web service fails to start up. In a traditional\ntesting framework, you'd see a stream of error messages as every test failed.\nIn Proboscis, you get this:\n\n.. include:: ../build/examples/example1/output/output1.rst\n\n\nOrdering tests without groups\n-----------------------------\n\nThe example above is pretty group heavy- in some cases, a group is created\njust to establish a single dependency.\n\nIt's also possible to establish dependencies without groups by listing a\nfunction or class directly as a dependency. The code below runs identically to\nthe example above but does so without groups:\n\n.. include:: ../build/examples/example2/source/tests/service_tests.py\n\n\nUsing TestNG style test methods to factor out global variables\n--------------------------------------------------------------\n\nThe example above creates the test user as a global variable so it can pass it\nbetween the tests which use it. Because unittest creates a new instance of\nthe class \"WhenConnectingAsANormalUser\" for each method it runs, we can't\nrun the code to create the user in the setUp method and store it in that class\neither.\n\nAn alternative would be to merge all of the\ntests which require a user into a single function, but this would understandably\nbe a bit gross. It also would not be equivalent, since if one test failed,\nno other tests would get a chance to run (for example, if the test represented by\n\"test_auth_delete\" failed, unittest would output a single test failure, and the test\nfor \"change_profile_image\" would never run). It would also be uncharitable to\nanyone who had to maintain the code.\n\nThere's another way in Proboscis, though, which is to run test methods in the\nstyle of TestNG by putting the @test decorator on both the class and test\nmethods and making sure the class does not extend unittest.TestCase.\n\nWhen the TestNG method is used, a single instance of a class is created and\nused to run each method.\n\nIf we do this, we can combine all of the tests which require the user into\none class as follows:\n\n.. include:: ../build/examples/example3/source/tests/service_tests.py\n\n@before_class and @after_class work just like the @test decorator and accept\nthe same arguments, but also tell the method to run either before or after all\nother methods in the given class.\n\nIf a test can fit into one class, it's usually best to write it this way.\n\nConsider what happens if we want to test the admin user- before, we would have\nhad to duplicate our test code for the normal user or somehow gotten the\nsame test code to run twice while we altered the global test_user variable\nin between.\n\nHowever, using the newly refactored code, testing for the admin user can be\naccomplished fairly easily via subclassing:\n\n.. 
include:: ../build/examples/example4/source/tests/service_tests.py\n\n\n\n\nAdditional Tricks\n-----------------\n\nGroups of Groups\n~~~~~~~~~~~~~~~~\n\nIt's possible to create empty test entries that link groups together using the\nproboscis.register function without a class or function. A good\nplace to do this (as well as store other bits of configuration) is in the start-up\nscript you write for Proboscis. Here's an example:\n\n.. include:: ../build/examples/example1/source/run_tests.py\n\nHere the groups \"fast\", \"integration\", and \"slow\" are created as simple\ndependencies on other groups. This makes it possible to, for example, run\nall \"slow\" tests with the following command:\n\n.. code-block:: bash\n\n python runtests.py --group=slow\n\n" }, { "alpha_fraction": 0.5860127806663513, "alphanum_fraction": 0.5898019075393677, "avg_line_length": 31.989286422729492, "blob_id": "0886493d74d308ac462109c5f0e8a25042e4087c", "content_id": "c0d7d74d72e07926c6952b325bce8cb1be4cfdea", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9237, "license_type": "permissive", "max_line_length": 79, "num_lines": 280, "path": "/test/tests/unit/test_core.py", 
"repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "import imp\nimport sys\nimport time\nimport unittest\n\n\nfrom proboscis.asserts import assert_equal\nfrom proboscis.asserts import assert_raises\nfrom proboscis.asserts import assert_true\nfrom proboscis.asserts import assert_false\nfrom proboscis.asserts import fail\nfrom proboscis import compatability\nfrom proboscis.compatability import get_method_function\nfrom proboscis import decorators\nfrom proboscis.decorators import expect_exception\nfrom proboscis.decorators import time_out\nfrom proboscis.decorators import TimeoutError\nfrom proboscis import ProboscisTestMethodClassNotDecorated\n\n\nclass ProboscisRegistryTest(unittest.TestCase):\n\n def setUp(self):\n import proboscis\n from proboscis import TestRegistry\n self.old_default_registry = proboscis.decorators.DEFAULT_REGISTRY\n self.registry = TestRegistry()\n proboscis.decorators.DEFAULT_REGISTRY = self.registry\n\n def tearDown(self):\n import proboscis\n proboscis.decorators.DEFAULT_REGISTRY = self.old_default_registry\n\n\nclass ExampleTest(object):\n def test_1(self):\n pass\n\n\nclass TestClassDecoratorInheritanceForEnabled(ProboscisRegistryTest):\n\n def test_if_unset_then_func_should_inherit_enabled_false(self):\n from proboscis import test\n\n @test(enabled=False)\n class ExampleTest(object):\n @test\n def test_1(self):\n pass\n\n for t in self.registry.tests:\n if t.home == ExampleTest:\n assert_false(t.info.enabled)\n if t.home == ExampleTest.test_1:\n assert_false(t.info.enabled)\n\n def test_if_set_then_func_should_not_inherit(self):\n from proboscis import test\n\n @test(enabled=False)\n class ExampleTest(object):\n @test(enabled=True)\n def test_1(self):\n pass\n\n for t in self.registry.tests:\n if t.home == ExampleTest:\n assert_false(t.info.enabled)\n if t.home == ExampleTest.test_1:\n assert_true(t.info.enabled)\n\n def test_if_set_then_func_should_not_inherit(self):\n from proboscis import test\n\n @test(enabled=False)\n class ExampleTest(object):\n @test(enabled=False)\n def test_1(self):\n pass\n\n for t in self.registry.tests:\n if t.home is ExampleTest:\n assert_false(t.info.enabled)\n elif t.home is get_method_function(ExampleTest.test_1):\n assert_false(t.info.enabled)\n else:\n fail(\"Unexpected test seen in iteration: %s\" % t)\n\n\nclass TestClassDecoratorInheritanceForRunsAfter(ProboscisRegistryTest):\n\n def test_if_not_set_on_parent_func_is_unaffected(self):\n from proboscis import test\n\n @test\n def other_test():\n pass\n\n @test\n class ExampleTest(object):\n @test(runs_after=[other_test])\n def test_1(self):\n pass\n\n for t in self.registry.tests:\n if t.home is ExampleTest:\n assert_equal(0, len(t.info.runs_after))\n elif t.home is get_method_function(ExampleTest.test_1):\n assert_equal(1, len(t.info.runs_after))\n assert_true(other_test in t.info.runs_after)\n elif t.home is not other_test:\n fail(\"Unexpected test seen in iteration: %s\" % t)\n\n def test_if_set_on_parent_func_adds_parent_items_to_list(self):\n from proboscis import test\n\n @test\n def other_test():\n pass\n\n @test\n def yet_another_test():\n pass\n\n @test(runs_after=[yet_another_test])\n class ExampleTest(object):\n @test(runs_after=[other_test])\n def test_1(self):\n pass\n\n for t in self.registry.tests:\n if t.home is ExampleTest:\n assert_equal(1, len(t.info.runs_after))\n assert_true(yet_another_test in t.info.runs_after)\n elif t.home is get_method_function(ExampleTest.test_1):\n assert_equal(2, len(t.info.runs_after))\n 
expected_homes = {other_test:False, yet_another_test:False}\n for home in t.info.runs_after:\n if home not in expected_homes.keys():\n fail(\"%s should not be in runs_after\" % home)\n expected_homes[home] = True\n for expected_home, found in expected_homes.items():\n if not found:\n fail(\"%s was not found in runs_after\" % expected_home)\n elif t.home not in (other_test, yet_another_test):\n fail(\"Unexpected test seen in iteration: %s\" % t)\n\n\nclass TestClassDecoratorInheritanceForRunsAfterGroups(ProboscisRegistryTest):\n\n def test_if_not_set_on_parent_func_is_unaffected(self):\n from proboscis import test\n\n @test\n class ExampleTest(object):\n @test(runs_after_groups=[\"other_test\"])\n def test_1(self):\n pass\n\n for t in self.registry.tests:\n if t.home == ExampleTest:\n assert_equal(0, len(t.info.runs_after_groups))\n elif t.home == get_method_function(ExampleTest.test_1):\n assert_equal(1, len(t.info.runs_after_groups))\n assert_true(\"other_test\" in t.info.runs_after_groups)\n else:\n fail(\"Unexpected test seen in iteration: %s\" % t)\n\n def test_if_set_on_parent_func_adds_parent_items_to_list(self):\n from proboscis import test\n\n @test(runs_after_groups=[\"yet_another_test\"])\n class ExampleTest(object):\n @test(runs_after_groups=[\"other_test\"])\n def test_1(self):\n pass\n\n for t in self.registry.tests:\n if t.home == ExampleTest:\n assert_equal(1, len(t.info.runs_after_groups))\n assert_true(\"yet_another_test\" in t.info.runs_after_groups)\n elif t.home == get_method_function(ExampleTest.test_1):\n assert_equal(2, len(t.info.runs_after_groups))\n expected_homes = {\"other_test\":False, \"yet_another_test\":False}\n for home in t.info.runs_after_groups:\n if home not in expected_homes.keys():\n fail(\"%s should not be in runs_after_groups\" % home)\n expected_homes[home] = True\n for expected_home, found in expected_homes.items():\n if not found:\n fail(\"%s was not found in runs_after_groups\"\n % expected_home)\n else:\n fail(\"Unexpected test seen in iteration: %s\" % t)\n\n\nclass TestClassDecoratorInheritanceForAlwaysRun(ProboscisRegistryTest):\n\n def test_class_if_true_forces_child_to_true(self):\n from proboscis import test\n\n @test(always_run=True)\n class ExampleTest(object):\n @test(always_run=False)\n def test_1(self):\n pass\n\n for t in self.registry.tests:\n if t.home == ExampleTest:\n assert_true(t.info.enabled)\n if t.home == ExampleTest.test_1:\n assert_true(t.info.enabled)\n\n\nclass TestClassDecoratorInheritanceForDependsOn(ProboscisRegistryTest):\n\n def test_class_if_true_forces_child_to_true(self):\n from proboscis import test\n\n @test\n def dependency():\n pass\n\n @test(depends_on=[dependency])\n class ExampleTest(object):\n @test\n def test_1(self):\n pass\n @test(depends_on=[dependency])\n def test_2(self):\n pass\n\n for t in self.registry.tests:\n if t.home in (ExampleTest, ExampleTest.test_1, ExampleTest.test_2):\n assert_true(dependency in t.info.depends_on)\n\nclass TestClassDecoratorInheritanceForDependsOnGroups(ProboscisRegistryTest):\n\n def test_class_if_true_forces_child_to_true(self):\n from proboscis import test\n\n @test(depends_on_groups=[\"blah\"])\n class ExampleTest(object):\n @test\n def test_1(self):\n pass\n @test(depends_on_groups=[\"blah\"])\n def test_2(self):\n pass\n\n for t in self.registry.tests:\n if t.home in (ExampleTest, ExampleTest.test_1, ExampleTest.test_2):\n assert_true(\"blah\" in t.info.depends_on_groups)\n\n\nclass TestCannotUseBothBeforeClassAndAfterClass(ProboscisRegistryTest):\n\n def 
test_wont_work(self):\n from proboscis.core import TestEntryInfo\n assert_raises(RuntimeError, TestEntryInfo, run_before_class=5,\n run_after_class=6)\n\n\nclass TestEntryInfoRepr(unittest.TestCase):\n\n def test_it_doesnt_blow_up(self):\n from proboscis.core import TestEntryInfo\n repr(TestEntryInfo())\n\n\nclass TestCannotApplyDecoratorTwice(ProboscisRegistryTest):\n\n def test_cant_do_that(self):\n from proboscis import test\n def reg():\n @test\n @test\n def hi():\n pass\n assert_raises(RuntimeError, reg)\n" }, { "alpha_fraction": 0.66746985912323, "alphanum_fraction": 0.6771084070205688, "avg_line_length": 28.64285659790039, "blob_id": "2e74e08f0545bd8b824b206e94f5e699297082ab", "content_id": "ae0679118c440f196d39c1a9a585aa5a7512b16a", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 415, "license_type": "permissive", "max_line_length": 51, "num_lines": 14, "path": "/test/run_unit_tests.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "import unittest\nimport sys\nfrom tests.unit.test_asserts import *\nif sys.version >= \"2.6\": # These tests use \"with\".\n from tests.unit.test_check import *\nfrom tests.unit.test_core import *\nif sys.version >= \"2.6\": # These tests use \"with\".\n from tests.unit.test_check import *\n from tests.unit.test_core_with import *\nfrom tests.unit.test_sorting import *\n\n\nif __name__ == '__main__':\n unittest.main()\n" }, { "alpha_fraction": 0.6936026811599731, "alphanum_fraction": 0.6936026811599731, "avg_line_length": 23.83333396911621, "blob_id": "be11e13e1694fdda0830de705378aa6a13c87150", "content_id": "89716b6569eeb15f34d429e7ccf308841944e60a", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 297, "license_type": "permissive", "max_line_length": 43, "num_lines": 12, "path": "/test/examples/example1/tests/unit_test.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "import unittest\nimport mymodule\nfrom proboscis import test\n\n@test(groups=[\"unit\"])\nclass TestReverseString(unittest.TestCase):\n\n def test_reversal(self):\n original = \"hello\"\n expected = \"olleh\"\n actual = mymodule.reverse(original)\n self.assertEqual(expected, actual)" }, { "alpha_fraction": 0.8586956262588501, "alphanum_fraction": 0.8586956262588501, "avg_line_length": 45, "blob_id": "96211bce5ea5c1c0c3054492d26574fc64fca271", "content_id": "ca4aacd15e6bd6586421016d4bae8055c8f0fa55", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 92, "license_type": "permissive", "max_line_length": 77, "num_lines": 2, "path": "/test/tests/compatability/testng/README.rst", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "Contains TestNG examples to understand TestNG behavior and test compatability\nin Proboscis.\n" }, { "alpha_fraction": 0.5564356446266174, "alphanum_fraction": 0.5722772479057312, "avg_line_length": 13.428571701049805, "blob_id": "1c568a848ddf597c2833e5952fda4a8b1de1b9d8", "content_id": "d21ec4f0cb603cff12f2ff460da8b169a71f1110", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 505, "license_type": "permissive", "max_line_length": 58, "num_lines": 35, "path": "/test/examples/bugs/gh_issue4.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": 
"from proboscis import test\nfrom proboscis import TestProgram\n\norder = []\n\ndef mark(msg):\n global order\n order += msg\n\n@test(groups=\"A\")\nclass ClassA(object):\n\n @test\n def methodA1(self):\n mark(\"A1\")\n\n @test\n def methodA2(self):\n mark(\"A2\")\n\n\n@test(groups=\"B\")\nclass ClassB(object):\n\n @test(depends_on=[ClassA])\n def methodB1(self):\n mark(\"B1\")\n\n @test\n def methodB2(self):\n mark(\"B2\")\n\n\n\nif __name__ == '__main__': TestProgram().run_and_exit()\n" }, { "alpha_fraction": 0.47826087474823, "alphanum_fraction": 0.6086956262588501, "avg_line_length": 8.199999809265137, "blob_id": "b63b66801f1b8963fcd9db7fb6f324c23b121dee", "content_id": "dbf15312bf72df543afd3cb15306cccc90bf2d09", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 46, "license_type": "no_license", "max_line_length": 22, "num_lines": 5, "path": "/python/src/base/testing.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "'''\nCreated on 18-Sep-2017\n\n@author: user\n'''\n" }, { "alpha_fraction": 0.6008056402206421, "alphanum_fraction": 0.6134944558143616, "avg_line_length": 26.893259048461914, "blob_id": "989cd5ddbaaf8e44d5c4e5917c63f3f24775993b", "content_id": "a377720752dec7aab1b49a93f5d1da227397e937", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4965, "license_type": "permissive", "max_line_length": 72, "num_lines": 178, "path": "/test/tests/unit/test_asserts.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "import unittest\n\nfrom proboscis.asserts import ASSERTION_ERROR\nfrom proboscis.asserts import assert_false\nfrom proboscis.asserts import assert_equal\nfrom proboscis.asserts import assert_not_equal\nfrom proboscis.asserts import assert_is\nfrom proboscis.asserts import assert_is_none\nfrom proboscis.asserts import assert_is_not\nfrom proboscis.asserts import assert_is_not_none\nfrom proboscis.asserts import assert_true\nfrom proboscis.asserts import assert_raises\nfrom proboscis.asserts import assert_raises_instance\nfrom proboscis.compatability import capture_exception\nfrom proboscis.asserts import fail\n\n\nclass BadClass(object):\n\n def __init__(self, value=5):\n self.value = value\n\n def __eq__(self, rhs):\n return self.value == rhs.value\n\n def __str__(self):\n raise RuntimeError()\n\n def __repr__(self):\n raise RuntimeError()\n\n\nclass MyException(RuntimeError):\n pass\n\n\nclass TestAsserts(unittest.TestCase):\n\n def fails(self, func, *args, **kwargs):\n self.assertRaises(ASSERTION_ERROR, func, *args, **kwargs)\n\n def fails_m(self, message, func, *args, **kwargs):\n def try_block():\n func(*args, **kwargs)\n ae = capture_exception(try_block, ASSERTION_ERROR)\n self.assertTrue(ae is not None)\n self.assertTrue(type(ae) is ASSERTION_ERROR)\n self.assertEqual(message, str(ae))\n\n def test_equal1(self):\n assert_equal(2,2)\n\n def test_equal2(self):\n self.fails(assert_equal, 2, 4)\n\n def test_equal3(self):\n self.fails_m(\"Blah!\", assert_equal, 2, 4, \"Blah!\")\n\n def test_equal4(self):\n self.fails_m(\"The actual value did not equal the expected one.\",\n assert_equal, BadClass(3), BadClass(7))\n\n def test_false1(self):\n assert_false(False)\n\n def test_false2(self):\n self.fails(assert_false, True)\n\n def test_false3(self):\n self.fails_m(\"Blah!\", assert_false, True, \"Blah!\")\n\n def test_is1(self):\n assert_is(2, 2)\n\n def test_is2(self):\n 
self.fails(assert_is, 2, 4)\n\n def test_is3(self):\n self.fails_m(\"Blah!\", assert_is, 2, 4, \"Blah!\")\n\n def test_is4(self):\n self.fails_m(\"The actual value is not the expected one.\",\n assert_is, BadClass(), BadClass())\n\n def test_is_none1(self):\n assert_is_none(None)\n\n def test_is_none2(self):\n self.fails(assert_is_none, 2)\n\n def test_is_none3(self):\n self.fails_m(\"Blah!\", assert_is_none, 2, \"Blah!\")\n\n def test_is_none4(self):\n self.fails_m(\"The value is not None.\",\n assert_is_none, BadClass())\n\n def test_is_not1(self):\n assert_is_not(None, 3)\n\n def test_is_not2(self):\n self.fails(assert_is_not, 2, 2)\n\n def test_is_not(self):\n self.fails_m(\"Blah!\", assert_is_not, 2, 2, \"Blah!\")\n\n def test_is_not4(self):\n b = BadClass()\n self.fails_m(\"The actual value is the expected one.\",\n assert_is_not, b, b)\n\n def test_is_not_none1(self):\n assert_is_not_none(True)\n\n def test_is_not_none2(self):\n self.fails(assert_is_not_none, None)\n\n def test_is_not_none3(self):\n assert_is_none(None, \"Blah!\")\n\n def test_is_not_none4(self):\n assert_is_not_none(BadClass())\n\n def test_not_equal1(self):\n assert_not_equal(2,4)\n\n def test_not_equal2(self):\n self.fails(assert_not_equal, 2, 2)\n\n def test_not_equal3(self):\n self.fails_m(\"Blah!\", assert_not_equal, 2, 2, \"Blah!\")\n\n def test_not_equal4(self):\n self.fails_m(\"The actual value equalled the expected one.\",\n assert_not_equal, BadClass(2), BadClass(2))\n\n def test_true1(self):\n assert_true(True)\n\n def test_true2(self):\n self.fails(assert_true, False)\n\n def test_true3(self):\n self.fails_m(\"Blah!\", assert_true, False, \"Blah!\")\n\n def test_fail(self):\n self.fails_m(\"Blah!\", fail, \"Blah!\")\n\n def test_assert_raises1(self):\n def correct():\n raise RuntimeError()\n re = assert_raises(RuntimeError, correct)\n assert_equal(type(re), RuntimeError)\n\n def test_assert_raises2(self):\n def not_correct():\n pass\n self.fails(assert_raises, RuntimeError, not_correct)\n\n def test_assert_raises3(self):\n def not_precise():\n raise MyException() # Even a derived class won't work.\n self.fails(assert_raises, RuntimeError, not_precise)\n\n def test_assert_raises4(self):\n def not_precise():\n raise Exception(\"HELLO!!\")\n def try_block():\n assert_raises(RuntimeError, not_precise)\n ex = capture_exception(try_block, Exception)\n self.assertTrue(ex is not None)\n self.assertTrue(type(ex) is Exception)\n # Make sure assert_raises gives us the original exception.\n self.assertEqual(\"HELLO!!\", str(ex))\n\n\nif __name__ == '__main__':\n unittest.main()\n" }, { "alpha_fraction": 0.5491234064102173, "alphanum_fraction": 0.5513562560081482, "avg_line_length": 37.5737190246582, "blob_id": "2a5610344a423feae38023656d9565957540f5d1", "content_id": "a9a87b053af03cf54c5bc99ac7b403aa604f546e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12092, "license_type": "no_license", "max_line_length": 93, "num_lines": 312, "path": "/com.instasafev2/src/base/selenium_driver.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "import logging\nimport os\nimport time\nfrom traceback import print_stack\n\nfrom selenium.common.exceptions import *\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.common.keys import Keys\nfrom selenium.webdriver.support import expected_conditions as EC\nfrom selenium.webdriver.support.ui import Select\nfrom selenium.webdriver.support.ui import WebDriverWait\n\nimport 
utilities.custom_logger as cl\n\n\nclass SeleniumDriver():\n\n log = cl.customLogger(logging.DEBUG)\n\n def __init__(self, driver):\n self.driver = driver\n\n def screenShot(self, resultMessage):\n \"\"\"\n Takes screenshot of the current open web page\n \"\"\"\n fileName = resultMessage + \".\" + str(round(time.time() * 1000)) + \".png\"\n screenshotDirectory = \"C:/Usersuser/workspace/com.instasafev2/src/screenshots/\"\n relativeFileName = screenshotDirectory + fileName\n currentDirectory = os.path.dirname(__file__)\n destinationFile = os.path.join(currentDirectory, relativeFileName)\n destinationDirectory = os.path.join(currentDirectory, screenshotDirectory)\n\n try:\n if not os.path.exists(destinationDirectory):\n os.makedirs(destinationDirectory)\n self.driver.save_screenshot(destinationFile)\n self.log.info(\"Screenshot save to directory: \" + destinationFile)\n except:\n self.log.error(\"### Exception Occurred when taking screenshot\")\n print_stack()\n\n def getTitle(self):\n return self.driver.title\n\n def getByType(self, locatorType):\n locatorType = locatorType.lower()\n if locatorType == \"id\":\n return By.ID\n elif locatorType == \"name\":\n return By.NAME\n elif locatorType == \"xpath\":\n return By.XPATH\n elif locatorType == \"css\":\n return By.CSS_SELECTOR\n elif locatorType == \"class\":\n return By.CLASS_NAME\n elif locatorType == \"link\":\n return By.LINK_TEXT\n else:\n self.log.info(\"Locator type \" + locatorType +\n \" not correct/supported\")\n return False\n\n def getElement(self, locator, locatorType=\"id\"):\n element = None\n try:\n locatorType = locatorType.lower()\n byType = self.getByType(locatorType)\n element = self.driver.find_element(byType, locator)\n self.log.info(\"Element found with locator: \" + locator +\n \" and locatorType: \" + locatorType)\n except:\n self.log.info(\"Element not found with locator: \" + locator +\n \" and locatorType: \" + locatorType)\n return element\n\n def getElementList(self, locator, locatorType=\"id\"):\n \"\"\"\n NEW METHOD\n Get list of elements\n \"\"\"\n element = None\n try:\n locatorType = locatorType.lower()\n byType = self.getByType(locatorType)\n element = self.driver.find_elements(byType, locator)\n self.log.info(\"Element list found with locator: \" + locator +\n \" and locatorType: \" + locatorType)\n except:\n self.log.info(\"Element list not found with locator: \" + locator +\n \" and locatorType: \" + locatorType)\n return element\n\n def elementClick(self, locator=\"\", locatorType=\"id\", element=None):\n \"\"\"\n Click on an element -> MODIFIED\n Either provide element or a combination of locator and locatorType\n \"\"\"\n try:\n if locator: # This means if locator is not empty\n element = self.getElement(locator, locatorType)\n element.click()\n self.log.info(\"Clicked on element with locator: \" + locator +\n \" locatorType: \" + locatorType)\n except:\n self.log.info(\"Cannot click on the element with locator: \" + locator +\n \" locatorType: \" + locatorType)\n print_stack()\n\n def sendKeys(self, data, locator=\"\", locatorType=\"id\", element=None):\n \"\"\"\n Send keys to an element -> MODIFIED\n Either provide element or a combination of locator and locatorType\n \"\"\"\n try:\n if locator: # This means if locator is not empty\n element = self.getElement(locator, locatorType)\n element.send_keys(data)\n self.log.info(\"Sent data on element with locator: \" + locator +\n \" locatorType: \" + locatorType)\n except:\n self.log.info(\"Cannot send data on the element with locator: \" + locator +\n 
\" locatorType: \" + locatorType)\n print_stack()\n\n def clearField(self, locator=\"\", locatorType=\"id\"):\n \"\"\"\n Clear an element field\n \"\"\"\n element = self.getElement(locator, locatorType)\n element.clear()\n self.log.info(\"Clear field with locator: \" + locator +\n \" locatorType: \" + locatorType)\n\n def getText(self, locator=\"\", locatorType=\"id\", element=None, info=\"\"):\n \"\"\"\n NEW METHOD\n Get 'Text' on an element\n Either provide element or a combination of locator and locatorType\n \"\"\"\n try:\n if locator: # This means if locator is not empty\n element = self.getElement(locator, locatorType)\n text = element.text\n if len(text) == 0:\n text = element.get_attribute(\"innerText\")\n if len(text) != 0:\n self.log.info(\"Getting text on element :: \" + info)\n self.log.info(\"The text is :: '\" + text + \"'\")\n text = text.strip()\n except:\n self.log.error(\"Failed to get text on element \" + info)\n print_stack()\n text = None\n return text\n\n def isElementPresent(self, locator=\"\", locatorType=\"id\", element=None):\n \"\"\"\n Check if element is present -> MODIFIED\n Either provide element or a combination of locator and locatorType\n \"\"\"\n try:\n if locator: # This means if locator is not empty\n element = self.getElement(locator, locatorType)\n if element is not None:\n self.log.info(\"Element present with locator: \" + locator +\n \" locatorType: \" + locatorType)\n return True\n else:\n self.log.info(\"Element not present with locator: \" + locator +\n \" locatorType: \" + locatorType)\n return False\n except:\n print(\"Element not found\")\n return False\n\n def isElementDisplayed(self, locator=\"\", locatorType=\"id\", element=None):\n \"\"\"\n NEW METHOD\n Check if element is displayed\n Either provide element or a combination of locator and locatorType\n \"\"\"\n isDisplayed = False\n try:\n if locator: # This means if locator is not empty\n element = self.getElement(locator, locatorType)\n if element is not None:\n isDisplayed = element.is_displayed()\n self.log.info(\"Element is displayed\" )\n else:\n self.log.info(\"Element not displayed\")\n return isDisplayed\n except:\n print(\"Element not found\")\n return False\n\n def elementPresenceCheck(self, locator, byType):\n \"\"\"\n Check if element is present\n \"\"\"\n try:\n elementList = self.driver.find_elements(byType, locator)\n if len(elementList) > 0:\n self.log.info(\"Element present with locator: \" + locator +\n \" locatorType: \" + str(byType))\n return True\n else:\n self.log.info(\"Element not present with locator: \" + locator +\n \" locatorType: \" + str(byType))\n return False\n except:\n self.log.info(\"Element not found\")\n return False\n\n def waitForElement(self, locator, locatorType=\"id\",\n timeout=10, pollFrequency=0.5):\n element = None\n try:\n byType = self.getByType(locatorType)\n self.log.info(\"Waiting for maximum :: \" + str(timeout) +\n \" :: seconds for element to be clickable\")\n wait = WebDriverWait(self.driver, timeout=timeout,\n poll_frequency=pollFrequency,\n ignored_exceptions=[NoSuchElementException,\n ElementNotVisibleException,\n ElementNotSelectableException])\n element = wait.until(EC.element_to_be_clickable((byType, locator)))\n self.log.info(\"Element appeared on the web page\")\n except:\n self.log.info(\"Element not appeared on the web page\")\n print_stack()\n return element\n\n def implicitWait(self, seconds=\"10\"):\n try :\n self.driver.implicitly_wait(seconds)\n except :\n self.log.info(\"DOM fail to load element in 10 seconds\")\n \n 
def webScroll(self, direction=\"up\"):\n \"\"\"\n NEW METHOD\n \"\"\"\n if direction == \"up\":\n # Scroll Up\n self.driver.execute_script(\"window.scrollBy(0, -1000);\")\n\n if direction == \"down\":\n # Scroll Down\n self.driver.execute_script(\"window.scrollBy(0, 1000);\")\n\n def dropDownListDownKey(self, option):\n try:\n option.send_keys(Keys.DOWN)\n except:\n self.log.info(\"unable to click down key\")\n\n def singleSelect_set_selections(self, locator=\"\", locatorType=\"Xpath\", searchfor=\"\"):\n self.log.info(\"run single drop down element\")\n time.sleep(1)\n elementList = self.getElementList(locator, locatorType)\n try :\n for option in elementList: \n text = option.text \n self.log.info(text)\n self.dropDownListDownKey(option)\n if text == searchfor:\n option.click() # select() in earlier versions of webdriver\n break\n except:\n self.log.info(\"server name does not found\")\n print_stack()\n \n def multiselect_set_selections(self, labels, locator=\"\" , locatorType=\"Xpath\"):\n el = self.getElement(locator, locatorType)\n for option in el.find_elements_by_tag_name(labels):\n if option.text in labels:\n option.click()\n break\n\n def singleSelectDropdownElement(self, locator=\"\", locatorType=\"Xpath\", searchfor=\"\"):\n element = self.getElementList(locator, locatorType)\n select = Select(element) \n try : \n select.select_by_visible_text(searchfor)\n self.log.info(searchfor + \"found in drop down list\")\n except :\n self.log.info(searchfor + \"option does not found in drop down list\")\n \n #def singleSelectDropdownElement(self, locator=\"\", locatorType=\"Xpath\", searchingfor=\"\"):\n # element = self.getElement(locator, locatorType)\n \n def switch_to(self,locator=\"\",locatorType=\"id\"):\n try:\n element = self.getElement(locator, locatorType)\n self.driver.switch_to_frame(element)\n #self.driver.switch_to_active_element()\n self.log.info(\"switch to i frame\")\n except:\n self.log.info(\"fail to switch new frame\")\n \n def switch_to_default(self):\n self.driver.switch_to_default_content() \n \n def pop_up(self):\n try:\n alert = self.driver.switch_to_alert()\n alert.accept()\n print(\"alert accepted\")\n except:\n print(\"no alert\") \n \n \n \n \n " }, { "alpha_fraction": 0.6616801023483276, "alphanum_fraction": 0.6616801023483276, "avg_line_length": 17.489360809326172, "blob_id": "b1470d9de4730a7827e2e2793e83b7de4b88544d", "content_id": "a4ab0f1525d70dd6bc8a7b11aad2f9a83c0360af", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 869, "license_type": "permissive", "max_line_length": 61, "num_lines": 47, "path": "/test/docs/source/pydocs.rst", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "PyDocs\n=================\n\nproboscis\n---------\n.. autofunction:: proboscis.test\n\n.. autofunction:: proboscis.before_class\n\n.. autofunction:: proboscis.after_class\n\n.. autoclass:: proboscis.TestProgram\n :members:\n :undoc-members:\n :show-inheritance:\n\n.. autoclass:: proboscis.SkipTest\n :members:\n :undoc-members:\n :show-inheritance:\n\n.. autofunction:: proboscis.register\n\n.. autofunction:: proboscis.factory\n\n.. autoclass:: proboscis.TestRegistry\n :members:\n :undoc-members:\n :show-inheritance:\n\n.. autoclass:: proboscis.ProboscisTestMethodClassNotDecorated\n :members:\n :undoc-members:\n :show-inheritance:\n\nproboscis.asserts\n--------------------\n\n.. automodule:: proboscis.asserts\n :members:\n :undoc-members:\n :show-inheritance:\n\n.. 
autoclass:: proboscis.asserts.Check\n :members:\n :undoc-members:\n :show-inheritance:\n" }, { "alpha_fraction": 0.604651153087616, "alphanum_fraction": 0.604651153087616, "avg_line_length": 21, "blob_id": "c50ee20b4a3e7b4466e25e3a6bed73700170ce8b", "content_id": "1cffe380abcdebff3cfb09c0da6ebc949c9a405d", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 43, "license_type": "permissive", "max_line_length": 23, "num_lines": 2, "path": "/test/examples/unit/run_py.sh", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "PYTHONPATH=./:../../../\npython run_tests.py" }, { "alpha_fraction": 0.6813441514968872, "alphanum_fraction": 0.6900347471237183, "avg_line_length": 27.766666412353516, "blob_id": "c107272d36a4b42c2074510166da653ecff7437d", "content_id": "727261c53de1a43d2e472c051ab5e595b47b9025", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1726, "license_type": "permissive", "max_line_length": 78, "num_lines": 60, "path": "/test/tests/proboscis_example.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "# Copyright (c) 2011 Rackspace\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\n\"\"\"Example module loaded by proboscis_test.\"\"\"\n\nimport unittest\nfrom proboscis import test\n\n_data_exists = False\n_tests_run = [ False, False, False ]\n\n\n@test(groups=[\"integration\"], depends_on_groups=[\"init\"])\nclass RandomTestZero(unittest.TestCase):\n\n def test_something(self):\n self.assertTrue(_data_exists)\n _tests_run[0] = True\n\n\n@test(depends_on_groups=[\"integration\"])\nclass Destroy(unittest.TestCase):\n\n def test_destroy(self):\n assert _data_exists\n\n@test(groups=[\"integration\"], depends_on_groups=[\"init\"],\n depends_on_classes=[RandomTestZero])\nclass RandomTestOne(unittest.TestCase):\n\n def test_something(self):\n assert _data_exists\n _tests_run[1] = True\n\n@test(groups=[\"integration\"], depends_on_groups=[\"init\"])\nclass RandomTestTwo(unittest.TestCase):\n\n def test_something(self):\n self.assertTrue(_data_exists)\n _tests_run[2] = True\n\n@test(groups=[\"init\"])\nclass StartUp(unittest.TestCase):\n\n def test_connect_to_db(self):\n self.assertEquals(10, 10)\n global _data_exists\n _data_exists = True\n" }, { "alpha_fraction": 0.632209062576294, "alphanum_fraction": 0.6345113515853882, "avg_line_length": 44.301021575927734, "blob_id": "3f63a1e6825ab99d8be4af1d329f2de5702ae045", "content_id": "ddbcbd398bd5bc34ad5fb3f8351b8188c59fad83", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8687, "license_type": "no_license", "max_line_length": 134, "num_lines": 196, "path": "/com.instasafev2/src/pages/companyPortal/controller_Page.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "import logging\nimport string\nimport time\n\nfrom 
base.basepage import BasePage\nfrom base.selenium_driver import SeleniumDriver\nimport utilities.custom_logger as cl\nfrom asyncio.tasks import sleep\n\n\nclass ControllerPages(BasePage,SeleniumDriver):\n\n log = cl.customLogger(logging.DEBUG)\n\n def __init__(self, driver):\n super().__init__(driver)\n self.driver = driver\n\n #Locator\n # _whatfix = \".//button[@class='WFSTCY']\" \n #_iframe =\".//*[@id='wfx-frame-smartPopup']\" \n _controllers_gateways_menu = \".//i[@class='icons ion-gear-a']\" \n _controller_menu = \".//a[contains(text(),'Controllers')]\"\n _add_button = \".//button[contains(text(),'Add')]\" \n _heading_text=\".//h2[contains(text(),'Add Controller')]\"\n _cloudServer_field = \".//div[@class='selectize-input']\"\n _cloudServer_list = \".//div[@role='option']\"\n _name_field = \".//*[@id='vpn_name']\" \n _protocol_field = \".//*[@id='protocol']/option\"\n _portNumber_field = \".//*[@id='port']\"\n _internalNetwork_field = \".//*[@id='network']\"\n _netmaskBit_field = \".//div[@name='netmask_bits']/descendant::div[@class='selectize-input']\"\n _netmaskBit_list = \".//div[@role='option']\"\n _save_button =\".//button[@type='submit']\"\n _controller_entry = \".//a[contains(text(),'controller')]\"\n _close_addwindow = \".//a[@class='close_slide ion-android-close']\"\n _checkBox_single = \".//a[contains(text(),'controller')]/parent::*/..//input[@type='checkbox']\"\n _checkBox_selectAll = \".//input[@ng-model='selectedAll']\"\n _delete_button = \".//button[contains(text(),'Delete')]\"\n \n \n def clickAddButton(self):\n self.waitForElement(self._add_button, locatorType=\"Xpath\", pollFrequency=1) \n self.elementClick(self._add_button, locatorType=\"xpath\")\n \n def verifyAddWindow(self):\n heading = self.isElementPresent(self._heading_text, locatorType=\"Xpath\")\n self.log.info(\"Add window appear status is \" + str(heading))\n \n def selectCloudServerDropDown(self, cloudServer):\n self.elementClick( self._cloudServer_field, locatorType=\"xpath\")\n self.singleSelect_set_selections( self._cloudServer_list, locatorType=\"xpath\",searchfor=cloudServer )\n \n def enterControllerName(self, controller):\n self.sendKeys(controller, self._name_field, locatorType=\"xpath\", element=\"controller name\")\n \n def selectProtocol(self, protocol):\n self.singleSelect_set_selections(self._protocol_field, locatorType=\"xpath\", searchfor=protocol)\n \n def enterPortNumber(self, port):\n self.sendKeys(port, self._portNumber_field, locatorType=\"xpath\")\n \n def enterInternalNetwork(self, ip):\n self.sendKeys(ip, self._internalNetwork_field, locatorType=\"xpath\")\n \n def enterNetmaskBit(self, netmaskBit):\n self.elementClick( self._netmaskBit_field, locatorType=\"xpath\")\n self.singleSelect_set_selections(self._netmaskBit_list, locatorType=\"xpath\", searchfor=netmaskBit)\n \n def clickSaveButton(self):\n self.elementClick(locator =self._save_button, locatorType=\"Xpath\") \n\n def closeAddWindow(self):\n self.elementClick(locator= self._close_addwindow, locatorType=\"Xpath\", element=\"closeAddWindow\")\n \n def verify_Controller_entry(self, name):\n controller_entry = self._controller_entry.replace(\"controller\", name)\n self.log.info(\"new Xpath of controller :- \" + controller_entry)\n self.waitForElement(locator= controller_entry, locatorType=\"Xpath\", timeout=30, pollFrequency=1)\n return self.isElementPresent(locator= controller_entry, locatorType=\"Xpath\", element= \"none\")\n \n def clearFields(self):\n portNumberField = 
self.getElement(locator=self._portNumber_field,locatorType=\"xpath\")\n portNumberField.clear()\n \n def checkbox(self,locator):\n self.elementClick(locator=\"\", locatorType=\"Xpath\", element=\"checkBox\")\n\n def update_xpath(self,old_xpath,text_to_update,name):\n new_xpath = old_xpath.replace(\"text_to_update\", name)\n self.log.info(\"Updated Xpath is as follow \" + new_xpath) \n return new_xpath\n \n # def check_for_element_precence(self,):\n \n def select_check_box(self,new_xpath,name):\n self.waitForElement(locator= new_xpath, locatorType=\"Xpath\", timeout=30, pollFrequency=1)\n #self.isElementPresent(locator= new_xpath, locatorType=\"Xpath\", element= \"name\"):\n self.elementClick(locator=new_xpath, locatorType=\"Xpath\", element=name)\n self.log.info(\"related check box selected successfully\") \n \n def delete_entries(self,new_xpath):\n self.waitForElement(locator= new_xpath, locatorType=\"Xpath\", element= \"xpath of new entry\")\n self.elementClick(locator=self._delete_button, locatorType=\"Xpath\", element=\"delete button\") \n self.log.info(\"successfully clicked on delete button\") \n ##--------------------------------------------------------------------------------------------------------------------##\n \n def close_Whatfix_Windows(self):\n self.log.info(\"waiting for the element\")\n time.sleep(10)\n self.switch_to(locator=self._iframe,locatorType=\"Xpath\")\n self.log.info(\"switching frame now\") \n self.elementClick(locator=self._whatfix, locatorType=\"Xpath\", element=\"whatfix\")\n self.switch_to_default()\n\n def navigateControllerPage(self):\n time.sleep(4)\n self.waitForElement(self._controllers_gateways_menu, locatorType=\"Xpath\", pollFrequency=1) \n self.elementClick(self._controllers_gateways_menu, locatorType=\"Xpath\", element=\"controller_&_gateway_button\")\n self.waitForElement(self._controller_menu, locatorType=\"Xpath\", pollFrequency=1) \n self.elementClick(self._controller_menu, locatorType=\"Xpath\", element = \"controller_button\")\n #self.verifyPageTitle(self, \"titleToVerify\")\n\n def navigate_to_Controller_Add_window(self): \n self.navigateControllerPage()\n self.clickAddButton()\n self.verifyAddWindow() \n \n def add_Single_Controller(self, cloudServer=\"\", controllerName=\"\", protocol=\"\", port =\"\", internalNetwork=\"\", netmaskBit=\"\" ): \n time.sleep(2)\n self.log.info(\"clear all data field\")\n self.clearFields()\n self.log.info(\"select cloud server\")\n self.selectCloudServerDropDown(cloudServer)\n self.log.info(\"Enter controller name\")\n self.enterControllerName(controllerName)\n self.log.info(\"select protocol\")\n self.selectProtocol(protocol)\n self.log.info(\"enter port number\")\n self.enterPortNumber(port)\n self.log.info(\"enter IP address / network address\")\n self.enterInternalNetwork(internalNetwork)\n self.log.info(\"enter net mask bit\")\n self.enterNetmaskBit(netmaskBit)\n self.log.info(\"click on save button to save data\")\n self.clickSaveButton()\n self.log.info(\"close Add window\")\n self.closeAddWindow()\n self.log.info(\"refresh controller page\")\n self.driver.refresh()\n self.log.info(\"reload page and wait up to 30 second \")\n self.driver.set_page_load_timeout(30)\n self.log.info(\"\")\n \n def addMultipleController(self, cloudServer, name, protocol, port, internalNetwork, netmaskBits):\n self.selectCloudServerDropDown(cloudServer)\n self.enterControllerName(name)\n self.selectProtocol(protocol)\n self.enterInternalNetwork(internalNetwork)\n self.enterNetmaskBit(netmaskBits)\n self.clickSaveButton()\n 
\n def delete_Sigle_Controller(self,name):\n # change Xpath at run time\n new_xpath = self.update_xpath(self._controller_entry, \"controller\", name)\n # search what to delete\n value = self.verify_Controller_entry(name)\n if value:\n self.select_check_box(new_xpath, name)\n self.delete_entries(new_xpath,name)\n value1 = self.verify_Controller_entry(self,name)\n if value1 :\n self.log.info(\"Fail to delete (\"+name+\") controller\")\n return False\n else :\n self.log.info(name+ \" controller successfully deleted\")\n return True\n else :\n self.log.info(\"desire controller \" + name + \" doesn't found\")\n return False\n # \n \n # select delete button\n # click on delete button\n \n \n \n # def deleteMultipaleController(self,):\n \n \n # def deleteAll Controller(self):\n \n\n\n\n # def single_Delete(self):\n\n\n\n\n" }, { "alpha_fraction": 0.5995762944221497, "alphanum_fraction": 0.6033898591995239, "avg_line_length": 22.366336822509766, "blob_id": "a8910357994f0c94afc0b2a107ad7cbf08f5ea2d", "content_id": "35dc596411970d07be799fd7a70e5ff50a2f2dad", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2360, "license_type": "permissive", "max_line_length": 77, "num_lines": 101, "path": "/test/examples/example3/mymodule.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "import random\n\ndatabase = None\nweb_server = None\n\nclass AuthException(Exception):\n pass\n\nclass UserNotFoundException(Exception):\n pass\n\n\ndef create_database():\n global database\n database = { 'users':{} }\n\ndef destroy_database():\n global database\n database = None\n\n\ndef get_admin_client():\n return UserServiceClient(0, { 'username':\"admin\", 'password':None })\n\ndef login(user_settings):\n for user in database['users'].values():\n if user.username == user_settings['username']:\n if user.password != user_settings['password']:\n raise UserNotFoundException()\n else:\n return user\n raise UserNotFoundException()\n\ndef reverse(string):\n \"\"\"Reverses a string.\"\"\"\n return string[::-1]\n\ndef start_web_server():\n global web_server\n web_server = {\n \"bob\": {\n \"credentials\":\"admin\",\n \"image\":\"default.jpg\"\n }\n }\n\n \ndef bad_start_web_server():\n raise RuntimeError(\"Error starting service.\")\n\n\ndef stop_web_server():\n global web_server\n web_server = None\n\n\ndef tables_exist():\n return database != None\n\n\nclass UserServiceClient(object):\n\n ADMIN = \"admin\"\n NOBODY = \"nobody\"\n\n def __init__(self, id, settings):\n self.id = id\n self.username = settings['username']\n self.password = settings['password']\n\n @property\n def check_credentials(self):\n return web_server[self.username][\"credentials\"]\n\n def create_user(self, settings):\n if self.id != 0:\n raise AuthException(\"Must be an Admin to perform this function.\")\n random.seed()\n id = random.randint(1, 1000)\n settings['id'] = id\n user = UserServiceClient(id, settings)\n database['users'][id] = user\n web_server[user.username] = {'image':\"default.jpg\"}\n return user\n\n def delete_user(self, id):\n if self.id != 0:\n raise AuthException(\"Must be an Admin to perform this function.\")\n if id not in database['users']:\n raise UserNotFoundException()\n del database['users'][id]\n\n\n def get_profile_image(self):\n return web_server[self.username][\"image\"]\n\n def set_profile_image(self, new_value):\n web_server[self.username][\"image\"] = new_value\n\n def service_is_up(self):\n return web_server != None\n" }, { 
"alpha_fraction": 0.618461549282074, "alphanum_fraction": 0.618461549282074, "avg_line_length": 25, "blob_id": "38c863399f7582ffd5ed0bce3fbf52ea31511cd5", "content_id": "02baa5f85e4fd49819e4ffd0e7092687d3f08d59", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Ant Build System", "length_bytes": 650, "license_type": "permissive", "max_line_length": 63, "num_lines": 25, "path": "/test/tests/compatability/testng/before_and_after/build.xml", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "<project name=\"BeforeAndAfter\" basedir=\".\" default=\"test\">\n <taskdef name=\"testng\"\n classname=\"org.testng.TestNGAntTask\" />\n\n <path id=\"cp\">\n <pathelement path=\"build/classes\"/>\n </path>\n\n <target name=\"clean\">\n <delete dir=\"build\"/>\n </target>\n\n <target name=\"compile\">\n <mkdir dir=\"build/classes\"/>\n <javac srcdir=\"src/java\" destdir=\"build/classes\"/>\n </target>\n\n <target name=\"test\" depends=\"compile\">\n <testng classpathref=\"cp\" groups=\"BeforeAndAfter\">\n <classfileset dir=\"build/classes\" includes=\"**/*.class\"/>\n <!-- <xmlfileset dir=\"\" includes=\"testng.xml\"/> -->\n </testng>\n </target>\n\n</project>\n" }, { "alpha_fraction": 0.5860108137130737, "alphanum_fraction": 0.5999431610107422, "avg_line_length": 32.4952392578125, "blob_id": "280dbf9e1442f81caa302da828c6b01174113f70", "content_id": "22b615534ed82842739a114cbf60d810e1321a0c", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3517, "license_type": "permissive", "max_line_length": 79, "num_lines": 105, "path": "/test/tests/unit/test_check.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "# Copyright (c) 2011 Rackspace\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\n\"\"\"Tests the internal logic of the proboscis module.\"\"\"\n\n\nimport unittest\n\n\nfrom proboscis.asserts import Check\nfrom proboscis.asserts import ASSERTION_ERROR\nfrom proboscis.asserts import assert_raises\nfrom proboscis.asserts import assert_true\nfrom proboscis.asserts import assert_false\nfrom proboscis.asserts import fail\nfrom proboscis.check import get_stack_trace_of_caller\n\n\nclass TestCheckerNoWithBlock(unittest.TestCase):\n\n def test_should_simply_raise(self):\n check = Check()\n assert_raises(ASSERTION_ERROR, check.equal, \"HI\", \"BYE\")\n\n\nclass TestCheckerWithBlock(unittest.TestCase):\n\n def test_when_no_failures_occur_nothing_happens(self):\n with Check() as check:\n check.equal(\"HI\", \"HI\")\n\n def test_when_no_failures_occur_nothing_happens(self):\n with Check() as check:\n check.equal(\"HI\", \"HI\")\n\n def test_single_failure_is_presented(self):\n try:\n with Check() as check:\n check.equal(4, 6)\n fail(\"Expected an assertion!\")\n except ASSERTION_ERROR as ae:\n assert_true(\"4 != 6\" in str(ae), str(ae))\n\n def test_multiple_failures_are_presented(self):\n try:\n with Check() as c:\n c.equal(2,27)\n c.equal(\"BEE\", \"BEE\")\n c.equal(39, 37)\n c.equal(\"CAT\", \"RAT\")\n fail(\"Expected an assertion!\")\n except ASSERTION_ERROR as ae:\n msg = str(ae)\n assert_true(\"2 != 27\" in msg, msg)\n assert_true(\"39 != 37\" in msg, msg)\n assert_true(\"'CAT' != 'RAT'\" in msg, msg)\n\n def test_when_no_failures_happen_but_an_error_occurs(self):\n # The exception is *not* wrapped as ASSERTION_ERROR because no failures\n # occur.\n def check_func():\n with Check() as c:\n c.equal(2,2)\n c.equal(\"BEE\", \"BEE\")\n c.equal(37, 37)\n raise RuntimeError(\"Unexplained error!\")\n c.equal(\"CAT\", \"RAT\")\n assert_raises(RuntimeError, check_func)\n\n def test_when_failures_and_an_error_occurs(self):\n try:\n with Check() as c:\n c.equal(2,27)\n c.equal(\"BEE\", \"BEE\")\n c.equal(39, 37)\n raise RuntimeError(\"Unexplained error!\")\n c.equal(\"CAT\", \"RAT\") # This is never reached.\n except ASSERTION_ERROR as ae:\n msg = str(ae)\n assert_true(\"2 != 27\" in msg, msg)\n assert_true(\"39 != 37\" in msg, msg)\n assert_false(\"CAT != RAT\" in msg, msg)\n assert_true(\"RuntimeError: Unexplained error!\" in msg, msg)\n\n\nclass TestOverShortenStackTrace(unittest.TestCase):\n\n def test_should_cut_down_to_zero_and_not_raise(self):\n get_stack_trace_of_caller(830)\n\n\nif __name__ == \"__main__\":\n unittest.TestProgram()\n" }, { "alpha_fraction": 0.6741328835487366, "alphanum_fraction": 0.6751567721366882, "avg_line_length": 31.966245651245117, "blob_id": "01c651141cf52477013f858c52e5d5a7d18b116a", "content_id": "090a9942835ccc661a3ccf6c3fdb8e3a73ce5509", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7813, "license_type": "permissive", "max_line_length": 79, "num_lines": 237, "path": "/test/proboscis/asserts.py", "repo_name": "gauravinstasafe/com.instasafev2", "src_encoding": "UTF-8", "text": "# Copyright (c) 2011 Rackspace\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. 
You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\n\"\"\"Assert functions with a parameter order of actual_value, expected_value.\n\nThis module contains many stand-ins for functions in Nose.tools. It is also\na clone of TestNG's Assert class with the static methods changed to functions,\nand the term \"equals\" changed to simply \"equal\" to be more Pythonic.\n\nThere are also a few original assertions methods and the class Check.\n\nThis module should be preferred when Nose is not always available.\n\n\"\"\"\n\n\nimport sys\nimport traceback\n\nfrom proboscis import compatability\n\nASSERTION_ERROR=AssertionError\n# Setting this causes stack traces shown by unittest and nose to stop before\n# this moudle. It feels dirty but modifying the traceback is even worse.\n__unittest = True\n\n\ndef assert_equal(actual, expected, message=None):\n \"\"\"Asserts that the two values are equal.\n\n :param actual: The actual value.\n :param expected: The expected value.\n :param message: A message to show in the event of a failure.\n \"\"\"\n #TODO: assert equal with dictionaries, arrays, etc\n if actual == expected:\n return\n if not message:\n try:\n message = \"%r != %r\" % (actual, expected)\n except Exception:\n message = \"The actual value did not equal the expected one.\"\n raise ASSERTION_ERROR(message)\n\n\ndef assert_false(condition, message=None):\n \"\"\"Asserts that the given condition is false.\n\n :param condition: Must be true.\n :param message: A message to show in the event of failure.\n \"\"\"\n if condition:\n if not message:\n message = \"Condition was True.\"\n raise ASSERTION_ERROR(message)\n\n\ndef assert_is(actual, expected, message=None):\n \"\"\"Asserts that the two variables share the same identity.\n\n :param actual: A variable which has the actual identity.\n :param expected: The variable which has the expected variable.\n :param message: A message to show in the event of failure.\n\n \"\"\"\n #TODO: assert equal with dictionaries, arrays, etc\n if actual is expected:\n return\n if not message:\n try:\n message = \"%r is not %r\" % (actual, expected)\n except Exception:\n message = \"The actual value is not the expected one.\"\n raise ASSERTION_ERROR(message)\n\n\ndef assert_is_none(value, message=None):\n \"\"\"Asserts that the given value is None.\n\n :param value: The value which is tested for nothingness.\n :param message: A message to show in the event of failure.\n \"\"\"\n #TODO: assert equal with dictionaries, arrays, etc\n if value is None:\n return\n if not message:\n try:\n message = \"%r is not None\" % value\n except Exception:\n message = \"The value is not None.\"\n raise ASSERTION_ERROR(message)\n\n\ndef assert_is_not(actual, expected, message=None):\n \"\"\"Asserts that the two variables has different identities.\n\n :param actual: A variable which has the actual identity.\n :param expected: A variable which has the expected identity.\n :param message: The assertion message if the variables share an identity.\n \"\"\"\n #TODO: assert equal with dictionaries, arrays, etc\n if actual is not expected:\n return\n if not message:\n try:\n message = \"%r is %r\" % (actual, expected)\n except 
Exception:\n message = \"The actual value is the expected one.\"\n raise ASSERTION_ERROR(message)\n\n\ndef assert_is_not_none(value, message=None):\n \"\"\"Asserts that a value is anything other than None.\n\n :param value: A variable which is expected to be anything other than None.\n :param message: The assertion message if the variable is None.\n \"\"\"\n #TODO: assert equal with dictionaries, arrays, etc\n if value is not None:\n return\n if not message:\n message = \"The value is None.\"\n raise ASSERTION_ERROR(message)\n\ndef assert_not_equal(actual, expected, message=None):\n \"\"\"Asserts that the two values are not equal.\n\n :param actual: The actual value.\n :param expected: The expected value.\n :param message: The assertion message if the variables are equal.\n \"\"\"\n if (actual != expected) and not (actual == expected):\n return\n if not message:\n try:\n message = \"%r == %r\" % (actual, expected)\n except Exception:\n message = \"The actual value equalled the expected one.\"\n raise ASSERTION_ERROR(message)\n\n\ndef assert_true(condition, message=None):\n \"\"\"Asserts that the given value is True.\n\n :param condition: A value that must be True.\n :param message: The assertion message if the value is not True.\n \"\"\"\n if not condition:\n if not message:\n message = \"Condition was False.\"\n raise ASSERTION_ERROR(message)\n\n\ndef assert_raises(exception_type, function, *args, **kwargs):\n \"\"\"Calls function and fails the test if an exception is not raised.\n\n Unlike nose.Tool's assert_raises or TestCase.assertRaises the given\n exception type must match the exactly: if the raised exception is a\n subclass the test will fail. For example, it fails if the exception_type\n param is \"Exception\" but \"RuntimeException\" is raised. To be less demanding\n use assert_raises_instance.\n\n :param exception_type: The exact type of exception to be raised.\n :param function: The function to call, followed by its arguments.\n\n \"\"\"\n actual_exception = compatability.capture_exception(\n lambda : function(*args, **kwargs),\n exception_type)\n if actual_exception is None:\n fail(\"Expected an exception of type %s to be raised.\" % exception_type)\n elif type(actual_exception) != exception_type:\n _a, _b, tb = sys.exc_info()\n info = traceback.format_list(traceback.extract_tb(tb))\n fail(\"Expected a raised exception of type %s, but found type %s. \"\n \"%s\" % (exception_type, type(actual_exception), info))\n return actual_exception\n\n\ndef assert_raises_instance(exception_type, function, *args, **kwargs):\n \"\"\"Calls function and fails the test if an exception is not raised.\n\n The exception thrown must only be an instance of the given type. This means\n if \"Exception\" is expected but \"RuntimeException\" is raised the test will\n still pass. 
For a stricter function see assert_raises.\n\n :param exception_type: The expected exception type.\n :param function: The function to call, followed by its arguments.\n\n \"\"\"\n actual_exception = compatability.capture_exception(\n lambda : function(*args, **kwargs),\n exception_type)\n if actual_exception is None:\n fail(\"Expected an exception of type %s to be raised.\" % exception_type)\n\n\ndef fail(message):\n \"\"\"Fails a test.\n\n :param message: The message to display.\n\n Unlike the other functions in this module the message argument is required.\n\n \"\"\"\n if not message:\n message = \"Test failure.\"\n raise ASSERTION_ERROR(message)\n\n\nfrom proboscis.check import Check\n\n__all__ = [\n 'assert_equal',\n 'assert_false',\n 'assert_is',\n 'assert_is_none',\n 'assert_is_not',\n 'assert_is_not_none',\n 'assert_not_equal',\n 'assert_true',\n 'assert_raises',\n 'assert_raises_instance',\n 'fail',\n]\n" } ]
57
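Aside on the Selenium helpers recorded in the row above: `switch_to` and `pop_up` call `switch_to_frame`, `switch_to_alert`, and `switch_to_default_content`, all of which were deprecated in Selenium 3 and have since been removed, and both wrap failures in bare `except:` clauses that hide real WebDriver errors. A minimal sketch of the current idiom, assuming a plain `driver` and a `(By, value)` locator tuple; the function names are illustrative and do not come from the repository:

```python
# Sketch only: modern equivalents of the deprecated switch_to_* calls above.
# `driver`, `locator` and both function names are placeholders.
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException

def switch_into_frame(driver, locator, timeout=10):
    """Wait for the frame, then switch into it (replaces switch_to_frame)."""
    WebDriverWait(driver, timeout).until(
        EC.frame_to_be_available_and_switch_to_it(locator))
    # When done, driver.switch_to.default_content() restores the main page.

def accept_alert_if_present(driver, timeout=5):
    """Accept a pending alert; report whether one existed (replaces pop_up)."""
    try:
        WebDriverWait(driver, timeout).until(EC.alert_is_present())
        driver.switch_to.alert.accept()
        return True
    except TimeoutException:
        return False
```

Explicit waits also remove the need for the fixed `time.sleep(...)` pauses scattered through the page objects, and catching only `TimeoutException` keeps genuine driver errors visible instead of logging and continuing.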
Bonuseto/webscraping
https://github.com/Bonuseto/webscraping
8bcfd43696b05e26db870648b06a76595090ae56
61139d97ce04ad046ebbdad2b676c91b59e6eed5
c45a188d02b0822c1ea54b9f19ca91d94c28be61
refs/heads/main
2023-06-17T21:53:34.910260
2021-07-14T07:32:03
2021-07-14T07:32:03
385,852,941
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5451040863990784, "alphanum_fraction": 0.5705474019050598, "avg_line_length": 29.634145736694336, "blob_id": "50525023c8ac322dfb476f5e5c3b1c600f77a7ea", "content_id": "e0f62c19f6a3b273ad48f2832e9fa36e3b1eecec", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1297, "license_type": "no_license", "max_line_length": 128, "num_lines": 41, "path": "/main.py", "repo_name": "Bonuseto/webscraping", "src_encoding": "UTF-8", "text": "from User import User\r\n\r\nimport requests\r\n\r\nfrom bs4 import BeautifulSoup as BS\r\n\r\nfile1 = open(\"cases.txt\", \"r+\")\r\nlines = file1.readlines()\r\nfor line in lines:\r\n line = line.split(\",\")\r\n\r\n data = {\r\n 'ioff_application_number': line[2],\r\n 'ioff_application_number_fake': line[3],\r\n 'ioff_application_code': line[4],\r\n 'ioff_application_year': line[5],\r\n 'form_id': 'ioff_application_status_form',\r\n 'honeypot_time': '1626245458|hhSKJ7eadYjR87usUrLNOpNH539nIqSqqwnH_52KkjI',\r\n\r\n }\r\n url = 'https://frs.gov.cz/en/ioff/application-status'\r\n html = requests.post(url, data)\r\n soup = BS(html.text, 'html.parser')\r\n result = ''\r\n warn = soup.findAll('span', {'class': 'alert alert-warning'})\r\n if warn:\r\n result = warn\r\n dang = soup.findAll('span', {'class': 'alert alert-danger'})\r\n if dang:\r\n result = dang\r\n sucs = soup.findAll('span', {'class': 'alert alert-success'})\r\n if sucs:\r\n result = sucs\r\n\r\n user = User(line[0], line[1], line[2], line[3], line[4], line[5], result[0].text)\r\n\r\n f = open(\"results.txt\", \"a\")\r\n f.write(\r\n user.firstname + ',' + user.lastname + ',' + user.appnum + ',' + user.type + ',' + user.year + ',' + user.status + '\\n')\r\n f.close()\r\nfile1.close()\r\n" }, { "alpha_fraction": 0.8303571343421936, "alphanum_fraction": 0.8303571343421936, "avg_line_length": 55, "blob_id": "f315cb52b645fa47d3259c17ba00da012dca8e23", "content_id": "99ac99b1cbec6bae0ee7604ef7fe0fec45d11f7d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 112, "license_type": "no_license", "max_line_length": 97, "num_lines": 2, "path": "/README.md", "repo_name": "Bonuseto/webscraping", "src_encoding": "UTF-8", "text": "# webscraping\nThis program checking visa application in file and then uploading data with status in result file\n" }, { "alpha_fraction": 0.5697329640388489, "alphanum_fraction": 0.5697329640388489, "avg_line_length": 28.636363983154297, "blob_id": "2101ee8f4d855873a7684cdd4f13658a12fb2ebf", "content_id": "7ad0b439aef64511958d8612b93e67dc09aed815", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 337, "license_type": "no_license", "max_line_length": 82, "num_lines": 11, "path": "/User.py", "repo_name": "Bonuseto/webscraping", "src_encoding": "UTF-8", "text": "class User:\r\n \"\"\"User class\"\"\"\r\n\r\n def __init__(self, firstname, lastname, appnum, appnumxx, type, year, status):\r\n self.firstname = firstname\r\n self.lastname = lastname\r\n self.appnum = appnum\r\n self.appnumxx = appnumxx\r\n self.type = type\r\n self.year = year\r\n self.status = status\r\n" } ]
3
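Aside on the `main.py` recorded in the row above: it probes the response for `alert-warning`, then `alert-danger`, then `alert-success`, and finally indexes `result[0]`; when no alert matches, `result` is still the initial `''` and the indexing raises. One grouped CSS lookup covers all three cases. A sketch under that assumption; `extract_status` is an illustrative name, not part of the repository:

```python
# Sketch only: single-lookup replacement for the three findAll() probes.
from bs4 import BeautifulSoup

def extract_status(html_text):
    """Return the first status alert's text, or None when the page has none."""
    soup = BeautifulSoup(html_text, 'html.parser')
    alert = soup.select_one(
        'span.alert-warning, span.alert-danger, span.alert-success')
    return alert.get_text(strip=True) if alert else None
```

Returning `None` instead of `''` lets the caller skip the write to `results.txt` explicitly rather than crash on an empty result.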
nekvinder/Sudoku-Backend-Python
https://github.com/nekvinder/Sudoku-Backend-Python
b526f8a0e8e0d45efc297d6b95a4db279f42ef7c
5ef9b9dcc8622acb5e2a69f0de52c34f058fe842
842f6cd1d83fb8268770ffdecad9c11a325419dd
refs/heads/master
2022-12-16T21:35:49.837615
2020-09-15T15:44:02
2020-09-15T15:44:02
295,774,694
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6337209343910217, "alphanum_fraction": 0.6569767594337463, "avg_line_length": 20.625, "blob_id": "b1e4931915cb7abafe40db6971b48160f613988c", "content_id": "35b795220aaf96f947eb534be8590df6af77d60c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 172, "license_type": "no_license", "max_line_length": 47, "num_lines": 8, "path": "/runpythonWatchdog.py", "repo_name": "nekvinder/Sudoku-Backend-Python", "src_encoding": "UTF-8", "text": "from watchgod import run_process\nimport os\n\ndef foobar(a, b, c):\n os.system(\"python3 app_with_python_only.py\")\n # print(a,b,c)\n\nrun_process('.', foobar, args=(1, 2, 3))" }, { "alpha_fraction": 0.770257830619812, "alphanum_fraction": 0.7771639227867126, "avg_line_length": 40.769229888916016, "blob_id": "eb2890403252a2675f7e8a79137944f74aedf17b", "content_id": "e3605e5159f167727e05b3326fa435e5e67b878f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2172, "license_type": "no_license", "max_line_length": 196, "num_lines": 52, "path": "/README.md", "repo_name": "nekvinder/Sudoku-Backend-Python", "src_encoding": "UTF-8", "text": "# Visual Sudoku Solver\n\nA simple python program to solve basic sudoku puzzles with a dynamically generated visual interpretation of the internal states of solver algorithm.\n\n## Working\n\nThe driver code loads all the puzzles from *sudokus.txt* into a numpy array(9x9)'s list.\n\nThen it iterates over all the puzzles solving each,showing the visualizations, and skipping over after a set threshold number of tries if it can not solve the puzzle.\n\nThe visualizations speed in controlled by *speed* variable with time in milliseconds for displaying each state.\n\nThe Solver consists of three classes and a driver code. The 3 classes are as follows:\n\n### SudokuValidate\n\nThis class is initialize with a **board** containing the sudoku, and its method **getValidator()** returns the validation array required by the SudokuDesign class.\n\n### SudokuDesign\n\nThis is responsible to generate all the visual elements for the sudoku using the opencv library.\n\nIts object initialize with two arguments:\n* a **board** object that is numpy array of 9x9 containing the sudoku\n* a **Validator** object which is numpy array containing 9x9 array containing invalid elements with *value **1*** else *0*\n\n### SudokuSolver\n\nThis class implements the solving algorithm. 
It is initialized with the board's representation in numpy array of 9x9.\n\nIt contains two major methods:\n\n* **possibleValues(r,c)** : this method finds out all the possible value at any given cell location in form of row and column coordinates.\n* **getSolvedBoard()** : this method simply applies possibleValues() method on each cell in the board, and where-ever it finds only single possible value in any cell, it fills up the cell with it.\n\n## Example\n\nSample Puzzle given is :\n\n![alt text](images/sample.png)\n\n\nThe Iterations of the solver are shown as :\n\n![alt text](images/solution.gif)\n\n## Limitations\n\nThis implements a very basic solving algorithm, hence can only sovle easier puzzlues.\nHence in the driver code we have used **maxtries** threshold that limits the solving and evaluating possible values cycles.\n\nIn the 50 sudoku puzzles taken from [here](http://lipas.uwasa.fi/~timan/sudoku/) it solves only 13 puzzles.\n" }, { "alpha_fraction": 0.5, "alphanum_fraction": 0.7222222089767456, "avg_line_length": 36, "blob_id": "331e3347500b40bf9cbd723e05f30da4cc86b3a9", "content_id": "93d76f3217d9859b97960053b9243003ea3e5ca6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 36, "license_type": "no_license", "max_line_length": 36, "num_lines": 1, "path": "/runGunicornServer.sh", "repo_name": "nekvinder/Sudoku-Backend-Python", "src_encoding": "UTF-8", "text": "gunicorn --bind 0.0.0.0:5001 app:app" }, { "alpha_fraction": 0.49955567717552185, "alphanum_fraction": 0.5321386456489563, "avg_line_length": 31.152381896972656, "blob_id": "8a98cae5a0134c7f33b360a71ff283dc0a1b6cbd", "content_id": "6bb0033605267a7fde7f44215d74e92c3edf35d4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6752, "license_type": "no_license", "max_line_length": 126, "num_lines": 210, "path": "/app.py", "repo_name": "nekvinder/Sudoku-Backend-Python", "src_encoding": "UTF-8", "text": "import imageio\nimport os\nfrom flask import Flask, url_for, send_file\nfrom markupsafe import escape\nimport cv2 as cv\nimport numpy as np\nimport math\nfrom flask_cors import CORS\n\n\nclass SudokuValidate:\n def __init__(self, board):\n self.board = board\n\n def updateBoard(self, board):\n self.board = board\n\n def __Repeat(self, x):\n _size = len(x)\n repeated = [0, 0, 0, 0, 0, 0, 0, 0, 0]\n for i in range(_size):\n k = i + 1\n for j in range(k, _size):\n if x[i] == x[j] and x[i] not in repeated:\n repeated[i], repeated[j] = 1, 1\n return repeated\n\n def __getRowValidator(self, r):\n return self.__Repeat(self.board[r-1])\n\n def __getColValidator(self, c):\n return self.__Repeat(self.board[:, c-1])\n\n def __getBlockValidator(self, blockRow, blockCol):\n return np.reshape(self.__Repeat(np.reshape(self.board[blockRow*3:blockRow *\n 3+3, blockCol*3:blockCol*3+3], 9)), (3, 3))\n\n def getValidator(self):\n tmp = np.zeros((9, 9), int)\n for x in range(3):\n for y in range(3):\n tmp[x*3:x * 3+3, y*3:y*3 +\n 3] = self.__getBlockValidator(x, y)\n\n for x in range(9):\n tmp[x] = np.add(tmp[x], self.__getRowValidator(x+1))\n tmp[:, x] = np.add(tmp[:, x], self.__getColValidator(x+1))\n\n return(tmp)\n\n\nclass SudokuDesign:\n\n def __init__(self, board, validator):\n # self.name = name\n self.board = board\n self.validator = validator\n self.create_background()\n\n def updateBoard(self, board):\n self.board = board\n\n def getBoardImage(self, frame):\n frame = self.__create_sudoku(frame, self.board)\n return frame\n\n 
def create_background(self):\n frame = np.ones(shape=[450, 450, 3], dtype=np.uint8)\n frame = cv.rectangle(frame, (0, 0), (500, 500), (255, 255, 255), -1)\n return frame\n\n def __create_one_box(self, frame, r, c, value, valid):\n r, c = c-1, r-1\n frame = cv.rectangle(frame, ((0+r)*50, (0+c)*50),\n (50+((0+r)*50), 50+((0+c)*50)), (200, 200, 200), 2)\n frame = cv.putText(frame, value, (((0+r)*50)+12, (50+((0+c)*50)) - 12), cv.FONT_HERSHEY_SIMPLEX,\n 1, (255 if valid else 0, 0, 0 if valid else 255), 2, cv.LINE_AA)\n return frame\n\n def __addUnitPossibles(self, frame, r, c, possibles: set):\n frame = cv.putText(frame, \"\".join(map(str, possibles)), (((0+c)*50)+2, (50+((0+r)*50)) - 37), cv.FONT_HERSHEY_SIMPLEX,\n 0.4, (100, 50, 100), 1, cv.LINE_AA)\n return frame\n\n def addPossibles(self, frame, sdkSolver):\n for x in range(9):\n for y in range(9):\n if int(self.board[x, y]) < 1:\n self.__addUnitPossibles(frame,\n x, y, sdkSolver.possibleValues(x+1, y+1))\n return frame\n\n def __create_one_block(self, frame, r, c, values):\n for x in range(3*(r-1), r*3):\n for y in range(3*(c-1), c*3):\n frame = self.__create_one_box(frame,\n x+1, y+1, str(values[x, y]), True if self.validator[x, y] == 0 else False)\n frame = cv.rectangle(frame, (50*3*(c-1), 50*3*(r-1)),\n (50*3*(c), 50*3*(r)), (0, 0, 0), 4)\n return frame\n\n def __create_sudoku(self, frame, values):\n for x in range(1, 4):\n for y in range(1, 4):\n frame = self.__create_one_block(frame, x, y, values)\n return frame\n\n\nclass SudokuSolver:\n def __init__(self, board):\n self.board = board\n\n def updateBoard(self, board):\n self.board = board\n\n def __getRow(self, r) -> set:\n return set(self.board[r-1])-{0}\n\n def __getCol(self, c) -> set:\n return set(self.board[:, c-1])-{0}\n\n def __getBlock(self, x, y) -> set:\n x, y = math.ceil(x/3)-1, math.ceil(y/3)-1\n return set(np.reshape(self.board[x*3:(x*3)+3, y*3:(y*3)+3], (9)))-{0}\n\n def possibleValues(self, r, c):\n return set(c for c in range(1, 10)) - (self.__getRow(r) | self.__getCol(c) | self.__getBlock(r, c))\n\n def getSolvedBoard(self):\n for x in range(9):\n for y in range(9):\n if int(self.board[x, y]) < 1:\n vals = self.possibleValues(x+1, y+1)\n if len(vals) == 1:\n self.board[x, y] = vals.pop()\n return self.board\n\n\ndef solveBoard(board):\n solDir = \"solutions/\"\n dirlen = (len(next(os.walk(solDir))[1]))\n if not os.path.exists(solDir+'/'+str(dirlen)):\n os.makedirs(solDir+'/'+str(dirlen))\n\n cnt = \"nek\"\n maxtries = 20\n totalFails = 0\n tries = 0\n sdkValidator = SudokuValidate(board)\n validator = sdkValidator.getValidator()\n sdkDesigner = SudokuDesign(board, validator)\n sdkSolver = SudokuSolver(board)\n speed = 1\n while 0 in set(np.reshape(board, (81))):\n fname = '{}{}/{}_a.jpg'.format(solDir, dirlen, tries)\n # print(fname)\n frame = sdkDesigner.create_background()\n sdkDesigner.addPossibles(frame, sdkSolver)\n frame = sdkDesigner.getBoardImage(frame)\n cv.imwrite(fname, frame)\n # cv.imshow('image', frame)\n # cv.waitKey(speed)\n\n f2 = sdkDesigner.create_background()\n sdkDesigner.updateBoard(sdkSolver.getSolvedBoard())\n f2 = sdkDesigner.getBoardImage(f2)\n fname = '{}{}/{}_b.jpg'.format(solDir, dirlen, tries)\n cv.imwrite(fname, f2)\n # cv.imshow('image', f2)\n # cv.waitKey(speed)\n tries += 1\n if tries > maxtries:\n totalFails += 1\n break\n\n # break\n print(\"Total Fails:{}\".format(totalFails))\n # cv.waitKey(1000)\n cv.destroyAllWindows()\n # print(\"returning\")\n return dirlen\n\n\ndef makeGif(dirname):\n images = []\n files = 
sorted(os.listdir(\"solutions/\"+str(dirname)))\n for filename in files:\n images.append(imageio.imread(\"solutions/\"+str(dirname)+\"/\"+filename))\n imageio.mimsave((\"solutions/\"+str(dirname)+'/movie.gif'),\n images, format=\"GIF\", duration=2)\n return \"solutions/\"+str(dirname)+'/movie.gif'\n\n\napp = Flask(__name__)\nCORS(app)\napp.config[\"DEBUG\"] = True\n\n\[email protected]('/giveSolution/<puzzle>', methods=['GET'])\ndef solve(puzzle):\n # try:\n p = puzzle\n p = np.reshape(np.array(list(p), int), (9, 9))\n return send_file(makeGif(str(solveBoard(p))), mimetype='image/gif')\n # except Exception as e:\n # return str(e)\n\n\nif __name__ == '__main__':\n app.run()\n" }, { "alpha_fraction": 0.4683460295200348, "alphanum_fraction": 0.5071262717247009, "avg_line_length": 31.79347801208496, "blob_id": "4bc9c6141d185b4231411ef45dcb6376e96fbc72", "content_id": "f9e1c5d5401f167f726edb5a7c2f0d95d17b79bc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6034, "license_type": "no_license", "max_line_length": 126, "num_lines": 184, "path": "/design.py", "repo_name": "nekvinder/Sudoku-Backend-Python", "src_encoding": "UTF-8", "text": "import cv2 as cv\nimport numpy as np\nimport math\n\n\nclass SudokuValidate:\n def __init__(self, board):\n self.board = board\n\n def updateBoard(self, board):\n self.board = board\n\n def __Repeat(self, x):\n _size = len(x)\n repeated = [0, 0, 0, 0, 0, 0, 0, 0, 0]\n for i in range(_size):\n k = i + 1\n for j in range(k, _size):\n if x[i] == x[j] and x[i] not in repeated:\n repeated[i], repeated[j] = 1, 1\n return repeated\n\n def __getRowValidator(self, r):\n return self.__Repeat(self.board[r-1])\n\n def __getColValidator(self, c):\n return self.__Repeat(self.board[:, c-1])\n\n def __getBlockValidator(self, blockRow, blockCol):\n return np.reshape(self.__Repeat(np.reshape(self.board[blockRow*3:blockRow *\n 3+3, blockCol*3:blockCol*3+3], 9)), (3, 3))\n\n def getValidator(self):\n tmp = np.zeros((9, 9), int)\n for x in range(3):\n for y in range(3):\n tmp[x*3:x * 3+3, y*3:y*3 +\n 3] = self.__getBlockValidator(x, y)\n\n for x in range(9):\n tmp[x] = np.add(tmp[x], self.__getRowValidator(x+1))\n tmp[:, x] = np.add(tmp[:, x], self.__getColValidator(x+1))\n\n return(tmp)\n\n\nclass SudokuDesign:\n\n def __init__(self, board, validator):\n # self.name = name\n self.board = board\n self.validator = validator\n self.create_background()\n\n def updateBoard(self, board):\n self.board = board\n\n def getBoardImage(self, frame):\n frame = self.__create_sudoku(frame, self.board)\n return frame\n\n def create_background(self):\n frame = np.ones(shape=[500, 500, 3], dtype=np.uint8)\n frame = cv.rectangle(frame, (0, 0), (500, 500), (255, 255, 255), -1)\n return frame\n\n def __create_one_box(self, frame, r, c, value, valid):\n r, c = c-1, r-1\n frame = cv.rectangle(frame, ((0+r)*50, (0+c)*50),\n (50+((0+r)*50), 50+((0+c)*50)), (200, 200, 200), 2)\n frame = cv.putText(frame, value, (((0+r)*50)+12, (50+((0+c)*50)) - 12), cv.FONT_HERSHEY_SIMPLEX,\n 1, (255 if valid else 0, 0, 0 if valid else 255), 2, cv.LINE_AA)\n return frame\n\n def __addUnitPossibles(self, frame, r, c, possibles: set):\n frame = cv.putText(frame, \"\".join(map(str, possibles)), (((0+c)*50)+2, (50+((0+r)*50)) - 37), cv.FONT_HERSHEY_SIMPLEX,\n 0.4, (100, 50, 100), 1, cv.LINE_AA)\n return frame\n\n def addPossibles(self, frame, sdkSolver):\n for x in range(9):\n for y in range(9):\n if int(self.board[x, y]) < 1:\n self.__addUnitPossibles(frame,\n x, y, 
sdkSolver.possibleValues(x+1, y+1))\n return frame\n\n def __create_one_block(self, frame, r, c, values):\n for x in range(3*(r-1), r*3):\n for y in range(3*(c-1), c*3):\n frame = self.__create_one_box(frame,\n x+1, y+1, str(values[x, y]), True if self.validator[x, y] == 0 else False)\n frame = cv.rectangle(frame, (50*3*(c-1), 50*3*(r-1)),\n (50*3*(c), 50*3*(r)), (0, 0, 0), 4)\n return frame\n\n def __create_sudoku(self, frame, values):\n for x in range(1, 4):\n for y in range(1, 4):\n frame = self.__create_one_block(frame, x, y, values)\n return frame\n\n\nclass SudokuSolver:\n def __init__(self, board):\n self.board = board\n\n def updateBoard(self, board):\n self.board = board\n\n def __getRow(self, r) -> set:\n return set(self.board[r-1])-{0}\n\n def __getCol(self, c) -> set:\n return set(self.board[:, c-1])-{0}\n\n def __getBlock(self, x, y) -> set:\n x, y = math.ceil(x/3)-1, math.ceil(y/3)-1\n return set(np.reshape(self.board[x*3:(x*3)+3, y*3:(y*3)+3], (9)))-{0}\n\n def possibleValues(self, r, c):\n return set(c for c in range(1, 10)) - (self.__getRow(r) | self.__getCol(c) | self.__getBlock(r, c))\n\n def getSolvedBoard(self):\n for x in range(9):\n for y in range(9):\n if int(self.board[x, y]) < 1:\n vals = self.possibleValues(x+1, y+1)\n if len(vals) == 1:\n self.board[x, y] = vals.pop()\n return self.board\n\n\ndef solveX():\n f = open(\"sudokus.txt\", \"r\")\n arr = f.readlines()\n f.close()\n boards = []\n for i in range(0, 480, 10):\n lst = \"\"\n for x in arr[i:i+9]:\n lst += x[:9]\n boards.append(np.reshape(np.array(list(lst), int), (9, 9)))\n\n # print(boards)\n\n maxtries = 20\n totalFails = 0\n cnt = -1\n for board in boards[31:32]:\n print(board)\n cnt = cnt+1\n tries = 0\n sdkValidator = SudokuValidate(board)\n validator = sdkValidator.getValidator()\n sdkDesigner = SudokuDesign(board, validator)\n sdkSolver = SudokuSolver(board)\n speed = 100\n while 0 in set(np.reshape(board, (81))):\n frame = sdkDesigner.create_background()\n sdkDesigner.addPossibles(frame, sdkSolver)\n frame = sdkDesigner.getBoardImage(frame)\n # cv.imwrite('solutions/image{}_{}.jpg'.format(cnt, tries), frame)\n cv.imshow('image', frame)\n cv.waitKey(speed)\n\n f2 = sdkDesigner.create_background()\n sdkDesigner.updateBoard(sdkSolver.getSolvedBoard())\n f2 = sdkDesigner.getBoardImage(f2)\n # cv.imwrite('solutions/image{}_{}_sol.jpg'.format(cnt, tries), f2)\n cv.imshow('image', f2)\n cv.waitKey(speed)\n tries += 1\n if tries > maxtries:\n totalFails += 1\n break\n\n # break\n print(\"Total Fails:{}\".format(totalFails))\n cv.waitKey(1000)\n cv.destroyAllWindows()\n\n\nsolveX()\n" } ]
5
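Aside on the solver recorded in the row above: the README's `possibleValues(r, c)` is exactly {1..9} minus the union of the values already placed in the cell's row, column and 3x3 block, and `getSolvedBoard()` fills any cell whose candidate set is a singleton. A minimal sketch of that step, assuming a 0-indexed 9x9 NumPy array with 0 for empty cells (the repository's own method is 1-indexed and locates the block with `math.ceil`):

```python
# Sketch only: the candidate computation the README describes.
# `board` is a 9x9 numpy array, 0 meaning empty; indices are 0-based here.
import numpy as np

def possible_values(board, r, c):
    """Candidates for cell (r, c): {1..9} minus row, column and block values."""
    row = set(board[r, :])
    col = set(board[:, c])
    br, bc = 3 * (r // 3), 3 * (c // 3)  # top-left corner of the 3x3 block
    block = set(board[br:br + 3, bc:bc + 3].flatten())
    return set(range(1, 10)) - (row | col | block)  # zeros fall out here

# Example: an empty board leaves every digit available for cell (0, 0).
assert possible_values(np.zeros((9, 9), dtype=int), 0, 0) == set(range(1, 10))
```

Because the driver only fills singletons, propagation stalls on harder grids, which is why the code caps each attempt with `maxtries` and the README reports 13 of the 50 sample puzzles solved.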
StanislavKraev/jb_code
https://github.com/StanislavKraev/jb_code
87a08e2acade3688b5da240240c8040b51724525
9d178440c130590d2027bca976b54e6a33bc0c6d
32c5cb32c804ce81679d692fc04963a73857ba1c
refs/heads/master
2020-12-24T19:36:01.410748
2016-04-27T18:16:11
2016-04-27T18:16:11
57,235,797
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5380991697311401, "alphanum_fraction": 0.5436409115791321, "avg_line_length": 36.98947525024414, "blob_id": "7eac46e8e037357d01880a03ef0057b863d8d649", "content_id": "481452baff280f2e79c74d65329349a238142829", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3844, "license_type": "no_license", "max_line_length": 119, "num_lines": 95, "path": "/app/services/notarius/manage_commands/notarius_commands.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport json\nimport os\nimport codecs\n\nfrom manage_commands import BaseManageCommand, get_single\nfrom fw.db.sql_base import db as sqldb\nfrom services.notarius.data_model.models import NotariusObject, ScheduleTypeEnum\n\n\nclass FilenameSimpleValidator(object):\n def validate(self, val):\n if not os.path.exists(val):\n return False\n return True\n\n def get_value(self, value):\n return value\n\n\nclass AddNotariusCommand(BaseManageCommand):\n NAME = \"add_notarius\"\n\n def run(self):\n self.logger.info(u\"Добавление нотариуса\")\n self.logger.info(u'=' * 50)\n\n filename_str = get_single(u'File name: ', validator=FilenameSimpleValidator(), error_hint=u\"File not found\")\n\n with codecs.open(filename_str, encoding='utf8') as f:\n content = f.read()\n try:\n data = json.loads(content)\n notarius = NotariusObject(\n id = data['id'],\n surname = data.get('surname', u''),\n name = data.get('name', u''),\n patronymic = data.get('patronymic', None),\n\n schedule = data['schedule'],\n schedule_caption = data['schedule_caption'],\n title = data['title'],\n address = data['address'],\n region = data['region'],\n metro_station = data.get('metro_station', u'')\n )\n sqldb.session.add(notarius)\n sqldb.session.commit()\n except Exception, ex:\n self.logger.exception(u\"Не удалось прочитать файл. 
Проверьте формат.\")\n\n\nclass ListNotariusCommand(BaseManageCommand):\n NAME = \"list_notarius\"\n\n def run(self):\n self.logger.info(u\"Нотариусы в системе:\")\n self.logger.info(u'=' * 50)\n\n DAYS = {\n 1: u\"понедельник\",\n 2: u\"вторник\",\n 3: u\"среда\",\n 4: u\"четверг\",\n 5: u\"пятница\",\n 6: u\"суббота\",\n 7: u\"воскресенье\"\n }\n\n for notarius in NotariusObject.query.filter():\n self.logger.info(u\"Наименование: %s\" % notarius.title)\n if notarius.patronymic:\n self.logger.info(u\"ФИО: %s %s %s\" % (notarius.surname, notarius.name, notarius.patronymic))\n else:\n self.logger.info(u\"ФИО: %s %s\" % (notarius.surname, notarius.name))\n\n self.logger.info(u\"Адрес: %s\" % json.dumps(notarius.address))\n if notarius.schedule['type'] == ScheduleTypeEnum.ST_NORMAL:\n weekends = set(notarius.schedule['weekends'])\n work_days = {1, 2, 3, 4, 5, 6, 7} - weekends\n\n schedule = u\"%s - рабочий день, %s - выходной\" % (\n u\",\".join([DAYS[i] for i in work_days]), u\",\".join([DAYS[i] for i in weekends]))\n else:\n schedule = u\"%d рабочих, затем %d выходных\" % (\n notarius.schedule['working_days_count'], notarius.schedule['weekends_count'])\n self.logger.info(u\"Расписание: %s\" % schedule)\n\n time = u\"С %s до %s\" % (notarius.schedule['start_time'], notarius.schedule['end_time'])\n if 'lunch_start' in notarius.schedule and 'lunch_end' in notarius.schedule:\n time += u\" с перерывом с %s до %s\" % (notarius.schedule['lunch_start'], notarius.schedule['lunch_end'])\n else:\n time += u\" без обеда\"\n self.logger.info(u\"Время работы: %s\" % time)\n self.logger.info(u\"id: %s\" % notarius.id)\n" }, { "alpha_fraction": 0.5875936150550842, "alphanum_fraction": 0.6051774621009827, "avg_line_length": 48.12799835205078, "blob_id": "d4d4545b79f4be2a70d00f754ca838570f7ff24c", "content_id": "ac1f3d3f1c84bcda18b5d76772e8a44e92544f66", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6325, "license_type": "no_license", "max_line_length": 123, "num_lines": 125, "path": "/jb_tests/test_pack/test_general.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom bson import ObjectId\n\nfrom flask import json\n\nfrom base_test_case import BaseTestCase\nfrom fw.db.sql_base import db as sqldb\nfrom fw.api.args_validators import EmailAddressValidator\nfrom fw.catalogs.models import OkvadObject\nfrom services.ifns.utils.process_okvad import process_okvad\nfrom test_pack.test_api import authorized\n\n\nclass GeneralTestCase(BaseTestCase):\n\n @authorized()\n def test_search_for_okvad(self):\n with self.app.app_context():\n new_ok = OkvadObject(id=str(ObjectId()), okved=u\"100.1\", caption=u\"Образование\", nalog=u\"usn\", parent=None)\n sqldb.session.add(new_ok)\n sqldb.session.commit()\n new_ok = OkvadObject(id=str(ObjectId()), okved=u\"100.2\", caption=u\"Стройка\", nalog=u\"eshn\", parent=None)\n sqldb.session.add(new_ok)\n sqldb.session.commit()\n new_ok = OkvadObject(id=str(ObjectId()), okved=u\"100.3\", caption=u\"Программы\", nalog=u\"usn\", parent=new_ok.id)\n sqldb.session.add(new_ok)\n sqldb.session.commit()\n\n result = self.test_client.get(u'/search_okvad/?title=рой')\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(len(result_data['result']), 1)\n self.assertEqual(len(result_data['result'][0]), 4)\n self.assertIn('_id', result_data['result'][0])\n self.assertIn('caption', result_data['result'][0])\n self.assertIn('code', 
result_data['result'][0])\n self.assertIn('parent', result_data['result'][0])\n\n @authorized()\n def test_search_for_okvad_no_term(self):\n with self.app.app_context():\n new_ok = OkvadObject(id=str(ObjectId()), okved=u\"100.1\", caption=u\"Образование\", nalog=u\"usn\", parent=None)\n sqldb.session.add(new_ok)\n sqldb.session.commit()\n new_ok = OkvadObject(id=str(ObjectId()), okved=u\"100.2\", caption=u\"Стройка\", nalog=u\"eshn\", parent=None)\n sqldb.session.add(new_ok)\n sqldb.session.commit()\n new_ok = OkvadObject(id=str(ObjectId()), okved=u\"100.3\", caption=u\"Программы\", nalog=u\"usn\", parent=new_ok.id)\n sqldb.session.add(new_ok)\n sqldb.session.commit()\n\n result = self.test_client.get(u'/search_okvad/')\n self.assertEqual(result.status_code, 400)\n\n @authorized()\n def test_okvad_skeleton(self):\n with self.app.app_context():\n new_ok = OkvadObject(id=str(ObjectId()), okved=u\"100.1\", caption=u\"Стройка\", nalog=u\"eshn\", parent=None)\n sqldb.session.add(new_ok)\n sqldb.session.commit()\n new_ok = OkvadObject(id=str(ObjectId()), okved=u\"100.2\", caption=u\"Образование\", nalog=u\"usn\", parent=None)\n sqldb.session.add(new_ok)\n sqldb.session.commit()\n xx = OkvadObject(id=str(ObjectId()), okved=u\"100.3\", caption=u\"Программы\", nalog=u\"usn\", parent=new_ok.id)\n sqldb.session.add(xx)\n sqldb.session.commit()\n new_ok = OkvadObject(id=str(ObjectId()), okved=u\"100.4\", caption=u\"Программы\", nalog=u\"usn\", parent=new_ok.id)\n sqldb.session.add(new_ok)\n sqldb.session.commit()\n\n xx = OkvadObject(id=str(ObjectId()), okved=u\"200.3\", caption=u\"Программы\", nalog=u\"eshn\", parent=new_ok.id)\n sqldb.session.add(xx)\n sqldb.session.commit()\n new_ok = OkvadObject(id=str(ObjectId()), okved=u\"200.4\", caption=u\"Программы\", nalog=u\"eshn\", parent=new_ok.id)\n sqldb.session.add(new_ok)\n sqldb.session.commit()\n\n result = self.test_client.post(u'/get_okvad_skeleton/')\n self.assertEqual(result.status_code, 200)\n print(result.data)\n\n @authorized()\n def test_search_for_okvad_short_term(self):\n with self.app.app_context():\n new_ok = OkvadObject(id=str(ObjectId()), okved=u\"100.1\", caption=u\"Образование\", nalog=u\"usn\", parent=None)\n sqldb.session.add(new_ok)\n sqldb.session.commit()\n new_ok = OkvadObject(id=str(ObjectId()), okved=u\"100.2\", caption=u\"Стройка\", nalog=u\"eshn\", parent=None)\n sqldb.session.add(new_ok)\n sqldb.session.commit()\n new_ok = OkvadObject(id=str(ObjectId()), okved=u\"100.3\", caption=u\"Программы\", nalog=u\"usn\", parent=new_ok.id)\n sqldb.session.add(new_ok)\n sqldb.session.commit()\n\n result = self.test_client.get(u'/search_okvad/?title=ab')\n self.assertEqual(result.status_code, 400)\n\n @authorized()\n def test_search_for_okvad_new(self):\n with self.app.app_context():\n new_ok = OkvadObject(id=str(ObjectId()), okved=u\"100.1\", caption=u\"Образование\", nalog=u\"usn\", parent=None)\n sqldb.session.add(new_ok)\n sqldb.session.commit()\n new_ok = OkvadObject(id=str(ObjectId()), okved=u\"100.2\", caption=u\"Стройка\", nalog=u\"eshn\", parent=None)\n sqldb.session.add(new_ok)\n sqldb.session.commit()\n new_ok = OkvadObject(id=str(ObjectId()), okved=u\"100.3\", caption=u\"Программы\", nalog=u\"usn\", parent=new_ok.id)\n sqldb.session.add(new_ok)\n sqldb.session.commit()\n\n result = self.test_client.post(u'/get_okvad/', data = {})\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(len(result_data['result']), 3)\n\n result = self.test_client.post(u'/get_okvad/', data = 
{\"search\" : u\"образование\"})\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(len(result_data['result']), 1)\n\n parent_ok = OkvadObject.query.filter_by(okved=\"100.2\").scalar()\n result = self.test_client.post(u'/get_okvad/', data = {\"search\" : u\"програм\", \"parent\" : parent_ok.id})\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(len(result_data['result']), 1)\n\n" }, { "alpha_fraction": 0.5348777770996094, "alphanum_fraction": 0.5422607064247131, "avg_line_length": 37.891090393066406, "blob_id": "95384c1170411db96dc499ffcb6cbb80d5ca702f", "content_id": "443dc3bd2dc586e8a185b850b2595bbe58b8cd1a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3970, "license_type": "no_license", "max_line_length": 157, "num_lines": 101, "path": "/app/fw/api/dadata_proxy.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport hashlib\nimport json\nimport shlex\nimport subprocess\nimport requests\nfrom flask import current_app\n\napi_key = \"Token 3dc7eb747eb6ac11509b941b40df1b582de68f2b\" # todo: move to configuration\nsecret_key = \"0affb032f2563b4c2bb6a66d7ee4f9c8fef48240\"\nDADATA_TIMEOUT_SECONDS = 20\n\ndef make_key(key):\n m = hashlib.md5()\n m.update(key)\n return m.hexdigest()\n\ndef dadata_clean(method, data):\n key = make_key('dadata/clean' + method + unicode(data))\n result_text = current_app.external_tools.cache.get(key)\n\n if not result_text:\n current_app.logger.debug(u'ddc: cache miss')\n result = requests.post('https://dadata.ru/api/v2/clean/%s' % method,\n data = json.dumps(data),\n headers = {\n \"Content-Type\": \"application/json\",\n \"Authorization\": api_key,\n \"X-Secret\": secret_key\n }, timeout=DADATA_TIMEOUT_SECONDS)\n\n if result.status_code == 200:\n result_text = result.text\n else:\n current_app.logger.error(u'invalid response code: %s (%s)' % (result.status_code, result.text))\n return\n current_app.external_tools.cache.set(key, result_text, 1800)\n else:\n current_app.logger.debug(u'ddc: cache hit')\n\n if result_text:\n try:\n items = json.loads(result_text)\n for item in items:\n if 'house' in item and item['house'] and 'block_type' in item and item['block_type'] is None:\n if len(item['house']) > 1 and item['house'][0].isdigit() and item['house'][-1].isalpha():\n item['block_type'] = u'литер'\n item['block'] = item['house'][-1]\n item['house'] = item['house'][:-1]\n\n try:\n cmd = 'zabbix_sender -c /etc/zabbix/zabbix_agentd.conf -k %s -o 1' % 'dadata_clean' # todo: make quick!\n p = subprocess.Popen(shlex.split(cmd), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n p.communicate()\n except Exception:\n pass\n return items\n except Exception:\n current_app.logger.exception(u'error during response processing')\n pass\n\n\ndef dadata_suggest(method, data):\n if method not in ('fio', 'address', 'party') or not data:\n current_app.logger.error('invalid input data')\n return\n\n try:\n result = requests.post('https://dadata.ru/api/v2/suggest/%s/' % method,\n data = json.dumps(data),\n headers = {\n \"Accept\": \"application/json\",\n \"Content-Type\": \"application/json\",\n \"Authorization\": api_key,\n \"X-Secret\": secret_key\n }, timeout=20)\n except Exception:\n current_app.logger.error('error during suggestion request')\n return\n\n if result.status_code == 200:\n try:\n data = result.json()\n suggestions = 
data['suggestions']\n for sugg in suggestions:\n item = sugg['data']\n if 'house' in item and item['house'] and 'block_type' in item and item['block_type'] is None:\n if len(item['house']) > 1 and item['house'][0].isdigit() and item['house'][-1].isalpha():\n item['block_type'] = u'литер'\n item['block'] = item['house'][-1]\n item['house'] = item['house'][:-1]\n if 'city' in item and item['city'] in [u\"Москва\", u\"Санкт-Петербург\", u\"Севастополь\"] and 'city_type' in item and item['city_type'] == u\"г\":\n item['city'] = None\n item['city_type'] = None\n item['city_type_full'] = None\n\n return data\n except Exception:\n current_app.logger.exception(u'error during suggestion processing')\n pass\n current_app.logger.error('invalid response code')\n" }, { "alpha_fraction": 0.7223300933837891, "alphanum_fraction": 0.7281553149223328, "avg_line_length": 31.25, "blob_id": "6c0a02cb5a8f3685304372b5e23eafce9dc9c384", "content_id": "45f1ec2666e704a68fa85fae939405d67ea15836", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 515, "license_type": "no_license", "max_line_length": 106, "num_lines": 16, "path": "/app/services/partners/__init__.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport os\n\nimport jinja2\nfrom services.partners.api import partners_bp\n\n\ndef register(app, jinja_env, class_loader, url_prefix=None):\n app.register_blueprint(partners_bp, url_prefix=url_prefix)\n\n search_path = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), u\"templates\"))\n jinja_env.loader.loaders.append(jinja2.FileSystemLoader(search_path))\n\n\ndef get_manager_command_locations():\n return [os.path.normpath(os.path.abspath(os.path.dirname(__file__)))]" }, { "alpha_fraction": 0.6837349534034729, "alphanum_fraction": 0.6847389340400696, "avg_line_length": 34.57143020629883, "blob_id": "aa33dc21ffbd72d6d0610a69f41a267ffb2fc729", "content_id": "e497b63b3e72b63593b37710b4da30d3cac14e2a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 996, "license_type": "no_license", "max_line_length": 110, "num_lines": 28, "path": "/app/deployment_migrations/migration_list/20151012_add_notarius_check_arg.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.db_fields import DocumentBatchDbObject\nfrom fw.documents.enums import DocumentBatchTypeEnum\nfrom services.notarius.data_model.models import NotariusBookingObject\nfrom services.pay.models import PayInfoObject, PurchaseServiceType\n\n\ndef forward(config, logger):\n logger.debug(u\"Add new argument to data\")\n\n batches = set()\n for booking in NotariusBookingObject.query.filter(NotariusBookingObject.batch_id != None):\n if booking.batch_id in batches:\n continue\n for batch in DocumentBatchDbObject.query.filter_by(id=booking.batch_id, deleted=False, _broken=False):\n batches.add(batch.id)\n data = batch.data or {}\n data['lawyer_check'] = not booking._discarded\n DocumentBatchDbObject.query.filter_by(id=batch.id).update({\n 'data': data\n })\n sqldb.session.commit()\n\n\ndef rollback(config, logger):\n pass\n" }, { "alpha_fraction": 0.4690365791320801, "alphanum_fraction": 0.48411646485328674, "avg_line_length": 37.18411636352539, "blob_id": "d8fffe8aeb89f30908aebf6208d9f7c1a1c477dc", "content_id": "3a9c767a8516bb19a8b8c6f857eaacbddac874c6", "detected_licenses": 
[], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 21887, "license_type": "no_license", "max_line_length": 106, "num_lines": 554, "path": "/jb_tests/test_pack/test_partners.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom copy import copy\nfrom datetime import datetime\nimport json\nimport os\nfrom bson import ObjectId\n\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.address_enums import RFRegionsEnum\nfrom fw.documents.db_fields import BatchDocumentDbObject\nfrom fw.documents.doc_requisites_storage import DocRequisitiesStorage\nfrom fw.documents.enums import DocumentBatchTypeEnum, DocumentTypeEnum\nfrom services.partners import partners_manage_commands\nfrom services.partners.models import StampPartnersObject, AccountantPartnersObject, BankPartnersObject, \\\n BankPartnerRequestObject, BankPartnersServiceObject\n\nos.environ['CELERY_CONFIG_MODULE'] = 'dev_celeryconfig'\n\nfrom test_pack.base_batch_test import BaseBatchTestCase\nfrom test_pack.test_api import authorized\n\n\nclass PartnersApiTestCase(BaseBatchTestCase):\n\n @authorized()\n def test_get_stamp_partners(self):\n with self.app.app_context():\n sp = StampPartnersObject(\n id=str(ObjectId()),\n region=RFRegionsEnum.RFR_SPB,\n enabled=True,\n sort_index=10,\n link='http://ya.ru',\n banner='http://yastatic.net/morda-logo/i/logo.svg',\n title='Йандекс'\n )\n sqldb.session.add(sp)\n\n sp = StampPartnersObject(\n id=str(ObjectId()),\n region=RFRegionsEnum.RFR_SPB,\n enabled=True,\n sort_index=2,\n link='http://ya1.ru',\n banner='http://yastatic1.net/morda-logo/i/logo.svg',\n title='Йандекс2'\n )\n sqldb.session.add(sp)\n\n sp = StampPartnersObject(\n id=str(ObjectId()),\n region=RFRegionsEnum.RFR_SPB,\n enabled=True,\n sort_index=13,\n link='http://ya2.ru',\n banner='http://yastatic2.net/morda-logo/i/logo.svg',\n title='Йандекс3'\n )\n sqldb.session.add(sp)\n\n sp = StampPartnersObject(\n id=str(ObjectId()),\n region=RFRegionsEnum.RFR_SPB,\n enabled=False,\n sort_index=11,\n link='http://ya.ru',\n banner='http://yastatic.net/morda-logo/i/logo.svg',\n title='Йандекс'\n )\n sqldb.session.add(sp)\n\n sp = StampPartnersObject(\n id=str(ObjectId()),\n region=RFRegionsEnum.RFR_MOSCOW,\n enabled=True,\n sort_index=2,\n link='http://ya1.ru',\n banner='http://yastatic1.net/morda-logo/i/logo.svg',\n title='Йандекс2'\n )\n sqldb.session.add(sp)\n sqldb.session.commit()\n\n batch_id = self.create_batch('llc', self.user).id\n doc1 = BatchDocumentDbObject(\n _owner=self.user,\n document_type='test',\n batch_id=batch_id,\n data={\n 'address': {\n 'region': u'Санкт-Петербург'\n }\n }\n )\n sqldb.session.add(doc1)\n sqldb.session.commit()\n\n result = self.test_client.get('/partners/stamps/?batch_id=%s' % batch_id)\n self.assertEqual(result.status_code, 200)\n\n data = json.loads(result.data)\n self.assertEqual(len(data['result']['stamp_partners']), 3)\n\n @authorized()\n def test_get_accountant_partners(self):\n with self.app.app_context():\n new_partner = AccountantPartnersObject(\n id=str(ObjectId()),\n region=[RFRegionsEnum.RFR_SPB],\n enabled=True,\n sort_index=10,\n link='http://ya.ru',\n banner='http://yastatic.net/morda-logo/i/logo.svg',\n title='Йандекс',\n type=\"online\"\n )\n sqldb.session.add(new_partner)\n sqldb.session.commit()\n\n new_partner = AccountantPartnersObject(\n id=str(ObjectId()),\n region=[RFRegionsEnum.RFR_SPB],\n enabled=True,\n sort_index=2,\n link='http://ya1.ru',\n 
banner='http://yastatic1.net/morda-logo/i/logo.svg',\n title=\"Йандекс2\",\n type=\"offline\"\n )\n sqldb.session.add(new_partner)\n sqldb.session.commit()\n\n new_partner = AccountantPartnersObject(\n id=str(ObjectId()),\n region=[RFRegionsEnum.RFR_SPB],\n enabled=True,\n sort_index=13,\n link='http://ya2.ru',\n banner='http://yastatic2.net/morda-logo/i/logo.svg',\n title=\"Йандекс3\",\n type=\"online\"\n )\n sqldb.session.add(new_partner)\n sqldb.session.commit()\n\n new_partner = AccountantPartnersObject(\n id=str(ObjectId()),\n region=[RFRegionsEnum.RFR_SPB],\n enabled=False,\n sort_index=11,\n link='http://ya.ru',\n banner='http://yastatic.net/morda-logo/i/logo.svg',\n title=\"Йандекс\",\n type=\"online\"\n )\n sqldb.session.add(new_partner)\n sqldb.session.commit()\n\n new_partner = AccountantPartnersObject(\n id=str(ObjectId()),\n region=[RFRegionsEnum.RFR_MOSCOW],\n enabled=True,\n sort_index=2,\n link='http://ya1.ru',\n banner='http://yastatic1.net/morda-logo/i/logo.svg',\n title=\"Йандекс2\",\n type=\"offline\"\n )\n sqldb.session.add(new_partner)\n sqldb.session.commit()\n\n batch_id = self.create_batch('llc', self.user).id\n doc1 = BatchDocumentDbObject(\n _owner=self.user,\n document_type='test',\n batch_id=batch_id,\n data={\n 'address': {\n 'region': u'Санкт-Петербург'\n }\n }\n )\n sqldb.session.add(doc1)\n sqldb.session.commit()\n\n result = self.test_client.get('/partners/accounts/?batch_id=%s' % str(batch_id))\n self.assertEqual(result.status_code, 200)\n\n data = json.loads(result.data)\n self.assertEqual(len(data['result']['accounts_partners']), 3)\n\n @authorized()\n def test_get_bank_partners(self):\n new_partner = BankPartnersObject(\n id=str(ObjectId()),\n region=[RFRegionsEnum.RFR_SPB],\n city=[RFRegionsEnum.RFR_SPB],\n enabled=True,\n sort_index=10,\n link=u'http://ya.ru',\n banner=u'http://yastatic.net/morda-logo/i/logo.svg',\n title=u'Йандекс',\n conditions=[]\n )\n sqldb.session.add(new_partner)\n sqldb.session.commit()\n\n new_partner = BankPartnersObject(\n id=str(ObjectId()),\n region=[RFRegionsEnum.RFR_SPB],\n city=[RFRegionsEnum.RFR_SPB],\n enabled=True,\n sort_index=2,\n link=u'http://ya1.ru',\n banner=u'http://yastatic1.net/morda-logo/i/logo.svg',\n title=u'Йандекс2',\n conditions=[]\n )\n sqldb.session.add(new_partner)\n sqldb.session.commit()\n\n new_partner = BankPartnersObject(\n id=str(ObjectId()),\n region=[RFRegionsEnum.RFR_SPB],\n city=[RFRegionsEnum.RFR_SPB],\n enabled=True,\n sort_index=13,\n link=u'http://ya2.ru',\n banner=u'http://yastatic2.net/morda-logo/i/logo.svg',\n title=u'Йандекс3',\n conditions=[]\n )\n sqldb.session.add(new_partner)\n sqldb.session.commit()\n\n new_partner = BankPartnersObject(\n id=str(ObjectId()),\n region=[RFRegionsEnum.RFR_SPB],\n city=[RFRegionsEnum.RFR_SPB],\n enabled=False,\n sort_index=11,\n link=u'http://ya.ru',\n banner=u'http://yastatic.net/morda-logo/i/logo.svg',\n title=u'Йандекс',\n conditions=[]\n )\n sqldb.session.add(new_partner)\n sqldb.session.commit()\n\n new_partner = BankPartnersObject(\n id=str(ObjectId()),\n region=[RFRegionsEnum.RFR_MOSCOW],\n city=[RFRegionsEnum.RFR_MOSCOW],\n enabled=True,\n sort_index=2,\n link=u'http://ya1.ru',\n banner=u'http://yastatic1.net/morda-logo/i/logo.svg',\n title=u'Йандекс2',\n conditions=[]\n )\n sqldb.session.add(new_partner)\n sqldb.session.commit()\n\n batch_id = self.create_batch('llc', self.user).id\n doc1 = BatchDocumentDbObject(\n _owner=self.user,\n document_type='test',\n batch_id=batch_id,\n data={\n 'address': {\n 'region': u'Санкт-Петербург'\n }\n 
}\n )\n sqldb.session.add(doc1)\n sqldb.session.commit()\n\n result = self.test_client.get('/partners/banks/?batch_id=%s' % str(batch_id))\n self.assertEqual(result.status_code, 200)\n\n data = json.loads(result.data)\n self.assertEqual(len(data['result']['banks_partners']), 3)\n\n @authorized()\n def test_request_bank(self):\n with self.app.app_context():\n DocRequisitiesStorage.get_batch_descriptor(DocumentBatchTypeEnum.DBT_NEW_LLC)['doc_types'] = [\n DocumentTypeEnum.DT_P11001]\n\n general_manager_person = self.create_person(self.user)\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_NEW_LLC, self.user)\n batch.data = {\n u\"full_name\": u\"фывафыва\",\n u\"short_name\": u\"Бокс\",\n u\"address\": {\n \"street_type\": u\"ул\",\n \"index\": 191186,\n \"house\": u\"4\",\n \"region\": u\"Санкт-Петербург\",\n \"flat\": u\"12\",\n \"street\": u\"Большая Морская\",\n \"address_string\": u\"г Санкт-Петербург, ул Большая Морская, д 4, кв 12\",\n \"flat_type\": u\"кв\",\n \"house_type\": u\"д\",\n \"long_form_mode\": True,\n \"ifns\": u\"7841\",\n \"okato\": u\"92401385000\",\n },\n u\"address_type\": u\"general_manager_registration_address\",\n u\"general_manager_caption\": u\"повелитель\",\n u\"general_manager\": {\n \"_id\": general_manager_person.id,\n \"type\": u\"person\"\n }\n }\n batch.result_fields = {\n 'ifns_reg_info': {\n 'status': 'registered',\n 'reg_date': datetime.now(),\n 'full_name': u\"Лютик\",\n 'ogrn': 1095543023135\n }\n }\n sqldb.session.commit()\n batch_id = batch.id\n\n new_partner = BankPartnersObject(\n id=str(ObjectId()),\n region=[RFRegionsEnum.RFR_SPB],\n city=[RFRegionsEnum.RFR_SPB],\n enabled=True,\n sort_index=10,\n link=u'http://ya.ru',\n banner=u'http://yastatic.net/morda-logo/i/logo.svg',\n title=u'Йандекс',\n conditions=[]\n )\n sqldb.session.add(new_partner)\n sqldb.session.commit()\n bank_id = new_partner.id\n\n svc = BankPartnersServiceObject(\n id=str(ObjectId()),\n type='email',\n email='test_email@test_domain.zz',\n fields=partners_manage_commands._BANK_PARTNER_SCHEMA,\n template_name='account_creation_consultation_request',\n bank_partner_id=bank_id\n )\n sqldb.session.add(svc)\n sqldb.session.commit()\n\n result = self.test_client.post('/partners/banks/send/', data={\n 'batch_id': batch_id,\n 'bank_id': bank_id,\n 'bank_contact_phone_general_manager': True,\n 'bank_contact_phone': \"+79001231213\",\n 'send_private_data': True\n })\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n batch = data['result']\n\n # DocumentBatchDbObject.get_collection(self.db).update({'_id': batch_id}, {\n # '$set': {\n # 'data.ogrn': 1095543023135\n # }\n # })\n # result = self.test_client.post('/partners/banks/send/', data={\n # 'batch_id': batch_id,\n # 'bank_id': bank_id,\n # 'bank_contact_phone_general_manager': True,\n # 'bank_contact_phone': \"+79001231213\",\n # 'send_private_data': True\n # })\n # self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n batch = data['result']\n\n self.assertNotIn('error_info', batch)\n self.assertIn('result_fields', batch)\n\n result_fields = batch['result_fields']\n self.assertIn('bank_partner_requests', result_fields)\n self.maxDiff = None\n del result_fields['bank_partner_requests'][0]['sent_date']\n self.assertEqual(result_fields['bank_partner_requests'], [{\n 'bank_partner_id': str(bank_id),\n 'bank_contact_phone': '+79001231213',\n 'bank_contact_phone_general_manager': True,\n # идентификатор партнера по банковскому обслуживанию\n 'bank_partner_caption': u'Йандекс',\n # 
название банка-партнера\n 'send_private_data': True\n # дата отправки заявки\n }])\n\n self.assertEqual(BankPartnerRequestObject.query.count(), 1)\n bank_request = BankPartnerRequestObject.query.first()\n bank_request = copy(bank_request.__dict__)\n del bank_request['id']\n del bank_request['sent_date']\n del bank_request['_sa_instance_state']\n self.maxDiff = None\n self.assertEqual(bank_request, {\n 'bank_partner_id': bank_id,\n 'bank_contact_phone': '+79001231213',\n 'bank_contact_phone_general_manager': 'true',\n 'batch_id': batch_id,\n 'bank_partner_caption': u'Йандекс',\n 'send_private_data': True,\n 'status': 'success'\n })\n\n @authorized()\n def test_request_bank_via_web(self):\n with self.app.app_context():\n DocRequisitiesStorage.get_batch_descriptor(DocumentBatchTypeEnum.DBT_NEW_LLC)['doc_types'] = [\n DocumentTypeEnum.DT_P11001]\n\n general_manager_person = self.create_person(self.user)\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_NEW_LLC, self.user)\n batch.data = {\n u\"full_name\": u\"фывафыва\",\n u\"short_name\": u\"Бокс\",\n u\"address\": {\n \"street_type\": u\"ул\",\n \"index\": 191186,\n \"house\": u\"4\",\n \"region\": u\"Санкт-Петербург\",\n \"flat\": u\"12\",\n \"street\": u\"Большая Морская\",\n \"address_string\": u\"г Санкт-Петербург, ул Большая Морская, д 4, кв 12\",\n \"flat_type\": u\"кв\",\n \"house_type\": u\"д\",\n \"long_form_mode\": True,\n \"ifns\": u\"7841\",\n \"okato\": u\"92401385000\",\n },\n u\"address_type\": u\"general_manager_registration_address\",\n u\"general_manager_caption\": u\"повелитель\",\n u\"general_manager\": {\n \"_id\": general_manager_person.id,\n \"type\": u\"person\"\n }\n }\n batch.result_fields = {\n 'ifns_reg_info': {\n 'status': 'registered',\n 'reg_date': datetime.now(),\n 'full_name': u\"Лютик\",\n 'ogrn': 1095543023135\n }\n }\n sqldb.session.commit()\n batch_id = batch.id\n\n new_partner = BankPartnersObject(\n id=str(ObjectId(\"55c9afab543ed837fea53db2\")),\n region=[RFRegionsEnum.RFR_SPB],\n city=[RFRegionsEnum.RFR_SPB],\n enabled=True,\n sort_index=10,\n link=u'',\n banner=u\"some link\",\n title=u'«Альфа-банк»',\n conditions=[\n u\"бесплатный выезд менеджера в офис\",\n u\"открытие расчетного счета за 2‒3 дня\",\n u\"3 месяца бесплатно при оплате сразу 9 месяцев\",\n u\"до 3000 рублей на поиск профессионалов на HH.ru\",\n u\"до 9000 рублей на Яндекс.Директ после открытия счета в подарок\"\n ]\n )\n sqldb.session.add(new_partner)\n sqldb.session.commit()\n bank_id = new_partner.id\n\n svc = BankPartnersServiceObject(\n id=str(ObjectId()),\n type='web',\n config={\n 'method': 'post',\n 'url': 'http://ya.ru',\n },\n fields=partners_manage_commands._BANK_PARTNER_SCHEMA2,\n template_name='alpha_bank_web_request',\n bank_partner_id=bank_id\n )\n sqldb.session.add(svc)\n sqldb.session.commit()\n\n result = self.test_client.post('/partners/banks/send/', data={\n 'batch_id': batch_id,\n 'bank_id': bank_id,\n 'bank_contact_phone': \"+79001231213\",\n 'bank_contact_phone_general_manager': True,\n 'send_private_data': True\n })\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n batch = data['result']\n\n req_col = self.db['bank_partners_request']\n # DocumentBatchDbObject.get_collection(self.db).update({'_id': batch_id}, {\n # '$set': {\n # 'data.ogrn': 1095543023135\n # }\n # })\n # result = self.test_client.post('/partners/banks/send/', data={\n # 'batch_id': batch_id,\n # 'bank_id': bank_id,\n # 'bank_contact_phone_general_manager': True,\n # 'bank_contact_phone': \"+79001231213\",\n # 
'send_private_data': True\n # })\n # self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n batch = data['result']\n\n self.assertNotIn('error_info', batch)\n self.assertIn('result_fields', batch)\n\n result_fields = batch['result_fields']\n self.assertIn('bank_partner_requests', result_fields)\n self.maxDiff = None\n del result_fields['bank_partner_requests'][0]['sent_date']\n self.assertEqual(result_fields['bank_partner_requests'], [{\n 'bank_partner_id': str(bank_id),\n 'bank_contact_phone': '+79001231213',\n 'bank_contact_phone_general_manager': 'true',\n # идентификатор партнера по банковскому обслуживанию\n 'bank_partner_caption': u'«Альфа-банк»',\n # название банка-партнера\n 'send_private_data': True,\n # дата отправки заявки\n }])\n\n self.assertEqual(BankPartnerRequestObject.query.count(), 1)\n bank_request = BankPartnerRequestObject.query.first()\n bank_request = copy(bank_request.__dict__)\n del bank_request['id']\n del bank_request['sent_date']\n del bank_request['_sa_instance_state']\n self.maxDiff = None\n self.assertEqual(bank_request, {\n 'bank_partner_id': bank_id,\n 'bank_contact_phone': '+79001231213',\n 'bank_contact_phone_general_manager': 'true',\n 'batch_id': batch_id,\n 'bank_partner_caption': u'«Альфа-банк»',\n 'send_private_data': True,\n 'status': 'failed'\n })\n" }, { "alpha_fraction": 0.5714285969734192, "alphanum_fraction": 0.5822102427482605, "avg_line_length": 38.897850036621094, "blob_id": "179e84930d4dfa9449e61dca015120bfce03c7f9", "content_id": "6a327424ae0c8085374a9f7c24fdb7e7306f4e23", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7420, "license_type": "no_license", "max_line_length": 256, "num_lines": 186, "path": "/jb_tests/test_pack/test_russian_post.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\nimport json\nfrom lxml import etree, objectify\nimport os\nimport requests\nfrom fw.documents.enums import DocumentBatchTypeEnum\nfrom fw.db.sql_base import db as sqldb\n\nos.environ['CELERY_CONFIG_MODULE'] = 'dev_celeryconfig'\n\nfrom services.russian_post.db_models import RussianPostTrackingItem, PostTrackingStatus\nfrom test_pack.base_batch_test import BaseBatchTestCase\nfrom test_pack.test_api import authorized\n\n\nclass RPTestCase(BaseBatchTestCase):\n\n @authorized()\n def test_get_status(self):\n url = 'http://tracking.russianpost.ru/rtm34?wsdl'\n headers = {\n \"Accept-Encoding\": \"gzip,deflate\",\n \"Content-Type\": \"application/soap+xml;charset=UTF-8\",\n \"User-Agent\": \"Apache-HttpClient/4.1.1 (java 1.5)\"\n }\n\n data = u\"\"\"<soap:Envelope xmlns:soap=\"http://www.w3.org/2003/05/soap-envelope\" xmlns:oper=\"http://russianpost.org/operationhistory\" xmlns:data=\"http://russianpost.org/operationhistory/data\" xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\">\n <soap:Header/>\n <soap:Body>\n <oper:getOperationHistory>\n <!--Optional:-->\n <data:OperationHistoryRequest>\n <data:Barcode>19083586376477</data:Barcode>\n <data:MessageType>0</data:MessageType>\n <!--Optional:-->\n <data:Language>RUS</data:Language>\n </data:OperationHistoryRequest>\n <!--Optional:-->\n <data:AuthorizationHeader soapenv:mustUnderstand=\"1\">\n <data:login>rocketscienceacademy</data:login>\n <data:password>dBu46cgOra97s</data:password>\n </data:AuthorizationHeader>\n </oper:getOperationHistory>\n </soap:Body>\n </soap:Envelope>\"\"\"\n\n response = requests.post(url, data=data, 
headers=headers)\n self.assertEqual(response.status_code, 200)\n #print(response.text)\n\n last_status = {}\n root = etree.fromstring(response.content)\n for elem in root.getiterator():\n if not hasattr(elem.tag, 'find'): continue # (1)\n i = elem.tag.find('}')\n if i >= 0:\n elem.tag = elem.tag[i+1:]\n objectify.deannotate(root, cleanup_namespaces=True)\n tags = root.xpath('//OperationHistoryData/historyRecord')\n for tag in tags:\n oper_type_id = None\n oper_type_descr = None\n date_val = None\n address_descr = None\n\n oper_tags = tag.xpath('./OperationParameters/OperType/Id')\n for otag in oper_tags:\n oper_type_id = otag.text\n break\n\n oper_tags = tag.xpath('./OperationParameters/OperType/Name')\n for otag in oper_tags:\n oper_type_descr = otag.text\n break\n\n operdate_tags = tag.xpath('./OperationParameters/OperDate')\n for otag in operdate_tags:\n date_val = datetime.strptime(otag.text[:19], \"%Y-%m-%dT%H:%M:%S\")\n break\n\n address_tags = tag.xpath('./AddressParameters/OperationAddress/Description')\n for atag in address_tags:\n address_descr = atag.text\n break\n\n if oper_type_id is not None and oper_type_descr is not None and date_val is not None and address_tags is not None:\n last_status = {\n 'operation': oper_type_id,\n 'op_name': oper_type_descr,\n 'dt': date_val,\n 'address': address_descr\n }\n\n print etree.tostring(root, pretty_print = True, encoding='utf-8')\n print(json.dumps(last_status, ensure_ascii=False, indent=1, default=lambda x: unicode(x)))\n\n @authorized()\n def test_get_status_from_db(self):\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_OSAGO, self.user)\n tracking1 = RussianPostTrackingItem(\n tracking=u\"track1\",\n batch=batch,\n owner=self.user\n )\n sqldb.session.add(tracking1)\n sqldb.session.commit()\n\n tracking2 = RussianPostTrackingItem(\n tracking=u\"track2\",\n batch=batch,\n owner=self.user\n )\n sqldb.session.add(tracking2)\n sqldb.session.commit()\n\n tracking3 = RussianPostTrackingItem(\n tracking=u\"track3\",\n owner=self.user\n )\n sqldb.session.add(tracking3)\n sqldb.session.commit()\n\n response = self.test_client.get('/external/russianpost/mail/status/?batch_id=%s' % batch.id)\n self.assertEqual(response.status_code, 200)\n\n data = json.loads(response.data)\n self.assertEqual(data, {u'result': {u'status': u'progress', u'status_caption': u''}})\n\n response = self.test_client.get('/external/russianpost/mail/status/?batch_id=%s' % batch.id)\n self.assertEqual(response.status_code, 200)\n\n data = json.loads(response.data)\n self.assertEqual(data, {u'result': {u'status': u'progress', u'status_caption': u''}})\n\n @authorized()\n def test_create_track(self):\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_OSAGO, self.user)\n\n response = self.test_client.post('/external/russianpost/mail/track/', data={\n 'batch_id': batch.id,\n 'tracking': \"track1\"\n })\n self.assertEqual(response.status_code, 200)\n\n data = json.loads(response.data)\n self.assertEqual(data, {u'result': True})\n\n self.assertEqual(RussianPostTrackingItem.query.count(), 1)\n item = RussianPostTrackingItem.query.first()\n self.assertEqual(item.tracking, 'track1')\n self.assertEqual(item.batch_id, batch.id)\n self.assertEqual(item.status, PostTrackingStatus.PTS_NOT_FOUND)\n\n response = self.test_client.post('/external/russianpost/mail/track/', data={\n 'batch_id': batch.id,\n 'tracking': \"track2\"\n })\n self.assertEqual(response.status_code, 200)\n\n data = json.loads(response.data)\n self.assertEqual(data, {u'result': True})\n\n 
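# --- editorial sketch (not part of the dataset records above/below) ---
# The tag-rewriting loop in test_get_status above, lifted into a reusable
# helper: it strips '{namespace}' prefixes from the SOAP response so the later
# XPath expressions ('//OperationHistoryData/historyRecord', ...) can stay
# namespace-free. Derived directly from the loop above; the helper name is an
# assumption.
from lxml import objectify

def strip_namespaces(root):
    for elem in root.getiterator():
        if not hasattr(elem.tag, 'find'):
            continue  # comments/processing instructions have no string tag
        i = elem.tag.find('}')
        if i >= 0:
            elem.tag = elem.tag[i + 1:]
    objectify.deannotate(root, cleanup_namespaces=True)
    return root
# --- end editorial sketch ---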
self.assertEqual(RussianPostTrackingItem.query.count(), 1)\n item = RussianPostTrackingItem.query.first()\n self.assertEqual(item.tracking, 'track2')\n self.assertEqual(item.batch_id, batch.id)\n self.assertEqual(item.status, PostTrackingStatus.PTS_NOT_FOUND)\n\n response = self.test_client.post('/external/russianpost/mail/track/', data={\n 'batch_id': batch.id,\n 'tracking': \"track2\"\n })\n self.assertEqual(response.status_code, 200)\n\n data = json.loads(response.data)\n self.assertEqual(data, {u'result': True})\n\n self.assertEqual(RussianPostTrackingItem.query.count(), 1)\n item2 = RussianPostTrackingItem.query.first()\n self.assertEqual(item2.id, item.id)\n self.assertEqual(item2.tracking, 'track2')\n self.assertEqual(item2.batch_id, batch.id)\n self.assertEqual(item2.status, PostTrackingStatus.PTS_NOT_FOUND)\n\n # todo: duplicate tracking id" }, { "alpha_fraction": 0.6522670388221741, "alphanum_fraction": 0.6640485525131226, "avg_line_length": 32.345237731933594, "blob_id": "34a0aca02d4f78d8718d703cdf8602bf2bec421c", "content_id": "4047886ff54ac24ff9d239cf0be2a942a24faea1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2801, "license_type": "no_license", "max_line_length": 161, "num_lines": 84, "path": "/app/services/car_assurance/api.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom flask import Blueprint\nfrom flask_login import login_required\n\nfrom fw.api import errors\nfrom fw.api.args_validators import validate_arguments, ArgumentValidator, IntValidator\nfrom fw.api.base_handlers import api_view\nfrom services.car_assurance.db_models import CarAssurance, CarAssuranceBranch\n\ncar_assurance_bp = Blueprint('car_assurance', __name__)\n\n\n@car_assurance_bp.route('/structures/insurances/', methods=['GET'])\n@api_view\n@login_required\n@validate_arguments(\n type=ArgumentValidator(required=True),\n search=ArgumentValidator(required=False),\n limit=IntValidator(required=False, min_val=1, max_val=100500),\n offset=IntValidator(required=False, min_val=0, max_val=100500)\n)\ndef get_car_insurances(search=None, limit=100, offset=0, **kwargs):\n if 'type' not in kwargs:\n raise errors.MissingRequiredParameter('type')\n type_arg = kwargs['type']\n if type_arg != 'osago':\n raise errors.InvalidParameterValue('type')\n\n if search:\n query = CarAssurance.query.filter_by(full_name=search).order_by(CarAssurance.full_name)\n else:\n query = CarAssurance.query.filter().order_by(CarAssurance.short_name)\n\n total = query.count()\n query = query.limit(limit).offset(offset)\n count = query.count()\n\n result = {\n 'total': total,\n 'count': count,\n 'insurances': [{\n \"id\": i.id,\n \"short_name\": i.short_name,\n \"full_name\": i.full_name\n } for i in query]\n }\n return {'result': result}\n\n\n@car_assurance_bp.route('/structures/insurances/branches/', methods=['GET'])\n@api_view\n@login_required\n@validate_arguments(\n id=ArgumentValidator(required=True),\n region=ArgumentValidator(required=False),\n limit=IntValidator(required=False, min_val=1, max_val=100500),\n offset=IntValidator(required=False, min_val=0, max_val=100500)\n)\ndef get_car_insurance_branches(region=None, limit=None, offset=None, **kwargs):\n if 'id' not in kwargs:\n raise errors.MissingRequiredParameter('id')\n id_arg = kwargs['id']\n\n if region:\n query = CarAssuranceBranch.query.filter_by(car_assurance_id=id_arg, region=region).order_by(CarAssuranceBranch.region).order_by(CarAssuranceBranch.title)\n 
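# --- editorial sketch (not part of the dataset records above/below) ---
# Both endpoints in services/car_assurance/api.py repeat the same pagination
# idiom: count the full result set first, then apply limit/offset and count
# again. Extracted form (assumed helper, not in the repo). SQLAlchemy's
# count() on a limited query respects the limit, so count <= total.
def paginate(query, limit=None, offset=None):
    total = query.count()                      # size before paging
    page = query.limit(limit).offset(offset)
    return total, page.count(), page.all()     # total, page size, rows
# --- end editorial sketch ---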
else:\n query = CarAssuranceBranch.query.filter_by(car_assurance_id=id_arg).order_by(CarAssuranceBranch.region).order_by(CarAssuranceBranch.title)\n\n total = query.count()\n query = query.limit(limit).offset(offset)\n count = query.count()\n\n result = {\n 'total': total,\n 'count': count,\n 'branches': [{\n \"id\": i.id,\n \"title\": i.title,\n \"phone\": i.phone,\n \"address\": i.address,\n \"region\": i.region\n } for i in query]\n }\n return {'result': result}\n" }, { "alpha_fraction": 0.4536558985710144, "alphanum_fraction": 0.4636586010456085, "avg_line_length": 38.818180084228516, "blob_id": "7463cb6d160483e8f39595f4fb8df3d9be98d0c4", "content_id": "9f6be99a9d254631ac009474884823b9ef48eaf5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 19177, "license_type": "no_license", "max_line_length": 135, "num_lines": 462, "path": "/jb_tests/test_pack/test_car_assurance_api.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport codecs\nimport glob\nimport json\nimport os\nfrom random import randint\nimport subprocess\nfrom tempfile import NamedTemporaryFile\nimport tempfile\nimport requests\nimport shutil\nfrom fw.api.dadata_proxy import dadata_suggest\nfrom fw.documents.address_enums import RFRegionsEnum\nfrom fw.db.sql_base import db as sqldb\nfrom services.car_assurance.db_models import CarAssuranceBranch, CarAssurance\nfrom services.ifns.utils.process_egrul_captcha import recognize_captcha\n\nos.environ['CELERY_CONFIG_MODULE'] = 'dev_celeryconfig'\n\nfrom test_pack.base_batch_test import BaseBatchTestCase\nfrom test_pack.test_api import authorized\n\nimport html5lib\nfrom lxml.cssselect import CSSSelector\n\nclass CarAssuranceApiTestCase(BaseBatchTestCase):\n\n @authorized()\n def test_get_assurance_list(self):\n response = self.test_client.get('/structures/insurances/')\n self.assertEqual(response.status_code, 400)\n\n a1 = self.addCarAssurance(u\"Assurance 1\")\n a2 = self.addCarAssurance(u\"Assurance 2\")\n a3 = self.addCarAssurance(u\"Assurance 3\")\n\n response = self.test_client.get('/structures/insurances/?type=osago')\n self.assertEqual(response.status_code, 200)\n result = json.loads(response.data)\n self.assertEqual(result, {\n 'result': {\n 'total': 3,\n 'count': 3,\n 'ifns': [{\n 'id': a1.id,\n 'short_name': u\"Assurance 1\",\n 'full_name': u\"Assurance 1\"\n }, {\n 'id': a2.id,\n 'short_name': u\"Assurance 2\",\n 'full_name': u\"Assurance 2\"\n }, {\n 'id': a3.id,\n 'short_name': u\"Assurance 3\",\n 'full_name': u\"Assurance 3\"\n }]\n }\n })\n\n response = self.test_client.get('/structures/insurances/?type=osago&limit=2')\n self.assertEqual(response.status_code, 200)\n result = json.loads(response.data)\n self.assertEqual(result, {\n 'result': {\n 'total': 3,\n 'count': 2,\n 'ifns': [{\n 'id': a1.id,\n 'short_name': u\"Assurance 1\",\n 'full_name': u\"Assurance 1\"\n }, {\n 'id': a2.id,\n 'short_name': u\"Assurance 2\",\n 'full_name': u\"Assurance 2\"\n }]\n }\n })\n\n response = self.test_client.get('/structures/insurances/?type=osago&limit=2&offset=1')\n self.assertEqual(response.status_code, 200)\n result = json.loads(response.data)\n self.assertEqual(result, {\n 'result': {\n 'total': 3,\n 'count': 2,\n 'ifns': [{\n 'id': a2.id,\n 'short_name': u\"Assurance 2\",\n 'full_name': u\"Assurance 2\"\n }, {\n 'id': a3.id,\n 'short_name': u\"Assurance 3\",\n 'full_name': u\"Assurance 3\"\n }]\n }\n })\n\n response = 
self.test_client.get('/structures/insurances/?type=osago&search=Assurance%202')\n self.assertEqual(response.status_code, 200)\n result = json.loads(response.data)\n self.assertEqual(result, {\n 'result': {\n 'total': 1,\n 'count': 1,\n 'ifns': [{\n 'id': a2.id,\n 'short_name': u\"Assurance 2\",\n 'full_name': u\"Assurance 2\"\n }]\n }\n })\n\n @authorized()\n def test_get_assurance_branches(self):\n response = self.test_client.get('/structures/insurances/branches/')\n self.assertEqual(response.status_code, 400)\n\n a1 = self.addCarAssurance(u'А1')\n b1_1 = self.addCarAssuranceBranch(a1)\n b1_2 = self.addCarAssuranceBranch(a1, region=RFRegionsEnum.RFR_ADYGEYA)\n b1_3 = self.addCarAssuranceBranch(a1)\n\n a2 = self.addCarAssurance(u'А2')\n b2_1 = self.addCarAssuranceBranch(a2)\n b2_2 = self.addCarAssuranceBranch(a2, region=RFRegionsEnum.RFR_ADYGEYA)\n b2_3 = self.addCarAssuranceBranch(a2, region=RFRegionsEnum.RFR_ADYGEYA)\n\n response = self.test_client.get('/structures/insurances/branches/?id=%s' % a1.id)\n self.assertEqual(response.status_code, 200)\n result = json.loads(response.data)\n self.assertEqual(result, {\n u'result': {\n u'total': 3,\n u'count': 3,\n u'ifns': [{\n u'id': b1_2.id,\n u'title': u\"title\",\n u'phone': u\"112\",\n u'address': u\"дер. Поганкино д. 13\",\n u'region': RFRegionsEnum.RFR_ADYGEYA\n }, {\n u'id': b1_1.id,\n u'title': u\"title\",\n u'phone': u\"112\",\n u'address': u\"дер. Поганкино д. 13\",\n u'region': RFRegionsEnum.RFR_LENINGRADSKAYA_REGION\n }, {\n u'id': b1_3.id,\n u'title': u\"title\",\n u'phone': u\"112\",\n u'address': u\"дер. Поганкино д. 13\",\n u'region': RFRegionsEnum.RFR_LENINGRADSKAYA_REGION\n }]\n }\n })\n\n response = self.test_client.get('/structures/insurances/branches/?id=%s&limit=1&offset=1' % a2.id)\n self.assertEqual(response.status_code, 200)\n result = json.loads(response.data)\n self.assertEqual(result, {\n u'result': {\n u'total': 3,\n u'count': 1,\n u'ifns': [{\n u'id': b2_3.id,\n u'title': u\"title\",\n u'phone': u\"112\",\n u'address': u\"дер. Поганкино д. 13\",\n u'region': RFRegionsEnum.RFR_ADYGEYA\n }]\n }\n })\n\n response = self.test_client.get('/structures/insurances/branches/?id=%s&region=%s' % (a2.id, RFRegionsEnum.RFR_ADYGEYA))\n self.assertEqual(response.status_code, 200)\n result = json.loads(response.data)\n self.assertEqual(result, {\n u'result': {\n u'total': 2,\n u'count': 2,\n u'ifns': [{\n u'id': b2_2.id,\n u'title': u\"title\",\n u'phone': u\"112\",\n u'address': u\"дер. Поганкино д. 13\",\n u'region': RFRegionsEnum.RFR_ADYGEYA\n }, {\n u'id': b2_3.id,\n u'title': u\"title\",\n u'phone': u\"112\",\n u'address': u\"дер. Поганкино д. 
13\",\n u'region': RFRegionsEnum.RFR_ADYGEYA\n }]\n }\n })\n\n def _test_collect_strah_info(self):\n response = requests.get('http://autoins.ru/ru/about_rsa/members/actual_members.wbp')\n\n str_data = response.text.encode('utf-8').decode('string_escape')\n content = u\"<!DOCTYPE html><html><head><title></title></head><body>%s</body></html>\" % str_data.decode('utf-8')\n root = html5lib.parse(content, treebuilder='lxml', namespaceHTMLElements=False)\n\n names = ('N', 'full_name', 'short_name', 'old_name', 'lic N', 'lic Dt', 'svid N', 'svid Dt', 'phone', 'email', 'address', 'inn')\n\n objects = []\n for tr_item in CSSSelector('table.usual tr:not(.header)')(root):\n i = 0\n obj = {}\n for td in CSSSelector('td')(tr_item):\n text = td.text.strip() if td.text else u\"\"\n obj[names[i]] = text\n i += 1\n objects.append(obj)\n assert(obj['inn'])\n\n print(json.dumps(objects, indent=1, ensure_ascii=False, default=lambda x: unicode(x)))\n\n failed_inns = []\n for obj in objects:\n inn = obj['inn']\n print(obj['inn'])\n files = [f for f in glob.glob('/tmp/%s_*.pdf' % inn)]\n if files:\n print('already has')\n continue\n\n s = requests.Session()\n s.get('http://egrul.nalog.ru/')\n response = s.get('http://egrul.nalog.ru/static/captcha.html?%s' % str(randint(100000, 100000000)))\n\n token = response.text\n captcha = recognize_captcha(token)\n if not captcha:\n print(u'failed to get captcha')\n failed_inns.append(obj['inn'])\n continue\n\n response = s.post('http://egrul.nalog.ru/', data={\n 'captcha': captcha,\n 'captchaToken': token,\n 'fam': u'',\n 'kind': u'ul',\n 'nam': u'',\n 'namul':'',\n 'ogrninnfl':'',\n 'ogrninnul': obj['inn'],\n 'otch': '',\n 'region': '',\n 'regionul': '',\n 'srchFl': 'ogrn',\n 'srchUl': 'ogrn'})\n\n if response.status_code != 200:\n print(\"invalid response: %s\" % str(response.status_code))\n failed_inns.append(obj['inn'])\n continue\n\n data = json.loads(response.text)\n for r in data[\"rows\"]:\n file_url = \"http://egrul.nalog.ru/download/%s\" % r[\"T\"]\n print(file_url)\n\n response = s.get(file_url, stream=True)\n tmp_file = NamedTemporaryFile(delete=False, prefix=obj['inn'] + '_', suffix='.pdf')\n response.raw.decode_content = True\n shutil.copyfileobj(response.raw, tmp_file)\n tmp_file.close()\n print(tmp_file.name)\n\n print(failed_inns)\n\n def _test_collect_strah_info_stage2(self):\n def is_quoted_text_end(t):\n if '\"' not in t:\n return False\n if not t:\n return False\n if t[0] == '\"':\n return False\n for c in t[1:]:\n if c.isalpha():\n continue\n if c == '\"':\n return True\n if c == ' ':\n return False\n return False\n\n file_path = \"/home/skraev/strah_info/*_*.pdf\"\n\n address_starts = (\n (u\"Почтовый индекс\", \"\"),\n (u\"Субъект Российской Федерации\", \"\"),\n (u\"Улица (проспект, переулок и т.д.)\", \"\"),\n (u\"Дом (владение и т.п.)\", u\"д.\"),\n (u\"Корпус (строение и т.п.)\", u\"корпус\"),\n (u\"Город (волость и т.п.)\", \"\"),\n (u\"Офис (квартира и т.п.)\", u\"кв.\"),\n (u\"Район (улус и т.п.)\", \"\"),\n )\n\n CarAssuranceBranch.query.filter().delete()\n sqldb.session.commit()\n CarAssurance.query.filter().delete()\n sqldb.session.commit()\n\n for path in glob.glob(file_path):\n temp_file_out = tempfile.NamedTemporaryFile(mode=\"w+\", suffix=\".txt\")\n output_file_name = temp_file_out.name\n temp_file_out.close()\n p = subprocess.Popen(['pdftotext', '-layout', path, output_file_name], stdin=subprocess.PIPE, stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n out, err = p.communicate()\n rc = p.returncode\n if rc is not 0:\n 
print(u\"Failed to executed pdftotext (%s, %s)\" % (out, err))\n return\n if not os.path.exists(output_file_name):\n print(u\"No file were generated\")\n return\n\n with codecs.open(output_file_name, 'r', 'utf-8') as f:\n content = f.read()\n\n inn = os.path.split(path)[-1].split('_')[0]\n status = 'initial'\n\n full_name = u\"\"\n short_name = u\"\"\n address = u\"\"\n branch_name = u\"\"\n branch_address = u\"\"\n\n branches = []\n\n for line in content.split('\\n'):\n text = line.strip()\n if not text or text.startswith(u\"Сведения с сайта ФНС России\"):\n continue\n if status == 'initial':\n if text == u'Наименование':\n status = 'full_name'\n continue\n elif status == 'full_name':\n if u'Полное наименование' in text:\n status = u'full_name_continue'\n full_name = text[text.find(u'Полное наименование') + len(u'Полное наименование'):].strip()\n elif status == u'full_name_continue':\n if u'ГРН и дата внесения в ЕГРЮЛ записи' in text:\n status = \"waiting_short_name\"\n continue\n if u\"Сокращенное наименование\" in text:\n status = u'short_name'\n short_name = text[text.find(u'Сокращенное наименование') + len(u'Сокращенное наименование'):].strip()\n continue\n if full_name.endswith('-') or (full_name.endswith('\"') and is_quoted_text_end(text)):\n full_name += text\n else:\n full_name += u\" \" + text\n while u\" \" in full_name:\n full_name = full_name.replace(u\" \", \" \")\n elif status == 'waiting_short_name':\n if u\"Сокращенное наименование\" in text:\n status = u'short_name'\n short_name = text[text.find(u'Сокращенное наименование') + len(u'Сокращенное наименование'):].strip()\n continue\n elif status == 'short_name':\n if u\"ГРН и дата внесения в ЕГРЮЛ записи\" in text:\n status = 'waiting_for_address'\n continue\n if short_name.endswith('-') or (short_name.endswith('\"') and is_quoted_text_end(text)):\n short_name += text\n else:\n short_name += u\" \" + text\n while u\" \" in short_name:\n short_name = short_name.replace(u\" \", \" \")\n elif status == 'waiting_for_address':\n if text == u\"Адрес (место нахождения)\":\n status = \"address\"\n elif status == 'address':\n if u\"ГРН и дата внесения в ЕГРЮЛ записи\" in text:\n status = 'branches_and_agencies'\n continue\n\n for ads, rep in address_starts:\n if ads in text:\n text = rep + \" \" + text[text.find(ads) + len(ads):].strip()\n break\n address += text + \" \"\n elif status == 'branches_and_agencies':\n if text == u\"Филиалы\":\n status = 'branches'\n branch_name = u\"\"\n continue\n elif status == 'branches':\n if u\"Наименование\" in text:\n branch_name = text[text.find(u\"Наименование\") + len(u\"Наименование\"):].strip()\n status = \"branch_name_continue\"\n continue\n if u\"Почтовый индекс\" in text:\n branch_address = text[text.find(u\"Почтовый индекс\") + len(u\"Почтовый индекс\"):].strip()\n status = \"branch_address\"\n elif status == 'branch_name_continue':\n if u\"Почтовый индекс\" in text:\n branch_address = text[text.find(u\"Почтовый индекс\") + len(u\"Почтовый индекс\"):].strip()\n status = \"branch_address\"\n continue\n if u\"ГРН и дата внесения в ЕГРЮЛ записи\" in text:\n status = \"waiting_branch_address\"\n continue\n branch_name += u\" \" + text\n elif status == \"waiting_branch_address\":\n if u\"Почтовый индекс\" in text:\n branch_address = text[text.find(u\"Почтовый индекс\") + len(u\"Почтовый индекс\"):].strip()\n status = \"branch_address\"\n continue\n elif status == \"branch_address\":\n if u\"ГРН и дата внесения в ЕГРЮЛ записи\" in text:\n status = 'branches'\n branches.append({\n 'name': 
branch_name,\n 'address': branch_address\n })\n continue\n\n for ads, rep in address_starts:\n if ads in text:\n text = rep + \" \" + text[text.find(ads) + len(ads):].strip()\n break\n branch_address += text + \" \"\n\n if not short_name or not address:\n continue\n\n resp = dadata_suggest('address', {\"query\": address})\n new_ca = CarAssurance(\n full_name=full_name,\n short_name=short_name,\n address=resp['suggestions'][0]['value']\n )\n sqldb.session.add(new_ca)\n sqldb.session.commit()\n\n print(u\"inn: %s\\n full_name: %s\\n short_name: %s\\n address: %s\\n\" % (inn, full_name, short_name, address))\n for branch in branches:\n# print(json.dumps(branch, indent=1, ensure_ascii=False))\n address = branch['address']\n resp = dadata_suggest('address', {\"query\": address})\n region = resp['suggestions'][0]['data']['region']\n new_ca_branch = CarAssuranceBranch(\n address=resp['suggestions'][0]['value'],\n car_assurance=new_ca,\n region=region\n )\n if branch['name']:\n new_ca_branch.title = branch['name']\n\n sqldb.session.add(new_ca_branch)\n sqldb.session.commit()\n\n a = 1" }, { "alpha_fraction": 0.5895046591758728, "alphanum_fraction": 0.5899950861930847, "avg_line_length": 27.33333396911621, "blob_id": "2959305fe7466e81566fda0c145b08fcd0f37cfd", "content_id": "6b7f0648f23f0b29f1cec459d0c7cc452cecd951", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2039, "license_type": "no_license", "max_line_length": 66, "num_lines": 72, "path": "/app/services/ip_reg/documents/ip_reg_methods.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\nfrom flask import current_app\nfrom common_utils import num_word\nfrom fw.catalogs.models import BikCatalog\nfrom services.ifns.data_model.enums import IfnsRegStatusEnum\nfrom services.ifns.data_model.models import IfnsBookingObject\n\n\ndef get_company_registration_info(batch_id=None):\n null_value = {\n \"status\": IfnsRegStatusEnum.IRS_UNKNOWN\n }\n if not batch_id:\n return null_value\n booking = IfnsBookingObject.query.filter(\n IfnsBookingObject.batch_id==batch_id,\n # todo: add ifns service id, reg_date date range\n IfnsBookingObject.reg_info.__ne__(None)\n ).first()\n if booking:\n result = {\n \"status\": booking.reg_info.get('status', 'unknown')\n }\n try:\n reg_date = booking.reg_info.get('reg_date', None)\n if isinstance(reg_date, basestring):\n reg_date = datetime.strptime(reg_date, \"%d.%m.%Y\")\n if reg_date:\n result['reg_date'] = reg_date\n except Exception:\n current_app.logger.exception(u\"Failed to get date\")\n\n try:\n ogrn = booking.reg_info.get('ogrnip', None)\n if isinstance(ogrn, basestring):\n ogrn = int(ogrn)\n if ogrn is not None:\n result['ogrnip'] = ogrn\n except Exception:\n current_app.logger.exception(u\"Failed to get ogrnip\")\n\n return result\n\n return null_value\n\n\ndef get_bank_info(bank_bik=None):\n if not bank_bik:\n return {}\n\n bank_bik = unicode(bank_bik)\n if not bank_bik.isdigit():\n return {}\n\n info = BikCatalog.query.filter_by(bik=bank_bik).scalar()\n return {\n '_id': info.id,\n 'name': info.name,\n 'okpo': info.okpo,\n 'bik': info.bik,\n 'phone': info.phone,\n 'address': info.address,\n 'kor_account': info.kor_account\n } or {}\n\n\ndef num_to_text(value):\n if not isinstance(value, int):\n return\n\n return num_word(value)" }, { "alpha_fraction": 0.6384180784225464, "alphanum_fraction": 0.6391242742538452, "avg_line_length": 34.400001525878906, "blob_id": 
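# --- editorial note (not part of the dataset records above/below) ---
# In get_bank_info (ip_reg_methods.py above), the trailing "or {}" can never
# take effect: if no BikCatalog row matches, `info` is None and `info.id`
# raises AttributeError before the dict literal (which is always truthy) is
# built. A defensive variant with the same field mapping (sketch only):
def get_bank_info_safe(bank_bik=None):
    bank_bik = unicode(bank_bik or '')
    if not bank_bik.isdigit():
        return {}
    info = BikCatalog.query.filter_by(bik=bank_bik).scalar()
    if info is None:
        return {}
    return {
        '_id': info.id, 'name': info.name, 'okpo': info.okpo,
        'bik': info.bik, 'phone': info.phone,
        'address': info.address, 'kor_account': info.kor_account,
    }
# --- end editorial note ---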
"2c11b9990492967336dc5078983a4be4c90f61c6", "content_id": "6618993197998721719eb8fc0aee13e7f4397185", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1416, "license_type": "no_license", "max_line_length": 92, "num_lines": 40, "path": "/app/deployment_migrations/migration_list/20150910_migrate_yurist_models.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\n\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.db_fields import DocumentBatchDbObject\nfrom fw.storage.file_storage import FileStorage\nfrom services.yurist.data_model.models import YuristBatchCheckObject, YuristCheckFilesObject\n\n\ndef forward(config, logger):\n logger.debug(u\"Migrate yurist models\")\n\n yurist_col = db['yurist_batch_check']\n YuristCheckFilesObject.query.delete()\n YuristBatchCheckObject.query.delete()\n sqldb.session.commit()\n for old_yc in yurist_col.find():\n batch_id = str(old_yc['batch_id'])\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n if not batch:\n continue\n\n new_yc = YuristBatchCheckObject(\n id=str(old_yc['_id']),\n batch_id=batch_id,\n status=old_yc['status'],\n create_date=old_yc.get('create_date', datetime.utcnow()),\n typos_correction=old_yc.get('typos_correction', False)\n )\n sqldb.session.add(new_yc)\n for file_descr in (old_yc.get('attached_files') or []):\n file_obj = FileStorage.get_file(str(file_descr['id']))\n if file_obj:\n attach = YuristCheckFilesObject()\n attach.files_id = file_obj.id\n new_yc.attached_files.append(attach)\n sqldb.session.commit()\n\ndef rollback(config, logger):\n pass\n" }, { "alpha_fraction": 0.7068063020706177, "alphanum_fraction": 0.7085514664649963, "avg_line_length": 27.649999618530273, "blob_id": "0a346e4621f77ade6b30feb54513888ac8615876", "content_id": "8cf7d0cfe5134d944295ad7e1b143c6b85893619", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 573, "license_type": "no_license", "max_line_length": 74, "num_lines": 20, "path": "/app/fw/storage/models.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom bson.objectid import ObjectId\n\nfrom sqlalchemy import Column, String, ForeignKey, Unicode, Integer\nfrom sqlalchemy.orm import relationship\n\nfrom fw.db.sql_base import db as sqldb\n\n\nclass FileObject(sqldb.Model):\n __tablename__ = 'files'\n\n id = Column(String, primary_key=True, default=lambda: str(ObjectId()))\n file_name = Column(Unicode)\n file_path = Column(String)\n\n _owner_id = Column(Integer, ForeignKey('authuser.id'), nullable=True)\n _owner = relationship(\"AuthUser\")\n\n _original_file = Column(String, nullable=True)\n" }, { "alpha_fraction": 0.6709486246109009, "alphanum_fraction": 0.678524374961853, "avg_line_length": 35.154762268066406, "blob_id": "0173e473481d47673a770a2b6630c48cfff8a4fe", "content_id": "31e08db0ebefd0bc1c9915ec3bce67e4866586cd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3036, "license_type": "no_license", "max_line_length": 106, "num_lines": 84, "path": "/app/services/ip_reg/__init__.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport os\nimport jinja2\nfrom fw.documents.batch_manager import BatchManager\nfrom fw.documents.doc_requisites_storage import DocRequisitiesStorage\nfrom fw.documents.enums import 
DocumentBatchTypeEnum, DocumentTypeEnum\nfrom services.ip_reg.ip_reg_manager import IpRegBatchManager\n\n\ndef _init_doc_requisities(config):\n from services.ip_reg.documents.initial_db_data_ip import load_data\n data = load_data(config)\n\n templates = (\n \"P21001_TEMPLATE\",\n \"IP_DOV_FILING_TEMPLATE\",\n \"IP_DOV_RECEIVING_TEMPLATE\",\n \"IP_DOV_FILING_RECEIVING_TEMPLATE\",\n \"IP_LETTER_INVENTORY_TEMPLATE\",\n \"IP_USN_TEMPLATE\",\n \"IP_ESHN_TEMPLATE\"\n )\n\n for template_name in templates:\n DocRequisitiesStorage.add_template(data[template_name]['doc_name'], data[template_name])\n\n schemas = (\n \"P21001_SCHEMA\",\n \"IP_REG_BATCH_SCHEMA\",\n \"IP_STATE_DUTY_SCHEMA\",\n \"IP_DOV_FILING_SCHEMA\",\n \"IP_DOV_RECEIVING_SCHEMA\",\n \"IP_DOV_FILING_RECEIVING_SCHEMA\",\n \"IP_LETTER_INVENTORY_SCHEMA\",\n \"IP_USN_SCHEMA\",\n \"IP_ESHN_SCHEMA\"\n )\n\n for schema_name in schemas:\n DocRequisitiesStorage.add_schema(data[schema_name]['doc_name'], data[schema_name])\n\n matchers = (\n \"P21001_MATCHER\",\n \"IP_USN_MATCHER\",\n \"IP_ESHN_MATCHER\"\n )\n\n for matcher_name in matchers:\n DocRequisitiesStorage.add_field_matcher(data[matcher_name]['doc_name'], data[matcher_name])\n\n bd = dict(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP,\n doc_types=[\n DocumentTypeEnum.DT_P21001,\n DocumentTypeEnum.DT_IP_STATE_DUTY,\n DocumentTypeEnum.DT_IP_DOV_FILING_DOCS,\n DocumentTypeEnum.DT_IP_DOV_RECEIVING_DOCS,\n DocumentTypeEnum.DT_IP_DOV_FILING_RECEIVING_DOCS,\n DocumentTypeEnum.DT_IP_USN_CLAIM,\n DocumentTypeEnum.DT_IP_ESHN_CLAIM,\n DocumentTypeEnum.DT_IP_LETTER_INVENTORY\n ],\n result_fields=data['IP_REG_RESULT_FIELDS'],\n deferred_render_docs=data['IP_REG_DEFER_DOCS'],\n fields=data['IP_REG_BATCH_SCHEMA'][\"fields\"]\n )\n\n DocRequisitiesStorage.add_batch_descriptor(DocumentBatchTypeEnum.DBT_NEW_IP, bd)\n\n\ndef register(app, jinja_env, class_loader, **kwargs):\n search_path = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), u\"templates\"))\n jinja_env.loader.loaders.append(jinja2.FileSystemLoader(search_path))\n\n class_loader.POSSIBLE_LOCATIONS.append('services.ip_reg.documents')\n class_loader.POSSIBLE_LOCATIONS.append('services.ip_reg.documents.enums')\n class_loader.POSSIBLE_LOCATIONS.append('services.ip_reg.documents.general_doc_fields')\n class_loader.POSSIBLE_LOCATIONS.append('services.ip_reg.documents.ip_reg_methods')\n class_loader.POSSIBLE_LOCATIONS.append('services.ip_reg.documents.ip_validators')\n\n BatchManager.register_manager(DocumentBatchTypeEnum.DBT_NEW_IP, IpRegBatchManager)\n\n _init_doc_requisities(app.config)" }, { "alpha_fraction": 0.5957537293434143, "alphanum_fraction": 0.5961783528327942, "avg_line_length": 33.632354736328125, "blob_id": "f4b1fb132839dafba6f173483519e649cd25493a", "content_id": "71874d1049cd4d73af67c6120530a19d08dcc896", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2355, "license_type": "no_license", "max_line_length": 92, "num_lines": 68, "path": "/app/deployment_migrations/migration_list/20150911_migrate_ifns_models.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.db_fields import DocumentBatchDbObject\nfrom fw.storage.file_storage import FileStorage\nfrom services.ifns.data_model.models import IfnsCatalogObject, IfnsBookingObject\nfrom services.notarius.data_model.models import NotariusObject, NotariusBookingObject\nfrom 
services.yurist.data_model.models import YuristBatchCheckObject, YuristCheckFilesObject\n\n\ndef forward(config, logger):\n logger.debug(u\"Migrate ifns models\")\n\n ifns_cat_col = db['ifns_catalog']\n IfnsCatalogObject.query.delete()\n sqldb.session.commit()\n for old_cat in ifns_cat_col.find():\n new_cat = IfnsCatalogObject(\n id=str(old_cat['_id']),\n updated=old_cat['updated'],\n code=old_cat['code'],\n comment=old_cat.get('comment'),\n tel=old_cat.get('tel', []),\n name=old_cat.get('name'),\n rof=old_cat.get('rof'),\n rou=old_cat.get('rou'),\n plat=old_cat.get('plat'),\n address=old_cat.get('address'),\n region=old_cat.get('region')\n )\n sqldb.session.add(new_cat)\n sqldb.session.commit()\n\n ifns_booking_col = db['ifns_booking']\n IfnsBookingObject.query.delete()\n sqldb.session.commit()\n for old_book in ifns_booking_col.find():\n batch_id = old_book.get('batch_id')\n if batch_id:\n batch_id = str(batch_id)\n if 'code' not in old_book or 'date' not in old_book or 'service' not in old_book:\n continue\n\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n if not batch:\n continue\n\n new_book = IfnsBookingObject(\n id=str(old_book['_id']),\n batch_id=batch_id,\n code=old_book['code'],\n date=old_book['date'],\n service=old_book['service'],\n _discarded=old_book['_discarded'],\n phone=old_book.get('phone'),\n window=old_book.get('window'),\n address=old_book.get('address'),\n service_id=old_book['service_id'],\n ifns=old_book.get('ifns'),\n how_to_get=old_book.get('how_to_get'),\n reg_info=old_book.get('reg_info')\n )\n sqldb.session.add(new_book)\n sqldb.session.commit()\n\n\ndef rollback(config, logger):\n pass\n" }, { "alpha_fraction": 0.5194239616394043, "alphanum_fraction": 0.5274878144264221, "avg_line_length": 43.0527229309082, "blob_id": "e40c2d92a2a3e3b7645dd7a49b26a4ff23cd58e4", "content_id": "40cc5e68717d9ea849ac50700a8bdc6b07ee7bb0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 52741, "license_type": "no_license", "max_line_length": 144, "num_lines": 1157, "path": "/jb_tests/test_pack/test_osago_docs.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\n\nimport json\nfrom datetime import timedelta\n\nfrom flask import current_app\nfrom fw.async_tasks.models import CeleryScheduledTask\n\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.batch_manager import BatchManager\nfrom fw.documents.db_fields import (BatchDocumentDbObject, DocumentBatchDbObject)\nfrom fw.documents.enums import DocumentBatchTypeEnum, UserDocumentStatus\nfrom fw.documents.enums import DocumentTypeEnum\nfrom fw.documents.fields.doc_fields import DocumentBatch\nfrom services.osago.documents.enums import OsagoDocTypeEnum\nfrom test_api import authorized\nfrom test_pack.base_batch_test import BaseBatchTestCase\n\nfrom fw.async_tasks.core_tasks import check_scheduled_tasks\n\nclass OsagoDocsTestCase(BaseBatchTestCase):\n\n @authorized()\n def test_keep_document_instance_on_update(self):\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_TEST_TYPE, self.user)\n new_data = {\n 'short_name': u'Тест нейм'\n }\n new_batch_data = {\n 'data': new_data,\n 'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n 'metadata': {}\n }\n\n new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)\n manager = BatchManager.init(batch)\n result = manager.update_batch(batch.id, new_batch, self.user.id, None, self.config, current_app.logger)\n 
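# --- editorial sketch (not part of the dataset records above/below) ---
# The deployment migrations in this dump (20150910_migrate_yurist_models,
# 20150911_migrate_ifns_models, 20151012_add_notarius_check_arg) share one
# module contract: forward(config, logger) applies the change and
# rollback(config, logger) undoes it (here mostly a stub). A minimal runner
# under that assumption -- hypothetical, not part of the repo:
def run_migrations(modules, config, logger):
    applied = []
    for mod in modules:
        try:
            mod.forward(config, logger)
            applied.append(mod)
        except Exception:
            logger.exception(u"migration failed; rolling back applied steps")
            for done in reversed(applied):
                done.rollback(config, logger)
            raise
# --- end editorial sketch ---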
self.assertEqual(BatchDocumentDbObject.query.count(), 1)\n doc = BatchDocumentDbObject.query.scalar()\n del result['result']['creation_date']\n self.assertEqual(result, {\n 'result': {\n 'status': u'new',\n 'all_docs': [{u'caption': u'Тестовый документ 1', u'document_type': u'test_doc_1', u'file_link': None, u'document_id': doc.id}],\n 'name': u'Тестовый батч',\n 'paid': 'false',\n 'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n 'result_fields': {u'name': u'Тест нейм'},\n 'data': {\n 'short_name': u'Тест нейм'\n },\n 'id': batch.id,\n 'metadata': {},\n 'status_data': {'finalisation_count': u'0'}\n }\n })\n\n new_data['short_name'] = u'создай второй документ'\n\n new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)\n manager = BatchManager.init(batch)\n result = manager.update_batch(batch.id, new_batch, self.user.id, None, self.config, current_app.logger)\n self.assertEqual(BatchDocumentDbObject.query.count(), 2)\n doc = BatchDocumentDbObject.query.filter_by(id=doc.id).scalar()\n self.assertIsNotNone(doc)\n doc2 = BatchDocumentDbObject.query.filter(BatchDocumentDbObject.id!=doc.id).scalar()\n self.assertIsNotNone(doc2)\n del result['result']['creation_date']\n all_docs = result['result']['all_docs']\n self.assertEqual(len(all_docs), 2)\n del result['result']['all_docs']\n\n test_docs = [\n {u'caption': u'Тестовый документ 2', u'document_type': u'test_doc_2', u'file_link': None, u'document_id': doc2.id},\n {u'caption': u'Тестовый документ 1', u'document_type': u'test_doc_1', u'file_link': None, u'document_id': doc.id},\n ]\n test_doc_id_set = set()\n for d in all_docs:\n for td in test_docs:\n if d and d == td:\n test_doc_id_set.add(d['document_id'])\n\n self.assertEqual(len(test_doc_id_set), len(test_docs))\n\n self.assertEqual(result, {\n 'result': {\n 'status': u'new',\n 'name': u'Тестовый батч',\n 'paid': 'false',\n 'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n 'result_fields': {u'name': u'создай второй документ'},\n 'data': {\n 'short_name': u'создай второй документ'\n },\n 'id': batch.id,\n 'metadata': {},\n 'status_data': {'finalisation_count': u'0'}\n }\n })\n\n new_data['short_name'] = u'не создавай второй документ'\n\n new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)\n manager = BatchManager.init(batch)\n result = manager.update_batch(batch.id, new_batch, self.user.id, None, self.config, current_app.logger)\n self.assertEqual(BatchDocumentDbObject.query.count(), 1)\n doc = BatchDocumentDbObject.query.filter_by(id=doc.id).scalar()\n self.assertIsNotNone(doc)\n del result['result']['creation_date']\n all_docs = result['result']['all_docs']\n self.assertEqual(len(all_docs), 1)\n del result['result']['all_docs']\n\n test_docs = [\n {u'caption': u'Тестовый документ 1', u'document_type': u'test_doc_1', u'file_link': None, u'document_id': doc.id},\n ]\n test_doc_id_set = set()\n for d in all_docs:\n for td in test_docs:\n if d and d == td:\n test_doc_id_set.add(d['document_id'])\n\n self.assertEqual(len(test_doc_id_set), len(test_docs))\n\n self.assertEqual(result, {\n 'result': {\n 'status': u'new',\n 'name': u'Тестовый батч',\n 'paid': 'false',\n 'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n 'result_fields': {u'name': u'не создавай второй документ'},\n 'data': {\n 'short_name': u'не создавай второй документ'\n },\n 'id': batch.id,\n 'metadata': {},\n 'status_data': {'finalisation_count': u'0'}\n }\n })\n\n @authorized()\n def test_keep_document_instance_on_batch_render(self):\n batch = 
self.create_batch(DocumentBatchTypeEnum.DBT_TEST_TYPE, self.user)\n new_data = {\n 'short_name': u'Тест нейм'\n }\n new_batch_data = {\n 'data': new_data,\n 'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n 'metadata': {}\n }\n\n new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)\n manager = BatchManager.init(batch)\n result = manager.update_batch(batch.id, new_batch, self.user.id, None, self.config, current_app.logger)\n self.assertEqual(BatchDocumentDbObject.query.count(), 1)\n doc = BatchDocumentDbObject.query.scalar()\n del result['result']['creation_date']\n self.assertEqual(result, {\n 'result': {\n 'status': u'new',\n 'all_docs': [{u'caption': u'Тестовый документ 1', u'document_type': u'test_doc_1', u'file_link': None, u'document_id': doc.id}],\n 'name': u'Тестовый батч',\n 'paid': 'false',\n 'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n 'result_fields': {u'name': u'Тест нейм'},\n 'data': {\n 'short_name': u'Тест нейм'\n },\n 'id': batch.id,\n 'metadata': {},\n 'status_data': {'finalisation_count': u'0'}\n }\n })\n\n new_data['short_name'] = u'создай второй документ'\n\n new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)\n manager = BatchManager.init(batch)\n result = manager.update_batch(batch.id, new_batch, self.user.id, None, self.config, current_app.logger)\n\n doc_ids = set()\n for d in BatchDocumentDbObject.query.filter_by():\n doc_ids.add(d.id)\n\n result = self.test_client.post('/batch/finalise/', data={'batch_id': batch.id})\n self.assertEqual(result.status_code, 200)\n self.assertEqual(json.loads(result.data), {'result': True})\n\n self.assertEqual(BatchDocumentDbObject.query.count(), 2)\n new_doc_ids = set()\n for d in BatchDocumentDbObject.query.filter_by():\n new_doc_ids.add(d.id)\n\n self.assertEqual(doc_ids, new_doc_ids)\n\n @authorized()\n def test_keep_document_instance_on_document_render(self):\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_TEST_TYPE, self.user)\n new_data = {\n 'short_name': u'Тест нейм',\n 'text_field': u'Начальное значение'\n }\n new_batch_data = {\n 'data': new_data,\n 'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n 'metadata': {}\n }\n\n new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)\n manager = BatchManager.init(batch)\n result = manager.update_batch(batch.id, new_batch, self.user.id, None, self.config, current_app.logger)\n self.assertEqual(BatchDocumentDbObject.query.count(), 1)\n doc = BatchDocumentDbObject.query.scalar()\n del result['result']['creation_date']\n self.assertEqual(result, {\n 'result': {\n 'status': u'new',\n 'all_docs': [{u'caption': u'Тестовый документ 1', u'document_type': u'test_doc_1', u'file_link': None, u'document_id': doc.id}],\n 'name': u'Тестовый батч',\n 'paid': 'false',\n 'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n 'result_fields': {u'name': u'Тест нейм'},\n 'data': {\n 'short_name': u'Тест нейм',\n 'text_field': u'Начальное значение'\n },\n 'id': batch.id,\n 'metadata': {},\n 'status_data': {'finalisation_count': u'0'}\n }\n })\n\n new_data['short_name'] = u'создай второй документ'\n\n new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)\n manager = BatchManager.init(batch)\n result = manager.update_batch(batch.id, new_batch, self.user.id, None, self.config, current_app.logger)\n\n doc_ids = set()\n for d in BatchDocumentDbObject.query.filter_by():\n doc_ids.add(d.id)\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': batch.id,\n 'document_type': 
json.dumps([DocumentTypeEnum.DT_TEST_DOC_1])\n })\n self.assertEqual(result.status_code, 200)\n self.assertEqual(json.loads(result.data), {'result': True})\n\n self.assertEqual(BatchDocumentDbObject.query.count(), 2)\n new_doc_ids = set()\n for d in BatchDocumentDbObject.query.filter_by():\n new_doc_ids.add(d.id)\n\n self.assertEqual(doc_ids, new_doc_ids)\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': batch.id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_TEST_DOC_2])\n })\n self.assertEqual(result.status_code, 200)\n self.assertEqual(json.loads(result.data), {'result': True})\n\n self.assertEqual(BatchDocumentDbObject.query.count(), 2)\n new_doc_ids = set()\n for d in BatchDocumentDbObject.query.filter_by():\n new_doc_ids.add(d.id)\n\n self.assertEqual(doc_ids, new_doc_ids)\n\n self.assertEqual(BatchDocumentDbObject.query.filter_by(status=UserDocumentStatus.DS_RENDERED).count(), 2)\n\n new_data['text_field'] = u\"Новое значение\"\n\n new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)\n manager = BatchManager.init(batch)\n manager.update_batch(batch.id, new_batch, self.user.id, None, self.config, current_app.logger)\n self.assertEqual(BatchDocumentDbObject.query.count(), 2)\n\n new_doc_ids = set()\n for d in BatchDocumentDbObject.query.filter_by():\n new_doc_ids.add(d.id)\n\n self.assertEqual(doc_ids, new_doc_ids)\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': batch.id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_TEST_DOC_2])\n })\n self.assertEqual(result.status_code, 200)\n self.assertEqual(json.loads(result.data), {'result': True})\n\n @authorized()\n def test_filter_errors_in_document(self):\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_TEST_TYPE, self.user)\n new_data = {\n 'short_name': u'Тест нейм' * 30\n }\n new_batch_data = {\n 'data': new_data,\n 'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n 'metadata': {}\n }\n\n new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)\n manager = BatchManager.init(batch)\n result = manager.update_batch(batch.id, new_batch, self.user.id, None, self.config, current_app.logger)\n self.assertEqual(BatchDocumentDbObject.query.count(), 1)\n doc = BatchDocumentDbObject.query.scalar()\n del result['result']['creation_date']\n self.assertEqual(result, {\n 'result': {\n 'status': u'new',\n 'all_docs': [{\n u'caption': u'Тестовый документ 1',\n u'document_type': u'test_doc_1',\n u'file_link': None,\n u'document_id': doc.id\n }],\n 'name': u'Тестовый батч',\n 'paid': 'false',\n 'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n 'result_fields': {u'name': u'Тест нейм' * 30},\n 'error_info': {'error_ext': [{'error_code': 5,\n 'field': u'short_name'}]},\n 'data': {\n 'short_name': u'Тест нейм' * 30\n },\n 'id': batch.id,\n 'metadata': {},\n 'status_data': {'finalisation_count': u'0'}\n }\n })\n\n new_data['short_name'] = u'создай второй документ'\n new_data['text_field'] = u'err'\n\n new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)\n manager = BatchManager.init(batch)\n result = manager.update_batch(batch.id, new_batch, self.user.id, None, self.config, current_app.logger)\n self.assertEqual(BatchDocumentDbObject.query.count(), 2)\n doc = BatchDocumentDbObject.query.filter_by(id=doc.id).scalar()\n self.assertIsNotNone(doc)\n doc2 = BatchDocumentDbObject.query.filter(BatchDocumentDbObject.id!=doc.id).scalar()\n self.assertIsNotNone(doc2)\n del result['result']['creation_date']\n all_docs = 
result['result']['all_docs']\n self.assertEqual(len(all_docs), 2)\n del result['result']['all_docs']\n\n test_docs = [\n {u'caption': u'Тестовый документ 2', u'document_type': u'test_doc_2', u'file_link': None, u'document_id': doc2.id},\n {u'caption': u'Тестовый документ 1', u'document_type': u'test_doc_1', u'file_link': None, u'document_id': doc.id},\n ]\n test_doc_id_set = set()\n for d in all_docs:\n for td in test_docs:\n if d and d == td:\n test_doc_id_set.add(d['document_id'])\n\n self.assertEqual(len(test_doc_id_set), len(test_docs))\n\n self.assertEqual(result, {\n 'result': {\n 'status': u'new',\n 'name': u'Тестовый батч',\n 'paid': 'false',\n 'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n 'result_fields': {u'name': u'создай второй документ'},\n 'data': {\n 'short_name': u'создай второй документ',\n 'text_field': 'err'\n },\n 'id': batch.id,\n 'metadata': {},\n 'status_data': {'finalisation_count': u'0'}\n }\n })\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': batch.id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_TEST_DOC_2])\n })\n self.assertEqual(result.status_code, 200)\n self.assertEqual(json.loads(result.data), {'result': True})\n\n doc2 = BatchDocumentDbObject.query.filter_by(document_type = DocumentTypeEnum.DT_TEST_DOC_2).scalar()\n self.assertIsNotNone(doc2)\n self.assertEqual(doc2.status, UserDocumentStatus.DS_RENDERING_FAILED)\n\n batch = doc2.batch\n self.assertEqual(batch.error_info, {u'error_ext': [{u'error_code': 5, u'field': u'text_field'}]})\n\n result = manager.update_batch(batch.id, new_batch, self.user.id, None, self.config, current_app.logger)\n self.assertEqual(BatchDocumentDbObject.query.count(), 2)\n doc = BatchDocumentDbObject.query.filter_by(id=doc.id).scalar()\n self.assertIsNotNone(doc)\n doc2 = BatchDocumentDbObject.query.filter(BatchDocumentDbObject.id!=doc.id).scalar()\n self.assertIsNotNone(doc2)\n del result['result']['creation_date']\n all_docs = result['result']['all_docs']\n self.assertEqual(len(all_docs), 2)\n del result['result']['all_docs']\n\n test_docs = [\n {u'caption': u'Тестовый документ 2', u'document_type': u'test_doc_2', u'file_link': None, u'document_id': doc2.id},\n {u'caption': u'Тестовый документ 1', u'document_type': u'test_doc_1', u'file_link': None, u'document_id': doc.id},\n ]\n test_doc_id_set = set()\n for d in all_docs:\n for td in test_docs:\n if d and d == td:\n test_doc_id_set.add(d['document_id'])\n\n self.assertEqual(len(test_doc_id_set), len(test_docs))\n\n self.assertEqual(result, {\n 'result': {\n 'status': u'finalised2',\n 'name': u'Тестовый батч',\n 'paid': 'false',\n 'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n 'result_fields': {u'name': u'создай второй документ'},\n 'data': {\n 'short_name': u'создай второй документ',\n 'text_field': 'err'\n },\n 'id': batch.id,\n 'metadata': {},\n 'error_info': {'error_ext': [{'error_code': 5,\n 'field': u'text_field'}]},\n 'status_data': {'finalisation_count': u'0'}\n }\n })\n\n @authorized()\n def test_transit_on_data(self):\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_TEST_TYPE, self.user)\n new_data = {\n 'short_name': u'Тест нейм'\n }\n new_batch_data = {\n 'data': new_data,\n 'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n 'metadata': {}\n }\n\n new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': json.dumps(new_batch.get_api_structure())\n })\n self.assertEqual(result.status_code, 200)\n 
doc = BatchDocumentDbObject.query.scalar()\n d = json.loads(result.data)\n del d['result']['creation_date']\n self.assertEqual(d, {u'result': {\n u'all_docs': [{\n u'caption': u'Тестовый документ 1',\n u'document_id': doc.id,\n u'document_type': u'test_doc_1',\n u'file_link': None\n }],\n u'batch_type': u'_test',\n u'data': {u'short_name': u'Тест нейм'},\n u'id': batch.id,\n u'metadata': {},\n u'name': u'Тестовый батч',\n u'paid': u'false',\n u'result_fields': {u'name': u'Тест нейм'},\n u'status': u'new',\n u'status_data': {'finalisation_count': u'0'}\n }\n })\n\n new_data['short_name'] = u'едитыд'\n new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': json.dumps(new_batch.get_api_structure())\n })\n self.assertEqual(result.status_code, 200)\n doc = BatchDocumentDbObject.query.scalar()\n d = json.loads(result.data)\n del d['result']['creation_date']\n self.assertEqual(d, {u'result': {\n u'all_docs': [{\n u'caption': u'Тестовый документ 1',\n u'document_id': doc.id,\n u'document_type': u'test_doc_1',\n u'file_link': None\n }],\n u'batch_type': u'_test',\n u'data': {u'short_name': u'едитыд'},\n u'id': batch.id,\n u'metadata': {},\n u'name': u'Тестовый батч',\n u'paid': u'false',\n u'result_fields': {u'name': u'едитыд'},\n u'status': u'edited',\n u'status_data': {'finalisation_count': u'0'}\n }\n })\n\n DocumentBatchDbObject.query.filter_by(id=doc.batch_id).update({'status': 'finalised'})\n\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': json.dumps(new_batch.get_api_structure())\n })\n self.assertEqual(result.status_code, 200)\n doc = BatchDocumentDbObject.query.scalar()\n d = json.loads(result.data)\n del d['result']['creation_date']\n self.assertEqual(d, {u'result': {\n u'all_docs': [{\n u'caption': u'Тестовый документ 1',\n u'document_id': doc.id,\n u'document_type': u'test_doc_1',\n u'file_link': None\n }],\n u'batch_type': u'_test',\n u'data': {u'short_name': u'едитыд'},\n u'id': batch.id,\n u'metadata': {},\n u'name': u'Тестовый батч',\n u'paid': u'false',\n u'result_fields': {u'name': u'едитыд'},\n u'status': u'edited',\n u'status_data': {'finalisation_count': u'0'}\n }\n })\n\n @authorized()\n def test_transit_on_data_and_status(self):\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_TEST_TYPE, self.user)\n new_data = {\n 'short_name': u'Тест нейм'\n }\n new_batch_data = {\n 'data': new_data,\n 'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n 'metadata': {}\n }\n\n new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': json.dumps(new_batch.get_api_structure())\n })\n self.assertEqual(result.status_code, 200)\n doc = BatchDocumentDbObject.query.scalar()\n d = json.loads(result.data)\n del d['result']['creation_date']\n self.assertEqual(d, {u'result': {\n u'all_docs': [{\n u'caption': u'Тестовый документ 1',\n u'document_id': doc.id,\n u'document_type': u'test_doc_1',\n u'file_link': None\n }],\n u'batch_type': u'_test',\n u'data': {u'short_name': u'Тест нейм'},\n u'id': batch.id,\n u'metadata': {},\n u'name': u'Тестовый батч',\n u'paid': u'false',\n u'result_fields': {u'name': u'Тест нейм'},\n u'status': u'new',\n u'status_data': {'finalisation_count': u'0'}\n }\n })\n\n new_data['short_name'] = u'финализируйся'\n new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)\n result = 
self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': json.dumps(new_batch.get_api_structure())\n })\n self.assertEqual(result.status_code, 200)\n doc = BatchDocumentDbObject.query.scalar()\n d = json.loads(result.data)\n del d['result']['creation_date']\n self.assertEqual(d, {u'result': {\n u'all_docs': [{\n u'caption': u'Тестовый документ 1',\n u'document_id': doc.id,\n u'document_type': u'test_doc_1',\n u'file_link': None\n }],\n u'batch_type': u'_test',\n u'data': {u'short_name': u'финализируйся'},\n u'id': batch.id,\n u'metadata': {},\n u'name': u'Тестовый батч',\n u'paid': u'false',\n u'result_fields': {u'name': u'финализируйся'},\n u'status': u'finalised',\n u'status_data': {'finalisation_count': u'0'}\n }\n })\n\n DocumentBatchDbObject.query.filter_by(id=doc.batch_id).update({'status': 'edited'})\n\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': json.dumps(new_batch.get_api_structure())\n })\n self.assertEqual(result.status_code, 200)\n doc = BatchDocumentDbObject.query.scalar()\n d = json.loads(result.data)\n del d['result']['creation_date']\n self.assertEqual(d, {u'result': {\n u'all_docs': [{\n u'caption': u'Тестовый документ 1',\n u'document_id': doc.id,\n u'document_type': u'test_doc_1',\n u'file_link': None\n }],\n u'batch_type': u'_test',\n u'data': {u'short_name': u'финализируйся'},\n u'id': batch.id,\n u'metadata': {},\n u'name': u'Тестовый батч',\n u'paid': u'false',\n u'result_fields': {u'name': u'финализируйся'},\n u'status': u'edited',\n u'status_data': {'finalisation_count': u'0'}\n }\n })\n\n @authorized()\n def test_transit_on_event(self):\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_TEST_TYPE, self.user)\n new_data = {\n 'short_name': u'Тест нейм'\n }\n new_batch_data = {\n 'data': new_data,\n 'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n 'metadata': {}\n }\n\n new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': json.dumps(new_batch.get_api_structure())\n })\n self.assertEqual(result.status_code, 200)\n doc = BatchDocumentDbObject.query.scalar()\n d = json.loads(result.data)\n del d['result']['creation_date']\n self.assertEqual(d, {u'result': {\n u'all_docs': [{\n u'caption': u'Тестовый документ 1',\n u'document_id': doc.id,\n u'document_type': u'test_doc_1',\n u'file_link': None\n }],\n u'batch_type': u'_test',\n u'data': {u'short_name': u'Тест нейм'},\n u'id': batch.id,\n u'metadata': {},\n u'name': u'Тестовый батч',\n u'paid': u'false',\n u'result_fields': {u'name': u'Тест нейм'},\n u'status': u'new',\n u'status_data': {'finalisation_count': u'0'}\n }\n })\n\n BatchManager.handle_event(batch.id, 'simple_event', {}, current_app.logger, config=self.config)\n\n doc = DocumentBatchDbObject.query.scalar()\n self.assertEqual(doc.status, 'after_simple_event')\n\n @authorized()\n def test_transit_on_data_and_event(self):\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_TEST_TYPE, self.user)\n new_data = {\n 'short_name': u'Тест нейм'\n }\n new_batch_data = {\n 'data': new_data,\n 'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n 'metadata': {}\n }\n\n new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': json.dumps(new_batch.get_api_structure())\n })\n self.assertEqual(result.status_code, 200)\n doc = BatchDocumentDbObject.query.scalar()\n d = 
json.loads(result.data)\n del d['result']['creation_date']\n self.assertEqual(d, {u'result': {\n u'all_docs': [{\n u'caption': u'Тестовый документ 1',\n u'document_id': doc.id,\n u'document_type': u'test_doc_1',\n u'file_link': None\n }],\n u'batch_type': u'_test',\n u'data': {u'short_name': u'Тест нейм'},\n u'id': batch.id,\n u'metadata': {},\n u'name': u'Тестовый батч',\n u'paid': u'false',\n u'result_fields': {u'name': u'Тест нейм'},\n u'status': u'new',\n u'status_data': {'finalisation_count': u'0'}\n }\n })\n\n new_data['short_name'] = u'по событию'\n new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': json.dumps(new_batch.get_api_structure())\n })\n self.assertEqual(result.status_code, 200)\n doc = BatchDocumentDbObject.query.scalar()\n d = json.loads(result.data)\n del d['result']['creation_date']\n self.assertEqual(d, {u'result': {\n u'all_docs': [{\n u'caption': u'Тестовый документ 1',\n u'document_id': doc.id,\n u'document_type': u'test_doc_1',\n u'file_link': None\n }],\n u'batch_type': u'_test',\n u'data': {u'short_name': u'по событию'},\n u'id': batch.id,\n u'metadata': {},\n u'name': u'Тестовый батч',\n u'paid': u'false',\n u'result_fields': {u'name': u'по событию'},\n u'status': u'new',\n u'status_data': {'finalisation_count': u'0'}\n }\n })\n\n result = self.test_client.post('/batch/go_ahead/', data={'batch_id': batch.id})\n self.assertEqual(result.status_code, 200)\n doc = DocumentBatchDbObject.query.scalar()\n self.assertEqual(doc.status, 'after_event')\n\n @authorized()\n def test_send_email_on_transition(self):\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_TEST_TYPE, self.user)\n new_data = {\n 'short_name': u'Тест нейм'\n }\n new_batch_data = {\n 'data': new_data,\n 'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n 'metadata': {}\n }\n\n new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': json.dumps(new_batch.get_api_structure())\n })\n self.assertEqual(result.status_code, 200)\n\n self.assertEqual(len(self.mailer.mails), 0)\n BatchManager.handle_event(batch.id, 'simple_event', None, logger=current_app.logger, config=self.config)\n\n self.assertEqual(len(self.mailer.mails), 1)\n\n @authorized()\n def test_fields_modification_restriction(self):\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_TEST_TYPE, self.user)\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': json.dumps({\n \"data\": {\n \"short_name\": u\"короткое\",\n \"restricted_field\": u\"начальное значение\"\n }\n })\n })\n self.assertEqual(result.status_code, 200)\n batch_db = DocumentBatchDbObject.query.filter_by(id=batch.id).scalar()\n self.assertEqual(batch_db.data, {\n \"short_name\": u\"короткое\",\n \"restricted_field\": u\"начальное значение\"\n })\n self.assertEqual(batch_db.status, \"new\")\n\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': json.dumps({\n \"data\": {\n \"short_name\": u\"едитыд\",\n \"some_text_field\": u\"шо\",\n \"restricted_field\": u\"значение 2\"\n }\n })\n })\n self.assertEqual(result.status_code, 200)\n batch_db = DocumentBatchDbObject.query.filter_by(id=batch.id).scalar()\n self.assertEqual(batch_db.data, {\n \"short_name\": u\"едитыд\",\n \"restricted_field\": u\"значение 2\",\n \"some_text_field\": u\"шо\"\n })\n 
self.assertEqual(batch_db.error_info, {\n 'error_ext': [{'field': \"some_text_field\", \"error_code\": 5}]\n })\n self.assertEqual(batch_db.status, \"edited\")\n\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': json.dumps({\n \"data\": {\n \"short_name\": u\"едитыд\",\n \"restricted_field\": u\"значение 3\",\n \"some_text_field\": u\"шо11111111111\",\n }\n })\n })\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)['result']\n self.assertIn('error_info', result_data)\n batch_db = DocumentBatchDbObject.query.filter_by(id=batch.id).scalar()\n self.assertEqual(batch_db.error_info, {\n 'error_ext': [\n {'field': \"some_text_field\", \"error_code\": 5},\n {'field': 'restricted_field', 'error_code': 1000}\n ]\n })\n self.assertEqual(batch_db.data, {\n \"short_name\": u\"едитыд\",\n \"restricted_field\": u\"значение 2\",\n \"some_text_field\": u\"шо\",\n })\n self.assertEqual(batch_db.status, \"edited\")\n\n @authorized()\n def test_transit_on_docs_group_generated(self):\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_TEST_TYPE, self.user)\n new_data = {\n 'short_name': u'создай второй документ',\n 'text_field': u'текстфилд'\n }\n new_batch_data = {\n 'data': new_data,\n 'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n 'metadata': {}\n }\n\n new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)\n manager = BatchManager.init(batch)\n result = manager.update_batch(batch.id, new_batch, self.user.id, None, self.config, current_app.logger)\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': batch.id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_TEST_DOC_1, DocumentTypeEnum.DT_TEST_DOC_2])\n })\n self.assertEqual(result.status_code, 200)\n self.assertEqual(len(self.events), 6)\n sqldb.session.commit()\n batch_db = DocumentBatchDbObject.query.filter_by(id=batch.id).scalar()\n doc1 = BatchDocumentDbObject.query.filter_by(batch_id=batch.id).order_by(BatchDocumentDbObject.creation_date.asc()).first()\n doc2 = BatchDocumentDbObject.query.filter(BatchDocumentDbObject.batch_id==batch.id, BatchDocumentDbObject.id != doc1.id).first()\n self.assertEqual(batch_db.status, 'finalised')\n self.assertEqual(self.events[0]['batch'].id, batch_db.id)\n self.assertEqual(self.events[1]['batch'].id, batch_db.id)\n self.assertEqual(self.events[2]['batch'].id, batch_db.id)\n del self.events[0]['batch']\n del self.events[1]['batch']\n del self.events[2]['batch']\n del self.events[3]['batch']\n del self.events[4]['batch']\n del self.events[5]['batch']\n self.assertEqual(self.events[0], {'event': 'batch_manager.on_field_changed',\n 'event_data': {'field_name': 'short_name',\n 'new_value': u'создай второй документ',\n 'old_value': None}})\n self.assertEqual(self.events[1], {'event': 'batch_manager.on_field_changed',\n 'event_data': {'field_name': 'text_field',\n 'new_value': u'текстфилд',\n 'old_value': None}})\n self.assertEqual(self.events[2], {'event': 'batch_manager.on_fieldset_changed',\n 'event_data': {'fields': [{'field_name': 'short_name',\n 'new_value': u'создай второй документ',\n 'old_value': None}, {'field_name': 'text_field',\n 'new_value': u'текстфилд',\n 'old_value': None}]}})\n self.assertEqual(self.events[3], {'event': 'doc_render_success', 'event_data': {'doc_id': doc1.id}})\n self.assertEqual(self.events[4], {'event': 'doc_render_success', 'event_data': {'doc_id': doc2.id}})\n self.assertEqual(self.events[5], {'event': 'doc_group_render_success',\n 'event_data': 
{'batch_id': batch_db.id,\n 'doc_types': ['test_doc_1', 'test_doc_2']}})\n\n @authorized()\n def test_transit_on_docs_group_generation_fail(self):\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_TEST_TYPE, self.user)\n new_data = {\n 'short_name': u'создай второй документ',\n 'text_field': u'текстфилд'\n }\n new_batch_data = {\n 'data': new_data,\n 'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n 'metadata': {}\n }\n\n new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)\n manager = BatchManager.init(batch)\n result = manager.update_batch(batch.id, new_batch, self.user.id, None, self.config, current_app.logger)\n\n DocumentBatchDbObject.query.filter_by(id=batch.id).update({\n 'data': {\n 'short_name': u'создай второй документ',\n 'text_field': u'1'\n }\n })\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': batch.id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_TEST_DOC_1, DocumentTypeEnum.DT_TEST_DOC_2])\n })\n self.assertEqual(result.status_code, 200)\n self.assertEqual(len(self.events), 6)\n sqldb.session.commit()\n batch_db = DocumentBatchDbObject.query.filter_by(id=batch.id).scalar()\n doc1 = BatchDocumentDbObject.query.filter_by(batch_id=batch.id).order_by(BatchDocumentDbObject.creation_date.asc()).first()\n doc2 = BatchDocumentDbObject.query.filter(BatchDocumentDbObject.batch_id==batch.id, BatchDocumentDbObject.id != doc1.id).first()\n self.assertEqual(batch_db.status, 'finalised1')\n self.assertEqual(self.events[0]['batch'].id, batch_db.id)\n self.assertEqual(self.events[1]['batch'].id, batch_db.id)\n self.assertEqual(self.events[2]['batch'].id, batch_db.id)\n del self.events[0]['batch']\n del self.events[1]['batch']\n del self.events[2]['batch']\n del self.events[3]['batch']\n del self.events[4]['batch']\n del self.events[5]['batch']\n self.assertEqual(self.events[0], {'event': 'batch_manager.on_field_changed',\n 'event_data': {'field_name': 'short_name',\n 'new_value': u'создай второй документ',\n 'old_value': None}})\n self.assertEqual(self.events[1], {'event': 'batch_manager.on_field_changed',\n 'event_data': {'field_name': 'text_field',\n 'new_value': u'текстфилд',\n 'old_value': None}})\n self.assertEqual(self.events[2], {'event': 'batch_manager.on_fieldset_changed',\n 'event_data': {'fields': [{'field_name': 'short_name',\n 'new_value': u'создай второй документ',\n 'old_value': None}, {'field_name': 'text_field',\n 'new_value': u'текстфилд',\n 'old_value': None}]}})\n self.assertEqual(self.events[3], {'event': 'doc_render_success', 'event_data': {'doc_id': doc1.id}})\n self.assertEqual(self.events[4], {'event': 'doc_render_fail', 'event_data': {'doc_id': doc2.id}})\n self.assertEqual(self.events[5], {'event': 'doc_group_render_fail',\n 'event_data': {'batch_id': batch_db.id,\n 'doc_types': ['test_doc_1', 'test_doc_2']}})\n\n @authorized()\n def test_transit_on_doc_generated(self):\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_TEST_TYPE, self.user)\n new_data = {\n 'short_name': u'создай второй документ',\n 'text_field': u'текстфилд'\n }\n new_batch_data = {\n 'data': new_data,\n 'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n 'metadata': {}\n }\n\n new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)\n manager = BatchManager.init(batch)\n result = manager.update_batch(batch.id, new_batch, self.user.id, None, self.config, current_app.logger)\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': batch.id,\n 'document_type': 
json.dumps([DocumentTypeEnum.DT_TEST_DOC_1])\n })\n self.assertEqual(result.status_code, 200)\n self.assertEqual(len(self.events), 4)\n batch_db = DocumentBatchDbObject.query.filter_by(id=batch.id).scalar()\n doc1 = BatchDocumentDbObject.query.filter_by(batch_id=batch.id).order_by(BatchDocumentDbObject.creation_date.asc()).first()\n self.assertEqual(batch_db.status, 'finalised1')\n self.assertEqual(self.events[0]['batch'].id, batch_db.id)\n del self.events[0]['batch']\n del self.events[1]['batch']\n del self.events[2]['batch']\n del self.events[3]['batch']\n self.assertEqual(self.events[0], {'event': 'batch_manager.on_field_changed',\n 'event_data': {'field_name': 'short_name',\n 'new_value': u'создай второй документ',\n 'old_value': None}})\n self.assertEqual(self.events[1], {'event': 'batch_manager.on_field_changed',\n 'event_data': {'field_name': 'text_field',\n 'new_value': u'текстфилд',\n 'old_value': None}})\n self.assertEqual(self.events[2], {'event': 'batch_manager.on_fieldset_changed',\n 'event_data': {'fields': [{'field_name': 'short_name',\n 'new_value': u'создай второй документ',\n 'old_value': None}, {'field_name': 'text_field',\n 'new_value': u'текстфилд',\n 'old_value': None}]}})\n self.assertEqual(self.events[3], {'event': 'doc_render_success', 'event_data': {'doc_id': doc1.id}})\n\n @authorized()\n def test_transit_on_doc_generation_fail(self):\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_TEST_TYPE, self.user)\n new_data = {\n 'short_name': u'создай второй документ',\n 'text_field': u'текстфилд'\n }\n new_batch_data = {\n 'data': new_data,\n 'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n 'metadata': {}\n }\n\n new_batch = DocumentBatch.parse_raw_value(new_batch_data, api_data=False)\n manager = BatchManager.init(batch)\n result = manager.update_batch(batch.id, new_batch, self.user.id, None, self.config, current_app.logger)\n\n DocumentBatchDbObject.query.filter_by(id=batch.id).update({\n 'data': {\n 'short_name': u'создай второй документ',\n 'text_field': u'1'\n }\n })\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': batch.id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_TEST_DOC_2])\n })\n self.assertEqual(result.status_code, 200)\n self.assertEqual(len(self.events), 4)\n batch_db = DocumentBatchDbObject.query.filter_by(id=batch.id).scalar()\n doc1 = BatchDocumentDbObject.query.filter_by(batch_id=batch.id).order_by(BatchDocumentDbObject.creation_date.desc()).first()\n self.assertEqual(batch_db.status, 'finalised2')\n self.assertEqual(self.events[0]['batch'].id, batch_db.id)\n del self.events[0]['batch']\n del self.events[1]['batch']\n del self.events[2]['batch']\n del self.events[3]['batch']\n self.assertEqual(self.events[0], {'event': 'batch_manager.on_field_changed',\n 'event_data': {'field_name': 'short_name',\n 'new_value': u'создай второй документ',\n 'old_value': None}})\n self.assertEqual(self.events[1], {'event': 'batch_manager.on_field_changed',\n 'event_data': {'field_name': 'text_field',\n 'new_value': u'текстфилд',\n 'old_value': None}})\n self.assertEqual(self.events[2], {'event': 'batch_manager.on_fieldset_changed',\n 'event_data': {'fields': [{'field_name': 'short_name',\n 'new_value': u'создай второй документ',\n 'old_value': None}, {'field_name': 'text_field',\n 'new_value': u'текстфилд',\n 'old_value': None}]}})\n self.assertEqual(self.events[3], {'event': 'doc_render_fail', 'event_data': {'doc_id': doc1.id}})\n\n @authorized()\n def test_send_email_on_docs_ready(self):\n with 
self.app.app_context():\n self.app.db['bik_catalog'].insert({\n 'bik': '040173745',\n 'address': u'Адрес',\n 'name': u'Просто Банк'\n })\n\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_OSAGO, self.user, status=\"pretension\")\n victim_car_owner = self.create_person(self.user, batch.id, name=u\"ЖЖ\", surname=u\"ЖЖ\", patronymic=u\"ЖЖ\")\n guilty_car_owner = self.create_person(self.user, batch.id)\n responsible_person = self.create_person(self.user, batch.id, name=u\"Арина\",\n surname=u\"Поганкина\", patronymic=u\"Мстиславовна\", age=22)\n\n ddd = {\n \"data\": {\n 'crash_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'policy_called': True,\n 'all_have_osago': True,\n 'own_insurance_company': True,\n 'have_osago': 'both',\n 'problem_type': 'refusal',\n 'refusal_reason': 'wrong_docs',\n 'notice_has_mistakes': False,\n 'got_cash': False,\n 'victim_owner': victim_car_owner.id + \"_person\",\n 'owner_as_victim_driver': True,\n 'victim_car_brand': u\"Форд Фокус в кредит\",\n 'victim_car_number': u\"А000ОО98\",\n 'guilty_owner': guilty_car_owner.id + \"_person\",\n 'owner_as_guilty_driver': True,\n 'guilty_car_brand': u'Рено',\n 'guilty_car_number': u'В000ВВ50',\n 'other_victims': None,\n 'insurance_company_region': u'Санкт-Петербург',\n 'policy_series': u'ААА',\n 'policy_number': '0123456789',\n 'other_insurance': True,\n 'insurance_name': u\"РоСгосСтраХ\",\n 'insurance_id': None,\n 'other_date': True,\n 'policy_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'first_claim_date': (datetime.utcnow() - timedelta(days=90)).strftime(\"%Y-%m-%d\"),\n 'independent_expertise_number': u'01234567890123456789',\n 'independent_expertise_sum': '222000.50',\n 'independent_expertise_cost': 1000,\n 'compensation_sum': 1000.9,\n 'add_person_to_claim': True,\n 'docs_got': [OsagoDocTypeEnum.ODT_INQUIRE_CRASH,\n OsagoDocTypeEnum.ODT_NOTICE_CRASH,\n OsagoDocTypeEnum.ODT_ACT_INSURANCE_CASE],\n 'insurance_case_number': '01234567890123456789',\n 'submission_way': 'responsible_person',\n 'submission_branch_id': '',\n 'use_other_submission_address': True,\n 'submission_address': u'сабмишн адрес',\n 'obtain_way': 'responsible_person',\n 'responsible_person': responsible_person.id + '_person',\n 'court_include': True,\n 'obtain_address_type': 'other_address',\n 'obtain_address': 'аптейн адрес',\n 'bik_account': '040173745',\n 'account_number': '01234567890123456789',\n 'police_case': True\n }\n }\n batch_json = json.dumps(ddd)\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n # CeleryScheduledTask.query.update({'eta': datetime.utcnow()})\n # sqldb.session.commit()\n # check_scheduled_tasks.delay()\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n result = self.test_client.post('/batch/go_ahead/', data={\n 'batch_id': batch.id,\n })\n self.assertEqual(result.status_code, 200)\n self.assertEqual(BatchDocumentDbObject.query.count(), 3)\n self.assertEqual(BatchDocumentDbObject.query.filter_by(status=\"rendered\").count(), 3)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n self.assertEqual(db_batch.status, \"claim\")\n\n self.assertEqual(len(self.mailer.mails), 1)" }, { "alpha_fraction": 0.6179104447364807, "alphanum_fraction": 0.6238806247711182, "avg_line_length": 34.26315689086914, "blob_id": 
"1ec3deca41d37fc77c2a0c0d16ba613d80bd9212", "content_id": "4c21fd132e01cbb7452ae147bdbda4e2b186bc65", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 746, "license_type": "no_license", "max_line_length": 85, "num_lines": 19, "path": "/app/manage_commands/user_commands.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom fw.auth.user_manager import UserManager\nfrom manage_commands import BaseManageCommand, get_single\n\n\nclass AddUserCommand(BaseManageCommand):\n NAME = \"add_user\"\n\n def run(self):\n self.logger.info(u\"Добавление нового пользователя:\")\n self.logger.info(u'=' * 50)\n username = get_single(u'username: ')\n password = get_single(u'password: ')\n try:\n UserManager.create_user(u\"\", u\"\", username, u\"\", u\"\", u\"\", password, u\"\")\n except Exception, ex:\n self.logger.exception(u'не удалось создать пользователя')\n exit(-1)\n self.logger.info(u'Пользователь добавлен')\n" }, { "alpha_fraction": 0.5845043063163757, "alphanum_fraction": 0.5872365236282349, "avg_line_length": 35.86330795288086, "blob_id": "6ca7c48081ad8b8f604005d09fdfb07d1e0e1839", "content_id": "754af7fe7b171e1fae2967845829143d9de3cb12", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5124, "license_type": "no_license", "max_line_length": 149, "num_lines": 139, "path": "/app/fw/api/sql_session_storage.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "import hashlib\nimport pickle\n\nfrom datetime import timedelta, datetime\nfrom uuid import uuid4\nfrom werkzeug.datastructures import CallbackDict\nfrom flask.sessions import SessionInterface, SessionMixin\nfrom werkzeug.wrappers import BaseResponse\nfrom fw.db.sql_base import db\n\n\n_table_name = \"sesiones\"\n_data_serializer = pickle\n\n\n# def set_db_session_interface(app, table_name=None, data_serializer=None):\n# global _table_name, _data_serializer\n# if table_name is not None:\n# _table_name = table_name\n# if data_serializer is not None:\n# _data_serializer = data_serializer\n# db.init_app(app)\n# app.session_interface = SQLAlchemySessionInterface()\n# return app\n\n\nclass SQLAlchemySession(CallbackDict, SessionMixin):\n def __init__(self, initial=None, sid=None, new=False):\n\n def on_update(self):\n self.modified = True\n\n CallbackDict.__init__(self, initial, on_update)\n self.sid = sid\n self.new = new\n self.modified = False\n\n\nclass SQLAlchemySessionInterface(SessionInterface):\n\n def __init__(self, config):\n # this could be your mysql database or sqlalchemy db object\n self.permanent_session_lifetime = config['PERMANENT_SESSION_LIFETIME']\n self.cookie_name = config['SESSION_COOKIE_NAME']\n\n def generate_sid(self):\n return str(uuid4())\n\n def magic(self, i):\n m = hashlib.md5()\n val = u\"Everybody loves Yurburo%s\" % i\n m.update(val)\n return 'yb' + m.hexdigest()\n\n def open_session(self, app, request):\n # query your cookie for the session id\n ret = None\n sid = request.cookies.get(app.session_cookie_name)\n\n if not sid:\n sid = self.generate_sid()\n ret = SQLAlchemySession(sid=sid, new=True)\n else:\n val = Session.query.get(sid)\n if val is not None:\n data = _data_serializer.loads(val.data)\n ret = SQLAlchemySession(data, sid=sid)\n else:\n ss = str(sid)\n if '-' not in ss and ss.startswith('yb'):\n for i in xrange(100000):\n if ss == self.magic(i):\n data = {\n \"user_id\": i\n }\n sid = self.generate_sid()\n return 
SQLAlchemySession(data, sid=sid)\n                    ret = SQLAlchemySession(sid=sid, new=True)\n                else:\n                    ret = SQLAlchemySession(sid=sid, new=True)\n        return ret\n\n    def save_session(self, app, session, response):\n        cookie_exp = self.get_expiration_time(self.permanent_session_lifetime, session)\n\n        val = Session.query.get(session.sid)\n        db.session.commit()\n        domain = self.get_cookie_domain(app)\n        if not session:\n            if val is not None:\n                db.session.delete(val)\n            if session.modified:\n                if isinstance(response, BaseResponse):\n                    response.delete_cookie(key=self.cookie_name, domain=domain)\n                else:\n                    response.clear_cookie(self.cookie_name)\n            return\n\n        # If the session isn't permanent, it will be considered valid for 1 day\n        # (but not the cookie, which will be deleted by the browser after exit).\n        session_exp = cookie_exp or datetime.utcnow()+timedelta(days=1)\n        data = _data_serializer.dumps(dict(session))\n        if 'user_id' in session:\n            if val is not None:\n                val.data = data\n                val.exp = session_exp\n            else:\n                val = Session(session_id=session.sid, data=data, exp=session_exp)\n                db.session.add(val)\n            db.session.commit()\n\n            if isinstance(response, BaseResponse):\n                response.set_cookie(self.cookie_name,\n                                    value=session.sid,\n                                    expires=cookie_exp,\n                                    httponly=False)\n            else:\n                response.set_cookie(self.cookie_name, session.sid, expires=cookie_exp, httponly=False)\n        elif 'logout' in session:\n            session.pop('logout')\n            Session.query.filter_by(session_id=session.sid).delete()\n            db.session.commit()\n            response.delete_cookie(self.cookie_name)\n\n    def get_expiration_time(self, permanent_session_lifetime, session):\n        \"\"\"A helper method that returns an expiration date for the session\n        or `None` if the session is linked to the browser session. The\n        default implementation returns now + the permanent session\n        lifetime configured on the application.\n        \"\"\"\n        if session.permanent:\n            dt = permanent_session_lifetime if isinstance(permanent_session_lifetime, timedelta) else timedelta(seconds = permanent_session_lifetime)\n            return datetime.utcnow() + dt\n\nclass Session(db.Model):\n    __tablename__ = _table_name\n    session_id = db.Column(db.String(129), unique=True, primary_key=True)\n    exp = db.Column(db.DateTime())\n    data = db.Column(db.Text())\n"}, {"alpha_fraction": 0.6062164306640625, "alphanum_fraction": 0.6066837906837463, "avg_line_length": 38.62036895751953, "blob_id": "b1877619ead2005e53cdcc9606ba36bcbc0f4411", "content_id": "3fb821b5db85f02ab123369e2dcced30eced4e37", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4279, "license_type": "no_license", "max_line_length": 120, "num_lines": 108, "path": "/app/services/yurist/yurist_manager.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom datetime import datetime\nfrom flask_login import current_user\nimport os\nfrom fw.auth.social_services import SocialServiceBackends\nfrom fw.db.sql_base import db as sqldb\nfrom fw.storage.file_storage import FileStorage\nfrom services.yurist.data_model.enums import YuristBatchCheckStatus\nfrom services.yurist.data_model.models import YuristBatchCheckObject\n\n\ndef yurist_check(config, batch_db, file_obj_list, logger):\n    # get batch id and check if it is still in active state\n    batch_check = YuristBatchCheckObject.query.filter(\n        YuristBatchCheckObject.batch_id == batch_db.id,\n        YuristBatchCheckObject.status.notin_(YuristBatchCheckStatus.FINAL_STATUSES)\n    ).order_by(YuristBatchCheckObject.create_date.desc()).first()\n    # this check should be 
performed later\n    if not batch_check:\n        return False\n    user = batch_db._owner\n    if not user:\n        raise Exception(\"Failed to find batch owner\")\n\n    from fw.documents.batch_manager import BatchManager\n    attaches = BatchManager.get_shared_links_to_rendered_docs(batch_db, config, logger)\n\n    schema = config['WEB_SCHEMA']\n    domain = config['DOMAIN']\n    for file_obj in file_obj_list:\n        path = FileStorage.get_path(file_obj, config)\n        if os.path.exists(path):\n            if file_obj._owner:\n                url = u\"%s://%s%s\" % (schema, domain, FileStorage.get_shared_link(file_obj.id, config))\n            else:\n                url = u\"%s://%s%s\" % (schema, domain, FileStorage.get_url(file_obj, config))\n\n            attaches.append({\n                'url': url,\n                'title': file_obj.file_name or url\n            })\n\n    rec_list = config['YURIST_EMAIL_LIST']\n    from services.yurist.async_tasks import yurist_check_send\n    batch_check_id = batch_check.id if batch_check else \"not-found\"\n    # countdown 2 hours before execution\n    yurist_check_send.check_and_send.apply_async(\n        args=[],\n        kwargs=dict(\n            email=user.email,\n            batch_check_id=batch_check_id,\n            server_url_schema=config['WEB_SCHEMA'],\n            api_url=config['api_url'],\n            attaches=attaches,\n            mail_type='yurist_batch_check',\n            rec_list=rec_list\n        ),\n        countdown=config['SEND_DOCS_TO_YURIST_DELAY_SECONDS']\n    )\n\n\ndef cancel_check(batch, config, logger):\n    \"\"\"\n    @type batch: db.db_fields.DocumentBatchDbObject\n    \"\"\"\n    try:\n        batch_id = batch.id\n        yurist_check = YuristBatchCheckObject.query.filter(\n            YuristBatchCheckObject.batch_id == batch_id,\n            YuristBatchCheckObject.status.in_([YuristBatchCheckStatus.YBS_IN_PROGRESS, YuristBatchCheckStatus.YBS_WAIT])\n        ).first()\n\n        if not yurist_check:\n            check_new = YuristBatchCheckObject.query.filter_by(batch_id=batch_id,\n                                                               status=YuristBatchCheckStatus.YBS_NEW).first()\n            if not check_new:\n                yurist_batch_check = YuristBatchCheckObject(**{\n                    'batch_id': batch_id,\n                    'create_date': datetime.now(),\n                    'status': YuristBatchCheckStatus.YBS_NEW,\n                    'typos_correction': False\n                })\n                sqldb.session.add(yurist_batch_check)\n                sqldb.session.commit()\n            return\n\n        status = yurist_check.status\n\n        yurist_check.status = YuristBatchCheckStatus.YBS_NEW\n        sqldb.session.commit()\n\n        if status == YuristBatchCheckStatus.YBS_IN_PROGRESS:\n            llc_full_name = batch.data.get('full_name', \"\")\n            social_link = SocialServiceBackends.get_user_social_network_profile_url(current_user.id, sqldb)\n            rec_list = config['YURIST_EMAIL_LIST']\n            from fw.async_tasks import send_email\n            for recipient in rec_list:\n                send_email.send_email.delay(\n                    recipient,\n                    'yurist_batch_check_discard',\n                    email=current_user.email,\n                    mobile=current_user.mobile,\n                    social_link=social_link,\n                    full_name=llc_full_name\n                )\n    except Exception, ex:\n        logger.exception(u\"Failed to cancel yurist batch check\")\n"}, {"alpha_fraction": 0.6173815727233887, "alphanum_fraction": 0.6182531118392944, "avg_line_length": 40.83073043823242, "blob_id": "c30bc73da04b3360af1ee73c45d81081dbc50d2e", "content_id": "bc3e9765a159bcfbdc99e6fc8157c10a75c7495a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 16063, "license_type": "no_license", "max_line_length": 131, "num_lines": 384, "path": "/app/services/partners/api.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport copy\nfrom datetime import datetime\nimport json\n\nfrom flask import Blueprint\nfrom bson import ObjectId\nfrom flask import current_app\nfrom flask_login import login_required, current_user\nimport 
requests\nfrom sqlalchemy import or_\nfrom sqlalchemy.dialects.postgresql import ARRAY\n\nfrom custom_exceptions import MissingRequiredFieldException, InvalidFieldValueException\nfrom fw.api.args_validators import BoolTypeValidator\nfrom fw.api.base_handlers import error_tree_to_list\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.address_enums import SPECIAL_CITY_REGIONS\nfrom fw.documents.db_fields import DocumentBatchDbObject\nfrom fw.documents.enums import DocumentBatchTypeEnum\nfrom fw.documents.fields.doc_fields import DocumentBatch\nfrom fw.documents.schema.conditions import Condition\nfrom fw.api import errors\nfrom fw.api.args_validators import validate_arguments, ArgumentValidator\nfrom fw.api.base_handlers import api_view\nfrom fw.documents.batch_manager import BatchManager\nfrom services.partners.models import AccountantPartnersObject, BankPartnersObject, BankPartnerRequestObject, \\\n BankPartnersServiceObject, StampPartnersObject\n\npartners_bp = Blueprint('partners', __name__)\n\n@partners_bp.route('/partners/accounts/', methods=['GET'])\n@api_view\n@login_required\n@validate_arguments(batch_id=ArgumentValidator())\ndef get_accountant_partners(batch_id=None):\n accountant_partner_list = []\n\n try:\n region = BatchManager.get_batch_region(batch_id)\n except Exception, ex:\n current_app.logger.exception(u\"Failed to get batch region\")\n raise errors.BatchNotFound()\n\n partners = AccountantPartnersObject.query.filter_by(enabled=True)\n if region:\n partners = partners.filter(or_(AccountantPartnersObject.region.contains([region]),\n AccountantPartnersObject.region == None))\n for item in partners.order_by(AccountantPartnersObject.sort_index.asc()):\n accountant_partner_list.append({\n \"id\": item.id,\n \"link\": item.link,\n \"banner\": item.banner,\n \"title\": item.title,\n \"type\": item.type\n })\n\n return {\"result\": {\"accounts_partners\": accountant_partner_list}}\n\n\n@partners_bp.route('/partners/banks/', methods=['GET'])\n@api_view\n@login_required\n@validate_arguments(batch_id=ArgumentValidator())\ndef get_bank_partners(batch_id=None):\n bank_partner_list = []\n\n try:\n address = BatchManager.get_batch_address(batch_id)\n city = address['region'] if address['region'] in SPECIAL_CITY_REGIONS else address.get('city', address.get('village', u\"\"))\n except Exception:\n raise errors.BatchNotFound()\n\n banks = BankPartnersObject.query.filter_by(enabled=True)\n if city:\n banks = banks.filter(or_(BankPartnersObject.city.contains([city]),\n BankPartnersObject.city == None))\n\n for item in banks.order_by(BankPartnersObject.sort_index.asc()):\n bank_partner_list.append({\n \"id\": item.id,\n \"link\": item.link,\n \"banner\": item.banner,\n \"title\": item.title,\n \"conditions\": item.conditions or []\n })\n\n return {\"result\": {\"banks_partners\": bank_partner_list}}\n\n\n@partners_bp.route('/partners/banks/send/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(\n bank_id=ArgumentValidator(),\n batch_id=ArgumentValidator(),\n bank_contact_phone_general_manager=BoolTypeValidator(required=False),\n bank_contact_phone=ArgumentValidator(required=False),\n send_private_data=BoolTypeValidator()\n)\ndef request_bank_partner(bank_id=None, batch_id=None, bank_contact_phone_general_manager=False,\n bank_contact_phone=\"\", send_private_data=None):\n\n if not bank_contact_phone_general_manager and not bank_contact_phone:\n raise errors.MissingRequiredParameter('bank_contact_phone')\n\n batch = 
DocumentBatchDbObject.query.filter_by(id=batch_id,\n                                                  _owner=current_user,\n                                                  deleted=False,\n                                                  batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC).scalar()\n    if not batch or not batch.data:\n        raise errors.BatchNotFound()\n    current_batch = DocumentBatch.db_obj_to_field(batch)\n\n    partner = BankPartnersObject.query.filter_by(id=bank_id).first()\n    if not partner:\n        raise errors.InvalidParameterValue('partner_id')\n\n    svc_data = BankPartnersServiceObject.query.filter_by(bank_partner_id=partner.id).first()\n    if not svc_data:\n        raise errors.ServerError()\n\n    current_bank_request = BankPartnerRequestObject.query.filter_by(bank_partner_id=partner.id, batch_id=batch_id).first()\n    if current_bank_request and current_bank_request.status in ('sending', 'success'):\n        struct = current_batch.get_api_structure()\n        return {'result': struct}\n\n    if current_bank_request and abs((datetime.utcnow() - current_bank_request.sent_date).total_seconds()) > 60:\n        BankPartnerRequestObject.query.filter_by(id=current_bank_request.id).delete()\n        sqldb.session.commit()\n        current_bank_request = None\n\n    svc_type = svc_data.type\n\n    fields = svc_data.fields\n    extra_context = {\n        'bank_contact_phone_general_manager': bank_contact_phone_general_manager,\n        'bank_contact_phone': bank_contact_phone,\n        'send_private_data': send_private_data,\n        'bank_title': partner.title\n    }\n    field_list = BatchManager.make_fields_from_data(batch_id, fields, current_app.config, extra_context=extra_context)\n\n    context = {}\n    errors_list = []\n    for name in field_list:\n        field = field_list[name]\n        try:\n            if not field.initialized:\n                if field.required:\n                    raise MissingRequiredFieldException(name)\n            else:\n                field.validate()\n        except (InvalidFieldValueException, MissingRequiredFieldException), ex:\n            if hasattr(field, \"suppress_validation_errors\"):\n                suppress_validation_errors = field.suppress_validation_errors\n                if isinstance(suppress_validation_errors, dict):\n                    suppress_validation_condition = Condition(suppress_validation_errors)\n                    check_context = copy.copy(batch.data)\n                    check_context.update(extra_context)\n                    suppress_validation_errors = suppress_validation_condition.check(check_context)\n                if suppress_validation_errors:\n                    continue\n\n            if getattr(ex, 'ext_data', None):\n                err_list = error_tree_to_list(ex.ext_data)\n                error_field_paths = [{'field': name + '.' 
+ i['field'], 'error_code': i['error_code']} for i in err_list]\n errors_list.extend(error_field_paths)\n else:\n errors_list.append({\n 'field': name,\n 'error_code': ex.ERROR_CODE\n })\n current_app.logger.exception(u\"Field %s validation error\" % name)\n continue\n if field_list[name].initialized:\n context[name] = field_list[name]\n\n if errors_list:\n current_app.logger.exception(u\"Failed to construct email context\")\n if current_bank_request:\n current_bank_request.sent_date = datetime.utcnow()\n current_bank_request.status = \"failed\"\n current_bank_request.bank_contact_phone_general_manager = bank_contact_phone_general_manager\n current_bank_request.bank_contact_phone = bank_contact_phone\n current_bank_request.send_private_data = send_private_data\n else:\n new_item = BankPartnerRequestObject(\n bank_partner_id=partner.id,\n batch_id=batch_id,\n bank_partner_caption=partner.title,\n sent_date=datetime.utcnow(),\n status=\"failed\",\n bank_contact_phone_general_manager=bank_contact_phone_general_manager,\n bank_contact_phone=bank_contact_phone,\n send_private_data=send_private_data\n )\n sqldb.session.add(new_item)\n sqldb.session.commit()\n\n batch.error_info = {\n 'error_ext': errors_list\n }\n sqldb.session.commit()\n current_batch.error_info.value = {\n 'error_ext': errors_list\n }\n current_batch.error_info.initialized = True\n struct = current_batch.get_api_structure()\n return {'result': struct}\n else:\n batch.error_info = None\n sqldb.session.commit()\n current_batch.error_info.value = None\n current_batch.error_info.initialized = False\n\n if context is None or not isinstance(context, dict):\n raise errors.ServerError()\n\n from flask.templating import render_template\n if svc_type == 'email':\n target_address = svc_data.email\n if current_app.config['STAGING'] or current_app.config['DEBUG']:\n target_address = current_app.config['ADMIN_EMAIL_LIST']\n template_name = svc_data.template_name\n if not target_address or not template_name:\n raise errors.ServerError()\n\n context['send_private_data'] = send_private_data\n html_text = render_template('email/%s.html' % template_name, **context)\n plain_text = render_template('email/%s.text' % template_name, **context)\n subject_text = render_template('email/%s.subject' % template_name, **context)\n\n from fw.async_tasks import send_email\n if current_bank_request:\n current_bank_request.sent_date = datetime.utcnow()\n current_bank_request.status = \"sending\"\n current_bank_request.bank_contact_phone_general_manager = bank_contact_phone_general_manager\n current_bank_request.bank_contact_phone = bank_contact_phone\n current_bank_request.send_private_data = send_private_data\n else:\n new_item = BankPartnerRequestObject(\n bank_partner_id=bank_id,\n batch_id=batch_id,\n bank_partner_caption=partner.title,\n sent_date=datetime.utcnow(),\n status=\"sending\",\n bank_contact_phone_general_manager=bank_contact_phone_general_manager,\n bank_contact_phone=bank_contact_phone,\n send_private_data=send_private_data\n )\n sqldb.session.add(new_item)\n sqldb.session.commit()\n send_email.send_email_to_partner_and_set_result.delay(target_address, template_name, batch_id,\n bank_id, bank_contact_phone_general_manager,\n bank_contact_phone, send_private_data,\n html_text=html_text, plain_text=plain_text,\n subject_text=subject_text)\n elif svc_type == 'web':\n config = svc_data.config\n url = config['url']\n method = config['method']\n template_name = svc_data.template_name\n\n context['send_private_data'] = send_private_data\n json_str = 
render_template('%s.json' % template_name, **context)\n        data = json.loads(json_str)\n        new_data = {}\n        for k, v in data.items():\n            if isinstance(v, basestring):\n                new_data[k] = v.encode('cp1251')\n            else:\n                new_data[k] = v\n\n        try:\n            if method == 'get':\n                response = requests.get(url, params=new_data, timeout=3)\n            elif method == 'post':\n                response = requests.post(url, data=new_data, timeout=3)\n            else:\n                raise NotImplementedError()\n        except Exception:\n            current_app.logger.exception(u\"Failed to send request to partner\")\n            return {\"result\": current_batch.get_api_structure()}\n\n        if response.status_code == 200:\n            if current_bank_request:\n                current_bank_request.sent_date = datetime.utcnow()\n                current_bank_request.status = \"success\"\n                current_bank_request.bank_contact_phone_general_manager = bank_contact_phone_general_manager\n                current_bank_request.bank_contact_phone = bank_contact_phone\n                current_bank_request.send_private_data = send_private_data\n            else:\n                new_item = BankPartnerRequestObject(\n                    bank_partner_id=bank_id,\n                    batch_id=batch_id,\n                    bank_partner_caption=partner.title,\n                    sent_date=datetime.utcnow(),\n                    status=\"success\",\n                    bank_contact_phone_general_manager=bank_contact_phone_general_manager,\n                    bank_contact_phone=bank_contact_phone,\n                    send_private_data=send_private_data\n                )\n                sqldb.session.add(new_item)\n        else:\n            if current_bank_request:\n                current_bank_request.sent_date = datetime.utcnow()\n                current_bank_request.status = \"failed\"\n                current_bank_request.bank_contact_phone_general_manager = bank_contact_phone_general_manager\n                current_bank_request.bank_contact_phone = bank_contact_phone\n                current_bank_request.send_private_data = send_private_data\n            else:\n                new_item = BankPartnerRequestObject(\n                    bank_partner_id=bank_id,\n                    batch_id=batch_id,\n                    bank_partner_caption=partner.title,\n                    sent_date=datetime.utcnow(),\n                    status=\"failed\",\n                    bank_contact_phone_general_manager=bank_contact_phone_general_manager,\n                    bank_contact_phone=bank_contact_phone,\n                    send_private_data=send_private_data\n                )\n                sqldb.session.add(new_item)\n        sqldb.session.commit()\n\n    else:\n        raise errors.ServerError()\n\n    return {\"result\": current_batch.get_api_structure()}\n\n\n@partners_bp.route('/partners/banks/status/', methods=['GET'])\n@api_view\n@login_required\n@validate_arguments(bank_id=ArgumentValidator(), batch_id=ArgumentValidator())\ndef get_bank_partner_request_status(bank_id=None, batch_id=None):\n    try:\n        ObjectId(bank_id)\n    except Exception:\n        raise errors.InvalidParameterValue('bank_id')\n    try:\n        ObjectId(batch_id)\n    except Exception:\n        raise errors.InvalidParameterValue('batch_id')\n\n    current_bank_request = BankPartnerRequestObject.query.filter_by(bank_partner_id=bank_id, batch_id=batch_id).first()\n    if not current_bank_request:\n        raise errors.BatchNotFound()\n\n    if current_bank_request and current_bank_request.status == 'sending' and \\\n            abs((datetime.utcnow() - current_bank_request.sent_date).total_seconds()) > 60:\n\n        sqldb.session.delete(current_bank_request)\n        sqldb.session.commit()\n        raise errors.BatchNotFound()\n\n    return {\"result\": current_bank_request.status}\n\n\n@partners_bp.route('/partners/stamps/', methods=['GET'])\n@api_view\n@login_required\n@validate_arguments(batch_id=ArgumentValidator())\ndef get_stamp_partners(batch_id=None):\n    stamp_partner_list = []\n\n    try:\n        region = BatchManager.get_batch_region(batch_id)\n    except Exception:\n        raise errors.BatchNotFound()\n\n    stamps = StampPartnersObject.query.filter_by(enabled=True)\n    if region:\n        stamps = 
stamps.filter(or_(StampPartnersObject.region.contains([region]), StampPartnersObject.region == None))\n for item in stamps.order_by(StampPartnersObject.sort_index.asc()):\n stamp_partner_list.append({\n \"id\": item.id,\n \"link\": item.link,\n \"banner\": item.banner,\n \"title\": item.title\n })\n\n return {\"result\": {\"stamp_partners\": stamp_partner_list}}\n" }, { "alpha_fraction": 0.7401960492134094, "alphanum_fraction": 0.7438725233078003, "avg_line_length": 39.849998474121094, "blob_id": "9e6ce64a6f501ea50a47f741f695d27f136fec16", "content_id": "67914b367449cbd96e2ee80dc848d662976c4428", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 816, "license_type": "no_license", "max_line_length": 106, "num_lines": 20, "path": "/app/services/ifns/__init__.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport os\nimport jinja2\nfrom services.ifns.api import ifns_bp\n\n\ndef register(app, jinja_env, class_loader, url_prefix=None):\n app.register_blueprint(ifns_bp, url_prefix=url_prefix)\n\n search_path = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), u\"templates\"))\n jinja_env.loader.loaders.append(jinja2.FileSystemLoader(search_path))\n\n class_loader.POSSIBLE_LOCATIONS.append('services.ifns.data_model.db_models')\n class_loader.POSSIBLE_LOCATIONS.append('services.ifns.data_model.enums')\n class_loader.POSSIBLE_LOCATIONS.append('services.ifns.data_model.fields')\n class_loader.POSSIBLE_LOCATIONS.append('services.ifns.data_model.okved')\n\n\ndef get_manager_command_locations():\n return [os.path.normpath(os.path.abspath(os.path.dirname(__file__)))]" }, { "alpha_fraction": 0.6436694264411926, "alphanum_fraction": 0.6451857686042786, "avg_line_length": 33.25973892211914, "blob_id": "dbdea2143896a17e5b6dca97e1c50c17e956360e", "content_id": "f77409b061b87ace674aaefcc9fc19212dc78b2d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2638, "license_type": "no_license", "max_line_length": 87, "num_lines": 77, "path": "/app/fw/plugins/emailer_plugin/__init__.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom copy import copy\nfrom datetime import datetime\nfrom fw.async_tasks import core_tasks\nfrom fw.documents.schema.schema_transform import transform_field_with_schema\nfrom fw.plugins.emailer_plugin.enums import MailTargetEnum\nfrom fw.plugins.emailer_plugin.mail_composer import create_composer\n\nPLUGIN_NAME = 'emailer'\n\n\ndef get_events():\n events = [{\n 'name': 'mail_sent'\n }, {\n 'name': 'send_failed' # after the last try\n }]\n return events\n\ndef act(action, batch_db, previous_event_data, plugin_config, logger, config):\n assert batch_db\n if action != 'send_email':\n logger.error(u\"Invalid action %s for emailer plugin\" % action)\n return False\n\n max_retries = plugin_config.get('max_retries', 0)\n retry_timeout_seconds = plugin_config.get('retry_timeout_seconds', 10)\n silent = plugin_config.get('silent', False)\n mail_type = plugin_config.get('mail_type')\n assert mail_type\n\n recipients = plugin_config.get('recipients', [])\n\n source_data = copy(plugin_config)\n source_data['<batch>'] = batch_db\n source_data['<app_config>'] = config\n source_data['<current_user>'] = batch_db._owner\n source_data['<previous_event_data>'] = previous_event_data\n if isinstance(recipients, dict):\n recipients = 
transform_field_with_schema(source_data, recipients).db_value()\n\n event_data = {\n 'recipients': recipients,\n 'max_retries': max_retries,\n 'retry_timeout_seconds': retry_timeout_seconds,\n 'silent': silent,\n 'mail_type': mail_type,\n '<action_dt>': datetime.utcnow(),\n }\n\n if not recipients:\n core_tasks.send.delay(batch_db.id, '%s:send_fail' % PLUGIN_NAME, event_data)\n return False\n\n template_data = {}\n data_fields = plugin_config.get('data', {})\n for d, dv in data_fields.items():\n template_data[d] = transform_field_with_schema(source_data, dv)\n\n template_data_raw = {}\n for k, v in template_data.items():\n if v is not None:\n template_data_raw[k] = v.db_value()\n event_data['template_data'] = template_data_raw\n\n composer = create_composer(mail_type, logger)\n try:\n composer.send_email(recipients, batch_db.id, event_data, max_retries,\n retry_timeout_seconds=retry_timeout_seconds, silent=silent,\n template_data=template_data_raw)\n except Exception:\n logger.exception(u\"Failed to send email\")\n return False\n return True\n\ndef register(class_loader):\n class_loader.POSSIBLE_LOCATIONS.append('fw.plugins.emailer_plugin.enums')\n" }, { "alpha_fraction": 0.6736744046211243, "alphanum_fraction": 0.673907995223999, "avg_line_length": 32.05791473388672, "blob_id": "1b764ae675bab35b8bafeac26f66d185c96b284b", "content_id": "8bd8b8ce3978cfc33b7555c21350d66078672379", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8562, "license_type": "no_license", "max_line_length": 112, "num_lines": 259, "path": "/app/deployment_migrations/migration_list/20150903_add_catalog_tables.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom fw.db.sql_base import db as sqldb\n\n\ndef forward(config, logger):\n logger.debug(u\"Create new models\")\n\n sqldb.session.close()\n sqldb.engine.execute(u\"\"\"CREATE TABLE IF NOT EXISTS okved_catalog (\n id VARCHAR NOT NULL,\n name VARCHAR NOT NULL,\n departments JSONB,\n PRIMARY KEY (id)\n);\"\"\")\n\n sqldb.engine.execute(u\"\"\"CREATE TABLE IF NOT EXISTS okvad (\n id VARCHAR NOT NULL,\n caption VARCHAR NOT NULL,\n okved VARCHAR NOT NULL,\n nalog VARCHAR NOT NULL,\n parent VARCHAR,\n PRIMARY KEY (id)\n);\"\"\")\n\n sqldb.engine.execute(u\"\"\"DROP INDEX IF EXISTS ix_okvad_okved;\"\"\")\n sqldb.engine.execute(u\"\"\"CREATE UNIQUE INDEX ix_okvad_okved ON okvad (okved);\"\"\")\n\n sqldb.engine.execute(u\"\"\"CREATE TABLE IF NOT EXISTS geo_ranges (\n start BIGINT NOT NULL,\n \"end\" BIGINT NOT NULL,\n cid SERIAL NOT NULL,\n PRIMARY KEY (cid),\n UNIQUE (cid)\n);\"\"\")\n\n sqldb.engine.execute(u\"\"\"CREATE TABLE IF NOT EXISTS geo_cities (\n name VARCHAR NOT NULL,\n cid SERIAL NOT NULL,\n region VARCHAR NOT NULL,\n lat VARCHAR NOT NULL,\n lng VARCHAR NOT NULL,\n PRIMARY KEY (cid),\n UNIQUE (cid)\n);\"\"\")\n \n sqldb.engine.execute(u\"\"\"CREATE TABLE IF NOT EXISTS notarius (\n id VARCHAR NOT NULL, \n surname VARCHAR NOT NULL, \n name VARCHAR NOT NULL, \n patronymic VARCHAR, \n schedule JSONB NOT NULL, \n schedule_caption VARCHAR, \n title VARCHAR, \n address JSONB, \n region VARCHAR NOT NULL, \n metro_station VARCHAR, \n PRIMARY KEY (id)\n);\"\"\")\n \n sqldb.engine.execute(u\"\"\"CREATE TABLE IF NOT EXISTS notarius_booking (\n id VARCHAR NOT NULL, \n batch_id VARCHAR, \n owner_id INTEGER, \n notarius_id VARCHAR, \n dt TIMESTAMP WITHOUT TIME ZONE NOT NULL, \n address VARCHAR NOT NULL, \n _discarded BOOLEAN, \n PRIMARY KEY (id), \n FOREIGN 
KEY(batch_id) REFERENCES doc_batch (id), \n FOREIGN KEY(owner_id) REFERENCES authuser (id), \n FOREIGN KEY(notarius_id) REFERENCES notarius (id)\n);\"\"\")\n\n sqldb.engine.execute(u\"\"\"DROP INDEX IF EXISTS ix_notarius_booking_owner_id;\"\"\")\n sqldb.engine.execute(u\"CREATE INDEX ix_notarius_booking_owner_id ON notarius_booking (owner_id);\")\n\n sqldb.engine.execute(u\"\"\"DROP INDEX IF EXISTS ix_notarius_booking_notarius_id;\"\"\")\n sqldb.engine.execute(u\"CREATE INDEX ix_notarius_booking_notarius_id ON notarius_booking (notarius_id);\")\n\n sqldb.engine.execute(u\"\"\"CREATE TABLE IF NOT EXISTS yurist_batch_check (\n id VARCHAR NOT NULL, \n batch_id VARCHAR, \n status VARCHAR NOT NULL, \n create_date TIMESTAMP WITHOUT TIME ZONE NOT NULL, \n typos_correction BOOLEAN NOT NULL, \n PRIMARY KEY (id), \n FOREIGN KEY(batch_id) REFERENCES doc_batch (id)\n);\"\"\")\n \n sqldb.engine.execute(u\"\"\"CREATE TABLE IF NOT EXISTS yurist_check_files (\n \"check_id\" VARCHAR NOT NULL,\n files_id VARCHAR NOT NULL,\n PRIMARY KEY (\"check_id\", files_id),\n FOREIGN KEY(\"check_id\") REFERENCES yurist_batch_check (id),\n FOREIGN KEY(files_id) REFERENCES files (id)\n);\"\"\")\n \n sqldb.engine.execute(u\"\"\"CREATE TABLE IF NOT EXISTS ifns_catalog (\n id VARCHAR NOT NULL, \n updated TIMESTAMP WITHOUT TIME ZONE, \n code INTEGER NOT NULL, \n comment VARCHAR, \n tel TEXT[],\n name VARCHAR, \n rof JSONB, \n rou JSONB, \n plat JSONB, \n address VARCHAR, \n region VARCHAR, \n PRIMARY KEY (id)\n);\"\"\")\n\n sqldb.engine.execute(u\"\"\"DROP INDEX IF EXISTS ix_ifns_catalog_code;\"\"\")\n sqldb.engine.execute(u\"\"\"CREATE INDEX ix_ifns_catalog_code ON ifns_catalog (code);\"\"\")\n\n sqldb.engine.execute(u\"\"\"DROP INDEX IF EXISTS ix_ifns_catalog_region;\"\"\")\n sqldb.engine.execute(u\"\"\"CREATE INDEX ix_ifns_catalog_region ON ifns_catalog (region);\"\"\")\n \n sqldb.engine.execute(u\"\"\"CREATE TABLE IF NOT EXISTS ifns_booking (\n id VARCHAR NOT NULL, \n batch_id VARCHAR, \n code VARCHAR NOT NULL, \n date TIMESTAMP WITHOUT TIME ZONE NOT NULL, \n service VARCHAR NOT NULL, \n _discarded BOOLEAN, \n phone VARCHAR, \n \"window\" VARCHAR, \n address VARCHAR, \n service_id INTEGER NOT NULL, \n ifns VARCHAR, \n how_to_get VARCHAR, \n reg_info JSONB, \n PRIMARY KEY (id), \n FOREIGN KEY(batch_id) REFERENCES doc_batch (id)\n);\"\"\")\n\n sqldb.engine.execute(u\"\"\"DROP INDEX IF EXISTS ix_ifns_booking_code;\"\"\")\n sqldb.engine.execute(u\"CREATE INDEX ix_ifns_booking_code ON ifns_booking (code);\")\n\n sqldb.engine.execute(u\"\"\"CREATE TABLE IF NOT EXISTS stamp_partners (\n id VARCHAR NOT NULL, \n region TEXT[],\n enabled BOOLEAN, \n sort_index INTEGER NOT NULL, \n link VARCHAR, \n banner VARCHAR NOT NULL, \n title VARCHAR NOT NULL, \n created TIMESTAMP WITHOUT TIME ZONE NOT NULL, \n PRIMARY KEY (id)\n);\"\"\")\n\n sqldb.engine.execute(u\"\"\"CREATE TABLE IF NOT EXISTS bank_partners (\n id VARCHAR NOT NULL, \n created TIMESTAMP WITHOUT TIME ZONE NOT NULL, \n link VARCHAR, \n title VARCHAR NOT NULL, \n banner VARCHAR NOT NULL, \n enabled BOOLEAN, \n sort_index INTEGER NOT NULL, \n region TEXT[],\n city TEXT[],\n conditions TEXT[],\n PRIMARY KEY (id)\n);\"\"\")\n\n sqldb.engine.execute(u\"\"\"CREATE TABLE IF NOT EXISTS accountant_partners (\n id VARCHAR NOT NULL, \n type VARCHAR NOT NULL, \n created TIMESTAMP WITHOUT TIME ZONE NOT NULL, \n link VARCHAR, \n title VARCHAR NOT NULL, \n banner VARCHAR NOT NULL, \n enabled BOOLEAN, \n sort_index INTEGER NOT NULL, \n region TEXT[],\n city TEXT[],\n PRIMARY KEY 
(id)\n);\"\"\")\n\n sqldb.engine.execute(u\"\"\"CREATE TABLE IF NOT EXISTS bank_partners_service (\n id VARCHAR NOT NULL, \n type VARCHAR NOT NULL, \n fields JSONB, \n email VARCHAR, \n template_name VARCHAR, \n config JSONB, \n bank_partner_id VARCHAR NOT NULL, \n PRIMARY KEY (id), \n FOREIGN KEY(bank_partner_id) REFERENCES bank_partners (id)\n);\"\"\")\n\n sqldb.engine.execute(u\"\"\"CREATE TABLE IF NOT EXISTS bank_partners_request (\n id SERIAL NOT NULL, \n bank_partner_id VARCHAR NOT NULL, \n batch_id VARCHAR NOT NULL, \n bank_partner_caption VARCHAR, \n sent_date TIMESTAMP WITHOUT TIME ZONE NOT NULL, \n status VARCHAR NOT NULL, \n bank_contact_phone_general_manager VARCHAR, \n bank_contact_phone VARCHAR, \n send_private_data BOOLEAN, \n PRIMARY KEY (id), \n FOREIGN KEY(bank_partner_id) REFERENCES bank_partners (id), \n FOREIGN KEY(batch_id) REFERENCES doc_batch (id)\n);\"\"\")\n \n sqldb.engine.execute(u\"\"\"CREATE TABLE IF NOT EXISTS payment_subscription (\n id SERIAL NOT NULL, \n pay_info JSONB NOT NULL, \n created TIMESTAMP WITHOUT TIME ZONE NOT NULL, \n end_dt TIMESTAMP WITHOUT TIME ZONE NOT NULL, \n type VARCHAR NOT NULL, \n user_id INTEGER, \n PRIMARY KEY (id), \n FOREIGN KEY(user_id) REFERENCES authuser (id)\n);\"\"\")\n\n sqldb.engine.execute(u\"\"\"DROP INDEX IF EXISTS ix_payment_subscription_user_id;\"\"\")\n sqldb.engine.execute(u\"\"\"CREATE INDEX ix_payment_subscription_user_id ON payment_subscription (user_id);\"\"\")\n\n sqldb.engine.execute(u\"\"\"CREATE TABLE IF NOT EXISTS yad_requests (\n id SERIAL NOT NULL, \n ip VARCHAR NOT NULL, \n created TIMESTAMP WITHOUT TIME ZONE NOT NULL, \n request_datetime TIMESTAMP WITHOUT TIME ZONE NOT NULL, \n md5 VARCHAR NOT NULL, \n shop_id BIGINT NOT NULL,\n shop_article_id BIGINT NOT NULL,\n invoice_id BIGINT NOT NULL,\n order_number VARCHAR NOT NULL, \n customer_number VARCHAR NOT NULL, \n order_created_datetime TIMESTAMP WITHOUT TIME ZONE NOT NULL, \n order_sum_amount DECIMAL NOT NULL, \n order_sum_currency_paycash VARCHAR NOT NULL, \n order_sum_bank_paycash VARCHAR NOT NULL, \n shop_sum_amount DECIMAL NOT NULL, \n shop_sum_currency_paycash VARCHAR NOT NULL, \n shop_sum_bank_paycash VARCHAR NOT NULL, \n payment_payer_code VARCHAR NOT NULL, \n payment_type VARCHAR NOT NULL, \n action VARCHAR NOT NULL, \n payment_datetime TIMESTAMP WITHOUT TIME ZONE, \n cps_user_country_code VARCHAR, \n PRIMARY KEY (id)\n);\"\"\")\n\n sqldb.engine.execute(u\"\"\"DROP INDEX IF EXISTS ix_yad_requests_order_number;\"\"\")\n sqldb.engine.execute(u\"\"\"CREATE INDEX ix_yad_requests_order_number ON yad_requests (order_number);\"\"\")\n\n sqldb.engine.execute(u\"\"\"DROP INDEX IF EXISTS ix_yad_requests_invoice_id;\"\"\")\n sqldb.engine.execute(u\"\"\"CREATE INDEX ix_yad_requests_invoice_id ON yad_requests (invoice_id);\"\"\")\n\n sqldb.engine.execute(u\"\"\"DROP INDEX IF EXISTS ix_yad_requests_customer_number;\"\"\")\n sqldb.engine.execute(u\"\"\"CREATE INDEX ix_yad_requests_customer_number ON yad_requests (customer_number);\"\"\")\n\n\ndef rollback(config, logger):\n pass\n" }, { "alpha_fraction": 0.7164461016654968, "alphanum_fraction": 0.7177063822746277, "avg_line_length": 36.3411750793457, "blob_id": "a70c74397687373f71f8c805de97987356c6dfe3", "content_id": "e50aee4745d9fa0648bd113c45c20dba1b16d793", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3174, "license_type": "no_license", "max_line_length": 92, "num_lines": 85, "path": "/app/services/partners/models.py", "repo_name": 
"StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom sqlalchemy.orm import relationship\n\nfrom sqlalchemy.sql.functions import func\nfrom sqlalchemy import Column, Unicode, String, DateTime, Boolean, Integer, ForeignKey, Text\nfrom sqlalchemy.dialects.postgresql import ARRAY, JSONB\n\nfrom fw.db.sql_base import db as sqldb\n\n\nclass AccountantPartnersObject(sqldb.Model):\n __tablename__ = \"accountant_partners\"\n\n id = Column(String, primary_key=True)\n type = Column(Unicode, nullable=False)\n created = Column(DateTime, nullable=False, default=func.now())\n link = Column(Unicode, nullable=True)\n title = Column(Unicode, nullable=False)\n banner = Column(Unicode, nullable=False)\n enabled = Column(Boolean, default=True)\n sort_index = Column(Integer, nullable=False, default=1)\n region = Column(ARRAY(Text), nullable=True)\n city = Column(ARRAY(Text), nullable=True)\n\n\nclass BankPartnersObject(sqldb.Model):\n __tablename__ = \"bank_partners\"\n\n id = Column(String, primary_key=True)\n created = Column(DateTime, nullable=False, default=func.now())\n link = Column(Unicode, nullable=True)\n title = Column(Unicode, nullable=False)\n banner = Column(Unicode, nullable=False)\n enabled = Column(Boolean, default=True)\n sort_index = Column(Integer, nullable=False, default=1)\n region = Column(ARRAY(Text), nullable=True)\n city = Column(ARRAY(Text), nullable=True)\n conditions = Column(ARRAY(Text), nullable=True)\n\n\nclass BankPartnerRequestObject(sqldb.Model):\n __tablename__ = \"bank_partners_request\"\n\n id = Column(Integer, primary_key=True)\n\n bank_partner_id = Column(String, ForeignKey('bank_partners.id'), nullable=False)\n bank_partner = relationship(\"BankPartnersObject\")\n\n batch_id = Column(String, ForeignKey('doc_batch.id'), nullable=False)\n batch = relationship(\"DocumentBatchDbObject\", uselist=False)\n\n bank_partner_caption = Column(Unicode, nullable=True)\n sent_date = Column(DateTime, nullable=False, default=func.now())\n status = Column(Unicode, nullable=False)\n bank_contact_phone_general_manager = Column(Unicode, nullable=True)\n bank_contact_phone = Column(Unicode, nullable=True)\n send_private_data = Column(Boolean, default=True)\n\n\nclass BankPartnersServiceObject(sqldb.Model):\n __tablename__ = \"bank_partners_service\"\n\n id = Column(String, primary_key=True)\n type = Column(Unicode, nullable=False)\n fields = Column(JSONB, nullable=True)\n\n email = Column(Unicode, nullable=True)\n template_name = Column(Unicode, nullable=True)\n config = Column(JSONB, nullable=True)\n\n bank_partner_id = Column(String, ForeignKey('bank_partners.id'), nullable=False)\n bank_partner = relationship(\"BankPartnersObject\")\n\n\nclass StampPartnersObject(sqldb.Model):\n __tablename__ = \"stamp_partners\"\n\n id = Column(String, primary_key=True)\n region = Column(ARRAY(Text), nullable=True)\n enabled = Column(Boolean, default=True)\n sort_index = Column(Integer, nullable=False, default=1)\n link = Column(Unicode, nullable=True)\n banner = Column(Unicode, nullable=False)\n title = Column(Unicode, nullable=False)\n created = Column(DateTime, nullable=False, default=func.now())\n" }, { "alpha_fraction": 0.650364875793457, "alphanum_fraction": 0.6504929065704346, "avg_line_length": 28.927202224731445, "blob_id": "b1d575d2db037e0a5e2996021bfe4da1b25e6447", "content_id": "c46664b5644b086a1df601e79cf4753f04d015a9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8357, "license_type": 
"no_license", "max_line_length": 116, "num_lines": 261, "path": "/app/services/osago/documents/enums.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n\nclass CrashSubjectEnum(object):\n CS_VICTIM = \"victim\"\n CS_GUILTY = \"guilty\"\n CS_BOTH = \"both\"\n\n _NAMES = {\n CS_VICTIM: u\"жертва\",\n CS_GUILTY: u\"виновник\",\n CS_BOTH: u\"и то и другое\"\n }\n\n @classmethod\n def validate(cls, value):\n return value in cls._NAMES\n\n @staticmethod\n def get_name(value):\n return CrashSubjectEnum._NAMES.get(value, u\"неизвестно\")\n\n\nclass OsagoReasonEnum(object):\n OR_REFUSAL = \"refusal\"\n OR_UNDERPAY = \"underpay\"\n\n _NAMES = {\n OR_REFUSAL: u\"отказ\",\n OR_UNDERPAY: u\"недоплата\",\n }\n\n @classmethod\n def validate(cls, value):\n return value in cls._NAMES\n\n @staticmethod\n def get_name(value):\n return OsagoReasonEnum._NAMES.get(value, u\"неизвестно\")\n\n\nclass OsagoRefusalReasonEnum(object):\n ORR_REPAIRED = \"repaired\"\n ORR_NOT_SHOWN = \"not_shown\"\n ORR_DELAY = \"delay\"\n ORR_WRONG_DOCS = \"wrong_docs\"\n ORR_INCOMPLETE_DOCS = \"incomplete_docs\"\n\n _NAMES = {\n ORR_REPAIRED: u\"отремонтирована\",\n ORR_NOT_SHOWN: u\"не осмотрена\",\n ORR_DELAY: u\"задержка\",\n ORR_WRONG_DOCS: u\"ошибки в документах\",\n ORR_INCOMPLETE_DOCS: u\"неполные документы\"\n }\n\n @classmethod\n def validate(cls, value):\n return value in cls._NAMES\n\n @staticmethod\n def get_name(value):\n return OsagoRefusalReasonEnum._NAMES.get(value, u\"неизвестно\")\n\nclass _SimpleEnum(object):\n\n @classmethod\n def validate(cls, value):\n return value in cls._ALL\n\n @staticmethod\n def get_name(value):\n return \"\"\n\nclass OsagoDocTypeEnum(_SimpleEnum):\n ODT_INQUIRE_CRASH = \"inquire_crash\"\n ODT_NOTICE_CRASH = \"notice_crash\"\n ODT_ACT_INSURANCE_CASE = \"act_insurance_case\"\n ODT_INSURANCE_DENIAL = \"insurance_denial\"\n ODT_POLICE_STATEMENT = \"police_statement\"\n ODT_POLICE_PROTOCOL = \"police_protocol\"\n ODT_CASE_INITIATION_REFUSAL = \"case_init_refusal\"\n ODT_EXPERTISE_REPORT = \"expertise_report\"\n ODT_EXPERTISE_CONTRACT = \"expertise_contract\"\n ODT_PRETENSION_ANSWER_COPY = \"pretension_answer\"\n ODT_NOTARY_PAY_ACT = \"notary_pay_act\"\n ODT_POLICY_OSAGO = \"policy_osago\"\n ODT_BANK_STATEMENT = \"bank_statement\"\n\n _ALL = {\n ODT_INQUIRE_CRASH: u\"cправка о ДТП\",\n ODT_NOTICE_CRASH: u\"извещение о ДТП\",\n ODT_ACT_INSURANCE_CASE: u\"акт о страховом случае\",\n ODT_INSURANCE_DENIAL: u\"отказ в выплате страховой компании\",\n ODT_POLICE_STATEMENT: u\"постановление по делу об административном нарушении\",\n ODT_POLICE_PROTOCOL: u\"протокол об административном нарушении\",\n ODT_CASE_INITIATION_REFUSAL: u\"определение об отказе в возбуждении дела об административном правонарушении\",\n ODT_EXPERTISE_REPORT: u\"отчет независимой экспертизы\",\n ODT_EXPERTISE_CONTRACT: u\"договор о проведении независимой экспертизы\",\n ODT_PRETENSION_ANSWER_COPY: u\"копия ответа на претензию\",\n ODT_NOTARY_PAY_ACT: u\"документы об оплате нотариальных услуг\",\n ODT_POLICY_OSAGO: u\"полис ОСАГО\",\n ODT_BANK_STATEMENT: u\"банковские документы, подтверждающие оплату страхового возмещения\"\n }\n\n @staticmethod\n def get_name(value):\n\n return OsagoDocTypeEnum._ALL.get(value, u\"\")\n\nclass ApplicationTypeEnum(_SimpleEnum):\n AT_ONESELF = \"oneself\"\n AT_RESPONSIBLE_PERSON = \"responsible_person\"\n AT_MAIL = \"mail\"\n\n _ALL = {\n AT_ONESELF,\n AT_RESPONSIBLE_PERSON,\n AT_MAIL\n }\n\nclass ObtainAddressEnum(_SimpleEnum):\n 
OA_OWNER_ADDRESS = \"owner_address\"\n OA_RESPONSIBLE_PERSON_ADDRESS = \"responsible_person_address\"\n OA_OTHER_ADDRESS = \"other_address\"\n\n _ALL = {\n OA_OWNER_ADDRESS,\n OA_RESPONSIBLE_PERSON_ADDRESS,\n OA_OTHER_ADDRESS\n }\n\nclass OsagoBatchStatusEnum(_SimpleEnum):\n OBS_NEW = \"new\"\n OBS_PRETENSION = \"pretension\"\n OBS_CLAIM = \"claim\"\n OBS_CLAIM_PRESENTATION = \"claim_presentation\"\n\n _ALL = {\n OBS_NEW,\n OBS_PRETENSION,\n OBS_CLAIM,\n OBS_CLAIM_PRESENTATION\n }\n\nclass PretensionResultEnum(_SimpleEnum):\n PR_SUCCESS = \"success\"\n PR_REFUSE = \"refuse\"\n PR_PARTIAL_SUCCESS = \"partial_success\"\n PR_UNKNOWN = \"unknown\"\n\n _ALL = {\n PR_SUCCESS,\n PR_REFUSE,\n PR_PARTIAL_SUCCESS,\n PR_UNKNOWN\n }\n\nclass CourtAttendanceEnum(_SimpleEnum):\n CA_ONESELF = \"oneself\"\n CA_NOBODY = \"nobody\"\n CA_RESPONSIBLE_PERSON = \"responsible_person\"\n\n _ALL = {\n CA_ONESELF,\n CA_NOBODY,\n CA_RESPONSIBLE_PERSON\n }\n\nclass ActObtainWayEnum(_SimpleEnum):\n ABW_ONESELF = \"oneself\"\n ABW_MAIL = \"mail\"\n ABW_RESPONSIBLE_PERSON = \"responsible_person\"\n ABW_NO_OBTAIN = \"no_obtain\"\n\n _ALL = {\n ABW_ONESELF,\n ABW_MAIL,\n ABW_RESPONSIBLE_PERSON,\n ABW_NO_OBTAIN\n }\n\nclass InsuranceLawsuitEnum(_SimpleEnum):\n ILS_UNDERPAY = \"insurance_underpay_lawsuit\"\n ILS_PENALTY = \"insurance_penalty_lawsuit\"\n ILS_EXPERTISE_COST = \"insurance_expertise_cost_lawsuit\"\n ILS_FINE = \"insurance_fine_lawsuit\"\n\n _ALL = {\n ILS_UNDERPAY,\n ILS_PENALTY,\n ILS_EXPERTISE_COST,\n ILS_FINE\n }\n\nclass CourtLawsuitDocEnum(_SimpleEnum):\n CLD_LAWSUIT = u\"lawsuit\"\n CLD_CLAIM_COURT_ABSENT = u\"claim_court_absent\"\n CLD_INQUIRE_CRASH = u\"inquire_crash\"\n CLD_POLICE_PROTOCOL = u\"police_protocol\"\n CLD_POLICE_STATEMENT = u\"police_statement\"\n CLD_CASE_INIT_REFUSAL = u\"case_init_refusal\"\n CLD_NOTICE_CRASH = u\"notice_crash\"\n CLD_INSURANCE_DENIAL = u\"insurance_denial\"\n CLD_ACT_INSURANCE_CASE = u\"act_insurance_case\"\n CLD_EXPERTISE_REPORT = u\"expertise_report\"\n CLD_EXPERTISE_CONTRACT = u\"expertise_contract\"\n CLD_EXPERTISE_RECEIPT = u\"expertise_receipt\"\n CLD_PRETENSION_MAIL_RECEIPT = u\"pretension_mail_receipt\"\n CLD_PRETENSION_MAIL_LIST = u\"pretension_mail_list\"\n CLD_PRETENSION_MAIL_NOTIFY = u\"pretension_mail_notify\"\n CLD_PRETENSION = u\"pretension\"\n CLD_PRETENSION_INSURANCE_NOTE = u\"pretension_insurance_note\"\n CLD_DOCUMENTS_CLAIM = u\"documents_claim\"\n CLD_DOCUMENTS_CLAIM_INSURANCE_NOTE = u\"documents_claim_insurance_note\"\n CLD_PRETENSION_ANSWER_COPY = u\"pretension_answer\"\n CLD_POLICY_OSAGO_COPY = u\"policy_osago_copy\"\n CLD_CAR_CERTIFICATE = u\"car_certificate\"\n CLD_CAR_PASSPORT = u\"car_passport\"\n CLD_LEGAL_FEE_RECEIPT = u\"legal_fee_receipt\"\n CLD_TRUST_COURT_REPRESENTATION = u\"trust_court_representation\"\n CLD_TRUST_SUBMISION_OBTAIN_DOCS = u\"trust_submision_obtain_docs\"\n CLD_TRUST_INSURANCE_COURT = u\"trust_insurance_court\"\n CLD_MAIL_DOCS_LIST = u\"mail_docs_list\"\n CLD_BANK_STATEMENT = u\"bank_statement\"\n CLD_NOTARY_PAY_ACT = u\"notary_pay_act\"\n CLD_VICTIM_OWNER_PASSPORT_COPY = u\"victim_owner_passport_copy\"\n\n _ALL = {\n CLD_LAWSUIT,\n CLD_CLAIM_COURT_ABSENT,\n CLD_INQUIRE_CRASH,\n CLD_POLICE_PROTOCOL,\n CLD_POLICE_STATEMENT,\n CLD_CASE_INIT_REFUSAL,\n CLD_NOTICE_CRASH,\n CLD_INSURANCE_DENIAL,\n CLD_ACT_INSURANCE_CASE,\n CLD_EXPERTISE_REPORT,\n CLD_EXPERTISE_CONTRACT,\n CLD_EXPERTISE_RECEIPT,\n CLD_PRETENSION_MAIL_RECEIPT,\n CLD_PRETENSION_MAIL_LIST,\n CLD_PRETENSION_MAIL_NOTIFY,\n CLD_PRETENSION,\n 
CLD_PRETENSION_INSURANCE_NOTE,\n CLD_DOCUMENTS_CLAIM,\n CLD_DOCUMENTS_CLAIM_INSURANCE_NOTE,\n CLD_PRETENSION_ANSWER_COPY,\n CLD_POLICY_OSAGO_COPY,\n CLD_CAR_CERTIFICATE,\n CLD_CAR_PASSPORT,\n CLD_LEGAL_FEE_RECEIPT,\n CLD_TRUST_COURT_REPRESENTATION,\n CLD_TRUST_SUBMISION_OBTAIN_DOCS,\n CLD_TRUST_INSURANCE_COURT,\n CLD_MAIL_DOCS_LIST,\n CLD_BANK_STATEMENT,\n CLD_NOTARY_PAY_ACT,\n CLD_VICTIM_OWNER_PASSPORT_COPY\n }\n" }, { "alpha_fraction": 0.59910649061203, "alphanum_fraction": 0.6038090586662292, "avg_line_length": 39.122642517089844, "blob_id": "a36293c5e6f42cff8010ddd700380022a52eb949", "content_id": "32b013c228121f5b02e120d8e43806d127980740", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4640, "license_type": "no_license", "max_line_length": 125, "num_lines": 106, "path": "/app/services/osago/osago_manager.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\nimport pytils\nimport common_utils\nfrom fw.documents.batch_manager import BatchManager\nfrom fw.documents.enums import DocumentTypeEnum, DocumentBatchTypeEnum\nfrom fw.db.sql_base import db as sqldb\nfrom services.pay.models import PayInfoObject, PurchaseServiceType\n\n\nclass OsagoBatchManager(BatchManager):\n\n BATCH_TYPE = DocumentBatchTypeEnum.DBT_OSAGO\n\n DOC_TITLES = {\n DocumentTypeEnum.DT_OSAGO_MAIL_LIST: u\"Опись ценного письма для ОСАГО\",\n DocumentTypeEnum.DT_OSAGO_PRETENSION: u\"Претензия по ОСАГО\",\n DocumentTypeEnum.DT_OSAGO_DOCUMENTS_CLAIM: u\"Заявление на выдачу документов по ОСАГО\",\n DocumentTypeEnum.DT_OSAGO_TRUST_SUBMISSION_DOCS: u\"Доверенность на подачу документов в страховую\",\n DocumentTypeEnum.DT_OSAGO_TRUST_OBTAIN_DOCS: u\"Доверенность на получение документов из страховой\",\n DocumentTypeEnum.DT_OSAGO_TRUST_SUBMISION_OBTAIN_DOCS: u\"Доверенность на подачу и получение документов из страховой\",\n\n DocumentTypeEnum.DT_OSAGO_CLAIM_COURT_ABSENT: u\"Заявление об отсутствии на суде\",\n DocumentTypeEnum.DT_OSAGO_CLAIM_ALL_EXECUTION_ACT: u\"Завление на выдачу ИЛ ко всем\",\n DocumentTypeEnum.DT_OSAGO_CLAIM_GUILTY_EXECUTION_ACT: u\"Завление на выдачу ИЛ к виновнику\",\n DocumentTypeEnum.DT_OSAGO_CLAIM_INSURANCE_EXECUTION_ACT: u\"Завление на выдачу ИЛ к страховой\",\n DocumentTypeEnum.DT_OSAGO_LAWSUIT: u\"Иск\",\n DocumentTypeEnum.DT_OSAGO_COURT_MAIL_LIST: u\"Опись для ценного письма\"\n }\n\n def get_title(self, doc_type):\n return OsagoBatchManager.DOC_TITLES.get(doc_type, '')\n\n def get_last_modified_batch_caption(self, batch_db):\n if not batch_db:\n return u\"\"\n\n dt = batch_db.data.get('crash_date', None)\n if not dt:\n return u\"\"\n return pytils.dt.ru_strftime(u\"%d %B %Y г.\", inflected=True, date=dt)\n\n def get_batch_caption(self, batch_db):\n if not batch_db:\n return u\"\"\n\n data = batch_db.data\n dt = data.get('crash_date', None)\n return u\"Возмещение по ОСАГО, дата аварии: %s\" % common_utils.get_russian_date(dt) if dt else u\"Возмещение по ОСАГО\"\n\n @staticmethod\n def cancel_batch_finalization(batch_db_obj, config, logger):\n pass\n\n def finalize_batch(self, config, logger, batch):\n return False\n\n def get_stage(self, batch):\n state_map = {\n 'pretension': 'preparation',\n 'generating_pretension': 'preparation',\n 'claim': 'submission',\n 'generating_claim': 'submission',\n 'court': 'submission',\n }\n return state_map.get(batch.status, 'submission')\n\n @staticmethod\n def check_and_fix_osago_payments(batch):\n if batch.creation_date >= 
datetime(2015, 10, 7):\n return\n\n if batch.status == 'claim':\n pay_count = PayInfoObject.query.filter_by(batch=batch).count()\n if pay_count < 1:\n new_pay_info = PayInfoObject(\n user=batch._owner,\n batch=batch,\n pay_record_id=0,\n payment_provider=0,\n service_type=PurchaseServiceType.OSAGO_PART1\n )\n sqldb.session.add(new_pay_info)\n sqldb.session.commit()\n elif batch.status == 'court':\n pay_count = PayInfoObject.query.filter_by(batch=batch).count()\n if pay_count < 1:\n new_pay_info = PayInfoObject(\n user=batch._owner,\n batch=batch,\n pay_record_id=0,\n payment_provider=0,\n service_type=PurchaseServiceType.OSAGO_PART1\n )\n sqldb.session.add(new_pay_info)\n sqldb.session.commit()\n if pay_count < 2:\n new_pay_info = PayInfoObject(\n user=batch._owner,\n batch=batch,\n pay_record_id=0,\n payment_provider=0,\n service_type=PurchaseServiceType.OSAGO_PART2\n )\n sqldb.session.add(new_pay_info)\n sqldb.session.commit()\n" }, { "alpha_fraction": 0.6211643218994141, "alphanum_fraction": 0.62288498878479, "avg_line_length": 38.17977523803711, "blob_id": "27939fe0a328efcae1b81ecd81ca44d42d7ebd52", "content_id": "61fe0ab701a0794ddf6d99b4d0f965e047b587f6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3487, "license_type": "no_license", "max_line_length": 129, "num_lines": 89, "path": "/app/fw/async_tasks/periodic_tasks.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime, timedelta\n\nfrom celery import current_app as celery\nfrom flask.globals import current_app\nfrom fw.async_tasks import celery_utils\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.batch_manager import BatchManager\nfrom fw.documents.db_fields import DocumentBatchDbObject\nfrom fw.documents.enums import BatchStatusEnum\n\ncelery.config_from_envvar('CELERY_CONFIG_MODULE')\n\nfrom manage_commands.batch_commands import GetFssNumberCommand\n\nBATCH_FINALISATION_MAX_DURATION = 120 # seconds\n\n\[email protected]()\ndef check_frozen_batch_finalisation():\n config = celery.conf.get('config')\n\n with celery.conf['flask_app']().app_context():\n logger = current_app.logger\n logger.debug(u\"Starting dead batches being finalised\")\n\n cur = DocumentBatchDbObject.query.filter(\n DocumentBatchDbObject.current_task_id != None,\n DocumentBatchDbObject.batch_rendering_start < datetime.utcnow() - timedelta(seconds=BATCH_FINALISATION_MAX_DURATION),\n DocumentBatchDbObject.status == BatchStatusEnum.BS_BEING_FINALISED\n )\n\n inspect_tasks = []\n inspector = celery.control.inspect()\n actives = inspector.active()\n for item in (actives.values() if actives else []):\n inspect_tasks.extend(item)\n schedules = inspector.scheduled()\n for item in (schedules.values() if schedules else []):\n inspect_tasks.extend(item)\n\n for batch in cur:\n logger.debug(u\"checking %s\" % batch.id)\n task_id = batch.current_task_id\n if not celery_utils.found_same_task('fw.async_tasks.rendering.render_batch',\n task_id=task_id, args=(batch.id, ),\n inspect_tasks=inspect_tasks):\n logger.warn(\n u\"Batch %s is being finalised but corresponding celery task was not found. 
\"\n u\"Cancelling batch finalisation!\" % batch.id)\n\n try:\n BatchManager.cancel_batch_finalization(batch, config, logger)\n except Exception:\n current_app.logger.exception(u\"Failed to cancel batch finalization.\")\n continue\n\n cur = DocumentBatchDbObject.query.filter(\n DocumentBatchDbObject.current_task_id == None,\n DocumentBatchDbObject.status == BatchStatusEnum.BS_BEING_FINALISED\n )\n for batch in cur:\n logger.warn(\n u\"Batch %s is being finalised but corresponding celery task was not found [2]. \"\n u\"Cancelling batch finalisation!\" % batch.id)\n\n try:\n BatchManager.cancel_batch_finalization(batch, config, logger)\n except Exception:\n current_app.logger.exception(u\"Failed to cancel batch finalization.\")\n continue\n\n return True\n\[email protected]()\ndef get_fss_task():\n config = celery.conf.get('config')\n db = celery.conf.get('db')\n\n with celery.conf['flask_app']().app_context():\n logger = current_app.logger\n GetFssNumberCommand.get_fss_number(logger)\n\n\[email protected]()\ndef clean_kombu_messages():\n with celery.conf['flask_app']().app_context():\n sqldb.engine.execute(u\"DELETE FROM kombu_message WHERE timestamp < '%s';\" %\n (datetime.utcnow() - timedelta(days=3)).strftime(\"%Y-%m-%dT%H:%M:%S\"))\n" }, { "alpha_fraction": 0.324611634016037, "alphanum_fraction": 0.3268222212791443, "avg_line_length": 40.5029182434082, "blob_id": "d57ca6b073408088f2c5d72870595a06f2b9683b", "content_id": "8a31fd424d0c4156eb03e5a05d601c2f8a42ccec", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 50123, "license_type": "no_license", "max_line_length": 130, "num_lines": 1199, "path": "/app/services/llc_reg/documents/third_stage_llc_reg_initial_db_data.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport os\nfrom fw.documents.enums import DocumentTypeEnum, DocumentKindEnum, BatchStatusEnum\nfrom fw.documents.field_matchers import SimpleMatcher, FieldAttributeMatcher, ConstantMatcher\nfrom fw.documents.common_schema_fields import (SHORT_NAME_FIELD, FULL_NAME_FIELD, ADDRESS_FIELD,\n GENERAL_MANAGER_FIELD, FOUNDERS_COUNT_FIELD,\n GENERAL_MANAGER_CAPTION_FIELD,\n BOARD_OF_DIRECTORS_FIELD, EMPLOYER_FIELD,\n ADDRESS_TYPE_FIELD, INN_FIELD, KPP_FIELD,\n OGRN_FIELD, ACCOUNTANT_PERSON_FIELD)\nfrom services.llc_reg.documents.enums import DocumentDeliveryTypeEnum\n\nMAP_OBTAIN_WAY = {\n \"field\": \"obtain_way\",\n \"map\": {\n \"founder\": DocumentDeliveryTypeEnum.DDT_ISSUE_TO_THE_APPLICANT,\n \"responsible_person\": DocumentDeliveryTypeEnum.DDT_ISSUE_TO_THE_APPLICANT_OR_AGENT,\n \"mail\": DocumentDeliveryTypeEnum.DDT_SEND_BY_MAIL\n }\n}\n\n\ndef get_test_resource_name(config, resource_rel_path):\n resources_path = config['resources_path']\n return os.path.join(resources_path, resource_rel_path)\n\n\ndef load_data(config):\n GENERAL_MANAGER_CONTRACT_TEMPLATE = {\n \"doc_name\": DocumentTypeEnum.DT_GENERAL_MANAGER_CONTRACT,\n \"template_name\": \"general_manager_contract\",\n \"file_name\": get_test_resource_name(config, \"general_manager_contract.tex\"),\n \"is_strict\": False,\n }\n\n GENERAL_MANAGER_CONTRACT_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_GENERAL_MANAGER_CONTRACT,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Трудовой договор с руководителем\",\n \"conditions\": {\n u\"has_general_manager_contract\": True\n },\n \"batch_statuses\": [BatchStatusEnum.BS_FINALISED],\n \"fields\": [\n SHORT_NAME_FIELD,\n FULL_NAME_FIELD,\n 
ADDRESS_FIELD,\n GENERAL_MANAGER_FIELD,\n FOUNDERS_COUNT_FIELD,\n GENERAL_MANAGER_CAPTION_FIELD,\n BOARD_OF_DIRECTORS_FIELD,\n EMPLOYER_FIELD,\n ADDRESS_TYPE_FIELD,\n INN_FIELD,\n KPP_FIELD,\n OGRN_FIELD,\n {\n \"name\": \"general_manager_salary\",\n \"type\": \"DocCurrencyField\",\n \"required\": True,\n \"error_field_mapping\": {\n \"general_manager_salary\": \".\"\n },\n \"validator\": {\n \"conditions\": [{\n \"value\": {\n \"#gt\": 0.0\n }\n }],\n \"error_field\": \"general_manager_salary\"\n }\n }, {\n \"name\": \"general_manager_salary_days\",\n \"type\": \"DocArrayField\",\n \"cls\": \"DocIntField\",\n \"required\": True,\n \"validator\": {\n \"conditions\": [{\n \"values\": {\n \"#not_empty\": True\n }\n }]\n }\n }, {\n \"name\": \"general_manager_trial_period\",\n \"type\": \"DocIntField\",\n \"required\": True\n }, {\n \"name\": \"general_manager_quit_notify_period\",\n \"type\": \"DocIntField\",\n \"required\": True\n }, {\n \"name\": \"general_manager_fixed_working_hours\",\n \"type\": \"DocBoolField\",\n \"required\": True\n }, {\n \"name\": \"general_manager_term\",\n \"type\": \"DocIntField\",\n \"min_val\": 6,\n \"max_val\": 60,\n \"required\": True\n }, {\n \"name\": \"general_manager_contract_number\",\n \"type\": \"DocTextField\",\n \"required\": True,\n \"min_length\": 1\n }, {\n \"name\": \"accountant_contract_number\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"min_length\": 1,\n }, {\n \"name\": \"general_manager_contract_date\",\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": True\n }, {\n \"name\": \"general_manager_has_special_terms\",\n \"type\": \"DocBoolField\",\n \"required\": True,\n \"default\": False\n }, {\n \"name\": \"general_manager_contract_additional_terms\",\n \"type\": \"DocAdditionalRightsField\",\n \"required\": False\n }, {\n \"name\": \"general_manager_working_hours\",\n \"type\": \"DocWorkingHoursField\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"holidays\": {\n \"validator\": {\n \"conditions\": [{\n \"values\": {\n \"#not_empty\": True\n }\n }]\n }\n }\n }\n }, {\n \"name\": \"reg_date\",\n \"type\": \"calculated\",\n \"field_type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": True,\n \"suppress_validation_errors\": True,\n \"value\": {\n \"#rendered_doc_field\": {\n \"document_type\": DocumentTypeEnum.DT_ARTICLES,\n \"value\": {\n \"#field\": \"<document>->doc_date\"\n }\n }\n }\n }, {\n \"name\": \"has_general_manager_contract\",\n \"type\": \"DocBoolField\",\n \"required\": False\n }\n ],\n \"validators\": [{\n \"condition\": {\n \"weekly_hours\": {\n \"#lte\": 2400\n }\n },\n \"error\": {\n \"field\": \"general_manager_working_hours\",\n \"code\": 5\n },\n \"#set\": [{\n \"day_hours_with_lunch\": {\n \"#sub\": [{\n \"#field\": \"general_manager_working_hours->finish_working_hours\"\n }, {\n \"#field\": \"general_manager_working_hours->start_working_hours\"\n }]\n }\n }, {\n \"weekly_hours\": {\n \"#mul\": [{\n \"#sub\": [7, {\n \"#size\": \"general_manager_working_hours->holidays\"\n }]\n }, {\n \"#sub\": [{\n \"#div\": [{\n \"#field\": \"day_hours_with_lunch->total_seconds\"\n }, 60]\n }, {\n \"#field\": \"general_manager_working_hours->lunch_time\"\n }]\n }]\n }\n }]\n }, {\n \"condition\": {\n \"accountant_contract_number\": {\n \"#ne\": \"@general_manager_contract_number\"\n }\n },\n \"error\": {\n \"field\": \"general_manager_contract_number\",\n \"code\": 5\n }\n }]\n }\n\n GENERAL_MANAGER_ORDER_TEMPLATE = {\n \"doc_name\": 
DocumentTypeEnum.DT_GENERAL_MANAGER_ORDER,\n \"template_name\": \"general_manager_order\",\n \"file_name\": get_test_resource_name(config, \"general_manager_order.tex\"),\n \"is_strict\": False,\n }\n\n GENERAL_MANAGER_ORDER_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_GENERAL_MANAGER_ORDER,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Приказ о вступлении в должность\",\n \"conditions\": {\n u\"has_general_manager_order\": True\n },\n \"batch_statuses\": [BatchStatusEnum.BS_FINALISED],\n \"fields\": [\n SHORT_NAME_FIELD,\n FULL_NAME_FIELD,\n ADDRESS_FIELD,\n GENERAL_MANAGER_FIELD,\n FOUNDERS_COUNT_FIELD,\n GENERAL_MANAGER_CAPTION_FIELD,\n BOARD_OF_DIRECTORS_FIELD,\n ADDRESS_TYPE_FIELD,\n INN_FIELD,\n KPP_FIELD,\n OGRN_FIELD,\n {\n \"name\": \"general_manager_order_date\",\n \"type\": \"calculated\",\n \"field_type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": True,\n \"suppress_validation_errors\": True,\n \"value\": {\n \"#cases\": {\n \"list\": [\n {\n \"conditions\": {\n \"has_general_manager_contract\": True\n },\n \"value\": {\n \"#field\": \"general_manager_contract_date\"\n }\n }],\n \"default\": {\n \"value\": {\n \"#field\": \"registration_date->next_working_day_p\"\n }\n }\n },\n }\n },\n {\n \"name\": \"general_manager_order_number\",\n \"type\": \"DocTextField\",\n \"required\": True,\n \"max_length\": 30,\n \"min_length\": 1,\n \"allowed_re\": ur\"^[0-9a-zA-Zа-яёА-ЯЁ#\\|\\-\\s]+$\",\n },\n {\n \"name\": \"has_general_manager_order\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"reshenie_date\",\n \"type\": \"calculated\",\n \"field_type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": True,\n \"suppress_validation_errors\": True,\n \"value\": {\n \"#rendered_doc_field\": {\n \"document_type\": DocumentTypeEnum.DT_ARTICLES,\n \"value\": {\n \"#field\": \"<document>->doc_date\"\n }\n }\n }\n }\n ]\n }\n\n ACCOUNTANT_CONTRACT_TEMPLATE = {\n \"doc_name\": DocumentTypeEnum.DT_ACCOUNTANT_CONTRACT,\n \"template_name\": \"accountant_contract\",\n \"file_name\": get_test_resource_name(config, \"accountant_contract.tex\"),\n \"is_strict\": False,\n }\n\n ACCOUNTANT_CONTRACT_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_ACCOUNTANT_CONTRACT,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Трудовой договор с главным бухгалтером\",\n \"conditions\": {\n u\"has_accountant_contract_order\": True\n },\n \"batch_statuses\": [BatchStatusEnum.BS_FINALISED],\n \"fields\": [\n SHORT_NAME_FIELD,\n FULL_NAME_FIELD,\n ADDRESS_FIELD,\n ADDRESS_TYPE_FIELD,\n GENERAL_MANAGER_FIELD,\n FOUNDERS_COUNT_FIELD,\n GENERAL_MANAGER_CAPTION_FIELD,\n BOARD_OF_DIRECTORS_FIELD,\n EMPLOYER_FIELD,\n INN_FIELD,\n KPP_FIELD,\n OGRN_FIELD,\n ACCOUNTANT_PERSON_FIELD,\n {\n \"name\": \"accountant_contract_number\",\n \"type\": \"DocTextField\",\n \"required\": True,\n \"min_length\": 1,\n }, {\n \"name\": \"general_manager_contract_number\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"min_length\": 1\n }, {\n \"name\": \"accountant_start_work\",\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": True\n },\n {\n \"name\": \"accountant_trial_period\",\n \"type\": \"DocIntField\",\n \"required\": True,\n },\n {\n \"name\": \"accountant_salary\",\n \"type\": \"DocCurrencyField\",\n \"required\": True,\n \"error_field_mapping\": {\n \"accountant_salary\": \".\"\n },\n \"validator\": {\n \"conditions\": [{\n \"value\": {\n \"#gt\": 
0.0\n }\n }],\n \"error_field\": \"accountant_salary\"\n }\n },\n {\n \"name\": \"accountant_salary_days\",\n \"type\": \"DocArrayField\",\n \"cls\": \"DocIntField\",\n \"required\": True,\n \"validator\": {\n \"conditions\": [{\n \"values\": {\n \"#not_empty\": True\n }\n }]\n }\n },\n {\n \"name\": \"accountant_fixed_working_hours\",\n \"type\": \"DocBoolField\",\n \"required\": True\n },\n {\n \"name\": \"accountant_has_special_terms\",\n \"type\": \"DocBoolField\",\n \"required\": True,\n \"default\": False\n },\n {\n \"name\": \"accountant_contract_additional_terms\",\n \"type\": \"DocAdditionalRightsField\",\n \"required\": False\n },\n {\n \"name\": \"accountant_working_hours\",\n \"type\": \"DocWorkingHoursField\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"holidays\": {\n \"validator\": {\n \"conditions\": [{\n \"values\": {\n \"#not_empty\": True\n }\n }]\n }\n }\n }\n },\n {\n \"name\": \"accountant_fixed_working_hours\",\n \"type\": \"DocBoolField\",\n \"required\": True\n },\n {\n \"name\": \"has_accountant_contract_order\",\n \"type\": \"DocBoolField\",\n \"required\": False\n }\n\n ],\n \"validators\": [{\n \"condition\": {\n \"weekly_hours\": {\n \"#lte\": 2400\n }\n },\n \"error\": {\n \"field\": \"accountant_working_hours\",\n \"code\": 5\n },\n \"#set\": [{\n \"day_hours_with_lunch\": {\n \"#sub\": [{\n \"#field\": \"accountant_working_hours->finish_working_hours\"\n }, {\n \"#field\": \"accountant_working_hours->start_working_hours\"\n }]\n }\n }, {\n \"weekly_hours\": {\n \"#mul\": [{\n \"#sub\": [7, {\n \"#size\": \"accountant_working_hours->holidays\"\n }]\n }, {\n \"#sub\": [{\n \"#div\": [{\n \"#field\": \"day_hours_with_lunch->total_seconds\"\n }, 60]\n }, {\n \"#field\": \"accountant_working_hours->lunch_time\"\n }]\n }]\n }\n }]\n },\n {\n \"condition\": {\n \"accountant_contract_number\": {\n \"#ne\": \"@general_manager_contract_number\"\n }\n },\n \"error\": {\n \"field\": \"accountant_contract_number\",\n \"code\": 5\n }\n }]\n }\n\n ACCOUNTANT_IMPOSITION_ORDER_TEMPLATE = {\n \"doc_name\": DocumentTypeEnum.DT_ACCOUNTANT_IMPOSITION_ORDER,\n \"template_name\": \"accountant_imposition_order\",\n \"file_name\": get_test_resource_name(config, \"accountant_imposition_order.tex\"),\n \"is_strict\": False,\n }\n\n ACCOUNTANT_IMPOSITION_ORDER_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_ACCOUNTANT_IMPOSITION_ORDER,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Приказ о возложении обязанностей бухгалтера на директора\",\n \"conditions\": {\n u\"general_manager_as_accountant\": True\n },\n \"batch_statuses\": [BatchStatusEnum.BS_FINALISED],\n \"fields\": [\n SHORT_NAME_FIELD,\n FULL_NAME_FIELD,\n ADDRESS_FIELD,\n GENERAL_MANAGER_FIELD,\n GENERAL_MANAGER_CAPTION_FIELD,\n OGRN_FIELD,\n INN_FIELD,\n KPP_FIELD,\n {\n \"name\": \"general_manager_contract_date\",\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": False\n },\n {\n \"name\": \"general_manager_contract_date_calc\",\n \"type\": \"calculated\",\n \"field_type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": True,\n \"error_field_mapping\": {\n \"general_manager_contract_date_calc\": \"general_manager_contract_date\",\n },\n \"value\": {\n \"#cases\": {\n \"list\": [\n {\n \"conditions\": {\n \"has_general_manager_contract\": True\n },\n \"value\": {\n \"#field\": \"general_manager_contract_date\"\n }\n }],\n \"default\": {\n \"value\": {\n \"#field\": \"registration_date->next_working_day_p\"\n }\n }\n 
},\n }\n },\n {\n \"name\": \"general_manager_as_accountant_order_number\",\n \"type\": \"DocTextField\",\n \"max_length\": 30,\n \"min_length\": 1,\n \"allowed_re\": ur\"^[0-9a-zA-Zа-яёА-ЯЁ#\\|\\-\\s]+$\",\n \"required\": True\n }\n ]\n }\n\n ACCOUNTANT_ORDER_TEMPLATE = {\n \"doc_name\": DocumentTypeEnum.DT_ACCOUNTANT_ORDER,\n \"template_name\": \"accountant_order\",\n \"is_strict\": True,\n \"pages\": [\n {\n \"page_file\": get_test_resource_name(config, \"job_order.pdf\"),\n \"fields\": [\n {\n \"name\": \"company_full_name\",\n \"field-length\": 100,\n \"text-align\": \"left\",\n }, {\n \"name\": \"doc_number\",\n \"field-length\": 20,\n \"text-align\": \"left\",\n }, {\n \"name\": \"doc_date1\",\n \"field-length\": 10,\n \"text-align\": \"left\"\n }, {\n \"name\": \"work_start_date\",\n \"field-length\": 10,\n \"text-align\": \"left\"\n }, {\n \"name\": \"fio\",\n \"field-length\": 100,\n \"text-align\": \"left\"\n }, {\n \"name\": \"tabel_number\",\n \"field-length\": 20,\n \"text-align\": \"left\"\n }, {\n \"name\": \"direction\",\n \"field-length\": 100,\n \"text-align\": \"left\"\n }, {\n \"name\": \"title\",\n \"field-length\": 100,\n \"text-align\": \"left\"\n }, {\n \"name\": \"conditions_line1\",\n \"field-length\": 100,\n \"text-align\": \"left\"\n }, {\n \"name\": \"salary_rub\",\n \"field-length\": 20,\n \"text-align\": \"left\"\n }, {\n \"name\": \"salary_cop\",\n \"field-length\": 3,\n \"text-align\": \"left\"\n }, {\n \"name\": \"bonus_rub\",\n \"field-length\": 20,\n \"text-align\": \"left\"\n }, {\n \"name\": \"bonus_cop\",\n \"field-length\": 3,\n \"text-align\": \"left\"\n }, {\n \"name\": \"trial_period_monthes\",\n \"field-length\": 100,\n \"text-align\": \"left\"\n }, {\n \"name\": \"contract_date_day\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\"\n }, {\n \"name\": \"contract_date_month\"\n }, {\n \"name\": \"contract_date_year\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\"\n }, {\n \"name\": \"contract_number\",\n \"field-length\": 20,\n \"text-align\": \"left\"\n }\n ]\n }]\n }\n\n ACCOUNTANT_ORDER_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_ACCOUNTANT_ORDER,\n \"file_name_template\": u\"Приказ о приёме на работу главного бухгалтера\",\n \"conditions\": {\n u\"has_accountant_contract_order\": True\n },\n \"batch_statuses\": [BatchStatusEnum.BS_FINALISED],\n \"fields\": [\n FULL_NAME_FIELD,\n ACCOUNTANT_PERSON_FIELD,\n {\n \"name\": \"accountant_contract_number\",\n \"type\": \"DocTextField\",\n \"required\": True,\n \"max_length\": 30,\n \"min_length\": 1,\n \"allowed_re\": ur\"^[0-9a-zA-Zа-яёА-ЯЁ#\\|\\-\\s]+$\",\n }, {\n \"name\": \"accountant_start_work\",\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%d.%m.%Y\",\n \"required\": True\n }, {\n \"name\": \"accountant_trial_period\",\n \"type\": \"DocIntField\",\n \"required\": False\n }, {\n \"name\": \"accountant_salary\",\n \"type\": \"DocCurrencyField\",\n \"required\": True\n }, {\n \"name\": \"accountant_order_number\",\n \"type\": \"DocTextField\",\n \"required\": True\n }, {\n \"name\": \"accountant_trial_period_text\",\n \"type\": \"calculated\",\n \"field_type\": \"DocTextField\",\n \"required\": False,\n \"suppress_validation_errors\": True,\n \"value\": {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"accountant_trial_period\": {\n \"#empty\": True\n }\n },\n \"value\": {\n \"#value\": u\"Без срока испытания\"\n }\n }],\n \"default\": {\n \"value\": {\n \"#exec\": {\n \"module\": \"llc_reg_methods\",\n 
\"method\": \"num_to_text\",\n \"args\": [{\n \"#field\": \"accountant_trial_period\"\n }]\n }\n }\n }\n }\n }\n }, {\n \"name\": \"has_accountant_contract_order\",\n \"type\": \"DocBoolField\",\n \"required\": False\n }\n ]\n }\n\n ACCOUNTANT_ORDER_MATCHER = {\n \"doc_name\": DocumentTypeEnum.DT_ACCOUNTANT_ORDER,\n \"template_name\": ACCOUNTANT_ORDER_TEMPLATE['template_name'],\n \"fields\": {\n \"company_full_name\": SimpleMatcher(field_name=\"full_name\",\n prefix=u\"Общество с ограниченной ответственностью «\",\n suffix=u\"»\"),\n \"doc_number\": SimpleMatcher(field_name=\"accountant_order_number\"),\n \"doc_date1\": SimpleMatcher(field_name=\"accountant_start_work\"),\n \"work_start_date\": SimpleMatcher(field_name=\"accountant_start_work\"),\n \"fio\": FieldAttributeMatcher(field_name=\"accountant_person\", attr_name=\"full_name\"),\n \"tabel_number\": ConstantMatcher(value=u\"\"),\n \"direction\": ConstantMatcher(value=u\"Дирекция\"),\n \"title\": ConstantMatcher(value=u\"Главный бухгалтер\"),\n \"conditions_line1\": ConstantMatcher(value=u\"Постоянно\"),\n \"salary_rub\": FieldAttributeMatcher(field_name=\"accountant_salary\", attr_name=\"currency_major\"),\n \"salary_cop\": ConstantMatcher(value=\"00\"),\n \"bonus_rub\": ConstantMatcher(value=\"\"),\n \"bonus_cop\": ConstantMatcher(value=\"\"),\n \"trial_period_monthes\": SimpleMatcher(field_name=\"accountant_trial_period_text\"),\n \"contract_date_day\": FieldAttributeMatcher(field_name=\"accountant_start_work\", attr_name=\"day\"),\n \"contract_date_month\": FieldAttributeMatcher(field_name=\"accountant_start_work\", attr_name=\"month\",\n adapter=\"MonthRusNameDeclAdapter\"),\n \"contract_date_year\": FieldAttributeMatcher(field_name=\"accountant_start_work\", attr_name=\"year\"),\n \"contract_number\": SimpleMatcher(field_name=\"accountant_contract_number\")\n }\n }\n\n ROSSTAT_CLAIM_TEMPLATE = {\n \"doc_name\": DocumentTypeEnum.DT_ROSSTAT_CLAIM,\n \"template_name\": \"rosstat_claim\",\n \"file_name\": get_test_resource_name(config, \"rosstat_claim.tex\"),\n \"is_strict\": False,\n }\n\n ROSSTAT_CLAIM_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_ROSSTAT_CLAIM,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Заявление в Росстат\",\n \"batch_statuses\": [BatchStatusEnum.BS_FINALISED],\n \"fields\": [\n SHORT_NAME_FIELD,\n FULL_NAME_FIELD,\n ADDRESS_FIELD,\n GENERAL_MANAGER_FIELD,\n GENERAL_MANAGER_CAPTION_FIELD,\n INN_FIELD,\n KPP_FIELD,\n OGRN_FIELD,\n {\n \"name\": \"day_after_registration\",\n \"type\": \"calculated\",\n \"field_type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": True,\n \"suppress_validation_errors\": True,\n \"value\": {\n \"#field\": \"registration_date->next_working_day_p\"\n }\n },\n ]\n }\n\n FSS_CLAIM_TEMPLATE = {\n \"doc_name\": DocumentTypeEnum.DT_FSS_CLAIM,\n \"template_name\": \"fss_claim\",\n \"file_name\": get_test_resource_name(config, \"fss_claim.tex\"),\n \"is_strict\": False,\n }\n\n FSS_CLAIM_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_FSS_CLAIM,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Заявление в ФСС\",\n \"batch_statuses\": [BatchStatusEnum.BS_FINALISED],\n \"fields\": [\n SHORT_NAME_FIELD,\n FULL_NAME_FIELD,\n ADDRESS_FIELD,\n GENERAL_MANAGER_FIELD,\n GENERAL_MANAGER_CAPTION_FIELD,\n INN_FIELD,\n KPP_FIELD,\n OGRN_FIELD,\n {\n \"name\": \"day_after_registration\",\n \"type\": \"calculated\",\n \"field_type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": True,\n 
\"suppress_validation_errors\": True,\n \"value\": {\n \"#field\": \"registration_date->next_working_day_p\"\n }\n },\n ]\n }\n\n PFR_CLAIM_TEMPLATE = {\n \"doc_name\": DocumentTypeEnum.DT_PFR_CLAIM,\n \"template_name\": \"pfr_claim\",\n \"file_name\": get_test_resource_name(config, \"pfr_claim.tex\"),\n \"is_strict\": False,\n }\n\n PFR_CLAIM_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_PFR_CLAIM,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Заявление в ПФР\",\n \"batch_statuses\": [BatchStatusEnum.BS_FINALISED],\n \"fields\": [\n SHORT_NAME_FIELD,\n FULL_NAME_FIELD,\n ADDRESS_FIELD,\n GENERAL_MANAGER_FIELD,\n GENERAL_MANAGER_CAPTION_FIELD,\n INN_FIELD,\n KPP_FIELD,\n OGRN_FIELD,\n {\n \"name\": \"day_after_registration\",\n \"type\": \"calculated\",\n \"field_type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": True,\n \"suppress_validation_errors\": True,\n \"value\": {\n \"#field\": \"registration_date->next_working_day_p\"\n }\n },\n ]\n }\n\n FOUNDERS_LIST_TEMPLATE = {\n \"doc_name\": DocumentTypeEnum.DT_FOUNDERS_LIST,\n \"template_name\": \"founders_list\",\n \"file_name\": get_test_resource_name(config, \"founders_list.tex\"),\n \"is_strict\": False,\n }\n\n FOUNDERS_LIST_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_FOUNDERS_LIST,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Список участников\",\n \"batch_statuses\": [BatchStatusEnum.BS_FINALISED],\n \"fields\": [\n SHORT_NAME_FIELD,\n FULL_NAME_FIELD,\n ADDRESS_FIELD,\n GENERAL_MANAGER_FIELD,\n FOUNDERS_COUNT_FIELD,\n GENERAL_MANAGER_CAPTION_FIELD,\n BOARD_OF_DIRECTORS_FIELD,\n OGRN_FIELD,\n {\n \"name\": \"founders_list_date\",\n \"type\": \"calculated\",\n \"field_type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": True,\n \"suppress_validation_errors\": True,\n \"value\": {\n \"#cases\": {\n \"list\": [\n {\n \"conditions\": {\n \"has_general_manager_contract\": True\n },\n \"value\": {\n \"#field\": \"general_manager_contract_date\"\n }\n }],\n \"default\": {\n \"value\": {\n \"#field\": \"registration_date->next_working_day_p\"\n }\n }\n },\n }\n },\n {\n \"name\": \"registration_date\",\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": True\n },\n {\n \"name\": \"founders\",\n \"type\": \"calculated\",\n \"field_type\": \"DocArrayField\",\n \"cls\": \"FounderObject\",\n \"required\": True,\n \"subfield_kwargs\": {\n \"error_field_mapping\": {\n \"company\": \"founder\",\n \"person\": \"founder\",\n \"documents_recipient_type\": \"\", # suppress error\n \"share\": \"share.\",\n \"nominal_capital\": \"nominal_capital.\"\n }\n },\n \"value\": {\n \"#array_mapping\": {\n \"array_source_field\": {\n \"#field\": \"founders\"\n },\n \"target_items\": {\n \"#object\": {\n \"founder_type\": {\n \"#value_map\": {\n \"field\": {\n \"#array_item_field\": \"founder->type\"\n },\n \"map\": {\n \"person\": 1,\n \"company\": 2\n }\n }\n },\n \"nominal_capital\": {\n \"#array_item_field\": \"nominal_capital\"\n },\n \"share\": {\n \"#object\": {\n \"type\": {\n \"#field\": \"share_type\",\n },\n \"value\": {\n \"#array_item_field\": \"share\"\n }\n }\n },\n \"person\": {\n \"#cases\": {\n \"set\": {\n \"cur_founder_type\": {\n \"#array_item_field\": \"founder->type\"\n }\n },\n \"list\": [{\n \"conditions\": {\n \"cur_founder_type\": \"person\"\n },\n \"value\": {\n \"#array_item_field\": \"founder\"\n }\n }],\n \"default\": {\n \"value\": None\n }\n }\n },\n \"company\": {\n 
\"#cases\": {\n \"set\": {\n \"cur_founder_type\": {\n \"#array_item_field\": \"founder->type\"\n }\n },\n \"list\": [{\n \"conditions\": {\n \"cur_founder_type\": \"company\"\n },\n \"value\": {\n \"#array_item_field\": \"founder\"\n }\n }],\n \"default\": {\n \"value\": None\n }\n }\n },\n \"documents_recipient_type\": {\n \"#cases\": {\n \"set\": {\n \"cur_founder_ref\": {\n \"#array_item_field\": \"founder\"\n },\n \"cur_founders_count\": {\n \"#size\": \"founders\"\n }\n },\n \"list\": [\n {\n \"conditions\": {\n \"cur_founders_count\": 1\n },\n \"value\": {\n \"#value_map\": MAP_OBTAIN_WAY\n }\n }, {\n \"conditions\": {\n \"cur_founders_count\": {\n \"#gt\": 1\n },\n \"obtain_way\": \"founder\",\n \"cur_founder_ref\": \"@doc_obtain_founder\"\n },\n \"value\": {\n \"#value_map\": MAP_OBTAIN_WAY\n }\n }, {\n \"conditions\": {\n \"cur_founders_count\": {\n \"#gt\": 1\n },\n \"obtain_way\": \"responsible_person\",\n \"cur_founder_ref\": \"@selected_moderator\"\n },\n \"value\": {\n \"#value_map\": MAP_OBTAIN_WAY\n }\n }, {\n \"conditions\": {\n \"cur_founders_count\": {\n \"#gt\": 1\n },\n \"obtain_way\": \"mail\",\n \"cur_founder_ref\": \"@selected_moderator\"\n },\n \"value\": {\n \"#value_map\": MAP_OBTAIN_WAY\n }\n }\n ],\n \"default\": {\n \"value\": None\n }\n }\n }\n }\n }\n }\n },\n \"validator\": {\n \"#set\": {\n \"total_share\": {\n \"#aggregate\": {\n \"field\": \"values\",\n \"attr\": \"share.normal\",\n \"operator\": \"add\"\n }\n },\n \"founders_count\": {\n \"#size\": \"values\"\n }\n },\n \"conditions\": [{\n \"#or\": [{\n \"total_share\": {\n \"#almost_equal\": 1\n }\n }, {\n \"values\": {\n \"#size\": 0\n }\n }],\n \"founders_count\": {\n \"#gt\": 0\n }\n }],\n \"error_field\": \"founders\"\n }\n },\n {\n \"name\": \"starter_capital\",\n \"type\": \"calculated\",\n \"field_type\": \"CompanyStarterCapitalField\",\n \"required\": True,\n \"override_fields_kwargs\": {\n \"value\": {\n \"override_fields_kwargs\": {\n \"value\": {\n \"min_val\": 10000\n }\n }\n }\n },\n \"value\": {\n \"#object\": {\n \"capital_type\": 1,\n \"value\": {\n \"#field\": \"starter_capital->value\"\n }\n }\n }\n }\n ]\n }\n\n COMPANY_DETAILS_TEMPLATE = {\n \"doc_name\": DocumentTypeEnum.DT_COMPANY_DETAILS,\n \"template_name\": \"company_details\",\n \"file_name\": get_test_resource_name(config, \"company_details.tex\"),\n \"is_strict\": False,\n }\n\n COMPANY_DETAILS_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_COMPANY_DETAILS,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Реквизиты компании\",\n \"batch_statuses\": [BatchStatusEnum.BS_FINALISED],\n \"fields\": [\n SHORT_NAME_FIELD,\n FULL_NAME_FIELD,\n ADDRESS_FIELD,\n GENERAL_MANAGER_FIELD,\n FOUNDERS_COUNT_FIELD,\n GENERAL_MANAGER_CAPTION_FIELD,\n BOARD_OF_DIRECTORS_FIELD,\n INN_FIELD,\n KPP_FIELD,\n {\n \"name\": \"actual_address\",\n \"type\": \"DocAddressField\",\n \"required\": True,\n },\n {\n \"name\": \"ogrn\",\n \"type\": \"calculated\",\n \"field_type\": \"DocTextField\",\n \"value\": {\n \"#field\": \"<batch>->result_fields->ifns_reg_info->ogrn\"\n }\n }, {\n \"name\": \"bank_bik\",\n \"type\": \"DocTextField\",\n \"min_length\": 9,\n \"max_length\": 9,\n \"required\": True\n }, {\n \"name\": \"bank_account\",\n \"type\": \"DocTextField\",\n \"min_length\": 20,\n \"max_length\": 20,\n \"required\": True\n }, {\n \"name\": \"bank_info\",\n \"type\": \"calculated\",\n \"field_type\": \"DocJsonField\",\n # \"suppress_validation_errors\" : True,\n \"error_field_mapping\": {\n \"bank_info\": \".\"\n 
},\n \"required\": True,\n \"value\": {\n \"#exec\": {\n \"module\": \"llc_reg_methods\",\n \"method\": \"get_bank_info\",\n \"args\": [{\n \"#field\": \"bank_bik\"\n }]\n }\n },\n \"validator\": {\n \"#set\": {\n \"test_bik\": {\n \"#field\": \"value->bik\"\n }\n },\n \"conditions\": [{\n \"test_bik\": {\n \"#not_empty\": True\n }\n }],\n \"error_field\": \"bank_bik\"\n }\n }, {\n \"name\": \"company_email\",\n \"type\": \"DocTextField\",\n \"required\": False\n }, {\n \"name\": \"company_site\",\n \"type\": \"DocTextField\",\n \"required\": False\n }, {\n \"name\": \"company_phone\",\n \"type\": \"DocPhoneNumberField\",\n \"required\": False\n }\n ]\n }\n\n return {\n 'GENERAL_MANAGER_CONTRACT_SCHEMA': GENERAL_MANAGER_CONTRACT_SCHEMA,\n 'GENERAL_MANAGER_CONTRACT_TEMPLATE': GENERAL_MANAGER_CONTRACT_TEMPLATE,\n 'GENERAL_MANAGER_ORDER_SCHEMA': GENERAL_MANAGER_ORDER_SCHEMA,\n 'GENERAL_MANAGER_ORDER_TEMPLATE': GENERAL_MANAGER_ORDER_TEMPLATE,\n 'ACCOUNTANT_CONTRACT_SCHEMA': ACCOUNTANT_CONTRACT_SCHEMA,\n 'ACCOUNTANT_CONTRACT_TEMPLATE': ACCOUNTANT_CONTRACT_TEMPLATE,\n 'ACCOUNTANT_IMPOSITION_ORDER_SCHEMA': ACCOUNTANT_IMPOSITION_ORDER_SCHEMA,\n 'ACCOUNTANT_IMPOSITION_ORDER_TEMPLATE': ACCOUNTANT_IMPOSITION_ORDER_TEMPLATE,\n 'ACCOUNTANT_ORDER_SCHEMA': ACCOUNTANT_ORDER_SCHEMA,\n 'ACCOUNTANT_ORDER_TEMPLATE': ACCOUNTANT_ORDER_TEMPLATE,\n 'ACCOUNTANT_ORDER_MATCHER': ACCOUNTANT_ORDER_MATCHER,\n 'ROSSTAT_CLAIM_SCHEMA': ROSSTAT_CLAIM_SCHEMA,\n 'ROSSTAT_CLAIM_TEMPLATE': ROSSTAT_CLAIM_TEMPLATE,\n 'FSS_CLAIM_SCHEMA': FSS_CLAIM_SCHEMA,\n 'FSS_CLAIM_TEMPLATE': FSS_CLAIM_TEMPLATE,\n 'PFR_CLAIM_SCHEMA': PFR_CLAIM_SCHEMA,\n 'PFR_CLAIM_TEMPLATE': PFR_CLAIM_TEMPLATE,\n 'FOUNDERS_LIST_SCHEMA': FOUNDERS_LIST_SCHEMA,\n 'FOUNDERS_LIST_TEMPLATE': FOUNDERS_LIST_TEMPLATE,\n 'COMPANY_DETAILS_SCHEMA': COMPANY_DETAILS_SCHEMA,\n 'COMPANY_DETAILS_TEMPLATE': COMPANY_DETAILS_TEMPLATE\n }" }, { "alpha_fraction": 0.6233502626419067, "alphanum_fraction": 0.6284263730049133, "avg_line_length": 38.400001525878906, "blob_id": "3e2d9d5b84c9e3bef5876e20a88c96c81d869eda", "content_id": "12fdb9fce8d2e3c12b3084288fb10f9a00c46e7d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 985, "license_type": "no_license", "max_line_length": 116, "num_lines": 25, "path": "/app/fw/plugins/emailer_plugin/mail_composer.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom fw.async_tasks import send_email\n\n\nclass EmailComposer(object):\n def __init__(self, email_type, logger):\n self.email_type = email_type\n self.logger = logger\n\n def send_email(self, target_emails, batch_id, event_data, retry_count, retry_delay=300):\n raise NotImplementedError()\n\n\nclass SomeEmailComposer(EmailComposer):\n def send_email(self, recipients, batch_id, event_data, max_retries, retry_timeout_seconds=None, silent=False,\n template_data=None):\n assert max_retries >= 0\n for addr in recipients:\n send_email.send_email_plugin.delay(batch_id, event_data, addr, self.email_type, max_retries=max_retries,\n retry_timeout_seconds=retry_timeout_seconds, silent=silent,\n template_data=template_data)\n\n\ndef create_composer(email_type, logger):\n return SomeEmailComposer(email_type, logger)\n" }, { "alpha_fraction": 0.6748911738395691, "alphanum_fraction": 0.6763425469398499, "avg_line_length": 33.5, "blob_id": "250a792992370f9acfb89b2f45b5f8b6c58f0914", "content_id": "9c6800e339d23ca3501545da0381343c40ddc517", "detected_licenses": [], "is_generated": false, 
"is_vendor": false, "language": "Python", "length_bytes": 689, "license_type": "no_license", "max_line_length": 70, "num_lines": 20, "path": "/app/fw/auth/social_services/__init__.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom fw.auth.social_services.facebook_backend import FacebookBackend\nfrom fw.auth.social_services.social_models import SocialUserLink\nfrom fw.auth.social_services.vk_backend import VkBackend\n\n\nclass SocialServiceBackends(object):\n backends = {\n 'vk': VkBackend,\n 'facebook': FacebookBackend\n }\n\n @staticmethod\n def get_user_social_network_profile_url(user_id):\n link = SocialUserLink.query.filter_by(user_id=user_id).first()\n if link:\n backend_type = link.service_id\n backend = SocialServiceBackends.backends.get(backend_type)\n if backend:\n return backend.get_profile_url(link)" }, { "alpha_fraction": 0.6440678238868713, "alphanum_fraction": 0.6457627415657043, "avg_line_length": 24.65217399597168, "blob_id": "faf209950bc07b16ca80701d24f70a725f6ffd4d", "content_id": "3f222db3330aa3d29b199af1aebc901dc1a1c22e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 590, "license_type": "no_license", "max_line_length": 62, "num_lines": 23, "path": "/app/services/ifns/data_model/enums.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n\nclass IfnsRegStatusEnum(object):\n IRS_REGISTERED = \"registered\"\n IRS_REGISTRATION_DECLINED = \"registration_declined\"\n IRS_PROGRESS = \"progress\"\n IRS_UNKNOWN = \"unknown\"\n\n _NAMES = {\n IRS_REGISTERED: IRS_REGISTERED,\n IRS_REGISTRATION_DECLINED: IRS_REGISTRATION_DECLINED,\n IRS_PROGRESS: IRS_PROGRESS,\n IRS_UNKNOWN: IRS_UNKNOWN\n }\n\n @classmethod\n def validate(cls, value):\n return value in cls._NAMES\n\n @staticmethod\n def get_name(value):\n return IfnsRegStatusEnum._NAMES.get(value, u\"unknown\")\n" }, { "alpha_fraction": 0.7052116990089417, "alphanum_fraction": 0.7057546377182007, "avg_line_length": 33.77358627319336, "blob_id": "c4d6dcdfb2fa01029de77fc364a83c1371a2b6cd", "content_id": "f3bc5bf491ef64607231945be5cfe00a9cd3e157", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1842, "license_type": "no_license", "max_line_length": 86, "num_lines": 53, "path": "/app/services/ifns/data_model/models.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom bson import ObjectId\nfrom sqlalchemy import Column, Unicode, String, ForeignKey, DateTime, Boolean, Integer\nfrom sqlalchemy.orm import relationship\nfrom sqlalchemy.dialects.postgresql import JSONB, ARRAY\n\nfrom fw.db.sql_base import db as sqldb\n\n\nclass IfnsBookingTaskStatus(object):\n BTS_NEW = \"new\"\n BTS_PROGRESS = \"progress\"\n BTS_FAIL = \"fail\"\n BTS_SUCCESS = \"success\"\n\n BTS_ALL = (BTS_NEW, BTS_PROGRESS, BTS_FAIL, BTS_SUCCESS)\n\n\nclass IfnsBookingObject(sqldb.Model):\n __tablename__ = \"ifns_booking\"\n\n id = Column(String, primary_key=True, default=lambda: str(ObjectId()))\n batch_id = Column(String, ForeignKey('doc_batch.id'), nullable=True)\n batch = relationship(\"DocumentBatchDbObject\", uselist=False)\n code = Column(Unicode, nullable=False, index=True)\n date = Column(DateTime, nullable=False)\n service = Column(Unicode, nullable=False)\n _discarded = Column(Boolean, default=False)\n phone = Column(Unicode, nullable=True)\n window = Column(Unicode, 
nullable=True)\n address = Column(Unicode, nullable=True)\n service_id = Column(Integer, nullable=False)\n ifns = Column(Unicode, nullable=True)\n how_to_get = Column(Unicode, nullable=True)\n reg_info = Column(JSONB, nullable=True)\n\n\nclass IfnsCatalogObject(sqldb.Model):\n __tablename__ = \"ifns_catalog\"\n\n id = Column(String, primary_key=True)\n\n updated = Column(DateTime, nullable=True)\n code = Column(Integer, nullable=False, index=True)\n comment = Column(Unicode, nullable=True)\n tel = Column(ARRAY(String), nullable=True)\n name = Column(Unicode, nullable=True)\n rof = Column(JSONB, nullable=True)\n rou = Column(JSONB, nullable=True)\n plat = Column(JSONB, nullable=True)\n address = Column(JSONB, nullable=True)\n region = Column(Unicode, nullable=True, index=True)" }, { "alpha_fraction": 0.6814255118370056, "alphanum_fraction": 0.6889848709106445, "avg_line_length": 30.579545974731445, "blob_id": "47565184f478898dfdc20f4d47891b9cc68be967", "content_id": "6b4a8613b39da8647f2dca31336aed8b1c3651ce", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2778, "license_type": "no_license", "max_line_length": 154, "num_lines": 88, "path": "/app/prod_celeryconfig.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import timedelta\nimport sys\nimport os\nimport external_tools\nfrom fw.async_tasks.celery_utils import make_app\nfrom fw.transport.mail import Mailer\nfrom fw.transport.sms_gate import SmsSender\nfrom jb_config import JBConfiguration\n\nsys.path.append(os.path.abspath(os.path.dirname(__file__)))\n\n\nCELERY_IMPORTS = (\n \"fw.async_tasks.rendering\",\n \"fw.async_tasks.send_email\",\n \"fw.async_tasks.test_task\",\n \"fw.async_tasks.send_sms_task\",\n \"fw.async_tasks.periodic_tasks\",\n \"fw.async_tasks.not_paid_check_send\",\n \"fw.async_tasks.core_tasks\",\n \"fw.async_tasks.scheduler\",\n\n \"services.ifns.async_tasks.ifns_booking_tasks\",\n \"services.car_assurance.async_tasks\",\n \"services.yurist.async_tasks.yurist_check_send\",\n \"services.russian_post.async_tasks\"\n)\n\nCELERY_RESULT_BACKEND = \"database\"\n\nCELERY_TASK_RESULT_EXPIRES = None\n\nCELERY_IGNORE_RESULT = True\nCELERY_STORE_ERRORS_EVEN_IF_IGNORED = False\n\nCELERY_QUEUE_HA_POLICY = 'all'\n\nCELERY_DISABLE_RATE_LIMITS = True\n\nCELERY_SEND_EVENTS = True\n\nCELERY_SEND_TASK_SENT_EVENT = True\n\nDEFAULT_CONFIG_PATH = '/etc/jurbureau/config.cfg'\n\nconfig = JBConfiguration('Jur bureau async tasks service', DEFAULT_CONFIG_PATH)\n\nSETTINGS_STORAGE = config\n\nCELERY_RESULT_DBURI = config['SQLALCHEMY_DATABASE_URI'].replace('postgres', 'postgresql', 1) # \"postgresql://postgres:postgres@lw-2-prod-storage-1/jb\"\nBROKER_URL = 'sqla+' + CELERY_RESULT_DBURI # 'sqla+postgresql://postgres:postgres@lw-2-prod-storage-1/jb'\n\nMAILER = Mailer(config['mailer_server'], config['mailer_smtp_user'], config['mailer_smtp_password'])\nSMS_SENDER = SmsSender(config['sms_gate_address'], config['sms_gate_user'], config['sms_gate_password'], config['sms_gate_sender'])\n\nEXTERNAL_TOOLS = external_tools\n\nCELERY_TIMEZONE = 'UTC'\n\nCELERYBEAT_SCHEDULE = {\n 'add-every-30-seconds': {\n 'task': 'fw.async_tasks.periodic_tasks.check_frozen_batch_finalisation',\n 'schedule': timedelta(minutes=30)\n },\n 'post-items-track': {\n 'task': 'services.russian_post.async_tasks.get_tracking_info_async',\n 'schedule': timedelta(seconds=600)\n },\n 'check-scheduled-tasks': {\n 'task': 
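# Illustrative sketch (values made up): the IfnsCatalogObject model above is a plain
# Flask-SQLAlchemy table, so its indexed columns are queried in the usual way.
row = IfnsCatalogObject.query.filter_by(code=7700).first()                     # `code` is indexed
moscow_count = IfnsCatalogObject.query.filter_by(region=u"Москва").count()     # `region` is indexed
if row is not None:
    catalog_name = row.name        # Unicode display name
    catalog_address = row.address  # JSONB address document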
'fw.async_tasks.core_tasks.check_scheduled_tasks',\n 'schedule': timedelta(seconds=5)\n },\n 'check_doc_group_render': {\n 'task': 'fw.async_tasks.rendering.batch_group_gen_check_task',\n 'schedule': timedelta(seconds=600)\n },\n 'get_fss_task': {\n 'task': 'fw.async_tasks.periodic_tasks.get_fss_task',\n 'schedule': timedelta(hours=2, seconds=10)\n },\n 'clean_kombu_messages': {\n 'task': 'fw.async_tasks.periodic_tasks.clean_kombu_messages',\n 'schedule': timedelta(hours=4)\n }\n}\n\nflask_app = make_app(config, external_tools)" }, { "alpha_fraction": 0.6912280917167664, "alphanum_fraction": 0.692105233669281, "avg_line_length": 35.774192810058594, "blob_id": "eaaa4ad4af3130e93742aadb457a9af883887885", "content_id": "a7f68894f8dac5650a2d9eb4c1870ae3ef9c0a38", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1140, "license_type": "no_license", "max_line_length": 105, "num_lines": 31, "path": "/app/fw/async_tasks/core_tasks.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n#noinspection PyUnresolvedReferences\nfrom datetime import datetime\nfrom celery import current_app as celery\nfrom fw.async_tasks.models import CeleryScheduledTask\nfrom fw.async_tasks.scheduler import CeleryScheduler\n\ncelery.config_from_envvar('CELERY_CONFIG_MODULE')\n\[email protected]()\ndef send(batch_id, event, event_data=None):\n event_data = event_data or {}\n from fw.documents.batch_manager import BatchManager\n app = celery.conf['flask_app']()\n logger = app.logger\n logger.info(u\"PROCESSING event %s for batch %s\" % (event, batch_id))\n with app.app_context():\n result = BatchManager.handle_event(batch_id, event, event_data, logger=logger, config=app.config)\n logger.info(u\"FINISH PROCESSING event %s for batch %s\" % (event, batch_id))\n return result\n\[email protected]()\ndef check_scheduled_tasks():\n app = celery.conf['flask_app']()\n with app.app_context():\n for task in CeleryScheduledTask.query.filter(\n CeleryScheduledTask.sent==False,\n CeleryScheduledTask.eta.__le__(datetime.utcnow())\n ):\n CeleryScheduler.run_task(task)\n" }, { "alpha_fraction": 0.7153246998786926, "alphanum_fraction": 0.7225976586341858, "avg_line_length": 39.604652404785156, "blob_id": "64dc67be227173332c6dafd8641545af64117520", "content_id": "5dec5fba1fa5b05e689167a51463a61192f3e135", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 23640, "license_type": "no_license", "max_line_length": 523, "num_lines": 430, "path": "/app/services/llc_reg/documents/enums.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n\nclass StarterCompanyCapitalTypeEnum(object):\n TYPE_CLS = int\n\n SCT_USTAVNOY_CAPITAL = 1\n SCT_SKLADOCHNY_CAPITAL = 2\n SCT_USTAVNOY_FOND = 3\n SCT_PAEVOY_FOND = 4\n\n _NAMES = {\n SCT_USTAVNOY_CAPITAL: u\"уставной капитал\",\n SCT_SKLADOCHNY_CAPITAL: u\"складочный капитал\",\n SCT_USTAVNOY_FOND: u\"уставной фонд\",\n SCT_PAEVOY_FOND: u\"паевой фонд\"\n }\n\n @classmethod\n def validate(cls, value):\n return value in cls._NAMES\n\n\n @staticmethod\n def get_name(value):\n return StarterCompanyCapitalTypeEnum._NAMES.get(value, u\"неизвестно\")\n\n\nclass JSCMemberTypeEnum(object):\n TYPE_CLS = int\n JSCMT_NEW_JSC = 1\n JSCMT_REGISTRATOR = 2\n\n _NAMES = {\n JSCMT_NEW_JSC: u\"акционерное общество\",\n JSCMT_REGISTRATOR: u\"регистратор\"\n }\n\n @classmethod\n def validate(cls, value):\n return value in 
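# The check_scheduled_tasks() task above implements a small DB-backed scheduler: celery
# beat fires it every 5 seconds (the 'check-scheduled-tasks' entry in CELERYBEAT_SCHEDULE)
# and it forwards every unsent task whose eta has passed to CeleryScheduler.run_task().
# The same query, written with plain comparison operators instead of .__le__():
due_tasks = CeleryScheduledTask.query.filter(
    CeleryScheduledTask.sent == False,             # noqa: E712 -- SQLAlchemy column comparison
    CeleryScheduledTask.eta <= datetime.utcnow(),  # equivalent to .__le__(datetime.utcnow())
)
for task in due_tasks:
    CeleryScheduler.run_task(task)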
cls._NAMES\n\n @staticmethod\n def get_name(value):\n return JSCMemberTypeEnum._NAMES.get(value, u\"неизвестно\")\n\n\nclass InitialCapitalDepositTypeEnum(object):\n TYPE_CLS = int\n\n ICD_MONEY_FULL = 1\n ICD_MONEY_PARTIAL = 2\n ICD_PROPERTY = 3\n ICD_REAL_ESTATES = 4\n ICD_PROPERTY_WITH_EXAMINATION = 5\n\n _NAMES = {\n ICD_MONEY_FULL: u\"полностью оплатить\",\n ICD_MONEY_PARTIAL: u\"частично оплатить\",\n ICD_PROPERTY: u\"вещевой вклад\",\n ICD_REAL_ESTATES: u\"оплатить недвижимостью\",\n ICD_PROPERTY_WITH_EXAMINATION: u\"вещевой вклад с экспертизой\"\n }\n\n @classmethod\n def validate(cls, value):\n return value in cls._NAMES\n\n @staticmethod\n def get_name(value):\n return InitialCapitalDepositTypeEnum._NAMES.get(value, u\"неизвестно\")\n\n\nclass DocumentDeliveryTypeEnum(object):\n TYPE_CLS = int\n\n DDT_ISSUE_TO_THE_APPLICANT = 1\n DDT_ISSUE_TO_THE_APPLICANT_OR_AGENT = 2\n DDT_SEND_BY_MAIL = 3\n\n _NAMES = {\n DDT_ISSUE_TO_THE_APPLICANT: u\"выдать заявителю\",\n DDT_ISSUE_TO_THE_APPLICANT_OR_AGENT: u\"выдать заявителю или лицу, действующему на основании доверенности\",\n DDT_SEND_BY_MAIL: u\"направить по почте\"\n }\n\n @classmethod\n def validate(cls, value):\n return value in DocumentDeliveryTypeEnum._NAMES\n\n @staticmethod\n def get_name(value):\n return DocumentDeliveryTypeEnum._NAMES.get(value, u\"неизвестно\")\n\n\nclass WitnessTypeEnum(object):\n TYPE_CLS = int\n\n WT_NOTARY = 1\n WT_NOTARY_TEMPORARY_SUBSTITUTE = 2\n WT_NOTARIAL_ACT_AUTHORIZED_PERSON = 3\n\n _NAMES = {\n WT_NOTARY: u\"нотариус\",\n WT_NOTARY_TEMPORARY_SUBSTITUTE: u\"лицо, замещающее временно отсутствующего нотариуса\",\n WT_NOTARIAL_ACT_AUTHORIZED_PERSON: u\"должностное лицо, уполномоченное на совершение нотариального действия\"\n }\n\n @classmethod\n def validate(cls, value):\n return value in WitnessTypeEnum._NAMES\n\n @staticmethod\n def get_name(value):\n return WitnessTypeEnum._NAMES.get(value, u\"неизвестно\")\n\n\nclass CompanyStarterCapitalTypeEnum(object):\n TYPE_CLS = int\n\n CSC_USTAVNOY_CAPITAL = 1\n CSC_SKLADOCHNY_CAPITAL = 2\n CSC_USTAVNOY_FOND = 3\n CSC_PAEVOY_FOND = 4\n\n _NAMES = {\n CSC_USTAVNOY_CAPITAL: u\"уставной капитал\",\n CSC_SKLADOCHNY_CAPITAL: u\"складочный капитал\",\n CSC_USTAVNOY_FOND: u\"уставной фонд\",\n CSC_PAEVOY_FOND: u\"паевой фонд\"\n }\n\n @classmethod\n def validate(cls, value):\n return value in CompanyStarterCapitalTypeEnum._NAMES\n\n @staticmethod\n def get_name(value):\n return unicode(value)\n\n @staticmethod\n def get_description(value):\n return CompanyStarterCapitalTypeEnum._NAMES.get(value, u\"неизвестно\")\n\n\nclass GovernmentFounderTypeEnum(object):\n TYPE_CLS = int\n\n GF_RUSSIA = 1\n GF_REGION = 2\n GF_MUNICIPALITY = 3\n\n _NAMES = {\n GF_RUSSIA: u'Российская Федерация',\n GF_REGION: u\"субъект Российской Федерации\",\n GF_MUNICIPALITY: u\"муниципальное образование\"\n }\n\n @classmethod\n def validate(cls, value):\n return value in GovernmentFounderTypeEnum._NAMES\n\n @staticmethod\n def get_name(value):\n return GovernmentFounderTypeEnum._NAMES.get(value, u\"\")\n\n\nclass FounderTypeEnum(object):\n TYPE_CLS = int\n\n FT_PERSON = 1\n FT_COMPANY = 2\n\n _NAMES = {\n FT_PERSON: u\"учредитель юридического лица — физическое лицо\",\n FT_COMPANY: u\"учредитель юридического лица — российское юридическое лицо\"\n }\n\n @classmethod\n def validate(cls, value):\n return value in FounderTypeEnum._NAMES\n\n @staticmethod\n def get_name(value):\n return FounderTypeEnum._NAMES.get(value, u\"\")\n\n\nclass FounderStrTypeEnum(object):\n FST_PERSON = \"person\"\n 
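# Note on the enum convention above, shown as a runnable snippet: every enum class keeps
# a _NAMES dict plus validate()/get_name() helpers. CompanyStarterCapitalTypeEnum is the
# odd one out: get_name() echoes the raw value as text and the human-readable label lives
# in get_description(). (Python 2 semantics, matching the codebase.)
v = CompanyStarterCapitalTypeEnum.CSC_USTAVNOY_CAPITAL
assert CompanyStarterCapitalTypeEnum.validate(v)
assert CompanyStarterCapitalTypeEnum.get_name(v) == u"1"
assert CompanyStarterCapitalTypeEnum.get_description(v) == u"уставной капитал"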
FST_COMPANY = \"company\"\n\n _NAMES = {\n FST_PERSON: u\"учредитель юридического лица — физическое лицо\",\n FST_COMPANY: u\"учредитель юридического лица — российское юридическое лицо\"\n }\n\n @classmethod\n def validate(cls, value):\n return value in cls._NAMES\n\n @staticmethod\n def get_name(value):\n return FounderStrTypeEnum._NAMES.get(value, u\"\")\n\n\nclass AlienationRightEnum(object):\n TYPE_CLS = int\n\n AR_PROHIBITED = 1\n AR_SOGLASIE_DRUGIH_UCHASTNIKOV_NE_TREBUETSYA = 2\n AR_SOGLASIE_DRUGIH_UCHASTNIKOV_TREBUETSYA = 3\n AR_THIRD_TREB_UCH_NE_TREB = 4\n AR_THIRD_PROHIB_UCH_NE_TREB = 5\n\n _NAMES = {\n AR_PROHIBITED: u\"Продажа доли или части доли в уставном капитале Обществу третьим лицам запрещена, согласие на продажу или отчуждение в пользу учредителей требуется.\",\n AR_SOGLASIE_DRUGIH_UCHASTNIKOV_NE_TREBUETSYA: u\"Согласие других участников общества на продажу или отчуждение в пользу третьих лиц или участников не требуется.\",\n AR_SOGLASIE_DRUGIH_UCHASTNIKOV_TREBUETSYA: u\"Согласие других участников общества на продажу или отчуждение в пользу третьих лиц или участников требуется.\",\n AR_THIRD_TREB_UCH_NE_TREB: u\"Согласие других участников общества на продажу или отчуждение в пользу третьих лиц или участников требуется, участников — не требуется.\",\n AR_THIRD_PROHIB_UCH_NE_TREB: u\"Продажа доли или части доли в уставном капитале Обществу третьим лицам запрещена, согласие на продажу или отчуждение в пользу учредителей не требуется.\"\n }\n\n @classmethod\n def validate(cls, value):\n return value in AlienationRightEnum._NAMES\n\n @staticmethod\n def get_name(value):\n return AlienationRightEnum._NAMES.get(value, u\"\")\n\n\nclass BoardOfDirectorsAuthority(object):\n TYPE_CLS = int\n\n BDA_BONDS = 1\n BDA_BUSINESS_PLAN = 2\n BDA_BUDGET = 3\n BDA_ASSOCIATIONS = 4\n BDA_COMPANY_PROPERTY_BIG_DEALS = 5\n BDA_REAL_ESTATE_DEALS = 6\n BDA_INTELLECTUAL_PROPERTY_DEALS = 7\n BDA_PLEDGE_DECISIONMAKING = 8\n BDA_DEALS_CHANGES_DECISIONMAKING = 9\n BDA_OBLIGATION_DEALS_DECISIONMAKING = 10\n BDA_AUDIT_APPOINTMENT = 11\n BDA_CHANGE_GENERAL_MANAGER = 12\n BDA_CHANGE_BANK_ACCOUNTS_CREDENTIALS = 13\n BDA_INTERNAL_SHARE_PLEDGE_DECISIONMAKING = 14\n BDA_FUNDS_SPENDING_REPORTS_APPROVAL = 15\n BDA_GENERAL_MANAGER_CONTRACT_APPROVAL = 16\n BDA_CHANGE_COMPANY_REGULATIONS = 17\n BDA_FINANCE_DIRECTOR_APPROVAL = 18\n BDA_KEY_PERSONS_CONTRACTS_APPROVAL = 19\n BDA_ASSETS_MANAGEMENT_DEALS_APPROVAL = 20\n BDA_CREDITS_USAGE_DEALS_DECISIONMAKING = 21\n BDA_CONSULTATIONS_DEALS_APPROVAL = 22\n BDA_STAFF_LIST_APPROVAL = 23\n BDA_BILL_DEAL_APPROVAL = 24\n BDA_TAKING_PART_IN_COMERCIAL_COMPANIES = 25\n BDA_MANAGE_EXECUTIVES = 26\n BDA_MANAGE_EXECUTIVE_CONTRACTS = 27\n BDA_SHARES_RIGHTS_MANAGEMENT = 28\n\n _NAMES = {\n BDA_BONDS: u\"принятие решения о размещении Обществом облигаций и иных эмиссионных ценных бумаг\",\n BDA_BUSINESS_PLAN: u\"утверждение и изменение Бизнес-плана Общества\",\n BDA_BUDGET: u\"утверждение и изменение Бюджетов Общества\",\n BDA_ASSOCIATIONS: u\"принятие решения об участии Общества в ассоциациях и других объединениях коммерческих организаций\",\n BDA_COMPANY_PROPERTY_BIG_DEALS: u\"принятие решений о совершении крупных сделок, связанных с приобретением, отчуждением или возможностью отчуждения Обществом прямо либо косвенно имущества, стоимость которого составляет от двадцати пяти до пятидесяти процентов стоимости имущества Общества, определенной на основании данных бухгалтерской отчетности за последний отчетный период, предшествующий дню принятия решения о совершении таких сделок, если 
уставом общества не предусмотрен более высокий размер крупной сделки\",\n BDA_REAL_ESTATE_DEALS: u\"принятие решений о совершении сделок с недвижимым имуществом или правами на недвижимое имущество\",\n BDA_INTELLECTUAL_PROPERTY_DEALS: u\"принятие решений о совершении сделок с приобретением, отчуждением и обременением исключительных прав на объекты интеллектуальной собственности и или средства индивидуализации (кроме случаев приобретения прав на использование программ для электронных вычислительных машин и/или баз данных)\",\n BDA_PLEDGE_DECISIONMAKING: u\"принятие решений о предоставлении имущества Общества в залог (или иные обременения), заключение Обществом иных договоров, направленных на обеспечение исполнения обязательств Общества или третьих лиц, изменение или прекращение таких договоров\",\n BDA_DEALS_CHANGES_DECISIONMAKING: u\"принятие решений о заключении или изменении сделки Общества в совершении которой имеется заинтересованность\",\n BDA_OBLIGATION_DEALS_DECISIONMAKING: u\"принятие решений о заключении, изменении или прекращении любого соглашения, не входящего в обычную сферу деятельности Общества или которое направлено на создание обязательств у Общества сроком более чем на 12 месяцев\",\n BDA_AUDIT_APPOINTMENT: u\"назначение аудиторской проверки, утверждение аудитора и установление размера оплаты его услуг как для Общества. принятие решения о порядке голосования на общих собраниях Участников/акционеров дочерних и зависимых Обществ в части утверждения аудитора и установления размера оплаты его услуг\",\n BDA_CHANGE_GENERAL_MANAGER: u\"избрание Генерального директора Общества и досрочное прекращение его полномочий, определение и изменение объёма его полномочий, а также принятие решения о передаче полномочий Генерального директора Общества коммерческой организации или индивидуальному предпринимателю (далее - управляющий), утверждение такого управляющего и условий договора с ним\",\n BDA_CHANGE_BANK_ACCOUNTS_CREDENTIALS: u\"утверждение полномочий и прекращение полномочий на распоряжение (включая право подписи) банковскими счетами Общества\",\n BDA_INTERNAL_SHARE_PLEDGE_DECISIONMAKING: u\"принятие решения об одобрении залога (или обременения иным образом) доли в уставном капитале Общества любым из Участников Общества в пользу другого Участника Общества\",\n BDA_FUNDS_SPENDING_REPORTS_APPROVAL: u\"утверждение отчетов Генерального директора и документов, подтверждающих целевое расходование денежных средств в рамках финансового плана (бюджета) Общества\",\n BDA_GENERAL_MANAGER_CONTRACT_APPROVAL: u\"утверждений условий трудового договора, установление размера вознаграждения и денежных компенсаций Генеральному директору Общества\",\n BDA_CHANGE_COMPANY_REGULATIONS: u\"утверждение, принятие или изменение документов, регулирующих организацию деятельности Общества (внутренних документов Общества), за исключением внутренних документов, утверждение которых отнесено к компетенции общего собрания участников Общества\",\n BDA_FINANCE_DIRECTOR_APPROVAL: u\"принятие решения об одобрении кандидатуры на должность финансового директора Общества, определение и изменение объёма его полномочий, утверждение условий договора с ним, включая размер вознаграждения и денежных компенсаций\",\n BDA_KEY_PERSONS_CONTRACTS_APPROVAL: u\"утверждение условий трудовых договоров и изменений к ним с ключевыми работниками Общества, включая размер вознаграждения и денежных компенсаций\",\n BDA_ASSETS_MANAGEMENT_DEALS_APPROVAL: u\"принятие решений об одобрении сделок по приобретению или продаже активов (включая движимое и 
недвижимое имущество) Общества, иных капиталовложениях Общества, включая сделки в рамках текущей хозяйственной деятельности Общества и сделки во исполнение Бизнес-плана Общества, в форме одной сделки или нескольких взаимосвязанных сделок, превышающих в сумме 500000 (пятьсот тысяч) рублей, но не превышающих 3000000 (три миллиона рублей)\",\n BDA_CREDITS_USAGE_DEALS_DECISIONMAKING: u\"принятие решений об одобрении сделок о предоставлении/получении Обществом любых займов, кредитов\",\n BDA_CONSULTATIONS_DEALS_APPROVAL: u\"одобрение сделок Общества на оказании консультационных, маркетинговых и иных услуг в форме одной сделки или нескольких взаимосвязанных сделок, превышающих в сумме 500000 (пятьсот тысяч) рублей, но не превышающих 3000000 (три миллиона рублей)\",\n BDA_STAFF_LIST_APPROVAL: u\"утверждение штатного расписания Общества\",\n BDA_BILL_DEAL_APPROVAL: u\"одобрение вексельной сделки, в том числе по выдаче Обществом векселей, производстве по ним передаточных надписей, авалей, платежей независимо от суммы\",\n BDA_TAKING_PART_IN_COMERCIAL_COMPANIES: u\"принятие решений об учреждении, участии и прекращении участия в коммерческих организациях, а также о совершении сделок, связанных с приобретением, отчуждением и возможностью отчуждения акций (паев, долей в уставном или складочном капитале) других коммерческих организаций\",\n BDA_MANAGE_EXECUTIVES: u\"образование исполнительных органов общества и досрочное прекращение их полномочий, а также принятие решения о передаче полномочий единоличного исполнительного органа общества\",\n BDA_MANAGE_EXECUTIVE_CONTRACTS: u\"установление размера вознаграждения и денежных компенсаций единоличному исполнительному органу общества, членам коллегиального исполнительного органа общества, управляющему\",\n BDA_SHARES_RIGHTS_MANAGEMENT: u\"принятие решения об использовании прав, предоставляемых принадлежащими Обществу акциями (паями, долями в уставном или складочном капитале) других коммерческих организаций\"\n }\n\n @classmethod\n def validate(cls, value):\n return value in BoardOfDirectorsAuthority._NAMES\n\n @staticmethod\n def get_name(value):\n return BoardOfDirectorsAuthority._NAMES.get(value, u\"\")\n\n\nclass NecessaryVotesEnum(object):\n TYPE_CLS = int\n\n NV_ALL = 1\n NV_2_3 = 2\n NV_3_4 = 3\n\n _NAMES = {\n NV_ALL: u\"единогласно\",\n NV_2_3: u\"простым большинством (2/3 голосов)\",\n NV_3_4: u\"квалифицированным большинством (3/4 голосов)\"\n }\n\n @classmethod\n def validate(cls, value):\n return value in NecessaryVotesEnum._NAMES\n\n @staticmethod\n def get_name(value):\n return NecessaryVotesEnum._NAMES.get(value, u\"\")\n\n\nclass IfnsServiceEnum(object):\n TYPE_CLS = int\n\n IS_REG_COMPANY = 1\n IS_RECEIVE_REG_DOCS = 2\n IS_REG_IP = 3\n\n _NAMES = {\n IS_REG_COMPANY: u\"регистрация юр. лица\",\n IS_RECEIVE_REG_DOCS: u\"получение документов юр. 
лица по создании\",\n IS_REG_IP: u\"регистрация ИП\",\n }\n\n @classmethod\n def validate(cls, value):\n return value in IfnsServiceEnum._NAMES\n\n @staticmethod\n def get_name(value):\n return IfnsServiceEnum._NAMES.get(value, u\"\")\n\n\nclass UsnTaxType(object):\n TYPE_CLS = int\n\n UT_INCOME = 1\n UT_INCOME_MINUS_EXPENSE = 2\n\n _NAMES = {\n UT_INCOME: u\"доходы\",\n UT_INCOME_MINUS_EXPENSE: u\"доходы, уменьшенные на сумму расходов\"\n }\n\n @classmethod\n def validate(cls, value):\n return value in UsnTaxType._NAMES\n\n @staticmethod\n def get_name(value):\n return UsnTaxType._NAMES.get(value, u\"\")\n\n\nclass RegistrationWay(object):\n RW_ALL_FOUNDERS = \"all_founders\"\n RW_SOME_FOUNDERS = \"some_founders\"\n RW_RESPONSIBLE_PERSON = \"responsible_person\"\n RW_NOTARY = \"notary\"\n\n _NAMES = {\n RW_ALL_FOUNDERS: u\"все учредители\",\n RW_SOME_FOUNDERS: u\"некоторые учредители\",\n RW_RESPONSIBLE_PERSON: u\"ответственное лицо\",\n RW_NOTARY: u\"нотариус\"\n }\n\n @classmethod\n def validate(cls, value):\n return value in cls._NAMES\n\n @staticmethod\n def get_name(value):\n return RegistrationWay._NAMES.get(value, u\"\")\n\n\nclass DocumentDeliveryTypeStrEnum(object):\n DDT_ISSUE_TO_THE_APPLICANT = \"founder\"\n DDT_ISSUE_TO_THE_APPLICANT_OR_AGENT = \"responsible_person\"\n DDT_SEND_BY_MAIL = \"mail\"\n\n _NAMES = {\n DDT_ISSUE_TO_THE_APPLICANT: u\"выдать заявителю\",\n DDT_ISSUE_TO_THE_APPLICANT_OR_AGENT: u\"выдать заявителю или лицу, действующему на основании доверенности\",\n DDT_SEND_BY_MAIL: u\"направить по почте\"\n }\n\n @classmethod\n def validate(cls, value):\n return value in cls._NAMES\n\n @staticmethod\n def get_name(value):\n return DocumentDeliveryTypeStrEnum._NAMES.get(value, u\"неизвестно\")\n\n\nclass AddressType(object):\n AT_GENERAL_MANAGER_REGISTRATION_ADDRESS = \"general_manager_registration_address\"\n AT_FOUNDER_REGISTRATION_ADDRESS = \"founder_registration_address\"\n AT_REAL_ESTATE_ADDRESS = \"real_estate_address\"\n AT_OFFICE_ADDRESS = \"office_address\"\n\n _NAMES = {\n AT_GENERAL_MANAGER_REGISTRATION_ADDRESS: \"x\",\n AT_FOUNDER_REGISTRATION_ADDRESS: \"y\",\n AT_REAL_ESTATE_ADDRESS: \"z\",\n AT_OFFICE_ADDRESS: \".\"\n }\n\n\n @classmethod\n def validate(cls, value):\n return value in cls._NAMES\n\n @staticmethod\n def get_name(value):\n return AddressType._NAMES.get(value, u\"неизвестно\")\n\n\n" }, { "alpha_fraction": 0.699999988079071, "alphanum_fraction": 0.70652174949646, "avg_line_length": 29.600000381469727, "blob_id": "06b0dd2c806272ac493007882b3bfa1e4d9bbec5", "content_id": "3ac0a575d3c1372a8b3534921b565abdc5f31be8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 500, "license_type": "no_license", "max_line_length": 79, "num_lines": 15, "path": "/app/manage_commands/deploy_commands.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom deployment_migrations.migrations import MigrationManager\nfrom manage_commands import BaseManageCommand\n\n\nclass MigrateCommand(BaseManageCommand):\n NAME = \"migrate\"\n\n def run(self):\n self.logger.info(u\"Запуск миграции\")\n self.logger.info(u'=' * 50)\n\n current_version = MigrationManager.migrate_to(self.config, self.logger)\n\n self.logger.info(u\"Система мигрирована до версии %s\" % current_version)\n\n" }, { "alpha_fraction": 0.3219689428806305, "alphanum_fraction": 0.32786208391189575, "avg_line_length": 35.72806930541992, "blob_id": "8f648ed51da04f724c7ef9056c8921f1b3225a0c", "content_id": 
"0e035e7591e05dfdeb2df9272384e15813c2c498", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 172561, "license_type": "no_license", "max_line_length": 206, "num_lines": 4560, "path": "/app/services/osago/documents/initial_db_data.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport os\n\nfrom fw.documents.enums import DocumentTypeEnum, DocumentKindEnum, DocumentBatchTypeEnum\nfrom services.osago.documents.enums import OsagoDocTypeEnum\n\n\ndef _get_test_resource_name(config, resource_rel_path):\n resources_path = config['resources_path']\n return os.path.join(resources_path, resource_rel_path)\n\n\ndef load_data(config):\n\n REQUIRED_DOCS_FIELD = {\n \"#array_mapping\": {\n \"source_array\": [\n OsagoDocTypeEnum.ODT_INQUIRE_CRASH,\n OsagoDocTypeEnum.ODT_NOTICE_CRASH,\n OsagoDocTypeEnum.ODT_INSURANCE_DENIAL,\n OsagoDocTypeEnum.ODT_ACT_INSURANCE_CASE,\n OsagoDocTypeEnum.ODT_POLICE_STATEMENT,\n OsagoDocTypeEnum.ODT_POLICE_PROTOCOL,\n OsagoDocTypeEnum.ODT_CASE_INITIATION_REFUSAL\n ],\n \"filter\": [{\n \"#not\": {\n \"<loop_item>\": {\n \"#in\": \"@docs_got\"\n },\n }\n }, {\n \"#not\": {\n \"policy_called\": False,\n \"<loop_item>\": OsagoDocTypeEnum.ODT_CASE_INITIATION_REFUSAL\n }\n }, {\n \"#or\": [{\n \"police_case\": False,\n \"<loop_item>\": {\n \"#nin\": [OsagoDocTypeEnum.ODT_POLICE_STATEMENT]\n }\n }, {\n \"police_case\": True,\n \"<loop_item>\": {\n \"#nin\": [OsagoDocTypeEnum.ODT_CASE_INITIATION_REFUSAL]\n }\n }]\n }, {\n \"#or\": [{\n \"policy_called\": True,\n }, {\n \"policy_called\": False,\n \"<loop_item>\": {\n \"#nin\": [\n OsagoDocTypeEnum.ODT_INQUIRE_CRASH,\n OsagoDocTypeEnum.ODT_POLICE_STATEMENT,\n OsagoDocTypeEnum.ODT_POLICE_PROTOCOL\n ]\n }\n }]\n }, {\n \"#or\": [{\n \"problem_type\": \"refusal\"\n }, {\n \"problem_type\": {\n \"#ne\": \"refusal\"\n },\n \"<loop_item>\": {\n \"#nin\": [OsagoDocTypeEnum.ODT_INSURANCE_DENIAL]\n }\n }]\n }]\n }\n }\n\n FINAL_RESP_PERSON_FIELD = {\n \"name\": \"final_responsible_person\",\n \"type\": \"calculated\",\n \"field_type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"suppress_validation_errors\": True,\n \"value\": {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"court_include\": True\n },\n \"value\": {\n \"#field\": \"responsible_person\"\n }\n }],\n \"default\": {\n \"value\": {\n \"#field\": \"lawsuit_submission_responsible_person\"\n }\n }\n }\n }\n }\n\n RESULT_FIELDS_FIELD = {\n \"name\": \"_result_fields\",\n \"type\": \"calculated\",\n \"field_type\": \"DocJsonField\",\n \"suppress_validation_errors\": True,\n \"required\": False,\n \"value\": {\n \"#field\": \"<batch>->result_fields\"\n }\n }\n\n DOC_DATE_FIELD = {\n \"name\": \"doc_date\",\n \"type\": \"calculated\",\n \"field_type\": \"DocDateTimeField\",\n \"suppress_validation_errors\": True,\n \"input_format\": \"%Y-%m-%d\",\n \"required\": True,\n \"value\": {\n \"#datetime\": \"#now\"\n }\n }\n\n INSURANCE_ADDRESS_FINAL_FIELD = {\n \"name\": \"insurance_address_final\",\n \"type\": \"calculated\",\n \"suppress_validation_errors\": True,\n \"field_type\": \"DocTextField\",\n \"required\": False,\n \"value\": {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"use_other_submission_address\": True\n },\n \"value\": {\n \"#field\": \"submission_address\"\n }\n }],\n \"default\": {\n \"value\": {\n \"#fetch_db_table_row\": {\n \"table_name\": \"car_assurance_branch\",\n \"id\": \"@submission_branch_id\",\n \"field_name\": \"address\"\n }\n 
}\n }\n }\n }\n }\n\n INSURANCE_NAME_FINAL_FIELD = {\n \"name\": \"insurance_name_final\",\n \"type\": \"calculated\",\n \"suppress_validation_errors\": True,\n \"field_type\": \"DocTextField\",\n \"required\": False,\n \"value\": {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"other_insurance\": True\n },\n \"value\": {\n \"#field\": \"insurance_name\"\n }\n }],\n \"default\": {\n \"value\": {\n \"#field\": \"<batch>->result_fields->insurance_name\"\n }\n }\n }\n }\n }\n\n RESPONSIBLE_PERSON_ADDRESS_FIELD = {\n \"name\": \"responsible_person_address\",\n \"type\": \"calculated\",\n \"suppress_validation_errors\": True,\n \"field_type\": \"DocTextField\",\n \"value\": {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"obtain_address_type\": \"other_address\"\n },\n \"value\": {\n \"#field\": \"obtain_address\"\n }\n }, {\n \"conditions\": {\n \"obtain_address_type\": \"owner_address\"\n },\n \"value\": {\n \"#field\": \"victim_owner->address->as_string\"\n }\n }],\n \"default\": {\n \"value\": {\n \"#field\": \"responsible_person->address->as_string\"\n }\n }\n }\n }\n }\n\n OWNER_ADDRESS_FIELD = {\n \"name\": \"owner_address\",\n \"type\": \"calculated\",\n \"field_type\": \"DocTextField\",\n \"suppress_validation_errors\": True,\n \"value\": {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"submission_way\": {\n \"#ne\": \"responsible_person\"\n }\n },\n \"value\": {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"obtain_address_type\": \"other_address\"\n },\n \"value\": {\n \"#field\": \"obtain_address\"\n }\n }],\n \"default\": {\n \"value\": {\n \"#field\": \"victim_owner->address->as_string\"\n }\n }\n }\n }\n }],\n \"default\": {\n \"value\": {\n \"#field\": \"victim_owner->address->as_string\"\n }\n }\n }\n }\n }\n\n UNDERPAY_SUM_FIELD = {\n \"name\": \"underpay_sum\",\n \"type\": \"calculated\",\n \"field_type\": \"DocDecimalField\",\n \"suppress_validation_errors\": True,\n \"value\": {\n # underpay_sum — размер недоплаты страховой (float — с копейками), рассчитывается по формуле:\n # min(independent_expertise_sum, gibdd ?\n # (< 1.10.2014 ? 120000 : 400000) :\n # (< 1.10.2014 ? 25000 : 50000)) -\n # (problem_type=refusal ? 
0 : compensation_sum)\n \"#sub\": [{\n \"#min\": [{\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"policy_called\": True\n },\n \"value\": {\n \"#cases\": {\n \"set\": {\n \"datetime_01_10_2014\": {\n \"#datetime\": {\"year\": 2014, \"month\": 10, \"day\": 1}\n }\n },\n \"list\": [{\n \"conditions\": {\n \"policy_date\": {\n \"#lt\": \"@datetime_01_10_2014\"\n }\n },\n 'value': {\"#value\": 120000}\n }],\n \"default\": {\"value\": {\"#value\": 400000}}\n }\n }\n }],\n \"default\": {\n \"value\": {\n \"#cases\": {\n \"set\": {\n \"datetime_01_10_2014\": {\n \"#datetime\": {\"year\": 2014, \"month\": 10, \"day\": 1}\n }\n },\n \"list\": [{\n \"conditions\": {\n \"policy_date\": {\n \"#lt\": \"@datetime_01_10_2014\"\n }\n },\n 'value': {\"#value\": 25000}\n }],\n \"default\": {\"value\": {\"#value\": 50000}}\n }\n }\n }\n }\n }, {\n \"#field\": \"independent_expertise_sum\"\n }]\n }, {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"problem_type\": \"refusal\"\n },\n \"value\": {\"#value\": 0}\n }],\n \"default\": {\n \"value\": {\n \"#field\": \"compensation_sum\"\n }\n }\n }\n }]\n\n }\n }\n\n OSAGO_MAIL_LIST_TEMPLATE = {\n \"template_name\": \"template_osago1\",\n \"file_name\": _get_test_resource_name(config, \"osago/mail_list_template.tex\"),\n \"is_strict\": False,\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_MAIL_LIST\n }\n\n OSAGO_PRETENSION_TEMPLATE = {\n \"template_name\": \"template_osago2\",\n \"file_name\": _get_test_resource_name(config, \"osago/pretension_template.tex\"),\n \"is_strict\": False,\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_PRETENSION\n }\n\n OSAGO_DOCUMENTS_CLAIM_TEMPLATE = {\n \"template_name\": \"template_osago3\",\n \"file_name\": _get_test_resource_name(config, \"osago/documents_claim.tex\"),\n \"is_strict\": False,\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_DOCUMENTS_CLAIM\n }\n\n OSAGO_TRUST_SUBMISSION_DOCS_TEMPLATE = {\n \"template_name\": \"template_osago4\",\n \"file_name\": _get_test_resource_name(config, \"osago/trust_submission_docs.tex\"),\n \"is_strict\": False,\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_TRUST_SUBMISSION_DOCS\n }\n\n OSAGO_TRUST_OBTAIN_DOCS_TEMPLATE = {\n \"template_name\": \"template_osago5\",\n \"file_name\": _get_test_resource_name(config, \"osago/trust_submission_docs.tex\"),\n \"is_strict\": False,\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_TRUST_OBTAIN_DOCS\n }\n\n OSAGO_TRUST_SUBMISION_OBTAIN_DOCS_TEMPLATE = {\n \"template_name\": \"template_osago6\",\n \"file_name\": _get_test_resource_name(config, \"osago/trust_submission_docs.tex\"),\n \"is_strict\": False,\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_TRUST_SUBMISION_OBTAIN_DOCS\n }\n\n OSAGO_MAIL_LIST_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_MAIL_LIST,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Опись ценного письма для ОСАГО\",\n \"conditions\": {\n \"submission_way\": \"mail\"\n },\n \"collections\": [\"pretension_collection\"],\n \"batch_statuses\": [\"pretension\"],\n \"fields\": [\n INSURANCE_NAME_FINAL_FIELD,\n INSURANCE_ADDRESS_FINAL_FIELD,\n {\n \"name\": \"is_claim_created\",\n \"type\": \"calculated\",\n \"suppress_validation_errors\": True,\n \"field_type\": \"DocBoolField\",\n \"value\": {\n \"#cases\": {\n \"set\": {\n \"calc_docs_list\": REQUIRED_DOCS_FIELD\n },\n \"list\": [{\n \"conditions\": {\n \"calc_docs_list->__len__\": {\n \"#gt\": 0\n }\n },\n \"value\": {\n \"#value\": True\n }\n }],\n \"default\": {\n \"value\": {\n \"#value\": False\n }\n }\n }\n }\n }, {\n \"name\": \"submission_way\",\n 
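# The "#cases"/"#min"/"#sub" expression in UNDERPAY_SUM_FIELD above computes exactly the
# formula spelled out in its comment. Plain-Python equivalent (the function name is
# illustrative; the payout caps and the 2014-10-01 cutoff come straight from the schema):
from datetime import datetime

def underpay_sum(policy_called, policy_date, independent_expertise_sum,
                 problem_type, compensation_sum):
    cutoff = datetime(2014, 10, 1)
    if policy_called:  # police (GIBDD) attended the crash -- field name as in the schema
        limit = 120000 if policy_date < cutoff else 400000
    else:              # europrotocol-style case with the lower payout cap
        limit = 25000 if policy_date < cutoff else 50000
    already_paid = 0 if problem_type == "refusal" else compensation_sum
    return min(limit, independent_expertise_sum) - already_paid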
\"type\": \"DocEnumField\",\n \"enum_cls\": \"ApplicationTypeEnum\",\n \"required\": True\n }\n ]\n }\n\n OSAGO_PRETENSION_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_PRETENSION,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Претензия по ОСАГО\",\n \"batch_statuses\": [\"pretension\"],\n \"collections\": [\"pretension_collection\"],\n \"fields\": [\n {\n \"name\": \"paid_document\",\n \"type\": \"calculated\",\n \"field_type\": \"DocBoolField\",\n \"value\": {\n \"#exec\": {\n \"module\": \"osago_reg_methods\",\n \"method\": \"is_paid_document\",\n \"args\": [{\n \"#field\": \"<batch_id>\"\n }, {\n \"#field\": \"<document_type>\"\n }]\n }\n }\n },\n OWNER_ADDRESS_FIELD,\n RESPONSIBLE_PERSON_ADDRESS_FIELD,\n DOC_DATE_FIELD,\n INSURANCE_NAME_FINAL_FIELD,\n INSURANCE_ADDRESS_FINAL_FIELD,\n RESULT_FIELDS_FIELD,\n {\n \"name\": \"first_claim_date\", #дата первого заявления о наступлении аварии в формате ISO_8601 — \"2005-08-09T18:31:42\"\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": True\n },\n {\n \"name\": \"own_insurance_company\",\n \"type\": \"DocBoolField\",\n \"required\": True\n },\n {\n \"name\": \"policy_date\",\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": True\n },\n {\n \"name\": \"independent_expertise_sum\",\n \"type\": \"DocDecimalField\",\n \"required\": True\n },\n {\n \"name\": \"independent_expertise_cost\",\n \"type\": \"DocDecimalField\",\n \"required\": True\n },\n {\n \"name\": \"insurance_company_region\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"RFRegionsEnum\",\n \"required\": True\n },\n {\n \"name\": \"obtain_address_type\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ObtainAddressEnum\",\n \"required\": True\n },\n {\n \"name\": \"victim_owner\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": True,\n \"override_fields_kwargs\": {\n \"address\": {\"required\": True},\n \"phone\": {\"required\": True}\n }\n },\n {\n \"name\": \"responsible_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"address\": {\"required\": True},\n \"phone\": {\"required\": True}\n }\n },\n {\n \"name\": \"victim_car_brand\",\n \"type\": \"DocTextField\",\n \"required\": True,\n \"max_length\": 100,\n \"min_length\": 1\n },\n {\n \"name\": \"victim_car_number\",\n \"type\": \"DocTextField\",\n \"required\": True,\n \"max_length\": 15,\n \"allowed_re\": ur\"^[0-9а-яА-Яa-zA-Z]*$\"\n },\n {\n \"name\": \"crash_date\",\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": True\n },\n {\n \"name\": \"owner_as_victim_driver\",\n \"type\": \"DocBoolField\",\n \"required\": True\n },\n {\n \"name\": \"victim_driver\", #идентификатор физического лица водителя пострадавшего автомобиля\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"birthdate\": {\"required\": False},\n \"birthplace\": {\"required\": False}\n }\n },\n {\n \"name\": \"guilty_owner\", #идентификатор физического лица владельца виновного автомобиля\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": True,\n \"override_fields_kwargs\": {\n \"birthdate\": {\"required\": False},\n \"birthplace\": {\"required\": False},\n \"address\": {\"required\": True}\n }\n },\n {\n \"name\": \"owner_as_guilty_driver\",\n \"type\": \"DocBoolField\",\n \"required\": True\n },\n {\n \"name\": \"guilty_driver\", 
#идентификатор физического лица водителя виновного автомобиля\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"birthdate\": {\"required\": False},\n \"birthplace\": {\"required\": False}\n }\n },\n {\n \"name\": \"guilty_car_brand\",\n \"type\": \"DocTextField\",\n \"required\": True,\n \"max_length\": 100,\n \"min_length\": 1\n },\n {\n \"name\": \"guilty_car_number\",\n \"type\": \"DocTextField\",\n \"required\": True,\n \"max_length\": 15,\n \"min_length\": 1,\n \"allowed_re\": ur\"^[0-9а-яА-Яa-zA-Z]*$\"\n },\n {\n \"name\": \"policy_series\",\n \"type\": \"DocTextField\",\n \"required\": True,\n \"max_length\": 3,\n \"min_length\": 1\n },\n {\n \"name\": \"policy_number\",\n \"type\": \"DocTextField\",\n \"required\": True,\n \"max_length\": 10,\n \"allowed_re\": ur\"^[0-9]*$\",\n \"min_length\": 1\n },\n {\n \"name\": \"problem_type\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"OsagoReasonEnum\",\n \"required\": True\n },\n {\n \"name\": \"compensation_sum\",\n \"type\": \"DocDecimalField\",\n \"required\": False\n },\n {\n \"name\": \"bik_account\",\n \"type\": \"DocTextField\",\n \"required\": True,\n \"min_length\": 9,\n \"allowed_re\": ur\"^[0-9]*$\"\n },\n {\n \"name\": \"account_number\",\n \"type\": \"DocTextField\",\n \"required\": True,\n \"min_length\": 20,\n \"allowed_re\": ur\"^[0-9]*$\"\n },\n {\n \"name\": \"independent_expertise_number\",\n \"type\": \"DocTextField\",\n \"required\": True,\n \"max_length\": 20,\n \"min_length\": 0\n },\n {\n \"name\": \"use_other_submission_address\",\n \"type\": \"DocBoolField\",\n \"required\": True\n },\n {\n \"name\": \"submission_address\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"min_length\": 0\n },\n {\n \"name\": \"other_insurance\",\n \"type\": \"DocBoolField\",\n \"required\": True\n },\n {\n \"name\": \"insurance_name\",\n \"type\": \"DocTextField\",\n \"required\": False,\n },\n {\n \"name\": \"submission_branch_id\",\n \"type\": \"DocTextField\",\n \"required\": False,\n },\n {\n \"name\": \"obtain_address\",\n \"type\": \"DocTextField\",\n \"required\": False\n },\n {\n \"name\": \"bank_info\",\n \"type\": \"calculated\",\n \"field_type\": \"DocJsonField\",\n \"error_field_mapping\": {\n \"bank_info\": \".\"\n },\n \"required\": True,\n \"value\": {\n \"#exec\": {\n \"module\": \"llc_reg_methods\",\n \"method\": \"get_bank_info\",\n \"args\": [{\n \"#field\": \"bik_account\"\n }]\n }\n },\n \"validator\": {\n \"#set\": {\n \"test_bik\": {\n \"#field\": \"value->bik\"\n }\n },\n \"conditions\": [{\n \"test_bik\": {\n \"#not_empty\": True\n }\n }],\n \"error_field\": \"bik_account\"\n }\n },\n {\n \"name\": \"police_case\",\n \"type\": \"DocBoolField\",\n \"required\": True\n },\n {\n \"name\": \"obtain_way\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ApplicationTypeEnum\",\n \"required\": True\n },\n {\n \"name\": \"submission_way\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ApplicationTypeEnum\",\n \"required\": True\n },\n {\n \"name\": \"add_person_to_claim\",\n \"type\": \"DocBoolField\",\n \"required\": True\n },\n ],\n \"validators\": [{\n \"condition\": {\n \"#or\": [{\n \"use_other_submission_address\": True,\n \"submission_address\": {\n \"#empty\": False\n }\n }, {\n \"use_other_submission_address\": {\n \"#ne\": True\n }\n }]\n },\n \"error\": {\n \"field\": \"submission_address\",\n \"code\": 5\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"use_other_submission_address\": {\n \"#ne\": True\n },\n 
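# Hedged sketch of what the "#exec" directive in the bank_info field above has to do at
# render time: import the named module, fetch the method, call it with the resolved
# "#field" arguments. The resolver below is illustrative, not the framework's real one.
import importlib

def run_exec_directive(spec, resolved_args):
    module = importlib.import_module(spec["module"])  # e.g. "llc_reg_methods"
    method = getattr(module, spec["method"])          # e.g. get_bank_info
    return method(*resolved_args)                     # e.g. called with (bik_account,)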
\"submission_branch_id\": {\n \"#empty\": False\n }\n }, {\n \"use_other_submission_address\": {\n \"#ne\": False\n }\n }]\n },\n \"error\": {\n \"field\": \"submission_branch_id\",\n \"code\": 5\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"obtain_address_type\": \"other_address\",\n \"obtain_address\": {\n \"#empty\": False\n }\n }, {\n \"obtain_address_type\": {\n \"#ne\": \"other_address\"\n }\n }]\n },\n \"error\": {\n \"field\": \"obtain_address\",\n \"code\": 5\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"obtain_address_type\": \"owner_address\",\n \"victim_owner\": {\n \"#empty\": False\n }\n }, {\n \"obtain_address_type\": {\n \"#ne\": \"owner_address\"\n }\n }]\n },\n \"error\": {\n \"field\": \"victim_owner\",\n \"code\": 5\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"obtain_address_type\": \"responsible_person_address\",\n \"responsible_person\": {\n \"#empty\": False\n }\n }, {\n \"obtain_address_type\": {\n \"#ne\": \"responsible_person_address\"\n }\n }]\n },\n \"error\": {\n \"field\": \"responsible_person\",\n \"code\": 5\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"other_insurance\": {\n \"#ne\": False\n },\n \"insurance_name\": {\n \"#empty\": False\n }\n }, {\n \"other_insurance\": False\n }]\n },\n \"error\": {\n \"field\": \"insurance_id\",\n \"code\": 5\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"owner_as_guilty_driver\": {\n \"#ne\": True\n },\n \"guilty_driver\": {\n \"#empty\": False\n }\n }, {\n \"owner_as_guilty_driver\": {\n \"#ne\": False\n },\n }]\n },\n \"error\": {\n \"field\": \"guilty_driver\",\n \"code\": 5\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"owner_as_victim_driver\": False,\n \"victim_driver\": {\n \"#empty\": False\n }\n }, {\n \"owner_as_victim_driver\": True\n }]\n },\n \"error\": {\n \"field\": \"victim_driver\",\n \"code\": 5\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"submission_way\": \"responsible_person\",\n \"court_include\": {\n \"#exists\": True\n }\n }, {\n \"submission_way\": {\n \"#ne\": \"responsible_person\"\n }\n }]\n },\n \"error\": {\n \"field\": \"court_include\",\n \"code\": 4\n }\n }]\n }\n\n OSAGO_DOCUMENTS_CLAIM_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_DOCUMENTS_CLAIM,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Заявление на выдачу документов по ОСАГО\",\n \"collections\": [\"pretension_collection\"],\n \"conditions\": {\n \"requested_docs->__len__\": {\n \"#gt\": 0\n },\n \"#not\": {\n \"requested_docs->__len__\": 1,\n \"act_insurance_case\": {\n \"#in\": \"@requested_docs\"\n },\n \"problem_type\": {\n \"#ne\": \"underpay\"\n }\n }\n },\n \"batch_statuses\": [\"pretension\"],\n \"fields\": [\n OWNER_ADDRESS_FIELD,\n RESPONSIBLE_PERSON_ADDRESS_FIELD,\n INSURANCE_NAME_FINAL_FIELD,\n INSURANCE_ADDRESS_FINAL_FIELD,\n DOC_DATE_FIELD,\n RESULT_FIELDS_FIELD,\n {\n \"name\": \"own_insurance_company\",\n \"type\": \"DocBoolField\",\n \"required\": True\n },\n {\n \"name\": \"problem_type\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"OsagoReasonEnum\",\n \"required\": True\n },\n {\n \"name\": \"obtain_address_type\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ObtainAddressEnum\",\n \"required\": True\n },\n {\n \"name\": \"victim_owner\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": True\n },\n {\n \"name\": \"police_case\",\n \"type\": \"DocBoolField\",\n \"required\": True\n },\n {\n \"name\": \"requested_docs\",\n \"type\": \"calculated\",\n \"suppress_validation_errors\": True,\n \"field_type\": \"DocArrayField\",\n 
\"cls\": \"DocEnumField\",\n \"subfield_kwargs\": {\n \"enum_cls\": \"OsagoDocTypeEnum\"\n },\n \"value\": REQUIRED_DOCS_FIELD\n },\n {\n \"name\": \"responsible_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False\n }, {\n \"name\": \"victim_car_brand\",\n \"type\": \"DocTextField\",\n \"required\": True,\n \"max_length\": 100,\n \"min_length\": 1\n }, {\n \"name\": \"victim_car_number\",\n \"type\": \"DocTextField\",\n \"required\": True,\n \"max_length\": 15,\n \"allowed_re\": ur\"^[\\s0-9а-яА-Яa-zA-Z]*$\"\n }, {\n \"name\": \"crash_date\",\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": True\n }, {\n \"name\": \"obtain_way\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ApplicationTypeEnum\",\n \"required\": True\n }, {\n \"name\": \"submission_way\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ApplicationTypeEnum\",\n \"required\": True\n }, {\n \"name\": \"obtain_address\",\n \"type\": \"DocTextField\",\n \"required\": False\n }]\n }\n\n OSAGO_TRUST_SUBMISSION_DOCS_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_TRUST_SUBMISSION_DOCS,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Доверенность на подачу документов в страховую\",\n \"collections\": [\"pretension_collection\"],\n \"conditions\": {\n \"submission_way\": \"responsible_person\",\n \"obtain_way\": {\n \"#ne\": \"responsible_person\"\n }\n },\n \"batch_statuses\": [\"pretension\"],\n \"fields\": [\n DOC_DATE_FIELD,\n INSURANCE_NAME_FINAL_FIELD,\n {\n \"name\": \"victim_owner\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": True\n },\n {\n \"name\": \"responsible_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": True\n },\n {\n \"name\": \"court_include\",\n \"type\": \"DocBoolField\",\n \"required\": True\n },\n {\n \"name\": \"crash_date\",\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": True\n },\n {\n \"name\": \"victim_car_brand\",\n \"type\": \"DocTextField\",\n \"required\": True,\n \"max_length\": 100\n },\n {\n \"name\": \"victim_car_number\",\n \"type\": \"DocTextField\",\n \"required\": True,\n \"max_length\": 15,\n \"allowed_re\": ur\"^[\\s0-9а-яА-Яa-zA-Z]*$\"\n }\n ]\n }\n\n OSAGO_TRUST_OBTAIN_DOCS_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_TRUST_OBTAIN_DOCS,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Доверенность на получение документов из страховой\",\n \"collections\": [\"pretension_collection\"],\n \"conditions\": {\n \"obtain_way\": \"responsible_person\",\n \"submission_way\": {\n \"#ne\": \"responsible_person\"\n }\n },\n \"batch_statuses\": [\"pretension\"],\n \"fields\": [\n DOC_DATE_FIELD,\n INSURANCE_NAME_FINAL_FIELD,\n {\n \"name\": \"victim_owner\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": True\n },\n {\n \"name\": \"responsible_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": True\n },\n {\n \"name\": \"court_include\",\n \"type\": \"DocBoolField\",\n \"required\": True\n },\n {\n \"name\": \"crash_date\",\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": True\n },\n {\n \"name\": \"victim_car_brand\",\n \"type\": \"DocTextField\",\n \"required\": True,\n \"max_length\": 100\n },\n {\n \"name\": \"victim_car_number\",\n \"type\": \"DocTextField\",\n \"required\": True,\n \"max_length\": 15,\n \"allowed_re\": ur\"^[\\s0-9а-яА-Яa-zA-Z]*$\"\n }\n ]\n 
}\n\n OSAGO_TRUST_SUBMISION_OBTAIN_DOCS_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_TRUST_SUBMISION_OBTAIN_DOCS,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Доверенность на подачу и получение документов из страховой\",\n \"collections\": [\"pretension_collection\"],\n \"conditions\": {\n \"obtain_way\": \"responsible_person\",\n \"submission_way\": \"responsible_person\"\n },\n \"batch_statuses\": [\"pretension\"],\n \"fields\": [\n DOC_DATE_FIELD,\n INSURANCE_NAME_FINAL_FIELD,\n {\n \"name\": \"victim_owner\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": True\n },\n {\n \"name\": \"responsible_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": True\n },\n {\n \"name\": \"court_include\",\n \"type\": \"DocBoolField\",\n \"required\": True\n },\n {\n \"name\": \"crash_date\",\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": True\n },\n {\n \"name\": \"victim_car_brand\",\n \"type\": \"DocTextField\",\n \"required\": True,\n \"max_length\": 100\n },\n {\n \"name\": \"victim_car_number\",\n \"type\": \"DocTextField\",\n \"required\": True,\n \"max_length\": 15,\n \"allowed_re\": ur\"^[\\s0-9а-яА-Яa-zA-Z]*$\"\n }\n ]\n }\n\n OSAGO_CLAIM_COURT_ABSENT_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_CLAIM_COURT_ABSENT,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Заявление об отсутствии на суде\",\n \"batch_statuses\": [\"claim\"],\n \"collections\": [\"claim_collection\"],\n \"conditions\": {\n 'court_attendance': {\n '#ne': 'oneself'\n }\n },\n \"fields\": [\n {\n \"name\": \"obtain_address_type\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ObtainAddressEnum\",\n \"required\": False\n },\n {\n \"name\": \"obtain_address\",\n \"type\": \"DocTextField\",\n \"required\": False\n },\n {\n \"name\": \"court_attendance\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"CourtAttendanceEnum\",\n \"required\": True\n },\n {\n \"name\": \"other_insurance\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"insurance_name\",\n \"type\": \"DocTextField\",\n \"required\": False,\n },\n {\n \"name\": \"insurance_id\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"min_length\": 0,\n \"max_length\": 30\n },\n {\n \"name\": \"court_name\",\n \"type\": \"DocTextField\",\n \"max_length\": 200,\n \"required\": True\n },\n {\n \"name\": \"court_address\",\n \"type\": \"DocTextField\",\n \"max_length\": 1024,\n \"required\": True\n },\n {\n \"name\": \"victim_owner\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False\n },\n {\n \"name\": \"guilty_owner\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"birthdate\": {\"required\": False},\n \"birthplace\": {\"required\": False}\n }\n },\n {\n \"name\": \"guilty_driver\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"birthdate\": {\"required\": False},\n \"birthplace\": {\"required\": False}\n }\n },\n {\n \"name\": \"add_person_to_claim\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"lawsuit_submission_way\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ApplicationTypeEnum\",\n \"required\": True\n },\n {\n \"name\": \"court_include\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"lawsuit_submission_responsible_person\",\n 
\"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"address\": {\"required\": True},\n \"phone\": {\"required\": True},\n }\n },\n {\n \"name\": \"responsible_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n },\n {\n \"name\": \"owner_as_guilty_driver\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n DOC_DATE_FIELD,\n FINAL_RESP_PERSON_FIELD,\n RESULT_FIELDS_FIELD,\n INSURANCE_ADDRESS_FINAL_FIELD,\n INSURANCE_NAME_FINAL_FIELD,\n {\n \"name\": \"use_other_submission_address\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"submission_address\",\n \"type\": \"DocTextField\",\n \"required\": False,\n },\n {\n \"name\": \"lawsuit_date\",\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": True\n },\n ]\n }\n\n OSAGO_CLAIM_COURT_ABSENT_TEMPLATE = {\n \"template_name\": \"template_osago2_1\",\n \"file_name\": _get_test_resource_name(config, \"osago/court_absence_claim.tex\"),\n \"is_strict\": False,\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_CLAIM_COURT_ABSENT\n }\n\n OSAGO_CLAIM_ALL_EXECUTION_ACT_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_CLAIM_ALL_EXECUTION_ACT,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Заявление о выдаче двух ИЛ\",\n \"conditions\": {\n \"add_person_to_claim\": True\n },\n \"collections\": [\"court_collection\"],\n \"batch_statuses\": [\"court\"],\n \"fields\": [\n DOC_DATE_FIELD,\n RESULT_FIELDS_FIELD,\n {\n \"name\": \"lawsuit_number\",\n \"type\": \"DocTextField\",\n \"max_length\": 20,\n \"min_length\": 1,\n \"required\": True\n },\n {\n \"name\": \"court_name\",\n \"type\": \"DocTextField\",\n \"max_length\": 200,\n \"required\": True\n },\n {\n \"name\": \"court_address\",\n \"type\": \"DocTextField\",\n \"max_length\": 1024,\n \"required\": True\n },\n {\n \"name\": \"lawsuit_submission_way\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ApplicationTypeEnum\",\n \"required\": True\n },\n {\n \"name\": \"victim_owner\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False\n },\n {\n \"name\": \"obtain_address_type\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ObtainAddressEnum\",\n \"required\": False\n },\n {\n \"name\": \"obtain_address\",\n \"type\": \"DocTextField\",\n \"required\": False\n },\n {\n \"name\": \"court_include\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"responsible_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n },\n {\n \"name\": \"add_person_to_claim\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"owner_as_guilty_driver\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"guilty_driver\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"birthdate\": {\"required\": False},\n \"birthplace\": {\"required\": False}\n }\n },\n {\n \"name\": \"guilty_owner\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"birthdate\": {\"required\": False},\n \"birthplace\": {\"required\": False}\n }\n },\n INSURANCE_ADDRESS_FINAL_FIELD,\n INSURANCE_NAME_FINAL_FIELD,\n {\n \"name\": \"other_insurance\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"insurance_name\",\n \"type\": \"DocTextField\",\n \"required\": False,\n },\n 
{\n \"name\": \"insurance_id\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"min_length\": 0,\n \"max_length\": 30\n },\n {\n \"name\": \"use_other_submission_address\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"submission_address\",\n \"type\": \"DocTextField\",\n \"required\": False\n }\n ]\n }\n\n OSAGO_CLAIM_ALL_EXECUTION_ACT_TEMPLATE = {\n \"template_name\": \"template_osago2_2\",\n \"file_name\": _get_test_resource_name(config, \"osago/il_claim_double.tex\"),\n \"is_strict\": False,\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_CLAIM_ALL_EXECUTION_ACT\n }\n\n OSAGO_CLAIM_GUILTY_EXECUTION_ACT_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_CLAIM_GUILTY_EXECUTION_ACT,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Заявление о получении ИЛ к виновнику\",\n \"conditions\": {\n \"add_person_to_claim\": True\n },\n \"batch_statuses\": [\"court\"],\n \"collections\": [\"court_collection\"],\n \"fields\": [\n DOC_DATE_FIELD,\n RESULT_FIELDS_FIELD,\n {\n \"name\": \"lawsuit_number\",\n \"type\": \"DocTextField\",\n \"max_length\": 20,\n \"min_length\": 1,\n \"required\": True\n },\n {\n \"name\": \"court_name\",\n \"type\": \"DocTextField\",\n \"max_length\": 200,\n \"required\": True\n },\n {\n \"name\": \"court_address\",\n \"type\": \"DocTextField\",\n \"max_length\": 1024,\n \"required\": True\n },\n {\n \"name\": \"lawsuit_submission_way\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ApplicationTypeEnum\",\n \"required\": True\n },\n {\n \"name\": \"victim_owner\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False\n },\n {\n \"name\": \"obtain_address_type\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ObtainAddressEnum\",\n \"required\": False\n },\n {\n \"name\": \"obtain_address\",\n \"type\": \"DocTextField\",\n \"required\": False\n },\n {\n \"name\": \"court_include\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"responsible_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n },\n {\n \"name\": \"add_person_to_claim\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"owner_as_guilty_driver\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"guilty_driver\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"birthdate\": {\"required\": False},\n \"birthplace\": {\"required\": False}\n }\n },\n {\n \"name\": \"guilty_owner\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"birthdate\": {\"required\": False},\n \"birthplace\": {\"required\": False}\n }\n },\n INSURANCE_ADDRESS_FINAL_FIELD,\n INSURANCE_NAME_FINAL_FIELD,\n {\n \"name\": \"other_insurance\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"insurance_name\",\n \"type\": \"DocTextField\",\n \"required\": False,\n },\n {\n \"name\": \"insurance_id\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"min_length\": 0,\n \"max_length\": 30\n },\n {\n \"name\": \"use_other_submission_address\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"submission_address\",\n \"type\": \"DocTextField\",\n \"required\": False\n },\n {\n \"name\": \"guilty_execution_act_responsible_person\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"guilty_execution_act_obtain_way\",\n \"type\": 
\"DocEnumField\",\n \"enum_cls\": \"ActObtainWayEnum\",\n \"required\": False\n },\n ],\n \"validators\": [{\n \"condition\": {\n \"#or\": [{\n \"guilty_execution_act_responsible_person\": {\n \"#exists\": True\n },\n \"lawsuit_submission_way\": \"responsible_person\"\n }, {\n \"lawsuit_submission_way\": {\n \"#ne\": \"responsible_person\"\n }\n }]\n },\n \"error\": {\n \"field\": \"guilty_execution_act_responsible_person\",\n \"code\": 4\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"guilty_execution_act_obtain_way\": {\"#exists\": True},\n \"#or\": [{\n \"lawsuit_submission_way\": \"responsible_person\",\n \"guilty_execution_act_responsible_person\": {\"#ne\": False}\n }]\n\n }, {\n \"#not\": {\n \"#or\": [{\n \"lawsuit_submission_way\": \"responsible_person\",\n \"guilty_execution_act_responsible_person\": {\"#ne\": False}\n }]\n }\n }]\n },\n \"error\": {\n \"field\": \"guilty_execution_act_obtain_way\",\n \"code\": 4\n }\n }]\n }\n\n OSAGO_CLAIM_GUILTY_EXECUTION_ACT_TEMPLATE = {\n \"template_name\": \"template_osago2_3\",\n \"file_name\": _get_test_resource_name(config, \"osago/il_claim_guilty.tex\"),\n \"is_strict\": False,\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_CLAIM_GUILTY_EXECUTION_ACT\n }\n\n OSAGO_CLAIM_INSURANCE_EXECUTION_ACT_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_CLAIM_INSURANCE_EXECUTION_ACT,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Заявление о получении ИЛ к страховой\",\n \"conditions\": {},\n \"batch_statuses\": [\"court\"],\n \"collections\": [\"court_collection\"],\n \"fields\": [\n DOC_DATE_FIELD,\n RESULT_FIELDS_FIELD,\n {\n \"name\": \"lawsuit_number\",\n \"type\": \"DocTextField\",\n \"max_length\": 20,\n \"min_length\": 1,\n \"required\": True\n },\n {\n \"name\": \"court_name\",\n \"type\": \"DocTextField\",\n \"max_length\": 200,\n \"required\": True\n },\n {\n \"name\": \"court_address\",\n \"type\": \"DocTextField\",\n \"max_length\": 1024,\n \"required\": True\n },\n {\n \"name\": \"lawsuit_submission_way\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ApplicationTypeEnum\",\n \"required\": True\n },\n {\n \"name\": \"victim_owner\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False\n },\n {\n \"name\": \"obtain_address_type\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ObtainAddressEnum\",\n \"required\": False\n },\n {\n \"name\": \"obtain_address\",\n \"type\": \"DocTextField\",\n \"required\": False\n },\n {\n \"name\": \"court_include\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"responsible_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n },\n {\n \"name\": \"add_person_to_claim\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"owner_as_guilty_driver\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"guilty_driver\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"birthdate\": {\"required\": False},\n \"birthplace\": {\"required\": False}\n }\n },\n {\n \"name\": \"guilty_owner\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"birthdate\": {\"required\": False},\n \"birthplace\": {\"required\": False}\n }\n },\n INSURANCE_ADDRESS_FINAL_FIELD,\n INSURANCE_NAME_FINAL_FIELD,\n {\n \"name\": \"other_insurance\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"insurance_name\",\n 
\"type\": \"DocTextField\",\n \"required\": False,\n },\n {\n \"name\": \"insurance_id\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"min_length\": 0,\n \"max_length\": 30\n },\n {\n \"name\": \"use_other_submission_address\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"submission_address\",\n \"type\": \"DocTextField\",\n \"required\": False\n },\n {\n \"name\": \"insurance_execution_act_responsible_person\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"insurance_execution_act_obtain_way\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ActObtainWayEnum\",\n \"required\": False\n },\n\n ],\n \"validators\": [{\n \"condition\": {\n \"#or\": [{\n \"insurance_execution_act_responsible_person\": {\n \"#exists\": True\n },\n \"lawsuit_submission_way\": \"responsible_person\"\n }, {\n \"lawsuit_submission_way\": {\n \"#ne\": \"responsible_person\"\n }\n }]\n },\n \"error\": {\n \"field\": \"insurance_execution_act_responsible_person\",\n \"code\": 4\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"insurance_execution_act_obtain_way\": {\"#exists\": True},\n \"#or\": [{\n \"lawsuit_submission_way\": \"responsible_person\",\n \"insurance_execution_act_responsible_person\": {\"#ne\": False}\n }]\n\n }, {\n \"#not\": {\n \"#or\": [{\n \"lawsuit_submission_way\": \"responsible_person\",\n \"insurance_execution_act_responsible_person\": {\"#ne\": False}\n }]\n }\n }]\n },\n \"error\": {\n \"field\": \"insurance_execution_act_obtain_way\",\n \"code\": 4\n }\n }]\n # insurance_execution_act_oneself - required if: не через представителя (lawsuit_submission_way)\n # insurance_execution_act_responsible_person - required if: через представителя (lawsuit_submission_way)\n # insurance_execution_act_obtain_way - required if: (insurance_execution_act_oneself == False && не через представителя) || (через представителя && insurance_execution_act_responsible_person==False)\n }\n\n OSAGO_CLAIM_INSURANCE_EXECUTION_ACT_TEMPLATE = {\n \"template_name\": \"template_osago2_4\",\n \"file_name\": _get_test_resource_name(config, \"osago/il_claim_insurance.tex\"),\n \"is_strict\": False,\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_CLAIM_INSURANCE_EXECUTION_ACT\n }\n\n OSAGO_LAWSUIT_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_LAWSUIT,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Иск\",\n \"batch_statuses\": [\"claim\"],\n \"collections\": [\"claim_collection\"],\n \"fields\": [\n {\n \"name\": \"paid_document\",\n \"type\": \"calculated\",\n \"field_type\": \"DocBoolField\",\n \"value\": {\n \"#exec\": {\n \"module\": \"osago_reg_methods\",\n \"method\": \"is_paid_document\",\n \"args\": [{\n \"#field\": \"<batch_id>\"\n }, {\n \"#field\": \"<document_type>\"\n }]\n }\n }\n },\n DOC_DATE_FIELD,\n {\n \"name\": \"isk_type\",\n \"type\": \"calculated\",\n \"suppress_validation_errors\": True,\n \"field_type\": \"DocTextField\",\n \"value\": {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"problem_type\": \"refusal\",\n \"policy_called\": False\n },\n \"value\": {\n \"#value\": \"EURO_ISK\"\n }\n }, {\n \"conditions\": {\n \"problem_type\": \"refusal\",\n \"policy_called\": True\n },\n \"value\": {\n \"#value\": \"GIBDD_ISK\"\n }\n }],\n \"default\": {\n \"value\": {\n \"#value\": \"ISK_UNDERPAY\"\n }\n }\n }\n }\n },\n {\n \"name\": \"docs_got\",\n \"type\": \"DocArrayField\",\n \"cls\": \"DocEnumField\",\n \"subfield_kwargs\": {\n \"enum_cls\": \"OsagoDocTypeEnum\"\n },\n \"required\": False,\n 
\"default\": []\n },\n {\n \"name\": \"lawsuit_submission_way\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ApplicationTypeEnum\",\n \"required\": True\n },\n {\n \"name\": \"submission_way\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ApplicationTypeEnum\",\n \"required\": True\n },\n {\n \"name\": \"court_include\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"responsible_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n },\n {\n \"name\": \"lawsuit_submission_responsible_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"address\": {\"required\": True},\n \"phone\": {\"required\": True},\n }\n },\n {\n \"name\": \"victim_owner\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False\n },\n {\n \"name\": \"add_person_to_claim\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"court_name\",\n \"type\": \"DocTextField\",\n \"max_length\": 200,\n \"required\": True\n },\n {\n \"name\": \"court_address\",\n \"type\": \"DocTextField\",\n \"max_length\": 1024,\n \"required\": True\n },\n {\n \"name\": \"obtain_address_type\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ObtainAddressEnum\",\n \"required\": False\n },\n {\n \"name\": \"obtain_address\",\n \"type\": \"DocTextField\",\n \"required\": False\n },\n {\n \"name\": \"owner_as_guilty_driver\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"guilty_driver\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"birthdate\": {\"required\": False},\n \"birthplace\": {\"required\": False}\n }\n },\n {\n \"name\": \"guilty_car_brand\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"max_length\": 100,\n \"min_length\": 1\n },\n {\n \"name\": \"guilty_car_number\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"max_length\": 15,\n \"min_length\": 1,\n \"allowed_re\": ur\"^[0-9а-яА-Яa-zA-Z]*$\"\n },\n RESULT_FIELDS_FIELD,\n INSURANCE_ADDRESS_FINAL_FIELD,\n INSURANCE_NAME_FINAL_FIELD,\n {\n \"name\": \"crash_date\",\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": False\n },\n {\n \"name\": \"problem_type\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"OsagoReasonEnum\",\n \"required\": False\n },\n {\n \"name\": \"policy_called\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"victim_car_brand\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"max_length\": 100,\n \"min_length\": 1\n },\n {\n \"name\": \"victim_car_number\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"max_length\": 15,\n \"allowed_re\": ur\"^[0-9а-яА-Яa-zA-Z]*$\"\n },\n {\n \"name\": \"victim_owner\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False\n },\n {\n \"name\": \"owner_as_victim_driver\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"victim_driver\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"birthdate\": {\"required\": False},\n \"birthplace\": {\"required\": False}\n }\n },\n {\n \"name\": \"guilty_owner\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"birthdate\": {\"required\": False},\n \"birthplace\": {\"required\": False}\n }\n },\n {\n \"name\": 
\"own_insurance_company\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"policy_series\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"max_length\": 3,\n \"min_length\": 1\n },\n {\n \"name\": \"policy_number\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"max_length\": 10,\n \"allowed_re\": ur\"^[0-9]*$\",\n \"min_length\": 1\n },\n {\n \"name\": \"refusal_reason\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"OsagoRefusalReasonEnum\",\n \"required\": False\n },\n {\n \"name\": \"independent_expertise_cost\",\n \"type\": \"DocDecimalField\",\n \"required\": False\n },\n {\n \"name\": \"independent_expertise_number\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"max_length\": 20,\n \"min_length\": 0\n },\n {\n \"name\": \"independent_expertise_sum\",\n \"type\": \"DocDecimalField\",\n \"required\": False\n },\n {\n \"name\": \"pretension_answer_got\",\n \"type\": \"DocBoolField\",\n \"required\": True\n },\n {\n \"name\": \"pretension_result\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"PretensionResultEnum\",\n \"required\": True\n },\n {\n \"name\": \"make_lawsuit\",\n \"type\": \"DocBoolField\",\n \"required\": True\n },\n {\n \"name\": \"insurance_returned_docs\",\n \"type\": \"DocArrayField\",\n \"cls\": \"DocEnumField\",\n \"subfield_kwargs\": {\n \"enum_cls\": \"OsagoDocTypeEnum\"\n },\n \"required\": False,\n \"default\": []\n },\n {\n \"name\": \"moral_damages\",\n \"type\": \"DocDecimalField\",\n \"required\": False,\n \"default\": 0\n },\n {\n \"name\": \"notary_costs\",\n \"type\": \"DocDecimalField\",\n \"required\": False,\n \"default\": 0\n },\n {\n \"name\": \"insurance_lawsuit\",\n \"type\": \"DocArrayField\",\n \"cls\": \"DocEnumField\",\n \"subfield_kwargs\": {\n \"enum_cls\": \"InsuranceLawsuitEnum\"\n },\n \"required\": True,\n \"min_length\": 1\n },\n {\n \"name\": \"lawsuit_date\",\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": True\n },\n {\n \"name\": \"first_claim_date\",\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": False\n },\n {\n \"name\": \"compensation_got\",\n \"type\": \"DocDecimalField\",\n \"required\": False,\n },\n {\n \"name\": \"compensation_sum\",\n \"type\": \"DocDecimalField\",\n \"required\": False\n },\n {\n \"name\": \"compensation_date\",\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": False\n },\n {\n \"name\": \"other_insurance\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"insurance_name\",\n \"type\": \"DocTextField\",\n \"required\": False,\n },\n {\n \"name\": \"insurance_id\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"min_length\": 0,\n \"max_length\": 30\n },\n {\n \"name\": \"use_other_submission_address\",\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"submission_address\",\n \"type\": \"DocTextField\",\n \"required\": False,\n },\n ],\n \"validators\": [{\n \"condition\": {\n \"#or\": [{\n \"pretension_result\": \"partial_success\",\n \"compensation_got\": {\n \"#gt\": 0\n }\n }, {\n \"pretension_result\": {\n \"#ne\": \"partial_success\"\n }\n }]\n },\n \"error\": {\n \"field\": \"compensation_got\",\n \"code\": 5\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"pretension_result\": \"partial_success\",\n \"compensation_date\": {\n \"#empty\": False\n }\n }, {\n \"pretension_result\": \"success\",\n \"make_lawsuit\": True,\n \"compensation_date\": {\n \"#empty\": False\n }\n }, 
{\n \"#not\": {\n \"#or\": [{\n \"pretension_result\": \"partial_success\"\n }, {\n \"pretension_result\": \"success\",\n \"make_lawsuit\": True\n }]\n }\n }]\n },\n \"error\": {\n \"field\": \"compensation_date\",\n \"code\": 5\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"submission_way\": \"responsible_person\",\n \"court_include\": {\n \"#exists\": True\n }\n }, {\n \"submission_way\": {\n \"#ne\": \"responsible_person\"\n }\n }]\n },\n \"error\": {\n \"field\": \"court_include\",\n \"code\": 4\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"lawsuit_submission_way\": \"responsible_person\",\n \"responsible_person\": {\n \"#empty\": False\n }\n }, {\n \"lawsuit_submission_way\": {\n \"#ne\": \"responsible_person\"\n }\n }]\n },\n \"error\": {\n \"field\": \"responsible_person\",\n \"code\": 5\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"lawsuit_submission_way\": \"responsible_person\",\n \"#not\": {\n \"submission_way\": \"responsible_person\",\n \"court_include\": True,\n },\n \"lawsuit_submission_responsible_person\": {\n \"#empty\": False\n }\n }, {\n \"#not\": {\n \"lawsuit_submission_way\": \"responsible_person\",\n \"#not\": {\n \"submission_way\": \"responsible_person\",\n \"court_include\": True,\n },\n }\n }]\n },\n \"error\": {\n \"field\": \"lawsuit_submission_responsible_person\",\n \"code\": 5\n }\n }]\n }\n\n OSAGO_LAWSUIT_TEMPLATE = {\n \"template_name\": \"template_osago2_4\",\n \"file_name\": _get_test_resource_name(config, \"osago/isk.tex\"),\n \"is_strict\": False,\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_LAWSUIT\n }\n\n OSAGO_OSAGO_COURT_MAIL_LIST_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_COURT_MAIL_LIST,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Опись для ценного письма\",\n \"conditions\": {\n \"lawsuit_submission_way\": \"mail\"\n },\n \"batch_statuses\": [\"claim\"],\n \"collections\": [\"claim_collection\"],\n \"fields\": [\n {\n \"name\": \"court_name\",\n \"type\": \"DocTextField\",\n \"max_length\": 200,\n \"required\": True\n },\n {\n \"name\": \"court_address\",\n \"type\": \"DocTextField\",\n \"max_length\": 1024,\n \"required\": True\n },\n {\n \"name\": \"lawsuit_submission_way\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ApplicationTypeEnum\",\n \"required\": True\n },\n RESULT_FIELDS_FIELD\n ]\n }\n\n OSAGO_OSAGO_COURT_MAIL_LIST_TEMPLATE = {\n \"template_name\": \"template_osago2_4\",\n \"file_name\": _get_test_resource_name(config, \"osago/court_mail_list.tex\"),\n \"is_strict\": False,\n \"doc_name\": DocumentTypeEnum.DT_OSAGO_COURT_MAIL_LIST\n }\n ################################################################################################################\n\n OSAGO_SCHEMA = {\n \"doc_name\": DocumentBatchTypeEnum.DBT_OSAGO,\n \"fields\": [\n {\n \"name\": \"crash_date\", # дата аварии без времени в формате ISO_8601 — \"2005-08-09T18:31:42\"\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": True\n },\n {\n \"name\": \"policy_called\", # признак вызова ГИБДД на место проишествия\n \"type\": \"DocBoolField\",\n \"required\": True\n },\n {\n \"name\": \"all_have_osago\", # признак наличия у всех полисов ОСАГО\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"own_insurance_company\",# признак обращения в собственную страховую\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"have_osago\", # признак отсутствия полиса у конкретной стороны\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"CrashSubjectEnum\",\n 
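# A note on the date fields: the comments cite full ISO 8601 stamps like
# "2005-08-09T18:31:42", but every DocDateTimeField here that declares an
# input_format uses "%Y-%m-%d", so only the date part is ever parsed.
# The parse these fields imply, using only the stdlib:
from datetime import datetime

def parse_doc_date(raw):
    return datetime.strptime(raw, "%Y-%m-%d")

assert parse_doc_date("2014-10-01") == datetime(2014, 10, 1)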
\"required\": False\n },\n {\n \"name\": \"problem_type\", # тип проблемы: отказ или недоплата\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"OsagoReasonEnum\",\n \"required\": False\n },\n {\n \"name\": \"refusal_reason\", # причина отказа\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"OsagoRefusalReasonEnum\",\n \"required\": False\n },\n {\n \"name\": \"notice_has_mistakes\", # признак того, что в извещении были ошибки\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"got_cash\", # признак того, что страховая возместила деньгами\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"victim_owner\", #идентификатор физического лица владельца пострадавшего автомобиля\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False\n },\n {\n \"name\": \"owner_as_victim_driver\", #(true/false) признак того, что владелец автомобиля был за рулем пострадавшей машины\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"victim_driver\", #идентификатор физического лица водителя пострадавшего автомобиля\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"birthdate\": {\"required\": False},\n \"birthplace\": {\"required\": False}\n }\n },\n {\n \"name\": \"victim_car_brand\", #марка пострадавшего автомобиля (строка, до 100 символов)\n \"type\": \"DocTextField\",\n \"required\": False,\n \"max_length\": 100\n },\n {\n \"name\": \"victim_car_number\", #автомобильный номер пострадавшего автомобиля (русские или латинские буквы и цифры, строка, до 10 символов)\n \"type\": \"DocTextField\",\n \"required\": False,\n \"max_length\": 15,\n \"allowed_re\": ur\"^[0-9а-яА-Яa-zA-Z]*$\"\n },\n {\n \"name\": \"guilty_owner\", #идентификатор физического лица владельца виновного автомобиля\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"birthdate\": {\"required\": False},\n \"birthplace\": {\"required\": False},\n \"address\": {\"required\": True}\n }\n },\n {\n \"name\": \"owner_as_guilty_driver\", #(true/false) признак того, что владелец автомобиля был за рулем виновной машины\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"guilty_driver\", #идентификатор физического лица водителя виновного автомобиля\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"birthdate\": {\"required\": False},\n \"birthplace\": {\"required\": False}\n }\n },\n {\n \"name\": \"guilty_car_brand\", #марка виновного автомобиля (строка, до 100 символов)\n \"type\": \"DocTextField\",\n \"required\": False,\n \"max_length\": 100,\n \"min_length\": 0\n },\n {\n \"name\": \"guilty_car_number\", #автомобильный номер виновного автомобиля (русские или латинские буквы и цифры, строка, до 10 символов)\n \"type\": \"DocTextField\",\n \"required\": False,\n \"max_length\": 15,\n \"min_length\": 0,\n \"allowed_re\": ur\"^[0-9а-яА-Яa-zA-Z]*$\"\n },\n {\n \"name\": \"other_victims\", #массив других пострадавших\n \"type\": \"DocArrayField\",\n \"cls\": \"CarWithDriver\",\n \"required\": False\n },\n {\n \"name\": \"insurance_company_region\", #регион, в котором обращались за возмещением (см. 
субъекты РФ)\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"RFRegionsEnum\",\n \"required\": False\n },\n {\n \"name\": \"policy_series\", #серия страхового полиса (строка, 3 символа)\n \"type\": \"DocTextField\",\n \"required\": False,\n \"max_length\": 3,\n \"allowed_re\": ur\"^[0-9]*$\",\n \"min_length\": 0\n },\n {\n \"name\": \"policy_number\", #номер полиса, по которому обращались в страховую (10 цифр)\n \"type\": \"DocTextField\",\n \"required\": False,\n \"max_length\": 10,\n \"allowed_re\": ur\"^[0-9]*$\",\n \"min_length\": 0\n },\n {\n \"name\": \"other_insurance\", #(true/false) признак ошибки автоматического определения страховой\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"insurance_name\", # название страховой компании для подготовки претензии\n \"type\": \"DocTextField\",\n \"required\": False,\n },\n {\n \"name\": \"insurance_id\", # идентификатор страховой компании (строка до 20 символов)\n \"type\": \"DocTextField\",\n \"required\": False,\n \"min_length\": 0,\n \"max_length\": 30\n },\n {\n \"name\": \"other_date\", #(true/false) признак ошибки автоматического определения даты полиса\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"policy_date\", #дата выдачи полиса ОСАГО без времени в формате ISO_8601 — \"2005-08-09T18:31:42\"\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": False\n },\n {\n \"name\": \"first_claim_date\", #дата первого заявления о наступлении аварии в формате ISO_8601 — \"2005-08-09T18:31:42\"\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": False\n },\n {\n \"name\": \"independent_expertise_number\", #номер незавизимой экспертизы (строка, до 20 символов)\n \"type\": \"DocTextField\",\n \"required\": False,\n \"max_length\": 20,\n \"min_length\": 0\n },\n {\n \"name\": \"independent_expertise_sum\", #ущерб, насчитанный независимой экспертизы (число с копейками — float)\n \"type\": \"DocDecimalField\",\n \"required\": True\n },\n {\n \"name\": \"independent_expertise_cost\", #стоимость независимой экспертизы (число с копейками — float)\n \"type\": \"DocDecimalField\",\n \"required\": True\n },\n {\n \"name\": \"compensation_sum\", #сумма компенсации от страховой (число с копейками — float)\n \"type\": \"DocDecimalField\",\n \"required\": True\n },\n {\n \"name\": \"add_person_to_claim\", #(true/false) добавить в иск физическое лицо\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"docs_got\", #список документов на руках (значения: euro, act_crash, act_review, act_insurance, insurance_denial, insurance_repair_cost, police_act, police_statement, police_protocol)\n \"type\": \"DocArrayField\",\n \"cls\": \"DocEnumField\",\n \"subfield_kwargs\": {\n \"enum_cls\": \"OsagoDocTypeEnum\"\n },\n \"required\": False,\n \"default\": []\n },\n {\n \"name\": \"insurance_case_number\", #номер страхового дела (до 20 цифр)\n \"type\": \"DocTextField\",\n \"required\": False,\n \"max_length\": 20,\n \"min_length\": 0\n },\n {\n \"name\": \"submission_way\", #способ подачи претензии в страховую (oneself, responsible_person, mail)\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ApplicationTypeEnum\",\n \"required\": False\n },\n {\n \"name\": \"submission_branch_id\", # идентификатор филиала страховой для подачи претензии\n \"type\": \"DocTextField\",\n \"required\": False,\n },\n {\n \"name\": \"use_other_submission_address\", #(true/false) использовать другой адрес для подачи документов\n \"type\": \"DocBoolField\",\n 
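# The "allowed_re" patterns whitelist characters rather than impose a
# full format: car plates accept digits plus Cyrillic and Latin letters,
# while policy numbers accept digits only (the source is Python 2, hence
# the ur"" raw unicode literals). The same plate check in plain re;
# plate_is_valid is an illustrative helper, not part of the schema engine:
import re

PLATE_RE = re.compile(u"^[0-9а-яА-Яa-zA-Z]*$")

def plate_is_valid(plate, max_length=15):
    return len(plate) <= max_length and PLATE_RE.match(plate) is not None

assert plate_is_valid(u"А123ВС777")       # Cyrillic letters are allowed
assert not plate_is_valid(u"А123-ВС777")  # separators are not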
\"required\": False\n },\n {\n \"name\": \"submission_address\", #адрес в России\n \"type\": \"DocTextField\",\n \"required\": False,\n },\n {\n \"name\": \"obtain_way\", #способ получения (oneself, responsible_person, mail)\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ApplicationTypeEnum\",\n \"required\": False\n },\n {\n \"name\": \"responsible_person\", #идентификатор физического лица доверенного лица подающего (для подачи и/или полчуения через представителя)\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False\n },\n {\n \"name\": \"court_include\", #(true/false) признак включения представлений интересов в доверенность\n \"type\": \"DocBoolField\",\n \"required\": False\n },\n {\n \"name\": \"obtain_address_type\", #тип адрес, на который будет получен ответ (owner_address, responsible_person_address, other_address)\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ObtainAddressEnum\",\n \"required\": False\n },\n {\n \"name\": \"obtain_address\", #адрес в России\n \"type\": \"DocTextField\",\n \"required\": False\n },\n {\n \"name\": \"bik_account\", # БИК банка для перечисления средств\n \"type\": \"DocTextField\",\n \"required\": False,\n \"min_length\": 9,\n \"allowed_re\": ur\"^[0-9]*$\"\n },\n {\n \"name\": \"account_number\", # номер расчетного счета\n \"type\": \"DocTextField\",\n \"required\": False,\n \"min_length\": 20,\n \"allowed_re\": ur\"^[0-9]*$\"\n },\n {\n \"name\": \"police_case\",\n \"type\": \"DocBoolField\",\n \"required\": True\n },\n {\n \"name\": \"pretension_result\", # — результат поданной претензии (success, refuse, partial_success, unknown)\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"PretensionResultEnum\"\n },\n {\n \"name\": \"make_lawsuit\", # — (true/false) признак подачи иска\n \"type\": \"DocBoolField\"\n },\n {\n \"name\": \"compensation_got\", # — сумма компенсации от страховой после претензии (число с копейками)\n \"type\": \"DocDecimalField\"\n },\n {\n \"name\": \"compensation_date\", # — дата перечисления компенсации без времени в формате ISO_8601 — \"2005-08-09T18:31:42\"\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\"\n },\n {\n \"name\": \"pretension_answer_got\", # — (true/false) признак того, что страховая прислала ответ на претензию\n \"type\": \"DocBoolField\"\n },\n {\n \"name\": \"lawsuit_date\", # — дата в иске без времени в формате ISO_8601 — \"2005-08-09T18:31:42\"\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\"\n },\n {\n \"name\": \"insurance_lawsuit\", #[] — список претензий в иске к страховой, значения:\n \"type\": \"DocArrayField\",\n \"cls\": \"DocEnumField\",\n \"subfield_kwargs\": {\n \"enum_cls\": \"InsuranceLawsuitEnum\"\n }\n },\n {\n \"name\": \"moral_damages\", # — сумма для компенсации морального ущерба (число с копейками)\n \"type\": \"DocDecimalField\"\n },\n {\n \"name\": \"notary_costs\", # — сумма нотариальных затрат (число с копейками)\n \"type\": \"DocDecimalField\"\n },\n {\n \"name\": \"home_court\", # — (true/false) признак подачи иска по месту регистрации исца\n \"type\": \"DocBoolField\"\n },\n {\n \"name\": \"court_name\", # — наименование суда (до 200 символов)\n \"type\": \"DocTextField\",\n \"max_length\": 200\n },\n {\n \"name\": \"court_address\", # — адрес суда (до 1024 символов)\n \"type\": \"DocTextField\",\n \"max_length\": 1024\n },\n {\n \"name\": \"lawsuit_submission_way\", # — способ подачи документов в суд (oneself, mail, responsible_person)\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ApplicationTypeEnum\"\n },\n {\n 
\"name\": \"lawsuit_submission_responsible_person\", # — идентификатор физического лица доверенного лица, подающего иск\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\"\n },\n {\n \"name\": \"insurance_returned_docs\", #[] — список до сих пор не полученных документов (см. выше docs_got)\n \"type\": \"DocArrayField\",\n \"cls\": \"DocEnumField\",\n \"subfield_kwargs\": {\n \"enum_cls\": \"OsagoDocTypeEnum\"\n }\n },\n {\n \"name\": \"court_attendance\", # — кто будет присутствовать на суде (oneself/nobody/responsible_person)\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"CourtAttendanceEnum\"\n },\n {\n \"name\": \"attached_to_lawsuit_docs_pagecount\", #[] — список документов с количеством страниц\n \"type\": \"DocArrayField\",\n \"cls\": \"DocLawSuitDocPageCount\"\n },\n {\n \"name\": \"lawsuit_number\", # — номер судебного дела (до 20 символов)\n \"type\": \"DocTextField\",\n \"min_length\": 1,\n \"max_length\": 20\n },\n {\n \"name\": \"insurance_execution_act_responsible_person\", # — (true/false) признак того, что ИЛ к страховой надо выдать на представителю\n \"type\": \"DocBoolField\"\n },\n {\n \"name\": \"insurance_execution_act_obtain_way\", # — способ получения ИЛ к страховой (oneself, mail, responsible_person, no_obtain)\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ActObtainWayEnum\"\n },\n {\n \"name\": \"guilty_execution_act_responsible_person\", # — (true/false) признак того, что ИЛ к виновнику надо выдать на представителю\n \"type\": \"DocBoolField\"\n },\n {\n \"name\": \"guilty_execution_act_obtain_way\", # — способ получения ИЛ к виновнику (oneself, mail, responsible_person, no_obtain)\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"ActObtainWayEnum\"\n }\n ]\n }\n\n ALSV = {\n \"#max\": [{\n \"#value\": 0\n }, {\n \"#sub\": [{\n \"#field\": \"independent_expertise_sum\"\n }, {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"policy_called\": True\n },\n \"value\": {\n \"#cases\": {\n \"set\": {\n \"datetime_01_10_2014\": {\n \"#datetime\": {\"year\": 2014, \"month\": 10, \"day\": 1}\n }\n },\n \"list\": [{\n \"conditions\": {\n \"policy_date\": {\n \"#lt\": \"@datetime_01_10_2014\"\n }\n },\n 'value': {\"#value\": 120000}\n }],\n \"default\": {\"value\": {\"#value\": 400000}}\n }\n }\n }],\n \"default\": {\n \"value\": {\n \"#cases\": {\n \"set\": {\n \"datetime_01_10_2014\": {\n \"#datetime\": {\"year\": 2014, \"month\": 10, \"day\": 1}\n }\n },\n \"list\": [{\n \"conditions\": {\n \"policy_date\": {\n \"#lt\": \"@datetime_01_10_2014\"\n }\n },\n 'value': {\"#value\": 25000}\n }],\n \"default\": {\"value\": {\"#value\": 50000}}\n }\n }\n }\n }\n }]\n }]\n }\n\n LFV = {\n \"#cases\": {\n \"set\": {\n \"_above_limits_sum\": ALSV\n },\n \"list\": [{\n \"conditions\": {\n \"_above_limits_sum\": {\n \"#lte\": 20000\n }\n },\n \"value\": { # max(0,04 * above_limits_sum, 400);\n \"#max\": [{\n \"#mul\": [{\n \"#value\": 0.04\n }, {\n \"#field\": \"_above_limits_sum\"\n }]\n }, {\n \"#value\": 400\n }]\n }\n }, {\n \"conditions\": {\n \"_above_limits_sum\": {\n \"#gt\": 20000,\n \"#lte\": 100000\n }\n },\n \"value\": { # 800 + 0,03 * (above_limits_sum - 20 000);\n \"#sum\": [{\n \"#value\": 800\n }, {\n \"#mul\": [{\n \"#value\": 0.03\n }, {\n \"#sub\": [{\n \"#field\": \"_above_limits_sum\"\n }, {\n \"#value\": 20000\n }]\n }]\n }]\n }\n }, {\n \"conditions\": {\n \"_above_limits_sum\": {\n \"#gt\": 100000,\n \"#lte\": 200000\n }\n },\n \"value\": { # 3 200 + 0,02 * (above_limits_sum - 100 000);\n \"#sum\": [{\n \"#value\": 3200\n }, {\n \"#mul\": [{\n 
\"#value\": 0.02\n }, {\n \"#sub\": [{\n \"#field\": \"_above_limits_sum\"\n }, {\n \"#value\": 100000\n }]\n }]\n }]\n }\n }, {\n \"conditions\": {\n \"_above_limits_sum\": {\n \"#gt\": 200000,\n \"#lte\": 1000000\n }\n },\n \"value\": { # 5 200 + 0,01 * (above_limits_sum - 200 000);\n \"#sum\": [{\n \"#value\": 5200\n }, {\n \"#mul\": [{\n \"#value\": 0.01\n }, {\n \"#sub\": [{\n \"#field\": \"_above_limits_sum\"\n }, {\n \"#value\": 200000\n }]\n }]\n }]\n }\n }],\n \"default\": {\n \"value\": { # min(13 200 + 0,005 * (above_limits_sum - 1 000 000), 60 000 рублей);\n \"#min\": [{\n \"#sum\": [{\n \"#value\": 13200\n }, {\n \"#mul\": [{\n \"#value\": 0.005\n }, {\n \"#sub\": [{\n \"#field\": \"_above_limits_sum\"\n }, {\n \"#value\": 1000000\n }]\n }]\n }]\n }, {\n \"#value\": 60000\n }]\n }\n }\n }\n }\n\n _2_if_add_person_to_claim_else_1 = {\n \"#cases\": {\n \"list\": [\n {\"conditions\": {\"add_person_to_claim\": False}, \"value\": 1},\n ],\n \"default\": 2\n }\n }\n\n get_lawsuit_doc_page_count = lambda doc_name: {\n \"#sum\": [{\n \"#value\": 0\n }, {\n \"#aggregate\": {\n \"field\": {\n \"#array_mapping\": {\n \"array_source_field\": {\n \"#field\": \"attached_to_lawsuit_docs_pagecount\"\n },\n \"filter\": {\n \"<loop_item>->page\": doc_name\n }\n }\n\n },\n \"attr\": \"pagecount\",\n \"operator\": \"add\"\n }\n }]\n }\n\n OSAGO_RESULT_FIELDS = [\n UNDERPAY_SUM_FIELD,\n {\n \"name\": \"above_limits_sum\",\n \"type\": \"calculated\",\n \"required\": False,\n \"field_type\": \"DocDecimalField\",\n \"suppress_validation_errors\": True,\n \"value\": ALSV\n },\n {\n \"name\": \"insurance_name\", # название страховой компании для подготовки претензии\n \"type\": \"DocTextField\",\n \"required\": False,\n },\n {\n \"name\": \"insurance_id\", # идентификатор страховой компании (строка до 20 символов)\n \"type\": \"DocTextField\",\n \"required\": False,\n \"min_length\": 1,\n \"max_length\": 30\n },\n {\n \"name\": \"policy_date\", #дата выдачи полиса ОСАГО без времени в формате ISO_8601 — \"2005-08-09T18:31:42\"\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": False\n },\n {\n \"name\": \"region_prepositional\",\n \"type\": \"calculated\",\n \"field_type\": \"DocTextField\",\n 'value': {\n \"#morpher\": {\n \"word\": \"@insurance_company_region\",\n \"case\": \"pra\" # предложный падеж\n }\n }\n }, {\n \"name\": \"responsible_person_dative\",\n \"type\": \"calculated\",\n \"field_type\": \"DocTextField\",\n 'value': {\n \"#morpher\": {\n \"word\": {\n \"#field\": \"responsible_person->full_name\"\n },\n \"case\": \"dat\" # дательный падеж\n }\n }\n },\n {\n \"name\": \"region\",\n \"type\": \"calculated\",\n \"field_type\": \"DocEnumField\",\n \"enum_cls\": \"RFRegionsEnum\",\n \"required\": False,\n \"value\": {\n \"#field\": \"insurance_company_region\"\n # \"#cases\": {\n # \"set\": {\n # \"db_submission_address\": {\n # \"#fetch_db_table_row\": {\n # \"table_name\": \"car_assurance_branch\",\n # \"id\": \"@submission_branch_id\",\n # \"field_name\": \"address\"\n # }\n # }\n # },\n # \"list\": [{\n # \"conditions\": {\n # \"use_other_submission_address\": True\n # },\n # \"value\": {\n # \"#field\": \"submission_address->region\"\n # }\n # }],\n # \"default\": {\n # \"value\": {\n # \"#field\": \"db_submission_address->region\"\n # }\n # }\n # }\n }\n },\n {\n \"name\": \"limits_sum\",\n \"type\": \"calculated\",\n \"required\": False,\n \"field_type\": \"DocDecimalField\",\n \"suppress_validation_errors\": True,\n \"value\": {\n \"#cases\": {\n 
\"list\": [{\n \"conditions\": {\n \"policy_called\": True\n },\n \"value\": {\n \"#cases\": {\n \"set\": {\n \"datetime_01_10_2014\": {\n \"#datetime\": {\"year\": 2014, \"month\": 10, \"day\": 1}\n }\n },\n \"list\": [{\n \"conditions\": {\n \"policy_date\": {\n \"#lt\": \"@datetime_01_10_2014\"\n }\n },\n 'value': {\"#value\": 120000}\n }],\n \"default\": {\"value\": {\"#value\": 400000}}\n }\n }\n }],\n \"default\": {\n \"value\": {\n \"#cases\": {\n \"set\": {\n \"datetime_01_10_2014\": {\n \"#datetime\": {\"year\": 2014, \"month\": 10, \"day\": 1}\n }\n },\n \"list\": [{\n \"conditions\": {\n \"policy_date\": {\n \"#lt\": \"@datetime_01_10_2014\"\n }\n },\n 'value': {\"#value\": 25000}\n }],\n \"default\": {\"value\": {\"#value\": 50000}}\n }\n }\n }\n }\n }\n },\n {\n \"name\": \"legal_fee\",\n \"type\": \"calculated\",\n \"required\": False,\n \"field_type\": \"DocDecimalField\",\n \"value\": LFV\n },\n {\n \"name\": \"insurance_penalty\",\n \"type\": \"calculated\",\n \"required\": False,\n \"field_type\": \"DocDecimalField\",\n \"depends_on\": [\"underpay_sum\"],\n \"value\": {\n \"#max\": [{\n \"#value\": 0\n }, {\n \"#cases\": {\n \"set\": {\n \"_count_days\": {\n \"#cases\": {\n \"set\": {\n \"_lawsuit_date_or_today\": {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\"lawsuit_date\": {\"#empty\": True}},\n \"value\": {\"#datetime\": \"#now\"}\n }],\n \"default\": {\"value\": {\"#field\": \"lawsuit_date\"}}\n }\n },\n \"_first_claim_date_or_today\": {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\"first_claim_date\": {\"#empty\": True}},\n \"value\": {\"#datetime\": \"#now\"}\n }],\n \"default\": {\"value\": {\"#field\": \"first_claim_date\"}}\n }\n },\n \"_compensation_date_or_today\": {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\"compensation_date\": {\"#empty\": True}},\n \"value\": {\"#datetime\": \"#now\"}\n }],\n \"default\": {\"value\": {\"#field\": \"compensation_date\"}}\n }\n }\n },\n \"list\": [{\n \"conditions\": {\n \"pretension_result\": \"success\",\n \"make_lawsuit\": True\n },\n \"value\": {\n \"#sub\": [{\n \"#field\": \"_compensation_date_or_today\"\n }, {\n \"#field\": \"_first_claim_date_or_today\"\n }, {\n \"#timedelta\": {\n \"days\": 20\n }\n }]\n }\n }],\n \"default\": {\n \"value\": {\n \"#sub\": [{\n \"#field\": \"_lawsuit_date_or_today\"\n }, {\n \"#field\": \"_first_claim_date_or_today\"\n }, {\n \"#timedelta\": {\n \"days\": 20\n }\n }]\n }\n }\n }\n },\n \"_count_days_compensation\": {\n \"#cases\": {\n \"set\": {\n \"_compensation_date_or_today\": {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\"compensation_date\": {\"#empty\": True}},\n \"value\": {\"#datetime\": \"#now\"}\n }],\n \"default\": {\"value\": {\"#field\": \"compensation_date\"}}\n }\n }\n },\n \"list\": [{\n \"conditions\": {\n \"first_claim_date\": {\"#empty\": True}\n },\n \"value\": {\"#timedelta\": {\"days\": 0}}\n }],\n \"default\": {\n \"value\": {\n \"#sub\": [{\n \"#field\": \"_compensation_date_or_today\"\n }, {\n \"#field\": \"first_claim_date\"\n }, {\n \"#timedelta\": {\n \"days\": 20\n }\n }]\n }\n }\n }\n },\n \"_lawsuit_date_sub_compensation_date\": {\n \"#cases\": {\n \"set\": {\n \"_compensation_date_or_today\": {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\"compensation_date\": {\"#empty\": True}},\n \"value\": {\"#datetime\": \"#now\"}\n }],\n \"default\": {\"value\": {\"#field\": \"compensation_date\"}}\n }\n },\n \"_lawsuit_date_or_today\": {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\"lawsuit_date\": {\"#empty\": True}},\n 
\"value\": {\"#datetime\": \"#now\"}\n }],\n \"default\": {\"value\": {\"#field\": \"lawsuit_date\"}}\n }\n },\n },\n \"list\": [],\n \"default\": {\n \"value\": {\n \"#sub\": [{\"#field\": \"_lawsuit_date_or_today\"}, {\"#field\": \"_compensation_date_or_today\"}]\n }\n }\n }\n\n }\n },\n \"list\": [{\n \"conditions\": {\n \"compensation_got\": {\"#gt\": 0},\n \"pretension_result\": \"partial_success\"\n },\n \"value\": {\n \"#sum\": [{\n \"#mul\": [{\n \"#value\": 0.01\n }, {\n \"#field\": \"underpay_sum\"\n }, {\n \"#field\": \"_count_days_compensation->days\"\n }]\n }, {\n \"#mul\": [{\n \"#value\": 0.01\n }, {\n \"#sub\": [{\n \"#field\": \"underpay_sum\"\n }, {\n \"#field\": \"compensation_got\"\n }]\n }, {\n \"#field\": \"_lawsuit_date_sub_compensation_date->days\"\n }]\n }]\n }\n }],\n \"default\": {\n \"value\": {\n \"#mul\": [{\n \"#value\": 0.01\n }, {\n \"#field\": \"underpay_sum\"\n }, {\n \"#field\": \"_count_days->days\"\n }]\n }\n }\n }\n }]\n }\n },\n {\n \"name\": \"lawsuit_cost\",\n \"type\": \"calculated\",\n \"required\": False,\n \"field_type\": \"DocDecimalField\",\n \"depends_on\": [\"insurance_penalty\", \"legal_fee\", \"above_limits_sum\", \"underpay_sum\"],\n \"value\": {\n \"#sum\": [{\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"insurance_underpay_lawsuit\": {\n \"#in\": \"@insurance_lawsuit\"\n }\n },\n \"value\": {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"compensation_got\": {\n \"#gt\": 0\n },\n \"pretension_result\": \"partial_success\"\n },\n \"value\": {\n \"#sub\": [{\n \"#field\": \"underpay_sum\"\n }, {\n \"#field\": \"compensation_got\"\n }]\n }\n }],\n \"default\": {\n \"value\": {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"pretension_result\": \"success\",\n \"make_lawsuit\": True\n },\n \"value\": {\n \"#value\": 0\n }\n }],\n \"default\": {\n \"value\": {\n \"#field\": \"underpay_sum\"\n }\n }\n }\n }\n }\n }\n }\n }],\n \"default\": {\n \"value\": {\"#value\": 0}\n }\n }\n }, {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"insurance_penalty_lawsuit\": {\n \"#in\": \"@insurance_lawsuit\"\n }\n },\n \"value\": {\n \"#field\": \"insurance_penalty\"\n }\n }],\n \"default\": {\n \"value\": {\"#value\": 0}\n }\n }\n }, {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"insurance_expertise_cost_lawsuit\": {\n \"#in\": \"@insurance_lawsuit\"\n }\n },\n \"value\": {\n \"#field\": \"independent_expertise_cost\"\n }\n }],\n \"default\": {\n \"value\": {\"#value\": 0}\n }\n }\n }, {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"moral_damages\": {\n \"#gt\": 0\n }\n },\n \"value\": {\n \"#field\": \"moral_damages\"\n }\n }],\n \"default\": {\n \"value\": {\"#value\": 0}\n }\n }\n }, {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"notary_costs\": {\n \"#gt\": 0\n }\n },\n \"value\": {\n \"#field\": \"notary_costs\"\n }\n }],\n \"default\": {\n \"value\": {\"#value\": 0}\n }\n }\n }, {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"add_person_to_claim\": True\n },\n \"value\": {\n \"#sum\": [{\n \"#field\": \"legal_fee\"\n }, {\n \"#field\": \"above_limits_sum\"\n }]\n }\n }],\n \"default\": {\n \"value\": {\"#value\": 0}\n }\n }\n }]\n }\n },\n {\n \"name\": \"insufficient_docs\",\n \"type\": \"calculated\",\n \"field_type\": \"DocArrayField\",\n \"required\": False,\n \"cls\": \"DocEnumField\",\n \"subfield_kwargs\": {\n \"enum_cls\": \"OsagoDocTypeEnum\"\n },\n \"value\": {\n \"#array_mapping\": {\n \"source_array\": REQUIRED_DOCS_FIELD,\n \"filter\": [{\n \"#not\": {\n 
\"<loop_item>\": OsagoDocTypeEnum.ODT_ACT_INSURANCE_CASE,\n \"problem_type\": {\"#ne\": \"underpay\"}\n }\n }]\n }\n }\n },\n {\n \"name\": \"attached_to_lawsuit_docs\",\n \"type\": \"calculated\",\n \"field_type\": \"DocArrayField\",\n \"required\": False,\n \"cls\": \"DocEnumField\",\n \"subfield_kwargs\": {\n \"enum_cls\": \"OsagoDocTypeEnum\"\n },\n \"value\": {\n \"#make_array\": [{\n \"value\": \"inquire_crash\",\n \"conditions\": {\n \"#or\": [{\n \"inquire_crash\": {\n \"#in\": \"@docs_got\"\n }\n }, {\n \"inquire_crash\": {\n \"#in\": \"@insurance_returned_docs\"\n }\n }]\n }\n }, {\n \"value\": \"notice_crash\",\n \"conditions\": {\n \"#or\": [{\n \"notice_crash\": {\n \"#in\": \"@docs_got\"\n }\n }, {\n \"notice_crash\": {\n \"#in\": \"@insurance_returned_docs\"\n }\n }]\n }\n }, {\n \"value\": \"act_insurance_case\",\n \"conditions\": {\n \"#or\": [{\n \"act_insurance_case\": {\n \"#in\": \"@docs_got\"\n }\n }, {\n \"act_insurance_case\": {\n \"#in\": \"@insurance_returned_docs\"\n }\n }]\n }\n }, {\n \"value\": \"insurance_denial\",\n \"conditions\": {\n \"#or\": [{\n \"insurance_denial\": {\n \"#in\": \"@docs_got\"\n }\n }, {\n \"insurance_denial\": {\n \"#in\": \"@insurance_returned_docs\"\n }\n }],\n \"problem_type\": \"refusal\"\n }\n }, {\n \"value\": \"police_statement\",\n \"conditions\": {\n \"#or\": [{\n \"police_statement\": {\n \"#in\": \"@docs_got\"\n }\n }, {\n \"police_statement\": {\n \"#in\": \"@insurance_returned_docs\"\n }\n }],\n \"police_case\": True\n }\n }, {\n \"value\": \"police_protocol\",\n \"conditions\": {\n \"#or\": [{\n \"police_protocol\": {\n \"#in\": \"@docs_got\"\n }\n }, {\n \"police_protocol\": {\n \"#in\": \"@insurance_returned_docs\"\n }\n }]\n }\n }, {\n \"value\": \"case_init_refusal\",\n \"conditions\": {\n \"#or\": [{\n \"case_init_refusal\": {\n \"#in\": \"@docs_got\"\n }\n }, {\n \"case_init_refusal\": {\n \"#in\": \"@insurance_returned_docs\"\n }\n }],\n \"police_case\": False\n }\n }, {\n \"value\": \"pretension_answer\",\n \"conditions\": {\n \"pretension_answer_got\": True\n }\n }, {\n \"value\": \"notary_pay_act\",\n \"conditions\": {\n \"notary_costs\": {\n \"#gt\": 0\n }\n }\n }, {\n \"value\": \"bank_statement\",\n \"conditions\": {\n \"#or\": [{\n \"problem_type\":\"underpay\",\n }, {\n \"pretension_result\": \"partial_success\",\n }, {\n \"pretension_result\": \"success\"\n }]\n }\n },\n \"policy_osago\",\n \"expertise_contract\",\n \"expertise_report\"]\n }\n }, {\n \"name\": \"court_lawsuit_docs\",\n \"type\": \"calculated\",\n \"field_type\": \"DocArrayField\",\n \"cls\": \"CourtLawsuitDocInfo\",\n \"depends_on\": [\"add_person_to_claim\",\"attached_to_lawsuit_docs\", \"insufficient_docs\"],\n \"value\": {\n \"#make_array\": [{\n '#object': {\n \"doc_name\": \"lawsuit\",\n \"originals\": 1,\n \"copies\": {\n \"#cases\": {\n \"list\": [\n {\"conditions\": {\"add_person_to_claim\": False, \"lawsuit_submission_way\": \"mail\"}, \"value\": 1},\n {\"conditions\": {\"add_person_to_claim\": True, \"lawsuit_submission_way\": {\"#ne\": \"mail\"}}, \"value\": 3},\n ],\n \"default\": 2\n }\n },\n \"title\": u\"Исковое заявление\"\n },\n }, {\n \"conditions\": {\n \"court_attendance\": \"nobody\"\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"claim_court_absent\",\n \"originals\": 1,\n \"copies\": 0,\n \"title\": u\"Заявление об отсутствии на заседании\",\n \"pagecount\": 1\n }\n }\n }, {\n \"conditions\": {\n \"lawsuit_submission_way\": \"mail\"\n },\n \"value\": {\n '#object': {\n \"doc_name\": 
\"victim_owner_passport_copy\",\n \"originals\": 1,\n \"copies\": 0,\n \"title\": u\"Нотариально заверенная копия паспорта владельца авто\",\n \"pagecount\": 0 # not needed\n }\n }\n }, {\n \"conditions\": {\n \"inquire_crash\": {\"#in\": \"@attached_to_lawsuit_docs\"}\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"inquire_crash\",\n \"originals\": 1,\n \"copies\": _2_if_add_person_to_claim_else_1,\n \"title\": u\"Справка о дорожно-транспортном происшествии\",\n \"pagecount\": get_lawsuit_doc_page_count(\"inquire_crash\")\n }\n }\n }, {\n \"conditions\": {\n \"police_protocol\": {\"#in\": \"@attached_to_lawsuit_docs\"}\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"police_protocol\",\n \"originals\": 1,\n \"copies\": _2_if_add_person_to_claim_else_1,\n \"title\": u\"Протокол об административном правонарушении\",\n \"pagecount\": get_lawsuit_doc_page_count(\"police_protocol\")\n }\n }\n }, {\n \"conditions\": {\n \"police_statement\": {\"#in\": \"@attached_to_lawsuit_docs\"}\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"police_statement\",\n \"originals\": 1,\n \"copies\": _2_if_add_person_to_claim_else_1,\n \"title\": u\"Постановление по делу об административном правонарушении\",\n \"pagecount\": get_lawsuit_doc_page_count(\"police_statement\")\n }\n }\n }, {\n \"conditions\": {\n \"case_init_refusal\": {\"#in\": \"@attached_to_lawsuit_docs\"}\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"case_init_refusal\",\n \"originals\": 1,\n \"copies\": _2_if_add_person_to_claim_else_1,\n \"title\": u\"Определение об отказе в возбуждении дела об административном правонарушении\",\n \"pagecount\": get_lawsuit_doc_page_count(\"case_init_refusal\")\n }\n }\n }, {\n \"conditions\": {\n \"notice_crash\": {\"#in\": \"@attached_to_lawsuit_docs\"}\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"notice_crash\",\n \"originals\": 1,\n \"copies\": _2_if_add_person_to_claim_else_1,\n \"title\": u\"Извещение о дорожно-транспортном происшествии\",\n \"pagecount\": get_lawsuit_doc_page_count(\"notice_crash\")\n }\n }\n }, {\n \"conditions\": {\n \"insurance_denial\": {\"#in\": \"@attached_to_lawsuit_docs\"}\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"insurance_denial\",\n \"originals\": 1,\n \"copies\": 0,\n \"title\": u\"Отказ в выплате страхового возмещения\",\n \"pagecount\": get_lawsuit_doc_page_count(\"insurance_denial\")\n }\n }\n }, {\n \"conditions\": {\n \"act_insurance_case\": {\"#in\": \"@attached_to_lawsuit_docs\"}\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"act_insurance_case\",\n \"originals\": 1,\n \"copies\": _2_if_add_person_to_claim_else_1,\n \"title\": u\"Акт о страховом случае\",\n \"pagecount\": get_lawsuit_doc_page_count(\"act_insurance_case\")\n }\n }\n }, {\n '#object': {\n \"doc_name\": \"expertise_report\",\n \"originals\": 1,\n \"copies\": _2_if_add_person_to_claim_else_1,\n \"title\": {\n \"#sum\": [{\n \"#value\": u\"Отчет независимой экспертизы №\"\n }, {\n \"#field\": \"independent_expertise_number\"\n }]\n },\n \"pagecount\": get_lawsuit_doc_page_count(\"expertise_report\")\n },\n }, {\n '#object': {\n \"doc_name\": \"expertise_contract\",\n \"originals\": 1,\n \"copies\": _2_if_add_person_to_claim_else_1,\n \"title\": u\"Договор о проведении независимой экспертизы\",\n \"pagecount\": get_lawsuit_doc_page_count(\"expertise_contract\")\n },\n }, {\n '#object': {\n \"doc_name\": \"expertise_receipt\",\n \"originals\": 1,\n \"copies\": _2_if_add_person_to_claim_else_1,\n \"title\": u\"Квитанция об оплате независимой экспертизы\",\n 
\"pagecount\": 0 # not needed\n },\n }, {\n \"conditions\": {\n \"submission_way\": \"mail\"\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"pretension_mail_receipt\",\n \"originals\": 1,\n \"copies\": 0,\n \"title\": u\"Квитанция об оплате почтового отправления (претензии) в страховую\",\n \"pagecount\": 0 # not needed\n }\n }\n }, {\n \"conditions\": {\n \"submission_way\": \"mail\"\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"pretension_mail_list\",\n \"originals\": 1,\n \"copies\": 0,\n \"title\": u\"Опись документов, направленных в страховую (с отметкой почты)\",\n \"pagecount\": {\n \"#rendered_doc_count\": \"osago_mail_list\"\n }\n }\n }\n }, {\n \"conditions\": {\n \"submission_way\": \"mail\"\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"pretension_mail_notify\",\n \"originals\": 1,\n \"copies\": 0,\n \"title\": u\"Уведомление о вручении почтового отправления (претензии) с подтверждением получения\",\n \"pagecount\": 0 # not needed\n }\n }\n }, {\n \"conditions\": {\n \"submission_way\": \"mail\"\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"pretension\",\n \"originals\": 1,\n \"copies\": 0,\n \"title\": u\"Подписаный экземпляр претензии в страховую\",\n \"pagecount\": {\n \"#rendered_doc_count\": \"osago_pretension\"\n }\n }\n }\n }, {\n \"conditions\": {\n \"submission_way\": {\"#ne\": \"mail\"}\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"pretension_insurance_note\",\n \"originals\": 1,\n \"copies\": 0,\n \"title\": u\"Претензия с отметкой страховой о получении\",\n \"pagecount\": {\n \"#rendered_doc_count\": \"osago_pretension\"\n }\n }\n }\n }, {\n \"conditions\": {\n \"submission_way\": \"mail\",\n \"insufficient_docs\": {\n \"#not_empty\": True\n }\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"documents_claim\",\n \"originals\": 1,\n \"copies\": 0,\n \"title\": u\"Подписанный экземпляр заявления в страховую о выдаче документов\",\n \"pagecount\": {\n \"#rendered_doc_count\": \"osago_documents_claim\"\n }\n }\n }\n }, {\n \"conditions\": {\n \"submission_way\": {\"#ne\": \"mail\"},\n \"insufficient_docs\": {\n \"#not_empty\": True\n }\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"documents_claim_insurance_note\",\n \"originals\": 1,\n \"copies\": 0,\n \"title\": u\"Заявление в страховую о выдаче документов с отметкой страховой о получении\",\n \"pagecount\": {\n \"#rendered_doc_count\": \"osago_documents_claim\"\n }\n }\n }\n }, {\n \"conditions\": {\n \"pretension_answer_got\": True\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"pretension_answer\",\n \"originals\": 1,\n \"copies\": 0,\n \"title\": u\"Ответ страховой на претензию\",\n \"pagecount\": get_lawsuit_doc_page_count(\"pretension_answer\")\n }\n }\n }, {\n '#object': {\n \"doc_name\": \"policy_osago_copy\",\n \"originals\": 0,\n \"copies\": _2_if_add_person_to_claim_else_1,\n \"title\": u\"Копия полиса ОСАГО\",\n \"pagecount\": get_lawsuit_doc_page_count(\"policy_osago\")\n },\n }, {\n '#object': {\n \"doc_name\": \"car_certificate\",\n \"originals\": 1,\n \"copies\": 0,\n \"title\": u\"Свидетельство о регистрации ТС\",\n \"pagecount\": 0 # not needed\n },\n }, {\n '#object': {\n \"doc_name\": \"car_passport\",\n \"originals\": 1,\n \"copies\": 0,\n \"title\": u\"Паспорт ТС\",\n \"pagecount\": 0 # not needed\n },\n }, {\n \"conditions\": {\n \"add_person_to_claim\": True\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"legal_fee_receipt\",\n \"originals\": 1,\n \"copies\": 0,\n \"title\": u\"Квитанция об оплате государственной пошлины за суд\",\n 
\"pagecount\": 0 # not needed\n }\n }\n }, {\n \"conditions\": {\n \"lawsuit_submission_way\": \"responsible_person\"\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"trust_court_representation\",\n \"originals\": 1,\n \"copies\": 0,\n \"title\": u\"Нотариально заверенная доверенность на представительство в суде\",\n \"pagecount\": 0 # not needed\n }\n }\n }, {\n \"conditions\": {\n \"submission_way\": \"responsible_person\"\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"trust_submision_obtain_docs\",\n \"originals\": 1,\n \"copies\": 0,\n \"title\": u\"Нотариально заверенная доверенность на представительство в страховой\",\n \"pagecount\": 0 # not needed\n }\n }\n }, {\n \"conditions\": {\n \"submission_way\": \"responsible_person\",\n \"court_include\": True\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"trust_insurance_court\",\n \"originals\": 1,\n \"copies\": 0,\n \"title\": u\"Нотариально заверенная доверенность на представительство в страховой и в суде\",\n \"pagecount\": 0 # not needed\n }\n }\n }, {\n \"conditions\": {\n \"lawsuit_submission_way\": \"mail\"\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"mail_docs_list\",\n \"originals\": 1,\n \"copies\": 0,\n \"title\": u\"Опись документов, которые направляются в суд, с отметкой почты\",\n \"pagecount\": 1 # todo!!!\n }\n }\n }, {\n \"conditions\": {\n \"#or\": [{\n \"problem_type\": \"underpay\"\n }, {\n \"compensation_got\": {\n \"#gt\": 0\n }\n }]\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"bank_statement\",\n \"originals\": 1,\n \"copies\": 0,\n \"title\": u\"Банковские документы, подтверждающие оплату страхового возмещения\",\n \"pagecount\": get_lawsuit_doc_page_count(\"bank_statement\")\n }\n }\n }, {\n \"conditions\": {\n \"notary_costs\": {\n \"#gt\": 0\n }\n },\n \"value\": {\n '#object': {\n \"doc_name\": \"notary_pay_act\",\n \"originals\": 1,\n \"copies\": 0,\n \"title\": u\"Документы, подтверждающие оплату нотариальных услуг\",\n \"pagecount\": get_lawsuit_doc_page_count(\"notary_pay_act\")\n }\n }\n }]\n }\n }\n ]\n\n OSAGO_ACTIONS = {\n 'set_policy_info': {\n 'plugin': 'batch_manager',\n 'action': 'set_result_fields',\n 'config': {\n 'field_name_map': {\n 'insurance_id': 'insurance_id',\n 'insurance_name': 'insurance_name',\n 'policy_date': 'policy_date'\n }\n }\n },\n 'generate_first_stage_docs': {\n 'plugin': 'doc_builder',\n 'action': 'render_group',\n 'config': {\n 'doc_types': [\n DocumentTypeEnum.DT_OSAGO_MAIL_LIST,\n DocumentTypeEnum.DT_OSAGO_PRETENSION,\n DocumentTypeEnum.DT_OSAGO_DOCUMENTS_CLAIM,\n DocumentTypeEnum.DT_OSAGO_TRUST_SUBMISSION_DOCS,\n DocumentTypeEnum.DT_OSAGO_TRUST_OBTAIN_DOCS,\n DocumentTypeEnum.DT_OSAGO_TRUST_SUBMISION_OBTAIN_DOCS\n ]\n }\n },\n 'generate_second_stage_docs': {\n 'plugin': 'doc_builder',\n 'action': 'render_group',\n 'config': {\n 'doc_types': [\n DocumentTypeEnum.DT_OSAGO_CLAIM_COURT_ABSENT,\n DocumentTypeEnum.DT_OSAGO_LAWSUIT,\n DocumentTypeEnum.DT_OSAGO_COURT_MAIL_LIST\n ]\n }\n },\n 'generate_third_stage_docs': {\n 'plugin': 'doc_builder',\n 'action': 'render_group',\n 'config': {\n 'doc_types': [\n DocumentTypeEnum.DT_OSAGO_CLAIM_ALL_EXECUTION_ACT,\n DocumentTypeEnum.DT_OSAGO_CLAIM_GUILTY_EXECUTION_ACT,\n DocumentTypeEnum.DT_OSAGO_CLAIM_INSURANCE_EXECUTION_ACT\n ]\n }\n },\n 'get_policy_info_first_try': {\n 'plugin': 'car_assurance',\n 'action': 'get_policy_info_first_try',\n 'config': {\n 'policy_series_field_name': 'policy_series',\n 'policy_number_field_name': 'policy_number'\n }\n },\n 'get_policy_info_async': {\n 'plugin': 
'car_assurance',\n 'action': 'get_policy_info_async',\n 'config': {\n 'policy_series_field_name': 'policy_series',\n 'policy_number_field_name': 'policy_number'\n }\n },\n 'reset_policy_info': {\n 'plugin': 'batch_manager',\n 'action': 'reset_result_fields',\n 'config': {\n 'fields': ['insurance_id', 'insurance_name', 'policy_date']\n }\n },\n 'touch': {\n 'plugin': 'batch_manager',\n 'action': 'touch',\n 'config': {}\n },\n 'check_and_fix_payments': {\n 'plugin': 'batch_manager',\n 'action': 'check_and_fix_osago_payments',\n 'config': {}\n },\n 'remove_pretension_documents': {\n 'plugin': 'batch_manager',\n 'action': 'remove_documents',\n 'config': {\n 'doc_types': [\n DocumentTypeEnum.DT_OSAGO_MAIL_LIST,\n DocumentTypeEnum.DT_OSAGO_PRETENSION,\n DocumentTypeEnum.DT_OSAGO_DOCUMENTS_CLAIM,\n DocumentTypeEnum.DT_OSAGO_TRUST_SUBMISSION_DOCS,\n DocumentTypeEnum.DT_OSAGO_TRUST_OBTAIN_DOCS,\n DocumentTypeEnum.DT_OSAGO_TRUST_SUBMISION_OBTAIN_DOCS\n ]\n }\n },\n 'remove_claim_documents': {\n 'plugin': 'batch_manager',\n 'action': 'remove_documents',\n 'config': {\n 'doc_types': [\n DocumentTypeEnum.DT_OSAGO_CLAIM_COURT_ABSENT,\n DocumentTypeEnum.DT_OSAGO_LAWSUIT,\n DocumentTypeEnum.DT_OSAGO_COURT_MAIL_LIST\n ]\n }\n },\n 'remove_court_documents': {\n 'plugin': 'batch_manager',\n 'action': 'remove_documents',\n 'config': {\n 'doc_types': [\n DocumentTypeEnum.DT_OSAGO_CLAIM_ALL_EXECUTION_ACT,\n DocumentTypeEnum.DT_OSAGO_CLAIM_GUILTY_EXECUTION_ACT,\n DocumentTypeEnum.DT_OSAGO_CLAIM_INSURANCE_EXECUTION_ACT,\n ]\n }\n },\n 'send_docs_to_user': {\n 'plugin': 'emailer',\n 'action': 'send_email',\n 'condition': {\n \"<batch>->_owner->email\": {\n \"#validator\": {\n \"cls\": \"EmailAddressValidator\",\n \"valid\": True\n }\n },\n \"<batch>->paid\": True\n },\n 'config': {\n 'mail_type': 'osago_docs_ready',\n 'max_retries': 5,\n 'retry_timeout_seconds': [5, 10, 60, 300, 300],\n 'recipients': {\n \"field_type\": \"DocArrayField\",\n \"cls\": \"DocTextField\",\n \"value\": {\n \"#make_array\": [{\n \"#field\": \"<batch>->_owner->email\"\n }]\n }\n },\n 'data': {\n 'crash_date': {\n \"field_type\": \"DocDateTimeField\",\n \"value\": {\n '#field': '<batch>->data->crash_date'\n }\n },\n 'service_schema': {\n \"field_type\": \"DocTextField\",\n \"value\": {\n \"#field\": \"<app_config>->WEB_SCHEMA\"\n }\n },\n 'service_domain': {\n \"field_type\": \"DocTextField\",\n \"value\": {\n \"#field\": \"<app_config>->DOMAIN\"\n }\n },\n 'user_id': {\n \"field_type\": \"DocTextField\",\n \"value\": {\n \"#field\": \"<current_user>->uuid\"\n }\n },\n 'batch_id': {\n \"field_type\": \"DocTextField\",\n \"value\": {\n \"#field\": \"<batch>->id\"\n }\n },\n 'docs': {\n \"field_type\": \"DocArrayField\",\n \"cls\": \"DocJsonField\",\n \"value\": {\n \"#field\": \"<batch>->rendered_docs\"\n }\n }\n }\n }\n },\n 'notify_admin_on_send_docs_to_user_fail': {\n 'plugin': 'emailer',\n 'action': 'send_email',\n 'config': {\n 'mail_type': 'on_send_docs_to_user_fail',\n 'max_retries': 20,\n 'retry_timeout_seconds': [10, 30, 60, 120, 300, 600, 1200, 3600],\n 'recipients': {\n \"field_type\": \"DocArrayField\",\n \"cls\": \"DocTextField\",\n \"value\": {\n \"#field\": \"<app_config>->ADMIN_EMAIL_LIST\"\n }\n },\n 'data': {\n 'user_id': {\n \"field_type\": \"DocTextField\",\n \"value\": {\n \"#field\": \"<previous_event_data>->template_data->user_id\"\n }\n },\n 'mail_type': {\n \"field_type\": \"DocTextField\",\n \"value\": {\n \"#field\": \"<previous_event_data>->mail_type\"\n }\n },\n 'action_dt': {\n \"field_type\": 
\"DocDateTimeField\",\n \"value\": {\n \"#field\": \"<previous_event_data>-><action_dt>\"\n }\n }\n }\n }\n },\n 'schedule_please_finalise_osago_send': {\n 'plugin': 'task_scheduler',\n 'action': 'schedule',\n 'condition': {\n \"<batch>->_owner->email\": {\n \"#validator\": {\n \"cls\": \"EmailAddressValidator\",\n \"valid\": True\n }\n },\n \"<batch>->sent_mails\": {\n \"#not_contain\": \"osago_please_finalise\"\n },\n '<batch>->status': 'pretension'\n },\n 'config': {\n 'action': '%s.send_please_finalise_osago' % DocumentBatchTypeEnum.DBT_OSAGO,\n 'task_id': {\n \"field_type\": \"DocTextField\",\n \"value\": {\n \"#sum\": [{\n \"#value\": \"osago_please_finalise_\"\n }, {\n \"#field\": \"<batch>->id\"\n }]\n }\n },\n 'dt_type': 'exact_time_every_day',\n 'dt_not_earlier': {\n \"field_type\": \"DocDateTimeField\",\n \"value\": {\n \"#sum\": [{\n \"#timedelta\": {\n \"days\": 1\n }\n }, {\n \"#datetime\": \"#now\"\n }]\n }\n },\n 'dt_format': \"%H:%M\", # тоже можно в конфиг\n 'dt_exact_time': {\n \"field_type\": \"DocTextField\",\n \"value\": {\n \"#field\": \"<app_config>->NOT_PAID_BATCH_NOTIFY_DESIRED_TIME\"\n }\n },\n 'dt_time_zone_region': {\n \"field_type\": \"DocTextField\",\n \"value\": {\n \"#field\": \"<batch>->data->insurance_company_region\"\n }\n }\n }\n },\n 'send_please_finalise_osago': {\n 'plugin': 'emailer',\n 'action': 'send_email',\n 'condition': {\n \"<batch>->_owner->email\": {\n \"#validator\": {\n \"cls\": \"EmailAddressValidator\",\n \"valid\": True\n }\n },\n \"<batch>->sent_mails\": {\n \"#not_contain\": \"osago_please_finalise\"\n },\n '<batch>->status': 'pretension'\n },\n 'config': {\n 'mail_type': 'osago_please_finalise',\n 'max_retries': 10,\n 'retry_timeout_seconds': 10,\n 'recipients': {\n \"field_type\": \"DocArrayField\",\n \"cls\": \"DocTextField\",\n \"value\": {\n \"#make_array\": [{\n \"#field\": \"<batch>->_owner->email\"\n }]\n }\n },\n 'data': {\n 'service_schema': {\n \"field_type\": \"DocTextField\",\n \"value\": {\n \"#field\": \"<app_config>->WEB_SCHEMA\"\n }\n },\n 'service_domain': {\n \"field_type\": \"DocTextField\",\n \"value\": {\n \"#field\": \"<app_config>->DOMAIN\"\n }\n },\n 'user_id': {\n \"field_type\": \"DocTextField\",\n \"value\": {\n \"#field\": \"<current_user>->uuid\"\n }\n },\n 'batch_id': {\n \"field_type\": \"DocTextField\",\n \"value\": {\n \"#field\": \"<batch>->id\"\n }\n }\n }\n }\n },\n 'on_sent_please_finalise_osago': {\n 'plugin': 'batch_manager',\n 'action': 'set_batch_fields',\n 'config': {\n 'fields': {\n 'sent_mails': {\n \"field_type\": \"DocArrayField\",\n \"cls\": \"DocTextField\",\n \"value\": {\n \"#push_to_set\": {\n \"source\": {\n \"#field\": \"<batch>->sent_mails\"\n },\n \"new_item\": \"osago_please_finalise\"\n }\n }\n }\n }\n }\n }\n }\n\n OSAGO_TRANSITIONS = [\n {\n \"status\": \"generating_pretension\",\n \"condition\": {\n \"<batch>->status\": \"pretension\",\n \"<event>\": \"go_ahead\",\n },\n \"actions\": ['generate_first_stage_docs']\n },\n {\n \"status\": \"generating_pretension\",\n \"condition\": {\n \"<batch>->status\": \"claim\",\n \"<event>\": \"rerender_pretension\",\n },\n \"actions\": ['generate_first_stage_docs']\n },\n {\n \"status\": \"claim\",\n \"condition\": {\n \"<batch>->status\": \"generating_pretension\",\n \"<event>\": \"doc_group_render_success\"\n },\n \"actions\": ['send_docs_to_user', 'check_and_fix_payments']\n },\n {\n \"condition\": {\n \"<event>\": \"emailer.send_fail\",\n \"<event_data>->mail_type\": \"osago_docs_ready\"\n },\n \"actions\": 
['notify_admin_on_send_docs_to_user_fail']\n },\n {\n \"status\": \"pretension\",\n \"condition\": {\n \"<batch>->status\": \"generating_pretension\",\n \"<event>\": {\n \"#in\": [\"doc_group_render_fail\", \"doc_group_render_canceled\"]\n }\n }\n },\n {\n \"status\": \"generating_claim\",\n \"condition\": {\n \"<batch>->status\": \"claim\",\n \"<event>\": \"go_ahead\",\n },\n \"actions\": ['generate_second_stage_docs']\n },\n {\n \"status\": \"generating_claim\",\n \"condition\": {\n \"<batch>->status\": \"court\",\n \"<event>\": \"rerender_claim\",\n },\n \"actions\": ['generate_second_stage_docs']\n },\n {\n \"status\": \"court\",\n \"condition\": {\n \"<batch>->status\": \"generating_claim\",\n \"<event>\": \"doc_group_render_success\"\n },\n \"actions\": ['check_and_fix_payments']\n },\n {\n \"status\": \"claim\",\n \"condition\": {\n \"<batch>->status\": \"generating_claim\",\n \"<event>\": {\n \"#in\": [\"doc_group_render_fail\", \"doc_group_render_canceled\"]\n }\n },\n \"actions\": ['check_and_fix_payments']\n },\n {\n \"status\": \"pretension\",\n \"condition\": {\n \"<batch>->status\": {\n \"#in\": [\"claim\", \"generating_pretension\"]\n },\n \"<event>\": \"go_back\",\n },\n \"actions\": ['remove_pretension_documents']\n },\n {\n \"status\": \"claim\",\n \"condition\": {\n \"<batch>->status\": {\n \"#in\": [\"court\", \"generating_claim\"]\n },\n \"<event>\": \"go_back\",\n },\n \"actions\": ['remove_claim_documents', 'check_and_fix_payments']\n },\n {\n \"status\": \"generating_court\",\n \"condition\": {\n \"<batch>->status\": {\n \"#in\": [\"court\"]\n },\n \"<event>\": \"go_ahead\",\n },\n \"actions\": ['generate_third_stage_docs']\n },\n {\n \"status\": \"writ\",\n \"condition\": {\n \"<batch>->status\": {\n \"#in\": [\"generating_court\"]\n },\n \"<event>\": \"doc_group_render_success\"\n },\n },\n {\n \"status\": \"court\",\n \"condition\": {\n \"<batch>->status\": {\n \"#in\": [\"writ\", \"generating_court\"]\n },\n \"<event>\": \"go_back\",\n },\n \"actions\": ['remove_court_documents', 'check_and_fix_payments']\n },\n {\n \"status\": \"court\",\n \"condition\": {\n \"<batch>->status\": \"generating_court\",\n \"<event>\": {\n \"#in\": [\"doc_group_render_fail\", \"doc_group_render_canceled\"]\n }\n },\n \"actions\": ['check_and_fix_payments']\n },\n {\n \"condition\": {\n \"<batch>->status\": \"pretension\",\n \"<event>\": \"batch_manager.on_field_changed\",\n \"<event_data>->field_name\": {\n \"#in\": [\"policy_number\", \"policy_series\"]\n }\n },\n \"actions\": ['reset_policy_info'] # , 'get_policy_info_async'\n },\n # {\n # \"condition\": {\n # \"<batch>->status\": \"pretension\",\n # \"<event>\": \"on_policy_info_receive_timeout\",\n # },\n # \"actions\": ['get_policy_info_async']\n # },\n {\n \"condition\": {\n \"<batch>->status\": \"pretension\",\n \"<event>\": \"on_policy_info_received\"\n },\n \"actions\": ['set_policy_info']\n },\n {\n \"condition\": {\n \"<batch>->status\": \"pretension\",\n \"<event>\": \"batch_manager.on_fieldset_changed\",\n },\n \"actions\": ['schedule_please_finalise_osago_send']\n },\n {\n \"condition\": {\n \"<event>\": \"emailer.mail_sent\",\n \"<event_data>->mail_type\": \"osago_please_finalise\"\n },\n \"actions\": ['on_sent_please_finalise_osago']\n },\n ]\n\n VALIDATION_CONDITION = {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"<document>->tried_to_render\": True\n },\n \"value\": {\"#value\": \"strict\"}\n }],\n \"default\": {\n \"value\": {\n \"#value\": \"yes\"\n }\n }\n }\n }\n\n return {\n 
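# --- editor's annotation (not part of the original source) ---
# OSAGO_TRANSITIONS above reads as a flat state machine: the dispatcher
# picks the first entry whose condition matches the current batch status
# and the incoming event, optionally switches the status, then queues the
# listed actions. The sketch below illustrates only that dispatch --
# match/fire are invented names, and condition keys other than
# <batch>->status and <event> (e.g. <event_data>->...) are ignored here.
def match(cond_val, actual):
    # condition values are either literals or {"#in": [...]}
    if isinstance(cond_val, dict) and "#in" in cond_val:
        return actual in cond_val["#in"]
    return cond_val == actual

def fire(batch, event, transitions, run_action):
    for tr in transitions:
        cond = tr.get("condition", {})
        if "<event>" in cond and not match(cond["<event>"], event):
            continue
        if "<batch>->status" in cond and not match(cond["<batch>->status"], batch["status"]):
            continue
        if "status" in tr:
            batch["status"] = tr["status"]  # state switch
        for name in tr.get("actions", []):
            run_action(name, batch)
        return tr
# usage: fire({"status": "claim"}, "go_ahead", OSAGO_TRANSITIONS, run) would
# select the "generating_claim" entry and run generate_second_stage_docs.
# --- end annotation ---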
\"OSAGO_MAIL_LIST_TEMPLATE\": OSAGO_MAIL_LIST_TEMPLATE,\n \"OSAGO_PRETENSION_TEMPLATE\": OSAGO_PRETENSION_TEMPLATE,\n \"OSAGO_DOCUMENTS_CLAIM_TEMPLATE\": OSAGO_DOCUMENTS_CLAIM_TEMPLATE,\n \"OSAGO_TRUST_SUBMISSION_DOCS_TEMPLATE\": OSAGO_TRUST_SUBMISSION_DOCS_TEMPLATE,\n \"OSAGO_TRUST_OBTAIN_DOCS_TEMPLATE\": OSAGO_TRUST_OBTAIN_DOCS_TEMPLATE,\n \"OSAGO_TRUST_SUBMISION_OBTAIN_DOCS_TEMPLATE\": OSAGO_TRUST_SUBMISION_OBTAIN_DOCS_TEMPLATE,\n \"OSAGO_CLAIM_COURT_ABSENT_TEMPLATE\": OSAGO_CLAIM_COURT_ABSENT_TEMPLATE,\n \"OSAGO_CLAIM_ALL_EXECUTION_ACT_TEMPLATE\": OSAGO_CLAIM_ALL_EXECUTION_ACT_TEMPLATE,\n \"OSAGO_CLAIM_GUILTY_EXECUTION_ACT_TEMPLATE\": OSAGO_CLAIM_GUILTY_EXECUTION_ACT_TEMPLATE,\n \"OSAGO_CLAIM_INSURANCE_EXECUTION_ACT_TEMPLATE\": OSAGO_CLAIM_INSURANCE_EXECUTION_ACT_TEMPLATE,\n \"OSAGO_LAWSUIT_TEMPLATE\": OSAGO_LAWSUIT_TEMPLATE,\n \"OSAGO_OSAGO_COURT_MAIL_LIST_TEMPLATE\": OSAGO_OSAGO_COURT_MAIL_LIST_TEMPLATE,\n\n \"OSAGO_SCHEMA\": OSAGO_SCHEMA,\n\n \"OSAGO_MAIL_LIST_SCHEMA\": OSAGO_MAIL_LIST_SCHEMA,\n \"OSAGO_PRETENSION_SCHEMA\": OSAGO_PRETENSION_SCHEMA,\n \"OSAGO_DOCUMENTS_CLAIM_SCHEMA\": OSAGO_DOCUMENTS_CLAIM_SCHEMA,\n \"OSAGO_TRUST_SUBMISSION_DOCS_SCHEMA\": OSAGO_TRUST_SUBMISSION_DOCS_SCHEMA,\n \"OSAGO_TRUST_OBTAIN_DOCS_SCHEMA\": OSAGO_TRUST_OBTAIN_DOCS_SCHEMA,\n \"OSAGO_TRUST_SUBMISION_OBTAIN_DOCS_SCHEMA\": OSAGO_TRUST_SUBMISION_OBTAIN_DOCS_SCHEMA,\n \"OSAGO_CLAIM_COURT_ABSENT_SCHEMA\": OSAGO_CLAIM_COURT_ABSENT_SCHEMA,\n \"OSAGO_CLAIM_ALL_EXECUTION_ACT_SCHEMA\": OSAGO_CLAIM_ALL_EXECUTION_ACT_SCHEMA,\n \"OSAGO_CLAIM_GUILTY_EXECUTION_ACT_SCHEMA\": OSAGO_CLAIM_GUILTY_EXECUTION_ACT_SCHEMA,\n \"OSAGO_CLAIM_INSURANCE_EXECUTION_ACT_SCHEMA\": OSAGO_CLAIM_INSURANCE_EXECUTION_ACT_SCHEMA,\n \"OSAGO_LAWSUIT_SCHEMA\": OSAGO_LAWSUIT_SCHEMA,\n \"OSAGO_OSAGO_COURT_MAIL_LIST_SCHEMA\": OSAGO_OSAGO_COURT_MAIL_LIST_SCHEMA,\n\n \"OSAGO_RESULT_FIELDS\": OSAGO_RESULT_FIELDS,\n \"OSAGO_ACTIONS\": OSAGO_ACTIONS,\n \"OSAGO_TRANSITIONS\": OSAGO_TRANSITIONS,\n \"VALIDATION_CONDITION\": VALIDATION_CONDITION\n }\n\n\n" }, { "alpha_fraction": 0.7202072739601135, "alphanum_fraction": 0.7305699586868286, "avg_line_length": 23.125, "blob_id": "9ad385a7c621172f1968ed9330ec16463335277b", "content_id": "eb602ca2b76c7e65825cc841de050e3f1a057edc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 193, "license_type": "no_license", "max_line_length": 64, "num_lines": 8, "path": "/app/fw/auth/social_services/google_backend.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom fw.auth.social_services.social_backend import SocialBackend\n\nGOOGLE_PLUS_URL = \"https://www.googleapis.com/plus/v1/\"\n\n\nclass GoogleBackend(SocialBackend):\n pass\n" }, { "alpha_fraction": 0.6039435863494873, "alphanum_fraction": 0.6213418841362, "avg_line_length": 36.11258316040039, "blob_id": "4cb54d648a59884870ea9b088aa47b42c2ce89bb", "content_id": "94001929976d8c3108b9369a13cd01b61372122c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11214, "license_type": "no_license", "max_line_length": 131, "num_lines": 302, "path": "/jb_tests/base_test_case.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport logging\nimport os\n\nfrom flask import json, _request_ctx_stack\nfrom fw.api import errors\nfrom fw.auth.encrypt import encrypt_password\nfrom fw.auth.models import AuthUser\nfrom fw.auth.user_manager import UserManager\nfrom 
fw.db.sql_base import db as sql_db\n\nos.environ['CELERY_CONFIG_MODULE'] = 'local_celery_config'\n\n\nCURRENT_DIR = os.path.normpath(os.path.abspath(os.path.dirname(__file__)))\n\nTEST_CONFIG = {\n 'DEBUG': True,\n 'TEST': True,\n \"PDF_BUILDER_PATH\": os.path.normpath(\n os.path.join(os.path.abspath(os.path.dirname(__file__)), '../app/pdf-builder.jar')),\n \"PDFTK_PATH\": os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), '../app/pdftk')),\n \"SECRET_KEY\": \"secret_test\",\n \"SESSION_COOKIE_NAME\": \"rsa_sid\",\n \"PERMANENT_SESSION_LIFETIME\": 86400,\n \"DOMAIN\": \"jur-bureau.org\",\n 'max_activation_link_length': 20,\n 'digital_activation_link_length': 4,\n 'digital_activation_code_timeout': 900,\n 'email_activation_code_timeout': 86400,\n 'max_activation_attempts_count': 3,\n 'site_domain': 'legalcloud.ru',\n 'resources_path': os.path.join(CURRENT_DIR, 'test_data'),\n 'db_host': '127.0.0.1',\n 'db_name': 'test_db',\n 'WEB_SCHEMA': '',\n 'DOCUMENT_STORAGE': '/tmp/jb_docs/',\n 'STORAGE_URL': 'http://service.zz/storage/',\n 'PRIVATE_STORAGE': '/tmp/jb_docs/',\n 'bind_addr': '/tmp/tmp.tmp',\n 'UPLOAD_FOLDER': '/tmp',\n 'YURIST_EMAIL_LIST': ['[email protected]'],\n 'NOTARIUS_EMAIL_LIST': ['[email protected]'],\n 'mailer_reply_to': u'Юрбюро Онлайн <[email protected]>',\n 'log_file_path': '/tmp/tmp.log',\n 'LOG_LEVEL': 'DEBUG',\n 'api_url': 'legalcloud.ru/api',\n 'pdf_preview_watermark': os.path.normpath(\n os.path.join(os.path.abspath(os.path.dirname(__file__)), 'test_data/preview_watermark.png')),\n 'PDF_STAMPER_PATH': os.path.normpath(\n os.path.join(os.path.abspath(os.path.dirname(__file__)), '../app/pdf-stamper.jar')),\n 'celery_tasks_dir': '/tmp/',\n 'service_name': u\"ЮРБЮРО\",\n 'sms_gate_address': 'address',\n 'sms_gate_user': 'user',\n 'sms_gate_password': 'password',\n 'sms_gate_sender': 'YURBURO',\n 'mailer_smtp_user': 'user',\n 'vk_api_version': '5.2',\n 'vk_app_id': 4629685,\n 'vk_app_permissions': 2 + 4194304, # 73730,\n 'vk_auth_redirect_url': '/account/login/external/vk/',\n 'vk_test_token': '868218ba11994a0a334257b0e07acc45f0333fe77700d5e395d723feefd1b4188a5d77700d48b63a5c9c7',\n 'facebook_app_secret': '73535b9a524f23dc3e3efe4a576a3807',\n 'facebook_app_permissions': 'publish_stream,email,publish_actions',\n 'facebook_app_id': 673204526111410,\n 'facebook_auth_redirect_url': '/account/login/external/facebook/',\n 'SERVICE_NALOG_RU_URL': 'https://service.nalog.ru/',\n 'be_quiet': True,\n 'STAGING': False,\n 'ADMIN_EMAIL_LIST': ['[email protected]'],\n 'YAD_ESHOP_PASSWORD': '1234567890',\n 'YAD_IP_LIST': [],\n 'MAX_CONTENT_LENGTH': 1024000,\n 'OFFICE_IP': ['1.0.0.0'],\n 'NOT_PAID_BATCH_NOTIFY_TIMEOUT_SECONDS': 86400,\n 'NOT_PAID_BATCH_NOTIFY_DESIRED_TIME': '08:30',\n 'SEND_DOCS_TO_YURIST_DELAY_SECONDS': 0,\n 'user_by_code_tries_count': 5,\n 'SQLALCHEMY_DATABASE_URI': 'postgres://postgres:postgres@localhost/test_db',\n 'RUSSIAN_POST_API_LOGIN': 'test',\n 'RUSSIAN_POST_API_PASSWORD': 'test',\n 'CELERY_LOG_LEVEL': 'DEBUG',\n 'MEMCACHED_HOST': '127.0.0.1',\n 'RAISE_RIGHT_OFF': True,\n# 'SQLALCHEMY_ECHO': True\n}\n\nfrom unittest import TestCase\n\nfrom flask.app import Flask\nfrom flask_login import LoginManager\nfrom celery import current_app as celery\n\nfrom common_utils import LazyClassLoader\nfrom fw.auth import load_user\nfrom fw.api.sql_session_storage import SQLAlchemySessionInterface\nfrom fw.documents.contexts import ValidatorContext, RenderingContext, ModelCacheContext\nfrom template_filters import load_filters, set_template_loader\nimport 
test_external_tools\n\nfrom flask import Response\n\n\ndef init_db(app):\n app.external_tools = test_external_tools\n app.cache = test_external_tools.cache\n\n\ndef init_configuration(app, config):\n app.config.update(config)\n app.production = os.environ.get('JB_PRODUCTION', None) is not None\n celery.conf['config'] = config\n for handler in app.logger.handlers:\n celery.log.get_default_logger().addHandler(handler)\n\n\ndef init_login_system(app):\n app.session_interface = SQLAlchemySessionInterface(app.config)\n login_manager = LoginManager()\n login_manager.init_app(app)\n\n @login_manager.unauthorized_handler\n def unauthorized():\n raise errors.NotAuthorized()\n\n login_manager.user_callback = load_user\n\n\ndef init_blueprints(_app):\n from fw.api.views.auth import auth_bp\n from fw.api.views.documents import documents_bp\n from fw.api.views.general import general_bp\n from fw.api.views.object_management import domain_objects_bp\n from fw.api.views.system_views import system_bp\n from fw.api.views.files import files_bp\n\n _app.register_blueprint(general_bp)\n _app.register_blueprint(auth_bp)\n _app.register_blueprint(documents_bp)\n _app.register_blueprint(domain_objects_bp)\n _app.register_blueprint(files_bp)\n _app.register_blueprint(system_bp)\n\n\ndef init_services(_app):\n from services import ifns, notarius, yurist, partners, pay, llc_reg, ip_reg, osago, test_svc, car_assurance, russian_post\n class_loader = LazyClassLoader\n _app.class_loader = class_loader\n\n services = (ifns, notarius, yurist, partners, pay, llc_reg, ip_reg, osago, test_svc, car_assurance, russian_post)\n for service in services:\n service.register(_app, _app.jinja_env, class_loader)\n\ndef init_plugins():\n class_loader = LazyClassLoader\n from fw.plugins import emailer_plugin, register, doc_builder_plugin, batch_manager_plugin, car_assurance_plugin, task_scheduler\n plugins = (emailer_plugin, doc_builder_plugin, batch_manager_plugin, car_assurance_plugin, task_scheduler)\n for p in plugins:\n register(p.PLUGIN_NAME, p, class_loader=class_loader)\n\n\nclass BaseTestCase(TestCase):\n def setUp(self):\n self.app = Flask(__name__)\n\n self.test_client = self.app.test_client()\n self.init_logging()\n self.init_validator_context()\n self.config = TEST_CONFIG\n\n self.auth_cookie = None\n load_filters(self.app.jinja_env, self.config)\n self.app_context = self.app.app_context()\n self.app_context.__enter__()\n set_template_loader(self.app.jinja_env)\n init_configuration(self.app, self.config)\n init_blueprints(self.app)\n init_services(self.app)\n init_login_system(self.app)\n init_db(self.app)\n init_plugins()\n self.mailer = celery.conf['MAILER']\n self.mailer.mails = []\n self.sms_sender = celery.conf['SMS_SENDER']\n self.sms_sender.sms = []\n self.user = None\n self.user_profile = None\n UserManager.init(self.config, self.app.logger)\n sql_db.init_app(self.app)\n sql_db.create_all()\n for table in reversed(sql_db.metadata.sorted_tables):\n sql_db.engine.execute(table.delete())\n\n @self.app.errorhandler(413)\n def catcher(error):\n data_json = json.dumps({\"error\": {\"code\": errors.FileToLarge.ERROR_CODE, \"message\": errors.FileToLarge.ERROR_MESSAGE}})\n result = Response(data_json, mimetype='application/json', status=400)\n result.headers.add('Access-Control-Allow-Credentials', \"true\")\n result.headers.add('Access-Control-Allow-Origin', \"http://%s\" % self.config['site_domain'])\n return result\n\n def tearDown(self):\n sql_db.session.close()\n #sql_db.drop_all()\n for table in 
reversed(sql_db.metadata.sorted_tables):\n sql_db.engine.execute(table.delete())\n\n # noinspection PyUnresolvedReferences\n self.app.model_cache_context.clear()\n self.app_context.__exit__(None, None, None)\n\n def get_test_resource_name(self, name):\n return os.path.join(CURRENT_DIR, 'test_data', name)\n\n def init_logging(self):\n consoleHandler = logging.StreamHandler()\n consoleHandler.setFormatter(\n logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))\n consoleHandler.setLevel(logging.DEBUG)\n self.app.logger.addHandler(consoleHandler)\n self.app.logger.setLevel(logging.DEBUG)\n\n def init_validator_context(self):\n self.app.validator_context = ValidatorContext()\n self.app.rendering_context = RenderingContext()\n self.app.model_cache_context = ModelCacheContext()\n\n\ndef authorized(admin=False, is_temporal=False, user_id=None):\n def _wrapper(func):\n def _authorize(self, *args, **kwargs):\n if user_id is not None:\n user = AuthUser.query.filter_by(id=user_id).first()\n with self.test_client.session_transaction() as sess:\n sess['user_id'] = user_id\n self.user = user\n with self.app.test_request_context():\n _request_ctx_stack.top.user = self.user\n return func(self, *args, **kwargs)\n\n enabled = kwargs.get('user_enabled', True)\n data = {\n \"password\": encrypt_password('TestPassword123'),\n \"email\": '[email protected]',\n 'enabled': enabled,\n 'email_confirmed': True,\n 'mobile': \"+79001112233\",\n 'mobile_confirmed': True,\n\n 'name': 'Name',\n 'surname': 'Surname',\n 'patronymic': 'Patronymic',\n 'is_tester': True,\n 'temporal': is_temporal\n } if not is_temporal else {\n 'enabled': enabled,\n 'is_tester': True,\n 'temporal': is_temporal\n }\n if admin:\n data['admin'] = True\n new_user = AuthUser(**data)\n sql_db.session.add(new_user)\n sql_db.session.commit()\n with self.test_client.session_transaction() as sess:\n sess['user_id'] = new_user.get_id()\n self.user = new_user\n with self.app.test_request_context():\n _request_ctx_stack.top.user = self.user\n return func(self, *args, **kwargs)\n\n return _authorize\n\n return _wrapper\n\n\ndef registered_user(is_temporal=False):\n def _wrapper(func):\n def _register(self, *args, **kwargs):\n new_user = AuthUser(**{\n \"password\": encrypt_password('TestPassword123'),\n \"email\": '[email protected]',\n 'enabled': True,\n 'email_confirmed': True,\n 'mobile': \"+79001112233\",\n 'mobile_confirmed': True,\n\n 'name': 'Name',\n 'surname': 'Surname',\n 'patronymic': 'Patronymic',\n 'is_tester': True,\n 'temporal': is_temporal\n })\n sql_db.session.add(new_user)\n sql_db.session.flush()\n self.user = new_user\n return func(self, *args, **kwargs)\n\n return _register\n\n return _wrapper\n" }, { "alpha_fraction": 0.6290456652641296, "alphanum_fraction": 0.636929452419281, "avg_line_length": 31.133333206176758, "blob_id": "9b5b3556d0871af867961f8ce15048837fb796fd", "content_id": "dfa664691af0a5ebd6e6f4b54e17849a6399efb1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2410, "license_type": "no_license", "max_line_length": 108, "num_lines": 75, "path": "/app/fw/api/views/system_views.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport codecs\nimport os\n\nfrom flask import Blueprint, current_app, Response, abort\nfrom guppy import hpy\nfrom fw.api import errors\nfrom fw.api.args_validators import validate_arguments, ArgumentValidator\nfrom fw.api.base_handlers import 
api_view\n\nfrom fw.async_tasks import test_task\nfrom fw.documents.db_fields import DocumentBatchDbObject, BatchDocumentDbObject\nfrom fw.storage.file_storage import FileStorage\n\nsystem_bp = Blueprint('system', __name__)\n\n\n@system_bp.route('/test/task/', methods=['POST'])\ndef run_test_task():\n test_task.test_task.apply_async(task_id=\"test_task_id\", countdown=10)\n result = Response(u\"Ok\", status=200)\n return result\n\n\nhp = hpy()\nhp.setref()\n\n\n@system_bp.route('/sys/get_mem_stat/', methods=['GET'])\ndef get_mem_stat():\n pid = os.getpid()\n result = u\"Process: %s\\n\" % unicode(pid)\n res = hp.heap()\n result += unicode(res)\n\n result += u\"\\n\\n\"\n\n for i in xrange(min(len(res), 10)):\n result += \"\\n%s\" % unicode(res[i].byvia)\n result += u\"\\n\\n\"\n for i in xrange(min(len(res), 10)):\n result += \"\\n%s\" % unicode(res[i].bysize)\n result += u\"\\n\\n\"\n for i in xrange(min(len(res), 10)):\n result += \"\\n%s\" % unicode(res[i].byclodo)\n result += u\"\\n\\n\"\n for i in xrange(min(len(res), 10)):\n result += \"\\n%s\" % unicode(res[i].byid)\n\n return u\"<html><body><pre>%s</pre></body></html>\" % result\n\n\n@system_bp.route('/sys/get_batch_doc/', methods=['GET'])\n@api_view\n@validate_arguments(batch_id=ArgumentValidator(required=True), document_id=ArgumentValidator(required=True))\ndef get_batch_doc(batch_id=None, document_id=None):\n config = current_app.config\n if not config['STAGING'] and not config['DEBUG']:\n abort(404)\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n if not batch:\n raise errors.BatchNotFound()\n\n doc = BatchDocumentDbObject.query.filter_by(id=document_id).first()\n\n file_obj = doc.file\n if file_obj:\n file_path = FileStorage.get_path(file_obj, current_app.config)\n if os.path.exists(file_path) and file_path.endswith('.pdf'):\n file_path = file_path[:-4] + '.text-src'\n if os.path.exists(file_path):\n with codecs.open(file_path, 'r', 'utf-8') as ff:\n content = ff.read()\n return {'result': content}\n raise errors.BatchNotFound()\n" }, { "alpha_fraction": 0.6088249683380127, "alphanum_fraction": 0.6093207597732544, "avg_line_length": 34.385963439941406, "blob_id": "7d5771bc0e18a5a28257b433858d4d4979de931f", "content_id": "82e4311e74cd4a4bcf79ba0e2dd104d036c2da63", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2017, "license_type": "no_license", "max_line_length": 85, "num_lines": 57, "path": "/app/deployment_migrations/migration_list/20150907_migrate_notarius_models.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.db_fields import DocumentBatchDbObject\nfrom services.notarius.data_model.models import NotariusObject, NotariusBookingObject\n\n\ndef forward(config, logger):\n logger.debug(u\"Migrate notarius models\")\n\n notarius_col = db['notarius']\n NotariusBookingObject.query.delete()\n NotariusObject.query.delete()\n sqldb.session.commit()\n for old_notarius in notarius_col.find():\n new_notarius = NotariusObject(\n id=str(old_notarius['_id']),\n surname=old_notarius.get('surname', u\"\"),\n name=old_notarius.get('name', u\"\"),\n patronymic=old_notarius.get('patronymic', None),\n\n schedule=old_notarius['schedule'],\n schedule_caption=old_notarius.get('schedule_caption', ''),\n title=old_notarius.get('title', ''),\n address=old_notarius.get('address'),\n region=old_notarius['region']['code'],\n 
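# (editor's annotation, not in the original) the migration flattens the
# old Mongo document: the ObjectId is stringified into the SQL primary
# key (id=str(old_notarius['_id']) above) and the nested region dict is
# reduced to its bare code before the row is committed.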
metro_station=old_notarius.get('metro_station', '')\n )\n sqldb.session.add(new_notarius)\n sqldb.session.commit()\n\n booking_col = db['notarius_booking']\n for old_booking in booking_col.find():\n notarius_id = str(old_booking['notarius_id'])\n notarius = NotariusObject.query.filter_by(id=notarius_id).first()\n if not notarius:\n continue\n\n batch_id = str(old_booking['batch_id'])\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n if not batch:\n continue\n\n new_booking = NotariusBookingObject(\n id=str(old_booking['_id']),\n batch_id=str(old_booking['batch_id']),\n owner_id=str(old_booking['_owner']),\n notarius_id=notarius_id,\n dt=old_booking['dt'],\n address=old_booking['address'],\n _discarded=old_booking['_discarded']\n )\n sqldb.session.add(new_booking)\n sqldb.session.commit()\n\ndef rollback(config, logger):\n pass\n" }, { "alpha_fraction": 0.6301369667053223, "alphanum_fraction": 0.6369863152503967, "avg_line_length": 26.375, "blob_id": "06346a63762a9a0b690e5acf031364a090f88192", "content_id": "a362e16f202710229323adfcf81db55e5214d589", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 438, "license_type": "no_license", "max_line_length": 59, "num_lines": 16, "path": "/app/fw/async_tasks/test_task.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom celery import current_app as celery\nfrom flask.globals import current_app\n\ncelery.config_from_envvar('CELERY_CONFIG_MODULE')\n\[email protected]()\ndef test_task(a = 1, b = 2):\n config = celery.conf.get('config')\n db = celery.conf.get('db')\n\n with celery.conf['flask_app']().app_context():\n logger = current_app.logger\n logger.warn(u\"Test task started: %s\" % str((a, b)))\n return True\n" }, { "alpha_fraction": 0.6934306621551514, "alphanum_fraction": 0.6970803141593933, "avg_line_length": 23.909090042114258, "blob_id": "cafd2f62811c45e52f133115967a2e09a798f114", "content_id": "d9bf7dc692562aa7609fa254a933bd36832c3222", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 274, "license_type": "no_license", "max_line_length": 47, "num_lines": 11, "path": "/app/deployment_migrations/models.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom sqlalchemy import Column, Unicode, Integer\nfrom fw.db.sql_base import db as sqldb\n\n\nclass MigrationState(sqldb.Model):\n __tablename__ = 'migration_state'\n\n id = Column(Integer, primary_key=True)\n value = Column(Unicode, nullable=True)\n" }, { "alpha_fraction": 0.7229219079017639, "alphanum_fraction": 0.7267002463340759, "avg_line_length": 35.09090805053711, "blob_id": "0903a9bf9fb8a3d2ec70acde43647a846c90d386", "content_id": "00d52686d092cbfacc2f463857f3765d117fbaa0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 794, "license_type": "no_license", "max_line_length": 102, "num_lines": 22, "path": "/app/services/osago/documents/osago_reg_methods.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom fw.documents.db_fields import DocumentBatchDbObject\nfrom fw.documents.enums import DocumentTypeEnum\nfrom services.pay.models import PayInfoObject\n\n\ndef is_paid_document(batch_id=None, document_type=None):\n if not document_type or not batch_id:\n return False\n\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n 
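# (editor's annotation, not in the original) only the pretension and the
# lawsuit are paywalled below: every other document type is reported as
# paid, the pretension unlocks after the first recorded payment for the
# batch (payments_count > 0) and the lawsuit only after the second (> 1).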
if not batch:\n return False\n\n if document_type not in (DocumentTypeEnum.DT_OSAGO_PRETENSION, DocumentTypeEnum.DT_OSAGO_LAWSUIT):\n return True\n\n payments_count = PayInfoObject.query.filter_by(batch=batch).count()\n if document_type == DocumentTypeEnum.DT_OSAGO_PRETENSION:\n return payments_count > 0\n if document_type == DocumentTypeEnum.DT_OSAGO_LAWSUIT:\n return payments_count > 1\n" }, { "alpha_fraction": 0.45998039841651917, "alphanum_fraction": 0.4799085259437561, "avg_line_length": 32.63736343383789, "blob_id": "9a36ac86fa005ba0bee47365585df5156adba67b", "content_id": "1f06b539a0eb0b56fe9f9f47a9655e6e42047f44", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3061, "license_type": "no_license", "max_line_length": 103, "num_lines": 91, "path": "/app/services/ifns/utils/process_egrul_captcha.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport os\nimport shlex\nimport shutil\nimport subprocess\nimport tempfile\nimport requests\nfrom PIL import Image\n\ndef recognize_captcha(token):\n url = u\"http://egrul.nalog.ru/static/captcha.html?a=\" + token\n LEVEL = 1275\n ITERATIONS = 2\n GLOBAL_ITERATIONS = 10\n letters_result = [{}, {}, {}, {}, {}, {}]\n\n gi = 0\n while gi < GLOBAL_ITERATIONS:\n result = requests.get(url, stream=True, timeout=2)\n if not result or result.status_code!=200:\n print(u\"Failed to load captcha image\")\n return\n\n t_file_out = tempfile.NamedTemporaryFile(mode=\"w+\", delete=False, suffix=\".gif\")\n full_name = t_file_out.name\n t_file_out.close()\n file_path = full_name\n with open(file_path, 'wb') as f:\n result.raw.decode_content = True\n shutil.copyfileobj(result.raw, f)\n\n img = Image.open(file_path)\n img = img.convert(\"RGB\")\n pixdata = img.load()\n\n width = img.size[0]\n height = img.size[1]\n for i in xrange(ITERATIONS):\n layer = []\n for x in xrange(1, width - 1):\n for y in xrange(1, height -1):\n if pixdata[x, y][0] < 50:\n continue\n count = pixdata[x, y - 1][0]\n count += pixdata[x, y + 1][0]\n count += pixdata[x - 1, y - 1][0]\n count += pixdata[x + 1, y - 1][0]\n count += pixdata[x - 1, y + 1][0]\n count += pixdata[x + 1, y + 1][0]\n count += pixdata[x - 1, y][0]\n count += pixdata[x + 1, y][0]\n if count < LEVEL:\n layer.append((x, y))\n\n for x, y in layer:\n pixdata[x, y] = (40, 40, 40, 255)\n\n t_file_out = tempfile.NamedTemporaryFile(mode=\"w+\", delete=False, suffix=\".gif\")\n out_full_name = t_file_out.name\n t_file_out.close()\n img.save(out_full_name)\n\n t_file_out = tempfile.NamedTemporaryFile(mode=\"w+\", delete=True)\n text_full_name = t_file_out.name\n t_file_out.close()\n\n subprocess.call(shlex.split(\"tesseract -psm 7 %s %s digits\" % (out_full_name, text_full_name)))\n os.unlink(file_path)\n os.unlink(out_full_name)\n with open(text_full_name + '.txt', 'r') as f:\n word = f.read().strip()\n if not word.isdigit() or len(word) != 6:\n pass\n else:\n print(\"Hit: %s\" % word)\n gi += 1\n j = 0\n for letter in word:\n if letter not in letters_result[j]:\n letters_result[j][letter] = 1\n else:\n letters_result[j][letter] += 1\n\n j += 1\n os.unlink(text_full_name + '.txt')\n word = \"\"\n for x in letters_result:\n c = sorted(x.items(), key = lambda y: y[1])\n word += c[-1][0]\n print(word)\n return word\n" }, { "alpha_fraction": 0.7097436189651489, "alphanum_fraction": 0.7158974409103394, "avg_line_length": 35.79245376586914, "blob_id": "fe79d491b04e704e692eb86575b9dc8e069ef825", "content_id": 
"607085e0670eecc26162f61c249212b6d17698eb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1950, "license_type": "no_license", "max_line_length": 120, "num_lines": 53, "path": "/app/async_app.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport asyncmongo\n\nfrom importlib import import_module\nimport logging\nfrom logging.handlers import SocketHandler, DEFAULT_TCP_LOGGING_PORT\nimport tornado.web\nimport tornado.ioloop\nfrom tornado.options import options, define\nfrom tornado.httpserver import HTTPServer\nfrom fw.cache.cache_wrapper import CacheWrapper\nfrom fw.db.db_wrapper import DbWrapper\nfrom jb_config import JBConfiguration\n\nSERVICE_DESCRIPTION = 'JurBureau'\nDEFAULT_CONFIG_PATH = '/etc/jurbureau/config.cfg'\n\ndef init_logging(config):\n\n logger = logging.getLogger('jb_async')\n socketHandler = SocketHandler('localhost', DEFAULT_TCP_LOGGING_PORT)\n socketHandler.setLevel(config['LOG_LEVEL'])\n socketHandler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))\n logger.addHandler(socketHandler)\n logger.setLevel(config['LOG_LEVEL'])\n return logger\n\nclass Application(tornado.web.Application):\n def __init__(self, config, db = None, cache_client = None,\n file_manager = None, init_pool=True, logger = None, **settings):\n self.config = config or JBConfiguration(SERVICE_DESCRIPTION, DEFAULT_CONFIG_PATH)\n api_module_urls = import_module('async.urls')\n self.sync_db = DbWrapper(self.config)\n self.cache = CacheWrapper(self.config)\n\n self.logger = logger or init_logging(self.config)\n super(Application, self).__init__(api_module_urls.url_patterns, **settings)\n\n\ndef main():\n config = JBConfiguration(SERVICE_DESCRIPTION, DEFAULT_CONFIG_PATH)\n\n define(\"port\", default=\"9876\", help=\"HTTP service port\")\n define(\"host\", default=\"127.0.0.1\", help=\"HTTP service address\")\n tornado.options.parse_command_line()\n http_server = HTTPServer(Application(config))\n http_server.bind(int(options.port), options.host)\n http_server.start(1)\n\n tornado.ioloop.IOLoop.instance().start()\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.5346097350120544, "alphanum_fraction": 0.5920471549034119, "avg_line_length": 33.846153259277344, "blob_id": "417b500e336ffdf19a5b3ec0662f510dfe1f67de", "content_id": "93877e8128de83b603f2704688a943e57fd24015", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1418, "license_type": "no_license", "max_line_length": 108, "num_lines": 39, "path": "/app/services/car_assurance/integration.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport json\nimport requests\n\ndef check_car_policy(policy_series, policy_number, timeout=20.0):\n headers = {\n 'User-Agent': u'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:38.0) Gecko/20100101 Firefox/38.0',\n 'Accept': u'application/json',\n 'Accept-Language': u'en-US,en;q=0.5',\n 'Accept-Encoding': u'gzip, deflate',\n 'Content-Type': u'application/x-www-form-urlencoded; charset=UTF-8',\n 'X-Requested-With': u'XMLHttpRequest',\n 'Referer': u'http://dkbm-web.autoins.ru/dkbm-web-1.0/bsostate.htm',\n }\n\n response = requests.post('http://dkbm-web.autoins.ru/dkbm-web-1.0/bsostate.htm', data={\n 'answer': u'Введите текст', # БГГ, это капча\n 'bsonumber': policy_number,\n 'bsoseries': policy_series\n }, headers=headers, 
timeout=timeout)\n\n if response.status_code != 200:\n return\n\n return json.loads(response.text)\n\n #typical response: {\n # \"policyCreateDate\":\"17.10.2013\",\n # \"bsoSeries\":\"ССС\",\n # \"bsoNumber\":\"0307897277\",\n # \"changeDate\":\"06.02.2014\",\n # \"policyBeginDate\":\"20.10.2013\",\n # \"policyEndDate\":\"19.10.2014\",\n # \"insCompanyName\":\"РЕСО-ГАРАНТИЯ\",\n # \"bsoStatusName\":\"Находится у страхователя\",\n # \"validCaptcha\":true,\n # \"errorMessage\":null\n # }" }, { "alpha_fraction": 0.6616161465644836, "alphanum_fraction": 0.6641414165496826, "avg_line_length": 25.133333206176758, "blob_id": "09929ede4729dd4bfd7fd3a28857e742c5ffa474", "content_id": "d3c027c012e9598b2ed53fadfec64a50f64e34b9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 396, "license_type": "no_license", "max_line_length": 90, "num_lines": 15, "path": "/app/services/ifns/utils/ifns_logger.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport logging\n\nIFNS_LOGGER = logging.getLogger(\"IFNS\")\nIFNS_LOGGER.setLevel(logging.DEBUG)\n\ntry:\n _fh = logging.FileHandler('/var/log/jb/ifns.log')\n _fh.setLevel(logging.DEBUG)\n _formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n _fh.setFormatter(_formatter)\n IFNS_LOGGER.addHandler(_fh)\nexcept Exception:\n pass\n\n\n\n\n" }, { "alpha_fraction": 0.6562827229499817, "alphanum_fraction": 0.6586387157440186, "avg_line_length": 35.39047622680664, "blob_id": "a87de5ae912eae86b2fb3de43b87e429a5675d50", "content_id": "846e7b254a5bf0dd8afb7e097f01c2ac886d9e42", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3820, "license_type": "no_license", "max_line_length": 123, "num_lines": 105, "path": "/app/manage.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport argparse\nimport inspect\nimport logging\nimport os\nfrom flask import Flask\nimport external_tools\nfrom fw.cache.cache_wrapper import CacheWrapper\nfrom jb_config import JBConfiguration\nfrom manage_commands import BaseManageCommand\nfrom template_filters import load_filters\nfrom services import ifns, ip_reg, llc_reg, notarius, osago, partners, pay, yurist, car_assurance\n\n\nSERVICE_DESCRIPTION = 'JurBureau'\nDEFAULT_CONFIG_PATH = '/etc/jurbureau/config.cfg'\n\napp = Flask(__name__)\n\nSERVICES = (ifns, ip_reg, llc_reg, notarius, osago, partners, pay, yurist, car_assurance)\n\n\ndef init_configuration():\n parser = argparse.ArgumentParser(description=SERVICE_DESCRIPTION)\n parser.add_argument('command', action='store')\n parser.add_argument(\"--config\", help=\"config file\")\n parser.add_argument(\"--quiet\", help=\"skip user interaction if possible\", action=\"store_true\")\n\n res = parser.parse_known_args()\n\n config = JBConfiguration(SERVICE_DESCRIPTION, res[0].config if res[0].config else DEFAULT_CONFIG_PATH)\n config.settings['be_quiet'] = not not res[0].quiet\n app.config.update(config.settings)\n return res[0].command\n\n\ndef load_commands(command_locations=None):\n command_locations = command_locations or []\n\n def load_command_from_module(module):\n commands = {}\n for name, obj in inspect.getmembers(module):\n if inspect.isclass(obj) and issubclass(obj, BaseManageCommand) and obj.__name__ != 'BaseManageCommand':\n # noinspection PyUnresolvedReferences\n commands[obj.NAME] = obj(config=app.config, logger=app.logger)\n 
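# (editor's annotation, not in the original) command discovery is purely
# reflective: inspect.getmembers() walks each imported module and every
# concrete BaseManageCommand subclass is registered under its NAME.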
return commands\n\n commands = {}\n command_locations.append(os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), \"manage_commands\")))\n base_dir = os.path.normpath(os.path.abspath(os.path.dirname(__file__)))\n for command_dir in command_locations:\n for root, dirs, files in os.walk(command_dir):\n mod_rel_dir = command_dir[len(base_dir) + 1:].replace('/', '.')\n for file_name in files:\n if os.path.splitext(file_name)[1] != '.py':\n continue\n mod_path = os.path.splitext(file_name)[0]\n try:\n mod = __import__(mod_rel_dir + '.' + mod_path, fromlist=mod_rel_dir)\n except ImportError, ex:\n continue\n mod_commands = load_command_from_module(mod)\n commands.update(mod_commands)\n\n return commands\n\n\ndef init_db():\n app.cache = CacheWrapper()\n app.external_tools = external_tools\n\n from fw.db.sql_base import db\n db.init_app(app)\n\n\nif __name__ == '__main__':\n del app.logger.handlers[:]\n consoleHandler = logging.StreamHandler()\n consoleHandler.setFormatter(\n logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))\n consoleHandler.setLevel(logging.DEBUG)\n app.logger.addHandler(consoleHandler)\n app.logger.setLevel(logging.DEBUG)\n command = init_configuration()\n init_db()\n load_filters(app.jinja_env, app.config)\n os.environ['CELERY_CONFIG_MODULE'] = app.config['CELERY_CONFIG_MODULE']\n from app import init_contexts\n\n init_contexts(app)\n\n command_locations = []\n for service in SERVICES:\n if hasattr(service, 'get_manager_command_locations'):\n command_locations.extend(service.get_manager_command_locations())\n COMMAND_HANDLERS = load_commands(command_locations)\n handler = COMMAND_HANDLERS.get(command, None)\n if not handler:\n print('Do not know how to handle \"%s\" command\\r\\n' % command)\n print('Available commands:\\r\\n * %s' % '\\r\\n * '.join(sorted(COMMAND_HANDLERS.keys())))\n exit(-1)\n\n with app.app_context():\n handler.run()" }, { "alpha_fraction": 0.4538794159889221, "alphanum_fraction": 0.4610857665538788, "avg_line_length": 29.057762145996094, "blob_id": "2a8b883873c165f55f9cb2f0b8686c741406b52e", "content_id": "e9f7f91d78444741abdb2906d65745af619c5a1d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8422, "license_type": "no_license", "max_line_length": 115, "num_lines": 277, "path": "/app/services/test_svc/documents/db_data.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport copy\nimport os\n\nfrom fw.documents.enums import DocumentTypeEnum, BatchStatusEnum, DocumentBatchTypeEnum, DocumentKindEnum\nfrom fw.documents.field_matchers import SimpleMatcher\nfrom fw.documents.common_schema_fields import SHORT_NAME_FIELD, GENERAL_MANAGER_FIELD\n\n\ndef get_test_resource_name(config, resource_rel_path):\n resources_path = config['resources_path']\n return os.path.join(resources_path, resource_rel_path)\n\nSHORT_NAME_FIELD = copy.copy(SHORT_NAME_FIELD)\nSHORT_NAME_FIELD[\"max_length\"] = 40\n\nGENERAL_MANAGER_FIELD_NA = copy.copy(GENERAL_MANAGER_FIELD)\nGENERAL_MANAGER_FIELD_NA['required'] = False\n\nSOME_DB_OBJECT = {\n \"name\": \"some_db_object\",\n \"type\": \"db_object\",\n \"required\": False\n}\n\n\ndef load_data(config):\n TEST_BATCH_SCHEMA = {\n \"doc_name\": DocumentBatchTypeEnum.DBT_TEST_TYPE,\n \"fields\": [\n SHORT_NAME_FIELD,\n GENERAL_MANAGER_FIELD_NA,\n SOME_DB_OBJECT,\n {\n \"name\": \"text_field\",\n \"type\": \"DocTextField\",\n \"max_length\": 20,\n \"min_length\": 5,\n 
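# (editor's annotation, not in the original) each schema field is a
# declarative descriptor: "type" names a Doc*Field class, min/max_length
# bound its validator, and the "required" flag below marks it optional.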
\"required\": False\n },\n {\n \"name\": \"some_text_field\",\n \"type\": \"DocTextField\",\n \"max_length\": 20,\n \"min_length\": 5,\n \"required\": False\n },\n {\n \"name\": \"restricted_field\",\n \"type\": \"DocTextField\",\n \"modification_condition\": {\n '<batch>->status': 'new'\n },\n \"required\": False\n }\n ]\n }\n\n TEST_DOC_1_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_TEST_DOC_1,\n \"file_name_template\": u\"Тестовый документ 1\",\n \"batch_statuses\": [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"fields\": [\n SHORT_NAME_FIELD,\n {\n \"name\": \"some_text_field\",\n \"type\": \"DocTextField\",\n \"max_length\": 20,\n \"min_length\": 5,\n \"required\": False\n }\n ]\n }\n\n TEST_DOC_1_TEMPLATE = {\n \"doc_name\": DocumentTypeEnum.DT_TEST_DOC_1,\n \"template_name\": \"test_doc_1_template\",\n \"is_strict\": True,\n \"pages\": [{\n \"page_file\": [get_test_resource_name(config, \"test/test.pdf\")],\n \"fields\": [\n {\n \"name\": \"field1\",\n \"field-length\": 10\n }\n ]\n }]\n }\n\n TEST_DOC_1_MATCHER = {\n \"doc_name\": DocumentTypeEnum.DT_TEST_DOC_1,\n \"template_name\": TEST_DOC_1_TEMPLATE['template_name'],\n \"fields\": {\n \"field1\": SimpleMatcher(field_name=\"short_name\")\n }\n }\n\n TEST_DOC_2_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_TEST_DOC_2,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Тестовый документ 2\",\n \"batch_statuses\": [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"conditions\": {\n \"short_name\": u\"создай второй документ\"\n },\n \"error_filter\": {\n \"<document>->tried_to_render\": True\n },\n \"fields\": [\n SHORT_NAME_FIELD,\n {\n \"name\": \"text_field\",\n \"type\": \"DocTextField\",\n \"max_length\": 20,\n \"min_length\": 5,\n \"required\": False\n }\n ]\n }\n\n TEST_DOC_2_TEMPLATE = {\n \"doc_name\": DocumentTypeEnum.DT_TEST_DOC_2,\n \"template_name\": \"some_doc\",\n \"file_name\": get_test_resource_name(config, \"test/test.tex\"),\n \"is_strict\": False\n }\n\n TEST_DOC_3_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_TEST_DOC_3,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Тестовый документ 3\",\n \"batch_statuses\": [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"conditions\": {\n \"general_manager\" : {\n \"#not_empty\": True\n }\n },\n \"validators\": [{\n \"condition\": {\n 'general_manager->sex': 'male'\n },\n \"error\": {\n \"field\": \"test_doc_validation\",\n \"code\": 5\n }\n }],\n \"fields\": [\n GENERAL_MANAGER_FIELD_NA,\n SOME_DB_OBJECT,\n {\n \"name\": \"ugly_field\",\n \"type\": \"calculated\",\n \"field_type\": \"DocIntField\",\n \"required\": False,\n \"value\": {\n \"#div\": [{\n \"#field\": \"general_manager->living_country_code\"\n }, {\n \"#field\": \"general_manager->living_country_code\"\n }]\n }\n }\n ]\n }\n\n TEST_DOC_3_TEMPLATE = {\n \"doc_name\": DocumentTypeEnum.DT_TEST_DOC_3,\n \"template_name\": \"some_doc\",\n \"file_name\": get_test_resource_name(config, \"test/test.tex\"),\n \"is_strict\": False\n }\n\n TEST_BATCH_RESULT_FIELDS = [{\n \"name\": \"name\",\n \"type\": \"calculated\",\n \"field_type\": \"DocTextField\",\n \"required\": False,\n \"value\": {\n \"#field\": \"short_name\"\n }\n }]\n\n TEST_BATCH_DEFER_DOCS = [DocumentTypeEnum.DT_TEST_DOC_2]\n\n TEST_BATCH_ACTIONS = { # id: action\n 'action1': {\n 'plugin': 'emailer',\n 'action': 'send_email',\n 'config': {\n 'mail_type': 'simple_mail',\n 'target_type': 'batch_owner',\n 'retry_count': 3\n }\n },\n 'fail_mail_action': {\n 'plugin': 
'emailer',\n 'action': 'send_email',\n 'config': {\n 'mail_type': 'simple_mail',\n 'target_type': 'specified_users',\n 'target_email_list': ['[email protected]'],\n 'silent': True # do not send any events after email send /send failure\n } # to beat infinite cycle\n }\n }\n\n TEST_BATCH_TRANSITIONS = [{\n \"status\": \"finalised\",\n \"condition\": {\n \"<batch>->status\": \"new\",\n \"short_name\": u\"финализируйся\"\n }\n }, {\n \"status\": \"edited\",\n \"condition\": {\n \"short_name\": u\"едитыд\"\n }\n }, {\n \"status\": \"after_event\",\n \"condition\": {\n \"<batch>->status\": \"new\",\n \"<event>\": \"go_ahead\",\n \"short_name\": u\"по событию\"\n }\n }, {\n \"status\": \"after_simple_event\",\n \"condition\": {\n \"<event>\": \"simple_event\"\n },\n \"actions\": [\"action1\"]\n }, {\n \"condition\": {\n \"<event>\": \"emailer.send_fail\"\n },\n \"actions\": [\"fail_mail_action\"]\n }, {\n \"status\": \"finalised\",\n \"condition\": {\n \"<batch>->status\": {\n \"#in\": [\"new\", \"finalised1\"]\n },\n \"<event>\": \"doc_group_render_success\"\n }\n }, {\n \"status\": \"edited\",\n \"condition\": {\n \"<batch>->status\": \"new\",\n \"<event>\": \"doc_group_render_fail\"\n }\n }, {\n \"status\": \"finalised1\",\n \"condition\": {\n \"<batch>->status\": \"new\",\n \"<event>\": \"doc_render_success\"\n }\n }, {\n \"status\": \"finalised2\",\n \"condition\": {\n \"<batch>->status\": \"new\",\n \"<event>\": \"doc_render_fail\"\n }\n }]\n\n return {\n \"TEST_BATCH_SCHEMA\": TEST_BATCH_SCHEMA,\n \"TEST_DOC_1_SCHEMA\": TEST_DOC_1_SCHEMA,\n \"TEST_DOC_1_TEMPLATE\": TEST_DOC_1_TEMPLATE,\n \"TEST_DOC_1_MATCHER\": TEST_DOC_1_MATCHER,\n \"TEST_DOC_2_SCHEMA\": TEST_DOC_2_SCHEMA,\n \"TEST_DOC_2_TEMPLATE\": TEST_DOC_2_TEMPLATE,\n \"TEST_DOC_3_SCHEMA\": TEST_DOC_3_SCHEMA,\n \"TEST_DOC_3_TEMPLATE\": TEST_DOC_3_TEMPLATE,\n \"TEST_BATCH_RESULT_FIELDS\": TEST_BATCH_RESULT_FIELDS,\n \"TEST_BATCH_DEFER_DOCS\": TEST_BATCH_DEFER_DOCS,\n \"TEST_BATCH_TRANSITIONS\": TEST_BATCH_TRANSITIONS,\n \"TEST_BATCH_ACTIONS\": TEST_BATCH_ACTIONS\n }\n" }, { "alpha_fraction": 0.7928143739700317, "alphanum_fraction": 0.7940119504928589, "avg_line_length": 36.95454406738281, "blob_id": "071ac7fc529a44ea7f0dc92f9ca76f06a757a7dc", "content_id": "86011ea3085fe3eb88a2b493733b15aed25ea710", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 835, "license_type": "no_license", "max_line_length": 119, "num_lines": 22, "path": "/app/deployment_migrations/migration_list/20150709_add_column_to_batch.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\nimport logging\nfrom tempfile import TemporaryFile, NamedTemporaryFile\nfrom bson import ObjectId\nimport requests\nfrom fw.auth.models import AuthUser\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.db_fields import DocumentBatchDbObject, BatchDocumentDbObject, CompanyDbObject, PrivatePersonDbObject\nfrom fw.documents.enums import PersonTypeEnum, IncorporationFormEnum, CompanyTypeEnum\nfrom fw.storage.models import FileObject\n\n\ndef forward(config, logger):\n logger.debug(u\"Add column sent_mails to table doc_batch\")\n\n sqldb.session.close()\n sqldb.engine.execute(\"ALTER TABLE doc_batch ADD COLUMN sent_mails VARCHAR[] DEFAULT NULL;\")\n\ndef rollback(config, logger):\n sqldb.session.close()\n sqldb.engine.execute(\"ALTER TABLE doc_batch DROP COLUMN sent_mails;\")\n" }, { "alpha_fraction": 0.6502485275268555, "alphanum_fraction": 
0.6565747857093811, "avg_line_length": 28.13157844543457, "blob_id": "700ffac00c97d3ca0769da36768a4faeea467f41", "content_id": "24f7bb34cabdf965819e189a572d077cad1143b2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2286, "license_type": "no_license", "max_line_length": 88, "num_lines": 76, "path": "/app/custom_exceptions.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom fw.api.errors import ApiBaseError\n\n\nclass MissingRequiredFieldException(ApiBaseError):\n ERROR_CODE = 4\n HTTP_ERROR_CODE = 400\n ERROR_MESSAGE = u\"Пропущено обязательное поле %s\"\n\n def __init__(self, field_name, *args, **kwargs):\n super(MissingRequiredFieldException, self).__init__(field_name, *args, **kwargs)\n self.field_name = field_name\n self.ext_data = kwargs.get('ext_data', [])\n\n def get_error_message(self):\n return self.ERROR_MESSAGE % self.field_name\n\n\nclass InvalidFieldValueException(ApiBaseError):\n ERROR_CODE = 5\n HTTP_ERROR_CODE = 400\n ERROR_MESSAGE = u\"Недопустимое значение поля %s\"\n\n def __init__(self, field_name=u\"\", *args, **kwargs):\n super(InvalidFieldValueException, self).__init__(field_name, *args, **kwargs)\n self.field_name = field_name\n self.ext_data = []\n\n def get_error_message(self):\n if not self.field_name and self.ext_data:\n return self.ERROR_MESSAGE % unicode(self.ext_data)\n return self.ERROR_MESSAGE % self.field_name\n\n\nclass InvalidRawFieldValueException(ApiBaseError):\n ERROR_CODE = 5\n HTTP_ERROR_CODE = 400\n ERROR_MESSAGE = u\"Недопустимое значение поля %s\"\n\n def __init__(self, field_name, *args, **kwargs):\n super(InvalidRawFieldValueException, self).__init__(field_name, *args, **kwargs)\n self.field_name = field_name\n self.ext_data = []\n\n def get_error_message(self):\n return self.ERROR_MESSAGE % self.field_name\n\n\nclass ArrayFieldTooShortException(Exception):\n def __init__(self, field_name, *args, **kwargs):\n super(ArrayFieldTooShortException, self).__init__(field_name, *args, **kwargs)\n self.field_name = field_name\n self.ext_data = []\n\n\nclass ArrayFieldTooLongException(Exception):\n def __init__(self, field_name, *args, **kwargs):\n super(ArrayFieldTooLongException, self).__init__(field_name, *args, **kwargs)\n self.field_name = field_name\n self.ext_data = []\n\n\nclass NotInitialized(Exception):\n ERROR_CODE = 4\n\n\nclass NotMineException(Exception):\n pass\n\n\nclass FileNotFound(Exception):\n pass\n\n\nclass CacheMiss(Exception):\n pass" }, { "alpha_fraction": 0.6993067860603333, "alphanum_fraction": 0.7001733183860779, "avg_line_length": 26.4761905670166, "blob_id": "1eb1eb215b7d2e7c9909f18d41587a0686bbb842", "content_id": "11321d54f33a7ba258207477b537d2228c34774b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1154, "license_type": "no_license", "max_line_length": 119, "num_lines": 42, "path": "/app/deployment_migrations/migration_list/20150703_add_assurances.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\nimport logging\nfrom tempfile import TemporaryFile, NamedTemporaryFile\nfrom bson import ObjectId\nimport requests\nfrom fw.auth.models import AuthUser\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.db_fields import DocumentBatchDbObject, BatchDocumentDbObject, CompanyDbObject, PrivatePersonDbObject\nfrom fw.documents.enums import PersonTypeEnum, 
IncorporationFormEnum, CompanyTypeEnum\nfrom fw.storage.models import FileObject\n\n\ndef forward(config, logger):\n logger.debug(u\"Add car assurances tables\")\n\n sqldb.session.close()\n\n sqldb.engine.execute(\"\"\"\n CREATE TABLE car_assurance (\n id VARCHAR NOT NULL,\n full_name VARCHAR,\n short_name VARCHAR,\n PRIMARY KEY (id)\n );\n \"\"\")\n\n sqldb.engine.execute(\"\"\"\n CREATE TABLE car_assurance_branch (\n id VARCHAR NOT NULL,\n full_name VARCHAR,\n short_name VARCHAR,\n address VARCHAR,\n car_assurance_id VARCHAR NOT NULL,\n PRIMARY KEY (id),\n FOREIGN KEY(car_assurance_id) REFERENCES car_assurance (id)\n );\n \"\"\")\n\n\ndef rollback(config, logger):\n pass\n" }, { "alpha_fraction": 0.6179295778274536, "alphanum_fraction": 0.6197082996368408, "avg_line_length": 37.50685119628906, "blob_id": "5ebe0759a0f3c38a4b9df952b4c4b7c8fa411bff", "content_id": "63b68de6842a197face6825915860977b4408880", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2811, "license_type": "no_license", "max_line_length": 130, "num_lines": 73, "path": "/app/deployment_migrations/migration_list/20150914_migrate_pay_models.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\n\nimport dateutil.parser\nfrom decimal import Decimal\nimport pytz\n\nfrom fw.db.sql_base import db as sqldb\nfrom services.pay.models import YadRequestsObject, PaymentSubscriptionObject\n\n\ndef _parse_iso_dt(str_val):\n val = dateutil.parser.parse(str_val)\n return val.astimezone(pytz.timezone('utc')).replace(tzinfo=None)\n\n\ndef forward(config, logger):\n logger.debug(u\"Migrate pay models\")\n\n yad_requests = db['yad_requests']\n YadRequestsObject.query.delete()\n sqldb.session.commit()\n for old_req in yad_requests.find():\n new_req = YadRequestsObject(\n ip=old_req['ip'],\n created=old_req['created'],\n request_datetime=_parse_iso_dt(old_req['request_datetime']),\n md5=old_req['md5'],\n shop_id=int(old_req['shop_id']),\n shop_article_id=int(old_req['shop_article_id']) if 'shop_article_id' in old_req and old_req['shop_article_id'] else 0,\n invoice_id=int(old_req['invoice_id']),\n order_number=old_req['order_number'],\n customer_number=old_req['customer_number'],\n order_created_datetime=_parse_iso_dt(old_req['order_created_datetime']),\n order_sum_amount=Decimal(old_req['order_sum_amount']),\n order_sum_currency_paycash=old_req['order_sum_currency_paycash'],\n order_sum_bank_paycash=old_req['order_sum_bank_paycash'],\n shop_sum_amount=Decimal(old_req['shop_sum_amount']),\n shop_sum_currency_paycash=old_req['shop_sum_currency_paycash'],\n shop_sum_bank_paycash=old_req['shop_sum_bank_paycash'],\n payment_payer_code=old_req['payment_payer_code'],\n payment_type=old_req['payment_type'],\n action=old_req['action'],\n payment_datetime=_parse_iso_dt(old_req['payment_datetime']) if (\n 'payment_datetime' in old_req and old_req['payment_datetime']) else None,\n cps_user_country_code=old_req.get('cps_user_country_code', None)\n )\n sqldb.session.add(new_req)\n try:\n sqldb.session.commit()\n except:\n raise\n\n PaymentSubscriptionObject.query.delete()\n sqldb.session.commit()\n\n subs_col = db['payment_subscription']\n for old_sub in subs_col.find():\n if 'user' not in old_sub or not isinstance(old_sub['user'], int):\n continue\n new_sub = PaymentSubscriptionObject(\n pay_info=old_sub['pay_info'],\n created=old_sub.get('created', datetime.utcnow()),\n end_dt=old_sub.get('end_dt', datetime.utcnow()),\n 
type=old_sub.get('type', 'subscription_3'),\n user_id=old_sub['user']\n )\n sqldb.session.add(new_sub)\n sqldb.session.commit()\n\n\ndef rollback(config, logger):\n pass\n" }, { "alpha_fraction": 0.4832802414894104, "alphanum_fraction": 0.4904458522796631, "avg_line_length": 28.20930290222168, "blob_id": "dc6c56ddf2518e6174d36fe96ac95bd9b3d498e4", "content_id": "5dee72de71dc8e41ada1572f90d49c15024b2cda", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1256, "license_type": "no_license", "max_line_length": 63, "num_lines": 43, "path": "/app/services/ifns/data_model/okved.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n\ndef chunks(l, n):\n for i in xrange(0, len(l), n):\n yield l[i:i + n]\n\n\nclass OkvedEnum(object):\n\n @staticmethod\n def get_name(code):\n if len(code) > 2 and '.' not in code:\n code = ''.join(chunks(code, 2))\n if len(code) < 6:\n code += u\" \" * (6 - len(code))\n elif len(code) > 2 and '.' in code:\n code_parts = [part for part in code.split('.')]\n while len(code_parts) < 3:\n code_parts.append(None)\n\n parts_str_array = []\n for part in code_parts:\n if part is not None:\n part_str = unicode(part)\n if len(part_str) < 2:\n part_str += u\" \"\n parts_str_array.append(part_str)\n else:\n parts_str_array.append(u\" \")\n code = u\"\".join(parts_str_array)\n return code\n\n @staticmethod\n def get_title(code):\n from fw.catalogs.models import OkvadObject\n okvad = OkvadObject.query.filter_by(okved=code).first()\n return okvad.caption if okvad else u\"\"\n\n # noinspection PyUnusedLocal\n @classmethod\n def validate(cls, value):\n return True\n" }, { "alpha_fraction": 0.6985074877738953, "alphanum_fraction": 0.699999988079071, "avg_line_length": 34.21052551269531, "blob_id": "b8170c44c0ec36f8400fa7d4e7845404aa9c9ddf", "content_id": "12d6021aa7c49be6fde2e5bca230704b39d2c7a7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 670, "license_type": "no_license", "max_line_length": 89, "num_lines": 19, "path": "/app/fw/api/views/__init__.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport json\n\nfrom flask import Response\n\nfrom fw.api import errors\n\n\ndef not_authorized(site_domain=None):\n api_error_code = errors.NotAuthorized.ERROR_CODE\n http_error_code = errors.NotAuthorized.HTTP_ERROR_CODE\n api_error_msg = errors.NotAuthorized.ERROR_MESSAGE\n data_json = json.dumps({\"error\": {\"code\": api_error_code, \"message\": api_error_msg}})\n\n result = Response(data_json, mimetype='application/json', status=http_error_code)\n if site_domain:\n result.headers.add('Access-Control-Allow-Credentials', \"true\")\n result.headers.add('Access-Control-Allow-Origin', \"http://%s\" % site_domain)\n return result\n\n" }, { "alpha_fraction": 0.6052108407020569, "alphanum_fraction": 0.6056524515151978, "avg_line_length": 35.231998443603516, "blob_id": "9a4b0d8f28596945e042a743e73a3934d29dd02e", "content_id": "62b3366334b141af62cd30f37ea5cd82571b79e2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4529, "license_type": "no_license", "max_line_length": 107, "num_lines": 125, "path": "/app/fw/plugins/batch_manager_plugin/__init__.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom copy import copy\nfrom fw.async_tasks import rendering\nfrom 
fw.db.sql_base import db as sqldb\nfrom fw.documents.db_fields import DocumentBatchDbObject, BatchDocumentDbObject, DocumentFilesObject\nfrom fw.documents.enums import UserDocumentStatus\nfrom fw.documents.schema.schema_transform import transform_field_with_schema\n\nPLUGIN_NAME = 'batch_manager'\n\n\ndef get_actions():\n actions = [{\n 'name': 'set_result_fields',\n 'async': False\n }, {\n 'name': 'reset_result_fields',\n 'async': False\n }, {\n 'name': 'remove_documents',\n 'async': False\n }, {\n 'name': 'set_batch_fields',\n 'async': False\n }, {\n 'name': 'touch',\n 'async': True\n }, {\n 'name': 'check_and_fix_osago_payments',\n 'async': False\n }]\n return actions\n\ndef get_events():\n events = [{\n 'name': 'on_field_changed'\n }, {\n 'name': 'on_fieldset_changed'\n }]\n return events\n\ndef _apply_dict_patch(original, patch):\n for k, v in patch.items():\n if isinstance(v, dict) and k in original and isinstance(original[k], dict):\n original[k] = _apply_dict_patch(original[k], v)\n else:\n original[k] = v\n return original\n\ndef _make_result_fields_patch(data, field_name_map):\n patch = {}\n for src, dst in field_name_map.items():\n if '.' in src:\n raise NotImplementedError()\n if src in data:\n patch[dst] = data[src]\n return patch\n\ndef act(action, batch_db, event_data, plugin_config, logger, config):\n assert batch_db\n descriptors = filter(lambda x: x['name'] == action, get_actions())\n action_descriptor = descriptors[0] if descriptors else None\n if not action_descriptor:\n raise ValueError(u'Invalid action: %s for %s plugin' % (action, PLUGIN_NAME))\n\n if action == 'set_result_fields':\n data = copy(event_data)\n if not data:\n logger.warn(u\"Empty data in set_result_fields event\")\n return True\n field_name_map = plugin_config['field_name_map']\n result_fields_patch = _make_result_fields_patch(data, field_name_map)\n fields = copy(batch_db.result_fields or {})\n batch_db.result_fields = _apply_dict_patch(fields, result_fields_patch)\n sqldb.session.commit()\n elif action == 'reset_result_fields':\n keys = plugin_config['fields']\n if not keys:\n return True\n fields = copy(batch_db.result_fields or {})\n for key in keys:\n if key in fields:\n del fields[key]\n batch_db.result_fields = fields\n sqldb.session.commit()\n elif action == 'remove_documents':\n query = BatchDocumentDbObject.query.filter_by(batch=batch_db)\n if plugin_config and 'doc_types' in plugin_config and isinstance(plugin_config['doc_types'], list):\n query = query.filter(BatchDocumentDbObject.document_type.in_(plugin_config['doc_types']))\n for i in query:\n DocumentFilesObject.query.filter_by(doc_id=i.id).delete()\n BatchDocumentDbObject.query.filter_by(id=i.id).update({\n 'status': UserDocumentStatus.DS_NEW,\n '_celery_task_id': None,\n '_celery_task_started': None\n })\n sqldb.session.commit()\n elif action == 'set_batch_fields':\n fields = plugin_config['fields']\n update_dict = {}\n source_data = copy(plugin_config)\n source_data['<batch>'] = batch_db\n source_data['<app_config>'] = config\n source_data['<current_user>'] = batch_db._owner\n source_data['<previous_event_data>'] = event_data\n for field_name, field_descr in fields.items():\n if isinstance(field_descr, dict):\n field_val = transform_field_with_schema(source_data, field_descr).db_value()\n else:\n field_val = field_descr\n update_dict[field_name] = field_val\n DocumentBatchDbObject.query.filter_by(id=batch_db.id).update(update_dict)\n sqldb.session.commit()\n elif action == 'touch':\n rendering.touch_batch_plugin.delay(batch_db.id, 
{})\n elif action == 'check_and_fix_osago_payments':\n from services.osago.osago_manager import OsagoBatchManager\n OsagoBatchManager.check_and_fix_osago_payments(batch_db)\n else:\n raise Exception(u\"Invalid action %s for plugin %s\" % (action, PLUGIN_NAME))\n\n return True\n\ndef register(class_loader):\n pass\n" }, { "alpha_fraction": 0.6542288661003113, "alphanum_fraction": 0.6567164063453674, "avg_line_length": 19.100000381469727, "blob_id": "7a1170792298e41fe592ef0fcaa2e5770a02b5e0", "content_id": "132cff792790afb59ce53682af56ee00fbbda943", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 402, "license_type": "no_license", "max_line_length": 61, "num_lines": 20, "path": "/app/deployment_migrations/migration_list/20150812_add_test_table.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom fw.db.sql_base import db as sqldb\n\n\ndef forward(config, logger):\n logger.debug(u\"Add indexes\")\n\n sqldb.session.close()\n sqldb.engine.execute(u\"\"\"CREATE TABLE test_web_requests (\n id SERIAL NOT NULL,\n created TIMESTAMP WITHOUT TIME ZONE NOT NULL,\n data JSONB,\n url VARCHAR NOT NULL,\n PRIMARY KEY (id)\n);\"\"\")\n\n\ndef rollback(config, logger):\n pass\n" }, { "alpha_fraction": 0.7222780585289001, "alphanum_fraction": 0.7244555950164795, "avg_line_length": 31.62295150756836, "blob_id": "a25c718ff6d1da8cb4a9b3ef75cf94ab3038f7ec", "content_id": "e4a74705b50aae6965417b73963b68f72a2d8fca", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5970, "license_type": "no_license", "max_line_length": 131, "num_lines": 183, "path": "/app/app.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import timedelta\nimport json\nimport logging\nfrom logging.handlers import DEFAULT_TCP_LOGGING_PORT, SocketHandler\nimport os\n\nfrom flask import Flask, Response\nfrom flask_compress import Compress\nfrom flask_login import LoginManager\nfrom werkzeug.contrib.fixers import ProxyFix\n\nfrom fw.api import errors\nfrom fw.api.errors import NotAuthorized\nfrom fw.api.sql_session_storage import SQLAlchemySessionInterface\nfrom fw.documents.contexts import ValidatorContext, RenderingContext, ModelCacheContext\nfrom jb_config import JBConfiguration\nfrom template_filters import load_filters, set_template_loader\nfrom common_utils import LazyClassLoader\n\n\nSERVICE_DESCRIPTION = 'JurBureau'\nDEFAULT_CONFIG_PATH = '/etc/jurbureau/config.cfg'\n\napp = Flask(__name__)\n\napp.wsgi_app = ProxyFix(app.wsgi_app)\n\nCompress(app)\n\n\ndef init_logging(app):\n log_file_path = app.config['log_file_path']\n if not os.path.exists(os.path.dirname(log_file_path)):\n raise Exception('Failed to open log file: no such directory %s' % os.path.dirname(log_file_path))\n\n del app.logger.handlers[:]\n # consoleHandler = logging.StreamHandler()\n # consoleHandler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))\n # app.logger.addHandler(consoleHandler)\n\n socketHandler = SocketHandler('localhost', DEFAULT_TCP_LOGGING_PORT)\n socketHandler.setLevel(app.config['LOG_LEVEL'])\n socketHandler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))\n app.logger.addHandler(socketHandler)\n app.logger.setLevel(app.config['LOG_LEVEL'])\n\n\ndef init_login_system(app, config):\n app.session_interface = 
SQLAlchemySessionInterface(config)\n login_manager = LoginManager()\n login_manager.init_app(app)\n\n def load_user(user_id):\n from fw.auth.models import AuthUser\n return AuthUser.query.filter_by(id=user_id).first()\n\n @login_manager.unauthorized_handler\n def unauthorized():\n raise NotAuthorized()\n\n login_manager.user_callback = load_user\n\n\ndef init_configuration():\n config = JBConfiguration(SERVICE_DESCRIPTION, DEFAULT_CONFIG_PATH)\n app.config.update(config.settings)\n app.config.update({\n 'PERMANENT_SESSION_LIFETIME': timedelta(days=60)\n })\n app.production = os.environ.get('JB_PRODUCTION', None) is not None\n\n\ndef init_db():\n import external_tools\n app.external_tools = external_tools\n\n\ndef init_contexts(target_app=None):\n app1 = target_app or app\n app1.validator_context = ValidatorContext()\n app1.rendering_context = RenderingContext()\n app1.model_cache_context = ModelCacheContext()\n\n\ninit_configuration()\ninit_logging(app)\ninit_db()\ninit_contexts()\n\n\ndef init_blueprints(_app):\n from fw.api.views.auth import auth_bp\n from fw.api.views.documents import documents_bp\n from fw.api.views.general import general_bp\n from fw.api.views.object_management import domain_objects_bp\n from fw.api.views.system_views import system_bp\n from fw.api.views.files import files_bp\n\n url_prefix = None if app.config['DEBUG'] else '/api'\n\n _app.register_blueprint(general_bp, url_prefix=url_prefix)\n _app.register_blueprint(auth_bp, url_prefix=url_prefix)\n _app.register_blueprint(documents_bp, url_prefix=url_prefix)\n _app.register_blueprint(domain_objects_bp, url_prefix=url_prefix)\n _app.register_blueprint(files_bp, url_prefix=url_prefix)\n _app.register_blueprint(system_bp, url_prefix=url_prefix)\n\n\ndef init_services(_app):\n from services import ifns, notarius, yurist, partners, pay, llc_reg, ip_reg, osago, test_svc, car_assurance, russian_post\n class_loader = LazyClassLoader\n _app.class_loader = class_loader\n\n url_prefix = None if app.config['DEBUG'] else '/api'\n services = (ifns, notarius, yurist, partners, pay, llc_reg, ip_reg, osago, test_svc, car_assurance, russian_post)\n for service in services:\n service.register(_app, _app.jinja_env, class_loader, url_prefix=url_prefix)\n\n\ndef init_plugins():\n class_loader = LazyClassLoader\n from fw.plugins import emailer_plugin, register, doc_builder_plugin, batch_manager_plugin, car_assurance_plugin, task_scheduler\n plugins = (emailer_plugin, doc_builder_plugin, batch_manager_plugin, car_assurance_plugin, task_scheduler)\n for p in plugins:\n register(p.PLUGIN_NAME, p, class_loader=class_loader)\n\ndef init_sql_db(_app):\n from fw.db.sql_base import db\n db.init_app(_app)\n\n\ndef init_flask_signals():\n from fw.db.sql_base import db as sqldb\n\n def finish_db_transaction(sender, **extra):\n sqldb.session.commit()\n app.model_cache_context.clear()\n\n def rollback(sender, **extra):\n sqldb.session.rollback()\n\n from flask import request_finished, got_request_exception\n request_finished.connect(finish_db_transaction, app)\n got_request_exception.connect(rollback, app)\n\n\nload_filters(app.jinja_env, app.config)\nset_template_loader(app.jinja_env)\n\ninit_blueprints(app)\ninit_services(app)\ninit_sql_db(app)\ninit_plugins()\ninit_flask_signals()\n\n#noinspection PyUnresolvedReferences\ninit_login_system(app, app.config)\napp.secret_key = app.config['secret_key']\nfrom fw.auth.user_manager import UserManager\nUserManager.init(app.config, app.logger)\n\n\[email protected](413)\ndef catcher(error):\n data_json = 
json.dumps(\n {\"error\": {\"code\": errors.FileToLarge.ERROR_CODE, \"message\": errors.FileToLarge.ERROR_MESSAGE}})\n result = Response(data_json, mimetype='application/json', status=400)\n result.headers.add('Access-Control-Allow-Credentials', \"true\")\n result.headers.add('Access-Control-Allow-Origin', \"http://%s\" % app.config['site_domain'])\n return result\n\n\[email protected]_processor\ndef inject_user():\n return dict(js_scripts_suffix=u\".min\" if not app.production else u\"\")\n\n\nif __name__ == '__main__':\n try:\n app.run()\n except KeyboardInterrupt:\n pass\n" }, { "alpha_fraction": 0.6196880340576172, "alphanum_fraction": 0.6207638382911682, "avg_line_length": 33.425926208496094, "blob_id": "ef39abb638fe7448c0ea507717b76ea1dc78006f", "content_id": "0d0f629b07f3f471c96fc8ce80f6c77c22999518", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1859, "license_type": "no_license", "max_line_length": 99, "num_lines": 54, "path": "/app/fw/settings.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport ConfigParser\nimport argparse\nimport logging\nimport os\n\n\nclass Configuration(object):\n LEVEL_NAME_VALUE_DICT = {\n 'DEBUG': logging.DEBUG,\n 'INFO': logging.INFO,\n 'WARN': logging.WARN,\n 'ERROR': logging.ERROR,\n 'CRITICAL': logging.CRITICAL\n }\n\n def __init__(self, service_description, default_config_path):\n kwargs = Configuration.parse_command_line_options(service_description, default_config_path)\n self.settings = {}\n self.settings.update(vars(kwargs))\n config_file_path = self.settings.get('config', '')\n if config_file_path:\n if not os.path.exists(config_file_path):\n raise Exception(\"Can not find config file: %s\" % config_file_path)\n self.config_file = ConfigParser.RawConfigParser()\n self.config_file.read(config_file_path)\n\n def get_from_config(self, full_key_name, default=None):\n section, key = full_key_name.split(':')\n try:\n value = self.config_file.get(section, key)\n except ConfigParser.NoOptionError:\n if default is not None:\n return default\n raise\n return value\n\n def get_int_from_config(self, full_key_name, default=None):\n return int(self.get_from_config(full_key_name, default))\n\n def __getitem__(self, key):\n return self.settings[key]\n\n def __setitem__(self, key, value):\n self.settings[key] = value\n\n @staticmethod\n def parse_command_line_options(service_description, default_config_path):\n parser = argparse.ArgumentParser(description=service_description)\n parser.add_argument('--config', dest='config', action='store', help='path to config file',\n default=default_config_path)\n\n return parser.parse_known_args()[0]\n" }, { "alpha_fraction": 0.6701623797416687, "alphanum_fraction": 0.6786197423934937, "avg_line_length": 40.61971664428711, "blob_id": "e1c9987a94a0c51674139aef086ec9c3cad02897", "content_id": "30b37da02e3dfbfd71d25b7c71ba039e978e7c2f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2956, "license_type": "no_license", "max_line_length": 138, "num_lines": 71, "path": "/jb_tests/test_pack/test_celery_scheduler.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime, timedelta\nimport time\nimport os\nfrom fw.api import errors\nfrom fw.async_tasks.models import CeleryScheduledTask\nfrom fw.async_tasks.scheduler import CeleryScheduler\nfrom fw.db.sql_base import db as 
sqldb\n\nos.environ['CELERY_CONFIG_MODULE'] = 'dev_celeryconfig'\n\nfrom fw.async_tasks.core_tasks import check_scheduled_tasks\nfrom test_pack.base_batch_test import BaseBatchTestCase\nfrom test_pack.test_api import authorized\n\nclass CelerySchedulerTestCase(BaseBatchTestCase):\n\n @authorized()\n def test_post_simple_task(self):\n new_task = CeleryScheduler.post(\"fw.async_tasks.test_task.test_task\", countdown_seconds=1)\n task_obj = CeleryScheduledTask.query.filter_by(id=new_task.id).scalar()\n self.assertIsNotNone(task_obj)\n\n @authorized()\n def test_post_task_with_args_and_kwargs(self):\n new_task = CeleryScheduler.post(\"fw.async_tasks.test_task.test_task\", args=(1, 2), kwargs={'a': 1, 'b': 2}, countdown_seconds=1)\n task_obj = CeleryScheduledTask.query.filter_by(id=new_task.id).scalar()\n self.assertIsNotNone(task_obj)\n self.assertEqual(task_obj.args, [1, 2])\n self.assertEqual(task_obj.kwargs, {'a': 1, 'b': 2})\n\n @authorized()\n def test_post_task_with_eta_in_past(self):\n with self.assertRaises(ValueError):\n CeleryScheduler.post(\"fw.async_tasks.test_task.test_task\", eta=datetime.utcnow() - timedelta(days=1))\n\n @authorized()\n def test_replace_task(self):\n new_task = CeleryScheduler.post(\"fw.async_tasks.test_task.test_task\", countdown_seconds=1, task_id=\"abc\")\n task_obj = CeleryScheduledTask.query.filter_by(id=new_task.id).scalar()\n self.assertIsNotNone(task_obj)\n self.assertEqual(CeleryScheduledTask.query.count(), 1)\n\n with self.assertRaises(errors.DuplicateIdError):\n CeleryScheduler.post(\"fw.async_tasks.test_task.test_task\", countdown_seconds=1, task_id=\"abc\")\n\n new_task = CeleryScheduler.post(\"fw.async_tasks.test_task.test_task\", countdown_seconds=1, task_id=\"abc\", force_replace_task=True)\n task_obj = CeleryScheduledTask.query.filter_by(id=new_task.id).scalar()\n self.assertIsNotNone(task_obj)\n self.assertEqual(CeleryScheduledTask.query.count(), 1)\n\n @authorized()\n def test_post_invalid_task(self):\n with self.assertRaises(ValueError):\n CeleryScheduler.post(\"test_task1\", countdown_seconds=1)\n\n @authorized()\n def test_run_simple_task(self):\n new_task = CeleryScheduledTask(\n task_name=\"fw.async_tasks.test_task.test_task\",\n eta=datetime.utcnow() + timedelta(seconds=1),\n kwargs={'a': 5, 'b': 4}\n )\n sqldb.session.add(new_task)\n sqldb.session.commit()\n\n self.app.logger.info('1')\n check_scheduled_tasks.delay()\n time.sleep(1)\n self.app.logger.info('2')\n check_scheduled_tasks.delay()\n\n" }, { "alpha_fraction": 0.6189638376235962, "alphanum_fraction": 0.6202139258384705, "avg_line_length": 41.855159759521484, "blob_id": "67733e30490bf190ef825e8122ec5a11d765ecf2", "content_id": "02a686fb746b135601af60038db5959c91d041ff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 22337, "license_type": "no_license", "max_line_length": 122, "num_lines": 504, "path": "/app/services/llc_reg/llc_reg_manager.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime, timedelta\nimport pytz\nfrom fw.api import errors\nfrom fw.api.base_handlers import error_tree_to_list\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.address_enums import RFRegionsEnum\nfrom fw.documents.batch_manager import BatchManager\nfrom fw.documents.db_fields import DocumentBatchDbObject, BatchDocumentDbObject\nfrom fw.documents.doc_requisites_storage import DocRequisitiesStorage\nfrom fw.documents.enums import DocumentTypeEnum, 
BatchStatusEnum, DocumentBatchTypeEnum, UserDocumentStatus\nfrom fw.documents.fields.doc_fields import DocumentBatch, UserDocument\nfrom fw.documents.schema.schema_transform import transform_with_schema\nfrom fw.storage.file_storage import FileStorage\nfrom services.pay.models import PayInfoObject, PurchaseServiceType\nfrom services.pay.subs_manager import SubscriptionManager\n\n\nclass LlcRegBatchManager(BatchManager):\n    FIRST_STAGE_DOCS = {\n        DocumentTypeEnum.DT_P11001,\n        DocumentTypeEnum.DT_ARTICLES,\n        DocumentTypeEnum.DT_ACT,\n        DocumentTypeEnum.DT_USN,\n        DocumentTypeEnum.DT_DECISION,\n        DocumentTypeEnum.DT_PROTOCOL,\n        DocumentTypeEnum.DT_ESHN,\n        DocumentTypeEnum.DT_CONTRACT,\n        DocumentTypeEnum.DT_REGISTRATION_FEE_INVOICE,\n        DocumentTypeEnum.DT_DOVERENNOST,\n        DocumentTypeEnum.DT_DOVERENNOST_OBTAIN,\n        DocumentTypeEnum.DT_SOGLASIE_SOBSTVENNIKOV,\n        DocumentTypeEnum.DT_GARANT_LETTER_ARENDA,\n        DocumentTypeEnum.DT_GARANT_LETTER_SUBARENDA\n    }\n\n    THIRD_STAGE_DOCS = {\n        DocumentTypeEnum.DT_GENERAL_MANAGER_CONTRACT,\n        DocumentTypeEnum.DT_GENERAL_MANAGER_ORDER,\n        DocumentTypeEnum.DT_ACCOUNTANT_CONTRACT,\n        DocumentTypeEnum.DT_ACCOUNTANT_ORDER,\n        DocumentTypeEnum.DT_FSS_CLAIM,\n        DocumentTypeEnum.DT_PFR_CLAIM,\n        DocumentTypeEnum.DT_ROSSTAT_CLAIM,\n        DocumentTypeEnum.DT_FOUNDERS_LIST,\n        DocumentTypeEnum.DT_COMPANY_DETAILS,\n        DocumentTypeEnum.DT_ACCOUNTANT_IMPOSITION_ORDER\n    }\n\n    DOC_TITLES = {\n        DocumentTypeEnum.DT_P11001: u\"Заявление о государственной регистрации (форма Р11001)\",\n        DocumentTypeEnum.DT_ARTICLES: u\"Устав\",\n        DocumentTypeEnum.DT_PROTOCOL: u\"Протокол собрания учредителей\",\n        DocumentTypeEnum.DT_DECISION: u\"Решение единственного учредителя\",\n        DocumentTypeEnum.DT_ACT: u\"Акт оценки имущества\",\n        DocumentTypeEnum.DT_USN: u\"Заявление о переходе на УСН\",\n        DocumentTypeEnum.DT_ESHN: u\"Заявление о переходе на ЕСХН\",\n        DocumentTypeEnum.DT_CONTRACT: u\"Договор об учреждении юридического лица\",\n        DocumentTypeEnum.DT_REGISTRATION_FEE_INVOICE: u\"Квитанция на уплату госпошлины\",\n        DocumentTypeEnum.DT_DOVERENNOST: u\"Доверенность на подачу документов\",\n        DocumentTypeEnum.DT_DOVERENNOST_OBTAIN: u\"Доверенность на получение документов\",\n        DocumentTypeEnum.DT_SOGLASIE_SOBSTVENNIKOV: u\"Согласие других собственников жилья\",\n        DocumentTypeEnum.DT_GARANT_LETTER_ARENDA: u\"Образец гарантийного письма (аренда от собственника)\",\n        DocumentTypeEnum.DT_GARANT_LETTER_SUBARENDA: u\"Образец гарантийного письма (помещение в субаренду)\",\n\n        DocumentTypeEnum.DT_GENERAL_MANAGER_CONTRACT: u\"Трудовой договор с руководителем\",\n        DocumentTypeEnum.DT_GENERAL_MANAGER_ORDER: u\"Приказ о вступлении в должность\",\n        DocumentTypeEnum.DT_ACCOUNTANT_CONTRACT: u\"Трудовой договор с бухгалтером\",\n        DocumentTypeEnum.DT_ACCOUNTANT_ORDER: u\"Приказ о приеме на работу бухгалтера\",\n        DocumentTypeEnum.DT_FSS_CLAIM: u\"заявление в ФСС\",\n        DocumentTypeEnum.DT_PFR_CLAIM: u\"заявление в ПФР\",\n        DocumentTypeEnum.DT_ROSSTAT_CLAIM: u\"заявление в Росстат\",\n        DocumentTypeEnum.DT_FOUNDERS_LIST: u\"Список участников\",\n        DocumentTypeEnum.DT_COMPANY_DETAILS: u\"Реквизиты компании\",\n        DocumentTypeEnum.DT_ACCOUNTANT_IMPOSITION_ORDER: u\"Приказ о возложении обязанностей бухгалтера на директора\",\n\n        DocumentTypeEnum.DT_DOV_OLD: u\"Доверенность на подачу-получение документов\"\n    }\n\n    def __init__(self):\n        pass\n\n    @staticmethod\n    def get_batch_reg_address(batch_id):\n        doc = BatchDocumentDbObject.query.filter_by(batch_id=batch_id, document_type=DocumentTypeEnum.DT_ARTICLES).first()\n        if doc and doc.data:\n            return 
doc.data.get('address', None)\n\n def merge_docs(self, batch_status, current_docs_db_models, new_docs_db_models, changed_field_names):\n merged_docs = []\n unused_docs = []\n\n if batch_status != BatchStatusEnum.BS_FINALISED:\n return new_docs_db_models, current_docs_db_models\n\n doc_fields_map = {}\n for doc in current_docs_db_models:\n doc_type = doc.document_type\n ds = DocRequisitiesStorage.get_schema(doc_type)\n doc_fields_map[doc_type] = set([f['name'] for f in ds['fields']]) #set(doc.data.keys())\n\n modified_doc_types_set = set()\n for field_name in changed_field_names:\n for cur_doc_type in doc_fields_map:\n if field_name in doc_fields_map[cur_doc_type]:\n modified_doc_types_set.add(cur_doc_type)\n\n added_doc_types = set()\n for doc in current_docs_db_models:\n doc_type = doc.document_type\n if doc_type in LlcRegBatchManager.FIRST_STAGE_DOCS:\n merged_docs.append(doc)\n added_doc_types.add(doc_type)\n elif doc_type not in LlcRegBatchManager.FIRST_STAGE_DOCS and doc_type not in modified_doc_types_set:\n merged_docs.append(doc)\n added_doc_types.add(doc_type)\n\n for doc in new_docs_db_models:\n doc_type = doc.document_type\n\n if doc_type not in LlcRegBatchManager.FIRST_STAGE_DOCS and \\\n doc.status != UserDocumentStatus.DS_RENDERED and doc_type not in added_doc_types:\n merged_docs.append(doc)\n\n for doc in current_docs_db_models:\n if doc.document_type not in added_doc_types:\n unused_docs.append(doc)\n\n return merged_docs, unused_docs\n\n def update_batch(self, batch_id, new_batch, current_user_id, config, logger):\n current_batch_db_object = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner_id=current_user_id,\n deleted=False).first()\n if not current_batch_db_object:\n raise errors.BatchNotFound()\n\n batch_status = current_batch_db_object.status\n if batch_status not in (BatchStatusEnum.BS_NEW, BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_FINALISED):\n logger.warn(u\"Can't update batch %s in status %s\" % (unicode(batch_id), unicode(batch_status)))\n raise errors.DocumentBatchUpdateError()\n\n try:\n current_batch = DocumentBatch.db_obj_to_field(current_batch_db_object)\n except Exception:\n logger.fatal(u\"Failed to validate batch from DB!\")\n raise\n\n current_fields = current_batch.data.value\n assert isinstance(current_fields, dict)\n\n # STEP 1: make new data and metadata\n # and collect changed fields names\n new_batch_db = new_batch.get_db_object()\n merged_fields, changed_field_names = self._merge_raw_fields(current_batch_db_object.data, new_batch_db.data)\n\n current_batch_db_object._metadata = new_batch_db._metadata\n current_batch_db_object.data = merged_fields\n sqldb.session.commit()\n\n if current_batch_db_object.status == BatchStatusEnum.BS_FINALISED:\n if 'full_name' in changed_field_names and current_batch_db_object.paid:\n raise errors.PaidBatchUpdateError()\n\n # STEP 2: make document set from data and schema\n try:\n new_field_set, new_docs, changed_field_names = self.make_docs_for_new_data(\n current_batch.data.value,\n new_batch.data.value,\n current_batch_db_object,\n BatchManager.get_batch_document_fields(current_batch_db_object),\n logger=logger\n )\n except Exception, ex:\n logger.exception(u\"Failed to update batch with new values\")\n current_batch_db_object.error_info = {\"error\": u\"unknown error (%s)\" % str(ex)}\n sqldb.session.commit()\n raise\n\n current_docs_db_models = [doc for doc in current_batch_db_object._documents]\n new_docs_db_models = [BatchDocumentDbObject(**new_doc.get_db_object_data()) for new_doc in new_docs]\n\n 
merged_docs, unused_db_docs = self.merge_docs(current_batch_db_object.status,\n current_docs_db_models, new_docs_db_models,\n changed_field_names)\n for doc in merged_docs:\n doc.batch = current_batch_db_object\n doc._owner_id = current_user_id\n\n for doc in unused_db_docs:\n BatchDocumentDbObject.query.filter_by(id=doc.id).delete()\n\n sqldb.session.commit()\n\n # STEP 3: combine old and new documents\n error_info = None\n try:\n current_batch.update_db_obj(current_batch_db_object, current_batch.get_db_object_data(), False)\n current_batch_db_object._metadata = new_batch_db._metadata\n current_batch_db_object.data = merged_fields\n sqldb.session.commit()\n\n # STEP 5: make result fields\n result_fields = self.make_result_fields(current_batch, new_field_set)\n\n result_fields[\"general_manager_caption_genitive\"] = u\"генерального директора\"\n if current_batch_db_object.status == BatchStatusEnum.BS_FINALISED:\n if 'inn' in changed_field_names and 'fss_number' in result_fields:\n del result_fields['fss_number']\n\n current_batch_db_object.result_fields = result_fields\n current_batch_db_object.error_info = None\n sqldb.session.commit()\n except Exception, ex:\n logger.exception(u\"Failed to update batch with new values\")\n current_batch_db_object.error_info = {\"error\": u\"unknown error\"}\n sqldb.session.commit()\n raise errors.DocumentBatchUpdateError()\n\n if current_batch_db_object.status in (BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_FINALISED):\n error_ext = self.get_batch_errors(current_batch_db_object, logger)\n if error_ext:\n error_info = {'error_ext': error_ext}\n current_batch_db_object.error_info = error_info\n sqldb.session.commit()\n\n current_batch = DocumentBatch.db_obj_to_field(current_batch_db_object)\n struct = current_batch.get_api_structure()\n\n if error_info:\n struct['error_info'] = error_info\n elif 'error_info' in struct:\n del struct['error_info']\n\n return {'result': struct}\n\n def make_docs_from_batch_fields(self, field_set, current_batch_db_object=None):\n batch_descriptor = DocRequisitiesStorage.get_batch_descriptor(DocumentBatchTypeEnum.DBT_NEW_LLC)\n docs = []\n fields = field_set\n\n doc_in_batch = lambda doc_type_, batch_: any([doc_.document_type == doc_type_ for doc_ in batch_._documents])\n\n batch_doc_types = batch_descriptor.get('doc_types', [])\n for doc_type in batch_doc_types:\n if doc_type in self.THIRD_STAGE_DOCS and not doc_in_batch(doc_type, current_batch_db_object):\n continue\n\n result_fields = current_batch_db_object.result_fields or {}\n ifns_reg_status = result_fields.get('ifns_reg_info', {}).get('status', '')\n if ifns_reg_status == 'registered' and doc_type in self.FIRST_STAGE_DOCS:\n continue\n\n doc = self.get_doc_schema(doc_type)\n\n doc_data = doc\n new_doc = {\n \"document_type\": doc_data['doc_name'],\n \"creation_date\": datetime.utcnow(),\n \"status\": UserDocumentStatus.DS_NEW,\n \"rendered_docs\": []\n }\n data = transform_with_schema(fields, doc_data)\n\n if data is not None:\n user_doc = UserDocument.parse_raw_value(new_doc, api_data=False)\n user_doc.data.value = data\n user_doc.data.initialized = True\n docs.append(user_doc)\n\n return docs\n\n def finalize_batch(self, config, logger, batch):\n if batch.status not in (BatchStatusEnum.BS_NEW, BatchStatusEnum.BS_EDITED):\n return False\n\n batch_id = batch.id\n\n docs = batch._documents or []\n error_info = batch.error_info or {}\n exc_list = []\n\n types_of_invalid_docs = set()\n for doc in docs:\n try:\n user_doc = UserDocument.db_obj_to_field(doc)\n 
user_doc.validate(strict=True)\n            except Exception, ex:\n                logger.exception(u\"Failed to validate document %s\" % doc.document_type)\n                types_of_invalid_docs.add(doc.document_type)\n                exc_list.append(ex)\n\n        allowed_invalid_docs = (DocumentTypeEnum.DT_GENERAL_MANAGER_CONTRACT,)  # todo: ???\n        if exc_list:\n            next_exc = errors.DocumentBatchFinalizationError()\n            for ex in exc_list:\n                if getattr(ex, 'ext_data', None):\n                    next_exc.ext_data.extend(ex.ext_data)\n            if next_exc.ext_data:\n                error_list = error_tree_to_list(next_exc.ext_data)\n                for error in error_list:\n                    if 'field' in error and error['field'].startswith('data.'):\n                        error['field'] = error['field'][5:]\n                error_info['error_ext'] = error_list\n\n            batch.error_info = error_info\n\n            if not all([doc_type in allowed_invalid_docs for doc_type in types_of_invalid_docs]):\n                batch.status = BatchStatusEnum.BS_EDITED\n                sqldb.session.commit()\n                return {\"result\": False}\n\n        for doc in docs:\n            if doc.document_type in types_of_invalid_docs:\n                BatchDocumentDbObject.query.filter_by(id=doc.id).delete()\n            else:\n                batch.error_info = None\n\n        batch.status = BatchStatusEnum.BS_BEING_FINALISED\n        sqldb.session.commit()\n\n        try:\n            DocumentBatch.db_obj_to_field(batch)\n        except Exception:\n            logger.exception(u\"Failed to finalize: rolling back\")\n\n            batch.status = BatchStatusEnum.BS_EDITED\n            sqldb.session.commit()\n            return False\n\n        last_change_dt = datetime.utcnow()\n        batch.last_change_dt = last_change_dt\n\n        if not batch.paid:\n            subs = SubscriptionManager.get_user_active_subscription(batch._owner.id)\n            if subs:\n                batch.paid = True\n                pay_info = PayInfoObject(\n                    user=batch._owner,\n                    batch=batch,\n                    pay_record_id=subs.pay_record_id,\n                    payment_provider=subs.payment_provider,\n                    service_type=PurchaseServiceType.LLC_AUTO_PURCHASE\n                )\n                sqldb.session.add(pay_info)\n\n        sqldb.session.commit()\n        try:\n            logger.debug(u\"Adding rendering task for batch %s\" % unicode(batch.id))\n            from fw.async_tasks import rendering\n\n            async_result = rendering.render_batch.delay(str(batch.id))\n\n            if not async_result.ready():\n                batch.current_task_id = unicode(async_result.id)\n                batch.batch_rendering_start = datetime.now()\n                logger.debug(u\"Task id: %s\" % unicode(async_result.id))\n                sqldb.session.commit()\n\n            if not batch.paid:\n                self.check_and_send_not_paid_user_notification(batch_id, config, logger)\n        except Exception:\n            logger.exception(u\"Failed to queue task\")\n            batch.status = BatchStatusEnum.BS_EDITED\n            sqldb.session.commit()\n\n        return True\n\n    @staticmethod\n    def check_and_send_not_paid_user_notification(batch_id, config, logger):\n        last_change_dt = datetime.utcnow()\n        eta = datetime.utcnow()\n        eta = eta.replace(tzinfo=pytz.utc)\n        timezone_name = BatchManager.get_batch_timezone(batch_id) or \"Europe/Moscow\"\n        eta = datetime.astimezone(eta, pytz.timezone(timezone_name))\n        eta += timedelta(seconds=config['NOT_PAID_BATCH_NOTIFY_TIMEOUT_SECONDS'])\n\n        try:\n            not_paid_batch_notify_desired_time = config['NOT_PAID_BATCH_NOTIFY_DESIRED_TIME']\n            if not_paid_batch_notify_desired_time:\n                desired_time = datetime.strptime(not_paid_batch_notify_desired_time, \"%H:%M\")\n                dt = eta.replace(hour=desired_time.hour, minute=desired_time.minute)\n                if dt < eta:\n                    dt += timedelta(days=1)\n                eta = dt\n            eta = eta.astimezone(pytz.utc).replace(tzinfo=None)\n        except Exception:\n            logger.exception(u\"Failed to calculate correct send time\")\n\n        from fw.async_tasks import not_paid_check_send\n\n        not_paid_check_send.not_paid_check_and_send.apply_async(kwargs={\n            'batch_id': 
str(batch_id),\n 'last_change_dt_str': last_change_dt.strftime(\"%Y-%m-%dT%H:%M:%S\")\n }, eta=eta)\n\n @staticmethod\n def get_founder_applicant(batch_db, logger=None):\n doc = BatchDocumentDbObject.query.filter_by(batch=batch_db, document_type=DocumentTypeEnum.DT_P11001).first()\n if doc and doc.data and 'founders' in doc.data:\n founders = doc.data['founders']\n for founder in founders:\n if 'documents_recipient_type' in founder:\n return founder\n logger.error(u\"Failed to get founder applicant\")\n\n @staticmethod\n def get_reg_responsible_object(batch_db, logger=None):\n data = batch_db.data\n\n try:\n if 'registration_way' not in data:\n return\n\n if data['registration_way'] == 'responsible_person':\n return data['reg_responsible_person']\n else:\n return data.get('reg_responsible_founder', None)\n except Exception:\n if logger:\n logger.exception(u\"Failed to get registration responsible object\")\n\n def get_title(self, doc_type):\n return LlcRegBatchManager.DOC_TITLES.get(doc_type, '')\n\n def get_batch_caption(self, batch):\n if not batch:\n return u\"\"\n\n company_name = (batch.data or {}).get('short_name')\n return u\"Создание ООО «%s»\" % company_name if company_name else u\"Создание ООО\"\n\n def get_last_modified_batch_caption(self, batch_db):\n if not batch_db:\n return u\"\"\n\n company_name = (batch_db.data or {}).get('short_name')\n return company_name if company_name else u\"\"\n\n def get_stage(self, batch_db):\n company_registered = False\n if batch_db.result_fields:\n result_fields = batch_db.result_fields\n if 'ifns_reg_info' in result_fields:\n ifns_reg_info = result_fields['ifns_reg_info']\n if 'status' in ifns_reg_info and ifns_reg_info['status'] == 'registered':\n company_registered = True\n return 'preparation' if batch_db.status != BatchStatusEnum.BS_FINALISED else \\\n ('submission' if not company_registered else\n 'running')\n\n def definalize_batch(self, config, logger, batch, force):\n if batch.status != BatchStatusEnum.BS_FINALISED:\n return False\n\n result_fields = batch.result_fields or {}\n ifns_reg_status = result_fields.get('ifns_reg_info', {}).get('status', '')\n if ifns_reg_status == 'registered':\n raise errors.DocumentBatchDefinalizationError()\n\n for doc in BatchDocumentDbObject.query.filter_by(batch=batch):\n if doc.file:\n file_obj = doc.file\n doc.file = None\n FileStorage.remove_file(file_obj.id, config)\n\n batch.status = BatchStatusEnum.BS_EDITED\n batch.ifns_reg_info = None\n batch.last_change_dt = datetime.utcnow()\n sqldb.session.commit()\n\n from services.ifns import ifns_manager\n from services.notarius import notarius_manager\n from services.yurist import yurist_manager\n\n yurist_manager.cancel_check(batch, config, logger)\n notarius_manager.discard_booking(batch, config, logger)\n ifns_manager.discard_booking(batch, logger)\n\n return True\n\n def create_batch(self, owner):\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n data={},\n _owner=owner,\n status=BatchStatusEnum.BS_NEW,\n paid=False\n )\n\n return new_batch\n\n @staticmethod\n def get_batch_timezone(batch_id):\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id, deleted=False).scalar()\n if not batch or not batch.result_fields:\n return\n\n registration_address = batch.result_fields.get('registration_address', None)\n if not registration_address or 'region' not in registration_address:\n return\n\n region = registration_address['region']\n return RFRegionsEnum.get_time_zone(region)\n" }, { "alpha_fraction": 
0.5599194169044495, "alphanum_fraction": 0.6273917555809021, "avg_line_length": 30.046875, "blob_id": "cd9033c817d0429d9c9e285fa698906af2dac441", "content_id": "bf87e46acc1009fab77793844faa9773d4541ae4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1986, "license_type": "no_license", "max_line_length": 127, "num_lines": 64, "path": "/app/fw/db/working_calendar.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport datetime\n# TODO: This should be moved to DB and edited in admin console\n# regular holidays are holidays that are happening every year\nREGULAR_HOLIDAYS = {101, 102, 107, 223, 308, 501, 502, 509, 612, 1104}\n\nIRREGULAR_HOLIDAYS = {20150102, 20150103, 20150104, 20150105, 20150106, 20150108, 20150109, 20150309, 20150504,\n 20150511}\n\nIRREGULAR_WORKING_DAYS = set([\n\n])\n\n\ndef is_working_day(day):\n \"\"\"\n :param day: instance of class `date` or `datetime`\n :return: True if day is a working day (in Russia), False otherwise\n \"\"\"\n if isinstance(day, datetime.datetime):\n day = day.date()\n if day.isoweekday() < 6:\n # normally: working day\n if day.month * 100 + day.day in REGULAR_HOLIDAYS or day.year * 10000 + day.month * 100 + day.day in IRREGULAR_HOLIDAYS:\n return False\n return True\n else:\n # normally: day off\n if day.year * 10000 + day.month * 100 + day.day in IRREGULAR_WORKING_DAYS:\n return True\n else:\n return False\n\n\ndef get_next_working_day(day):\n \"\"\"\n Get next to given working day\n :param day: instance of class date or datetime\n :return: instance of class datetime\n \"\"\"\n # Simply try to find the next working day\n if isinstance(day, datetime.datetime):\n day = day.date()\n while True:\n day = day + datetime.timedelta(1)\n if is_working_day(day):\n break\n return datetime.datetime(day.year, day.month, day.day)\n\n\ndef get_prev_working_day(day):\n \"\"\"\n Get previous to given working day\n :param day: instance of class date or datetime\n :return: instance of class datetime\n \"\"\"\n # Simply try to find previous working date\n if isinstance(day, datetime.datetime):\n day = day.date()\n while True:\n day = day - datetime.timedelta(1)\n if is_working_day(day):\n break\n return datetime.datetime(day.year, day.month, day.day)" }, { "alpha_fraction": 0.588810384273529, "alphanum_fraction": 0.5947149395942688, "avg_line_length": 30.208459854125977, "blob_id": "df580c5a434af8d58b38cc67c21f304853b4b4f3", "content_id": "a00c75d037d1740726e12bd902fae889f6d3e948", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10414, "license_type": "no_license", "max_line_length": 113, "num_lines": 331, "path": "/app/template_filters.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport datetime\nfrom decimal import Decimal\nimport jinja2\nimport os\nimport pyphen\nimport pytils\nfrom pytz import timezone\nfrom fw.documents.fields.simple_doc_fields import DocMultiDeclensionField, DocField\nfrom fw.documents.morpher_tools import morph_with_morpher\nfrom common_utils import num_word, word_from_num, int_to_ifns, LazyClassLoader\nfrom jinja2.utils import Markup\n\nmsk_timezone = timezone('Europe/Moscow')\n\n\ndef static_url(domain):\n url = \"http://\" + domain + \"/static%s\"\n\n def get_static_url(value):\n return url % value\n\n return get_static_url\n\n\ndef dt_from_iso(value):\n # transform datetime value from iso format to human 
readable\n if not value:\n return \"\"\n dt = datetime.datetime.strptime(value, \"%Y-%m-%dT%H:%M:%S.%f\")\n return dt.strftime(\"%Y-%m-%d %H:%M\")\n\n\ndef msk_dt_from_iso(value):\n # transform datetime value from iso format to human readable\n if not value:\n return \"\"\n dt = datetime.datetime.strptime(value, \"%Y-%m-%dT%H:%M:%S.%f\")\n return msk_timezone.fromutc(dt).strftime(\"%Y-%m-%d %H:%M\")\n\n\ndef msk_dt_from_iso_p30(value):\n # transform datetime value from iso format to human readable\n if not value:\n return \"\"\n dt = datetime.datetime.strptime(value, \"%Y-%m-%dT%H:%M:%S.%f\") + datetime.timedelta(30)\n return msk_timezone.fromutc(dt).strftime(\"%Y-%m-%d\")\n\n\ndef js_dt_from_iso(value):\n if not value:\n return \"\"\n dt = datetime.datetime.strptime(value, \"%Y-%m-%dT%H:%M:%S.%f\")\n return dt.strftime(\"new Date(Date.UTC(%Y, %m, %d, %H, %M, %S, %f))\")\n\n\ndef countdown_to_date(value):\n if not value:\n return \"\"\n dt = datetime.datetime.strptime(value, \"%Y-%m-%dT%H:%M:%S.%f\")\n now = datetime.datetime.utcnow()\n td = dt - now\n return \"+%d\" % (td.total_seconds())\n\n\ndef num_to_text(value, gender='m', padezh='im'):\n return num_word(value, gender, padezh)\n\n\nDECL_PYMORPHY = {\n DocMultiDeclensionField.DF_NOMINATIVE: 'nomn',\n DocMultiDeclensionField.DF_GENITIVE: 'gent',\n DocMultiDeclensionField.DF_DATIVUS: 'datv',\n DocMultiDeclensionField.DF_ACCUSATIVUS: 'accs',\n DocMultiDeclensionField.DF_INSTRUMENTALIS: 'ablt',\n DocMultiDeclensionField.DF_PRAEPOSITIONALIS: 'loct',\n}\n\n\ndef padezh_and_number(word, padezh, number):\n return word_from_num(word, number, padezh)\n\n\ndef declension_fio(value, case='im'):\n return declension(value, case)\n\n\ndef declension(value, case=DocMultiDeclensionField.DF_NOMINATIVE):\n if isinstance(value, DocMultiDeclensionField):\n value = value.declension('nom')\n if not isinstance(value, basestring):\n if hasattr(value, 'initialized') and not getattr(value, 'initialized', False):\n return u\"\"\n value = unicode(value)\n if case not in DECL_PYMORPHY:\n return value\n\n new_value = morph_with_morpher(value)\n if not new_value:\n return value\n\n result = new_value.get(case, value)\n if not result:\n return value\n return result if isinstance(result, unicode) else result.decode('utf-8')\n\n\ndef shorten_fio(full_name):\n parts = filter(lambda x: len(x) > 0, full_name.strip().split(u' '))\n if len(parts) < 2:\n return full_name\n if len(parts) == 2:\n return parts[0] + u' ' + parts[1][0] + u'.'\n if len(parts) == 3:\n return parts[0] + u' ' + parts[1][0] + u'. ' + parts[2][0] + u'.'\n return parts[0] + u' ' + parts[1][0] + u'. ' + u'. 
'.join(parts[2:]) + u'.'\n\n\nTEX_SPEC_SYMBOL_MAP = (\n (u'\\\\', u'\\\\textbackslash '),\n (u'<', u'\\\\textless '),\n (u'>', u'\\\\textgreater '),\n (u'%', u'\\\\%'),\n (u'$', u'\\\\$'),\n (u'{', u'\\\\{'),\n (u'}', u'\\\\}'),\n (u'_', u'\\\\_'),\n (u'¶', u'\\\\P '),\n (u'‡', u'\\\\ddag '),\n (u'|', u'\\\\textbar '),\n (u'–', u'\\\\textendash '),\n (u'—', u'\\\\textemdash '),\n (u'™', u'\\\\texttrademark '),\n (u'£', u'\\\\pounds '),\n (u'#', u'\\\\#'),\n (u'&', u'\\\\&'),\n (u'§', u'\\\\S'),\n (u'®', u'\\\\textregistered '),\n (u'©', u'\\\\copyright '),\n (u'¿', u'\\\\textquestiondown '),\n (u'«', u'<<'),\n (u'»', u'>>'),\n (u'\"', u\"''\"),\n (u'^', u\"\\^{}\"),\n)\n\n\ndef texify(val):\n if not val:\n return u\"\"\n\n if isinstance(val, DocField):\n val = val.api_value() if val.initialized else u\"\"\n if not isinstance(val, unicode) and isinstance(val, basestring):\n val = val.decode('utf-8')\n val = unicode(val)\n if val.count('\"') == 2:\n val = val.replace('\"', \"``\", 1).replace('\"', \"''\")\n for from_symbol, to_symbol in TEX_SPEC_SYMBOL_MAP:\n val = val.replace(from_symbol, to_symbol)\n while u'<<<<' in val:\n val = val.replace(u'<<<<', u'<<\\empt<<')\n while u'<<\"' in val:\n val = val.replace(u'<<\"', u'<<\\empt\"')\n while u'\">>' in val:\n val = val.replace(u'\">>', u'\"\\empt>>')\n return val\n\n\ndef skolki(val):\n if val == 1:\n return u\"одно\"\n if val == 2:\n return u\"двух\"\n if val == 3:\n return u\"трёх\"\n if val == 4:\n return u\"четырёх\"\n if val == 5:\n return u\"пяти\"\n if val == 6:\n return u\"шести\"\n if val == 7:\n return u\"семи\"\n if val == 8:\n return u\"восьми\"\n if val == 9:\n return u\"девяти\"\n if val == 10:\n return u\"десяти\"\n return u\"\"\n\n\ndef strftime(val, format):\n return val.strftime(format)\n\n\ndef select_byattr(target_list, attr_name, attr_val):\n if not isinstance(attr_val, (tuple, list)):\n attr_val = [attr_val]\n return filter(lambda x: getattr(x, attr_name) in attr_val, target_list)\n\n\ndef utm_args(value, link_name, user_id=None):\n from flask import current_app\n # if current_app.config['DEBUG'] or current_app.config['STAGING']:\n # return value\n if value is None:\n return value\n value = unicode(value)\n last_symbol = value[-1] if value else ''\n suffix = u\"\"\n if '#' in value:\n try:\n value, suffix = value.split('#')\n suffix = \"#\" + suffix if suffix else \"\"\n except Exception:\n return value\n if user_id:\n return Markup(\"%s%s%s\" % (\n value,\n '&' if last_symbol not in '&?' else '',\n \"utm_source=%s&utm_medium=email&utm_term=%s%s\" % (link_name, user_id, suffix)\n ))\n\n return Markup(\"%s%s%s\" % (\n value, '&' if last_symbol not in '&?' else '', \"utm_source=%s&utm_medium=email%s\" % (link_name, suffix)))\n\ndef number_as_currency_text(value, case = \"im\"):\n from common_utils import word_from_num, num_word, chunks\n if isinstance(value, dict):\n decival = Decimal(value['value'])\n elif isinstance(value, DocField):\n decival = value.db_value()\n else:\n decival = Decimal(value)\n is_minus = decival < 0\n if is_minus:\n decival = -decival\n decival_str = str(decival)\n rubles = int(decival_str.split('.')[0] if '.' in decival_str else decival_str)\n rubles_splited = u' '.join([_ for _ in chunks(unicode(rubles)[::-1], 3)])[::-1]\n rubles_text = num_word(rubles, padezh=case)\n cur_word_maj = u\"рубль\"\n cur_word_min = u\"копейка\"\n\n min_str = decival_str.split('.')[1] if '.' 
in decival_str else '0'\n if len(min_str) < 2:\n min_str += u\"0\"\n currency_minor = int(min_str)\n\n rubles_word = word_from_num(cur_word_maj, rubles, padezh=case)\n copeek_word = word_from_num(cur_word_min, currency_minor, padezh=case)\n copeek_num = unicode(currency_minor)\n if len(copeek_num) < 2:\n copeek_num = u\"0\" + copeek_num\n if is_minus:\n return u\"минус %s (%s) %s %s %s\" % (rubles_splited, rubles_text, rubles_word, copeek_num, copeek_word)\n return u\"%s (%s) %s %s %s\" % (rubles_splited, rubles_text, rubles_word, copeek_num, copeek_word)\n\ndef tex_hyphenize(val):\n if not val:\n return u\"\"\n if isinstance(val, DocField):\n val = val.db_value()\n\n dic = pyphen.Pyphen(lang='ru_RU')\n\n return u\" \".join([dic.inserted(word, hyphen=u'\\\\-') for word in val.split(u\" \")])\n\ndef rus_full_date(val):\n return pytils.dt.ru_strftime(u\"%d\" + u\" %B %Y\", inflected=True, date=val) if val else u\"\"\n\ndef make_auth_url(val, user_uuid):\n from fw.auth.user_manager import UserManager, AuthUser\n user = AuthUser.query.filter_by(uuid=user_uuid).scalar()\n if not user:\n return val # fallback\n return UserManager.make_auth_url(val, user)\n\ndef md5_filter(val):\n import hashlib\n m = hashlib.md5()\n m.update(unicode(val))\n return m.hexdigest()\n\ndef enum_to_name(value, cls_name):\n try:\n if isinstance(value, DocField):\n return unicode(value)\n cls_loader = LazyClassLoader(cls_name)\n cls = cls_loader.load()\n val = cls.get_name(value)\n return val\n except:\n return u\"\"\n\ndef timeshift(value, days, seconds=0):\n return value + datetime.timedelta(days=days, seconds=seconds)\n\ndef load_filters(env, config):\n env.filters['declension_fio'] = declension_fio\n env.filters['declension'] = declension\n env.filters['utc_datetime'] = dt_from_iso\n env.filters['answer_due_date'] = msk_dt_from_iso_p30\n env.filters['js_datetime'] = js_dt_from_iso\n env.filters['countdown_to'] = countdown_to_date\n env.filters['num_to_text'] = num_to_text\n env.filters['padezh_and_number'] = padezh_and_number\n env.filters['shorten_fio'] = shorten_fio\n env.filters['texify'] = texify\n env.filters['int_to_ifns'] = int_to_ifns\n env.filters['skolki'] = skolki\n env.filters['strftime'] = strftime\n env.filters['byattr'] = select_byattr\n env.filters['utm_args'] = utm_args\n env.filters['capitalize_true'] = lambda x: \"\" if not unicode(x) else (unicode(x)[0].upper() + unicode(x)[1:])\n env.filters['number_as_currency_text'] = number_as_currency_text\n env.filters['float'] = float\n env.filters['Decimal'] = Decimal\n env.filters['tex_hyphenize'] = tex_hyphenize\n env.filters['rus_full_date'] = rus_full_date\n env.filters['make_auth_url'] = make_auth_url\n env.filters['md5'] = md5_filter\n env.filters['T'] = lambda val, t, f: t if val else f\n env.filters['enum_to_name'] = enum_to_name\n env.filters['timeshift'] = timeshift\n\ndef set_template_loader(jinja):\n _search_path = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), u\"fw/templates\"))\n jinja.loader = jinja2.ChoiceLoader([jinja2.FileSystemLoader(_search_path)])\n\n" }, { "alpha_fraction": 0.7022767066955566, "alphanum_fraction": 0.7040280103683472, "avg_line_length": 32.52941131591797, "blob_id": "939ca3e7647cb8ef61690a797debba4e55842663", "content_id": "d004446ff7b67931d8c7d0883ac9dbb93ed12b28", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 571, "license_type": "no_license", "max_line_length": 97, "num_lines": 17, "path": 
"/app/deployment_migrations/migration_list/20150803_add_indexes.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom fw.db.sql_base import db as sqldb\n\n\ndef forward(config, logger):\n logger.debug(u\"Add indexes\")\n\n sqldb.session.close()\n sqldb.engine.execute(u\"create index doc_batch_owner_idx on doc_batch (_owner_id);\")\n sqldb.engine.execute(u\"create index private_person_owner_idx on private_person (_owner_id);\")\n sqldb.engine.execute(u\"create index company_object_owner_idx on company_object (_owner_id);\")\n sqldb.engine.execute(u\"create index batch_docs_owner_idx on batch_docs (_owner_id);\")\n\n\ndef rollback(config, logger):\n pass\n\n" }, { "alpha_fraction": 0.700941264629364, "alphanum_fraction": 0.7049922347068787, "avg_line_length": 39.157894134521484, "blob_id": "0c77cc20655e28da6c17c91325fed8a6aba8d864", "content_id": "92e540d764dc1ec37308bef9547e1ab2724a7bc1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8393, "license_type": "no_license", "max_line_length": 120, "num_lines": 209, "path": "/app/services/llc_reg/documents/general_doc_fields.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom decimal import Decimal\n\nfrom fw.documents.fields.complex_doc_fields import ObjectRefField, DocArrayField\nfrom fw.documents.fields.general_doc_fields import general_doc_field, GeneralField, DocCurrencyField, \\\n DocNumericPartField\nfrom fw.documents.fields.simple_doc_fields import DocEnumField, DocDecimalField, DocIntField, DocTextField, \\\n DocDateTimeField, DocBoolField\nfrom services.llc_reg.documents.enums import InitialCapitalDepositTypeEnum, FounderTypeEnum\n\n\n@general_doc_field\nclass NecessaryVotesForGeneralMeeting(GeneralField):\n company_strategy = DocEnumField(enum_cls=\"NecessaryVotesEnum\")\n articles_change = DocEnumField(enum_cls=\"NecessaryVotesEnum\")\n executives_formation = DocEnumField(enum_cls=\"NecessaryVotesEnum\")\n auditor_election = DocEnumField(enum_cls=\"NecessaryVotesEnum\")\n annual_reports_approval = DocEnumField(enum_cls=\"NecessaryVotesEnum\")\n profit_distribution = DocEnumField(enum_cls=\"NecessaryVotesEnum\")\n internal_documents_approval = DocEnumField(enum_cls=\"NecessaryVotesEnum\")\n obligations_emission = DocEnumField(enum_cls=\"NecessaryVotesEnum\")\n audit_assignment = DocEnumField(enum_cls=\"NecessaryVotesEnum\")\n large_deals_approval = DocEnumField(enum_cls=\"NecessaryVotesEnum\")\n concern_deals_approval = DocEnumField(enum_cls=\"NecessaryVotesEnum\")\n reorganization_or_liquidation = DocEnumField(enum_cls=\"NecessaryVotesEnum\")\n liquidation_committee_assignment = DocEnumField(enum_cls=\"NecessaryVotesEnum\")\n branch_establishment = DocEnumField(enum_cls=\"NecessaryVotesEnum\")\n other_issues = DocEnumField(enum_cls=\"NecessaryVotesEnum\")\n\n\n@general_doc_field\nclass DocPersonPropertyField(GeneralField):\n name = DocTextField()\n price = DocCurrencyField()\n count = DocIntField(min_val=1)\n\n def __unicode__(self):\n return self.name\n\n@general_doc_field\nclass FarmObjectField(GeneralField):\n person = ObjectRefField(cls=\"PrivatePerson\")\n ogrnip = DocTextField(max_length=15, required=False)\n\n\n@general_doc_field\nclass HolderShareField(GeneralField):\n holder_type = DocEnumField(enum_cls=\"JSCMemberTypeEnum\", required=False)\n company = ObjectRefField(cls=\"CompanyObject\", required=False)\n\n@general_doc_field\nclass 
FounderObject(GeneralField):\n caption = DocTextField(required=False)\n founder_type = DocEnumField(enum_cls=\"FounderTypeEnum\")\n\n company = ObjectRefField(cls=\"CompanyObject\", required=False,\n override_fields_kwargs={\"address\": {\"required\": True}})\n person = ObjectRefField(cls=\"PrivatePerson\", required=False, override_fields_kwargs={\"address\": {\"required\": True}})\n name = DocTextField(max_length=160, required=False)\n\n management_company = ObjectRefField(cls=\"CompanyObject\", required=False)\n\n gov_founder_type = DocEnumField(enum_cls=\"GovernmentFounderTypeEnum\", required=False)\n region = DocEnumField(enum_cls=\"RFRegionsEnum\", required=False)\n\n documents_recipient_type = DocEnumField(enum_cls=\"DocumentDeliveryTypeEnum\", required=False)\n\n nominal_capital = DocCurrencyField(required=True)\n share = DocNumericPartField(required=True)\n\n properties = DocArrayField(cls='DocPersonPropertyField', required=False)\n\n duplicate_fio = DocBoolField(required=False, is_service=True)\n is_starter_capital_dividable = DocBoolField(required=False, is_service=True)\n\n @property\n def deposit_type(self):\n if self.fully_initialized:\n if self.properties.initialized and self.properties.api_value():\n sum = 0\n for i in self.properties.api_value():\n sum += Decimal(i['price']['value']) * i['count']\n if Decimal(sum) == Decimal(self.nominal_capital.api_value()['value']):\n return InitialCapitalDepositTypeEnum.ICD_PROPERTY\n return InitialCapitalDepositTypeEnum.ICD_MONEY_PARTIAL\n\n return InitialCapitalDepositTypeEnum.ICD_MONEY_FULL\n\n @property\n def property_total_amount(self):\n if not self.fully_initialized:\n return 0\n if self.nominal_capital.initialized:\n if self.properties.initialized and self.properties.api_value():\n sum = 0\n for i in self.properties.api_value():\n sum += Decimal(i['price']['value']) * i['count']\n return Decimal(sum)\n\n return 0\n\n @property\n def fully_initialized(self):\n return self.initialized and self.nominal_capital.initialized and self.share.initialized\n\n @property\n def property_total_percents(self):\n if not self.fully_initialized:\n return 0\n return float(self.property_total_amount) / float(self.nominal_capital.value) * 100.0 * self.share.normal_value\n\n @property\n def money_total_percents(self):\n if not self.fully_initialized:\n return 0\n return float(self.money_total_amount) / float(self.nominal_capital.value) * 100.0 * self.share.normal_value\n\n @property\n def money_total_amount(self):\n if not self.fully_initialized:\n return 0\n return self.nominal_capital.value - self.property_total_amount\n\n @property\n def full_name(self):\n if not self.initialized or (not self.person.initialized and not self.company.initialized):\n return u\"\"\n if self.founder_type == FounderTypeEnum.FT_PERSON:\n return self.person.full_name\n if self.founder_type == FounderTypeEnum.FT_COMPANY:\n return u\"%s %s %s\" % (self.company.general_manager_caption, self.company.qualified_short_name,\n self.company.general_manager.full_name)\n return u\"\"\n\n @property\n def short_name(self):\n if not self.initialized:\n return u\"\"\n if self.founder_type == FounderTypeEnum.FT_PERSON:\n if not self.person.initialized:\n return u\"\"\n return self.person.short_name\n if self.founder_type == FounderTypeEnum.FT_COMPANY:\n if not self.company.initialized:\n return u\"\"\n return u\"%s %s %s\" % (self.company.general_manager_caption, self.company.qualified_short_name,\n self.company.general_manager.short_name)\n return u\"\"\n\n@general_doc_field\nclass 
FounderUIObject(GeneralField):\n founder = ObjectRefField(required=True)\n nominal_capital = DocCurrencyField(required=True)\n share = DocDecimalField(required=True)\n\n\n@general_doc_field\nclass ManagementCompanyField(GeneralField):\n company = ObjectRefField(cls=\"CompanyObject\")\n\n foreign_company = ObjectRefField(cls=\"CompanyObject\", required=False)\n russian_branch_or_agency = ObjectRefField(cls=\"CompanyObject\", required=False)\n russian_agent = ObjectRefField(cls=\"PrivatePerson\", required=False)\n\n\n@general_doc_field\nclass DocWitnessField(GeneralField):\n inn = DocTextField(min_length=1)\n type = DocEnumField(enum_cls='WitnessTypeEnum')\n\n def __unicode__(self):\n return u\"\"\n\n\n@general_doc_field\nclass CompanyStarterCapitalField(GeneralField):\n capital_type = DocEnumField(enum_cls='CompanyStarterCapitalTypeEnum')\n value = DocCurrencyField()\n\n\n@general_doc_field\nclass CharterCapitalPartField(GeneralField):\n person = ObjectRefField(cls='PrivatePerson')\n company = ObjectRefField(cls='CompanyObject')\n share_percents = DocIntField(min_val=1, max_val=100)\n share_value = DocCurrencyField()\n deposit_type = DocEnumField(enum_cls='InitialCapitalDepositTypeEnum')\n properties = DocArrayField(cls='DocPersonPropertyField')\n property_examinator = DocTextField()\n real_estate = DocTextField()\n\n def __unicode__(self):\n return u\"\"\n\n\n@general_doc_field\nclass DocAdditionalRightsField(GeneralField):\n rights = DocArrayField(cls='DocTextField', required=False, subfield_kwargs={'min_length': 1})\n responsibility = DocArrayField(cls='DocTextField', required=False, subfield_kwargs={'min_length': 1})\n duties = DocArrayField(cls='DocTextField', required=False, subfield_kwargs={'min_length': 1})\n\n\n@general_doc_field\nclass IfnsRegInfoField(GeneralField):\n # full_name = DocTextField(required=False)\n ogrn = DocTextField(min_length=13, max_length=13, required=False)\n status = DocEnumField(enum_cls='IfnsRegStatusEnum', required=True)\n reg_date = DocDateTimeField(required=False)\n" }, { "alpha_fraction": 0.6893819570541382, "alphanum_fraction": 0.6909667253494263, "avg_line_length": 26.39130401611328, "blob_id": "7292f71a3cd35ef057a2034b46abad55c277280a", "content_id": "93f68bed0aaded61fe028908f71d9c3cb46eb10a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 631, "license_type": "no_license", "max_line_length": 61, "num_lines": 23, "path": "/app/deployment_migrations/migration_list/201507201_add_table_for_auth_url.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom fw.db.sql_base import db as sqldb\n\n\ndef forward(config, logger):\n logger.debug(u\"Add table for authorization url\")\n\n sqldb.session.close()\n sqldb.engine.execute(u\"\"\"CREATE TABLE authorization_url (\n id VARCHAR NOT NULL,\n url VARCHAR,\n created TIMESTAMP WITHOUT TIME ZONE NOT NULL,\n expire_at TIMESTAMP WITHOUT TIME ZONE NOT NULL,\n used_times INTEGER NOT NULL,\n owner_id INTEGER,\n PRIMARY KEY (id),\n FOREIGN KEY(owner_id) REFERENCES authuser (id)\n)\"\"\")\n\n\ndef rollback(config, logger):\n sqldb.session.close()\n sqldb.engine.execute(\"DROP table authorization_url;\")\n\n" }, { "alpha_fraction": 0.7421524524688721, "alphanum_fraction": 0.7436472177505493, "avg_line_length": 37.20000076293945, "blob_id": "af4d2380f92ea636594695e5ad4ba766d6874dd3", "content_id": "c716dd833de6490e10c61245aa83928239097d9f", "detected_licenses": [], "is_generated": false, "is_vendor": false, 
"language": "Python", "length_bytes": 2676, "license_type": "no_license", "max_line_length": 120, "num_lines": 70, "path": "/app/external_tools.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom fw.api import dadata_proxy\nfrom flask import current_app\n\nfrom fw.cache.cache_wrapper import CacheWrapper\n\ncache = CacheWrapper()\n\n\ndef dadata_suggest(method, data):\n return dadata_proxy.dadata_suggest(method, data)\n\n\ndef dadata_clean(method, data):\n return dadata_proxy.dadata_clean(method, data)\n\n\ndef get_detailed_address(address):\n from fw.utils.address_utils import get_detailed_address as _get_detailed_address\n\n return _get_detailed_address(address)\n\n\ndef dadata_standardize_address(address):\n from fw.utils.address_utils import dadata_standardize_address as _dadata_standardize_address\n\n return _dadata_standardize_address(address)\n\n\ndef get_ifns_by_address(address, service_nalog_ru_url):\n from services.ifns.ifns_manager import get_ifns_by_address as _get_ifns_by_address\n\n return _get_ifns_by_address(address, service_nalog_ru_url)\n\n\ndef get_ifns_by_code(tax_office, service_nalog_ru_url):\n from services.ifns.ifns_manager import get_ifns_by_code as _get_ifns_by_code\n\n return _get_ifns_by_code(tax_office, service_nalog_ru_url)\n\n\ndef get_nalog_ru_time_slots(person_data, company_data, internal_ifns_number, internal_ifns_service, logger):\n from services.ifns.ifns_manager import get_nalog_ru_time_slots as _get_nalog_ru_time_slots\n\n return _get_nalog_ru_time_slots(person_data, company_data, internal_ifns_number, internal_ifns_service, logger)\n\n\ndef book_ifns(person_data, company_data, internal_ifns_number, internal_ifns_service, dt, logger):\n from services.ifns.ifns_manager import book_ifns as _book_ifns\n\n return _book_ifns(person_data, company_data, internal_ifns_number, internal_ifns_service, dt, logger)\n\n\ndef get_registration_ifns(service_nalog_ru_url, address_ifns=None):\n from services.ifns.ifns_manager import get_registration_ifns as _get_registration_ifns\n\n return _get_registration_ifns(service_nalog_ru_url, address_ifns=address_ifns)\n\n\ndef get_ifns_registrations(name, company_type='ooo', date_from=None, date_to=None,\n service=None, ifns=None, service_nalog_ru_url=None, logger=None):\n from services.ifns.ifns_manager import get_ifns_registrations as _get_ifns_registrations\n\n return _get_ifns_registrations(name, company_type=company_type, date_from=date_from, date_to=date_to,\n service=service, ifns=ifns, service_nalog_ru_url=service_nalog_ru_url, logger=logger)\n\ndef check_car_policy(policy_series, policy_number, timeout=20.0):\n from services.car_assurance.integration import check_car_policy as _check_car_policy\n return _check_car_policy(policy_series, policy_number, timeout=timeout)\n\n\n" }, { "alpha_fraction": 0.6204985976219177, "alphanum_fraction": 0.626731276512146, "avg_line_length": 23.89655113220215, "blob_id": "05ba17f5db3df19cbc7966bd68c8dcd4774951d7", "content_id": "5287eb40eba1ec72e5dc487779d62cdbef46efa1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1444, "license_type": "no_license", "max_line_length": 79, "num_lines": 58, "path": "/app/fw/auth/social_services/social_models.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom sqlalchemy import Column, String, ForeignKey, Integer\nfrom sqlalchemy.orm import relationship\n\nfrom fw.db.sql_base import 
db as sqldb\n\n\nclass SocialServiceEnum(object):\n SS_UNKNOWN = 0\n SS_VK = 1\n SS_FACEBOOK = 2\n SS_GOOGLE = 3\n SS_TWITTER = 4\n SS_OK = 5\n SS_MYMAILRU = 6\n SS_OPENID = 7\n\n TAG_VK = 'vk'\n TAG_FACEBOOK = 'facebook'\n TAG_GOOGLE = 'google'\n TAG_TWITTER = 'twitter'\n TAG_OK = 'ok'\n TAG_MYMAILRU = 'mymailru'\n TAG_OPENID = 'openid'\n TAG_ALL = [TAG_VK, TAG_FACEBOOK, TAG_GOOGLE]\n\n _SS_TAGS = {\n SS_FACEBOOK: TAG_FACEBOOK,\n SS_VK: TAG_VK,\n SS_GOOGLE: TAG_GOOGLE,\n SS_TWITTER: TAG_TWITTER,\n SS_OK: TAG_OK,\n SS_MYMAILRU: TAG_MYMAILRU,\n SS_OPENID: TAG_OPENID,\n }\n\n _TAG_SS = dict((tag, ss) for ss, tag in _SS_TAGS.items())\n\n @staticmethod\n def from_tag(tag):\n return SocialServiceEnum._TAG_SS.get(tag, SocialServiceEnum.SS_UNKNOWN)\n\n @staticmethod\n def tag(value):\n return SocialServiceEnum._SS_TAGS.get(value, None)\n\n\nclass SocialUserLink(sqldb.Model):\n __tablename__ = 'socialserviceuserlink'\n\n id = Column(Integer, primary_key=True)\n service_id = Column(Integer, nullable=False)\n uid = Column(String, nullable=False)\n access_token = Column(String, nullable=False)\n\n user_id = Column(Integer, ForeignKey('authuser.id'))\n user = relationship(\"AuthUser\")\n" }, { "alpha_fraction": 0.6592483520507812, "alphanum_fraction": 0.6602702140808105, "avg_line_length": 34.087650299072266, "blob_id": "32f609b81652122887d9375aeb674eca69b6a0d3", "content_id": "a9e5664b247600acb76488841a0a56c0a5f42af2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8819, "license_type": "no_license", "max_line_length": 120, "num_lines": 251, "path": "/app/services/notarius/api.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime, timedelta\nfrom flask import Blueprint, current_app\nfrom flask_login import login_required, current_user\nimport pytils\nfrom fw.api import errors\nfrom fw.api.args_validators import validate_arguments, ArgumentValidator, DateTypeValidator, DateTimeValidator\nfrom fw.api.base_handlers import api_view\nfrom fw.auth.social_services import SocialServiceBackends\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.address_enums import RFRegionsEnum\nfrom fw.documents.batch_manager import BatchManager\nfrom fw.documents.db_fields import DocumentBatchDbObject\nfrom fw.documents.fields.general_doc_fields import DocAddressField\nfrom fw.documents.fields.simple_doc_fields import DocDateTimeField\nfrom services.notarius.data_model.models import NotariusObject, NotariusBookingObject\n\nnotarius_bp = Blueprint('notarius', __name__)\n\n\n@notarius_bp.route('/meeting/notarius/', methods=['GET'])\n@api_view\n@login_required\n@validate_arguments(batch_id=ArgumentValidator())\ndef notarius_list(batch_id=None):\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).scalar()\n if not batch:\n raise errors.InvalidParameterValue('batch_id')\n\n batch_manager = BatchManager.init(batch)\n assert batch_manager\n\n region = batch_manager.get_batch_region(batch_id)\n\n if not region:\n return {'result': []}\n query = NotariusObject.query.filter_by(region=region)\n result = [item.get_api_structure() for item in query]\n return {'result': result}\n\n\n@notarius_bp.route('/meeting/notarius-in-region/', methods=['GET'])\n@api_view\n@login_required\n@validate_arguments(region=ArgumentValidator())\ndef notarius_list_by_region(region=None):\n if not RFRegionsEnum.validate(region):\n raise 
errors.InvalidParameterValue('region')\n\n query = NotariusObject.query.filter_by(region=region)\n result = [item.get_api_structure() for item in query]\n return {'result': result}\n\n\n@notarius_bp.route('/meeting/notarius/schedule/', methods=['GET'])\n@api_view\n@login_required\n@validate_arguments(notarius_id=ArgumentValidator(required=True),\n datetime=DateTypeValidator(required=True))\ndef notarius_schedule(notarius_id=None, **kwargs):\n empty_result = {'result': {\"nearest_time\": None,\"slots\": []}}\n\n dt = kwargs.get('datetime', None)\n if not dt:\n raise errors.InvalidParameterValue('datetime')\n\n notarius_db = NotariusObject.query.filter_by(id=notarius_id).scalar()\n if not notarius_db:\n raise errors.NotariusNotFound()\n\n now = datetime.utcnow()\n two_weeks = timedelta(14)\n\n dt = datetime(dt.year, dt.month, dt.day)\n if dt < now or dt > now + two_weeks:\n return empty_result\n\n day_from, day_to = dt, dt + timedelta(days=1)\n\n days = NotariusObject.get_notarius_schedule(notarius_db, day_from=day_from, day_to=day_to)\n days = filter(lambda y: y['slots'], sorted(days, key=lambda x: x['nearest_time']))\n days = filter(lambda x: datetime.strptime(x['nearest_time'], DocDateTimeField.FORMAT).date() == dt.date(), days)\n if not days:\n return empty_result\n\n return {'result': days[0]}\n\n\n@notarius_bp.route('/meeting/notarius/create/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(\n notarius_id=ArgumentValidator(required=True),\n datetime=DateTimeValidator(required=True),\n batch_id=ArgumentValidator(required=False)\n)\ndef notarius_reserve(notarius_id=None, **kwargs):\n dt = kwargs['datetime']\n batch_id = kwargs.get('batch_id', None)\n batch = None\n if batch_id:\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).scalar()\n if not batch:\n raise errors.BatchNotFound()\n\n notarius_db = NotariusObject.query.filter_by(id=notarius_id).scalar()\n if not notarius_db:\n raise errors.NotariusNotFound()\n\n target_day = datetime(dt.year, dt.month, dt.day)\n\n if NotariusObject.is_weekend(notarius_db, target_day):\n current_app.logger.info(u\"NotariusObject.is_weekend -> skip\")\n return {'result': None}\n\n time_slots = NotariusObject.make_slots(notarius_db, target_day, )\n if not time_slots:\n return {'result': None}\n\n found = False\n for slot in time_slots:\n if slot['slot_start'] == dt.strftime(\"%H:%M\") and slot['slot_end'] == (dt + timedelta(seconds=1800)).strftime(\n \"%H:%M\"):\n found = True\n break\n if not found:\n current_app.logger.info(u\"timeslot not found -> skip\")\n return {'result': None}\n address = DocAddressField()\n address.parse_raw_value(notarius_db.address, api_data=False)\n booking = NotariusBookingObject(\n notarius=notarius_db,\n dt=dt,\n address=address.as_string(),\n owner=current_user,\n _discarded=False\n )\n if batch_id:\n NotariusBookingObject.query.filter_by(\n batch_id=batch_id,\n owner=current_user,\n _discarded=False\n ).delete()\n sqldb.session.commit()\n booking.batch_id=batch_id\n sqldb.session.add(booking)\n\n batch_data = batch.data or {}\n batch_data['lawyer_check'] = True\n DocumentBatchDbObject.query.filter_by(id=batch_id).update({\n 'data': batch_data\n })\n sqldb.session.commit()\n result = booking.get_api_structure()\n rec_list = current_app.config['YURIST_EMAIL_LIST']\n\n if batch:\n attaches = BatchManager.get_shared_links_to_rendered_docs(batch, current_app.config, current_app.logger)\n llc_full_name = batch.data.get('full_name', \"\")\n social_link = 
SocialServiceBackends.get_user_social_network_profile_url(current_user.id)\n\n from fw.async_tasks import send_email\n for recipient in rec_list:\n send_email.send_email.delay(\n recipient,\n 'notarius_batch_check',\n email=current_user.email,\n mobile=current_user.mobile,\n social_link=social_link,\n full_name=llc_full_name,\n notarius=unicode(notarius_db.title) or address.as_string(),\n booking_time=pytils.dt.ru_strftime(u\"%d %B %Y в %H:%M\", inflected=True, date=dt),\n attaches=attaches\n )\n return {'result': result}\n\n\n@notarius_bp.route('/meeting/notarius/discard/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(booking_id=ArgumentValidator(required=True))\ndef notarius_discard(booking_id=None):\n result = NotariusBookingObject.query.filter_by(\n id=booking_id,\n owner=current_user,\n _discarded=False).update({\n '_discarded': True\n })\n sqldb.session.commit()\n\n if not result:\n raise errors.NotariusBookingNotFound()\n\n booking = NotariusBookingObject.query.filter_by(id=booking_id).scalar()\n if not booking:\n raise errors.NotariusBookingNotFound()\n\n notarius_id = booking.notarius_id\n dt = booking.dt\n address = booking.address\n rec_list = current_app.config['YURIST_EMAIL_LIST']\n company_full_name = u\"\"\n batch_id = booking.batch_id\n if batch_id:\n try:\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()\n if not batch:\n raise Exception()\n company_full_name = batch.data.get('full_name', '')\n batch_data = batch.data or {}\n batch_data['lawyer_check'] = False\n DocumentBatchDbObject.query.filter_by(id=batch_id).update({\n 'data': batch_data\n })\n sqldb.session.commit()\n except Exception:\n current_app.logger.exception(u\"Failed to get company name from batch\")\n\n from fw.async_tasks import send_email\n for recipient in rec_list:\n send_email.send_email.delay(\n recipient,\n 'notarius_discard',\n email=current_user.email,\n notarius_id=notarius_id,\n booking_time=pytils.dt.ru_strftime(u\"%d %B %Y в %H:%M\", inflected=True, date=dt) if dt else u\"<неизвестно>\",\n address=address,\n company_full_name=company_full_name\n )\n\n return {'result': True}\n\n\n@notarius_bp.route('/meeting/notarius/booking/', methods=['GET'])\n@api_view\n@login_required\n@validate_arguments(\n batch_id=ArgumentValidator(required=False)\n)\ndef get_batch_notarius_booking(batch_id=None):\n result_list = []\n cur = NotariusBookingObject.query.filter_by(\n batch_id=batch_id,\n owner=current_user,\n _discarded=False) if batch_id else NotariusBookingObject.query.filter_by(\n owner=current_user,\n _discarded=False\n )\n for notarius_book in cur:\n result_list.append(notarius_book.get_api_structure())\n return {\"result\": result_list}\n" }, { "alpha_fraction": 0.6586361527442932, "alphanum_fraction": 0.6590445041656494, "avg_line_length": 34.49275207519531, "blob_id": "d70e0d689b267b086d57ac5cb5626e44cf9b2746", "content_id": "47e93daf1ccd0906ca762bad1c44eedc93b97f7a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2449, "license_type": "no_license", "max_line_length": 122, "num_lines": 69, "path": "/app/fw/async_tasks/scheduler.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom celery import current_app as celery\nfrom datetime import datetime, timedelta\nfrom fw.api import errors\n\nfrom fw.async_tasks.models import CeleryScheduledTask\nfrom fw.db.sql_base import db as sqldb\n\nfrom celery import current_app as celery_current_app\nfrom 
fw.documents.batch_manager import BatchManager\nfrom fw.documents.db_fields import DocumentBatchDbObject\nfrom fw.documents.doc_requisites_storage import DocRequisitiesStorage\n\n\nclass CeleryScheduler(object):\n\n @staticmethod\n def post(task_name, countdown_seconds=None, eta=None, args=None, kwargs=None, task_id=None, force_replace_task=False):\n assert countdown_seconds or eta\n\n # all_task_names = celery_current_app.tasks.keys()\n # if task_name not in all_task_names:\n # raise ValueError()\n\n eta_calc = eta or datetime.utcnow() + timedelta(seconds=countdown_seconds)\n # if eta_calc < datetime.utcnow():\n # raise ValueError()\n\n new_task = CeleryScheduledTask(\n task_name=task_name,\n args=args,\n kwargs=kwargs,\n eta=eta_calc\n )\n if task_id is not None:\n if CeleryScheduledTask.query.filter_by(id=task_id).count():\n if not force_replace_task:\n raise errors.DuplicateIdError()\n CeleryScheduledTask.query.filter_by(id=task_id).delete()\n sqldb.session.commit()\n new_task.id = task_id\n\n sqldb.session.add(new_task)\n sqldb.session.commit()\n return new_task\n\n @staticmethod\n def remove(task_id):\n assert task_id\n\n CeleryScheduledTask.query.filter_by(id=task_id).delete()\n sqldb.session.commit()\n\n @staticmethod\n def run_task(task):\n assert isinstance(task, CeleryScheduledTask)\n task_obj = celery_current_app.tasks[task.task_name]\n task.sent = True\n sqldb.session.commit()\n task_obj.apply_async(args=task.args, kwargs=task.kwargs)\n\[email protected]()\ndef run_scheduled_task(descriptor_name, action_name, batch_id):\n app = celery.conf['flask_app']()\n with app.app_context():\n batch_db = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()\n descriptor = DocRequisitiesStorage.get_batch_descriptor(descriptor_name)\n action = descriptor['actions'][action_name]\n BatchManager.perform_action(action, batch_db, {}, app.logger, app.config)\n" }, { "alpha_fraction": 0.5883575677871704, "alphanum_fraction": 0.590436577796936, "avg_line_length": 39.16666793823242, "blob_id": "2a5b09a77aa6337e83344a1af1851dc2a28481c3", "content_id": "28ccefb422844d6373f1263bd7e2c802aa707c04", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 497, "license_type": "no_license", "max_line_length": 154, "num_lines": 12, "path": "/jb_tests/test_pack/test_tex.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom base_test_case import BaseTestCase\nfrom template_filters import texify\n\n\nclass TexTestCase(BaseTestCase):\n\n def test_texify(self):\n self.maxDiff = None\n self.assertEqual(texify(u'<>%${}_¶‡|–—™£#&§\\\\®©¿«»'), ur'\\textless\\textgreater\\%\\$\\{\\}\\_\\P\\ddag\\textbar\\textendash\\textemdash'\n ur'\\texttrademark\\pounds\\#\\&\\S\\textbackslash\\textregistered\\copyright\\textquestiondown<<>>')" }, { "alpha_fraction": 0.801047146320343, "alphanum_fraction": 0.8023560047149658, "avg_line_length": 35.380950927734375, "blob_id": "729ded6958c33be5a0681ad665c4a7a63c3d2571", "content_id": "c98569a2b03dc8b71a93ebb18b9468b068ee9a7b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 764, "license_type": "no_license", "max_line_length": 119, "num_lines": 21, "path": "/app/deployment_migrations/migration_list/20150618_add_column_to_documents.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\nimport logging\nfrom tempfile import TemporaryFile, 
NamedTemporaryFile\nfrom bson import ObjectId\nimport requests\nfrom fw.auth.models import AuthUser\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.db_fields import DocumentBatchDbObject, BatchDocumentDbObject, CompanyDbObject, PrivatePersonDbObject\nfrom fw.documents.enums import PersonTypeEnum, IncorporationFormEnum, CompanyTypeEnum\nfrom fw.storage.models import FileObject\n\n\ndef forward(config, logger):\n logger.debug(u\"Add column tried_to_render to table batch_docs\")\n\n sqldb.session.close()\n sqldb.engine.execute(\"ALTER TABLE batch_docs ADD COLUMN tried_to_render BOOLEAN NOT NULL DEFAULT FALSE;\")\n\ndef rollback(config, logger):\n pass\n" }, { "alpha_fraction": 0.6658605933189392, "alphanum_fraction": 0.6676587462425232, "avg_line_length": 36.94823455810547, "blob_id": "d09b390987f1a050930d7084e3b8a85d401243de", "content_id": "eabc6d730ff9c2c1d145b79453f6c7f740400477", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 16128, "license_type": "no_license", "max_line_length": 130, "num_lines": 425, "path": "/app/fw/api/views/auth.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\nimport json\n\nimport flask\nfrom flask import current_app, Response, request, session, Blueprint, abort, redirect\nfrom flask_login import (login_user, logout_user, login_required, current_user)\nfrom fw import metrics\n\nfrom fw.api import errors\nfrom fw.api.api_data import get_user_api_structure\nfrom fw.api.args_validators import (validate_arguments, EmailAddressValidator, PasswordValidator, AtLeastOneOfValidator,\n AccessTokenValidator, SocialNetworkTypeValidator, ConfirmationCodeValidator,\n ArgumentValidator)\nfrom fw.api.base_handlers import api_view\nfrom fw.auth.models import ConfirmationLinkTypeEnum, AuthUser, AuthorizationUrl, UserActivationLink\nfrom fw.auth.social_services import SocialServiceBackends\nfrom fw.auth.social_services.social_models import SocialServiceEnum\nfrom fw.auth.user_manager import UserManager\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.db_fields import DocumentBatchDbObject, CompanyDbObject, PrivatePersonDbObject, BatchDocumentDbObject\nfrom fw.storage.models import FileObject\nfrom services.russian_post.db_models import RussianPostTrackingItem\n\nauth_bp = Blueprint('auth', __name__)\n\n\nclass MyResp(Response):\n def set_cookie(self, key, value='', max_age=None, expires=None,\n path='/', domain=None, secure=None, httponly=False):\n self.__name = key\n self.__val = value\n super(MyResp, self).set_cookie(key, value, max_age, expires, path, domain, secure, httponly)\n\n\ndef change_account_data_owner(old_user_id, new_user_id):\n from services.notarius import notarius_manager\n\n DocumentBatchDbObject.query.filter_by(_owner_id=old_user_id).update({'_owner_id': new_user_id})\n BatchDocumentDbObject.query.filter_by(_owner_id=old_user_id).update({'_owner_id': new_user_id})\n CompanyDbObject.query.filter_by(_owner_id=old_user_id).update({'_owner_id': new_user_id})\n PrivatePersonDbObject.query.filter_by(_owner_id=old_user_id).update({'_owner_id': new_user_id})\n RussianPostTrackingItem.query.filter_by(owner_id=old_user_id).update({'owner_id': new_user_id})\n\n notarius_manager.change_objects_owner(old_user_id, new_user_id)\n FileObject.query.filter_by(_owner_id=old_user_id).update({'_owner_id': new_user_id})\n AuthorizationUrl.query.filter_by(owner_id=old_user_id).update({'owner_id': new_user_id})\n 
UserActivationLink.query.filter_by(auth_user_id=old_user_id).update({'auth_user_id': new_user_id})\n sqldb.session.commit()\n\n AuthUser.query.filter_by(id=old_user_id, temporal=True).delete()\n sqldb.session.commit()\n\n\n@auth_bp.route('/account/login/', methods=['POST'])\n@api_view\n@validate_arguments(email=EmailAddressValidator(), password=PasswordValidator())\ndef login(email=None, password=None):\n user = UserManager.login_user(email, password)\n if not user:\n raise errors.InvalidLoginOrPassword()\n\n google_client_id = request.cookies.get('_ga_cid')\n old_user_id = None\n if current_user and not current_user.is_anonymous:\n if not current_user.temporal:\n my_resp = MyResp()\n current_app.session_interface.save_session(current_app, flask.session, my_resp)\n if google_client_id:\n metrics.update_user_info(current_user, google_client_id=google_client_id)\n # noinspection PyUnresolvedReferences\n my_resp = MyResp(json.dumps({\"result\": my_resp._MyResp__val}), status=200, content_type=\"application/json\")\n return my_resp\n\n old_user_id = current_user.id\n\n login_user(user)\n if google_client_id:\n metrics.update_user_info(user, google_client_id=google_client_id)\n\n if old_user_id:\n new_user_id = user.id\n change_account_data_owner(old_user_id, new_user_id)\n\n my_resp = MyResp()\n current_app.session_interface.save_session(current_app, flask.session, my_resp)\n # noinspection PyUnresolvedReferences\n my_resp = MyResp(json.dumps({\"result\": my_resp._MyResp__val}), status=200, content_type=\"application/json\")\n user.last_login_date = datetime.utcnow()\n return my_resp\n\n\n@auth_bp.route('/account/logout/', methods=['POST'])\n@api_view\n@login_required\ndef logout():\n if current_user.temporal:\n raise errors.NotAuthorized()\n logout_user()\n session['logout'] = True\n return {\"result\": \"OK\"}\n\n\n@auth_bp.route('/account/create/', methods=['POST'])\n@api_view\n@validate_arguments(AtLeastOneOfValidator(\n email=EmailAddressValidator(),\n access_token=AccessTokenValidator()),\n password=PasswordValidator(required=False, raise_exception=errors.InvalidPassword),\n social_network=SocialNetworkTypeValidator(required=False))\ndef signup(email=None, access_token=None, password=None, social_network=None):\n if \"X-Forwarded-For\" in request.headers and request.headers.getlist(\"X-Forwarded-For\"):\n ip = request.headers.getlist(\"X-Forwarded-For\")[0]\n elif \"X-Real-Ip\" in request.headers and request.headers.getlist(\"X-Real-Ip\"):\n ip = request.headers.getlist(\"X-Real-Ip\")[0]\n else:\n ip = request.remote_addr\n\n if ip in current_app.config['OFFICE_IP']:\n is_test_user = True\n else:\n is_test_user = False\n\n if not email and not access_token:\n raise errors.MissingRequiredParameter('email')\n\n if not EmailAddressValidator().validate(email) and not access_token:\n raise errors.InvalidParameterValue('email')\n\n if access_token:\n password = ''\n\n if social_network and social_network not in SocialServiceEnum.TAG_ALL:\n raise errors.InvalidParameterValue('social_network')\n\n if not PasswordValidator().validate(password) and not access_token:\n raise errors.MissingRequiredParameter('password')\n\n if current_user and not current_user.is_anonymous:\n if not current_user.temporal:\n raise errors.InvalidParameterValue('email')\n\n new_user = UserManager.promote_temp_user(current_user, access_token, None, email, u\"\", u\"\", u\"\", password, social_network)\n new_user.is_tester = is_test_user\n else:\n new_user = UserManager.register_user(access_token, None, email, u\"\", 
u\"\", u\"\", password, social_network)\n new_user.is_tester = is_test_user\n\n google_client_id = request.cookies.get('_ga_cid')\n if google_client_id and not new_user.temporal:\n metrics.update_user_info(new_user, google_client_id=google_client_id)\n\n new_user.email = new_user.email.lower() if new_user.email else u\"\"\n\n data = get_user_api_structure(new_user)\n result = {\"result\": data}\n\n if not email and access_token:\n login_user(new_user)\n my_resp = MyResp()\n current_app.session_interface.save_session(current_app, flask.session, my_resp)\n return result\n\n user = UserManager.login_user(email, password)\n if user:\n login_user(user)\n my_resp = MyResp()\n current_app.session_interface.save_session(current_app, flask.session, my_resp)\n user.last_login_date = datetime.utcnow()\n\n return result\n\n\n@auth_bp.route('/account/profile/', methods=['GET'])\n@api_view\n@login_required\ndef get_profile():\n data = get_user_api_structure(current_user)\n return {\"result\": data}\n\n\n@auth_bp.route('/account/confirm/', methods=['POST', 'GET'])\n@api_view\n@validate_arguments(code=ConfirmationCodeValidator(),\n user_id=ArgumentValidator(required=False))\ndef confirm_account(code=None, user_id=None):\n if len(code) != current_app.config['max_activation_link_length'] and \\\n len(code) != current_app.config['digital_activation_link_length']:\n raise errors.InvalidParameterValue('code')\n\n if len(code) == current_app.config['digital_activation_link_length'] and not user_id:\n raise errors.MissingRequiredParameter('code')\n\n link_type = ConfirmationLinkTypeEnum.CLT_MOBILE if (\n len(code) == current_app.config['digital_activation_link_length']) else ConfirmationLinkTypeEnum.CLT_EMAIL\n user = UserManager.confirm_email_or_mobile(code, user_id if user_id else None, link_type)\n if not user:\n raise errors.UserNotFound()\n\n data = get_user_api_structure(user)\n return {\"result\": data}\n\n\n@auth_bp.route('/account/send_activation_code/', methods=['POST'])\n@api_view\n@validate_arguments(AtLeastOneOfValidator(\n email=EmailAddressValidator(),\n mobile=ArgumentValidator()\n))\ndef resend_activation_code(email=None, mobile=None):\n UserManager.resend_activation_code(email, mobile)\n return {'result': True}\n\n\n@auth_bp.route('/account/login/temporal/', methods=['POST'])\n@api_view\n@validate_arguments(guid=ArgumentValidator(required=False))\ndef login_temporal(guid=None):\n if \"X-Forwarded-For\" in request.headers and request.headers.getlist(\"X-Forwarded-For\"):\n ip = request.headers.getlist(\"X-Forwarded-For\")[0]\n elif \"X-Real-Ip\" in request.headers and request.headers.getlist(\"X-Real-Ip\"):\n ip = request.headers.getlist(\"X-Real-Ip\")[0]\n else:\n ip = request.remote_addr\n\n if ip in current_app.config['OFFICE_IP']:\n test_user = True\n else:\n test_user = False\n\n user = None\n if current_user and current_user.is_anonymous:\n user = UserManager.create_temp_user()\n if user:\n login_user(user)\n user.last_login_date = datetime.utcnow()\n user.is_tester = test_user\n\n elif current_user:\n user = current_user\n\n if not user:\n raise errors.UserNotFound()\n\n data = get_user_api_structure(user)\n return {\"result\": data}\n\n\n@auth_bp.route('/account/profile/update/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(email=ArgumentValidator(required=False),\n mobile=ArgumentValidator(required=False),\n notifications=ArgumentValidator(required=False)\n )\ndef update_profile(email=None, notifications=None, mobile=None):\n 
UserManager.update_profile(current_user, email, new_mobile=mobile)\n\n data = get_user_api_structure(current_user)\n return {\"result\": data}\n\n\n@auth_bp.route('/account/password_recovery/', methods=['POST'])\n@api_view\n@validate_arguments(AtLeastOneOfValidator(email=EmailAddressValidator(), mobile=ArgumentValidator()))\ndef password_recovery(email=None, mobile=None):\n UserManager.send_password_recovery_code(email, mobile)\n return {\"result\": True}\n\n\n@auth_bp.route('/account/password_change/', methods=['POST'])\n@api_view\n@validate_arguments(\n AtLeastOneOfValidator(\n user_id=ArgumentValidator(),\n email=EmailAddressValidator()),\n AtLeastOneOfValidator(\n code=ArgumentValidator(),\n old_password=PasswordValidator(raise_exception=errors.InvalidPassword)\n ),\n new_password=PasswordValidator()\n)\ndef password_change(user_id=None, email=None, code=None, old_password=None, new_password=None):\n email = email.lower() if email else u\"\"\n\n if not user_id and email:\n # find user based on email\n user = AuthUser.query.filter_by(email=email).first()\n if not user:\n raise errors.UserNotFound()\n user_id = user.uuid\n elif not user_id:\n raise errors.InvalidParameterValue('user_id')\n\n user = UserManager.change_password(user_id, code, old_password, new_password)\n if user and not current_user.is_authenticated:\n login_user(user)\n return {\"result\": True}\n\n\n@auth_bp.route('/account/by/code/', methods=['GET'])\n@api_view\n@validate_arguments(user_id=ArgumentValidator(),\n code=ConfirmationCodeValidator())\ndef account_by_code(user_id=None, code=None):\n user = UserManager.get_user_by_code(user_id, code)\n\n data = get_user_api_structure(user)\n return {\"result\": data}\n\n\n@auth_bp.route('/account/login/external/<string:social_network>/<path:next_page>', methods=['GET'], strict_slashes=True)\n@auth_bp.route('/account/login/external/<string:social_network>/', methods=['GET'])\n@api_view\ndef login_external(social_network=None, next_page=\"\"):\n class MyResp(Response):\n def set_cookie(self, key, value='', max_age=None, expires=None,\n path='/', domain=None, secure=None, httponly=False):\n self.__name = key\n self.__val = value\n super(MyResp, self).set_cookie(key, value, max_age, expires, path, domain, secure, httponly)\n\n try:\n code = request.args['code']\n except Exception:\n if 'error' in request.args:\n html = u\"\"\"<html><head></head><body><script>window.location.href = \"/\";</script></body></html>\"\"\"\n my_resp = MyResp(html, status=200, content_type=\"text/html; charset=utf-8\")\n return my_resp\n raise errors.InvalidParameterValue('code')\n\n if social_network not in ('facebook', 'vk', 'google'):\n raise errors.InvalidParameterValue('social_network')\n\n backend = SocialServiceBackends.backends.get(social_network)\n if not backend:\n raise errors.InvalidParameterValue('social_network')\n\n config = current_app.config\n\n if backend:\n if '?' 
in next_page:\n next_page = next_page.split('?')[0]\n current_app.logger.debug(u\"2 redirect url: %s\" % next_page)\n access_token, ext_data = backend.get_token(code, config, next_page=next_page)\n if not access_token:\n raise errors.SocialAuthError()\n\n user_data = backend.get_user_data(config, access_token)\n social_uid = user_data.get('id')\n if not social_uid:\n raise errors.SocialAuthError()\n social_service_user_link = backend.get_user_link(unicode(social_uid))\n if social_service_user_link:\n user = social_service_user_link.user\n else:\n ext_data = ext_data or {}\n if 'email' not in ext_data:\n ext_data = backend.get_user_data(config, access_token)\n user, user_profile = UserManager.create_user(access_token, \"\", ext_data.get('email', \"\"), \"\", \"\", \"\", \"\",\n social_network, email_is_social=True)\n\n old_user_id = current_user.id if (\n current_user and not current_user.is_anonymous and current_user.temporal) else None\n if old_user_id:\n new_user_id = user.id\n change_account_data_owner(old_user_id, new_user_id)\n\n google_client_id = request.cookies.get('_ga_cid')\n if google_client_id and not user.temporal:\n metrics.update_user_info(user, google_client_id=google_client_id)\n login_user(user)\n user.last_login_date = datetime.utcnow()\n\n my_resp = MyResp()\n current_app.session_interface.save_session(current_app, flask.session, my_resp)\n # noinspection PyUnresolvedReferences\n html = u\"\"\"\n <html>\n <head></head>\n <body>\n <script>\n window.location.href = \"/%s\";\n </script>\n </body>\n </html>\n \"\"\" % next_page\n my_resp = MyResp(html, status=200, content_type=\"text/html; charset=utf-8\")\n return my_resp\n\n return {\"result\": None}\n\n\n@auth_bp.route('/account/login/external-url/', methods=['GET'])\n@api_view\n@validate_arguments(social_network=ArgumentValidator())\ndef get_external_login_url(social_network=None):\n if social_network not in ('facebook', 'vk', 'google'):\n raise errors.InvalidParameterValue('social_network')\n\n backend = SocialServiceBackends.backends.get(social_network)\n if not backend:\n raise errors.InvalidParameterValue('social_network')\n\n config = current_app.config\n next_page = request.args['next_page']\n\n if '?' 
in next_page:\n next_page = next_page.split('?')[0]\n current_app.logger.debug(u\"2 redirect url: %s\" % next_page)\n\n token_url = backend.get_token_url(config, next_page=next_page)\n return {\"result\": token_url}\n\n\n@auth_bp.route('/go/<go_id>/', methods=['GET'])\ndef go_auth_url(go_id):\n if not go_id or not go_id.isalnum():\n abort(404)\n\n try:\n return redirect(UserManager.authorize_by_url(go_id))\n except Exception:\n pass\n abort(404)\n" }, { "alpha_fraction": 0.3603382110595703, "alphanum_fraction": 0.44177260994911194, "avg_line_length": 42.654449462890625, "blob_id": "6a8f8d690a6c3a4cfcf463b1cbed3865cf0ceea2", "content_id": "968b44e388e759a2dd3203aef7c00b4b46aadfcf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 18226, "license_type": "no_license", "max_line_length": 120, "num_lines": 382, "path": "/jb_tests/test_pack/test_ifns_api.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport json\nimport os\nfrom fw.documents.address_enums import RFRegionsEnum\n\nos.environ['CELERY_CONFIG_MODULE'] = 'dev_celeryconfig'\n\nfrom test_pack.base_batch_test import BaseBatchTestCase\nfrom test_pack.test_api import authorized\n\nclass IfnsApiTestCase(BaseBatchTestCase):\n\n @authorized()\n def test_search_ifns(self):\n r1 = self.addRegIfns(u\"ИФНС 1 (рег)\", code=100)\n n1 = self.addIfns(u\"ИФНС А\", r1, code=1001)\n n2 = self.addIfns(u\"ИФНС Б\", r1, code=1002, address={'region': RFRegionsEnum.RFR_SPB, 'address_string': u'СПБ'})\n n3 = self.addIfns(u\"ИФНС В\", r1, code=1003, address={'region': RFRegionsEnum.RFR_SPB, 'address_string': u'СПБ'})\n\n response = self.test_client.get('/structures/ifns/search/')\n self.assertEqual(response.status_code, 200)\n\n result = json.loads(response.data)\n self.assertEqual(result, {\n u'count': 4,\n u'total': 4,\n u'ifns': [{\n u'additional_info': u\"Код ОКПО:88111351 Режим работы Понедельник-четверг: 8.30 - 17.30\",\n u'address': u\"Россия, Респ Башкортостан, г Уфа, ул Красина, д 52\",\n u'caption': u\"ИФНС 1 (рег)\",\n u'id': unicode(r1['_id']),\n u'ifns_code': u'0100',\n u'payment_details': {\n u'account': u'',\n u'bank_caption': u'',\n u'bik': u'',\n u'inn': u'0275067000',\n u'kpp': u'027501001',\n u'payment_recipient_caption': u'ИФНС 1 (рег)'\n },\n u'phones': [u'+73472290200', u'+73472290210']\n }, {\n u'additional_info': u\"Код ОКПО:88111351 Режим работы Понедельник-четверг: 8.30 - 17.30\",\n u'address': u\"Россия, Респ Башкортостан, г Уфа, ул Красина, д 52\",\n u'caption': u\"ИФНС А\",\n u'id': unicode(n1['_id']),\n u'ifns_code': u'1001',\n u'llc_registration_ifns': {\n u'caption': u'ИФНС 1 (рег)',\n u'ifns_reg_code': u'10010',\n u'address': u\"Россия, Респ Башкортостан, г Уфа, ул Красина, д 52\",\n u'phones': [u'+73472290200', u'+73472290210']\n },\n u'payment_details': {\n u'account': u'',\n u'bank_caption': u'',\n u'bik': u'',\n u'inn': u'0275067000',\n u'kpp': u'027501001',\n u'payment_recipient_caption': u'ИФНС А'\n },\n u'phones': [u'+73472290200', u'+73472290210']\n }, {\n u'additional_info': u\"Код ОКПО:88111351 Режим работы Понедельник-четверг: 8.30 - 17.30\",\n u'address': u\"СПБ\",\n u'caption': u\"ИФНС Б\",\n u'id': unicode(n2['_id']),\n u'ifns_code': u'1002',\n u'llc_registration_ifns': {\n u'caption': u'ИФНС 1 (рег)',\n u'ifns_reg_code': u'10020',\n u'address': u\"Россия, Респ Башкортостан, г Уфа, ул Красина, д 52\",\n u'phones': [u'+73472290200', u'+73472290210']\n },\n u'payment_details': {\n u'account': u'',\n 
u'bank_caption': u'',\n u'bik': u'',\n u'inn': u'0275067000',\n u'kpp': u'027501001',\n u'payment_recipient_caption': u'ИФНС Б'\n },\n u'phones': [u'+73472290200', u'+73472290210']\n }, {\n u'additional_info': u\"Код ОКПО:88111351 Режим работы Понедельник-четверг: 8.30 - 17.30\",\n u'address': u\"СПБ\",\n u'caption': u\"ИФНС В\",\n u'id': unicode(n3['_id']),\n u'ifns_code': u'1003',\n u'llc_registration_ifns': {\n u'caption': u'ИФНС 1 (рег)',\n u'ifns_reg_code': u'10030',\n u'address': u\"Россия, Респ Башкортостан, г Уфа, ул Красина, д 52\",\n u'phones': [u'+73472290200', u'+73472290210']\n },\n u'payment_details': {\n u'account': u'',\n u'bank_caption': u'',\n u'bik': u'',\n u'inn': u'0275067000',\n u'kpp': u'027501001',\n u'payment_recipient_caption': u'ИФНС В'\n },\n u'phones': [u'+73472290200', u'+73472290210']\n }]\n })\n\n response = self.test_client.get(u'/structures/ifns/search/?region=%s' % RFRegionsEnum.RFR_SPB)\n self.assertEqual(response.status_code, 200)\n\n result = json.loads(response.data)\n self.assertEqual(result, {\n u'count': 2,\n u'total': 2,\n u'ifns': [{\n u'additional_info': u\"Код ОКПО:88111351 Режим работы Понедельник-четверг: 8.30 - 17.30\",\n u'address': u\"СПБ\",\n u'caption': u\"ИФНС Б\",\n u'id': unicode(n2['_id']),\n u'ifns_code': u'1002',\n u'llc_registration_ifns': {\n u'caption': u'ИФНС 1 (рег)',\n u'ifns_reg_code': u'10020',\n u'address': u\"Россия, Респ Башкортостан, г Уфа, ул Красина, д 52\",\n u'phones': [u'+73472290200', u'+73472290210']\n },\n u'payment_details': {\n u'account': u'',\n u'bank_caption': u'',\n u'bik': u'',\n u'inn': u'0275067000',\n u'kpp': u'027501001',\n u'payment_recipient_caption': u'ИФНС Б'\n },\n u'phones': [u'+73472290200', u'+73472290210']\n }, {\n u'additional_info': u\"Код ОКПО:88111351 Режим работы Понедельник-четверг: 8.30 - 17.30\",\n u'address': u\"СПБ\",\n u'caption': u\"ИФНС В\",\n u'id': unicode(n3['_id']),\n u'ifns_code': u'1003',\n u'llc_registration_ifns': {\n u'caption': u'ИФНС 1 (рег)',\n u'ifns_reg_code': u'10030',\n u'address': u\"Россия, Респ Башкортостан, г Уфа, ул Красина, д 52\",\n u'phones': [u'+73472290200', u'+73472290210']\n },\n u'payment_details': {\n u'account': u'',\n u'bank_caption': u'',\n u'bik': u'',\n u'inn': u'0275067000',\n u'kpp': u'027501001',\n u'payment_recipient_caption': u'ИФНС В'\n },\n u'phones': [u'+73472290200', u'+73472290210']\n }]\n })\n\n response = self.test_client.get(u'/structures/ifns/search/?search_string=ИФНС А')\n self.assertEqual(response.status_code, 200)\n\n result = json.loads(response.data)\n self.assertEqual(result, {\n u'count': 1,\n u'total': 1,\n u'ifns': [{\n u'additional_info': u\"Код ОКПО:88111351 Режим работы Понедельник-четверг: 8.30 - 17.30\",\n u'address': u\"Россия, Респ Башкортостан, г Уфа, ул Красина, д 52\",\n u'caption': u\"ИФНС А\",\n u'id': unicode(n1['_id']),\n u'ifns_code': u'1001',\n u'llc_registration_ifns': {\n u'caption': u'ИФНС 1 (рег)',\n u'ifns_reg_code': u'10010',\n u'address': u\"Россия, Респ Башкортостан, г Уфа, ул Красина, д 52\",\n u'phones': [u'+73472290200', u'+73472290210']\n },\n u'payment_details': {\n u'account': u'',\n u'bank_caption': u'',\n u'bik': u'',\n u'inn': u'0275067000',\n u'kpp': u'027501001',\n u'payment_recipient_caption': u'ИФНС А'\n },\n u'phones': [u'+73472290200', u'+73472290210']\n }]\n })\n\n response = self.test_client.get(u'/structures/ifns/search/?search_string=ИФНС')\n self.assertEqual(response.status_code, 200)\n\n result = json.loads(response.data)\n self.assertEqual(result, {\n u'count': 4,\n u'total': 
4,\n u'ifns': [{\n u'additional_info': u\"Код ОКПО:88111351 Режим работы Понедельник-четверг: 8.30 - 17.30\",\n u'address': u\"Россия, Респ Башкортостан, г Уфа, ул Красина, д 52\",\n u'caption': u\"ИФНС 1 (рег)\",\n u'id': unicode(r1['_id']),\n u'ifns_code': u'0100',\n u'payment_details': {\n u'account': u'',\n u'bank_caption': u'',\n u'bik': u'',\n u'inn': u'0275067000',\n u'kpp': u'027501001',\n u'payment_recipient_caption': u'ИФНС 1 (рег)'\n },\n u'phones': [u'+73472290200', u'+73472290210']\n }, {\n u'additional_info': u\"Код ОКПО:88111351 Режим работы Понедельник-четверг: 8.30 - 17.30\",\n u'address': u\"Россия, Респ Башкортостан, г Уфа, ул Красина, д 52\",\n u'caption': u\"ИФНС А\",\n u'id': unicode(n1['_id']),\n u'ifns_code': u'1001',\n u'llc_registration_ifns': {\n u'caption': u'ИФНС 1 (рег)',\n u'ifns_reg_code': u'10010',\n u'address': u\"Россия, Респ Башкортостан, г Уфа, ул Красина, д 52\",\n u'phones': [u'+73472290200', u'+73472290210']\n },\n u'payment_details': {\n u'account': u'',\n u'bank_caption': u'',\n u'bik': u'',\n u'inn': u'0275067000',\n u'kpp': u'027501001',\n u'payment_recipient_caption': u'ИФНС А'\n },\n u'phones': [u'+73472290200', u'+73472290210']\n }, {\n u'additional_info': u\"Код ОКПО:88111351 Режим работы Понедельник-четверг: 8.30 - 17.30\",\n u'address': u\"СПБ\",\n u'caption': u\"ИФНС Б\",\n u'id': unicode(n2['_id']),\n u'ifns_code': u'1002',\n u'llc_registration_ifns': {\n u'caption': u'ИФНС 1 (рег)',\n u'ifns_reg_code': u'10020',\n u'address': u\"Россия, Респ Башкортостан, г Уфа, ул Красина, д 52\",\n u'phones': [u'+73472290200', u'+73472290210']\n },\n u'payment_details': {\n u'account': u'',\n u'bank_caption': u'',\n u'bik': u'',\n u'inn': u'0275067000',\n u'kpp': u'027501001',\n u'payment_recipient_caption': u'ИФНС Б'\n },\n u'phones': [u'+73472290200', u'+73472290210']\n }, {\n u'additional_info': u\"Код ОКПО:88111351 Режим работы Понедельник-четверг: 8.30 - 17.30\",\n u'address': u\"СПБ\",\n u'caption': u\"ИФНС В\",\n u'id': unicode(n3['_id']),\n u'ifns_code': u'1003',\n u'llc_registration_ifns': {\n u'caption': u'ИФНС 1 (рег)',\n u'ifns_reg_code': u'10030',\n u'address': u\"Россия, Респ Башкортостан, г Уфа, ул Красина, д 52\",\n u'phones': [u'+73472290200', u'+73472290210']\n },\n u'payment_details': {\n u'account': u'',\n u'bank_caption': u'',\n u'bik': u'',\n u'inn': u'0275067000',\n u'kpp': u'027501001',\n u'payment_recipient_caption': u'ИФНС В'\n },\n u'phones': [u'+73472290200', u'+73472290210']\n }]\n })\n\n response = self.test_client.get(u'/structures/ifns/search/?search_string=ИФНС&limit=2&offset=1')\n self.assertEqual(response.status_code, 200)\n\n result = json.loads(response.data)\n self.assertEqual(result, {\n u'count': 2,\n u'total': 4,\n u'ifns': [{\n u'additional_info': u\"Код ОКПО:88111351 Режим работы Понедельник-четверг: 8.30 - 17.30\",\n u'address': u\"Россия, Респ Башкортостан, г Уфа, ул Красина, д 52\",\n u'caption': u\"ИФНС А\",\n u'id': unicode(n1['_id']),\n u'ifns_code': u'1001',\n u'llc_registration_ifns': {\n u'caption': u'ИФНС 1 (рег)',\n u'ifns_reg_code': u'10010',\n u'address': u\"Россия, Респ Башкортостан, г Уфа, ул Красина, д 52\",\n u'phones': [u'+73472290200', u'+73472290210']\n },\n u'payment_details': {\n u'account': u'',\n u'bank_caption': u'',\n u'bik': u'',\n u'inn': u'0275067000',\n u'kpp': u'027501001',\n u'payment_recipient_caption': u'ИФНС А'\n },\n u'phones': [u'+73472290200', u'+73472290210']\n }, {\n u'additional_info': u\"Код ОКПО:88111351 Режим работы Понедельник-четверг: 8.30 - 17.30\",\n u'address': 
u\"СПБ\",\n u'caption': u\"ИФНС Б\",\n u'id': unicode(n2['_id']),\n u'ifns_code': u'1002',\n u'llc_registration_ifns': {\n u'caption': u'ИФНС 1 (рег)',\n u'ifns_reg_code': u'10020',\n u'address': u\"Россия, Респ Башкортостан, г Уфа, ул Красина, д 52\",\n u'phones': [u'+73472290200', u'+73472290210']\n },\n u'payment_details': {\n u'account': u'',\n u'bank_caption': u'',\n u'bik': u'',\n u'inn': u'0275067000',\n u'kpp': u'027501001',\n u'payment_recipient_caption': u'ИФНС Б'\n },\n u'phones': [u'+73472290200', u'+73472290210']\n }]\n })\n\n @authorized()\n def test_get_ifns(self):\n r = self.addRegIfns(u\"ИФНС 1 (рег)\", code=100)\n n = self.addIfns(u\"ИФНС А\", r, code=1003, address={'region': RFRegionsEnum.RFR_SPB, 'address_string': u'СПБ'})\n\n response = self.test_client.get('/structures/ifns/?id=%s' % str(r['_id']))\n self.assertEqual(response.status_code, 200)\n\n result = json.loads(response.data)\n self.assertEqual(result, {\n u'additional_info': u\"Код ОКПО:88111351 Режим работы Понедельник-четверг: 8.30 - 17.30\",\n u'address': u\"Россия, Респ Башкортостан, г Уфа, ул Красина, д 52\",\n u'caption': u'ИФНС 1 (рег)',\n u'id': unicode(r['_id']),\n u'ifns_code': u'0100',\n u'payment_details': {\n u'account': u'',\n u'bank_caption': u'',\n u'bik': u'',\n u'inn': u'0275067000',\n u'kpp': u'027501001',\n u'payment_recipient_caption': u'ИФНС 1 (рег)'\n },\n u'phones': [u'+73472290200', u'+73472290210']\n })\n\n response = self.test_client.get('/structures/ifns/?id=%s' % str(n['_id']))\n self.assertEqual(response.status_code, 200)\n\n result = json.loads(response.data)\n self.assertEqual(result, {\n u'additional_info': u\"Код ОКПО:88111351 Режим работы Понедельник-четверг: 8.30 - 17.30\",\n u'address': u\"СПБ\",\n u'caption': u'ИФНС А',\n u'id': unicode(n['_id']),\n u'ifns_code': u'1003',\n u'payment_details': {\n u'account': u'',\n u'bank_caption': u'',\n u'bik': u'',\n u'inn': u'0275067000',\n u'kpp': u'027501001',\n u'payment_recipient_caption': u'ИФНС А'\n },\n u'phones': [u'+73472290200', u'+73472290210'],\n u'llc_registration_ifns': {\n u'caption': u'ИФНС 1 (рег)',\n u'ifns_reg_code': u'10030',\n u'address': u\"Россия, Респ Башкортостан, г Уфа, ул Красина, д 52\",\n u'phones': [u'+73472290200', u'+73472290210']\n }\n })\n" }, { "alpha_fraction": 0.5895910859107971, "alphanum_fraction": 0.5977694988250732, "avg_line_length": 33.487178802490234, "blob_id": "89ed03ff7362b6ae1d4f00aaeb47bbd4e6b2f327", "content_id": "408cb716257d2654d1acd7e01f0584b3a2fb6429", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1345, "license_type": "no_license", "max_line_length": 143, "num_lines": 39, "path": "/app/async/async_dadata_provider.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport json\nfrom urllib import urlencode\nfrom tornado import httputil, gen\nfrom tornado.httpclient import AsyncHTTPClient\nfrom utils import make_cache_key\n\nclass AsyncDadataProvider(object):\n\n @staticmethod\n @gen.coroutine\n def get_detailed_address(address, cache):\n if not address or not isinstance(address, basestring):\n raise gen.Return(None)\n\n\n cache_key = make_cache_key('dadata/s/addr' + address)\n #result_text = cache.get(cache_key)\n result_text = \"\"\n if not result_text:\n headers = httputil.HTTPHeaders({\n u'Content-Type': u'application/json',\n u'Accept': u'application/json',\n u'Authorization': u'Token %s' % dd_api_key\n })\n http_client = AsyncHTTPClient()\n body = 
json.dumps({\"query\" : address})\n response = yield http_client.fetch(url, method = 'POST', headers = headers, request_timeout=5, body = body, follow_redirects=False)\n if response.code != 200:\n raise gen.Return(None)\n result_text = response.body\n\n cache.set(cache_key, result_text, 3600 * 24)\n try:\n result = json.loads(result_text)\n except Exception:\n raise gen.Return(None)\n raise gen.Return(result)\n" }, { "alpha_fraction": 0.656843900680542, "alphanum_fraction": 0.6579263806343079, "avg_line_length": 34.99134063720703, "blob_id": "ac136188187dbb95738db25ad575cf4503d0bc07", "content_id": "e095066033eba4a0271c48847d981190ee788a4f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8314, "license_type": "no_license", "max_line_length": 125, "num_lines": 231, "path": "/app/fw/api/views/object_management.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom flask import current_app, Blueprint\n\nfrom flask_login import login_required, current_user\n\nfrom fw.api import errors\nfrom fw.api.args_validators import validate_arguments, ObjectRefValidator, IntValidator\nfrom fw.api.args_validators import JsonValidator\nfrom fw.api.base_handlers import api_view\nfrom fw.documents.db_fields import PrivatePersonDbObject, CompanyDbObject\nfrom fw.documents.fields.doc_fields import PrivatePerson, CompanyObject\nfrom fw.db.sql_base import db as sqldb\n\ndomain_objects_bp = Blueprint('domain_objects', __name__)\n\n\n@domain_objects_bp.route('/entity/person/create/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(person=JsonValidator())\ndef create_person(person=None):\n try:\n person_doc = PrivatePerson.parse_raw_value(person, api_data=True)\n if not person_doc:\n raise Exception()\n except Exception, ex:\n raise errors.InvalidParameterValue('person')\n\n try:\n person_doc.validate(strict=False)\n except Exception, ex:\n raise errors.InvalidParameterValue('person')\n\n person = person_doc.get_db_object()\n person._owner = current_user\n\n sqldb.session.add(person)\n sqldb.session.commit()\n\n result = person_doc.get_api_structure()\n result['id'] = person.id + \"_person\"\n return {\"result\": result}\n\n\n@domain_objects_bp.route('/entity/person/', methods=['GET'])\n@api_view\n@login_required\n@validate_arguments(person_id=ObjectRefValidator(required=False),\n count=IntValidator(min_val=1, required=False),\n offset=IntValidator(min_val=0, required=False))\ndef get_persons(person_id=None, count=None, offset=None):\n result_list = []\n person_id = str(person_id) if person_id else ''\n\n if person_id:\n person = PrivatePersonDbObject.query.filter_by(id=person_id, deleted=False, _owner=current_user,\n _batch=None, _copy_id=None).first()\n if not person:\n raise errors.EntityNotFound('person_id')\n person_doc = PrivatePerson.db_obj_to_field(person)\n result_list.append(person_doc.get_api_structure())\n\n result_count = 1\n result_total = 1\n else:\n cur = PrivatePersonDbObject.query.filter_by(deleted=False, _owner=current_user, _batch=None, _copy_id=None).order_by(\n PrivatePersonDbObject.id.asc())\n result_total = cur.count()\n cur = PrivatePersonDbObject.query.filter_by(deleted=False, _owner=current_user, _batch=None, _copy_id=None).order_by(\n PrivatePersonDbObject.id.asc())\n\n if count is not None:\n cur = cur.limit(count)\n if offset is not None:\n cur = cur.offset(offset)\n\n result_count = cur.count()\n for person in cur:\n person_doc = 
PrivatePerson.db_obj_to_field(person)\n result_list.append(person_doc.get_api_structure())\n\n return {'result': {\n 'persons': result_list,\n 'total': result_total,\n 'count': result_count\n }}\n\n\n@domain_objects_bp.route('/entity/person/update/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(person_id=ObjectRefValidator(), person=JsonValidator())\ndef update_person(person_id=None, person=None):\n person_db_obj = PrivatePersonDbObject.query.filter_by(id=unicode(person_id), _owner=current_user,\n _batch=None).first()\n if not person_db_obj:\n raise errors.EntityNotFound()\n\n PrivatePerson.update_db_obj(person_db_obj, person, api_data=True)\n\n # validator = IsMyObjectVisitor(current_user._id) # todo:!!!\n # try:\n # validator.process(old_person_doc)\n # except NotMineException:\n # raise InvalidParameterValue('person')\n\n sqldb.session.commit()\n person_doc = PrivatePerson.db_obj_to_field(person_db_obj)\n return {\"result\": person_doc.get_api_structure()}\n\n\n@domain_objects_bp.route('/entity/person/remove/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(person_id=ObjectRefValidator())\ndef delete_person(person_id=None):\n person_db_obj = PrivatePersonDbObject.query.filter_by(id=unicode(person_id), _owner=current_user,\n _batch=None).first()\n if not person_db_obj:\n raise errors.EntityNotFound()\n\n person_db_obj.deleted = True\n sqldb.session.commit()\n\n return {\"result\": True}\n\n\n@domain_objects_bp.route('/entity/company/create/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(company=JsonValidator())\ndef create_company(company=None):\n try:\n company_doc = CompanyObject.parse_raw_value(company, api_data=True)\n if not company_doc:\n raise Exception()\n except Exception:\n current_app.logger.exception(u\"Failed to validate company data\")\n raise errors.InvalidParameterValue('company')\n\n try:\n company_doc.validate(strict=False)\n except Exception, ex:\n raise errors.InvalidParameterValue('company')\n\n company_db_obj = company_doc.get_db_object()\n company_db_obj._owner = current_user\n\n sqldb.session.add(company_db_obj)\n sqldb.session.commit()\n\n result = company_doc.get_api_structure()\n result['id'] = company_db_obj.id + \"_company\"\n return {\"result\": result}\n\n\n@domain_objects_bp.route('/entity/company/', methods=['GET'])\n@api_view\n@login_required\n@validate_arguments(company_id=ObjectRefValidator(required=False),\n count=IntValidator(min_val=1, required=False),\n offset=IntValidator(min_val=0, required=False))\ndef get_companies(company_id=None, count=None, offset=None):\n result_list = []\n company_id = str(company_id) if company_id else ''\n if company_id:\n company = CompanyDbObject.query.filter_by(id=company_id, deleted=False, _owner=current_user,\n _batch=None, _copy_id=None).first()\n\n if not company:\n raise errors.EntityNotFound()\n company_doc = CompanyObject.db_obj_to_field(company)\n result_list.append(company_doc.get_api_structure())\n result_count = 1\n result_total = 1\n else:\n cur = CompanyDbObject.query.filter_by(deleted=False, _owner=current_user, _batch=None, _copy_id=None).order_by(\n CompanyDbObject.id.asc())\n result_total = cur.count()\n cur = CompanyDbObject.query.filter_by(deleted=False, _owner=current_user, _batch=None, _copy_id=None).order_by(\n CompanyDbObject.id.asc())\n if count is not None:\n cur = cur.limit(count)\n if offset is not None:\n cur = cur.offset(offset)\n result_count = cur.count()\n for company in cur:\n company_doc = 
CompanyObject.db_obj_to_field(company)\n result_list.append(company_doc.get_api_structure())\n return {'result': {\n 'companies': result_list,\n 'total': result_total,\n 'count': result_count\n }}\n\n\n@domain_objects_bp.route('/entity/company/update/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(company_id=ObjectRefValidator(), company=JsonValidator())\ndef update_company(company_id=None, company=None):\n company_id = str(company_id) if company_id else None\n company_db_obj = CompanyDbObject.query.filter_by(id=company_id, _owner=current_user, _batch=None).first()\n if not company_db_obj:\n raise errors.EntityNotFound()\n\n old_company_doc = CompanyObject.db_obj_to_field(company_db_obj)\n old_company_doc.update(company)\n\n CompanyObject.update_db_obj(company_db_obj, company, api_data=True)\n\n sqldb.session.commit()\n company_doc = CompanyObject.db_obj_to_field(company_db_obj)\n\n return {\"result\": company_doc.get_api_structure()}\n\n\n@domain_objects_bp.route('/entity/company/remove/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(company_id=ObjectRefValidator())\ndef delete_company(company_id=None):\n company_id = str(company_id) if company_id else None\n company_db_obj = CompanyDbObject.query.filter_by(id=company_id, _owner=current_user, _batch=None).first()\n if not company_db_obj:\n raise errors.EntityNotFound()\n\n company_db_obj.deleted = True\n sqldb.session.commit()\n\n return {\"result\": True}\n" }, { "alpha_fraction": 0.7164404392242432, "alphanum_fraction": 0.7179487347602844, "avg_line_length": 40.4375, "blob_id": "969544bb417869e7e70c2fc53cdb1e6943e4b786", "content_id": "b2777ee9a68a2f85e2b56a2089f919929b8dc253", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 663, "license_type": "no_license", "max_line_length": 119, "num_lines": 16, "path": "/app/async/urls.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom async.test_views import TestDbSyncView, TestNetSyncView, TestNetAsyncView, TestDbAsyncView\n\nfrom async.views import IfnsGetScheduleView, IfnsMakeBookingView, IfnsBookingView, IfnsNameView, IfnsDiscardBookingView\n\nurl_patterns = (\n (r\"/meeting/ifns/\", IfnsBookingView),\n (r\"/meeting/ifns/schedule/\", IfnsGetScheduleView),\n (r\"/meeting/ifns/create/\", IfnsMakeBookingView),\n (r\"/meeting/ifns/name/\", IfnsNameView),\n (r\"/meeting/ifns/discard/\", IfnsDiscardBookingView),\n (r\"/db-sync/\", TestDbSyncView),\n (r\"/db-async/\", TestDbAsyncView),\n (r\"/net-sync/\", TestNetSyncView),\n (r\"/net-async/\", TestNetAsyncView)\n)\n" }, { "alpha_fraction": 0.5769515037536621, "alphanum_fraction": 0.5884017944335938, "avg_line_length": 35.362571716308594, "blob_id": "96e1076d81cf09bc6a805febb464b7ac21f2320e", "content_id": "1832b3650adc12fcc2ae524f632f70934408ce90", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 31153, "license_type": "no_license", "max_line_length": 139, "num_lines": 855, "path": "/app/services/ifns/api.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\nimport json\nfrom random import randint\nimport re\nfrom datetime import timedelta\nfrom flask import Blueprint, current_app\nfrom flask_login import login_required, current_user\nfrom common_utils import int_to_ifns\n\nfrom fw.api import errors\nfrom fw.api.args_validators import validate_arguments, 
IntValidator, EnumValidator, DateTypeValidator, DateTimeValidator\nfrom fw.api.args_validators import ArgumentValidator\nfrom fw.api.base_handlers import api_view\nfrom fw.api.errors import BatchNotFound\nfrom fw.auth.social_services import SocialServiceBackends\nfrom fw.catalogs.models import OkvadObject, OkvedCatalogObject\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.batch_manager import BatchManager\nfrom fw.documents.db_fields import DocumentBatchDbObject, CompanyDbObject, PrivatePersonDbObject\nfrom fw.documents.enums import DocumentBatchTypeEnum, BatchStatusEnum\nfrom fw.documents.fields.doc_fields import as_dumpable\n\nfrom services.ifns import ifns_manager\nfrom services.ifns.data_model.fields import IfnsBooking\nfrom services.ifns.data_model.models import IfnsBookingObject, IfnsCatalogObject\nfrom services.ifns.utils.ifns_logger import IFNS_LOGGER\n\nfrom services.llc_reg.documents.enums import IfnsServiceEnum, FounderTypeEnum\n\nifns_bp = Blueprint('ifns', __name__)\n\n\n_okvad_to_json = lambda x: {\n '_id': x.id,\n 'okved': x.okved,\n 'caption': x.caption,\n 'nalog': x.nalog,\n 'parent': x.parent\n}\n\n@ifns_bp.route('/get_okvad/', methods=['POST'])\n@api_view\n@validate_arguments(\n parent=ArgumentValidator(required=False),\n batch_type=ArgumentValidator(required=False),\n search=ArgumentValidator(required=False)\n)\ndef get_okvad(parent=None, batch_type=None, search=None):\n search = search or \"\"\n make_okvad = lambda x: x['class'] + (\n ('.%s' % x['group'] + ('.%s' % x['kind'] if 'kind' in x else '')) if 'group' in x else '')\n\n query = OkvadObject.query.filter()\n if parent:\n query = query.filter_by(parent=parent)\n if search:\n query = query.filter(OkvadObject.caption.ilike(u'%%%s%%' % search))\n\n result = [_okvad_to_json(item) for item in query]\n else:\n query = OkvadObject.query.filter_by(parent=None)\n query2 = OkvadObject.query.filter(OkvadObject.parent.__ne__(None))\n if search:\n query = query.filter(OkvadObject.caption.ilike(u'%%%s%%' % search))\n query2 = query2.filter(OkvadObject.caption.ilike(u'%%%s%%' % search))\n\n result = [_okvad_to_json(item) for item in query.order_by(OkvadObject.caption.asc())] + [\n _okvad_to_json(item) for item in query2]\n\n from services.ip_reg.okvad_utils import is_restricted_for_ip_okvad\n if batch_type == DocumentBatchTypeEnum.DBT_NEW_IP:\n result = filter(lambda item: not is_restricted_for_ip_okvad(make_okvad(item)), result)\n return {'result': result}\n\n\ndef sort_okvads(search_term, okvad_list):\n term_len = len(search_term)\n for i in okvad_list:\n cap = i['caption']\n if cap == search_term:\n i['w'] += 200\n else:\n try:\n pattern = ur'\\W(%s\\w*)' % search_term\n m = re.search(pattern, cap, flags=re.I | re.U)\n word = m.group(1)\n i['w'] += 100 - abs(len(word) - term_len) * 10\n except Exception:\n pass\n result = sorted(okvad_list, key=lambda x: x['w'], reverse=True)\n return [{\n '_id': r['_id'],\n 'caption': r['caption'],\n 'code': r['code'],\n 'parent': r['parent'],\n } for r in result]\n\n\n@ifns_bp.route('/search_okvad/', methods=['GET'])\n@api_view\n@validate_arguments(title=ArgumentValidator(required=True, min_length=3))\ndef search_okvad(title=u\"\"):\n result = []\n\n for r in OkvadObject.query.filter(OkvadObject.caption.ilike('%%%s%%' % title)):\n result.append(as_dumpable({\n '_id': r.id,\n 'caption': r.caption,\n 'code': r.okved,\n 'parent': r.parent,\n 'w': 0\n }))\n\n result = sort_okvads(title, result)\n\n return {'result': result[:20]}\n\n\n@ifns_bp.route('/get_okvad_skeleton/', 
methods=['POST'])\n@api_view\n@validate_arguments(\n batch_type=ArgumentValidator(required=False),\n search=ArgumentValidator(required=False)\n)\ndef get_okvad_skeleton(batch_type=None, search=None):\n search = search or \"\"\n\n from services.ip_reg.okvad_utils import is_restricted_for_ip_okvad\n\n result = OkvadObject.query.filter_by(parent=None)\n all_roots_list = [i.id for i in result]\n all_roots = set(all_roots_list)\n\n query = OkvadObject.query.filter(OkvadObject.parent!=None)\n if search:\n query = query.filter(OkvadObject.caption.ilike(u'%%%s%%' % search))\n total_items = [i for i in query]\n\n if batch_type == DocumentBatchTypeEnum.DBT_NEW_IP:\n total_items = filter(lambda x: not is_restricted_for_ip_okvad(x.okved or ''), total_items)\n\n first_level = filter(lambda x: x.parent in all_roots, total_items) # подходящие по поиску\n first_level_ids = set([i.id for i in first_level])\n\n all_first_level = [i for i in OkvadObject.query.filter(OkvadObject.parent.in_(all_roots_list))]\n if batch_type == DocumentBatchTypeEnum.DBT_NEW_IP:\n all_first_level = filter(lambda x: not is_restricted_for_ip_okvad(x.okved or ''), all_first_level)\n all_first_level_map = {}\n for item in all_first_level:\n item_id = item.id\n all_first_level_map[item_id] = item\n\n #all_roots.add(None)\n\n second_level = filter(lambda x: x.parent not in all_roots, total_items)\n\n for item in second_level:\n item_code = item.okved\n item_id = item.id\n if not item_code:\n continue\n if item_id not in first_level_ids and item_id in all_first_level_map:\n first_level.append(item)\n first_level_ids.add(item_id)\n\n zero_level = set([x.parent for x in first_level])\n\n zero_level_full = [i.id for i in OkvadObject.query.filter(OkvadObject.parent==None).order_by(OkvadObject.okved.asc())]\n zero_level = filter(lambda i: i in all_roots, zero_level_full)\n\n result2 = [{\n '_id': str(i),\n 'parent': None\n } for i in zero_level]\n\n first_level = sorted(first_level, key=lambda x: x.okved)\n result2.extend([{\n '_id': i.id,\n 'code': i.okved,\n 'parent': i.parent\n } for i in first_level])\n\n second_level = sorted(second_level, key=lambda x: x.okved)\n result2.extend([{\n '_id': i.id,\n 'code': i.okved,\n 'parent': i.parent\n } for i in second_level])\n\n return {'result': result2}\n\n\n@ifns_bp.route('/get_okvad_items_data/', methods=['POST'])\n@api_view\n@validate_arguments(\n ids=ArgumentValidator(required=True),\n batch_type=ArgumentValidator(required=False)\n)\ndef get_okvad_items_data(ids=None, batch_type=None):\n if not ids:\n return {'result': []}\n\n items = json.loads(ids)\n result = [_okvad_to_json(item) for item in OkvadObject.query.filter(OkvadObject.id.in_(items))]\n result2 = []\n from services.ip_reg.okvad_utils import is_restricted_for_ip_okvad\n for r in result:\n if batch_type == DocumentBatchTypeEnum.DBT_NEW_IP and is_restricted_for_ip_okvad(r['okved']):\n continue\n result2.append({\n '_id': r['_id'],\n 'caption': r['caption'],\n 'parent': r['parent'],\n 'code': r['okved'],\n 'description': u\"\",\n 'nalog': r['nalog']\n })\n\n return {'result': result2}\n\n\n@ifns_bp.route('/get_okvad_catalog/', methods=['GET'])\n@api_view\n@validate_arguments(batch_type=ArgumentValidator(required=False))\ndef get_okvad_catalog(batch_type=None):\n result = []\n query = OkvedCatalogObject.query.filter().order_by(OkvedCatalogObject.name.asc())\n from services.ip_reg.okvad_utils import is_restricted_for_ip_okvad\n for item in query:\n new_item = {\n 'name': item.name,\n '_id': item.id,\n }\n deps = []\n\n for dep in 
item.departments:\n main_okvad = dep['main_okvad']\n if batch_type == DocumentBatchTypeEnum.DBT_NEW_IP and is_restricted_for_ip_okvad(main_okvad):\n continue\n deps.append({\n \"_id\": dep['id'],\n \"name\": dep['name'],\n \"main_okvad\": main_okvad\n })\n deps = sorted(deps, key=lambda x: x['name'])\n new_item['departments'] = deps\n result.append(new_item)\n\n return {'result': result}\n\n\n@ifns_bp.route('/get_department_okvads/', methods=['GET'])\n@api_view\n@validate_arguments(\n department_id=ArgumentValidator(required=True),\n batch_type=ArgumentValidator(required=False)\n)\ndef get_dep_okvads(department_id=None, batch_type=None):\n from services.ip_reg.okvad_utils import is_restricted_for_ip_okvad\n for c in OkvedCatalogObject.query.filter():\n if not c.departments:\n continue\n for dep in c.departments:\n if dep['id'] == department_id:\n okvad_list = dep['okvads']\n if not okvad_list:\n continue\n result = [_okvad_to_json(item) for item in OkvadObject.query.filter(OkvadObject.okved.in_(okvad_list))]\n for i in xrange(len(result)):\n result[i]['description'] = \"\"\n code = result[i]['okved']\n result[i]['code'] = code\n del result[i]['okved']\n if batch_type == DocumentBatchTypeEnum.DBT_NEW_IP:\n result = filter(lambda x: not is_restricted_for_ip_okvad(x['code']), result)\n return {'result': result}\n\n raise errors.InvalidParameterValue('department_id')\n\n\ndef get_company_person_data_for_ifns(founder_applicant, reg_responsible_object, email):\n person_data = None\n company_data = None\n\n reg_responsible_person = None\n try:\n if reg_responsible_object and '_id' in reg_responsible_object and 'type' in reg_responsible_object:\n obj_id = reg_responsible_object['_id']\n obj_type = reg_responsible_object['type']\n if obj_type == 'person':\n reg_responsible_person = obj_id\n else:\n reg_responsible_company = CompanyDbObject.query.filter_by(id=obj_id).scalar()\n if reg_responsible_company and reg_responsible_company.general_manager:\n reg_responsible_person = reg_responsible_company.general_manager['_id']\n except Exception:\n pass\n\n if reg_responsible_person:\n person_obj = PrivatePersonDbObject.query.filter_by(id=reg_responsible_person).scalar()\n if not person_obj:\n raise errors.InvalidParameterValue(\"reg_responsible_person\")\n person_data = {\n \"name\": person_obj.name or u\"\",\n \"surname\": person_obj.surname or u\"\",\n \"patronymic\": person_obj.patronymic or u\"\",\n \"phone\": person_obj.phone,\n \"email\": email,\n \"inn\": person_obj.inn\n }\n return None, person_data\n\n if FounderTypeEnum.TYPE_CLS(founder_applicant['founder_type']) == FounderTypeEnum.FT_PERSON:\n person_obj = PrivatePersonDbObject.query.filter_by(id=founder_applicant['person']['_id']).scalar()\n if not person_obj:\n raise errors.InvalidParameterValue(\"founder_applicant\")\n person_data = {\n \"name\": person_obj.name or u\"\",\n \"surname\": person_obj.surname or u\"\",\n \"patronymic\": person_obj.patronymic or u\"\",\n \"phone\": person_obj.phone,\n \"email\": email,\n \"inn\": person_obj.inn\n }\n else:\n company_obj = CompanyDbObject.query.filter_by(id=founder_applicant['company']['_id']).scalar()\n general_manager = company_obj.general_manager\n if not general_manager or not company_obj:\n raise errors.InvalidParameterValue(\"founder_applicant\")\n company_data = {\n \"name\": company_obj.full_name,\n \"phone\": general_manager.phone,\n \"email\": email,\n \"inn\": company_obj.inn\n }\n return company_data, person_data\n\n\ndef _make_fake_company_data():\n return {\n \"name\": u\"Ромашка \" 
+ str(randint(1000000, 9999999)),\n \"phone\": \"+7900\" + str(randint(1000000, 9999999)),\n \"email\": \"check\" + str(randint(1000000, 9999999)) + \"@mail.ru\",\n \"inn\": \"0000000000\"\n }\n\n\nRECEIVE_REG_DOCS_INTERNAL_SERVICE_ID_MAP = {\n '78086': (285, 1112), # 'agent' : (189, 1112)\n '77066': (285, 2586),\n '47029': (77, 557),\n '47026': (71, 554),\n '47027': (71, 555),\n '47028': (71, 556),\n '47031': (71, 559),\n '47100': (71, 1633),\n '47030': (71, 558),\n '47035': (71, 561),\n '47098': (71, 565),\n '47034': (71, 560),\n '47036': (71, 562),\n '47039': (71, 563),\n '47040': (71, 564)\n}\n\nLLC_REG_INTERNAL_SERVICE_ID_MAP = {\n '78086': {\n 'company': (182, 1112),\n 'person': (281, 1112)\n },\n '77066': {\n 'company': (275, 1265),\n 'person': (281, 2813)\n }\n}\n\n\n@ifns_bp.route('/meeting/ifns/schedule/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(ifns=IntValidator(required=True),\n service=EnumValidator(enum_cls=IfnsServiceEnum, required=True),\n datetime=DateTypeValidator(required=True),\n batch_id=ArgumentValidator(required=True)\n )\ndef ifns_schedule(ifns=None, service=None, batch_id=None, **kwargs):\n dt = kwargs['datetime']\n service = int(service)\n\n batch_db = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).scalar()\n if not batch_db:\n raise errors.InvalidParameterValue('batch_id')\n\n # todo: check batch type (new llc)\n\n from services.llc_reg.llc_reg_manager import LlcRegBatchManager\n founder_applicant = LlcRegBatchManager.get_founder_applicant(batch_db, logger=current_app.logger)\n\n if not founder_applicant:\n raise errors.InvalidParameterValue(\"batch_id\")\n\n IFNS_LOGGER.debug(\n u\"Trying to get ifns schedule. ifns:%d, service:%s, dt:%s\" % (int(ifns), unicode(service), unicode(dt)))\n\n all_time_slots = []\n\n ifns_data = ifns_manager.get_ifns_data(ifns)\n if not ifns_data or not ifns_data.rou:\n raise errors.InvalidParameterValue(\"ifns\")\n\n rou = ifns_data.rou\n if 'code' not in rou:\n raise errors.InvalidParameterValue(\"ifns\")\n\n code = rou['code']\n\n if service == IfnsServiceEnum.IS_REG_COMPANY:\n if code not in LLC_REG_INTERNAL_SERVICE_ID_MAP:\n raise errors.InvalidParameterValue(\"ifns\")\n\n # noinspection PyTypeChecker\n company_data, person_data = get_company_person_data_for_ifns(founder_applicant, None, current_user.email)\n\n obj_type = \"person\" if person_data is not None else \"company\"\n internal_ifns_service, internal_ifns_number = LLC_REG_INTERNAL_SERVICE_ID_MAP[code][obj_type]\n\n try:\n all_time_slots = current_app.external_tools.get_nalog_ru_time_slots(person_data,\n company_data,\n internal_ifns_number,\n internal_ifns_service, IFNS_LOGGER)\n if not all_time_slots:\n raise Exception()\n except Exception:\n IFNS_LOGGER.exception(u\"Failed to get_nalog_ru_time_slots\")\n raise\n\n elif service == IfnsServiceEnum.IS_RECEIVE_REG_DOCS:\n if code not in RECEIVE_REG_DOCS_INTERNAL_SERVICE_ID_MAP:\n raise errors.InvalidParameterValue(\"ifns\")\n internal_ifns_service, internal_ifns_number = RECEIVE_REG_DOCS_INTERNAL_SERVICE_ID_MAP[code]\n\n company_data = _make_fake_company_data()\n\n try:\n all_time_slots = current_app.external_tools.get_nalog_ru_time_slots(None,\n company_data,\n internal_ifns_number,\n internal_ifns_service, IFNS_LOGGER)\n if not all_time_slots:\n raise Exception()\n except Exception:\n IFNS_LOGGER.exception(u\"Failed to get_nalog_ru_time_slots\")\n raise\n\n if not all_time_slots:\n return {\n 'result': {\n 'nearest_time': None,\n 'slots': []\n }\n }\n\n 
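# Pick the slot list offered for the requested day; the td_min / nearest_time\n    # bookkeeping below also tracks the closest day with free slots, though the\n    # response only uses the exact-day slots (actual_slots).\n    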
td_min = timedelta(seconds=99999999)\n nearest_time = datetime.strptime(all_time_slots[0]['date'], \"%Y-%m-%d\")\n slots = all_time_slots[0]['time_slots']\n\n actual_slots = []\n\n dt_str = dt.strftime(\"%Y-%m-%d\")\n for slot in all_time_slots:\n cur_date = datetime.strptime(slot['date'], \"%Y-%m-%d\")\n if slot['date'] == dt_str:\n actual_slots = slot['time_slots']\n td_cur = cur_date - dt if (cur_date > dt) else (dt - cur_date)\n if td_cur < td_min:\n td_min = td_cur\n nearest_time = cur_date\n slots = slot['time_slots']\n\n # IFNS_LOGGER.debug(u\"Ifns schedule - succeeded. Nearest time: %s, Slots: %s\" % (nearest_time.strftime(\"%Y-%m-%d\"), json.dumps(slots)))\n return {'result': {\n 'nearest_time': dt_str,\n 'slots': actual_slots\n }}\n\n\n@ifns_bp.route('/meeting/ifns/create/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(ifns=IntValidator(required=True),\n service=EnumValidator(enum_cls=IfnsServiceEnum, required=True),\n datetime=DateTimeValidator(required=True),\n batch_id=ArgumentValidator(required=True)\n )\ndef ifns_reserve(ifns=None, service=None, batch_id=None, **kwargs):\n dt = kwargs['datetime']\n service = int(service)\n\n result = None\n\n batch_db = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).scalar()\n if not batch_db:\n raise errors.InvalidParameterValue('batch_id')\n\n from services.llc_reg.llc_reg_manager import LlcRegBatchManager\n founder_applicant = LlcRegBatchManager.get_founder_applicant(batch_db, logger=current_app.logger)\n if not founder_applicant:\n raise errors.InvalidParameterValue(\"batch_id\")\n\n reg_responsible_object = LlcRegBatchManager.get_reg_responsible_object(batch_db, logger=current_app.logger)\n\n if not batch_db.data:\n raise errors.InvalidParameterValue('batch_id')\n\n batch_data_data = batch_db.data\n if 'full_name' not in batch_data_data or not batch_data_data['full_name']:\n raise errors.InvalidParameterValue('batch_id')\n company_full_name = batch_data_data['full_name']\n\n ifns_data = ifns_manager.get_ifns_data(ifns)\n if not ifns_data or not ifns_data.rou:\n raise errors.InvalidParameterValue(\"ifns\")\n\n rou = ifns_data.rou\n if 'code' not in rou:\n raise errors.InvalidParameterValue(\"ifns\")\n\n code = rou['code']\n\n if service == IfnsServiceEnum.IS_REG_COMPANY:\n if code not in LLC_REG_INTERNAL_SERVICE_ID_MAP:\n raise errors.InvalidParameterValue(\"ifns\")\n\n IFNS_LOGGER.debug(\n u\"Trying to reserve ifns. 
ifns:%d, service:%s, dt:%s\" % (int(ifns), unicode(service), unicode(dt)))\n        company_data, person_data = get_company_person_data_for_ifns(founder_applicant, reg_responsible_object,\n                                                                     current_user.email)\n\n        obj_type = \"person\" if person_data is not None else \"company\"\n        internal_ifns_service, internal_ifns_number = LLC_REG_INTERNAL_SERVICE_ID_MAP[code][obj_type]\n        try:\n            result = current_app.external_tools.book_ifns(person_data, company_data, internal_ifns_number,\n                                                          internal_ifns_service, dt, IFNS_LOGGER)\n        except Exception:\n            IFNS_LOGGER.exception(u\"Failed to reserve ifns.\")\n            raise\n    elif service == IfnsServiceEnum.IS_RECEIVE_REG_DOCS:\n        try:\n            if code not in RECEIVE_REG_DOCS_INTERNAL_SERVICE_ID_MAP:\n                raise errors.InvalidParameterValue(\"ifns\")\n            general_manager = batch_data_data.get('general_manager')\n            if not general_manager or '_id' not in general_manager:\n                raise errors.InvalidParameterValue(\"batch_id\")\n            general_manager_id = general_manager['_id']\n            general_manager_obj = PrivatePersonDbObject.query.filter_by(id=general_manager_id).scalar()\n            if not general_manager_obj:\n                raise errors.InvalidParameterValue(\"batch_id\")\n            internal_ifns_service, internal_ifns_number = RECEIVE_REG_DOCS_INTERNAL_SERVICE_ID_MAP[code]\n            company_data = {\n                'inn': '0000000000',\n                \"name\": company_full_name,\n                \"phone\": general_manager_obj.phone,\n                \"email\": current_user.email,\n            }\n            result = current_app.external_tools.book_ifns(None, company_data, internal_ifns_number,\n                                                          internal_ifns_service, dt, IFNS_LOGGER)\n        except Exception:\n            IFNS_LOGGER.exception(u\"Failed to reserve ifns.\")\n            raise\n\n    if result:\n        from fw.async_tasks import send_email\n        try:\n            booking = IfnsBookingObject(\n                batch_id=batch_id,\n                code=result['code'],\n                date=result['date'],\n                service=result['service'],\n                phone=result['phone'],\n                window=result['window'],\n                address=result['address'],\n                service_id=service,\n                ifns=result['ifns'],\n                how_to_get=result['how_to_get']\n            )\n            sqldb.session.add(booking)\n            sqldb.session.commit()\n\n            IFNS_LOGGER.debug(u\"Reserved ifns. 
booking id: %s\" % booking.id)\n\n social_link = SocialServiceBackends.get_user_social_network_profile_url(current_user.id)\n rec_list = current_app.config['YURIST_EMAIL_LIST']\n llc_full_name = batch_db.data.get('full_name', \"\")\n docs = BatchManager.get_shared_links_to_rendered_docs(batch_db, current_app.config,\n IFNS_LOGGER)\n for recipient in rec_list:\n send_email.send_email.delay(\n recipient,\n 'ifns_reservation_notification',\n email=current_user.email,\n mobile=current_user.mobile,\n social_link=social_link,\n full_name=llc_full_name,\n ifns=result['ifns'],\n booking_time=result['date'],\n docs=docs\n )\n booking_field = IfnsBooking.db_obj_to_field(booking)\n return {'result': booking_field.get_api_structure()}\n except Exception, ex:\n IFNS_LOGGER.exception(u\"Failed to save booking!\")\n raise errors.ServerError()\n\n IFNS_LOGGER.error(u\"Failed to reserve ifns due to unknown reason.\")\n return {'result': False}\n\n\n@ifns_bp.route('/meeting/ifns/', methods=['GET'])\n@api_view\n@login_required\n@validate_arguments(batch_id=ArgumentValidator(required=True))\ndef ifns_get_booking(batch_id=None):\n result_values = []\n for book in IfnsBookingObject.query.filter(\n IfnsBookingObject.batch_id==batch_id,\n IfnsBookingObject._discarded==False,\n IfnsBookingObject.ifns.__ne__(None),\n IfnsBookingObject.date.__ne__(None),\n IfnsBookingObject.service_id.__ne__(None)):\n try:\n booking = IfnsBooking.db_obj_to_field(book)\n result_values.append(booking.get_api_structure())\n except Exception:\n current_app.logger.exception(u\"Failed to parse ifns booking object from db\")\n continue\n return {'result': result_values}\n\n\n@ifns_bp.route('/ifns/reservations/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(batch_id=ArgumentValidator(required=True))\ndef ifns_get_reservations(batch_id=None):\n batch_db = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).scalar()\n if not batch_db:\n raise BatchNotFound()\n if batch_db.status != BatchStatusEnum.BS_FINALISED:\n raise errors.InvalidParameterValue('batch_id')\n\n booking = IfnsBookingObject.query.filter(\n IfnsBookingObject.batch_id == batch_id,\n IfnsBookingObject._discarded == False,\n IfnsBookingObject.reg_info.__ne__(None)\n ).first()\n\n try:\n name = batch_db.data['full_name']\n full_name = u\"Общество с ограниченной ответственностью \\\"%s\\\"\" % name\n except Exception:\n current_app.logger.exception(u\"Failed to collect data\")\n raise errors.InvalidParameterValue('batch_id')\n\n if not booking:\n return {\n 'result': {\n 'full_name': full_name,\n 'status': \"unknown\"\n }\n }\n\n result = {'result': {\n 'full_name': full_name,\n 'status': booking.reg_info.get('status', 'unknown')\n }}\n reg_date = booking.reg_info.get('reg_date', None)\n ogrn = booking.reg_info.get('ogrn', None)\n if reg_date:\n result['result']['reg_date'] = reg_date\n if ogrn:\n result['result']['ogrn'] = booking.reg_info.get('ogrn', None)\n return result\n\n\n@ifns_bp.route('/meeting/ifns/discard/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(\n booking_id=ArgumentValidator(required=True),\n)\ndef ifns_discard(booking_id=None):\n IFNS_LOGGER.debug(u\"Trying to cancel ifns booking %s\" % unicode(booking_id))\n\n IfnsBookingObject.query.filter_by(\n id=booking_id,\n _discarded=False\n ).update({\n '_discarded': True\n })\n sqldb.session.commit()\n\n IFNS_LOGGER.debug(u\"Ifns booking %s canceled\" % unicode(booking_id))\n return {'result': 
True}\n\n\n@ifns_bp.route('/meeting/ifns/name/', methods=['GET'])\n@api_view\n@login_required\n@validate_arguments(\n    ifns=ArgumentValidator(required=True)\n)\ndef ifns_get_name(ifns=None):\n    null_res = {'result': {\n        'title': \"\",\n        'has_booking': False,\n        'address': None\n    }}\n    IFNS_LOGGER.debug(\"ifns_get_name\")\n\n    try:\n        ifns = int(ifns)\n        if ifns < 1 or ifns > 99999:\n            raise ValueError()\n    except Exception:\n        raise errors.InvalidParameterValue('ifns')\n\n    result = IfnsCatalogObject.query.filter_by(code=ifns).first()\n    if not result:\n        return null_res\n\n    ifns_item = result\n    if not ifns_item.rou:\n        return null_res\n\n    rou = ifns_item.rou\n    if not rou or 'name' not in rou:\n        return null_res\n\n    title = rou['name']\n    if not title:\n        return null_res\n\n    has_booking = (7700 <= ifns <= 7799) or (7800 <= ifns <= 7899) # or (4700 <= ifns <= 4799)\n\n    address = (ifns_item.rou or {}).get('address', {})\n    if not isinstance(address, dict):\n        address = {}\n\n    return {'result': {\n        \"title\": title,\n        \"has_booking\": has_booking,\n        \"address\": address\n    }}\n\n@ifns_bp.route('/structures/ifns/search/', methods=['GET'])\n@api_view\n@validate_arguments(\n    region=ArgumentValidator(required=False),\n    search_string=ArgumentValidator(required=False),\n    limit=IntValidator(required=False, min_val=1, max_val=100500),\n    offset=IntValidator(required=False, min_val=0, max_val=100500)\n)\ndef search_ifns(region=None, search_string=None, limit=20, offset=0):\n    query = IfnsCatalogObject.query\n    if region:\n        query = query.filter_by(region=region)\n\n    if search_string:\n        reserved_symbols = '.\\\\$^*/[]()|+'\n        for s in reserved_symbols:\n            search_string = search_string.replace(s, '').strip()\n            if not search_string:\n                break\n        if search_string:\n            try:\n                code_int = int(search_string)\n                if 100 <= code_int < 10000:\n                    query = query.filter_by(code=code_int)\n            except Exception:\n                query = query.filter(IfnsCatalogObject.name.ilike(u'%%%s%%' % search_string))\n\n    query = query.order_by(IfnsCatalogObject.code.asc())\n    total = query.count()\n    if offset:\n        query = query.offset(offset)\n    if limit:\n        query = query.limit(limit)\n\n    return {\n        'total': total,\n        'count': query.count(),\n        'ifns': [db_ifns_to_api(item) for item in query]\n    }\n\n@ifns_bp.route('/structures/ifns/', methods=['GET'])\n@api_view\n@validate_arguments(\n    id=ArgumentValidator(required=True),\n)\ndef get_ifns(**kwargs):\n    id_val = kwargs.get('id', None)\n    if not id_val:\n        raise errors.MissingRequiredParameter('id')\n\n    item = IfnsCatalogObject.query.filter_by(id=id_val).first()\n    if not item:\n        raise errors.IfnsNotFound()\n\n    return db_ifns_to_api(item)\n\n\ndef db_ifns_to_api(item):\n    result_item = {\n        'id': item.id,\n        'ifns_code': int_to_ifns(item.code),\n        'caption': item.name or u'',\n        'address': item.address or u'',\n        'phones': [phone.replace('(', '').replace(')', '') for phone in (item.tel or [])],\n        'additional_info': item.comment or u''\n    }\n    if 'address_string' in result_item['address']:\n        try:\n            address = json.loads(result_item['address'])\n            result_item['address'] = address.get('address_string', u'')\n        except Exception:\n            pass\n    plat = item.plat or {}\n    if plat:\n        result_item['payment_details'] = {\n            'payment_recipient_caption': plat.get('recipient_name', u''),\n            'inn': plat.get('recipient_inn', u''),\n            'kpp': plat.get('recipient_kpp', u''),\n            'bank_caption': plat.get('bank_name', u''),\n            'bik': plat.get('bik', u''),\n            'account': plat.get('recipient_account', u'')\n        }\n    if item.rou:\n        rou = item.rou\n        result_item['llc_registration_ifns'] = {\n            
'ifns_reg_code': str(rou['code']),\n 'caption': rou.get('name', u\"\"),\n 'address': rou.get('address', {}).get('address_string', u''),\n 'phones': [phone.replace('(', '').replace(')', '') for phone in rou.get('tel', [])]\n }\n\n if item.rof:\n rof = item.rof\n result_item['ip_registration_ifns'] = {\n 'ifns_reg_code': str(rof['code']),\n 'caption': rof.get('name', u\"\"),\n 'address': rof.get('address', {}).get('address_string', u''),\n 'phones': [phone.replace('(', '').replace(')', '') for phone in (rof or {}).get('tel', [])]\n }\n return result_item\n\n" }, { "alpha_fraction": 0.6805896759033203, "alphanum_fraction": 0.6859132051467896, "avg_line_length": 33.88571548461914, "blob_id": "9f758726dde28d62be68d3e5ea9fbde25cd59346", "content_id": "b1f338494c5bdebddd4d41e65f7c69ece7f2c296", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2442, "license_type": "no_license", "max_line_length": 106, "num_lines": 70, "path": "/app/services/test_svc/__init__.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport os\nimport jinja2\nfrom fw.documents.batch_manager import BatchManager\nfrom fw.documents.doc_requisites_storage import DocRequisitiesStorage\nfrom fw.documents.enums import DocumentBatchTypeEnum, DocumentTypeEnum\nfrom services.test_svc.test_svc_manager import TestSvcManager\n\n\ndef _init_doc_requisities(config):\n from services.test_svc.documents.db_data import load_data\n data = load_data(config)\n\n templates = (\n \"TEST_DOC_1_TEMPLATE\",\n \"TEST_DOC_2_TEMPLATE\",\n \"TEST_DOC_3_TEMPLATE\",\n )\n\n for template_name in templates:\n DocRequisitiesStorage.add_template(data[template_name]['doc_name'], data[template_name])\n\n schemas = (\n \"TEST_BATCH_SCHEMA\",\n \"TEST_DOC_3_SCHEMA\",\n \"TEST_DOC_2_SCHEMA\",\n \"TEST_DOC_1_SCHEMA\"\n )\n\n for schema_name in schemas:\n DocRequisitiesStorage.add_schema(data[schema_name]['doc_name'], data[schema_name])\n\n matchers = (\n \"TEST_DOC_1_MATCHER\",\n )\n\n for matcher_name in matchers:\n DocRequisitiesStorage.add_field_matcher(data[matcher_name]['doc_name'], data[matcher_name])\n\n bd = dict(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n doc_types=[\n DocumentTypeEnum.DT_TEST_DOC_1,\n DocumentTypeEnum.DT_TEST_DOC_2,\n DocumentTypeEnum.DT_TEST_DOC_3\n ],\n result_fields=data['TEST_BATCH_RESULT_FIELDS'],\n deferred_render_docs=data['TEST_BATCH_DEFER_DOCS'],\n transitions=data['TEST_BATCH_TRANSITIONS'],\n actions=data['TEST_BATCH_ACTIONS'],\n initial_status=\"new\",\n statuses=[\"new\", \"finalisation\", \"finalised\", \"edited\"],\n fields=data[\"TEST_BATCH_SCHEMA\"]['fields']\n )\n\n DocRequisitiesStorage.add_batch_descriptor(DocumentBatchTypeEnum.DBT_TEST_TYPE, bd)\n\n\ndef register(app, jinja_env, class_loader, **kwargs):\n search_path = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), u\"templates\"))\n jinja_env.loader.loaders.append(jinja2.FileSystemLoader(search_path))\n\n class_loader.POSSIBLE_LOCATIONS.append('services.test_svc.documents.enums')\n class_loader.POSSIBLE_LOCATIONS.append('services.test_svc.documents.ext_methods')\n class_loader.POSSIBLE_LOCATIONS.append('services.test_svc.documents.ext_validators')\n\n BatchManager.register_manager(DocumentBatchTypeEnum.DBT_TEST_TYPE, TestSvcManager)\n\n _init_doc_requisities(app.config)\n" }, { "alpha_fraction": 0.4492262601852417, "alphanum_fraction": 0.4630606770515442, "avg_line_length": 43.23659133911133, "blob_id": 
"db8cacab4d279c7af34f83fef93c1129782a61e7", "content_id": "4df68e2de95d57368bbbdf4b4d3134e2bac58ba9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 32915, "license_type": "no_license", "max_line_length": 171, "num_lines": 634, "path": "/app/common_utils/__init__.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport hashlib\nfrom bson.objectid import ObjectId\nimport os\nimport sys\nimport requests\nfrom requests.exceptions import Timeout, ConnectionError\n\nMAX_ITEMS_ON_PAGE = 20\n\ndef make_paginator_data(page, total):\n max_possible_page = (total - 1) / MAX_ITEMS_ON_PAGE + 1\n if page > max(max_possible_page, 1):\n page = max_possible_page\n max_possible_page = (total - 1) / MAX_ITEMS_ON_PAGE + 1\n\n max_page = min(max_possible_page, page + 2)\n min_page = max(1, max_page - 4)\n max_page = min(max_possible_page, min_page + 4)\n\n pages = range(min_page, max_page + 1)\n paginator = {\n 'page' : page,\n 'pages' : pages,\n 'max_page' : max_page,\n 'max_possible_page' : max_possible_page\n }\n return paginator\n\ndef get_russian_month_skl(month):\n if month == 1:\n return u\"января\"\n if month == 2:\n return u\"февраля\"\n if month == 3:\n return u\"марта\"\n if month == 4:\n return u\"апреля\"\n if month == 5:\n return u\"мая\"\n if month == 6:\n return u\"июня\"\n if month == 7:\n return u\"июля\"\n if month == 8:\n return u\"августа\"\n if month == 9:\n return u\"сентября\"\n if month == 10:\n return u\"октября\"\n if month == 11:\n return u\"ноября\"\n if month == 12:\n return u\"декабря\"\n\nMONTHS = {\n u'Январь' : 1,\n u'Февраль' : 2,\n u'Март' : 3,\n u'Апрель' : 4,\n u'Май' : 5,\n u'Июнь' : 6,\n u'Июль' : 7,\n u'Август' : 8,\n u'Сентябрь' : 9,\n u'Октябрь' : 10,\n u'Ноябрь' : 11,\n u'Декабрь' : 12,\n }\n\ndef get_russian_date(date_val):\n return u\"%d %s %s г.\" % (date_val.day, get_russian_month_skl(date_val.month), date_val.year)\n\ndef get_russian_date_time(date_val):\n return u\"%s %s\" % (get_russian_date(date_val), date_val.strftime(u'%H:%M').decode('utf-8'))\n\n#def get_system_settings(store):\n# cls_info = get_cls_info(SettingsObject)\n# columns = cls_info.columns\n# columns_str = u\",\".join([column.name for column in columns])\n# settings_list = store.execute(u\"SELECT %s from settings ORDER BY id DESC LIMIT 1\" % columns_str)\n#\n# for settings in settings_list:\n# settings_item = store._load_object(cls_info, settings_list, settings)\n# return settings_item\n#\n\ndef is32bit():\n return sys.maxsize < 2**32\n\ndef chunks(l, n):\n \"\"\" Yield successive n-sized chunks from l.\n \"\"\"\n for i in xrange(0, len(l), n):\n yield l[i:i+n]\n\n\n# padezh / gender m f n\n# im стол табуретка окно\n# rod стола табуретки окна\n# dat столу табуретке окну\n# vin стол табуретку окно\n# tvor столом табуреткой окном\n# predl о столе о табуретке об окне\n\ntens = (\n {'im' : u'', 'rod' : u'', 'dat' : u'', 'vin' : u'', 'tvor' : u'', 'predl' : u''},\n {'im' : u'', 'rod' : u'', 'dat' : u'', 'vin' : u'', 'tvor' : u'', 'predl' : u''},\n {'im' : u'двадцать', 'rod' : u'двадцати', 'dat' : u'двадцати', 'vin' : u'двадцать', 'tvor' : u'двадцатью', 'predl' : u'двадцати'},\n {'im' : u'тридцать', 'rod' : u'тридцати', 'dat' : u'тридцати', 'vin' : u'тридцать', 'tvor' : u'тридцатью', 'predl' : u'тридцати'},\n {'im' : u'сорок', 'rod' : u'сорока', 'dat' : u'сорока', 'vin' : u'сорок', 'tvor' : u'сорока', 'predl' : u'сорока'},\n {'im' : u'пятьдесят', 'rod' : u'пятидесяти', 'dat' : u'пятидесяти', 
'vin' : u'пятьдесят', 'tvor' : u'пятьюдесятью', 'predl' : u'пятидесяти'},\n {'im' : u'шестьдесят', 'rod' : u'шестидесяти', 'dat' : u'шестидесяти', 'vin' : u'шестьдесят', 'tvor' : u'шестьюдесятью', 'predl' : u'шестидесяти'},\n {'im' : u'семьдесят', 'rod' : u'семидесяти', 'dat' : u'семидесяти', 'vin' : u'семьдесят', 'tvor' : u'семьюдесятью', 'predl' : u'семидесяти'},\n {'im' : u'восемьдесят', 'rod' : u'восьмидесяти', 'dat' : u'восьмидесяти', 'vin' : u'восемьдесят', 'tvor' : u'восемьюдесятью', 'predl' : u'восьмидесяти'},\n {'im' : u'девяносто', 'rod' : u'девяноста', 'dat' : u'девяноста', 'vin' : u'девяносто', 'tvor' : u'девяноста', 'predl' : u'девяноста'},\n )\n\nones = (\n {'im' : u'три', 'rod' : u'трёх', 'dat' : u'трём', 'vin' : u'три', 'tvor' : u'тремя', 'predl' : u'трёх'},\n {'im' : u'четыре', 'rod' : u'четырёх', 'dat' : u'четырём', 'vin' : u'четыре', 'tvor' : u'четырьмя', 'predl' : u'четырёх'},\n {'im' : u'пять', 'rod' : u'пяти', 'dat' : u'пяти', 'vin' : u'пять', 'tvor' : u'пятью', 'predl' : u'пяти'},\n {'im' : u'шесть', 'rod' : u'шести', 'dat' : u'шести', 'vin' : u'шесть', 'tvor' : u'шестью', 'predl' : u'шести'},\n {'im' : u'семь', 'rod' : u'семи', 'dat' : u'семи', 'vin' : u'семь', 'tvor' : u'семью', 'predl' : u'семи'},\n {'im' : u'восемь', 'rod' : u'восьми', 'dat' : u'восьми', 'vin' : u'восемь', 'tvor' : u'восемью', 'predl' : u'восьми'},\n {'im' : u'девять', 'rod' : u'девяти', 'dat' : u'девяти', 'vin' : u'девять', 'tvor' : u'девятью', 'predl' : u'девяти'},\n {'im' : u'десять', 'rod' : u'десяти', 'dat' : u'десяти', 'vin' : u'десять', 'tvor' : u'десятью', 'predl' : u'десяти'},\n {'im' : u'одиннадцать', 'rod' : u'одиннадцати', 'dat' : u'одиннадцати', 'vin' : u'одиннадцать', 'tvor' : u'одиннадцатью', 'predl' : u'одиннадцати'},\n {'im' : u'двенадцать', 'rod' : u'двенадцати', 'dat' : u'двенадцати', 'vin' : u'двенадцать', 'tvor' : u'двенадцатью', 'predl' : u'двенадцати'},\n {'im' : u'тринадцать', 'rod' : u'тринадцати', 'dat' : u'тринадцати', 'vin' : u'тринадцать', 'tvor' : u'тринадцатью', 'predl' : u'тринадцати'},\n {'im' : u'четырнадцать', 'rod' : u'четырнадцати', 'dat' : u'четырнадцати','vin' : u'четырнадцать', 'tvor' : u'четырнадцатью', 'predl' : u'четырнадцати'},\n {'im' : u'пятнадцать', 'rod' : u'пятнадцати', 'dat' : u'пятнадцати', 'vin' : u'пятнадцать', 'tvor' : u'пятнадцатью', 'predl' : u'пятнадцати'},\n {'im' : u'шестнадцать', 'rod' : u'шестнадцати', 'dat' : u'шестнадцати', 'vin' : u'шестнадцать', 'tvor' : u'шестнадцатью', 'predl' : u'шестнадцати'},\n {'im' : u'семнадцать', 'rod' : u'семнадцати', 'dat' : u'семнадцати', 'vin' : u'семнадцать', 'tvor' : u'семнадцатью', 'predl' : u'семнадцати'},\n {'im' : u'восемнадцать', 'rod' : u'восемнадцати', 'dat' : u'восемнадцати','vin' : u'восемнадцать', 'tvor' : u'восемнадцатью', 'predl' : u'восемнадцати'},\n {'im' : u'девятнадцать', 'rod' : u'девятнадцати', 'dat' : u'девятнадцати','vin' : u'девятнадцать', 'tvor' : u'девятнадцатью', 'predl' : u'девятнадцати'},\n )\n\nhundreds = (\n {'im' : u'', 'rod' : u'', 'dat' : u'', 'vin' : u'', 'tvor' : u'', 'predl' : u''},\n {'im' : u'сто', 'rod' : u'ста', 'dat' : u'ста', 'vin' : u'сто', 'tvor' : u'ста', 'predl' : u'ста'},\n {'im' : u'двести', 'rod' : u'двухсот', 'dat' : u'двумстам', 'vin' : u'двести', 'tvor' : u'двумястами', 'predl' : u'двухстах'},\n {'im' : u'триста', 'rod' : u'трёхсот', 'dat' : u'трёмстам', 'vin' : u'триста', 'tvor' : u'тремястами', 'predl' : u'трёхстах'},\n {'im' : u'четыреста', 'rod' : u'четырёхсот', 'dat' : u'четырёмстам', 'vin' : u'четыреста', 'tvor' : u'четырьмястами', 'predl' : 
u'четырёхстах'},\n {'im' : u'пятьсот', 'rod' : u'пятисот', 'dat' : u'пятистам', 'vin' : u'пятьсот', 'tvor' : u'пятьюстами', 'predl' : u'пятистах'},\n {'im' : u'шестьсот', 'rod' : u'шестисот', 'dat' : u'шестистам', 'vin' : u'шестьсот', 'tvor' : u'шестьюстами', 'predl' : u'шестистах'},\n {'im' : u'семьсот', 'rod' : u'семисот', 'dat' : u'семистам', 'vin' : u'семьсот', 'tvor' : u'семьюстами', 'predl' : u'семистах'},\n {'im' : u'восемьсот', 'rod' : u'восьмисот', 'dat' : u'восьмистам', 'vin' : u'восемьсот', 'tvor' : u'восемьюстами', 'predl' : u'восьмистах'},\n {'im' : u'девятьсот', 'rod' : u'девятисот', 'dat' : u'девятистам', 'vin' : u'девятьсот', 'tvor' : u'девятьюстами', 'predl' : u'девятистах'}\n )\n\nrazryad_single = (\n {'im' : u'', 'rod' : u'', 'dat' : u'', 'vin' : u'', 'tvor' : u'', 'predl' : u''},\n {'im' : u'тысяча', 'rod' : u'тысячи', 'dat' : u'тысяче', 'vin' : u'тысячу', 'tvor' : u'тысячей', 'predl' : u'тысяче'},\n {'im' : u'миллион', 'rod' : u'миллиона', 'dat' : u'миллиону', 'vin' : u'миллион', 'tvor' : u'миллионом', 'predl' : u'миллионе'},\n {'im' : u'миллиард', 'rod' : u'миллиарда', 'dat' : u'миллиарду', 'vin' : u'миллиард', 'tvor' : u'миллиардом', 'predl' : u'миллиарде'},\n {'im' : u'триллион', 'rod' : u'триллиона', 'dat' : u'триллиону', 'vin' : u'триллион', 'tvor' : u'триллионом', 'predl' : u'триллионе'},\n )\n\nrazryad_plural = (\n {'im' : u'', 'rod' : u'', 'dat' : u'', 'vin' : u'', 'tvor' : u'', 'predl' : u''},\n {'im' : u'тысяч', 'rod' : u'тысяч', 'dat' : u'тысячам', 'vin' : u'тысяч', 'tvor' : u'тысячами', 'predl' : u'тысячах'},\n {'im' : u'миллион', 'rod' : u'миллионов', 'dat' : u'миллионам', 'vin' : u'миллион', 'tvor' : u'миллионами', 'predl' : u'миллионах'},\n {'im' : u'миллиард', 'rod' : u'миллиардов', 'dat' : u'миллиардам', 'vin' : u'миллиард', 'tvor' : u'миллиардами', 'predl' : u'миллиардах'},\n {'im' : u'триллион', 'rod' : u'триллионов', 'dat' : u'триллионам', 'vin' : u'триллион', 'tvor' : u'триллионами', 'predl' : u'триллионах'},\n )\nodna = {\n 'im' : u'одна',\n 'rod' : u'одной',\n 'dat' : u'одной',\n 'vin' : u'одну',\n 'tvor' : u'одной',\n 'predl' : u'одной'\n}\nodin = {\n 'im' : u'один',\n 'rod' : u'одного',\n 'dat' : u'одному',\n 'vin' : u'один',\n 'tvor' : u'одним',\n 'predl' : u'одном'\n}\nodno = {\n 'im' : u'одно',\n 'rod' : u'одно',\n 'dat' : u'одному',\n 'vin' : u'одно',\n 'tvor' : u'одним',\n 'predl' : u'одном'\n}\n\ndva = {\n 'im' : u'два',\n 'rod' : u'двух',\n 'dat' : u'двум',\n 'vin' : u'два',\n 'tvor' : u'двумя',\n 'predl' : u'двух'\n}\ndve = {\n 'im' : u'две',\n 'rod' : u'двух',\n 'dat' : u'двум',\n 'vin' : u'две',\n 'tvor' : u'двумя',\n 'predl' : u'двух'\n}\n\ndef short_num(num, razr, gender = 'm', padezh = 'im'): # f, n\n result = hundreds[num / 100][padezh]\n if result:\n result = u' ' + result\n\n if not num:\n return u\"\"\n\n t = (num % 100) / 10 # десятки\n o = num % 10 # единицы\n\n if t != 1: # 1 2 20 50 100 123\n result += u\" \" + tens[t][padezh] if t > 1 else u\"\"\n\n if o == 1: # 1 2 0 0 0 3\n if razr == 1:\n result += u' ' + odna[padezh]\n elif razr > 1:\n result += u' ' + odin[padezh]\n else:\n if gender == 'm':\n result += u' ' + odin[padezh]\n elif gender == 'f':\n result += u' ' + odna[padezh]\n else:\n result += u' ' + odno[padezh]\n elif o == 2:\n if razr == 1:\n result += u' ' + dve[padezh]\n elif razr > 1:\n result += u' ' + dva[padezh]\n else:\n if gender == 'm':\n result += u' ' + dva[padezh]\n elif gender == 'f':\n result += u' ' + dve[padezh]\n else:\n result += u' ' + dva[padezh]\n elif 3 <= o <= 9:\n result += u\" \" + ones[o 
- 3][padezh]\n\n if o == 1:\n if razr > 0: # одна тысяча\n result += u\" \" + razryad_single[razr][padezh]\n elif 2 <= o <= 4:\n if razr == 1: # две тысячи\n result += u\" \" + razryad_plural[razr][padezh]\n if padezh in ('im', 'vin'):\n result += u'и'\n else:\n if razr > 1: # два миллиона\n result += u\" \" + razryad_plural[razr][padezh]\n if padezh in ('im', 'vin'):\n result += u'а'\n else: # два\n pass\n else:\n if razr > 1: # пять миллионов\n result += u\" \" + razryad_plural[razr][padezh]\n if padezh in ('im', 'vin'):\n result += u\"ов\"\n elif razr == 1: # пять тысяч\n result += u\" \" + razryad_plural[razr][padezh]\n else: # пять\n pass\n else: # 10, 11, 12 - 19\n result += u\" \" + ones[num % 100 - 3][padezh]\n if razr > 0:\n result += u\" \"\n result += razryad_plural[razr][padezh]\n if razr > 1 and padezh in ('im', 'vin'):\n result += u'ов'\n\n return result\n\ndef num_word(s, gender = 'm', padezh = 'im'):\n zeros = {\n \"im\" : u\"ноль\",\n \"rod\" : u\"ноля\",\n \"dat\" : u\"нолю\",\n \"vin\" : u\"ноль\",\n \"tvor\" : u\"нолём\",\n \"predl\" : u\"ноле\"\n }\n s = unicode(s)\n if len(s) <= 0 or s == u'0':\n return zeros[padezh]\n\n count = (len(s) + 2) / 3\n if count > 5:\n return s\n\n result = u\"\"\n s = u'00' + s\n\n for i in xrange(1, count + 1):\n result = short_num(int(s[len(s) - 3 * i: len(s) - 3 * i + 3]), i - 1, gender, padezh) + result\n\n if len(result) > 0 and result[0] == u\" \":\n result = result[1:]\n\n return result\n\nWORD_NUM_DATA = {\n # 1 2, 3, 4 5, 6, 7, 8, 9, 10, 11-20\n u\"рубль\" : (\n {'im' : u'рубль', 'rod' : u'рубля', 'dat' : u'рублю', 'vin' : u'рубль', 'tvor' : u'рублём', 'predl' : u'рубле'}, # 1\n {'im' : u'рубля', 'rod' : u'рублей', 'dat' : u'рублям', 'vin' : u'рубля', 'tvor' : u'рублями', 'predl' : u'рублях'}, # 2 - 4\n {'im' : u'рублей', 'rod' : u'рублей', 'dat' : u'рублям', 'vin' : u'рублей', 'tvor' : u'рублями', 'predl' : u'рублях'}, # 5 - 20\n ),\n u\"копейка\" : (\n {'im' : u'копейка', 'rod' : u'копейки', 'dat' : u'копейке', 'vin' : u'копейку', 'tvor' : u'копейкой', 'predl' : u'копейке'}, # 1\n {'im' : u'копейки', 'rod' : u'копеек', 'dat' : u'копейкам', 'vin' : u'копейки', 'tvor' : u'копейками', 'predl' : u'копейках'}, # 2 - 4\n {'im' : u'копеек', 'rod' : u'копеек', 'dat' : u'копейкам', 'vin' : u'копеек', 'tvor' : u'копейками', 'predl' : u'копейках'}, # 5 - 20\n ),\n u\"доллар США\" : (\n {'im' : u'доллар США', 'rod' : u'доллара США', 'dat' : u'доллару США', 'vin' : u'доллар США', 'tvor' : u'долларом США', 'predl' : u'долларе США'}, # 1\n {'im' : u'доллара США', 'rod' : u'долларов США','dat' : u'долларам США', 'vin' : u'доллара США', 'tvor' : u'долларами США', 'predl' : u'долларах США'}, # 2 - 4\n {'im' : u'долларов США','rod' : u'долларов США','dat' : u'долларам США', 'vin' : u'долларов США','tvor' : u'долларами США', 'predl' : u'долларах США'}, # 5 - 20\n ),\n u\"цент\" : (\n {'im' : u'цент', 'rod' : u'цента', 'dat' : u'центу', 'vin' : u'цент', 'tvor' : u'центом', 'predl' : u'центе'}, # 1\n {'im' : u'цента', 'rod' : u'центов','dat' : u'центам', 'vin' : u'цента', 'tvor' : u'центами', 'predl' : u'центах'}, # 2 - 4\n {'im' : u'центов','rod' : u'центов','dat' : u'центам', 'vin' : u'центов','tvor' : u'центами', 'predl' : u'центах'}, # 5 - 20\n ),\n #u\"евро\" : lambda x: lambda y: u'евро',\n u\"евроцент\" : (\n {'im' : u'евроцент', 'rod' : u'евроцента', 'dat' : u'евроценту', 'vin' : u'евроцент', 'tvor' : u'евроцентом', 'predl' : u'евроценте'}, # 1\n {'im' : u'евроцента', 'rod' : u'евроцентов','dat' : u'евроцентам', 'vin' : u'евроцента', 'tvor' : 
u'евроцентами', 'predl' : u'евроцентах'}, # 2 - 4\n {'im' : u'евроцентов','rod' : u'евроцентов','dat' : u'евроцентам', 'vin' : u'евроцентов','tvor' : u'евроцентами', 'predl' : u'евроцентах'}, # 5 - 20\n ),\n u\"день\" : (\n {'im' : u'день', 'rod' : u'дня', 'dat' : u'дню', 'vin' : u'день', 'tvor' : u'днём', 'predl' : u'дне'}, # 1\n {'im' : u'дня', 'rod' : u'дней', 'dat' : u'дням', 'vin' : u'дня', 'tvor' : u'днями', 'predl' : u'днях'}, # 2 - 4\n {'im' : u'дней', 'rod' : u'дней', 'dat' : u'дням', 'vin' : u'дней', 'tvor' : u'днями', 'predl' : u'днях'}, # 5 - 20\n ),\n u\"год\" : (\n {'im' : u'год', 'rod' : u'года', 'dat' : u'году', 'vin' : u'год', 'tvor' : u'годом', 'predl' : u'годе'}, # 1\n {'im' : u'года','rod' : u'лет', 'dat' : u'годам', 'vin' : u'года', 'tvor' : u'годами', 'predl' : u'годах'}, # 2 - 4\n {'im' : u'лет', 'rod' : u'лет', 'dat' : u'годам', 'vin' : u'лет', 'tvor' : u'годами', 'predl' : u'годах'}, # 5 - 20\n ),\n u\"месяц\" : (\n {'im' : u'месяц', 'rod' : u'месяца', 'dat' : u'месяцу', 'vin' : u'месяц', 'tvor' : u'месяцем', 'predl' : u'месяце'}, # 1\n {'im' : u'месяца','rod' : u'месяцев', 'dat' : u'месяцам', 'vin' : u'месяца', 'tvor' : u'месяцами', 'predl' : u'месяцах'}, # 2 - 4\n {'im' : u'месяцев', 'rod' : u'месяцев', 'dat' : u'месяцам', 'vin' : u'месяцев', 'tvor' : u'месяцами', 'predl' : u'месяцах'}, # 5 - 20\n ),\n u\"член\" : (\n {'im' : u'член', 'rod' : u'члена', 'dat' : u'члену', 'vin' : u'член', 'tvor' : u'членом', 'predl' : u'члене'}, # 1\n {'im' : u'члена', 'rod' : u'членов', 'dat' : u'членам', 'vin' : u'члена', 'tvor' : u'членами', 'predl' : u'членах'}, # 2 - 4\n {'im' : u'членов','rod' : u'членов', 'dat' : u'членам', 'vin' : u'членов','tvor' : u'членами', 'predl' : u'членах'}, # 5 - 20\n ),\n u\"процент\" : (\n {'im' : u'процент', 'rod' : u'процента', 'dat' : u'проценту', 'vin' : u'процент', 'tvor' : u'процентом', 'predl' : u'проценте'}, # 1\n {'im' : u'процента', 'rod' : u'процентов', 'dat' : u'процентам', 'vin' : u'процента', 'tvor' : u'процентами', 'predl' : u'процентах'}, # 2 - 4\n {'im' : u'процентов','rod' : u'процентов', 'dat' : u'процентам', 'vin' : u'процентов','tvor' : u'процентами', 'predl' : u'процентах'}, # 5 - 20\n ),\n u\"доля\" : (\n {'im' : u'доля', 'rod' : u'доли', 'dat' : u'доле', 'vin' : u'долю', 'tvor' : u'долей', 'predl' : u'доле'}, # 1\n {'im' : u'доли', 'rod' : u'долей', 'dat' : u'долям', 'vin' : u'доли', 'tvor' : u'долями', 'predl' : u'долях'}, # 2 - 4\n {'im' : u'долей', 'rod' : u'долей', 'dat' : u'долям', 'vin' : u'долей', 'tvor' : u'долями', 'predl' : u'долях'}, # 5 - 20\n ),\n u\"час\" : (\n {'im' : u'час', 'rod' : u'часа', 'dat' : u'часу', 'vin' : u'час', 'tvor' : u'часом', 'predl' : u'часе'}, # 1\n {'im' : u'часа', 'rod' : u'часа', 'dat' : u'часам', 'vin' : u'часа', 'tvor' : u'часами', 'predl' : u'часах'}, # 2 - 4\n {'im' : u'часов', 'rod' : u'часов', 'dat' : u'часам', 'vin' : u'часов', 'tvor' : u'часами', 'predl' : u'часах'}, # 5 - 20\n ),\n u\"минута\" : (\n {'im' : u'минута', 'rod' : u'минуты', 'dat' : u'минуте', 'vin' : u'минуту', 'tvor' : u'минутой', 'predl' : u'минуте'}, # 1\n {'im' : u'минуты', 'rod' : u'минут', 'dat' : u'минутам', 'vin' : u'минуты', 'tvor' : u'минутами', 'predl' : u'минутах'}, # 2 - 4\n {'im' : u'минут', 'rod' : u'минут', 'dat' : u'минутам', 'vin' : u'минуты', 'tvor' : u'минутами', 'predl' : u'минутах'}, # 5 - 20\n ),\n}\n\ndef word_from_num(word, num, padezh = 'im'):\n from fw.documents.fields.simple_doc_fields import DocField, DocIntField\n if isinstance(word, DocField):\n word = unicode(word)\n if 
isinstance(num, DocIntField):\n num = num.value\n# if num == 1500121 and padezh == 'rod':\n# a = 1 + 2\n if word not in WORD_NUM_DATA:\n return word\n item = WORD_NUM_DATA[word]\n if not word:\n return item[2][padezh]\n if 5 <= num <= 19:\n return item[2][padezh]\n num_str = str(num)\n if not num_str:\n num_str = '0'\n if num > 100:\n if num_str[-2:] in ('11', '12', '13', '14'):\n return item[2][padezh]\n if num_str[-1] == '1':\n return item[0][padezh]\n if num_str[-1] in ('2', '3', '4'):\n return item[1][padezh]\n return item[2][padezh]\n\n\nclass LazyClassLoader(object):\n\n POSSIBLE_LOCATIONS = ['fw.documents.fields.simple_doc_fields',\n 'fw.documents.fields.complex_doc_fields',\n 'fw.documents.fields.general_doc_fields',\n 'fw.documents.db_fields',\n 'fw.documents.fields.doc_fields',\n 'fw.documents.enums',\n 'fw.documents.address_enums',\n 'fw.api.args_validators'\n #'adapters.llc_gov_forms_adapters',\n #'db.meeting_doc_fields',\n #'external_validators'\n ]\n\n def __init__(self, cls_name):\n self.cls_name = cls_name\n self.cls = None\n\n def __unicode__(self):\n return self.cls_name\n\n def load(self):\n if not self.cls:\n self.__load_class()\n return self.cls\n\n def __load_class(self):\n if self.cls:\n return\n\n try:\n self.cls = getattr(sys.modules[__name__], self.cls_name)\n return\n except Exception:\n if '.' not in self.cls_name:\n for item in LazyClassLoader.POSSIBLE_LOCATIONS:\n try:\n module = __import__(item, fromlist=[self.cls_name])\n self.cls = getattr(module, self.cls_name)\n return\n except Exception:\n pass\n else:\n try:\n module = __import__('.'.join(self.cls_name.split('.')[:-1]), fromlist=[self.cls_name.split('.')[-1]])\n self.cls = getattr(module, self.cls_name.split('.')[-1])\n return\n except Exception:\n for item in LazyClassLoader.POSSIBLE_LOCATIONS:\n try:\n module = __import__(item + '.' + '.'.join(self.cls_name.split('.')[:-1]), fromlist=[self.cls_name.split('.')[-1]])\n self.cls = getattr(module, self.cls_name.split('.')[-1])\n return\n except Exception:\n pass\n\n try:\n module = __import__(self.cls_name)\n self.cls = getattr(module, self.cls_name)\n except ImportError:\n pass\n\n raise ImportError(u\"Class name: %s. path: << %s >>. 
__name__: %s\" % (self.cls_name, unicode(sys.path), __name__))\n\n def __call__(self, *args, **kwargs):\n if not self.cls:\n self.__load_class()\n return self.cls(*args, **kwargs)\n\n def __getattr__(self, item):\n if item.startswith('_'):\n try:\n return self.__dict__[item]\n except KeyError:\n raise AttributeError(item)\n\n if not self.cls:\n self.__load_class()\n return getattr(self.cls, item)\n\ndef remove_task_id_run_file(config, task_id):\n file_name = os.path.join(os.path.dirname(config['celery_tasks_dir']), unicode(task_id))\n if os.path.exists(file_name):\n try:\n os.unlink(file_name)\n except Exception:\n pass\n\ndef as_dumpable(val):\n if isinstance(val, dict):\n tmp_dict = {}\n for k, v in val.items():\n if isinstance(v, ObjectId):\n tmp_dict[k] = unicode(v)\n elif type(v) in (tuple, dict, list):\n tmp_dict[k] = as_dumpable(v)\n else:\n tmp_dict[k] = v\n return tmp_dict\n elif type(val) in (list, tuple):\n tmp_list = []\n for i in val:\n if isinstance(i, ObjectId):\n tmp_list.append(unicode(i))\n elif type(i) in (tuple, dict, list):\n tmp_list.append(as_dumpable(i))\n else:\n tmp_list.append(i)\n return tmp_list\n elif isinstance(val, ObjectId):\n return unicode(val)\n return val\n\ndef day_name(day):\n if day == 1:\n return u\"понедельник\"\n if day == 2:\n return u\"вторник\"\n if day == 3:\n return u\"среда\"\n if day == 4:\n return u\"четверг\"\n if day == 5:\n return u\"пятница\"\n if day == 6:\n return u\"суббота\"\n if day == 7:\n return u\"воскресенье\"\n\ndef day_short_name(day):\n if day == 1:\n return u\"пн\"\n if day == 2:\n return u\"вт\"\n if day == 3:\n return u\"ср\"\n if day == 4:\n return u\"чт\"\n if day == 5:\n return u\"пт\"\n if day == 6:\n return u\"сб\"\n if day == 7:\n return u\"вс\"\n\n\ndef try_several_requests(url, tries, method, *args, **kwargs):\n method = method or requests.get\n last_error = None\n for x in xrange(tries):\n try:\n return method(url, *args, **kwargs)\n except (ConnectionError, Timeout), ex:\n last_error = ex\n raise last_error\n\ndef make_cache_key(some_string):\n m = hashlib.md5()\n if isinstance(some_string, unicode):\n m.update(some_string.encode('utf-8'))\n else:\n m.update(some_string)\n return m.hexdigest()\n\ndef int_to_ifns(val):\n val = unicode(val)[:4]\n return val if len(val) == 4 else \"0\" * (4 - len(val)) + val\n\nDDS = {\n 0: u\"нулевое\",\n 1: u\"первое\",\n 2: u\"второе\",\n 3: u\"третье\",\n 4: u\"четвертое\",\n 5: u\"пятое\",\n 6: u\"шестое\",\n 7: u\"седьмое\",\n 8: u\"восьмое\",\n 9: u\"девятое\",\n 10: u\"десятое\",\n 11: u\"одиннадцатое\",\n 12: u\"двенадцатое\",\n 13: u\"тринадцатое\",\n 14: u\"четырнадцатое\",\n 15: u\"пятнадцатое\",\n 16: u\"шестнадцатое\",\n 17: u\"семнадцатое\",\n 18: u\"восемнадцатое\",\n 19: u\"девятнадцатое\",\n 20: u\"двадцатое\",\n 21: u\"двадцать первое\",\n 22: u\"двадцать второе\",\n 23: u\"двадцать третье\",\n 24: u\"двадцать четвертое\",\n 25: u\"двадцать пятое\",\n 26: u\"двадцать шестое\",\n 27: u\"двадцать седьмое\",\n 28: u\"двадцать восьмое\",\n 29: u\"двадцать девятое\",\n 30: u\"тридцатое\",\n 31: u\"тридцать первое\",\n 32: u\"тридцать второе\"\n}\n\ndef day_for_date_str(day):\n return DDS.get(day, u\"\")\n\nYDS = {\n 2010: u\"две тысячи десятого\",\n 2011: u\"две тысячи одиннадцатого\",\n 2012: u\"две тысячи двенадцатого\",\n 2013: u\"две тысячи тринадцатого\",\n 2014: u\"две тысячи четырнадцатого\",\n 2015: u\"две тысячи пятнадцатого\",\n 2016: u\"две тысячи шестнадцатого\",\n 2017: u\"две тысячи семнадцатого\",\n 2018: u\"две тысячи восемнадцатого\",\n 2019: u\"две 
тысячи девятнадцатого\",\n 2020: u\"две тысячи двадцатого\"\n}\n\ndef year_for_date_str(year):\n assert year >= 2010\n return YDS.get(year, u\"две тысячи пятнадцатого\")\n" }, { "alpha_fraction": 0.6823529601097107, "alphanum_fraction": 0.6834224462509155, "avg_line_length": 43.52381134033203, "blob_id": "dc38e25756a65c783eafb474ad8b4536a516f785", "content_id": "908bb173892077a731f2fe1655ec9577195921a5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 935, "license_type": "no_license", "max_line_length": 99, "num_lines": 21, "path": "/app/services/yurist/async_tasks/yurist_check_send.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom celery import current_app as celery\nfrom fw.async_tasks import send_email\nfrom fw.db.sql_base import db as sqldb\nfrom services.yurist.data_model.enums import YuristBatchCheckStatus\nfrom services.yurist.data_model.models import YuristBatchCheckObject\n\ncelery.config_from_envvar('CELERY_CONFIG_MODULE')\n\n\[email protected]()\ndef check_and_send(*args, **kwargs):\n if 'batch_check_id' and kwargs['batch_check_id']:\n with celery.conf['flask_app']().app_context():\n check_obj = YuristBatchCheckObject.query.filter_by(id=kwargs['batch_check_id']).first()\n if not check_obj or check_obj.status != YuristBatchCheckStatus.YBS_WAIT:\n return False\n check_obj.status = YuristBatchCheckStatus.YBS_IN_PROGRESS\n sqldb.session.commit()\n for rec in kwargs['rec_list']:\n send_email.send_email.delay(rec, kwargs['mail_type'], **kwargs)\n" }, { "alpha_fraction": 0.6144056916236877, "alphanum_fraction": 0.6179689764976501, "avg_line_length": 34.39639663696289, "blob_id": "e659932bca3dead3de090f20422ee2af2a1d59ad", "content_id": "a320f29c2b8488e8a830b068a24b35f16bdb5c1d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3929, "license_type": "no_license", "max_line_length": 126, "num_lines": 111, "path": "/app/fw/storage/file_storage.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\nimport shutil\nimport os\nfrom custom_exceptions import FileNotFound\nimport urllib\nfrom fw.documents.pdf_tools import get_pdf_doc_paget_count\nfrom fw.storage.models import FileObject\nfrom fw.db.sql_base import db as sqldb\n\n\nclass FileStorage(object):\n @staticmethod\n def add_file_from_disk(file_path, config, owner_id=None, file_name=None):\n assert file_name\n assert file_path\n\n if not os.path.exists(file_path):\n return\n\n new_file_path = datetime.utcnow().strftime('%Y-%m-%d')\n new_file_obj = FileObject(file_path=new_file_path, file_name=file_name)\n\n if owner_id:\n new_file_obj._owner_id = owner_id\n\n sqldb.session.add(new_file_obj)\n sqldb.session.commit()\n\n obj_id = new_file_obj.id\n\n new_file_location = os.path.join(\n config['DOCUMENT_STORAGE'],\n new_file_path,\n unicode(obj_id) + (os.path.splitext(file_path)[1] if (len(os.path.splitext(file_path)) == 2) else u\"\"))\n\n try:\n os.makedirs(os.path.dirname(new_file_location), mode=0755)\n except Exception:\n pass\n shutil.copyfile(file_path, new_file_location)\n return new_file_obj\n\n @staticmethod\n def get_url(file_obj, config, absolute=False):\n if not file_obj:\n return\n prefix = u\"\" if not absolute else config['WEB_SCHEMA'] + '://' + config['DOMAIN']\n return prefix + config['STORAGE_URL'] + urllib.quote((unicode(file_obj.id) + '/' + 
file_obj.file_name).encode('utf8'))\n\n @staticmethod\n def get_path(file_obj, config):\n #file_ext = file_obj.file_ext if 'file_ext' in file_obj.as_dict() else os.path.splitext(file_obj.file_name)[1]\n\n file_ext = os.path.splitext(file_obj.file_name)[1]\n\n if file_obj._owner:\n return config['PRIVATE_STORAGE'] + file_obj.file_path + u'/' + unicode(file_obj.id) + file_ext\n\n return config['DOCUMENT_STORAGE'] + file_obj.file_path + u'/' + unicode(file_obj.id) + file_ext\n\n @staticmethod\n def remove_file(file_id, config):\n file_obj = FileObject.query.filter_by(id=file_id).first()\n if not file_obj:\n raise FileNotFound()\n\n try:\n os.unlink(FileStorage.get_path(file_obj, config))\n except Exception:\n raise FileNotFound()\n FileObject.query.filter_by(id=file_id).delete(synchronize_session='fetch')\n sqldb.session.commit()\n\n @staticmethod\n def get_file(file_id):\n return FileObject.query.filter_by(id=file_id).first()\n\n @staticmethod\n def get_shared_link(original_file_id, config, absolute=False):\n file_obj = FileObject.query.filter_by(id=original_file_id).first()\n if not file_obj:\n raise FileNotFound()\n if not file_obj._owner:\n return FileStorage.get_url(file_obj, config, absolute=absolute)\n\n shared_file_obj = FileObject.query.filter_by(_original_file=original_file_id, _owner=None).first()\n if shared_file_obj:\n return FileStorage.get_url(shared_file_obj, config, absolute=absolute)\n\n shared_file_obj = FileObject(\n file_name=file_obj.file_name,\n file_path=file_obj.file_path,\n _original_file=original_file_id\n )\n sqldb.session.add(shared_file_obj)\n sqldb.session.commit()\n return FileStorage.get_url(shared_file_obj, config, absolute=absolute)\n\n @staticmethod\n def get_pdf_file_page_count(file_id, config):\n file_obj = FileObject.query.filter_by(id=file_id).first()\n if not file_obj:\n return 0\n path = FileStorage.get_path(file_obj, config)\n ext = os.path.splitext(path)[1]\n if not ext or ext.lower() != '.pdf':\n return 0\n if not os.path.exists(path):\n return 0\n return get_pdf_doc_paget_count(path)\n" }, { "alpha_fraction": 0.6092455387115479, "alphanum_fraction": 0.6188399195671082, "avg_line_length": 39.58407211303711, "blob_id": "1b0801bbd442cc73df0a07d054fc4f66abcb4cfe", "content_id": "429a5fd22ff47c68acfb64ae4f77f55aed26dda6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4652, "license_type": "no_license", "max_line_length": 213, "num_lines": 113, "path": "/app/fw/api/views/files.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport os\nimport tempfile\nfrom bson import ObjectId\nfrom bson.errors import InvalidId\n\nfrom flask import json, request, current_app, Response, abort, Blueprint\nfrom flask_login import login_required, current_user\n\nfrom fw.api.views import not_authorized\nfrom fw.storage.file_storage import FileStorage\nfrom fw.storage.models import FileObject\n\nfiles_bp = Blueprint('files', __name__)\n\n\n@files_bp.route('/storage/put/', methods=['POST'])\n@login_required\ndef upload_file():\n file_obj = request.files['file']\n if file_obj and file_obj.filename and len(os.path.splitext(file_obj.filename)) > 1:\n t_file_out = tempfile.NamedTemporaryFile(mode=\"w+\", delete=True, suffix=os.path.splitext(file_obj.filename)[-1])\n full_name = t_file_out.name\n t_file_out.close()\n file_obj.save(full_name)\n file_obj = FileStorage.add_file_from_disk(full_name, current_app.config, current_user.id, file_name=file_obj.filename)\n\n 
result = {\n \"id\": unicode(file_obj.id),\n \"size\": os.path.getsize(full_name),\n \"file_name\": file_obj.file_name,\n \"url\": FileStorage.get_url(file_obj, current_app.config)\n }\n os.unlink(full_name)\n resp = Response(json.dumps({\"result\": result}), content_type=\"application/json\")\n resp.headers.add('Access-Control-Allow-Credentials', \"true\")\n resp.headers.add('Access-Control-Allow-Origin', \"http://%s\" % current_app.config['site_domain'])\n return resp\n\n abort(400)\n\n\n@files_bp.route('/storage/<path:file_path>/', methods=['GET'], strict_slashes=False)\ndef get_private_file(file_path):\n if not current_user.is_authenticated:\n return not_authorized(current_app.config['site_domain'])\n\n current_app.logger.info(u'file: %s' % file_path)\n content_type = 'application/octet-stream' if ('download/' in file_path) else ''\n file_id = None\n try:\n for part in file_path.split('/'):\n try:\n ObjectId(part)\n file_id = part\n break\n except InvalidId:\n pass\n if not file_id:\n raise ValueError()\n except ValueError:\n current_app.logger.exception(u\"Invalid file id\")\n abort(400)\n return\n\n file_obj = FileObject.query.filter_by(id=file_id).first()\n if not file_obj:\n current_app.logger.exception(u\"No such file in db with id %s\" % unicode(file_id))\n abort(404)\n return\n\n if file_obj._owner and current_user != file_obj._owner:\n current_app.logger.exception(u\"File is not yours with id %s\" % unicode(file_id))\n abort(403)\n return\n\n if file_obj._original_file:\n file_id = file_obj._original_file\n file_obj = FileObject.query.filter_by(id=file_obj._original_file).first()\n if not file_obj:\n current_app.logger.exception(u\"No such file in db with id %s\" % file_id)\n abort(404)\n return\n\n# current_app.logger.info(u\" file 4\")\n file_full_path = FileStorage.get_path(file_obj, current_app.config)\n if not os.path.exists(file_full_path):\n current_app.logger.exception(u\"File with id %s not found at %s\" % (unicode(file_id), file_full_path))\n abort(404)\n return\n# current_app.logger.info(u\" file 5\")\n resp = Response(u\"\", headers={'X-Accel-Redirect': file_full_path}, content_type=content_type)\n if 'download/' in file_path:\n try:\n from email.header import decode_header, Header\n parts = file_path.split('/')\n fname = u\"\"\n if len(parts) > 1:\n fname = parts[-1]\n if not fname and len(parts) > 2:\n fname = parts[-2]\n fname = filter(lambda x: x in u\"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVQXYZ0123456789-=`!@#$%^&*()_+\\\\|[]{}абвгдеёжзийклмнопрстуфхцчшщьыъэюяАБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЬЫЪЭЮЯ.,;':\\\"/? 
\", fname)\n current_app.logger.info(u\" fname:%s\" % fname)\n if fname:\n header_val = str(Header(fname, 'utf-8', maxlinelen=10000)).replace('?=\\n =?utf-8?b?', '')\n current_app.logger.info(u\" header_val:%s\" % header_val)\n resp.headers.add(\"Content-Disposition\", u\"attachment; filename=%s\" % header_val)\n except Exception, ex:\n current_app.logger.exception(u\"Failed to add header\")\n resp.headers.add('Access-Control-Allow-Credentials', \"true\")\n resp.headers.add('Access-Control-Allow-Origin', \"http://%s\" % current_app.config['site_domain'])\n return resp\n" }, { "alpha_fraction": 0.6240601539611816, "alphanum_fraction": 0.6253132820129395, "avg_line_length": 26.517240524291992, "blob_id": "5dc128029276fe1e41dc18488ed74b400444fb3b", "content_id": "8d8c16e935dcc9ce30144c82f03bcc66a9a90101", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 917, "license_type": "no_license", "max_line_length": 60, "num_lines": 29, "path": "/app/services/yurist/data_model/enums.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n\nclass YuristBatchCheckStatus(object):\n YBS_NEW = \"new\"\n YBS_IN_PROGRESS = \"progress\"\n YBS_WAIT = \"wait\"\n YBS_SUCCESS = \"success\"\n YBS_FAILED = \"failed\"\n YBS_REFUSED = \"refused\"\n\n _NAMES = {\n YBS_NEW: u\"проверки не было\",\n YBS_IN_PROGRESS: u\"в процессе\",\n YBS_SUCCESS: u\"проверка закончена успешно\",\n YBS_FAILED: u\"проверка закончена с ошибками\",\n YBS_REFUSED: u\"отказ от проверки\",\n YBS_WAIT: u\"ожидает перехода в статус 'в процессе'\"\n }\n\n FINAL_STATUSES = [YBS_REFUSED, YBS_SUCCESS, YBS_FAILED]\n\n @classmethod\n def validate(cls, value):\n return value in YuristBatchCheckStatus._NAMES\n\n @staticmethod\n def get_name(value):\n return YuristBatchCheckStatus._NAMES.get(value, u\"\")\n" }, { "alpha_fraction": 0.6949999928474426, "alphanum_fraction": 0.6966666579246521, "avg_line_length": 25.04347801208496, "blob_id": "c561e0cd4bf96ef8cccb27a5dccc5de204bc06aa", "content_id": "f1b9de0041b38afc81511ad7b1ce286d3610c45c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 600, "license_type": "no_license", "max_line_length": 62, "num_lines": 23, "path": "/app/deployment_migrations/migration_list/20150710_add_table_group_render_check.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom fw.db.sql_base import db as sqldb\n\n\ndef forward(config, logger):\n logger.debug(u\"Add table group_render_check\")\n\n sqldb.session.close()\n sqldb.engine.execute(u\"\"\"CREATE TABLE group_render_check (\n\t\tid SERIAL NOT NULL,\n\t\tbatch_id VARCHAR,\n\t\tevent_data JSONB,\n\t\tdoc_id_list VARCHAR[],\n\t\tcreated TIMESTAMP WITHOUT TIME ZONE NOT NULL,\n\t\tcheck_completed BOOLEAN NOT NULL,\n\t\tPRIMARY KEY (id),\n\t\tFOREIGN KEY(batch_id) REFERENCES doc_batch (id)\n\t)\"\"\")\n\n\ndef rollback(config, logger):\n sqldb.session.close()\n sqldb.engine.execute(\"DROP table group_render_check;\")\n\n" }, { "alpha_fraction": 0.6204985976219177, "alphanum_fraction": 0.6214219927787781, "avg_line_length": 24.162790298461914, "blob_id": "3e7c550b52bdf3b509af4ec4ef5eb447edc2ab6c", "content_id": "5f5d2ce20aae0d27c79fbabb40a21c0b6332070b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1157, "license_type": "no_license", "max_line_length": 77, "num_lines": 43, "path": 
"/app/services/ip_reg/documents/enums.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n\nclass IPRegistrationWayEnum(object):\n IP_RW_IN_PERSON = \"in_person\"\n IP_RW_RESPONSIBLE_PERSON = \"responsible_person\"\n IP_RW_NOTARY = \"notary\"\n IP_RW_MAIL = \"mail\"\n\n _NAMES = {\n IP_RW_IN_PERSON: u\"лично\",\n IP_RW_RESPONSIBLE_PERSON: u\"ответсвенное лицо\",\n IP_RW_MAIL: u\"по почте\",\n IP_RW_NOTARY: u\"нотариус\"\n }\n\n @classmethod\n def validate(cls, value):\n return value in cls._NAMES\n\n @staticmethod\n def get_name(value):\n return IPRegistrationWayEnum._NAMES.get(value, u\"\")\n\n\nclass IPDocumentDeliveryTypeStrEnum(object):\n IP_DDT_IN_PERSON = \"in_person\"\n IP_DDT_RESPONSIBLE_PERSON = \"responsible_person\"\n IP_DDT_MAIL = \"mail\"\n\n _NAMES = {\n IP_DDT_IN_PERSON: u\"лично\",\n IP_DDT_RESPONSIBLE_PERSON: u\"ответсвенное лицо\",\n IP_DDT_MAIL: u\"по почте\",\n }\n\n @classmethod\n def validate(cls, value):\n return value in cls._NAMES\n\n @staticmethod\n def get_name(value):\n return IPDocumentDeliveryTypeStrEnum._NAMES.get(value, u\"неизвестно\")\n\n" }, { "alpha_fraction": 0.5646852850914001, "alphanum_fraction": 0.5681818127632141, "avg_line_length": 21.038461685180664, "blob_id": "239c956dc6068978c493e229bed97200d4e5a470", "content_id": "78642cb909dfea1ef62213728d42a2140856f8de", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 572, "license_type": "no_license", "max_line_length": 48, "num_lines": 26, "path": "/app/deployment_migrations/data.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nclass Migration(object):\n def __init__(self, name, forward, rollback):\n self.name = name\n self._forward = forward\n self._rollback = rollback\n\n def run(self, config, logger):\n self._forward(config, logger)\n\n def rollback(self, config, logger):\n self._rollback(config, logger)\n\n @property\n def version(self):\n return self.name.split('_')[0]\n\n def __unicode__(self):\n return self.name\n\n def __str__(self):\n return self.name\n\n def __repr__(self):\n return self.name" }, { "alpha_fraction": 0.6957026720046997, "alphanum_fraction": 0.696864128112793, "avg_line_length": 28.689655303955078, "blob_id": "a902398753b9aa980769689e79ede439e5ad9f32", "content_id": "b6b1498879fbaa836be4f7975b179591991a7d0f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 861, "license_type": "no_license", "max_line_length": 61, "num_lines": 29, "path": "/app/deployment_migrations/migration_list/20150720_add_table_for_post_tracking.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom fw.db.sql_base import db as sqldb\n\n\ndef forward(config, logger):\n logger.debug(u\"Add table for post tracking\")\n\n sqldb.session.close()\n sqldb.engine.execute(u\"\"\"CREATE TABLE rus_post_tracking (\n id SERIAL NOT NULL,\n tracking VARCHAR NOT NULL,\n creation_date TIMESTAMP WITHOUT TIME ZONE NOT NULL,\n batch_id VARCHAR,\n owner_id INTEGER NOT NULL,\n status VARCHAR NOT NULL,\n status_caption VARCHAR NOT NULL,\n status_change_dt TIMESTAMP WITHOUT TIME ZONE,\n last_check_dt TIMESTAMP WITHOUT TIME ZONE,\n last_location VARCHAR,\n tracking_type VARCHAR,\n PRIMARY KEY (id),\n FOREIGN KEY(batch_id) REFERENCES doc_batch (id),\n FOREIGN KEY(owner_id) REFERENCES authuser (id)\n)\"\"\")\n\n\ndef rollback(config, logger):\n 
sqldb.session.close()\n sqldb.engine.execute(\"DROP table rus_post_tracking;\")\n" }, { "alpha_fraction": 0.5899796485900879, "alphanum_fraction": 0.5969651937484741, "avg_line_length": 37.94774627685547, "blob_id": "24ab8fdc45dba4e1e4f4783a4d5d16bca925d400", "content_id": "cdb9d478564442094cf94ad0e823eb3c1934925a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 21643, "license_type": "no_license", "max_line_length": 336, "num_lines": 555, "path": "/app/deployment_migrations/migration_list/20150604_migrate_documents.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\nimport logging\nfrom tempfile import TemporaryFile, NamedTemporaryFile\nfrom bson import ObjectId\nimport requests\nfrom fw.auth.models import AuthUser\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.db_fields import DocumentBatchDbObject, BatchDocumentDbObject, CompanyDbObject, PrivatePersonDbObject\nfrom fw.documents.enums import PersonTypeEnum, IncorporationFormEnum, CompanyTypeEnum\nfrom fw.storage.models import FileObject\n\n\ndef replace_all_objectid_with_str(obj):\n if isinstance(obj, dict):\n val = {}\n for k, v in obj.items():\n val[replace_all_objectid_with_str(k)] = replace_all_objectid_with_str(v)\n return val\n elif isinstance(obj, list):\n return [replace_all_objectid_with_str(i) for i in obj]\n else:\n if isinstance(obj, ObjectId):\n return str(obj)\n return obj\n\n\ndef migrate_document(new_batch, old_doc, rendered_docs):\n data = replace_all_objectid_with_str(old_doc.get('data', {}))\n\n doc_id = str(old_doc['id'])\n found_docs = filter(lambda d: 'document_id' in d and str(d['document_id']) == doc_id, rendered_docs)\n caption = \"\"\n file_obj = None\n if found_docs:\n file_id = found_docs[0]['file_id']\n file_obj = FileObject.query.filter_by(id=str(file_id)).scalar()\n caption = found_docs[0].get('caption', \"\")\n\n # rendered docs:\n # {\n # \"caption\" : \"Квитанция на уплату госпошлины\",\n # \"document_type\" : \"reg_fee_invoice\",\n # \"file_link\" : \"/api/storage/54e1d5b5e64bcf5977867c80/%D0%9A%D0%B2%D0%B8%D1%82%D0%B0%D0%BD%D1%86%D0%B8%D1%8F%20%D0%BD%D0%B0%20%D0%BE%D0%BF%D0%BB%D0%B0%D1%82%D1%83%20%D1%80%D0%B5%D0%B3%D0%B8%D1%81%D1%82%D1%80%D0%B0%D1%86%D0%B8%D0%BE%D0%BD%D0%BD%D0%BE%D0%B9%20%D0%BF%D0%BE%D1%88%D0%BB%D0%B8%D0%BD%D1%8B%20%D0%9E%D0%9E%D0%9E.pdf\",\n # \"file_id\" : ObjectId(\"54e1d5b5e64bcf5977867c80\"),\n # \"document_id\" : ObjectId(\"54e1d5afe64bcf599525a6f2\")\n # }\n\n new_doc = BatchDocumentDbObject(\n id=doc_id,\n _owner=new_batch._owner,\n creation_date=old_doc.get('creation_date', datetime.utcnow()),\n document_type=old_doc['document_type'],\n file=file_obj,\n batch=new_batch,\n data=data,\n status=old_doc['status'],\n caption=caption,\n _celery_task_id=None,\n _celery_task_started=None\n )\n sqldb.session.add(new_doc)\n sqldb.session.commit()\n\nbroken_batches = set()\nmissing_users = set()\nbroken_persons = set()\nbroken_companies = set()\nincomplete_companies = set()\nincomplete_persons = set()\nfailed_paid_bathes_of_real_users = set()\n\ndetailed_logger = logging.getLogger(__name__)\ndetailed_logger.setLevel(logging.DEBUG)\n\nt = NamedTemporaryFile(prefix=\"migration_details_%s\" % datetime.now().strftime(\"%Y-%m-%dT%H:%M\"), suffix='.log', delete=False)\n\nfh = logging.FileHandler(t.name)\nfh.setLevel(logging.DEBUG)\n\nformatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - 
%(message)s')\nfh.setFormatter(formatter)\n\ndetailed_logger.addHandler(fh)\n\ndef migrate_batch(batch):\n    detailed_logger.info(u\"Migrating batch %s\" % str(batch['_id']))\n    required_fields = ('_owner', 'data')\n    for rf in required_fields:\n        if rf not in batch:\n            detailed_logger.error(u\"Invalid batch %s: missing required field %s\" % (str(batch['_id']), rf))\n            broken_batches.add(str(batch['_id']))\n            return\n    data = replace_all_objectid_with_str(batch['data'] or {})\n    _owner = batch['_owner']\n    try:\n        _owner = int(_owner)\n    except Exception:\n        detailed_logger.exception(u\"Invalid _owner type\")\n        detailed_logger.error(u\"Invalid batch %s: ObjectId in _owner field instead of int\" % str(batch['_id']))\n        broken_batches.add(str(batch['_id']))\n        return\n    if not isinstance(_owner, int):\n        detailed_logger.error(u\"Invalid batch %s: ObjectId in _owner field instead of int\" % str(batch['_id']))\n        broken_batches.add(str(batch['_id']))\n        return\n\n    owner = AuthUser.query.filter_by(id=_owner).first()\n    if not owner:\n        missing_users.add(str(_owner))\n        detailed_logger.error(u\"Failed to find user with id %s\" % _owner)\n        broken_batches.add(str(batch['_id']))\n        detailed_logger.error(u\"Invalid batch %s: Failed to find owner user\" % str(batch['_id']))\n        return\n\n    new_batch = DocumentBatchDbObject(\n        id=str(batch['_id']),\n        _owner_id=owner.id,\n        batch_type=batch['batch_type'],\n        creation_date=batch.get('creation_date', datetime.utcnow()),\n        finalisation_date=batch.get('finalisation_date', None),\n        status=batch['status'],\n        deleted=False,\n        data=data,\n        result_fields=replace_all_objectid_with_str(batch.get('result_fields', None)),\n\n        error_info=batch.get('error_info', None),\n        current_task_id=None,\n        batch_rendering_start=None,\n        _metadata=batch.get('metadata', None),\n\n        pay_info=batch.get('pay_info', None),\n        paid=batch.get('paid', False),\n        last_change_dt=batch.get('last_change_dt', None),\n        _broken=False\n    )\n    sqldb.session.add(new_batch)\n    sqldb.session.commit()\n\n    old_documents = batch.get('_documents', [])\n    for old_doc in old_documents:\n        migrate_document(new_batch, old_doc, batch.get('rendered_docs', []))\n\ndef fix_name(name):\n    if not name:\n        return None\n    if isinstance(name, dict):\n        return name.get(\"nom\", \"\")\n    return str(name)\n\ndef migrate_person(person):\n    detailed_logger.info(u\"Migrating person %s\" % str(person['_id']))\n    required_fields = ('name', 'surname')\n    for rf in required_fields:\n        if rf not in person:\n            detailed_logger.error(u\"Invalid person %s: missing required field %s\" % (str(person['_id']), rf))\n            broken_persons.add(str(person['_id']))\n            return\n\n    batch_id = person.get('_batch', None)\n    if batch_id:\n        batch_id = str(batch_id)\n\n    batch = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()\n    if not batch:\n        detailed_logger.error(u\"Incomplete person %s: failed to find batch with id %s. 
Resetting to null\" % (str(person['_id']), batch_id))\n incomplete_persons.add(str(person['_id']))\n batch_id = None\n\n copy_id = person.get('_copy', None)\n if copy_id:\n copy_id = str(copy_id)\n\n _owner = person['_owner']\n try:\n _owner = int(_owner)\n except Exception:\n detailed_logger.exception(u\"Invalid _owner type\")\n detailed_logger.error(u\"Invalid person %s: Owner id is not int\" % str(person['_id']))\n broken_persons.add(str(person['_id']))\n return\n\n if not isinstance(_owner, int):\n detailed_logger.error(u\"Invalid person %s: Owner id is not int\" % str(person['_id']))\n broken_persons.add(str(person['_id']))\n return\n\n owner = AuthUser.query.filter_by(id=_owner).first()\n if not owner:\n detailed_logger.error(u\"Invalid person %s: failed to find person owner with id %s\" % (str(person['_id']), _owner))\n missing_users.add(str(person['_owner']))\n broken_persons.add(str(person['_id']))\n return\n\n new_person = PrivatePersonDbObject(\n id=str(person['_id']),\n _owner_id=owner.id,\n _batch_id=batch_id,\n deleted=False,\n caption=None,\n name=fix_name(person['name']),\n surname=fix_name(person['surname']),\n patronymic=fix_name(person.get('patronymic', None)),\n birthdate=person.get('birthdate', None),\n sex=person.get('sex', \"male\"),\n birthplace=person.get('birthplace', None),\n inn=person.get('inn', None),\n phone=person.get('phone', None),\n passport=person.get('passport', None),\n address=person.get('address', None),\n living_address=person.get('living_address', None),\n living_country_code=person.get('living_country_code', None),\n ogrnip=person.get('ogrnip', None),\n email=person.get('email', None),\n spouse=None,\n _copy_id=copy_id,\n person_type=PersonTypeEnum.PT_RUSSIAN\n )\n sqldb.session.add(new_person)\n sqldb.session.commit()\n\n\ndef migrate_company(company):\n detailed_logger.info(u\"Migrating company %s\" % str(company['_id']))\n batch_id = company.get('_batch', None)\n if batch_id:\n batch_id = str(batch_id)\n\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()\n if not batch:\n detailed_logger.error(u\"Incomplete company %s: failed to find batch with id %s. Resetting to null\" % (str(company['_id']), batch_id))\n incomplete_companies.add(str(company['_id']))\n batch_id = None\n\n copy_id = company.get('_copy', None)\n if copy_id:\n copy_id = str(copy_id)\n\n general_manager_id = company.get('general_manager', {}).get('_id', None)\n if general_manager_id:\n general_manager_id = str(general_manager_id)\n\n gen_man = PrivatePersonDbObject.query.filter_by(id=general_manager_id).scalar()\n if not gen_man:\n detailed_logger.error(u\"Incomplete company %s: failed to find general manager with id %s. 
\"\n u\"Resetting to null\" % (str(company['_id']), general_manager_id))\n incomplete_companies.add(str(company['_id']))\n general_manager_id = None\n\n _owner = company['_owner']\n try:\n _owner = int(_owner)\n except Exception:\n detailed_logger.exception(u\"Invalid _owner type\")\n detailed_logger.error(u\"Invalid company %s: Owner id is not int\" % str(company['_id']))\n broken_companies.add(str(company['_id']))\n return\n\n if not isinstance(_owner, int):\n detailed_logger.error(u\"Invalid company %s: Owner id is not int\" % str(company['_id']))\n broken_companies.add(str(company['_id']))\n return\n\n owner = AuthUser.query.filter_by(id=_owner).first()\n if not owner:\n missing_users.add(str(_owner))\n broken_companies.add(str(company['_id']))\n detailed_logger.error(u\"Invalid company %s: failed to find company owner with id %s\" % (str(company['_id']), _owner))\n return\n\n new_company = CompanyDbObject(\n id=str(company['_id']),\n _owner_id=company['_owner'],\n _copy_id=copy_id,\n _batch_id=batch_id,\n deleted=False,\n inn=company.get('inn', None),\n ogrn=company.get('ogrn', None),\n kpp=company.get('kpp', None),\n registration_date=company.get('registration_date', None),\n registration_depart=company.get('registration_depart', None),\n registration_number=company.get('registration_number', None),\n full_name=company.get('full_name', None),\n short_name=company.get('short_name', None),\n incorporation_form=IncorporationFormEnum.IF_LLC,\n country_code=company.get('country_code', None),\n address=company.get('address', None),\n generic_address=company.get('generic_address', None),\n phone=company.get('phone', None),\n general_manager=general_manager_id,\n general_manager_caption=company.get('general_manager_caption', None),\n base_general_manager_document=company.get('base_general_manager_document', None),\n company_type=CompanyTypeEnum.CT_RUSSIAN\n )\n sqldb.session.add(new_company)\n sqldb.session.commit()\n\ndef find_id_in_data(data, obj_id):\n if isinstance(data, dict):\n if len(data) == 2 and '_id' in data and 'type' in data:\n if data['_id'] == obj_id:\n print(\"Found dropped object's id %s\" % obj_id)\n return True\n else:\n for k, v in data.items():\n if find_id_in_data(k, obj_id):\n return True\n if find_id_in_data(v, obj_id):\n return True\n elif isinstance(data, list):\n for i in data:\n if find_id_in_data(i, obj_id):\n return True\n else:\n if obj_id == data:\n print(\"Found dropped object's id %s\" % obj_id)\n return True\n\n\ndef pull_id_or_ref_from_data(data, obj_id):\n if isinstance(data, dict):\n if len(data) == 2 and '_id' in data and 'type' in data:\n if data['_id'] == obj_id:\n return\n return data\n else:\n res = {}\n for k, v in data.items():\n res_v = pull_id_or_ref_from_data(v, obj_id)\n if res_v:\n res[k] = res_v\n return res\n elif isinstance(data, list):\n res = []\n for i in data:\n res_i = pull_id_or_ref_from_data(i, obj_id)\n if res_i:\n res.append(res_i)\n return res\n else:\n if obj_id == data:\n return\n return data\n\ndef forward(config, logger):\n logger.debug(u\"migrate documents, companies, persons\")\n\n logger.info(\"Drop company, person, document, batch tables\")\n try:\n CompanyDbObject.__table__.drop(sqldb.engine)\n except Exception:\n pass\n\n try:\n PrivatePersonDbObject.__table__.drop(sqldb.engine)\n except Exception:\n pass\n\n try:\n BatchDocumentDbObject.__table__.drop(sqldb.engine)\n except Exception:\n pass\n\n try:\n DocumentBatchDbObject.__table__.drop(sqldb.engine)\n except Exception:\n pass\n\n 
DocumentBatchDbObject.__table__.create(sqldb.engine)\n BatchDocumentDbObject.__table__.create(sqldb.engine)\n PrivatePersonDbObject.__table__.create(sqldb.engine)\n CompanyDbObject.__table__.create(sqldb.engine)\n\n batch_col = db['doc_batch']\n company_object_col = db['company_object']\n private_person_col = db['private_person']\n\n batch_query = {'deleted': {'$ne': True}, '_broken': {'$ne': True}, 'batch_type': {'$ne': None}}\n\n logger.info(u\"Calculating metrics\")\n\n batch_status_count_map = {}\n batch_type_count_map = {}\n test_users_list = []\n for user in AuthUser.query.filter_by(is_tester=True):\n test_users_list.append(user.id)\n\n paid_batch_count = batch_col.find({'deleted': {'$ne': True}, '_broken': {'$ne': True}, 'paid':True}).count()\n real_user_paid_batch_count = batch_col.find({'deleted': {'$ne': True},\n '_broken': {'$ne': True},\n 'paid': True,\n '_owner': {'$nin': test_users_list}}).count()\n documents_count = 0\n\n total_batches = batch_col.find(batch_query).count()\n for batch in batch_col.find(batch_query):\n batch_status = batch['status']\n if batch_status not in batch_status_count_map:\n batch_status_count_map[batch_status] = 1\n else:\n batch_status_count_map[batch_status] += 1\n\n batch_type = batch['batch_type']\n if batch_type not in batch_type_count_map:\n batch_type_count_map[batch_type] = 1\n else:\n batch_type_count_map[batch_type] += 1\n\n if 'rendered_docs' in batch:\n documents_count += len(batch['rendered_docs'])\n\n persons_count = private_person_col.find({'deleted': {'$ne': True}}).count()\n companies_count = company_object_col.find({'deleted': {'$ne': True}}).count()\n\n logger.info(\"Batches\")\n for batch in batch_col.find(batch_query):\n migrate_batch(batch)\n\n logger.info(\"Persons\")\n for person in private_person_col.find({'deleted': {'$ne': True}, '_copy': None}):\n migrate_person(person)\n\n # logger.info(\"Person copies\")\n # for person in private_person_col.find({'deleted': {'$ne': True}, '_copy': {'$ne': None}}):\n # migrate_person(person)\n\n logger.info(\"Companies\")\n for company in company_object_col.find({'deleted': {'$ne': True}, '_copy': None}):\n migrate_company(company)\n\n # logger.info(\"Company copies\")\n # for company in company_object_col.find({'deleted': {'$ne': True}, '_copy': {'$ne': None}}):\n # migrate_company(company)\n\n logger.info(\"\\n Summary\\n==================================================================================\\n\\n\")\n logger.info(\"Initial batch count: %s\" % total_batches)\n logger.info(\"Rendered document count: %s\" % documents_count)\n logger.info(\"batches by type: %s\" % \", \".join([\"%s: %s\" % (t, c) for t, c in batch_type_count_map.items()]))\n logger.info(\"batches by status: %s\" % \", \".join([\"%s: %s\" % (s, c) for s, c in batch_status_count_map.items()]))\n logger.info(\"paid batches: %s. 
real user paid batches: %s\" % (paid_batch_count, real_user_paid_batch_count))\n logger.info(\"Persons count: %s\" % persons_count)\n logger.info(\"Companies count: %s\" % companies_count)\n\n batch_type_count_map = {}\n batch_status_count_map = {}\n for batch_obj in DocumentBatchDbObject.query.filter_by():\n if batch_obj.status not in batch_status_count_map:\n batch_status_count_map[batch_obj.status] = 1\n else:\n batch_status_count_map[batch_obj.status] += 1\n\n if batch_obj.batch_type not in batch_type_count_map:\n batch_type_count_map[batch_obj.batch_type] = 1\n else:\n batch_type_count_map[batch_obj.batch_type] += 1\n\n logger.info(\"New batch count: %s\" % DocumentBatchDbObject.query.count())\n logger.info(\"Rendered document count: %s\" % BatchDocumentDbObject.query.count())\n logger.info(\"batches by type: %s\" % \", \".join([\"%s: %s\" % (t, c) for t, c in batch_type_count_map.items()]))\n logger.info(\"batches by status: %s\" % \", \".join([\"%s: %s\" % (s, c) for s, c in batch_status_count_map.items()]))\n logger.info(\"paid batches: %s. real user paid batches: %s\" % (DocumentBatchDbObject.query.filter_by(paid=True).count(),\n DocumentBatchDbObject.query.filter(DocumentBatchDbObject.paid==True).join(AuthUser).filter(AuthUser.is_tester == False).count()))\n logger.info(\"Persons count: %s\" % PrivatePersonDbObject.query.count())\n logger.info(\"Companies count: %s\" % CompanyDbObject.query.count())\n\n if broken_batches:\n logger.info(\"broken batches: %d out of %d\\n%s\" % (len(broken_batches), total_batches, '\\n'.join(broken_batches)))\n\n if missing_users:\n logger.info(\"missing users: \\n%s\" % ('\\n'.join(missing_users)))\n\n if broken_persons:\n logger.info(\"broken persons: \\n%s\" % ('\\n'.join(broken_persons)))\n\n if broken_companies:\n logger.info(\"broken companies: \\n%s\" % ('\\n'.join(broken_companies)))\n\n if incomplete_companies:\n logger.info(\"incomplete companies: \\n%s\" % ('\\n'.join(incomplete_companies)))\n\n if incomplete_persons:\n logger.info(\"incomplete persons: \\n%s\" % ('\\n'.join(incomplete_persons)))\n\n logger.warn(\"real person's paid batches that failed to migrate:\")\n for batch in batch_col.find({'deleted': {'$ne': True},\n '_broken': {'$ne': True},\n 'batch_type': {'$ne': None},\n 'paid': True,\n '_owner': {'$nin': test_users_list}\n }):\n if not DocumentBatchDbObject.query.filter_by(id=str(batch['_id'])).count():\n msg = u\"[%s] type=%s status=%s data.full_name=%s\" % (batch['_id'], batch['batch_type'], batch['status'], batch['data'].get('full_name', '<empty full_name field>'))\n logger.warn(msg)\n detailed_logger.warn(msg)\n\n save = False\n for batch in DocumentBatchDbObject.query.filter_by():\n if not batch.data:\n continue\n for person_id in broken_persons:\n if find_id_in_data(batch.data, person_id):\n batch.data = pull_id_or_ref_from_data(batch.data, person_id)\n save = True\n for company_id in broken_companies:\n if find_id_in_data(batch.data, company_id):\n batch.data = pull_id_or_ref_from_data(batch.data, company_id)\n save = True\n if save:\n sqldb.session.commit()\n\n ifns_booking_col = db['ifns_booking']\n notarius_booking_col = db['notarius_booking']\n yurist_batch_check_col = db['yurist_batch_check']\n bank_partners_request_col = db['bank_partners_request']\n\n ids = set()\n for booking in ifns_booking_col.find():\n if 'batch_id' in booking and booking['batch_id']:\n ids.add(booking['batch_id'])\n\n for _id in ids:\n ifns_booking_col.update({'batch_id': _id}, {'$set': {'batch_id': str(_id)}}, multi=True)\n\n ids = set()\n 
for booking in notarius_booking_col.find():\n if 'batch_id' in booking and booking['batch_id']:\n ids.add(booking['batch_id'])\n\n for _id in ids:\n notarius_booking_col.update({'batch_id': _id}, {'$set': {'batch_id': str(_id)}}, multi=True)\n\n ids = set()\n for ybc in yurist_batch_check_col.find():\n if 'batch_id' in ybc and ybc['batch_id']:\n ids.add(ybc['batch_id'])\n\n for _id in ids:\n yurist_batch_check_col.update({'batch_id': _id}, {'$set': {'batch_id': str(_id)}}, multi=True)\n\n ids = set()\n for bpr in bank_partners_request_col.find():\n if 'batch_id' in bpr and bpr['batch_id']:\n ids.add(bpr['batch_id'])\n\n for _id in ids:\n bank_partners_request_col.update({'batch_id': _id}, {'$set': {'batch_id': str(_id)}}, multi=True)\n\n\ndef rollback(config, logger):\n logger.debug(u\"Rolling back migration\")\n\n CompanyDbObject.__table__.drop(sqldb.engine)\n PrivatePersonDbObject.__table__.drop(sqldb.engine)\n BatchDocumentDbObject.__table__.drop(sqldb.engine)\n DocumentBatchDbObject.__table__.drop(sqldb.engine)\n" }, { "alpha_fraction": 0.7177700400352478, "alphanum_fraction": 0.7212543487548828, "avg_line_length": 23, "blob_id": "781321e3b7d241f5a17f034e35a48dbfae2fa942", "content_id": "ff42ad1b8483f26dc64a9015858d45e33a76c17a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 287, "license_type": "no_license", "max_line_length": 55, "num_lines": 12, "path": "/jb_tests/test_pack/test_rosstat_api.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport os\nos.environ['CELERY_CONFIG_MODULE'] = 'dev_celeryconfig'\n\nfrom base_test_case import BaseTestCase\nfrom test_pack.test_api import authorized\n\nclass RosstatApiTestCase(BaseTestCase):\n\n @authorized()\n def test_get_stat_codes_moscow(self):\n pass" }, { "alpha_fraction": 0.5403293967247009, "alphanum_fraction": 0.5501676797866821, "avg_line_length": 50.59505844116211, "blob_id": "a08eaad909f548e8d20e00cce2d796ac231f4d9f", "content_id": "4c9bc1dcce514f56b8455476c5a70cb12b43bdfa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 27333, "license_type": "no_license", "max_line_length": 131, "num_lines": 526, "path": "/jb_tests/test_pack/test_rendering_empty_docs.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import timedelta, datetime\n\nfrom base_test_case import BaseTestCase\nfrom flask import json\nfrom fw.documents.address_enums import RFRegionsEnum, DistrictTypeEnum, CityTypeEnum, VillageTypeEnum, StreetTypeEnum, \\\n HouseTypeEnum, BuildingTypeEnum, FlatTypeEnum\nfrom fw.documents.db_fields import DocumentBatchDbObject, PrivatePersonDbObject, CompanyDbObject\nfrom fw.documents.doc_requisites_storage import DocRequisitiesStorage\nfrom fw.documents.enums import DocumentBatchTypeEnum, BatchStatusEnum, PersonDocumentTypeEnum, DocumentTypeEnum\nfrom fw.documents.fields.doc_fields import DocumentBatch\nfrom fw.documents.fields.general_doc_fields import UserDocument\nfrom test_api import authorized\n\n\nclass RenderingEmptyDocsTestCase(BaseTestCase):\n @authorized()\n def test_async_render_empty_protocol(self):\n DocRequisitiesStorage.get_batch_descriptor(DocumentBatchTypeEnum.DBT_NEW_LLC)['doc_types'] = [DocumentTypeEnum.DT_PROTOCOL]\n\n with self.app.app_context():\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW,\n _documents=[],\n 
data={},\n _owner=self.user._id\n )\n batch_id = batch.insert(self.db)\n\n founder_otvetstvennyi = PrivatePersonDbObject(**{\n \"_owner\": self.user._id,\n \"name\": u\"\",\n \"surname\": u\"\",\n \"patronymic\": u\"\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 14),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Пушкинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n })\n founder_otvetstvennyi.insert(self.db)\n\n company_founder = CompanyDbObject(**{\n \"_owner\": self.user._id,\n \"ogrn\": \"1234567890123\",\n \"inn\": \"781108730780\",\n \"kpp\": \"999999999\",\n \"general_manager_caption\": u\"генеральный директор\",\n \"full_name\": u\"Том и Джери\",\n \"short_name\": u\"ТиД\",\n \"general_manager\": {\n },\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Седова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n },\n \"phone\": \"+7(812)1234567\"\n })\n company_founder.insert(self.app.db)\n\n new_batch_db_object = DocumentBatchDbObject(\n data={\n u\"address_type\": u\"office_address\",\n u\"founders\": [\n {\n u\"founder\": {\n u\"_id\": company_founder.id,\n u\"type\": u\"company\"\n },\n u\"nominal_capital\": 12312.22,\n u\"share\": 85\n }, {\n u\"founder\": {\n u\"_id\": founder_otvetstvennyi.id,\n u\"type\": u\"person\"\n },\n u\"nominal_capital\": 1500.5,\n u\"share\": 15\n }\n ],\n },\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.parse_raw_value(new_batch_db_object.as_dict(), False)\n structure = batch.get_api_structure()\n del structure['batch_type']\n batch_json = json.dumps(structure)\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': unicode(batch_id),\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n new_user_doc_id = db_batch._documents[0].id\n\n result = self.test_client.get(\n '/batch/document/state/?batch_id=%s&document_id=%s' % (batch_id, new_user_doc_id))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n del result_data['result']['document_id']\n self.assertEqual(result_data, {u'result': {u'state': u'new', u'links': {u'pdf': None, u'jpeg': []}}})\n\n result = 
self.test_client.post('/batch/document/render/',\n data={'batch_id': unicode(batch_id), 'document_id': unicode(new_user_doc_id)})\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data, {u'result': True})\n\n result = self.test_client.get(\n '/batch/document_preview/state/?batch_id=%s&document_id=%s' % (unicode(batch_id), unicode(new_user_doc_id)))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data['result']['state'], 'rendered')\n self.assertTrue(result_data['result']['links']['pdf'].startswith(u'http://service.zz/storage/'))\n\n\n @authorized()\n def test_async_render_empty_articles(self):\n doc_data = {\n }\n new_user_doc = UserDocument()\n new_user_doc.parse_raw_value(dict(document_type=DocumentTypeEnum.DT_ARTICLES, data=doc_data), None, False)\n\n doc_list = [\n new_user_doc.db_value()\n ]\n new_batch_db_object = DocumentBatchDbObject(documents=doc_list, batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW, _owner=self.user._id)\n new_batch_db_object.insert(self.db)\n\n result = self.test_client.get('/batch/document/state/?batch_id=%s&document_id=%s' % (\n unicode(new_batch_db_object._id), unicode(new_user_doc.id.db_value())))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n del result_data['result']['document_id']\n self.assertEqual(result_data, {u'result': {u'state': u'new', u'links': {u'pdf': None, u'jpeg': []}}})\n\n result = self.test_client.post('/batch/document/render/', data={'batch_id': unicode(new_batch_db_object._id),\n 'document_id': unicode(\n new_user_doc.id.db_value())})\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data, {u'result': True})\n\n result = self.test_client.get('/batch/document/state/?batch_id=%s&document_id=%s' % (\n unicode(new_batch_db_object._id), unicode(new_user_doc.id.db_value())))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data['result']['state'], 'rendered')\n self.assertTrue(result_data['result']['links']['pdf'].startswith(u'http://service.zz/storage/'))\n\n @authorized()\n def test_async_render_empty_act(self):\n doc_data = {\n }\n new_user_doc = UserDocument()\n new_user_doc.parse_raw_value(dict(document_type=DocumentTypeEnum.DT_ACT, data=doc_data), None, False)\n\n doc_list = [\n new_user_doc.db_value()\n ]\n new_batch_db_object = DocumentBatchDbObject(documents=doc_list, batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW, _owner=self.user._id)\n new_batch_db_object.insert(self.db)\n\n result = self.test_client.get('/batch/document/state/?batch_id=%s&document_id=%s' % (\n unicode(new_batch_db_object._id), unicode(new_user_doc.id.db_value())))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data, {u'result': {u'state': u'new', u'links': {u'pdf': None, u'jpeg': []},\n u'document_id': unicode(result_data['result']['document_id'])}})\n\n result = self.test_client.post('/batch/document/render/', data={'batch_id': unicode(new_batch_db_object._id),\n 'document_id': unicode(\n new_user_doc.id.db_value())})\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data, {u'result': True})\n\n result = self.test_client.get('/batch/document/state/?batch_id=%s&document_id=%s' % (\n 
unicode(new_batch_db_object._id), unicode(new_user_doc.id.db_value())))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data['result']['state'], 'rendered')\n self.assertTrue(result_data['result']['links']['pdf'].startswith(u'http://service.zz/storage/'))\n\n @authorized()\n def test_async_render_empty_contract(self):\n new_user_doc = UserDocument()\n new_user_doc.parse_raw_value(dict(document_type=DocumentTypeEnum.DT_CONTRACT, data={}), None, False)\n\n doc_list = [\n new_user_doc.db_value()\n ]\n new_batch_db_object = DocumentBatchDbObject(documents=doc_list, batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW, _owner=self.user._id)\n new_batch_db_object.insert(self.db)\n\n result = self.test_client.get('/batch/document/state/?batch_id=%s&document_id=%s' % (\n unicode(new_batch_db_object._id), unicode(new_user_doc.id.db_value())))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n del result_data['result']['document_id']\n self.assertEqual(result_data, {u'result': {u'state': u'new', u'links': {u'pdf': None, u'jpeg': []}}})\n\n result = self.test_client.post('/batch/document/render/', data={'batch_id': unicode(new_batch_db_object._id),\n 'document_id': unicode(\n new_user_doc.id.db_value())})\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data, {u'result': True})\n\n result = self.test_client.get('/batch/document/state/?batch_id=%s&document_id=%s' % (\n unicode(new_batch_db_object._id), unicode(new_user_doc.id.db_value())))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data['result']['state'], 'rendered')\n self.assertTrue(result_data['result']['links']['pdf'].startswith(u'http://service.zz/storage/'))\n\n @authorized()\n def test_async_render_empty_doverennost(self):\n\n new_user_doc = UserDocument()\n new_user_doc.parse_raw_value(dict(document_type=DocumentTypeEnum.DT_DOVERENNOST, data={}), None, False)\n\n doc_list = [\n new_user_doc.db_value()\n ]\n new_batch_db_object = DocumentBatchDbObject(documents=doc_list, batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW, _owner=self.user._id)\n new_batch_db_object.insert(self.db)\n\n result = self.test_client.get('/batch/document/state/?batch_id=%s&document_id=%s' % (\n unicode(new_batch_db_object._id), unicode(new_user_doc.id.db_value())))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n del result_data['result']['document_id']\n self.assertEqual(result_data, {u'result': {u'state': u'new', u'links': {u'pdf': None, u'jpeg': []}}})\n\n result = self.test_client.post('/batch/document/render/', data={'batch_id': unicode(new_batch_db_object._id),\n 'document_id': unicode(\n new_user_doc.id.db_value())})\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data, {u'result': True})\n\n result = self.test_client.get('/batch/document/state/?batch_id=%s&document_id=%s' % (\n unicode(new_batch_db_object._id), unicode(new_user_doc.id.db_value())))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data['result']['state'], 'rendered')\n self.assertTrue(result_data['result']['links']['pdf'].startswith(u'http://service.zz/storage/'))\n\n @authorized()\n def test_async_render_empty_reshenie(self):\n\n new_user_doc = UserDocument()\n 
new_user_doc.parse_raw_value(dict(document_type=DocumentTypeEnum.DT_DECISION, data={}), None, False)\n\n doc_list = [\n new_user_doc.db_value()\n ]\n new_batch_db_object = DocumentBatchDbObject(documents=doc_list, batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW, _owner=self.user._id)\n new_batch_db_object.insert(self.db)\n\n result = self.test_client.get('/batch/document/state/?batch_id=%s&document_id=%s' % (\n unicode(new_batch_db_object._id), unicode(new_user_doc.id.db_value())))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n del result_data['result']['document_id']\n self.assertEqual(result_data, {u'result': {u'state': u'new', u'links': {u'pdf': None, u'jpeg': []}}})\n\n result = self.test_client.post('/batch/document/render/', data={'batch_id': unicode(new_batch_db_object._id),\n 'document_id': unicode(\n new_user_doc.id.db_value())})\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data, {u'result': True})\n\n result = self.test_client.get('/batch/document/state/?batch_id=%s&document_id=%s' % (\n unicode(new_batch_db_object._id), unicode(new_user_doc.id.db_value())))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data['result']['state'], 'rendered')\n self.assertTrue(result_data['result']['links']['pdf'].startswith(u'http://service.zz/storage/'))\n\n @authorized()\n def test_async_render_empty_r11001(self):\n DocRequisitiesStorage.get_batch_descriptor(DocumentBatchTypeEnum.DBT_NEW_LLC)['doc_types'] = [DocumentTypeEnum.DT_P11001]\n\n with self.app.app_context():\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW,\n _documents=[],\n data={},\n _owner=self.user._id\n )\n batch_id = batch.insert(self.db)\n\n new_batch_db_object = DocumentBatchDbObject(\n data={},\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.parse_raw_value(new_batch_db_object.as_dict(), False)\n structure = batch.get_api_structure()\n del structure['batch_type']\n batch_json = json.dumps(structure)\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': unicode(batch_id),\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n new_user_doc_id = db_batch._documents[0].id\n result = self.test_client.get(\n '/batch/document/state/?batch_id=%s&document_id=%s' % (batch_id, new_user_doc_id))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n del result_data['result']['document_id']\n self.assertEqual(result_data, {u'result': {u'state': u'new', u'links': {u'pdf': None, u'jpeg': []}}})\n\n result = self.test_client.post('/batch/document/render/',\n data={'batch_id': unicode(batch_id), 'document_id': new_user_doc_id})\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data, {u'result': True})\n\n result = self.test_client.get(\n '/batch/document/state/?batch_id=%s&document_id=%s' % (batch_id, new_user_doc_id))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data['result']['state'], 'rendered')\n self.assertTrue(result_data['result']['links']['pdf'].startswith(u'http://service.zz/storage/'))\n\n @authorized()\n def test_async_render_empty_usn(self):\n\n data = {\n }\n\n new_user_doc = 
UserDocument()\n new_user_doc.parse_raw_value(dict(document_type=DocumentTypeEnum.DT_USN, data=data), None, False)\n\n doc_list = [\n new_user_doc.db_value()\n ]\n new_batch_db_object = DocumentBatchDbObject(documents=doc_list, batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW, _owner=self.user._id)\n new_batch_db_object.insert(self.db)\n\n result = self.test_client.get('/batch/document/state/?batch_id=%s&document_id=%s' % (\n unicode(new_batch_db_object._id), unicode(new_user_doc.id.db_value())))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n del result_data['result']['document_id']\n self.assertEqual(result_data, {u'result': {u'state': u'new', u'links': {u'pdf': None, u'jpeg': []}}})\n\n result = self.test_client.post('/batch/document/render/', data={'batch_id': unicode(new_batch_db_object._id),\n 'document_id': unicode(\n new_user_doc.id.db_value())})\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data, {u'result': True})\n\n result = self.test_client.get('/batch/document/state/?batch_id=%s&document_id=%s' % (\n unicode(new_batch_db_object._id), unicode(new_user_doc.id.db_value())))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data['result']['state'], 'rendered')\n self.assertTrue(result_data['result']['links']['pdf'].startswith(u'http://service.zz/storage/'))\n\n @authorized()\n def test_async_render_empty_soglasie_sobstvennikov(self):\n\n data = {\n }\n\n new_user_doc = UserDocument()\n new_user_doc.parse_raw_value(dict(document_type=DocumentTypeEnum.DT_SOGLASIE_SOBSTVENNIKOV, data=data), None,\n False)\n\n doc_list = [\n new_user_doc.db_value()\n ]\n new_batch_db_object = DocumentBatchDbObject(documents=doc_list, batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW, _owner=self.user._id)\n new_batch_db_object.insert(self.db)\n\n result = self.test_client.get('/batch/document/state/?batch_id=%s&document_id=%s' % (\n unicode(new_batch_db_object._id), unicode(new_user_doc.id.db_value())))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n del result_data['result']['document_id']\n self.assertEqual(result_data, {u'result': {u'state': u'new', u'links': {u'pdf': None, u'jpeg': []}}})\n\n result = self.test_client.post('/batch/document/render/', data={'batch_id': unicode(new_batch_db_object._id),\n 'document_id': unicode(\n new_user_doc.id.db_value())})\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data, {u'result': True})\n\n result = self.test_client.get('/batch/document/state/?batch_id=%s&document_id=%s' % (\n unicode(new_batch_db_object._id), unicode(new_user_doc.id.db_value())))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data['result']['state'], 'rendered')\n self.assertTrue(result_data['result']['links']['pdf'].startswith(u'http://service.zz/storage/'))\n\n @authorized()\n def test_async_render_empty_garant_letter_arenda(self):\n data = {\n }\n\n new_user_doc = UserDocument()\n new_user_doc.parse_raw_value(dict(document_type=DocumentTypeEnum.DT_GARANT_LETTER_ARENDA, data=data), None,\n False)\n\n doc_list = [\n new_user_doc.db_value()\n ]\n new_batch_db_object = DocumentBatchDbObject(documents=doc_list, batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW, _owner=self.user._id)\n 
new_batch_db_object.insert(self.db)\n\n result = self.test_client.get('/batch/document/state/?batch_id=%s&document_id=%s' % (\n unicode(new_batch_db_object._id), unicode(new_user_doc.id.db_value())))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n del result_data['result']['document_id']\n self.assertEqual(result_data, {u'result': {u'state': u'new', u'links': {u'pdf': None, u'jpeg': []}}})\n\n result = self.test_client.post('/batch/document/render/', data={'batch_id': unicode(new_batch_db_object._id),\n 'document_id': unicode(\n new_user_doc.id.db_value())})\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data, {u'result': True})\n\n result = self.test_client.get('/batch/document/state/?batch_id=%s&document_id=%s' % (\n unicode(new_batch_db_object._id), unicode(new_user_doc.id.db_value())))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data['result']['state'], 'rendered')\n self.assertTrue(result_data['result']['links']['pdf'].startswith(u'http://service.zz/storage/'))\n\n @authorized()\n def test_async_render_empty_garant_letter_subarenda(self):\n data = {\n }\n\n new_user_doc = UserDocument()\n new_user_doc.parse_raw_value(dict(document_type=DocumentTypeEnum.DT_GARANT_LETTER_SUBARENDA, data=data), None,\n False)\n\n doc_list = [\n new_user_doc.db_value()\n ]\n new_batch_db_object = DocumentBatchDbObject(documents=doc_list, batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW, _owner=self.user._id)\n new_batch_db_object.insert(self.db)\n\n result = self.test_client.get('/batch/document/state/?batch_id=%s&document_id=%s' % (\n unicode(new_batch_db_object._id), unicode(new_user_doc.id.db_value())))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n del result_data['result']['document_id']\n self.assertEqual(result_data, {u'result': {u'state': u'new', u'links': {u'pdf': None, u'jpeg': []}}})\n\n result = self.test_client.post('/batch/document/render/', data={'batch_id': unicode(new_batch_db_object._id),\n 'document_id': unicode(\n new_user_doc.id.db_value())})\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data, {u'result': True})\n\n result = self.test_client.get('/batch/document/state/?batch_id=%s&document_id=%s' % (\n unicode(new_batch_db_object._id), unicode(new_user_doc.id.db_value())))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data['result']['state'], 'rendered')\n self.assertTrue(result_data['result']['links']['pdf'].startswith(u'http://service.zz/storage/'))\n" }, { "alpha_fraction": 0.5308593511581421, "alphanum_fraction": 0.5313857197761536, "avg_line_length": 33.38461685180664, "blob_id": "2ccbc65518f84023f0019ce539210452df1b48bf", "content_id": "393b70f919044a09b8809f1d76ac2561b854ca7a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7599, "license_type": "no_license", "max_line_length": 120, "num_lines": 221, "path": "/app/fw/plugins/doc_builder_plugin/__init__.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom copy import copy\nfrom datetime import datetime\nfrom flask import current_app\nfrom flask_login import current_user\nfrom fw.async_tasks import rendering\nfrom fw.db.sql_base import db as sqldb\nfrom 
fw.documents.batch_manager import BatchManager\nfrom fw.documents.db_fields import DocGroupRenderTaskCheck, BatchDocumentDbObject\nfrom fw.documents.doc_requisites_storage import DocRequisitiesStorage\nfrom fw.documents.enums import UserDocumentStatus\nfrom fw.documents.schema.schema_transform import transform_with_schema\nfrom fw.monitoring_utils import zabbix_sender\nfrom fw.storage.file_storage import FileStorage\n\nPLUGIN_NAME = 'doc_builder'\n\n\ndef get_actions():\n actions = [{\n 'name': 'render_group',\n 'async': True,\n 'args': [\n {\n 'name': 'doc_types',\n 'type': 'DocArrayField',\n 'cls': 'DocTextField',\n 'required': True\n }, {\n 'name': 'batch_id',\n 'type': 'DocTextField',\n 'required': True\n }]\n }, {\n 'name': 'render_doc',\n 'async': True,\n 'args': [\n {\n 'name': 'batch_id',\n 'type': 'DocTextField',\n 'required': True\n }, {\n 'name': 'doc_type',\n 'type': 'DocTextField',\n 'required': True\n }]\n }, {\n 'name': 'render_doc_by_id',\n 'async': True,\n 'args': [\n {\n 'name': 'doc_id',\n 'type': 'DocTextField',\n 'required': True\n }]\n }, {\n 'name': 'cancel_doc_render',\n 'async': False,\n 'args': [\n {\n 'name': 'batch_id',\n 'type': 'DocTextField',\n 'required': True\n }, {\n 'name': 'doc_type',\n 'type': 'DocTextField',\n 'required': True\n }]\n }, {\n 'name': 'cancel_doc_render_by_id',\n 'async': False,\n 'args': [\n {\n 'name': 'doc_id',\n 'type': 'DocTextField',\n 'required': True\n }]\n }]\n return actions\n\ndef get_events():\n events = [{\n 'name': 'doc_render_success'\n }, {\n 'name': 'doc_render_fail'\n }, {\n 'name': 'doc_group_render_success'\n }, {\n 'name': 'doc_group_render_fail'\n }, {\n 'name': 'doc_group_render_canceled'\n }, {\n 'name': 'doc_render_canceled'\n }]\n return events\n\ndef act(action, batch_db, event_data, plugin_config, logger, config):\n assert batch_db\n descriptors = filter(lambda x: x['name'] == action, get_actions())\n action_descriptor = descriptors[0] if descriptors else None\n if not action_descriptor:\n raise ValueError(u'Invalid action: %s for %s plugin' % (action, PLUGIN_NAME))\n args = action_descriptor['args']\n source_data = copy(event_data)\n data = transform_with_schema(source_data, {\"fields\": args})\n\n batch_manager = BatchManager.init(batch_db)\n\n if action == 'render_group':\n doc_id_list = []\n batch_id = data['batch_id'].db_value()\n doc_types = event_data['doc_types'] if 'doc_types' in event_data else plugin_config.get('doc_types', [])\n assert doc_types\n\n try:\n all_ready = True\n for doc_type in doc_types:\n doc = BatchDocumentDbObject.query.filter_by(document_type=doc_type, batch_id=batch_id).first()\n if doc:\n doc.data = {}\n doc.status = UserDocumentStatus.DS_RENDERING\n doc.tried_to_render = True\n if doc.file:\n file_obj = doc.file\n doc.file = None\n FileStorage.remove_file(file_obj.id, current_app.config)\n sqldb.session.commit()\n\n else:\n if not batch_manager.is_document_required(batch_db, doc_type):\n logger.debug(u\"Document %s is not required by its condition. 
Skipping\" % doc_type)\n continue\n\n new_doc = BatchDocumentDbObject(\n _owner=current_user,\n document_type=doc_type,\n batch_id=batch_id,\n data={},\n status=UserDocumentStatus.DS_RENDERING,\n caption=batch_manager.get_title(doc_type),\n tried_to_render=True\n )\n sqldb.session.add(new_doc)\n sqldb.session.commit()\n doc = new_doc\n\n async_result = rendering.render_document_plugin.apply_async((batch_id, {'doc_id': doc.id}), countdown=2)\n if not async_result.ready():\n all_ready = False\n BatchDocumentDbObject.query.filter_by(id=doc.id).update({\n '_celery_task_id': str(async_result.id),\n '_celery_task_started': datetime.utcnow()\n })\n sqldb.session.commit()\n\n doc_id_list.append(doc.id)\n\n check_task_info = DocGroupRenderTaskCheck(\n batch_id=batch_id,\n doc_id_list=doc_id_list,\n event_data=event_data\n )\n sqldb.session.add(check_task_info)\n sqldb.session.commit()\n if all_ready:\n rendering.batch_group_gen_check_task.delay()\n except Exception:\n zabbix_sender.send(\"celery_failures\", 1)\n logger.exception(u\"Failed to start rendering document group\")\n raise\n elif action == 'render_doc':\n pass\n elif action == 'render_doc_by_id':\n pass\n elif action == 'cancel_doc_render':\n pass\n elif action == 'cancel_doc_render_by_id':\n pass\n else:\n raise Exception(u\"Invalid action %s for plugin %s\" % (action, PLUGIN_NAME))\n\n # mail_type = data['mail_type'].db_value()\n # target_type = data['target_type'].db_value()\n # target_emails = []\n # if target_type == MailTargetEnum.MTA_BATCH_OWNER:\n # email = batch_db._owner.email\n # if email:\n # target_emails.append(email)\n # elif target_type == MailTargetEnum.MTA_SPECIFIED:\n # target_emails = data.get('target_email_list', [])\n # else: #MailTargetEnum.MTA_EVENT_DATA_FIELD\n # data_field = data.get('event_data_field', None)\n # if data_field:\n # email = event_data.get(data_field, None)\n # if email:\n # target_emails.append(email)\n #\n # if not target_emails:\n # core_tasks.send.delay(batch_db.id, '%s:send_fail' % PLUGIN_NAME, event_data)\n # return False\n #\n # composer = create_composer(mail_type, logger)\n # retry_count = data.get('retry_count')\n # silent = data.get('silent', False)\n # from fw.documents.fields.simple_doc_fields import DocField\n # try:\n # if isinstance(target_emails, DocField):\n # target_emails = target_emails.db_value()\n # if isinstance(retry_count, DocField):\n # retry_count = retry_count.db_value()\n # if isinstance(silent, DocField):\n # silent = silent.db_value()\n # composer.send_email(target_emails, batch_id, event_data, retry_count, silent=silent)\n # except Exception:\n # logger.exception(u\"Failed to send email\")\n # return False\n return True\n\ndef register(class_loader):\n #class_loader.POSSIBLE_LOCATIONS.append('fw.plugins.emailer_plugin.enums')\n pass\n" }, { "alpha_fraction": 0.5792907476425171, "alphanum_fraction": 0.5804616808891296, "avg_line_length": 37.818180084228516, "blob_id": "de778fc4af60b12d3d0960bdebc08f23da8cdad0", "content_id": "6a6de3a084df4ba6b51a75c5db6d4940e124b858", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5978, "license_type": "no_license", "max_line_length": 123, "num_lines": 154, "path": "/app/fw/async_tasks/not_paid_check_send.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\nimport os\nfrom celery import current_app as celery\nfrom flask.globals import current_app\nfrom fw.async_tasks import send_email\nfrom 
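# Editorial sketch of the descriptor-driven dispatch the plugin above uses:
# each action declares an arg schema, and act() validates the event data
# against it before the handler runs. The hand-rolled check below stands in
# for the framework's transform_with_schema; all names are illustrative only.
def dispatch(action, event_data, actions, handlers):
    matches = [a for a in actions if a['name'] == action]
    if not matches:
        raise ValueError(u'Invalid action: %s' % action)
    for arg in matches[0]['args']:
        if arg.get('required') and arg['name'] not in event_data:
            raise ValueError(u'Missing required argument: %s' % arg['name'])
    return handlers[action](event_data)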
fw.auth.user_manager import UserManager\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.db_fields import DocumentBatchDbObject\nfrom fw.documents.enums import BatchStatusEnum, UserDocumentStatus\nfrom fw.storage.file_storage import FileStorage\nfrom template_filters import utm_args\n\ncelery.config_from_envvar('CELERY_CONFIG_MODULE')\n\n\[email protected]()\ndef not_paid_check_and_send(*args, **kwargs):\n config = celery.conf.get('config')\n\n batch_id = kwargs['batch_id']\n last_change_dt_str = kwargs['last_change_dt_str']\n\n with celery.conf['flask_app']().app_context():\n logger = current_app.logger\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()\n if not batch or not batch.data or batch.status != BatchStatusEnum.BS_FINALISED:\n logger.info(\"Exit 1\")\n return False\n\n last_change_dt = batch.last_change_dt\n if last_change_dt and isinstance(last_change_dt, datetime):\n if last_change_dt_str != last_change_dt.strftime(\"%Y-%m-%dT%H:%M:%S\"):\n logger.info(\"Exit 2\")\n return True\n\n if batch.paid:\n return True\n\n user = batch._owner\n if not user or not user.email:\n logger.info(\"Exit 3\")\n return False\n\n send_mails = batch.sent_mails or []\n if send_mails and \"please_pay_finalised\" in send_mails or \"please_pay_finalised_double\" in send_mails:\n logger.info(\"Exit 4\")\n return False\n\n mail_type = \"please_pay_finalised\" if \"please_finalise\" not in send_mails else \"please_pay_finalised_double\"\n\n batch_url = u\"%s://%s/ooo/?id=%s\" % (config['WEB_SCHEMA'], config['DOMAIN'], batch_id)\n batch_url = utm_args(batch_url, mail_type, user.id)\n\n tmpl_data = {\n \"short_name\": batch.data.get('short_name', \"\"),\n \"domain\": config['DOMAIN'],\n \"schema\": config['WEB_SCHEMA'],\n \"user_id\": str(user.id),\n \"batch_url\": UserManager.make_auth_url(batch_url, user).get_url(config)\n }\n\n send_email.send_email(user.email, mail_type, **tmpl_data)\n\n mails_sent = set(send_mails)\n mails_sent.add(mail_type)\n DocumentBatchDbObject.query.filter_by(id=batch_id).update({\n 'sent_mails': list(mails_sent)\n })\n sqldb.session.commit()\n logger.info(\"Exit 5\")\n return True\n\n\[email protected]()\ndef make_all_user_fin_batch_paid_and_replace_watermarked_docs_with_normal(*args, **kwargs):\n db = celery.conf.get('db')\n config = celery.conf.get('config')\n\n user_id = kwargs['user_id']\n\n with celery.conf['flask_app']().app_context():\n logger = current_app.logger\n batches = DocumentBatchDbObject.query.filter_by(_owner_id=user_id, paid=False, status=BatchStatusEnum.BS_FINALISED)\n\n for batch in batches:\n batch.paid = True\n for doc in batch._documents:\n if doc.status == UserDocumentStatus.DS_RENDERED and doc.file:\n try:\n batch_id = batch.id\n file_obj = doc.file\n if not file_obj:\n logger.error(u\"Can't replace watermarked file: \"\n u\"Failed to find file of batch %s\" % unicode(batch_id))\n continue\n file_path = FileStorage.get_path(file_obj, current_app.config)\n if not file_path or not os.path.exists(file_path) or not os.path.exists(file_path + '.src'):\n logger.error(u\"Can't replace watermarked file: \"\n u\"Failed to find original or source file %s of batch %s\" % (\n unicode(file_path + '.src'), unicode(batch_id)))\n continue\n os.rename(file_path + '.src', file_path)\n except Exception:\n logger.exception(u\"Can't replace watermarked file\")\n sqldb.session.commit()\n return True\n\n\[email protected]()\ndef not_finalised_check_and_send(batch_id=None, last_change_dt_str=None):\n if not batch_id or not last_change_dt_str:\n 
return False\n\n config = celery.conf.get('config')\n\n with celery.conf['flask_app']().app_context():\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id, deleted=False, finalisation_count=0).scalar()\n if not batch or batch.status not in (BatchStatusEnum.BS_NEW, BatchStatusEnum.BS_EDITED):\n return False\n\n last_change_dt = batch.last_change_dt\n if last_change_dt and isinstance(last_change_dt, datetime):\n if last_change_dt_str != last_change_dt.strftime(\"%Y-%m-%dT%H:%M:%S\"):\n return True\n\n mail_type = 'please_finalise'\n if mail_type in (batch.sent_mails or []):\n return False\n mails_sent = set(batch.sent_mails or [])\n\n user = batch._owner\n if not user or not user.email:\n return False\n batch_data = batch.data\n\n batch_url = u\"%s://%s/ooo/?id=%s\" % (config['WEB_SCHEMA'], config['DOMAIN'], batch_id)\n\n tmpl_data = {\n \"short_name\": batch_data.get('short_name', \"\"),\n \"domain\": config['DOMAIN'],\n \"schema\": config['WEB_SCHEMA'],\n \"user_id\": str(user.id),\n \"batch_url\": UserManager.make_auth_url(batch_url, user).get_url(config)\n }\n\n send_email.send_email(user.email, mail_type, **tmpl_data)\n\n mails_sent.add(mail_type)\n DocumentBatchDbObject.query.filter_by(id=batch_id).update({\n 'sent_mails': list(mails_sent)\n })\n sqldb.session.commit()\n return True\n" }, { "alpha_fraction": 0.591924786567688, "alphanum_fraction": 0.5965480208396912, "avg_line_length": 40.870967864990234, "blob_id": "8b549f3e54184101eff802ab0579b850c32630e0", "content_id": "a8c1a7f7f4c1803010d333668dda754537e05b52", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6520, "license_type": "no_license", "max_line_length": 178, "num_lines": 155, "path": "/app/fw/auth/social_services/facebook_backend.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport re\nimport requests\nfrom flask import current_app\nfrom fw.api import errors\nfrom fw.auth.social_services.social_backend import SocialBackend\nfrom fw.auth.social_services.social_models import SocialUserLink, SocialServiceEnum\nfrom fw.db.sql_base import db as sqldb\n\nGRAPH_URL = 'https://graph.facebook.com/v2.2'\n\n\nclass FacebookBackend(SocialBackend):\n @staticmethod\n def get_user_data(config, access_token):\n try:\n response = requests.get(GRAPH_URL + '/me?access_token=%s' % access_token)\n if response.status_code != 200:\n raise errors.SocialAuthError(\"response.code == %s\" % str(response.code))\n data = response.json()\n if not data or 'id' not in data:\n raise Exception('Failed to decode server answer: %s' % response.body)\n result = {\n 'id': int(data['id']),\n 'email': data.get('email', None)\n }\n except KeyError, exc:\n # noinspection PyUnboundLocalVariable\n raise errors.SocialAuthError(\"Invalid response: %s. 
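# Editorial sketch of the idempotency guard both tasks above rely on: the
# scheduler snapshots last_change_dt as a string, and the delayed task exits
# quietly when the batch changed after scheduling. Model and query names
# mirror the module; the action callable is an assumption.
def run_if_unchanged(batch_id, last_change_dt_str, action):
    batch = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()
    if batch is None:
        return False
    dt = batch.last_change_dt
    if dt is not None and last_change_dt_str != dt.strftime("%Y-%m-%dT%H:%M:%S"):
        # a newer edit scheduled its own check; let that later task run instead
        return True
    return action(batch)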
No such field %s\" % (response.body, str(exc)))\n except errors.SocialAuthError:\n raise\n except Exception, exc:\n raise errors.SocialAuthError(str(exc))\n return result\n\n @staticmethod\n def get_user_link(social_uid):\n return SocialUserLink.query.filter_by(uid=str(social_uid), service_id=SocialServiceEnum.SS_FACEBOOK).first()\n\n @staticmethod\n def make_link(access_token, social_uid, auth_user, config):\n params = {\n 'grant_type': 'fb_exchange_token',\n 'client_id': config['facebook_app_id'],\n 'client_secret': config['facebook_app_secret'],\n 'fb_exchange_token': access_token\n }\n url = GRAPH_URL + '/oauth/access_token?grant_type=%(grant_type)s&client_id=%(client_id)s&client_secret=%(client_secret)s&fb_exchange_token=%(fb_exchange_token)s' % params\n\n try:\n response = requests.get(url)\n if response.status_code != 200:\n raise errors.SocialAuthError()\n data = response.text\n if not data or 'access_token=' not in data or '&expires=' not in data:\n raise Exception('Failed to decode server answer')\n\n # access_token=CAAT75UAyEFYBACPLRT6WOgehcXxVa6FiaRxwm1lcloroS7SehVtF2tS1zNm6wpNOae5atg4LcMq3PmNHqUoPXJcyDzGN69ZBCD61eTUPaWUFK9mEd4qZAO6Q9Bg0lSPZAIvJswI5WHSOLTjMNceL116V6us8uy98ZCFcwHJJzKVZABZATbn7ajMo7okd7cbCgZD&expires=5183836\n new_access_token = data[data.find('access_token=') + 13:data.find('&expires')]\n except Exception, exc:\n raise errors.SocialAuthError()\n\n link = SocialUserLink(\n uid=social_uid,\n user=auth_user,\n service_id=SocialServiceEnum.SS_FACEBOOK,\n access_token=new_access_token\n )\n sqldb.session.add(link)\n sqldb.session.commit()\n return link\n\n @staticmethod\n def get_token_url(config, next_page=\"/\"):\n permissions = config['facebook_app_permissions']\n facebook_app_id = config['facebook_app_id']\n if next_page.startswith(\"/\"):\n next_page = next_page[1:]\n redirect_url = \"%s://%s%s\" % (\n config['WEB_SCHEMA'], config['api_url'], config['facebook_auth_redirect_url'] + next_page)\n current_app.logger.info(redirect_url)\n return \"https://www.facebook.com/dialog/oauth?client_id=%d&scope=%s&response_type=code&redirect_uri=%s\" % (\n facebook_app_id, permissions, redirect_url)\n\n @staticmethod\n def new_post(post_content, auth_user, config, link_to_attach=None):\n link = SocialUserLink.query.filter_by(user=auth_user, service_id=SocialServiceEnum.SS_FACEBOOK).first()\n if not link or not link.access_token:\n raise errors.SocialAuthError()\n\n url = \"https://graph.facebook.com/%s/feed\" % link.uid\n params = {\n 'message': post_content,\n 'access_token': link.access_token,\n 'name': ' ', # image title\n }\n if link_to_attach:\n params['link'] = link_to_attach\n # params['caption'] = u'Научим людей парковаться правильно. 
Spot.'\n params['description'] = ' '\n del params['message']\n\n try:\n response = requests.post(url, data=params)\n except Exception, exc:\n raise errors.SocialAuthError()\n\n if response.status_code != 200: # todo: handle expired/invalidated token response to tell client to retrieve new token\n raise errors.SocialAuthError()\n\n try:\n response_data = response.json()\n if not response_data or 'id' not in response_data:\n raise Exception('Failed to decode server answer')\n post_data = {\n 'id': response_data['id']\n }\n except Exception:\n raise errors.SocialAuthError()\n return post_data\n\n @staticmethod\n def get_token(code, config, next_page):\n if next_page.startswith(\"/\"):\n next_page = next_page[1:]\n redirect_url = \"%s://%s%s\" % (\n config['WEB_SCHEMA'], config['api_url'], config['facebook_auth_redirect_url'] + next_page)\n\n current_app.logger.info(redirect_url)\n facebook_app_id = config['facebook_app_id']\n facebook_app_secret = config['facebook_app_secret']\n url = \"https://graph.facebook.com/oauth/access_token?client_id=%s&client_secret=%s&code=%s&redirect_uri=%s\" % (\n unicode(facebook_app_id), facebook_app_secret, unicode(code), redirect_url)\n result = requests.get(url)\n current_app.logger.debug(u\"code: %s, data:%s\" % (unicode(result.status_code), result.text))\n if result.status_code != 200:\n current_app.logger.error(u\"result.status_code != 200\")\n current_app.logger.error(result.text)\n return None, None\n match = re.match(ur'access_token=(.+)&expires=(.+)', result.text.strip())\n if not match:\n current_app.logger.warn(u\"failed to get token\")\n return None, None\n access_token = match.groups()[0].strip()\n current_app.logger.debug(u\"token %s\" % access_token)\n return access_token, {}\n\n @staticmethod\n def get_profile_url(social_link_object):\n if not social_link_object:\n return\n if not isinstance(social_link_object, dict):\n social_link_object = social_link_object.as_dict()\n if 'uid' in social_link_object:\n return u\"https://www.facebook.com/%s\" % unicode(social_link_object['uid'])" }, { "alpha_fraction": 0.578422486782074, "alphanum_fraction": 0.6029011607170105, "avg_line_length": 28.413333892822266, "blob_id": "6aa59d959692debedb411a55fdb042e8e6d41df8", "content_id": "499a4d72f27c3c5892fc15e4ed40317b1f6ba13e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2206, "license_type": "no_license", "max_line_length": 95, "num_lines": 75, "path": "/app/fw/utils/address_utils.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport json\nimport requests\nfrom flask import current_app\n\n\ndef prepare_key(key):\n return filter(lambda c: c.isalnum(), key).encode('utf-8')\n\n\ndef get_detailed_address(address):\n\n if not address or not isinstance(address, basestring):\n return u\"\"\n # dd_api_key = \"90ca299132f2c9b4c09f7a907ef0dc7abcc9c374 \"\n dd_api_key = \"3dc7eb747eb6ac11509b941b40df1b582de68f2b\"\n #dd_secret_key = \"4647c2967a9d365160ca87d233b2fc6061655a7a \"\n url = u\"https://dadata.ru/api/v2/suggest/address\"\n\n cache_key = prepare_key('dadata/s/addr' + address)\n result_text = current_app.external_tools.cache.get(cache_key)\n if not result_text:\n headers = {\n u'Content-Type': u'application/json',\n u'Accept': u'application/json',\n u'Authorization': u'Token %s' % dd_api_key\n }\n\n r = requests.post(url, data=json.dumps({\"query\": address}), headers=headers, timeout=5)\n if r.status_code != 200:\n return\n result_text = 
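# Two hedged notes on the backend above. requests' Response exposes
# status_code rather than code, so the str(response.code) on get_user_data's
# error path looks like an urllib2 leftover and would raise AttributeError.
# And the access_token=...&expires=... body is a query string, which parses
# more robustly than index arithmetic (Python 2 sketch):
import urlparse

def parse_fb_token(body):
    fields = urlparse.parse_qs(body.strip())
    tokens = fields.get('access_token')
    return tokens[0] if tokens else None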
r.text\n current_app.external_tools.cache.set(cache_key, result_text, 3600 * 24)\n try:\n result = json.loads(result_text)\n except Exception:\n return\n return result\n\n\ndef dadata_standardize_address(address):\n if not address or not isinstance(address, basestring):\n return {}\n\n dd_api_key = \"3dc7eb747eb6ac11509b941b40df1b582de68f2b\"\n dd_secret_key = \"0affb032f2563b4c2bb6a66d7ee4f9c8fef48240\"\n\n url = u\"https://dadata.ru/api/v2/clean/address\"\n\n cache_key = prepare_key('dadata/s/clean' + address)\n result_text = current_app.external_tools.cache.get(cache_key)\n if not result_text:\n headers = {\n u'Content-Type': u'application/json',\n u'Accept': u'application/json',\n u'Authorization': u'Token %s' % dd_api_key,\n u'X-Secret': dd_secret_key\n }\n\n r = requests.post(url, data=json.dumps([address]), headers=headers, timeout=5)\n if r.status_code != 200:\n return\n result_text = r.text\n current_app.external_tools.cache.set(cache_key, result_text, 3600 * 24)\n\n try:\n result = json.loads(result_text)\n if not result or not isinstance(result, list):\n raise Exception()\n result = result[0]\n except Exception:\n return\n\n return result\n" }, { "alpha_fraction": 0.678004264831543, "alphanum_fraction": 0.6825039982795715, "avg_line_length": 52.588653564453125, "blob_id": "e5f41653c8746ae5e41473a5f46e3a019e931233", "content_id": "cf9493db36768a29a8aeb4549a4ac400826d2da1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9033, "license_type": "no_license", "max_line_length": 118, "num_lines": 141, "path": "/app/fw/api/geoip_utils.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport json\nimport requests\nfrom flask import current_app\nfrom fw.catalogs.models import GeoCities, GeoRanges\nfrom fw.documents.address_enums import RFRegionsEnum\n\n\nclass GeoIpLocator(object):\n REGION_NAME_MAP = {\n u\"Республика Адыгея\": RFRegionsEnum.RFR_ADYGEYA,\n u\"Кировская область\": RFRegionsEnum.RFR_KIROVSKAYA_REGION,\n u\"Республика Башкортостан\": RFRegionsEnum.RFR_BASHKARTOSTAN,\n u\"Костромская область\": RFRegionsEnum.RFR_KOSTROMSKAYA_REGION,\n u\"Республика Бурятия\": RFRegionsEnum.RFR_BURYATIYA,\n u\"Курганская область\": RFRegionsEnum.RFR_KURGANSKAYA_REGION,\n u\"Алтайский край\": RFRegionsEnum.RFR_ALTAY,\n u\"Курская область\": RFRegionsEnum.RFR_KURSKAYA_REGION,\n u\"Республика Дагестан\": RFRegionsEnum.RFR_DAGESTAN,\n u\"Ленинградская область\": RFRegionsEnum.RFR_LENINGRADSKAYA_REGION,\n u\"Республика Ингушетия\": RFRegionsEnum.RFR_INGUSHETIYA,\n u\"Липецкая область\": RFRegionsEnum.RFR_LIPETSKAYA_REGION,\n u\"Республика Кабардино-Балкария\": RFRegionsEnum.RFR_KABARDINO_BALKARIYA,\n u\"Магаданская область\": RFRegionsEnum.RFR_MAGADANSKAYA_REGION,\n u\"Республика Калмыкия\": RFRegionsEnum.RFR_KALMYKIYA,\n u\"Московская область\": RFRegionsEnum.RFR_MOSCOVSKAYA_REGION,\n u\"Республика Карачаево-Черкессия\": RFRegionsEnum.RFR_KARACHAEVO_CHERKESIYA,\n u\"Мурманская область\": RFRegionsEnum.RFR_MURMANSKAYA_REGION,\n u\"Республика Карелия\": RFRegionsEnum.RFR_KARELIYA,\n u\"Нижегородская область\": RFRegionsEnum.RFR_NIZHEGORODSKAYA_REGION,\n u\"Республика Коми\": RFRegionsEnum.RFR_KOMI,\n u\"Новгородская область\": RFRegionsEnum.RFR_NOVGORODSKAYA_REGION,\n u\"Республика Марий Эл\": RFRegionsEnum.RFR_MARIY_EL,\n u\"Новосибирская область\": RFRegionsEnum.RFR_NOVOSIBIRSKAYA_REGION,\n u\"Республика Мордовия\": RFRegionsEnum.RFR_MORDOVIYA,\n u\"Омская область\": 
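# Editorial sketch of the cache-through shape both dadata helpers above
# follow: normalize the key, try the cache, hit the API on a miss, store the
# raw body for a day, then decode. fetch is an assumed callable returning
# response text or None; prepare_key is the module's own helper.
import json

def cached_fetch(cache, key_prefix, query, fetch, ttl=3600 * 24):
    cache_key = prepare_key(key_prefix + query)
    body = cache.get(cache_key)
    if not body:
        body = fetch(query)
        if body is None:
            return
        cache.set(cache_key, body, ttl)
    try:
        return json.loads(body)
    except Exception:
        return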
RFRegionsEnum.RFR_OMSKAYA_REGION,\n u\"Республика Саха (Якутия)\": RFRegionsEnum.RFR_SAHA_YAKUTIYA,\n u\"Оренбургская область\": RFRegionsEnum.RFR_ORENBURGSKAYA_REGION,\n u\"Республика Северная Осетия (Алания)\": RFRegionsEnum.RFR_SEVERNAYA_OSETIYA,\n u\"Орловская область\": RFRegionsEnum.RFR_ORLOVSKAYA_REGION,\n u\"Республика Татарстан\": RFRegionsEnum.RFR_TATARSTAN,\n u\"Пензенская область\": RFRegionsEnum.RFR_PENZENSKAYA_REGION,\n u\"Республика Тыва (Тува)\": RFRegionsEnum.RFR_TYVA,\n u\"Пермский край\": RFRegionsEnum.RFR_PERMSKIY_KRAI,\n u\"Республика Удмуртия\": RFRegionsEnum.RFR_UDMURTIYA,\n u\"Псковская область\": RFRegionsEnum.RFR_PSKOVSKAYA_REGION,\n u\"Республика Хакасия\": RFRegionsEnum.RFR_HAKASIYA,\n u\"Ростовская область\": RFRegionsEnum.RFR_ROSTOVSKAYA_REGION,\n u\"Республика Чечня\": RFRegionsEnum.RFR_CHECHNYA,\n u\"Рязанская область\": RFRegionsEnum.RFR_RYAZANSKAYA_REGION,\n u\"Республика Чувашия\": RFRegionsEnum.RFR_CHUVASHIYA,\n u\"Самарская область\": RFRegionsEnum.RFR_SAMARSKAYA_REGION,\n u\"Саратовская область\": RFRegionsEnum.RFR_SARATOVSKAYA_REGION,\n u\"Краснодарский край\": RFRegionsEnum.RFR_KRASNODARSKIY_KRAI,\n u\"Сахалинская область\": RFRegionsEnum.RFR_SAHALINSKAYA_REGION,\n u\"Красноярский край\": RFRegionsEnum.RFR_KRASNOYARSKIY_KRAY,\n u\"Свердловская область\": RFRegionsEnum.RFR_SVERDLOVSKAYA_REGION,\n u\"Приморский край\": RFRegionsEnum.RFR_PRIMORSKIY_KRAI,\n u\"Смоленская область\": RFRegionsEnum.RFR_SMOLENSKAYA_REGION,\n u\"Ставропольский край\": RFRegionsEnum.RFR_STAVROPOLSKY_KRAI,\n u\"Тамбовская область\": RFRegionsEnum.RFR_TAMBOVSKAYA_REGION,\n u\"Хабаровский край\": RFRegionsEnum.RFR_HABAROVSKY_KRAI,\n u\"Тверская область\": RFRegionsEnum.RFR_TVERSKAYA_REGION,\n u\"Амурская область\": RFRegionsEnum.RFR_AMURSKAYA_REGION,\n u\"Томская область\": RFRegionsEnum.RFR_TOMSKAYA_REGION,\n u\"Архангельская область\": RFRegionsEnum.RFR_ARCHANGELSKAYA_REGION,\n u\"Тульская область\": RFRegionsEnum.RFR_TULSKAYA_REGION,\n u\"Астраханская область\": RFRegionsEnum.RFR_ASTRAHANSKAYA_REGION,\n u\"Тюменская область\": RFRegionsEnum.RFR_TUMENSKAYA_REGION,\n u\"Белгородская область\": RFRegionsEnum.RFR_BELGORODSKAYA_REGION,\n u\"Ульяновская область\": RFRegionsEnum.RFR_ULYANOVSKAYA_REGION,\n u\"Брянская область\": RFRegionsEnum.RFR_BRYANSKAYA_REGION,\n u\"Челябинская область\": RFRegionsEnum.RFR_CHELYABINSKAYA_REGION,\n u\"Владимирская область\": RFRegionsEnum.RFR_VLADIMIRSKAYA_REGION,\n u\"Забайкальский край\": RFRegionsEnum.RFR_ZABAIKALSKY_KRAI,\n u\"Волгоградская область\": RFRegionsEnum.RFR_VOLGOGRADSKAYA_REGION,\n u\"Вологодская область\": RFRegionsEnum.RFR_VOLOGODSKAYA_REGION,\n u\"Воронежская область\": RFRegionsEnum.RFR_VORONEZHSKAYA_REGION,\n u\"Ярославская область\": RFRegionsEnum.RFR_YAROSLAVSKAYA_REGION,\n u\"Ивановская область\": RFRegionsEnum.RFR_IVANOVSKAYA_REGION,\n u\"Москва\": RFRegionsEnum.RFR_MOSCOW,\n u\"Иркутская область\": RFRegionsEnum.RFR_IRKUTSKAYA_REGION,\n u\"Санкт-Петербург\": RFRegionsEnum.RFR_SPB,\n u\"Еврейская автономная область\": RFRegionsEnum.RFR_EVREISKAYA_AO,\n u\"Калининградская область\": RFRegionsEnum.RFR_KALININGRADSKAYA_REGION,\n u\"Ненецкий автономный округ\": RFRegionsEnum.RFR_NENETSKY_AO,\n u\"Калужская область\": RFRegionsEnum.RFR_KALUZHSKAYA_REGION,\n u\"Ханты-Мансийский автономный округ\": RFRegionsEnum.RFR_UGRA,\n u\"Камчатский край\": RFRegionsEnum.RFR_KAMCHATSKY_KRAI,\n u\"Чукотский автономный округ\": RFRegionsEnum.RFR_CHUKOTSKY_AO,\n u\"Кемеровская область\": RFRegionsEnum.RFR_KEMEROVSKAYA_REGION,\n u\"Ямало-Ненецкий 
автономный округ\": RFRegionsEnum.RFR_YAMALO_NENETSKY_AO,\n u\"Крым\": RFRegionsEnum.RFR_KRYM,\n u\"Севастополь\": RFRegionsEnum.RFR_SEVASTOPOL,\n }\n\n def __init__(self):\n pass\n\n @classmethod\n def get_location(cls, ip):\n logger = current_app.logger\n\n try:\n ip_parts = ip.split('.')\n ip_val = int(ip_parts[0]) * 256 * 256 * 256 + int(ip_parts[1]) * 256 * 256 + int(ip_parts[2]) * 256 + int(\n ip_parts[0])\n obj = GeoRanges.query.filter(GeoRanges.start.__le__(ip_val), GeoRanges.end.__ge__(ip_val)).scalar()\n if not obj:\n logger.info(u\"get location e1\")\n raise Exception()\n cid = obj.cid\n city = GeoCities.query.filter_by(cid=cid).scalar()\n if not city:\n logger.info(u\"get location e2\")\n raise Exception()\n return {\n 'region': cls.REGION_NAME_MAP.get(city.region, city.region)\n }\n\n except Exception:\n logger.info(u\"get location EE\")\n # return {'region' : None}\n pass # fall back to ipgeobase request\n\n try:\n response = requests.get('http://ipgeobase.ru:7020/geo?ip=%s&json=1' % ip, timeout=3)\n except Exception:\n return\n\n if response.status_code != 200:\n raise Exception(\"Failed to get geo info by ip. Response code: %s\" % unicode(response.status_code))\n try:\n data = json.loads(response.text)\n if ip in data:\n data = data[ip]\n if 'region' in data:\n data['region'] = cls.REGION_NAME_MAP.get(data['region'], data['region'])\n return data\n except Exception, ex:\n raise Exception(\"Failed to get geo info by ip. %s\" % unicode(ex))\n" }, { "alpha_fraction": 0.7255892157554626, "alphanum_fraction": 0.7289562225341797, "avg_line_length": 36.125, "blob_id": "38a38c7071257ccf8cedf11e3b7f6d2a84675a7b", "content_id": "b59b5b1239b5a85a3ed4745578eea4b73b26f877", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 594, "license_type": "no_license", "max_line_length": 107, "num_lines": 16, "path": "/app/services/russian_post/__init__.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport os\nfrom services.russian_post.api import russian_post_bp\n\n\ndef register(app, jinja_env, class_loader, url_prefix=None):\n app.register_blueprint(russian_post_bp, url_prefix=url_prefix)\n\n #search_path = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), u\"templates\"))\n #jinja_env.loader.loaders.append(jinja2.FileSystemLoader(search_path))\n\n class_loader.POSSIBLE_LOCATIONS.append('services.russian_post.db_models')\n\n\ndef get_manager_command_locations():\n return [os.path.normpath(os.path.abspath(os.path.dirname(__file__)))]\n" }, { "alpha_fraction": 0.5370992422103882, "alphanum_fraction": 0.5447328090667725, "avg_line_length": 37.988094329833984, "blob_id": "e826f8235a8fd3effb97cdc5462b636fbd37ebc5", "content_id": "1fa6d127c73ed8838d9f1975c526ca62554fcda5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3275, "license_type": "no_license", "max_line_length": 252, "num_lines": 84, "path": "/app/services/russian_post/integration.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom datetime import datetime\nimport requests\nfrom lxml import etree, objectify\n\n\ndef get_current_mail_status(bar_code, login, password):\n url = u'http://tracking.russianpost.ru/rtm34?wsdl'\n headers = {\n u\"Accept-Encoding\": u\"gzip,deflate\",\n u\"Content-Type\": u\"application/soap+xml;charset=UTF-8\",\n u\"User-Agent\": u\"Apache-HttpClient/4.1.1 (java 
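# Hedged note on get_location above: the dotted-quad conversion shifts the
# first three octets into place but reuses ip_parts[0] as the low byte, where
# ip_parts[3] appears intended. socket/struct express the same conversion
# without any index arithmetic:
import socket
import struct

def ipv4_to_int(ip):
    # inet_aton packs in network byte order: first octet most significant
    return struct.unpack('!I', socket.inet_aton(ip))[0]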
1.5)\"\n }\n\n data = u\"\"\"<soap:Envelope xmlns:soap=\"http://www.w3.org/2003/05/soap-envelope\" xmlns:oper=\"http://russianpost.org/operationhistory\" xmlns:data=\"http://russianpost.org/operationhistory/data\" xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\">\n <soap:Header/>\n <soap:Body>\n <oper:getOperationHistory>\n <!--Optional:-->\n <data:OperationHistoryRequest>\n <data:Barcode>%s</data:Barcode>\n <data:MessageType>0</data:MessageType>\n <!--Optional:-->\n <data:Language>RUS</data:Language>\n </data:OperationHistoryRequest>\n <!--Optional:-->\n <data:AuthorizationHeader soapenv:mustUnderstand=\"1\">\n <data:login>%s</data:login>\n <data:password>%s</data:password>\n </data:AuthorizationHeader>\n </oper:getOperationHistory>\n </soap:Body>\n </soap:Envelope>\"\"\" % (bar_code, login, password)\n\n response = requests.post(url, data=data, headers=headers)\n if response.status_code != 200:\n return\n\n last_status = {}\n root = etree.fromstring(response.content)\n for elem in root.getiterator():\n if not hasattr(elem.tag, 'find'):\n continue\n i = elem.tag.find('}')\n if i >= 0:\n elem.tag = elem.tag[i+1:]\n objectify.deannotate(root, cleanup_namespaces=True)\n tags = root.xpath('//OperationHistoryData/historyRecord')\n for tag in tags:\n oper_type_id = None\n oper_type_descr = None\n date_val = None\n address_descr = None\n\n oper_tags = tag.xpath('./OperationParameters/OperType/Id')\n for otag in oper_tags:\n oper_type_id = otag.text\n break\n\n oper_tags = tag.xpath('./OperationParameters/OperType/Name')\n for otag in oper_tags:\n oper_type_descr = otag.text\n break\n\n operdate_tags = tag.xpath('./OperationParameters/OperDate')\n for otag in operdate_tags:\n date_val = datetime.strptime(otag.text[:19], \"%Y-%m-%dT%H:%M:%S\")\n break\n\n address_tags = tag.xpath('./AddressParameters/OperationAddress/Description')\n for atag in address_tags:\n address_descr = atag.text\n break\n\n if oper_type_id is not None and oper_type_descr is not None and date_val is not None and address_tags is not None:\n last_status = {\n 'operation': oper_type_id,\n 'op_name': oper_type_descr,\n 'dt': date_val,\n 'address': address_descr\n }\n\n return last_status or None\n" }, { "alpha_fraction": 0.6360656023025513, "alphanum_fraction": 0.6393442749977112, "avg_line_length": 24.25, "blob_id": "3fe61605e075bcb735a5d75706c33dfca607fe8b", "content_id": "9276adab51c1f63b12ee561f00df8bdefde3396a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 305, "license_type": "no_license", "max_line_length": 56, "num_lines": 12, "path": "/app/services/test_svc/documents/ext_validators.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom custom_exceptions import InvalidFieldValueException\n\n\ndef _push_error(field_name, code):\n next_exc = InvalidFieldValueException(\"\")\n next_data = {\n \"field\": field_name,\n \"error_code\": code\n }\n next_exc.ext_data.append(next_data)\n raise next_exc\n\n\n" }, { "alpha_fraction": 0.6529680490493774, "alphanum_fraction": 0.6560121774673462, "avg_line_length": 30.285715103149414, "blob_id": "ec61f988e7e47d9a287198375a1763d8448d3742", "content_id": "3a81bf89f008d9e3192c349342c01bf9810df52a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 657, "license_type": "no_license", "max_line_length": 87, "num_lines": 21, "path": "/app/fw/utils/time_utils.py", "repo_name": "StanislavKraev/jb_code", 
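# A usage sketch for the tracker above; the barcode and credentials are
# placeholders for values issued by the Russian Post tracking service.
status = get_current_mail_status('RA644000001RU', 'tracking_login', 'tracking_password')
if status:
    print u'%s: %s (%s)' % (status['dt'], status['op_name'], status['address'])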
"src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom copy import copy\nfrom datetime import datetime, timedelta\nimport pytz\n\n\ndef calc_fixed_time_not_earlier(start_dt, desired_time_str, timeout_td, timezone_name):\n eta = copy(start_dt)\n eta = eta.replace(tzinfo=pytz.utc)\n eta = datetime.astimezone(eta, pytz.timezone(timezone_name))\n eta += timeout_td\n\n if desired_time_str:\n desired_time = datetime.strptime(desired_time_str, \"%H:%M\")\n dt = eta.replace(hour=desired_time.hour, minute=desired_time.minute)\n if dt < eta:\n dt += timedelta(days=1)\n eta = dt\n eta = eta.astimezone(pytz.utc).replace(tzinfo=None)\n\n return eta\n" }, { "alpha_fraction": 0.6536885499954224, "alphanum_fraction": 0.6567623019218445, "avg_line_length": 26.11111068725586, "blob_id": "86e74133f1990c3e0066de16b289cbb694f87388", "content_id": "940ee4a01ae1f0ef2d47abeb70ad64dc2880fe93", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 976, "license_type": "no_license", "max_line_length": 69, "num_lines": 36, "path": "/app/services/ip_reg/documents/ip_validators.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom flask.globals import current_app\nfrom custom_exceptions import InvalidFieldValueException\nfrom fw.catalogs.models import OkvadObject\n\n\ndef _push_error(field_name, code):\n next_exc = InvalidFieldValueException(\"\")\n next_data = {\n \"field\": field_name,\n \"error_code\": code\n }\n next_exc.ext_data.append(next_data)\n raise next_exc\n\n\ndef ip_eshn_tax_type(eshn_doc):\n eshn_doc = eshn_doc.value\n job_main_code = eshn_doc['job_main_code'].db_value()\n\n okvad = OkvadObject.query.filter_by(okved=job_main_code).scalar()\n if not okvad or okvad.nalog != 'eshn':\n _push_error(\"taxation_type\", 5)\n\n return True\n\n\ndef ip_usn_tax_type(usn_doc):\n usn_doc = usn_doc.value\n job_main_code = usn_doc['job_main_code'].db_value()\n\n okvad = OkvadObject.query.filter_by(okved=job_main_code).scalar()\n if not okvad or okvad.nalog not in ('usn', 'eshn'):\n _push_error(\"taxation_type\", 5)\n\n return True\n" }, { "alpha_fraction": 0.7286282181739807, "alphanum_fraction": 0.7296222448348999, "avg_line_length": 34.89285659790039, "blob_id": "be1b5c6bfcd8aef285c95d63573ea8a7f6a0a96e", "content_id": "7b2601e826cf51197fe73bc5c757f631832518b5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1006, "license_type": "no_license", "max_line_length": 110, "num_lines": 28, "path": "/app/services/ifns/data_model/fields.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom fw.documents.fields.doc_fields import BaseDocField\nfrom fw.documents.fields.general_doc_fields import general_doc_field\nfrom fw.documents.fields.simple_doc_fields import DocMongoIdField, DocBoolField, DocEnumField, DocTextField, \\\n DocDateTimeField, DocPhoneNumberField, DocJsonField\n\n\n@general_doc_field\nclass IfnsBooking(BaseDocField):\n\n batch_id = DocTextField(is_service=True, required=False)\n _discarded = DocBoolField(is_service=True, required=False)\n service_id = DocEnumField(enum_cls=\"IfnsServiceEnum\", required=True)\n\n id = DocMongoIdField(is_parse_from_api=False)\n\n ifns = DocTextField()\n service = DocTextField()\n date = DocDateTimeField()\n window = DocTextField()\n address = DocTextField()\n phone = DocPhoneNumberField()\n how_to_get = DocTextField()\n code = DocTextField(required=False) # 
from appointment url\n\n reg_info = DocJsonField(is_parse_from_api=False, required=False)\n\n __api_to_db_mapping__ = {'id': '_id'}\n\n" }, { "alpha_fraction": 0.6044170260429382, "alphanum_fraction": 0.6253653764724731, "avg_line_length": 32.2864875793457, "blob_id": "d1108eed4f4c05642ed5a036f9dbae7ffb907c9e", "content_id": "1b42018858ad76abb33258f4e85f1631b1243b97", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6158, "license_type": "no_license", "max_line_length": 91, "num_lines": 185, "path": "/app/fw/auth/encrypt.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# Use the system PRNG if possible\nimport base64\nfrom datetime import datetime\nfrom decimal import Decimal\nimport hashlib\nimport hmac\nimport os\nimport struct\nimport operator\nimport binascii\nimport time\n\nimport random\n\ntry:\n random = random.SystemRandom()\n using_sysrandom = True\nexcept NotImplementedError:\n import warnings\n\n warnings.warn('A secure pseudo-random number generator is not available '\n 'on your system. Falling back to Mersenne Twister.')\n using_sysrandom = False\n\nSECRET_KEY = os.environ.get(\"LO_PLATFORM_SECRET_KEY\") or \"drn2_k9k4)ow3h()21(IU1(dn=4d66h@54dw^*#t8)1ypm_$zg(7@45\"\n\n\ndef is_protected_type(obj):\n \"\"\"Determine if the object instance is of a protected type.\n\n Objects of protected types are preserved as-is when passed to\n force_text(strings_only=True).\n \"\"\"\n return isinstance(obj, (int, long) + (type(None), float, Decimal,\n datetime.datetime, datetime.date, datetime.time))\n\n\ndef force_bytes(s, encoding='utf-8', strings_only=False, errors='strict'):\n # Handle the common case first for performance reasons.\n if isinstance(s, bytes):\n if encoding == 'utf-8':\n return s\n else:\n return s.decode('utf-8', errors).encode(encoding, errors)\n if strings_only and is_protected_type(s):\n return s\n if not isinstance(s, basestring):\n try:\n return bytes(s)\n except UnicodeEncodeError:\n if isinstance(s, Exception):\n # An Exception subclass containing non-ASCII data that doesn't\n # know how to print itself properly. 
We shouldn't raise a\n # further exception.\n return b' '.join([force_bytes(arg, encoding, strings_only,\n errors) for arg in s])\n return unicode(s).encode(encoding, errors)\n else:\n return s.encode(encoding, errors)\n\n\ndef _fast_hmac(key, msg, digest):\n \"\"\"\n A trimmed down version of Python's HMAC implementation.\n\n This function operates on bytes.\n \"\"\"\n dig1, dig2 = digest(), digest()\n if len(key) != dig1.block_size:\n raise ValueError('Key size needs to match the block_size of the digest.')\n dig1.update(key.translate(hmac.trans_36))\n dig1.update(msg)\n dig2.update(key.translate(hmac.trans_5C))\n dig2.update(dig1.digest())\n return dig2\n\n\ndef _bin_to_long(x):\n \"\"\"\n Convert a binary string into a long integer\n\n This is a clever optimization for fast xor vector math\n \"\"\"\n return int(binascii.hexlify(x), 16)\n\n\ndef _long_to_bin(x, hex_format_string):\n \"\"\"\n Convert a long integer into a binary string.\n hex_format_string is like \"%020x\" for padding 10 characters.\n \"\"\"\n return binascii.unhexlify((hex_format_string % x).encode('ascii'))\n\n\ndef pbkdf2(password, salt, iterations, dklen=0, digest=None):\n \"\"\"\n Implements PBKDF2 as defined in RFC 2898, section 5.2\n\n HMAC+SHA256 is used as the default pseudo random function.\n\n As of 2011, 10,000 iterations was the recommended default which\n took 100ms on a 2.2Ghz Core 2 Duo. This is probably the bare\n minimum for security given 1000 iterations was recommended in\n 2001. This code is very well optimized for CPython and is only\n four times slower than openssl's implementation. Look in\n django.contrib.auth.hashers for the present default.\n \"\"\"\n assert iterations > 0\n if not digest:\n digest = hashlib.sha256\n password = force_bytes(password)\n salt = force_bytes(salt)\n hlen = digest().digest_size\n if not dklen:\n dklen = hlen\n if dklen > (2 ** 32 - 1) * hlen:\n raise OverflowError('dklen too big')\n l = -(-dklen // hlen)\n r = dklen - (l - 1) * hlen\n\n hex_format_string = \"%%0%ix\" % (hlen * 2)\n\n inner_digest_size = digest().block_size\n if len(password) > inner_digest_size:\n password = digest(password).digest()\n password += b'\\x00' * (inner_digest_size - len(password))\n\n def F(i):\n def U():\n u = salt + struct.pack(b'>I', i)\n for j in xrange(int(iterations)):\n u = _fast_hmac(password, u, digest).digest()\n yield _bin_to_long(u)\n\n return _long_to_bin(reduce(operator.xor, U()), hex_format_string)\n\n T = [F(x) for x in range(1, l + 1)]\n return b''.join(T[:-1]) + T[-1][:r]\n\n\ndef get_random_string(length=12,\n allowed_chars='abcdefghijklmnopqrstuvwxyz'\n 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'):\n \"\"\"\n Returns a securely generated random string.\n\n The default length of 12 with the a-z, A-Z, 0-9 character set returns\n a 71-bit value. log_2((26+26+10)^12) =~ 71 bits\n \"\"\"\n if not using_sysrandom:\n # This is ugly, and a hack, but it makes things better than\n # the alternative of predictability. This re-seeds the PRNG\n # using a value that is hard for an attacker to predict, every\n # time a random string is required. 
This may change the\n # properties of the chosen random sequence slightly, but this\n # is better than absolute predictability.\n random.seed(\n hashlib.sha256(\n (\"%s%s%s\" % (\n random.getstate(),\n time.time(),\n SECRET_KEY)).encode('utf-8')\n ).digest())\n return ''.join(random.choice(allowed_chars) for i in range(length))\n\n\ndef encrypt_password(password, salt=None):\n salt = salt or get_random_string()\n iterations = 12000\n algorithm = \"pbkdf2_sha256\"\n hash = pbkdf2(password, salt, iterations, digest=hashlib.sha256)\n # noinspection PyTypeChecker\n hash = base64.b64encode(hash).decode('ascii').strip()\n return \"%s$%d$%s$%s\" % (algorithm, iterations, salt, hash)\n\n\ndef check_password(plain_password, encrypted_password):\n if '$' not in encrypted_password or len(encrypted_password.split('$')) != 4:\n return False\n\n algorithm, iterations, salt, hash = encrypted_password.split('$', 3)\n check_pwd = encrypt_password(plain_password, salt)\n return check_pwd == encrypted_password\n" }, { "alpha_fraction": 0.5572360754013062, "alphanum_fraction": 0.5646500587463379, "avg_line_length": 45.83333206176758, "blob_id": "c80f87eef35675d2146fdd075bf040caa0c2c9c9", "content_id": "fcaa1c7b0326a5bec4938963dc7a22868bbbff6c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3372, "license_type": "no_license", "max_line_length": 146, "num_lines": 72, "path": "/app/manage_commands/test_commands.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# noinspection PyUnresolvedReferences\nfrom email.MIMEBase import MIMEBase\nfrom email.header import Header\nimport smtplib\n\nfrom email.mime.multipart import MIMEMultipart\nfrom email.mime.text import MIMEText\nfrom fw.documents.db_fields import BatchDocumentDbObject\nfrom fw.documents.enums import BatchStatusEnum\nfrom fw.transport.mail import Mailer\n\nfrom manage_commands import BaseManageCommand\n\nclass CheckThatDocRecTypeFieldResidesInTheOnlyOneFounderInP11001DocumentOfBatch(BaseManageCommand):\n NAME = \"check_single_rcf\"\n\n def push_error(self, batch_id):\n self.logger.error(u\"Batch %s has more than one documents_recipient_type mark in founders\" % batch_id)\n\n def run(self):\n self.logger.info(u\"Test that documents recipient type is in the only one founder\")\n self.logger.info(u'=' * 50)\n\n invalid_batches = []\n batch_id_set = set()\n for doc in BatchDocumentDbObject.query.filter(BatchDocumentDbObject.batch.status.in_([BatchStatusEnum.BS_FINALISED,\n BatchStatusEnum.BS_EDITED,\n BatchStatusEnum.BS_NEW]),\n BatchDocumentDbObject.document_type == \"P11001\"):\n batch_id = doc.batch.id\n if batch_id not in batch_id_set:\n batch_id_set.add(batch_id)\n if not doc.data:\n continue\n\n founders = doc.data.get('founders', [])\n if not founders:\n continue\n\n mark = False\n for founder in founders:\n if founder and 'documents_recipient_type' in founder:\n if mark:\n self.push_error(batch_id)\n invalid_batches.append(batch_id)\n mark = True\n\n if invalid_batches:\n message = \"Warning! 
Following batches has invalid founders (multiple documents_recipient_type marks): %s\" % ', '.join(invalid_batches)\n\n mailer = Mailer(self.config['mailer_server'], self.config['mailer_smtp_user'], self.config['mailer_smtp_password'])\n for addr in ('[email protected]', '[email protected]'):\n msg = MIMEMultipart()\n msg['To'] = addr\n msg['From'] = self.config['mailer_smtp_user']\n msg['Date'] = smtplib.email.Utils.formatdate(localtime = 1)\n msg['Subject'] = Header(\"Warning! Some batches has invalid founders\", 'utf-8')\n\n msg_internal = MIMEMultipart('alternative')\n\n # Record the MIME types of both parts - text/plain and text/html.\n part1 = MIMEText(message, 'plain', 'utf-8')\n part2 = MIMEText(\"<html><body>%s</body></html>\" % message, 'html', 'utf-8')\n # Attach parts into message container.\n # According to RFC 2046, the last part of a multipart message, in this case\n # the HTML message, is best and preferred.\n msg_internal.attach(part1)\n msg_internal.attach(part2)\n msg.attach(msg_internal)\n\n mailer.send_email(self.config['mailer_smtp_user'], addr, msg.as_string())\n" }, { "alpha_fraction": 0.39649179577827454, "alphanum_fraction": 0.4268903434276581, "avg_line_length": 43.676116943359375, "blob_id": "a1a74960b85abc15a04e6b03f8ed73acc6b33731", "content_id": "6f28452bcc79028d1407806d439415230c0bfcb6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 151459, "license_type": "no_license", "max_line_length": 175, "num_lines": 3245, "path": "/jb_tests/test_pack/test_rendering_third_stage.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import timedelta, datetime\n\nfrom bson.objectid import ObjectId\nfrom flask import json\n\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.address_enums import RFRegionsEnum, VillageTypeEnum\nfrom fw.documents.address_enums import DistrictTypeEnum\nfrom fw.documents.address_enums import CityTypeEnum\nfrom fw.documents.address_enums import StreetTypeEnum\nfrom fw.documents.address_enums import HouseTypeEnum\nfrom fw.documents.address_enums import BuildingTypeEnum\nfrom fw.documents.address_enums import FlatTypeEnum\nfrom fw.documents.db_fields import PrivatePersonDbObject, CompanyDbObject, DocumentBatchDbObject, BatchDocumentDbObject\nfrom fw.documents.doc_requisites_storage import DocRequisitiesStorage\nfrom fw.documents.enums import DocumentBatchTypeEnum, DocumentTypeEnum, PersonDocumentTypeEnum, BatchStatusEnum, \\\n CurrencyEnum\nfrom fw.documents.fields.doc_fields import DocumentBatch\nfrom services.ifns.data_model.models import IfnsBookingObject\nfrom test_api import authorized\nfrom test_pack.base_batch_test import BaseBatchTestCase\n\n\nclass RenderingTestCase(BaseBatchTestCase):\n @authorized()\n def test_general_manager_contract(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_GENERAL_MANAGER_CONTRACT, DocumentTypeEnum.DT_ARTICLES]\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД 
Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n })\n sqldb.session.add(founder)\n sqldb.session.commit()\n\n founder_otvetstvennyi = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Семен\",\n \"surname\": u\"Семенчук\",\n \"patronymic\": u\"Семейкин\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Пушкинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n sqldb.session.add(founder_otvetstvennyi)\n sqldb.session.commit()\n\n company_founder = CompanyDbObject(**{\n \"_owner\": self.user,\n \"ogrn\": \"1234567890123\",\n \"inn\": \"781108730780\",\n \"kpp\": \"999999999\",\n \"general_manager_caption\": u\"генеральный директор\",\n \"full_name\": u\"Том и Джери\",\n \"short_name\": u\"ТиД\",\n \"general_manager\": {\n \"_id\": founder.id,\n \"type\": \"person\"\n },\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Седова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(company_founder)\n sqldb.session.commit()\n\n doc_data = {\n u\"full_name\": u\"Питер-сервис\",\n u\"short_name\": u\"Питер-сервис\",\n u\"inn\": \"781108730780\",\n u\"kpp\": \"999999999\",\n u\"general_manager_term\": 38,\n u\"has_general_manager_contract\": True,\n u\"general_manager_caption\": u\"повелитель\",\n u\"general_manager\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n u\"founders\": [\n {\n \"founder\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": 12312.22,\n \"share\": 100\n }\n ],\n u\"general_manager_salary\": {\n \"currency\": CurrencyEnum.CE_RUS_RUBLES,\n \"value\": \"123123.00\"\n },\n u\"general_manager_fixed_working_hours\": True,\n u\"general_manager_working_hours\": {\n u\"start_working_hours\": datetime(1900, 1, 1, hour=8),\n u\"finish_working_hours\": datetime(1900, 1, 1, hour=16, minute=10),\n u\"holidays\": [\"sun\", \"sat\"],\n u\"lunch_time\": 10\n },\n u\"general_manager_trial_period\": 24,\n u\"general_manager_quit_notify_period\": 12,\n u\"general_manager_contract_number\": \"2\",\n u\"selected_moderator\": {\n \"type\": \"company\",\n \"_id\": company_founder.id\n },\n u\"general_manager_salary_days\": [10, 25],\n u\"address\": {\n \"district_type\": u\"р-н\",\n \"city_type\": u\"г\",\n \"street_type\": u\"ул\",\n \"index\": 191186,\n \"house\": u\"4\",\n \"region\": u\"Санкт-Петербург\",\n \"flat\": u\"12\",\n \"building_type\": u\"к\",\n \"street\": u\"Большая Морская\",\n \"address_string\": u\"г Санкт-Петербург, ул Большая Морская, д 4, кв 12\",\n \"flat_type\": u\"кв\",\n \"house_type\": u\"д\",\n \"village_type\": u\"п\",\n \"ifns\": 7841\n },\n u\"address_type\": \"general_manager_registration_address\",\n u\"general_manager_contract_date\": datetime.now(),\n u\"general_manager_has_special_terms\": False,\n # u\"general_manager_contract_additional_terms\" : {\n # u\"rights\" : [\"Право 1\", \"Право 2\"],\n # u\"responsibility\" : [\"Респ 1\", \"Респ 2\"],\n # u\"duties\" : [u\"колоть дрова\", \"к2\"]\n # },\n u\"board_of_directors\": True,\n u\"registration_date\": datetime.now()\n }\n\n with self.app.app_context():\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_FINALISED,\n paid=True,\n data={\n \"full_name\": u\"Пни и Кочки\"\n },\n result_fields={\n 'ifns_reg_info': {\n 'ogrn': \"1234567890123\"\n }\n },\n _owner=self.user\n )\n sqldb.session.add(batch)\n\n doc = BatchDocumentDbObject(\n _owner=self.user,\n document_type=\"articles\",\n batch=batch,\n data={\n \"job_main_code\": \"50.20\",\n \"use_national_language_company_name\": False,\n \"use_foreign_company_name\": False,\n \"pravo_otchuzhdeniya_type\": 3,\n \"general_manager_term\": 36,\n \"short_name\": 
u\"Питер-сервис\",\n \"preimusch_pravo_priobreteniya_doli_time_span\": 30,\n \"address\": {\n \"building\": \"20\",\n \"index\": 117105,\n \"street_type\": \"ш\",\n \"house\": \"12Г\",\n \"region\": \"Москва\",\n \"okato\": 12122222222,\n \"address_string\": \"г Москва, Варшавское шоссе, д 12Г стр 20\",\n \"building_type\": \"стр\",\n \"street\": \"Варшавское\",\n \"house_type\": \"д\",\n \"ifns\": 7726\n },\n \"perehod_doli_k_naslednikam_soglasie\": True,\n \"necessary_votes_for_general_meeting_decisions\": {\n \"audit_assignment\": 1,\n \"large_deals_approval\": 1,\n \"concern_deals_approval\": 1,\n \"executives_formation\": 1,\n \"other_issues\": 1,\n \"articles_change\": 1,\n \"branch_establishment\": 1,\n \"profit_distribution\": 1,\n \"annual_reports_approval\": 1,\n \"liquidation_committee_assignment\": 1,\n \"auditor_election\": 1,\n \"obligations_emission\": 1,\n \"reorganization_or_liquidation\": 1,\n \"internal_documents_approval\": 1,\n \"company_strategy\": 1\n },\n \"starter_capital\": {\n \"capital_type\": 1,\n \"value\": {\n \"currency\": \"RUB\",\n \"value\": \"10000\"\n }\n },\n \"general_manager_caption\": \"Генеральный директор\",\n \"doc_date\": datetime.now(),\n \"full_name\": u\"Питер-сервис\",\n \"job_code_array\": [\n \"52.48.39\",\n \"50.30\",\n \"50.40\",\n \"51.44.4\"\n ],\n \"board_of_directors\": False,\n \"founders_count\": 2,\n \"board_of_directors_caption\": \"Совет директоров\"\n }\n )\n sqldb.session.add(doc)\n sqldb.session.commit()\n _id = batch.id\n\n booking = IfnsBookingObject(reg_info={\n 'status': 'registered',\n 'reg_date': datetime.now(),\n 'ogrn': \"123456789012345\"\n }, batch_id=_id)\n sqldb.session.add(booking)\n\n # result = self.test_client.post('/batch/update/', data={\n # 'batch_id': _id,\n # 'batch': batch_json\n # })\n # self.assertEqual(result.status_code, 200)\n\n sqldb.session.commit()\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n doc_data = {\n \"inn\": u\"7811554010\", u\"kpp\": \n u\"781101001\",\n u\"address\": {\"qc\": u\"0\", u\"city\": u\"Петергоф\", u\"flat\": u\"15\", u\"ifns\": u\"7819\", u\"house\": u\"3\",\n u\"index\": 198510, u\"okato\": u\"40290501000\", u\"region\": u\"Санкт-Петербург\",\n u\"street\": u\"Аврова\", u\"city_type\": u\"г\", u\"flat_type\": u\"кв\", u\"house_type\": u\"д\",\n u\"qc_complete\": u\"10\", u\"street_type\": u\"пл\",\n u\"address_string\": u\"г Санкт-Петербург, г Петергоф, пл Аврова, д 3, кв 15\",\n u\"long_form_mode\": False}, \n u\"founders\": [{\n u\"share\": u\"100\", \n u\"founder\": {\"_id\": founder_otvetstvennyi.id, u\"type\": u\"person\"},\n u\"nominal_capital\": {\"value\": u\"10000\", u\"currency\": u\"RUB\"}\n }], \n u\"tax_type\": 2, u\"full_name\": u\"Питер-сервис\",\n u\"obtain_way\": u\"mail\", u\"share_type\": u\"percent\", u\"short_name\": u\"Питер-сервис\",\n u\"address_type\": u\"office_address\", u\"job_main_code\": u\"05.01\", u\"taxation_type\": u\"usn\",\n u\"address_person\": {\"_id\": founder_otvetstvennyi.id, u\"type\": u\"person\"},\n u\"job_code_array\": [\"15.20\", u\"15.41.1\", u\"51.38.1\", u\"52.23\"],\n u\"general_manager\": {\"_id\": founder_otvetstvennyi.id, u\"type\": u\"person\"},\n u\"starter_capital\": {\"value\": u\"10000\", u\"currency\": u\"RUB\"}, u\"registration_way\": u\"notary\",\n u\"registration_date\": datetime.now(),\n u\"board_of_directors\": False, u\"address_other_owner\": False, u\"general_manager_term\": 36,\n u\"general_manager_salary\": {\"value\": u\"12\", u\"currency\": u\"RUB\"},\n 
u\"general_manager_caption\": u\"Генеральный директор\", u\"pravo_otchuzhdeniya_type\": 1,\n u\"use_foreign_company_name\": False, u\"has_general_manager_order\": True,\n u\"board_of_directors_caption\": u\"Совет директоров\", u\"general_manager_salary_days\": [10],\n u\"general_manager_order_number\": u\"01\", u\"general_manager_trial_period\": 6,\n u\"has_general_manager_contract\": True,\n u\"general_manager_contract_date\": datetime.now(),\n u\"general_manager_contract_number\": u\"01\", u\"general_manager_quit_notify_period\": 1,\n u\"use_national_language_company_name\": False, u\"general_manager_fixed_working_hours\": False,\n u\"perehod_doli_k_naslednikam_soglasie\": True,\n u\"preimusch_pravo_priobreteniya_doli_time_span\": 30,\n u\"necessary_votes_for_general_meeting_decisions\": {\"other_issues\": 1, u\"articles_change\": 1,\n u\"audit_assignment\": 1,\n u\"auditor_election\": 1,\n u\"company_strategy\": 1,\n u\"profit_distribution\": 1,\n u\"branch_establishment\": 1,\n u\"executives_formation\": 1,\n u\"large_deals_approval\": 1,\n u\"obligations_emission\": 1,\n u\"concern_deals_approval\": 1,\n u\"annual_reports_approval\": 1,\n u\"internal_documents_approval\": 1,\n u\"reorganization_or_liquidation\": 1,\n u\"liquidation_committee_assignment\": 1}}\n db_batch.result_fields = {\"ifns\": u\"7819\", u\"ifns_reg_info\": {\"ogrn\": u\"1157800121990\", u\"status\": u\"registered\",\n u\"reg_date\": datetime.now(),\n u\"full_name\": u\"Питер-Сервис\"},\n u\"first_work_day\": u\"2013-06-17\",\n u\"founder_applicant\": u\"%s_person\" % founder_otvetstvennyi.id,\n u\"registration_address\": {\"qc\": u\"0\", u\"city\": u\"Петергоф\", u\"flat\": u\"15\",\n u\"ifns\": u\"7819\", u\"house\": u\"3\", u\"index\": u\"198510\",\n u\"okato\": u\"40290501000\", u\"region\": u\"Санкт-Петербург\",\n u\"street\": u\"Аврова\", u\"city_type\": u\"г\", u\"flat_type\": u\"кв\",\n u\"house_type\": u\"д\", u\"qc_complete\": u\"10\",\n u\"street_type\": u\"пл\",\n u\"address_string\": u\"г Санкт-Петербург, г Петергоф, пл Аврова, д 3, кв 15\",\n u\"long_form_mode\": u\"False\"},\n u\"general_manager_caption_genitive\": u\"генерального директора\"}\n db_batch.error_info = {\"error_ext\": [{\"field\": u\"general_manager_term\", u\"error_code\": 4},\n {\"field\": u\"general_manager_contract_number\", u\"error_code\": 4},\n {\"field\": u\"ogrn\", u\"error_code\": 4},\n {\"field\": u\"inn\", u\"error_code\": 4},\n {\"field\": u\"full_name\", u\"error_code\": 4},\n {\"field\": u\"board_of_directors\", u\"error_code\": 4},\n {\"field\": u\"general_manager_fixed_working_hours\", u\"error_code\": 4},\n {\"field\": u\"general_manager_trial_period\", u\"error_code\": 4},\n {\"field\": u\"general_manager_contract_date\", u\"error_code\": 4},\n {\"field\": u\"address_type\", u\"error_code\": 4},\n {\"field\": u\"data\", u\"error_code\": 4},\n {\"field\": u\"short_name\", u\"error_code\": 4},\n {\"field\": u\"general_manager_quit_notify_period\", u\"error_code\": 4},\n {\"field\": u\"general_manager_salary_days\", u\"error_code\": 4},\n {\"field\": u\"kpp\", u\"error_code\": 4},\n {\"field\": u\"general_manager\", u\"error_code\": 4},\n {\"field\": u\"general_manager_caption\", u\"error_code\": 4},\n {\"field\": u\"general_manager_order_number\", u\"error_code\": 4}]} \n\n db_batch.data = doc_data\n sqldb.session.commit()\n\n new_batch_db_object = DocumentBatchDbObject(\n data=doc_data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = 
json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': _id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': _id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_GENERAL_MANAGER_CONTRACT])\n })\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n doc_data['inn'] = ''\n new_batch_db_object = DocumentBatchDbObject(\n data=doc_data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': _id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n\n doc_data['inn'] = u'7811554010'\n new_batch_db_object = DocumentBatchDbObject(\n data=doc_data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': _id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n # self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n # self.assertEqual(len(db_batch._documents), 2)\n #\n # batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n # batch_json = json.dumps(batch.get_api_structure())\n # result = self.test_client.post('/batch/update/', data={\n # 'batch_id': _id,\n # 'batch': batch_json\n # })\n # self.assertEqual(result.status_code, 200)\n #\n # db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n # self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n # self.assertEqual(len(db_batch._documents), 2)\n # # self.assertTrue(not not db_batch.rendered_docs[1]['file_link'])\n #\n # print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n @authorized()\n def test_general_manager_order(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_GENERAL_MANAGER_ORDER]\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": 
VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n })\n sqldb.session.add(founder)\n\n founder_otvetstvennyi = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Семен\",\n \"surname\": u\"Семенчук\",\n \"patronymic\": u\"Семейкин\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Пушкинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n sqldb.session.add(founder_otvetstvennyi)\n\n company_founder = CompanyDbObject(**{\n \"_owner\": self.user,\n \"ogrn\": \"1234567890123\",\n \"inn\": \"781108730780\",\n \"kpp\": \"999999999\",\n \"general_manager_caption\": u\"генеральный директор\",\n \"full_name\": u\"Том и Джери\",\n \"short_name\": u\"ТиД\",\n \"general_manager\": {\n \"_id\": founder.id,\n \"type\": \"person\"\n },\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Седова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(company_founder)\n sqldb.session.commit()\n\n doc_data = {\n u\"full_name\": u\"Рога и Копыта\",\n u\"short_name\": u\"РиК\",\n u\"inn\": \"781108730780\",\n u\"kpp\": \"999999999\",\n u\"general_manager_term\": 38,\n u\"has_general_manager_contract\": True,\n u\"has_general_manager_order\": True,\n u\"general_manager_caption\": u\"директор\",\n u\"general_manager\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n u\"founders\": [\n {\n \"founder\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": 12312.22,\n \"share\": 100\n }\n ],\n u\"general_manager_salary\": {\n \"currency\": CurrencyEnum.CE_RUS_RUBLES,\n \"value\": \"123123.00\"\n },\n u\"general_manager_fixed_working_hours\": True,\n # u\"general_manager_working_hours\" : {\n # u\"start_working_hours\" : datetime.now(),\n # u\"finish_working_hours\" : datetime.now() + timedelta(hours = 8),\n # u\"holidays\" : [\"mon\", \"tue\", \"fri\"],\n # u\"lunch_time\" : 10\n # },\n u\"general_manager_trial_period\": 24,\n u\"general_manager_quit_notify_period\": 12,\n # u\"general_manager_contract_number\" : \"2\",\n u\"general_manager_order_number\": \"1\",\n u\"selected_moderator\": {\n \"type\": \"company\",\n \"_id\": company_founder.id\n },\n u\"general_manager_salary_days\": [1, 2, 3, 4, 5],\n u\"address\": {\n #\"district_type\" : u\"р-н\",\n #\"city_type\" : u\"г\",\n \"street_type\": u\"ул\",\n \"index\": 191186,\n \"house\": u\"4\",\n \"region\": u\"Санкт-Петербург\",\n \"flat\": u\"12\",\n #\"building_type\" : u\"к\",\n \"street\": u\"Большая Морская\",\n \"address_string\": u\"г Санкт-Петербург, ул Большая Морская, д 4, кв 12\",\n \"flat_type\": u\"кв\",\n \"house_type\": u\"д\",\n #\"village_type\" : u\"п\",\n \"ifns\": 7841\n },\n u\"address_type\": \"office_address\",\n u\"general_manager_contract_date\" : datetime.now(),\n # u\"general_manager_contract_additional_terms\" : {\n # u\"rights\" : \"\",\n # u\"responsibility\" : None,\n # u\"duties\" : u\"колоть дрова\"\n # },\n u\"board_of_directors\": True,\n u\"registration_date\": datetime.now()\n }\n\n with self.app.app_context():\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_FINALISED,\n data={},\n _owner=self.user,\n result_fields={\n 'ifns_reg_info': {\n 'ogrn': \"1234567890123\"\n }\n }\n )\n sqldb.session.add(batch)\n\n doc = BatchDocumentDbObject(\n _owner=self.user,\n document_type=\"articles\",\n batch=batch,\n data={\n \"job_main_code\": \"50.20\",\n \"use_national_language_company_name\": False,\n \"use_foreign_company_name\": False,\n \"pravo_otchuzhdeniya_type\": 3,\n \"general_manager_term\": 36,\n \"short_name\": \"а\",\n \"preimusch_pravo_priobreteniya_doli_time_span\": 30,\n \"address\": {\n \"building\": \"20\",\n 
\"index\": 117105,\n \"street_type\": \"ш\",\n \"house\": \"12Г\",\n \"region\": \"Москва\",\n \"okato\": 12122222222,\n \"address_string\": \"г Москва, Варшавское шоссе, д 12Г стр 20\",\n \"building_type\": \"стр\",\n \"street\": \"Варшавское\",\n \"house_type\": \"д\",\n \"ifns\": 7726\n },\n \"perehod_doli_k_naslednikam_soglasie\": True,\n \"necessary_votes_for_general_meeting_decisions\": {\n \"audit_assignment\": 1,\n \"large_deals_approval\": 1,\n \"concern_deals_approval\": 1,\n \"executives_formation\": 1,\n \"other_issues\": 1,\n \"articles_change\": 1,\n \"branch_establishment\": 1,\n \"profit_distribution\": 1,\n \"annual_reports_approval\": 1,\n \"liquidation_committee_assignment\": 1,\n \"auditor_election\": 1,\n \"obligations_emission\": 1,\n \"reorganization_or_liquidation\": 1,\n \"internal_documents_approval\": 1,\n \"company_strategy\": 1\n },\n \"starter_capital\": {\n \"capital_type\": 1,\n \"value\": {\n \"currency\": \"RUB\",\n \"value\": \"10000\"\n }\n },\n \"general_manager_caption\": \"Генеральный директор\",\n \"doc_date\": datetime.now(),\n \"full_name\": \"аи\",\n \"job_code_array\": [\n \"52.48.39\",\n \"50.30\",\n \"50.40\",\n \"51.44.4\"\n ],\n \"board_of_directors\": False,\n \"founders_count\": 2,\n \"board_of_directors_caption\": \"Совет директоров\"\n }\n )\n sqldb.session.add(doc)\n sqldb.session.commit()\n\n doc = BatchDocumentDbObject(\n _owner=self.user,\n document_type=DocumentTypeEnum.DT_DECISION,\n batch=batch,\n data={\n \"job_main_code\": \"50.20\",\n \"use_national_language_company_name\": False,\n \"use_foreign_company_name\": False,\n \"pravo_otchuzhdeniya_type\": 3,\n \"general_manager_term\": 36,\n \"short_name\": \"а\",\n \"preimusch_pravo_priobreteniya_doli_time_span\": 30,\n \"address\": {\n \"building\": \"20\",\n \"index\": 117105,\n \"street_type\": \"ш\",\n \"house\": \"12Г\",\n \"region\": \"Москва\",\n \"okato\": 12122222222,\n \"address_string\": \"г Москва, Варшавское шоссе, д 12Г стр 20\",\n \"building_type\": \"стр\",\n \"street\": \"Варшавское\",\n \"house_type\": \"д\",\n \"ifns\": 7726\n },\n \"starter_capital\": {\n \"capital_type\": 1,\n \"value\": {\n \"currency\": \"RUB\",\n \"value\": \"10000\"\n }\n },\n \"general_manager_caption\": \"Генеральный директор\",\n \"doc_date\": datetime.now(),\n \"full_name\": \"аи\",\n \"job_code_array\": [\n \"52.48.39\",\n \"50.30\",\n \"50.40\",\n \"51.44.4\"\n ],\n \"board_of_directors\": False,\n \"founders_count\": 2,\n \"board_of_directors_caption\": \"Совет директоров\",\n u\"general_manager\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n }\n )\n sqldb.session.add(doc)\n sqldb.session.commit()\n _id = batch.id\n\n booking = IfnsBookingObject(reg_info={\n 'status': 'registered',\n 'reg_date': datetime.now(),\n 'ogrn': \"123456789012345\"\n }, batch_id=_id)\n sqldb.session.add(booking)\n sqldb.session.commit()\n\n new_batch_db_object = DocumentBatchDbObject(\n data=doc_data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': _id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': _id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_GENERAL_MANAGER_ORDER])\n })\n self.assertEqual(result.status_code, 200)\n\n db_batch = 
DocumentBatchDbObject.query.filter_by(id=_id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 3)\n\n @authorized()\n def test_accountant_contract(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_ACCOUNTANT_CONTRACT]\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n })\n sqldb.session.add(founder)\n\n founder_otvetstvennyi = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Нарцисса\",\n \"surname\": u\"Сизова\",\n \"patronymic\": u\"Октаэдровна\",\n \"inn\": \"781108730780\",\n \"sex\": \"female\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Пушкинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n sqldb.session.add(founder_otvetstvennyi)\n\n company_founder = CompanyDbObject(**{\n \"_owner\": self.user,\n \"ogrn\": \"1234567890123\",\n \"inn\": \"781108730780\",\n \"kpp\": \"999999999\",\n \"general_manager_caption\": u\"генеральный директор\",\n \"full_name\": u\"Том и Джери\",\n \"short_name\": u\"ТиД\",\n \"general_manager\": {\n \"_id\": founder.id\n },\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Седова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(company_founder)\n\n doc_data = {\n u\"full_name\": u\"Рога—Копыт'а №3\",\n u\"short_name\": u\"РиК\",\n u\"inn\": \"781108730780\",\n u\"kpp\": \"999999999\",\n u\"has_accountant_contract_order\": True,\n u\"accountant_contract_number\": \"023\",\n u\"general_manager_contract_number\": \"01\",\n u\"general_manager_caption\": u\"директор\",\n u\"general_manager\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n u\"accountant_person\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n u\"founders\": [\n {\n \"founder\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": 12312.22,\n \"share\": 100\n }\n ],\n u\"accountant_salary\": {\n \"currency\": CurrencyEnum.CE_RUS_RUBLES,\n \"value\": \"123123.00\"\n },\n u\"accountant_fixed_working_hours\": True,\n u\"accountant_working_hours\": {\n u\"start_working_hours\": datetime(1900, 1, 1, hour=8),\n u\"finish_working_hours\": datetime(1900, 1, 1, hour=16, minute=10),\n u\"holidays\": [\"tue\", \"fri\"],\n u\"lunch_time\": 10\n },\n u\"accountant_trial_period\": 24,\n u\"accountant_quit_notify_period\": 12,\n u\"accountant_order_number\": \"01\",\n u\"selected_moderator\": {\n \"type\": \"company\",\n \"_id\": company_founder\n },\n u\"accountant_salary_days\": [1, 2, 3, 4, 5],\n u\"address\": {\n # \"district_type\" : u\"р-н\",\n #\"city_type\" : u\"г\",\n \"street_type\": u\"ул\",\n \"index\": 191186,\n \"house\": u\"4\",\n \"region\": u\"Санкт-Петербург\",\n \"flat\": u\"12\",\n #\"building_type\" : u\"к\",\n \"street\": u\"Большая Морская\",\n \"address_string\": u\"г Санкт-Петербург, ул Большая Морская, д 4, кв 12\",\n \"flat_type\": u\"кв\",\n \"house_type\": u\"д\",\n #\"village_type\" : u\"п\",\n \"ifns\": 7841\n },\n u\"address_type\": \"office_address\",\n u\"accountant_has_special_terms\": True,\n u\"accountant_contract_additional_terms\": {\n u\"rights\": [],\n u\"responsibility\": [],\n u\"duties\": [u\"колоть дрова\", u\"молоть муку\"]\n },\n u\"board_of_directors\": True,\n u\"registration_date\": datetime.now(),\n u\"accountant_start_work\": datetime.now(),\n }\n\n with self.app.app_context():\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_FINALISED,\n _documents=[{\n \"status\": \"rendered\",\n \"deleted\": False,\n \"rendered_docs\": [],\n \"document_type\": \"articles\",\n \"data\": {\n \"job_main_code\": \"50.20\",\n \"use_national_language_company_name\": False,\n \"use_foreign_company_name\": False,\n \"pravo_otchuzhdeniya_type\": 3,\n \"general_manager_term\": 36,\n \"short_name\": \"а\",\n \"preimusch_pravo_priobreteniya_doli_time_span\": 30,\n \"address\": {\n \"building\": \"20\",\n \"index\": 117105,\n \"street_type\": \"ш\",\n \"house\": \"12Г\",\n \"region\": \"Москва\",\n 
\"okato\": 12122222222,\n \"address_string\": \"г Москва, Варшавское шоссе, д 12Г стр 20\",\n \"building_type\": \"стр\",\n \"street\": \"Варшавское\",\n \"house_type\": \"д\",\n \"ifns\": 7726\n },\n \"perehod_doli_k_naslednikam_soglasie\": True,\n \"necessary_votes_for_general_meeting_decisions\": {\n \"audit_assignment\": 1,\n \"large_deals_approval\": 1,\n \"concern_deals_approval\": 1,\n \"executives_formation\": 1,\n \"other_issues\": 1,\n \"articles_change\": 1,\n \"branch_establishment\": 1,\n \"profit_distribution\": 1,\n \"annual_reports_approval\": 1,\n \"liquidation_committee_assignment\": 1,\n \"auditor_election\": 1,\n \"obligations_emission\": 1,\n \"reorganization_or_liquidation\": 1,\n \"internal_documents_approval\": 1,\n \"company_strategy\": 1\n },\n \"starter_capital\": {\n \"capital_type\": 1,\n \"value\": {\n \"currency\": \"RUB\",\n \"value\": \"10000\"\n }\n },\n \"general_manager_caption\": \"Генеральный директор\",\n \"doc_date\": datetime.now(),\n \"full_name\": \"аи\",\n \"job_code_array\": [\n \"52.48.39\",\n \"50.30\",\n \"50.40\",\n \"51.44.4\"\n ],\n \"board_of_directors\": False,\n \"founders_count\": 2,\n \"board_of_directors_caption\": \"Совет директоров\",\n\n },\n \"id\": ObjectId(),\n \"creation_date\": datetime.now()\n }],\n data={},\n _owner=self.user,\n result_fields={\n 'ifns_reg_info': {\n 'ogrn': \"1234567890123\"\n }\n }\n )\n _id = sqldb.session.add(batch)\n\n booking = IfnsBookingObject(reg_info={\n 'status': 'registered',\n 'reg_date': datetime.now(),\n 'ogrn': \"123456789012345\"\n }, batch_id=_id)\n sqldb.session.add(booking)\n sqldb.session.commit()\n\n new_batch_db_object = DocumentBatchDbObject(\n data=doc_data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': _id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': _id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_ACCOUNTANT_CONTRACT])\n })\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n print(json.dumps(db_batch.as_dict(), indent=4, sort_keys=True, default=lambda x: unicode(x)))\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 2)\n self.assertTrue(not not db_batch.rendered_docs[0]['file_link'])\n\n @authorized()\n def test_accountant_imposition_order(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_ACCOUNTANT_IMPOSITION_ORDER]\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 40),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 1),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n 
\"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n })\n sqldb.session.add(founder)\n\n founder_otvetstvennyi = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Семен\",\n \"surname\": u\"Семенчук\",\n \"patronymic\": u\"Семейкин\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 1),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Пушкинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n sqldb.session.add(founder_otvetstvennyi)\n\n company_founder = CompanyDbObject(**{\n \"_owner\": self.user,\n \"ogrn\": \"1234567890123\",\n \"inn\": \"781108730780\",\n \"kpp\": \"999999999\",\n \"general_manager_caption\": u\"генеральный директор\",\n \"full_name\": u\"Том и Джери\",\n \"short_name\": u\"ТиД\",\n \"general_manager\": {\n \"_id\": founder.id\n },\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Седова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(company_founder)\n\n doc_data = {\n u\"full_name\": u\"Рога и Копыта\",\n u\"short_name\": u\"РиК\",\n u\"inn\": \"781108730780\",\n u\"kpp\": \"999999999\",\n u\"general_manager_term\": 38,\n u\"has_general_manager_contract\": False,\n # u\"has_general_manager_order\": True,\n # u\"has_accountant_contract_order\": True,\n u\"general_manager_as_accountant\": True,\n u\"general_manager_caption\": u\"директор\",\n u\"general_manager\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n u\"founders\": [\n {\n \"founder\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": 12312.22,\n \"share\": 100\n }\n ],\n # u\"general_manager_salary\" : {\n # \"currency\" : CurrencyEnum.CE_RUS_RUBLES,\n # \"value\" : \"123123.00\"\n # },\n # u\"general_manager_fixed_working_hours\" : True,\n # u\"general_manager_working_hours\" : {\n # u\"start_working_hours\" : datetime.now(),\n # u\"finish_working_hours\" : datetime.now() + timedelta(hours = 8),\n # u\"holidays\" : [\"mon\", \"tue\", \"fri\"],\n # u\"lunch_time\" : 10\n # },\n # u\"general_manager_trial_period\" : 24,\n # u\"general_manager_quit_notify_period\" : 12,\n u\"general_manager_as_accountant_order_number\": \"01К\",\n # u\"general_manager_order_number\": \"01\",\n # u\"selected_moderator\" : {\n # \"type\" : \"company\",\n # \"_id\" : company_founder._id\n # },\n # u\"general_manager_salary_days\" : [1,2,3,4,5],\n u\"address\": {\n #\"district_type\" : u\"р-н\",\n #\"city_type\" : u\"г\",\n \"street_type\": u\"ул\",\n \"index\": 191186,\n \"house\": u\"4\",\n \"region\": u\"Санкт-Петербург\",\n \"flat\": u\"12\",\n #\"building_type\" : u\"к\",\n \"street\": u\"Большая Морская\",\n \"address_string\": u\"г Санкт-Петербург, ул Большая Морская, д 4, кв 12\",\n \"flat_type\": u\"кв\",\n \"house_type\": u\"д\",\n #\"village_type\" : u\"п\",\n \"ifns\": 7841\n },\n u\"address_type\": \"office_address\",\n #u\"general_manager_contract_date\" : datetime.now(),\n # u\"general_manager_contract_additional_terms\" : {\n # u\"rights\" : \"\",\n # u\"responsibility\" : None,\n # u\"duties\" : u\"колоть дрова\"\n # },\n u\"board_of_directors\": True,\n u\"registration_date\": datetime.now()\n }\n\n with self.app.app_context():\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_FINALISED,\n _documents=[{\n \"status\": \"rendered\",\n \"deleted\": False,\n \"rendered_docs\": [],\n \"document_type\": \"articles\",\n \"data\": {\n \"job_main_code\": \"50.20\",\n \"use_national_language_company_name\": False,\n \"use_foreign_company_name\": False,\n \"pravo_otchuzhdeniya_type\": 3,\n \"general_manager_term\": 36,\n \"short_name\": \"а\",\n \"preimusch_pravo_priobreteniya_doli_time_span\": 30,\n \"address\": {\n \"building\": \"20\",\n \"index\": 
117105,\n                            \"street_type\": \"ш\",\n                            \"house\": \"12Г\",\n                            \"region\": \"Москва\",\n                            \"okato\": 12122222222,\n                            \"address_string\": \"г Москва, Варшавское шоссе, д 12Г стр 20\",\n                            \"building_type\": \"стр\",\n                            \"street\": \"Варшавское\",\n                            \"house_type\": \"д\",\n                            \"ifns\": 7726\n                        },\n                        \"perehod_doli_k_naslednikam_soglasie\": True,\n                        \"necessary_votes_for_general_meeting_decisions\": {\n                            \"audit_assignment\": 1,\n                            \"large_deals_approval\": 1,\n                            \"concern_deals_approval\": 1,\n                            \"executives_formation\": 1,\n                            \"other_issues\": 1,\n                            \"articles_change\": 1,\n                            \"branch_establishment\": 1,\n                            \"profit_distribution\": 1,\n                            \"annual_reports_approval\": 1,\n                            \"liquidation_committee_assignment\": 1,\n                            \"auditor_election\": 1,\n                            \"obligations_emission\": 1,\n                            \"reorganization_or_liquidation\": 1,\n                            \"internal_documents_approval\": 1,\n                            \"company_strategy\": 1\n                        },\n                        \"starter_capital\": {\n                            \"capital_type\": 1,\n                            \"value\": {\n                                \"currency\": \"RUB\",\n                                \"value\": \"10000\"\n                            }\n                        },\n                        \"general_manager_caption\": \"Генеральный директор\",\n                        \"doc_date\": datetime.now(),\n                        \"full_name\": \"аи\",\n                        \"job_code_array\": [\n                            \"52.48.39\",\n                            \"50.30\",\n                            \"50.40\",\n                            \"51.44.4\"\n                        ],\n                        \"board_of_directors\": False,\n                        \"founders_count\": 2,\n                        \"board_of_directors_caption\": \"Совет директоров\",\n\n                    },\n                    \"id\": ObjectId(),\n                    \"creation_date\": datetime.now()\n                }],\n                data={},\n                _owner=self.user,\n                result_fields={\n                    'ifns_reg_info': {\n                        'ogrn': \"1234567890123\"\n                    }\n                }\n            )\n            sqldb.session.add(batch)\n            sqldb.session.commit()\n            _id = batch.id\n\n            booking = IfnsBookingObject(reg_info={\n                'status': 'registered',\n                'reg_date': datetime.now(),\n                'ogrn': \"123456789012345\"\n            }, batch_id=_id)\n            sqldb.session.add(booking)\n            sqldb.session.commit()\n\n            new_batch_db_object = DocumentBatchDbObject(\n                data=doc_data,\n                batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n            )\n\n            batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n            batch_json = json.dumps(batch.get_api_structure())\n            result = self.test_client.post('/batch/update/', data={\n                'batch_id': _id,\n                'batch': batch_json\n            })\n            self.assertEqual(result.status_code, 200)\n\n            result = self.test_client.post('/batch/render_document/', data={\n                'batch_id': _id,\n                'document_type': json.dumps([DocumentTypeEnum.DT_ACCOUNTANT_IMPOSITION_ORDER])\n            })\n            self.assertEqual(result.status_code, 200)\n\n            db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n            # print(json.dumps(db_batch.as_dict(), indent=4, sort_keys=True, default=lambda x:unicode(x)))\n            self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n            self.assertEqual(len(db_batch._documents), 2)\n            self.assertTrue(not not db_batch.rendered_docs[0]['file_link'])\n\n    @authorized()\n    def test_rosstat_claim(self):\n        DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_ROSSTAT_CLAIM]\n        founder = PrivatePersonDbObject(**{\n            \"_owner\": self.user,\n            \"name\": u\"Прокл\",\n            \"surname\": u\"Поликарпов\",\n            \"patronymic\": u\"Поликарпович\",\n            \"inn\": \"781108730780\",\n            \"sex\": \"male\",\n            \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n            \"birthplace\": u\"Россия, деревня Гадюкино\",\n            \"passport\": {\n                \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n                \"series\": u\"1123\",\n                \"number\": u\"192837\",\n                \"issue_date\": datetime.now(),\n                \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n                \"depart_code\": u\"111987\"\n            },\n            \"ogrnip\": \"123456789012345\",\n            \"address\": {\n                \"region\": RFRegionsEnum.RFR_SPB,\n                \"index\": 198209,\n                \"district_type\": 
DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n })\n sqldb.session.add(founder)\n\n founder_otvetstvennyi = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Семен\",\n \"surname\": u\"Семенчук\",\n \"patronymic\": u\"Семейкин\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Пушкинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n sqldb.session.add(founder_otvetstvennyi)\n\n company_founder = CompanyDbObject(**{\n \"_owner\": self.user,\n \"ogrn\": \"1234567890123\",\n \"inn\": \"781108730780\",\n \"kpp\": \"999999999\",\n \"general_manager_caption\": u\"генеральный директор\",\n \"full_name\": u\"Том и Джери\",\n \"short_name\": u\"ТиД\",\n \"general_manager\": {\n \"_id\": founder.id\n },\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Седова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(company_founder)\n\n doc_data = {\n u\"full_name\": u\"Рога и Копыта\",\n u\"short_name\": u\"РиК\",\n u\"inn\": \"781108730780\",\n u\"kpp\": \"999999999\",\n u\"general_manager_term\": 38,\n # u\"has_general_manager_contract\" : True,\n # u\"has_general_manager_order\": True,\n # u\"has_accountant_contract_order\": True,\n # u\"general_manager_as_accountant\": True,\n u\"general_manager_caption\": u\"директор\",\n u\"general_manager\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n u\"founders\": [\n {\n \"founder\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": 12312.22,\n \"share\": 100\n }\n ],\n u\"general_manager_salary\": {\n \"currency\": CurrencyEnum.CE_RUS_RUBLES,\n \"value\": \"123123.00\"\n },\n u\"general_manager_fixed_working_hours\": True,\n u\"general_manager_working_hours\": {\n u\"start_working_hours\": datetime.now(),\n u\"finish_working_hours\": datetime.now() + timedelta(hours=8),\n u\"holidays\": [\"mon\", \"tue\", \"fri\"],\n u\"lunch_time\": 10\n },\n u\"general_manager_trial_period\": 24,\n u\"general_manager_quit_notify_period\": 12,\n # u\"general_manager_contract_number\" : \"2\",\n u\"general_manager_order_number\": \"01\",\n u\"selected_moderator\": {\n \"type\": \"company\",\n \"_id\": company_founder\n },\n u\"general_manager_salary_days\": [1, 2, 3, 4, 5],\n u\"address\": {\n # \"district_type\" : u\"р-н\",\n #\"city_type\" : u\"г\",\n \"street_type\": u\"ул\",\n \"index\": 191186,\n \"house\": u\"4\",\n \"region\": u\"Санкт-Петербург\",\n \"flat\": u\"12\",\n #\"building_type\" : u\"к\",\n \"street\": u\"Большая Морская\",\n \"address_string\": u\"г Санкт-Петербург, ул Большая Морская, д 4, кв 12\",\n \"flat_type\": u\"кв\",\n \"house_type\": u\"д\",\n #\"village_type\" : u\"п\",\n \"ifns\": 7841\n },\n u\"address_type\": \"office_address\",\n # u\"general_manager_contract_date\" : datetime.now(),\n # u\"general_manager_contract_additional_terms\" : {\n # u\"rights\" : \"\",\n # u\"responsibility\" : None,\n # u\"duties\" : u\"колоть дрова\"\n # },\n u\"board_of_directors\": True,\n u\"registration_date\": datetime.now()\n }\n\n with self.app.app_context():\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_FINALISED,\n _documents=[{\n \"status\": \"rendered\",\n \"deleted\": False,\n \"rendered_docs\": [],\n \"document_type\": \"articles\",\n \"data\": {\n \"job_main_code\": \"50.20\",\n \"use_national_language_company_name\": False,\n \"use_foreign_company_name\": False,\n \"pravo_otchuzhdeniya_type\": 3,\n \"general_manager_term\": 36,\n \"short_name\": \"а\",\n \"preimusch_pravo_priobreteniya_doli_time_span\": 30,\n \"address\": {\n \"building\": \"20\",\n \"index\": 117105,\n \"street_type\": \"ш\",\n \"house\": \"12Г\",\n 
\"region\": \"Москва\",\n \"okato\": 12122222222,\n \"address_string\": \"г Москва, Варшавское шоссе, д 12Г стр 20\",\n \"building_type\": \"стр\",\n \"street\": \"Варшавское\",\n \"house_type\": \"д\",\n \"ifns\": 7726\n },\n \"perehod_doli_k_naslednikam_soglasie\": True,\n \"necessary_votes_for_general_meeting_decisions\": {\n \"audit_assignment\": 1,\n \"large_deals_approval\": 1,\n \"concern_deals_approval\": 1,\n \"executives_formation\": 1,\n \"other_issues\": 1,\n \"articles_change\": 1,\n \"branch_establishment\": 1,\n \"profit_distribution\": 1,\n \"annual_reports_approval\": 1,\n \"liquidation_committee_assignment\": 1,\n \"auditor_election\": 1,\n \"obligations_emission\": 1,\n \"reorganization_or_liquidation\": 1,\n \"internal_documents_approval\": 1,\n \"company_strategy\": 1\n },\n \"starter_capital\": {\n \"capital_type\": 1,\n \"value\": {\n \"currency\": \"RUB\",\n \"value\": \"10000\"\n }\n },\n \"general_manager_caption\": \"Генеральный директор\",\n \"doc_date\": datetime.now(),\n \"full_name\": \"аи\",\n \"job_code_array\": [\n \"52.48.39\",\n \"50.30\",\n \"50.40\",\n \"51.44.4\"\n ],\n \"board_of_directors\": False,\n \"founders_count\": 2,\n \"board_of_directors_caption\": \"Совет директоров\",\n\n },\n \"id\": ObjectId(),\n \"creation_date\": datetime.now()\n }],\n data={},\n _owner=self.user,\n result_fields={\n 'ifns_reg_info': {\n 'ogrn': \"1234567890123\"\n }\n }\n )\n _id = sqldb.session.add(batch)\n\n booking = IfnsBookingObject(reg_info={\n 'status': 'registered',\n 'reg_date': datetime.now(),\n 'ogrn': \"123456789012345\"\n }, batch_id=_id)\n sqldb.session.add(booking)\n sqldb.session.commit()\n\n new_batch_db_object = DocumentBatchDbObject(\n data=doc_data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': _id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': _id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_ROSSTAT_CLAIM])\n })\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n # print(json.dumps(db_batch.as_dict(), indent=4, sort_keys=True, default=lambda x:unicode(x)))\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 2)\n self.assertTrue(not not db_batch.rendered_docs[0]['file_link'])\n\n @authorized()\n def test_fss_claim(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_FSS_CLAIM]\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": 
CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n })\n sqldb.session.add(founder)\n\n founder_otvetstvennyi = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Семен\",\n \"surname\": u\"Семенчук\",\n \"patronymic\": u\"Семейкин\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Пушкинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n sqldb.session.add(founder_otvetstvennyi)\n\n company_founder = CompanyDbObject(**{\n \"_owner\": self.user,\n \"ogrn\": \"1234567890123\",\n \"inn\": \"781108730780\",\n \"kpp\": \"999999999\",\n \"general_manager_caption\": u\"генеральный директор\",\n \"full_name\": u\"Том и Джери\",\n \"short_name\": u\"ТиД\",\n \"general_manager\": {\n \"_id\": founder.id\n },\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Седова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(company_founder)\n\n doc_data = {\n u\"full_name\": u\"Рога и Копыта\",\n u\"short_name\": u\"РиК\",\n u\"inn\": \"781108730780\",\n u\"kpp\": \"999999999\",\n u\"general_manager_term\": 38,\n # u\"has_general_manager_contract\" : True,\n # u\"has_general_manager_order\": True,\n # u\"has_accountant_contract_order\": True,\n # u\"general_manager_as_accountant\": True,\n u\"general_manager_caption\": u\"директор\",\n u\"general_manager\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n u\"founders\": [\n {\n \"founder\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": 12312.22,\n \"share\": 100\n }\n ],\n u\"general_manager_salary\": {\n \"currency\": CurrencyEnum.CE_RUS_RUBLES,\n \"value\": \"123123.00\"\n },\n u\"general_manager_fixed_working_hours\": True,\n u\"general_manager_working_hours\": {\n u\"start_working_hours\": datetime.now(),\n u\"finish_working_hours\": datetime.now() + timedelta(hours=8),\n u\"holidays\": [\"mon\", \"tue\", \"fri\"],\n u\"lunch_time\": 10\n },\n u\"general_manager_trial_period\": 24,\n u\"general_manager_quit_notify_period\": 12,\n # u\"general_manager_contract_number\" : \"2\",\n u\"general_manager_order_number\": \"01\",\n u\"selected_moderator\": {\n \"type\": \"company\",\n \"_id\": company_founder\n },\n u\"general_manager_salary_days\": [1, 2, 3, 4, 5],\n u\"address\": {\n # \"district_type\" : u\"р-н\",\n #\"city_type\" : u\"г\",\n \"street_type\": u\"ул\",\n \"index\": 191186,\n \"house\": u\"4\",\n \"region\": u\"Санкт-Петербург\",\n \"flat\": u\"12\",\n #\"building_type\" : u\"к\",\n \"street\": u\"Большая Морская\",\n \"address_string\": u\"г Санкт-Петербург, ул Большая Морская, д 4, кв 12\",\n \"flat_type\": u\"кв\",\n \"house_type\": u\"д\",\n #\"village_type\" : u\"п\",\n \"ifns\": 7841\n },\n u\"address_type\": \"office_address\",\n # u\"general_manager_contract_date\" : datetime.now(),\n # u\"general_manager_contract_additional_terms\" : {\n # u\"rights\" : \"\",\n # u\"responsibility\" : None,\n # u\"duties\" : u\"колоть дрова\"\n # },\n u\"board_of_directors\": True,\n u\"registration_date\": datetime.now()\n }\n\n with self.app.app_context():\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_FINALISED,\n _documents=[{\n \"status\": \"rendered\",\n \"deleted\": False,\n \"rendered_docs\": [],\n \"document_type\": \"articles\",\n \"data\": {\n \"job_main_code\": \"50.20\",\n \"use_national_language_company_name\": False,\n \"use_foreign_company_name\": False,\n \"pravo_otchuzhdeniya_type\": 3,\n \"general_manager_term\": 36,\n \"short_name\": \"а\",\n \"preimusch_pravo_priobreteniya_doli_time_span\": 30,\n \"address\": {\n \"building\": \"20\",\n \"index\": 117105,\n \"street_type\": \"ш\",\n \"house\": \"12Г\",\n 
\"region\": \"Москва\",\n \"okato\": 12122222222,\n \"address_string\": \"г Москва, Варшавское шоссе, д 12Г стр 20\",\n \"building_type\": \"стр\",\n \"street\": \"Варшавское\",\n \"house_type\": \"д\",\n \"ifns\": 7726\n },\n \"perehod_doli_k_naslednikam_soglasie\": True,\n \"necessary_votes_for_general_meeting_decisions\": {\n \"audit_assignment\": 1,\n \"large_deals_approval\": 1,\n \"concern_deals_approval\": 1,\n \"executives_formation\": 1,\n \"other_issues\": 1,\n \"articles_change\": 1,\n \"branch_establishment\": 1,\n \"profit_distribution\": 1,\n \"annual_reports_approval\": 1,\n \"liquidation_committee_assignment\": 1,\n \"auditor_election\": 1,\n \"obligations_emission\": 1,\n \"reorganization_or_liquidation\": 1,\n \"internal_documents_approval\": 1,\n \"company_strategy\": 1\n },\n \"starter_capital\": {\n \"capital_type\": 1,\n \"value\": {\n \"currency\": \"RUB\",\n \"value\": \"10000\"\n }\n },\n \"general_manager_caption\": \"Генеральный директор\",\n \"doc_date\": datetime.now(),\n \"full_name\": \"аи\",\n \"job_code_array\": [\n \"52.48.39\",\n \"50.30\",\n \"50.40\",\n \"51.44.4\"\n ],\n \"board_of_directors\": False,\n \"founders_count\": 2,\n \"board_of_directors_caption\": \"Совет директоров\",\n\n },\n \"id\": ObjectId(),\n \"creation_date\": datetime.now()\n }],\n data={},\n _owner=self.user,\n result_fields={\n 'ifns_reg_info': {\n 'ogrn': \"1234567890123\"\n }\n }\n )\n _id = sqldb.session.add(batch)\n\n booking = IfnsBookingObject(reg_info={\n 'status': 'registered',\n 'reg_date': datetime.now(),\n 'ogrn': \"123456789012345\"\n }, batch_id=_id)\n sqldb.session.add(booking)\n sqldb.session.commit()\n\n new_batch_db_object = DocumentBatchDbObject(\n data=doc_data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': _id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': _id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_FSS_CLAIM])\n })\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n # print(json.dumps(db_batch.as_dict(), indent=4, sort_keys=True, default=lambda x:unicode(x)))\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 2)\n self.assertTrue(not not db_batch.rendered_docs[0]['file_link'])\n\n @authorized()\n def test_pfr_claim(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_PFR_CLAIM]\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": 
CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n })\n sqldb.session.add(founder)\n\n founder_otvetstvennyi = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Семен\",\n \"surname\": u\"Семенчук\",\n \"patronymic\": u\"Семейкин\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Пушкинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n sqldb.session.add(founder_otvetstvennyi)\n\n company_founder = CompanyDbObject(**{\n \"_owner\": self.user,\n \"ogrn\": \"1234567890123\",\n \"inn\": \"781108730780\",\n \"kpp\": \"999999999\",\n \"general_manager_caption\": u\"генеральный директор\",\n \"full_name\": u\"Том и Джери\",\n \"short_name\": u\"ТиД\",\n \"general_manager\": {\n \"_id\": founder.id\n },\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Седова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(company_founder)\n\n doc_data = {\n u\"full_name\": u\"Рога и Копыта\",\n u\"short_name\": u\"РиК\",\n u\"inn\": \"781108730780\",\n u\"kpp\": \"999999999\",\n u\"general_manager_term\": 38,\n # u\"has_general_manager_contract\" : True,\n # u\"has_general_manager_order\": True,\n # u\"has_accountant_contract_order\": True,\n # u\"general_manager_as_accountant\": True,\n u\"general_manager_caption\": u\"директор\",\n u\"general_manager\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n u\"founders\": [\n {\n \"founder\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": 12312.22,\n \"share\": 100\n }\n ],\n # u\"general_manager_salary\" : {\n # \"currency\" : CurrencyEnum.CE_RUS_RUBLES,\n # \"value\" : \"123123.00\"\n # },\n # u\"general_manager_fixed_working_hours\" : True,\n # u\"general_manager_working_hours\" : {\n # u\"start_working_hours\" : datetime.now(),\n # u\"finish_working_hours\" : datetime.now() + timedelta(hours = 8),\n # u\"holidays\" : [\"mon\", \"tue\", \"fri\"],\n # u\"lunch_time\" : 10\n # },\n # u\"general_manager_trial_period\" : 24,\n # u\"general_manager_quit_notify_period\" : 12,\n # u\"general_manager_contract_number\" : \"2\",\n # u\"general_manager_order_number\": \"01\",\n u\"selected_moderator\": {\n \"type\": \"company\",\n \"_id\": company_founder\n },\n # u\"general_manager_salary_days\" : [1,2,3,4,5],\n u\"address\": {\n #\"district_type\" : u\"р-н\",\n #\"city_type\" : u\"г\",\n \"street_type\": u\"ул\",\n \"index\": 191186,\n \"house\": u\"4\",\n \"region\": u\"Санкт-Петербург\",\n \"flat\": u\"12\",\n #\"building_type\" : u\"к\",\n \"street\": u\"Большая Морская\",\n \"address_string\": u\"г Санкт-Петербург, ул Большая Морская, д 4, кв 12\",\n \"flat_type\": u\"кв\",\n \"house_type\": u\"д\",\n #\"village_type\" : u\"п\",\n \"ifns\": 7841\n },\n u\"address_type\": \"office_address\",\n # u\"general_manager_contract_date\" : datetime.now(),\n # u\"general_manager_contract_additional_terms\" : {\n # u\"rights\" : \"\",\n # u\"responsibility\" : None,\n # u\"duties\" : u\"колоть дрова\"\n # },\n u\"board_of_directors\": True,\n u\"registration_date\": datetime.now()\n }\n\n with self.app.app_context():\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_FINALISED,\n _documents=[{\n \"status\": \"rendered\",\n \"deleted\": False,\n \"rendered_docs\": [],\n \"document_type\": \"articles\",\n \"data\": {\n \"job_main_code\": \"50.20\",\n \"use_national_language_company_name\": False,\n \"use_foreign_company_name\": False,\n \"pravo_otchuzhdeniya_type\": 3,\n \"general_manager_term\": 36,\n \"short_name\": \"а\",\n \"preimusch_pravo_priobreteniya_doli_time_span\": 30,\n \"address\": {\n \"building\": \"20\",\n \"index\": 117105,\n 
\"street_type\": \"ш\",\n \"house\": \"12Г\",\n \"region\": \"Москва\",\n \"okato\": 12122222222,\n \"address_string\": \"г Москва, Варшавское шоссе, д 12Г стр 20\",\n \"building_type\": \"стр\",\n \"street\": \"Варшавское\",\n \"house_type\": \"д\",\n \"ifns\": 7726\n },\n \"perehod_doli_k_naslednikam_soglasie\": True,\n \"necessary_votes_for_general_meeting_decisions\": {\n \"audit_assignment\": 1,\n \"large_deals_approval\": 1,\n \"concern_deals_approval\": 1,\n \"executives_formation\": 1,\n \"other_issues\": 1,\n \"articles_change\": 1,\n \"branch_establishment\": 1,\n \"profit_distribution\": 1,\n \"annual_reports_approval\": 1,\n \"liquidation_committee_assignment\": 1,\n \"auditor_election\": 1,\n \"obligations_emission\": 1,\n \"reorganization_or_liquidation\": 1,\n \"internal_documents_approval\": 1,\n \"company_strategy\": 1\n },\n \"starter_capital\": {\n \"capital_type\": 1,\n \"value\": {\n \"currency\": \"RUB\",\n \"value\": \"10000\"\n }\n },\n \"general_manager_caption\": \"Генеральный директор\",\n \"doc_date\": datetime.now(),\n \"full_name\": \"аи\",\n \"job_code_array\": [\n \"52.48.39\",\n \"50.30\",\n \"50.40\",\n \"51.44.4\"\n ],\n \"board_of_directors\": False,\n \"founders_count\": 2,\n \"board_of_directors_caption\": \"Совет директоров\",\n\n },\n \"id\": ObjectId(),\n \"creation_date\": datetime.now()\n }],\n data={},\n _owner=self.user,\n result_fields={\n 'ifns_reg_info': {\n 'ogrn': \"1234567890123\"\n }\n }\n )\n _id = sqldb.session.add(batch)\n\n booking = IfnsBookingObject(reg_info={\n 'status': 'registered',\n 'reg_date': datetime.now(),\n 'ogrn': \"123456789012345\"\n }, batch_id=_id)\n sqldb.session.add(booking)\n sqldb.session.commit()\n\n new_batch_db_object = DocumentBatchDbObject(\n data=doc_data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': _id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': _id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_PFR_CLAIM])\n })\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n # print(json.dumps(db_batch.as_dict(), indent=4, sort_keys=True, default=lambda x:unicode(x)))\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 2)\n self.assertTrue(not not db_batch.rendered_docs[0]['file_link'])\n\n @authorized()\n def test_accountant_order(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_ACCOUNTANT_ORDER]\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": 
DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n })\n sqldb.session.add(founder)\n\n founder_otvetstvennyi = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Семен\",\n \"surname\": u\"Семенчук\",\n \"patronymic\": u\"Семейкин\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Пушкинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n sqldb.session.add(founder_otvetstvennyi)\n\n company_founder = CompanyDbObject(**{\n \"_owner\": self.user,\n \"ogrn\": \"1234567890123\",\n \"inn\": \"781108730780\",\n \"kpp\": \"999999999\",\n \"general_manager_caption\": u\"генеральный директор\",\n \"full_name\": u\"Том и Джери\",\n \"short_name\": u\"ТиД\",\n \"general_manager\": {\n \"_id\": founder.id\n },\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Седова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(company_founder)\n\n doc_data = {\n u\"full_name\": u\"Пни и Кочки\",\n u\"short_name\": u\"ПиК\",\n u\"has_accountant_contract_order\": True,\n u\"general_manager_caption\": u\"повелитель\",\n u\"accountant_person\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n u\"accountant_salary\": {\n \"currency\": CurrencyEnum.CE_RUS_RUBLES,\n \"value\": \"123123.00\"\n },\n u\"general_manager_fixed_working_hours\": True,\n u\"general_manager_working_hours\": {\n u\"start_working_hours\": datetime(1900, 1, 1, hour=8),\n u\"finish_working_hours\": datetime(1900, 1, 1, hour=19),\n u\"holidays\": [\"mon\", \"tue\", \"fri\"],\n u\"lunch_time\": 10\n },\n u\"accountant_trial_period\": 24,\n u\"accountant_contract_number\": \"c3\",\n u\"accountant_order_number\": \"o4\",\n u\"accountant_start_work\": datetime.now()\n }\n\n with self.app.app_context():\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_FINALISED,\n rendered_docs=[{\n 'document_type': DocumentTypeEnum.DT_ARTICLES,\n 'file_link': 'somefile',\n 'caption': u\"Уставчег\",\n 'file_id': ObjectId(),\n 'document_id': ObjectId(\"54b91fe7a726163324353311\")\n }],\n _documents=[\n {\n \"status\": \"rendered\",\n \"deleted\": False,\n \"rendered_docs\": [],\n \"creation_date\": datetime.now(),\n \"document_type\": \"articles\",\n \"data\": {\n \"job_main_code\": \"50.20\",\n \"use_national_language_company_name\": False,\n \"use_foreign_company_name\": False,\n \"pravo_otchuzhdeniya_type\": 3,\n \"general_manager_term\": 36,\n \"short_name\": \"а\",\n \"preimusch_pravo_priobreteniya_doli_time_span\": 30,\n \"address\": {\n \"building\": \"20\",\n \"index\": 117105,\n \"street_type\": \"ш\",\n \"house\": \"12Г\",\n \"region\": \"Москва\",\n \"okato\": 12122222222,\n \"address_string\": \"г Москва, Варшавское шоссе, д 12Г стр 20\",\n \"building_type\": \"стр\",\n \"street\": \"Варшавское\",\n \"house_type\": \"д\",\n \"ifns\": 7726\n },\n \"perehod_doli_k_naslednikam_soglasie\": True,\n \"necessary_votes_for_general_meeting_decisions\": {\n \"audit_assignment\": 1,\n \"large_deals_approval\": 1,\n \"concern_deals_approval\": 1,\n \"executives_formation\": 1,\n \"other_issues\": 1,\n \"articles_change\": 1,\n \"branch_establishment\": 1,\n \"profit_distribution\": 1,\n \"annual_reports_approval\": 1,\n \"liquidation_committee_assignment\": 1,\n \"auditor_election\": 1,\n \"obligations_emission\": 1,\n \"reorganization_or_liquidation\": 1,\n \"internal_documents_approval\": 1,\n \"company_strategy\": 1\n },\n \"starter_capital\": {\n \"capital_type\": 1,\n \"value\": {\n \"currency\": \"RUB\",\n \"value\": \"10000\"\n }\n },\n \"general_manager_caption\": \"Генеральный директор\",\n \"doc_date\": datetime.now(),\n \"full_name\": \"аи\",\n \"job_code_array\": [\n 
\"52.48.39\",\n \"50.30\",\n \"50.40\",\n \"51.44.4\"\n ],\n \"board_of_directors\": False,\n \"founders_count\": 2,\n \"board_of_directors_caption\": \"Совет директоров\"\n },\n \"id\": ObjectId(\"54b91fe7a726163324353311\")\n }\n ],\n data={},\n _owner=self.user\n )\n _id = sqldb.session.add(batch)\n\n booking = IfnsBookingObject(reg_info={\n 'status': 'registered',\n 'reg_date': datetime.now(),\n 'ogrn': \"123456789012345\"\n }, batch_id=_id)\n sqldb.session.add(booking)\n sqldb.session.commit()\n\n new_batch_db_object = DocumentBatchDbObject(\n data=doc_data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': _id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': _id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_ACCOUNTANT_ORDER])\n })\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 2)\n self.assertTrue(not not db_batch.rendered_docs[0]['file_link'])\n\n\n @authorized()\n def test_founders_list(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_FOUNDERS_LIST]\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"ifns\": 7805\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n sqldb.session.add(founder)\n\n founder_otvetstvennyi = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Семен\",\n \"surname\": u\"Семенчук\",\n \"patronymic\": u\"Семейкин\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Пушкинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"ifns\": 7805\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n })\n sqldb.session.add(founder_otvetstvennyi)\n\n company_founder = CompanyDbObject(**{\n \"_owner\": self.user.id,\n \"ogrn\": \"1095543023135\",\n \"inn\": \"781108730780\",\n \"kpp\": \"999999999\",\n \"general_manager_caption\": u\"генеральный директор\",\n \"full_name\": u\"Том и Джери\",\n \"short_name\": u\"ТиД\",\n \"general_manager\": {\n \"_id\": founder.id\n },\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Седова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n \"ifns\": 7805\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(company_founder)\n\n doc_data = {\n u\"full_name\": u\"Пни и Кочки\",\n u\"short_name\": u\"ПиК\",\n u\"inn\": \"781108730780\",\n u\"kpp\": \"999999999\",\n u\"general_manager_term\": 38,\n u\"has_general_manager_contract\": False,\n u\"general_manager_caption\": u\"повелитель\",\n u\"general_manager\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n u\"founders\": [\n {\n \"founder\": {\n \"_id\": company_founder.id,\n \"type\": \"company\"\n },\n \"nominal_capital\": 12312.22,\n \"share\": 100\n }\n ],\n \"starter_capital\": {\n \"currency\": \"rub\",\n \"value\": \"12312.234234\"\n },\n u\"share_type\": \"percent\",\n # u\"general_manager_salary\" : {\n # \"currency\" : CurrencyEnum.CE_RUS_RUBLES,\n # \"value\" : \"123123.00\"\n # },\n # u\"general_manager_fixed_working_hours\" : True,\n # u\"general_manager_working_hours\" : {\n # u\"start_working_hours\" : datetime(1900, 1, 1, hour=8),\n # u\"finish_working_hours\" : datetime(1900, 1, 1, hour=19),\n # u\"holidays\" : [\"mon\", \"tue\", \"fri\"],\n # u\"lunch_time\" : 10\n # },\n # u\"general_manager_trial_period\" : 24,\n # u\"general_manager_quit_notify_period\" : 12,\n # u\"general_manager_contract_number\" : \"2\",\n u\"selected_moderator\": {\n \"type\": \"company\",\n \"_id\": company_founder\n },\n 
u\"general_manager_salary_days\": [1, 2, 3, 4, 5],\n u\"address\": {\n \"street_type\": u\"ул\",\n \"index\": 191186,\n \"house\": u\"4\",\n \"region\": u\"Санкт-Петербург\",\n \"flat\": u\"12\",\n \"street\": u\"Большая Морская\",\n \"address_string\": u\"г Санкт-Петербург, ул Большая Морская, д 4, кв 12\",\n \"flat_type\": u\"кв\",\n \"house_type\": u\"д\",\n \"village_type\": u\"п\",\n \"ifns\": 7841\n },\n u\"address_type\": \"general_manager_registration_address\",\n u\"general_manager_contract_date\": datetime.now(),\n # u\"general_manager_contract_additional_terms\" : {\n # u\"rights\" : \"\",\n # u\"responsibility\" : None,\n # u\"duties\" : u\"колоть дрова\"\n # },\n u\"board_of_directors\": True,\n u\"registration_date\": datetime.now()\n }\n\n with self.app.app_context():\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_FINALISED,\n rendered_docs=[{\n 'document_type': DocumentTypeEnum.DT_ARTICLES,\n 'file_link': 'somefile',\n 'caption': u\"Уставчег\",\n 'file_id': ObjectId(),\n 'document_id': ObjectId(\"54b91fe7a726163324353311\")\n }],\n _documents=[\n {\n \"status\": \"rendered\",\n \"deleted\": False,\n \"rendered_docs\": [],\n \"creation_date\": datetime.now(),\n \"document_type\": \"articles\",\n \"data\": {\n \"job_main_code\": \"50.20\",\n \"use_national_language_company_name\": False,\n \"use_foreign_company_name\": False,\n \"pravo_otchuzhdeniya_type\": 3,\n \"general_manager_term\": 36,\n \"short_name\": \"а\",\n \"preimusch_pravo_priobreteniya_doli_time_span\": 30,\n \"address\": {\n \"building\": \"20\",\n \"index\": 117105,\n \"street_type\": \"ш\",\n \"house\": \"12Г\",\n \"region\": \"Москва\",\n \"okato\": 12122222222,\n \"address_string\": \"г Москва, Варшавское шоссе, д 12Г стр 20\",\n \"building_type\": \"стр\",\n \"street\": \"Варшавское\",\n \"house_type\": \"д\",\n \"ifns\": 7726\n },\n \"perehod_doli_k_naslednikam_soglasie\": True,\n \"necessary_votes_for_general_meeting_decisions\": {\n \"audit_assignment\": 1,\n \"large_deals_approval\": 1,\n \"concern_deals_approval\": 1,\n \"executives_formation\": 1,\n \"other_issues\": 1,\n \"articles_change\": 1,\n \"branch_establishment\": 1,\n \"profit_distribution\": 1,\n \"annual_reports_approval\": 1,\n \"liquidation_committee_assignment\": 1,\n \"auditor_election\": 1,\n \"obligations_emission\": 1,\n \"reorganization_or_liquidation\": 1,\n \"internal_documents_approval\": 1,\n \"company_strategy\": 1\n },\n \"starter_capital\": {\n \"capital_type\": 1,\n \"value\": {\n \"currency\": \"RUB\",\n \"value\": \"10000\"\n }\n },\n \"general_manager_caption\": \"Генеральный директор\",\n \"doc_date\": datetime.now(),\n \"full_name\": \"аи\",\n \"job_code_array\": [\n \"52.48.39\",\n \"50.30\",\n \"50.40\",\n \"51.44.4\"\n ],\n \"board_of_directors\": False,\n \"founders_count\": 2,\n \"board_of_directors_caption\": \"Совет директоров\"\n },\n \"id\": ObjectId(\"54b91fe7a726163324353311\")\n }\n ],\n data={},\n _owner=self.user,\n result_fields={\n 'ifns_reg_info': {\n 'ogrn': \"1234567890123\"\n }\n }\n )\n _id = sqldb.session.add(batch)\n\n booking = IfnsBookingObject(reg_info={\n 'status': 'registered',\n 'reg_date': datetime.now(),\n 'ogrn': \"123456789012345\"\n }, batch_id=_id)\n sqldb.session.add(booking)\n sqldb.session.commit()\n\n new_batch_db_object = DocumentBatchDbObject(\n data=doc_data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = 
json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': _id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': _id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_FOUNDERS_LIST])\n })\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 2)\n self.assertTrue(not not db_batch.rendered_docs[0]['file_link'])\n\n\n @authorized()\n def test_company_details(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_COMPANY_DETAILS]\n founder = self.create_person(self.user, name=u\"Прокл\", surname=u\"Поликарпов\", patronymic=u\"Поликарпович\")\n # founder = PrivatePersonDbObject(**{\n # \"_owner\": self.user,\n # \"name\": u\"Прокл\",\n # \"surname\": u\"Поликарпов\",\n # \"patronymic\": u\"Поликарпович\",\n # \"inn\": \"781108730780\",\n # \"sex\": \"male\",\n # \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n # \"birthplace\": u\"Россия, деревня Гадюкино\",\n # \"passport\": {\n # \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n # \"series\": u\"1123\",\n # \"number\": u\"192837\",\n # \"issue_date\": datetime.now(),\n # \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n # \"depart_code\": u\"111987\"\n # },\n # \"ogrnip\": \"123456789012345\",\n # \"address\": {\n # \"region\": RFRegionsEnum.RFR_SPB,\n # \"index\": 198209,\n # \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n # \"district\": u\"Гатчинский\",\n # \"city_type\": CityTypeEnum.CIT_CITY,\n # \"city\": u\"Гадюкино\",\n # \"village_type\": VillageTypeEnum.VIT_HUTOR,\n # \"village\": u\"близ Диканьки\",\n # \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n # \"street\": u\"Мотоциклистов\",\n # \"house_type\": HouseTypeEnum.HOT_HOUSE,\n # \"house\": \"4\",\n # \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n # \"building\": \"2\",\n # \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n # \"flat\": \"705\",\n # },\n # \"caption\": u\"Сантехник\",\n # \"phone\": \"+79210001122\",\n # \"email\": \"[email protected]\",\n # \"living_country_code\": 3,\n # \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n # })\n # sqldb.session.add(founder)\n\n # founder_otvetstvennyi = PrivatePersonDbObject(**{\n # \"_owner\": self.user,\n # \"name\": u\"Семен\",\n # \"surname\": u\"Семенчук\",\n # \"patronymic\": u\"Семейкин\",\n # \"inn\": \"781108730780\",\n # \"sex\": \"male\",\n # \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n # \"birthplace\": u\"Россия, деревня Гадюкино\",\n # \"passport\": {\n # \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n # \"series\": u\"1123\",\n # \"number\": u\"192837\",\n # \"issue_date\": datetime.now(),\n # \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n # \"depart_code\": u\"111987\"\n # },\n # \"ogrnip\": \"123456789012345\",\n # \"address\": {\n # \"region\": RFRegionsEnum.RFR_SPB,\n # \"index\": 198209,\n # \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n # \"district\": u\"Пушкинский\",\n # \"city_type\": CityTypeEnum.CIT_CITY,\n # \"city\": u\"Гадюкино\",\n # \"village_type\": VillageTypeEnum.VIT_HUTOR,\n # \"village\": u\"близ Диканьки\",\n # \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n # \"street\": u\"Мотоциклистов\",\n # \"house_type\": HouseTypeEnum.HOT_HOUSE,\n # \"house\": \"4\",\n # \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n # \"building\": \"2\",\n # \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n # \"flat\": \"705\",\n # },\n # \"caption\": u\"Сантехник\",\n # \"phone\": \"+79210001122\",\n # \"email\": \"[email protected]\",\n # \"living_country_code\": 3,\n # \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n # })\n # sqldb.session.add(founder_otvetstvennyi)\n\n founder_otvetstvennyi = self.create_person(self.user, name=u\"Семен\", surname=u\"Семенчук\", patronymic=u\"Семейкин\")\n\n company_founder = CompanyDbObject(**{\n \"_owner\": self.user,\n \"ogrn\": \"1234567890123\",\n \"inn\": \"781108730780\",\n \"kpp\": \"999999999\",\n \"general_manager_caption\": u\"генеральный директор\",\n \"full_name\": u\"Том и Джери\",\n \"short_name\": u\"ТиД\",\n \"general_manager\": {\n \"_id\": founder.id\n },\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Седова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(company_founder)\n sqldb.session.commit()\n\n self.db['bik_catalog'].insert(\n {'name': 'Банк', 'bik': '000000000', 'address': 'Питер', 'kor_account': '12345678901234567890'})\n\n doc_data = {\n u\"full_name\": u\"ТестРегистратионФееИнтернетБанк2ПриватеФоюндерс РеспонсиблеПерсонОбтаин-РеспонсиблеПерсонОбтаин\",\n u\"short_name\": u\"ТестРегистратионФееИнтернетБанк2ПриватеФоюндерс РеспонсиблеПерсонОбтаин-РеспонсиблеПерсонОбтаин\",\n u\"inn\": \"781108730780\",\n u\"kpp\": \"999999999\",\n u\"bank_bik\": \"000000000\",\n u\"bank_account\": \"10101810858050000764\",\n u\"general_manager_term\": 38,\n u\"has_general_manager_contract\": True,\n u\"general_manager_caption\": u\"повелитель\",\n u\"general_manager\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n u\"founders\": [\n {\n \"founder\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": 12312.22,\n \"share\": 100\n }\n ],\n u\"general_manager_salary\": {\n \"currency\": CurrencyEnum.CE_RUS_RUBLES,\n \"value\": \"123123.00\"\n },\n u\"general_manager_fixed_working_hours\": True,\n 
u\"general_manager_working_hours\": {\n u\"start_working_hours\": datetime(1900, 1, 1, hour=8),\n u\"finish_working_hours\": datetime(1900, 1, 1, hour=19),\n u\"holidays\": [\"mon\", \"tue\", \"fri\"],\n u\"lunch_time\": 10\n },\n u\"general_manager_trial_period\": 24,\n u\"general_manager_quit_notify_period\": 12,\n u\"general_manager_contract_number\": \"2\",\n u\"selected_moderator\": {\n \"type\": \"company\",\n \"_id\": company_founder.id\n },\n u\"general_manager_salary_days\": [1, 2, 3, 4, 5],\n u\"address\": {\n \"street_type\": u\"ул\",\n \"index\": 191186,\n \"house\": u\"4\",\n \"region\": u\"Санкт-Петербург\",\n \"flat\": u\"12\",\n \"street\": u\"Большая Морская\",\n \"address_string\": u\"г Санкт-Петербург, ул Большая Морская, д 4, кв 12\",\n \"flat_type\": u\"кв\",\n \"house_type\": u\"д\",\n \"ifns\": 7841\n },\n u\"actual_address\": {\n \"street_type\": u\"ул\",\n \"index\": 191186,\n \"house\": u\"4\",\n \"region\": u\"Санкт-Петербург\",\n \"flat\": u\"12\",\n \"street\": u\"Большая Морская\",\n \"address_string\": u\"г Санкт-Петербург, ул Большая Морская, д 4, кв 12\",\n \"flat_type\": u\"кв\",\n \"house_type\": u\"д\",\n \"ifns\": 7841\n },\n u\"address_type\": \"general_manager_registration_address\",\n u\"general_manager_contract_date\": datetime.now(),\n u\"general_manager_contract_additional_terms\": {\n u\"rights\": \"\",\n u\"responsibility\": None,\n u\"duties\": u\"колоть дрова\"\n },\n u\"board_of_directors\": True,\n u\"registration_date\": datetime.now(),\n u\"company_email\": \"\",\n u\"company_site\": \"\",\n u\"company_phone\": \"+79111231313\"\n }\n\n with self.app.app_context():\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_FINALISED,\n data={},\n _owner=self.user,\n result_fields={\n 'ifns_reg_info': {\n 'ogrn': \"1234567890123\"\n }\n }\n )\n sqldb.session.add(batch)\n sqldb.session.commit()\n _id = batch.id\n\n# rendered_docs=[{\n# 'document_type': DocumentTypeEnum.DT_ARTICLES,\n# 'file_link': 'somefile',\n# 'caption': u\"Уставчег\",\n# 'file_id': ObjectId(),\n# 'document_id': ObjectId(\"54b91fe7a726163324353311\")\n# }],\n doc = BatchDocumentDbObject(\n status=\"rendered\",\n document_type=\"articles\",\n batch_id=_id,\n data={\n \"job_main_code\": \"50.20\",\n \"use_national_language_company_name\": False,\n \"use_foreign_company_name\": False,\n \"pravo_otchuzhdeniya_type\": 3,\n \"general_manager_term\": 36,\n \"short_name\": \"а\",\n \"preimusch_pravo_priobreteniya_doli_time_span\": 30,\n \"address\": {\n \"building\": \"20\",\n \"index\": 117105,\n \"street_type\": \"ш\",\n \"house\": \"12Г\",\n \"region\": \"Москва\",\n \"okato\": 12122222222,\n \"address_string\": \"г Москва, Варшавское шоссе, д 12Г стр 20\",\n \"building_type\": \"стр\",\n \"street\": \"Варшавское\",\n \"house_type\": \"д\",\n \"ifns\": 7726\n },\n \"perehod_doli_k_naslednikam_soglasie\": True,\n \"necessary_votes_for_general_meeting_decisions\": {\n \"audit_assignment\": 1,\n \"large_deals_approval\": 1,\n \"concern_deals_approval\": 1,\n \"executives_formation\": 1,\n \"other_issues\": 1,\n \"articles_change\": 1,\n \"branch_establishment\": 1,\n \"profit_distribution\": 1,\n \"annual_reports_approval\": 1,\n \"liquidation_committee_assignment\": 1,\n \"auditor_election\": 1,\n \"obligations_emission\": 1,\n \"reorganization_or_liquidation\": 1,\n \"internal_documents_approval\": 1,\n \"company_strategy\": 1\n },\n \"starter_capital\": {\n \"capital_type\": 1,\n \"value\": {\n \"currency\": \"RUB\",\n \"value\": 
\"10000\"\n }\n },\n \"general_manager_caption\": \"Генеральный директор\",\n \"doc_date\": datetime.now(),\n \"full_name\": \"аи\",\n \"job_code_array\": [\n \"52.48.39\",\n \"50.30\",\n \"50.40\",\n \"51.44.4\"\n ],\n \"board_of_directors\": False,\n \"founders_count\": 2,\n \"board_of_directors_caption\": \"Совет директоров\"\n })\n sqldb.session.add(doc)\n sqldb.session.commit()\n\n booking = IfnsBookingObject(reg_info={\n 'status': 'registered',\n 'reg_date': datetime.now(),\n 'ogrn': \"123456789012345\"\n }, batch_id=_id)\n sqldb.session.add(booking)\n sqldb.session.commit()\n\n new_batch_db_object = DocumentBatchDbObject(\n data=doc_data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': _id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': _id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_COMPANY_DETAILS])\n })\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 2)\n" }, { "alpha_fraction": 0.581845223903656, "alphanum_fraction": 0.5833333134651184, "avg_line_length": 16.6842098236084, "blob_id": "868fe0fd60fa1f33cb8975f0f6101f0a2841224e", "content_id": "9a4e8a0c30cb4318ad840c3112a6250eb35823a9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 672, "license_type": "no_license", "max_line_length": 45, "num_lines": 38, "path": "/app/fw/plugins/emailer_plugin/enums.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n\nclass MailTypeEnum(object):\n MT_SOME_EMAIL = 'simple_mail'\n\n _ALL = {\n MT_SOME_EMAIL\n }\n\n @classmethod\n def validate(cls, value):\n return value in cls._ALL\n\n @staticmethod\n def get_name(value):\n return value\n\n\nclass MailTargetEnum(object):\n\n MTA_SPECIFIED = 'specified_users'\n MTA_BATCH_OWNER = 'batch_owner'\n MTA_EVENT_DATA_FIELD = 'event_data_field'\n\n _ALL = {\n MTA_SPECIFIED,\n MTA_BATCH_OWNER,\n MTA_EVENT_DATA_FIELD\n }\n\n @classmethod\n def validate(cls, value):\n return value in cls._ALL\n\n @staticmethod\n def get_name(value):\n return value\n" }, { "alpha_fraction": 0.6000000238418579, "alphanum_fraction": 0.6034985184669495, "avg_line_length": 46.63888931274414, "blob_id": "507cc14af69581d2caae770201119f609e4cc7ce", "content_id": "98752e12cebc43f4ec9ea03906c3e4d72577ea7b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3458, "license_type": "no_license", "max_line_length": 106, "num_lines": 72, "path": "/app/services/russian_post/async_tasks.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime, timedelta\n\nfrom celery.exceptions import SoftTimeLimitExceeded\nfrom celery import current_app as celery\n\nfrom fw.db.sql_base import db as sqldb\nfrom services.russian_post.db_models import RussianPostTrackingItem, PostTrackingStatus\nfrom services.russian_post.integration import 
get_current_mail_status\n\ncelery.config_from_envvar('CELERY_CONFIG_MODULE')\n\n\ndef _post_oper_type_2_tracking_status(oper_type):\n if oper_type in ('2',):\n return PostTrackingStatus.PTS_DELIVERED\n\n if oper_type in ('5', '22', '3', '16'):\n return PostTrackingStatus.PTS_FAILED\n\n return PostTrackingStatus.PTS_PROGRESS\n\[email protected]()\ndef get_tracking_info_async(batch_id=None):\n config = celery.conf.get('config')\n db = celery.conf.get('db')\n app = celery.conf['flask_app']()\n logger = celery.log.get_default_logger()\n login, password = config['RUSSIAN_POST_API_LOGIN'], config['RUSSIAN_POST_API_PASSWORD']\n with app.app_context():\n dt = datetime.utcnow() - timedelta(days=31)\n cur = RussianPostTrackingItem.query.filter(\n RussianPostTrackingItem.batch_id == batch_id,\n RussianPostTrackingItem.status.in_([PostTrackingStatus.PTS_UNKNOWN,\n PostTrackingStatus.PTS_NOT_FOUND,\n PostTrackingStatus.PTS_PROGRESS]),\n RussianPostTrackingItem.creation_date.__ge__(dt)\n ) if batch_id else RussianPostTrackingItem.query.filter(\n RussianPostTrackingItem.status.in_([PostTrackingStatus.PTS_UNKNOWN,\n PostTrackingStatus.PTS_NOT_FOUND,\n PostTrackingStatus.PTS_PROGRESS]),\n RussianPostTrackingItem.creation_date.__ge__(dt)\n )\n\n for tracking_item in cur.order_by(RussianPostTrackingItem.last_check_dt.asc()):\n tracking_number = tracking_item.tracking\n if not tracking_number:\n continue\n try:\n logger.info(u\"Checking tracking item %s\" % tracking_number)\n\n new_data = get_current_mail_status(tracking_item.tracking, login, password)\n if not new_data:\n tracking_item.status = PostTrackingStatus.PTS_NOT_FOUND\n tracking_item.status_caption = u\"Почтовое отправление не найдено\"\n tracking_item.last_check_dt = datetime.utcnow()\n sqldb.session.commit()\n return True\n new_status = _post_oper_type_2_tracking_status(new_data['operation'])\n if new_data['dt'] != tracking_item.status_change_dt or new_status != tracking_item.status:\n tracking_item.status = new_status\n tracking_item.status_change_dt = new_data['dt']\n tracking_item.last_location = new_data['address']\n tracking_item.status_caption = new_data['op_name']\n tracking_item.last_check_dt = datetime.utcnow()\n sqldb.session.commit()\n return True\n except SoftTimeLimitExceeded:\n logger.warn(u\"Had not enough time to check all tracking items\")\n except Exception:\n logger.exception(u\"Failed to check tracking item %s\" % tracking_number)\n continue\n" }, { "alpha_fraction": 0.7882562279701233, "alphanum_fraction": 0.7891458868980408, "avg_line_length": 42.230770111083984, "blob_id": "b6b7bdf584d3fd590549ab5c5612fcc3315ccd63", "content_id": "e4536764e3b704a24c245e3a7f61a4c6c2f0ec87", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1124, "license_type": "no_license", "max_line_length": 119, "num_lines": 26, "path": "/app/deployment_migrations/migration_list/20150715_migrate_assurances.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\nimport logging\nfrom tempfile import TemporaryFile, NamedTemporaryFile\nfrom bson import ObjectId\nimport requests\nfrom fw.auth.models import AuthUser\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.db_fields import DocumentBatchDbObject, BatchDocumentDbObject, CompanyDbObject, PrivatePersonDbObject\nfrom fw.documents.enums import PersonTypeEnum, IncorporationFormEnum, CompanyTypeEnum\nfrom fw.storage.models import FileObject\n\n\ndef forward(config, 
logger):\n logger.debug(u\"Modify car assurances tables\")\n\n sqldb.session.close()\n sqldb.engine.execute(\"ALTER TABLE car_assurance ADD COLUMN address VARCHAR NOT NULL DEFAULT '';\")\n\n sqldb.engine.execute(\"ALTER TABLE car_assurance_branch ALTER COLUMN title DROP NOT NULL;\")\n sqldb.engine.execute(\"ALTER TABLE car_assurance_branch ALTER COLUMN phone DROP NOT NULL;\")\n sqldb.engine.execute(\"ALTER TABLE car_assurance_branch ALTER COLUMN address DROP NOT NULL;\")\n sqldb.engine.execute(\"ALTER TABLE car_assurance_branch ALTER COLUMN address SET DEFAULT '';\")\n\ndef rollback(config, logger):\n pass\n" }, { "alpha_fraction": 0.6186262965202332, "alphanum_fraction": 0.6256111860275269, "avg_line_length": 24.879518508911133, "blob_id": "6f91c4b4d053278cc5054cea4e00d82cccac37cf", "content_id": "64f0765fc5e1ca5b188dfee8640888e3eda7d7d5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4323, "license_type": "no_license", "max_line_length": 106, "num_lines": 166, "path": "/app/services/llc_reg/documents/llc_gov_forms_adapters.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom flask import current_app\nfrom common_utils import get_russian_month_skl, num_word\nfrom fw.documents.address_enums import RFRegionsEnum\nfrom services.ip_reg.documents.enums import IPDocumentDeliveryTypeStrEnum\nfrom services.llc_reg.documents.enums import GovernmentFounderTypeEnum, FounderTypeEnum, JSCMemberTypeEnum\n\n\nclass ShortDistrictTypeAdapter(object):\n @staticmethod\n def adapt(value):\n if not hasattr(value, 'api_value'):\n return u\"\"\n return value.api_value()\n\n\nclass RFRegionNumberAdapter(object):\n @staticmethod\n def adapt(value):\n if value is None:\n return None\n if isinstance(value, basestring):\n return RFRegionsEnum._NUMBERS.get(value, u\"\")\n return RFRegionsEnum._NUMBERS.get(value.api_value(), u\"\")\n\n\nclass ShortCityTypeAdapter(object):\n @staticmethod\n def adapt(value):\n if not hasattr(value, 'api_value'):\n return u\"\"\n return value.api_value()\n\n\nclass ShortVillageTypeAdapter(object):\n @staticmethod\n def adapt(value):\n if not hasattr(value, 'api_value'):\n return u\"\"\n return value.api_value()\n\n\nclass ShortStreetTypeAdapter(object):\n @staticmethod\n def adapt(value):\n if not hasattr(value, 'api_value'):\n return u\"\"\n return value.api_value()\n\n\nclass InternalPassportAdapter(object):\n @staticmethod\n def adapt(value):\n value = value.replace(u' ', u'')\n return u\"%s %s %s\" % (value[:2], value[2:4], value[4:])\n\n\nclass CitizenshipToNumberAdapter(object):\n @staticmethod\n def adapt(value):\n if value.lower() in {u'россиия', u'рф', u'российская федерация'}:\n return 1\n elif value:\n return 2\n return 3\n\n\nclass CountryCodeRusAdapter(object):\n @staticmethod\n def adapt(value):\n if value == 643:\n return 1\n elif value:\n return 2\n return 3\n\n\nclass GenderToNumberAdapter(object):\n @staticmethod\n def adapt(value):\n if value == 'male':\n return 1\n elif value == 'female':\n return 2\n return 0\n\n\nclass GovernmentFounderTypeNumberAdapter(object):\n @staticmethod\n def adapt(value):\n if value == GovernmentFounderTypeEnum.GF_RUSSIA:\n return 1\n if value == GovernmentFounderTypeEnum.GF_REGION:\n return 2\n if value == GovernmentFounderTypeEnum.GF_MUNICIPALITY:\n return 3\n return u\"\"\n\n\nclass DocumentDeliveryNumberAdapter(object):\n @staticmethod\n def adapt(value):\n if value is None:\n return None\n if isinstance(value, 
basestring):\n return value\n return value.api_value()\n\n\nclass DocumentObtainNumberAdapter(object):\n @staticmethod\n def adapt(value):\n if value == IPDocumentDeliveryTypeStrEnum.IP_DDT_IN_PERSON:\n return 1\n elif value == IPDocumentDeliveryTypeStrEnum.IP_DDT_RESPONSIBLE_PERSON:\n return 2\n elif value == IPDocumentDeliveryTypeStrEnum.IP_DDT_MAIL:\n return 3\n\n\nclass FounderTypeNumberAdapter(object):\n @staticmethod\n def adapt(value):\n if value == FounderTypeEnum.FT_PERSON:\n return 1\n elif value == FounderTypeEnum.FT_COMPANY:\n return 2\n current_app.logger.warn(u\"FounderTypeNumberAdapter failed to adapt value %s\" % unicode(value))\n return u\"\"\n\n\nclass JSCMemberTypeNumberAdapter(object):\n @staticmethod\n def adapt(value):\n if value == JSCMemberTypeEnum.JSCMT_NEW_JSC:\n return 1\n elif value == JSCMemberTypeEnum.JSCMT_REGISTRATOR:\n return 2\n return u\"\"\n\n\nclass UsnTaxTypeAdapter(object):\n @staticmethod\n def adapt(value):\n if not hasattr(value, 'api_value'):\n return u\"\"\n return value.api_value()\n\n\nclass MonthRusNameDeclAdapter(object):\n @staticmethod\n def adapt(value):\n if not isinstance(value, int):\n return \"\"\n if value < 1 or value > 12:\n return \"\"\n return get_russian_month_skl(value)\n\n\nclass NumToTextAdapter(object):\n @staticmethod\n def adapt(value):\n if not isinstance(value, int):\n return\n\n return num_word(value)" }, { "alpha_fraction": 0.45864349603652954, "alphanum_fraction": 0.48751547932624817, "avg_line_length": 42.36099624633789, "blob_id": "3a003d9e3d5ba7a5e3cf183b51edc3622704f305", "content_id": "bf96cd5fa60ebc443e55cfdd6fb687a5b0bc6763", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 154421, "license_type": "no_license", "max_line_length": 184, "num_lines": 3374, "path": "/jb_tests/test_pack/test_rendering.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime, timedelta\nimport hashlib\nimport json\n\nfrom bson.objectid import ObjectId\n\nfrom base_test_case import authorized\nfrom common_utils.perf import TimeCalculator\nfrom fw.documents.address_enums import (RFRegionsEnum, VillageTypeEnum, DistrictTypeEnum, CityTypeEnum, StreetTypeEnum,\n HouseTypeEnum, BuildingTypeEnum, FlatTypeEnum)\nfrom fw.documents.db_fields import PrivatePersonDbObject, CompanyDbObject, DocumentBatchDbObject\nfrom fw.documents.doc_requisites_storage import DocRequisitiesStorage\nfrom fw.documents.enums import DocumentBatchTypeEnum, DocumentTypeEnum, PersonDocumentTypeEnum, PersonTypeEnum, \\\n CompanyTypeEnum, BatchStatusEnum, GenderEnum, IncorporationFormEnum\nfrom fw.documents.fields.doc_fields import DocumentBatch\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.fields.field_validators import get_uninitialized_field\nfrom fw.metrics.models import UserMetricObject\nfrom services.ifns.data_model.models import IfnsBookingObject\nfrom services.ifns.utils.process_okvad import process_okvad\nfrom services.llc_reg.documents.enums import UsnTaxType, DocumentDeliveryTypeStrEnum, RegistrationWay, AddressType\nfrom services.llc_reg.documents.enums import NecessaryVotesEnum\nfrom test_pack.base_batch_test import BaseBatchTestCase\n\nNECESSARY_VOTES_FOR_GENERAL_MEETING_DECISIONS = {\n u\"company_strategy\": NecessaryVotesEnum.NV_ALL,\n u\"articles_change\": NecessaryVotesEnum.NV_3_4,\n u\"executives_formation\": NecessaryVotesEnum.NV_2_3,\n u\"auditor_election\": NecessaryVotesEnum.NV_2_3,\n 
u\"annual_reports_approval\": NecessaryVotesEnum.NV_3_4,\n u\"profit_distribution\": NecessaryVotesEnum.NV_3_4,\n u\"internal_documents_approval\": NecessaryVotesEnum.NV_2_3,\n u\"obligations_emission\": NecessaryVotesEnum.NV_ALL,\n u\"audit_assignment\": NecessaryVotesEnum.NV_2_3,\n u\"large_deals_approval\": NecessaryVotesEnum.NV_3_4,\n u\"concern_deals_approval\": NecessaryVotesEnum.NV_2_3,\n u\"reorganization_or_liquidation\": NecessaryVotesEnum.NV_ALL,\n u\"liquidation_committee_assignment\": NecessaryVotesEnum.NV_2_3,\n u\"branch_establishment\": NecessaryVotesEnum.NV_3_4,\n u\"other_issues\": NecessaryVotesEnum.NV_2_3\n}\n\nclass RenderingTestCase(BaseBatchTestCase):\n\n @authorized()\n def test_11001(self):\n a = UserMetricObject()\n #DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_ARTICLES]\n uchreditel_fis_lico_person = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"010101417407\",\n \"birthdate\": datetime(datetime.now().year - 45, datetime.now().month, datetime.now().day,\n datetime.now().hour, datetime.now().minute),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"sex\": \"male\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime(datetime.now().year, datetime.now().month, datetime.now().day,\n datetime.now().hour, datetime.now().minute),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"person_type\": PersonTypeEnum.PT_RUSSIAN,\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"НовоПушкинскийСуперДлинноеНазваниеТакогоВообщеНеБывает\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"long_form_mode\": True\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n sqldb.session.add(uchreditel_fis_lico_person)\n sqldb.session.commit()\n\n uchreditel_fis_lico_person2 = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"010101417407\",\n \"birthdate\": datetime(datetime.now().year - 45, datetime.now().month, datetime.now().day,\n datetime.now().hour, datetime.now().minute),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"sex\": \"male\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime(datetime.now().year, datetime.now().month, datetime.now().day,\n datetime.now().hour, datetime.now().minute),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"person_type\": PersonTypeEnum.PT_RUSSIAN,\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"НовоПушкинскийСуперДлинноеНазваниеТакогоВообщеНеБывает\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"long_form_mode\": True\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n })\n sqldb.session.add(uchreditel_fis_lico_person2)\n sqldb.session.commit()\n\n uchreditel_fis_lico_person3 = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"010101417407\",\n \"birthdate\": datetime(datetime.now().year - 45, datetime.now().month, datetime.now().day,\n datetime.now().hour, datetime.now().minute),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"sex\": \"male\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime(datetime.now().year, datetime.now().month, datetime.now().day,\n datetime.now().hour, datetime.now().minute),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"person_type\": PersonTypeEnum.PT_RUSSIAN,\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"НовоПушкинскийСуперДлинноеНазваниеТакогоВообщеНеБывает\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"long_form_mode\": True\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n sqldb.session.add(uchreditel_fis_lico_person3)\n sqldb.session.commit()\n\n uchreditel_foreign_pp = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Джохн\",\n \"surname\": u\"Малковеч\",\n \"birthdate\": datetime(datetime.now().year - 45, datetime.now().month, datetime.now().day,\n datetime.now().hour, datetime.now().minute),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"sex\": \"male\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_FOREIGN,\n \"number\": u\"995852\",\n \"issue_date\": datetime(datetime.now().year, datetime.now().month, datetime.now().day,\n datetime.now().hour, datetime.now().minute),\n \"issue_depart\": u\"Australia\",\n },\n \"person_type\": PersonTypeEnum.PT_FOREIGN,\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"НовоПушкинскийСуперДлинноеНазваниеТакогоВообщеНеБывает\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"long_form_mode\": True\n },\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\",\n \"living_country_code\": 72\n })\n sqldb.session.add(uchreditel_foreign_pp)\n sqldb.session.commit()\n\n general_manager_person = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n # \"name\" : u\"Марат\",\n # \"surname\" : u\"Кожевников\",\n # \"patronymic\" : u\"Мухамедович\",\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n\n \"inn\": \"010101417407\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 19),\n \"birthplace\": u\"неизвестно где\",\n \"sex\": \"male\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 30),\n \"issue_depart\": u\"УМВД Гадюкинского района Неизвестной области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012347\",\n \"person_type\": PersonTypeEnum.PT_RUSSIAN,\n \"caption\": u\"Сам техник\",\n \"phone\": \"+79110001122\",\n \"email\": \"[email protected]\",\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
7\",\n \"address\": {\n \"building\": u\"А\",\n \"city_type\": u\"г\",\n \"qc_complete\": \"0\",\n \"street_type\": u\"ул\",\n \"index\": 420096,\n \"address_string\": u\"Респ Татарстан, г Казань, ул Березовая (Малые Дербышки), д 1А, кв 6\",\n \"house\": \"1\",\n \"region\": u\"Татарстан\",\n \"okato\": \"92401385000\",\n \"flat\": \"6\",\n \"building_type\": u\"литер\",\n \"street\": u\"Березовая (Малые Дербышки)\",\n \"long_form_mode\": False,\n \"flat_type\": u\"кв\",\n \"house_type\": u\"д\",\n \"ifns\": \"1686\",\n \"city\": u\"Казань\",\n \"qc\": \"0\"\n },\n })\n sqldb.session.add(general_manager_person)\n sqldb.session.commit()\n\n uchreditel_rus_jur_lico_company = CompanyDbObject(**{\n \"_owner\": self.user,\n \"ogrn\": \"1095543023135\",\n \"inn\": \"781108730780\",\n \"full_name\": u\"Протон\",\n \"short_name\": u\"Про\",\n \"kpp\": \"999999999\",\n \"company_type\": CompanyTypeEnum.CT_RUSSIAN,\n \"general_manager\": {\n \"_id\": uchreditel_fis_lico_person.id,\n \"type\": \"person\"\n },\n \"general_manager_caption\": u\"директор\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_IRKUTSKAYA_REGION,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"778899\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n \"ifns\": \"1234\"\n },\n \"phone\": \"+7(812)1234567\",\n \"registration_date\": datetime(2009, 1, 1)\n })\n sqldb.session.add(uchreditel_rus_jur_lico_company)\n sqldb.session.commit()\n\n uchreditel_foreign_llc = CompanyDbObject(**{\n \"_owner\": self.user,\n \"full_name\": u\"Proton\",\n \"short_name\": u\"Pro\",\n \"company_type\": CompanyTypeEnum.CT_FOREIGN,\n \"general_manager_caption\": u\"директор\",\n \"country_code\": 100,\n \"generic_address\": \"Generic Address\",\n \"registration_date\": datetime(2009, 1, 1),\n \"registration_depart\": \"depart\",\n \"registration_number\": 123,\n u\"general_manager\": {\n \"_id\": general_manager_person.id,\n \"type\": \"person\"\n },\n })\n sqldb.session.add(uchreditel_foreign_llc)\n sqldb.session.commit()\n\n with self.app.app_context():\n data = {\n u\"full_name\": u\"фывафыва\",\n u\"short_name\": u\"Бокс\",\n u\"doc_date\": datetime(2019, 9, 24),\n # \"address\": {\n # \"address_string\": u\"Санкт-Петербург, наб. Свердловская, д 44, литер. ю, кв. 
405\",\n # \"building\": u\"ю\",\n # \"street_type\": u\"наб\",\n # \"house\": \"44\",\n # \"house_type\": u\"д\",\n # \"region\": u\"Санкт-Петербург\",\n # \"flat\": \"405\",\n # \"building_type\": u\"литер\",\n # \"street\": u\"Свердловская\",\n # \"long_form_mode\": True,\n # \"flat_type\": u\"кв\",\n # },\n u\"address_type\": \"general_manager_registration_address\",\n u\"starter_capital\": {\n \"currency\": \"rub\",\n \"value\": \"10000\"\n },\n u\"general_manager_caption\": u\"повелитель\",\n u\"share_type\": \"fraction\",\n u\"founders\": [\n {\n \"founder\": {\n \"_id\": uchreditel_rus_jur_lico_company.id,\n \"type\": \"company\"\n },\n \"nominal_capital\": \"12312.20\",\n \"share\": \"1.4\"\n },\n {\n \"founder\": {\n \"_id\": uchreditel_fis_lico_person.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": \"1500.50\",\n \"share\": \"1.4\"\n },\n {\n \"founder\": {\n \"_id\": uchreditel_fis_lico_person2.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": \"1500.50\",\n \"share\": \"1.4\"\n },\n {\n \"founder\": {\n \"_id\": uchreditel_fis_lico_person3.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": \"1500.50\",\n \"share\": \"1.4\"\n },\n ],\n u\"general_manager\": {\n \"_id\": general_manager_person.id,\n \"type\": \"person\"\n },\n u\"job_main_code\": u\"92.31.1\",\n # u\"job_code_array\" : [u\"92.31.1\", u\"74.14\", u\"10.01.1\"],\n u\"job_code_array\": [],\n u\"doc_obtain_founder\": {\n \"type\": \"person\",\n \"_id\": uchreditel_fis_lico_person.id\n },\n u\"obtain_way\": \"founder\",\n u'use_foreign_company_name': False,\n u'use_national_language_company_name': False,\n u\"tax_type\": UsnTaxType.UT_INCOME_MINUS_EXPENSE,\n u'general_manager_term': 20,\n u\"preimusch_pravo_priobreteniya_doli_time_span\": 60,\n u'necessary_votes_for_general_meeting_decisions': {\n u\"company_strategy\": NecessaryVotesEnum.NV_ALL,\n u\"articles_change\": NecessaryVotesEnum.NV_3_4,\n u\"executives_formation\": NecessaryVotesEnum.NV_2_3,\n u\"auditor_election\": NecessaryVotesEnum.NV_2_3,\n u\"annual_reports_approval\": NecessaryVotesEnum.NV_3_4,\n u\"profit_distribution\": NecessaryVotesEnum.NV_3_4,\n u\"internal_documents_approval\": NecessaryVotesEnum.NV_2_3,\n u\"obligations_emission\": NecessaryVotesEnum.NV_ALL,\n u\"audit_assignment\": NecessaryVotesEnum.NV_2_3,\n u\"large_deals_approval\": NecessaryVotesEnum.NV_3_4,\n u\"concern_deals_approval\": NecessaryVotesEnum.NV_2_3,\n u\"reorganization_or_liquidation\": NecessaryVotesEnum.NV_ALL,\n u\"liquidation_committee_assignment\": NecessaryVotesEnum.NV_2_3,\n u\"branch_establishment\": NecessaryVotesEnum.NV_3_4,\n u\"other_issues\": NecessaryVotesEnum.NV_2_3\n },\n u\"board_of_directors\": False,\n u\"selected_secretary\": {\n \"type\": \"person\",\n \"_id\": uchreditel_fis_lico_person.id\n },\n u\"selected_moderator\": {\n \"type\": \"person\",\n \"_id\": uchreditel_fis_lico_person.id\n },\n u\"pravo_otchuzhdeniya_type\": 5,\n u\"perehod_doli_k_naslednikam_soglasie\": True,\n u\"taxation_type\": \"general\",\n u\"registration_way\": \"some_founders\",\n u\"region\": u\"Санкт-Петербург\",\n u\"reg_responsible_founder\": {\n u\"type\": u\"person\",\n u\"_id\": uchreditel_fis_lico_person.id\n },\n }\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW,\n data={},\n paid=True,\n metadata={'_something': \"wrong\"},\n _owner=self.user\n )\n sqldb.session.add(batch)\n sqldb.session.commit()\n batch_id = batch.id\n\n new_batch_db_object = DocumentBatchDbObject(\n data=data,\n 
metadata={'_something': \"wrong\"},\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n structure = batch.get_api_structure()\n del structure['batch_type']\n batch_json = json.dumps(structure)\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': unicode(batch_id),\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n print(json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n result = self.test_client.post('/batch/finalise/', data={'batch_id': unicode(batch_id)})\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n print(json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n\n result = self.test_client.get('/batch/?batch_id=' + batch_id)\n print(result.data)\n\n # unfinalise, re-send the same data and finalise again to cover the full batch lifecycle\n result = self.test_client.post('/batch/unfinalise/', data={'batch_id': unicode(batch_id)})\n self.assertEqual(result.status_code, 200)\n\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': unicode(batch_id),\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n\n result = self.test_client.post('/batch/finalise/', data={'batch_id': unicode(batch_id)})\n self.assertEqual(result.status_code, 200)\n\n @authorized()\n def test_ustav(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_ARTICLES]\n uchreditel_fis_lico_person = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"sex\": \"male\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"person_type\": PersonTypeEnum.PT_RUSSIAN,\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"НовоПушкинскийСуперДлинноеНазваниеТакогоВообщеНеБывает\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n sqldb.session.add(uchreditel_fis_lico_person)\n sqldb.session.commit()\n\n uchreditel_rus_jur_lico_company = CompanyDbObject(**{\n \"_owner\": self.user,\n \"ogrn\": \"1095543023135\",\n \"inn\": \"781108730780\",\n \"full_name\": u\"Протон\",\n \"short_name\": u\"Про\",\n \"kpp\": \"999999999\",\n \"company_type\": CompanyTypeEnum.CT_RUSSIAN,\n \"general_manager\": {\n \"_id\": uchreditel_fis_lico_person.id,\n \"type\": \"person\"\n },\n \"general_manager_caption\": u\"директор\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_IRKUTSKAYA_REGION,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"778899\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n \"ifns\": 1234\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(uchreditel_rus_jur_lico_company)\n sqldb.session.commit()\n\n with self.app.app_context():\n data = {\n u\"full_name\": u\"ТестКреатеЛтдАдд1КомпаниНевДиректор\",\n u\"short_name\": u\"ТестКреатеЛтдАдд1КомпаниНевДиректор\",\n u\"address\": {\n \"building\": u\"А\",\n \"city_type\": u\"г\",\n \"qc_complete\": \"0\",\n \"street_type\": u\"ул\",\n \"index\": 420096,\n \"address_string\": u\"Респ Татарстан, г Казань, ул Березовая (Малые Дербышки), д 1А, кв 6\",\n \"house\": \"1\",\n \"region\": u\"Татарстан\",\n \"okato\": \"92401385000\",\n \"flat\": \"6\",\n \"building_type\": u\"литер\",\n \"street\": u\"Березовая (Малые Дербышки)\",\n \"long_form_mode\": False,\n \"flat_type\": u\"кв\",\n \"house_type\": u\"д\",\n \"ifns\": \"1686\",\n \"city\": u\"Казань\",\n \"qc\": \"0\"\n },\n u\"address_type\": \"office_address\",\n u\"starter_capital\": {\n \"currency\": \"rub\",\n \"value\": \"12312\"\n },\n u\"general_manager_caption\": u\"повелитель\",\n u\"share_type\": \"percent\",\n u\"founders\": [\n {\n \"founder\": {\n \"_id\": uchreditel_rus_jur_lico_company.id,\n \"type\": \"company\"\n },\n \"nominal_capital\": 12312,\n \"share\": 85\n },\n {\n \"founder\": {\n \"_id\": uchreditel_fis_lico_person.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": 1500,\n \"share\": 15\n }\n ],\n u\"general_manager\": {\n \"_id\": uchreditel_fis_lico_person.id,\n \"type\": \"person\"\n },\n u\"job_main_code\": u\"92.31.1\",\n u\"job_code_array\": [u\"92.31.1\", u\"74.14\", u\"10.01.1\"],\n u\"doc_obtain_person\": {\n \"type\": \"person\",\n \"_id\": uchreditel_fis_lico_person.id\n },\n u\"obtain_way\": \"founder\",\n u'use_foreign_company_name': True,\n u'use_national_language_company_name': False,\n \"foreign_language\": u\"английский\",\n u\"foreign_full_name\": u\"\\\"MEDPRIBOR-21\\\" Limited Liability Company\",\n u\"foreign_short_name\": u\"\\\"MEDPRIBOR-21\\\" LLC\",\n u\"tax_type\": UsnTaxType.UT_INCOME_MINUS_EXPENSE,\n u'general_manager_term': 20,\n u\"preimusch_pravo_priobreteniya_doli_time_span\": 60,\n u'necessary_votes_for_general_meeting_decisions': {\n u\"company_strategy\": NecessaryVotesEnum.NV_ALL,\n u\"articles_change\": NecessaryVotesEnum.NV_3_4,\n u\"executives_formation\": NecessaryVotesEnum.NV_2_3,\n u\"auditor_election\": NecessaryVotesEnum.NV_2_3,\n u\"annual_reports_approval\": NecessaryVotesEnum.NV_3_4,\n u\"profit_distribution\": NecessaryVotesEnum.NV_3_4,\n u\"internal_documents_approval\": NecessaryVotesEnum.NV_2_3,\n u\"obligations_emission\": NecessaryVotesEnum.NV_ALL,\n u\"audit_assignment\": NecessaryVotesEnum.NV_2_3,\n 
u\"large_deals_approval\": NecessaryVotesEnum.NV_3_4,\n u\"concern_deals_approval\": NecessaryVotesEnum.NV_2_3,\n u\"reorganization_or_liquidation\": NecessaryVotesEnum.NV_ALL,\n u\"liquidation_committee_assignment\": NecessaryVotesEnum.NV_2_3,\n u\"branch_establishment\": NecessaryVotesEnum.NV_3_4,\n u\"other_issues\": NecessaryVotesEnum.NV_2_3\n },\n u\"board_of_directors\": False,\n u\"selected_secretary\": {\n \"type\": \"person\",\n \"_id\": uchreditel_fis_lico_person.id\n },\n u\"selected_moderator\": {\n \"type\": \"person\",\n \"_id\": uchreditel_fis_lico_person.id\n },\n u\"pravo_otchuzhdeniya_type\": 5,\n u\"perehod_doli_k_naslednikam_soglasie\": True,\n u\"taxation_type\": \"usn\",\n u\"registration_way\": \"some_founders\",\n u\"region\": u\"Санкт-Петербург\",\n u\"general_manager_deals_max_amount\": 10000,\n u\"large_deals_min_value\": 50\n }\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW,\n _documents=[],\n data={},\n paid=True,\n _owner=self.user\n )\n sqldb.session.add(batch)\n sqldb.session.commit()\n batch_id = batch.id\n\n new_batch_db_object = DocumentBatchDbObject(\n data=data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n booking = IfnsBookingObject(\n batch_id=batch_id,\n reg_info={\n 'reg_date': datetime.now()\n }\n )\n sqldb.session.add(booking)\n sqldb.session.commit()\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch_id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n\n result = self.test_client.post('/batch/finalise/', data={'batch_id': batch_id})\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 1)\n\n @authorized()\n def test_decision(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_DECISION]\n\n bd = datetime.now() - timedelta(days=365 * 30)\n founder_person = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"birthdate\": bd,\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"sex\": \"male\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": bd + timedelta(days=20 * 366),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"qc_complete\": \"5\",\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198259,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Тамбасова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"38\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"70\"\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n sqldb.session.add(founder_person)\n sqldb.session.commit()\n\n company_founder = CompanyDbObject(**{\n \"_owner\": self.user,\n \"ogrn\": \"1095543023135\",\n \"inn\": \"781108730780\",\n \"kpp\": \"999999999\",\n \"general_manager_caption\": u\"генеральный директор\",\n \"full_name\": u\"образовательное учреждение дополнительного образования детей специализированная детско-юношеская спортивная школа олимпийского резерва по боксу\",\n \"short_name\": u\"Бокс\",\n \"general_manager\": {\n \"_id\": founder_person.id,\n \"type\": \"person\"\n },\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Седова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(company_founder)\n sqldb.session.commit()\n\n input = {\n u'use_foreign_company_name': False,\n u'use_national_language_company_name': False,\n u\"full_name\": u\"ТестКреатеЛтдАдд1КомпаниНевДиректор\",\n u\"short_name\": u\"Бокс\",\n \"address\": {\n \"flat_type\": u\"кв\",\n \"qc_complete\": u\"0\",\n \"index\": 140008,\n \"house\": u\"85\",\n \"region\": u\"Московская\",\n \"okato\": 46231501000,\n \"flat\": u\"2\",\n \"address_string\": u\"Московская обл. в/г Тёплый стан д.85, кв.2\",\n \"village\": u\"Тёплый Стан\",\n \"house_type\": u\"д\",\n \"village_type\": u\"в/гор\",\n \"ifns\": \"5027\"\n },\n \"address_type\": \"office_address\",\n \"starter_capital\": {\n \"currency\": \"rub\",\n \"value\": \"12312.234234\"\n },\n \"registration_date\": datetime.now(),\n \"general_manager_caption\": u\"повелитель\",\n \"share_type\": \"percent\",\n \"founders\": [\n {\n \"founder\": {\n \"_id\": company_founder.id,\n \"type\": \"company\"\n },\n \"nominal_capital\": 1500.5,\n \"share\": 100\n }\n ],\n \"general_manager\": {\n \"_id\": founder_person.id\n },\n \"job_main_code\": u\"92.31.1\",\n \"job_code_array\": [u\"92.31.1\", u\"74.14\", u\"10.01.1\"],\n # \"doc_obtain_person\" : {\n # \"type\" : \"person\",\n # \"_id\" : founder_person._id\n # },\n \"obtain_way\": \"responsible_person\"\n }\n\n with self.app.app_context():\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n _documents=[],\n paid=True,\n data={},\n _owner=self.user\n )\n sqldb.session.add(batch)\n sqldb.session.commit()\n batch_id = batch.id\n\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n _documents=[],\n data=input,\n _owner=self.user\n )\n\n batch = DocumentBatch.db_obj_to_field(batch)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch_id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n\n result = self.test_client.post('/batch/finalise/', data={'batch_id': batch_id})\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 1)\n\n @authorized()\n def test_usn(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_USN]\n\n col = self.db['okvad']\n col.insert({\"caption\": 
u\"Рыболовство\", \"okved\": \"05.01\", \"nalog\": \"usn\",\n \"parent\": ObjectId(\"5478373ee64bcf4ece4a57d8\")})\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 2),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n 'index': 199000,\n \"region\": RFRegionsEnum.RFR_LENINGRADSKAYA_REGION,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гатчина\",\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Радищева\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"26\",\n \"flat_type\": FlatTypeEnum.FLT_FLAT,\n \"flat\": \"80\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n })\n sqldb.session.add(founder)\n sqldb.session.commit()\n\n founder_otvetstvennyi = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Семен\",\n \"surname\": u\"Семенчук\",\n \"patronymic\": u\"Семейкин\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 2),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n 'index': 199000,\n \"region\": RFRegionsEnum.RFR_LENINGRADSKAYA_REGION,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гатчина\",\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Радищева\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"26\",\n \"flat_type\": FlatTypeEnum.FLT_FLAT,\n \"flat\": \"80\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n sqldb.session.add(founder_otvetstvennyi)\n sqldb.session.commit()\n\n company_founder = CompanyDbObject(**{\n \"_owner\": self.user,\n \"ogrn\": \"1095543023135\",\n \"inn\": \"781108730780\",\n \"kpp\": \"999999999\",\n \"general_manager_caption\": u\"генеральный директор\",\n \"full_name\": u\"образовательное учреждение дополнительного образования детей специализированная детско-юношеская спортивная школа олимпийского резерва по боксу\",\n \"short_name\": u\"Бокс\",\n \"general_manager\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n \"address\": {\n 'index': '199000',\n \"region\": RFRegionsEnum.RFR_LENINGRADSKAYA_REGION,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гатчина\",\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Радищева\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"26\",\n \"flat_type\": FlatTypeEnum.FLT_FLAT,\n \"flat\": \"80\",\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(company_founder)\n sqldb.session.commit()\n\n input = {\n u\"full_name\": u\"Общество с ограниченной ответственностью образовательное учреждение дополнительного образования детей специализированная детско-юношеская спортивная школа\",\n \"address\": {\n 'index': 199000,\n \"region\": RFRegionsEnum.RFR_LENINGRADSKAYA_REGION,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гатчина\",\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Радищева\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"26\",\n \"flat_type\": FlatTypeEnum.FLT_FLAT,\n \"flat\": \"80\",\n \"ifns\": 1234\n },\n \"address_type\": \"office_address\",\n \"starter_capital\": {\n \"currency\": \"rub\",\n \"value\": \"12312.234234\"\n },\n \"general_manager_caption\": u\"повелитель\",\n \"share_type\": \"percent\",\n \"founders\": [\n {\n \"founder\": {\n \"_id\": company_founder.id,\n \"type\": \"company\"\n },\n \"nominal_capital\": 12312.22,\n \"share\": 25\n },\n {\n \"founder\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": 1500.5,\n \"share\": 75\n }\n ],\n \"general_manager\": {\n \"_id\": founder_otvetstvennyi.id\n },\n \"job_main_code\": u\"05.01\",\n \"job_code_array\": [u\"92.31.1\", u\"74.14\", u\"10.01.1\"],\n \"doc_obtain_person\": {\n \"type\": \"person\",\n \"_id\": founder_otvetstvennyi.id\n },\n \"obtain_way\": \"founder\",\n u\"tax_type\": UsnTaxType.UT_INCOME,\n u'general_manager_term': 20,\n u\"preimusch_pravo_priobreteniya_doli_time_span\": 60,\n u'necessary_votes_for_general_meeting_decisions': {\n u\"company_strategy\": NecessaryVotesEnum.NV_ALL,\n u\"articles_change\": NecessaryVotesEnum.NV_3_4,\n u\"executives_formation\": NecessaryVotesEnum.NV_2_3,\n u\"auditor_election\": NecessaryVotesEnum.NV_2_3,\n u\"annual_reports_approval\": NecessaryVotesEnum.NV_3_4,\n u\"profit_distribution\": NecessaryVotesEnum.NV_3_4,\n u\"internal_documents_approval\": NecessaryVotesEnum.NV_2_3,\n u\"obligations_emission\": NecessaryVotesEnum.NV_ALL,\n u\"audit_assignment\": NecessaryVotesEnum.NV_2_3,\n u\"large_deals_approval\": NecessaryVotesEnum.NV_3_4,\n u\"concern_deals_approval\": NecessaryVotesEnum.NV_2_3,\n u\"reorganization_or_liquidation\": NecessaryVotesEnum.NV_ALL,\n u\"liquidation_committee_assignment\": NecessaryVotesEnum.NV_2_3,\n u\"branch_establishment\": NecessaryVotesEnum.NV_3_4,\n 
u\"other_issues\": NecessaryVotesEnum.NV_2_3\n },\n \"board_of_directors\": False,\n \"selected_secretary\": {\n \"type\": \"person\",\n \"_id\": founder_otvetstvennyi.id\n },\n \"selected_moderator\": {\n \"type\": \"person\",\n \"_id\": founder_otvetstvennyi.id\n },\n \"pravo_otchuzhdeniya_type\": 5,\n \"short_name\": u\"АБВ\",\n \"perehod_doli_k_naslednikam_soglasie\": True,\n \"taxation_type\": \"usn\",\n \"registration_way\": \"some_founders\",\n \"region\": u\"Санкт-Петербург\"\n }\n\n with self.app.app_context():\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW,\n _documents=[],\n data={},\n _owner=self.user\n )\n sqldb.session.add(batch)\n sqldb.session.commit()\n batch_id = batch.id\n\n new_batch_db_object = DocumentBatchDbObject(\n data=input,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch_id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n\n result = self.test_client.post('/batch/finalise/', data={'batch_id': batch_id})\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertTrue(not not db_batch._documents[0].file)\n\n @authorized()\n def test_protocol(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_PROTOCOL]\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n sqldb.session.add(founder)\n sqldb.session.commit()\n\n founder_otvetstvennyi = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Семен\",\n \"surname\": u\"Семенчук\",\n \"patronymic\": u\"Семейкин\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Пушкинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n })\n sqldb.session.add(founder_otvetstvennyi)\n sqldb.session.commit()\n\n company_founder = CompanyDbObject(**{\n \"_owner\": self.user,\n \"ogrn\": \"1095543023135\",\n \"inn\": \"781108730780\",\n \"kpp\": \"999999999\",\n \"general_manager_caption\": u\"генеральный директор\",\n \"full_name\": u\"Том и «Джери»\",\n \"short_name\": u\"Т и \\\"Д\\\"\",\n \"general_manager\": {\n \"_id\": founder.id,\n \"type\": \"person\"\n },\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Седова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(company_founder)\n sqldb.session.commit()\n\n doc_data = {\n u\"full_name\": u\"ТестКреатеЛтдАдд1КомпаниНевДиректор\",\n u\"short_name\": u\"ТестКреатеЛтдАдд1КомпаниНевДиректор\",\n u'use_foreign_company_name': False,\n u'use_national_language_company_name': False,\n u'foreign_full_name': u'Microsoft Company',\n u'foreign_short_name': u'Microsoft',\n u'national_language_full_name': u'Эбэртэ Туруун',\n u'national_language_short_name': u'Туруун',\n u\"foreign_language\": u\"английский\",\n u\"national_language\": u\"ташкентский\",\n u\"selected_secretary\": {\n u\"_id\": founder_otvetstvennyi.id,\n u\"type\": u\"person\"\n },\n \"address\": {\n \"flat_type\": u\"кв\",\n \"qc_complete\": u\"0\",\n \"index\": 140008,\n \"house\": u\"85\",\n \"region\": u\"Московская\",\n \"okato\": 46231501000,\n \"flat\": u\"2\",\n \"address_string\": u\"Московская обл. 
в/г Тёплый стан д.85, кв.2\",\n \"village\": u\"Тёплый Стан\",\n \"house_type\": u\"д\",\n \"village_type\": u\"в/гор\",\n \"ifns\": 5027\n },\n u\"address_type\": u\"office_address\",\n u\"starter_capital\": {\n u\"currency\": u\"rub\",\n u\"value\": u\"38000\"\n },\n u\"general_manager_caption\": u\"повелитель\",\n u\"share_type\": u\"fraction\",\n u\"founders\": [\n {\n u\"founder\": {\n u\"_id\": company_founder.id,\n u\"type\": u\"company\"\n },\n u\"nominal_capital\": 12312.22,\n u\"share\": \"3.4\"\n }, {\n u\"founder\": {\n u\"_id\": founder_otvetstvennyi.id,\n u\"type\": u\"person\"\n },\n u\"nominal_capital\": 1500.5,\n u\"share\": \"1.4\"\n }\n ],\n u\"selected_moderator\": {\n u\"_id\": founder_otvetstvennyi.id,\n u\"type\": u\"person\"\n },\n u\"obtain_way\": u\"founder\",\n u\"doc_obtain_founder\": {\n u\"type\": u\"person\",\n u\"_id\": founder_otvetstvennyi.id\n },\n u\"reg_responsible_founder\": {\n u\"type\": u\"person\",\n u\"_id\": founder_otvetstvennyi.id\n },\n u\"registration_way\": u\"some_founders\",\n u\"general_manager\": {\n u\"_id\": founder_otvetstvennyi.id\n },\n u\"job_main_code\": u\"92.31.1\",\n u\"job_code_array\": [u\"92.31.1\", u\"74.14\", u\"10.01.1\"],\n u\"tax_type\": UsnTaxType.UT_INCOME_MINUS_EXPENSE,\n u'general_manager_term': 20,\n u\"preimusch_pravo_priobreteniya_doli_time_span\": 60,\n u'necessary_votes_for_general_meeting_decisions': {\n u\"company_strategy\": NecessaryVotesEnum.NV_ALL,\n u\"articles_change\": NecessaryVotesEnum.NV_3_4,\n u\"executives_formation\": NecessaryVotesEnum.NV_2_3,\n u\"auditor_election\": NecessaryVotesEnum.NV_2_3,\n u\"annual_reports_approval\": NecessaryVotesEnum.NV_3_4,\n u\"profit_distribution\": NecessaryVotesEnum.NV_3_4,\n u\"internal_documents_approval\": NecessaryVotesEnum.NV_2_3,\n u\"obligations_emission\": NecessaryVotesEnum.NV_ALL,\n u\"audit_assignment\": NecessaryVotesEnum.NV_2_3,\n u\"large_deals_approval\": NecessaryVotesEnum.NV_3_4,\n u\"concern_deals_approval\": NecessaryVotesEnum.NV_2_3,\n u\"reorganization_or_liquidation\": NecessaryVotesEnum.NV_ALL,\n u\"liquidation_committee_assignment\": NecessaryVotesEnum.NV_2_3,\n u\"branch_establishment\": NecessaryVotesEnum.NV_3_4,\n u\"other_issues\": NecessaryVotesEnum.NV_2_3\n },\n u\"board_of_directors\": False,\n u\"pravo_otchuzhdeniya_type\": 5,\n u\"perehod_doli_k_naslednikam_soglasie\": True,\n u\"taxation_type\": u\"usn\",\n u\"region\": u\"Санкт-Петербург\"\n }\n\n with self.app.app_context():\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_EDITED,\n _documents=[],\n data={},\n _owner=self.user\n )\n sqldb.session.add(batch)\n sqldb.session.commit()\n batch_id = batch.id\n\n new_batch_db_object = DocumentBatchDbObject(\n data=doc_data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch_id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n\n result = self.test_client.post('/batch/finalise/', data={'batch_id': batch_id})\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n 
self.assertEqual(len(db_batch._documents), 1)\n\n @authorized()\n def test_eshn(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_ESHN]\n process_okvad()\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 2),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"index\": 190000,\n \"region\": RFRegionsEnum.RFR_LENINGRADSKAYA_REGION,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гатчина\",\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Радищева\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"26\",\n \"flat_type\": FlatTypeEnum.FLT_FLAT,\n \"flat\": \"80\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n })\n sqldb.session.add(founder)\n sqldb.session.commit()\n\n founder_otvetstvennyi = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Семен\",\n \"surname\": u\"Семенчук\",\n \"patronymic\": u\"Семейкин\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 2),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"index\": 190000,\n \"region\": RFRegionsEnum.RFR_LENINGRADSKAYA_REGION,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гатчина\",\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Радищева\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"26\",\n \"flat_type\": FlatTypeEnum.FLT_FLAT,\n \"flat\": \"80\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n sqldb.session.add(founder_otvetstvennyi)\n sqldb.session.commit()\n\n company_founder = CompanyDbObject(**{\n \"_owner\": self.user,\n \"ogrn\": \"1095543023135\",\n \"inn\": \"781108730780\",\n \"kpp\": \"999999999\",\n \"general_manager_caption\": u\"генеральный директор\",\n \"full_name\": u\"образовательное учреждение дополнительного образования детей специализированная детско-юношеская спортивная школа олимпийского резерва по боксу\",\n \"short_name\": u\"Бокс\",\n \"general_manager\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n \"address\": {\n \"index\": 190000,\n \"region\": RFRegionsEnum.RFR_LENINGRADSKAYA_REGION,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гатчина\",\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Радищева\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"26\",\n \"flat_type\": FlatTypeEnum.FLT_FLAT,\n \"flat\": \"80\",\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(company_founder)\n sqldb.session.commit()\n\n input = {\n u\"full_name\": u\"Общество с ограниченной ответственностью образовательное учреждение дополнительного образования детей специализированная детско-юношеская спортивная школа\",\n \"address\": {\n 'index': 199000,\n \"region\": RFRegionsEnum.RFR_LENINGRADSKAYA_REGION,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гатчина\",\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Радищева\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"26\",\n \"flat_type\": FlatTypeEnum.FLT_FLAT,\n \"flat\": \"80\",\n \"ifns\": 1234\n },\n \"address_type\": \"office_address\",\n \"starter_capital\": {\n \"currency\": \"rub\",\n \"value\": \"12312.234234\"\n },\n \"general_manager_caption\": u\"повелитель\",\n \"share_type\": \"percent\",\n \"founders\": [\n {\n \"founder\": {\n \"_id\": founder.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": 12312.22,\n \"share\": 85\n },\n {\n \"founder\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": 1500.5,\n \"share\": 15\n }\n ],\n \"general_manager\": {\n \"_id\": founder_otvetstvennyi.id\n },\n \"job_main_code\": u\"01.30\",\n \"job_code_array\": [u\"01.30\", u\"15.32\"],\n \"doc_obtain_person\": {\n \"type\": \"person\",\n \"_id\": founder_otvetstvennyi.id\n },\n \"doc_obtain_founder\": {\n \"type\": \"person\",\n \"_id\": founder_otvetstvennyi.id\n },\n \"obtain_way\": \"founder\",\n u\"tax_type\": UsnTaxType.UT_INCOME_MINUS_EXPENSE,\n u'general_manager_term': 20,\n u\"preimusch_pravo_priobreteniya_doli_time_span\": 60,\n u'necessary_votes_for_general_meeting_decisions': {\n u\"company_strategy\": NecessaryVotesEnum.NV_ALL,\n u\"articles_change\": NecessaryVotesEnum.NV_3_4,\n u\"executives_formation\": NecessaryVotesEnum.NV_2_3,\n u\"auditor_election\": NecessaryVotesEnum.NV_2_3,\n u\"annual_reports_approval\": NecessaryVotesEnum.NV_3_4,\n u\"profit_distribution\": NecessaryVotesEnum.NV_3_4,\n u\"internal_documents_approval\": NecessaryVotesEnum.NV_2_3,\n u\"obligations_emission\": NecessaryVotesEnum.NV_ALL,\n u\"audit_assignment\": NecessaryVotesEnum.NV_2_3,\n u\"large_deals_approval\": NecessaryVotesEnum.NV_3_4,\n u\"concern_deals_approval\": NecessaryVotesEnum.NV_2_3,\n u\"reorganization_or_liquidation\": NecessaryVotesEnum.NV_ALL,\n u\"liquidation_committee_assignment\": 
NecessaryVotesEnum.NV_2_3,\n u\"branch_establishment\": NecessaryVotesEnum.NV_3_4,\n u\"other_issues\": NecessaryVotesEnum.NV_2_3\n },\n \"board_of_directors\": False,\n \"selected_secretary\": {\n \"type\": \"person\",\n \"_id\": founder_otvetstvennyi.id\n },\n \"selected_moderator\": {\n \"type\": \"person\",\n \"_id\": founder_otvetstvennyi.id\n },\n \"pravo_otchuzhdeniya_type\": 5,\n \"short_name\": u\"АБВ\",\n \"perehod_doli_k_naslednikam_soglasie\": True,\n \"taxation_type\": \"eshn\",\n \"registration_way\": \"some_founders\",\n \"region\": u\"Санкт-Петербург\"\n }\n\n with self.app.app_context():\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW,\n _documents=[],\n data={},\n _owner=self.user\n )\n sqldb.session.add(batch)\n sqldb.session.commit()\n batch_id = batch.id\n\n new_batch_db_object = DocumentBatchDbObject(\n data=input,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch_id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n\n result = self.test_client.post('/batch/finalise/', data={'batch_id': batch_id})\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 1)\n self.assertTrue(not not db_batch._documents[0].file)\n\n @authorized()\n def test_contract(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_CONTRACT]\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокла\",\n \"surname\": u\"Поликарпова\",\n \"patronymic\": u\"Поликарповна\",\n \"inn\": \"781108730780\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"sex\": GenderEnum.G_FEMALE,\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"НовоПушкинскийСуперДлинноеНазваниеТакогоВообщеНеБывает\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n sqldb.session.add(founder)\n sqldb.session.commit()\n\n founder_otvetstvennyi = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Семен\",\n \"surname\": u\"Семенчук\",\n \"patronymic\": u\"Семейкин\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"НовоПушкинскийСуперДлинноеНазваниеТакогоВообщеНеБывает\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n })\n sqldb.session.add(founder_otvetstvennyi)\n sqldb.session.commit()\n\n company_founder = CompanyDbObject(**{\n \"_owner\": self.user,\n \"ogrn\": \"1095543023135\",\n \"inn\": \"781108730780\",\n \"kpp\": \"999999999\",\n \"general_manager_caption\": u\"генеральный директор\",\n \"incorporation_form\": IncorporationFormEnum.IF_LLC,\n \"full_name\": u\"Том и \\\"Джерри\\\"\",\n \"short_name\": u\"Т и \\\"Д\\\"\",\n \"general_manager\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Седова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(company_founder)\n sqldb.session.commit()\n\n input = {\n u\"full_name\": u\"ТестКреатеЛтдАдд1КомпаниНевДиректор\",\n \"address\": {\n 'index': 199000,\n \"region\": RFRegionsEnum.RFR_LENINGRADSKAYA_REGION,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гатчина\",\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Радищева\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"26\",\n \"flat_type\": FlatTypeEnum.FLT_FLAT,\n \"flat\": \"80\",\n \"ifns\": 1234\n },\n \"address_type\": \"office_address\",\n \"starter_capital\": {\n \"currency\": \"rub\",\n \"value\": \"12312.234234\"\n },\n \"general_manager_caption\": u\"повелитель\",\n \"share_type\": \"percent\",\n \"founders\": [\n {\n \"founder\": {\n \"_id\": company_founder.id,\n \"type\": \"company\"\n },\n \"nominal_capital\": 12312.22,\n \"share\": 85\n },\n {\n \"founder\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": 1500.5,\n \"share\": 15\n }\n ],\n \"general_manager\": {\n \"_id\": founder_otvetstvennyi.id\n },\n 
\"job_main_code\": u\"92.31.1\",\n \"job_code_array\": [u\"92.31.1\", u\"74.14\", u\"10.01.1\"],\n \"doc_obtain_person\": {\n \"type\": \"person\",\n \"_id\": founder_otvetstvennyi.id\n },\n \"obtain_way\": \"founder\",\n u\"tax_type\": UsnTaxType.UT_INCOME_MINUS_EXPENSE,\n u'general_manager_term': 20,\n u\"preimusch_pravo_priobreteniya_doli_time_span\": 60,\n u'necessary_votes_for_general_meeting_decisions': {\n u\"company_strategy\": NecessaryVotesEnum.NV_ALL,\n u\"articles_change\": NecessaryVotesEnum.NV_3_4,\n u\"executives_formation\": NecessaryVotesEnum.NV_2_3,\n u\"auditor_election\": NecessaryVotesEnum.NV_2_3,\n u\"annual_reports_approval\": NecessaryVotesEnum.NV_3_4,\n u\"profit_distribution\": NecessaryVotesEnum.NV_3_4,\n u\"internal_documents_approval\": NecessaryVotesEnum.NV_2_3,\n u\"obligations_emission\": NecessaryVotesEnum.NV_ALL,\n u\"audit_assignment\": NecessaryVotesEnum.NV_2_3,\n u\"large_deals_approval\": NecessaryVotesEnum.NV_3_4,\n u\"concern_deals_approval\": NecessaryVotesEnum.NV_2_3,\n u\"reorganization_or_liquidation\": NecessaryVotesEnum.NV_ALL,\n u\"liquidation_committee_assignment\": NecessaryVotesEnum.NV_2_3,\n u\"branch_establishment\": NecessaryVotesEnum.NV_3_4,\n u\"other_issues\": NecessaryVotesEnum.NV_2_3\n },\n \"board_of_directors\": False,\n \"selected_secretary\": {\n \"type\": \"person\",\n \"_id\": founder_otvetstvennyi.id\n },\n \"selected_moderator\": {\n \"type\": \"person\",\n \"_id\": founder_otvetstvennyi.id\n },\n \"pravo_otchuzhdeniya_type\": 5,\n \"short_name\": u\"ТестКреатеЛтдАдд1КомпаниНевДиректор\",\n \"perehod_doli_k_naslednikam_soglasie\": True,\n \"taxation_type\": \"eshn\",\n \"registration_way\": \"some_founders\",\n \"region\": u\"Санкт-Петербург\"\n }\n\n with self.app.app_context():\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW,\n _documents=[],\n data={},\n _owner=self.user\n )\n sqldb.session.add(batch)\n sqldb.session.commit()\n batch_id = batch.id\n\n new_batch_db_object = DocumentBatchDbObject(\n data=input,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch_id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n\n result = self.test_client.post('/batch/finalise/', data={'batch_id': batch_id})\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 1)\n\n @authorized()\n def test_kvitanciya_new(self):\n with self.app.app_context():\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_REGISTRATION_FEE_INVOICE]\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"sex\": \"male\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 2),\n \"issue_depart\": u\"УМВД Гадюкинского 
района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Тамбасова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"30\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"20\",\n #\"okato\": \"40298562000\",\n \"ifns\": \"7804\"\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n })\n sqldb.session.add(founder)\n sqldb.session.commit()\n\n company_founder = CompanyDbObject(**{\n \"_owner\": self.user,\n \"ogrn\": \"1095543023135\",\n \"inn\": \"781108730780\",\n \"kpp\": \"999999999\",\n \"general_manager_caption\": u\"генеральный директор\",\n \"incorporation_form\": IncorporationFormEnum.IF_LLC,\n \"full_name\": u\"образовательное учреждение дополнительного образования детей специализированная детско-юношеская спортивная школа олимпийского резерва по боксу\",\n \"short_name\": u\"Бокс\",\n \"general_manager\": {\n \"_id\": founder.id,\n \"type\": \"person\"\n },\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Седова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_FLAT,\n \"flat\": \"2\",\n \"okato\": \"40298562000\"\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(company_founder)\n sqldb.session.commit()\n\n doc_data = {\n u\"full_name\": u\"Общество с ограниченной ответственностью\",\n \"address\": {\n 'index': 199000,\n \"region\": RFRegionsEnum.RFR_MOSCOW,\n \"street_type\": StreetTypeEnum.STT_HIGHWAY,\n \"street\": u\"Варшавское\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"21\",\n \"flat_type\": FlatTypeEnum.FLT_FLAT,\n \"flat\": \"3\"\n },\n \"address_type\": \"founder_registration_address\",\n \"starter_capital\": {\n \"currency\": \"rub\",\n \"value\": \"12312.234234\"\n },\n \"general_manager_caption\": u\"повелитель\",\n \"share_type\": \"percent\",\n \"founders\": [\n {\n \"founder\": {\n \"_id\": founder.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": 12312.22,\n \"share\": 85\n },\n {\n \"founder\": {\n \"_id\": founder.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": 1500.5,\n \"share\": 15\n }\n ],\n \"general_manager\": {\n \"_id\": founder.id\n },\n \"address_person\": {\n \"_id\": founder.id\n },\n \"job_main_code\": u\"92.31.1\",\n \"job_code_array\": [u\"92.31.1\", u\"74.14\", u\"10.01.1\"],\n \"doc_obtain_person\": {\n \"type\": \"person\",\n \"_id\": founder.id\n },\n \"obtain_way\": \"mail\",\n u\"tax_type\": UsnTaxType.UT_INCOME_MINUS_EXPENSE,\n u'general_manager_term': 20,\n u\"preimusch_pravo_priobreteniya_doli_time_span\": 60,\n u'necessary_votes_for_general_meeting_decisions': {\n u\"company_strategy\": NecessaryVotesEnum.NV_ALL,\n u\"articles_change\": NecessaryVotesEnum.NV_3_4,\n u\"executives_formation\": NecessaryVotesEnum.NV_2_3,\n u\"auditor_election\": NecessaryVotesEnum.NV_2_3,\n u\"annual_reports_approval\": NecessaryVotesEnum.NV_3_4,\n u\"profit_distribution\": NecessaryVotesEnum.NV_3_4,\n u\"internal_documents_approval\": NecessaryVotesEnum.NV_2_3,\n u\"obligations_emission\": NecessaryVotesEnum.NV_ALL,\n u\"audit_assignment\": NecessaryVotesEnum.NV_2_3,\n u\"large_deals_approval\": 
NecessaryVotesEnum.NV_3_4,\n u\"concern_deals_approval\": NecessaryVotesEnum.NV_2_3,\n u\"reorganization_or_liquidation\": NecessaryVotesEnum.NV_ALL,\n u\"liquidation_committee_assignment\": NecessaryVotesEnum.NV_2_3,\n u\"branch_establishment\": NecessaryVotesEnum.NV_3_4,\n u\"other_issues\": NecessaryVotesEnum.NV_2_3\n },\n \"board_of_directors\": False,\n \"selected_secretary\": {\n \"type\": \"person\",\n \"_id\": founder.id\n },\n \"selected_moderator\": {\n \"type\": \"person\",\n \"_id\": founder.id\n },\n \"pravo_otchuzhdeniya_type\": 5,\n \"short_name\": u\"АБВ\",\n \"perehod_doli_k_naslednikam_soglasie\": True,\n \"taxation_type\": \"eshn\",\n \"registration_way\": \"responsible_person\",\n \"region\": u\"Санкт-Петербург\"\n }\n\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n _documents=[],\n data=doc_data,\n _owner=self.user,\n status=BatchStatusEnum.BS_NEW\n )\n sqldb.session.add(batch)\n sqldb.session.commit()\n batch_id = batch.id\n\n new_batch_db_object = DocumentBatchDbObject(\n data=doc_data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch_id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n\n result = self.test_client.post('/batch/finalise/', data={'batch_id': batch_id})\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 1)\n self.assertTrue(not not db_batch._documents[0].file)\n\n # \"\"\"addrFl\t117105,77,,,,ВАРШАВСКОЕ Ш,17,,25\n #addrFl_ifns\t7726\n #addrFl_okatom\t45296561000\n #addrUl\t117105,77,,,,ВАРШАВСКОЕ Ш,17,,24\n #addrUl_ifns\t7726\n #addrUl_okatom\t45296561000\n #bank\n #c\n #fam\tДолгов\n #gp\t11|18210807010011000110|13|ul|4000\n #inn\t772900273375\n #nam\tЦентр\n #otch\tИванович\n #payKind\ton\n #region\n #sum\t4000\n # \"\"\"\n\n @authorized()\n def test_doverennost(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = \\\n [DocumentTypeEnum.DT_DOVERENNOST, DocumentTypeEnum.DT_DOVERENNOST_OBTAIN]\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Хомяк\",\n \"surname\": u\"Серый\",\n \"sex\": \"male\",\n \"patronymic\": u\"\",\n \"inn\": \"781108730780\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 2),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"НовоПушкинскийСуперДлинноеНазваниеТакогоВообщеНеБывает\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\"\n },\n })\n sqldb.session.add(founder)\n sqldb.session.commit()\n\n founder2 = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Хомяк\",\n \"surname\": u\"Серый\",\n \"sex\": \"male\",\n \"patronymic\": u\"\",\n \"inn\": \"781108730780\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 2),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"НовоПушкинскийСуперДлинноеНазваниеТакогоВообщеНеБывает\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\"\n },\n })\n sqldb.session.add(founder2)\n sqldb.session.commit()\n\n uchreditel_fis_lico_person = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"sex\": \"male\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 2),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"person_type\": PersonTypeEnum.PT_RUSSIAN,\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"НовоПушкинскийСуперДлинноеНазваниеТакогоВообщеНеБывает\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_address\": u\"г. Санкт-Петербург, д. 
Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n })\n sqldb.session.add(uchreditel_fis_lico_person)\n sqldb.session.commit()\n\n uchreditel_rus_jur_lico_company = CompanyDbObject(**{\n \"_owner\": self.user,\n \"ogrn\": \"1095543023135\",\n \"inn\": \"781108730780\",\n \"full_name\": u\"Протон\",\n \"short_name\": u\"Про\",\n \"kpp\": \"999999999\",\n \"company_type\": CompanyTypeEnum.CT_RUSSIAN,\n \"general_manager\": {\n \"type\": \"person\",\n \"_id\": uchreditel_fis_lico_person.id\n },\n \"general_manager_caption\": u\"директор\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_IRKUTSKAYA_REGION,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"778899\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n \"ifns\": 1234\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(uchreditel_rus_jur_lico_company)\n sqldb.session.commit()\n\n with self.app.app_context():\n data = {\n u\"full_name\": u\"образовательное учреждение дополнительного образования детей специализированная детско-юношеская спортивная школа олимпийского резерва по боксу\",\n u\"short_name\": u\"Бокс\",\n u\"address\": {\n \"street_type\": u\"ул\",\n \"index\": 191186,\n \"house\": u\"4\",\n \"region\": u\"Санкт-Петербург\",\n \"flat\": u\"12\",\n \"street\": u\"Большая Морская\",\n \"address_string\": u\"г Санкт-Петербург, ул Большая Морская, д 4, кв 12\",\n \"flat_type\": u\"кв\",\n \"house_type\": u\"д\",\n \"ifns\": 7841\n },\n u\"address_type\": \"office_address\",\n u\"starter_capital\": {\n \"currency\": \"rub\",\n \"value\": \"12312.234234\"\n },\n u\"general_manager_caption\": u\"повелитель\",\n u\"share_type\": \"percent\",\n u\"founders\": [\n {\n \"founder\": {\n \"_id\": uchreditel_rus_jur_lico_company.id,\n \"type\": \"company\"\n },\n \"nominal_capital\": 12312.22,\n \"share\": 85\n },\n {\n \"founder\": {\n \"_id\": uchreditel_fis_lico_person.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": 1500.5,\n \"share\": 15\n }\n ],\n u\"general_manager\": {\n \"_id\": uchreditel_fis_lico_person.id,\n \"type\": \"person\"\n },\n u\"job_main_code\": u\"92.31.1\",\n u\"job_code_array\": [u\"92.31.1\", u\"74.14\", u\"10.01.1\"],\n u\"doc_obtain_person\": {\n \"type\": \"person\",\n \"_id\": founder.id\n },\n u\"obtain_way\": DocumentDeliveryTypeStrEnum.DDT_ISSUE_TO_THE_APPLICANT_OR_AGENT,\n u'use_foreign_company_name': False,\n u'use_national_language_company_name': False,\n u\"tax_type\": UsnTaxType.UT_INCOME_MINUS_EXPENSE,\n u'general_manager_term': 20,\n u\"preimusch_pravo_priobreteniya_doli_time_span\": 60,\n u'necessary_votes_for_general_meeting_decisions': {\n u\"company_strategy\": NecessaryVotesEnum.NV_ALL,\n u\"articles_change\": NecessaryVotesEnum.NV_3_4,\n u\"executives_formation\": NecessaryVotesEnum.NV_2_3,\n u\"auditor_election\": NecessaryVotesEnum.NV_2_3,\n u\"annual_reports_approval\": NecessaryVotesEnum.NV_3_4,\n u\"profit_distribution\": NecessaryVotesEnum.NV_3_4,\n u\"internal_documents_approval\": NecessaryVotesEnum.NV_2_3,\n u\"obligations_emission\": NecessaryVotesEnum.NV_ALL,\n u\"audit_assignment\": NecessaryVotesEnum.NV_2_3,\n u\"large_deals_approval\": NecessaryVotesEnum.NV_3_4,\n u\"concern_deals_approval\": NecessaryVotesEnum.NV_2_3,\n u\"reorganization_or_liquidation\": NecessaryVotesEnum.NV_ALL,\n u\"liquidation_committee_assignment\": NecessaryVotesEnum.NV_2_3,\n 
u\"branch_establishment\": NecessaryVotesEnum.NV_3_4,\n u\"other_issues\": NecessaryVotesEnum.NV_2_3\n },\n u\"board_of_directors\": False,\n u\"selected_secretary\": {\n \"type\": \"person\",\n \"_id\": uchreditel_fis_lico_person.id\n },\n u\"selected_moderator\": {\n \"type\": \"person\",\n \"_id\": uchreditel_fis_lico_person.id\n },\n u\"pravo_otchuzhdeniya_type\": \"5\",\n u\"perehod_doli_k_naslednikam_soglasie\": True,\n u\"taxation_type\": \"usn\",\n u\"registration_way\": RegistrationWay.RW_RESPONSIBLE_PERSON,\n u\"region\": u\"Санкт-Петербург\",\n \"reg_responsible_person\": {\n \"type\": \"person\",\n \"_id\": founder2.id\n }\n }\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW,\n _documents=[],\n data={},\n _owner=self.user\n )\n sqldb.session.add(batch)\n sqldb.session.commit()\n batch_id = batch.id\n\n new_batch_db_object = DocumentBatchDbObject(\n data=data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch_id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n\n result = self.test_client.post('/batch/finalise/', data={'batch_id': batch_id})\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 2)\n self.assertTrue(not not db_batch._documents[0].file)\n self.assertTrue(not not db_batch._documents[1].file)\n\n @authorized()\n def test_render_soglasie_sobstvennikov(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_SOGLASIE_SOBSTVENNIKOV]\n\n uchreditel_fis_lico_person = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"sex\": \"male\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 2),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"person_type\": PersonTypeEnum.PT_RUSSIAN,\n u\"address\": {\n \"street_type\": u\"ул\",\n \"index\": 191186,\n \"house\": u\"4\",\n \"region\": u\"Санкт-Петербург\",\n \"flat\": u\"12\",\n \"street\": u\"Большая Морская\",\n \"address_string\": u\"г Санкт-Петербург, ул Большая Морская, д 4, кв 12\",\n \"flat_type\": u\"кв\",\n \"house_type\": u\"д\",\n \"ifns\": 7841\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n sqldb.session.add(uchreditel_fis_lico_person)\n sqldb.session.commit()\n\n uchreditel_rus_jur_lico_company = CompanyDbObject(**{\n \"_owner\": self.user,\n \"ogrn\": \"1095543023135\",\n \"inn\": \"781108730780\",\n \"full_name\": u\"Протон\",\n \"short_name\": u\"Про\",\n \"kpp\": \"999999999\",\n \"company_type\": CompanyTypeEnum.CT_RUSSIAN,\n \"general_manager\": {\n \"_id\": uchreditel_fis_lico_person.id,\n \"type\": \"person\"\n },\n \"general_manager_caption\": u\"директор\",\n u\"address\": {\n \"street_type\": u\"ул\",\n \"index\": 191186,\n \"house\": u\"4\",\n \"region\": u\"Санкт-Петербург\",\n \"flat\": u\"12\",\n \"street\": u\"Большая Морская\",\n \"address_string\": u\"г Санкт-Петербург, ул Большая Морская, д 4, кв 12\",\n \"flat_type\": u\"кв\",\n \"house_type\": u\"д\",\n \"ifns\": 7841\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(uchreditel_rus_jur_lico_company)\n sqldb.session.commit()\n\n with self.app.app_context():\n data = {\n u\"full_name\": u\"образовательное учреждение дополнительного образования детей специализированная детско-юношеская спортивная школа олимпийского резерва по боксу\",\n u\"short_name\": u\"Бокс\",\n u\"address\": {\n \"street_type\": u\"ул\",\n \"index\": 191186,\n \"house\": u\"4\",\n \"region\": u\"Санкт-Петербург\",\n \"flat\": u\"12\",\n \"street\": u\"Большая Морская\",\n \"address_string\": u\"г Санкт-Петербург, ул Большая Морская, д 4, кв 12\",\n \"flat_type\": u\"кв\",\n \"house_type\": u\"д\",\n \"ifns\": 7841\n },\n u\"address_type\": AddressType.AT_REAL_ESTATE_ADDRESS,\n u\"starter_capital\": {\n \"currency\": \"rub\",\n \"value\": \"12312.234234\"\n },\n u\"general_manager_caption\": u\"повелитель\",\n u\"share_type\": \"percent\",\n u\"founders\": [\n {\n \"founder\": {\n \"_id\": uchreditel_rus_jur_lico_company.id,\n \"type\": \"company\"\n },\n \"nominal_capital\": 12312.22,\n \"share\": 85\n },\n {\n \"founder\": {\n \"_id\": uchreditel_fis_lico_person.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": 1500.5,\n \"share\": 15\n }\n ],\n u\"general_manager\": {\n \"_id\": uchreditel_fis_lico_person.id,\n \"type\": \"person\"\n },\n u\"job_main_code\": u\"92.31.1\",\n u\"job_code_array\": [u\"92.31.1\", u\"74.14\", u\"10.01.1\"],\n u\"doc_obtain_person\": {\n \"type\": \"person\",\n \"_id\": uchreditel_fis_lico_person.id\n },\n u\"obtain_way\": DocumentDeliveryTypeStrEnum.DDT_ISSUE_TO_THE_APPLICANT_OR_AGENT,\n u'use_foreign_company_name': False,\n u'use_national_language_company_name': False,\n u\"tax_type\": UsnTaxType.UT_INCOME_MINUS_EXPENSE,\n u'general_manager_term': 20,\n u\"preimusch_pravo_priobreteniya_doli_time_span\": 60,\n u'necessary_votes_for_general_meeting_decisions': {\n u\"company_strategy\": NecessaryVotesEnum.NV_ALL,\n u\"articles_change\": NecessaryVotesEnum.NV_3_4,\n u\"executives_formation\": NecessaryVotesEnum.NV_2_3,\n u\"auditor_election\": NecessaryVotesEnum.NV_2_3,\n u\"annual_reports_approval\": NecessaryVotesEnum.NV_3_4,\n u\"profit_distribution\": NecessaryVotesEnum.NV_3_4,\n u\"internal_documents_approval\": NecessaryVotesEnum.NV_2_3,\n u\"obligations_emission\": NecessaryVotesEnum.NV_ALL,\n u\"audit_assignment\": NecessaryVotesEnum.NV_2_3,\n u\"large_deals_approval\": NecessaryVotesEnum.NV_3_4,\n u\"concern_deals_approval\": NecessaryVotesEnum.NV_2_3,\n u\"reorganization_or_liquidation\": NecessaryVotesEnum.NV_ALL,\n u\"liquidation_committee_assignment\": NecessaryVotesEnum.NV_2_3,\n u\"branch_establishment\": NecessaryVotesEnum.NV_3_4,\n u\"other_issues\": 
NecessaryVotesEnum.NV_2_3\n },\n u\"board_of_directors\": False,\n u\"selected_secretary\": {\n \"type\": \"person\",\n \"_id\": uchreditel_fis_lico_person.id\n },\n u\"selected_moderator\": {\n \"type\": \"person\",\n \"_id\": uchreditel_fis_lico_person.id\n },\n u\"pravo_otchuzhdeniya_type\": \"5\",\n u\"perehod_doli_k_naslednikam_soglasie\": True,\n u\"taxation_type\": \"usn\",\n u\"registration_way\": RegistrationWay.RW_RESPONSIBLE_PERSON,\n u\"region\": u\"Санкт-Петербург\",\n u\"reg_responsible_person\": {\n \"_id\": uchreditel_fis_lico_person.id\n },\n u\"address_other_owner\": True,\n u\"address_person\": {\n \"_id\": uchreditel_fis_lico_person.id\n }\n }\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_EDITED,\n data={},\n _owner=self.user\n )\n sqldb.session.add(batch)\n sqldb.session.commit()\n batch_id = batch.id\n\n new_batch_db_object = DocumentBatchDbObject(\n data=data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch_id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n\n result = self.test_client.post('/batch/finalise/', data={'batch_id': batch_id})\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 1)\n self.assertTrue(not not db_batch._documents[0].file)\n\n @authorized()\n def test_render_garant_letter_arenda(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_GARANT_LETTER_ARENDA]\n\n uchreditel_fis_lico_person = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"sex\": \"male\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 2),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"person_type\": PersonTypeEnum.PT_RUSSIAN,\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"НовоПушкинскийСуперДлинноеНазваниеТакогоВообщеНеБывает\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n sqldb.session.add(uchreditel_fis_lico_person)\n sqldb.session.commit()\n\n uchreditel_rus_jur_lico_company = CompanyDbObject(**{\n \"_owner\": self.user,\n \"ogrn\": \"1095543023135\",\n \"inn\": \"781108730780\",\n \"full_name\": u\"Протон\",\n \"short_name\": u\"Про\",\n \"kpp\": \"999999999\",\n \"company_type\": CompanyTypeEnum.CT_RUSSIAN,\n \"general_manager\": {\n \"_id\": uchreditel_fis_lico_person.id,\n \"type\": \"person\"\n },\n \"general_manager_caption\": u\"директор\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_IRKUTSKAYA_REGION,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"778899\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n \"ifns\": 1234\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(uchreditel_rus_jur_lico_company)\n sqldb.session.commit()\n\n with self.app.app_context():\n data = {\n u\"full_name\": u\"образовательное учреждение дополнительного образования детей специализированная детско-юношеская спортивная школа олимпийского резерва по боксу\",\n u\"short_name\": u\"Бокс\",\n u\"address\": {\n \"region\": RFRegionsEnum.RFR_IRKUTSKAYA_REGION,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"778899\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n \"ifns\": 1234\n },\n u\"address_type\": \"office_address\",\n u\"starter_capital\": {\n \"currency\": \"rub\",\n \"value\": \"12312.234234\"\n },\n u\"general_manager_caption\": u\"повелитель\",\n u\"share_type\": \"percent\",\n u\"founders\": [\n {\n \"founder\": {\n \"_id\": uchreditel_rus_jur_lico_company.id,\n \"type\": \"company\"\n },\n \"nominal_capital\": 12312.22,\n \"share\": 85\n },\n {\n \"founder\": {\n \"_id\": uchreditel_fis_lico_person.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": 1500.5,\n \"share\": 15\n }\n ],\n u\"general_manager\": {\n \"_id\": uchreditel_fis_lico_person.id,\n \"type\": \"person\"\n },\n u\"job_main_code\": u\"92.31.1\",\n u\"job_code_array\": [u\"92.31.1\", u\"74.14\", u\"10.01.1\"],\n u\"doc_obtain_person\": {\n \"type\": \"person\",\n \"_id\": uchreditel_fis_lico_person.id\n },\n u\"obtain_way\": \"founder\",\n u'use_foreign_company_name': False,\n u'use_national_language_company_name': False,\n u\"tax_type\": UsnTaxType.UT_INCOME_MINUS_EXPENSE,\n u'general_manager_term': 20,\n u\"preimusch_pravo_priobreteniya_doli_time_span\": 60,\n u'necessary_votes_for_general_meeting_decisions': {\n u\"company_strategy\": NecessaryVotesEnum.NV_ALL,\n u\"articles_change\": NecessaryVotesEnum.NV_3_4,\n u\"executives_formation\": NecessaryVotesEnum.NV_2_3,\n u\"auditor_election\": NecessaryVotesEnum.NV_2_3,\n u\"annual_reports_approval\": NecessaryVotesEnum.NV_3_4,\n u\"profit_distribution\": NecessaryVotesEnum.NV_3_4,\n u\"internal_documents_approval\": NecessaryVotesEnum.NV_2_3,\n u\"obligations_emission\": NecessaryVotesEnum.NV_ALL,\n u\"audit_assignment\": NecessaryVotesEnum.NV_2_3,\n u\"large_deals_approval\": NecessaryVotesEnum.NV_3_4,\n u\"concern_deals_approval\": NecessaryVotesEnum.NV_2_3,\n u\"reorganization_or_liquidation\": NecessaryVotesEnum.NV_ALL,\n u\"liquidation_committee_assignment\": NecessaryVotesEnum.NV_2_3,\n 
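# Editorial note (assumption, not taken from the enum source): NV_ALL appears\n                # to mean a unanimous vote, NV_3_4 a three-quarters majority and NV_2_3 a\n                # two-thirds majority of participant votes on the given question.\n                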
u\"branch_establishment\": NecessaryVotesEnum.NV_3_4,\n u\"other_issues\": NecessaryVotesEnum.NV_2_3\n },\n u\"board_of_directors\": False,\n u\"selected_secretary\": {\n \"type\": \"person\",\n \"_id\": uchreditel_fis_lico_person.id\n },\n u\"selected_moderator\": {\n \"type\": \"person\",\n \"_id\": uchreditel_fis_lico_person.id\n },\n u\"pravo_otchuzhdeniya_type\": 5,\n u\"perehod_doli_k_naslednikam_soglasie\": True,\n u\"taxation_type\": \"usn\",\n u\"registration_way\": \"some_founders\",\n u\"region\": u\"Санкт-Петербург\"\n }\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW,\n _documents=[],\n data={},\n _owner=self.user\n )\n sqldb.session.add(batch)\n sqldb.session.commit()\n batch_id = batch.id\n\n new_batch_db_object = DocumentBatchDbObject(\n data=data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch_id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n\n result = self.test_client.post('/batch/finalise/', data={'batch_id': batch_id})\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 1)\n self.assertTrue(not not db_batch._documents[0].file)\n\n @authorized()\n def test_render_garant_letter_subarenda(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_GARANT_LETTER_SUBARENDA]\n\n uchreditel_fis_lico_person = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"sex\": \"male\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 2),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"person_type\": PersonTypeEnum.PT_RUSSIAN,\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"НовоПушкинскийСуперДлинноеНазваниеТакогоВообщеНеБывает\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n sqldb.session.add(uchreditel_fis_lico_person)\n sqldb.session.commit()\n\n uchreditel_rus_jur_lico_company = CompanyDbObject(**{\n \"_owner\": self.user,\n \"ogrn\": \"1095543023135\",\n \"inn\": \"781108730780\",\n \"full_name\": u\"Протон\",\n \"short_name\": u\"Про\",\n \"kpp\": \"999999999\",\n \"company_type\": CompanyTypeEnum.CT_RUSSIAN,\n \"general_manager\": {\n \"_id\": uchreditel_fis_lico_person.id,\n \"type\": \"person\"\n },\n \"general_manager_caption\": u\"директор\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_IRKUTSKAYA_REGION,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"778899\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n \"ifns\": 1234\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(uchreditel_rus_jur_lico_company)\n sqldb.session.commit()\n\n with self.app.app_context():\n data = {\n u\"full_name\": u\"образовательное учреждение дополнительного образования детей специализированная детско-юношеская спортивная школа олимпийского резерва по боксу\",\n u\"short_name\": u\"Бокс\",\n u\"address\": {\n \"street_type\": u\"ул\",\n \"index\": 191186,\n \"house\": u\"4\",\n \"region\": u\"Санкт-Петербург\",\n \"flat\": u\"12\",\n \"street\": u\"Большая Морская\",\n \"address_string\": u\"г Санкт-Петербург, ул Большая Морская, д 4, кв 12\",\n \"flat_type\": u\"кв\",\n \"house_type\": u\"д\",\n \"ifns\": 7841\n },\n u\"address_type\": \"office_address\",\n u\"starter_capital\": {\n \"currency\": \"rub\",\n \"value\": \"12312.234234\"\n },\n u\"general_manager_caption\": u\"повелитель\",\n u\"share_type\": \"percent\",\n u\"founders\": [\n {\n \"founder\": {\n \"_id\": uchreditel_rus_jur_lico_company.id,\n \"type\": \"company\"\n },\n \"nominal_capital\": 12312.22,\n \"share\": 85\n },\n {\n \"founder\": {\n \"_id\": uchreditel_fis_lico_person.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": 1500.5,\n \"share\": 15\n }\n ],\n u\"general_manager\": {\n \"_id\": uchreditel_fis_lico_person.id,\n \"type\": \"person\"\n },\n u\"job_main_code\": u\"92.31.1\",\n u\"job_code_array\": [u\"92.31.1\", u\"74.14\", u\"10.01.1\"],\n u\"doc_obtain_person\": {\n \"type\": \"person\",\n \"_id\": uchreditel_fis_lico_person.id\n },\n u\"obtain_way\": \"founder\",\n u'use_foreign_company_name': False,\n u'use_national_language_company_name': False,\n u\"tax_type\": UsnTaxType.UT_INCOME_MINUS_EXPENSE,\n u'general_manager_term': 20,\n u\"preimusch_pravo_priobreteniya_doli_time_span\": 60,\n u'necessary_votes_for_general_meeting_decisions': {\n u\"company_strategy\": NecessaryVotesEnum.NV_ALL,\n u\"articles_change\": NecessaryVotesEnum.NV_3_4,\n u\"executives_formation\": NecessaryVotesEnum.NV_2_3,\n u\"auditor_election\": NecessaryVotesEnum.NV_2_3,\n u\"annual_reports_approval\": NecessaryVotesEnum.NV_3_4,\n u\"profit_distribution\": NecessaryVotesEnum.NV_3_4,\n u\"internal_documents_approval\": NecessaryVotesEnum.NV_2_3,\n u\"obligations_emission\": NecessaryVotesEnum.NV_ALL,\n u\"audit_assignment\": NecessaryVotesEnum.NV_2_3,\n u\"large_deals_approval\": NecessaryVotesEnum.NV_3_4,\n u\"concern_deals_approval\": NecessaryVotesEnum.NV_2_3,\n u\"reorganization_or_liquidation\": NecessaryVotesEnum.NV_ALL,\n u\"liquidation_committee_assignment\": NecessaryVotesEnum.NV_2_3,\n u\"branch_establishment\": NecessaryVotesEnum.NV_3_4,\n u\"other_issues\": 
NecessaryVotesEnum.NV_2_3\n },\n u\"board_of_directors\": False,\n u\"selected_secretary\": {\n \"type\": \"person\",\n \"_id\": uchreditel_fis_lico_person.id\n },\n u\"selected_moderator\": {\n \"type\": \"person\",\n \"_id\": uchreditel_fis_lico_person.id\n },\n u\"pravo_otchuzhdeniya_type\": 5,\n u\"perehod_doli_k_naslednikam_soglasie\": True,\n u\"taxation_type\": \"usn\",\n u\"registration_way\": \"some_founders\",\n u\"region\": u\"Санкт-Петербург\"\n }\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW,\n _documents=[],\n data={},\n _owner=self.user\n )\n sqldb.session.add(batch)\n sqldb.session.commit()\n batch_id = batch.id\n\n new_batch_db_object = DocumentBatchDbObject(\n data=data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch_id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n\n result = self.test_client.post('/batch/finalise/', data={'batch_id': batch_id})\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 1)\n self.assertTrue(not not db_batch._documents[0].file)\n\n @authorized()\n def test_send_please_finalise_email(self):\n # Триггер: в 9 утра местного времени на следующий день после последнего /update/, но не ранее чем через 24 часа.\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_CONTRACT]\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокла\",\n \"surname\": u\"Поликарпова\",\n \"inn\": \"781108730780\",\n \"birthdate\": datetime.utcnow() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"sex\": GenderEnum.G_FEMALE,\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\"\n },\n \"phone\": \"+79210001122\"\n })\n sqldb.session.add(founder)\n sqldb.session.commit()\n\n founder_otvetstvennyi = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Семен\",\n \"surname\": u\"Семенчук\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.utcnow() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\"\n },\n \"phone\": \"+79210001122\"\n })\n sqldb.session.add(founder_otvetstvennyi)\n 
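# Assumed SQLAlchemy semantics: the commit below flushes the fixture row so\n        # its primary key is available before it is referenced in the batch data.\n        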
sqldb.session.commit()\n\n company_founder = CompanyDbObject(**{\n \"_owner\": self.user,\n \"ogrn\": \"1095543023135\",\n \"inn\": \"781108730780\",\n \"kpp\": \"999999999\",\n \"general_manager_caption\": u\"генеральный директор\",\n \"incorporation_form\": IncorporationFormEnum.IF_LLC,\n \"full_name\": u\"Том и \\\"Джерри\\\"\",\n \"short_name\": u\"Т и \\\"Д\\\"\",\n \"general_manager\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 123131,\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(company_founder)\n sqldb.session.commit()\n\n input = {\n u\"full_name\": u\"ТестКреатеЛтдАдд1КомпаниНевДиректор\",\n \"address\": {\n 'index': 199000,\n \"region\": RFRegionsEnum.RFR_LENINGRADSKAYA_REGION,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гатчина\",\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Радищева\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"26\",\n \"flat_type\": FlatTypeEnum.FLT_FLAT,\n \"flat\": \"80\",\n \"ifns\": 1234\n },\n \"address_type\": \"office_address\",\n \"starter_capital\": {\n \"currency\": \"rub\",\n \"value\": \"12312.234234\"\n },\n \"general_manager_caption\": u\"повелитель\",\n \"share_type\": \"percent\",\n \"founders\": [\n {\n \"founder\": {\n \"_id\": company_founder.id,\n \"type\": \"company\"\n },\n \"nominal_capital\": 12312.22,\n \"share\": 85\n },\n {\n \"founder\": {\n \"_id\": founder_otvetstvennyi.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": 1500.5,\n \"share\": 15\n }\n ],\n \"general_manager\": {\n \"_id\": founder_otvetstvennyi.id\n },\n \"job_main_code\": u\"92.31.1\",\n \"job_code_array\": [u\"92.31.1\", u\"74.14\", u\"10.01.1\"],\n \"doc_obtain_person\": {\n \"type\": \"person\",\n \"_id\": founder_otvetstvennyi.id\n },\n \"obtain_way\": \"founder\",\n u\"tax_type\": UsnTaxType.UT_INCOME_MINUS_EXPENSE,\n u'general_manager_term': 20,\n u\"preimusch_pravo_priobreteniya_doli_time_span\": 60,\n u'necessary_votes_for_general_meeting_decisions': NECESSARY_VOTES_FOR_GENERAL_MEETING_DECISIONS,\n \"board_of_directors\": False,\n \"selected_secretary\": {\n \"type\": \"person\",\n \"_id\": founder_otvetstvennyi.id\n },\n \"selected_moderator\": {\n \"type\": \"person\",\n \"_id\": founder_otvetstvennyi.id\n },\n \"pravo_otchuzhdeniya_type\": 5,\n \"short_name\": u\"ТестКреатеЛтдАдд1КомпаниНевДиректор\",\n \"perehod_doli_k_naslednikam_soglasie\": True,\n \"taxation_type\": \"eshn\",\n \"registration_way\": \"some_founders\",\n \"region\": u\"Санкт-Петербург\"\n }\n\n with self.app.app_context():\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW,\n _documents=[],\n data={},\n _owner=self.user\n )\n sqldb.session.add(batch)\n sqldb.session.commit()\n batch_id = batch.id\n\n new_batch_db_object = DocumentBatchDbObject(\n data=input,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch_id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n\n @authorized()\n 
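# Editorial sketch (illustrative only) of the checkOrder signature computed in\n    # the test below, assuming a Yandex.Kassa style MD5 over semicolon-joined\n    # request fields plus a shop password:\n    #   raw = ';'.join(['checkOrder', '450.0', '450.0', '450.0', '1', '2', uid, shop_pass])\n    #   md5val = hashlib.md5(raw).hexdigest().upper()\n    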
def test_profiling(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_LLC]['doc_types'] = [DocumentTypeEnum.DT_P11001]\n # for n in xrange(200):\n # self.create_company(self.user)\n # self.create_person(self.user)\n # self.create_batch('llc', self.user)\n\n uchreditel_fis_lico_person = self.create_person(self.user, name=u\"Прокл\", surname=u\"Поликарпов\", patronymic=u\"Поликарпович\")\n uchreditel_fis_lico_person2 = self.create_person(self.user, name=u\"Прокл\", surname=u\"Поликарпов\", patronymic=u\"Поликарпович\")\n uchreditel_fis_lico_person3 = self.create_person(self.user, name=u\"Прокл\", surname=u\"Поликарпов\", patronymic=u\"Поликарпович\")\n general_manager_person = self.create_person(self.user, name=u\"Прокл\", surname=u\"Поликарпов\", patronymic=u\"Поликарпович\")\n uchreditel_rus_jur_lico_company = self.create_company(\n self.user,\n general_manager=general_manager_person,\n inn=\"010101417407\",\n kpp=\"999999999\",\n ogrn=\"1095543023135\"\n )\n\n with self.app.app_context():\n\n data = {\n u\"full_name\": u\"фывафыва\",\n u\"short_name\": u\"Бокс\",\n \"address\": {\n \"building\": u\"6\",\n \"city_type\": u\"г\",\n \"street_type\": u\"ул\",\n \"index\": 192212,\n \"house\": \"5\",\n \"region\": u\"Санкт-Петербург\",\n \"flat\": \"7\",\n \"building_type\": u\"к\",\n \"street\": u\"Уланина\",\n \"long_form_mode\": False,\n \"flat_type\": u\"кв\",\n \"house_type\": u\"д\",\n \"city\": u\"Санкт-Петербург\",\n \"ifns\": \"1234\"\n },\n u\"address_type\": \"office_address\",\n u\"starter_capital\": {\n \"currency\": \"rub\",\n \"value\": \"12312.5\"\n },\n u\"general_manager_caption\": u\"повелитель\",\n u\"share_type\": \"percent\",\n u\"founders\": [\n {\n \"founder\": {\n \"_id\": uchreditel_rus_jur_lico_company.id,\n \"type\": \"company\"\n },\n \"nominal_capital\": \"12312.20\",\n \"share\": 25\n },\n {\n \"founder\": {\n \"_id\": uchreditel_fis_lico_person.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": \"1500.50\",\n \"share\": 25\n },\n {\n \"founder\": {\n \"_id\": uchreditel_fis_lico_person2.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": \"1500.50\",\n \"share\": 25\n },\n {\n \"founder\": {\n \"_id\": uchreditel_fis_lico_person3.id,\n \"type\": \"person\"\n },\n \"nominal_capital\": \"1500.50\",\n \"share\": 25\n },\n ],\n u\"general_manager\": {\n \"_id\": general_manager_person.id,\n \"type\": \"person\"\n },\n u\"job_main_code\": u\"92.31.1\",\n # u\"job_code_array\" : [u\"92.31.1\", u\"74.14\", u\"10.01.1\"],\n u\"job_code_array\": [],\n u\"doc_obtain_founder\": {\n \"type\": \"person\",\n \"_id\": uchreditel_fis_lico_person.id\n },\n u\"obtain_way\": \"founder\",\n u'use_foreign_company_name': False,\n u'use_national_language_company_name': False,\n u\"tax_type\": UsnTaxType.UT_INCOME_MINUS_EXPENSE,\n u'general_manager_term': 20,\n u\"preimusch_pravo_priobreteniya_doli_time_span\": 60,\n u'necessary_votes_for_general_meeting_decisions': {\n u\"company_strategy\": NecessaryVotesEnum.NV_ALL,\n u\"articles_change\": NecessaryVotesEnum.NV_3_4,\n u\"executives_formation\": NecessaryVotesEnum.NV_2_3,\n u\"auditor_election\": NecessaryVotesEnum.NV_2_3,\n u\"annual_reports_approval\": NecessaryVotesEnum.NV_3_4,\n u\"profit_distribution\": NecessaryVotesEnum.NV_3_4,\n u\"internal_documents_approval\": NecessaryVotesEnum.NV_2_3,\n u\"obligations_emission\": NecessaryVotesEnum.NV_ALL,\n u\"audit_assignment\": NecessaryVotesEnum.NV_2_3,\n u\"large_deals_approval\": NecessaryVotesEnum.NV_3_4,\n u\"concern_deals_approval\": 
NecessaryVotesEnum.NV_2_3,\n u\"reorganization_or_liquidation\": NecessaryVotesEnum.NV_ALL,\n u\"liquidation_committee_assignment\": NecessaryVotesEnum.NV_2_3,\n u\"branch_establishment\": NecessaryVotesEnum.NV_3_4,\n u\"other_issues\": NecessaryVotesEnum.NV_2_3\n },\n u\"board_of_directors\": False,\n u\"selected_secretary\": {\n \"type\": \"person\",\n \"_id\": uchreditel_fis_lico_person.id\n },\n u\"selected_moderator\": {\n \"type\": \"person\",\n \"_id\": uchreditel_fis_lico_person.id\n },\n u\"pravo_otchuzhdeniya_type\": 5,\n u\"perehod_doli_k_naslednikam_soglasie\": True,\n u\"taxation_type\": \"usn\",\n u\"registration_way\": \"some_founders\",\n u\"region\": u\"Санкт-Петербург\"\n }\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW,\n data=data,\n paid=True,\n metadata={'_something': \"wrong\"},\n _owner=self.user\n )\n sqldb.session.add(batch)\n sqldb.session.commit()\n batch_id = batch.id\n\n new_batch_db_object = DocumentBatchDbObject(\n data=data,\n metadata={'_something': \"wrong\"},\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC\n )\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n structure = batch.get_api_structure()\n del structure['batch_type']\n batch_json = json.dumps(structure)\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': unicode(batch_id),\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n\n# print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n with TimeCalculator('name', self.app.logger, use_profile=True, min_time=0.001):\n result = self.test_client.post('/batch/finalise/', data={'batch_id': unicode(batch_id)})\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n# print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 1)\n\n our_md5_string = \"%s;%s;%s;%s;%s;%s;%s;%s\" % ('checkOrder', '450.0', '450.0',\n '450.0', '1', '2', unicode(self.user.id),\n '1234567890')\n\n m = hashlib.md5()\n m.update(our_md5_string)\n md5val = m.hexdigest().upper()\n\n result = self.test_client.post('/payment/checkOrder/', data={\n 'md5': md5val,\n 'shopId': '1',\n 'action': 'checkOrder',\n 'orderSumAmount': '450.0',\n 'orderSumCurrencyPaycash': '450.0',\n 'orderSumBankPaycash': '450.0',\n 'invoiceId': '2',\n 'customerNumber': unicode(self.user.id),\n 'orderNumber': \"subscription_1\"\n })\n self.assertEqual(result.status_code, 200)\n\n def test_uninitialized_field(self):\n with self.app.app_context():\n field_description = {\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\"\n }\n with TimeCalculator('name', self.app.logger, use_profile=True, min_time=0.001):\n for i in xrange(10000):\n x = get_uninitialized_field(field_description)\n\n @authorized(user_id=245)\n def test_get_batch_status(self):\n batch_id = \"54d4b370b8ac2f78815a79ca\"\n\n with self.app.app_context():\n result = self.test_client.get('/batch/status/?batch_id=%s' % batch_id)\n self.assertEqual(result.status_code, 200)\n\n @authorized()\n def test_get_batch_with_pay_info(self):\n batch = self.create_batch(\"llc\", self.user)\n\n with self.app.app_context():\n result = self.test_client.get('/batch/?batch_id=%s' % batch.id)\n 
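# Assumption: the /batch/ endpoint returns the batch API structure together\n            # with its payment info for the owner; only the HTTP status is checked here.\n            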
self.assertEqual(result.status_code, 200)\n\n" }, { "alpha_fraction": 0.6520874500274658, "alphanum_fraction": 0.654075562953949, "avg_line_length": 21.863636016845703, "blob_id": "ab5f7c687da3586262ee2640d4dd61f1112c69d5", "content_id": "136bcd7a26956b105a6658a2c487977b6dbfab3e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 503, "license_type": "no_license", "max_line_length": 69, "num_lines": 22, "path": "/app/deployment_migrations/migration_list/20150915_add_catalog_tables2.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom fw.db.sql_base import db as sqldb\n\n\ndef forward(config, logger):\n logger.debug(u\"Create new models\")\n\n sqldb.session.close()\n sqldb.engine.execute(u\"\"\"CREATE TABLE IF NOT EXISTS bik_catalog (\n id VARCHAR NOT NULL, \n name VARCHAR NOT NULL, \n okpo VARCHAR NOT NULL, \n bik VARCHAR NOT NULL, \n phone VARCHAR NOT NULL, \n address VARCHAR NOT NULL, \n kor_account VARCHAR NOT NULL, \n PRIMARY KEY (id)\n);\"\"\")\n\ndef rollback(config, logger):\n pass\n" }, { "alpha_fraction": 0.45315423607826233, "alphanum_fraction": 0.49278873205184937, "avg_line_length": 32.89179229736328, "blob_id": "f602b2736f264425d463a6547a1042b80c123ac3", "content_id": "008dba3da788eda30396310b9624cbbe56838401", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9446, "license_type": "no_license", "max_line_length": 115, "num_lines": 268, "path": "/jb_tests/test_pack/base_batch_test.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime, timedelta\nfrom random import randint\n\nfrom base_test_case import BaseTestCase\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.address_enums import RFRegionsEnum, FlatTypeEnum, BuildingTypeEnum, HouseTypeEnum, StreetTypeEnum\nfrom fw.documents.batch_manager import BatchManager\nfrom fw.documents.db_fields import PrivatePersonDbObject, BatchDocumentDbObject, CompanyDbObject\nfrom fw.documents.enums import PersonDocumentTypeEnum, PersonTypeEnum\nfrom services.car_assurance.db_models import CarAssurance, CarAssuranceBranch\nfrom services.ifns.data_model.models import IfnsCatalogObject\n\n\nclass BaseBatchTestCase(BaseTestCase):\n def setUp(self):\n super(BaseBatchTestCase, self).setUp()\n self.maxDiff = None\n self.events = []\n BatchManager.register_event_consumer('test_logger', self)\n\n def create_batch(self, batch_type, owner, do_not_save_to_db=False, status=None):\n manager = BatchManager.init(batch_type=batch_type)\n batch = manager.create_batch(owner)\n if status is not None:\n batch.status = status\n if not do_not_save_to_db:\n sqldb.session.add(batch)\n sqldb.session.commit()\n return batch\n\n def create_document(self, document_type, batch, data=None):\n doc = BatchDocumentDbObject(\n _owner=batch._owner,\n document_type=document_type,\n batch=batch,\n data=data or {},\n status='new',\n caption='caption'\n )\n sqldb.session.add(doc)\n sqldb.session.commit()\n return doc\n\n def on_event(self, batch_id, event, event_data, logger):\n self.events.append({\n 'batch_id': batch_id,\n 'event': event,\n 'event_data': event_data\n })\n\n def make_address(self):\n return {\n \"region\": RFRegionsEnum.RFR_IRKUTSKAYA_REGION,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n 
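# Editorial note: illustrative fixture address only; the ifns value below is\n            # a hard-coded tax office code (assumption: it is not validated here).\n            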
\"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"778899\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n \"ifns\": 7806\n }\n\n def create_person(self, owner, batch_id=None,\n name=u\"Трофим\",\n surname=u\"Соболенко\",\n patronymic=None,\n age=40,\n birthplace=u\"неизвестно где\",\n sex='male',\n inn=781108730780,\n phone=\"+79001231313\",\n passport=None,\n address=None,\n email=None,\n person_type=PersonTypeEnum.PT_RUSSIAN,\n do_not_save_to_db=False\n ):\n passport = passport or {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": \"4000\",\n \"number\": \"111222\",\n \"issue_date\": datetime.now() - timedelta(days=1),\n \"issue_depart\": u\"неизвестно кем\",\n \"depart_code\": \"111-222\",\n \"citizenship\": u\"РФ\"\n }\n person = PrivatePersonDbObject(\n _owner=owner,\n _batch_id=batch_id,\n name=name,\n surname=surname,\n patronymic=patronymic,\n birthdate=datetime.utcnow() - timedelta(days=365 * age),\n birthplace=birthplace,\n sex=sex,\n inn=inn,\n phone=phone,\n passport=passport,\n address=address or self.make_address(),\n email=email,\n person_type=person_type\n )\n if not do_not_save_to_db:\n sqldb.session.add(person)\n sqldb.session.commit()\n return person\n\n def create_company(self, owner, batch_id=None, general_manager=None, inn=None, kpp=None, ogrn=None):\n company = CompanyDbObject(\n _owner=owner,\n _batch_id=batch_id,\n full_name = u\"Компания\",\n short_name = u\"Ко\",\n address = self.make_address(),\n )\n if general_manager:\n company.general_manager = {\n '_id': general_manager.id,\n 'type': 'person'\n }\n if inn:\n company.inn = inn\n if kpp:\n company.kpp = kpp\n if ogrn:\n company.ogrn = ogrn\n\n sqldb.session.add(company)\n sqldb.session.commit()\n return company\n\n def addCarAssurance(self, assurance_name):\n obj = CarAssurance(\n full_name=assurance_name,\n short_name=assurance_name,\n connection_name=assurance_name,\n address=\"\"\n )\n sqldb.session.add(obj)\n sqldb.session.commit()\n return obj\n\n def addCarAssuranceBranch(self, assurance=None, region=None):\n region = region or RFRegionsEnum.RFR_LENINGRADSKAYA_REGION\n obj = CarAssuranceBranch(\n title=u'title',\n phone=u'112',\n car_assurance=assurance,\n address=u\"дер. Поганкино д. 
13\",\n region=region\n )\n sqldb.session.add(obj)\n sqldb.session.commit()\n return obj\n\n def addRegIfns(self, name, code=None, address=None):\n\n address = address or {\n \"city_type\": u\"г\",\n \"qc_complete\": u\"5\",\n \"street_type\": u\"ул\",\n \"index\": 450076,\n \"house\": u\"52\",\n \"region\": u\"Башкортостан\",\n \"okato\": u\"80401380000\",\n \"address_string\": u\"Россия, Респ Башкортостан, г Уфа, ул Красина, д 52\",\n \"qc\": u\"0\",\n \"street\": u\"Красина\",\n \"coord_lat\": u\"54.733428\",\n \"coord_long\": u\"55.934008\",\n \"house_type\": u\"д\",\n \"source_address\": u\",450076,,,Уфа г,,Красина ул,52,,\",\n \"ifns\": u\"0274\",\n \"city\": u\"Уфа\"\n }\n\n new_item = IfnsCatalogObject(**{\n u\"code\": code or randint(100, 9999),\n u\"comment\": u\"Код ОКПО:88111351 Режим работы Понедельник-четверг: 8.30 - 17.30\",\n u\"tel\": [\n u\"+7(347)2290200\",\n u\"+7(347)2290210\"\n ],\n u\"name\": name,\n \"plat\": {\n u\"recipient_inn\": u\"0275067000\",\n u\"recipient_kpp\": u\"027501001\",\n u\"recipient_name\": name\n },\n \"address\": address\n })\n sqldb.session.add(new_item)\n sqldb.session.commit()\n\n return new_item\n\n def addIfns(self, name, reg_ifns, address=None, code = None):\n address = address or {\n \"city_type\": u\"г\",\n \"qc_complete\": u\"5\",\n \"street_type\": u\"ул\",\n \"index\": 450076,\n \"house\": u\"52\",\n \"region\": u\"Башкортостан\",\n \"okato\": u\"80401380000\",\n \"address_string\": u\"Россия, Респ Башкортостан, г Уфа, ул Красина, д 52\",\n \"qc\": u\"0\",\n \"street\": u\"Красина\",\n \"coord_lat\": u\"54.733428\",\n \"coord_long\": u\"55.934008\",\n \"house_type\": u\"д\",\n \"source_address\": u\",450076,,,Уфа г,,Красина ул,52,,\",\n \"ifns\": u\"0274\",\n \"city\": u\"Уфа\"\n }\n\n code = code or randint(100, 9999)\n\n new_item = IfnsCatalogObject(**{\n u\"code\": code,\n u\"comment\": u\"Код ОКПО:88111351 Режим работы Понедельник-четверг: 8.30 - 17.30\",\n u\"tel\": [\n u\"+7(347)2290200\",\n u\"+7(347)2290210\"\n ],\n u\"name\": name,\n \"plat\": {\n u\"recipient_inn\": u\"0275067000\",\n u\"recipient_kpp\": u\"027501001\",\n u\"recipient_name\": name\n },\n \"address\": address,\n \"rou\": {\n u\"_id\": reg_ifns['_id'],\n u\"code\": str(code) + \"0\",\n u\"tel\": [\n u\"+7(347)2290200\",\n u\"+7(347)2290210\"\n ],\n \"name\": reg_ifns['name'],\n \"address\": {\n \"city_type\": u\"г\",\n \"qc_complete\": u\"5\",\n \"street_type\": u\"ул\",\n \"index\": 450076,\n \"house\": u\"52\",\n \"region\": u\"Башкортостан\",\n \"okato\": u\"80401380000\",\n \"address_string\": u\"Россия, Респ Башкортостан, г Уфа, ул Красина, д 52\",\n \"qc\": u\"0\",\n \"street\": u\"Красина\",\n \"coord_lat\": u\"54.733428\",\n \"coord_long\": u\"55.934008\",\n \"house_type\": u\"д\",\n \"ifns\": str(code) + '0',\n \"city\": u\"Уфа\"\n }\n },\n\n })\n sqldb.session.add(new_item)\n sqldb.session.commit()\n\n return new_item\n" }, { "alpha_fraction": 0.5725916028022766, "alphanum_fraction": 0.5729308128356934, "avg_line_length": 37.28571319580078, "blob_id": "080bb0c121641cdeaae40fa3b20d90b10e50763d", "content_id": "389a1e45314605cae64d314793e2645c8eaa437b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2970, "license_type": "no_license", "max_line_length": 105, "num_lines": 77, "path": "/app/services/notarius/notarius_manager.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom flask_login import current_user\nimport pytils\nfrom 
fw.auth.social_services import SocialServiceBackends\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.db_fields import DocumentBatchDbObject\nfrom services.notarius.data_model.models import NotariusBookingObject\n\n\ndef discard_booking(batch, config, logger):\n from fw.async_tasks import send_email\n\n try:\n batch_id = batch.id\n booking = NotariusBookingObject.query.filter_by(\n batch=batch,\n owner=current_user,\n _discarded=False).scalar()\n if not booking:\n return\n\n booking._discarded = True\n batch_data = batch.data or {}\n batch_data['lawyer_check'] = False\n DocumentBatchDbObject.query.filter_by(id=batch_id).update({\n 'data': batch_data\n })\n sqldb.session.commit()\n\n llc_full_name = batch.data.get('full_name', \"\")\n social_link = SocialServiceBackends.get_user_social_network_profile_url(current_user.id)\n rec_list = config['YURIST_EMAIL_LIST']\n\n notarius_id = booking.notarius_id\n dt = booking.dt\n address = booking.address\n\n for recipient in rec_list:\n logger.info(u\"Sending %s email to %s\" % ('notarius_discard_on_batch_change', recipient))\n send_email.send_email.delay(\n recipient,\n 'notarius_discard_on_batch_change',\n notarius_id=unicode(notarius_id),\n booking_time=pytils.dt.ru_strftime(u\"%d %B %Y в %H:%M\", inflected=True,\n date=dt) if dt else u\"<неизвестно>\",\n address=address,\n email=current_user.email,\n mobile=current_user.mobile,\n social_link=social_link,\n full_name=llc_full_name\n )\n\n if current_user.email:\n logger.info(u\"Sending %s email to %s\" % ('notarius_discard_user_notify', current_user.email))\n send_email.send_email.delay(\n current_user.email,\n 'notarius_discard_user_notify',\n notarius_id=unicode(notarius_id),\n booking_time=pytils.dt.ru_strftime(u\"%d %B %Y в %H:%M\", inflected=True,\n date=dt) if dt else u\"<неизвестно>\",\n address=address,\n email=current_user.email,\n mobile=current_user.mobile,\n domain=config['DOMAIN'],\n schema=config['WEB_SCHEMA'],\n batch_id=batch_id,\n user_id=str(current_user.id)\n )\n except Exception, ex:\n logger.exception(u\"Failed to discard notarius booking\")\n\n\ndef change_objects_owner(old_user_id, new_user_id):\n NotariusBookingObject.query.filter_by(owner_id=old_user_id).update({\n 'owner_id': new_user_id\n })\n sqldb.session.commit()\n" }, { "alpha_fraction": 0.45719972252845764, "alphanum_fraction": 0.49155086278915405, "avg_line_length": 40.063629150390625, "blob_id": "639404f2895dbad0379815b5d5b55c68de64723b", "content_id": "b1ffc5ca69f504b428e14222bc59e33b22d069ed", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 64430, "license_type": "no_license", "max_line_length": 223, "num_lines": 1493, "path": "/jb_tests/test_pack/test_rendering_ip_docs.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport time\nfrom datetime import datetime, timedelta\nimport json\n\nfrom bson.objectid import ObjectId\n\nfrom base_test_case import BaseTestCase\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.address_enums import (RFRegionsEnum, VillageTypeEnum, DistrictTypeEnum, CityTypeEnum, StreetTypeEnum,\n HouseTypeEnum, BuildingTypeEnum, FlatTypeEnum)\nfrom fw.documents.db_fields import PrivatePersonDbObject, DocumentBatchDbObject\nfrom fw.documents.doc_requisites_storage import DocRequisitiesStorage\nfrom fw.documents.enums import DocumentBatchTypeEnum, DocumentTypeEnum, PersonDocumentTypeEnum, BatchStatusEnum, TaxType\nfrom fw.documents.fields.doc_fields import 
DocumentBatch\nfrom services.ifns.data_model.models import IfnsBookingObject, IfnsCatalogObject\nfrom services.ip_reg.documents.enums import IPRegistrationWayEnum\nfrom services.llc_reg.documents.enums import UsnTaxType\nfrom test_api import authorized\n\n\nclass RenderingTestCase(BaseTestCase):\n @authorized()\n def test_ip_P21001(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_IP]['doc_types'] = [DocumentTypeEnum.DT_P21001]\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокд\",\n \"surname\": u\"Подикарпов\",\n \"patronymic\": u\"Подикарпович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гадчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Модоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"ifns\": \"7840\",\n \"okato\": \"40298566000\"\n },\n \"caption\": u\"Сандехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"person_type\": 1,\n #\"living_country_code\" : 643,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n sqldb.session.add(founder)\n sqldb.session.commit()\n\n with self.app.app_context():\n data = {\n u\"person\": {\n \"_id\": founder.id,\n \"type\": \"person\"\n },\n u\"job_main_code\": u\"92.31.1\",\n u\"job_code_array\": [u\"92.31.1\", u\"74.14\", u\"10.01.1\", u\"10.01.22\"],\n u\"doc_obtain_person\": {\n \"type\": \"person\",\n \"_id\": founder.id\n },\n u\"obtain_way\": \"in_person\",\n u\"region\": u\"Санкт-Петербург\",\n }\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP,\n status=BatchStatusEnum.BS_NEW,\n data={},\n _owner=self.user\n )\n sqldb.session.add(batch)\n sqldb.session.commit()\n _id = batch.id\n\n new_batch_db_object = DocumentBatchDbObject(\n data=data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP\n )\n\n booking = IfnsBookingObject(\n batch_id=_id,\n reg_info={\n 'reg_date': datetime.now()\n }\n )\n sqldb.session.add(booking)\n sqldb.session.commit()\n\n batch = DocumentBatch.db_obj_to_field(new_batch_db_object)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': unicode(_id),\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n\n t1 = time.time()\n result = self.test_client.post('/batch/finalise/', data={'batch_id': unicode(_id)})\n t2 = time.time()\n print(t2 - t1)\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).one()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 1)\n\n @authorized()\n def test_ip_state_duty(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_IP]['doc_types'] = [DocumentTypeEnum.DT_IP_STATE_DUTY]\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user.id,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 2),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"ifns\": \"7840\",\n \"okato\": \"40298566000\"\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n founder.insert(self.db)\n\n with self.app.app_context():\n data = {\n u\"person\": {\n \"_id\": founder.id,\n \"type\": \"person\"\n },\n u\"job_main_code\": u\"92.31.1\",\n u\"job_code_array\": [u\"92.31.1\", u\"74.14\", u\"10.01.1\"],\n u\"doc_obtain_person\": {\n \"type\": \"person\",\n \"_id\": founder.id\n },\n u\"obtain_way\": \"in_person\",\n u\"region\": u\"Санкт-Петербург\",\n }\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP,\n status=BatchStatusEnum.BS_NEW,\n _documents=[],\n data={},\n _owner=self.user.id\n )\n _id = batch.insert(self.db)\n\n new_batch_db_object = DocumentBatchDbObject(\n data=data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP\n )\n\n booking = IfnsBookingObject(\n batch_id=_id,\n reg_info={\n 'reg_date': datetime.now()\n }\n )\n sqldb.session.add(booking)\n sqldb.session.commit()\n\n batch = DocumentBatch.parse_raw_value(new_batch_db_object.as_dict(), False)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': unicode(_id),\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n\n result = self.test_client.post('/batch/finalise/', data={'batch_id': unicode(_id)})\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n print(json.dumps(db_batch.as_dict(), default=lambda x: unicode(x), ensure_ascii=False, indent=1))\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 1)\n self.assertTrue(not not db_batch.rendered_docs[0]['file_link'])\n\n @authorized()\n def test_ip_dov_filing(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_IP]['doc_types'] = [DocumentTypeEnum.DT_IP_DOV_FILING_DOCS]\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user.id,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 2),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"ifns\": \"7840\",\n \"okato\": \"40298566000\"\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n founder.insert(self.db)\n\n dov_person = PrivatePersonDbObject(**{\n \"_owner\": self.user.id,\n \"name\": u\"Акакий\",\n \"surname\": u\"Тунцов\",\n \"patronymic\": u\"Подляченко\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 32),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 2),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"ifns\": \"7840\",\n \"okato\": \"40298566000\"\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\"\n })\n dov_person.insert(self.db)\n\n with self.app.app_context():\n data = {\n u\"person\": {\n \"_id\": founder.id,\n \"type\": \"person\"\n },\n u\"job_main_code\": u\"92.31.1\",\n u\"job_code_array\": [u\"92.31.1\", u\"74.14\", u\"10.01.1\"],\n # u\"doc_obtain_person\" : {\n # \"type\" : \"person\",\n # \"_id\" : founder.id\n # },\n # u\"obtain_way\" : \"in_person\",\n u\"registration_way\": \"responsible_person\",\n u\"region\": u\"Санкт-Петербург\",\n # u\"same_obtain_trust_person\": False,\n u\"reg_responsible_person\": {\n \"_id\": dov_person.id,\n \"type\": \"person\"\n },\n }\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP,\n status=BatchStatusEnum.BS_NEW,\n _documents=[],\n data={},\n _owner=self.user.id\n )\n _id = batch.insert(self.db)\n\n new_batch_db_object = DocumentBatchDbObject(\n data=data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP\n )\n\n booking = IfnsBookingObject(\n batch_id=_id,\n reg_info={\n 'reg_date': datetime.now()\n }\n )\n sqldb.session.add(booking)\n sqldb.session.commit()\n\n batch = DocumentBatch.parse_raw_value(new_batch_db_object.as_dict(), False)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': unicode(_id),\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n\n result = self.test_client.post('/batch/finalise/', data={'batch_id': unicode(_id)})\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n print(json.dumps(db_batch.as_dict(), default=lambda x: unicode(x), ensure_ascii=False, indent=1))\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 1)\n self.assertTrue(not not db_batch.rendered_docs[0]['file_link'])\n\n @authorized()\n def test_ip_dov_filing_receiving_by_same_person(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_IP]['doc_types'] = [DocumentTypeEnum.DT_IP_DOV_FILING_RECEIVING_DOCS, DocumentTypeEnum.DT_IP_DOV_RECEIVING_DOCS]\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user.id,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": 
PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 2),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"ifns\": \"7840\",\n \"okato\": \"40298566000\"\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n })\n founder.insert(self.db)\n\n dov_person = PrivatePersonDbObject(**{\n \"_owner\": self.user.id,\n \"name\": u\"Акакий\",\n \"surname\": u\"Тунцов\",\n \"patronymic\": u\"Подляченко\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 32),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 2),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"ifns\": \"7840\",\n \"okato\": \"40298566000\"\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\"\n })\n dov_person.insert(self.db)\n\n with self.app.app_context():\n data = {\n u\"person\": {\n \"_id\": founder.id,\n \"type\": \"person\"\n },\n u\"job_main_code\": u\"92.31.1\",\n u\"job_code_array\": [u\"92.31.1\", u\"74.14\", u\"10.01.1\"],\n u\"obtain_way\": \"responsible_person\",\n u\"registration_way\": \"in_person\",\n u\"same_obtain_trust_person\": False,\n u\"region\": u\"Санкт-Петербург\",\n u\"doc_obtain_person\": None,\n # u\"reg_responsible_person\" : {\n # \"_id\" : dov_person.id,\n # \"type\" : \"person\"\n # },\n }\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP,\n status=BatchStatusEnum.BS_NEW,\n _documents=[],\n data={},\n _owner=self.user.id\n )\n _id = batch.insert(self.db)\n\n new_batch_db_object = DocumentBatchDbObject(\n data=data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP\n )\n\n booking = IfnsBookingObject(\n batch_id=_id,\n reg_info={\n 'reg_date': datetime.now()\n }\n )\n sqldb.session.add(booking)\n sqldb.session.commit()\n\n batch = DocumentBatch.parse_raw_value(new_batch_db_object.as_dict(), False)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': unicode(_id),\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n db_batch = 
DocumentBatchDbObject.query.filter_by(id=_id).first()\n\n result = self.test_client.post('/batch/finalise/', data={'batch_id': unicode(_id)})\n print result.data\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n print(json.dumps(db_batch.as_dict(), default=lambda x: unicode(x), ensure_ascii=False, indent=1))\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 1)\n self.assertTrue(not not db_batch.rendered_docs[0]['file_link'])\n\n @authorized()\n def test_ip_dov_filing_receiving_different_persons(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_IP]['doc_types'] = [DocumentTypeEnum.DT_IP_DOV_RECEIVING_DOCS, DocumentTypeEnum.DT_IP_DOV_FILING_DOCS]\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user.id,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 2),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"ifns\": \"7840\",\n \"okato\": \"40298566000\"\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n founder.insert(self.db)\n\n dov_person = PrivatePersonDbObject(**{\n \"_owner\": self.user.id,\n \"name\": u\"Акакий\",\n \"surname\": u\"Тунцов\",\n \"patronymic\": u\"Подляченко\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 32),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 2),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"ifns\": \"7840\",\n \"okato\": \"40298566000\"\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\"\n })\n dov_person.insert(self.db)\n\n dov_person2 = PrivatePersonDbObject(**{\n \"_owner\": self.user.id,\n \"name\": u\"Марципан\",\n \"surname\": u\"Арешкин\",\n \"patronymic\": u\"Трофимович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 33),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 1),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"ifns\": \"7840\",\n \"okato\": \"40298566000\"\n },\n \"caption\": u\"Полупроводник\",\n \"phone\": \"+79210002233\",\n \"email\": \"[email protected]\"\n })\n dov_person2.insert(self.db)\n\n with self.app.app_context():\n data = {\n u\"person\": {\n \"_id\": founder.id,\n \"type\": \"person\"\n },\n u\"job_main_code\": u\"92.31.1\",\n u\"job_code_array\": [u\"92.31.1\", u\"74.14\", u\"10.01.1\"],\n u\"obtain_way\": \"responsible_person\",\n u\"registration_way\": \"responsible_person\",\n u\"same_obtain_trust_person\": False,\n u\"region\": u\"Санкт-Петербург\",\n u\"reg_responsible_person\": {\n \"_id\": dov_person.id,\n \"type\": \"person\"\n },\n u\"doc_obtain_person\": {\n \"_id\": dov_person.id,\n \"type\": \"person\"\n },\n }\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP,\n status=BatchStatusEnum.BS_EDITED,\n _documents=[],\n data={},\n _owner=self.user.id\n )\n _id = batch.insert(self.db)\n\n new_batch_db_object = DocumentBatchDbObject(\n data=data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP\n )\n\n booking = IfnsBookingObject(\n batch_id=_id,\n reg_info={\n 'reg_date': datetime.now()\n }\n )\n sqldb.session.add(booking)\n sqldb.session.commit()\n\n batch = DocumentBatch.parse_raw_value(new_batch_db_object.as_dict(), False)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': unicode(_id),\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n\n result = self.test_client.post('/batch/finalise/', 
data={'batch_id': unicode(_id)})\n print result.data\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n print(json.dumps(db_batch.as_dict(), default=lambda x: unicode(x), ensure_ascii=False, indent=1))\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 2)\n self.assertTrue(not not db_batch.rendered_docs[0]['file_link'])\n\n @authorized()\n def test_ip_dov_filing_receiving_different_persons2(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_IP]['doc_types'] = [\n DocumentTypeEnum.DT_IP_DOV_RECEIVING_DOCS,\n DocumentTypeEnum.DT_IP_DOV_FILING_DOCS,\n DocumentTypeEnum.DT_IP_DOV_FILING_RECEIVING_DOCS\n ]\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user.id,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 4),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"ifns\": \"7840\",\n \"okato\": \"40298566000\"\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n founder.insert(self.db)\n\n dov_person = PrivatePersonDbObject(**{\n \"_owner\": self.user.id,\n \"name\": u\"Акакий\",\n \"surname\": u\"Ивашкин\",\n \"patronymic\": u\"Подляченко\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 32),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 1),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"ifns\": \"7840\",\n \"okato\": \"40298566000\"\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\"\n })\n dov_person.insert(self.db)\n\n dov_person2 = PrivatePersonDbObject(**{\n \"_owner\": self.user.id,\n \"name\": u\"Марципан\",\n # \"surname\" : u\"Арешкин\",\n # \"patronymic\" : u\"Трофимович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 33),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 1),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"ifns\": \"7840\",\n \"okato\": \"40298566000\"\n },\n \"caption\": u\"Полупроводник\",\n \"phone\": \"+79210002233\",\n \"email\": \"[email protected]\"\n })\n dov_person2.insert(self.db)\n\n with self.app.app_context():\n data = {\n u\"person\": {\n \"_id\": founder.id,\n \"type\": \"person\"\n },\n u\"job_main_code\": u\"92.31.1\",\n u\"job_code_array\": [u\"92.31.1\", u\"74.14\", u\"10.01.1\"],\n u\"obtain_way\": \"responsible_person\",\n u\"registration_way\": \"notary\",\n u\"same_obtain_trust_person\": False,\n u\"reg_responsible_person\": {\n \"_id\": dov_person2.id,\n \"type\": \"person\"\n },\n u\"doc_obtain_person\": None\n # u\"doc_obtain_person\" : {\n # \"_id\" : dov_person.id,\n # \"type\" : \"person\"\n # },\n }\n\n data = {\n \"doc_obtain_person\": \"%s_person\" % dov_person.id,\n \"job_code_array\": [\"50.30\", \"50.40\", \"51.44.4\", \"52.48.39\"],\n \"job_main_code\": \"50.20\",\n \"obtain_way\": \"responsible_person\",\n \"person\": \"%s_person\" % founder.id,\n \"reg_responsible_person\": \"%s_person\" % dov_person2.id,\n \"registration_way\": \"responsible_person\",\n \"same_obtain_trust_person\": \"false\",\n \"tax_type\": \"2\",\n \"taxation_type\": \"general\"\n }\n\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP,\n status=BatchStatusEnum.BS_NEW,\n _documents=[],\n data={},\n _owner=self.user.id\n )\n _id = batch.insert(self.db)\n\n # new_batch_db_object = DocumentBatchDbObject(\n # data = data,\n # batch_type = DocumentBatchTypeEnum.DBT_NEW_IP\n # )\n\n booking = IfnsBookingObject(\n batch_id=_id,\n reg_info={\n 'reg_date': 
datetime.now()\n }\n )\n sqldb.session.add(booking)\n sqldb.session.commit()\n\n #batch = DocumentBatch.parse_raw_value(new_batch_db_object.as_dict(), False)\n batch_json = json.dumps({\n \"batch_type\": DocumentBatchTypeEnum.DBT_NEW_IP,\n \"data\": data\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': unicode(_id),\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n\n result = self.test_client.post('/batch/finalise/', data={'batch_id': unicode(_id)})\n print result.data\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n print(json.dumps(db_batch.as_dict(), default=lambda x: unicode(x), ensure_ascii=False, indent=1))\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 1)\n self.assertTrue(not not db_batch.rendered_docs[0]['file_link'])\n\n @authorized()\n def test_ip_usn(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_IP]['doc_types'] = [DocumentTypeEnum.DT_IP_USN_CLAIM]\n\n col = self.db['okvad']\n col.insert({\"caption\": u\"Рыболовство\", \"okved\": \"92.31.1\", \"nalog\": \"usn\",\n \"parent\": ObjectId(\"5478373ee64bcf4ece4a57d8\")})\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user.id,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 3),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"ifns\": \"7840\",\n \"okato\": \"40298566000\"\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 643,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n founder.insert(self.db)\n\n with self.app.app_context():\n data = {\n u\"person\": {\n \"_id\": founder.id,\n \"type\": \"person\"\n },\n u\"job_main_code\": u\"92.31.1\",\n u\"job_code_array\": [u\"92.31.1\", u\"74.14\", u\"10.01.1\", u\"10.01.1\"],\n u\"doc_obtain_person\": {\n \"type\": \"person\",\n \"_id\": founder.id\n },\n u\"obtain_way\": \"in_person\",\n u\"region\": u\"Санкт-Петербург\",\n u\"taxation_type\": u\"usn\",\n u\"tax_type\": UsnTaxType.UT_INCOME_MINUS_EXPENSE,\n }\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP,\n status=BatchStatusEnum.BS_NEW,\n _documents=[],\n data={},\n _owner=self.user.id\n )\n _id = batch.insert(self.db)\n\n new_batch_db_object = DocumentBatchDbObject(\n data=data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP\n )\n\n booking = IfnsBookingObject(\n batch_id=_id,\n reg_info={\n 'reg_date': datetime.now()\n }\n )\n sqldb.session.add(booking)\n sqldb.session.commit()\n\n batch = DocumentBatch.parse_raw_value(new_batch_db_object.as_dict(), False)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': unicode(_id),\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n\n result = self.test_client.post('/batch/finalise/', data={'batch_id': unicode(_id)})\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n print(json.dumps(db_batch.as_dict(), default=lambda x: unicode(x), ensure_ascii=False, indent=1))\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 1)\n self.assertTrue(not not db_batch.rendered_docs[0]['file_link'])\n\n @authorized()\n def test_ip_eshn(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_IP]['doc_types'] = [DocumentTypeEnum.DT_IP_ESHN_CLAIM]\n\n col = self.db['okvad']\n col.insert({\"caption\": u\"Рыболовство\", \"okved\": \"92.31.1\", \"nalog\": \"eshn\",\n \"parent\": ObjectId(\"5478373ee64bcf4ece4a57d8\")})\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user.id,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n #\"inn\" : \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"ifns\": \"7840\",\n \"okato\": \"40298566000\"\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n 
\"living_country_code\": 643,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n })\n founder.insert(self.db)\n\n with self.app.app_context():\n data = {\n u\"person\": {\n \"_id\": founder.id,\n \"type\": \"person\"\n },\n u\"job_main_code\": u\"92.31.1\",\n u\"job_code_array\": [u\"92.31.1\", u\"74.14\", u\"10.01.1\", u\"10.01.1\"],\n u\"doc_obtain_person\": {\n \"type\": \"person\",\n \"_id\": founder.id\n },\n u\"obtain_way\": \"in_person\",\n u\"region\": u\"Санкт-Петербург\",\n u\"taxation_type\": u\"eshn\",\n # u\"tax_type\" : UsnTaxType.UT_INCOME_MINUS_EXPENSE,\n }\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP,\n status=BatchStatusEnum.BS_NEW,\n _documents=[],\n data={},\n _owner=self.user.id\n )\n _id = batch.insert(self.db)\n\n new_batch_db_object = DocumentBatchDbObject(\n data=data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP\n )\n\n booking = IfnsBookingObject(\n batch_id=_id,\n reg_info={\n 'reg_date': datetime.now()\n }\n )\n sqldb.session.add(booking)\n sqldb.session.commit()\n\n batch = DocumentBatch.parse_raw_value(new_batch_db_object.as_dict(), False)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': unicode(_id),\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n\n result = self.test_client.post('/batch/finalise/', data={'batch_id': unicode(_id)})\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n print(json.dumps(db_batch.as_dict(), default=lambda x: unicode(x), ensure_ascii=False, indent=1))\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 1)\n self.assertTrue(not not db_batch.rendered_docs[0]['file_link'])\n\n @authorized()\n def test_ip_empty_batch(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_IP]['doc_types'] = [DocumentTypeEnum.DT_P21001]\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user.id,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 2),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"ifns\": \"7840\",\n \"okato\": \"40298566000\"\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 643,\n \"living_address\": 
u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n })\n founder.insert(self.db)\n\n with self.app.app_context():\n data = {\n # u\"person\" : {\n # \"_id\" : founder.id,\n # \"type\" : \"person\"\n # },\n }\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP,\n status=BatchStatusEnum.BS_NEW,\n _documents=[],\n data={\n\n },\n _owner=self.user.id\n )\n _id = batch.insert(self.db)\n\n new_batch_db_object = DocumentBatchDbObject(\n data=data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP\n )\n\n booking = IfnsBookingObject(\n batch_id=_id,\n reg_info={\n 'reg_date': datetime.now()\n }\n )\n sqldb.session.add(booking)\n sqldb.session.commit()\n\n batch = DocumentBatch.parse_raw_value(new_batch_db_object.as_dict(), False)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': unicode(_id),\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_NEW)\n\n @authorized()\n def test_ip_full_batch(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_IP]['doc_types'] = [\n DocumentTypeEnum.DT_P21001,\n DocumentTypeEnum.DT_IP_DOV_FILING_DOCS,\n DocumentTypeEnum.DT_IP_ESHN_CLAIM]\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user.id,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 2),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"ifns\": \"7840\",\n \"okato\": \"40298566000\"\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 643,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n founder.insert(self.db)\n\n with self.app.app_context():\n data = {\n u\"person\": {\n \"_id\": founder.id,\n \"type\": \"person\"\n },\n u\"job_main_code\": u\"92.31.1\",\n u\"job_code_array\": [u\"92.31.1\", u\"74.14\", u\"10.01.1\", u\"10.01.1\"],\n u\"doc_obtain_person\": {\n \"type\": \"person\",\n \"_id\": founder.id\n },\n u\"obtain_way\": \"in_person\",\n u\"region\": u\"Санкт-Петербург\",\n u\"taxation_type\": u\"usn\",\n u\"tax_type\": UsnTaxType.UT_INCOME_MINUS_EXPENSE,\n u\"registration_way\": \"in_person\",\n # u\"tax_type\" : UsnTaxType.UT_INCOME_MINUS_EXPENSE,\n }\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP,\n status=BatchStatusEnum.BS_NEW,\n _documents=[],\n data={\n\n },\n _owner=self.user.id\n )\n _id = batch.insert(self.db)\n\n new_batch_db_object = DocumentBatchDbObject(\n data=data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP\n )\n\n booking = IfnsBookingObject(\n batch_id=_id,\n reg_info={\n 'reg_date': datetime.now()\n }\n )\n sqldb.session.add(booking)\n sqldb.session.commit()\n\n batch = DocumentBatch.parse_raw_value(new_batch_db_object.as_dict(), False)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': unicode(_id),\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n\n result = self.test_client.post('/batch/finalise/', data={'batch_id': unicode(_id)})\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n print(json.dumps(db_batch.as_dict(), default=lambda x: unicode(x), ensure_ascii=False, indent=1))\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n # self.assertEqual(len(db_batch._documents), 1)\n # self.assertTrue(not not db_batch.rendered_docs[0]['file_link'])\n\n\n @authorized()\n def test_ip_letter_inventory(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_NEW_IP]['doc_types'] = [DocumentTypeEnum.DT_IP_LETTER_INVENTORY]\n\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user.id,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 2),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"district_type\": DistrictTypeEnum.DIT_DISTRICT,\n \"district\": u\"Гатчинский\",\n \"city_type\": CityTypeEnum.CIT_CITY,\n \"city\": u\"Гадюкино\",\n \"village_type\": VillageTypeEnum.VIT_HUTOR,\n \"village\": u\"близ Диканьки\",\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"building_type\": BuildingTypeEnum.BIT_HOUSING,\n \"building\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"ifns\": \"7840\",\n \"okato\": \"40298566000\"\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. 
Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n })\n founder.insert(self.db)\n\n dov_person = PrivatePersonDbObject(**{\n \"_owner\": self.user.id,\n \"name\": u\"Акакий\",\n \"surname\": u\"Тунцов\",\n \"patronymic\": u\"Подляченко\",\n \"inn\": \"781108730780\",\n \"sex\": \"male\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 32),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 2),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"705\",\n \"ifns\": \"7840\",\n \"okato\": \"40298566000\"\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\"\n })\n dov_person.insert(self.db)\n\n new_item = IfnsCatalogObject(**{\n 'name': u\"Межрайонная ИФНС России №22 по Санкт-Петербургу\",\n 'address': {\n \"address_string\": u\",198334,Санкт-Петербург г,,,,Партизана Германа ул,37,,\",\n },\n 'comment': u\"Код ОКПО:39449549 Прием: Понедельник, среда с 09.00 до 18.00. Вторник, четверг с 09.00 до 20.00. Пятница с 09.00 до 16.45. Вторая и четвертая субботы месяца с 10.00 до 15.00. Без перерыва на обед.\",\n 'code': 7840,\n 'rou': {\n \"name\": u\"Межрайонная ИФНС России №15 по Санкт-Петербургу\",\n \"code\": u\"78086\",\n \"tel\": [\"+7(812)3351403\"]\n }\n })\n sqldb.session.add(new_item)\n sqldb.session.commit()\n\n with self.app.app_context():\n data = {\n u\"person\": {\n \"_id\": founder.id,\n \"type\": \"person\"\n },\n u\"taxation_type\": TaxType.TT_GENERAL,\n u\"registration_way\": IPRegistrationWayEnum.IP_RW_MAIL\n }\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP,\n status=BatchStatusEnum.BS_NEW,\n _documents=[],\n data={},\n paid=True,\n _owner=self.user.id\n )\n _id = batch.insert(self.db)\n\n new_batch_db_object = DocumentBatchDbObject(\n data=data,\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP\n )\n\n booking = IfnsBookingObject(\n batch_id=_id,\n reg_info={\n 'reg_date': datetime.now()\n }\n )\n sqldb.session.add(booking)\n sqldb.session.commit()\n\n batch = DocumentBatch.parse_raw_value(new_batch_db_object.as_dict(), False)\n batch_json = json.dumps(batch.get_api_structure())\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': unicode(_id),\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n\n result = self.test_client.post('/batch/finalise/', data={'batch_id': unicode(_id)})\n self.assertEqual(result.status_code, 200)\n\n db_batch = DocumentBatchDbObject.query.filter_by(id=_id).first()\n print(json.dumps(db_batch.as_dict(), default=lambda x: unicode(x), ensure_ascii=False, indent=1))\n self.assertEqual(db_batch.status, BatchStatusEnum.BS_FINALISED)\n self.assertEqual(len(db_batch._documents), 1)\n self.assertTrue(not not db_batch.rendered_docs[0]['file_link'])\n" }, { "alpha_fraction": 0.61081862449646, "alphanum_fraction": 0.6160067319869995, "avg_line_length": 35.611732482910156, "blob_id": "dc702b62a5c490ab8d307de28d0c0fc089d9527a", "content_id": "3e496499c77b68a2d216247aff8292b515a769c2", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 26527, "license_type": "no_license", "max_line_length": 158, "num_lines": 716, "path": "/app/fw/api/views/documents.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime, timedelta\nimport json\nimport codecs\nimport subprocess\nimport tempfile\nimport os\nimport re\n\nfrom flask import current_app, Blueprint\nfrom flask_login import login_required, current_user\nfrom sqlalchemy.orm import make_transient, joinedload\nfrom common_utils import remove_task_id_run_file\n\nfrom fw.api import errors\nfrom fw.api.args_validators import validate_arguments, BoolTypeValidator, EnumValidator, IntValidator, JsonValidator, \\\n ArgumentValidator\nfrom fw.api.base_handlers import api_view\nfrom fw.async_tasks import rendering\nfrom fw.async_tasks.scheduler import CeleryScheduler\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.batch_manager import BatchManager\nfrom fw.documents.db_fields import DocumentBatchDbObject, BatchDocumentDbObject\nfrom fw.documents.doc_requisites_storage import DocRequisitiesStorage\nfrom fw.documents.enums import BatchStatusEnum, DocumentBatchTypeEnum, UserDocumentStatus, DocumentTypeEnum\nfrom fw.documents.fields.doc_fields import DocumentBatch\nfrom fw.storage.file_storage import FileStorage\nfrom fw.utils.time_utils import calc_fixed_time_not_earlier\n\nMAX_PREVIEW_RENDERING_DURATION_SECONDS = 60\n\ndocuments_bp = Blueprint('documents', __name__)\n\ndef _finalize_batch(batch):\n batch_status = batch.status\n if batch_status == BatchStatusEnum.BS_FINALISED:\n return {\"result\": True}\n\n if batch_status not in (BatchStatusEnum.BS_NEW, BatchStatusEnum.BS_EDITED):\n raise errors.DocumentBatchFinalizationError()\n\n docs = batch._documents\n if not docs:\n return {\"result\": False}\n\n batch_manager = BatchManager.init(batch)\n\n try:\n if not batch_manager.finalize_batch(current_app.config, current_app.logger, batch):\n raise errors.DocumentBatchFinalizationError()\n except Exception:\n current_app.logger.exception(u\"Finalisation error\")\n return {\"result\": False}\n\n return {\"result\": True}\n\n@documents_bp.route('/batch/finalise/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(batch_id=ArgumentValidator())\ndef finalize_batch(batch_id=None):\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).first()\n if not batch:\n raise errors.BatchNotFound()\n return _finalize_batch(batch)\n\n\n@documents_bp.route('/batch/finalise/cancel/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(batch_id=ArgumentValidator())\ndef cancel_batch_finalization(batch_id=None):\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).first()\n if not batch:\n raise errors.BatchNotFound()\n if batch.status != BatchStatusEnum.BS_BEING_FINALISED:\n return {\"result\": True}\n\n try:\n BatchManager.cancel_batch_finalization(batch, current_app.config, current_app.logger)\n except Exception:\n current_app.logger.exception(u\"Failed to cancel batch finalization.\")\n return {\"result\": False}\n\n return {\"result\": True}\n\n\n@documents_bp.route('/batch/unfinalise/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(batch_id=ArgumentValidator(), force=BoolTypeValidator(required=False))\ndef unfinalize_batch(batch_id=None, force=False):\n batch = 
DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).scalar()\n if not batch:\n raise errors.BatchNotFound()\n if batch.status not in (BatchStatusEnum.BS_FINALISED,):\n raise errors.DocumentBatchDefinalizationError()\n\n if batch.current_task_id:\n from celery import app as celery\n current_app.logger.debug(u\"There are task id: %s\" % unicode(batch.current_task_id))\n celery.default_app.control.revoke(batch.current_task_id)\n remove_task_id_run_file(current_app.config, batch.current_task_id)\n batch.current_task_id = None\n batch.batch_rendering_start = None\n sqldb.session.commit()\n\n batch_manager = BatchManager.init(batch)\n try:\n if not batch_manager.definalize_batch(current_app.config, current_app.logger, batch, force):\n raise errors.DocumentBatchDefinalizationError()\n except Exception:\n batch.status = BatchStatusEnum.BS_FINALISED\n sqldb.session.commit()\n raise errors.DocumentBatchDefinalizationError()\n\n return {\"result\": True}\n\n\n@documents_bp.route('/batch/create/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(batch_type=EnumValidator(DocumentBatchTypeEnum))\ndef batch_create(batch_type=None):\n try:\n DocRequisitiesStorage.get_batch_descriptor(batch_type)\n except Exception:\n raise errors.InvalidParameterValue('batch_type')\n\n batch_manager = BatchManager.init(batch_type=batch_type)\n new_batch = batch_manager.create_batch(current_user)\n sqldb.session.add(new_batch)\n sqldb.session.commit()\n doc_batch = DocumentBatch.db_obj_to_field(new_batch)\n return {'result': doc_batch.get_api_structure()}\n\n\n@documents_bp.route('/batch/', methods=['GET'])\n@api_view\n@login_required\n@validate_arguments(\n count=IntValidator(min_val=0, required=False),\n offset=IntValidator(min_val=0, required=False),\n batch_id=ArgumentValidator(required=False),\n finalised=BoolTypeValidator(required=False)\n)\ndef get_batch(count=None, offset=None, batch_id=None, finalised=None):\n batch_api_structures = []\n\n broken_batch_ids = []\n if batch_id:\n batch_db_obj = DocumentBatchDbObject.query.filter_by(id=batch_id,\n _owner=current_user,\n _broken=False, deleted=False).first()\n if not batch_db_obj:\n raise errors.BatchNotFound()\n sqldb.session.expunge(batch_db_obj)\n make_transient(batch_db_obj)\n total = 1\n result_count = 1\n if 1:\n try:\n batch_api_structures.append(DocumentBatch.db_obj_to_field(batch_db_obj).get_api_structure())\n except Exception:\n current_app.logger.exception(u\"Set batch _broken\")\n broken_batch_ids.append(batch_id)\n else:\n query = DocumentBatchDbObject.query.filter_by(_owner=current_user, _broken=False, deleted=False).options(joinedload(DocumentBatchDbObject._documents))\n\n if finalised is not None:\n query = query.filter_by(status=BatchStatusEnum.BS_FINALISED) if finalised else query.filter(\n DocumentBatchDbObject.status != BatchStatusEnum.BS_FINALISED)\n query = query.order_by(DocumentBatchDbObject.creation_date.desc())\n total = query.count()\n if count is not None:\n query = query.limit(count)\n if offset is not None:\n query = query.offset(offset)\n result_count = query.count()\n has_broken = False\n with current_app.model_cache_context:\n for item in query:\n batch_id = item.id\n sqldb.session.expunge(item)\n make_transient(item)\n\n for doc in item._documents:\n current_app.model_cache_context.add(BatchDocumentDbObject.__tablename__, doc.id, doc)\n if 1:\n try:\n batch_api_structures.append(\n DocumentBatch.db_obj_to_field(item).get_api_structure(db_obj=item))\n except Exception:\n 
current_app.logger.exception(u\"Set batch _broken %s\" % unicode(item.id))\n broken_batch_ids.append(batch_id)\n has_broken = True\n continue\n if has_broken:\n for batch_id in broken_batch_ids:\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n if batch:\n batch._broken = True\n sqldb.session.commit()\n\n result = {\n 'result': {\n 'total': total,\n 'count': result_count,\n 'batches': batch_api_structures\n }\n }\n return result\n\n\n@documents_bp.route('/batch/status/', methods=['GET'])\n@api_view\n@login_required\n@validate_arguments(\n batch_id=ArgumentValidator(required=True),\n)\ndef get_batch_status(batch_id=None):\n batch_db_obj = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).first()\n if not batch_db_obj:\n raise errors.BatchNotFound()\n batch = DocumentBatch.db_obj_to_field(batch_db_obj)\n result = {\n 'result': batch.get_api_structure(skip_documents=True)\n }\n return result\n\n\n@documents_bp.route('/batch/delete/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(batch_id=ArgumentValidator())\ndef batch_delete(batch_id=None):\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).first()\n if not batch:\n raise errors.BatchNotFound()\n batch.deleted = True\n sqldb.session.commit()\n\n return {'result': True}\n\n\ndef schedule_notification_email(batch_id):\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id, deleted=False, finalisation_count=0).scalar()\n if not batch or batch.status not in (BatchStatusEnum.BS_NEW, BatchStatusEnum.BS_EDITED):\n return\n\n mail_type = 'please_finalise'\n if mail_type in (batch.sent_mails or []):\n return False\n\n user = batch._owner\n if not user or not user.email:\n return\n\n manager = BatchManager.init(batch)\n timezone_name = manager.get_batch_timezone(batch_id) or \"Europe/Moscow\"\n\n desired_time = current_app.config['NOT_PAID_BATCH_NOTIFY_DESIRED_TIME']\n timeout_td = timedelta(seconds=current_app.config['NOT_PAID_BATCH_NOTIFY_TIMEOUT_SECONDS'])\n dt = calc_fixed_time_not_earlier(datetime.utcnow(), desired_time, timeout_td, timezone_name)\n\n CeleryScheduler.post(\n \"fw.async_tasks.not_paid_check_send.not_finalised_check_and_send\",\n task_id=\"not_finalised_check_and_send%s\" % str(batch_id),\n force_replace_task=True,\n kwargs={\n 'batch_id': str(batch_id),\n 'last_change_dt_str': batch.last_change_dt.strftime(\"%Y-%m-%dT%H:%M:%S\")\n },\n eta=dt\n )\n\n\n@documents_bp.route('/batch/update/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(batch_id=ArgumentValidator(), batch=JsonValidator())\ndef batch_update(batch_id=None, batch=None):\n with current_app.model_cache_context:\n current_batch_db_object = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user,\n deleted=False).first()\n if not current_batch_db_object:\n raise errors.BatchNotFound()\n\n if current_batch_db_object.status == BatchStatusEnum.BS_BEING_FINALISED:\n current_app.logger.debug(u\"Updating batch during finalization - cancelling finalization\")\n\n try:\n BatchManager.cancel_batch_finalization(current_batch_db_object,\n current_app.config, current_app.logger)\n except Exception:\n current_app.logger.exception(u\"Failed to cancel batch finalisation\")\n DocumentBatchDbObject.query.filter_by(id=batch_id, status=BatchStatusEnum.BS_BEING_FINALISED).update(\n {'status': BatchStatusEnum.BS_EDITED})\n sqldb.session.commit()\n raise errors.DocumentBatchUpdateError()\n\n manager = 
BatchManager.init(current_batch_db_object)\n\n        batch_type = current_batch_db_object.batch_type\n        batch['batch_type'] = batch_type\n        if 'metadata' in batch:\n            batch['_metadata'] = batch['metadata']\n\n        new_batch = DocumentBatch.parse_raw_value(batch, api_data=True)\n\n        new_batch_api_data = manager.update_batch(batch_id, new_batch, current_user.id,\n                                                  current_app.config, current_app.logger)\n\n        DocumentBatchDbObject.query.filter_by(id=batch_id).update({'last_change_dt': datetime.utcnow()})\n        sqldb.session.commit()\n        if batch_type == DocumentBatchTypeEnum.DBT_NEW_LLC:\n            schedule_notification_email(batch_id)\n        return new_batch_api_data\n\n\n@documents_bp.route('/batch/document/state/', methods=['GET'])\n@api_view\n@login_required\n@validate_arguments(batch_id=ArgumentValidator(), document_id=ArgumentValidator())\ndef get_document_preview_status(batch_id=None, document_id=None):\n    batch = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).first()\n    if not batch:\n        raise errors.BatchNotFound()\n\n    doc = BatchDocumentDbObject.query.filter_by(batch=batch, id=document_id).first()\n    if not doc:\n        raise errors.DocumentNotFound()\n\n    links = {\n        'pdf': FileStorage.get_url(doc.file, current_app.config),\n        'jpeg': []\n    } if doc.file else {\n        'pdf': None,\n        'jpeg': []\n    }\n\n    return {\n        'result': {\n            'state': doc.status,\n            'links': links,\n            'document_id': unicode(document_id)\n        }\n    }\n\n\n@documents_bp.route('/batch/document/render/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(batch_id=ArgumentValidator(), document_id=ArgumentValidator())\ndef render_document_preview(batch_id=None, document_id=None):\n    batch = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).first()\n    if not batch:\n        raise errors.BatchNotFound()\n\n    doc = BatchDocumentDbObject.query.filter_by(batch=batch, id=document_id).scalar()\n    if not doc:\n        raise errors.DocumentNotFound()\n\n    async_result = rendering.render_document_preview.apply_async((document_id,), countdown=2)\n    if not async_result.ready():\n        task_id = str(async_result.id)\n        doc.status = UserDocumentStatus.DS_RENDERING\n        doc._celery_task_id = task_id # override (capture) document by new task\n        doc._celery_task_started = datetime.utcnow()\n        current_app.logger.debug(u\"Render preview task id: %s\" % task_id)\n        sqldb.session.commit()\n    return {\"result\": True}\n\n\n@documents_bp.route('/batch/update_metadata/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(batch_id=ArgumentValidator(), batch=JsonValidator())\ndef batch_update_metadata(batch_id=None, batch=None):\n    current_batch_db_object = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user,\n                                                                    deleted=False).first()\n    if not current_batch_db_object:\n        raise errors.BatchNotFound()\n\n    current_batch_db_object._metadata = batch.get('metadata', {})\n    sqldb.session.commit()\n    current_batch = DocumentBatch.db_obj_to_field(current_batch_db_object)\n    return {'result': current_batch.get_api_structure()}\n\n\ndef get_reg_fee_data(path):\n    temp_file_out = tempfile.NamedTemporaryFile(mode=\"w+\", suffix=\".txt\")\n    output_file_name = temp_file_out.name\n    temp_file_out.close()\n    p = subprocess.Popen(['pdftotext', path, output_file_name], stdin=subprocess.PIPE, stdout=subprocess.PIPE,\n                         stderr=subprocess.PIPE)\n    out, err = p.communicate()\n    rc = p.returncode\n    if rc != 0:\n        current_app.logger.error(u\"Failed to execute pdftotext (%s, %s)\" % (out, err))\n        return\n    if not os.path.exists(output_file_name):\n        current_app.logger.error(u\"No file was generated\")\n        return\n\n    with codecs.open(output_file_name, 'r', 'utf-8') as f:\n        content = f.read().split(u'линия отрыва')[0].strip().replace('\\r', '')\n        while '  ' in content:\n            content = content.replace('  ', ' ')\n\n    pos = content.rfind(u'Индекс док.')\n    if pos == -1:\n        return\n\n    part1 = content[:pos]\n    part2 = content[pos:]\n    doc_index = u\"\"\n    applicant = u\"\"\n    fio = u\"\"\n    address = u\"\"\n    inn = u\"\"\n    kbk = u\"\"\n    f106 = u\"\"\n    f107 = u\"\"\n    f110 = u\"\"\n    cost = u\"\"\n    bik = u\"\"\n    # account1 = u\"\"\n    account2 = u\"\"\n    inn_ul = u\"\"\n    kpp = u\"\"\n    oktmo = u\"\"\n    receiver = u\"\"\n    receiver_bank = u\"\"\n\n    part1_parts = part1.split('\\n\\n')\n    for part in part1_parts:\n        part = part.replace('\\n', ' ')\n        part = part.strip()\n        if not part:\n            continue\n        #current_app.logger.debug(u\"Part: %s\" % part)\n        match = re.match(u'\\(101\\)\\s*(\\d+).*', part)\n        if match:\n            applicant = match.groups()[0]\n            continue\n        match = re.match(u'Индекс док\\.\\s*(\\d+).*', part)\n        if match:\n            doc_index = match.groups()[0]\n            continue\n        match = re.match(u'ФИО\\s*(.*)', part)\n        if match:\n            fio = match.groups()[0]\n            continue\n        match = re.match(u'Адрес\\s*(.*)', part)\n        if match:\n            address = match.groups()[0]\n            continue\n        match = re.match(u'ИНН\\s*(\\d{12}).*', part)\n        if match:\n            inn = match.groups()[0]\n            continue\n        match = re.match(u'КБК\\s*(\\d+).*', part)\n        if match:\n            kbk = match.groups()[0]\n            continue\n        match = re.match(u'.*\\(106\\)\\s*(\\w{1,3}).*', part)\n        if match:\n            f106 = match.groups()[0]\n\n        match = re.match(u'.*\\s*\\(110\\)\\s*(\\d+).*', part)\n        if match:\n            f110 = match.groups()[0]\n\n        match = re.match(u'.*Сумма\\s*(\\d+).*', part)\n        if match:\n            cost = match.groups()[0]\n        match = re.match(u'.*БИК\\s*(\\d+).*', part)\n        if match:\n            bik = match.groups()[0]\n        match = re.match(u'.*Сч\\.. (\\d+).*Сч\\.. (\\d+)\\D+.*', part)\n        if match:\n            account1 = match.groups()[0]\n            account2 = match.groups()[1]\n        match = re.match(u'.*ИНН\\s*(\\d{10}).*', part)\n        if match:\n            inn_ul = match.groups()[0]\n        match = re.match(u'.*КПП\\s*(\\d{9}).*', part)\n        if match:\n            kpp = match.groups()[0]\n        match = re.match(u'.*ОКТМО\\s*(\\d{7,10}).*', part)\n        if match:\n            oktmo = match.groups()[0]\n        match = re.match(u'.*\\(107\\)\\s*(\\d{1,2}\\.\\d{1,2}\\.\\d{4}).*', part)\n        if match:\n            f107 = match.groups()[0]\n\n        match = re.match(u'.*ОКТМО\\s*(\\d{7,10}).*', part)\n        if match:\n            oktmo = match.groups()[0]\n            continue\n\n        match = re.match(u'Банк получателя\\s*(.*)\\s*Получатель\\s*(.*)', part)\n        if match:\n            receiver_bank = match.groups()[0]\n            receiver = match.groups()[1]\n\n    part2_parts = part2.split('\\n\\n')\n    for part in part2_parts:\n        part = part.replace('\\n', ' ')\n        part = part.strip()\n        if not part:\n            continue\n        #current_app.logger.debug(u\"Part[2]: %s\" % part)\n        match = re.match(u'КБК\\s*(\\d+).*', part)\n        if match:\n            kbk = match.groups()[0]\n            continue\n        match = re.match(u'.*\\(106\\)\\s*([АБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЬЪЭЮЯ]{1,3}).*', part)\n        if match:\n            f106 = match.groups()[0]\n            continue\n\n        match = re.match(u'.*\\s*\\(110\\)\\s*(\\d+).*', part)\n        if match:\n            f110 = match.groups()[0]\n\n        match = re.match(u'.*Банк получателя\\s*(.*)\\s*Получатель\\s*(.*)', part)\n        if match:\n            receiver_bank = match.groups()[0]\n            receiver = match.groups()[1]\n\n    try:\n        os.unlink(output_file_name)\n    except Exception:\n        current_app.logger.warn(u\"Failed to remove file %s\" % output_file_name)\n\n    return {\n        u\"Индекс документа\": doc_index,\n        u\"Платильщик\": {\n            u\"ФИО\": fio,\n            u\"Адрес\": address,\n            u\"ИНН\": inn\n        },\n        u\"Получатель\": {\n            u\"Банк получателя\": receiver_bank,\n            u\"Получатель\": receiver,\n            u\"БИК\": bik,\n            u\"Сч.№\": account2,\n            u\"ИНН\": inn_ul,\n            u\"КПП\": kpp\n        },\n        u\"Параметры платежа\": {\n            u\"Сумма\": cost,\n            u\"ОКТМО\": oktmo,\n            u\"КБК\": kbk,\n            u\"(101) Статус\": applicant,\n            u\"(107) Налоговый период\": f107,\n            u\"(106) Основание платежа\": f106,\n            u\"(110) Тип платежа\": f110\n        }\n    }\n\n\n@documents_bp.route('/batch/invoice_data/', methods=['GET'])\n@api_view\n@login_required\n@validate_arguments(batch_id=ArgumentValidator())\ndef get_batch_reg_fee_invoice_data(batch_id=None):\n    batch = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).scalar()\n    if not batch:\n        raise errors.BatchNotFound()\n\n    if batch.status != BatchStatusEnum.BS_FINALISED:\n        return {\"result\": None, \"message\": \"document batch is not finalized\"}\n\n    for doc in batch._documents:\n        if doc.document_type in (DocumentTypeEnum.DT_REGISTRATION_FEE_INVOICE,\n                                 DocumentTypeEnum.DT_IP_STATE_DUTY) and doc.status==UserDocumentStatus.DS_RENDERED:\n            file_obj = doc.file\n            if not file_obj:\n                return {\"result\": None, \"message\": \"failed to find file object for document with id %s\" % doc.id}\n            path = FileStorage.get_path(file_obj, current_app.config)\n            if not path or not os.path.exists(path):\n                return {\"result\": None, \"message\": \"file %s not found\" % path}\n            res = get_reg_fee_data(path)\n            if not res:\n                return {\"result\": None, \"message\": \"Failed to get reg fee data\"}\n            return {\"result\": res}\n\n    return {\"result\": None, \"message\": \"rendered document not found\"}\n\n\n@documents_bp.route('/batch/render_document/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(batch_id=ArgumentValidator(), document_type=ArgumentValidator())\ndef render_batch_documents(batch_id=None, 
document_type=None):\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).first()\n if not batch:\n raise errors.BatchNotFound()\n batch_manager = BatchManager.init(batch)\n\n document_types = json.loads(document_type)\n if not isinstance(document_types, list) and not isinstance(document_types, tuple):\n raise errors.InvalidParameterValue('document_type')\n\n doc_type_set = set()\n for doc_type in document_types:\n if not batch_manager.check_doc_type_support(batch.batch_type, doc_type):\n raise errors.InvalidParameterValue('document_type')\n doc_type_set.add(doc_type)\n\n action_descriptor = {\n 'plugin': 'doc_builder',\n 'action': 'render_group'\n }\n\n event_data = {\n 'doc_types': list(doc_type_set),\n 'batch_id': batch.id\n }\n\n BatchManager.perform_action(action_descriptor, batch, event_data, current_app.logger, current_app.config)\n return {\"result\": True}\n\n\n@documents_bp.route('/batch/render_document/state/', methods=['GET'])\n@api_view\n@login_required\n@validate_arguments(batch_id=ArgumentValidator(), document_types=ArgumentValidator())\ndef get_render_batch_documents_state(batch_id=None, document_types=None):\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).first()\n if not batch:\n raise errors.BatchNotFound()\n\n batch_manager = BatchManager.init(batch)\n try:\n document_types = json.loads(document_types)\n if not isinstance(document_types, list) and not isinstance(document_types, tuple):\n raise Exception()\n except Exception:\n raise errors.InvalidParameterValue('document_type')\n\n doc_type_set = set()\n for doc_type in document_types:\n if not batch_manager.check_doc_type_support(batch.batch_type, doc_type):\n raise errors.InvalidParameterValue('document_type')\n doc_type_set.add(doc_type)\n\n result = []\n\n for doc_type in doc_type_set:\n doc_obj = BatchDocumentDbObject.query.filter_by(batch_id=batch_id, document_type=doc_type).first()\n if not doc_obj:\n result.append({\n 'state': UserDocumentStatus.DS_NEW,\n 'document_type': doc_type\n })\n continue\n\n doc_info = {\n 'state': doc_obj.status,\n 'document_type': doc_type\n }\n\n if doc_obj.status == UserDocumentStatus.DS_RENDERED:\n if doc_obj.file:\n doc_info['links'] = {\n 'pdf': FileStorage.get_url(doc_obj.file, current_app.config),\n 'jpeg': []\n }\n result.append(doc_info)\n else:\n current_app.logger.debug(u\"Not found rendered documents for rendered document %s. 
\"\n u\"Returning as rendering_failed\" % doc_type)\n result.append({\n 'state': UserDocumentStatus.DS_RENDERING_FAILED,\n 'document_type': doc_type\n })\n else:\n result.append(doc_info)\n\n return result\n\n\n@documents_bp.route('/batch/go_ahead/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(batch_id=ArgumentValidator())\ndef go_ahead(batch_id=None):\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).first()\n if not batch:\n raise errors.BatchNotFound()\n if batch.batch_type != DocumentBatchTypeEnum.DBT_NEW_LLC:\n BatchManager.handle_event(batch_id, 'go_ahead', {\n 'batch_id': batch_id\n }, logger=current_app.logger, config=current_app.config)\n return {\"result\": True}\n return _finalize_batch(batch)\n\n\n@documents_bp.route('/batch/go_back/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(batch_id=ArgumentValidator())\ndef go_back(batch_id=None):\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).first()\n if not batch:\n raise errors.BatchNotFound()\n if batch.batch_type != DocumentBatchTypeEnum.DBT_NEW_LLC:\n BatchManager.handle_event(batch_id, 'go_back', {\n 'batch_id': batch_id\n }, logger=current_app.logger, config=current_app.config)\n return {\"result\": True}\n raise NotImplementedError()\n" }, { "alpha_fraction": 0.563715934753418, "alphanum_fraction": 0.5642023086547852, "avg_line_length": 35.07017517089844, "blob_id": "eea20b2aec936d4703313d9d82f123cf8d0fa723", "content_id": "1d0389c4d8f2fddd422e5c8c19e2dd39fda5642b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2204, "license_type": "no_license", "max_line_length": 105, "num_lines": 57, "path": "/app/fw/api/api_data.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n\ndef get_user_api_structure(auth_user):\n result = {\n 'temporal': auth_user.temporal or False,\n\n 'id': unicode(auth_user.uuid),\n 'email': auth_user.email or u\"\",\n 'email_confirmed': auth_user.email_confirmed,\n 'mobile': auth_user.mobile or u\"\",\n 'mobile_confirmed': auth_user.mobile_confirmed,\n\n 'password_set': bool(auth_user.password and auth_user.password != u'!notset!'), # bool\n\n 'registration_date': auth_user.signup_date.strftime(\"%Y-%m-%dT%H:%M:%S\"), # — дата регистрации\n 'facebook': None, # — идентификатор пользователя в facebook (если есть)\n 'vk': None, # — идентификатор пользователя в VK (если есть)\n\n 'person': { # — физическое лицо\n 'name': auth_user.name,\n 'surname': auth_user.surname,\n 'patronymic': auth_user.patronymic\n },\n 'role': ['user'] # — список ролей пользователя, в виде массива,\n # пример: [\"user\", \"support\", \"moderator\", \"admin\"]\n }\n\n from fw.documents.batch_manager import BatchManager\n batch = BatchManager.get_last_modified_batch(auth_user.id)\n\n if batch:\n batch_manager = BatchManager.init(batch)\n\n batch_caption = batch_manager.get_last_modified_batch_caption(batch)\n batch_type = batch.batch_type\n batch_stage = batch_manager.get_stage(batch)\n\n last_service_data = {\n 'id': batch.id,\n 'caption': batch_caption,\n 'type': batch_type,\n 'stage': batch_stage\n }\n\n result['last_service'] = last_service_data\n\n from services.pay.subs_manager import SubscriptionManager\n\n user_subs = SubscriptionManager.get_user_active_subscription(auth_user.id)\n\n result['subscription'] = {\n 'type': user_subs.type,\n 'last_day': 
user_subs.end_dt.strftime(\"%Y-%m-%dT%H:%M:%S\")\n } if user_subs else None\n\n return result\n" }, { "alpha_fraction": 0.5615113973617554, "alphanum_fraction": 0.5711774826049805, "avg_line_length": 33.48484802246094, "blob_id": "c5ef9ccbaaa0a287c42d135d1486fc5d7dec8229", "content_id": "9aa6526d841d45f2c4c9b519ffcbb3903a0fdc4a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1138, "license_type": "no_license", "max_line_length": 122, "num_lines": 33, "path": "/app/fw/async_tasks/send_sms_task.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom celery import current_app as celery\nfrom flask.templating import render_template\n\n\[email protected](default_retry_delay=15,max_retries=10)\ndef send_sms(number_to, sms_type, **kwargs):\n sms_sender = celery.conf.get('SMS_SENDER')\n\n with celery.conf['flask_app']().app_context():\n text = render_template(\"sms/%s.sms\" % sms_type, **kwargs)\n\n values = {\n 'phones':number_to,\n 'mes':unicode(text),\n 'charset':'utf-8',\n 'fmt' : 3 # json response\n }\n # check sms cost first. do not send if more than 3 rubles\n values_cost = values.copy()\n values_cost['cost'] = 1\n try:\n cost = sms_sender.get_sms_cost(values_cost)\n except Exception, ex:\n raise RuntimeError(\"Bad answer from sms gate: %s\" % unicode(ex))\n\n if cost > 3.0:\n raise RuntimeError(\"SMS cost too big: %s. number: %s, message length: %d\" % (str(cost), number_to, len(text)))\n\n try:\n sms_sender.send(values)\n except Exception, ex:\n raise RuntimeError(\"Error sending sms: %s\" % str(ex))\n" }, { "alpha_fraction": 0.5610803365707397, "alphanum_fraction": 0.5620983839035034, "avg_line_length": 44.556522369384766, "blob_id": "3b2106d26bb6b869aeb1d17c5265f5457bb3f100", "content_id": "503637ddfdfa609e362f2e874bf960e1aae58140", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 31434, "license_type": "no_license", "max_line_length": 193, "num_lines": 690, "path": "/app/fw/async_tasks/rendering.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport json\nimport os\nimport sys\nfrom copy import copy\nfrom datetime import datetime\n\nfrom celery.exceptions import SoftTimeLimitExceeded\nfrom celery import current_app as celery\nfrom celery import current_task\nfrom sqlalchemy.orm import make_transient\n\nfrom fw.api.base_handlers import error_tree_to_list\nfrom fw.async_tasks import send_email\nfrom fw.async_tasks.scheduler import CeleryScheduler\nfrom fw.auth.user_manager import UserManager\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.db_fields import DocumentBatchDbObject, BatchDocumentDbObject, DocGroupRenderTaskCheck, \\\n PrivatePersonDbObject, DocumentFilesObject\nfrom fw.documents.doc_requisites_storage import DocRequisitiesStorage\nfrom fw.documents.enums import UserDocumentStatus, BatchStatusEnum, DocumentBatchTypeEnum\nfrom fw.documents.fields.doc_fields import UserDocument, PrivatePerson\nfrom fw.monitoring_utils import zabbix_sender\nfrom fw.storage.file_storage import FileStorage\nfrom fw.storage.models import FileObject\nfrom template_filters import utm_args\n\ncelery.config_from_envvar('CELERY_CONFIG_MODULE')\n\n\nclass BatchTaskFileIdHolder(object):\n    def __init__(self, task_id, config, batch):\n self.task_id = task_id\n self.config = config\n self.file_name = os.path.join(os.path.dirname(self.config['celery_tasks_dir']), 
unicode(self.task_id))\n self.batch = batch\n self.unbind = False\n\n def __enter__(self):\n if not os.path.exists(self.file_name):\n try:\n with open(self.file_name, 'w') as f:\n f.write(str(self.task_id))\n except Exception:\n pass\n return self\n\n def __exit__(self, *args):\n if self.file_name and os.path.exists(self.file_name):\n try:\n os.unlink(self.file_name)\n except Exception:\n pass\n\n if self.unbind:\n return\n self.batch.current_task_id = None\n sqldb.session.commit()\n\n def exists(self):\n return os.path.exists(self.file_name)\n\n\nclass FakeTaskHolder(object):\n def __init__(self):\n pass\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n pass\n\n def exists(self):\n return True\n\n\n# noinspection PyProtectedMember\ndef render_single_document(db_doc, doc_title, watermark, config, logger, task_id, render_doc_file):\n if not db_doc:\n logger.info(u\"Exit 1\")\n return False\n\n doc_id = db_doc.id\n doc_type = db_doc.document_type\n owner = db_doc._owner\n\n from fw.documents.doc_builder import DocBuilder\n\n if db_doc._celery_task_id is None:\n db_doc._celery_task_started = datetime.utcnow()\n db_doc._celery_task_id = task_id\n sqldb.session.commit()\n elif db_doc._celery_task_id != task_id:\n if db_doc._celery_task_started and abs((datetime.utcnow() - db_doc._celery_task_started).total_seconds()) > 60:\n db_doc._celery_task_started = datetime.utcnow()\n db_doc._celery_task_id = task_id\n sqldb.session.commit()\n else:\n logger.info(u\"Exit 2\")\n return False\n\n user_doc = UserDocument.db_obj_to_field(db_doc)\n try:\n doc_data = user_doc.get_db_object_data()[\"data\"]\n db_doc.status = UserDocumentStatus.DS_RENDERING\n db_doc.tried_to_render = True\n sqldb.session.commit()\n result = DocBuilder.process(doc_data, doc_type, config, owner.id, add_watermark=watermark, render_doc_file=render_doc_file)\n if result:\n current_doc = BatchDocumentDbObject.query.filter_by(id=doc_id).scalar()\n if not current_doc or current_doc._celery_task_id != task_id:\n logger.warn(u\"Failed to set result: user document has been captured by another task\")\n logger.info(u\"Exit 3\")\n return False\n\n for doc_file in db_doc.files:\n old_file_id = doc_file.id\n FileStorage.remove_file(old_file_id, config)\n db_doc.files.remove(doc_file)\n\n file_id = None\n for file_id in result:\n new_doc_file = DocumentFilesObject(doc_id=doc_id, files_id=file_id)\n sqldb.session.add(new_doc_file)\n\n if file_id:\n updated_count = BatchDocumentDbObject.query.filter_by(id=doc_id, _celery_task_id=task_id).update({\n 'status': UserDocumentStatus.DS_RENDERED,\n 'caption': doc_title,\n 'pages_count': FileStorage.get_pdf_file_page_count(file_id, config)\n })\n if not updated_count:\n logger.warn(u\"Failed to set result: user document has been captured by another task 2\")\n logger.info(u\"Exit 4\")\n sqldb.session.rollback()\n return False\n\n sqldb.session.commit()\n logger.info(u\"Exit 5\")\n return True\n else:\n raise Exception(u\"Failed to generate document %s\" % doc_type)\n except Exception, ex:\n logger.exception(u\"Failed to render document %s\" % doc_type)\n db_doc.status = UserDocumentStatus.DS_RENDERING_FAILED\n sqldb.session.commit()\n try:\n error_message = ex.message if isinstance(ex.message, unicode) else unicode(ex.message, errors='ignore')\n logger.exception(error_message)\n except Exception:\n logger.error(u\"Failed to log error message\")\n finally:\n try:\n if BatchDocumentDbObject.query.filter_by(id=doc_id, _celery_task_id=task_id).update({\n '_celery_task_id': 
None,\n '_celery_task_started': None\n }):\n sqldb.session.commit()\n except Exception:\n current_doc = BatchDocumentDbObject.query.filter_by(id=doc_id).scalar()\n logger.error(u\"Failed to reset task id! Document object in db: %s\" %\n json.dumps(current_doc.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n logger.info(u\"Exit 6\")\n return False\n\n\ndef render_batch_raw(batch_db_object_id, render_only_failed_docs=False):\n request = current_task.request\n config = celery.conf.get('config')\n eager = celery.conf['CELERY_ALWAYS_EAGER']\n app = celery.conf['flask_app']()\n logger = app.logger\n\n from fw.documents.batch_manager import BatchManager\n with app.app_context():\n batch_db_object = DocumentBatchDbObject.query.filter_by(id=batch_db_object_id).first()\n if not batch_db_object:\n logger.error(u\"Failed to find batch with id %s in collection %s in db %s\" % (\n batch_db_object_id, DocumentBatchDbObject.COLLECTION_NAME, config['db_name']))\n return False\n\n batch_db_object.current_task_id = request.id\n batch_db_object.batch_rendering_start = datetime.utcnow()\n sqldb.session.commit()\n\n task_holder = BatchTaskFileIdHolder(request.id, config, batch_db_object) if not eager else FakeTaskHolder()\n with task_holder as task_file:\n logger.info(u\"Generating documents for batch %s\" % batch_db_object_id)\n\n try:\n batch_type = batch_db_object.batch_type\n\n batch_manager = BatchManager.init(batch_db_object)\n batch_descriptor = DocRequisitiesStorage.get_batch_descriptor(batch_type)\n doc_types_allowed_to_deferred_rendering = batch_descriptor.get('deferred_render_docs', [])\n\n watermark = None if batch_db_object.paid else \"notpaid_watermark.png\"\n\n has_errors = False\n failed_doc_types = set()\n for doc in batch_db_object._documents:\n # if not task_file.exists():\n # logger.warning(u\"Task with id %s has been revoked\" % request.id)\n # batch_db_object.status = BatchStatusEnum.BS_EDITED\n # sqldb.session.commit()\n # return True\n\n doc_type = doc.document_type\n if render_only_failed_docs and doc.status != UserDocumentStatus.DS_RENDERING_FAILED:\n continue\n\n render_doc_file = batch_type == DocumentBatchTypeEnum.DBT_NEW_LLC and batch_db_object.paid\n if not render_single_document(doc, batch_manager.get_title(doc_type), watermark,\n config, logger, request.id, render_doc_file):\n has_errors = True\n if doc_type not in doc_types_allowed_to_deferred_rendering:\n if not render_only_failed_docs:\n batch_db_object.status = BatchStatusEnum.BS_EDITED\n batch_db_object.error_info = {\n \"error_ext\": [{\n \"field\": \"\",\n \"error_code\": 0,\n \"message\": u\"Failed to render document %s\" % doc_type\n }]\n }\n sqldb.session.commit()\n\n return False\n failed_doc_types.add(doc_type)\n\n if not render_only_failed_docs:\n batch_db_object.finalisation_count += 1\n batch_db_object.status = BatchStatusEnum.BS_FINALISED\n if batch_db_object.batch_rendering_start:\n try:\n dt = datetime.utcnow() - batch_db_object.batch_rendering_start\n fs = dt.total_seconds() + dt.microseconds / 1000000.\n zabbix_sender.send('celery_max_time', fs)\n except Exception:\n pass\n batch_db_object.finalisation_date = datetime.utcnow()\n CeleryScheduler.remove(\"not_finalised_check_and_send%s\" % str(batch_db_object_id))\n try:\n if batch_db_object.batch_rendering_start:\n dt = datetime.utcnow() - batch_db_object.batch_rendering_start\n seconds = dt.total_seconds()\n zabbix_sender.send(\"celery_max_time\", seconds)\n except Exception:\n pass\n sqldb.session.commit()\n\n if has_errors:\n if 
len(failed_doc_types) == 1 and failed_doc_types.pop() in ('reg_fee_invoice', 'ip_reg_fee_invoice'):\n from services.ifns import ifns_manager\n if ifns_manager.if_gp_pay_working():\n logger.error(u\"Invalid fee invoice render attempt (service.nalog.ru is working). Cancelling!\")\n batch_db_object.status = BatchStatusEnum.BS_EDITED\n sqldb.session.commit()\n return False\n async_result = render_batch.apply_async((batch_db_object_id,), {'render_only_failed_docs': True}, countdown=300)\n if not async_result.ready():\n task_file.unbind = True\n new_task_id = unicode(async_result.id)\n batch_db_object.current_task_id = new_task_id\n batch_db_object.batch_rendering_start = datetime.utcnow()\n logger.debug(u\"Task id: %s\" % new_task_id)\n sqldb.session.commit()\n except Exception:\n logger.exception(u\"Failed to render batch %s\" % batch_db_object_id)\n if not render_only_failed_docs:\n batch_db_object.status = BatchStatusEnum.BS_EDITED\n sqldb.session.commit()\n\n if render_only_failed_docs and batch_db_object.paid:\n user = batch_db_object._owner\n addr_to = user.email or \"\"\n if not addr_to:\n logger.warning(u\"Failed to send email: user %s has no email\" % unicode(user.id))\n else:\n documents = BatchManager.get_shared_links_to_rendered_docs(batch_db_object, config, logger)\n\n if batch_db_object.batch_type == DocumentBatchTypeEnum.DBT_NEW_LLC:\n go_further_url = u\"%s://%s/ooo/?id=%s\" % (\n config['WEB_SCHEMA'], config['DOMAIN'], batch_db_object_id)\n go_further_url = utm_args(go_further_url, 'deferred_docs_ready', user.id) + u\"#page=documents\"\n go_further_url = UserManager.make_auth_url(go_further_url, user).get_url(config)\n\n short_name = batch_db_object.data.get('short_name', \"\")\n send_email.send_email.delay(addr_to, \"deferred_docs_ready\",\n go_further_url=go_further_url,\n short_name=short_name,\n schema=config['WEB_SCHEMA'],\n domain=config['DOMAIN'],\n docs=documents,\n user_id=str(user.id)\n )\n elif batch_db_object.batch_type == DocumentBatchTypeEnum.DBT_NEW_IP:\n go_further_url = u\"%s://%s/ip/?id=%s\" % (\n config['WEB_SCHEMA'], config['DOMAIN'], batch_db_object_id)\n go_further_url = utm_args(go_further_url, 'deferred_ip_docs_ready', user.id) + u\"#page=documents\"\n go_further_url = UserManager.make_auth_url(go_further_url, user).get_url(config)\n short_name = \"\"\n try:\n pp_data = batch_db_object.data.get('person')\n if pp_data and '_id' in pp_data:\n person_db = PrivatePersonDbObject.query.filter_by(id=pp_data['_id']).scalar()\n if person_db:\n person = PrivatePerson.db_obj_to_field(person_db)\n short_name = person.get_short_name()\n except Exception:\n logger.exception(u\"Failed to get batch caption\")\n\n send_email.send_email.delay(addr_to, \"deferred_ip_docs_ready\",\n go_further_url=go_further_url,\n short_name=short_name,\n schema=config['WEB_SCHEMA'],\n domain=config['DOMAIN'],\n docs=documents,\n user_id=str(user.id)\n )\n\n return True\n\n\n# noinspection PyProtectedMember\[email protected]()\ndef render_batch(batch_db_object_id, render_only_failed_docs=False):\n return render_batch_raw(batch_db_object_id, render_only_failed_docs)\n\n\[email protected]()\ndef render_document_preview(document_object_id):\n document_object_id = document_object_id\n config = celery.conf.get('config')\n request = current_task.request\n sys.path.append(os.path.normpath(os.path.abspath(os.path.dirname(__name__))))\n\n logger = celery.log.get_default_logger()\n\n with celery.conf['flask_app']().app_context():\n db_doc = 
BatchDocumentDbObject.query.filter_by(id=document_object_id).scalar()\n if not db_doc or db_doc.status not in (UserDocumentStatus.DS_NEW,\n UserDocumentStatus.DS_RENDERING_FAILED,\n UserDocumentStatus.DS_RENDERING) \\\n or db_doc._celery_task_id not in (None, request.id):\n return False\n\n from fw.documents.batch_manager import BatchManager\n\n batch_manager = BatchManager.init(db_doc.batch)\n doc_caption = batch_manager.get_batch_caption(db_doc.batch)\n return render_single_document(db_doc, doc_caption, 'preview_watermark.png', config, logger, request.id)\n\n\[email protected]()\ndef render_batch_document(batch_db_object_id, doc_id):\n config = celery.conf.get('config')\n app = celery.conf['flask_app']()\n with app.app_context():\n return render_batch_document_raw(batch_db_object_id, doc_id, config)\n\n\[email protected]()\ndef render_document_plugin(batch_id, event_data):\n doc_id = event_data['doc_id']\n from fw.documents.batch_manager import BatchManager\n config = celery.conf.get('config')\n app = celery.conf['flask_app']()\n logger = celery.log.get_default_logger()\n with app.app_context():\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()\n if not batch:\n BatchManager.handle_event(batch_id, \"doc_render_fail\", event_data, logger, config=config)\n batch_group_gen_check_task.delay()\n raise Exception(\"Batch not found: %s\" % batch_id)\n try:\n render_batch_document_raw(batch_id, doc_id, config)\n doc = BatchDocumentDbObject.query.filter_by(id=doc_id).scalar()\n assert(doc)\n event = \"doc_render_success\" if doc.status == UserDocumentStatus.DS_RENDERED else \"doc_render_fail\"\n logger.info(u\"render_document_plugin event %s for document %s\" % (event, doc.id))\n BatchManager.handle_event(batch_id, event, event_data, logger, config=config)\n batch_group_gen_check_task.delay()\n except Exception:\n zabbix_sender.send(\"celery_failures\", 1)\n BatchManager.handle_event(batch_id, \"doc_render_fail\", event_data, logger, config=config)\n batch_group_gen_check_task.delay()\n raise\n\n\[email protected]()\ndef batch_group_gen_check_task():\n from fw.documents.batch_manager import BatchManager\n app = celery.conf['flask_app']()\n logger = celery.log.get_default_logger()\n with app.app_context():\n for batch_check in DocGroupRenderTaskCheck.query.filter_by(check_completed=False):\n logger.info(u\"Checking check %s\" % str(batch_check.id))\n batch = DocumentBatchDbObject.query.filter_by(id=batch_check.batch_id).scalar()\n if not batch:\n BatchManager.handle_event(batch_check.batch_id, \"doc_group_render_fail\", {}, logger, config=app.config)\n raise Exception(\"Batch not found: %s\" % batch_check.batch_id)\n doc_id_list = batch_check.doc_id_list\n all_rendered = True\n logger.info(u\"Checking. 
Doc id list length: %s\" % str(len(doc_id_list)))\n for doc_id in doc_id_list:\n doc = BatchDocumentDbObject.query.filter_by(id=doc_id).scalar()\n logger.info(u\"Checking doc %s.\" % str(doc_id))\n if not doc or doc.status == UserDocumentStatus.DS_RENDERING_FAILED or \\\n doc._celery_task_id and abs((datetime.utcnow() - doc._celery_task_started).total_seconds()) > 60:\n res = DocGroupRenderTaskCheck.query.filter_by(id=batch_check.id, check_completed=False).update({\n 'check_completed': True\n })\n sqldb.session.commit()\n logger.info(u\"Checking -> checked %s.\" % str(res))\n if res:\n all_rendered = False\n logger.info(u\"Checking: handling doc_group_render_fail for %s\" % str(batch_check.batch_id))\n BatchManager.handle_event(batch_check.batch_id, \"doc_group_render_fail\", batch_check.event_data, logger, config=app.config)\n break\n if doc.status != UserDocumentStatus.DS_RENDERED:\n logger.info(u\"Checking: doc %s is not rendered\" % str(doc.id))\n all_rendered = False\n if all_rendered:\n logger.info(u\"Checking: All rendered\")\n res = DocGroupRenderTaskCheck.query.filter_by(id=batch_check.id, check_completed=False).update({\n 'check_completed': True\n })\n sqldb.session.commit()\n if res:\n BatchManager.handle_event(batch_check.batch_id, \"doc_group_render_success\", batch_check.event_data, logger, config=app.config)\n\n return True\n\n\ndef render_batch_document_raw(batch_db_object_id, doc_id, config):\n start_time = datetime.utcnow()\n request_id = current_task.request.id\n logger = celery.log.get_default_logger()\n logger.info(u\"Starting rendering document %s of %s\" % (doc_id, batch_db_object_id))\n target_document = BatchDocumentDbObject.query.filter_by(id=doc_id).scalar()\n if not target_document:\n logger.warn(u\" - O_o - \")\n return False\n\n if celery.conf['CELERY_ALWAYS_EAGER']:\n request_id = \"test\"\n target_document._celery_task_id = request_id\n target_document._celery_task_started = start_time\n\n logger.info(u\"doc type of %s is %s\" % (doc_id, target_document.document_type))\n if target_document and target_document.status == UserDocumentStatus.DS_RENDERING:\n assert(target_document._celery_task_id)\n assert(target_document._celery_task_started)\n if target_document._celery_task_id != request_id:\n logger.info(u\" - B - \")\n if abs((datetime.utcnow() - target_document._celery_task_started).total_seconds()) < 60:\n logger.info(u\" - C - \")\n logger.info(\n u\"Task for rendering %s of %s is already being run. Exiting...\" % (doc_id, batch_db_object_id))\n return True\n\n result = BatchDocumentDbObject.query.filter_by(id=doc_id, status=UserDocumentStatus.DS_RENDERING).update(\n {\n '_celery_task_id': request_id,\n '_celery_task_started': start_time\n }\n )\n sqldb.session.commit()\n\n if not result:\n logger.error(u\"Failed to change status of document being rendered into RENDERING state. 
\\n\"\n u\"Probably document is being generated now already 1\")\n return True\n\n logger.info(u\" - F - \")\n batch_db_object = DocumentBatchDbObject.query.filter_by(id=batch_db_object_id).scalar()\n if not batch_db_object:\n logger.error(u\"Failed to find batch with id %s\" % batch_db_object_id)\n return True\n\n from fw.documents.batch_manager import BatchManager\n\n owner_id = batch_db_object._owner_id\n\n logger.info(u\" - I - \")\n if target_document.file:\n file_obj = target_document.file\n target_document.file = None\n FileStorage.remove_file(file_obj.id, config)\n\n BatchDocumentDbObject.query.filter_by(id=target_document.id).update({\n 'file_id': None,\n 'status': UserDocumentStatus.DS_RENDERING,\n 'data': {},\n '_celery_task_id': request_id,\n '_celery_task_started': start_time,\n 'tried_to_render': True\n })\n sqldb.session.commit()\n target_document = BatchDocumentDbObject.query.filter_by(id=target_document.id).scalar()\n logger.info(u\" - J updated %s - \" % target_document.id)\n\n try:\n target_document = UserDocument.db_obj_to_field(target_document)\n _ = batch_db_object.data\n detached_batch = batch_db_object\n sqldb.session.expunge(detached_batch)\n make_transient(detached_batch)\n\n target_document_data = BatchManager.make_document(detached_batch, target_document.document_type.db_value())\n batch_db_object = DocumentBatchDbObject.query.filter_by(id=batch_db_object_id).scalar()\n\n logger.info(u\" - T - \")\n if not target_document_data:\n logger.info(u\" - U - \")\n BatchDocumentDbObject.query.filter_by(id=doc_id, _celery_task_id=request_id).update({\n 'status': UserDocumentStatus.DS_RENDERING_FAILED,\n '_celery_task_id': None,\n '_celery_task_started': None\n })\n sqldb.session.commit()\n return True\n\n target_document.data.value = target_document_data\n target_document.data.initialized = True\n\n doc_data = target_document.get_db_object_data()[\"data\"]\n result = BatchDocumentDbObject.query.filter_by(id=doc_id, _celery_task_id=request_id).update({'data': doc_data})\n sqldb.session.commit()\n\n # batch_descriptor = DocRequisitiesStorage.get_batch_descriptor(batch_db_object.batch_type)\n # validator_condition_schema = batch_descriptor.get('validation_condition', None)\n # from fw.documents.schema.var_construction import VarConstructorFactory\n # validator_condition = VarConstructorFactory.make_constructor(validator_condition_schema) if validator_condition_schema else None\n #\n # validation_type = ValidationTypeEnum.VT_STRICT\n #\n # if validator_condition:\n # context = {\n # '<document>': target_document,\n # '<batch>': batch_db_object\n # }\n # validation_type = validator_condition.build(context)\n #\n # if validation_type != ValidationTypeEnum.VT_NO:\n # target_document.validate(strict=(validation_type == ValidationTypeEnum.VT_STRICT))\n\n target_document.validate(strict=True)\n logger.info(u\" - V - \")\n except Exception, ex:\n logger.info(u\" - W - \")\n logger.exception(u\"Failed to make document %s from batch %s\" % (doc_id, batch_db_object_id))\n ext_data = []\n if getattr(ex, 'ext_data', None):\n ext_data.extend(ex.ext_data)\n if ext_data:\n error_info_ext = error_tree_to_list(ext_data)\n error_info_ext = [{\n 'field': '.'.join(i['field'].split('.')[1:]) if '.' 
in i['field'] else i['field'],\n 'error_code': i['error_code']\n } for i in error_info_ext]\n if not batch_db_object.error_info or 'error_ext' not in batch_db_object.error_info:\n batch_db_object.error_info = {\n 'error_ext': error_info_ext\n }\n else:\n error_info_fields_set = set([i['field'] for i in batch_db_object.error_info['error_ext']])\n merged_error_info = copy(batch_db_object.error_info)\n for i in error_info_ext:\n if i['field'] not in error_info_fields_set:\n merged_error_info['error_ext'].append({\n 'field': i['field'],\n 'error_code': i['error_code']\n })\n sqldb.session.commit()\n batch_db_object.error_info = merged_error_info\n\n logger.info(u\" - pre YY -> XX: %s - \" % str(doc_id))\n\n result = BatchDocumentDbObject.query.filter_by(id=doc_id, _celery_task_id=request_id).update({\n 'status': UserDocumentStatus.DS_RENDERING_FAILED,\n '_celery_task_id': None,\n '_celery_task_started': None\n })\n sqldb.session.commit()\n if not result:\n logger.warn(u\"Failed to mark rendering document as failed\")\n logger.info(u\" - X - \")\n\n logger.info(u\" - XX - \")\n return True\n\n logger.info(u\" - Y - \")\n\n if not result:\n logger.error(u\"Failed to update document %s in batch %s: suddenly document not found\" % (\n doc_id, batch_db_object_id))\n return True\n\n assert target_document\n assert doc_id\n\n logger.info(u\" - AA doc_id= %s - \" % doc_id)\n try:\n\n try:\n from fw.documents.doc_builder import DocBuilder\n\n result = DocBuilder.process(doc_data, target_document.document_type.db_value(), config, owner_id, add_watermark=None) # TODO: watermark logic for not-paid batches, preview documents\n if result:\n logger.info(u\" - BB - \")\n first_file_id = result[0]\n assert first_file_id\n file_obj = FileObject.query.filter_by(id=first_file_id).first()\n if file_obj:\n logger.info(u\" - BB file_obj = %s - \" % first_file_id)\n BatchDocumentDbObject.query.filter_by(id=doc_id, _celery_task_id=request_id).update({\n 'status': UserDocumentStatus.DS_RENDERED,\n 'pages_count': FileStorage.get_pdf_file_page_count(first_file_id, config)\n })\n DocumentFilesObject.query.filter_by(doc_id=doc_id).delete()\n\n for file_id in result:\n new_doc_file = DocumentFilesObject(\n doc_id=doc_id,\n files_id=file_id\n )\n sqldb.session.add(new_doc_file)\n else:\n logger.info(u\" - CC - \")\n raise Exception(\"Failed to generate document %s\" % doc_id)\n except Exception, ex:\n logger.info(u\" - DD - \")\n logger.exception(u\"Failed to render document %s\" % doc_id)\n\n result = BatchDocumentDbObject.query.filter_by(id=doc_id,\n _celery_task_id=request_id).update(\n {\n 'status': UserDocumentStatus.DS_RENDERING_FAILED,\n '_celery_task_id': None,\n '_celery_task_started': None\n })\n if not result:\n logger.warn(u\"Failed to mark document %s as failed to render\" % doc_id)\n sqldb.session.commit()\n\n return True\n\n BatchDocumentDbObject.query.filter_by(id=doc_id,\n _celery_task_id=request_id).update(\n {\n 'status': UserDocumentStatus.DS_RENDERED,\n '_celery_task_id': None,\n '_celery_task_started': None\n }\n )\n sqldb.session.commit()\n logger.info(u\" - EE - \")\n except SoftTimeLimitExceeded:\n logger.info(u\" - FF - \")\n logger.exception(u\"Did not have time to complete rendering. 
Exiting\")\n result = BatchDocumentDbObject.query.filter_by(id=doc_id,\n _celery_task_id=request_id).update(\n {\n 'status': UserDocumentStatus.DS_RENDERING_FAILED,\n '_celery_task_id': None,\n '_celery_task_started': None\n\n })\n sqldb.session.commit()\n if not result:\n logger.warn(u\"Failed to mark document %s as failed\" % doc_id)\n return True\n logger.info(u\" - GG - \")\n return True\n\[email protected]()\ndef touch_batch_plugin(batch_id, event_data):\n from fw.documents.batch_manager import BatchManager\n config = celery.conf.get('config')\n app = celery.conf['flask_app']()\n logger = celery.log.get_default_logger()\n with app.app_context():\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()\n if not batch:\n raise Exception(\"Batch not found: %s\" % batch_id)\n BatchManager.handle_event(batch_id, \"batch_manager.touch\", event_data, logger, config=config)\n return True\n" }, { "alpha_fraction": 0.699313759803772, "alphanum_fraction": 0.699937641620636, "avg_line_length": 29.245283126831055, "blob_id": "2f05e11eee7aff5b8510344a9855832c900aa5a5", "content_id": "1b0913b455edaf7790ef70f5bdffb2cf4ae71047", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1603, "license_type": "no_license", "max_line_length": 72, "num_lines": 53, "path": "/app/fw/catalogs/models.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom sqlalchemy import Column, Unicode, Integer, String, BigInteger\nfrom sqlalchemy.dialects.postgresql import JSONB\nfrom fw.db.sql_base import db as sqldb\n\n\nclass BikCatalog(sqldb.Model):\n __tablename__ = 'bik_catalog'\n\n id = Column(String, primary_key=True)\n name = Column(Unicode, nullable=False)\n okpo = Column(Unicode, nullable=False)\n bik = Column(Unicode, nullable=False, index=True)\n phone = Column(Unicode, nullable=False)\n address = Column(Unicode, nullable=False)\n kor_account = Column(Unicode, nullable=False)\n\n\nclass OkvadObject(sqldb.Model):\n __tablename__ = \"okvad\"\n\n id = Column(String, primary_key=True)\n caption = Column(Unicode, nullable=False)\n okved = Column(Unicode, nullable=False, index=True, unique=True)\n nalog = Column(Unicode, nullable=False)\n parent = Column(Unicode, nullable=True)\n\n\nclass OkvedCatalogObject(sqldb.Model):\n __tablename__ = \"okved_catalog\"\n\n id = Column(String, primary_key=True)\n name = Column(Unicode, nullable=False)\n departments = Column(JSONB)\n\n\nclass GeoCities(sqldb.Model):\n __tablename__ = \"geo_cities\"\n\n name = Column(Unicode, nullable=False)\n cid = Column(Integer, nullable=False, unique=True, primary_key=True)\n region = Column(Unicode, nullable=False)\n lat = Column(Unicode, nullable=False)\n lng = Column(Unicode, nullable=False)\n\n\nclass GeoRanges(sqldb.Model):\n __tablename__ = \"geo_ranges\"\n\n cid = Column(Integer, nullable=False, primary_key=True)\n start = Column(BigInteger, nullable=False)\n end = Column(BigInteger, nullable=False)\n" }, { "alpha_fraction": 0.6386274099349976, "alphanum_fraction": 0.6404494643211365, "avg_line_length": 48.149253845214844, "blob_id": "ddaafa8441b18b337972cc809f8f07990b751520", "content_id": "73b87e884dac506208c49e4835d78170d4fdda7d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3293, "license_type": "no_license", "max_line_length": 131, "num_lines": 67, "path": "/app/services/car_assurance/async_tasks.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": 
"UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\nimport json\n\nfrom celery.exceptions import SoftTimeLimitExceeded\nfrom celery import current_app as celery\nimport requests\n\nfrom fw.documents.db_fields import DocumentBatchDbObject\nfrom services.car_assurance.db_models import CarAssurance\n\ncelery.config_from_envvar('CELERY_CONFIG_MODULE')\n\[email protected]()\ndef get_policy_info_async(policy_series, policy_number, event_data, batch_id, async=True, logger = None):\n from fw.documents.batch_manager import BatchManager\n app = celery.conf['flask_app']()\n logger = logger or celery.log.get_default_logger()\n if not policy_number or not policy_series:\n return\n with app.app_context():\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id, deleted=False).scalar()\n try:\n try:\n result_data = app.external_tools.check_car_policy(policy_series, policy_number, timeout=2.0 if not async else 20.0)\n except requests.exceptions.RequestException, ex:\n BatchManager.handle_event(batch_id, \"on_policy_info_receive_timeout\", event_data, logger, config=app.config)\n return False\n\n if not result_data:\n BatchManager.handle_event(batch_id, \"on_policy_info_receive_fail\", event_data, logger, config=app.config)\n return False\n\n logger.info(u\"get policy info returned: %s\" % json.dumps(result_data, default=lambda x: unicode(x)))\n insurance_name = result_data.get('insCompanyName', u\"\")\n if not insurance_name:\n raise Exception(u\"Failed to get insurance: empty name returned\")\n\n insurance = CarAssurance.query.filter_by(connection_name=insurance_name).first()\n if not insurance:\n raise Exception(u\"Failed to get insurance from db by name: %s\" % insurance_name)\n insurance_id = insurance.id\n\n policy_date = result_data['policyBeginDate'] or None\n policy_date_str = \"\"\n if policy_date:\n policy_date_dt = datetime.strptime(policy_date, \"%d.%m.%Y\")\n policy_date_str = policy_date_dt.strftime(\"%Y-%m-%d\")\n returned_policy_series = result_data['bsoSeries']\n returned_policy_number = result_data['bsoNumber']\n if not policy_date_str or not returned_policy_number or not returned_policy_series:\n BatchManager.handle_event(batch_id, \"on_policy_info_receive_fail\", event_data, logger, config=app.config)\n return False\n\n event_data = {\n 'policy_series': result_data['bsoSeries'],\n 'policy_number': result_data['bsoNumber'],\n 'insurance_id': insurance_id,\n 'insurance_name': insurance_name,\n 'policy_date': policy_date_str\n }\n BatchManager.handle_event(batch_id, \"on_policy_info_received\", event_data, logger, config=app.config)\n except SoftTimeLimitExceeded:\n BatchManager.handle_event(batch_id, \"on_policy_info_receive_fail\", event_data, logger, config=app.config)\n except Exception:\n BatchManager.handle_event(batch_id, \"on_policy_info_receive_fail\", event_data, logger, config=app.config)\n raise\n" }, { "alpha_fraction": 0.7038167715072632, "alphanum_fraction": 0.70534348487854, "avg_line_length": 33.47368240356445, "blob_id": "3a3d62460de30a87abd99e718b7b4429f70f4d5a", "content_id": "5fa7324b6afcdea0aa84b1bb062991a05d970ba9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 655, "license_type": "no_license", "max_line_length": 107, "num_lines": 19, "path": "/app/deployment_migrations/migration_list/20151014_migrate_doc_files.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.db_fields 
import BatchDocumentDbObject, DocumentFilesObject\n\n\ndef forward(config, logger):\n logger.debug(u\"Migrate document files\")\n\n for doc in BatchDocumentDbObject.query.filter(BatchDocumentDbObject.file_id != None):\n existing_mapping = DocumentFilesObject.query.filter_by(doc_id=doc.id, files_id=doc.file_id).first()\n if existing_mapping:\n continue\n new_mapping = DocumentFilesObject(doc_id=doc.id, files_id=doc.file_id)\n sqldb.session.add(new_mapping)\n sqldb.session.commit()\n\ndef rollback(config, logger):\n pass\n" }, { "alpha_fraction": 0.6731380820274353, "alphanum_fraction": 0.7031108140945435, "avg_line_length": 22.869918823242188, "blob_id": "6fb22d88af6ffb51ddf8136d7a5cf056076992b8", "content_id": "8b60f46028a4273e034e7d5ffa1802a3bed55acf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10200, "license_type": "no_license", "max_line_length": 101, "num_lines": 369, "path": "/app/fw/api/errors.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n\nclass ApiBaseError(Exception):\n\n def __init__(self, *args, **kwargs):\n super(ApiBaseError, self).__init__()\n self.ext_data = []\n\n @classmethod\n def get_error_code(cls):\n return cls.ERROR_CODE\n\n def get_error_message(self):\n return self.ERROR_MESSAGE\n\n @classmethod\n def get_http_error_code(cls):\n return cls.HTTP_ERROR_CODE\n\n\nclass BadRequestError(ApiBaseError):\n\n ERROR_CODE = 1\n HTTP_ERROR_CODE = 400\n ERROR_MESSAGE = u\"Неверный запрос\"\n\n\nclass HttpNotSupportedError(ApiBaseError):\n\n ERROR_CODE = 2\n HTTP_ERROR_CODE = 400\n ERROR_MESSAGE = u\"Метод доступен только через защищенное соединение\"\n\n\nclass BadRequestFormatError(ApiBaseError):\n\n ERROR_CODE = 3\n HTTP_ERROR_CODE = 400\n ERROR_MESSAGE = u\"Неправильный формат входных данных %s\"\n\n def __init__(self, parameter_name):\n super(BadRequestFormatError, self).__init__()\n self.parameter_name = parameter_name\n\n def get_error_message(self):\n return self.ERROR_MESSAGE % self.parameter_name\n\n\nclass MissingRequiredParameter(ApiBaseError):\n\n ERROR_CODE = 4\n HTTP_ERROR_CODE = 400\n ERROR_MESSAGE = u\"Пропущен обязательный параметр %s\"\n\n def __init__(self, parameter_name):\n super(MissingRequiredParameter, self).__init__()\n self.parameter_name = parameter_name\n self.ext_data = []\n\n def get_error_message(self):\n return self.ERROR_MESSAGE % self.parameter_name\n\n\nclass InvalidParameterValue(ApiBaseError):\n\n ERROR_CODE = 5\n HTTP_ERROR_CODE = 400\n ERROR_MESSAGE = u\"Недопустимое значение параметра %s\"\n\n def __init__(self, parameter_name):\n super(InvalidParameterValue, self).__init__()\n self.parameter_name = parameter_name\n self.ext_data = []\n\n def get_error_message(self):\n return self.ERROR_MESSAGE % self.parameter_name\n\n\nclass ParameterLengthTooLong(ApiBaseError):\n\n ERROR_CODE = 6\n HTTP_ERROR_CODE = 400\n ERROR_MESSAGE = u\"Длина %s превышает допустимое значение\"\n\n def __init__(self, parameter_name):\n super(ParameterLengthTooLong, self).__init__()\n self.parameter_name = parameter_name\n\n def get_error_message(self):\n return self.ERROR_MESSAGE % self.parameter_name\n\nclass FileToLarge(ApiBaseError):\n\n ERROR_CODE = 10\n HTTP_ERROR_CODE = 400\n ERROR_MESSAGE = u\"Превышен допустимый размер файла\"\n\nclass NotAuthorized(ApiBaseError):\n\n ERROR_CODE = 100\n HTTP_ERROR_CODE = 403\n ERROR_MESSAGE = u\"Пользователь не авторизован\"\n\n\nclass InvalidLoginOrPassword(ApiBaseError):\n\n ERROR_CODE = 102\n 
HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Неверная пара логин/пароль\"\n\n\nclass SocialAuthError(ApiBaseError):\n\n ERROR_CODE = 104\n HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Ошибка авторизации через соцсеть\"\n\n\nclass UserNotFound(ApiBaseError):\n\n ERROR_CODE = 105\n HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Пользователь не найден\"\n\n\nclass ActivationAttemptsCountExceeded(ApiBaseError):\n\n ERROR_CODE = 106\n HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Превышено количество попыток использования кода подтверждения\"\n\n\nclass RecoveryAttemptsCountExceeded(ApiBaseError):\n\n ERROR_CODE = 107\n HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Превышено количество попыток восстановления пароля\"\n\n\nclass ActivationCodeExpiredOrInvalid(ApiBaseError):\n\n ERROR_CODE = 108\n HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Истек период действия кода подтверждения или код неверен\"\n\n\nclass InvalidPassword(ApiBaseError):\n\n ERROR_CODE = 109\n HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Пароль должен быть от 6 до 32 символов\"\n\n\nclass DuplicateEmail(ApiBaseError):\n\n ERROR_CODE = 110\n HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Указанный email уже занят\"\n\n\nclass DuplicateMobile(ApiBaseError):\n\n ERROR_CODE = 111\n HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Указанный телефон уже занят\"\n\n\nclass EmailIsNotConfirmed(ApiBaseError):\n\n ERROR_CODE = 112\n HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Адрес электронной почты не подтвержден\"\n\n\nclass MobileIsNotConfirmed(ApiBaseError):\n\n ERROR_CODE = 113\n HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Телефонный номер не подтвержден\"\n\n\nclass InvalidCurrentPassword(ApiBaseError):\n\n ERROR_CODE = 114\n HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Текущий пароль указан неверно\"\n \n\nclass RenewAuthTokenError(ApiBaseError):\n\n ERROR_CODE = 120\n HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Access token устарел. Необходимо получить новый.\"\n\nclass NoPushTokenError(ApiBaseError):\n\n ERROR_CODE = 121\n HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Не указан push-токен для push уведомлений\"\n\n\nclass SocialServiceRedirect(Exception):\n def __init__(self, url, *args, **kwargs):\n super(SocialServiceRedirect, self).__init__(*args, **kwargs)\n self.url = url\n\n\nclass BatchNotFound(ApiBaseError):\n\n ERROR_CODE = 200\n HTTP_ERROR_CODE = 404\n ERROR_MESSAGE = u\"Пакет документов не найден\"\n\n\nclass DocumentNotFound(ApiBaseError):\n\n ERROR_CODE = 201\n HTTP_ERROR_CODE = 404\n ERROR_MESSAGE = u\"Документ не найден\"\n\n\nclass DocumentBatchFinalizedError(ApiBaseError):\n\n ERROR_CODE = 202\n HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Невозможно изменить документ в финализированном пакете\"\n\n\nclass DocumentBatchFinalizationError(ApiBaseError):\n\n ERROR_CODE = 203\n HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Пакет документов не может быть финализирован\"\n\nclass DocumentBatchDefinalizationError(ApiBaseError):\n\n ERROR_CODE = 204\n HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Пакет документов не может быть дефинализирован\"\n\n\nclass DocumentBatchUpdateError(ApiBaseError):\n\n ERROR_CODE = 205\n HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Пакет документов не может быть обновлен\"\n\n\nclass DuplicatedStoredObjectsChangesFoundException(ApiBaseError):\n\n ERROR_CODE = 206\n HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Хранимые сущности изменились. 
Требуется передать флаг force для дефинализации\"\n\n\nclass EntityNotFound(ApiBaseError):\n\n ERROR_CODE = 207\n HTTP_ERROR_CODE = 404\n ERROR_MESSAGE = u\"Хранимая сущность не найдена\"\n\nclass NotariusBookingNotFound(ApiBaseError):\n\n ERROR_CODE = 208\n HTTP_ERROR_CODE = 404\n ERROR_MESSAGE = u\"Запись к нотариусу не найдена\"\n\nclass IfnsBookingNotFound(ApiBaseError):\n\n ERROR_CODE = 209\n HTTP_ERROR_CODE = 404\n ERROR_MESSAGE = u\"Запись в налоговую не найдена\"\n\nclass PaidBatchUpdateError(ApiBaseError):\n\n ERROR_CODE = 211\n HTTP_ERROR_CODE = 403\n ERROR_MESSAGE = u\"Данное изменение не доступно для оплаченного пакета документов\"\n\nclass NotariusNotFound(ApiBaseError):\n ERROR_CODE = 300\n HTTP_ERROR_CODE = 404\n ERROR_MESSAGE = u\"Данный нотариус не найден\"\n\n\nclass IfnsNotFound(ApiBaseError):\n ERROR_CODE = 301\n HTTP_ERROR_CODE = 404\n ERROR_MESSAGE = u\"Данная налоговая не найдена\"\n\n\nclass InvalidCodeProvided(ApiBaseError):\n ERROR_CODE = 302\n HTTP_ERROR_CODE = 403\n ERROR_MESSAGE = u\"Указан неверный код\"\n\n\nclass MobileNotSet(ApiBaseError):\n ERROR_CODE = 303\n HTTP_ERROR_CODE = 403\n ERROR_MESSAGE = u\"Мобильный телефон не задан\"\n\n\nclass MobileNotConfirmed(ApiBaseError):\n ERROR_CODE = 304\n HTTP_ERROR_CODE = 403\n ERROR_MESSAGE = u\"Мобильный телефон не подтвержден\"\n\n\nclass MaximumRegistrationsExceeded(ApiBaseError):\n ERROR_CODE = 305\n HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Превышено количество регистраций в данный промежуток времени. Попробуйте позже\"\n\n\nclass DuplicateBookingAtTheSameTime(ApiBaseError):\n ERROR_CODE = 306\n HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Вы уже записались на данную услугу на то же время\"\n\n\nclass DayBusyOrHolliday(ApiBaseError):\n\n ERROR_CODE = 307\n HTTP_ERROR_CODE = 200\n\n def __init__(self, dt, *args, **kwargs):\n self.dt = dt\n self.ERROR_MESSAGE = u\"День полностью занят или выходной (%s)\" % self.dt.strftime(\"%Y-%m-%d\")\n super(DayBusyOrHolliday, self).__init__(*args, **kwargs)\n\nclass IfnsSessionExpired(ApiBaseError):\n ERROR_CODE = 308\n HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Сессия с сервером ИФНС устарела\"\n\n\nclass IfnsServiceUnavailable(ApiBaseError):\n ERROR_CODE = 309\n HTTP_ERROR_CODE = 200\n ERROR_MESSAGE = u\"Сервер ИФНС недоступен\"\n\n\nclass ServerUnavailable(ApiBaseError):\n\n HTTP_ERROR_CODE = 503\n ERROR_MESSAGE = u\"Server temporarily unavailable. 
Please try again later.\"\n\n\nclass ServerError(ApiBaseError):\n\n HTTP_ERROR_CODE = 500\n ERROR_MESSAGE = u\"Server error.\"\n ERROR_CODE = 500\n\n\nclass SkipException(Exception):\n pass\n\n\nclass PostTrackingItemNotFound(ApiBaseError):\n ERROR_CODE = 501\n HTTP_ERROR_CODE = 404\n ERROR_MESSAGE = u\"Почтовое отправление не найдено\"\n\n\nclass DuplicateIdError(Exception):\n pass\n" }, { "alpha_fraction": 0.7224669456481934, "alphanum_fraction": 0.7268722653388977, "avg_line_length": 27.375, "blob_id": "ff8b56274a70b294eae6aff3e4ad5df4a3ce6a30", "content_id": "d092123b67c050915358130ad915adb5d35a4738", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 227, "license_type": "no_license", "max_line_length": 55, "num_lines": 8, "path": "/app/fw/auth/__init__.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom flask.globals import current_app\nfrom fw.auth.encrypt import check_password\n\n\ndef load_user(user_id):\n from fw.auth.models import AuthUser\n return AuthUser.query.filter_by(id=user_id).first()\n" }, { "alpha_fraction": 0.5729597210884094, "alphanum_fraction": 0.5768336653709412, "avg_line_length": 42.505619049072266, "blob_id": "e3564ca3983e3e92acd12e32e7bbc4b2615a89a1", "content_id": "5ba582ecf33ec5ad799cef6c3f177250dd62d3c8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7744, "license_type": "no_license", "max_line_length": 139, "num_lines": 178, "path": "/app/fw/async_tasks/send_email.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\nimport smtplib\n\nfrom email.mime.multipart import MIMEMultipart\nfrom email.mime.text import MIMEText\n#noinspection PyUnresolvedReferences\nfrom email.MIMEBase import MIMEBase\nfrom email.header import Header\nfrom email import Encoders\nfrom flask.templating import render_template\nfrom celery import current_app as celery\nfrom flask.globals import current_app\nfrom fw.async_tasks import core_tasks\nfrom fw.transport.mail import fix_email_addr\nfrom services.partners.models import BankPartnersObject, BankPartnerRequestObject\n\ncelery.config_from_envvar('CELERY_CONFIG_MODULE')\n\ndef _send_email_raw(addr_to, email_type, subject=\"\", addr_from=\"\", attach=None, reply_to = None, **tmpl_data):\n mailer = celery.conf.get('MAILER')\n\n config = celery.conf.get('config')\n with celery.conf['flask_app']().app_context():\n html_text = tmpl_data.get('html_text', None)\n if html_text is None:\n html_text = render_template('email/%s.html' % email_type, **tmpl_data)\n\n plain_text = tmpl_data.get('plain_text', None)\n if plain_text is None:\n plain_text = render_template('email/%s.text' % email_type, **tmpl_data)\n\n subject_text = subject or tmpl_data.get('subject_text', None)\n if not subject_text:\n subject_text = render_template('email/%s.subject' % email_type, **tmpl_data)\n\n reply_to = fix_email_addr(reply_to or config['mailer_reply_to'])\n if reply_to:\n addr_from = reply_to\n else:\n addr_from = fix_email_addr(addr_from or config['mailer_smtp_user'])\n addr_to = fix_email_addr(addr_to)\n message_id = smtplib.email.Utils.make_msgid()\n\n msg = MIMEMultipart()\n msg['To'] = addr_to\n msg['From'] = addr_from\n msg['Date'] = smtplib.email.Utils.formatdate(localtime = 1)\n msg['Subject'] = Header(subject_text, 'utf-8')\n msg['Message-ID'] = message_id\n msg.add_header('reply-to', reply_to)\n\n 
msg_internal = MIMEMultipart('alternative')\n\n # Record the MIME types of both parts - text/plain and text/html.\n part1 = MIMEText(plain_text, 'plain', 'utf-8')\n part2 = MIMEText(html_text, 'html', 'utf-8')\n # Attach parts into message container.\n # According to RFC 2046, the last part of a multipart message, in this case\n # the HTML message, is best and preferred.\n msg_internal.attach(part1)\n msg_internal.attach(part2)\n msg.attach(msg_internal)\n # attach attachments\n logger = current_app.logger\n if attach:\n if not isinstance(attach, list):\n attach = [attach,]\n for attachment in attach:\n if isinstance(attachment, dict):\n file_name = attachment['file_name']\n file_path = attachment['file_path']\n else:\n file_name = None\n file_path = attachment\n\n ext = file_path.split('.')[-1]\n # define application type\n if ext=='pdf':\n attachFile = MIMEBase('application', 'pdf')\n elif ext=='doc' or ext=='docx':\n attachFile = MIMEBase('application', 'msword')\n elif ext=='jpg' or ext=='jpeg':\n attachFile = MIMEBase('image', 'jpeg')\n elif ext=='png':\n attachFile = MIMEBase('image', 'png')\n else:\n attachFile = MIMEBase('application', 'octet-stream')\n # get file\n full_path = file_path\n # load the data into attachment object\n att_file = open(full_path, 'rb')\n attachFile.set_payload(att_file.read())\n att_file.close()\n Encoders.encode_base64(attachFile)\n file_name = file_name or file_path.split('/')[-1]\n try:\n att_header = Header(file_name.encode('utf-8'), 'utf-8')\n attachFile.add_header('Content-Disposition', 'attachment; filename=\"%s\"' % att_header)\n except Exception:\n logger.exception(u\"Failed to add utf-8 header\")\n attachFile.add_header('Content-Disposition', 'attachment', filename=file_name)\n # attach it to message\n msg.attach(attachFile)\n # email params\n\n logger.info(u\"Sending %s email to %s. 
ID:%s\" % (email_type, addr_to, message_id))\n mailer.send_email(addr_from, addr_to, msg.as_string())\n\n return message_id\n\[email protected](default_retry_delay=60, max_retries=5)\ndef send_email(addr_to, email_type, subject=\"\", addr_from=\"\", attach=None, reply_to = None, **tmpl_data):\n return _send_email_raw(addr_to, email_type, subject=subject, addr_from=addr_from, attach=attach, reply_to = reply_to, **tmpl_data)\n\n\[email protected]()\ndef send_email_plugin(batch_id, event_data, addr, email_type, max_retries=0, retry_timeout_seconds=None, silent=False, template_data=None):\n template_data = template_data or {}\n try:\n _send_email_raw(addr, email_type, **template_data)\n if not silent:\n core_tasks.send.delay(batch_id, 'emailer.mail_sent', event_data)\n except Exception, ex:\n try:\n timeout = 0\n if retry_timeout_seconds is not None:\n if isinstance(retry_timeout_seconds, list):\n timeout = retry_timeout_seconds[0]\n if len(retry_timeout_seconds) > 1:\n retry_timeout_seconds = retry_timeout_seconds[1:]\n else:\n timeout = retry_timeout_seconds\n if max_retries > 0:\n send_email_plugin.apply_async(args=(batch_id, event_data, addr, email_type), kwargs={\n 'max_retries': max_retries - 1,\n 'retry_timeout_seconds': retry_timeout_seconds,\n 'silent': silent,\n 'template_data': template_data}, countdown=timeout)\n return False\n else:\n if not silent:\n core_tasks.send.delay(batch_id, 'emailer.send_fail', event_data)\n except Exception, ex:\n if not silent:\n core_tasks.send.delay(batch_id, 'emailer.send_fail', event_data)\n raise\n\[email protected]()\ndef send_email_to_partner_and_set_result(addr_to, email_type, batch_id, bank_id, bank_contact_phone_general_manager,\n bank_contact_phone, send_private_data, **tmpl_data):\n with celery.conf['flask_app']().app_context():\n partner = BankPartnersObject.query.filter_by(id=bank_id).first()\n req = BankPartnerRequestObject.query.filter_by(bank_partner_id=partner.id, batch_id=batch_id).first()\n if not req:\n return False\n\n status = \"success\"\n try:\n if isinstance(addr_to, list):\n for addr in addr_to:\n send_email(addr, email_type, **tmpl_data)\n else:\n send_email(addr_to, email_type, **tmpl_data)\n except Exception, ex:\n logger = current_app.logger\n logger.exception(u\"Failed to send email\")\n status = \"failed\"\n\n BankPartnerRequestObject.query.filter_by(id=req.id, status='sending').update({\n 'status': status,\n 'sent_date': datetime.utcnow(),\n 'bank_contact_phone_general_manager': bank_contact_phone_general_manager,\n 'bank_contact_phone': bank_contact_phone,\n 'send_private_data': send_private_data\n })\n\n return status == \"success\"\n" }, { "alpha_fraction": 0.591160237789154, "alphanum_fraction": 0.5935280323028564, "avg_line_length": 41.233333587646484, "blob_id": "0477bedfb247fdf7c6144113af89d6bd8c69b8ce", "content_id": "af4eb0262730dbebaa48f2dfd2402331777682ab", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2534, "license_type": "no_license", "max_line_length": 117, "num_lines": 60, "path": "/app/deployment_migrations/migrations.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport os\nimport sys\nfrom fw.db.sql_base import db as sqldb\nfrom deployment_migrations.data import Migration\nfrom deployment_migrations.models import MigrationState\n\n\nclass MigrationManager(object):\n @staticmethod\n def migrate_to(config, logger, version=None):\n sqldb.engine.execute(u\"create table if not exists 
migration_state (id SERIAL NOT NULL, value VARCHAR NULL);\")\n        cur_state = MigrationState.query.filter_by(id=1).scalar()\n        assert cur_state\n        current_version = cur_state.value\n\n        migration_list = MigrationManager.load_migration_list(config)\n        migration_list = sorted(migration_list, key=lambda x: unicode(x))\n\n        for migration in migration_list:\n            if current_version < migration.version:\n                # noinspection PyBroadException\n                try:\n                    migration.run(config, logger)\n                except Exception:\n                    logger.exception(u\"Failed to run migration %s. Break.\" % migration.version)\n                    return\n\n                # noinspection PyBroadException\n                try:\n                    MigrationState.query.filter_by(id=1).update({\n                        'value': unicode(migration.version)\n                    })\n                    sqldb.session.commit()\n                    current_version = migration.version\n                except Exception:\n                    logger.exception(\n                        u\"Failed to set current version after successful migration %s. Break.\" % migration.version)\n                    return\n        return current_version\n\n    @staticmethod\n    def load_migration_list(config):\n        migrations = []\n        migrations_list_dir = config.get('MIGRATION_LIST_DIR', os.path.normpath(\n            os.path.abspath(os.path.join(os.path.dirname(__file__), 'migration_list'))))\n        sys.path.append(migrations_list_dir)\n        for file_name in os.listdir(migrations_list_dir):\n            if not os.path.splitext(file_name)[1] == '.py' or not file_name[0].isdigit():\n                continue\n            module_name = os.path.splitext(file_name)[0]\n            module = __import__(module_name)\n            forward_migration = getattr(module, 'forward')\n            rollback_migration = getattr(module, 'rollback')\n            is_skip = getattr(module, 'skip', False)\n            if is_skip:\n                continue\n            migrations.append(Migration(module_name, forward_migration, rollback_migration))\n        return migrations\n" }, { "alpha_fraction": 0.6111580729484558, "alphanum_fraction": 0.6162827610969543, "avg_line_length": 32.62166976928711, "blob_id": "1e60b00a941040d4306ded052a0235277114e3d3", "content_id": "1585995f4f3b180af2919fc8e751e90a2e0bae83", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 19126, "license_type": "no_license", "max_line_length": 162, "num_lines": 563, "path": "/app/fw/api/args_validators.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\nfrom functools import wraps\nfrom bson.objectid import ObjectId\nimport flask\nfrom flask import current_app, request\nimport re\nfrom unidecode import unidecode\nfrom fw.api import errors\nfrom fw.auth.models import ConfirmationLinkTypeEnum\nfrom fw.auth.social_services.social_models import SocialServiceEnum\n\n\nclass ArgumentValidator(object):\n    def __init__(self, required=True, default_value=None, min_length=None, max_length=None, raise_exception=None):\n        self.required = required\n        self.default_value = default_value\n        self.min_length = min_length\n        self.max_length = max_length\n        self.application = None\n        self.raise_exception = raise_exception\n        if self.default_value and not self._validate(self.default_value):\n            raise ValueError(default_value)\n\n    def validation_result(self, result):\n        if not result and self.raise_exception:\n            raise self.raise_exception()\n        return result\n\n    def validate(self, argument):\n        return self._validate(argument)\n\n    def _validate(self, argument):\n        if self.required and argument is None:\n            return self.validation_result(False)\n\n        if argument is None:\n            return self.validation_result(False)\n\n        if self.min_length and len(argument) < self.min_length:\n            return self.validation_result(False)\n\n        if self.max_length and 
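# migrate_to above orders migrations with sorted(..., key=lambda x: unicode(x)) and compares
# versions with plain string "<", so migration file names must keep zero-padded numeric
# prefixes to apply in the intended order. A standalone illustration of the pitfall:
assert '0002_add_index' < '0010_add_column'  # zero-padded prefixes sort chronologically
assert '2_add_index' > '10_add_column'       # unpadded prefixes sort lexicographically, out of order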
len(argument) > self.max_length:\n return self.validation_result(False)\n\n return self.validation_result(True)\n\n def get_value(self, argument):\n if argument is None:\n return self.default_value\n return argument\n\n def __or__(self, other):\n if not isinstance(other, ArgumentValidator):\n raise ValueError(\"Invalid argument\")\n\n return OneOfTypeArgumentValidator(self, other)\n\n\nclass OneOfTypeArgumentValidator(ArgumentValidator):\n # noinspection PyMissingConstructor\n def __init__(self, validator1, validator2):\n super(OneOfTypeArgumentValidator, self).__init__()\n\n self.val1 = validator1\n self.val2 = validator2\n\n self.val1.required = False\n self.val2.required = False\n\n def validate(self, argument):\n self.val1.application = self.application\n self.val2.application = self.application\n return self.validation_result(self.val1.validate(argument) or self.val2.validate(argument))\n\n def get_value(self, argument):\n if argument is None:\n return False\n if self.val1.validate(argument):\n return self.val1.get_value(argument)\n return self.val2.get_value(argument)\n\n\ndef _validate(arg_name, validator, required_args):\n if isinstance(validator, ArgumentValidator):\n validator.application = current_app\n try:\n if not len(request.form) and not len(request.args) and request.data:\n from werkzeug.urls import url_decode\n\n args = url_decode(request.data)\n if arg_name not in args:\n raise KeyError(arg_name)\n value = args[arg_name]\n else:\n value = request.form[arg_name] if arg_name in request.form else request.args[arg_name]\n if not validator.validate(value):\n raise errors.InvalidParameterValue(arg_name)\n except KeyError:\n if validator.required:\n raise errors.MissingRequiredParameter(arg_name)\n value = validator.default_value\n required_args[arg_name] = validator.get_value(value) if value is not None else None\n else:\n raise ValueError(\"Invalid parameter in argument validator description: %s\" % str(validator))\n\n\ndef validate_arguments(*arguments_list, **arguments_dict):\n def _decorator(function):\n wrapped = function\n\n @wraps(function)\n def wrapper():\n required_args = {}\n for arg_name, validator in arguments_dict.items():\n _validate(arg_name, validator, required_args)\n\n for validator in arguments_list:\n if isinstance(validator, AtLeastOneOfValidator):\n if not validator.arguments_dict:\n raise ValueError(\n \"Invalid parameter in argument validator description: AtLeastOneOfValidator has no items.\")\n\n required_args_part = {}\n first_arg_name = u\"\"\n for arg_name, single_validator in validator.arguments_dict.items():\n single_validator.required = False\n single_validator.default = None\n if not first_arg_name:\n first_arg_name = arg_name\n _validate(arg_name, single_validator, required_args_part)\n if not required_args_part:\n raise errors.MissingRequiredParameter(first_arg_name)\n required_args.update(required_args_part)\n\n return wrapped(**required_args)\n\n return wrapper\n\n return _decorator\n\n\n\nclass AtLeastOneOfValidator(object):\n def __init__(self, **kwargs):\n self.arguments_dict = kwargs\n\n\nclass IntValidator(ArgumentValidator):\n def __init__(self, min_val=None, max_val=None, **kwargs):\n super(IntValidator, self).__init__(**kwargs)\n self.min_val = min_val\n self.max_val = max_val\n\n def validate(self, argument):\n if not super(IntValidator, self).validate(argument):\n return False\n\n try:\n int_val = int(argument)\n except ValueError:\n return False\n\n if self.min_val is not None:\n if int_val < self.min_val:\n return False\n\n if 
self.max_val is not None:\n            if int_val > self.max_val:\n                return False\n\n        return True\n\n    def get_value(self, argument):\n        return int(argument)\n\n\nclass ClientTypeValidator(ArgumentValidator):\n    CLIENT_TYPES = ('android', 'ios')\n\n    def validate(self, argument):\n        if not super(ClientTypeValidator, self).validate(argument):\n            return False\n        return self.validation_result(argument in ClientTypeValidator.CLIENT_TYPES)\n\n\nclass EmailAddressValidator(ArgumentValidator):\n    def __init__(self, max_length=100, **kwargs):\n        super(EmailAddressValidator, self).__init__(max_length=max_length, **kwargs)\n\n    def validate(self, argument):\n        if not super(EmailAddressValidator, self).validate(argument):\n            return False\n\n        rus_chars = u\"абвгдеёжзийклмнопрстуфхцчшщьыъэюяАБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЬЫЪЭЮЯ\"\n        email_re = ur\"^[a-zA-Z\" + rus_chars + ur\"0-9._%\\-+]+\\@[a-zA-Z\" + rus_chars + ur\"0-9._%-]+\\.[a-zA-Z\" + rus_chars + ur\"]{2,}$\"\n        if re.match(email_re, argument, re.IGNORECASE) is not None:\n            return True\n\n        email_re = r\"^.*\\<[a-zA-Z\" + rus_chars + ur\"0-9._%\\-+]+\\@[a-zA-Z\" + rus_chars + ur\"0-9._%-]+\\.[a-zA-Z\" + rus_chars + ur\"]{2,}>$\"\n        return self.validation_result(re.match(email_re, argument, re.IGNORECASE) is not None)\n\n\nclass MobileNumberValidator(ArgumentValidator):\n    def __init__(self, max_length=20, **kwargs):\n        super(MobileNumberValidator, self).__init__(max_length=max_length, **kwargs)\n\n    def validate(self, argument):\n        if not super(MobileNumberValidator, self).validate(argument):\n            return False\n\n        return self.validation_result(\n            (argument[0] == '+' and argument[1:].isdigit()) or (argument.isdigit() and argument[0] == \"8\"))\n\n    def get_value(self, argument):\n        if not argument or len(argument) < 2:\n            return argument\n        return (\"+7\" + argument[1:]) if argument[0] == '8' else argument\n\n\nclass HumanNameValidator(ArgumentValidator):\n    ALLOWED_CHARS = u'.,-\\'\" абвгдеёжзийклмнопрстуфхцчшщьыъэюяАБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЬЫЪЭЮЯ0123456789'\n    ALLOWED_CHARS_ENG = u'.,-\\'\" абвгдеёжзийклмнопрстуфхцчшщьыъэюяАБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЬЫЪЭЮЯ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'\n\n    def __init__(self, max_length=100, allow_english_chars=False, **kwargs):\n        self.allow_english_chars = allow_english_chars\n        super(HumanNameValidator, self).__init__(max_length=max_length, **kwargs)\n\n    def validate(self, argument):\n        if not super(HumanNameValidator, self).validate(argument):\n            return False\n\n        name = argument.strip()\n        if not name:\n            return self.validation_result(False)\n\n        if self.max_length is not None and len(name) > self.max_length:\n            return self.validation_result(False)\n\n        test_set = HumanNameValidator.ALLOWED_CHARS if not self.allow_english_chars else HumanNameValidator.ALLOWED_CHARS_ENG\n        for c in name:\n            if c not in test_set:\n                return self.validation_result(False)\n        return True\n\n    def get_value(self, argument):\n        return argument.strip()\n\n\nclass LoginValidator(ArgumentValidator):\n    def __init__(self, max_length=20, min_length=5, **kwargs):\n        super(LoginValidator, self).__init__(max_length=max_length, min_length=min_length, **kwargs)\n\n    def validate(self, argument):\n        if not super(LoginValidator, self).validate(argument):\n            return False\n\n        name = argument.strip()\n        if not name:\n            return self.validation_result(False)\n\n        if self.max_length is not None and len(name) > self.max_length:\n            return self.validation_result(False)\n\n        for c in name:\n            if c.lower() not in 
'abcdefghijklmnopqrstuvwxyz0123456789':\n                return self.validation_result(False)\n        return True\n\n    def get_value(self, argument):\n        return argument.strip()\n\n\nclass PasswordValidator(ArgumentValidator):\n    def __init__(self, min_length=6, max_length=32, **kwargs):\n        super(PasswordValidator, self).__init__(min_length=min_length, max_length=max_length, **kwargs)\n\n\nclass AccessTokenValidator(ArgumentValidator):\n    def __init__(self, min_length=40, max_length=300, **kwargs):\n        super(AccessTokenValidator, self).__init__(min_length=min_length, max_length=max_length, **kwargs)\n\n    def validate(self, argument):\n        if not super(AccessTokenValidator, self).validate(argument):\n            return False\n\n        if '@' in argument or '+' in argument:\n            return self.validation_result(False)\n\n        return True\n\n\nclass FloatTypeValidator(ArgumentValidator):\n    def validate(self, argument):\n        if not super(FloatTypeValidator, self).validate(argument):\n            return False\n\n        try:\n            float(argument)\n        except ValueError:\n            return self.validation_result(False)\n\n        return True\n\n    def get_value(self, argument):\n        return float(argument)\n\n\nclass BoolTypeValidator(ArgumentValidator):\n    def validate(self, argument):\n        if not super(BoolTypeValidator, self).validate(argument):\n            return False\n\n        if argument.lower() in (\"true\", \"t\", \"false\", \"f\"):\n            return True\n\n        return self.validation_result(False)\n\n    def get_value(self, argument):\n        argument = argument.lower()\n        if not argument:\n            return None\n        elif argument in (\"true\", \"t\"):\n            return True\n        elif argument in (\"false\", \"f\"):\n            return False\n\n        return None\n\n\nclass CarNumberValidator(ArgumentValidator):\n    # basic number\n    CAR_NUMBER_RE1 = r\"^\\w{1}\\d{3}\\w{2} \\d{2,3}$\"\n    # commercial transport\n    CAR_NUMBER_RE2 = r\"^\\w{2}\\d{4} \\d{2,3}$\"\n\n    def validate(self, argument):\n        if not super(CarNumberValidator, self).validate(argument):\n            return False\n\n        argument = unidecode(argument).upper()\n        number_matched = re.match(CarNumberValidator.CAR_NUMBER_RE1, argument, re.IGNORECASE) or \\\n                         re.match(CarNumberValidator.CAR_NUMBER_RE2, argument, re.IGNORECASE) or None\n        if number_matched is None:\n            return self.validation_result(False)\n        return True\n\n    def get_value(self, argument):\n        argument = unidecode(argument).upper()\n        return unicode(argument)\n\n\nclass DateTimeValidator(ArgumentValidator):\n    FORMAT = '%Y-%m-%dT%H:%M:%S'\n\n    def validate(self, argument):\n        if not super(DateTimeValidator, self).validate(argument):\n            return False\n\n        if \".\" in argument:\n            tmpl = '%Y-%m-%dT%H:%M:%S.%f'\n        else:\n            tmpl = '%Y-%m-%dT%H:%M:%S'\n        try:\n            datetime.strptime(argument, tmpl)\n        except ValueError:\n            return self.validation_result(False)\n        return True\n\n    def get_value(self, argument):\n        if \".\" in argument:\n            tmpl = '%Y-%m-%dT%H:%M:%S.%f'\n        else:\n            tmpl = '%Y-%m-%dT%H:%M:%S'\n        return datetime.strptime(argument, tmpl)\n\n\nclass DateTypeValidator(ArgumentValidator):\n    FORMAT = '%Y-%m-%d'\n\n    def validate(self, argument):\n        if not super(DateTypeValidator, self).validate(argument):\n            return False\n\n        try:\n            datetime.strptime(argument, DateTypeValidator.FORMAT)\n        except ValueError:\n            return self.validation_result(False)\n        return True\n\n    def get_value(self, argument):\n        return datetime.strptime(argument, DateTypeValidator.FORMAT)\n\n\nclass PhotoDocListValidator(ArgumentValidator):\n    def validate(self, argument):\n        if not super(PhotoDocListValidator, self).validate(argument):\n            return False\n\n        try:\n            data = flask.json.loads(argument)\n        except ValueError:\n            # raise 
ValueError(str(argument) + \" \" + str(type(argument)))\n            return self.validation_result(False)\n\n        if not isinstance(data, list):\n            return self.validation_result(False)\n\n        for photo in data:\n            # print str(photo)\n            if \"id\" in photo and \"shoot_time\" in photo and \"coord\" in photo:\n                try:\n                    unicode(photo['id'])\n                except ValueError:\n                    return self.validation_result(False)\n                # check shoot time\n                if not DateTimeValidator().validate(photo['shoot_time']):\n                    return self.validation_result(False)\n                # check coords\n                coord = photo['coord']\n                if not isinstance(coord, dict):\n                    return self.validation_result(False)\n                if not \"lat\" in coord or not \"lon\" in coord:\n                    return self.validation_result(False)\n                if not FloatTypeValidator().validate(coord['lat']) or not FloatTypeValidator().validate(coord['lon']):\n                    return self.validation_result(False)\n            else:\n                return self.validation_result(False)\n        return True\n\n    def get_value(self, argument):\n        data_list = flask.json.loads(argument)\n        for data in data_list:\n            data['id'] = unicode(data['id'])\n            data['shoot_time'] = DateTimeValidator().get_value(data['shoot_time'])\n            data['coord']['lat'] = FloatTypeValidator().get_value(data['coord']['lat'])\n            data['coord']['lon'] = FloatTypeValidator().get_value(data['coord']['lon'])\n        return data_list\n\n\nclass ConfirmationCodeValidator(ArgumentValidator):\n    def validate(self, argument):\n        if not super(ConfirmationCodeValidator, self).validate(argument):\n            return False\n\n        code_len = len(argument)\n        if code_len == self.application.config['max_activation_link_length']:\n            return self.validation_result(argument.isalnum())\n\n        if code_len == self.application.config['digital_activation_link_length']:\n            return self.validation_result(argument.isdigit())\n\n        return self.validation_result(False)\n\n\nclass JsonValidator(ArgumentValidator):\n    def validate(self, argument):\n        if not super(JsonValidator, self).validate(argument):\n            return False\n\n        try:\n            flask.json.loads(argument)\n        except Exception:\n            return False\n\n        return True\n\n    def get_value(self, argument):\n        try:\n            return flask.json.loads(argument)\n        except Exception:\n            return None\n\n\nclass SocialNetworkTypeValidator(ArgumentValidator):\n    def validate(self, argument):\n        if not super(SocialNetworkTypeValidator, self).validate(argument):\n            return False\n\n        return self.validation_result(argument in SocialServiceEnum.TAG_ALL)\n\n\nclass ObjectIdValidator(ArgumentValidator):\n    def validate(self, argument):\n        if not super(ObjectIdValidator, self).validate(argument):\n            return False\n\n        try:\n            ObjectId(argument)\n        except Exception:\n            return self.validation_result(False)\n\n        return True\n\n    def get_value(self, argument):\n        return ObjectId(argument)\n\n\nclass ObjectRefValidator(ArgumentValidator):\n    def validate(self, argument):\n        if not super(ObjectRefValidator, self).validate(argument):\n            return False\n\n        if \"_\" in argument:\n            try:\n                _id, _type = argument.split('_')\n                obj_id = ObjectId(_id)\n            except Exception:\n                return self.validation_result(False)\n        else:\n            try:\n                obj_id = ObjectId(argument)\n            except Exception:\n                return self.validation_result(False)\n\n        return True\n\n    def get_value(self, argument):\n        if \"_\" in argument:\n            _id, _type = argument.split('_')\n            return ObjectId(_id)\n        return ObjectId(argument)\n\n\nclass ConfirmationCodeTypeValidator(ArgumentValidator):\n    CODE_TYPES = ('email', 'mobile', 'password')\n\n    def validate(self, argument):\n        if not super(ConfirmationCodeTypeValidator, self).validate(argument):\n            return False\n        return self.validation_result(argument in ConfirmationCodeTypeValidator.CODE_TYPES)\n\n    def 
get_value(self, argument):\n return ConfirmationLinkTypeEnum.from_string(argument) or None\n\n\nclass EnumValidator(ArgumentValidator):\n def __init__(self, enum_cls, **kwargs):\n super(EnumValidator, self).__init__(**kwargs)\n self.enum_cls = enum_cls\n\n def validate(self, argument):\n cls = getattr(self.enum_cls, 'TYPE_CLS', unicode)\n return self.validation_result(self.enum_cls.validate(cls(argument)))\n\n\nclass MyObjectValidator(object):\n def __init__(self, user_id):\n self.user_id = user_id\n\n def validate(self, base_obj_field):\n if not base_obj_field.id.initialized:\n return True\n owner = base_obj_field.get_object_owner()\n if not owner or owner != self.user_id:\n current_app.logger.warn(u\"Not my object\")\n return False\n return True" }, { "alpha_fraction": 0.6623610854148865, "alphanum_fraction": 0.6644638180732727, "avg_line_length": 26.75, "blob_id": "37098dd6f49c21016545415509e3af0e5a5c5b23", "content_id": "5e28999fafdc756cedc380947ccafefe14157602", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3329, "license_type": "no_license", "max_line_length": 135, "num_lines": 120, "path": "/app/fw/auth/models.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom bson.objectid import ObjectId\nfrom flask import current_app\nfrom sqlalchemy.sql.functions import func\nfrom sqlalchemy import Column, Integer, String, Boolean, DateTime, ForeignKey\nfrom sqlalchemy.orm import relationship\n\nfrom fw.db.sql_base import db\n\n\nclass AnonymousUser(object):\n\n @property\n def is_anonymous(self):\n return True\n\n\nclass AuthUser(db.Model):\n\n __tablename__ = 'authuser'\n\n id = Column(Integer, primary_key=True)\n uuid = Column(String, unique=True, default=lambda: unicode(ObjectId()))\n name = Column(String)\n surname = Column(String)\n patronymic = Column(String)\n password = Column(String)\n enabled = Column(Boolean, default=True)\n signup_date = Column(DateTime, default=func.now(), nullable=False)\n last_login_date = Column(DateTime)\n\n email_confirmed = Column(Boolean, nullable=False, default=False)\n mobile_confirmed = Column(Boolean, nullable=False, default=False)\n email = Column(String)\n mobile = Column(String)\n\n is_tester = Column(Boolean, nullable=False, default=False)\n temporal = Column(Boolean, nullable=False, default=False)\n last_password_drop_attempts_date = Column(DateTime)\n last_password_drop_attempts_count = Column(Integer)\n\n admin = Column(Boolean, default=False)\n\n @property\n def is_anonymous(self):\n return False\n\n @property\n def is_active(self):\n #return self.enabled\n return True\n\n @property\n def is_authenticated(self):\n return True\n\n def get_id(self):\n return self.id\n\n @property\n def user_id(self):\n return self.get_id()\n\n\nclass UserActivationLink(db.Model):\n\n __tablename__ = \"useractivationlink\"\n\n id = Column(Integer, primary_key=True)\n\n auth_user_id = Column(Integer, ForeignKey('authuser.id'))\n auth_user = relationship(\"AuthUser\")\n\n use_attempts = Column(Integer, default=0, nullable=False)\n new_email = Column(String)\n new_mobile = Column(String)\n\n link_code = Column(String, nullable=False)\n creation_date = Column(DateTime, default=func.now(), nullable=False)\n used_date = Column(DateTime)\n link_type = Column(Integer, nullable=False)\n\n\nclass ConfirmationLinkTypeEnum(object):\n\n CLT_INVALID = 0\n CLT_EMAIL = 1\n CLT_MOBILE = 2\n CLT_PASSWORD = 3\n CLT_ALL = [CLT_EMAIL, CLT_MOBILE, CLT_PASSWORD]\n\n 
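# The validator classes above are combined per endpoint through validate_arguments, and the
# "|" operator builds a OneOfTypeArgumentValidator that accepts either operand's format. A
# minimal usage sketch (the endpoint and argument names here are hypothetical):
@validate_arguments(
    email=EmailAddressValidator(),                  # required by default
    code=IntValidator(min_val=0, required=False),   # optional, converted to int
    contact=EmailAddressValidator() | MobileNumberValidator())
def my_endpoint(email=None, code=None, contact=None):
    # validate_arguments reads the flask request, validates each value and passes the
    # already-converted results as keyword arguments.
    return u"ok"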
@classmethod\n def from_string(cls, val):\n if val == 'email':\n return cls.CLT_EMAIL\n if val == 'mobile':\n return cls.CLT_MOBILE\n if val == 'password':\n return cls.CLT_PASSWORD\n return cls.CLT_INVALID\n\n\nclass AuthorizationUrl(db.Model):\n\n __tablename__ = \"authorization_url\"\n\n id = Column(String, primary_key=True, default=lambda: unicode(ObjectId()))\n url = Column(String)\n created = Column(DateTime, default=func.now(), nullable=False)\n expire_at = Column(DateTime, nullable=False)\n used_times = Column(Integer, default=0, nullable=False)\n\n owner_id = Column(Integer, ForeignKey('authuser.id'))\n owner = relationship(\"AuthUser\")\n\n def get_url(self, config):\n return u\"%s://%s/go/%s/\" % (config['WEB_SCHEMA'], config['api_url'], self.id) if config['WEB_SCHEMA'] else u\"/go/%s/\" % self.id\n\n def __unicode__(self):\n return self.get_url(current_app.config)" }, { "alpha_fraction": 0.6883683204650879, "alphanum_fraction": 0.6891855001449585, "avg_line_length": 39.340660095214844, "blob_id": "66d29e1e9ec2b1ddbfe786235d03077628e433c6", "content_id": "9f19101731c5f8bb18b5dd78d7ba8d87d028c410", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3671, "license_type": "no_license", "max_line_length": 106, "num_lines": 91, "path": "/app/services/osago/__init__.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport os\nimport jinja2\nfrom fw.documents.batch_manager import BatchManager\nfrom fw.documents.doc_requisites_storage import DocRequisitiesStorage\nfrom fw.documents.enums import DocumentBatchTypeEnum, DocumentTypeEnum\nfrom services.osago.osago_manager import OsagoBatchManager\n\n\ndef _init_doc_requisities(config):\n from services.osago.documents.initial_db_data import load_data\n data = load_data(config)\n\n templates = (\n \"OSAGO_MAIL_LIST_TEMPLATE\",\n \"OSAGO_PRETENSION_TEMPLATE\",\n \"OSAGO_DOCUMENTS_CLAIM_TEMPLATE\",\n \"OSAGO_TRUST_SUBMISSION_DOCS_TEMPLATE\",\n \"OSAGO_TRUST_OBTAIN_DOCS_TEMPLATE\",\n \"OSAGO_TRUST_SUBMISION_OBTAIN_DOCS_TEMPLATE\",\n \"OSAGO_CLAIM_COURT_ABSENT_TEMPLATE\",\n \"OSAGO_CLAIM_ALL_EXECUTION_ACT_TEMPLATE\",\n \"OSAGO_CLAIM_GUILTY_EXECUTION_ACT_TEMPLATE\",\n \"OSAGO_CLAIM_INSURANCE_EXECUTION_ACT_TEMPLATE\",\n \"OSAGO_LAWSUIT_TEMPLATE\",\n \"OSAGO_OSAGO_COURT_MAIL_LIST_TEMPLATE\"\n )\n\n for template_name in templates:\n DocRequisitiesStorage.add_template(data[template_name]['doc_name'], data[template_name])\n\n schemas = (\n \"OSAGO_SCHEMA\",\n \"OSAGO_MAIL_LIST_SCHEMA\",\n \"OSAGO_PRETENSION_SCHEMA\",\n \"OSAGO_DOCUMENTS_CLAIM_SCHEMA\",\n \"OSAGO_TRUST_SUBMISSION_DOCS_SCHEMA\",\n \"OSAGO_TRUST_OBTAIN_DOCS_SCHEMA\",\n \"OSAGO_TRUST_SUBMISION_OBTAIN_DOCS_SCHEMA\",\n \"OSAGO_CLAIM_COURT_ABSENT_SCHEMA\",\n \"OSAGO_CLAIM_ALL_EXECUTION_ACT_SCHEMA\",\n \"OSAGO_CLAIM_GUILTY_EXECUTION_ACT_SCHEMA\",\n \"OSAGO_CLAIM_INSURANCE_EXECUTION_ACT_SCHEMA\",\n \"OSAGO_LAWSUIT_SCHEMA\",\n \"OSAGO_OSAGO_COURT_MAIL_LIST_SCHEMA\"\n )\n\n for schema_name in schemas:\n DocRequisitiesStorage.add_schema(data[schema_name]['doc_name'], data[schema_name])\n\n bd = dict(\n batch_type=DocumentBatchTypeEnum.DBT_OSAGO,\n doc_types=[\n DocumentTypeEnum.DT_OSAGO_MAIL_LIST,\n DocumentTypeEnum.DT_OSAGO_PRETENSION,\n DocumentTypeEnum.DT_OSAGO_DOCUMENTS_CLAIM,\n DocumentTypeEnum.DT_OSAGO_TRUST_SUBMISSION_DOCS,\n DocumentTypeEnum.DT_OSAGO_TRUST_OBTAIN_DOCS,\n DocumentTypeEnum.DT_OSAGO_TRUST_SUBMISION_OBTAIN_DOCS,\n\n DocumentTypeEnum.DT_OSAGO_CLAIM_COURT_ABSENT,\n 
DocumentTypeEnum.DT_OSAGO_CLAIM_ALL_EXECUTION_ACT,\n DocumentTypeEnum.DT_OSAGO_CLAIM_GUILTY_EXECUTION_ACT,\n DocumentTypeEnum.DT_OSAGO_CLAIM_INSURANCE_EXECUTION_ACT,\n DocumentTypeEnum.DT_OSAGO_LAWSUIT,\n DocumentTypeEnum.DT_OSAGO_COURT_MAIL_LIST\n ],\n result_fields=data['OSAGO_RESULT_FIELDS'],\n fields=data['OSAGO_SCHEMA']['fields'],\n initial_status='pretension',\n actions=data['OSAGO_ACTIONS'],\n transitions=data['OSAGO_TRANSITIONS'],\n validation_condition=data['VALIDATION_CONDITION'],\n fixed_on_states=['generating_pretension', 'generating_claim']\n )\n\n DocRequisitiesStorage.add_batch_descriptor(DocumentBatchTypeEnum.DBT_OSAGO, bd)\n\n\ndef register(app, jinja_env, class_loader, **kwargs):\n search_path = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), u\"templates\"))\n jinja_env.loader.loaders.append(jinja2.FileSystemLoader(search_path))\n\n class_loader.POSSIBLE_LOCATIONS.append('services.osago.documents')\n class_loader.POSSIBLE_LOCATIONS.append('services.osago.documents.enums')\n class_loader.POSSIBLE_LOCATIONS.append('services.osago.documents.general_doc_fields')\n\n BatchManager.register_manager(DocumentBatchTypeEnum.DBT_OSAGO, OsagoBatchManager)\n\n _init_doc_requisities(app.config)\n" }, { "alpha_fraction": 0.7158798575401306, "alphanum_fraction": 0.716738224029541, "avg_line_length": 26.738094329833984, "blob_id": "783916a11723fd6c915b373bb9610a3328e614a5", "content_id": "e61d8137b1a5d4b7079ad8b7dad0e0ba7eddcea3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1165, "license_type": "no_license", "max_line_length": 99, "num_lines": 42, "path": "/app/fw/db/sql_base.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom datetime import datetime\nimport calendar\n# noinspection PyUnresolvedReferences\nfrom flask.ext.sqlalchemy import SQLAlchemy\nimport json\nfrom bson import ObjectId\nfrom sqlalchemy import func\n\nCUSTOM_SERIALIZERS = {\n datetime: lambda dt: {\"custom_type\": \"datetime\", \"val\": calendar.timegm(dt.timetuple())},\n ObjectId: lambda oid: unicode(oid)\n}\n\ndef raiser(val):\n raise TypeError(val)\n\n\ndef custom_json_serializer(data, **kwargs):\n return json.dumps(data, default=lambda x: CUSTOM_SERIALIZERS.get(type(x), raiser)(x), **kwargs)\n\n\ndef custom_obj_parser(obj):\n if 'custom_type' in obj and 'val' in obj and obj['custom_type'] == 'datetime':\n return datetime.utcfromtimestamp(obj['val'])\n return obj\n\n\ndef custom_json_deserializer(data):\n return json.loads(data, object_hook=custom_obj_parser)\n\n\nclass CustomSQLAlchemy(SQLAlchemy):\n\n def apply_driver_hacks(self, app, info, options):\n super(CustomSQLAlchemy, self).apply_driver_hacks(app, info, options)\n\n options['json_serializer'] = custom_json_serializer\n options['json_deserializer'] = custom_json_deserializer\n\ndb = CustomSQLAlchemy()\n" }, { "alpha_fraction": 0.6942496299743652, "alphanum_fraction": 0.6949509382247925, "avg_line_length": 31.409090042114258, "blob_id": "7cae562cce1d62db3910493aa53dc491a751bc93", "content_id": "679830ebc3832e8da4ae8e7f2b538ee1e22ae228", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1426, "license_type": "no_license", "max_line_length": 128, "num_lines": 44, "path": "/app/services/russian_post/db_models.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom sqlalchemy import Column, String, ForeignKey, 
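# The serializer pair above gives SQLAlchemy JSON columns a datetime round trip through the
# {"custom_type": "datetime"} envelope, while ObjectId values are flattened to unicode and
# deliberately come back as plain strings. A quick round-trip sketch (assumes the imports
# at the top of fw/db/sql_base.py):
payload = {'created': datetime(2015, 6, 1, 12, 0, 0), 'ref': ObjectId()}
raw = custom_json_serializer(payload)
restored = custom_json_deserializer(raw)
assert restored['created'] == payload['created']   # datetime survives (second precision)
assert restored['ref'] == unicode(payload['ref'])  # ObjectId degrades to a string on purpose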
Unicode, Integer, DateTime, Index\nfrom sqlalchemy.orm import relationship\nfrom sqlalchemy.sql.functions import func\n\nfrom fw.db.sql_base import db as sqldb\n\n\nclass PostTrackingStatus(object):\n\n PTS_UNKNOWN = 'unknown'\n PTS_NOT_FOUND = 'not_found'\n PTS_PROGRESS = 'progress'\n PTS_DELIVERED = 'delivered'\n PTS_FAILED = 'failed'\n\n\nclass RussianPostTrackingItem(sqldb.Model):\n __tablename__ = 'rus_post_tracking'\n\n id = Column(Integer, primary_key=True)\n\n tracking = Column(Unicode, nullable=False)\n\n creation_date = Column(DateTime, default=func.now(), nullable=False)\n\n batch_id = Column(String, ForeignKey('doc_batch.id'), nullable=True)\n batch = relationship(\"DocumentBatchDbObject\")\n\n owner_id = Column(Integer, ForeignKey('authuser.id'), nullable=False)\n owner = relationship(\"AuthUser\")\n\n status = Column(String, default=PostTrackingStatus.PTS_UNKNOWN, nullable=False)\n status_caption = Column(Unicode, nullable=False, default=u\"\")\n\n status_change_dt = Column(DateTime, nullable=True)\n last_check_dt = Column(DateTime, nullable=True)\n\n last_location = Column(Unicode, nullable=True)\n\n tracking_type = Column(String, nullable=True) # reserved (for batches with multiple tracks)\n\n __table_args__ = (Index('batch_tracking_index', \"tracking\", \"batch_id\"), )\n" }, { "alpha_fraction": 0.6460321545600891, "alphanum_fraction": 0.6463049054145813, "avg_line_length": 38.01063919067383, "blob_id": "e378c4178d1dee1917cd355e5c2ce3e11a3292cf", "content_id": "55c9ec50f42ae1ca432210c05430884a61879142", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3673, "license_type": "no_license", "max_line_length": 149, "num_lines": 94, "path": "/app/fw/plugins/task_scheduler/__init__.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom copy import copy\nfrom datetime import datetime\n\nfrom fw.async_tasks.scheduler import CeleryScheduler\nfrom fw.documents.address_enums import RFRegionsEnum\nfrom fw.documents.doc_requisites_storage import DocRequisitiesStorage\nfrom fw.documents.schema.schema_transform import transform_field_with_schema\nfrom fw.utils.time_utils import calc_fixed_time_not_earlier\n\nPLUGIN_NAME = 'task_scheduler'\n\n\ndef get_events():\n events = [{\n 'name': 'schedule'\n }]\n return events\n\ndef act(action, batch_db, previous_event_data, plugin_config, logger, config):\n assert batch_db\n if action != 'schedule':\n logger.error(u\"Invalid action %s for task_scheduler plugin\" % action)\n return False\n\n descriptor_name, task_to_schedule = plugin_config['action'].split('.')\n descriptor = DocRequisitiesStorage.get_batch_descriptor(descriptor_name)\n assert descriptor\n batch_actions = descriptor.get('actions') or {}\n if task_to_schedule not in batch_actions:\n logger.error(u\"Invalid task name %s for task_scheduler plugin\" % task_to_schedule)\n return False\n\n source_data = copy(plugin_config)\n source_data['<batch>'] = batch_db\n source_data['<app_config>'] = config\n source_data['<current_user>'] = batch_db._owner\n source_data['<previous_event_data>'] = previous_event_data\n\n task_id = plugin_config.get('task_id', None)\n\n if task_id and isinstance(task_id, dict):\n task_id = transform_field_with_schema(source_data, task_id).db_value()\n\n dt_type = plugin_config['dt_type']\n\n event_data = {\n 'task_to_schedule': task_to_schedule,\n 'dt_type': dt_type,\n '<action_dt>': datetime.utcnow(),\n }\n\n if dt_type == 'exact_time_every_day':\n dt_format 
= plugin_config['dt_format']\n dt_exact_time = plugin_config['dt_exact_time']\n dt_time_zone_region = plugin_config.get('dt_time_zone_region', u'Москва')\n dt_not_earlier = plugin_config.get('dt_not_earlier', None)\n\n if dt_format and isinstance(dt_format, dict):\n dt_format = transform_field_with_schema(source_data, dt_format).db_value()\n if dt_exact_time and isinstance(dt_exact_time, dict):\n dt_exact_time = transform_field_with_schema(source_data, dt_exact_time).db_value()\n if dt_time_zone_region and isinstance(dt_time_zone_region, dict):\n dt_time_zone_region = transform_field_with_schema(source_data, dt_time_zone_region)\n if dt_time_zone_region:\n dt_time_zone_region = dt_time_zone_region.db_value()\n if dt_not_earlier and isinstance(dt_not_earlier, dict):\n dt_not_earlier = transform_field_with_schema(source_data, dt_not_earlier).db_value()\n\n tz_name = RFRegionsEnum.get_time_zone(dt_time_zone_region) or \"Europe/Moscow\"\n now = datetime.utcnow()\n dt = calc_fixed_time_not_earlier(now, dt_exact_time, dt_not_earlier - now, tz_name)\n\n event_data.update({\n 'dt': dt,\n 'dt_format': dt_format,\n 'dt_time_zone_region': dt_time_zone_region,\n 'dt_not_earlier': dt_not_earlier,\n 'tz_name': tz_name\n })\n else:\n raise NotImplementedError()\n\n try:\n new_task = CeleryScheduler.post('fw.async_tasks.scheduler.run_scheduled_task', eta=dt, args=(descriptor_name, task_to_schedule, batch_db.id),\n task_id=task_id, force_replace_task=True)\n event_data['task_id'] = new_task.id\n except Exception:\n logger.exception(u\"Failed to schedule task\")\n return False\n return True\n\ndef register(class_loader):\n pass\n" }, { "alpha_fraction": 0.3452831208705902, "alphanum_fraction": 0.35497042536735535, "avg_line_length": 54.97810363769531, "blob_id": "b3118e700445ddeffdf6d4f45e59c37a7c139508", "content_id": "520d197471aaa5af763fee3955c8b0389aaf0f40", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 298819, "license_type": "no_license", "max_line_length": 149, "num_lines": 5298, "path": "/app/services/llc_reg/documents/initial_db_data.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom copy import deepcopy\nimport copy\nimport os\nfrom fw.documents.enums import DocumentTypeEnum, BatchStatusEnum, DocumentKindEnum, TaxType, DocumentBatchTypeEnum\nfrom fw.documents.field_matchers import MultilineFieldMatcher, ConcatFieldAttributeMatcher, FieldSetMatcher, \\\n ConstantMatcher, SimpleMatcher, ArrayAttributeMatcher\nfrom fw.documents.field_matchers import FieldAttributeMatcher\nfrom fw.documents.fields.doc_fields import CompanyObject\nfrom fw.documents.template_renderer import RenderingVariants\nfrom fw.documents.common_schema_fields import (ADDRESS_FIELD, GENERAL_MANAGER_CAPTION_FIELD,\n FULL_NAME_FIELD, JOB_MAIN_CODE_FIELD, JOB_CODE_ARRAY_FIELD,\n STARTER_CAPITAL_FIELD,\n USE_FOREIGN_COMPANY_NAME_FIELD,\n USE_NATIONAL_LANGUAGE_COMPANY_NAME_FIELD, FOREIGN_FULL_NAME_FIELD,\n FOREIGN_SHORT_NAME_FIELD, FOREIGN_LANGUAGE_FIELD,\n NATIONAL_LANGUAGE_FIELD,\n NATIONAL_LANGUAGE_FULL_NAME_FIELD, NATIONAL_LANGUAGE_SHORT_NAME_FIELD,\n SHORT_NAME_FIELD, GENERAL_MANAGER_FIELD, FOUNDERS_COUNT_FIELD,\n BOARD_OF_DIRECTORS_FIELD, ADDRESS_TYPE_FIELD, ADDRESS_TYPE_FIELD_NR,\n DOC_DATE_FIELD_TODAY, DOC_DATE_OR_TODAY)\nfrom services.llc_reg.documents.enums import DocumentDeliveryTypeEnum, FounderTypeEnum, JSCMemberTypeEnum, \\\n DocumentDeliveryTypeStrEnum, RegistrationWay, AddressType\n\n\ndef 
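# The 'exact_time_every_day' branch above resolves a wall-clock time in the region's time
# zone, no earlier than a given moment, before handing the UTC datetime to CeleryScheduler.
# Assuming calc_fixed_time_not_earlier behaves as its name and arguments suggest, the
# computation is roughly equivalent to this pytz-based sketch (illustrative only):
def next_local_occurrence(now_utc, hh_mm, min_delay, tz_name):
    import pytz
    from datetime import timedelta
    # Earliest admissible moment, expressed in the target time zone.
    earliest = pytz.utc.localize(now_utc + min_delay).astimezone(pytz.timezone(tz_name))
    hour, minute = [int(x) for x in hh_mm.split(':')]  # 'HH:MM' format is an assumption
    candidate = earliest.replace(hour=hour, minute=minute, second=0, microsecond=0)
    if candidate < earliest:
        candidate += timedelta(days=1)  # that wall-clock time already passed today
    return candidate.astimezone(pytz.utc).replace(tzinfo=None)  # naive UTC for the scheduler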
get_test_resource_name(config, resource_rel_path):\n resources_path = config['resources_path']\n return os.path.join(resources_path, resource_rel_path)\n\n\ndef load_data(config):\n FOUNDERS_REF_LIST_TEMP_VARIABLE = {\n \"#array_mapping\": {\n \"array_source_field\": {\n \"#field\": \"founders\"\n },\n \"target_items\": {\n \"#cases\": {\n \"set\": {\n \"cur_founder_type\": {\n \"#array_item_field\": \"founder_type\"\n }\n },\n \"list\": [{\n \"conditions\": {\n \"cur_founder_type\": 1\n },\n \"value\": {\n \"#array_item_field\": \"person->_id\"\n }\n }, {\n \"conditions\": {\n \"cur_founder_type\": 2\n },\n \"value\": {\n \"#array_item_field\": \"company->_id\"\n }\n }],\n \"default\": {\n \"value\": None\n }}}}}\n\n ADDRESS_FIELD_WITH_OKATO = deepcopy(ADDRESS_FIELD)\n ADDRESS_FIELD_WITH_OKATO['override_fields_kwargs']['okato'] = {\n 'required': True\n }\n ADDRESS_FIELD_WITH_OKATO['required'] = True\n\n MAP_OBTAIN_WAY = {\n \"field\": \"obtain_way\",\n \"map\": {\n \"founder\": DocumentDeliveryTypeEnum.DDT_ISSUE_TO_THE_APPLICANT,\n \"responsible_person\": DocumentDeliveryTypeEnum.DDT_ISSUE_TO_THE_APPLICANT_OR_AGENT,\n \"mail\": DocumentDeliveryTypeEnum.DDT_SEND_BY_MAIL\n }\n }\n FOUNDER_APPLICANT_FIELD = {\n \"name\": \"founder_applicant\",\n \"type\": \"calculated\",\n \"field_type\": \"FounderObject\",\n \"required\": False,\n \"suppress_validation_errors\": True,\n \"override_fields_kwargs\": {\n \"documents_recipient_type\": {\n \"required\": True\n }\n },\n \"value\": {\n \"#set\": {\n \"selected_founder\": {\n \"#pick_array_item\": {\n \"array_field\": \"founders\",\n \"conditions\": {\n \"#or\": [{\n \"founders|size\": 1,\n }, {\n \"founders|size\": {\n \"#gt\": 1\n },\n \"obtain_way\": \"responsible_person\",\n \"<loop_item>->founder\": \"@selected_moderator\"\n }, {\n \"founders|size\": {\n \"#gt\": 1\n },\n \"obtain_way\": \"founder\",\n \"<loop_item>->founder\": \"@doc_obtain_founder\"\n }, {\n \"founders|size\": {\n \"#gt\": 1\n },\n \"obtain_way\": \"mail\",\n \"<loop_item>->founder\": \"@selected_moderator\"\n }]\n }\n }\n }\n },\n \"#object\": {\n \"founder_type\": {\n \"#value_map\": {\n \"field\": \"selected_founder->founder->type\",\n \"map\": {\n \"person\": 1,\n \"company\": 2\n }\n }\n },\n \"company\": {\n \"#object\": {\n \"type\": \"CompanyObject\",\n \"_id\": {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"selected_founder->founder->type\": \"company\"\n },\n \"value\": {\n \"#field\": \"selected_founder->founder->_id\"\n }\n }],\n \"default\": {\n \"value\": \"\"\n }\n }\n }\n }\n },\n \"person\": {\n \"#object\": {\n \"type\": \"PrivatePerson\",\n \"_id\": {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"selected_founder->founder->type\": \"person\"\n },\n \"value\": {\n \"#field\": \"selected_founder->founder->_id\"\n }\n }],\n \"default\": {\n \"value\": \"\"\n }\n }\n }\n }\n },\n \"nominal_capital\": {\n \"#field\": \"selected_founder->nominal_capital\",\n },\n \"share\": {\n \"#object\": {\n \"type\": {\n \"#field\": \"share_type\"\n },\n \"value\": {\n \"#field\": \"selected_founder->share\"\n }\n }\n },\n \"documents_recipient_type\": {\n \"#value_map\": MAP_OBTAIN_WAY\n }\n }\n }\n }\n\n FOUNDERS_FIELD = {\n \"name\": \"founders\",\n \"type\": \"calculated\",\n \"field_type\": \"DocArrayField\",\n \"cls\": \"FounderObject\",\n \"depends_on\": [\"founder_applicant\"],\n \"required\": True,\n \"subfield_kwargs\": {\n \"error_field_mapping\": {\n \"company\": \"founder\",\n \"person\": \"founder\",\n \"documents_recipient_type\": \"\", # 
suppress error\n \"share\": \"founder.share.\",\n \"nominal_capital\": \"founder.nominal_capital.\"\n }\n },\n \"value\": {\n \"#array_mapping\": {\n \"array_source_field\": {\n \"#field\": \"founders\"\n },\n \"target_items\": {\n \"#object\": {\n \"founder_type\": {\n \"#value_map\": {\n \"field\": {\n \"#array_item_field\": \"founder->type\"\n },\n \"map\": {\n \"person\": 1,\n \"company\": 2\n }\n }\n },\n \"nominal_capital\": {\n \"#array_item_field\": \"nominal_capital\"\n },\n \"share\": {\n \"#object\": {\n \"type\": {\n \"#field\": \"share_type\",\n },\n \"value\": {\n \"#array_item_field\": \"share\"\n }\n }\n },\n \"person\": {\n \"#cases\": {\n \"set\": {\n \"cur_founder_type\": {\n \"#array_item_field\": \"founder->type\"\n }\n },\n \"list\": [{\n \"conditions\": {\n \"cur_founder_type\": \"person\"\n },\n \"value\": {\n \"#array_item_field\": \"founder\"\n }\n }],\n \"default\": {\n \"value\": None\n }\n }\n },\n \"company\": {\n \"#cases\": {\n \"set\": {\n \"cur_founder_type\": {\n \"#array_item_field\": \"founder->type\"\n }\n },\n \"list\": [{\n \"conditions\": {\n \"cur_founder_type\": \"company\"\n },\n \"value\": {\n \"#array_item_field\": \"founder\"\n }\n }],\n \"default\": {\n \"value\": None\n }\n }\n },\n \"documents_recipient_type\": {\n \"#cases\": {\n \"set\": {\n \"cur_founder_ref\": {\n \"#array_item_field\": \"founder\"\n },\n \"cur_founders_count\": {\n \"#size\": \"founders\"\n }\n },\n \"list\": [\n {\n \"conditions\": {\n \"cur_founders_count\": 1\n },\n \"value\": {\n \"#value_map\": MAP_OBTAIN_WAY\n }\n }, {\n \"conditions\": {\n \"cur_founders_count\": {\n \"#gt\": 1\n },\n \"obtain_way\": \"founder\",\n \"cur_founder_ref\": \"@doc_obtain_founder\"\n },\n \"value\": {\n \"#value_map\": MAP_OBTAIN_WAY\n }\n }, {\n \"conditions\": {\n \"cur_founders_count\": {\n \"#gt\": 1\n },\n \"obtain_way\": \"responsible_person\",\n \"cur_founder_ref\": \"@selected_moderator\"\n },\n \"value\": {\n \"#value_map\": MAP_OBTAIN_WAY\n }\n }, {\n \"conditions\": {\n \"cur_founders_count\": {\n \"#gt\": 1\n },\n \"obtain_way\": \"mail\",\n \"cur_founder_ref\": \"@selected_moderator\"\n },\n \"value\": {\n \"#value_map\": MAP_OBTAIN_WAY\n }\n }\n ],\n \"default\": {\n \"value\": None\n }\n }\n },\n \"duplicate_fio\": {\n \"#exec\": {\n \"module\": \"llc_reg_methods\",\n \"method\": \"check_founder_has_same_fio\",\n \"kwargs\": {\n \"founders\": {\n \"#field\": \"founders\"\n },\n \"founder\": {\n \"#array_item_field\": \"founder\"\n }\n }\n }\n },\n \"is_starter_capital_dividable\": {\n \"#exec\": {\n \"module\": \"llc_reg_methods\",\n \"method\": \"is_starter_capital_dividable\",\n \"kwargs\": {\n \"founder_share\": {\n \"#array_item_field\": \"share\"\n },\n \"starter_capital\": {\n \"#field\": \"starter_capital->value->value\"\n },\n \"share_type\": {\n \"#field\": \"share_type\"\n }\n }\n }\n }\n }\n }\n }\n },\n \"validator\": {\n \"#set\": {\n \"total_share\": {\n \"#aggregate\": {\n \"field\": \"values\",\n \"attr\": \"share.normal\",\n \"operator\": \"add\"\n }\n },\n \"founders_count\": {\n \"#size\": \"values\"\n }\n },\n \"conditions\": [{\n \"#or\": [{\n \"total_share\": {\n \"#almost_equal\": 1\n }\n }, {\n \"values\": {\n \"#size\": 0\n }\n }],\n \"founders_count\": {\n \"#gt\": 0\n }\n }, {\n \"founders_count\": {\n \"#lte\": 50\n }\n }],\n \"error_field\": \"\"\n }\n }\n\n P11001_TEMPLATE = {\n \"template_name\": \"P11001_template\",\n \"doc_name\": DocumentTypeEnum.DT_P11001,\n \"is_strict\": True,\n \"pages\": [\n {\n \"page_file\": 
get_test_resource_name(config, \"11001/pg_0001.pdf\"),\n \"array_fields\": [\n {\n \"name\": \"page1-polnoe_naimenovanie__line{{item}}\",\n \"count\": 6,\n \"field-length\": 40,\n \"case\": \"upper\"\n }, {\n \"name\": \"page1-sokr_naimenovanie__line{{item}}\",\n \"count\": 4,\n \"field-length\": 40,\n \"case\": \"upper\"\n },\n {\n \"name\": \"page1-district_name__line{{item}}\",\n \"count\": 2,\n \"field-length\": [28, 40],\n \"case\": \"upper\"\n },\n ],\n \"fields\": [\n {\n \"name\": \"page1-subject_code\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n },\n {\n \"name\": \"page1-pocht_index\",\n \"field-length\": 6,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n },\n {\n \"name\": \"page1-district_type\",\n \"field-length\": 10,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n },\n {\n \"name\": \"page1-city_type\",\n \"field-length\": 10,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n },\n {\n \"name\": \"page1-city_name\",\n \"field-length\": 28,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }\n ]\n },\n {\n \"page_file\": get_test_resource_name(config, \"11001/pg_0002.pdf\"),\n \"fields\": [\n {\n \"name\": \"page2-nas_punkt_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page2-street_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page2-house_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page2-house_number\",\n \"field-length\": 8,\n \"case\": \"upper\"\n }, {\n \"name\": \"page2-corpus_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page2-corpus_number\",\n \"field-length\": 8,\n \"case\": \"upper\"\n }, {\n \"name\": \"page2-office_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page2-office_number\",\n \"field-length\": 8,\n \"case\": \"upper\"\n }, {\n \"name\": \"page2-nach_capital_type\",\n \"field-length\": 1\n }, {\n \"name\": \"page2-akc_obchestvo_member_type\",\n \"field-length\": 1\n }, {\n \"name\": \"page2-nach_capital_value__currency-maj\",\n \"field-length\": 15,\n \"text-align\": \"right\"\n }, {\n \"name\": \"page2-nach_capital_value__currency-min\",\n \"field-length\": 4,\n \"text-align\": \"left\"\n }\n ],\n \"array_fields\": [\n {\n \"name\": \"page2-nas_punkt_name__line{{item}}\",\n \"count\": 2,\n \"field-length\": [28, 40],\n \"case\": \"upper\"\n }, {\n \"name\": \"page2-street_name__line{{item}}\",\n \"count\": 2,\n \"field-length\": [28, 40],\n \"case\": \"upper\"\n },\n ]\n },\n {\n \"page_file\": get_test_resource_name(config, \"11001/pg_0003.pdf\"),\n \"multiple\": True,\n \"array_field\": \"founders\",\n \"array_item_filter\": {\n \"founder_type\": FounderTypeEnum.FT_COMPANY\n },\n \"fields\": [\n {\n \"name\": \"page3-ogrn\",\n \"field-length\": 13\n }, {\n \"name\": \"page3-inn\",\n \"field-length\": 10\n }, {\n \"name\": \"page3-nominal_part_value__currency-maj\",\n \"field-length\": 15,\n \"text-align\": \"right\"\n }, {\n \"name\": \"page3-nominal_part_value__currency-min\",\n \"field-length\": 4,\n \"text-align\": \"left\"\n }, {\n \"name\": \"page3-razmer_doli__percent-maj\",\n \"field-length\": 3,\n \"text-align\": \"right\"\n }, {\n \"name\": \"page3-razmer_doli__percent-min\",\n \"field-length\": 15,\n \"text-align\": \"left\"\n }, {\n \"name\": \"page3-razmer_doli__decimal-maj\",\n \"field-length\": 1,\n \"text-align\": \"right\"\n }, {\n \"name\": \"page3-razmer_doli__decimal-min\",\n \"field-length\": 15,\n \"text-align\": \"left\"\n }, {\n 
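# Each field spec in this template describes a fixed-width box on the scanned government
# form: "field-length" is the number of character cells, "text-align" picks the side the
# value is flushed to, "space-filler" pads the remaining cells (e.g. "0" for numeric codes)
# and "case" normalizes letters. A sketch of how such a spec might be applied (illustrative;
# the real form renderer lives elsewhere in the codebase):
def fill_box(value, field_length, text_align='left', space_filler=u' ', case=None):
    text = unicode(value)
    if case == 'upper':
        text = text.upper()
    text = text[:field_length]  # clip to the number of cells
    if text_align == 'right':
        return text.rjust(field_length, space_filler)
    return text.ljust(field_length, space_filler)

assert fill_box(u'7', 2, text_align='right', space_filler=u'0') == u'07'  # e.g. a subject code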
\"name\": \"page3-razmer_doli__fraction-maj\",\n \"field-length\": 15,\n \"text-align\": \"right\"\n }, {\n \"name\": \"page3-razmer_doli__fraction-min\",\n \"field-length\": 15,\n \"text-align\": \"left\"\n }, {\n \"name\": \"$page\",\n \"field-length\": 3,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }\n ],\n \"array_fields\": [\n {\n \"name\": \"page3-polnoe_naimenovanie__line{{item}}\",\n \"count\": 6,\n \"field-length\": 40,\n \"case\": \"upper\"\n }\n ]\n },\n # {\n # \"page_file\": get_test_resource_name(config, \"11001/pg_0004.pdf\"),\n # \"multiple\" : True,\n # \"array_field\" : \"founders\",\n # \"array_item_filter\" : {\n # \"founder_type\" : FounderTypeEnum.FT_FOREIGN_COMPANY\n # },\n # \"fields\": [\n # {\n # \"name\": \"page4-inn\",\n # \"field-length\": 10\n # }, {\n # \"name\" : \"page4-reg_number\",\n # \"field-length\": 25\n # },{\n # \"name\" : \"page4-country_code\",\n # \"field-length\": 3,\n # \"text-align\": \"right\",\n # \"space-filler\" : u\"0\",\n # }, {\n # \"name\": \"page4-reg_date__day\",\n # \"field-length\": 2,\n # \"text-align\": \"right\",\n # \"space-filler\" : u\"0\",\n # }, {\n # \"name\": \"page4-reg_date__month\",\n # \"field-length\": 2,\n # \"text-align\": \"right\",\n # \"space-filler\" : u\"0\",\n # }, {\n # \"name\": \"page4-reg_date__year\",\n # \"field-length\": 4\n # }, {\n # \"name\": \"page4-nominal_part_value__currency-maj\",\n # \"field-length\": 15,\n # \"text-align\": \"right\"\n # }, {\n # \"name\": \"page4-nominal_part_value__currency-min\",\n # \"field-length\": 4,\n # \"text-align\": \"left\"\n # }, {\n # \"name\": \"page4-part_value__percent-maj\",\n # \"field-length\": 3,\n # \"text-align\": \"right\"\n # }, {\n # \"name\": \"page4-part_value__percent-min\",\n # \"field-length\": 15,\n # \"text-align\": \"left\"\n # }, {\n # \"name\": \"page4-part_value__decimal-maj\",\n # \"field-length\": 1,\n # \"text-align\": \"right\"\n # }, {\n # \"name\": \"page4-part_value__decimal-min\",\n # \"field-length\": 15,\n # \"text-align\": \"left\"\n # }, {\n # \"name\": \"page4-part_value__fraction-left\",\n # \"field-length\": 15,\n # \"text-align\": \"right\"\n # }, {\n # \"name\": \"page4-part_value__fraction-right\",\n # \"field-length\": 15,\n # \"text-align\": \"left\"\n # }, {\n # \"name\" : \"$page\",\n # \"field-length\": 3,\n # \"text-align\": \"right\",\n # \"space-filler\" : u\"0\",\n # }\n # ],\n # \"array_fields\": [\n # {\n # \"name\": \"page4-full_name__line{{item}}\",\n # \"count\": 6,\n # \"field-length\": 40,\n # \"case\": \"upper\"\n # }, {\n # \"name\": \"page4-reg_organ_name__line{{item}}\",\n # \"count\": 4,\n # \"field-length\": 40,\n # \"case\": \"upper\"\n # }, {\n # \"name\": \"page4-address__line{{item}}\",\n # \"count\": 4,\n # \"field-length\": 40,\n # \"case\": \"upper\"\n # }\n # ]\n # },\n {\n \"page_file\": [get_test_resource_name(config, \"11001/pg_0005.pdf\"),\n get_test_resource_name(config, \"11001/pg_0006.pdf\")],\n \"multiple\": True,\n \"array_field\": \"founders\",\n \"array_item_filter\": {\n \"founder_type\": FounderTypeEnum.FT_PERSON\n },\n \"fields\": [\n {\n \"name\": \"page5-surname\",\n \"field-length\": 35,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page5-name\",\n \"field-length\": 35,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page5-patronymic\",\n \"field-length\": 35,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page5-inn\",\n \"field-length\": 12\n }, {\n \"name\": \"page5-birth_date__day\",\n 
\"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page5-birth_date__month\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page5-birth_date__year\",\n \"field-length\": 4\n }, {\n \"name\": \"page5-ogrnip\",\n \"field-length\": 15\n }, {\n \"name\": \"page5-doc_type\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page5-doc_number\",\n \"field-length\": 25\n }, {\n \"name\": \"page5-issue_date__day\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page5-issue_date__month\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page5-issue_date__year\",\n \"field-length\": 4\n }, {\n \"name\": \"page5-subdivision_code__left\",\n \"field-length\": 3,\n \"text-align\": \"left\",\n }, {\n \"name\": \"page5-subdivision_code__right\",\n \"field-length\": 3,\n \"text-align\": \"right\",\n }, {\n \"name\": \"page6-subject_code\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page6-postal_index\",\n \"field-length\": 6,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page6-district_type\",\n \"field-length\": 10,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page6-city_type\",\n \"field-length\": 10,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page6-city_name\",\n \"field-length\": 28,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page6-nas_punkt_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page6-street_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page6-building_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page6-building_number\",\n \"field-length\": 8,\n \"case\": \"upper\"\n }, {\n \"name\": \"page6-korpus_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page6-korpus_number\",\n \"field-length\": 8,\n \"case\": \"upper\"\n }, {\n \"name\": \"page6-flat_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page6-flat_number\",\n \"field-length\": 8,\n \"case\": \"upper\"\n }, {\n \"name\": \"page6-living_country_code\",\n \"field-length\": 3,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page6-nominal_part_value__currency-maj\",\n \"field-length\": 15,\n \"text-align\": \"right\"\n }, {\n \"name\": \"page6-nominal_part_value__currency-min\",\n \"field-length\": 4,\n \"text-align\": \"left\"\n }, {\n \"name\": \"page6-part_value__percent-maj\",\n \"field-length\": 3,\n \"text-align\": \"right\"\n }, {\n \"name\": \"page6-part_value__percent-min\",\n \"field-length\": 15,\n \"text-align\": \"left\"\n }, {\n \"name\": \"page6-part_value__decimal-maj\",\n \"field-length\": 1,\n \"text-align\": \"right\"\n }, {\n \"name\": \"page6-part_value__decimal-min\",\n \"field-length\": 15,\n \"text-align\": \"left\"\n }, {\n \"name\": \"page6-part_value__fraction-left\",\n \"field-length\": 15,\n \"text-align\": \"right\"\n }, {\n \"name\": \"page6-part_value__fraction-right\",\n \"field-length\": 15,\n \"text-align\": \"left\"\n }, {\n \"name\": \"$page\",\n \"field-length\": 3,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }\n ],\n \"array_fields\": [\n {\n \"name\": \"page6-nas_punkt_name__line{{item}}\",\n 
\"count\": 2,\n \"field-length\": [28, 40],\n \"case\": \"upper\"\n }, {\n \"name\": \"page6-street_name__line{{item}}\",\n \"count\": 2,\n \"field-length\": [28, 40],\n \"case\": \"upper\"\n }, {\n \"name\": \"page5-birth_place__line{{item}}\",\n \"count\": 2,\n \"field-length\": 40,\n \"case\": \"upper\"\n }, {\n \"name\": \"page5-issuer__line{{item}}\",\n \"count\": 3,\n \"field-length\": [34, 40, 40],\n \"case\": \"upper\"\n }, {\n \"name\": \"page6-district_name__line{{item}}\",\n \"count\": 2,\n \"field-length\": [28, 40],\n \"case\": \"upper\"\n }, {\n \"name\": \"page6-living_address__line{{item}}\",\n \"count\": 2,\n \"field-length\": 40,\n \"case\": \"upper\"\n }\n ],\n },\n\n # {\n # \"page_file\": [get_test_resource_name(config, \"11001/pg_0007.pdf\"), get_test_resource_name(config, \"11001/pg_0008.pdf\")],\n # \"multiple\" : True,\n # \"array_field\" : \"founders\",\n # \"array_item_filter\" : {\n # \"founder_type\" : FounderTypeEnum.FT_GOV,\n # \"company.id.initialized\" : True\n # },\n # \"fields\": [\n # {\n # \"name\" : \"page7-uchreditel_type\",\n # \"field-length\": 1,\n # }, {\n # \"name\" : \"page7-uchreditel_subject_code\",\n # \"field-length\" : 2,\n # \"text-align\" : \"right\",\n # \"space-filler\" : u\"0\",\n # }, {\n # \"name\": \"page7-nominal_part_value__currency-maj\",\n # \"field-length\": 15,\n # \"text-align\": \"right\"\n # }, {\n # \"name\": \"page7-nominal_part_value__currency-min\",\n # \"field-length\": 4,\n # \"text-align\": \"left\"\n # }, {\n # \"name\": \"page7-nominal_part_value__percent-maj\",\n # \"field-length\": 3,\n # \"text-align\": \"right\"\n # }, {\n # \"name\": \"page7-nominal_part_value__percent-min\",\n # \"field-length\": 15,\n # \"text-align\": \"left\"\n # }, {\n # \"name\": \"page7-nominal_part_value__decimal-maj\",\n # \"field-length\": 1,\n # \"text-align\": \"right\"\n # }, {\n # \"name\": \"page7-nominal_part_value__decimal-min\",\n # \"field-length\": 15,\n # \"text-align\": \"left\"\n # }, {\n # \"name\": \"page7-nominal_part_value__fraction-left\",\n # \"field-length\": 15,\n # \"text-align\": \"right\"\n # }, {\n # \"name\": \"page7-nominal_part_value__fraction-right\",\n # \"field-length\": 15,\n # \"text-align\": \"left\"\n # }, {\n # \"name\": \"page8-ogrn\",\n # \"field-length\": 13\n # }, {\n # \"name\": \"page8-inn\",\n # \"field-length\": 10\n # }, {\n # \"name\" : \"$page\",\n # \"field-length\": 3,\n # \"text-align\": \"right\",\n # \"space-filler\" : u\"0\",\n # }\n # ],\n # \"array_fields\" : [\n # {\n # \"name\": \"page7-uchreditel_munic_obraz_name__line{{item}}\",\n # \"count\": 4,\n # \"field-length\": 40,\n # \"case\": \"upper\"\n # }, {\n # \"name\": \"page8-full_name__line{{item}}\",\n # \"count\": 6,\n # \"field-length\": 40,\n # \"case\": \"upper\"\n # }\n # ]\n # },\n\n # {\n # \"page_file\": [get_test_resource_name(config, \"11001/pg_0007.pdf\"),\n # get_test_resource_name(config, \"11001/pg_0008.pdf\"),\n # get_test_resource_name(config, \"11001/pg_0009.pdf\")],\n # \"multiple\" : True,\n # \"array_field\" : \"founders\",\n # \"array_item_filter\" : {\n # \"founder_type\" : FounderTypeEnum.FT_GOV,\n # \"person.id.initialized\" : True\n # },\n # \"fields\": [\n # {\n # \"name\" : \"page7-uchreditel_type\",\n # \"field-length\": 1,\n # }, {\n # \"name\" : \"page7-uchreditel_subject_code\",\n # \"field-length\" : 2,\n # \"text-align\" : \"right\",\n # \"space-filler\" : u\"0\",\n # }, {\n # \"name\": \"page7-nominal_part_value__currency-maj\",\n # \"field-length\": 15,\n # \"text-align\": \"right\"\n # }, 
{\n # \"name\": \"page7-nominal_part_value__currency-min\",\n # \"field-length\": 4,\n # \"text-align\": \"left\"\n # }, {\n # \"name\": \"page7-nominal_part_value__percent-maj\",\n # \"field-length\": 3,\n # \"text-align\": \"right\"\n # }, {\n # \"name\": \"page7-nominal_part_value__percent-min\",\n # \"field-length\": 15,\n # \"text-align\": \"left\"\n # }, {\n # \"name\": \"page7-nominal_part_value__decimal-maj\",\n # \"field-length\": 1,\n # \"text-align\": \"right\"\n # }, {\n # \"name\": \"page7-nominal_part_value__decimal-min\",\n # \"field-length\": 15,\n # \"text-align\": \"left\"\n # }, {\n # \"name\": \"page7-nominal_part_value__fraction-left\",\n # \"field-length\": 15,\n # \"text-align\": \"right\"\n # }, {\n # \"name\": \"page7-nominal_part_value__fraction-right\",\n # \"field-length\": 15,\n # \"text-align\": \"left\"\n # },{\n # \"name\" : \"$page\",\n # \"field-length\": 3,\n # \"text-align\": \"right\",\n # \"space-filler\" : u\"0\",\n # }, {\n # \"name\" : \"page8-surname\",\n # \"field-length\" : 35,\n # \"text-align\" : \"left\",\n # \"case\" : \"upper\"\n # }, {\n # \"name\" : \"page8-name\",\n # \"field-length\" : 35,\n # \"text-align\" : \"left\",\n # \"case\" : \"upper\"\n # }, {\n # \"name\" : \"page8-patronymic\",\n # \"field-length\" : 35,\n # \"text-align\" : \"left\",\n # \"case\" : \"upper\"\n # }, {\n # \"name\": \"page8-person_inn\",\n # \"field-length\": 12\n # }, {\n # \"name\": \"page8-birth_date__day\",\n # \"field-length\": 2,\n # \"text-align\": \"right\",\n # \"space-filler\" : u\"0\",\n # }, {\n # \"name\": \"page8-birth_date__month\",\n # \"field-length\": 2,\n # \"text-align\": \"right\",\n # \"space-filler\" : u\"0\",\n # }, {\n # \"name\": \"page8-birth_date__year\",\n # \"field-length\": 4\n # }, {\n # \"name\" : \"page9-doc_type_code\",\n # \"field-length\": 2,\n # \"text-align\": \"right\",\n # \"space-filler\" : u\"0\",\n # }, {\n # \"name\": \"page9-doc_number\",\n # \"field-length\": 25\n # }, {\n # \"name\": \"page9-doc_issue_date__day\",\n # \"field-length\": 2,\n # \"text-align\": \"right\",\n # \"space-filler\" : u\"0\",\n # }, {\n # \"name\": \"page9-doc_issue_date__month\",\n # \"field-length\": 2,\n # \"text-align\": \"right\",\n # \"space-filler\" : u\"0\",\n # }, {\n # \"name\": \"page9-doc_issue_date__year\",\n # \"field-length\": 4\n # }, {\n # \"name\" : \"page9-doc_issuer_subdivision_code__left\",\n # \"field-length\": 3,\n # \"text-align\": \"left\",\n # }, {\n # \"name\" : \"page9-doc_issuer_subdivision_code__right\",\n # \"field-length\": 3,\n # \"text-align\": \"right\",\n # }, {\n # \"name\" : \"page9-subject_code\",\n # \"field-length\" : 2,\n # \"text-align\" : \"right\",\n # \"space-filler\" : u\"0\",\n # }, {\n # \"name\" : \"page9-postal_index\",\n # \"field-length\" : 6,\n # \"text-align\" : \"right\",\n # \"space-filler\" : u\"0\",\n # }, {\n # \"name\" : \"page9-district_type\",\n # \"field-length\" : 10,\n # \"text-align\" : \"left\",\n # \"case\" : \"upper\"\n # }, {\n # \"name\" : \"page9-city_type\",\n # \"field-length\" : 10,\n # \"text-align\" : \"left\",\n # \"case\" : \"upper\"\n # }, {\n # \"name\" : \"page9-city_name\",\n # \"field-length\" : 28,\n # \"text-align\" : \"left\",\n # \"case\" : \"upper\"\n # }, {\n # \"name\": \"page9-nas_punkt_type\",\n # \"field-length\": 10,\n # \"case\": \"upper\"\n # }, {\n # \"name\": \"page9-street_type\",\n # \"field-length\": 10,\n # \"case\": \"upper\"\n # }, {\n # \"name\": \"page9-house_type\",\n # \"field-length\": 10,\n # \"case\": \"upper\"\n # }, {\n # 
\"name\": \"page9-house_number\",\n # \"field-length\": 8,\n # \"case\": \"upper\"\n # }, {\n # \"name\": \"page9-corpus_type\",\n # \"field-length\": 10,\n # \"case\": \"upper\"\n # }, {\n # \"name\": \"page9-corpus_number\",\n # \"field-length\": 8,\n # \"case\": \"upper\"\n # }, {\n # \"name\": \"page9-flat_type\",\n # \"field-length\": 10,\n # \"case\": \"upper\"\n # }, {\n # \"name\": \"page9-flat_number\",\n # \"field-length\": 8,\n # \"case\": \"upper\"\n # }, {\n # \"name\" : \"page9-living_country_code\",\n # \"field-length\": 3,\n # \"text-align\": \"right\",\n # \"space-filler\" : u\"0\",\n # }\n # ],\n # \"array_fields\" : [\n # {\n # \"name\": \"page7-uchreditel_munic_obraz_name__line{{item}}\",\n # \"count\": 4,\n # \"field-length\": 40,\n # \"case\": \"upper\"\n # }, {\n # \"name\": \"page9-nas_punkt_name__line{{item}}\",\n # \"count\": 2,\n # \"field-length\": [28, 40],\n # \"case\": \"upper\"\n # }, {\n # \"name\": \"page9-street_name__line{{item}}\",\n # \"count\": 2,\n # \"field-length\": [28, 40],\n # \"case\": \"upper\"\n # }, {\n # \"name\": \"page8-birth_place__{{item}}\",\n # \"count\": 2,\n # \"field-length\": 40,\n # \"case\": \"upper\"\n # }, {\n # \"name\": \"page9-doc_issuer__line{{item}}\",\n # \"count\": 3,\n # \"field-length\": [34, 40, 40],\n # \"case\": \"upper\"\n # }, {\n # \"name\": \"page9-district_name__line{{item}}\",\n # \"count\": 2,\n # \"field-length\": [28, 40],\n # \"case\": \"upper\"\n # }, {\n # \"name\": \"page9-living_address__line{{item}}\",\n # \"count\": 2,\n # \"field-length\": 40,\n # \"case\": \"upper\"\n # }\n # ]\n # },\n {\n \"page_file\": get_test_resource_name(config, \"11001/pg_0010.pdf\"),\n \"multiple\": True,\n \"array_field\": \"uit\",\n \"fields\": [\n {\n \"name\": \"page10-invest_fond_uprav_company_ogrn\",\n \"field-length\": 13\n }, {\n \"name\": \"page10-invest_fond_uprav_company_inn\",\n \"field-length\": 10\n }, {\n \"name\": \"page10-nominal_value__currency_maj\",\n \"field-length\": 15,\n \"text-align\": \"right\"\n }, {\n \"name\": \"page10-nominal_value__currency_min\",\n \"field-length\": 4,\n \"text-align\": \"left\"\n }, {\n \"name\": \"page10-nominal_value__percent_maj\",\n \"field-length\": 3,\n \"text-align\": \"right\"\n }, {\n \"name\": \"page10-nominal_value__percent_min\",\n \"field-length\": 15,\n \"text-align\": \"left\"\n }, {\n \"name\": \"page10-nominal_value__decimal_maj\",\n \"field-length\": 1,\n \"text-align\": \"right\"\n }, {\n \"name\": \"page10-nominal_value__decimal_min\",\n \"field-length\": 15,\n \"text-align\": \"left\"\n }, {\n \"name\": \"page10-nominal_value__fraction_maj\",\n \"field-length\": 15,\n \"text-align\": \"right\"\n }, {\n \"name\": \"page10-nominal_value__fraction_min\",\n \"field-length\": 15,\n \"text-align\": \"left\"\n }, {\n \"name\": \"$page\",\n \"field-length\": 3,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page8-surname\",\n \"field-length\": 35,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }\n ],\n \"array_fields\": [\n {\n \"name\": \"page10-invest_fond_name__line{{item}}\",\n \"count\": 6,\n \"field-length\": 40,\n \"case\": \"upper\"\n }, {\n \"name\": \"page10-invest_fond_uprav_company_full_name__line{{item}}\",\n \"count\": 6,\n \"field-length\": 40,\n \"case\": \"upper\"\n }]\n }, {\n \"page_file\": [get_test_resource_name(config, \"11001/pg_0011.pdf\"),\n get_test_resource_name(config, \"11001/pg_0012.pdf\")],\n \"fields\": [\n {\n \"name\": \"page11-surname\",\n \"field-length\": 35,\n \"text-align\": \"left\",\n 
\"case\": \"upper\"\n }, {\n \"name\": \"page11-name\",\n \"field-length\": 35,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page11-patronymic\",\n \"field-length\": 35,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page11-inn\",\n \"field-length\": 12\n }, {\n \"name\": \"page11-birth_date__day\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page11-birth_date__month\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page11-birth_date__year\",\n \"field-length\": 4\n }, {\n \"name\": \"page11-doc_type_code\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page11-doc_number\",\n \"field-length\": 25\n }, {\n \"name\": \"page11-doc_issue_date__day\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page11-doc_issue_date__month\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page11-doc_issue_date__year\",\n \"field-length\": 4\n }, {\n \"name\": \"page11-doc_issuer_subdivision_code__left\",\n \"field-length\": 3,\n \"text-align\": \"left\",\n }, {\n \"name\": \"page11-doc_issuer_subdivision_code__right\",\n \"field-length\": 3,\n \"text-align\": \"right\",\n }, {\n \"name\": \"page12-subject_code\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page12-postal_index\",\n \"field-length\": 6,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page12-district_type\",\n \"field-length\": 10,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page12-city_type\",\n \"field-length\": 10,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page12-city_name\",\n \"field-length\": 28,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page12-nas_punkt_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page12-street_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page12-house_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page12-house_number\",\n \"field-length\": 8,\n \"case\": \"upper\"\n }, {\n \"name\": \"page12-corpus_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page12-corpus_number\",\n \"field-length\": 8,\n \"case\": \"upper\"\n }, {\n \"name\": \"page12-flat_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page12-flat_number\",\n \"field-length\": 8,\n \"case\": \"upper\"\n }, {\n \"name\": \"page12-living_country_code\",\n \"field-length\": 3,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page12-phone_number\",\n \"field-length\": 20,\n }, {\n \"name\": \"$page\",\n \"field-length\": 3,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }\n ],\n \"array_fields\": [\n {\n \"name\": \"page12-nas_punkt_name__line{{item}}\",\n \"count\": 2,\n \"field-length\": [28, 40],\n \"case\": \"upper\"\n }, {\n \"name\": \"page12-street_name__line{{item}}\",\n \"count\": 2,\n \"field-length\": [28, 40],\n \"case\": \"upper\"\n }, {\n \"name\": \"page11-birth_place__line{{item}}\",\n \"count\": 2,\n \"field-length\": 40,\n \"case\": \"upper\"\n }, {\n \"name\": \"page11-doc_issuer__line{{item}}\",\n \"count\": 3,\n \"field-length\": [34, 40, 40],\n \"case\": \"upper\"\n }, {\n 
\"name\": \"page12-district_name__line{{item}}\",\n \"count\": 2,\n \"field-length\": [28, 40],\n \"case\": \"upper\"\n }, {\n \"name\": \"page12-living_address__line{{item}}\",\n \"count\": 2,\n \"field-length\": 40,\n \"case\": \"upper\"\n }, {\n \"name\": \"page11-title__line{{item}}\",\n \"count\": 2,\n \"field-length\": 80,\n \"case\": \"upper\"\n }\n ],\n }, {\n \"variants\": {\n \"type\": RenderingVariants.TYPE_RENDER_FIRST_MATCHING,\n \"cases\": [{\n \"page_file\": [get_test_resource_name(config, \"11001/pg_0013.pdf\"),\n get_test_resource_name(config, \"11001/pg_0014.pdf\")],\n \"field_matcher_set\": \"%page_Je-set-1\",\n \"conditions\": {\n \"management_company.company.id.initialized\": True\n }\n }, {\n \"page_file\": [get_test_resource_name(config, \"11001/pg_0013.pdf\"),\n get_test_resource_name(config, \"11001/pg_0014.pdf\"),\n get_test_resource_name(config, \"11001/pg_0015.pdf\")],\n \"field_matcher_set\": \"%page_Je-set-3\",\n \"conditions\": {\n \"management_company.foreign_company.id.initialized\": True,\n \"management_company.russian_agent.id.initialized\": True\n }\n }, {\n \"page_file\": [get_test_resource_name(config, \"11001/pg_0013.pdf\"),\n get_test_resource_name(config, \"11001/pg_0014.pdf\")],\n \"field_matcher_set\": \"%page_Je-set-2\",\n \"conditions\": {\n \"management_company.foreign_company.id.initialized\": True\n }\n }]\n },\n \"fields\": [\n {\n \"name\": \"page13-ogrn\",\n \"field-length\": 13\n }, {\n \"name\": \"page13-inn\",\n \"field-length\": 10\n }, {\n \"name\": \"page13-reg_number\",\n \"field-length\": 25\n }, {\n \"name\": \"page13-country_code\",\n \"field-length\": 3,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page13-reg_date__day\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page13-reg_date__month\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page13-reg_date__year\",\n \"field-length\": 4\n }, {\n \"name\": \"$page\",\n \"field-length\": 3,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page14-subject_code\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page14-postal_index\",\n \"field-length\": 6,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page14-district_type\",\n \"field-length\": 10,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page14-city_type\",\n \"field-length\": 10,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page14-city_name\",\n \"field-length\": 28,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page14-nas_punkt_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page14-street_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page14-house_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page14-house_number\",\n \"field-length\": 8,\n \"case\": \"upper\"\n }, {\n \"name\": \"page14-corpus_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page14-corpus_number\",\n \"field-length\": 8,\n \"case\": \"upper\"\n }, {\n \"name\": \"page14-flat_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page14-flat_number\",\n \"field-length\": 8,\n \"case\": \"upper\"\n }, {\n \"name\": \"page14-phone_number\",\n \"field-length\": 20,\n }, {\n \"name\": \"page14-surname\",\n 
\"field-length\": 35,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page14-name\",\n \"field-length\": 35,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page14-patronymic\",\n \"field-length\": 35,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page14-inn\",\n \"field-length\": 12\n }, {\n \"name\": \"page14-birth_date__day\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page14-birth_date__month\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page14-birth_date__year\",\n \"field-length\": 4\n }, {\n \"name\": \"page15-doc_type_code\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page15-doc_number\",\n \"field-length\": 25\n }, {\n \"name\": \"page15-doc_issue_date__day\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page15-doc_issue_date__month\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page15-doc_issue_date__year\",\n \"field-length\": 4\n }, {\n \"name\": \"page15-doc_issuer_subdivision_code__left\",\n \"field-length\": 3,\n \"text-align\": \"left\",\n }, {\n \"name\": \"page15-doc_issuer_subdivision_code__right\",\n \"field-length\": 3,\n \"text-align\": \"right\",\n }, {\n \"name\": \"page15-subject_code\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page15-postal_index\",\n \"field-length\": 6,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page15-district_type\",\n \"field-length\": 10,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page15-city_type\",\n \"field-length\": 10,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page15-city_name\",\n \"field-length\": 28,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page15-nas_punkt_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page15-street_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page15-house_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page15-house_number\",\n \"field-length\": 8,\n \"case\": \"upper\"\n }, {\n \"name\": \"page15-corpus_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page15-corpus_number\",\n \"field-length\": 8,\n \"case\": \"upper\"\n }, {\n \"name\": \"page15-flat_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page15-flat_number\",\n \"field-length\": 8,\n \"case\": \"upper\"\n }, {\n \"name\": \"page15-living_country_code\",\n \"field-length\": 3,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page15-phone_number\",\n \"field-length\": 20,\n }\n ],\n \"array_fields\": [{\n \"name\": \"page13-full_name__line{{item}}\",\n \"count\": 6,\n \"field-length\": 40,\n \"case\": \"upper\"\n }, {\n \"name\": \"page13-reg_organ_name__line{{item}}\",\n \"count\": 4,\n \"field-length\": 40,\n \"case\": \"upper\"\n }, {\n \"name\": \"page13-reg_organ_address__line{{item}}\",\n \"count\": 3,\n \"field-length\": 40,\n \"case\": \"upper\"\n }, {\n \"name\": \"page13-predstav_full_name__line{{item}}\",\n \"count\": 6,\n \"field-length\": 40,\n \"case\": \"upper\"\n }, {\n \"name\": \"page14-nas_punkt_name__line{{item}}\",\n \"count\": 2,\n 
\"field-length\": [28, 40],\n \"case\": \"upper\"\n }, {\n \"name\": \"page14-street_name__line{{item}}\",\n \"count\": 2,\n \"field-length\": [28, 40],\n \"case\": \"upper\"\n }, {\n \"name\": \"page14-district_name__line{{item}}\",\n \"count\": 2,\n \"field-length\": [28, 40],\n \"case\": \"upper\"\n }, {\n \"name\": \"page15-nas_punkt_name__line{{item}}\",\n \"count\": 2,\n \"field-length\": [28, 40],\n \"case\": \"upper\"\n }, {\n \"name\": \"page15-street_name__line{{item}}\",\n \"count\": 2,\n \"field-length\": [28, 40],\n \"case\": \"upper\"\n }, {\n \"name\": \"page14-birth_place__line{{item}}\",\n \"count\": 2,\n \"field-length\": 40,\n \"case\": \"upper\"\n }, {\n \"name\": \"page15-doc_issuer__line{{item}}\",\n \"count\": 3,\n \"field-length\": [34, 40, 40],\n \"case\": \"upper\"\n }, {\n \"name\": \"page15-district_name__line{{item}}\",\n \"count\": 2,\n \"field-length\": [28, 40],\n \"case\": \"upper\"\n }, {\n \"name\": \"page15-living_address__line{{item}}\",\n \"count\": 2,\n \"field-length\": 40,\n \"case\": \"upper\"\n }]\n }, {\n \"conditions\": {\n \"manager.id.initialized\": True\n },\n \"page_file\": [get_test_resource_name(config, \"11001/pg_0016.pdf\"),\n get_test_resource_name(config, \"11001/pg_0017.pdf\")],\n \"fields\": [\n {\n \"name\": \"$page\",\n \"field-length\": 3,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page16-ogrnip\",\n \"field-length\": 15\n }, {\n \"name\": \"page16-surname\",\n \"field-length\": 35,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page16-name\",\n \"field-length\": 35,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page16-patronymic\",\n \"field-length\": 35,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page16-inn\",\n \"field-length\": 12\n }, {\n \"name\": \"page16-birth_date__day\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page16-birth_date__month\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page16-birth_date__year\",\n \"field-length\": 4\n }, {\n \"name\": \"page16-doc_type_code\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page16-doc_type_number\",\n \"field-length\": 25\n }, {\n \"name\": \"page16-doc_issue_date__day\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page16-doc_issue_date__month\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page16-doc_issue_date__year\",\n \"field-length\": 4\n }, {\n \"name\": \"page16-doc_issuer_subdivision_code__left\",\n \"field-length\": 3,\n \"text-align\": \"left\",\n }, {\n \"name\": \"page16-doc_issuer_subdivision_code__right\",\n \"field-length\": 3,\n \"text-align\": \"right\",\n }, {\n \"name\": \"page17-subject_code\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page17-postal_index\",\n \"field-length\": 6,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page17-district_type\",\n \"field-length\": 10,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page17-city_type\",\n \"field-length\": 10,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page17-city_name\",\n \"field-length\": 28,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": 
\"page17-nas_punkt_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page17-street_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page17-house_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page17-house_number\",\n \"field-length\": 8,\n \"case\": \"upper\"\n }, {\n \"name\": \"page17-corpus_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page17-corpus_number\",\n \"field-length\": 8,\n \"case\": \"upper\"\n }, {\n \"name\": \"page17-flat_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page17-flat_number\",\n \"field-length\": 8,\n \"case\": \"upper\"\n }, {\n \"name\": \"page17-phone_number\",\n \"field-length\": 20,\n }\n ],\n \"array_fields\": [{\n \"name\": \"page16-birth_place__line{{item}}\",\n \"count\": 2,\n \"field-length\": 40,\n \"case\": \"upper\"\n }, {\n \"name\": \"page16-doc_issuer__line{{item}}\",\n \"count\": 3,\n \"field-length\": [34, 40, 40],\n \"case\": \"upper\"\n }, {\n \"name\": \"page17-nas_punkt_name__line{{item}}\",\n \"count\": 2,\n \"field-length\": [28, 40],\n \"case\": \"upper\"\n }, {\n \"name\": \"page17-street_name__line{{item}}\",\n \"count\": 2,\n \"field-length\": [28, 40],\n \"case\": \"upper\"\n }, {\n \"name\": \"page17-district_name__line{{item}}\",\n \"count\": 2,\n \"field-length\": [28, 40],\n \"case\": \"upper\"\n }\n ]\n }, {\n \"page_file\": get_test_resource_name(config, \"11001/pg_0018.pdf\"),\n \"fields\": [\n {\n \"name\": \"page18-main_job_code__part1\",\n \"field-length\": 2,\n \"text-align\": \"left\",\n }, {\n \"name\": \"page18-main_job_code__part2\",\n \"field-length\": 2,\n \"text-align\": \"center\",\n }, {\n \"name\": \"page18-main_job_code__part3\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n }, {\n \"name\": \"$page\",\n \"field-length\": 3,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }\n ],\n \"array_fields\": [{\n \"name\": \"page18-job_code#{{item}}__part1\",\n \"field-length\": 2,\n \"count\": 14 * 4,\n \"text-align\": \"left\",\n }, {\n \"name\": \"page18-job_code#{{item}}__part2\",\n \"field-length\": 2,\n \"count\": 14 * 4,\n \"text-align\": \"center\",\n }, {\n \"name\": \"page18-job_code#{{item}}__part3\",\n \"field-length\": 2,\n \"count\": 14 * 4,\n \"text-align\": \"right\",\n }]\n }, {\n \"page_file\": get_test_resource_name(config, \"11001/pg_0019.pdf\"),\n \"conditions\": {\n \"holder_share.holder_type\": JSCMemberTypeEnum.JSCMT_REGISTRATOR,\n \"holder_share.company.id.initialized\": True\n },\n \"fields\": [\n {\n \"name\": \"page19-ogrn\",\n \"field-length\": 13\n }, {\n \"name\": \"page19-inn\",\n \"field-length\": 10\n }, {\n \"name\": \"$page\",\n \"field-length\": 3,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }\n ],\n \"array_fields\": [{\n \"name\": \"page19-full_name__line{{item}}\",\n \"field-length\": 40,\n \"case\": \"upper\",\n \"count\": 6\n }]\n },\n {\n \"page_file\": get_test_resource_name(config, \"11001/pg_0020.pdf\"),\n \"multiple\": True,\n \"array_field\": \"farm_companies\",\n \"fields\": [\n {\n \"name\": \"page20-ogrnip\",\n \"field-length\": 15\n }, {\n \"name\": \"page20-surname\",\n \"field-length\": 35,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page20-name\",\n \"field-length\": 35,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page20-patronymic\",\n \"field-length\": 35,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page20-inn\",\n 
\"field-length\": 12\n }, {\n \"name\": \"$page\",\n \"field-length\": 3,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }\n ]\n }, {\n \"multiple\": True,\n \"array_field\": \"founders\",\n \"sort\": {\n \"field\": \"documents_recipient_type\",\n \"order\": \"desc\"\n },\n \"variants\": {\n \"type\": RenderingVariants.TYPE_RENDER_FIRST_MATCHING,\n \"cases\": [{ # учредитель - единственное физ. лицо\n \"page_file\": [get_test_resource_name(config, \"11001/pg_0022.pdf\"),\n get_test_resource_name(config, \"11001/pg_0024.pdf\")],\n \"field_matcher_set\": \"%page_N-set-1\",\n \"conditions\": {\n \"founders[].founder_type\": FounderTypeEnum.FT_PERSON,\n \"founders\": {\n \"#size\": 1\n }\n },\n }, {\n # учредитель - физ лицо, кол-во учредителей > 1. при этом есть учредитель-физ лицо, ФИО которого совпадает с данным\n \"page_file\": [get_test_resource_name(config, \"11001/pg_0022.pdf\"),\n get_test_resource_name(config, \"11001/pg_0024.pdf\")],\n \"field_matcher_set\": \"%page_N-set-2dup\",\n \"conditions\": {\n \"founders[].founder_type\": FounderTypeEnum.FT_PERSON,\n \"founders[].duplicate_fio\": True\n }\n }, { # учредитель - физ лицо, кол-во учредителей > 1\n \"page_file\": [get_test_resource_name(config, \"11001/pg_0022.pdf\"),\n get_test_resource_name(config, \"11001/pg_0024.pdf\")],\n \"field_matcher_set\": \"%page_N-set-2\",\n \"conditions\": {\n \"founders[].founder_type\": FounderTypeEnum.FT_PERSON,\n }\n }, { # единственный учредитель - российское юр. лицо\n \"page_file\": [get_test_resource_name(config, \"11001/pg_0022.pdf\"),\n get_test_resource_name(config, \"11001/pg_0023.pdf\"),\n get_test_resource_name(config, \"11001/pg_0024.pdf\")],\n \"field_matcher_set\": \"%page_N-set-5\",\n \"conditions\": {\n \"founders[].founder_type\": FounderTypeEnum.FT_COMPANY,\n \"founders\": {\n \"#size\": 1\n }\n }\n }, { # учредитель - российское юр. лицо, кол-во учредителей > 1\n \"page_file\": [get_test_resource_name(config, \"11001/pg_0022.pdf\"),\n get_test_resource_name(config, \"11001/pg_0023.pdf\"),\n get_test_resource_name(config, \"11001/pg_0024.pdf\")],\n \"field_matcher_set\": \"%page_N-set-3\",\n \"conditions\": {\n \"founders[].founder_type\": FounderTypeEnum.FT_COMPANY,\n }\n },\n # { # учредитель - иностранное юр. 
лицо\n # \"page_file\": [get_test_resource_name(config, \"11001/pg_0022.pdf\"),\n # get_test_resource_name(config, \"11001/pg_0023.pdf\"),\n # get_test_resource_name(config, \"11001/pg_0024.pdf\")],\n # \"field_matcher_set\" : \"%page_N-set-4\",\n # \"conditions\" : {\n # \"founders[].founder_type\" : FounderTypeEnum.FT_FOREIGN_COMPANY,\n # },\n # }\n ]\n },\n \"fields\": [\n {\n \"name\": \"page22-zayavitel_type\",\n \"field-length\": 1,\n }, {\n \"name\": \"page22-ogrn\",\n \"field-length\": 13\n }, {\n \"name\": \"page22-uchreditel_inn\",\n \"field-length\": 10\n }, {\n \"name\": \"page22-upravlyayuschiy_ogrn\",\n \"field-length\": 13\n }, {\n \"name\": \"page22-upravlyayuschiy_inn\",\n \"field-length\": 10\n }, {\n \"name\": \"page22-zayavitel_surname\",\n \"field-length\": 35,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page22-zayavitel_name\",\n \"field-length\": 35,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page22-zayavitel_patronymic\",\n \"field-length\": 35,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page22-zayavitel_inn\",\n \"field-length\": 12\n }, {\n \"name\": \"page22-zayavitel_birth_date__day\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page22-zayavitel_birth_date__month\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page22-zayavitel_birth_date__year\",\n \"field-length\": 4\n }, {\n \"name\": \"page23-doc_type\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page23-doc_number\",\n \"field-length\": 25\n }, {\n \"name\": \"page23-doc_issue_date__day\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page23-doc_issue_date__month\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page23-doc_issue_date__year\",\n \"field-length\": 4\n }, {\n \"name\": \"page23-issuer_subdivision_code__left\",\n \"field-length\": 3,\n \"text-align\": \"left\",\n }, {\n \"name\": \"page23-issuer_subdivision_code__right\",\n \"field-length\": 3,\n \"text-align\": \"right\",\n }, {\n \"name\": \"page23-address_subject_code\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page23-postal_index\",\n \"field-length\": 6,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page23-address_district_type\",\n \"field-length\": 10,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page23-address_city_type\",\n \"field-length\": 10,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page23-address_city_name\",\n \"field-length\": 28,\n \"text-align\": \"left\",\n \"case\": \"upper\"\n }, {\n \"name\": \"page23-address_nas_punkt_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page23-address_street_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page23-address_house_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page23-address_house_number\",\n \"field-length\": 8,\n \"case\": \"upper\"\n }, {\n \"name\": \"page23-address_corpus_type\",\n \"field-length\": 10,\n \"case\": \"upper\"\n }, {\n \"name\": \"page23-address_corpus_number\",\n \"field-length\": 8,\n \"case\": \"upper\"\n }, {\n \"name\": \"page23-address_flat_type\",\n \"field-length\": 10,\n 
\"case\": \"upper\"\n }, {\n \"name\": \"page23-address_flat_number\",\n \"field-length\": 8,\n \"case\": \"upper\"\n }, {\n \"name\": \"page23-phone_number\",\n \"field-length\": 20,\n }, {\n \"name\": \"page23-living_country_code\",\n \"field-length\": 3,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"page23-email\",\n \"field-length\": 35,\n \"case\": \"upper\"\n }, {\n \"name\": \"page24-document_delivery_type\",\n \"field-length\": 1\n }, {\n \"name\": \"page24-zaveritel_type\",\n \"field-length\": 1\n }, {\n \"name\": \"page24-inn_zaveritelya\",\n \"field-length\": 12\n }, {\n \"name\": \"$page\",\n \"field-length\": 3,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }\n ], \"array_fields\": [\n {\n \"name\": \"page22-uchreditel_full_name__line{{item}}\",\n \"count\": 6,\n \"field-length\": 40,\n \"case\": \"upper\"\n }, {\n \"name\": \"page22-upravlyayuschiy_full_name__line{{item}}\",\n \"count\": 6,\n \"field-length\": 40,\n \"case\": \"upper\"\n }, {\n \"name\": \"page22-zayavitel_birth_place__line{{item}}\",\n \"count\": 2,\n \"field-length\": 40,\n \"case\": \"upper\"\n }, {\n \"name\": \"page23-issuer__line{{item}}\",\n \"count\": 3,\n \"field-length\": [33, 40, 40],\n \"case\": \"upper\"\n }, {\n \"name\": \"page23-address_nas_punkt_name__line{{item}}\",\n \"count\": 2,\n \"field-length\": [28, 40],\n \"case\": \"upper\"\n }, {\n \"name\": \"page23-address_street_name__line{{item}}\",\n \"count\": 2,\n \"field-length\": [28, 40],\n \"case\": \"upper\"\n }, {\n \"name\": \"page23-address_district_name__line{{item}}\",\n \"count\": 2,\n \"field-length\": [28, 40],\n \"case\": \"upper\"\n }, {\n \"name\": \"page23-living_address__line{{item}}\",\n \"count\": 2,\n \"field-length\": 40,\n \"case\": \"upper\"\n }\n ]}\n ]\n }\n\n P11001_MATCHER = {\n \"doc_name\": DocumentTypeEnum.DT_P11001,\n \"template_name\": P11001_TEMPLATE['template_name'],\n \"fields\": {\n \"page1-polnoe_naimenovanie__line{{item}}\": MultilineFieldMatcher(field_name=\"full_name\",\n prefix=u\"Общество с ограниченной ответственностью «\",\n suffix=u\"»\"),\n \"page1-sokr_naimenovanie__line{{item}}\": MultilineFieldMatcher(field_name=\"short_name\", prefix=u\"ООО «\",\n suffix=u\"»\"),\n \"page1-subject_code\": FieldAttributeMatcher(field_name=\"address\", attr_name=\"region\",\n adapter=\"RFRegionNumberAdapter\"),\n \"page1-pocht_index\": FieldAttributeMatcher(field_name=\"address\", attr_name=\"index\"),\n \"page1-district_type\": FieldAttributeMatcher(field_name=\"address\", attr_name=\"district_type\",\n adapter=\"ShortDistrictTypeAdapter\"),\n \"page1-district_name__line{{item}}\": MultilineFieldMatcher(field_name=\"address\",\n attr_name=\"district\"),\n \"page1-city_type\": FieldAttributeMatcher(field_name=\"address\", attr_name=\"city_type\",\n adapter=\"ShortCityTypeAdapter\"),\n \"page1-city_name\": FieldAttributeMatcher(field_name=\"address\", attr_name=\"city\"),\n\n \"page2-nas_punkt_type\": FieldAttributeMatcher(field_name=\"address\", attr_name=\"village_type\",\n adapter=\"ShortVillageTypeAdapter\"),\n \"page2-nas_punkt_name__line{{item}}\": MultilineFieldMatcher(field_name=\"address\",\n attr_name=\"village\"),\n \"page2-street_type\": FieldAttributeMatcher(field_name=\"address\", attr_name=\"street_type\",\n adapter=\"ShortStreetTypeAdapter\"),\n \"page2-street_name__line{{item}}\": MultilineFieldMatcher(field_name=\"address\",\n attr_name=\"street\"),\n \"page2-house_type\": FieldAttributeMatcher(field_name=\"address\", attr_name=\"house_type\"),\n 
\"page2-house_number\": FieldAttributeMatcher(field_name=\"address\", attr_name=\"house\"),\n \"page2-corpus_type\": FieldAttributeMatcher(field_name=\"address\", attr_name=\"building_type\"),\n \"page2-corpus_number\": FieldAttributeMatcher(field_name=\"address\", attr_name=\"building\"),\n \"page2-office_type\": FieldAttributeMatcher(field_name=\"address\", attr_name=\"flat_type\"),\n \"page2-office_number\": FieldAttributeMatcher(field_name=\"address\", attr_name=\"flat\"),\n \"page2-nach_capital_type\": FieldAttributeMatcher(field_name=\"starter_capital\",\n attr_name=\"capital_type\"),\n \"page2-akc_obchestvo_member_type\": FieldAttributeMatcher(field_name=\"holder_share\", attr_name=\"holder_type\",\n adapter=\"JSCMemberTypeNumberAdapter\"),\n \"page2-nach_capital_value__currency-maj\": FieldAttributeMatcher(field_name=\"starter_capital\",\n attr_name=\"value.currency_major\"),\n \"page2-nach_capital_value__currency-min\": FieldAttributeMatcher(field_name=\"starter_capital\",\n attr_name=\"value.currency_minor\"),\n\n \"page3-ogrn\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"company.ogrn\"),\n \"page3-inn\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"company.inn\"),\n \"page3-polnoe_naimenovanie__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"company.qualified_full_name\"),\n \"page3-nominal_part_value__currency-maj\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"nominal_capital.currency_major\"),\n \"page3-nominal_part_value__currency-min\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"nominal_capital.currency_minor\"),\n \"page3-razmer_doli__percent-maj\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.percent_major\"),\n \"page3-razmer_doli__percent-min\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.percent_minor\"),\n \"page3-razmer_doli__decimal-maj\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.decimal_major\"),\n \"page3-razmer_doli__decimal-min\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.decimal_minor\"),\n \"page3-razmer_doli__fraction-maj\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.fraction_major\"),\n \"page3-razmer_doli__fraction-min\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.fraction_minor\"),\n\n \"page4-full_name__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"company.full_name\"),\n \"page4-country_code\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.country_code\"),\n \"page4-reg_date__day\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.registration_date.day\"),\n \"page4-reg_date__month\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.registration_date.month\"),\n \"page4-reg_date__year\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.registration_date.year\"),\n \"page4-reg_number\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.registration_number\"),\n \"page4-reg_organ_name__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"company.registration_depart\"),\n \"page4-address__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"company.generic_address\"),\n \"page4-inn\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"company.inn\"),\n \"page4-nominal_part_value__currency-maj\": 
FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"nominal_capital.currency_major\"),\n \"page4-nominal_part_value__currency-min\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"nominal_capital.currency_minor\"),\n \"page4-part_value__percent-maj\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.percent_major\"),\n \"page4-part_value__percent-min\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.percent_minor\"),\n \"page4-part_value__decimal-maj\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.decimal_major\"),\n \"page4-part_value__decimal-min\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.decimal_minor\"),\n \"page4-part_value__fraction-left\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.fraction_major\"),\n \"page4-part_value__fraction-right\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.fraction_minor\"),\n\n \"page5-surname\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"person.surname\"),\n \"page5-name\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"person.name\"),\n \"page5-patronymic\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"person.patronymic\"),\n \"page5-inn\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"person.inn\"),\n \"page5-birth_date__day\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.birthdate.day\"),\n \"page5-birth_date__month\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.birthdate.month\"),\n \"page5-birth_date__year\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.birthdate.year\"),\n \"page5-birth_place__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"person.birthplace\"),\n \"page5-issuer__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"person.passport.issue_depart\"),\n \"page5-ogrnip\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"person.ogrnip\"),\n \"page5-doc_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.passport.document_type\"),\n \"page5-doc_number\": ConcatFieldAttributeMatcher(field_name=\"founders[]\",\n attributes=[\"person.passport.series\",\n \"person.passport.number\"],\n adapter=\"InternalPassportAdapter\"),\n \"page5-issue_date__day\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.passport.issue_date.day\"),\n \"page5-issue_date__month\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.passport.issue_date.month\"),\n \"page5-issue_date__year\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.passport.issue_date.year\"),\n \"page5-subdivision_code__left\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.passport.depart_code\"),\n \"page5-subdivision_code__right\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.passport.depart_code\"),\n\n \"page6-postal_index\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.index\"),\n \"page6-subject_code\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"person.address.region\",\n adapter=\"RFRegionNumberAdapter\"),\n \"page6-district_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.district_type\",\n adapter=\"ShortDistrictTypeAdapter\"),\n \"page6-city_type\": 
FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"person.address.city_type\",\n adapter=\"ShortCityTypeAdapter\"),\n \"page6-city_name\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.city\"),\n \"page6-nas_punkt_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.village_type\",\n adapter=\"ShortVillageTypeAdapter\"),\n \"page6-street_type\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"person.address.street_type\",\n adapter=\"ShortStreetTypeAdapter\"),\n \"page6-building_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.house_type\"),\n \"page6-korpus_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.building_type\"),\n \"page6-building_number\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.house\"),\n \"page6-korpus_number\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.building\"),\n \"page6-flat_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.flat_type\"),\n \"page6-flat_number\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.flat\"),\n \"page6-living_country_code\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.living_country_code\"),\n\n \"page6-nominal_part_value__currency-maj\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"nominal_capital.currency_major\"),\n \"page6-nominal_part_value__currency-min\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"nominal_capital.currency_minor\"),\n \"page6-part_value__percent-maj\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.percent_major\"),\n \"page6-part_value__percent-min\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.percent_minor\"),\n \"page6-part_value__decimal-maj\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.decimal_major\"),\n \"page6-part_value__decimal-min\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.decimal_minor\"),\n \"page6-part_value__fraction-left\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.fraction_major\"),\n \"page6-part_value__fraction-right\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.fraction_minor\"),\n\n \"page6-district_name__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.district\"),\n \"page6-nas_punkt_name__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.village\"),\n \"page6-street_name__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.street\"),\n \"page6-living_address__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"person.living_address\"),\n\n \"page7-uchreditel_type\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"gov_founder_type\",\n adapter=\"GovernmentFounderTypeNumberAdapter\"),\n \"page7-uchreditel_subject_code\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"region\",\n adapter=\"RFRegionNumberAdapter\"),\n \"page7-uchreditel_munic_obraz_name__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"name\"),\n \"page7-nominal_part_value__currency-maj\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"nominal_capital.currency_major\"),\n 
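# The share and nominal-value boxes on the form come in *-maj / *-min pairs: the integer\n            # part goes into the right-aligned \"maj\" box and the fractional part into the\n            # left-aligned \"min\" box, so a 12.5 percent share would render as \"12\" + \"5\"\n            # (an illustration inferred from the maj/min field pairs declared in the template).\n            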
\"page7-nominal_part_value__currency-min\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"nominal_capital.currency_minor\"),\n \"page7-nominal_part_value__percent-maj\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.percent_major\"),\n \"page7-nominal_part_value__percent-min\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.percent_minor\"),\n \"page7-nominal_part_value__decimal-maj\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.decimal_major\"),\n \"page7-nominal_part_value__decimal-min\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.decimal_minor\"),\n \"page7-nominal_part_value__fraction-left\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.fraction_major\"),\n \"page7-nominal_part_value__fraction-right\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"share.fraction_minor\"),\n\n \"page8-ogrn\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"company.ogrn\"),\n \"page8-inn\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"company.inn\"),\n \"page8-full_name__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"company.qualified_full_name\"),\n \"page8-surname\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"person.surname\"),\n \"page8-name\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"person.name\"),\n \"page8-patronymic\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"person.patronymic\"),\n \"page8-birth_date__day\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.birthdate.day\"),\n \"page8-birth_date__month\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.birthdate.month\"),\n \"page8-birth_date__year\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.birthdate.year\"),\n \"page8-birth_place__{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"person.birthplace\"),\n \"page8-person_inn\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"person.inn\"),\n\n \"page9-doc_type_code\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.passport.document_type\"),\n \"page9-doc_number\": ConcatFieldAttributeMatcher(field_name=\"founders[]\",\n attributes=[\"person.passport.series\",\n \"person.passport.number\"],\n adapter=\"InternalPassportAdapter\"),\n \"page9-doc_issue_date__day\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.passport.issue_date.day\"),\n \"page9-doc_issue_date__month\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.passport.issue_date.month\"),\n \"page9-doc_issue_date__year\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.passport.issue_date.year\"),\n \"page9-doc_issuer__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"person.passport.issue_depart\"),\n \"page9-doc_issuer_subdivision_code__left\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.passport.depart_code\"),\n \"page9-doc_issuer_subdivision_code__right\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.passport.depart_code\"),\n\n \"page9-postal_index\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.index\"),\n \"page9-subject_code\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"person.address.region\",\n adapter=\"RFRegionNumberAdapter\"),\n 
\"page9-district_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.district_type\",\n adapter=\"ShortDistrictTypeAdapter\"),\n \"page9-district_name__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.district\"),\n \"page9-city_type\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"person.address.city_type\",\n adapter=\"ShortCityTypeAdapter\"),\n \"page9-street_type\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"person.address.street_type\",\n adapter=\"ShortStreetTypeAdapter\"),\n \"page9-street_name__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.street\"),\n \"page9-house_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.house_type\"),\n \"page9-house_number\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.house\"),\n \"page9-corpus_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.building_type\"),\n \"page9-corpus_number\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.building\"),\n \"page9-flat_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.flat_type\"),\n \"page9-flat_number\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.flat\"),\n \"page9-nas_punkt_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.village_type\",\n adapter=\"ShortVillageTypeAdapter\"),\n \"page9-nas_punkt_name__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.village\"),\n \"page9-city_name\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.city\"),\n \"page9-living_country_code\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.living_country_code\"),\n \"page9-living_address__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"person.living_address\"),\n\n \"page10-invest_fond_name__line{{item}}\": MultilineFieldMatcher(field_name=\"uit[]\",\n attr_name=\"name\"),\n \"page10-invest_fond_uprav_company_ogrn\": FieldAttributeMatcher(field_name=\"uit[]\",\n attr_name=\"company.ogrn\"),\n \"page10-invest_fond_uprav_company_inn\": FieldAttributeMatcher(field_name=\"uit[]\",\n attr_name=\"company.inn\"),\n \"page10-invest_fond_uprav_company_full_name__line{{item}}\": MultilineFieldMatcher(field_name=\"uit[]\",\n attr_name=\"company.qualified_full_name\"),\n\n \"page10-nominal_value__currency_maj\": FieldAttributeMatcher(field_name=\"uit[]\",\n attr_name=\"nominal_capital.currency_major\"),\n \"page10-nominal_value__currency_min\": FieldAttributeMatcher(field_name=\"uit[]\",\n attr_name=\"nominal_capital.currency_minor\"),\n \"page10-nominal_value__percent_maj\": FieldAttributeMatcher(field_name=\"uit[]\",\n attr_name=\"share.percent_major\"),\n \"page10-nominal_value__percent_min\": FieldAttributeMatcher(field_name=\"uit[]\",\n attr_name=\"share.percent_minor\"),\n \"page10-nominal_value__decimal_maj\": FieldAttributeMatcher(field_name=\"uit[]\",\n attr_name=\"share.decimal_major\"),\n \"page10-nominal_value__decimal_min\": FieldAttributeMatcher(field_name=\"uit[]\",\n attr_name=\"share.decimal_minor\"),\n \"page10-nominal_value__fraction_maj\": FieldAttributeMatcher(field_name=\"uit[]\",\n attr_name=\"share.fraction_major\"),\n \"page10-nominal_value__fraction_min\": 
FieldAttributeMatcher(field_name=\"uit[]\",\n attr_name=\"share.fraction_minor\"),\n\n \"page11-surname\": FieldAttributeMatcher(field_name=\"general_manager\", attr_name=\"surname\"),\n \"page11-name\": FieldAttributeMatcher(field_name=\"general_manager\", attr_name=\"name\"),\n \"page11-patronymic\": FieldAttributeMatcher(field_name=\"general_manager\", attr_name=\"patronymic\"),\n \"page11-inn\": FieldAttributeMatcher(field_name=\"general_manager\", attr_name=\"inn\"),\n \"page11-birth_date__day\": FieldAttributeMatcher(field_name=\"general_manager\",\n attr_name=\"birthdate.day\"),\n \"page11-birth_date__month\": FieldAttributeMatcher(field_name=\"general_manager\",\n attr_name=\"birthdate.month\"),\n \"page11-birth_date__year\": FieldAttributeMatcher(field_name=\"general_manager\",\n attr_name=\"birthdate.year\"),\n \"page11-birth_place__line{{item}}\": MultilineFieldMatcher(field_name=\"general_manager\",\n attr_name=\"birthplace\"),\n \"page11-doc_issuer__line{{item}}\": MultilineFieldMatcher(field_name=\"general_manager\",\n attr_name=\"passport.issue_depart\"),\n \"page11-doc_type_code\": FieldAttributeMatcher(field_name=\"general_manager\",\n attr_name=\"passport.document_type\"),\n \"page11-doc_number\": ConcatFieldAttributeMatcher(field_name=\"general_manager\",\n attributes=[\"passport.series\",\n \"passport.number\"],\n adapter=\"InternalPassportAdapter\"),\n \"page11-doc_issue_date__day\": FieldAttributeMatcher(field_name=\"general_manager\",\n attr_name=\"passport.issue_date.day\"),\n \"page11-doc_issue_date__month\": FieldAttributeMatcher(field_name=\"general_manager\",\n attr_name=\"passport.issue_date.month\"),\n \"page11-doc_issue_date__year\": FieldAttributeMatcher(field_name=\"general_manager\",\n attr_name=\"passport.issue_date.year\"),\n \"page11-doc_issuer_subdivision_code__left\": FieldAttributeMatcher(field_name=\"general_manager\",\n attr_name=\"passport.depart_code\"),\n \"page11-doc_issuer_subdivision_code__right\": FieldAttributeMatcher(field_name=\"general_manager\",\n attr_name=\"passport.depart_code\"),\n \"page11-title__line{{item}}\": MultilineFieldMatcher(field_name=\"general_manager_caption\"),\n\n \"page12-postal_index\": FieldAttributeMatcher(field_name=\"general_manager\",\n attr_name=\"address.index\"),\n \"page12-subject_code\": FieldAttributeMatcher(field_name=\"general_manager\", attr_name=\"address.region\",\n adapter=\"RFRegionNumberAdapter\"),\n \"page12-district_type\": FieldAttributeMatcher(field_name=\"general_manager\",\n attr_name=\"address.district_type\",\n adapter=\"ShortDistrictTypeAdapter\"),\n \"page12-city_type\": FieldAttributeMatcher(field_name=\"general_manager\", attr_name=\"address.city_type\",\n adapter=\"ShortCityTypeAdapter\"),\n \"page12-city_name\": FieldAttributeMatcher(field_name=\"general_manager\", attr_name=\"address.city\"),\n \"page12-nas_punkt_type\": FieldAttributeMatcher(field_name=\"general_manager\",\n attr_name=\"address.village_type\",\n adapter=\"ShortVillageTypeAdapter\"),\n \"page12-street_type\": FieldAttributeMatcher(field_name=\"general_manager\", attr_name=\"address.street_type\",\n adapter=\"ShortStreetTypeAdapter\"),\n \"page12-house_type\": FieldAttributeMatcher(field_name=\"general_manager\",\n attr_name=\"address.house_type\"),\n \"page12-corpus_type\": FieldAttributeMatcher(field_name=\"general_manager\",\n attr_name=\"address.building_type\"),\n \"page12-house_number\": FieldAttributeMatcher(field_name=\"general_manager\",\n attr_name=\"address.house\"),\n \"page12-corpus_number\": 
FieldAttributeMatcher(field_name=\"general_manager\",\n attr_name=\"address.building\"),\n \"page12-flat_type\": FieldAttributeMatcher(field_name=\"general_manager\",\n attr_name=\"address.flat_type\"),\n \"page12-flat_number\": FieldAttributeMatcher(field_name=\"general_manager\",\n attr_name=\"address.flat\"),\n \"page12-living_country_code\": ConstantMatcher(value=\" \"),\n \"page12-phone_number\": FieldAttributeMatcher(field_name=\"general_manager\",\n attr_name=\"phone.normalised\"),\n \"page12-district_name__line{{item}}\": MultilineFieldMatcher(field_name=\"general_manager\",\n attr_name=\"address.district\"),\n \"page12-nas_punkt_name__line{{item}}\": MultilineFieldMatcher(field_name=\"general_manager\",\n attr_name=\"address.village\"),\n \"page12-street_name__line{{item}}\": MultilineFieldMatcher(field_name=\"general_manager\",\n attr_name=\"address.street\"),\n \"page12-living_address__line{{item}}\": MultilineFieldMatcher(field_name=\"general_manager\",\n attr_name=\"living_address\"),\n\n \"%page_Je-set-1\": FieldSetMatcher(fields={ # management company - russian company\n \"page13-ogrn\": FieldAttributeMatcher(\n field_name=\"management_company\",\n attr_name=\"company.ogrn\"),\n \"page13-inn\": FieldAttributeMatcher(\n field_name=\"management_company\",\n attr_name=\"company.inn\"),\n \"page13-full_name__line{{item}}\": MultilineFieldMatcher(\n field_name=\"management_company\",\n attr_name=\"company.qualified_full_name\"),\n\n \"page14-postal_index\": FieldAttributeMatcher(\n field_name=\"management_company\",\n attr_name=\"company.address.index\"),\n \"page14-subject_code\": FieldAttributeMatcher(\n field_name=\"management_company\",\n attr_name=\"company.address.region\",\n adapter=\"RFRegionNumberAdapter\"),\n \"page14-district_type\": FieldAttributeMatcher(\n field_name=\"management_company\",\n attr_name=\"company.address.district_type\",\n adapter=\"ShortDistrictTypeAdapter\"),\n \"page14-city_type\": FieldAttributeMatcher(\n field_name=\"management_company\",\n attr_name=\"company.address.city_type\",\n adapter=\"ShortCityTypeAdapter\"),\n \"page14-city_name\": FieldAttributeMatcher(\n field_name=\"management_company\",\n attr_name=\"company.address.city\"),\n \"page14-nas_punkt_type\": FieldAttributeMatcher(\n field_name=\"management_company\",\n attr_name=\"company.address.village_type\",\n adapter=\"ShortVillageTypeAdapter\"),\n \"page14-street_type\": FieldAttributeMatcher(\n field_name=\"management_company\",\n attr_name=\"company.address.street_type\",\n adapter=\"ShortStreetTypeAdapter\"),\n \"page14-house_type\": FieldAttributeMatcher(\n field_name=\"management_company\",\n attr_name=\"company.address.house_type\"),\n \"page14-corpus_type\": FieldAttributeMatcher(\n field_name=\"management_company\",\n attr_name=\"company.address.building_type\"),\n \"page14-house_number\": FieldAttributeMatcher(\n field_name=\"management_company\",\n attr_name=\"company.address.house\"),\n \"page14-corpus_number\": FieldAttributeMatcher(\n field_name=\"management_company\",\n attr_name=\"company.address.building\"),\n \"page14-flat_type\": FieldAttributeMatcher(\n field_name=\"management_company\",\n attr_name=\"company.address.flat_type\"),\n \"page14-flat_number\": FieldAttributeMatcher(\n field_name=\"management_company\",\n attr_name=\"company.address.flat\"),\n \"page14-district_name__line{{item}}\": MultilineFieldMatcher(\n field_name=\"management_company\",\n attr_name=\"company.address.district\"),\n \"page14-nas_punkt_name__line{{item}}\": MultilineFieldMatcher(\n 
field_name=\"management_company\",\n attr_name=\"company.address.village\"),\n \"page14-street_name__line{{item}}\": MultilineFieldMatcher(\n field_name=\"management_company\",\n attr_name=\"company.address.street\"),\n \"page14-phone_number\": FieldAttributeMatcher(\n field_name=\"management_company\",\n attr_name=\"company.phone.normalised\"),\n\n }),\n \"%page_Je-set-2\": FieldSetMatcher(\n fields={ # management company - foreign company [ + russian branch or agency]\n \"page13-inn\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"foreign_company.inn\"),\n \"page13-full_name__line{{item}}\": MultilineFieldMatcher(field_name=\"management_company\",\n attr_name=\"foreign_company.qualified_full_name\"),\n \"page13-country_code\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"foreign_company.country_code\"),\n \"page13-reg_date__day\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"foreign_company.registration_date.day\"),\n \"page13-reg_date__month\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"foreign_company.registration_date.month\"),\n \"page13-reg_date__year\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"foreign_company.registration_date.year\"),\n \"page13-reg_number\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"foreign_company.registration_number\"),\n \"page13-reg_organ_name__line{{item}}\": MultilineFieldMatcher(field_name=\"management_company\",\n attr_name=\"foreign_company.registration_depart\"),\n \"page13-reg_organ_address__line{{item}}\": MultilineFieldMatcher(\n field_name=\"management_company\", attr_name=\"foreign_company.generic_address\"),\n\n \"page13-predstav_full_name__line{{item}}\": MultilineFieldMatcher(\n field_name=\"management_company\",\n attr_name=\"russian_branch_or_agency.qualified_full_name\"),\n\n \"page14-postal_index\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_branch_or_agency.address.index\"),\n \"page14-subject_code\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_branch_or_agency.address.region\",\n adapter=\"RFRegionNumberAdapter\"),\n \"page14-district_type\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_branch_or_agency.address.district_type\",\n adapter=\"ShortDistrictTypeAdapter\"),\n \"page14-city_type\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_branch_or_agency.address.city_type\",\n adapter=\"ShortCityTypeAdapter\"),\n \"page14-city_name\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_branch_or_agency.address.city\"),\n \"page14-nas_punkt_type\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_branch_or_agency.address.village_type\",\n adapter=\"ShortVillageTypeAdapter\"),\n \"page14-street_type\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_branch_or_agency.address.street_type\",\n adapter=\"ShortStreetTypeAdapter\"),\n \"page14-house_type\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_branch_or_agency.address.house_type\"),\n \"page14-corpus_type\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_branch_or_agency.address.building_type\"),\n \"page14-house_number\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_branch_or_agency.address.house\"),\n 
\"page14-corpus_number\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_branch_or_agency.address.building\"),\n \"page14-flat_type\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_branch_or_agency.address.flat_type\"),\n \"page14-flat_number\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_branch_or_agency.address.flat\"),\n \"page14-district_name__line{{item}}\": MultilineFieldMatcher(field_name=\"management_company\",\n attr_name=\"russian_branch_or_agency.address.district\"),\n \"page14-nas_punkt_name__line{{item}}\": MultilineFieldMatcher(field_name=\"management_company\",\n attr_name=\"russian_branch_or_agency.address.village\"),\n \"page14-street_name__line{{item}}\": MultilineFieldMatcher(field_name=\"management_company\",\n attr_name=\"russian_branch_or_agency.address.street\"),\n \"page14-phone_number\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_branch_or_agency.phone.normalised\"),\n\n }), # management company - foreign company [ + russian person as agent]\n \"%page_Je-set-3\": FieldSetMatcher(fields={\n \"page13-inn\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"foreign_company.inn\"),\n \"page13-full_name__line{{item}}\": MultilineFieldMatcher(field_name=\"management_company\",\n attr_name=\"foreign_company.qualified_full_name\"),\n \"page13-country_code\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"foreign_company.country_code\"),\n \"page13-reg_date__day\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"foreign_company.registration_date.day\"),\n \"page13-reg_date__month\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"foreign_company.registration_date.month\"),\n \"page13-reg_date__year\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"foreign_company.registration_date.year\"),\n \"page13-reg_number\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"foreign_company.registration_number\"),\n \"page13-reg_organ_name__line{{item}}\": MultilineFieldMatcher(field_name=\"management_company\",\n attr_name=\"foreign_company.registration_depart\"),\n \"page13-reg_organ_address__line{{item}}\": MultilineFieldMatcher(field_name=\"management_company\",\n attr_name=\"foreign_company.generic_address\"),\n\n \"page14-surname\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.surname\"),\n \"page14-name\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.name\"),\n \"page14-patronymic\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.patronymic\"),\n \"page14-inn\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.inn\"),\n \"page14-birth_date__day\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.birthdate.day\"),\n \"page14-birth_date__month\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.birthdate.month\"),\n \"page14-birth_date__year\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.birthdate.year\"),\n \"page14-birth_place__line{{item}}\": MultilineFieldMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.birthplace\"),\n\n \"page15-doc_issuer__line{{item}}\": MultilineFieldMatcher(field_name=\"management_company\",\n 
attr_name=\"russian_agent.passport.issue_depart\"),\n \"page15-doc_type_code\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.passport.document_type\"),\n \"page15-doc_number\": ConcatFieldAttributeMatcher(field_name=\"management_company\",\n attributes=[\"russian_agent.passport.series\",\n \"russian_agent.passport.number\"],\n adapter=\"InternalPassportAdapter\"),\n \"page15-doc_issue_date__day\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.passport.issue_date.day\"),\n \"page15-doc_issue_date__month\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.passport.issue_date.month\"),\n \"page15-doc_issue_date__year\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.passport.issue_date.year\"),\n \"page15-doc_issuer_subdivision_code__left\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.passport.depart_code\"),\n \"page15-doc_issuer_subdivision_code__right\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.passport.depart_code\"),\n \"page15-postal_index\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.address.index\"),\n \"page15-subject_code\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.address.region\",\n adapter=\"RFRegionNumberAdapter\"),\n \"page15-district_type\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.address.district_type\",\n adapter=\"ShortDistrictTypeAdapter\"),\n \"page15-city_type\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.address.city_type\",\n adapter=\"ShortCityTypeAdapter\"),\n \"page15-city_name\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.address.city\"),\n \"page15-nas_punkt_type\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.address.village_type\",\n adapter=\"ShortVillageTypeAdapter\"),\n \"page15-street_type\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.address.street_type\",\n adapter=\"ShortStreetTypeAdapter\"),\n \"page15-house_type\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.address.house_type\"),\n \"page15-corpus_type\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.address.building_type\"),\n \"page15-house_number\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.address.house\"),\n \"page15-corpus_number\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.address.building\"),\n \"page15-flat_type\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.address.flat_type\"),\n \"page15-flat_number\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.address.flat\"),\n \"page15-living_country_code\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.living_country_code\"),\n \"page15-phone_number\": FieldAttributeMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.phone.normalised\"),\n \"page15-district_name__line{{item}}\": MultilineFieldMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.address.district\"),\n \"page15-nas_punkt_name__line{{item}}\": 
MultilineFieldMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.address.village\"),\n \"page15-street_name__line{{item}}\": MultilineFieldMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.address.street\"),\n \"page15-living_address__line{{item}}\": MultilineFieldMatcher(field_name=\"management_company\",\n attr_name=\"russian_agent.living_address\"),\n }),\n\n \"page16-ogrnip\": FieldAttributeMatcher(field_name=\"manager\", attr_name=\"ogrnip\"),\n \"page16-surname\": FieldAttributeMatcher(field_name=\"manager\", attr_name=\"surname\"),\n \"page16-name\": FieldAttributeMatcher(field_name=\"manager\", attr_name=\"name\"),\n \"page16-patronymic\": FieldAttributeMatcher(field_name=\"manager\", attr_name=\"patronymic\"),\n \"page16-inn\": FieldAttributeMatcher(field_name=\"manager\", attr_name=\"inn\"),\n \"page16-birth_date__day\": FieldAttributeMatcher(field_name=\"manager\", attr_name=\"birthdate.day\"),\n \"page16-birth_date__month\": FieldAttributeMatcher(field_name=\"manager\",\n attr_name=\"birthdate.month\"),\n \"page16-birth_date__year\": FieldAttributeMatcher(field_name=\"manager\",\n attr_name=\"birthdate.year\"),\n \"page16-birth_place__line{{item}}\": MultilineFieldMatcher(field_name=\"manager\",\n attr_name=\"birthplace\"),\n \"page16-doc_issuer__line{{item}}\": MultilineFieldMatcher(field_name=\"manager\",\n attr_name=\"passport.issue_depart\"),\n \"page16-doc_type_code\": FieldAttributeMatcher(field_name=\"manager\",\n attr_name=\"passport.document_type\"),\n \"page16-doc_type_number\": ConcatFieldAttributeMatcher(field_name=\"manager\", attributes=[\"passport.series\",\n \"passport.number\"],\n adapter=\"InternalPassportAdapter\"),\n \"page16-doc_issue_date__day\": FieldAttributeMatcher(field_name=\"manager\",\n attr_name=\"passport.issue_date.day\"),\n \"page16-doc_issue_date__month\": FieldAttributeMatcher(field_name=\"manager\",\n attr_name=\"passport.issue_date.month\"),\n \"page16-doc_issue_date__year\": FieldAttributeMatcher(field_name=\"manager\",\n attr_name=\"passport.issue_date.year\"),\n \"page16-doc_issuer_subdivision_code__left\": FieldAttributeMatcher(field_name=\"manager\",\n attr_name=\"passport.depart_code\"),\n \"page16-doc_issuer_subdivision_code__right\": FieldAttributeMatcher(field_name=\"manager\",\n attr_name=\"passport.depart_code\"),\n\n \"page17-postal_index\": FieldAttributeMatcher(field_name=\"manager\", attr_name=\"address.index\"),\n \"page17-subject_code\": FieldAttributeMatcher(field_name=\"manager\", attr_name=\"address.region\",\n adapter=\"RFRegionNumberAdapter\"),\n \"page17-district_type\": FieldAttributeMatcher(field_name=\"manager\", attr_name=\"address.district_type\",\n adapter=\"ShortDistrictTypeAdapter\"),\n \"page17-city_type\": FieldAttributeMatcher(field_name=\"manager\", attr_name=\"address.city_type\",\n adapter=\"ShortCityTypeAdapter\"),\n \"page17-city_name\": FieldAttributeMatcher(field_name=\"manager\", attr_name=\"address.city\"),\n \"page17-nas_punkt_type\": FieldAttributeMatcher(field_name=\"manager\", attr_name=\"address.village_type\",\n adapter=\"ShortVillageTypeAdapter\"),\n \"page17-street_type\": FieldAttributeMatcher(field_name=\"manager\", attr_name=\"address.street_type\",\n adapter=\"ShortStreetTypeAdapter\"),\n \"page17-house_type\": FieldAttributeMatcher(field_name=\"manager\", attr_name=\"address.house_type\"),\n \"page17-corpus_type\": FieldAttributeMatcher(field_name=\"manager\",\n attr_name=\"address.building_type\"),\n \"page17-house_number\": 
FieldAttributeMatcher(field_name=\"manager\", attr_name=\"address.house\"),\n \"page17-corpus_number\": FieldAttributeMatcher(field_name=\"manager\", attr_name=\"address.building\"),\n \"page17-flat_type\": FieldAttributeMatcher(field_name=\"manager\", attr_name=\"address.flat_type\"),\n \"page17-flat_number\": FieldAttributeMatcher(field_name=\"manager\", attr_name=\"address.flat\"),\n \"page17-living_country_code\": FieldAttributeMatcher(field_name=\"manager\",\n attr_name=\"living_country_code\"),\n \"page17-phone_number\": FieldAttributeMatcher(field_name=\"manager\", attr_name=\"phone.normalised\"),\n \"page17-district_name__line{{item}}\": MultilineFieldMatcher(field_name=\"manager\",\n attr_name=\"address.district\"),\n \"page17-nas_punkt_name__line{{item}}\": MultilineFieldMatcher(field_name=\"manager\",\n attr_name=\"address.village\"),\n \"page17-street_name__line{{item}}\": MultilineFieldMatcher(field_name=\"manager\",\n attr_name=\"address.street\"),\n\n \"page18-main_job_code__part1\": SimpleMatcher(field_name=\"job_main_code\"),\n \"page18-main_job_code__part2\": SimpleMatcher(field_name=\"job_main_code\"),\n \"page18-main_job_code__part3\": SimpleMatcher(field_name=\"job_main_code\"),\n \"page18-job_code#{{item}}__part1\": ArrayAttributeMatcher(field_name=\"job_code_array\",\n sorted=\"true\"),\n \"page18-job_code#{{item}}__part2\": ArrayAttributeMatcher(field_name=\"job_code_array\",\n sorted=\"true\"),\n \"page18-job_code#{{item}}__part3\": ArrayAttributeMatcher(field_name=\"job_code_array\",\n sorted=\"true\"),\n\n \"page19-ogrn\": FieldAttributeMatcher(field_name=\"holder_share\", attr_name=\"company.ogrn\"),\n \"page19-inn\": FieldAttributeMatcher(field_name=\"holder_share\", attr_name=\"company.inn\"),\n \"page19-full_name__line{{item}}\": MultilineFieldMatcher(field_name=\"holder_share\",\n attr_name=\"company.qualified_full_name\"),\n\n \"page20-ogrnip\": FieldAttributeMatcher(field_name=\"farm_companies[]\", attr_name=\"ogrnip\"),\n \"page20-surname\": FieldAttributeMatcher(field_name=\"farm_companies[]\",\n attr_name=\"person.surname\"),\n \"page20-name\": FieldAttributeMatcher(field_name=\"farm_companies[]\", attr_name=\"person.name\"),\n \"page20-patronymic\": FieldAttributeMatcher(field_name=\"farm_companies[]\",\n attr_name=\"person.patronymic\"),\n \"page20-inn\": FieldAttributeMatcher(field_name=\"farm_companies[]\", attr_name=\"person.inn\"),\n\n # page21-organization_full_name\n # page21-title\n # page21-fio\n\n \"%page_N-set-1\": FieldSetMatcher(fields={\n \"page22-zayavitel_type\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"founder_type\",\n adapter=\"FounderTypeNumberAdapter\"),\n \"page24-document_delivery_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"documents_recipient_type\",\n adapter=\"DocumentDeliveryNumberAdapter\"),\n }),\n \"%page_N-set-2\": FieldSetMatcher(fields={\n \"page22-zayavitel_type\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"founder_type\",\n adapter=\"FounderTypeNumberAdapter\"),\n\n \"page22-zayavitel_surname\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.surname\"),\n \"page22-zayavitel_name\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.name\"),\n \"page22-zayavitel_patronymic\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.patronymic\"),\n\n \"page24-document_delivery_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"documents_recipient_type\",\n 
adapter=\"DocumentDeliveryNumberAdapter\"),\n }),\n \"%page_N-set-2dup\": FieldSetMatcher(fields={\n \"page22-zayavitel_type\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"founder_type\",\n adapter=\"FounderTypeNumberAdapter\"),\n\n \"page22-zayavitel_surname\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.surname\"),\n \"page22-zayavitel_name\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.name\"),\n \"page22-zayavitel_patronymic\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.patronymic\"),\n\n \"page22-zayavitel_birth_date__day\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.birthdate.day\"),\n \"page22-zayavitel_birth_date__month\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.birthdate.month\"),\n \"page22-zayavitel_birth_date__year\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.birthdate.year\"),\n \"page22-zayavitel_birth_place__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"person.birthplace\"),\n\n \"page24-document_delivery_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"documents_recipient_type\",\n adapter=\"DocumentDeliveryNumberAdapter\"),\n }),\n \"%page_N-set-3\": FieldSetMatcher(fields={\n \"page22-zayavitel_type\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"founder_type\",\n adapter=\"FounderTypeNumberAdapter\"),\n\n \"page22-ogrn\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"company.ogrn\"),\n \"page22-uchreditel_inn\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.inn\"),\n \"page22-uchreditel_full_name__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"company.qualified_full_name\"),\n\n \"page22-zayavitel_surname\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.surname\"),\n \"page22-zayavitel_name\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.name\"),\n \"page22-zayavitel_patronymic\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.patronymic\"),\n \"page22-zayavitel_inn\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.inn\"),\n \"page22-zayavitel_birth_date__day\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.birthdate.day\"),\n \"page22-zayavitel_birth_date__month\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.birthdate.month\"),\n \"page22-zayavitel_birth_date__year\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.birthdate.year\"),\n \"page22-zayavitel_birth_place__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.birthplace\"),\n\n \"page23-issuer__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.passport.issue_depart\"),\n \"page23-doc_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.passport.document_type\"),\n \"page23-doc_number\": ConcatFieldAttributeMatcher(field_name=\"founders[]\",\n attributes=[\"company.general_manager.passport.series\",\n \"company.general_manager.passport.number\"],\n adapter=\"InternalPassportAdapter\"),\n \"page23-doc_issue_date__day\": FieldAttributeMatcher(field_name=\"founders[]\",\n 
attr_name=\"company.general_manager.passport.issue_date.day\"),\n \"page23-doc_issue_date__month\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.passport.issue_date.month\"),\n \"page23-doc_issue_date__year\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.passport.issue_date.year\"),\n \"page23-issuer_subdivision_code__left\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.passport.depart_code\"),\n \"page23-issuer_subdivision_code__right\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.passport.depart_code\"),\n \"page23-postal_index\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.index\"),\n \"page23-address_subject_code\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.region\",\n adapter=\"RFRegionNumberAdapter\"),\n \"page23-address_district_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.district_type\",\n adapter=\"ShortDistrictTypeAdapter\"),\n \"page23-address_city_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.city_type\",\n adapter=\"ShortCityTypeAdapter\"),\n \"page23-address_city_name\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.city\"),\n \"page23-address_nas_punkt_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.village_type\",\n adapter=\"ShortVillageTypeAdapter\"),\n \"page23-address_street_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.street_type\",\n adapter=\"ShortStreetTypeAdapter\"),\n \"page23-address_house_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.house_type\"),\n \"page23-address_corpus_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.building_type\"),\n \"page23-address_house_number\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.house\"),\n \"page23-address_corpus_number\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.building\"),\n \"page23-address_flat_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.flat_type\"),\n \"page23-address_flat_number\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.flat\"),\n \"page23-living_country_code\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.living_country_code\"),\n \"page23-phone_number\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.phone.normalised\"),\n \"page23-email\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.email\"),\n \"page23-address_district_name__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.district\"),\n \"page23-address_nas_punkt_name__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.village\"),\n \"page23-address_street_name__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n 
attr_name=\"company.general_manager.address.street\"),\n \"page23-living_address__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.living_address\"),\n\n \"page24-document_delivery_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"documents_recipient_type\",\n adapter=\"DocumentDeliveryNumberAdapter\"),\n }),\n \"%page_N-set-5\": FieldSetMatcher(fields={\n \"page22-zayavitel_type\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"founder_type\",\n adapter=\"FounderTypeNumberAdapter\"),\n\n \"page22-zayavitel_surname\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.surname\"),\n \"page22-zayavitel_name\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.name\"),\n \"page22-zayavitel_patronymic\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.patronymic\"),\n \"page22-zayavitel_inn\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.inn\"),\n \"page22-zayavitel_birth_date__day\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.birthdate.day\"),\n \"page22-zayavitel_birth_date__month\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.birthdate.month\"),\n \"page22-zayavitel_birth_date__year\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.birthdate.year\"),\n \"page22-zayavitel_birth_place__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.birthplace\"),\n\n \"page23-issuer__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.passport.issue_depart\"),\n \"page23-doc_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.passport.document_type\"),\n \"page23-doc_number\": ConcatFieldAttributeMatcher(field_name=\"founders[]\",\n attributes=[\"company.general_manager.passport.series\",\n \"company.general_manager.passport.number\"],\n adapter=\"InternalPassportAdapter\"),\n \"page23-doc_issue_date__day\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.passport.issue_date.day\"),\n \"page23-doc_issue_date__month\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.passport.issue_date.month\"),\n \"page23-doc_issue_date__year\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.passport.issue_date.year\"),\n \"page23-issuer_subdivision_code__left\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.passport.depart_code\"),\n \"page23-issuer_subdivision_code__right\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.passport.depart_code\"),\n \"page23-postal_index\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.index\"),\n \"page23-address_subject_code\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.region\",\n adapter=\"RFRegionNumberAdapter\"),\n \"page23-address_district_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.district_type\",\n adapter=\"ShortDistrictTypeAdapter\"),\n \"page23-address_city_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n 
attr_name=\"company.general_manager.address.city_type\",\n adapter=\"ShortCityTypeAdapter\"),\n \"page23-address_city_name\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.city\"),\n \"page23-address_nas_punkt_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.village_type\",\n adapter=\"ShortVillageTypeAdapter\"),\n \"page23-address_street_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.street_type\",\n adapter=\"ShortStreetTypeAdapter\"),\n \"page23-address_house_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.house_type\"),\n \"page23-address_corpus_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.building_type\"),\n \"page23-address_house_number\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.house\"),\n \"page23-address_corpus_number\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.building\"),\n \"page23-address_flat_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.flat_type\"),\n \"page23-address_flat_number\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.flat\"),\n \"page23-living_country_code\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.living_country_code\"),\n \"page23-phone_number\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.phone.normalised\"),\n \"page23-email\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.email\"),\n \"page23-address_district_name__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.district\"),\n \"page23-address_nas_punkt_name__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.village\"),\n \"page23-address_street_name__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.address.street\"),\n \"page23-living_address__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"company.general_manager.living_address\"),\n\n \"page24-document_delivery_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"documents_recipient_type\",\n adapter=\"DocumentDeliveryNumberAdapter\"),\n }),\n \"%page_N-set-4\": FieldSetMatcher(fields={\n \"page22-zayavitel_type\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"founder_type\",\n adapter=\"FounderTypeNumberAdapter\"),\n\n \"page22-uchreditel_inn\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"company.inn\"),\n \"page22-uchreditel_full_name__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"company.full_name\"),\n\n \"page22-zayavitel_surname\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.surname\"),\n \"page22-zayavitel_name\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.name\"),\n \"page22-zayavitel_patronymic\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.patronymic\"),\n \"page22-zayavitel_inn\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.inn\"),\n 
\"page22-zayavitel_birth_date__day\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.birthdate.day\"),\n \"page22-zayavitel_birth_date__month\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.birthdate.month\"),\n \"page22-zayavitel_birth_date__year\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.birthdate.year\"),\n \"page22-zayavitel_birth_place__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"person.birthplace\"),\n\n \"page23-issuer__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"person.passport.issue_depart\"),\n \"page23-doc_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.passport.document_type\"),\n \"page23-doc_number\": ConcatFieldAttributeMatcher(field_name=\"founders[]\",\n attributes=[\"person.passport.series\",\n \"person.passport.number\"],\n adapter=\"InternalPassportAdapter\"),\n \"page23-doc_issue_date__day\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.passport.issue_date.day\"),\n \"page23-doc_issue_date__month\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.passport.issue_date.month\"),\n \"page23-doc_issue_date__year\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.passport.issue_date.year\"),\n \"page23-issuer_subdivision_code__left\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.passport.depart_code\"),\n \"page23-issuer_subdivision_code__right\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.passport.depart_code\"),\n \"page23-postal_index\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.index\"),\n \"page23-address_subject_code\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.region\",\n adapter=\"RFRegionNumberAdapter\"),\n \"page23-address_district_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.district_type\",\n adapter=\"ShortDistrictTypeAdapter\"),\n \"page23-address_city_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.city_type\",\n adapter=\"ShortCityTypeAdapter\"),\n \"page23-address_city_name\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.city\"),\n \"page23-address_nas_punkt_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.village_type\",\n adapter=\"ShortVillageTypeAdapter\"),\n \"page23-address_street_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.street_type\",\n adapter=\"ShortStreetTypeAdapter\"),\n \"page23-address_house_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.house_type\"),\n \"page23-address_corpus_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.building_type\"),\n \"page23-address_house_number\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.house\"),\n \"page23-address_corpus_number\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.building\"),\n \"page23-address_flat_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.flat_type\"),\n \"page23-address_flat_number\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.flat\"),\n \"page23-living_country_code\": FieldAttributeMatcher(field_name=\"founders[]\",\n 
attr_name=\"person.living_country_code\"),\n \"page23-phone_number\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"person.phone.normalised\"),\n \"page23-email\": FieldAttributeMatcher(field_name=\"founders[]\", attr_name=\"person.email\"),\n \"page23-address_district_name__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.district\"),\n \"page23-address_nas_punkt_name__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.village\"),\n \"page23-address_street_name__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"person.address.street\"),\n \"page23-living_address__line{{item}}\": MultilineFieldMatcher(field_name=\"founders[]\",\n attr_name=\"person.living_address\"),\n\n \"page24-document_delivery_type\": FieldAttributeMatcher(field_name=\"founders[]\",\n attr_name=\"documents_recipient_type\",\n adapter=\"DocumentDeliveryNumberAdapter\"),\n })\n }\n }\n P11001_SCHEMA = {\"doc_name\": DocumentTypeEnum.DT_P11001,\n \"file_name_template\": u\"Заявление на регистрацию ООО {{short_name}}\",\n \"batch_statuses\": [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"fields\": [\n SHORT_NAME_FIELD,\n FULL_NAME_FIELD,\n ADDRESS_TYPE_FIELD_NR,\n ADDRESS_FIELD,\n GENERAL_MANAGER_CAPTION_FIELD,\n FOUNDERS_FIELD,\n STARTER_CAPITAL_FIELD,\n JOB_MAIN_CODE_FIELD,\n JOB_CODE_ARRAY_FIELD,\n GENERAL_MANAGER_FIELD\n ]}\n\n ARTICLES_TEMPLATE = {\n \"template_name\": \"soft_template_ustav\",\n \"file_name\": get_test_resource_name(config, \"ustav_llc.tex\"),\n \"is_strict\": False,\n \"doc_name\": DocumentTypeEnum.DT_ARTICLES\n }\n\n ARTICLES_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_ARTICLES,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Устав ООО {{short_name}}\",\n \"batch_statuses\": [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"validators\": [{\n \"condition\": {\n \"#or\": [{\n \"use_foreign_company_name\": {\n \"#ne\": True\n }\n }, {\n \"use_foreign_company_name\": True,\n \"foreign_full_name\": {\n \"#not_empty\": True\n }\n }]\n },\n \"error\": {\n \"field\": \"foreign_full_name\",\n \"code\": 4\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"use_foreign_company_name\": {\n \"#ne\": True\n }\n }, {\n \"use_foreign_company_name\": True,\n \"foreign_short_name\": {\n \"#not_empty\": True\n }\n }]\n },\n \"error\": {\n \"field\": \"foreign_short_name\",\n \"code\": 4\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"use_foreign_company_name\": {\n \"#ne\": True\n }\n }, {\n \"use_foreign_company_name\": True,\n \"foreign_language\": {\n \"#not_empty\": True\n }\n }]\n },\n \"error\": {\n \"field\": \"foreign_language\",\n \"code\": 4\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"use_national_language_company_name\": {\n \"#ne\": True\n }\n }, {\n \"use_national_language_company_name\": True,\n \"national_language_full_name\": {\n \"#not_empty\": True\n }\n }]\n },\n \"error\": {\n \"field\": \"national_language_full_name\",\n \"code\": 4\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"use_national_language_company_name\": {\n \"#ne\": True\n }\n }, {\n \"use_national_language_company_name\": True,\n \"national_language_short_name\": {\n \"#not_empty\": True\n }\n }]\n },\n \"error\": {\n \"field\": \"national_language_short_name\",\n \"code\": 4\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"use_national_language_company_name\": {\n \"#ne\": True\n }\n }, {\n \"use_national_language_company_name\": True,\n 
\"national_language\": {\n \"#not_empty\": True\n }\n }]\n },\n \"error\": {\n \"field\": \"national_language\",\n \"code\": 4\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"board_of_directors\": {\n \"#ne\": True\n }\n }, {\n \"board_of_directors\": True,\n \"sovet_directorov_num\": {\n \"#not_empty\": True\n }\n }]\n },\n \"error\": {\n \"field\": \"sovet_directorov_num\",\n \"code\": 4\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"board_of_directors\": {\n \"#ne\": True\n }\n }, {\n \"board_of_directors\": True,\n \"general_manager_deals_max_amount\": {\n \"#not_empty\": True\n }\n }]\n },\n \"error\": {\n \"field\": \"general_manager_deals_max_amount\",\n \"code\": 4\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"board_of_directors\": {\n \"#ne\": True\n }\n }, {\n \"board_of_directors\": True,\n \"board_of_directors_caption\": {\n \"#not_empty\": True\n }\n }]\n },\n \"error\": {\n \"field\": \"board_of_directors_caption\",\n \"code\": 4\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"board_of_directors\": {\n \"#ne\": True\n }\n }, {\n \"board_of_directors\": True,\n \"large_property_deals_max_value\": {\n \"#not_empty\": True\n }\n }]\n },\n \"error\": {\n \"field\": \"large_property_deals_max_value\",\n \"code\": 4\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"board_of_directors\": {\n \"#ne\": True\n }\n }, {\n \"board_of_directors\": True,\n \"large_property_deals_min_value\": {\n \"#not_empty\": True\n }\n }]\n },\n \"error\": {\n \"field\": \"large_property_deals_min_value\",\n \"code\": 4\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"board_of_directors\": {\n \"#ne\": True\n }\n }, {\n \"board_of_directors\": True,\n \"large_deals_min_value\": {\n \"#not_empty\": True\n }\n }]\n },\n \"error\": {\n \"field\": \"large_deals_min_value\",\n \"code\": 4\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"board_of_directors\": {\n \"#ne\": True\n }\n }, {\n \"board_of_directors\": True,\n \"large_deals_max_value\": {\n \"#not_empty\": True\n }\n }]\n },\n \"error\": {\n \"field\": \"large_deals_max_value\",\n \"code\": 4\n }\n }],\n \"fields\": [\n SHORT_NAME_FIELD,\n FULL_NAME_FIELD,\n ADDRESS_TYPE_FIELD_NR,\n ADDRESS_FIELD,\n STARTER_CAPITAL_FIELD,\n JOB_MAIN_CODE_FIELD,\n JOB_CODE_ARRAY_FIELD,\n GENERAL_MANAGER_CAPTION_FIELD,\n DOC_DATE_FIELD_TODAY,\n DOC_DATE_OR_TODAY,\n USE_FOREIGN_COMPANY_NAME_FIELD,\n USE_NATIONAL_LANGUAGE_COMPANY_NAME_FIELD,\n FOREIGN_FULL_NAME_FIELD,\n FOREIGN_SHORT_NAME_FIELD,\n NATIONAL_LANGUAGE_FULL_NAME_FIELD,\n NATIONAL_LANGUAGE_SHORT_NAME_FIELD,\n FOREIGN_LANGUAGE_FIELD,\n NATIONAL_LANGUAGE_FIELD,\n BOARD_OF_DIRECTORS_FIELD,\n FOUNDERS_COUNT_FIELD, {\n \"name\": \"pravo_otchuzhdeniya_type\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"AlienationRightEnum\",\n \"required\": True,\n }, {\n \"name\": \"preimusch_pravo_priobreteniya_doli_time_span\",\n \"type\": \"DocIntField\",\n \"required\": True,\n \"min_val\": 30\n }, {\n \"name\": \"perehod_doli_k_naslednikam_soglasie\",\n \"type\": \"DocBoolField\",\n \"required\": True,\n }, {\n \"name\": \"sovet_directorov_num\",\n \"type\": \"DocIntField\",\n \"required\": False,\n \"even\": False\n }, {\n \"name\": \"general_manager_deals_max_amount\",\n \"type\": \"DocIntField\",\n \"required\": False\n }, {\n \"name\": \"general_manager_term\",\n \"type\": \"DocIntField\",\n \"min_val\": 6,\n \"max_val\": 60,\n \"required\": True,\n }, {\n \"name\": \"board_of_directors_caption\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"min_length\": 1,\n \"allowed_re\": 
CompanyObject.RUS_COMPANY_NAME_RE\n }, {\n \"name\": \"large_deals_min_value\",\n \"type\": \"DocIntField\",\n \"min_val\": 25,\n \"max_val\": 100,\n \"required\": False\n }, {\n \"name\": \"large_deals_max_value\",\n \"type\": \"DocIntField\",\n \"min_val\": 25,\n \"max_val\": 100,\n \"required\": False\n }, {\n \"name\": \"large_property_deals_max_value\",\n \"type\": \"DocIntField\",\n \"required\": False\n }, {\n \"name\": \"large_property_deals_min_value\",\n \"type\": \"DocIntField\",\n \"required\": False\n }, {\n \"name\": \"necessary_votes_for_general_meeting_decisions\",\n \"type\": \"NecessaryVotesForGeneralMeeting\",\n \"required\": True,\n }, {\n \"name\": \"base_general_manager_document\",\n \"type\": \"DocConstField\",\n \"required\": False,\n \"value\": u\"устав\"\n }\n ]}\n\n ACT_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_ACT,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Акт приёма-передачи ООО {{short_name}}\",\n \"batch_statuses\": [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"conditions\": {\n \"founder->properties\": {\n \"#not_empty\": True\n }\n },\n \"fields\": [\n SHORT_NAME_FIELD,\n FULL_NAME_FIELD,\n ADDRESS_TYPE_FIELD_NR,\n ADDRESS_FIELD,\n GENERAL_MANAGER_CAPTION_FIELD,\n DOC_DATE_FIELD_TODAY,\n DOC_DATE_OR_TODAY,\n GENERAL_MANAGER_FIELD,\n FOUNDERS_COUNT_FIELD,\n {\n \"name\": \"founder\",\n \"type\": \"FounderObject\",\n \"required\": True,\n }\n ]}\n\n ACT_TEMPLATE = {\n \"template_name\": \"soft_template_appi\",\n \"file_name\": get_test_resource_name(config, \"akt_priema_peredachi.tex\"),\n \"is_strict\": False,\n \"doc_name\": DocumentTypeEnum.DT_ACT\n }\n\n USN_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_USN,\n \"file_name_template\": u\"Заявление на УСН ООО {{short_name}}\",\n \"batch_statuses\": [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"conditions\": {\n \"taxation_type\": 'usn'\n },\n \"fields\": [\n FULL_NAME_FIELD,\n ADDRESS_TYPE_FIELD_NR,\n ADDRESS_FIELD,\n DOC_DATE_FIELD_TODAY,\n DOC_DATE_OR_TODAY,\n GENERAL_MANAGER_FIELD,\n FOUNDERS_COUNT_FIELD,\n FOUNDER_APPLICANT_FIELD,\n JOB_MAIN_CODE_FIELD,\n FOUNDERS_FIELD, {\n \"name\": \"tax_type\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"UsnTaxType\",\n \"required\": True,\n }\n ],\n \"external_validators\": [\"usn_tax_type\"],\n }\n\n USN_TEMPLATE = {\n \"doc_name\": DocumentTypeEnum.DT_USN,\n \"template_name\": \"strict_template1__usn\",\n \"is_strict\": True,\n \"pages\": [{\n\n \"variants\": {\n \"type\": RenderingVariants.TYPE_RENDER_FIRST_MATCHING,\n \"cases\": [\n { # заявитель - физ. лицо, учредителей 1\n \"page_file\": [get_test_resource_name(config, \"new_usn.pdf\")],\n \"field_matcher_set\": \"%set-1\",\n \"conditions\": {\n \"founder_applicant.founder_type\": FounderTypeEnum.FT_PERSON,\n \"founder_applicant.documents_recipient_type\": {\n \"#ne\": None\n },\n \"founders_count\": 1\n },\n }, { # заявитель - физ. лицо, учредителей > 1\n \"page_file\": [get_test_resource_name(config, \"new_usn.pdf\")],\n \"field_matcher_set\": \"%set-3\",\n \"conditions\": {\n \"founder_applicant.founder_type\": FounderTypeEnum.FT_PERSON,\n \"founder_applicant.documents_recipient_type\": {\n \"#ne\": None\n }\n },\n }, { # заявитель - российское юр. 
лицо, учредителей 1\n \"page_file\": [get_test_resource_name(config, \"new_usn.pdf\")],\n \"field_matcher_set\": \"%set-2\",\n \"conditions\": {\n \"founder_applicant.founder_type\": FounderTypeEnum.FT_COMPANY,\n \"founder_applicant.documents_recipient_type\": {\n \"#ne\": None\n },\n \"founders_count\": 1\n }\n }, { # заявитель - российское юр. лицо, учредителей > 1\n \"page_file\": [get_test_resource_name(config, \"new_usn.pdf\")],\n \"field_matcher_set\": \"%set-4\",\n \"conditions\": {\n \"founder_applicant.founder_type\": FounderTypeEnum.FT_COMPANY,\n \"founder_applicant.documents_recipient_type\": {\n \"#ne\": None\n }\n }\n }, { # fallback\n \"page_file\": [get_test_resource_name(config, \"new_usn.pdf\")],\n \"field_matcher_set\": \"%set-5\",\n \"conditions\": {}\n }]\n },\n \"array_fields\": [\n {\n \"name\": \"name_line{{item}}\",\n \"count\": 4,\n \"field-length\": 40,\n \"case\": \"upper\"\n }, {\n \"name\": \"applicant-name__line{{item}}\",\n \"count\": 3,\n \"field-length\": 20,\n \"case\": \"upper\"\n }, {\n \"name\": \"agent-doc-name__line{{item}}\",\n \"field-length\": 20,\n \"case\": \"upper\",\n \"count\": 2,\n }\n ],\n \"fields\": [\n {\n \"name\": \"inn\",\n \"field-length\": 12\n }, {\n \"name\": \"kpp\",\n \"field-length\": 9\n }, {\n \"name\": \"kod_nalog_organa\",\n \"field-length\": 4\n }, {\n \"name\": \"priznak_zayavitelya\",\n \"field-length\": 1\n }, {\n \"name\": \"perehod\",\n \"field-length\": 1\n }, {\n \"name\": \"god_zayavleniya\",\n \"text-align\": \"right\",\n \"field-length\": 1\n }, {\n \"name\": \"phone\",\n \"field-length\": 20\n }, {\n \"name\": \"applicant-type\",\n \"field-length\": 1\n }, {\n \"name\": \"current-date__day\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"current-date__month\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"current-date__year\",\n \"field-length\": 4\n }, {\n \"name\": \"dohod\",\n \"field-length\": 1\n }, {\n \"name\": \"god_podachi_uvedomleniya\",\n \"field-length\": 2,\n \"text-align\": \"right\"\n }, {\n \"name\": \"polucheno_dohodov\",\n \"field-length\": 9\n }, {\n \"name\": \"ostatok\",\n \"field-length\": 9\n }\n ]\n }]\n }\n\n USN_MATCHER = {\n \"doc_name\": DocumentTypeEnum.DT_USN,\n \"template_name\": USN_TEMPLATE['template_name'],\n \"fields\": {\n \"%set-1\": FieldSetMatcher(fields={\n \"applicant-name__line{{item}}\": MultilineFieldMatcher(field_name=\"general_manager\",\n attr_name=\"full_name\"),\n \"agent-doc-name__line{{item}}\": ConstantMatcher(value=u\"УСТАВ\"),\n \"phone\": FieldAttributeMatcher(field_name=\"general_manager\", attr_name=\"phone.normalised\"),\n \"name_line{{item}}\": MultilineFieldMatcher(field_name=\"full_name\",\n prefix=u\"Общество с ограниченной ответственностью «\",\n suffix=u\"»\"),\n \"inn\": ConstantMatcher(value=u\"————————————\"),\n \"kpp\": ConstantMatcher(value=u\"————————————\"),\n \"polucheno_dohodov\": ConstantMatcher(value=u\"————————————\"),\n \"ostatok\": ConstantMatcher(value=u\"————————————\"),\n \"kod_nalog_organa\": FieldAttributeMatcher(field_name=\"address\", attr_name=\"ifns_number\"),\n \"priznak_zayavitelya\": ConstantMatcher(value=1),\n \"perehod\": ConstantMatcher(value=2),\n \"god_zayavleniya\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"year\"),\n\n \"applicant-type\": ConstantMatcher(value=1),\n \"current-date__day\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"day\"),\n 
\"current-date__month\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"month\"),\n \"current-date__year\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"year\"),\n\n \"dohod\": SimpleMatcher(field_name=\"tax_type\", adapter=\"UsnTaxTypeAdapter\"),\n # \"dov_page_number\" : 1 if applicant_type = 2 else \"\",\n \"god_podachi_uvedomleniya\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"year\"),\n }),\n \"%set-2\": FieldSetMatcher(fields={\n \"applicant-name__line{{item}}\": MultilineFieldMatcher(field_name=\"general_manager\",\n attr_name=\"full_name\"),\n \"agent-doc-name__line{{item}}\": ConstantMatcher(value=u\"УСТАВ\"),\n \"phone\": FieldAttributeMatcher(field_name=\"general_manager\", attr_name=\"phone.normalised\"),\n \"name_line{{item}}\": MultilineFieldMatcher(field_name=\"full_name\",\n prefix=u\"Общество с ограниченной ответственностью «\",\n suffix=u\"»\"),\n \"inn\": ConstantMatcher(value=u\"————————————\"),\n \"kpp\": ConstantMatcher(value=u\"————————————\"),\n \"polucheno_dohodov\": ConstantMatcher(value=u\"————————————\"),\n \"ostatok\": ConstantMatcher(value=u\"————————————\"),\n \"kod_nalog_organa\": FieldAttributeMatcher(field_name=\"address\", attr_name=\"ifns_number\"),\n \"priznak_zayavitelya\": ConstantMatcher(value=1),\n \"perehod\": ConstantMatcher(value=2),\n \"god_zayavleniya\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"year\"),\n\n \"applicant-type\": ConstantMatcher(value=1),\n \"current-date__day\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"day\"),\n \"current-date__month\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"month\"),\n \"current-date__year\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"year\"),\n \"dohod\": SimpleMatcher(field_name=\"tax_type\", adapter=\"UsnTaxTypeAdapter\"),\n \"god_podachi_uvedomleniya\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"year\"),\n }),\n \"%set-3\": FieldSetMatcher(fields={\n \"applicant-name__line{{item}}\": MultilineFieldMatcher(field_name=\"general_manager\",\n attr_name=\"full_name\"),\n \"agent-doc-name__line{{item}}\": ConstantMatcher(value=u\"УСТАВ\"),\n \"phone\": FieldAttributeMatcher(field_name=\"general_manager\", attr_name=\"phone.normalised\"),\n \"name_line{{item}}\": MultilineFieldMatcher(field_name=\"full_name\",\n prefix=u\"Общество с ограниченной ответственностью «\",\n suffix=u\"»\"),\n \"inn\": ConstantMatcher(value=u\"————————————\"),\n \"kpp\": ConstantMatcher(value=u\"————————————\"),\n \"polucheno_dohodov\": ConstantMatcher(value=u\"————————————\"),\n \"ostatok\": ConstantMatcher(value=u\"————————————\"),\n \"kod_nalog_organa\": FieldAttributeMatcher(field_name=\"address\", attr_name=\"ifns_number\"),\n \"priznak_zayavitelya\": ConstantMatcher(value=1),\n \"perehod\": ConstantMatcher(value=2),\n \"god_zayavleniya\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"year\"),\n\n \"applicant-type\": ConstantMatcher(value=1),\n \"current-date__day\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"day\"),\n \"current-date__month\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"month\"),\n \"current-date__year\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"year\"),\n\n \"dohod\": SimpleMatcher(field_name=\"tax_type\", adapter=\"UsnTaxTypeAdapter\"),\n \"god_podachi_uvedomleniya\": 
FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"year\"),\n }),\n \"%set-4\": FieldSetMatcher(fields={\n \"applicant-name__line{{item}}\": MultilineFieldMatcher(field_name=\"general_manager\",\n attr_name=\"full_name\"),\n \"agent-doc-name__line{{item}}\": ConstantMatcher(value=u\"УСТАВ\"),\n \"phone\": FieldAttributeMatcher(field_name=\"general_manager\", attr_name=\"phone.normalised\"),\n \"name_line{{item}}\": MultilineFieldMatcher(field_name=\"full_name\",\n prefix=u\"Общество с ограниченной ответственностью «\",\n suffix=u\"»\"),\n \"inn\": ConstantMatcher(value=u\"————————————\"),\n \"kpp\": ConstantMatcher(value=u\"————————————\"),\n \"polucheno_dohodov\": ConstantMatcher(value=u\"————————————\"),\n \"ostatok\": ConstantMatcher(value=u\"————————————\"),\n \"kod_nalog_organa\": FieldAttributeMatcher(field_name=\"address\", attr_name=\"ifns_number\"),\n \"priznak_zayavitelya\": ConstantMatcher(value=1),\n \"perehod\": ConstantMatcher(value=2),\n \"god_zayavleniya\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"year\"),\n\n \"applicant-type\": ConstantMatcher(value=1),\n \"current-date__day\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"day\"),\n \"current-date__month\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"month\"),\n \"current-date__year\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"year\"),\n \"dohod\": SimpleMatcher(field_name=\"tax_type\", adapter=\"UsnTaxTypeAdapter\"),\n \"god_podachi_uvedomleniya\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"year\"),\n }),\n \"%set-5\": FieldSetMatcher(fields={\n \"applicant-name__line{{item}}\": MultilineFieldMatcher(field_name=\"general_manager\",\n attr_name=\"full_name\"),\n \"agent-doc-name__line{{item}}\": ConstantMatcher(value=u\"УСТАВ\"),\n \"phone\": FieldAttributeMatcher(field_name=\"general_manager\", attr_name=\"phone.normalised\"),\n \"name_line{{item}}\": MultilineFieldMatcher(field_name=\"full_name\",\n prefix=u\"Общество с ограниченной ответственностью «\",\n suffix=u\"»\"),\n \"inn\": ConstantMatcher(value=u\"————————————\"),\n \"kpp\": ConstantMatcher(value=u\"————————————\"),\n \"polucheno_dohodov\": ConstantMatcher(value=u\"————————————\"),\n \"ostatok\": ConstantMatcher(value=u\"————————————\"),\n \"kod_nalog_organa\": FieldAttributeMatcher(field_name=\"address\", attr_name=\"ifns_number\"),\n \"priznak_zayavitelya\": ConstantMatcher(value=1),\n \"perehod\": ConstantMatcher(value=2),\n \"god_zayavleniya\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"year\"),\n\n \"applicant-type\": ConstantMatcher(value=1),\n \"current-date__day\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"day\"),\n \"current-date__month\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"month\"),\n \"current-date__year\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"year\"),\n \"dohod\": SimpleMatcher(field_name=\"tax_type\", adapter=\"UsnTaxTypeAdapter\"),\n \"god_podachi_uvedomleniya\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"year\"),\n })\n }\n }\n\n DECISION_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_DECISION,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Решение единственного учредителя ООО {{short_name}}\",\n \"batch_statuses\": [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"conditions\": {\n \"founders_count\": 1\n },\n \"fields\": [\n 
FOUNDER_APPLICANT_FIELD,\n SHORT_NAME_FIELD,\n FULL_NAME_FIELD,\n ADDRESS_TYPE_FIELD_NR,\n ADDRESS_FIELD,\n STARTER_CAPITAL_FIELD,\n GENERAL_MANAGER_CAPTION_FIELD,\n DOC_DATE_FIELD_TODAY,\n DOC_DATE_OR_TODAY,\n USE_FOREIGN_COMPANY_NAME_FIELD,\n USE_NATIONAL_LANGUAGE_COMPANY_NAME_FIELD,\n FOREIGN_FULL_NAME_FIELD,\n FOREIGN_SHORT_NAME_FIELD,\n NATIONAL_LANGUAGE_FULL_NAME_FIELD,\n NATIONAL_LANGUAGE_SHORT_NAME_FIELD,\n FOREIGN_LANGUAGE_FIELD,\n NATIONAL_LANGUAGE_FIELD,\n FOUNDERS_COUNT_FIELD, {\n \"name\": \"general_manager\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": True,\n \"override_fields_kwargs\": {\n \"phone\": {\n \"required\": True\n }\n }\n }\n ]\n }\n\n DECISION_TEMPLATE = {\n \"doc_name\": DocumentTypeEnum.DT_DECISION,\n \"template_name\": \"soft_template_reshenie\",\n \"file_name\": get_test_resource_name(config, \"reshenie.tex\"),\n \"is_strict\": False,\n }\n\n PROTOCOL_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_PROTOCOL,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Протокол ООО {{short_name}}\",\n \"batch_statuses\": [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"conditions\": {\n \"founders_count\": {\n \"#gt\": 1\n }\n },\n \"fields\": [\n GENERAL_MANAGER_FIELD,\n DOC_DATE_FIELD_TODAY,\n DOC_DATE_OR_TODAY,\n FOUNDERS_FIELD,\n SHORT_NAME_FIELD,\n FULL_NAME_FIELD,\n ADDRESS_TYPE_FIELD_NR,\n ADDRESS_FIELD,\n STARTER_CAPITAL_FIELD,\n GENERAL_MANAGER_CAPTION_FIELD,\n USE_FOREIGN_COMPANY_NAME_FIELD,\n USE_NATIONAL_LANGUAGE_COMPANY_NAME_FIELD,\n FOUNDER_APPLICANT_FIELD,\n FOREIGN_FULL_NAME_FIELD,\n FOREIGN_SHORT_NAME_FIELD,\n NATIONAL_LANGUAGE_FULL_NAME_FIELD,\n NATIONAL_LANGUAGE_SHORT_NAME_FIELD,\n FOREIGN_LANGUAGE_FIELD,\n NATIONAL_LANGUAGE_FIELD,\n FOUNDERS_COUNT_FIELD,\n {\n \"name\": \"share_type\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"NumericPartEnum\",\n \"required\": False\n },\n {\n \"name\": \"reg_responsible_founder\",\n \"type\": \"db_object\",\n \"required\": False\n },\n {\n \"name\": \"doc_obtain_founder\",\n \"type\": \"db_object\",\n \"required\": False\n },\n {\n \"name\": \"registration_way\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"RegistrationWay\",\n \"required\": True\n }, {\n \"name\": \"obtain_way\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"DocumentDeliveryTypeStrEnum\",\n \"required\": True\n }, {\n \"name\": \"selected_secretary\",\n \"type\": \"db_object\",\n \"required\": True\n }, {\n \"name\": \"reg_responsible_person\",\n \"type\": \"db_object\",\n \"required\": False\n }, {\n \"name\": \"selected_moderator\",\n \"type\": \"db_object\",\n \"required\": True\n }, {\n \"name\": \"reg_responsible_person_obj\",\n \"type\": \"calculated\",\n \"field_type\": \"db_object\",\n \"required\": False,\n \"suppress_validation_errors\": True,\n \"value\": {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"registration_way\": \"some_founders\"\n },\n \"value\": {\n \"#field\": \"reg_responsible_founder\"\n }\n }],\n \"default\": {\n \"value\": {\n \"#field\": \"selected_moderator\"\n }\n }\n }\n }\n },\n {\n \"name\":\"founders_ref_list\",\n \"type\": \"calculated\",\n \"field_type\": \"DocArrayField\",\n \"cls\": \"DocTextField\",\n \"value\": FOUNDERS_REF_LIST_TEMP_VARIABLE,\n \"depends_on\": [\"founders\"],\n \"suppress_validation_errors\": True,\n }\n ],\n \"validators\": [{\n \"condition\": {\n \"#or\": [{\n \"selected_secretary\": {\n \"#empty\": True\n }\n }, {\n \"selected_secretary->_id\":\n {\n \"#in\": \"@founders_ref_list\"\n }\n }]\n },\n 
\"error\": {\n \"field\": \"selected_secretary\",\n \"code\": 5\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"selected_moderator\": {\n \"#empty\": True\n }\n }, {\n \"selected_moderator->_id\":\n {\n \"#in\": \"@founders_ref_list\"\n }\n }]\n },\n \"error\": {\n \"field\": \"selected_moderator\",\n \"code\": 5\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"registration_way\": {\n \"#ne\": \"some_founders\"\n }\n }, {\n \"registration_way\": \"some_founders\",\n \"reg_responsible_founder->id\": {\n \"#in\": \"@founders_ref_list\"\n }\n }]\n },\n \"error\": {\n \"field\": \"reg_responsible_founder\",\n \"code\": 5\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"registration_way\": {\n \"#ne\": \"responsible_person\"\n }\n }, {\n \"registration_way\": \"responsible_person\",\n \"reg_responsible_person\": {\n \"#not_empty\": True\n }\n }]\n },\n \"error\": {\n \"field\": \"reg_responsible_person\",\n \"code\": 5\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"reg_responsible_person->_id\": {\n \"#nin\": \"@founders_ref_list\"\n }\n }, {\n \"registration_way\": {\n \"#ne\": \"responsible_person\"\n }\n }]\n },\n \"error\": {\n \"field\": \"reg_responsible_person\",\n \"code\": 5\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"doc_obtain_founder->_id\": {\n \"#in\": \"@founders_ref_list\"\n }\n }, {\n \"obtain_way\": {\n \"#ne\": \"founder\"\n }\n }]\n },\n \"error\": {\n \"field\": \"doc_obtain_founder\",\n \"code\": 5\n }\n },\n # {\n # \"condition\" : {\n # \"#or\" : [{\n # \"share_type\" : {\n # \"#ne\" : \"fraction\"\n # }\n # }, {\n # \"share_type\" : \"fraction\",\n # \"is_all_divisible\" : True\n # }]\n # },\n # \"error\" : {\n # \"field\" : \"capital_divisibility\",\n # \"code\" : 5\n # },\n # \"#set\" : {\n # \"is_all_divisible\" : {\n # \"#aggregate\" : {\n # \"field\" : \"founders\",\n # \"attr\" : \"is_starter_capital_dividable\",\n # \"operator\" : \"and\"\n # }\n # }\n # }\n # }\n ]\n }\n\n PROTOCOL_TEMPLATE = {\n \"doc_name\": DocumentTypeEnum.DT_PROTOCOL,\n \"template_name\": \"soft_template_protocol\",\n \"file_name\": get_test_resource_name(config, \"protocol.tex\"),\n \"is_strict\": False,\n }\n\n ESHN_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_ESHN,\n \"file_name_template\": u\"Заявление на ЕСХН ООО {{short_name}}\",\n \"batch_statuses\": [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"conditions\": {\n \"taxation_type\": TaxType.TT_ESHN\n },\n \"fields\": [\n FULL_NAME_FIELD,\n ADDRESS_TYPE_FIELD_NR,\n ADDRESS_FIELD,\n DOC_DATE_FIELD_TODAY,\n DOC_DATE_OR_TODAY,\n GENERAL_MANAGER_FIELD,\n FOUNDERS_COUNT_FIELD,\n FOUNDER_APPLICANT_FIELD,\n JOB_MAIN_CODE_FIELD\n ],\n \"external_validators\": [\"eshn_tax_type\"],\n }\n\n ESHN_TEMPLATE = {\n \"doc_name\": DocumentTypeEnum.DT_ESHN,\n \"template_name\": \"strict_template1__eshn\",\n \"is_strict\": True,\n \"pages\": [{\n\n \"variants\": {\n \"type\": RenderingVariants.TYPE_RENDER_FIRST_MATCHING,\n \"cases\": [\n { # заявитель - физ. лицо, \"founders_count\" : 1\n \"page_file\": [get_test_resource_name(config, \"new_eshn.pdf\")],\n \"field_matcher_set\": \"%set-1\",\n \"conditions\": {\n \"founder_applicant.founder_type\": FounderTypeEnum.FT_PERSON,\n \"founder_applicant.documents_recipient_type\": {\n \"#ne\": None\n },\n \"founders_count\": 1\n }\n }, { # заявитель - российское юр. 
лицо, \"founders_count\" : 1\n \"page_file\": [get_test_resource_name(config, \"new_eshn.pdf\")],\n \"field_matcher_set\": \"%set-2\",\n \"conditions\": {\n \"founder_applicant.founder_type\": FounderTypeEnum.FT_COMPANY,\n \"founder_applicant.documents_recipient_type\": {\n \"#ne\": None\n },\n \"founders_count\": 1\n }\n }, { # заявитель - физ. лицо\n \"page_file\": [get_test_resource_name(config, \"new_eshn.pdf\")],\n \"field_matcher_set\": \"%set-3\",\n \"conditions\": {\n \"founder_applicant.founder_type\": FounderTypeEnum.FT_PERSON,\n \"founder_applicant.documents_recipient_type\": {\n \"#ne\": None\n }\n }\n }, { # заявитель - российское юр. лицо\n \"page_file\": [get_test_resource_name(config, \"new_eshn.pdf\")],\n \"field_matcher_set\": \"%set-4\",\n \"conditions\": {\n \"founder_applicant.founder_type\": FounderTypeEnum.FT_COMPANY,\n \"founder_applicant.documents_recipient_type\": {\n \"#ne\": None\n }\n }\n }]\n },\n \"array_fields\": [\n {\n \"name\": \"full_name__line{{item}}\",\n \"count\": 4,\n \"field-length\": 40,\n \"case\": \"upper\"\n }, {\n \"name\": \"applicant__line{{item}}\",\n \"count\": 3,\n \"field-length\": 20,\n \"case\": \"upper\"\n }, {\n \"name\": \"applicant_doc__line{{item}}\",\n \"field-length\": 20,\n \"case\": \"upper\",\n \"count\": 2,\n },\n ],\n \"fields\": [\n {\n \"name\": \"inn\",\n \"field-length\": 12\n }, {\n \"name\": \"kpp\",\n \"field-length\": 9\n }, {\n \"name\": \"ifns\",\n \"field-length\": 4\n }, {\n \"name\": \"priznak_zayavitelya\",\n \"field-length\": 1\n }, {\n \"name\": \"perehod\",\n \"field-length\": 1\n }, {\n \"name\": \"phone\",\n \"field-length\": 20\n }, {\n \"name\": \"applicant_type\",\n \"field-length\": 1\n }, {\n \"name\": \"doc_date__day\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"doc_date__month\",\n \"field-length\": 2,\n \"text-align\": \"right\",\n \"space-filler\": u\"0\",\n }, {\n \"name\": \"doc_date__year\",\n \"field-length\": 4\n }, {\n \"name\": \"dohod_maj\",\n \"field-length\": 3\n }, {\n \"name\": \"dohod_min\",\n \"field-length\": 2\n }, {\n \"name\": \"dohod_percent\",\n \"field-length\": 1\n }\n ]\n }]\n }\n\n ESHN_MATCHER = {\n \"doc_name\": DocumentTypeEnum.DT_ESHN,\n \"template_name\": ESHN_TEMPLATE['template_name'],\n \"fields\": {\n \"%set-1\": FieldSetMatcher(fields={\n \"applicant__line{{item}}\": MultilineFieldMatcher(field_name=\"general_manager\",\n attr_name=\"full_name\"),\n \"applicant_doc__line{{item}}\": ConstantMatcher(value=u\"УСТАВ\"),\n \"phone\": FieldAttributeMatcher(field_name=\"general_manager\", attr_name=\"phone.normalised\"),\n \"full_name__line{{item}}\": MultilineFieldMatcher(field_name=\"full_name\",\n prefix=u\"Общество с ограниченной ответственностью «\",\n suffix=u\"»\"),\n \"inn\": ConstantMatcher(value=u\"————————————\"),\n \"kpp\": ConstantMatcher(value=u\"————————————\"),\n \"dohod_maj\": ConstantMatcher(value=u\"————————————\"),\n \"dohod_min\": ConstantMatcher(value=u\"————————————\"),\n \"dohod_percent\": ConstantMatcher(value=u\"————————————\"),\n \"ifns\": FieldAttributeMatcher(field_name=\"address\", attr_name=\"ifns_number\"),\n \"priznak_zayavitelya\": ConstantMatcher(value=1),\n \"perehod\": ConstantMatcher(value=2),\n \"applicant_type\": ConstantMatcher(value=1),\n \"doc_date__day\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"day\"),\n \"doc_date__month\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"month\"),\n \"doc_date__year\": 
FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"year\"),\n }),\n \"%set-2\": FieldSetMatcher(fields={\n \"applicant__line{{item}}\": MultilineFieldMatcher(field_name=\"general_manager\",\n attr_name=\"full_name\"),\n \"applicant_doc__line{{item}}\": ConstantMatcher(value=u\"УСТАВ\"),\n \"phone\": FieldAttributeMatcher(field_name=\"general_manager\", attr_name=\"phone.normalised\"),\n \"full_name__line{{item}}\": MultilineFieldMatcher(field_name=\"full_name\",\n prefix=u\"Общество с ограниченной ответственностью «\",\n suffix=u\"»\"),\n \"inn\": ConstantMatcher(value=u\"————————————\"),\n \"kpp\": ConstantMatcher(value=u\"————————————\"),\n \"dohod_maj\": ConstantMatcher(value=u\"————————————\"),\n \"dohod_min\": ConstantMatcher(value=u\"————————————\"),\n \"dohod_percent\": ConstantMatcher(value=u\"————————————\"),\n \"ifns\": FieldAttributeMatcher(field_name=\"address\", attr_name=\"ifns_number\"),\n \"priznak_zayavitelya\": ConstantMatcher(value=1),\n \"perehod\": ConstantMatcher(value=2),\n \"applicant_type\": ConstantMatcher(value=1),\n \"doc_date__day\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"day\"),\n \"doc_date__month\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"month\"),\n \"doc_date__year\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"year\"),\n }),\n \"%set-3\": FieldSetMatcher(fields={\n \"applicant__line{{item}}\": MultilineFieldMatcher(field_name=\"general_manager\",\n attr_name=\"full_name\"),\n \"applicant_doc__line{{item}}\": ConstantMatcher(value=u\"УСТАВ\"),\n \"phone\": FieldAttributeMatcher(field_name=\"general_manager\", attr_name=\"phone.normalised\"),\n \"full_name__line{{item}}\": MultilineFieldMatcher(field_name=\"full_name\",\n prefix=u\"Общество с ограниченной ответственностью «\",\n suffix=u\"»\"),\n \"inn\": ConstantMatcher(value=u\"————————————\"),\n \"kpp\": ConstantMatcher(value=u\"————————————\"),\n \"dohod_maj\": ConstantMatcher(value=u\"————————————\"),\n \"dohod_min\": ConstantMatcher(value=u\"————————————\"),\n \"dohod_percent\": ConstantMatcher(value=u\"————————————\"),\n \"ifns\": FieldAttributeMatcher(field_name=\"address\", attr_name=\"ifns_number\"),\n \"priznak_zayavitelya\": ConstantMatcher(value=1),\n \"perehod\": ConstantMatcher(value=2),\n \"applicant_type\": ConstantMatcher(value=1),\n \"doc_date__day\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"day\"),\n \"doc_date__month\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"month\"),\n \"doc_date__year\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"year\"),\n }),\n \"%set-4\": FieldSetMatcher(fields={\n \"applicant__line{{item}}\": MultilineFieldMatcher(field_name=\"general_manager\",\n attr_name=\"full_name\"),\n \"applicant_doc__line{{item}}\": ConstantMatcher(value=u\"УСТАВ\"),\n \"phone\": FieldAttributeMatcher(field_name=\"general_manager\", attr_name=\"phone.normalised\"),\n \"full_name__line{{item}}\": MultilineFieldMatcher(field_name=\"full_name\",\n prefix=u\"Общество с ограниченной ответственностью «\",\n suffix=u\"»\"),\n \"inn\": ConstantMatcher(value=u\"————————————\"),\n \"kpp\": ConstantMatcher(value=u\"————————————\"),\n \"dohod_maj\": ConstantMatcher(value=u\"————————————\"),\n \"dohod_min\": ConstantMatcher(value=u\"————————————\"),\n \"dohod_percent\": ConstantMatcher(value=u\"————————————\"),\n \"ifns\": FieldAttributeMatcher(field_name=\"address\", attr_name=\"ifns_number\"),\n \"priznak_zayavitelya\": 
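The four "%set-N" matcher blocks in ESHN_MATCHER are byte-for-byte identical, so they could be generated from a single base mapping, keeping future edits in one place. A sketch, assuming the individual matcher objects are stateless and therefore safe to reuse across sets:

def make_identical_sets(base_fields, count=4):
    # One FieldSetMatcher per "%set-N" key, all built from one mapping.
    return dict(("%set-%d" % (i + 1), FieldSetMatcher(fields=dict(base_fields)))
                for i in range(count))

# ESHN_MATCHER["fields"] = make_identical_sets(ESHN_BASE_FIELDS) would then
# replace the four hand-copied blocks above (ESHN_BASE_FIELDS is assumed).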
ConstantMatcher(value=1),\n \"perehod\": ConstantMatcher(value=2),\n \"applicant_type\": ConstantMatcher(value=1),\n \"doc_date__day\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"day\"),\n \"doc_date__month\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"month\"),\n \"doc_date__year\": FieldAttributeMatcher(field_name=\"doc_date_or_today\", attr_name=\"year\"),\n })\n }\n }\n\n CONTRACT_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_CONTRACT,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Договор ООО {{short_name}}\",\n \"batch_statuses\": [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"conditions\": {\n \"founders_count\": {\n \"#gt\": 1\n }\n },\n \"fields\": [\n DOC_DATE_FIELD_TODAY,\n DOC_DATE_OR_TODAY,\n FULL_NAME_FIELD,\n ADDRESS_TYPE_FIELD_NR,\n ADDRESS_FIELD,\n FOUNDERS_FIELD,\n SHORT_NAME_FIELD,\n STARTER_CAPITAL_FIELD,\n GENERAL_MANAGER_CAPTION_FIELD,\n USE_FOREIGN_COMPANY_NAME_FIELD,\n USE_NATIONAL_LANGUAGE_COMPANY_NAME_FIELD,\n FOREIGN_FULL_NAME_FIELD,\n FOREIGN_SHORT_NAME_FIELD,\n FOUNDERS_COUNT_FIELD,\n NATIONAL_LANGUAGE_FULL_NAME_FIELD,\n NATIONAL_LANGUAGE_SHORT_NAME_FIELD,\n FOREIGN_LANGUAGE_FIELD,\n NATIONAL_LANGUAGE_FIELD\n ]\n }\n\n CONTRACT_TEMPLATE = {\n \"doc_name\": DocumentTypeEnum.DT_CONTRACT,\n \"template_name\": \"soft_template_contract\",\n \"file_name\": get_test_resource_name(config, \"contract.tex\"),\n \"is_strict\": False\n }\n\n REG_FEE_INVOICE_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_REGISTRATION_FEE_INVOICE,\n \"doc_kind\": DocumentKindEnum.DK_DOWNLOADABLE_FILE,\n \"file_name_template\": u\"Квитанция на оплату регистрационной пошлины ООО {{short_name}}\",\n \"http_method\": \"post\",\n \"data_template_name\": get_test_resource_name(config, \"reg_fee_invoice__data.txt\"),\n \"url_template_name\": get_test_resource_name(config, \"reg_fee_invoice__url.txt\"),\n \"file_name_extension\": 'pdf',\n \"batch_statuses\": [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"conditions\": {\n \"founder_applicant\": {\n \"#not_empty\": True\n }\n },\n \"fields\": [\n ADDRESS_TYPE_FIELD,\n FOUNDER_APPLICANT_FIELD,\n ADDRESS_FIELD_WITH_OKATO,\n {\n \"name\": \"address_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"address\": {\n \"required\": True,\n \"override_fields_kwargs\": {\n \"ifns\": {\n \"required\": True\n }\n }\n }\n }\n }\n ],\n \"validators\": [{\n \"condition\": {\n \"#or\": [{\n \"address_type\": {\n \"#ne\": \"founder_registration_address\"\n }\n }, {\n \"address_type\": \"founder_registration_address\",\n \"address_person\": {\n \"#not_empty\": True\n }\n }]\n },\n \"error\": {\n \"field\": \"address_person\",\n \"code\": 4\n }\n }, {\n \"condition\": {\n \"#or\": [{\n \"address_type\": {\n \"#ne\": \"founder_registration_address\"\n }\n }, {\n \"address_type\": \"founder_registration_address\",\n \"address_person->address->okato\": {\n \"#not_empty\": True\n }\n }]\n },\n \"error\": {\n \"field\": \"address_person.address.okato\",\n \"code\": 4\n }\n }]\n }\n\n OOO_BANK_PARTNER_APPLICATION_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_OOO_BANK_PARTNER_APPLICATION,\n \"doc_kind\": DocumentKindEnum.DK_DOWNLOADABLE_FILE,\n \"file_name_template\": u\"заявление на открытие рассчетного счета в банке-партнере\",\n \"http_method\": \"post\",\n \"data_template_name\": get_test_resource_name(config, \"ooo_bank_partner_application__data.txt\"),\n \"url_template_name\": 
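REG_FEE_INVOICE_SCHEMA's two validators encode one rule: when address_type is "founder_registration_address", an address_person must be supplied and its address must carry an OKATO code (error code 4 otherwise). The same rule in plain Python, assuming dict-shaped field values:

def validate_fee_invoice_address(address_type, address_person):
    # Returns (field, code) pairs mirroring the validators above.
    errors = []
    if address_type == "founder_registration_address":
        if not address_person:
            errors.append(("address_person", 4))
        elif not (address_person.get("address") or {}).get("okato"):
            errors.append(("address_person.address.okato", 4))
    return errors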
get_test_resource_name(config, \"ooo_bank_partner_application__url.txt\"),\n \"file_name_extension\": 'pdf',\n \"fields\": [],\n \"validators\": []\n }\n\n DOVERENNOST_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_DOVERENNOST,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Доверенность на подачу документов\",\n \"batch_statuses\": [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"conditions\": {\n \"registration_way\": RegistrationWay.RW_RESPONSIBLE_PERSON\n },\n \"fields\": [\n DOC_DATE_FIELD_TODAY,\n DOC_DATE_OR_TODAY,\n FULL_NAME_FIELD,\n ADDRESS_TYPE_FIELD_NR,\n ADDRESS_FIELD,\n FOUNDERS_FIELD,\n FOUNDER_APPLICANT_FIELD, {\n \"name\": \"reg_responsible_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": True,\n \"override_fields_kwargs\": {\n \"address\": {\n \"required\": True\n }\n }\n },\n {\n \"name\":\"founders_ref_list\",\n \"type\": \"calculated\",\n \"field_type\": \"DocArrayField\",\n \"cls\": \"DocTextField\",\n \"depends_on\": [\"founders\"],\n \"value\": FOUNDERS_REF_LIST_TEMP_VARIABLE,\n \"suppress_validation_errors\": True,\n }\n ],\n \"validators\": [{\n \"condition\": {\n \"#or\": [{\n \"reg_responsible_person->_id\": {\n \"#nin\": \"@founders_ref_list\"\n }\n }, {\n \"registration_way\": {\n \"#ne\": \"responsible_person\"\n }\n }]\n },\n \"error\": {\n \"field\": \"reg_responsible_person\",\n \"code\": 5\n }\n }]\n }\n\n DOVERENNOST2_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_DOVERENNOST_OBTAIN,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Доверенность на получение документов\",\n \"batch_statuses\": [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"conditions\": {\n \"obtain_way\": DocumentDeliveryTypeStrEnum.DDT_ISSUE_TO_THE_APPLICANT_OR_AGENT,\n \"#or\": [{\n \"registration_way\": RegistrationWay.RW_RESPONSIBLE_PERSON,\n \"doc_obtain_person\": {\n \"#ne\": \"@reg_responsible_person\"\n }\n }, {\n \"registration_way\": {\n \"#ne\": RegistrationWay.RW_RESPONSIBLE_PERSON\n }\n }]\n },\n \"fields\": [\n DOC_DATE_FIELD_TODAY,\n DOC_DATE_OR_TODAY,\n FULL_NAME_FIELD,\n ADDRESS_TYPE_FIELD_NR,\n ADDRESS_FIELD,\n FOUNDERS_FIELD,\n FOUNDER_APPLICANT_FIELD,\n {\n \"name\": \"doc_obtain_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": True,\n \"override_fields_kwargs\": {\n \"address\": {\n \"required\": True\n }\n }\n },\n {\n \"name\":\"founders_ref_list\",\n \"type\": \"calculated\",\n \"field_type\": \"DocArrayField\",\n \"cls\": \"DocTextField\",\n \"depends_on\": [\"founders\"],\n \"value\": FOUNDERS_REF_LIST_TEMP_VARIABLE,\n \"suppress_validation_errors\": True,\n }\n ],\n \"validators\": [{\n \"condition\": {\n \"#or\": [{\n \"doc_obtain_person->_id\": {\n \"#nin\": \"@founders_ref_list\"\n }\n }, {\n \"obtain_way\": {\n \"#ne\": \"responsible_person\"\n }\n }]\n },\n \"error\": {\n \"field\": \"doc_obtain_person\",\n \"code\": 5\n }\n }]\n }\n\n DOVERENNOST_TEMPLATE = {\n \"doc_name\": DocumentTypeEnum.DT_DOVERENNOST,\n \"template_name\": \"soft_template_doverennost\",\n \"file_name\": get_test_resource_name(config, \"doverennost.tex\"),\n \"is_strict\": False,\n }\n\n DOVERENNOST_OBTAIN_TEMPLATE = {\n \"doc_name\": DocumentTypeEnum.DT_DOVERENNOST_OBTAIN,\n \"template_name\": \"soft_template_doverennost\",\n \"file_name\": get_test_resource_name(config, \"doverennost.tex\"),\n \"is_strict\": False,\n }\n\n SOGLASIE_SOBSTVENNIKOV_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_SOGLASIE_SOBSTVENNIKOV,\n \"doc_kind\": 
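DOVERENNOST2_SCHEMA's conditions compare one field against another via an "@"-prefixed operand ('"#ne": "@reg_responsible_person"'), while bare values are literals. A sketch of that operand-resolution step (the function name is invented; basestring matches the Python 2 idiom used elsewhere in these sources):

def resolve_operand(value, field_values):
    # "@name" dereferences another field; anything else is a literal.
    if isinstance(value, basestring) and value.startswith("@"):
        return field_values.get(value[1:])
    return value

# resolve_operand("@reg_responsible_person", fields) returns the referenced
# person object, which "#ne" then compares against doc_obtain_person.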
DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Заявление - Согласие собственников\",\n \"batch_statuses\": [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"conditions\": {\n \"address_type\": AddressType.AT_REAL_ESTATE_ADDRESS,\n \"address_other_owner\": True\n },\n \"fields\": [\n FULL_NAME_FIELD,\n ADDRESS_TYPE_FIELD_NR,\n ADDRESS_FIELD,\n DOC_DATE_FIELD_TODAY,\n DOC_DATE_OR_TODAY\n ]\n }\n\n SOGLASIE_SOBSTVENNIKOV_TEMPLATE = {\n \"doc_name\": DocumentTypeEnum.DT_SOGLASIE_SOBSTVENNIKOV,\n \"template_name\": \"soft_template_SOGLASIE_SOBSTVENNIKOV\",\n \"file_name\": get_test_resource_name(config, \"soglasie_sobstvennikov.tex\"),\n \"is_strict\": False,\n }\n\n GARANT_LETTER_ARENDA_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_GARANT_LETTER_ARENDA,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Гарантийное письмо (аренда)\",\n \"batch_statuses\": [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"conditions\": {\n u\"@address_type\": {\n \"#in\": [\"office_address\"]\n }\n },\n \"fields\": [\n FULL_NAME_FIELD,\n ADDRESS_TYPE_FIELD_NR,\n ADDRESS_FIELD,\n DOC_DATE_FIELD_TODAY,\n DOC_DATE_OR_TODAY\n ]\n }\n\n GARANT_LETTER_ARENDA_TEMPLATE = {\n \"doc_name\": DocumentTypeEnum.DT_GARANT_LETTER_ARENDA,\n \"template_name\": \"soft_template_garant_letter_arenda\",\n \"file_name\": get_test_resource_name(config, \"garant_letter_arenda.tex\"),\n \"is_strict\": False,\n }\n\n GARANT_LETTER_SUBARENDA_SCHEMA = {\n \"doc_name\": DocumentTypeEnum.DT_GARANT_LETTER_SUBARENDA,\n \"doc_kind\": DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\": u\"Гарантийное письмо (субаренда)\",\n \"batch_statuses\": [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"conditions\": {\n u\"@address_type\": {\n \"#in\": [\"office_address\"]\n }\n },\n \"fields\": [\n FULL_NAME_FIELD,\n ADDRESS_TYPE_FIELD_NR,\n ADDRESS_FIELD,\n DOC_DATE_FIELD_TODAY,\n DOC_DATE_OR_TODAY\n ]\n }\n\n GARANT_LETTER_SUBARENDA_TEMPLATE = {\n \"doc_name\": DocumentTypeEnum.DT_GARANT_LETTER_SUBARENDA,\n \"template_name\": \"soft_template_garant_letter_subarenda\",\n \"file_name\": get_test_resource_name(config, \"garant_letter_subarenda.tex\"),\n \"is_strict\": False\n }\n\n ################################################################################################################\n\n LLC_REG_BATCH_SCHEMA = {\n \"doc_name\": DocumentBatchTypeEnum.DBT_NEW_LLC,\n \"fields\": [\n SHORT_NAME_FIELD,\n FULL_NAME_FIELD,\n USE_FOREIGN_COMPANY_NAME_FIELD,\n USE_NATIONAL_LANGUAGE_COMPANY_NAME_FIELD,\n FOREIGN_FULL_NAME_FIELD,\n FOREIGN_SHORT_NAME_FIELD,\n NATIONAL_LANGUAGE_FULL_NAME_FIELD,\n NATIONAL_LANGUAGE_SHORT_NAME_FIELD,\n FOREIGN_LANGUAGE_FIELD,\n NATIONAL_LANGUAGE_FIELD,\n GENERAL_MANAGER_CAPTION_FIELD,\n JOB_MAIN_CODE_FIELD,\n JOB_CODE_ARRAY_FIELD,\n GENERAL_MANAGER_FIELD,\n BOARD_OF_DIRECTORS_FIELD,\n DOC_DATE_FIELD_TODAY,\n {\n \"name\": \"lawyer_check\",\n \"type\": \"DocBoolField\",\n \"default\": False\n },\n {\n \"name\": \"region\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"RFRegionsEnum\",\n \"required\": True,\n }, {\n \"name\": \"share_type\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"NumericPartEnum\",\n \"required\": True\n }, {\n \"name\": \"starter_capital\",\n \"type\": \"DocCurrencyField\",\n \"required\": True\n }, {\n \"name\": \"pravo_otchuzhdeniya_type\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"AlienationRightEnum\",\n \"required\": True\n }, {\n \"name\": \"preimusch_pravo_priobreteniya_doli_time_span\",\n \"type\": 
\"DocIntField\",\n \"required\": True,\n \"min_val\": 30\n }, {\n \"name\": \"perehod_doli_k_naslednikam_soglasie\",\n \"type\": \"DocBoolField\",\n \"required\": True\n }, {\n \"name\": \"sovet_directorov_num\",\n \"type\": \"DocIntField\",\n \"required\": False,\n \"even\": False\n }, {\n \"name\": \"general_manager_deals_max_amount\",\n \"type\": \"DocIntField\",\n \"required\": False\n }, {\n \"name\": \"general_manager_term\",\n \"type\": \"DocIntField\",\n \"min_val\": 6,\n \"max_val\": 60,\n \"required\": True\n }, {\n \"name\": \"board_of_directors_caption\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"allowed_re\": CompanyObject.RUS_COMPANY_NAME_RE\n }, {\n \"name\": \"large_deals_min_value\",\n \"type\": \"DocIntField\",\n \"min_val\": 25,\n \"max_val\": 100,\n \"required\": False\n }, {\n \"name\": \"large_deals_max_value\",\n \"type\": \"DocIntField\",\n \"min_val\": 25,\n \"max_val\": 100,\n \"required\": False\n }, {\n \"name\": \"large_property_deals_max_value\",\n \"type\": \"DocIntField\",\n \"required\": False\n }, {\n \"name\": \"large_property_deals_min_value\",\n \"type\": \"DocIntField\",\n \"required\": False\n }, {\n \"name\": \"necessary_votes_for_general_meeting_decisions\",\n \"type\": \"NecessaryVotesForGeneralMeeting\",\n \"required\": True\n }, {\n \"name\": \"selected_secretary\",\n \"type\": \"db_object\",\n \"required\": True\n }, {\n \"name\": \"selected_moderator\",\n \"type\": \"db_object\",\n \"required\": True\n }, {\n \"name\": \"tax_type\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"UsnTaxType\",\n \"required\": True\n }, {\n \"name\": \"reg_responsible_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False\n }, {\n \"name\": \"reg_responsible_founder\",\n \"type\": \"db_object\",\n \"required\": False\n }, {\n \"name\": \"doc_obtain_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": True\n }, {\n \"name\": \"doc_obtain_founder\",\n \"type\": \"db_object\",\n \"required\": True\n }, {\n \"name\": \"address\",\n \"type\": \"DocAddressField\",\n \"required\": True\n }, {\n \"name\": \"address_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False\n }, {\n \"name\": \"address_other_owner\",\n \"type\": \"DocBoolField\",\n \"default\": False\n }, {\n \"name\": \"founders\",\n \"type\": \"DocArrayField\",\n \"cls\": \"FounderUIObject\",\n \"required\": True\n }, {\n \"name\": \"taxation_type\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"TaxType\",\n \"required\": True\n }, {\n \"name\": \"registration_way\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"RegistrationWay\",\n \"required\": True\n }, {\n \"name\": \"obtain_way\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"DocumentDeliveryTypeStrEnum\",\n \"required\": True\n }, {\n \"name\": \"address_type\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"AddressType\",\n \"required\": True\n }, {\n \"name\": \"has_general_manager_contract\",\n \"type\": \"DocBoolField\",\n \"required\": False\n }, {\n \"name\": \"has_general_manager_order\",\n \"type\": \"DocBoolField\",\n \"required\": False\n }, {\n \"name\": \"general_manager_contract_number\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"min_length\": 1\n }, {\n \"name\": \"general_manager_contract_date\",\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": False\n }, {\n \"name\": \"general_manager_order_number\",\n \"type\": \"DocTextField\",\n \"required\": False,\n 
\"min_length\": 1\n }, {\n \"name\": \"general_manager_salary\",\n \"type\": \"DocCurrencyField\",\n \"required\": False\n }, {\n \"name\": \"general_manager_salary_days\",\n \"type\": \"DocArrayField\",\n \"cls\": \"DocIntField\",\n \"required\": False\n }, {\n \"name\": \"general_manager_trial_period\",\n \"type\": \"DocIntField\",\n \"required\": False\n }, {\n \"name\": \"general_manager_quit_notify_period\",\n \"type\": \"DocIntField\",\n \"required\": False\n }, {\n \"name\": \"general_manager_fixed_working_hours\",\n \"type\": \"DocBoolField\",\n \"required\": False\n }, {\n \"name\": \"general_manager_as_accountant\",\n \"type\": \"DocBoolField\",\n \"required\": False\n }, {\n \"name\": \"has_accountant_contract_order\",\n \"type\": \"DocBoolField\",\n \"required\": False\n }, {\n \"name\": \"accountant_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False\n }, {\n \"name\": \"accountant_start_work\",\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": False\n }, {\n \"name\": \"accountant_contract_number\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"min_length\": 1\n }, {\n \"name\": \"accountant_order_number\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"min_length\": 1\n }, {\n \"name\": \"accountant_salary\",\n \"type\": \"DocCurrencyField\",\n \"required\": False\n }, {\n \"name\": \"accountant_salary_days\",\n \"type\": \"DocArrayField\",\n \"cls\": \"DocIntField\",\n \"required\": False\n }, {\n \"name\": \"accountant_trial_period\",\n \"type\": \"DocIntField\",\n \"required\": False\n }, {\n \"name\": \"accountant_fixed_working_hours\",\n \"type\": \"DocBoolField\",\n \"required\": False\n }, {\n \"name\": \"ogrn\",\n \"type\": \"DocIntField\",\n \"min_val\": 1000000000000,\n \"max_val\": 9999999999999,\n \"required\": False\n }, {\n \"name\": \"inn\",\n \"type\": \"DocINNField\",\n \"required\": False\n }, {\n \"name\": \"kpp\",\n \"type\": \"DocTextField\",\n \"min_length\": 9,\n \"max_length\": 9,\n \"required\": False\n }, {\n \"name\": \"actual_address\",\n \"type\": \"DocAddressField\",\n \"required\": False\n }, {\n \"name\": \"bank_bik\",\n \"type\": \"DocTextField\",\n \"min_length\": 9,\n \"max_length\": 9,\n \"required\": False\n }, {\n \"name\": \"bank_account\",\n \"type\": \"DocTextField\",\n \"min_length\": 20,\n \"max_length\": 20,\n \"required\": False\n }, {\n \"name\": \"general_manager_working_hours\",\n \"type\": \"DocWorkingHoursField\",\n \"required\": False\n }, {\n \"name\": \"accountant_working_hours\",\n \"type\": \"DocWorkingHoursField\",\n \"required\": False\n }, {\n \"name\": \"general_manager_contract_additional_terms\",\n \"type\": \"DocAdditionalRightsField\",\n \"required\": False\n }, {\n \"name\": \"general_manager_as_accountant_order_number\",\n \"type\": \"DocTextField\",\n \"min_length\": 1,\n \"required\": False\n }, {\n \"name\": \"accountant_has_special_terms\",\n \"type\": \"DocBoolField\",\n \"default\": False\n }, {\n \"name\": \"general_manager_has_special_terms\",\n \"type\": \"DocBoolField\",\n \"default\": False\n }, {\n \"name\": \"registration_date\",\n \"type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": False\n }, {\n \"name\": \"company_email\",\n \"type\": \"DocTextField\",\n \"min_length\": 1,\n \"required\": False\n }, {\n \"name\": \"company_site\",\n \"type\": \"DocTextField\",\n \"min_length\": 10,\n \"required\": False\n }, {\n \"name\": \"company_phone\",\n \"type\": 
\"DocPhoneNumberField\",\n \"required\": False\n }, {\n \"name\": \"stamp_partner_id\",\n \"type\": \"DocTextField\",\n \"min_length\": 1,\n \"required\": False\n }, {\n \"name\": \"accounts_partner_id\",\n \"type\": \"DocTextField\",\n \"min_length\": 1,\n \"required\": False\n }\n ]\n }\n\n REGISTRATION_ADDRESS_FIELD = copy.copy(ADDRESS_FIELD)\n REGISTRATION_ADDRESS_FIELD['name'] = 'registration_address'\n\n LLC_REG_RESULT_FIELDS = [{\n \"name\": \"founder_applicant\",\n \"type\": \"calculated\",\n \"field_type\": \"db_object\",\n \"required\": False,\n \"value\": {\n \"#set\": {\n \"selected_founder\": {\n \"#pick_array_item\": {\n \"array_field\": \"founders\",\n \"conditions\": {\n \"#or\": [{\n \"founders|size\": 1,\n }, {\n \"founders|size\": {\n \"#gt\": 1\n },\n \"obtain_way\": \"responsible_person\",\n \"<loop_item>->founder\": \"@selected_moderator\"\n }, {\n \"founders|size\": {\n \"#gt\": 1\n },\n \"obtain_way\": \"founder\",\n \"<loop_item>->founder\": \"@doc_obtain_founder\"\n }, {\n \"founders|size\": {\n \"#gt\": 1\n },\n \"obtain_way\": \"mail\",\n \"<loop_item>->founder\": \"@selected_moderator\"\n }]\n }\n }\n }\n },\n \"#object\": {\n \"type\": {\n \"#field\": \"selected_founder->founder->type\"\n },\n \"_id\": {\n \"#field\": \"selected_founder->founder->_id\"\n }\n }\n }\n }, {\n \"name\": \"ifns\",\n \"type\": \"calculated\",\n \"field_type\": \"DocIntField\",\n \"required\": False,\n \"depends_on\": [\"address\"],\n \"value\": {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"address_type\": \"founder_registration_address\"\n },\n \"value\": {\n \"#field\": \"address_person->address->ifns\"\n }\n }, {\n \"conditions\": {\n \"address_type\": \"general_manager_registration_address\",\n },\n \"value\": {\n \"#field\": \"general_manager->address->ifns\"\n }\n }],\n \"default\": {\n \"value\": {\n \"#field\": \"address->ifns\"\n }\n }\n }\n }\n },\n REGISTRATION_ADDRESS_FIELD,\n {\n \"name\": \"first_work_day\",\n \"type\": \"calculated\",\n \"field_type\": \"DocDateTimeField\",\n \"input_format\": \"%Y-%m-%d\",\n \"required\": False,\n \"value\": {\n \"#field\": \"registration_date->next_working_day_p\"\n }\n }\n # {\n # \"name\" : \"ifns_reg_info\",\n # \"type\" : \"calculated\",\n # \"field_type\" : \"IfnsRegInfoField\",\n # \"required\" : False,\n # \"value\" : {\n # \"#exec\" : {\n # \"module\" : \"external_methods.llc_reg_methods\",\n # \"method\" : \"get_company_registration_info\",\n # \"kwargs\" : {\n # \"batch_id\" : {\n # \"#field\" : \"<batch_id>\"\n # }\n # }\n # }\n # }\n # }\n ]\n\n LLC_REG_DEFER_DOCS = [DocumentTypeEnum.DT_REGISTRATION_FEE_INVOICE]\n\n return {\n \"P11001_TEMPLATE\": P11001_TEMPLATE,\n \"P11001_MATCHER\": P11001_MATCHER,\n \"P11001_SCHEMA\": P11001_SCHEMA,\n \"ARTICLES_TEMPLATE\": ARTICLES_TEMPLATE,\n \"ARTICLES_SCHEMA\": ARTICLES_SCHEMA,\n \"ACT_SCHEMA\": ACT_SCHEMA,\n \"ACT_TEMPLATE\": ACT_TEMPLATE,\n \"USN_TEMPLATE\": USN_TEMPLATE,\n \"USN_MATCHER\": USN_MATCHER,\n \"USN_SCHEMA\": USN_SCHEMA,\n \"ESHN_TEMPLATE\": ESHN_TEMPLATE,\n \"ESHN_MATCHER\": ESHN_MATCHER,\n \"ESHN_SCHEMA\": ESHN_SCHEMA,\n \"DECISION_TEMPLATE\": DECISION_TEMPLATE,\n \"DECISION_SCHEMA\": DECISION_SCHEMA,\n \"PROTOCOL_SCHEMA\": PROTOCOL_SCHEMA,\n \"PROTOCOL_TEMPLATE\": PROTOCOL_TEMPLATE,\n \"CONTRACT_SCHEMA\": CONTRACT_SCHEMA,\n \"CONTRACT_TEMPLATE\": CONTRACT_TEMPLATE,\n \"REG_FEE_INVOICE_SCHEMA\": REG_FEE_INVOICE_SCHEMA,\n \"DOVERENNOST_SCHEMA\": DOVERENNOST_SCHEMA,\n \"DOVERENNOST2_SCHEMA\": DOVERENNOST2_SCHEMA,\n \"DOVERENNOST_TEMPLATE\": 
DOVERENNOST_TEMPLATE,\n \"DOVERENNOST_OBTAIN_TEMPLATE\": DOVERENNOST_OBTAIN_TEMPLATE,\n \"SOGLASIE_SOBSTVENNIKOV_SCHEMA\": SOGLASIE_SOBSTVENNIKOV_SCHEMA,\n \"SOGLASIE_SOBSTVENNIKOV_TEMPLATE\": SOGLASIE_SOBSTVENNIKOV_TEMPLATE,\n \"GARANT_LETTER_ARENDA_SCHEMA\": GARANT_LETTER_ARENDA_SCHEMA,\n \"GARANT_LETTER_ARENDA_TEMPLATE\": GARANT_LETTER_ARENDA_TEMPLATE,\n \"GARANT_LETTER_SUBARENDA_SCHEMA\": GARANT_LETTER_SUBARENDA_SCHEMA,\n \"GARANT_LETTER_SUBARENDA_TEMPLATE\": GARANT_LETTER_SUBARENDA_TEMPLATE,\n \"LLC_REG_BATCH_SCHEMA\": LLC_REG_BATCH_SCHEMA,\n \"LLC_REG_RESULT_FIELDS\": LLC_REG_RESULT_FIELDS,\n \"LLC_REG_DEFER_DOCS\": LLC_REG_DEFER_DOCS,\n \"OOO_BANK_PARTNER_APPLICATION_SCHEMA\": OOO_BANK_PARTNER_APPLICATION_SCHEMA\n }\n\n\n" }, { "alpha_fraction": 0.6873385310173035, "alphanum_fraction": 0.6950904130935669, "avg_line_length": 20.5, "blob_id": "4de0abed2fba878d8872031de989f48ab2004ec7", "content_id": "db55b814a751ddcad876ec82cffd75344b343d35", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 387, "license_type": "no_license", "max_line_length": 61, "num_lines": 18, "path": "/app/deployment_migrations/migration_list/20151006_change_subscription.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom fw.db.sql_base import db as sqldb\n\n\ndef forward(config, logger):\n logger.debug(u\"Create new models\")\n\n sqldb.session.close()\n\n sqldb.engine.execute(u\"\"\"ALTER TABLE payment_subscription\n ADD COLUMN pay_record_id INTEGER NOT NULL DEFAULT 0,\n ADD COLUMN payment_provider INTEGER NOT NULL DEFAULT 0\n;\"\"\")\n\n\ndef rollback(config, logger):\n pass\n" }, { "alpha_fraction": 0.6057007312774658, "alphanum_fraction": 0.606564462184906, "avg_line_length": 37.91596603393555, "blob_id": "f446855cc64d1be92d36ec89f5acf860c0166521", "content_id": "aff55856065f72b36f51690ba8dbc34622859bf4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4631, "license_type": "no_license", "max_line_length": 107, "num_lines": 119, "path": "/app/deployment_migrations/migration_list/20150912_migrate_partners_models.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\n\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.db_fields import DocumentBatchDbObject\nfrom services.partners.models import StampPartnersObject, AccountantPartnersObject, BankPartnersObject, \\\n BankPartnersServiceObject, BankPartnerRequestObject\n\n\ndef forward(config, logger):\n logger.debug(u\"Migrate partners models\")\n\n stamp_partners = db['stamp_partners']\n StampPartnersObject.query.delete()\n sqldb.session.commit()\n for old_partner in stamp_partners.find():\n new_partner = StampPartnersObject(\n id=str(old_partner['_id']),\n region=old_partner.get('region'),\n enabled=old_partner.get('enabled', False),\n sort_index=old_partner.get('sort_index', 1),\n link=old_partner.get('region', ''),\n banner=old_partner.get('banner', ''),\n title=old_partner.get('title', ''),\n created=old_partner.get('created', datetime.utcnow())\n )\n sqldb.session.add(new_partner)\n sqldb.session.commit()\n\n accountant_partners = db['accountant_partners']\n AccountantPartnersObject.query.delete()\n sqldb.session.commit()\n for old_partner in accountant_partners.find():\n new_partner = AccountantPartnersObject(\n id=str(old_partner['_id']),\n type=old_partner['type'],\n 
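Two things stand out in the partner migration that begins above. First, db['stamp_partners'] relies on a Mongo handle named db that the snippet never imports (only sqldb is imported), so it is presumably injected by the migration runner. Second, every link= kwarg is filled from the old document's 'region' key, which reads like a copy-paste slip since region= is populated separately. A corrected constructor sketch, assuming the Mongo documents actually carry a 'link' key:

new_partner = StampPartnersObject(
    id=str(old_partner['_id']),
    region=old_partner.get('region'),
    enabled=old_partner.get('enabled', False),
    sort_index=old_partner.get('sort_index', 1),
    link=old_partner.get('link', ''),    # was .get('region', '') above
    banner=old_partner.get('banner', ''),
    title=old_partner.get('title', ''),
    created=old_partner.get('created', datetime.utcnow())
)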
created=old_partner.get('created', datetime.utcnow()),\n link=old_partner.get('region', ''),\n title=old_partner.get('title', ''),\n banner=old_partner.get('banner', ''),\n enabled=old_partner.get('enabled', False),\n sort_index=old_partner.get('sort_index', 1),\n region=old_partner.get('region'),\n city=old_partner.get('city')\n )\n sqldb.session.add(new_partner)\n sqldb.session.commit()\n\n BankPartnerRequestObject.query.delete()\n BankPartnersServiceObject.query.delete()\n BankPartnersObject.query.delete()\n sqldb.session.commit()\n\n bank_partners = db['bank_partners']\n for old_partner in bank_partners.find():\n new_partner = BankPartnersObject(\n id=str(old_partner['_id']),\n created=old_partner.get('created', datetime.utcnow()),\n link=old_partner.get('region', ''),\n title=old_partner.get('title', ''),\n banner=old_partner.get('banner', ''),\n enabled=old_partner.get('enabled', False),\n sort_index=old_partner.get('sort_index', 1),\n region=old_partner.get('region'),\n city=old_partner.get('city'),\n conditions=old_partner.get('conditions')\n )\n sqldb.session.add(new_partner)\n sqldb.session.commit()\n\n bank_partners_service = db['bank_partners_service']\n for old_svc in bank_partners_service.find():\n bank_partner_id = str(old_svc.get('bank_partner_id', '')) if old_svc.get('bank_partner_id') else ''\n if not bank_partner_id:\n continue\n bp = BankPartnersObject.query.filter_by(id=bank_partner_id).first()\n if not bp:\n continue\n new_svc = BankPartnersServiceObject(\n id=str(old_svc['_id']),\n type=old_svc['type'],\n fields=old_svc['fields'],\n email=old_svc.get('email'),\n template_name=old_svc.get('template_name'),\n config=old_svc.get('config'),\n bank_partner_id=bank_partner_id\n )\n sqldb.session.add(new_svc)\n sqldb.session.commit()\n\n bank_partners_request = db['bank_partners_request']\n for old_req in bank_partners_request.find():\n batch_id = str(old_req['batch_id'])\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id).first()\n if not batch:\n continue\n\n bank_partner_id=str(old_req.get('bank_partner_id', ''))\n if not bank_partner_id:\n continue\n bp = BankPartnersObject.query.filter_by(id=bank_partner_id).first()\n if not bp:\n continue\n\n new_req = BankPartnerRequestObject(\n bank_partner_id=bank_partner_id,\n batch_id=batch_id,\n bank_partner_caption=old_req.get('bank_partner_caption'),\n sent_date=old_req['sent_date'],\n status=old_req['status'],\n bank_contact_phone_general_manager=old_req.get('bank_contact_phone_general_manager'),\n bank_contact_phone=old_req.get('bank_contact_phone'),\n send_private_data=old_req.get('send_private_data')\n )\n sqldb.session.add(new_req)\n sqldb.session.commit()\n\ndef rollback(config, logger):\n pass\n" }, { "alpha_fraction": 0.6672077775001526, "alphanum_fraction": 0.6715368032455444, "avg_line_length": 36.72108840942383, "blob_id": "d0945723f8446cc0290c5006b80a08bf397e4ea6", "content_id": "3c20823c750d0213a1134c393cf456d2a3b3a4de", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5544, "license_type": "no_license", "max_line_length": 113, "num_lines": 147, "path": "/app/services/llc_reg/__init__.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport os\n\nimport jinja2\n\nfrom fw.documents.batch_manager import BatchManager\nfrom fw.documents.doc_requisites_storage import DocRequisitiesStorage\nfrom fw.documents.enums import DocumentBatchTypeEnum, DocumentTypeEnum\nfrom 
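The migration above commits once per converted document, which multiplies round trips on large collections; one commit per collection with periodic flushes usually suffices and keeps each collection's conversion atomic. A sketch of that loop shape, where convert() stands in for the per-document mapping shown above:

for i, old_doc in enumerate(source_collection.find()):
    new_obj = convert(old_doc)          # convert() is hypothetical
    if new_obj is not None:             # skipped rows, as in the original
        sqldb.session.add(new_obj)
    if i % 500 == 499:
        sqldb.session.flush()           # keep the unit of work bounded
sqldb.session.commit()                  # single commit per collection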
fw.documents.fields.simple_doc_fields import DocMultiDeclensionField\nfrom services.llc_reg.documents.enums import InitialCapitalDepositTypeEnum\nfrom services.llc_reg.llc_reg_manager import LlcRegBatchManager\n\n\ndef _init_doc_requisities(config):\n from services.llc_reg.documents.initial_db_data import load_data\n from services.llc_reg.documents.third_stage_llc_reg_initial_db_data import load_data as third_stage_load_data\n data = load_data(config)\n data.update(third_stage_load_data(config))\n\n templates = (\n \"P11001_TEMPLATE\",\n \"ARTICLES_TEMPLATE\",\n \"ACT_TEMPLATE\",\n \"USN_TEMPLATE\",\n \"ESHN_TEMPLATE\",\n \"DECISION_TEMPLATE\",\n \"CONTRACT_TEMPLATE\",\n \"DOVERENNOST_TEMPLATE\",\n \"DOVERENNOST_OBTAIN_TEMPLATE\",\n \"PROTOCOL_TEMPLATE\",\n \"SOGLASIE_SOBSTVENNIKOV_TEMPLATE\",\n \"GARANT_LETTER_ARENDA_TEMPLATE\",\n \"GARANT_LETTER_SUBARENDA_TEMPLATE\",\n \"GENERAL_MANAGER_CONTRACT_TEMPLATE\",\n \"GENERAL_MANAGER_ORDER_TEMPLATE\",\n \"ACCOUNTANT_CONTRACT_TEMPLATE\",\n \"ACCOUNTANT_IMPOSITION_ORDER_TEMPLATE\",\n \"ACCOUNTANT_ORDER_TEMPLATE\",\n \"ROSSTAT_CLAIM_TEMPLATE\",\n \"FSS_CLAIM_TEMPLATE\",\n \"PFR_CLAIM_TEMPLATE\",\n \"FOUNDERS_LIST_TEMPLATE\",\n \"COMPANY_DETAILS_TEMPLATE\"\n )\n\n for template_name in templates:\n DocRequisitiesStorage.add_template(data[template_name]['doc_name'], data[template_name])\n\n schemas = (\n \"P11001_SCHEMA\",\n \"ARTICLES_SCHEMA\",\n \"ACT_SCHEMA\",\n \"USN_SCHEMA\",\n \"ESHN_SCHEMA\",\n \"DECISION_SCHEMA\",\n \"PROTOCOL_SCHEMA\",\n \"CONTRACT_SCHEMA\",\n \"REG_FEE_INVOICE_SCHEMA\",\n \"DOVERENNOST_SCHEMA\",\n \"DOVERENNOST2_SCHEMA\",\n \"SOGLASIE_SOBSTVENNIKOV_SCHEMA\",\n \"GARANT_LETTER_ARENDA_SCHEMA\",\n \"GARANT_LETTER_SUBARENDA_SCHEMA\",\n \"LLC_REG_BATCH_SCHEMA\",\n \"OOO_BANK_PARTNER_APPLICATION_SCHEMA\",\n \"GENERAL_MANAGER_CONTRACT_SCHEMA\",\n \"GENERAL_MANAGER_ORDER_SCHEMA\",\n \"ACCOUNTANT_CONTRACT_SCHEMA\",\n \"ACCOUNTANT_IMPOSITION_ORDER_SCHEMA\",\n \"ACCOUNTANT_ORDER_SCHEMA\",\n \"ROSSTAT_CLAIM_SCHEMA\",\n \"FSS_CLAIM_SCHEMA\",\n \"PFR_CLAIM_SCHEMA\",\n \"FOUNDERS_LIST_SCHEMA\",\n \"COMPANY_DETAILS_SCHEMA\"\n )\n\n for schema_name in schemas:\n DocRequisitiesStorage.add_schema(data[schema_name]['doc_name'], data[schema_name])\n\n matchers = (\n \"P11001_MATCHER\",\n \"USN_MATCHER\",\n \"ESHN_MATCHER\",\n \"ACCOUNTANT_ORDER_MATCHER\"\n )\n\n for matcher_name in matchers:\n DocRequisitiesStorage.add_field_matcher(data[matcher_name]['doc_name'], data[matcher_name])\n\n bd = dict(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n doc_types=[\n DocumentTypeEnum.DT_P11001,\n DocumentTypeEnum.DT_ARTICLES,\n DocumentTypeEnum.DT_USN,\n DocumentTypeEnum.DT_DECISION,\n DocumentTypeEnum.DT_PROTOCOL,\n DocumentTypeEnum.DT_ESHN,\n DocumentTypeEnum.DT_CONTRACT,\n DocumentTypeEnum.DT_REGISTRATION_FEE_INVOICE,\n DocumentTypeEnum.DT_DOVERENNOST,\n DocumentTypeEnum.DT_DOVERENNOST_OBTAIN,\n DocumentTypeEnum.DT_SOGLASIE_SOBSTVENNIKOV,\n DocumentTypeEnum.DT_GARANT_LETTER_ARENDA,\n DocumentTypeEnum.DT_GARANT_LETTER_SUBARENDA,\n DocumentTypeEnum.DT_GENERAL_MANAGER_CONTRACT,\n DocumentTypeEnum.DT_GENERAL_MANAGER_ORDER,\n DocumentTypeEnum.DT_ACCOUNTANT_CONTRACT,\n DocumentTypeEnum.DT_ACCOUNTANT_IMPOSITION_ORDER,\n DocumentTypeEnum.DT_ACCOUNTANT_ORDER,\n DocumentTypeEnum.DT_ROSSTAT_CLAIM,\n DocumentTypeEnum.DT_FSS_CLAIM,\n DocumentTypeEnum.DT_PFR_CLAIM,\n DocumentTypeEnum.DT_FOUNDERS_LIST,\n DocumentTypeEnum.DT_COMPANY_DETAILS\n ],\n result_fields=data['LLC_REG_RESULT_FIELDS'],\n deferred_render_docs=data['LLC_REG_DEFER_DOCS'],\n 
fields=data['LLC_REG_BATCH_SCHEMA']['fields']\n )\n\n DocRequisitiesStorage.add_batch_descriptor(DocumentBatchTypeEnum.DBT_NEW_LLC, bd)\n\n\ndef register(app, jinja_env, class_loader, **kwargs):\n search_path = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), u\"templates\"))\n jinja_env.loader.loaders.append(jinja2.FileSystemLoader(search_path))\n\n jinja_env.globals.update({\n 'DocMultiDeclensionField': DocMultiDeclensionField,\n 'InitialCapitalDepositTypeEnum': InitialCapitalDepositTypeEnum\n })\n\n class_loader.POSSIBLE_LOCATIONS.append('services.llc_reg.documents')\n class_loader.POSSIBLE_LOCATIONS.append('services.llc_reg.documents.enums')\n class_loader.POSSIBLE_LOCATIONS.append('services.llc_reg.documents.general_doc_fields')\n class_loader.POSSIBLE_LOCATIONS.append('services.llc_reg.documents.llc_gov_forms_adapters')\n class_loader.POSSIBLE_LOCATIONS.append('services.llc_reg.documents.llc_validators')\n\n BatchManager.register_manager(DocumentBatchTypeEnum.DBT_NEW_LLC, LlcRegBatchManager)\n\n _init_doc_requisities(app.config)\n\ndef get_manager_command_locations():\n return [os.path.normpath(os.path.abspath(os.path.join(os.path.dirname(__file__), 'manage_commands')))]" }, { "alpha_fraction": 0.482519268989563, "alphanum_fraction": 0.49357327818870544, "avg_line_length": 29.873016357421875, "blob_id": "1ef3412dacca08e6cbcc3dc4deed9b52bac90233", "content_id": "c8d2f1e1e0b9546daf59780a19ac6267cd9b462a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3890, "license_type": "no_license", "max_line_length": 120, "num_lines": 126, "path": "/app/services/ifns/okvad_commands.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport shlex\nimport subprocess\nimport os\nimport codecs\nimport re\nfrom fw.catalogs.models import GeoCities\nfrom fw.catalogs.models import GeoRanges\nfrom fw.db.sql_base import db as sqldb\nfrom manage_commands import BaseManageCommand, get_single\n\n\nclass UpdateGeoDbFromFileCommand(BaseManageCommand):\n NAME = \"update_geo\"\n\n class FilenameSimpleValidator(object):\n def validate(self, val):\n if not os.path.exists(val):\n return False\n return True\n\n def get_value(self, value):\n return value\n\n def run(self):\n\n file_name = get_single(u'File name: ', validator=UpdateGeoDbFromFileCommand.FilenameSimpleValidator(),\n error_hint=u\"File not found\")\n\n tmp_dir = '/tmp/geo_files'\n if not os.path.exists(tmp_dir):\n os.makedirs(tmp_dir)\n\n subprocess.call(shlex.split('tar -xzvf %s -C %s' % (file_name, tmp_dir)))\n\n cities_file_name = os.path.join(tmp_dir, \"cities.txt\")\n data_file_name = os.path.join(tmp_dir, \"cidr_optim.txt\")\n\n if not os.path.exists(cities_file_name) or not os.path.exists(data_file_name):\n self.logger.error(\"missing required file(s)\")\n return\n\n cities_file = codecs.open(cities_file_name, 'r', 'cp1251')\n cities = cities_file.read()\n cities_file.close()\n\n data_file = codecs.open(data_file_name, 'r', 'cp1251')\n data = data_file.read()\n data_file.close()\n\n os.unlink(cities_file_name)\n os.unlink(data_file_name)\n\n self.logger.info('processing')\n\n cities_objects = []\n\n for line in cities.split('\\n'):\n line = line.strip()\n if not line:\n continue\n match = re.search(ur'(\\d+)\\t(.+)\\t(.+)\\t(.+)\\t(.+)\\t(.+)', line)\n if not match:\n self.logger.warn(u\"Failed to process: %s\" % line)\n continue\n\n cid = int(match.groups(0)[0])\n city_name = match.groups(0)[1]\n region = 
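register() above appends a FileSystemLoader to jinja_env.loader.loaders, which only works if the application built its environment around a jinja2.ChoiceLoader. A minimal compatible setup (the paths here are placeholders):

import jinja2

env = jinja2.Environment(loader=jinja2.ChoiceLoader([
    jinja2.FileSystemLoader('templates'),            # base application templates
]))
# Each service then extends the search path at registration time:
env.loader.loaders.append(jinja2.FileSystemLoader('services/llc_reg/templates'))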
match.groups(0)[2]\n # district = match.groups(0)[3]\n lat = match.groups(0)[4]\n lng = match.groups(0)[5]\n cities_objects.append({\n 'cid': cid,\n 'name': city_name,\n 'region': region,\n 'lat': lat,\n 'lng': lng\n })\n\n geo_ranges = []\n for line in data.split('\\n'):\n line = line.strip()\n if not line:\n continue\n match = re.search(\n ur'(\\d+)\\t(\\d+)\\t(\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3} - \\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3})\\t(.+)\\t(.+)',\n line)\n if not match:\n self.logger.warn(u\"Failed to process: %s\" % line)\n continue\n cid = match.groups(0)[4]\n if not cid.isdigit():\n continue\n geo_ranges.append({\n 'start': int(match.groups(0)[0]),\n 'end': int(match.groups(0)[1]),\n 'cid': int(cid)\n })\n\n for g in GeoCities.query.filter():\n g.delete()\n sqldb.session.commit()\n for g in GeoRanges.query.filter():\n g.delete()\n sqldb.session.commit()\n\n for city in cities_objects:\n new_gc = GeoCities(\n name=city['name'],\n cid=city['cid'],\n region=city['region'],\n lat=city['lat'],\n lng=city['lng']\n )\n sqldb.session.add(new_gc)\n sqldb.session.commit()\n\n for geo in geo_ranges:\n new_gr = GeoRanges(\n cid=geo['cid'],\n start=geo['start'],\n end=geo['end']\n )\n sqldb.session.add(new_gr)\n sqldb.session.commit()\n" }, { "alpha_fraction": 0.6106171011924744, "alphanum_fraction": 0.6122338771820068, "avg_line_length": 29.170732498168945, "blob_id": "493c8561ee81730134ecdc3beaac2d4fb0439f9a", "content_id": "50b6b50dc9f4829be7f2cb3323939c83bccd5e5b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3711, "license_type": "no_license", "max_line_length": 92, "num_lines": 123, "path": "/app/services/llc_reg/documents/llc_reg_methods.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\nfrom decimal import Decimal\nfrom flask import current_app\nfrom common_utils import num_word\nfrom fw.catalogs.models import BikCatalog\nfrom services.ifns.data_model.enums import IfnsRegStatusEnum\nfrom services.ifns.data_model.models import IfnsBookingObject\nfrom services.llc_reg.documents.enums import FounderStrTypeEnum\n\n\ndef get_company_registration_info(batch_id=None):\n null_value = {\n \"status\": IfnsRegStatusEnum.IRS_UNKNOWN\n }\n if not batch_id:\n return null_value\n booking = IfnsBookingObject.query.filter(\n IfnsBookingObject.batch_id==batch_id,\n # todo: add ifns service id, reg_date date range\n IfnsBookingObject.reg_info.__ne__(None)\n ).first()\n if booking:\n result = {\n \"status\": booking.reg_info.get('status', 'unknown')\n }\n try:\n reg_date = booking.reg_info.get('reg_date', None)\n if isinstance(reg_date, basestring):\n reg_date = datetime.strptime(reg_date, \"%d.%m.%Y\")\n if reg_date:\n result['reg_date'] = reg_date\n except Exception:\n current_app.logger.exception(u\"Failed to get date\")\n\n try:\n ogrn = booking.reg_info.get('ogrn', None)\n if ogrn is not None:\n result['ogrn'] = ogrn\n except Exception:\n current_app.logger.exception(u\"Failed to get ogrn\")\n\n return result\n\n return null_value\n\n\ndef get_bank_info(bank_bik=None):\n if not bank_bik:\n return {}\n\n bank_bik = unicode(bank_bik)\n if not bank_bik.isdigit():\n return {}\n\n info = BikCatalog.query.filter_by(bik=bank_bik).scalar()\n return {\n '_id': info.id,\n 'name': info.name,\n 'okpo': info.okpo,\n 'bik': info.bik,\n 'phone': info.phone,\n 'address': info.address,\n 'kor_account': info.kor_account\n } if info else {}\n\n\ndef 
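Two notes on the geo import above. Clearing the tables row by row with g.delete() presumes an instance-level delete() on the model base (not visible here); the usual SQLAlchemy spelling is a bulk query delete. And since cidr_optim.txt stores each range as integer bounds, a dotted-quad address must be packed the same way before it can be matched against GeoRanges.start <= n <= GeoRanges.end:

# Bulk clearing, instead of per-row delete():
GeoCities.query.delete()
GeoRanges.query.delete()
sqldb.session.commit()

def ip_to_int(ip):
    # Pack a dotted quad into the integer form used by the range table.
    a, b, c, d = (int(p) for p in ip.split('.'))
    return (a << 24) | (b << 16) | (c << 8) | d

# ip_to_int('8.8.8.8') == 134744072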
num_to_text(value):\n if not isinstance(value, int):\n return\n\n return num_word(value)\n\ndef _get_person_full_name(person_id):\n from fw.documents.db_fields import PrivatePersonDbObject\n person = PrivatePersonDbObject.query.filter_by(id=person_id).first()\n if person:\n full_name = u\"%s %s\" % (person.surname, person.name)\n if person.patronymic:\n full_name += u\" \" + person.patronymic\n return full_name\n\ndef check_founder_has_same_fio(founders=None, founder=None):\n if not founder or not founders:\n return False\n\n if len(founders) < 2:\n return False\n\n if founder.get('type', None) != FounderStrTypeEnum.FST_PERSON:\n return False\n\n for founder_item in founders:\n item = founder_item.get('founder', {})\n if item.get('type', None) != FounderStrTypeEnum.FST_PERSON:\n continue\n\n if founder['_id'] == item['_id']:\n continue\n founder_name = _get_person_full_name(founder['_id'])\n item_name = _get_person_full_name(item['_id'])\n if founder_name == item_name:\n return True\n\n return False\n\n\ndef is_starter_capital_dividable(founder_share=None, starter_capital=None, share_type=None):\n if not founder_share or not starter_capital or share_type != \"fraction\":\n return False\n starter_capital_value = starter_capital\n # noinspection PyTypeChecker\n if '.' not in founder_share:\n founder_share = founder_share + '.0'\n flor = int(founder_share.split('.')[1])\n if flor == 0:\n return False\n div_result = starter_capital_value / flor\n div_str = str(div_result)\n if '.' not in div_str:\n return True\n min, maj = div_str.split('.')\n return len(maj) < 3\n" }, { "alpha_fraction": 0.5516423583030701, "alphanum_fraction": 0.5712392926216125, "avg_line_length": 39.793739318847656, "blob_id": "4668b20757e2db6d655245efdc419bd8b1359d80", "content_id": "a8bf45ab78f95ff87f757f9a17b0379f71c0b780", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 29719, "license_type": "no_license", "max_line_length": 169, "num_lines": 703, "path": "/app/services/ifns/ifns_manager.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom copy import deepcopy\nfrom datetime import datetime, timedelta\nimport json\nimport logging\n\nfrom lxml import etree\nimport requests\nimport html5lib\nfrom lxml.cssselect import CSSSelector\nfrom flask import current_app\n\nfrom common_utils import MONTHS, int_to_ifns\nfrom fw.db.sql_base import db as sqldb\nfrom fw.utils import address_utils\nfrom fw.utils.address_utils import prepare_key\nfrom fw.api import errors\nfrom services.ifns.data_model.models import IfnsBookingObject, IfnsCatalogObject\n\n\ndef discard_booking(batch, logger):\n try:\n batch_id = batch.id\n for booking in IfnsBookingObject.query.filter_by(batch_id=batch_id, _discarded=False):\n if not booking.reg_info or 'status' not in booking.reg_info or booking.reg_info['status'] == \"registered\":\n continue\n booking._discarded = True\n ifns_reg_info = (batch.result_fields or {}).get('ifns_reg_info', None)\n if ifns_reg_info:\n res_fields = batch.result_fields\n del res_fields['ifns_reg_info']\n batch.result_fields = res_fields\n sqldb.session.commit()\n break\n except Exception:\n logger.exception(u\"Failed to discard ifns booking\")\n\n\ndef get_ifns_data(ifns_code):\n return IfnsCatalogObject.query.filter_by(code=int(ifns_code)).first()\n\n\nclass IfnsDescription(object):\n def __init__(self, json_data, root=True):\n if root:\n self.code = json_data[\"kod\"]\n self.naimk = json_data[\"naimk\"]\n 
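In is_starter_capital_dividable above, 'min, maj = div_str.split(".")' shadows the min builtin and the names are swapped: the part whose length is tested is actually the fractional part. The check appears to intend "the founder's slice of the capital resolves to whole kopecks", which Decimal states more directly (the function name is invented, and denominator stands in for whatever the share arithmetic supplies):

from decimal import Decimal

def divides_into_kopecks(starter_capital_value, denominator):
    # True when capital / denominator needs at most two decimal places.
    quotient = Decimal(starter_capital_value) / Decimal(denominator)
    return quotient == quotient.quantize(Decimal('0.01'))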
self.plat = deepcopy(json_data[\"plat\"])\n self.rof = IfnsDescription(json_data[\"rof\"], False)\n self.rou = IfnsDescription(json_data[\"rou\"], False)\n else:\n self.code = json_data[\"code\"]\n self.naimk = json_data[\"naimk\"]\n\n\ndef get_ifns_by_address(address, service_nalog_ru_url):\n if not address or not isinstance(address, basestring):\n return\n\n address_full = current_app.external_tools.get_detailed_address(address)\n if not address_full:\n return\n # noinspection PyUnresolvedReferences\n suggestions = address_full.get(\"suggestions\", [])\n if not suggestions:\n return\n\n tax_office = unicode(suggestions[0]['data'][\"tax_office\"])\n if not tax_office:\n return\n\n return get_ifns_by_code(tax_office, service_nalog_ru_url)\n\n\ndef get_ifns_by_code(tax_office, service_nalog_ru_url):\n cache_key = address_utils.prepare_key('ifns_no_' + unicode(tax_office))\n result_text = current_app.external_tools.cache.get(cache_key)\n if not result_text:\n url = u\"%s/addrno.do?l=6&g=%s\" % (service_nalog_ru_url, int_to_ifns(tax_office))\n headers = {\n u\"Referer\": u\"%s/addrno.do\" % service_nalog_ru_url,\n u\"Accept\": \"application/json, text/javascript, */*; q=0.01\"\n }\n r = requests.get(url, headers=headers, timeout=20)\n if r.status_code != 200:\n return u\"\"\n result_text = r.text\n current_app.external_tools.cache.set(cache_key, result_text, 3600 * 24)\n\n try:\n result = json.loads(result_text)\n return IfnsDescription(result[\"res\"])\n except Exception:\n pass\n\n\ndef get_nalog_ru_time_slots(person_data, company_data, internal_ifns_number, internal_ifns_service, logger):\n s = requests.Session()\n result = s.get(u'http://order.nalog.ru/details/', timeout=20)\n if result.status_code != 200:\n logger.error(u\"Невозможно начать сессию с сервером order.nalog.ru\")\n raise errors.ServerUnavailable()\n\n if company_data:\n data = {\n \"ctl00$LastName\": company_data['name'],\n \"ctl00$inn\": company_data['inn'],\n \"ctl00$phone\": company_data['phone'],\n \"ctl00$email\": company_data['email'],\n \"__VIEWSTATE\": u\"\",\n \"ctl00$face\": u\"0\",\n \"ctl00$btNext\": \"\"\n }\n # start session\n result = s.post(u'http://order.nalog.ru/details/', data=data, timeout=20)\n if result.status_code != 200 or not result.text:\n logger.error(u\"order.nalog.ru вернул неожиданный код: %s\" % unicode(result.status_code))\n raise errors.ServerUnavailable()\n elif person_data:\n data = {\n \"ctl00$LastName\": person_data['surname'],\n \"ctl00$FirstName\": person_data['name'],\n \"ctl00$SecondName\": person_data['patronymic'] or u\"\",\n \"ctl00$inn\": person_data['inn'],\n \"ctl00$phone\": person_data['phone'] or u\"\",\n \"ctl00$email\": person_data['email'],\n \"__VIEWSTATE\": u\"\",\n \"ctl00$face\": u\"1\",\n \"ctl00$btNext\": \"\"\n }\n result = s.post(u'http://order.nalog.ru/details/', data=data, timeout=20)\n if result.status_code != 200 or not result.text:\n logger.error(u\"order.nalog.ru вернул неожиданный код: %s\" % unicode(result.status_code))\n raise errors.ServerUnavailable()\n else:\n logger.error(u\"Invalid parameters\")\n raise errors.ServerError()\n\n fns, sub_service = internal_ifns_number, internal_ifns_service # get_ifns_internal_id_by_ifns_name(s, region_name, reg_ifns_name, not company_data, logger, service)\n service = 0\n is_multi_sub_service = 0\n\n cb_param = 'c0:%d;%d;%d;%d' % (\n sub_service, is_multi_sub_service, (service if is_multi_sub_service else sub_service), fns)\n data = {\n \"__CALLBACKID\": u\"ctl00$cpday\",\n \"__CALLBACKPARAM\": cb_param,\n 
\"__EVENTTARGET\": u\"\",\n \"__EVENTARGUMENT\": \"\",\n \"__VIEWSTATE\": u\"\",\n }\n\n result = s.post(u'http://order.nalog.ru/fns_service/', data=data, timeout=20)\n if result.status_code != 200 or not result.content:\n logger.error(u\"order.nalog.ru вернул неожиданный ответ\")\n raise errors.ServerUnavailable()\n\n str_data = result.text[26:-3].encode('utf-8').decode('string_escape').replace('!-\\\\-', '!--').replace('/-\\\\-',\n '/--').replace(\n '\\\\/script', '/script')\n content = u\"<!DOCTYPE html><html><head><title></title></head><body>%s</body></html>\" % str_data.decode('utf-8')\n root = html5lib.parse(content, treebuilder='lxml', namespaceHTMLElements=False)\n\n year = None\n month = None\n # noinspection PyCallingNonCallable\n for item in CSSSelector('#ctl00_cpday_day_T')(root):\n item_text_parts = item.text.split(' ')\n if len(item_text_parts) < 2:\n logger.error(u\"Ожидалась дата, получили %s\" % item.text)\n raise errors.ServerUnavailable(u\"Invalid nalog.ru service return content\")\n try:\n month = MONTHS[item_text_parts[0].strip()]\n year = int(item_text_parts[1].strip())\n break\n except Exception:\n logger.error(u\"Не удалось распарсить дату: %s\" % item.text)\n raise errors.ServerUnavailable(u\"Invalid nalog.ru service return content\")\n\n if not month or not year:\n logger.error(u\"Дату так и не получили\")\n raise Exception(u\"Invalid nalog.ru service return content\")\n\n day_prev = -1\n days = []\n first = True\n #noinspection PyCallingNonCallable\n for item in CSSSelector('#ctl00_cpday_day_mt td.dxeCalendarDay')(root):\n #logger.info('month: %s, item:%s' % (str(month), stringify_children(item)))\n classes = filter(lambda x: not not x, [i.strip() for i in item.attrib['class'].split(' ')])\n #logger.info('classes:%s' % unicode(classes))\n\n try:\n day = int(item.text)\n if first and (23 <= day <= 31):\n month -= 1 # (facepalm)\n first = False\n except Exception:\n logger.error(u\"Invalid nalog.ru service response: %s\" % unicode(item.text))\n raise errors.ServerUnavailable(u\"Invalid nalog.ru service response: %s\" % unicode(item.text))\n if day_prev > day:\n month += 1\n #logger.info('increase month:%s'%str(month))\n if month > 12:\n year += 1\n month = 1\n day_prev = day\n if 'dxeCalendarOutOfRange' in classes or 'dxeCalendarToday' in classes:\n #logger.info('skip')\n continue\n d = datetime(year, month, day)\n if d not in (datetime(2015, 5, 1), datetime(2015, 5, 2), datetime(2015, 5, 3), datetime(2015, 5, 4),\n datetime(2015, 5, 9), datetime(2015, 5, 10), datetime(2015, 5, 11)):\n days.append(d)\n\n if not days:\n logger.error(u\"Invalid nalog.ru service response: no days returned\")\n raise errors.ServerUnavailable(u\"Invalid nalog.ru service response: no days returned\")\n\n # ban check\n d = days[0]\n result = s.post('http://order.nalog.ru/fns_service/', data={\n \"__CALLBACKID\": u\"ctl00$clBanCheck\",\n \"__CALLBACKPARAM\": u\"c0:%s.%s.%s;%s;%s;0\" % (\n unicode(d.year), unicode(d.month), unicode(d.day), unicode(180), unicode(fns)),\n \"__EVENTARGUMENT\": u\"\",\n \"__EVENTTARGET\": u\"\",\n \"__VIEWSTATE\": u\"\",\n }, timeout=20)\n\n if result.status_code != 200 or not result.content:\n logger.error(u\"order.nalog.ru вернул неожиданный ответ\")\n raise errors.ServerUnavailable()\n\n if \"'data':'0'\" in result.text:\n raise errors.MaximumRegistrationsExceeded()\n\n day_info = []\n # get time slots\n for d in days:\n part = u\"%d.%d.%d;%d;%d;%d;%d\" % (\n d.year, d.month, d.day, service if is_multi_sub_service else sub_service, fns, 
is_multi_sub_service,\n sub_service)\n part2 = u\"14|CUSTOMCALLBACK%d|\" % len(part) + part\n cb_param = u\"c0:KV|2;[];GB|%d;\" % len(part2) + part2 + \";\"\n\n data = {\n \"__CALLBACKID\": u\"ctl00$gvTime\",\n \"__CALLBACKPARAM\": cb_param,\n \"__EVENTARGUMENT\": u\"\",\n \"__EVENTTARGET\": u\"\",\n \"__VIEWSTATE\": u\"\",\n }\n\n result = s.post('http://order.nalog.ru/fns_service/', data=data, timeout=20)\n if result.status_code != 200 or not result.content:\n logger.error(u\"order.nalog.ru вернул неожиданный ответ\")\n raise errors.ServerUnavailable()\n\n data_str = result.text[19:].encode('utf-8').decode('string_escape').replace('!-\\\\-', '!--').replace('/-\\\\-',\n '/--').replace(\n '\\\\/script', '/script')\n content = u\"<!DOCTYPE html><html><head><title></title></head><body>%s</body></html>\" % data_str.decode('utf-8')\n root = html5lib.parse(content, treebuilder='lxml', namespaceHTMLElements=False)\n\n time_slots = []\n #noinspection PyCallingNonCallable\n for item in CSSSelector('#ctl00_gvTime_DXMainTable tr')(root):\n #noinspection PyCallingNonCallable\n tds = CSSSelector('td')(item)\n if len(tds) > 1:\n time_str = tds[0].text\n #noinspection PyCallingNonCallable\n spans = CSSSelector('span')(tds[1])\n if len(spans):\n span_style = spans[0].attrib['style']\n available = 'block' not in span_style\n #print(time_str, available)\n if available:\n hour, minutes = time_str.strip().replace('00', '0').split(':')\n dt = datetime(2014, 1, 1, int(hour), int(minutes))\n\n time_slots.append({\n \"slot_start\": dt.strftime(\"%H:%M\"),\n \"slot_end\": (dt + timedelta(seconds=1800)).strftime(\"%H:%M\"),\n })\n\n if time_slots:\n day_info.append({\n 'date': d.strftime(\"%Y-%m-%d\"),\n 'time_slots': time_slots\n })\n return day_info\n\n\ndef book_ifns(person_data, company_data, internal_ifns_number, internal_ifns_service, dt, logger):\n s = requests.Session()\n result = s.get(u'http://order.nalog.ru/details/')\n if result.status_code != 200:\n logger.error(u\"Невозможно начать сессию с сервером order.nalog.ru\")\n raise errors.ServerUnavailable()\n\n test_str = u\"Укажите параметры для записи на посещение ИФНС России\"\n ok = False\n if company_data:\n for x in range(4):\n result = s.post(u'http://order.nalog.ru/details/', data={\n \"ctl00$LastName\": company_data['name'],\n \"ctl00$inn\": company_data['inn'],\n \"ctl00$phone\": company_data['phone'],\n \"ctl00$email\": company_data['email'],\n \"__VIEWSTATE\": u\"\",\n \"ctl00$face\": u\"0\",\n \"ctl00$btNext\": \"\"\n }, timeout=20)\n if result.status_code != 200:\n logger.error(u\"order.nalog.ru вернул неожиданный код: %s\" % unicode(result.status_code))\n raise errors.ServerUnavailable()\n content = result.content.decode('utf-8')\n\n if test_str in content:\n ok = True\n break\n if not ok:\n logger.error(u\"Не удалось начать работу с order.nalog.ru\")\n raise errors.ServerUnavailable()\n elif person_data:\n for x in range(4):\n result = s.post(u'http://order.nalog.ru/details/', data={\n \"ctl00$LastName\": person_data['surname'],\n \"ctl00$FirstName\": person_data['name'],\n \"ctl00$SecondName\": person_data['patronymic'] or u\"\",\n \"ctl00$inn\": person_data['inn'],\n \"ctl00$phone\": person_data['phone'],\n \"ctl00$email\": person_data['email'],\n \"__VIEWSTATE\": u\"\",\n \"ctl00$face\": u\"1\",\n \"ctl00$btNext\": \"\"\n }, timeout=20)\n if result.status_code != 200:\n logger.error(u\"order.nalog.ru вернул неожиданный код: %s\" % unicode(result.status_code))\n raise errors.ServerUnavailable()\n content = 
result.content.decode('utf-8')\n\n if test_str in content:\n ok = True\n break\n if not ok:\n logger.error(u\"Не удалось начать работу с order.nalog.ru\")\n raise errors.ServerUnavailable()\n\n fns, sub_service = internal_ifns_number, internal_ifns_service # get_ifns_internal_id_by_ifns_name(s, region_name, reg_ifns_name, not company_data, logger)\n\n service = None\n is_multi_sub_service = 0\n\n cb_param = 'c0:%d;%d;%d;%d' % (\n sub_service, is_multi_sub_service, (service if is_multi_sub_service else sub_service), fns)\n result = s.post(u'http://order.nalog.ru/fns_service/', data={\n \"__CALLBACKID\": u\"ctl00$cpday\",\n \"__CALLBACKPARAM\": cb_param,\n \"__EVENTTARGET\": u\"\",\n \"__EVENTARGUMENT\": \"\",\n \"__VIEWSTATE\": u\"\",\n }, timeout=20)\n if result.status_code != 200:\n logger.error(u\"order.nalog.ru вернул неожиданный код: %s\" % unicode(result.status_code))\n raise errors.ServerUnavailable()\n\n str_data = result.text[26:-3].encode('utf-8').decode('string_escape').replace('!-\\\\-', '!--').replace('/-\\\\-',\n '/--').replace(\n '\\\\/script', '/script')\n content = u\"<!DOCTYPE html><html><head><title></title></head><body>%s</body></html>\" % str_data.decode('utf-8')\n root = html5lib.parse(content, treebuilder='lxml', namespaceHTMLElements=False)\n\n year = None\n month = None\n # noinspection PyCallingNonCallable\n for item in CSSSelector('#ctl00_cpday_day_T')(root):\n item_text_parts = item.text.split(' ')\n if len(item_text_parts) < 2:\n logger.error(u\"Ожидалась дата, получили %s\" % item.text)\n raise errors.ServerUnavailable(u\"Invalid nalog.ru service return content\")\n try:\n month = MONTHS[item_text_parts[0].strip()]\n year = int(item_text_parts[1].strip())\n break\n except Exception:\n logger.error(u\"Не удалось распарсить дату: %s\" % item.text)\n raise errors.ServerUnavailable(u\"Invalid nalog.ru service return content\")\n\n if not month or not year:\n logger.error(u\"Дату так и не получили\")\n raise Exception(u\"Invalid nalog.ru service return content\")\n\n day_prev = -1\n days = []\n first = True\n #noinspection PyCallingNonCallable\n for item in CSSSelector('#ctl00_cpday_day_mt td.dxeCalendarDay')(root):\n classes = filter(lambda x: not not x, [i.strip() for i in item.attrib['class'].split(' ')])\n\n day = int(item.text)\n if first and (23 <= day <= 31):\n month -= 1\n first = False\n if day_prev > day:\n month += 1\n if month > 12:\n year += 1\n month = 1\n day_prev = day\n if 'dxeCalendarOutOfRange' in classes or 'dxeCalendarToday' in classes:\n continue\n d = datetime(year, month, day)\n if d not in (\n datetime(2015, 5, 1), datetime(2015, 5, 2), datetime(2015, 5, 3), datetime(2015, 5, 4), datetime(2015, 5, 9),\n datetime(2015, 5, 10), datetime(2015, 5, 11)):\n days.append(d)\n\n # ban check\n d = days[0]\n result = s.post('http://order.nalog.ru/fns_service/', data={\n \"__CALLBACKID\": u\"ctl00$clBanCheck\",\n \"__CALLBACKPARAM\": u\"c0:%s.%s.%s;%s;%s;0\" % (\n unicode(d.year), unicode(d.month), unicode(d.day), unicode(180), unicode(fns)),\n \"__EVENTARGUMENT\": u\"\",\n \"__EVENTTARGET\": u\"\",\n \"__VIEWSTATE\": u\"\",\n }, timeout=20)\n\n if result.status_code != 200 or not result.content:\n logger.error(u\"order.nalog.ru вернул неожиданный ответ\")\n raise errors.ServerUnavailable()\n\n if u\"'data':'0'\" in result.text:\n raise errors.MaximumRegistrationsExceeded()\n\n # get time slots\n part = u\"%d.%d.%d;%d;%d;%d;%d\" % (\n dt.year, dt.month, dt.day, service if is_multi_sub_service else sub_service, fns, is_multi_sub_service,\n sub_service)\n 
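# A worked example of the framing built below (values are illustrative only,\n    # not taken from a real session): for part = u\"2015.5.5;123;7700;0;123\" (23\n    # characters), part2 becomes u\"14|CUSTOMCALLBACK23|2015.5.5;123;7700;0;123\",\n    # and since len(part2) == 43 the callback parameter is\n    # u\"c0:KV|2;[];GB|43;\" + part2 + u\";\".\n    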
part2 = u\"14|CUSTOMCALLBACK%d|\" % len(part) + part\n cb_param = u\"c0:KV|2;[];GB|%d;\" % len(part2) + part2 + \";\"\n result = s.post('http://order.nalog.ru/fns_service/', data={\n \"__CALLBACKID\": u\"ctl00$clRegister\",\n \"__CALLBACKPARAM\": cb_param,\n \"__EVENTARGUMENT\": u\"\",\n \"__EVENTTARGET\": u\"\",\n \"__VIEWSTATE\": u\"\",\n }, timeout=20)\n if result.status_code != 200 or not result.content:\n logger.error(u\"order.nalog.ru вернул неожиданный ответ\")\n raise errors.ServerUnavailable()\n\n if u\"К сожалению, на указанную Вами услугу и дату полная запись. Предлагаем выбрать другую удобную для Вас дату.\" in result.text:\n raise errors.DayBusyOrHolliday(dt)\n text_parts = result.text.split('cpFS_ID\\':')\n if len(text_parts) < 2:\n logger.error(u\"order.nalog.ru вернул неожиданный ответ: %s\" % result.text)\n raise errors.ServerUnavailable()\n\n sub_service_fs_id = filter(lambda x: x.isdigit(), text_parts[1])\n cb_param = u\"c0:\" + unicode(dt.year) + u\".\" + unicode(dt.month) + u\".\" + unicode(dt.day) + u\" \" + dt.strftime(\n \"%H:%M:00\") + \\\n \";\" + unicode(sub_service_fs_id) + u\";\" + unicode(fns) + u\";\" + unicode(sub_service) + \";\" + unicode(\n is_multi_sub_service)\n\n result = s.post('http://order.nalog.ru/fns_service/', data={\n \"__CALLBACKID\": u\"ctl00$clRegister\",\n \"__CALLBACKPARAM\": cb_param,\n \"__EVENTARGUMENT\": u\"\",\n \"__EVENTTARGET\": u\"\",\n \"__VIEWSTATE\": u\"\",\n }, timeout=20)\n\n if result.status_code != 200 or not result.content:\n logger.error(u\"order.nalog.ru вернул неожиданный ответ\")\n raise errors.ServerUnavailable()\n\n if \"'DoubleTime'\" in result.text:\n raise errors.DuplicateBookingAtTheSameTime()\n\n logger.debug(result.text)\n result = result.content.decode('utf-8')\n\n parts = result.split(\"'data':'\")\n if len(parts) < 2:\n logger.error(u\"order.nalog.ru вернул неожиданный ответ: %s\" % result)\n raise errors.ServerUnavailable()\n parts = parts[1].split(\"'\")\n if len(parts) < 2:\n logger.error(u\"order.nalog.ru вернул неожиданный ответ: %s\" % result)\n raise errors.ServerUnavailable()\n code = parts[0].strip()\n logger.debug(u'booking url: http://order.nalog.ru/appointment/R%s/' % code)\n\n result = requests.get(u'http://order.nalog.ru/appointment/R%s/' % code, timeout=20)\n if result.status_code != 200 or not result.content:\n logger.error(u\"order.nalog.ru вернул неожиданный ответ\")\n raise errors.ServerUnavailable()\n root = html5lib.parse(result.text, treebuilder='lxml', namespaceHTMLElements=False)\n\n #noinspection PyCallingNonCallable\n if not len(CSSSelector(\"#ctl00_pnlDetails\")(root)):\n logger.error(result.text)\n raise errors.DuplicateBookingAtTheSameTime()\n\n if len(CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)) < 18:\n logger.error(u\"order.nalog.ru вернул неожиданный ответ: %s\" % result.text)\n raise errors.ServerUnavailable()\n #noinspection PyCallingNonCallable\n ifns = CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[3].text\n #noinspection PyCallingNonCallable\n address = CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[5].text\n #noinspection PyCallingNonCallable\n map = CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[7].text\n #noinspection PyCallingNonCallable\n phone = CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[9].text\n #noinspection PyCallingNonCallable\n service = CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[11].text\n #noinspection PyCallingNonCallable\n data_str = CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[13].text\n #noinspection PyCallingNonCallable\n time_str = 
CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[15].text\n #noinspection PyCallingNonCallable\n window = CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[17].text\n\n try:\n dt = datetime.strptime(data_str + 'T' + time_str, \"%d.%m.%YT%H:%M\")\n except Exception:\n raise errors.ServerError(u\"Invalid datetime format\")\n\n return {\n \"ifns\": ifns,\n \"service\": service,\n \"date\": dt.strftime(\"%Y-%m-%dT%H:%M:%S\"),\n \"window\": window,\n \"address\": address,\n \"phone\": phone,\n \"how_to_get\": map,\n \"code\": code\n }\n\n\ndef get_registration_ifns(service_nalog_ru_url, address_ifns=None):\n if not address_ifns or not isinstance(address_ifns, basestring):\n raise errors.IfnsNotFound()\n\n result_text = current_app.external_tools.cache.get(prepare_key(address_ifns))\n if not result_text:\n result = requests.get('%s/addrno.do?l=6&g=%s' % (service_nalog_ru_url, address_ifns), headers={\n 'Referer': '%s/addrno.do' % service_nalog_ru_url\n }, timeout=5)\n if not result or result.status_code != 200:\n raise errors.IfnsNotFound()\n result_text = result.text\n current_app.external_tools.cache.set(prepare_key(address_ifns), result_text, 3600 * 24)\n\n try:\n data = json.loads(result_text)\n return data[\"res\"]\n except Exception:\n pass\n raise errors.IfnsNotFound()\n\n\ndef stringify_children(node):\n s = node.text\n if s is None:\n s = ''\n for child in node:\n s += etree.tostring(child, encoding='unicode')\n return s\n\n\ndef get_ifns_registrations(name, company_type=u'ooo', date_from=None, date_to=None,\n service=None, ifns=None, service_nalog_ru_url=None, logger=None):\n from services.llc_reg.documents.enums import IfnsServiceEnum\n service = service or IfnsServiceEnum.IS_REG_COMPANY\n\n logger = logger or logging.getLogger()\n if service == IfnsServiceEnum.IS_REG_COMPANY:\n service_str = u\"Р11001\"\n elif service == IfnsServiceEnum.IS_REG_IP:\n service_str = u\"P21001\"\n else:\n return\n\n try:\n s = requests.Session()\n s.get('%s/uwsfind.do' % service_nalog_ru_url, timeout=20)\n reg_ifns = ifns\n req_data = {\n \"dtfrom\": date_from.strftime(\"%d.%m.%Y\") if date_from else u\"\", # 06.06.2014\n \"dtto\": date_to.strftime(\"%d.%m.%Y\") if date_to else u\"\", # 27.10.2014\n \"frm\": service_str, #\n \"frmip\": u\"\",\n \"ifns\": reg_ifns, # 78086\n \"name\": name,\n \"nptype\": u\"ul\",\n \"ogrn\": u\"\",\n \"ogrnip\": u\"\"\n }\n result = s.post('%s/uwsfind.do' % service_nalog_ru_url, data=req_data, timeout=20)\n if not result or result.status_code != 200:\n raise Exception(u\"Unexpected result\")\n data = result.text\n except Exception, ex:\n logger.exception(u\"Failed to get reservations\")\n return\n\n root = html5lib.parse(data, treebuilder='lxml', namespaceHTMLElements=False)\n\n found_registries = CSSSelector(\"#uwsfound span\")(root)\n if not len(found_registries):\n return\n try:\n results_count = int(found_registries[0].text.strip())\n if not results_count:\n raise Exception(u\"Not found\")\n except Exception, ex:\n return\n\n table = CSSSelector(\"#uwsdata tbody tr\")(root)\n if not table or not len(table):\n return\n\n result_items = []\n upper_name = name.upper()\n while '  ' in upper_name:\n upper_name = upper_name.replace('  ', ' ')\n for item in table:\n result_item = {}\n reshenie = False\n for td in CSSSelector(\"td\")(item):\n td_str = stringify_children(td)\n if not td_str:\n continue\n\n td_str = td_str.strip()\n td_str = td_str.replace('<br>', '<br/>')\n for part in td_str.split('<br/>'):\n part = part.strip()\n if u\"Наименование:\" in 
part:\n result_item['full_name'] = part.split('</strong>')[1].strip()\n if u\"ОГРН:\" in part:\n result_item['ogrn'] = part.split('</strong>')[1].strip()\n if u\"ОГРНИП:\" in part:\n result_item['ogrnip'] = part.split('</strong>')[1].strip()\n if u\"ФИО:\" in part:\n result_item['fio'] = part.split('</strong>')[1].strip()\n\n if u\"Вид решения:\" in part:\n reshenie = True\n res = part.split('</strong>')[1].strip()\n if u'Решение о государственной регистрации' in res:\n result_item['status'] = 'registered'\n elif u'Решение об отказе в государственной регистрации' in res:\n result_item['status'] = 'registration_declined'\n else:\n result_item['status'] = 'unknown'\n\n if u\"Дата готовности документов:\" in part:\n res = part.split('</strong>')[1].strip()\n result_item['reg_date'] = res\n if 'reg_date' in result_item and not reshenie:\n result_item['status'] = 'progress'\n\n if 'full_name' in result_item:\n res_full_name = result_item['full_name'].upper()\n if not res_full_name:\n continue\n while '  ' in res_full_name:\n res_full_name = res_full_name.replace('  ', ' ')\n if res_full_name == u\"ОБЩЕСТВО С ОГРАНИЧЕННОЙ ОТВЕТСТВЕННОСТЬЮ \\\"%s\\\"\" % upper_name or \\\n res_full_name == u\"ОБЩЕСТВО С ОГРАНИЧЕННОЙ ОТВЕТСТВЕННОСТЬЮ %s\" % upper_name or \\\n res_full_name == u\"ОБЩЕСТВО С ОГРАНИЧЕННОЙ ОТВЕТСТВЕННОСТЬЮ «%s»\" % upper_name or \\\n res_full_name == u\"ОБЩЕСТВО С ОГРАНИЧЕННОЙ ОТВЕТСТВЕННОСТЬЮ ‹‹%s››\" % upper_name:\n result_items.append(result_item)\n elif 'fio' in result_item:\n result_items.append(result_item)\n return result_items\n\ndef if_gp_pay_working():\n data = {\n \"addrFl\": \"117105,77,,,,ВАРШАВСКОЕ Ш,17,,25\",\n \"addrFl_ifns\": \"7726\",\n \"addrFl_okatom\": \"45296561000\",\n \"addrUl\": \"117105,77,,,,ВАРШАВСКОЕ Ш,17,,24\",\n \"addrUl_ifns\": \"7726\",\n \"addrUl_okatom\": \"45296561000\",\n \"bank\": \"\",\n \"c\": \"\",\n \"fam\": \"Долгов\",\n \"gp\": \"11|18210807010011000110|13|ul|4000\",\n \"inn\": \"772900273375\",\n \"nam\": \"Центр\",\n \"otch\": \"Иванович\",\n \"payKind\": \"on\",\n \"region\": \"\",\n \"sum\": \"4000\"\n }\n response = requests.post('https://service.nalog.ru/gp-pay.do', data, timeout=20)\n return response.status_code == 200 and 'application/pdf' == response.headers['content-type']\n" }, { "alpha_fraction": 0.6388489007949829, "alphanum_fraction": 0.6395683288574219, "avg_line_length": 34.18987274169922, "blob_id": "f10e3f988f72906b1e8a18717c31c0a3571f872d", "content_id": "c4884551ee3bb0d919895ca3652c06abd5ee4b6a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2780, "license_type": "no_license", "max_line_length": 116, "num_lines": 79, "path": "/app/fw/plugins/car_assurance_plugin/__init__.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom fw.documents.batch_manager import BatchManager\nfrom services.car_assurance.async_tasks import get_policy_info_async\n\nPLUGIN_NAME = 'car_assurance'\n\n\ndef get_actions():\n    actions = [{\n        'name': 'get_policy_info_async',\n        'async': True\n        # args: policy series, policy number\n    }, {\n        'name': 'get_policy_info_first_try',\n        'async': False\n        # args: policy series, policy number\n    }]\n    return actions\n\ndef get_events():\n    events = [{\n        'name': 'on_policy_info_received'\n    }, {\n        'name': 'on_policy_info_receive_fail'\n    }, {\n        'name': 'on_policy_info_receive_timeout'\n    }]\n    return events\n\ndef _apply_dict_patch(original, patch):\n    for k, v in patch.items():\n        if isinstance(v, dict) and k in 
original and isinstance(original[k], dict):\n original[k] = _apply_dict_patch(original[k], v)\n else:\n original[k] = v\n return original\n\ndef _make_result_fields_patch(data, field_name_map):\n patch = {}\n return patch\n\ndef act(action, batch_db, event_data, plugin_config, logger, config):\n assert batch_db\n descriptors = filter(lambda x: x['name'] == action, get_actions())\n action_descriptor = descriptors[0] if descriptors else None\n if not action_descriptor:\n raise ValueError(u'Invalid action: %s for %s plugin' % (action, PLUGIN_NAME))\n\n if action == 'get_policy_info_async':\n policy_series_field_name = plugin_config['policy_series_field_name']\n policy_number_field_name = plugin_config['policy_number_field_name']\n\n policy_series = batch_db.data.get(policy_series_field_name, None)\n policy_number = batch_db.data.get(policy_number_field_name, None)\n\n if not policy_number or not policy_series:\n return False\n get_policy_info_async.delay(policy_series, policy_number, event_data, batch_db.id)\n elif action == 'get_policy_info_first_try':\n policy_series_field_name = plugin_config['policy_series_field_name']\n policy_number_field_name = plugin_config['policy_number_field_name']\n\n policy_series = batch_db.data.get(policy_series_field_name, None)\n policy_number = batch_db.data.get(policy_number_field_name, None)\n\n if not policy_number or not policy_series:\n return False\n try:\n get_policy_info_async(policy_series, policy_number, event_data, batch_db.id, async=False, logger=logger)\n except Exception:\n BatchManager.handle_event(batch_db.id, \"on_policy_info_receive_fail\", event_data, logger, config=config)\n return False\n else:\n raise Exception(u\"Invalid action %s for plugin %s\" % (action, PLUGIN_NAME))\n\n return True\n\ndef register(class_loader):\n pass\n" }, { "alpha_fraction": 0.6084766983985901, "alphanum_fraction": 0.6093159914016724, "avg_line_length": 32.79432678222656, "blob_id": "df9b6f937e9a1de25410570bd93e5377ac168619", "content_id": "a16b366163bed27a3604107e27d3644ffb7eb294", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4766, "license_type": "no_license", "max_line_length": 129, "num_lines": 141, "path": "/app/fw/async_tasks/celery_utils.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport os\n\nfrom flask import Flask, current_app\nfrom celery import current_app as celery_app\nfrom fw.documents.contexts import ValidatorContext, RenderingContext, ModelCacheContext\nfrom template_filters import load_filters, set_template_loader\n\n\ndef init_flask_signals(_app):\n from fw.db.sql_base import db as sqldb\n\n def finish_db_transaction(sender, **extra):\n sqldb.session.commit()\n\n def rollback(sender, **extra):\n sqldb.session.rollback()\n\n from flask import request_finished, got_request_exception\n request_finished.connect(finish_db_transaction, _app)\n got_request_exception.connect(rollback, _app)\n\n\ndef init_sql_db(app):\n from fw.db.sql_base import db\n db.init_app(app)\n\n init_flask_signals(app)\n\nCELERY_FLASK_APP = None\n\ndef make_app(config, external_tools):\n def _make_app():\n global CELERY_FLASK_APP\n\n if CELERY_FLASK_APP:\n return CELERY_FLASK_APP\n\n from app import init_blueprints, init_services\n app = Flask(__name__)\n if not isinstance(config, dict):\n app.config.update(config.settings)\n else:\n app.config.update(config)\n\n app.validator_context = ValidatorContext()\n app.rendering_context = RenderingContext()\n 
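# Presumably these per-process contexts let Celery workers share the same\n        # validation / rendering / model-cache machinery as the web app; the classes\n        # come from fw.documents.contexts (imported at the top of this module).\n        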
app.model_cache_context = ModelCacheContext()\n app.external_tools = external_tools\n load_filters(app.jinja_env, app.config)\n set_template_loader(app.jinja_env)\n init_blueprints(app)\n init_services(app)\n if not app.config['TEST']:\n init_sql_db(app)\n app.cache = external_tools.cache\n app.logger_name = \"celery\"\n # log_file_path = os.path.join(os.path.split(app.config['log_file_path'])[0], \"celeryd.log\")\n # file_handler = TimedRotatingFileHandler(log_file_path, backupCount=7, encoding='utf-8', when=\"midnight\")\n # file_handler.setLevel()\n # file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))\n app.logger.setLevel(app.config['CELERY_LOG_LEVEL'])\n\n CELERY_FLASK_APP = app\n return app\n return _make_app\n\nclass TaskFileIdHolder(object):\n def __init__(self, task_id, config):\n self.task_id = task_id\n self.config = config\n self.file_name = os.path.join(os.path.dirname(self.config['celery_tasks_dir']), unicode(self.task_id))\n\n def __enter__(self):\n if not os.path.exists(self.file_name):\n try:\n with open(self.file_name, 'w') as f:\n f.write(str(self.task_id))\n except Exception:\n pass\n return self\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n if os.path.exists(self.file_name):\n try:\n os.unlink(self.file_name)\n except Exception:\n pass\n\n def exists(self):\n return os.path.exists(self.file_name)\n\ndef found_same_task(task_name, request = None, task_id = None, args = None, kwargs = None, inspect_tasks = None):\n if inspect_tasks is not None and not inspect_tasks:\n return False\n\n kwargs = kwargs or {}\n args = request.args if request else args\n kwargs = request.kwargs if request else kwargs\n _id = request.id if request else task_id\n\n current_app.logger.info(u\"Searching for celery task %s\" % task_name)\n inspector = celery_app.control.inspect()\n tasks = []\n\n if inspect_tasks:\n tasks = inspect_tasks\n else:\n actives = inspector.active()\n for item in (actives.values() if actives else []):\n tasks.extend(item)\n schedules = inspector.scheduled()\n for item in (schedules.values() if schedules else []):\n tasks.extend(item)\n\n for task_info in tasks:\n task_id = task_info.get('id', None)\n if not task_id:\n continue\n if task_id == _id:\n continue\n\n this_task_name = task_info.get('name', None)\n if this_task_name != task_name:\n current_app.logger.info(u\"task names differ - skip\")\n continue\n\n if args is not None:\n this_task_args = task_info.get('args', None) # todo: test empty args\n current_app.logger.info(u\"Comparing args. Our args: <%s> Their args: <%s> \" % (unicode(args), this_task_args))\n if unicode(args) != this_task_args:\n continue\n\n if kwargs is not None:\n this_task_kwargs = task_info.get('kwargs', {})\n #current_app.logger.info(u\"Comparing kwargs. 
Our args: <%s> Their args: <%s> \" % (unicode(kwargs), this_task_kwargs))\n            if unicode(kwargs) != this_task_kwargs:\n                continue\n        return True\n\n    return False\n\n" }, { "alpha_fraction": 0.7164948582649231, "alphanum_fraction": 0.7242268323898315, "avg_line_length": 29.923076629638672, "blob_id": "c31a3e599c8e2c92f8dd8606cbe7b7015e02079f", "content_id": "58e68b6379d104e3f8b6b86cc916bac6057e3093", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 388, "license_type": "no_license", "max_line_length": 106, "num_lines": 13, "path": "/app/services/pay/__init__.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport os\n\nimport jinja2\n\nfrom services.pay.api import pay_bp\n\n\ndef register(app, jinja_env, class_loader, url_prefix=None):\n    app.register_blueprint(pay_bp, url_prefix=url_prefix)\n\n    search_path = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), u\"templates\"))\n    jinja_env.loader.loaders.append(jinja2.FileSystemLoader(search_path))" }, { "alpha_fraction": 0.5074070692062378, "alphanum_fraction": 0.5179124474525452, "avg_line_length": 46.15981674194336, "blob_id": "8e217dfd9397fd4573c0468fae3f4ad1455c7500", "content_id": "d79d87ae23907ad8f1fe84c06f650c1ef10450f7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 21176, "license_type": "no_license", "max_line_length": 162, "num_lines": 438, "path": "/app/services/ifns/async_tasks/ifns_booking_tasks.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime, timedelta\nimport logging\n\nfrom celery import current_task, current_app as celery\n\n\ncelery.config_from_envvar('CELERY_CONFIG_MODULE')\n\n\ndef _is_task_expired(dt):\n    return datetime.utcnow() - dt > timedelta(seconds=2 * 3600)\n\n\ndef _get_ifns_logger():\n    logger = logging.getLogger(\"IFNS\")\n    logger.setLevel(logging.DEBUG)\n\n    file_handler = logging.FileHandler('/var/log/jb/ifns.log') # todo: config\n    file_handler.setLevel(logging.DEBUG)\n    _formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n    file_handler.setFormatter(_formatter)\n    logger.addHandler(file_handler)\n    return logger\n\n\[email protected]()\ndef book_ifns(task_id):\n    request = current_task.request\n    config = celery.conf.get('config')\n    db = celery.conf.get('db')\n    logger = _get_ifns_logger()\n    ifns_admin_email = config['ifns_admin_email']\n\n#    task_data = IfnsBookingTaskDbObject.find_one(db, {'_id' : ObjectId(task_id)})\n#    if not task_data:\n#        logger.error(u\"Invalid task id: %s\" % task_id)\n#        return False\n#\n#    person_data = task_data[\"person_data\"]\n#    company_data = task_data[\"company_data\"]\n#    reg_ifns_name = task_data[\"reg_ifns_name\"]\n#    service_id = task_data[\"service\"]\n#    region_name = task_data[\"region_name\"]\n#    dt = datetime.strptime(task_data[\"reg_date\"], \"%Y-%m-%dT%H:%M:%S\")\n#    batch_id = task_data[\"batch_id\"]\n#    user_email = task_data[\"user_email\"]\n#    status = task_data[\"status\"]\n#    _update_task(task_id, {\n#        'status' : IfnsBookingTaskStatus.BTS_PROGRESS,\n#        'task_id' : request.id\n#    })\n#\n#    if status != IfnsBookingTaskStatus.BTS_NEW:\n#        logger.error(u\"Invalid task status: %s\" % status)\n#        return False\n#\n#    with TaskFileIdHolder(request.id, config) as task_file:\n#        s = requests.Session()\n#        try:\n#            test_str = u\"Укажите параметры для записи на посещение ИФНС России\"\n#            ok = False\n#            if 
company_data:\n# for x in range(4):\n# result = s.post(u'http://order.nalog.ru/details/', data={\n# \"ctl00$LastName\"\t: company_data['name'],\n# \"ctl00$inn\"\t: company_data['inn'],\n# \"ctl00$phone\"\t: company_data['phone'],\n# \"ctl00$email\"\t: company_data['email'],\n# \"__VIEWSTATE\" : u\"\",\n# \"ctl00$face\"\t: u\"0\",\n# \"ctl00$btNext\" : \"\"\n# }, timeout=20)\n# if result.status_code != 200:\n# logger.error(u\"order.nalog.ru вернул неожиданный код: %s\" % unicode(result.status_code))\n# raise errors.IfnsServiceUnavailable()\n# content = result.content.decode('utf-8')\n#\n# if test_str in content:\n# ok = True\n# break\n# if not ok:\n# logger.error(u\"Не удалось начать работу с order.nalog.ru\")\n# raise errors.IfnsServiceUnavailable()\n# elif person_data:\n# for x in range(4):\n# result = s.post(u'http://order.nalog.ru/details/', data={\n# \"ctl00$LastName\"\t: person_data['surname'],\n# \"ctl00$FirstName\"\t: person_data['name'],\n# \"ctl00$SecondName\" : person_data['patronymic'] or u\"\",\n# \"ctl00$inn\"\t: person_data['inn'],\n# \"ctl00$phone\"\t: person_data['phone'],\n# \"ctl00$email\"\t: person_data['email'],\n# \"__VIEWSTATE\" : u\"\",\n# \"ctl00$face\"\t: u\"1\",\n# \"ctl00$btNext\" : \"\"\n# }, timeout=20)\n# if result.status_code != 200:\n# logger.error(u\"order.nalog.ru вернул неожиданный код: %s\" % unicode(result.status_code))\n# raise errors.IfnsServiceUnavailable()\n# content = result.content.decode('utf-8')\n#\n# if test_str in content:\n# ok = True\n# break\n# if not ok:\n# logger.error(u\"Не удалось начать работу с order.nalog.ru\")\n# raise errors.IfnsServiceUnavailable()\n#\n# try:\n# fns, sub_service = get_ifns_internal_id_by_ifns_name(s, region_name, reg_ifns_name, not company_data, logger)\n# except Exception, ex:\n# logger.exception(u\"Failed to get internal ids for ifns\")\n# send_email.send_email(ifns_admin_email, 'failed_to_book_ifns')\n# _update_task(task_id, {\n# 'status' : IfnsBookingTaskStatus.BTS_FAIL,\n# 'error_info' : {\n# \"tag\" : \"get_ifns_internal_id_by_ifns_name\",\n# \"exc\" : unicode(ex)\n# }\n# })\n# return False\n#\n# service = None\n# is_multi_sub_service = 0\n#\n# cb_param = 'c0:%d;%d;%d;%d' % (sub_service, is_multi_sub_service, (service if is_multi_sub_service else sub_service), fns)\n# result = s.post(u'http://order.nalog.ru/fns_service/', data = {\n# \"__CALLBACKID\":u\"ctl00$cpday\",\n# \"__CALLBACKPARAM\":cb_param,\n# \"__EVENTTARGET\":u\"\",\n# \"__EVENTARGUMENT\":\"\",\n# \"__VIEWSTATE\" : u\"\",\n# }, timeout=20)\n# if result.status_code != 200:\n# logger.error(u\"order.nalog.ru вернул неожиданный код: %s\" % unicode(result.status_code))\n# raise errors.IfnsServiceUnavailable()\n#\n# str_data = result.text[26:-3].encode('utf-8').decode('string_escape').replace('!-\\\\-', '!--').replace('/-\\\\-', '/--').replace('\\\\/script', '/script')\n# content = u\"<!DOCTYPE html><html><head><title></title></head><body>%s</body></html>\" % str_data.decode('utf-8')\n# root = html5lib.parse(content, treebuilder='lxml', namespaceHTMLElements=False)\n#\n# year = None\n# month = None\n# #noinspection PyCallingNonCallable\n# for item in CSSSelector('#ctl00_cpday_day_T')(root):\n# item_text_parts = item.text.split(' ')\n# if len(item_text_parts) < 2:\n# logger.error(u\"Ожидалась дата, получили %s\" % item.text)\n# raise errors.IfnsServiceUnavailable(u\"Invalid nalog.ru service return content\")\n# try:\n# month = MONTHS[item_text_parts[0].strip()]\n# year = int(item_text_parts[1].strip())\n# break\n# except Exception:\n# logger.error(u\"Не 
удалось распарсить дату: %s\" % item.text)\n# raise errors.IfnsServiceUnavailable(u\"Invalid nalog.ru service return content\")\n#\n# if not month or not year:\n# logger.error(u\"Дату так и не получили\")\n# raise errors.IfnsServiceUnavailable(u\"Invalid nalog.ru service return content\")\n#\n# day_prev = -1\n# days = []\n# #noinspection PyCallingNonCallable\n# for item in CSSSelector('#ctl00_cpday_day_mt td.dxeCalendarDay')(root):\n# classes = filter(lambda x: not not x, [i.strip() for i in item.attrib['class'].split(' ')])\n# if 'dxeCalendarOutOfRange' in classes or 'dxeCalendarToday' in classes:\n# continue\n#\n# day = int(item.text)\n# if day_prev > day:\n# month += 1\n# if month > 12:\n# year += 1\n# month = 1\n# day_prev = day\n# d = datetime(year, month, day)\n# days.append(d)\n#\n# # ban check\n# d = days[0]\n# result= s.post('http://order.nalog.ru/fns_service/', data = {\n# \"__CALLBACKID\" : u\"ctl00$clBanCheck\",\n# \"__CALLBACKPARAM\" : u\"c0:%s.%s.%s;%s;%s;0\" % (unicode(d.year), unicode(d.month), unicode(d.day), unicode(180), unicode(fns)),\n# \"__EVENTARGUMENT\" : u\"\",\n# \"__EVENTTARGET\" : u\"\",\n# \"__VIEWSTATE\" : u\"\",\n# }, timeout=20)\n#\n# if result.status_code != 200 or not result.content:\n# logger.error(u\"order.nalog.ru вернул неожиданный ответ\")\n# raise errors.IfnsServiceUnavailable()\n#\n# if u\"'data':'0'\" in result.text:\n# send_email.send_email(user_email, 'ifns_maximum_registrations_exceeded')\n# _update_task(task_id, {\n# 'status' : IfnsBookingTaskStatus.BTS_SUCCESS,\n# 'error_info' : {\n# \"tag\" : \"MaximumRegistrationsExceeded\"\n# }\n# })\n# return False\n#\n# # get time slots\n# part = u\"%d.%d.%d;%d;%d;%d;%d\" % (dt.year, dt.month, dt.day, service if is_multi_sub_service else sub_service, fns, is_multi_sub_service,sub_service)\n# part2 = u\"14|CUSTOMCALLBACK%d|\" % len(part) + part\n# cb_param = u\"c0:KV|2;[];GB|%d;\" % len(part2) + part2 + \";\"\n# result= s.post('http://order.nalog.ru/fns_service/', data = {\n# \"__CALLBACKID\" : u\"ctl00$gvTime\",\n# \"__CALLBACKPARAM\" : cb_param,\n# \"__EVENTARGUMENT\" : u\"\",\n# \"__EVENTTARGET\" : u\"\",\n# \"__VIEWSTATE\" : u\"\",\n# }, timeout=20)\n# if result.status_code != 200 or not result.content:\n# logger.error(u\"order.nalog.ru вернул неожиданный ответ\")\n# raise errors.IfnsServiceUnavailable()\n#\n# if u\"К сожалению, на указанную Вами услугу и дату полная запись. 
Предлагаем выбрать другую удобную для Вас дату.\" in result.text:\n# send_email.send_email(user_email, 'ifns_no_free_slots')\n# _update_task(task_id, {\n# 'status' : IfnsBookingTaskStatus.BTS_SUCCESS,\n# 'error_info' : {\n# \"tag\" : \"no_free_slots\"\n# }\n# })\n# return False\n#\n# text_parts = result.text.split('cpFS_ID\\':')\n# if len(text_parts) < 2:\n# logger.error(u\"order.nalog.ru вернул неожиданный ответ: %s\" % result.text)\n# raise errors.IfnsServiceUnavailable()\n#\n# sub_service_fs_id = filter(lambda x: x.isdigit(), text_parts[1])\n# cb_param = u\"c0:\" + unicode(dt.year) + u\".\" + unicode(dt.month) + u\".\" + unicode(dt.day) + u\" \" + dt.strftime(\"%H:%M:00\") +\\\n# \";\" + unicode(sub_service_fs_id) + u\";\" + unicode(fns) + u\";\" + unicode(sub_service) + \";\" + unicode(is_multi_sub_service)\n#\n# result= s.post('http://order.nalog.ru/fns_service/', data = {\n# \"__CALLBACKID\" : u\"ctl00$clRegister\",\n# \"__CALLBACKPARAM\" : cb_param,\n# \"__EVENTARGUMENT\" : u\"\",\n# \"__EVENTTARGET\" : u\"\",\n# \"__VIEWSTATE\" : u\"\",\n# }, timeout=20)\n#\n# if result.status_code != 200 or not result.content:\n# logger.error(u\"order.nalog.ru вернул неожиданный ответ\")\n# raise errors.IfnsServiceUnavailable()\n#\n# if \"'DoubleTime'\" in result.content:\n# send_email.send_email(user_email, 'ifns_duplicate_booking')\n# _update_task(task_id, {\n# 'status' : IfnsBookingTaskStatus.BTS_SUCCESS,\n# 'error_info' : {\n# \"tag\" : \"DuplicateBookingAtTheSameTime\"\n# }\n# })\n# return False\n#\n# result = result.content.decode('utf-8')\n#\n# parts = result.split(\"'data':'\")\n# if len(parts) < 2:\n# logger.error(u\"order.nalog.ru вернул неожиданный ответ: %s\" % result)\n# raise errors.IfnsServiceUnavailable()\n#\n# parts = parts[1].split(\"'\")\n# if len(parts) < 2:\n# logger.error(u\"order.nalog.ru вернул неожиданный ответ: %s\" % result)\n# raise errors.IfnsServiceUnavailable()\n#\n# code = parts[0].strip()\n# #logger.debug(u'booking url: http://order.nalog.ru/appointment/R%s/' % code)\n#\n# result = requests.get(u'http://order.nalog.ru/appointment/R%s/' % code, timeout=20)\n# if result.status_code != 200 or not result.content:\n# logger.error(u\"order.nalog.ru вернул неожиданный ответ\")\n# raise errors.IfnsServiceUnavailable()\n#\n# root = html5lib.parse(result.text, treebuilder='lxml', namespaceHTMLElements=False)\n#\n# #noinspection PyCallingNonCallable\n# if not len(CSSSelector(\"#ctl00_pnlDetails\")(root)):\n# send_email.send_email(user_email, 'ifns_duplicate_booking')\n# _update_task(task_id, {\n# 'status' : IfnsBookingTaskStatus.BTS_SUCCESS,\n# 'error_info' : {\n# \"tag\" : \"DuplicateBookingAtTheSameTime\"\n# }\n# })\n# return False\n#\n# if len(CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)) < 18:\n# logger.error(u\"order.nalog.ru вернул неожиданный ответ: %s\" % result.text)\n# raise errors.IfnsServiceUnavailable()\n# #noinspection PyCallingNonCallable\n# ifns = CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[3].text\n# #noinspection PyCallingNonCallable\n# address = CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[5].text\n# #noinspection PyCallingNonCallable\n# map = CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[7].text\n# #noinspection PyCallingNonCallable\n# phone = CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[9].text\n# #noinspection PyCallingNonCallable\n# service = CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[11].text\n# #noinspection PyCallingNonCallable\n# data_str = 
CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[13].text\n# #noinspection PyCallingNonCallable\n# time_str = CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[15].text\n# #noinspection PyCallingNonCallable\n# window = CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[17].text\n#\n# try:\n# dt = datetime.strptime(data_str + 'T' + time_str, \"%d.%m.%YT%H:%M\")\n# except Exception:\n# raise errors.IfnsServiceUnavailable(u\"Invalid datetime format\")\n#\n# send_email.send_email(user_email, 'ifns_booking_success')\n# _update_task(task_id, {\n# 'status' : IfnsBookingTaskStatus.BTS_SUCCESS,\n# })\n#\n# booking = IfnsBooking.parse_raw_value({\n# 'ifns' : ifns,\n# 'service' : service,\n# 'service_id' : service_id,\n# 'date' : dt.strftime(\"%Y-%m-%dT%H:%M:%S\"),\n# 'window' : window,\n# 'address' : address,\n# 'phone' : phone,\n# 'how_to_get' : map,\n# 'code' : code,\n# '_discarded' : False\n# }, api_data = True)\n# booking_db_obj = booking.get_db_object()\n# booking_db_obj.update_attr('batch_id', batch_id)\n# booking_db_obj.insert(db)\n# return True\n# except errors.IfnsServiceUnavailable, exc:\n# if not _is_task_expired(dt):\n# _update_task(task_id, {\n# 'status' : IfnsBookingTaskStatus.BTS_NEW,\n# 'task_id' : None\n# })\n# raise book_ifns.retry(countdown=3500 + randint(200), exc=exc)\n# send_email.send_email(ifns_admin_email, 'failed_to_book_ifns')\n# _update_task(task_id, {\n# 'status' : IfnsBookingTaskStatus.BTS_FAIL,\n# 'error_info' : {\n# \"tag\" : \"task_expired\"\n# }\n# })\n# return False\n# except Exception, ex:\n# send_email.send_email(ifns_admin_email, 'failed_to_book_ifns')\n# _update_task(task_id, {\n# 'status' : IfnsBookingTaskStatus.BTS_FAIL,\n# 'error_info' : {\n# \"tag\" : \"get_ifns_internal_id_by_ifns_name\",\n# \"exc\" : unicode(ex)\n# }\n# })\n# return False\n#\n#@celery.task()\n#def find_appointment_data(apt_code, batch_id, reg_date_str, user_email, service_id):\n# request = current_task.request\n# config = celery.conf.get('config')\n# ifns_admin_email = config['ifns_admin_email']\n# db = celery.conf.get('db')\n# logger = _get_ifns_logger()\n# reg_date = datetime.strptime(reg_date_str, \"%Y-%m-%dT%H:%M:%S\")\n# with TaskFileIdHolder(request.id, config) as task_file:\n# try:\n# result = requests.get(u'http://order.nalog.ru/appointment/R%s/' % apt_code, timeout=20)\n# if result.status_code != 200 or not result.content:\n# logger.error(u\"order.nalog.ru вернул неожиданный ответ\")\n# raise errors.IfnsServiceUnavailable()\n#\n# root = html5lib.parse(result.text, treebuilder='lxml', namespaceHTMLElements=False)\n#\n# #noinspection PyCallingNonCallable\n# if not len(CSSSelector(\"#ctl00_pnlDetails\")(root)):\n# send_email.send_email(user_email, 'ifns_duplicate_booking')\n# return False\n#\n# if len(CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)) < 18:\n# logger.error(u\"order.nalog.ru вернул неожиданный ответ: %s\" % result.text)\n# raise errors.IfnsServiceUnavailable()\n# #noinspection PyCallingNonCallable\n# ifns = CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[3].text\n# #noinspection PyCallingNonCallable\n# address = CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[5].text\n# #noinspection PyCallingNonCallable\n# map = CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[7].text\n# #noinspection PyCallingNonCallable\n# phone = CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[9].text\n# #noinspection PyCallingNonCallable\n# service = 
CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[11].text\n# #noinspection PyCallingNonCallable\n# data_str = CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[13].text\n# #noinspection PyCallingNonCallable\n# time_str = CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[15].text\n# #noinspection PyCallingNonCallable\n# window = CSSSelector(\"#ctl00_pnlDetails>table>tbody>tr>td\")(root)[17].text\n#\n# try:\n# dt = datetime.strptime(data_str + 'T' + time_str, \"%d.%m.%YT%H:%M\")\n# except Exception:\n# raise errors.IfnsServiceUnavailable(u\"Invalid datetime format\")\n#\n# send_email.send_email(user_email, 'ifns_booking_success')\n#\n# booking = IfnsBooking.parse_raw_value({\n# 'ifns' : ifns,\n# 'service' : service,\n# 'service_id' : service_id,\n# 'date' : dt.strftime(\"%Y-%m-%dT%H:%M:%S\"),\n# 'window' : window,\n# 'address' : address,\n# 'phone' : phone,\n# 'how_to_get' : map,\n# 'code' : apt_code,\n# '_discarded' : False\n# }, api_data = True)\n# booking_db_obj = booking.get_db_object()\n# booking_db_obj.update_attr('batch_id', batch_id)\n# booking_db_obj.insert(db)\n# return True\n# except errors.IfnsServiceUnavailable, exc:\n# if not _is_task_expired(reg_date):\n# raise find_appointment_data.retry(countdown=3500 + randint(200), exc=exc)\n# send_email.send_email(ifns_admin_email, 'failed_to_book_ifns')\n# return False\n# except Exception, ex:\n# send_email.send_email(ifns_admin_email, 'failed_to_book_ifns')\n# return False\n" }, { "alpha_fraction": 0.5441421270370483, "alphanum_fraction": 0.5471034646034241, "avg_line_length": 39.022220611572266, "blob_id": "bbcf559bf4bfd36efaed6e7aa47f9ae92af12bf8", "content_id": "05cbd2c18a7759e690c0392f67367e1080de5aa2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5403, "license_type": "no_license", "max_line_length": 120, "num_lines": 135, "path": "/app/fw/api/base_handlers.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom functools import wraps\nimport traceback\nfrom flask import json, current_app, abort, redirect, Response, request\nfrom werkzeug.wrappers import Response as WResponse\nfrom fw.api import errors\n\nRESTRICTED_LOG_URLS = (\n '/account/create/', '/account/login/', '/account/password_change/', '/account/by/code/', '/account/login/external/',\n '/account/login/external-url/')\n\n\ndef log_request():\n for url in RESTRICTED_LOG_URLS:\n if url in request.url:\n current_app.logger.debug(u'%s: %s {arguments hidden}' % (request.method, request.url))\n return\n try:\n headers = {}\n skip_headers = {'PRAGMA', 'CONNECTION', 'HOST', 'CACHE-CONTROL',\n 'ACCEPT', 'ACCEPT-LANGUAGE', 'ACCEPT-ENCODING', 'REFERER'}\n for k, v in request.headers.items():\n if k.upper() in skip_headers:\n continue\n headers[k] = v\n\n args = unicode(request.args).strip()\n if args == u\"ImmutableMultiDict([])\":\n args = u\"\"\n else:\n args = args.replace(u\"ImmutableMultiDict\", u\"\")\n\n form = unicode(request.form).strip()\n if form == u\"ImmutableMultiDict([])\":\n form = u\"\"\n else:\n form = form.replace(u\"ImmutableMultiDict\", u\"\")\n\n current_app.logger.debug(u'%s: %s Arguments: %s Form data: %s\\n%s' % (\n request.method, request.url, args, form, unicode(headers)))\n except Exception:\n current_app.logger.exception(u\"Failed to log request data\")\n\n\ndef error_tree_to_list(error_tree, cur_name=u\"\", fields=None):\n items = []\n fields = fields if (fields is not None) else set()\n for i in error_tree:\n if 
cur_name:\n name = cur_name + ((u\".\" + i['field']) if i['field'] else \"\")\n else:\n name = i['field']\n\n if 'inner_exception' in i:\n items.extend(error_tree_to_list(i[\"inner_exception\"], name, fields))\n else:\n if name not in fields:\n items.append({\n 'field': name,\n 'error_code': i['error_code']\n })\n fields.add(name)\n return items\n\n\ndef api_view(func):\n @wraps(func)\n def view_wrapper(*args, **kwargs):\n try:\n log_request()\n result = func(*args, **kwargs)\n except errors.ServerUnavailable, exc:\n current_app.logger.critical(\"Server Unavaliable error\")\n trbk = traceback.format_exc()\n current_app.logger.exception(trbk)\n return unicode(exc), 503\n except errors.ApiBaseError, exc:\n api_error_code = exc.get_error_code()\n http_error_code = exc.get_http_error_code()\n api_error_msg = exc.get_error_message()\n exc_ext_data = getattr(exc, 'ext_data', None)\n if exc_ext_data is not None:\n data_json = json.dumps({\n \"error\": {\n \"code\": api_error_code,\n \"message\": api_error_msg\n },\n \"error_ext\": error_tree_to_list(exc_ext_data)\n })\n else:\n data_json = json.dumps({\"error\": {\"code\": api_error_code, \"message\": api_error_msg}})\n if current_app.config.get('debug'):\n current_app.logger.debug(\n \"API ERROR \" + str(exc.get_error_code()) + \": \" + exc.get_error_message().encode(\n \"utf8\") + \": \" + data_json\n )\n else:\n current_app.logger.exception(\n \"API ERROR \" + str(exc.get_error_code()) + \": \" + exc.get_error_message().encode(\n \"utf8\") + \": \" + data_json\n )\n result = Response(data_json, mimetype='application/json', status=http_error_code)\n result.headers.add('Access-Control-Allow-Credentials', \"true\")\n result.headers.add('Access-Control-Allow-Origin', \"http://%s\" % current_app.config['site_domain'])\n return result\n except errors.SocialServiceRedirect, exc:\n resp = redirect(exc.url)\n resp.headers.add('Access-Control-Allow-Credentials', \"true\")\n resp.headers.add('Access-Control-Allow-Origin', \"http://%s\" % current_app.config['site_domain'])\n return resp\n except NotImplementedError:\n abort(405)\n return\n except Exception, exc:\n current_app.logger.exception(u\"Unhandled exception\")\n abort(500)\n return\n\n if isinstance(result, WResponse):\n result.headers.add('Access-Control-Allow-Credentials', \"true\")\n result.headers.add('Access-Control-Allow-Origin', \"http://%s\" % current_app.config['site_domain'])\n return result\n\n try:\n result_str = json.dumps(result, default=lambda x: unicode(x), indent=1)\n except Exception, ex:\n current_app.logger.exception(u\"Failed to jsonify result\")\n abort(500)\n return\n response = Response(result_str, mimetype='application/json')\n response.headers.add('Access-Control-Allow-Credentials', \"true\")\n response.headers.add('Access-Control-Allow-Origin', \"http://%s\" % current_app.config['site_domain'])\n return response\n\n return view_wrapper\n" }, { "alpha_fraction": 0.5960221290588379, "alphanum_fraction": 0.6022099256515503, "avg_line_length": 37.025211334228516, "blob_id": "b0663ccfef785e60c863fdd3a2b59b56891159b8", "content_id": "3c8d3427633cc42932ad6d5b18bfa1c04419274a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4562, "license_type": "no_license", "max_line_length": 109, "num_lines": 119, "path": "/app/fw/api/views/general.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom flask import current_app, json, abort, Response, request, Blueprint\nfrom 
flask_login import current_user\nfrom fw.api import geoip_utils, errors\nfrom fw.api.args_validators import validate_arguments\nfrom fw.api.args_validators import ArgumentValidator\n\nfrom fw.api.base_handlers import api_view\nfrom fw.api.views import not_authorized\nfrom fw.documents.address_enums import RFRegionsEnum\n\ngeneral_bp = Blueprint('general', __name__)\n\n\ndef _set_cors_headers(response):\n response.headers.add('Access-Control-Allow-Credentials', \"true\")\n response.headers.add('Access-Control-Allow-Origin', \"http://%s\" % current_app.config['site_domain'])\n\n\n@general_bp.route('/dadata.ru/api/v2/clean/<method>', methods=['POST'])\ndef dadata_v2_clean(method):\n if not current_user.is_authenticated:\n return not_authorized(current_app.config['site_domain'])\n if method not in ('address', 'birthdate', 'email', 'name', 'phone'):\n abort(404)\n xdata = request.form\n if not xdata:\n abort(400)\n\n data = {}\n for item in xdata:\n # noinspection PyBroadException\n try:\n item_data = json.loads(item)\n if isinstance(item_data, dict):\n data.update(item_data)\n except Exception:\n pass\n\n current_app.logger.debug(u\"clean query: %s\" % unicode(data))\n resp_data = current_app.external_tools.dadata_clean(method, [data['query']])\n current_app.logger.debug(u\"clean request finished\")\n resp = Response(json.dumps(resp_data), mimetype='application/json') \\\n if resp_data else Response(\"[]\", mimetype='application/json')\n _set_cors_headers(resp)\n return resp\n\n\n@general_bp.route('/dadata.ru/api/v2/suggest/<method>', methods=['POST'])\ndef dadata_v2_suggest(method):\n if not current_user.is_authenticated:\n return not_authorized(current_app.config['site_domain'])\n if method not in ('fio', 'address', 'party'):\n abort(404)\n try:\n data = request.form\n if not data:\n abort(400)\n for item in data:\n data = json.loads(item)\n current_app.logger.debug(u\"suggest query: %s\" % json.dumps(data, ensure_ascii=False))\n\n result_data = current_app.external_tools.dadata_suggest(method, data)\n if not result_data:\n raise Exception(u\"Failed to get suggestions\")\n\n except Exception:\n current_app.logger.exception(u\"Failed to send request\")\n resp = Response(json.dumps({'suggestions': []}), status=200)\n _set_cors_headers(resp)\n return resp\n\n if result_data:\n try:\n data = result_data\n str_result = json.dumps(result_data, ensure_ascii=False)[:200]\n # noinspection PyStringFormat\n current_app.logger.debug(u\"suggest result data: %s...\" % str_result)\n suggestions = data['suggestions']\n for sugg in suggestions:\n item = sugg['data']\n if 'house' in item and item['house'] and 'block_type' in item and item['block_type'] is None:\n if len(item['house']) > 1 and item['house'][0].isdigit() and item['house'][-1].isalpha():\n item['block_type'] = u'литер'\n item['block'] = item['house'][-1]\n item['house'] = item['house'][:-1]\n if 'city' in item and item['city'] in [u\"Москва\", u\"Санкт-Петербург\",\n u\"Севастополь\"] and 'city_type' in item and item[\n 'city_type'] == u\"г\":\n item['city'] = None\n item['city_type'] = None\n item['city_type_full'] = None\n\n resp = Response(json.dumps(data), mimetype='application/json')\n _set_cors_headers(resp)\n current_app.logger.debug(u\"suggest finished\")\n return resp\n except Exception:\n pass\n\n resp = Response('[]', mimetype='application/json')\n _set_cors_headers(resp)\n return resp\n\n\n@general_bp.route('/geoip/', methods=['GET'])\n@api_view\n@validate_arguments(ip=ArgumentValidator(required=False))\ndef geo_ip(ip=None):\n ip = 
ip or request.remote_addr\n try:\n result = geoip_utils.GeoIpLocator.get_location(ip)\n if not result:\n raise Exception()\n except Exception:\n current_app.logger.exception(u\"Failed to get location\")\n return {'result': {'region': RFRegionsEnum.RFR_SPB}}\n return {'result': result}\n" }, { "alpha_fraction": 0.7076923251152039, "alphanum_fraction": 0.7086538672447205, "avg_line_length": 29.558822631835938, "blob_id": "5e4ba5cec1267f817a4ec0a52b1e503d48a339d9", "content_id": "586f101d25b62131e1acceb1e702b578bb972627", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1040, "license_type": "no_license", "max_line_length": 85, "num_lines": 34, "path": "/app/services/car_assurance/db_models.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom bson.objectid import ObjectId\n\nfrom sqlalchemy import Column, String, ForeignKey, Unicode\nfrom sqlalchemy.orm import relationship\n\nfrom fw.db.sql_base import db as sqldb\n\n\nclass CarAssurance(sqldb.Model):\n __tablename__ = 'car_assurance'\n\n id = Column(String, primary_key=True, default=lambda: str(ObjectId()))\n\n full_name = Column(Unicode)\n short_name = Column(Unicode)\n\n address = Column(Unicode, nullable=False)\n\n connection_name = Column(Unicode, nullable=False, default=u'')\n\n\nclass CarAssuranceBranch(sqldb.Model):\n __tablename__ = 'car_assurance_branch'\n\n id = Column(String, primary_key=True, default=lambda: str(ObjectId()))\n\n title = Column(Unicode, nullable=True, default=u'')\n address = Column(Unicode, nullable=True, default=u'')\n phone = Column(Unicode, nullable=True, default=u'')\n region = Column(Unicode, nullable=False)\n\n car_assurance_id = Column(String, ForeignKey('car_assurance.id'), nullable=False)\n car_assurance = relationship(\"CarAssurance\")\n\n" }, { "alpha_fraction": 0.6640106439590454, "alphanum_fraction": 0.6653386354446411, "avg_line_length": 46.0625, "blob_id": "839b69d25c8fc41e7bf24da8cdaa7a2e76669575", "content_id": "0a41f1746301117aaca1ee6c0012585bfaf9f8ca", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 753, "license_type": "no_license", "max_line_length": 123, "num_lines": 16, "path": "/app/deployment_migrations/migration_list/20150522_rename_batch_types.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom fw.documents.db_fields import DocumentBatchDbObject\n\n\ndef forward(config, logger):\n logger.debug(u\"rename batch types (new_llc -> llc, new_ip -> ie)\")\n\n DocumentBatchDbObject.get_collection(db).update({'batch_type': 'new_llc'}, {'$set': {'batch_type': 'llc'}}, multi=True)\n DocumentBatchDbObject.get_collection(db).update({'batch_type': 'new_ip'}, {'$set': {'batch_type': 'ie'}}, multi=True)\n\n\ndef rollback(config, logger):\n logger.debug(u\"Rolling back migration\")\n\n DocumentBatchDbObject.get_collection(db).update({'batch_type': 'llc'}, {'$set': {'batch_type': 'new_llc'}}, multi=True)\n DocumentBatchDbObject.get_collection(db).update({'batch_type': 'ie'}, {'$set': {'batch_type': 'new_ip'}}, multi=True)\n" }, { "alpha_fraction": 0.3201219439506531, "alphanum_fraction": 0.48170730471611023, "avg_line_length": 31.850000381469727, "blob_id": "93e9dfb1070737427681752634eef01976d2a41d", "content_id": "68d38acb4a3ff4d405edc5b881028dd93e1d85f9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 656, "license_type": 
"no_license", "max_line_length": 97, "num_lines": 20, "path": "/app/services/ip_reg/okvad_utils.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\ndef is_restricted_for_ip_okvad(okvad):\n if not okvad or not isinstance(okvad, basestring):\n return False\n\n exact_match = ('15.91', '15.96', '51.17.22', '51.34', '51.34.2', '52.25', '52.25.1', '29.60',\n '24.61', '28.75.3', '24.61', '51.12.36', '51.55.33', '31.62.1', '74.20.53',\n '74.60', '66.02', '51.56.4', '67.13.51')\n if okvad in exact_match:\n return True\n\n groups = ('35.30.', '75.', '24.4.', '62.', '65.')\n for item in groups:\n if okvad == item[:-1]:\n return True\n if okvad[:len(item)] == item:\n return True\n\n return False" }, { "alpha_fraction": 0.6397188901901245, "alphanum_fraction": 0.6459450125694275, "avg_line_length": 53.95414352416992, "blob_id": "1188cfcb316da894b79b091de639953ca2dfec05", "content_id": "3405a5f900eaf4c2a543f55da14fb924710cd5f6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 35337, "license_type": "no_license", "max_line_length": 254, "num_lines": 567, "path": "/app/services/pay/api.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\n\nimport hashlib\nfrom dateutil.relativedelta import relativedelta\nimport dateutil.parser\nimport os\nfrom decimal import Decimal\nfrom flask import request, current_app, make_response, redirect, abort, Blueprint\nimport pytz\nfrom fw.auth.models import AuthUser\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.batch_manager import BatchManager\nfrom fw.documents.db_fields import DocumentBatchDbObject, BatchDocumentDbObject\nfrom fw.documents.enums import UserDocumentStatus, DocumentBatchTypeEnum\nfrom fw.metrics import appcraft, mixpanel_metrics\nfrom fw.storage.file_storage import FileStorage\nfrom services.pay.models import PaymentSubscriptionObject, YadRequestsObject, PayInfoObject, PaymentProvider, \\\n PurchaseServiceType\n\npay_bp = Blueprint('pay', __name__)\n\n\ndef _xml_resp(str_val):\n response = make_response(str_val)\n response.headers[\"Content-Type\"] = \"application/xml\"\n return response\n\n\ndef _notify_admin(action, message, recipient_list):\n from fw.async_tasks import send_email\n for rec in recipient_list:\n send_email.send_email.delay(\n rec,\n 'yad_request_error',\n action=action,\n message=message\n )\n\n\ndef parse_iso_dt(str_val):\n val = dateutil.parser.parse(str_val)\n return val.astimezone(pytz.timezone('utc')).replace(tzinfo=None)\n\n\n@pay_bp.route('/payment/paymentAviso/', methods=['POST'])\ndef yad_payment_aviso():\n dt_str = datetime.utcnow().strftime(\"%Y-%m-%dT%H:%M:%SZ\") # u\"2011-05-04T20:38:01.000+04:00\"\n\n logger = current_app.logger\n\n request_datetime = request.form.get('requestDatetime', \"\")\n md5 = request.form.get('md5', \"\")\n shop_id = request.form.get('shopId', \"\")\n shop_article_id = request.form.get('shopArticleId', \"\")\n invoice_id = request.form.get('invoiceId', \"\")\n orderId = request.form.get('orderId', \"\")\n customer_number = request.form.get('customerNumber', \"\")\n order_created_datetime = request.form.get('orderCreatedDatetime', \"\")\n order_sum_amount = request.form.get('orderSumAmount', \"\")\n order_sum_currency_paycash = request.form.get('orderSumCurrencyPaycash', \"\")\n order_sum_bank_paycash = request.form.get('orderSumBankPaycash', \"\")\n shop_sum_amount = request.form.get('shopSumAmount', 
\"\")\n shop_sum_currency_paycash = request.form.get('shopSumCurrencyPaycash', \"\")\n shop_sum_bank_paycash = request.form.get('shopSumBankPaycash', \"\")\n payment_payer_code = request.form.get('paymentPayerCode', \"\")\n payment_type = request.form.get('paymentType', \"\")\n action = request.form.get('action', \"\")\n payment_datetime = request.form.get('paymentDatetime', \"\")\n cps_user_country_code = request.form.get('cps_user_country_code', \"\")\n\n invalid_request_error = u\"\"\"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<paymentAvisoResponse performedDatetime=\"%s\" code=\"200\" invoiceId=\"%s\" shopId=\"%s\" message=\"msg\"/>\"\"\" % (\n dt_str, invoice_id, shop_id)\n\n authorization_error = u\"\"\"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<paymentAvisoResponse performedDatetime=\"%s\" code=\"1\" invoiceId=\"%s\" shopId=\"%s\" message=\"Invalid request: md5 sum does not match provided value\"/>\"\"\" % (\n dt_str, invoice_id, shop_id)\n\n admins_emails = current_app.config['ADMIN_EMAIL_LIST']\n if not md5 or not shop_id or not action or not order_sum_amount or not order_sum_currency_paycash \\\n or not order_sum_bank_paycash or not invoice_id or not customer_number or not orderId:\n current_app.logger.warn(u\"Invalid request from yad: %s\" % unicode(request.form))\n _notify_admin(action, u\"missing one of required arguments\", admins_emails)\n return _xml_resp(invalid_request_error.replace(u'msg', u\"missing one of required arguments\"))\n\n shop_password = current_app.config['YAD_ESHOP_PASSWORD']\n yad_ip_list = current_app.config['YAD_IP_LIST']\n\n # MD5 calc\n # action;orderSumAmount;orderSumCurrencyPaycash;orderSumBankPaycash;shopId;invoiceId;customerNumber;shopPassword\n our_md5_string = \"%s;%s;%s;%s;%s;%s;%s;%s\" % (action, order_sum_amount, order_sum_currency_paycash,\n order_sum_bank_paycash, shop_id, invoice_id, customer_number,\n shop_password)\n\n m = hashlib.md5()\n m.update(our_md5_string)\n\n ip = None\n if 'X-Forwarded-For' in request.headers:\n ip = request.headers['X-Forwarded-For']\n if not ip and 'X-Real-Ip' in request.headers:\n ip = request.headers['X-Real-Ip']\n if not ip:\n ip = request.remote_addr\n\n new_item = YadRequestsObject(\n ip=ip,\n created=datetime.utcnow(),\n request_datetime=parse_iso_dt(request_datetime),\n md5=md5,\n shop_id=int(shop_id),\n shop_article_id=int(shop_article_id) if shop_article_id else 0,\n invoice_id=int(invoice_id),\n order_number=orderId,\n customer_number=customer_number,\n order_created_datetime=parse_iso_dt(order_created_datetime),\n order_sum_amount=Decimal(order_sum_amount),\n order_sum_currency_paycash=order_sum_currency_paycash,\n order_sum_bank_paycash=order_sum_bank_paycash,\n shop_sum_amount=Decimal(shop_sum_amount),\n shop_sum_currency_paycash=shop_sum_currency_paycash,\n shop_sum_bank_paycash=shop_sum_bank_paycash,\n payment_payer_code=payment_payer_code,\n payment_type=payment_type,\n action=action,\n payment_datetime=parse_iso_dt(payment_datetime),\n cps_user_country_code=cps_user_country_code\n )\n sqldb.session.add(new_item)\n sqldb.session.commit()\n\n if action != u'paymentAviso':\n current_app.logger.warn(u\"Invalid request from yad: %s\" % unicode(request.form))\n _notify_admin(action, u\"invalid action id: %s\" % unicode(action), admins_emails)\n return _xml_resp(invalid_request_error.replace(u'msg', u\"invalid action id: %s\" % unicode(action)))\n\n if yad_ip_list:\n if ip not in yad_ip_list:\n current_app.logger.warn(u\"Invalid request from yad: %s\" % unicode(request.form))\n 
_notify_admin(action, u\"sender ip (%s) not in whitelist\" % ip, admins_emails)\n return _xml_resp(invalid_request_error.replace(u'msg', u\"sender ip not in whitelist\"))\n else:\n current_app.logger.warn(u\"Can't check IP address: YAD_IP_LIST config option is empty\")\n\n if m.hexdigest().upper() != md5:\n current_app.logger.warn(u\"Invalid request from yad: %s\" % unicode(request.form))\n _notify_admin(action, u\"arguments md5 digests do not match\", admins_emails)\n return _xml_resp(authorization_error)\n\n try:\n auth_user_id = customer_number\n batch_id = orderId if orderId not in ('subscription_3', 'subscription_1') else None\n subs_type = orderId if orderId in ('subscription_3', 'subscription_1') else None\n if not batch_id and not subs_type:\n raise Exception(\"Invalid order number:%s\" % orderId)\n except Exception:\n current_app.logger.warn(u\"Invalid request from yad: %s\" % unicode(request.form))\n _notify_admin(action, u\"Invalid user id or batch id\", admins_emails)\n return _xml_resp(invalid_request_error.replace(u'msg', u\"Invalid user id or batch id\"))\n\n user = AuthUser.query.filter_by(uuid=auth_user_id).scalar()\n if not user:\n current_app.logger.warn(u\"Invalid request from yad: %s\" % unicode(request.form))\n _notify_admin(action, u\"User with id %s not found\" % unicode(auth_user_id), admins_emails)\n return _xml_resp(invalid_request_error.replace(u'msg', u\"User not found\"))\n\n success_result = u\"\"\"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n <paymentAvisoResponse performedDatetime =\"%s\" code=\"0\" invoiceId=\"%s\" shopId=\"%s\"/>\"\"\" % (\n dt_str, invoice_id, shop_id)\n\n pay_info = {\n 'dt': datetime.now(),\n 'shop_id': shop_id,\n 'invoice_id': invoice_id,\n 'order_sum_amount': order_sum_amount,\n 'order_sum_currency_paycash': order_sum_currency_paycash,\n 'order_sum_bank_paycash': order_sum_bank_paycash\n }\n\n if shop_article_id is not None:\n pay_info['shop_article_id'] = shop_article_id\n if order_created_datetime:\n pay_info['order_created_datetime'] = order_created_datetime\n if shop_sum_amount:\n pay_info['shop_sum_amount'] = shop_sum_amount\n if shop_sum_currency_paycash:\n pay_info['shop_sum_currency_paycash'] = shop_sum_currency_paycash\n if shop_sum_bank_paycash:\n pay_info['shop_sum_bank_paycash'] = shop_sum_bank_paycash\n if payment_payer_code:\n pay_info['payment_payer_code'] = payment_payer_code\n if payment_type:\n pay_info['payment_type'] = payment_type\n if payment_datetime:\n pay_info['payment_datetime'] = payment_datetime\n if cps_user_country_code:\n pay_info['cps_user_country_code'] = cps_user_country_code\n if request_datetime:\n pay_info['request_datetime'] = request_datetime\n\n if batch_id:\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()\n\n if not batch:\n current_app.logger.warn(u\"Invalid request from yad: %s\" % unicode(request.form))\n _notify_admin(action, u\"Batch with id %s not found\" % batch_id, admins_emails)\n return _xml_resp(invalid_request_error.replace(u'msg', u\"Batch not found\"))\n\n modify_result = DocumentBatchDbObject.query.filter_by(id=batch_id).update({\n \"pay_info\": pay_info,\n \"paid\": True\n })\n if batch.batch_type == DocumentBatchTypeEnum.DBT_NEW_LLC:\n pay_info = PayInfoObject(\n user=batch._owner,\n batch=batch,\n pay_record_id=new_item.id,\n payment_provider=PaymentProvider.YAD,\n service_type=PurchaseServiceType.LLC_PURCHASE\n )\n sqldb.session.add(pay_info)\n sqldb.session.commit()\n elif batch.batch_type == DocumentBatchTypeEnum.DBT_OSAGO:\n count = 
PayInfoObject.query.filter_by(batch=batch).count()\n osago_service_code = PurchaseServiceType.OSAGO_PART1 if count < 1 else PurchaseServiceType.OSAGO_PART2\n pay_info = PayInfoObject(\n user=batch._owner,\n batch=batch,\n pay_record_id=new_item.id,\n payment_provider=PaymentProvider.YAD,\n service_type=osago_service_code\n )\n sqldb.session.add(pay_info)\n batch.paid = True\n sqldb.session.commit()\n event = {\n PurchaseServiceType.OSAGO_PART1: 'rerender_pretension',\n PurchaseServiceType.OSAGO_PART2: 'rerender_claim'\n }.get(osago_service_code, None)\n if event:\n BatchManager.handle_event(batch_id, event, {'batch_id': batch_id}, current_app.logger, current_app.config)\n\n if modify_result is None:\n logger.error(u\"Failed to set payment info to batch\")\n _notify_admin(action, u\"Failed to set payment info to batch\", admins_emails)\n return _xml_resp(invalid_request_error.replace(u'msg', u\"Failed to process\"))\n\n try:\n for doc in BatchDocumentDbObject.query.filter_by(batch=batch, status=UserDocumentStatus.DS_RENDERED):\n if not doc.file:\n continue\n\n file_obj = doc.file\n if not file_obj:\n logger.error(u\"Can't replace watermarked file: Failed to find file of batch %s\" % batch_id)\n continue\n\n file_path = FileStorage.get_path(file_obj, current_app.config)\n if not file_path or not os.path.exists(file_path) or not os.path.exists(file_path + '.src'):\n logger.error(\n u\"Can't replace watermarked file: Failed to find original or source file %s of batch %s\" % (\n unicode(file_path + '.src'), batch_id))\n continue\n os.rename(file_path + '.src', file_path)\n except Exception:\n current_app.logger.exception(u\"FAILED TO REPLACE WATERMARKED DOCS\")\n\n if current_app.config.get('PROD', False):\n appcraft.send_stat(batch.batch_type + '_payment_received', batch._owner, batch.id, batch.batch_type, int(invoice_id))\n mixpanel_metrics.send_stat(batch.batch_type + '_payment_received', batch._owner, batch.id, batch.batch_type)\n\n try:\n if batch.batch_type == DocumentBatchTypeEnum.DBT_NEW_LLC:\n BatchManager.send_batch_docs_to_user(batch_id, current_app.config)\n except Exception:\n logger.exception(u\"Failed to send documents to user.\")\n elif subs_type:\n user_subs = PaymentSubscriptionObject.query.filter(\n PaymentSubscriptionObject.user == user,\n PaymentSubscriptionObject.end_dt.__ge__(datetime.utcnow())\n )\n\n if not user_subs.count():\n end_date = datetime.utcnow()\n if subs_type == 'subscription_3':\n end_date += relativedelta(months=3)\n elif subs_type == 'subscription_1':\n end_date += relativedelta(months=1)\n\n new_subs = PaymentSubscriptionObject(\n pay_info=pay_info,\n created=datetime.utcnow(),\n end_dt=end_date,\n user=user,\n type=subs_type\n )\n sqldb.session.add(new_subs)\n sqldb.session.commit()\n\n from fw.async_tasks import not_paid_check_send\n\n not_paid_check_send.make_all_user_fin_batch_paid_and_replace_watermarked_docs_with_normal.delay(\n user_id=user.id)\n\n current_app.logger.info(u\"yad - success\")\n return _xml_resp(success_result)\n\n\n # request\n\n\n# Параметр\t Тип\t Описание\n# requestDatetime\t xs:dateTime\t Момент формирования запроса в ИС Оператора.\n# md5\t xs:normalizedString, ровно 32 шестнадцатеричных символа, в верхнем регистре\t MD5-хэш параметров платежной формы, правила формирования описаны в разделе 4.4 «Правила обработки HTTP-уведомлений Контрагентом».\n# shopId\t xs:long\t Идентификатор Контрагента, присваиваемый Оператором.\n# shopArticleId\t xs:long\t Идентификатор товара, присваиваемый Оператором.\n# invoiceId\t xs:long\t 
Уникальный номер транзакции в ИС Оператора.\n# orderNumber\t xs:normalizedString, до 64 символов\t Номер заказа в ИС Контрагента. Передается, только если был указан в платежной форме.\n# customerNumber\t xs:normalizedString, до 64 символов\t Идентификатор плательщика (присланный в платежной форме) на стороне Контрагента: номер договора, мобильного телефона и т.п.\n# orderCreatedDatetime\t xs:dateTime\t Момент регистрации заказа в ИС Оператора.\n# orderSumAmount\t CurrencyAmount\t Стоимость заказа. Может отличаться от суммы платежа, если пользователь платил в валюте, которая отличается от указанной в платежной форме. В этом случае Оператор берет на себя все конвертации.\n# orderSumCurrencyPaycash\t CurrencyCode\t Код валюты для суммы заказа.\n# orderSumBankPaycash\t CurrencyBank\t Код процессингового центра Оператора для суммы заказа.\n# shopSumAmount\t CurrencyAmount\t Сумма к выплате Контрагенту на р/с (стоимость заказа минус комиссия Оператора).\n# shopSumCurrencyPaycash\t CurrencyCode\t Код валюты для shopSumAmount.\n# shopSumBankPaycash\t CurrencyBank\t Код процессингового центра Оператора для shopSumAmount.\n# paymentPayerCode\t YMAccount\t Номер счета в ИС Оператора, с которого производится оплата.\n# paymentType\txs:normalizedString\tСпособ оплаты заказа. Список значений приведен в таблице 6.6.1.\n\n# action\t xs:normalizedString, до 16 символов\t Тип запроса, значение: paymentAviso.\n# paymentDatetime\t xs:dateTime\t Момент регистрации оплаты заказа в ИС Оператора.\n# cps_user_country_code\t xs:string, 2 символа\t Двухбуквенный код страны плательщика в соответствии с ISO 3166-1 alpha-2.\n\n# Любые названия, отличные от перечисленных выше\txs:string\tПараметры, добавленные Контрагентом в платежную форму.\n\n# RESPONSE\n\n#Параметр\t Тип\t Описание\n#performedDatetime\t xs:dateTime\t Момент обработки запроса по часам ИС Контрагента.\n#code\t xs:int\t Код результата обработки. Список допустимых значений приведен в таблице ниже.\n#shopId\t xs:long\t Идентификатор Контрагента. Должен дублировать поле shopId запроса.\n#invoiceId\t xs:long\t Идентификатор транзакции в ИС Оператора. Должен дублировать поле invoiceId запроса.\n#orderSumAmount\t CurrencyAmount\t Стоимость заказа в валюте, определенной параметром запроса orderSumCurrencyPaycash.\n#message\t xs:string, до 255 символов\tТекстовое пояснение в случае отказа принять платеж.\n#techMessage\t xs:string, до 64 символов\tДополнительное текстовое пояснение ответа Контрагента. Как правило, используется как дополнительная информация об ошибках. Необязательное поле.\n\n\n# result codes\n\n# 0\t Успешно\tУспешно — даже если Оператор прислал данный запрос повторно.\n# 1\t Ошибка авторизации\tЗначение параметра md5 не совпадает с результатом расчета хэш-функции. Оператор не будет повторять запрос и пометит заказ как «Уведомление Контрагенту не доставлено».\n# 200\t Ошибка разбора запроса\tИС Контрагента не в состоянии разобрать запрос. 
Оператор не будет повторять запрос и пометит заказ как «Уведомление Контрагенту не доставлено».\n\n@pay_bp.route('/payment/checkOrder/', methods=['POST'])\ndef yad_check_order():\n dt_str = datetime.utcnow().strftime(\"%Y-%m-%dT%H:%M:%S.000+00:00\") # u\"2011-05-04T20:38:01.000+04:00\"\n\n request_datetime = request.form.get('requestDatetime', \"\")\n action = request.form.get('action', \"\")\n md5 = request.form.get('md5', \"\")\n shop_id = request.form.get('shopId', \"\")\n shop_article_id = request.form.get('shopArticleId', \"\")\n invoice_id = request.form.get('invoiceId', \"\")\n order_number = request.form.get('orderId', \"\")\n customer_number = request.form.get('customerNumber', \"\")\n order_created_datetime = request.form.get('orderCreatedDatetime', \"\")\n order_sum_amount = request.form.get('orderSumAmount', \"\")\n order_sum_currency_paycash = request.form.get('orderSumCurrencyPaycash', \"\")\n order_sum_bank_paycash = request.form.get('orderSumBankPaycash', \"\")\n shop_sum_amount = request.form.get('shopSumAmount', \"\")\n shop_sum_currency_paycash = request.form.get('shopSumCurrencyPaycash', \"\")\n shop_sum_bank_paycash = request.form.get('shopSumBankPaycash', \"\")\n payment_payer_code = request.form.get('paymentPayerCode', \"\")\n payment_type = request.form.get('paymentType', \"\")\n\n invalid_request_error = u\"\"\"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n <checkOrderResponse performedDatetime=\"%s\" code=\"200\" invoiceId=\"%s\" shopId=\"%s\" message=\"msg\"/>\"\"\" % (\n dt_str, invoice_id, shop_id)\n\n authorization_error = u\"\"\"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<checkOrderResponse performedDatetime=\"%s\" code=\"1\" invoiceId=\"%s\" shopId=\"%s\" message=\"Invalid request: md5 sum does not match provided value\"/>\"\"\" % (\n dt_str, invoice_id, shop_id)\n\n admins_emails = current_app.config['ADMIN_EMAIL_LIST']\n if not md5 or not shop_id or not action or not order_sum_amount or not order_sum_currency_paycash \\\n or not order_sum_bank_paycash or not invoice_id or not customer_number or not order_number:\n current_app.logger.warn(u\"Invalid request from yad: %s\" % unicode(request.form))\n _notify_admin(action, u\"missing one of required arguments\", admins_emails)\n return _xml_resp(invalid_request_error.replace(u'msg', u\"missing one of required arguments\"))\n\n shop_password = current_app.config['YAD_ESHOP_PASSWORD']\n yad_ip_list = current_app.config['YAD_IP_LIST']\n\n # MD5 calc\n # action;orderSumAmount;orderSumCurrencyPaycash;orderSumBankPaycash;shopId;invoiceId;customerNumber;shopPassword\n our_md5_string = \"%s;%s;%s;%s;%s;%s;%s;%s\" % (action, order_sum_amount, order_sum_currency_paycash,\n order_sum_bank_paycash, shop_id, invoice_id, customer_number,\n shop_password)\n\n m = hashlib.md5()\n m.update(our_md5_string)\n\n ip = None\n if 'X-Forwarded-For' in request.headers:\n ip = request.headers['X-Forwarded-For']\n if not ip and 'X-Real-Ip' in request.headers:\n ip = request.headers['X-Real-Ip']\n if not ip:\n ip = request.remote_addr\n\n new_item = YadRequestsObject(\n ip=ip,\n created=datetime.utcnow(),\n request_datetime=parse_iso_dt(request_datetime),\n md5=md5,\n shop_id=int(shop_id),\n shop_article_id=int(shop_article_id) if shop_article_id else 0,\n invoice_id=int(invoice_id),\n order_number=order_number,\n customer_number=customer_number,\n order_created_datetime=parse_iso_dt(order_created_datetime),\n order_sum_amount=Decimal(order_sum_amount),\n order_sum_currency_paycash=order_sum_currency_paycash,\n 
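# the raw notification is persisted before any action/md5 validation, so even\n        # rejected requests leave an audit record (amounts kept exact via Decimal)\n        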
order_sum_bank_paycash=order_sum_bank_paycash,\n shop_sum_amount=Decimal(shop_sum_amount),\n shop_sum_currency_paycash=shop_sum_currency_paycash,\n shop_sum_bank_paycash=shop_sum_bank_paycash,\n payment_payer_code=payment_payer_code,\n payment_type=payment_type,\n action=action\n )\n sqldb.session.add(new_item)\n sqldb.session.commit()\n\n if action != u'checkOrder':\n current_app.logger.warn(u\"Invalid request from yad: %s\" % unicode(request.form))\n _notify_admin(action, u\"invalid action id: %s\" % unicode(action), admins_emails)\n return _xml_resp(invalid_request_error.replace(u'msg', u\"invalid action id: %s\" % unicode(action)))\n\n if yad_ip_list:\n if ip not in yad_ip_list:\n current_app.logger.warn(u\"Invalid request from yad: %s\" % unicode(request.form))\n _notify_admin(action, u\"sender ip (%s) not in whitelist\" % ip, admins_emails)\n return _xml_resp(invalid_request_error.replace(u'msg', u\"sender ip not in whitelist\"))\n else:\n current_app.logger.warn(u\"Can't check IP address: YAD_IP_LIST config option is empty\")\n\n if m.hexdigest().upper() != md5:\n current_app.logger.warn(u\"Invalid request from yad: %s\" % unicode(request.form))\n _notify_admin(action, u\"arguments md5 digests do not match\", admins_emails)\n return _xml_resp(authorization_error)\n\n try:\n auth_user_id = customer_number\n batch_id = order_number if order_number not in ('subscription_3', 'subscription_1') else None\n subs_type = order_number if order_number in ('subscription_3', 'subscription_1') else None\n if not batch_id and not subs_type:\n raise Exception(\"Invalid order number:%s\" % order_number)\n except Exception:\n current_app.logger.warn(u\"Invalid request from yad: %s\" % unicode(request.form))\n _notify_admin(action, u\"Invalid user id or batch id\", admins_emails)\n return _xml_resp(invalid_request_error.replace(u'msg', u\"Invalid user id or batch id\"))\n\n reject_error = u\"\"\"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<checkOrderResponse performedDatetime=\"%s\" code=\"100\" invoiceId=\"%s\" shopId=\"%s\" message=\"msg\"/>\"\"\" % (\n dt_str, invoice_id, shop_id)\n\n user = AuthUser.query.filter_by(uuid=auth_user_id).scalar()\n if not user:\n current_app.logger.warn(u\"Invalid request from yad: %s\" % unicode(request.form))\n _notify_admin(action, u\"User with id %s not found\" % unicode(auth_user_id), admins_emails)\n return _xml_resp(reject_error.replace(u'msg', u\"User not found\"))\n\n if batch_id:\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()\n\n if not batch:\n current_app.logger.warn(u\"Invalid request from yad: %s\" % unicode(request.form))\n _notify_admin(action, u\"Batch with id %s not found\" % batch_id, admins_emails)\n return _xml_resp(reject_error.replace(u'msg', u\"Batch not found\"))\n\n if batch.batch_type == DocumentBatchTypeEnum.DBT_NEW_LLC:\n pay_info = PayInfoObject.query.filter_by(batch_id=batch_id).first()\n if pay_info or batch.paid:\n current_app.logger.warn(u\"Batch already paid!\")\n _notify_admin(action, u\"Batch with id %s is already paid\" % batch_id, admins_emails)\n return _xml_resp(reject_error.replace(u'msg', u\"Услуга уже оплачена\"))\n elif batch.batch_type == DocumentBatchTypeEnum.DBT_OSAGO:\n payments = PayInfoObject.query.filter_by(batch_id=batch_id).count()\n if payments > 1:\n current_app.logger.warn(u\"Batch already paid!\")\n _notify_admin(action, u\"Batch with id %s is already paid\" % batch_id, admins_emails)\n return _xml_resp(reject_error.replace(u'msg', u\"Услуга уже оплачена\"))\n else:\n 
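# only NEW_LLC and OSAGO batches are payable through this endpoint\n            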
current_app.logger.warn(u\"Invalid batch type: %s\" % batch.batch_type)\n return _xml_resp(reject_error.replace(u'msg', u\"Данная услуга не продается\"))\n\n elif subs_type:\n user_subs = PaymentSubscriptionObject.query.filter(\n PaymentSubscriptionObject.user == user,\n PaymentSubscriptionObject.end_dt.__ge__(datetime.utcnow())\n )\n\n if user_subs.count() > 0:\n current_app.logger.warn(u\"User has subscription already!\")\n _notify_admin(action, u\"User with id %s already has subscription\" % unicode(auth_user_id), admins_emails)\n return _xml_resp(reject_error.replace(u'msg', u\"Услуга уже оплачена\"))\n\n success_result = u\"\"\"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<checkOrderResponse performedDatetime =\"%s\" code=\"0\" invoiceId=\"%s\" shopId=\"%s\"/>\"\"\" % (dt_str, invoice_id, shop_id)\n\n current_app.logger.info(u\"yad - success. returning %s\" % success_result)\n return _xml_resp(success_result)\n\n # request\n\n\n# Параметр\t Тип\t Описание\n# requestDatetime\t xs:dateTime\t Момент формирования запроса в ИС Оператора.\n# action\t xs:normalizedString, до 16 символов\t Тип запроса. Значение: «checkOrder» (без кавычек).\n# md5\t xs:normalizedString, ровно 32 шестнадцатеричных символа, в верхнем регистре\t MD5-хэш параметров платежной формы, правила формирования описаны в разделе 4.4 «Правила обработки HTTP-уведомлений Контрагентом».\n# shopId\t xs:long\t Идентификатор Контрагента, присваиваемый Оператором.\n# shopArticleId\t xs:long\t Идентификатор товара, присваиваемый Оператором.\n# invoiceId\t xs:long\t Уникальный номер транзакции в ИС Оператора.\n# orderNumber\t xs:normalizedString, до 64 символов\t Номер заказа в ИС Контрагента. Передается, только если был указан в платежной форме.\n# customerNumber\t xs:normalizedString, до 64 символов\t Идентификатор плательщика (присланный в платежной форме) на стороне Контрагента: номер договора, мобильного телефона и т.п.\n# orderCreatedDatetime\t xs:dateTime\t Момент регистрации заказа в ИС Оператора.\n# orderSumAmount\t CurrencyAmount\t Стоимость заказа. Может отличаться от суммы платежа, если пользователь платил в валюте, которая отличается от указанной в платежной форме. В этом случае Оператор берет на себя все конвертации.\n# orderSumCurrencyPaycash\t CurrencyCode\t Код валюты для суммы заказа.\n# orderSumBankPaycash\t CurrencyBank\t Код процессингового центра Оператора для суммы заказа.\n# shopSumAmount\t CurrencyAmount\t Сумма к выплате Контрагенту на р/с (стоимость заказа минус комиссия Оператора).\n# shopSumCurrencyPaycash\t CurrencyCode\t Код валюты для shopSumAmount.\n# shopSumBankPaycash\t CurrencyBank\t Код процессингового центра Оператора для shopSumAmount.\n# paymentPayerCode\t YMAccount\t Номер счета в ИС Оператора, с которого производится оплата.\n# paymentType\txs:normalizedString\tСпособ оплаты заказа. Список значений приведен в таблице 6.6.1.\n# Любые названия, отличные от перечисленных выше\txs:string\tПараметры, добавленные Контрагентом в платежную форму.\n\n# response:\n#Параметр\t Тип\t Описание\n#performedDatetime\t xs:dateTime\t Момент обработки запроса по часам ИС Контрагента.\n#code\t xs:int\t Код результата обработки. Список допустимых значений приведен в таблице ниже.\n#shopId\t xs:long\t Идентификатор Контрагента. Должен дублировать поле shopId запроса.\n#invoiceId\t xs:long\t Идентификатор транзакции в ИС Оператора. 
Должен дублировать поле invoiceId запроса.\n#orderSumAmount\t CurrencyAmount\t Стоимость заказа в валюте, определенной параметром запроса orderSumCurrencyPaycash.\n#message\t xs:string, до 255 символов\tТекстовое пояснение в случае отказа принять платеж.\n#techMessage\t xs:string, до 64 символов\tДополнительное текстовое пояснение ответа Контрагента. Как правило, используется как дополнительная информация об ошибках. Необязательное поле.\n\n# result codes\n# Код\t Значение\t Описание ситуации\n# 0\t Успешно\t Контрагент дал согласие и готов принять перевод.\n# 1\t Ошибка авторизации\t Несовпадение значения параметра md5 с результатом расчета хэш-функции. Оператор считает ошибку окончательной и не будет осуществлять перевод.\n# 100\t Отказ в приеме перевода\t Отказ в приеме перевода с заданными параметрами. Оператор считает ошибку окончательной и не будет осуществлять перевод.\n# 200\t Ошибка разбора запроса\t ИС Контрагента не в состоянии разобрать запрос. Оператор считает ошибку окончательной и не будет осуществлять перевод.\n\n\n@pay_bp.route('/payment/redirect/', methods=['GET'])\ndef yad_redirect():\n has_error = request.args.get('payerr', \"false\") == \"true\"\n success_url = request.args.get('shopSuccessURL', \"\")\n fail_url = request.args.get('shopFailURL', \"\")\n\n if has_error and fail_url:\n return redirect(fail_url)\n if not has_error and success_url:\n return redirect(success_url)\n abort(400)\n" }, { "alpha_fraction": 0.569513201713562, "alphanum_fraction": 0.5702780485153198, "avg_line_length": 36.54391860961914, "blob_id": "78977f9f49df658182b092f267216691bc163727", "content_id": "cff37c69c110512284b329f2c2b16e85d871d54b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 22226, "license_type": "no_license", "max_line_length": 170, "num_lines": 592, "path": "/app/fw/auth/user_manager.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime, timedelta\nimport random\nimport string\nfrom bson import ObjectId\n\nfrom flask_login import login_user\n\nfrom fw.api import errors\nfrom fw.auth.encrypt import encrypt_password, check_password\nfrom fw.auth.models import AuthUser, UserActivationLink, ConfirmationLinkTypeEnum, AuthorizationUrl\nfrom fw.auth.social_services import SocialServiceBackends\nfrom fw.db.sql_base import db\nfrom template_filters import utm_args\n\n\nclass UserManager(object):\n __config = None\n __logger = None\n\n @classmethod\n def _generate_link_code(cls, use_chars):\n max_activation_link_length = cls.__config['max_activation_link_length']\n digital_activation_link_length = cls.__config['digital_activation_link_length']\n if use_chars:\n return u''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(max_activation_link_length))\n return u''.join(random.choice(string.digits) for _ in range(digital_activation_link_length))\n\n @classmethod\n def init(cls, config, logger):\n cls.__config = config\n cls.__logger = logger\n\n assert cls.__config\n assert cls.__logger\n\n @staticmethod\n def create_temp_user(is_tester=False):\n result = UserManager.create_user(\"\", \"\", \"\", \"\", \"\", \"\", \"\", \"\", is_tester=is_tester)\n if result:\n return result[0]\n\n @classmethod\n def register_user(cls, social_service_access_token, mobile, email, name, surname, patronymic, password,\n social_network, temp_user=None):\n \"\"\"\n @type social_service_access_token: string\n @type email: string\n @type name: string\n 
@type surname: string\n @type patronymic: string\n @type password: string\n @type social_network: string\n \"\"\"\n\n user, is_new_account = UserManager.create_user(social_service_access_token, mobile, email,\n name, surname, patronymic, password, social_network,\n temp_user=temp_user\n )\n\n if (mobile and not user.mobile_confirmed) or (email and not user.email_confirmed):\n activate_by_mobile = not not mobile\n\n try:\n if activate_by_mobile:\n # UserManager.confirm_new_mobile(store, user, mobile, config)\n pass\n else:\n UserManager.confirm_new_email(user, email)\n except Exception, exc:\n cls.__logger.exception(u'Failed to create user: %s' % str(exc))\n raise errors.ServerUnavailable(exc.message)\n return user\n\n @classmethod\n def promote_temp_user(cls, current_user, social_service_access_token, mobile, email, name,\n surname, patronymic, password, social_network):\n\n new_user = UserManager.register_user(social_service_access_token, mobile, email, name,\n surname, patronymic, password, social_network, temp_user=current_user)\n\n return new_user\n\n @classmethod\n def resend_activation_code(cls, email, mobile):\n if email:\n user = AuthUser.query.filter_by(email=email).first()\n\n if not user:\n user_activation_link = UserActivationLink.query.filter_by(\n new_email=email,\n used_date=None,\n link_type=ConfirmationLinkTypeEnum.CLT_EMAIL).first()\n\n if not user_activation_link:\n raise errors.UserNotFound()\n user = AuthUser.query.filter_by(id=user_activation_link.auth_user_id).first()\n if not user:\n raise errors.UserNotFound()\n\n db.session.delete(user_activation_link)\n db.session.commit()\n\n UserManager.confirm_new_email(user, email, email_type='activate_account')\n return\n\n user = AuthUser.query.filter_by(mobile=mobile).first()\n\n if not user:\n user_activation_link = UserActivationLink.query.filter_by(\n new_mobile=mobile,\n used_date=None,\n link_type=ConfirmationLinkTypeEnum.CLT_MOBILE).first()\n\n if not user_activation_link:\n raise errors.UserNotFound()\n user = AuthUser.query.filter_by(id=user_activation_link.auth_user_id).first()\n if not user:\n raise errors.UserNotFound()\n\n db.session.delete(user_activation_link)\n db.session.commit()\n\n UserManager.confirm_new_mobile(user, mobile)\n\n\n @classmethod\n def create_user(cls, social_service_access_token, mobile, email,\n name, surname, patronymic, password, social_network, email_is_social=False, temp_user=None,\n is_tester=False):\n\n is_temp_user = not mobile and not email\n email_confirmed = False\n social_uid = None\n social_email = ''\n social_backend = None\n token = None\n if social_service_access_token and social_network:\n token = social_service_access_token\n social_backend = SocialServiceBackends.backends.get(social_network)\n if social_backend:\n social_data = social_backend.get_user_data(cls.__config, token)\n social_uid = social_data.get('id', None)\n if not social_data or not social_uid:\n raise errors.SocialAuthError()\n social_email = social_data.get('email', None)\n if social_email:\n social_email = social_email.lower()\n social_service_user_link = social_backend.get_user_link(social_uid)\n if social_service_user_link:\n user_id = social_service_user_link.auth_user\n user = AuthUser.query.filter_by(id=user_id)\n return user, False\n\n email = email.lower() if email else u''\n if email:\n if email_is_social or (social_email and email == social_email):\n email_confirmed = True\n elif not email and not mobile and social_backend:\n email = social_email\n if email:\n email_confirmed = True\n\n 
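# look for an account already registered with this e-mail (or, lacking one, this mobile)\n        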
current_user = (AuthUser.query.filter_by(email=unicode(email)).first() if email else AuthUser.query.filter_by(mobile=unicode(mobile)).first() if mobile else None)\n\n if current_user:\n if email:\n if social_email and social_uid:\n if current_user.email_confirmed:\n social_backend.make_link(token, social_uid, current_user, cls.__config)\n return current_user, False\n else:\n email = u''\n email_confirmed = False\n else:\n raise errors.DuplicateEmail()\n else:\n raise errors.DuplicateMobile()\n\n # Creating new auth user record\n user = temp_user or AuthUser(uuid=unicode(ObjectId()))\n user.email_confirmed = email_confirmed\n user.mobile_confirmed = False\n user.is_tester = is_tester\n if email_confirmed:\n user.email = email\n user.password = unicode(encrypt_password(password)) if password else None\n user.signup_date = datetime.utcnow()\n user.enabled = True\n user.email = unicode(email) if email else None\n user.mobile = unicode(mobile) if mobile else None\n user.name = unicode(name)\n user.surname = unicode(surname)\n user.patronymic = unicode(patronymic)\n user.temporal = is_temp_user\n\n try:\n db.session.add(user)\n except Exception, exc:\n cls.__logger.error('Failed to add user to DB: %s' % str(exc))\n db.session.rollback()\n raise\n\n # add link to user profile\n if social_uid:\n social_backend.make_link(token, social_uid, user, cls.__config)\n user.enabled = True\n\n db.session.commit()\n return user, True\n\n @classmethod\n def login_user(cls, email, password):\n email = email.lower()\n if not email:\n return\n user = AuthUser.query.filter_by(email=email).first()\n staging = cls.__config['STAGING']\n if staging and password == \"111111\":\n if not user:\n try:\n user = AuthUser.query.filter_by(uuid=email.split('@')[0]).first()\n except Exception:\n db.session.rollback()\n return\n db.session.commit()\n return user\n\n db.session.commit()\n if not user or not user.password or not check_password(password, user.password):\n return\n\n return user\n\n @classmethod\n def confirm_email_or_mobile(cls, code, user_id, link_type):\n user = None\n cls.__logger.debug(\n u\"confirm_email_or_mobile: %s, %s, %s\" % (unicode(code), unicode(user_id), unicode(link_type)))\n if user_id:\n user = AuthUser.query.filter_by(uuid=user_id).first()\n if not user:\n raise errors.ActivationCodeExpiredOrInvalid()\n activation_link = UserActivationLink.query.filter_by(\n link_code=code,\n auth_user_id=user.id,\n link_type=link_type,\n used_date=None\n ).first()\n else:\n activation_link = UserActivationLink.query.filter_by(\n link_code=code,\n link_type=link_type,\n used_date=None\n ).first()\n\n if activation_link:\n user = AuthUser.query.filter_by(id=activation_link.auth_user_id).first()\n if not user:\n raise errors.ActivationCodeExpiredOrInvalid()\n if activation_link.used_date:\n raise errors.ActivationCodeExpiredOrInvalid()\n if activation_link.use_attempts >= cls.__config['max_activation_attempts_count']:\n raise errors.ActivationAttemptsCountExceeded()\n else:\n if user:\n real_link = UserActivationLink.query.filter_by(\n auth_user_id=user.id,\n link_type=link_type,\n used_date=None\n ).first()\n if real_link:\n real_link.use_attempts += 1\n db.session.commit()\n if real_link.use_attempts > cls.__config['max_activation_attempts_count']:\n raise errors.ActivationAttemptsCountExceeded()\n raise errors.ActivationCodeExpiredOrInvalid()\n\n if activation_link.link_type == ConfirmationLinkTypeEnum.CLT_MOBILE:\n # if user.mobile != activation_link.new_mobile:\n user.mobile = activation_link.new_mobile\n 
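# confirming the contact also re-enables the account\n            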
user.mobile_confirmed = True\n user.enabled = True\n elif activation_link.link_type == ConfirmationLinkTypeEnum.CLT_EMAIL:\n # if user.email != activation_link.new_email:\n user.email = activation_link.new_email\n user.email_confirmed = True\n user.enabled = True\n activation_link.used_date = datetime.utcnow()\n db.session.commit()\n return user\n\n @staticmethod\n def generate_password():\n return ''.join(random.choice(string.digits) for _ in range(8))\n\n @classmethod\n def update_profile(cls, auth_user, new_email, new_mobile=None):\n\n temp_user = auth_user.temporal\n if new_email:\n new_email = new_email.lower()\n\n if new_email and auth_user.email != new_email:\n if AuthUser.query.filter_by(email=new_email).count():\n raise errors.DuplicateEmail()\n\n auth_user.email = new_email\n auth_user.email_confirmed = False\n\n if temp_user:\n new_password = UserManager.generate_password()\n password = unicode(encrypt_password(new_password))\n\n link_code = cls._generate_link_code(True)\n\n activation_link = UserActivationLink(\n link_type=ConfirmationLinkTypeEnum.CLT_PASSWORD,\n link_code=link_code,\n auth_user=auth_user\n )\n db.session.add(activation_link)\n\n from fw.async_tasks import send_email\n\n schema = cls.__config['WEB_SCHEMA']\n domain = cls.__config['DOMAIN']\n selfcare_url = u\"%s://%s/account/?\" % (schema, domain)\n selfcare_url = utm_args(selfcare_url, 'new_account_user_notify', auth_user.id)\n selfcare_url = cls.make_auth_url(selfcare_url, auth_user).get_url(cls.__config)\n tmpl_data = {\n 'password': new_password,\n \"link_code\": activation_link.link_code,\n 'email': new_email,\n \"domain\": domain,\n \"schema\": schema,\n \"user_id\": auth_user.uuid,\n \"selfcare_url\": selfcare_url # {{'?'|utm_args('', user_id)}}\n }\n send_email.send_email.delay(new_email, \"new_account_user_notify\", **tmpl_data)\n auth_user.password = password\n auth_user.temporal = False\n\n UserManager.confirm_new_email(auth_user, new_email, email_type=\"confirm_email\")\n db.session.commit()\n\n elif new_email == u'':\n # empty string: delete current email\n raise errors.InvalidParameterValue('email')\n\n if new_mobile and auth_user.mobile != new_mobile:\n if AuthUser.query.filter_by(mobile=new_mobile).count():\n raise errors.DuplicateMobile()\n\n auth_user.mobile = new_mobile\n auth_user.mobile_confirmed = False\n UserManager.confirm_new_mobile(auth_user, new_mobile)\n db.session.commit()\n elif new_mobile == u\"\":\n auth_user.mobile = None\n auth_user.mobile_confirmed = False\n db.session.commit()\n\n return auth_user\n\n @classmethod\n def confirm_new_mobile(cls, auth_user, new_mobile):\n UserActivationLink.query.filter(\n UserActivationLink.auth_user == auth_user,\n UserActivationLink.link_type == ConfirmationLinkTypeEnum.CLT_MOBILE,\n UserActivationLink.used_date == None).delete()\n\n link_code = cls._generate_link_code(False)\n\n activation_link = UserActivationLink(\n new_mobile=new_mobile,\n link_type=ConfirmationLinkTypeEnum.CLT_MOBILE,\n link_code=link_code,\n auth_user=auth_user\n )\n db.session.add(activation_link)\n db.session.commit()\n\n cls.__logger.info(\"Sending sms to %s\" % str(new_mobile))\n from fw.async_tasks import send_sms_task\n\n result = send_sms_task.send_sms.delay(\n new_mobile,\n \"activate_account\",\n link_code=activation_link.link_code,\n service_name=cls.__config['service_name']\n )\n cls.__logger.info(\"Sending sms to %s result: %s\" % (str(new_mobile), str(result)))\n\n @classmethod\n def confirm_new_email(cls, auth_user, new_email, 
email_type=\"activate_account\"):\n link = UserActivationLink.query.filter_by(auth_user=auth_user).first()\n if link:\n db.session.delete(link)\n new_email = new_email.lower()\n link_code = UserManager._generate_link_code(True)\n activation_link = UserActivationLink(\n auth_user=auth_user,\n link_code=link_code,\n new_email=new_email,\n link_type=ConfirmationLinkTypeEnum.CLT_EMAIL\n )\n db.session.add(activation_link)\n\n tmpl_data = {\n \"uid\": auth_user.uuid,\n \"link_code\": activation_link.link_code,\n \"name\": auth_user.name,\n \"patronymic\": auth_user.patronymic,\n \"domain\": cls.__config['DOMAIN'],\n \"schema\": cls.__config['WEB_SCHEMA'],\n \"email_confirm_link\": \"%s://%s/confirm/?code=%s\" % (\n cls.__config['WEB_SCHEMA'], cls.__config['DOMAIN'], activation_link.link_code)\n }\n from fw.async_tasks import send_email\n\n db.session.commit()\n send_email.send_email.delay(new_email, email_type, **tmpl_data)\n\n @classmethod\n def send_password_recovery_code(cls, email, mobile):\n auth_user = None\n\n if email:\n auth_user = AuthUser.query.filter_by(email=email).first()\n elif mobile:\n auth_user = AuthUser.query.filter_by(mobile=mobile).first()\n\n if not auth_user:\n raise errors.UserNotFound()\n\n # check last password drop attempts: time and count\n last_drop_date = auth_user.last_password_drop_attempts_date\n\n if last_drop_date and last_drop_date >= datetime.utcnow() - timedelta(days=1):\n # the attempt was less then 1 day ago\n if auth_user.last_password_drop_attempts_count >= 2:\n raise errors.RecoveryAttemptsCountExceeded()\n else:\n auth_user.last_password_drop_attempts_count += 1\n else:\n auth_user.last_password_drop_attempts_count = 1\n auth_user.last_password_drop_attempts_date = datetime.utcnow()\n\n UserActivationLink.query.filter_by(auth_user_id=auth_user.id,\n link_type=ConfirmationLinkTypeEnum.CLT_PASSWORD).delete()\n\n link_code = cls._generate_link_code(bool(email))\n\n activation_link = UserActivationLink(\n auth_user=auth_user,\n link_type=ConfirmationLinkTypeEnum.CLT_PASSWORD,\n link_code=link_code\n )\n db.session.add(activation_link)\n db.session.commit()\n\n if email:\n from fw.async_tasks import send_email\n\n tmpl_data = {\n \"user_id\": auth_user.uuid,\n \"link_code\": activation_link.link_code,\n \"domain\": cls.__config['DOMAIN'],\n \"web_schema\": cls.__config['WEB_SCHEMA'],\n }\n send_email.send_email.delay(auth_user.email, 'change_password', **tmpl_data)\n return\n\n from fw.async_tasks import send_sms_task\n\n send_sms_task.send_sms(\n mobile,\n 'activate_account',\n link_code=activation_link.link_code,\n service_name=cls.__config['service_name']\n )\n\n\n @classmethod\n def change_password(cls, user_id, code, old_password, new_password):\n user = AuthUser.query.filter_by(uuid=user_id).first()\n if not user:\n raise errors.UserNotFound()\n\n if code:\n return UserManager._change_password_by_code(code, user, new_password)\n\n if user.password is not None:\n if not check_password(old_password, user.password):\n raise errors.InvalidCurrentPassword()\n\n user.password = unicode(encrypt_password(new_password))\n db.session.commit()\n return user\n\n @classmethod\n def _change_password_by_code(cls, code, user, new_password):\n code_links = UserActivationLink.query.filter_by(auth_user=user, used_date=None, link_type=ConfirmationLinkTypeEnum.CLT_PASSWORD)\n\n if not code_links.count():\n raise errors.ActivationCodeExpiredOrInvalid()\n\n max_use_count = 0\n found_link = None\n for code_link in code_links:\n code_link.use_attempts += 1\n\n if 
code_link.use_attempts > max_use_count:\n max_use_count = code_link.use_attempts\n if code_link.link_code != code:\n continue\n\n found_link = code_link\n\n if not found_link or max_use_count > 5:\n raise errors.ActivationCodeExpiredOrInvalid()\n\n found_link.used_date = datetime.utcnow()\n user = found_link.auth_user\n user.password = unicode(encrypt_password(new_password))\n db.session.commit()\n return user\n\n @classmethod\n def get_user_by_code(cls, user_id, code):\n user = AuthUser.query.filter_by(uuid=user_id).first()\n if not user:\n raise errors.ActivationCodeExpiredOrInvalid()\n\n code_links = UserActivationLink.query.filter_by(auth_user=user, used_date=None, link_code=code)\n if not code_links.count():\n raise errors.ActivationCodeExpiredOrInvalid()\n\n code_link = code_links[0]\n if code_link.use_attempts > cls.__config['user_by_code_tries_count']:\n raise errors.ActivationCodeExpiredOrInvalid()\n\n user = AuthUser.query.filter_by(id=code_link.auth_user_id).first()\n if not user or user_id != user.uuid:\n raise errors.UserNotFound()\n\n return user\n\n @classmethod\n def get_user_email(cls, user_id):\n user = AuthUser.query.filter_by(id=user_id).first()\n if not user:\n return\n\n return user.email or \"\"\n\n @staticmethod\n def generate_password():\n return ''.join(random.choice(string.digits) for _ in range(8))\n\n @staticmethod\n def make_auth_url(url, owner, expiration_td=timedelta(days=7)):\n assert url\n assert expiration_td\n assert owner\n\n AuthorizationUrl.query.filter_by(url=url, owner=owner).delete()\n auth_url = AuthorizationUrl(\n url=url,\n expire_at=datetime.now() + expiration_td,\n owner=owner\n )\n db.session.add(auth_url)\n db.session.commit()\n return auth_url\n\n @staticmethod\n def authorize_by_url(go_id):\n auth_url = AuthorizationUrl.query.filter_by(id=go_id).scalar()\n if not auth_url:\n raise Exception()\n\n auth_url.used_times += 1\n db.session.commit()\n\n if auth_url.expire_at >= datetime.utcnow():\n user = auth_url.owner\n if user:\n login_user(user)\n\n return auth_url.url\n" }, { "alpha_fraction": 0.7032700777053833, "alphanum_fraction": 0.708518385887146, "avg_line_length": 29.580245971679688, "blob_id": "9431c333a3024f436d0f41bf45e791dbde2cad71", "content_id": "81d6c25235c6b5715ff47f1563da21c57b2b574f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2477, "license_type": "no_license", "max_line_length": 86, "num_lines": 81, "path": "/app/services/russian_post/api.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom flask import Blueprint\nfrom flask_login import login_required, current_user\nfrom fw.api import errors\n\nfrom fw.api.args_validators import validate_arguments, ArgumentValidator\nfrom fw.api.base_handlers import api_view\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.db_fields import DocumentBatchDbObject\nfrom services.russian_post.db_models import RussianPostTrackingItem\n\nrussian_post_bp = Blueprint('russian_post_bp', __name__)\n\n\n@russian_post_bp.route('/external/russianpost/mail/status/', methods=['GET'])\n@api_view\n@login_required\n@validate_arguments(\n batch_id=ArgumentValidator(required=True),\n #tracking=ArgumentValidator(required=True)\n)\ndef get_mail_status(batch_id=None):\n tracking = RussianPostTrackingItem.query.filter(\n RussianPostTrackingItem.batch_id == batch_id,\n RussianPostTrackingItem.owner_id != 111111\n ).first() # not scalar\n if not tracking:\n raise 
errors.PostTrackingItemNotFound()\n\n return {'result': {\n 'status': tracking.status,\n 'status_caption': tracking.status_caption\n }}\n\n\n@russian_post_bp.route('/external/russianpost/mail/track/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(\n batch_id=ArgumentValidator(required=True),\n tracking=ArgumentValidator(required=True)\n)\ndef track_mail_status(batch_id=None, tracking=None):\n tracking = tracking.strip()\n if not tracking:\n raise errors.InvalidParameterValue('tracking')\n\n tracking_item = RussianPostTrackingItem.query.filter(\n RussianPostTrackingItem.batch_id == batch_id,\n RussianPostTrackingItem.tracking == tracking,\n RussianPostTrackingItem.owner_id != 111111\n ).first()\n\n if tracking_item:\n return {\n 'result': True\n }\n\n RussianPostTrackingItem.query.filter(\n RussianPostTrackingItem.batch_id == batch_id,\n RussianPostTrackingItem.owner == current_user\n ).delete()\n sqldb.session.commit()\n\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id, deleted=False).scalar()\n if not batch:\n raise errors.BatchNotFound()\n\n new_tracking = RussianPostTrackingItem(\n batch=batch,\n owner=current_user,\n tracking=tracking\n )\n\n sqldb.session.add(new_tracking)\n sqldb.session.commit()\n\n from services.russian_post.async_tasks import get_tracking_info_async\n get_tracking_info_async.delay(batch_id=batch.id)\n\n return {'result': True}\n" }, { "alpha_fraction": 0.6494229435920715, "alphanum_fraction": 0.6495587229728699, "avg_line_length": 40.13966369628906, "blob_id": "2244251e4fb7c9bff7f413d8aeb88d34cd05c5bf", "content_id": "9b56e9876da85a5b117abf911909186133f04582", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7365, "license_type": "no_license", "max_line_length": 111, "num_lines": 179, "path": "/app/services/yurist/api.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom datetime import datetime\nimport json\nfrom flask import Blueprint, current_app\nfrom flask_login import login_required, current_user\nfrom fw.api import errors\nfrom fw.api.args_validators import validate_arguments, BoolTypeValidator, ArgumentValidator\nfrom fw.api.base_handlers import api_view\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.db_fields import DocumentBatchDbObject\nfrom fw.storage.file_storage import FileStorage\nfrom services.yurist import yurist_manager\nfrom services.yurist.data_model.enums import YuristBatchCheckStatus\nfrom services.yurist.data_model.fields import YuristBatchCheck\nfrom services.yurist.data_model.models import YuristBatchCheckObject, YuristCheckFilesObject\n\nyurist_bp = Blueprint('yurist', __name__)\n\n\n@yurist_bp.route('/batch/yurist/set/', methods=['POST'])\n@api_view\n@login_required\n@validate_arguments(\n batch_id=ArgumentValidator(required=True),\n check=BoolTypeValidator(required=True),\n file_list=ArgumentValidator(required=False),\n typos_correction=BoolTypeValidator(required=False)\n)\ndef yurist_set(batch_id=None, check=None, file_list=None, typos_correction=False):\n typos_correction = bool(typos_correction)\n batch_db = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).first()\n if not batch_db:\n raise errors.BatchNotFound()\n\n if check:\n new = True\n # search for any active check\n cur_check_obj = YuristBatchCheckObject.query.filter(\n YuristBatchCheckObject.batch_id == batch_id,\n 
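# \"active\" = status not yet in FINAL_STATUSES\n            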
YuristBatchCheckObject.status.notin_(YuristBatchCheckStatus.FINAL_STATUSES)\n ).order_by(YuristBatchCheckObject.create_date.desc()).first()\n\n if cur_check_obj:\n new = False\n\n real_file_list = []\n file_descr = []\n if file_list:\n try:\n file_list_data = json.loads(file_list)\n for file_obj in file_list_data:\n file_id = file_obj['id']\n file_obj = FileStorage.get_file(file_id)\n if file_obj:\n real_file_list.append(file_obj)\n else:\n current_app.logger.warn(u\"Failed to find file with id %s\" % file_id)\n except Exception:\n current_app.logger.exception(u\"Failed to parse file list: %s\" % file_list)\n # Insert new check\n if new:\n yurist_batch_check = YuristBatchCheckObject(**{\n 'batch_id': batch_id,\n 'create_date': datetime.utcnow(),\n 'status': YuristBatchCheckStatus.YBS_WAIT,\n 'typos_correction': typos_correction\n })\n sqldb.session.add(yurist_batch_check)\n for file_obj in real_file_list:\n attach = YuristCheckFilesObject()\n attach.files_id = file_obj.id\n yurist_batch_check.attached_files.append(attach)\n else:\n YuristCheckFilesObject.query.filter_by(check_id=cur_check_obj.id).delete()\n for file_obj in real_file_list:\n attach = YuristCheckFilesObject()\n attach.files_id = file_obj.id\n cur_check_obj.attached_files.append(attach)\n cur_check_obj.create_date = datetime.utcnow()\n cur_check_obj.typos_correction = typos_correction\n cur_check_obj.status = YuristBatchCheckStatus.YBS_WAIT\n\n sqldb.session.commit()\n yurist_manager.yurist_check(current_app.config, batch_db, real_file_list, current_app.logger)\n else:\n # search for active check\n cur_check_obj = YuristBatchCheckObject.query.filter(\n YuristBatchCheckObject.batch_id == batch_id,\n YuristBatchCheckObject.status.notin_(YuristBatchCheckStatus.FINAL_STATUSES)\n ).order_by(YuristBatchCheckObject.create_date.desc()).first()\n # If found any: set status to refused\n if cur_check_obj:\n cur_check_obj.status = YuristBatchCheckStatus.YBS_REFUSED\n sqldb.session.commit()\n return {'result': True}\n\n return {'result': True}\n\n\n@yurist_bp.route('/batch/yurist/', methods=['GET'])\n@api_view\n@login_required\n@validate_arguments(batch_id=ArgumentValidator(required=True))\ndef yurist_get(batch_id=None):\n batch_db = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner=current_user, deleted=False).first()\n if not batch_db:\n raise errors.BatchNotFound()\n # get active or last refused\n check_obj = YuristBatchCheckObject.query.filter(\n YuristBatchCheckObject.batch_id == batch_id,\n YuristBatchCheckObject.status.notin_(YuristBatchCheckStatus.FINAL_STATUSES)\n ).first()\n if not check_obj:\n # get the last one\n check_obj = YuristBatchCheckObject.query.filter_by(batch_id=batch_id).order_by(\n YuristBatchCheckObject.create_date.desc()\n ).first()\n\n if check_obj:\n booking = YuristBatchCheck.db_obj_to_field(check_obj).get_api_structure()\n if booking['status'] == YuristBatchCheckStatus.YBS_WAIT:\n booking['status'] = YuristBatchCheckStatus.YBS_IN_PROGRESS\n return {'result': booking}\n\n return {'result': {\n 'batch_id': batch_id,\n 'attached_files': [],\n 'typos_correction': False,\n 'status': YuristBatchCheckStatus.YBS_NEW\n }}\n\n\n@yurist_bp.route('/batch/yurist/commit/', methods=['GET'])\n@api_view\n@validate_arguments(\n batch_check_id=ArgumentValidator(required=True),\n success=BoolTypeValidator(required=True),\n)\ndef yurist_commit(batch_check_id=None, success=None):\n if success:\n for i in YuristBatchCheckObject.query.filter(\n YuristBatchCheckObject.id == batch_check_id,\n 
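# success: checks still waiting/in progress/failed are promoted to SUCCESS below\n                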
YuristBatchCheckObject.status.in_([\n YuristBatchCheckStatus.YBS_IN_PROGRESS,\n YuristBatchCheckStatus.YBS_FAILED,\n YuristBatchCheckStatus.YBS_WAIT])\n ):\n i.status = YuristBatchCheckStatus.YBS_SUCCESS\n\n for i in YuristBatchCheckObject.query.filter(\n YuristBatchCheckObject.batch_id == batch_check_id,\n YuristBatchCheckObject.status.in_([\n YuristBatchCheckStatus.YBS_IN_PROGRESS,\n YuristBatchCheckStatus.YBS_FAILED,\n YuristBatchCheckStatus.YBS_WAIT])\n ):\n i.status = YuristBatchCheckStatus.YBS_SUCCESS\n else:\n for i in YuristBatchCheckObject.query.filter(\n YuristBatchCheckObject.id == batch_check_id,\n YuristBatchCheckObject.status.in_([\n YuristBatchCheckStatus.YBS_IN_PROGRESS,\n YuristBatchCheckStatus.YBS_SUCCESS,\n YuristBatchCheckStatus.YBS_WAIT])\n ):\n i.status = YuristBatchCheckStatus.YBS_FAILED\n\n for i in YuristBatchCheckObject.query.filter(\n YuristBatchCheckObject.batch_id == batch_check_id,\n YuristBatchCheckObject.status.in_([\n YuristBatchCheckStatus.YBS_IN_PROGRESS,\n YuristBatchCheckStatus.YBS_SUCCESS,\n YuristBatchCheckStatus.YBS_WAIT])\n ):\n i.status = YuristBatchCheckStatus.YBS_FAILED\n sqldb.session.commit()\n\n return {'new_status': YuristBatchCheckStatus.YBS_SUCCESS if success else YuristBatchCheckStatus.YBS_FAILED}\n\n" }, { "alpha_fraction": 0.6639053225517273, "alphanum_fraction": 0.6662721633911133, "avg_line_length": 32.79999923706055, "blob_id": "82920558f01a7ddd826f012ee0bfc05569ad78ac", "content_id": "2882b70caa3f1578faaade23d4fcb9ea1103f20d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 845, "license_type": "no_license", "max_line_length": 76, "num_lines": 25, "path": "/app/services/pay/subs_manager.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\nfrom services.pay.models import PaymentSubscriptionObject\n\n\nclass SubscriptionManager(object):\n\n @staticmethod\n def if_user_subscribed(auth_user_id):\n user_subs = PaymentSubscriptionObject.query.filter(\n PaymentSubscriptionObject.user_id == auth_user_id,\n PaymentSubscriptionObject.end_dt.__ge__(datetime.utcnow())\n )\n if user_subs.count():\n return True\n return False\n\n @staticmethod\n def get_user_active_subscription(user_id):\n user_sub = PaymentSubscriptionObject.query.filter(\n PaymentSubscriptionObject.user_id == user_id,\n PaymentSubscriptionObject.end_dt.__ge__(datetime.utcnow())\n ).order_by(PaymentSubscriptionObject.end_dt.desc()).limit(1).first()\n\n return user_sub\n" }, { "alpha_fraction": 0.3816784620285034, "alphanum_fraction": 0.42785120010375977, "avg_line_length": 39.86180877685547, "blob_id": "f91e44e30c03fabf9bc70fe1bcd8783a3444bc4e", "content_id": "12c5a3d7cbe9bb15fbf4f9ea1faca850bf58d2ed", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 18700, "license_type": "no_license", "max_line_length": 235, "num_lines": 398, "path": "/jb_tests/test_external_tools.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime, timedelta\nimport email\nfrom email.header import decode_header\nfrom services.ifns.ifns_manager import IfnsDescription\n\nSERVICES = [{\n 'id': 180,\n 'title': u'Гос. регистрация юридических и физическил лиц. Сведения из гос. реестров. Информирование. (Регистрационный центр)',\n 'check': 0,\n 'subservices': [{\n 'id': 181,\n 'title': u'Прием документов при регистрации от гос. 
предприятий и учреждений',\n }, {\n 'id': 182,\n 'title': u'Прием документов при регистрации лично от заявителя',\n }, {\n 'id': 275,\n 'title': u'Прием документов при регистрации создания (1-но ЮЛ)',\n }, {\n 'id': 276,\n 'title': u'Прием документов при регистрации изменений (1-но ЮЛ)*',\n }, {\n 'id': 277,\n 'title': u'Прием документов при регистрации изменений (не более 3-х ЮЛ)*',\n }, {\n 'id': 278,\n 'title': u'Прием документов при регистрации реорганизации и ликвидации (1-но ЮЛ)',\n }, {\n 'id': 188,\n 'title': u'Выдача документов при регистрации гос. предприятий и учреждений',\n }, {\n 'id': 186,\n 'title': u'Выдача документов при регистрации лично заявителю',\n }, {\n 'id': 189,\n 'title': u'Выдача документов при регистрации по доверенности',\n }, {\n 'id': 279,\n 'title': u'Прием по записи онлайн на подачу документов при регистрации ЮЛ',\n }, {\n 'id': 282,\n 'title': u'Прием по записи через портал государственных услуг на подачу документов при регистрации ЮЛ',\n }, {\n 'id': 191,\n 'title': u'Прием заявлений на предоставление сведений из ЕГРЮЛ/ЕГРИП от гос. предприятий и учреждений',\n }, {\n 'id': 190,\n 'title': u'Прием заявлений на предоставление сведений из ЕГРЮЛ/ЕГРИП лично от заявителя',\n }, {\n 'id': 246,\n 'title': u'Прием заявлений на предоставление сведений из ЕГРЮЛ/ЕГРИП по доверенности',\n }, {\n 'id': 285,\n 'title': u'Выдача документов при регистрации создания (1-но ЮЛ)',\n }, {\n 'id': 286,\n 'title': u'Выдача документов при регистрации создания (более 1-го ЮЛ)',\n }, {\n 'id': 193,\n 'title': u'Прием заявлений на предоставление сведений из ЕГРЮЛ (в отношении 1-го ЮЛ)',\n }, {\n 'id': 192,\n 'title': u'Прием заявлений на предоставление сведений из ЕГРЮЛ (в отношении более 1-го ЮЛ)',\n }, {\n 'id': 287,\n 'title': u'Выдача документов при регистрации изменений / реорганизации / ликвидации (1-но ЮЛ)',\n }, {\n 'id': 288,\n 'title': u'Выдача документов при регистрации изменений / реорганизации / ликвидации (более 1-го ЮЛ)',\n }, {\n 'id': 195,\n 'title': u'Выдача сведений из ЕГРЮЛ/ЕГРИП гос. предприятиям и учреждениям',\n }, {\n 'id': 194,\n 'title': u'Выдача сведений из ЕГРЮЛ/ЕГРИП лично заявителю',\n }, {\n 'id': 199,\n 'title': u'Прием заявлений на исправление технической ошибки в ЕГРЮЛ',\n }, {\n 'id': 200,\n 'title': u'Выдача документов после исправления технической ошибки в ЕГРЮЛ',\n }, {\n 'id': 249,\n 'title': u'Информирование по порядку регистрации',\n }, {\n 'id': 250,\n 'title': u'Прием обращений (жалоб, предложений, заявлений)',\n }, {\n 'id': 295,\n 'title': u'Прием заявлений на повторную выдачу документа о регистрации и повторная выдача документа о регистрации',\n }, {\n 'id': 300,\n 'title': u'Выдача сведений из ЕГРЮЛ (в отношении 1-го лица)',\n }, {\n 'id': 303,\n 'title': u'Выдача сведений из ЕГРЮЛ (в отношении более 1-го лица)',\n }, {\n 'id': 309,\n 'title': u'Информирование по исправлению тех. 
ошибок в ЕГРЮЛ/ЕГРИП',\n }]\n }]\n\nKNOWN_FSN_INTERNAL_ID_MAP = {\n 1001: 95,\n}\n\nMONTHS = {\n u'Январь': 1,\n u'Февраль': 2,\n u'Март': 3,\n u'Апрель': 4,\n u'Май': 5,\n u'Июнь': 6,\n u'Июль': 7,\n u'Август': 8,\n u'Сентябрь': 9,\n u'Октябрь': 10,\n u'Ноябрь': 11,\n u'Декабрь': 12,\n}\n\n\ndef get_detailed_address(address):\n return {\n \"suggestions\": [\n {\n \"value\": \"Хабаровский край\",\n \"unrestricted_value\": \"Хабаровский край\",\n \"data\": {\n \"qc_complete\": None,\n \"qc_house\": None,\n \"postal_code\": \"\",\n \"postal_box\": None,\n \"country\": \"Россия\",\n \"region_type\": \"край\",\n \"region_type_full\": \"край\",\n \"region\": \"Хабаровский\",\n \"area_type\": None,\n \"area_type_full\": None,\n \"area\": None,\n \"city_type\": None,\n \"city_type_full\": None,\n \"city\": None,\n \"settlement_type\": None,\n \"settlement_type_full\": None,\n \"settlement\": None,\n \"street_type\": None,\n \"street_type_full\": None,\n \"street\": None,\n \"house_type\": None,\n \"house_type_full\": None,\n \"house\": None,\n \"block_type\": None,\n \"block\": None,\n \"flat_area\": None,\n \"flat_type\": None,\n \"flat\": None,\n \"tax_office\": \"2700\",\n \"kladr_id\": \"2700000000000\",\n \"okato\": \"8000000000\",\n \"oktmo\": None,\n \"unparsed_parts\": None,\n \"qc\": None\n }\n }\n ]\n }\n\n\ndef get_ifns_by_address(address, service_nalog_ru_url):\n ifns = IfnsDescription({\n 'kod': 1001,\n 'naimk': u'отделение ифнс',\n 'plat': {\n },\n 'rof': {\n 'code': 10012,\n 'naimk': u'Межрайонная инспекция Федеральной налоговой службы №15 по Санкт-Петербургу'\n },\n 'rou': {\n 'code': 10011,\n 'naimk': u'Межрайонная инспекция Федеральной налоговой службы №15 по Санкт-Петербургу'\n }\n })\n\n return ifns\n\n\ndef get_ifns_by_code(code, service_nalog_ru_url):\n ifns = IfnsDescription({\n 'kod': 7726,\n 'naimk': u'отделение ифнс',\n 'plat': {\n },\n 'rof': {\n 'code': 10012,\n 'naimk': u'Межрайонная инспекция Федеральной налоговой службы №15 по Санкт-Петербургу'\n },\n 'rou': {\n 'code': 10011,\n 'naimk': u'Межрайонная инспекция Федеральной налоговой службы №15 по Санкт-Петербургу'\n }\n })\n\n return ifns\n\n\ndef morph_with_morpher(word_or_phrase):\n from fw.documents.morpher_tools import morph_with_morpher as _morpher\n return _morpher(word_or_phrase)\n\n\ndef get_nalog_ru_time_slots(person_data, company_data, internal_ifns_number, internal_ifns_service, IFNS_LOGGER):\n day_info = [{\n 'date': (datetime.now() + timedelta(days=3)).strftime(\"%Y-%m-%d\"),\n 'time_slots': [{\n 'slot_start': u'10:30',\n 'slot_end': u'11:00'\n }, {\n 'slot_start': u'11:30',\n 'slot_end': u'12:00'\n }]\n }]\n return day_info\n\n\ndef book_ifns(person_data, company_data, internal_ifns_number, internal_ifns_service, dt, logger):\n return {\n \"ifns\": u'Межрайонная ИФНС №10000000',\n \"service\": u'Регистрация ООО',\n \"date\": (datetime.now() + timedelta(days=2)).strftime(\"%Y-%m-%dT%H:%M:%S\"),\n \"window\": u\"-1\",\n \"address\": u\"село Гадюкино, ул. Разъезжая 2\",\n \"phone\": u\"322223233\",\n \"how_to_get\": u\"неизвестно\",\n \"code\": u\"111\"\n }\n\n\ndef get_registration_ifns(service_nalog_ru_url, address_ifns=None):\n return {'adres': \"село Гадюкино, ул. 
Разъезжая 2\", 'rou': {'naimk': u'Межрайонная ИФНС №10000000'}}\n\n\nclass TestMailer(object):\n def __init__(self):\n self.mails = []\n\n def send_email(self, addr_from, addr_to, message):\n if message:\n msg = email.message_from_string(message)\n if msg:\n headers = dict(msg.items())\n new_headers = {}\n for header_name in headers:\n header_val = headers[header_name]\n if header_val:\n d = decode_header(header_val)\n if d:\n d = d[0]\n if d:\n d = d[0]\n if d:\n header_val = d.decode('utf-8')\n new_headers[header_name] = header_val\n parts = [part.get_payload(decode=True) for part in msg.walk() if not part.is_multipart()]\n\n subject = new_headers.get('Subject', '')\n message = {\n 'headers': headers,\n 'subject': subject,\n 'parts': parts\n }\n\n self.mails.append({\n 'from': addr_from,\n 'to': addr_to,\n 'message': message\n })\n\n\nclass TestSmsSender(object):\n def __init__(self, *args, **kwargs):\n self.sms = []\n\n def get_sms_cost(self, data):\n return 0.5\n\n def send(self, data):\n self.sms.append(data)\n\n\nclass Cache(object):\n def set(self, key, val, time=0, min_compress_len=0):\n pass\n\n def add(self, key, val, time=0, min_compress_len=0):\n pass\n\n def replace(self, key, val, time=0, min_compress_len=0):\n pass\n\n def set_multi(self, mapping, time=0, key_prefix='', min_compress_len=0):\n pass\n\n def get(self, key):\n pass\n\n def get_multi(self, keys, key_prefix=''):\n pass\n\n def incr(self, key, delta=1):\n pass\n\n def decr(self, key, delta=1):\n pass\n\n def delete(self, key, time=0):\n pass\n\n def delete_multi(self, keys, time=0, key_prefix=''):\n pass\n\n\ndef dadata_clean(method, data):\n return [{\n u'city_type': None,\n u'settlement_type': None,\n u'settlement_type_full': None,\n u'qc_complete': 0,\n u'flat': u'705',\n u'flat_type': u'\\u043e\\u0444',\n u'house': u'44',\n u'unparsed_parts': None,\n u'okato': u'40273563000',\n u'region_type': u'\\u0433',\n u'postal_box': None,\n u'street': u'\\u0421\\u0432\\u0435\\u0440\\u0434\\u043b\\u043e\\u0432\\u0441\\u043a\\u0430\\u044f',\n u'postal_code': u'195027',\n u'oktmo': u'40330000',\n u'qc_geo': 1,\n u'timezone': u'UTC+4',\n u'geo_lon': u'30.405708993',\n u'settlement': None,\n u'city_type_full': None,\n u'kladr_id': u'7800000000012500139',\n u'city': None,\n u'flat_area': None,\n u'area': None,\n u'area_type_full': None,\n u'country': u'\\u0420\\u043e\\u0441\\u0441\\u0438\\u044f',\n u'region': u'\\u0421\\u0430\\u043d\\u043a\\u0442-\\u041f\\u0435\\u0442\\u0435\\u0440\\u0431\\u0443\\u0440\\u0433',\n u'block_type': u'\\u043b\\u0438\\u0442\\u0435\\u0440',\n u'geo_lat': u'59.9614399',\n u'area_type': None,\n u'source': u'\\u0421\\u0430\\u043d\\u043a\\u0442-\\u041f\\u0435\\u0442\\u0435\\u0440\\u0431\\u0443\\u0440\\u0433, \\u0421\\u0432\\u0435\\u0440\\u0434\\u043b\\u043e\\u0432\\u0441\\u043a\\u0430\\u044f \\u043d\\u0430\\u0431 44\\u042e \\u043e\\u0444 705',\n u'qc': 0,\n u'street_type': u'\\u043d\\u0430\\u0431',\n u'qc_house': 3,\n u'tax_office': u'7804',\n u'street_type_full': u'\\u043d\\u0430\\u0431\\u0435\\u0440\\u0435\\u0436\\u043d\\u0430\\u044f',\n u'house_type': u'\\u0434',\n u'region_type_full': u'\\u0433\\u043e\\u0440\\u043e\\u0434',\n u'block': u'\\u042e'\n }]\n\n\ndef get_ifns_registrations(name, date_from=None, date_to=None, ifns=None, service_nalog_ru_url=None, logger=None):\n result_items = []\n if name == u\"ЮРБЮРО ОНЛАЙН\":\n result_item = {'full_name': u\"ОБЩЕСТВО С ОГРАНИЧЕННОЙ ОТВЕТСТВЕННОСТЬЮ \\\"ЮРБЮРО ОНЛАЙН\\\"\",\n 'ogrn': \"1147847331367\", 'status': 'registered', 'reg_date': \"29.09.2014\"}\n 
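# Illustrative sketch of the RFC 2047 header handling in TestMailer.send_email
# above; self-contained and runnable on its own. The sample message, address
# and subject below are made up for the example.
import email
from email.header import decode_header

def decode_first_chunk(header_value):
    # decode_header() yields (raw, charset) pairs; like TestMailer, only the
    # first chunk is taken, which is enough for single-chunk headers.
    chunks = decode_header(header_value)
    if chunks and chunks[0][0]:
        raw, charset = chunks[0]
        if isinstance(raw, bytes):
            return raw.decode(charset or 'utf-8')
        return raw
    return header_value

_sample = "Subject: =?utf-8?b?0J/RgNC40LLQtdGC?=\r\nFrom: [email protected]\r\n\r\nbody"
_msg = email.message_from_string(_sample)
assert decode_first_chunk(_msg['Subject']) == u'Привет'  # the base64 chunk decodes to u'Привет'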
result_items.append(result_item)\n elif name == u\"ЮРБЮРО ОФФЛАЙН\":\n result_item = {'full_name': u\"ОБЩЕСТВО С ОГРАНИЧЕННОЙ ОТВЕТСТВЕННОСТЬЮ \\\"ЮРБЮРО ОФФЛАЙН\\\"\",\n 'status': 'registration_declined', 'reg_date': \"29.09.2014\"}\n result_items.append(result_item)\n elif name == u\"ЮРБЮРО ПАЙПЛАЙН\":\n result_item = {'full_name': u\"ОБЩЕСТВО С ОГРАНИЧЕННОЙ ОТВЕТСТВЕННОСТЬЮ \\\"ЮРБЮРО ПАЙПЛАЙН\\\"\",\n 'status': 'progress', 'reg_date': \"29.09.2014\"}\n result_items.append(result_item)\n return result_items\n\ndef check_car_policy(policy_series, policy_number, timeout=5.0):\n return {\n \"policyCreateDate\": u\"17.10.2013\",\n \"bsoSeries\": policy_series,\n \"bsoNumber\": policy_number,\n \"changeDate\": u\"06.02.2014\",\n \"policyBeginDate\": u\"20.10.2013\",\n \"policyEndDate\": u\"19.10.2014\",\n \"insCompanyName\": u\"РЕСО-ГАРАНТИЯ\",\n \"bsoStatusName\": u\"Находится у страхователя\",\n \"validCaptcha\": True,\n \"errorMessage\": None\n }\n\ncache = Cache()\n\n\n" }, { "alpha_fraction": 0.5598846673965454, "alphanum_fraction": 0.5679746866226196, "avg_line_length": 42.18875503540039, "blob_id": "a30fd321cd5aa523bbdacc3a91cdc0b099645c4d", "content_id": "cf79fc0350daf100b4fdd57e4c3cc595a0e5d529", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10877, "license_type": "no_license", "max_line_length": 120, "num_lines": 249, "path": "/app/services/notarius/data_model/models.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom bson import ObjectId\n\nfrom datetime import datetime, timedelta\nfrom common_utils import day_short_name, word_from_num\n\nfrom sqlalchemy import Column, Unicode, Integer, String, ForeignKey, DateTime, Boolean\nfrom sqlalchemy.orm import relationship\nfrom sqlalchemy.dialects.postgresql import JSONB\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.address_enums import RFRegionsEnum\nfrom fw.documents.fields.general_doc_fields import DocAddressField\nfrom fw.documents.fields.simple_doc_fields import DocDateTimeField\n\nday_end = lambda x: x + timedelta(days=1) - timedelta(seconds=1)\n\nONE_DAY = timedelta(days=1)\n\n\nclass ScheduleTypeEnum(object):\n ST_NORMAL = 'normal'\n ST_CYCLIC = 'cyclic'\n\n _NAMES = {\n ST_NORMAL: u\"обычная рабочая неделя\",\n ST_CYCLIC: u\"два через два, например\"\n }\n\nclass NotariusObject(sqldb.Model):\n __tablename__ = \"notarius\"\n\n id = Column(String, primary_key=True)\n\n surname = Column(Unicode, nullable=False)\n name = Column(Unicode, nullable=False)\n patronymic = Column(Unicode, nullable=True)\n\n schedule = Column(JSONB, nullable=False)\n schedule_caption = Column(Unicode, nullable=True)\n title = Column(Unicode, nullable=True)\n address = Column(JSONB)\n region = Column(Unicode, nullable=False)\n metro_station = Column(Unicode, nullable=True)\n\n @staticmethod\n def make_slots(notarius, this_day, earliest_time=None):\n slots = []\n notarius_schedule = notarius.schedule\n bookings = NotariusBookingObject.query.filter(\n NotariusBookingObject.dt.__ge__(this_day),\n NotariusBookingObject.dt.__le__(day_end(this_day)),\n NotariusBookingObject.notarius==notarius,\n NotariusBookingObject._discarded==False)\n busy_slots = [i.dt for i in bookings]\n\n t = datetime.strptime(\"2014-01-01T\" + notarius_schedule['start_time'] + \":00\", DocDateTimeField.FORMAT)\n t = datetime(this_day.year, this_day.month, this_day.day, t.hour, t.minute, 0)\n end_t = datetime.strptime(\"2014-01-01T\" + 
notarius_schedule['end_time'] + \":00\", DocDateTimeField.FORMAT)\n end_t = datetime(this_day.year, this_day.month, this_day.day, end_t.hour, end_t.minute, 0)\n\n lunch_start = datetime.strptime(\"2014-01-01T\" + notarius_schedule['lunch_start'] + \":00\",\n DocDateTimeField.FORMAT) if 'lunch_start' in notarius_schedule else None\n lunch_start = datetime(this_day.year, this_day.month, this_day.day, lunch_start.hour, lunch_start.minute,\n 0) if lunch_start else None\n lunch_end = datetime.strptime(\"2014-01-01T\" + notarius_schedule['lunch_end'] + \":00\",\n DocDateTimeField.FORMAT) if 'lunch_end' in notarius_schedule else None\n lunch_end = datetime(this_day.year, this_day.month, this_day.day, lunch_end.hour, lunch_end.minute,\n 0) if lunch_end else None\n while t < end_t:\n if earliest_time and t < earliest_time:\n t += timedelta(seconds=1800)\n continue\n middle = t + timedelta(seconds=900)\n if lunch_start and lunch_end:\n if lunch_start <= middle <= lunch_end:\n t += timedelta(seconds=1800)\n continue\n\n found = False\n for busy in busy_slots:\n busy_start = busy\n busy_end = busy + timedelta(seconds=1800)\n if busy_start <= middle <= busy_end:\n found = True\n break\n if found:\n t += timedelta(seconds=1800)\n continue\n\n slots.append({\n 'slot_start': t.strftime(\"%H:%M\"),\n 'slot_end': (t + timedelta(seconds=1800)).strftime(\"%H:%M\")\n })\n t += timedelta(seconds=1800)\n return slots\n\n @staticmethod\n def is_weekend(notarius, target_day):\n if notarius.schedule['type'] == ScheduleTypeEnum.ST_NORMAL:\n cur_day = target_day\n week_day = cur_day.date().isoweekday()\n return week_day in notarius.schedule['weekends']\n else:\n working_days_count = notarius.schedule['working_days_count']\n weekends_count = notarius.schedule['weekends_count']\n cycle_len = working_days_count + weekends_count\n start_day = datetime.strptime(notarius.schedule['start_working_day'], \"%Y-%m-%d\")\n days_to_day_to = (target_day - start_day).days\n if days_to_day_to < 0:\n return True\n cur_day = start_day + timedelta(days=int(days_to_day_to / cycle_len) * cycle_len)\n is_working_day = True\n cycle_i = 0\n while cur_day <= target_day:\n cycle_i += 1\n if cycle_i > working_days_count + weekends_count:\n is_working_day = True\n cycle_i = 1\n elif cycle_i > working_days_count:\n is_working_day = False\n if cur_day == target_day:\n return is_working_day\n cur_day += ONE_DAY\n\n @staticmethod\n def get_notarius_schedule(notarius, day_from=None, day_to=None):\n def make_day_info(day, earliest_time=None):\n\n return {\n 'nearest_time': day.strftime(DocDateTimeField.FORMAT),\n 'slots': NotariusObject.make_slots(notarius, day, earliest_time)\n }\n\n days = []\n if day_to < day_from:\n raise Exception(u\"Invalid date range\")\n\n notarius_schedule = notarius.schedule\n if notarius_schedule['type'] == ScheduleTypeEnum.ST_NORMAL:\n cur_day = day_from\n while cur_day < day_to:\n week_day = cur_day.date().isoweekday()\n if week_day not in notarius_schedule['weekends']:\n days.append(make_day_info(cur_day, day_from))\n cur_day += ONE_DAY\n else:\n working_days_count = notarius_schedule['working_days_count']\n weekends_count = notarius_schedule['weekends_count']\n cycle_len = working_days_count + weekends_count\n start_day = datetime.strptime(notarius_schedule['start_working_day'], \"%Y-%m-%d\")\n days_to_day_to = (day_from - start_day).days\n if days_to_day_to < 0:\n return days\n cur_day = start_day + timedelta(days=int(days_to_day_to / cycle_len) * cycle_len)\n is_working_day = True\n cycle_i = 0\n while cur_day < 
day_to:\n cycle_i += 1\n if cycle_i > working_days_count + weekends_count:\n is_working_day = True\n cycle_i = 1\n elif cycle_i > working_days_count:\n is_working_day = False\n if cur_day >= day_from and is_working_day:\n days.append(make_day_info(cur_day, day_from))\n cur_day += ONE_DAY\n\n return days\n\n def make_working_hours(self):\n if self.schedule_caption:\n return self.schedule_caption\n\n if self.schedule['type'] == ScheduleTypeEnum.ST_NORMAL:\n lunch = u\" с перерывом на обед с %s до %s\" % (self.schedule['lunch_start'], self.schedule['lunch_end']) if (\n self.schedule.get('lunch_start', None) and self.schedule.get('lunch_end', None)) else u\"\"\n weekends = u\", %s - выходной\" % \", \".join(day_short_name(day) for day in\n self.schedule.get('weekends',\n [])) if self.schedule.get('weekends',\n None) else u\"\"\n return u\"с %s до %s%s%s\" % (self.schedule['start_time'], self.schedule['end_time'], lunch, weekends)\n return u\"%d %s через %d %s\" % (\n self.schedule['working_days_count'], word_from_num(u\"дня\", self.schedule['working_days_count']),\n self.schedule['weekends_count'], word_from_num(u\"дня\", self.schedule['weekends_count']))\n\n def get_api_structure(self):\n result = {\n 'id': self.id,\n 'surname': self.surname,\n 'name': self.name,\n 'patronymic': self.patronymic,\n\n 'schedule_caption': self.schedule_caption,\n 'title': self.title,\n 'address': self.address,\n 'region': {\n 'code': self.region,\n 'title': RFRegionsEnum.get_name(self.region)\n },\n 'metro_station': self.metro_station\n }\n del_null_items = lambda x: dict([(k, v) for k, v in x.items() if v is not None])\n result = del_null_items(result)\n schedule = NotariusObject.make_slots(self, datetime.utcnow())\n result['working_hours'] = self.make_working_hours()\n caption = result.get('metro_station', u\"\")\n if caption:\n caption = u\"м. %s (\" % caption\n else:\n caption = u\"(\"\n title = u\"%s, \" % self.title if self.title else u\"\"\n address_field = DocAddressField()\n address_field.parse_raw_value(self.address, api_data=False)\n caption += result['working_hours'] + u\") — %s\" % title + address_field.as_string_friendly()\n # м. Пл.Александра Невского (с 9 до 18 по будним дням) — Новгородская ул. 
д.6\n result['caption'] = caption\n result['schedule'] = schedule\n address_str = address_field.as_string_friendly()\n if self.title:\n address_str += u\" \" + self.title\n return result\n\n\nclass NotariusBookingObject(sqldb.Model):\n __tablename__ = \"notarius_booking\"\n\n id = Column(String, primary_key=True, default=lambda: str(ObjectId()))\n batch_id = Column(String, ForeignKey('doc_batch.id'), nullable=True)\n batch = relationship(\"DocumentBatchDbObject\", uselist=False)\n\n owner_id = Column(Integer, ForeignKey('authuser.id'), index=True)\n owner = relationship(\"AuthUser\", uselist=False)\n\n notarius_id = Column(String, ForeignKey('notarius.id'), index=True)\n notarius = relationship(\"NotariusObject\", uselist=False)\n\n dt = Column(DateTime, nullable=False)\n address = Column(Unicode, nullable=False)\n _discarded = Column(Boolean, default=False)\n\n def get_api_structure(self):\n result = {\n 'id': self.id,\n 'batch_id': self.batch_id,\n 'notarius': self.notarius.get_api_structure(),\n 'dt': self.dt.strftime(DocDateTimeField.FORMAT),\n 'address': self.address\n }\n return result\n" }, { "alpha_fraction": 0.5178396105766296, "alphanum_fraction": 0.5343015789985657, "avg_line_length": 39.73716735839844, "blob_id": "968b05497743834700d768623da2be3e23d62d82", "content_id": "db4eb38a8fce2b24de50458c7781482885ce940a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 73839, "license_type": "no_license", "max_line_length": 172, "num_lines": 1773, "path": "/jb_tests/test_pack/test_objects.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport StringIO\nimport os\n\n# noinspection PyProtectedMember\nfrom flask import json\nfrom datetime import datetime, timedelta\nfrom bson.objectid import ObjectId\nfrom sqlalchemy.orm import make_transient\nfrom base_test_case import BaseTestCase, authorized\nfrom fw.auth.models import AuthUser\nfrom fw.catalogs.models import OkvedCatalogObject\nfrom fw.documents.address_enums import RFRegionsEnum, HouseTypeEnum, FlatTypeEnum\nfrom fw.documents.address_enums import StreetTypeEnum\nfrom fw.documents.batch_manager import BatchManager\nfrom fw.documents.db_fields import PrivatePersonDbObject, CompanyDbObject, DocumentBatchDbObject, BatchDocumentDbObject\nfrom fw.documents.enums import CompanyTypeEnum, DocumentTypeEnum, BatchStatusEnum, DocumentBatchTypeEnum, \\\n IncorporationFormEnum, PersonDocumentTypeEnum, UserDocumentStatus\nfrom fw.documents.fields.doc_fields import PrivatePerson, CompanyObject, UserDocument\nfrom fw.db.sql_base import db as sqldb\nfrom fw.storage.file_storage import FileStorage\nfrom fw.storage.models import FileObject\nfrom services.ifns.utils.process_okvad import process_okvad\n\n\nclass ObjectsTestCase(BaseTestCase):\n def import_okvad_catalog(self):\n okved_item = OkvedCatalogObject(\n id=str(ObjectId()),\n name=\"foo\",\n departments=[{\n 'id': str(ObjectId()),\n 'okvads': ['1', '2', '3'],\n 'name': 'bar',\n 'main_okvad': '1'\n }, {\n 'id': str(ObjectId()),\n 'okvads': ['1', '3', '5'],\n 'name': 'baz',\n 'main_okvad': '5'\n }]\n )\n sqldb.session.add(okved_item)\n sqldb.session.commit()\n\n okved_item = OkvedCatalogObject(\n id=str(ObjectId()),\n name=\"foo2\",\n departments=[{\n 'id': str(ObjectId()),\n 'okvads': ['11', '12', '13'],\n 'name': 'bar2',\n 'main_okvad': '11'\n }, {\n 'id': str(ObjectId()),\n 'okvads': ['11', '13', '15'],\n 'name': 'baz2',\n 'main_okvad': '15'\n }]\n )\n sqldb.session.add(okved_item)\n 
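# Illustrative, self-contained sketch of the half-hour slot generation in
# NotariusObject.make_slots earlier in this file set (simplified: fixed
# working hours, no busy slots; the times below are made up).
from datetime import datetime, timedelta

def half_hour_slots(day, start='10:00', end='12:00', lunch=('11:00', '11:30')):
    fmt = '%H:%M'
    at = lambda hhmm: datetime.strptime(hhmm, fmt).replace(
        year=day.year, month=day.month, day=day.day)
    t, end_t = at(start), at(end)
    lunch_s, lunch_e = at(lunch[0]), at(lunch[1])
    slots = []
    while t < end_t:
        middle = t + timedelta(seconds=900)  # midpoint of the 30-minute slot
        if not (lunch_s <= middle <= lunch_e):
            slots.append({'slot_start': t.strftime(fmt),
                          'slot_end': (t + timedelta(seconds=1800)).strftime(fmt)})
        t += timedelta(seconds=1800)
    return slots

# The 11:00-11:30 slot drops out because its midpoint falls in the lunch break:
assert [s['slot_start'] for s in half_hour_slots(datetime(2014, 1, 1))] == ['10:00', '10:30', '11:30']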
sqldb.session.commit()\n\n @authorized()\n def test_create_person_by_name_then_add_part_of_passport(self):\n with self.app.app_context():\n data = {\n \"person\": json.dumps({\n 'name': u\"Поликарп\",\n })\n }\n result = self.test_client.post('/entity/person/create/', data=data)\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n person = result_data['result']\n self.assertIn('id', person)\n person_id, _type = person['id'].split(\"_\")\n real_person = PrivatePersonDbObject.query.filter_by(id=person_id).first()\n self.assertIsNotNone(real_person)\n self.assertEqual(person, PrivatePerson.db_obj_to_field(real_person).get_api_structure())\n self.assertIsInstance(person['name'], basestring)\n self.assertEqual(real_person.name, u\"Поликарп\")\n\n data = {\n 'person_id': unicode(real_person.id) + \"_person\",\n 'person': json.dumps({\n 'passport': {'document_type': 'internal', 'series': '4111'},\n 'address': {\n \"city_type\": \"\",\n \"city\": \"\"\n }\n })\n }\n result = self.test_client.post('/entity/person/update/', data=data)\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n person = result_data['result']\n self.assertEqual(person['passport'], {'document_type': 'internal', 'series': '4111'})\n\n @authorized()\n def test_try_set_empty_fields(self):\n with self.app.app_context():\n data = {\n \"person\": json.dumps({\n 'name': u\"Поликарп\"\n })\n }\n result = self.test_client.post('/entity/person/create/', data=data)\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n person = result_data['result']\n self.assertIn('id', person)\n person_id, _type = person['id'].split(\"_\")\n real_person = PrivatePersonDbObject.query.filter_by(id=person_id).first()\n self.assertIsNotNone(real_person)\n self.assertEqual(person, PrivatePerson.db_obj_to_field(real_person).get_api_structure())\n self.assertIsInstance(person['name'], basestring)\n self.assertEqual(real_person.name, u\"Поликарп\")\n\n data = {\n 'person_id': unicode(real_person.id) + \"_person\",\n 'person': json.dumps({\n 'passport': {'document_type': 'internal', 'series': '4111'},\n 'address': {\n \"city_type\": \"\",\n \"city\": \"\"\n }\n })\n }\n result = self.test_client.post('/entity/person/update/', data=data)\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n person = result_data['result']\n self.assertEqual(person['passport'], {'document_type': 'internal', 'series': '4111'})\n self.assertEqual(person['address'], {})\n\n @authorized()\n def test_create_person_null_fields(self):\n with self.app.app_context():\n data = {\n \"person\": json.dumps({\n 'name': u\"Поликарп\",\n 'surname': u\"Шариков\",\n 'patronymic': u\"Поликарпович\",\n 'passport': {'document_type': None, 'series': None, 'number': None, 'issue_date': None,\n 'issue_depart': None, 'depart_code': None, 'citizenship': None},\n })\n }\n result = self.test_client.post('/entity/person/create/', data=data)\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = 
json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n person = result_data['result']\n self.assertIn('id', person)\n person_id, _type = person['id'].split(\"_\")\n real_person = PrivatePersonDbObject.query.filter_by(id=person_id).first()\n self.assertIsNotNone(real_person)\n real_person = PrivatePerson.db_obj_to_field(real_person).get_api_structure()\n self.assertEqual(person, real_person)\n\n @authorized()\n def test_update_person_name_declension(self):\n with self.app.app_context():\n data = {\n 'name': None,\n 'surname': None,\n 'patronymic': None,\n 'caption': None,\n 'birthdate': None,\n 'sex': None,\n 'birthplace': None,\n 'inn': None,\n 'phone': None,\n 'passport': {'document_type': 'internal', 'series': None, 'number': None, 'issue_date': None,\n 'issue_depart': None, 'depart_code': None, 'citizenship': None},\n 'address': {\"index\": None, \"region\": None, \"city\": None, \"village\": None, \"street_type\": None,\n \"street\": None, \"house_type\": None, \"house\": None, \"building_type\": None, \"building\": None,\n \"flat_type\": None, \"flat\": None},\n 'living_address': None,\n 'living_country_code': None,\n 'ogrnip': None,\n 'email': None,\n 'spouse': None,\n '_owner_id': self.user.id\n }\n person = PrivatePersonDbObject(**data)\n sqldb.session.add(person)\n sqldb.session.commit()\n\n data = {\n 'person_id': unicode(person.id) + \"_person\",\n 'person': json.dumps({\n 'caption': u\"Труляля\",\n \"id\": u\"53b53446a726161b1ab019c0\",\n 'name': None,\n 'surname': None,\n 'patronymic': None,\n 'birthdate': u\"2010-06-03\",\n 'sex': None,\n 'birthplace': None,\n 'inn': None,\n 'phone': None,\n 'passport': {'document_type': 'internal', 'series': None, 'number': None, 'issue_date': None,\n 'issue_depart': None, 'depart_code': None, 'citizenship': None},\n 'address': {\"index\": None, \"region\": None, \"city\": None, \"village\": None, \"street_type\": None,\n \"street\": None, \"house_type\": None, \"house\": None, \"building_type\": None,\n \"building\": None, \"flat_type\": None, \"flat\": None},\n 'living_address': None,\n 'living_country_code': None,\n 'ogrnip': None,\n 'email': None,\n 'spouse': None,\n })\n }\n result = self.test_client.post('/entity/person/update/', data=data)\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n person = result_data['result']\n self.assertEqual(person['caption'], u\"Труляля\")\n\n @authorized()\n def test_update_person_name(self):\n with self.app.app_context():\n data = {\n '_owner_id': self.user.id\n }\n person = PrivatePersonDbObject(**data)\n sqldb.session.add(person)\n sqldb.session.commit()\n\n simple_object = PrivatePersonDbObject.query.filter_by(id=person.id).first()\n\n # noinspection PyUnresolvedReferences\n real_person = PrivatePerson.db_obj_to_field(\n PrivatePersonDbObject.query.filter_by(id=person.id).first())\n self.assertIsNotNone(real_person)\n\n data = {\n 'person_id': unicode(person.id) + \"_person\",\n 'person': json.dumps({\n 'name': u\"Поликарпа\"\n })\n }\n result = self.test_client.post('/entity/person/update/', data=data)\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n person = result_data['result']\n\n @authorized()\n def test_get_person_by_id(self):\n with 
self.app.app_context():\n data = {\n 'name': u\"Поликарп\",\n 'surname': u\"Шариков\",\n 'patronymic': u\"Поликарпович\",\n '_owner_id': self.user.id,\n 'birthdate': datetime.now()\n }\n person = PrivatePersonDbObject(**data)\n sqldb.session.add(person)\n sqldb.session.commit()\n\n result = self.test_client.get('/entity/person/?person_id=%s_person' % unicode(person.id))\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n self.assertEqual(len(result_data['result']['persons']), 1)\n self.assertEqual(result_data['result']['count'], 1)\n self.assertEqual(result_data['result']['total'], 1)\n person = result_data['result']['persons'][0]\n self.assertIn('id', person)\n person_id, _type = person['id'].split(\"_\")\n real_person = PrivatePersonDbObject.query.filter_by(id=person_id).first()\n self.assertIsNotNone(real_person)\n self.assertEqual(person, PrivatePerson.db_obj_to_field(real_person).get_api_structure())\n self.assertEqual(real_person.name, data['name'])\n self.assertEqual(real_person.surname, data['surname'])\n self.assertEqual(real_person.patronymic, data['patronymic'])\n\n @authorized()\n def test_get_all_persons(self):\n with self.app.app_context():\n data = {\n 'name': u\"Поликарп\",\n 'surname': u\"Шариков\",\n 'patronymic': u\"Поликарпович\",\n '_owner_id': self.user.id\n }\n person = PrivatePersonDbObject(**data)\n sqldb.session.add(person)\n sqldb.session.commit()\n\n data2 = {\n 'name': u\"Поликарп2\",\n 'surname': u\"Шариков2\",\n 'patronymic': u\"Поликарпович2\",\n '_owner_id': self.user.id\n }\n person2 = PrivatePersonDbObject(**data2)\n sqldb.session.add(person2)\n sqldb.session.commit()\n\n data3 = {\n 'name': u\"Поликарп3\",\n 'surname': u\"Шариков3\",\n 'patronymic': u\"Поликарпович3\",\n '_owner_id': self.user.id\n }\n person3 = PrivatePersonDbObject(**data3)\n sqldb.session.add(person3)\n sqldb.session.commit()\n\n result = self.test_client.get('/entity/person/')\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n self.assertEqual(len(result_data['result']['persons']), 3)\n self.assertEqual(result_data['result']['count'], 3)\n self.assertEqual(result_data['result']['total'], 3)\n person = result_data['result']['persons'][0]\n self.assertIn('id', person)\n person_id, _type = person['id'].split(\"_\")\n real_person = PrivatePersonDbObject.query.filter_by(id=person_id).first()\n self.assertIsNotNone(real_person)\n self.assertEqual(person, PrivatePerson.db_obj_to_field(real_person).get_api_structure())\n self.assertEqual(real_person.name, data['name'])\n self.assertEqual(real_person.surname, data['surname'])\n self.assertEqual(real_person.patronymic, data['patronymic'])\n\n @authorized()\n def test_get_all_persons_limit_offset(self):\n with self.app.app_context():\n data = {\n 'name': u\"Поликарп\",\n 'surname': u\"Шариков\",\n 'patronymic': u\"Поликарпович\",\n '_owner_id': self.user.id\n }\n person = PrivatePersonDbObject(**data)\n sqldb.session.add(person)\n sqldb.session.commit()\n\n data2 = {\n 'name': u\"Поликарп2\",\n 'surname': u\"Шариков2\",\n 'patronymic': u\"Поликарпович2\",\n '_owner_id': self.user.id\n }\n person2 = PrivatePersonDbObject(**data2)\n sqldb.session.add(person2)\n sqldb.session.commit()\n\n data3 = {\n 'name': u\"Поликарп3\",\n 'surname': u\"Шариков3\",\n 
'patronymic': u\"Поликарпович3\",\n                '_owner_id': self.user.id\n            }\n            person3 = PrivatePersonDbObject(**data3)\n            sqldb.session.add(person3)\n            sqldb.session.commit()\n\n            result = self.test_client.get('/entity/person/?offset=1&count=1')\n            self.assertIsNotNone(result)\n            self.assertEqual(result.status_code, 200)\n            result_data = json.loads(result.data)\n            self.assertIn('result', result_data)\n            self.assertNotIn('error', result_data)\n            self.assertEqual(len(result_data['result']['persons']), 1)\n            self.assertEqual(result_data['result']['count'], 1)\n            self.assertEqual(result_data['result']['total'], 3)\n            person = result_data['result']['persons'][0]\n            self.assertIn('id', person)\n            person_id, _type = person['id'].split(\"_\")\n            real_person = PrivatePersonDbObject.query.filter_by(id=person_id).first()\n            self.assertIsNotNone(real_person)\n            self.assertEqual(person, PrivatePerson.db_obj_to_field(real_person).get_api_structure())\n            self.assertEqual(real_person.name, data2['name'])\n            self.assertEqual(real_person.surname, data2['surname'])\n            self.assertEqual(real_person.patronymic, data2['patronymic'])\n\n    @authorized()\n    def test_update_person(self):\n        with self.app.app_context():\n            data = {\n                'name': u\"Поликарп\",\n                'surname': u\"Шариков\",\n                'patronymic': u\"Поликарпович\",\n                '_owner_id': self.user.id\n            }\n\n            person = PrivatePersonDbObject(**data)\n            sqldb.session.add(person)\n            sqldb.session.commit()\n\n            data = {\n                'person_id': unicode(person.id) + \"_person\",\n                'person': json.dumps({\n                    'name': u\"Иван\",\n                    'surname': u\"Иванов\",\n                    'patronymic': u\"Иванович\",\n                    'birthdate': u'2014-02-18'\n                })\n            }\n            result = self.test_client.post('/entity/person/update/', data=data)\n            self.assertIsNotNone(result)\n            self.assertEqual(result.status_code, 200)\n            result_data = json.loads(result.data)\n            self.assertIn('result', result_data)\n            self.assertNotIn('error', result_data)\n            person = result_data['result']\n\n            person_id, _type = person['id'].split(\"_\")\n            real_person = PrivatePersonDbObject.query.filter_by(id=person_id).first()\n            self.assertIsNotNone(real_person)\n            self.assertEqual(person, PrivatePerson.db_obj_to_field(real_person).get_api_structure())\n            self.assertEqual(real_person.name, u\"Иван\")\n            self.assertEqual(real_person.surname, u\"Иванов\")\n            self.assertEqual(real_person.patronymic, u\"Иванович\")\n            # noinspection PyUnresolvedReferences\n            person_doc = PrivatePerson.db_obj_to_field(real_person)\n            self.assertEqual(unicode(person_doc.name), u\"Иван\")\n\n    @authorized()\n    def test_update_person_clear_phone(self):\n        with self.app.app_context():\n            data = {\n                'name': u\"Поликарп\",\n                'surname': u\"Шариков\",\n                'patronymic': u\"Поликарпович\",\n                'phone': u\"+79001231313\",\n                '_owner_id': self.user.id\n            }\n\n            person = PrivatePersonDbObject(**data)\n            sqldb.session.add(person)\n            sqldb.session.commit()\n\n            data = {\n                'person_id': unicode(person.id) + \"_person\",\n                'person': json.dumps({\n                    'name': u\"Иван\",\n                    'surname': u\"Иванов\",\n                    'patronymic': u\"Иванович\",\n                    'phone': u\"\",\n                    'birthdate': u'2014-02-18'\n                })\n            }\n            result = self.test_client.post('/entity/person/update/', data=data)\n            self.assertIsNotNone(result)\n            self.assertEqual(result.status_code, 200)\n            result_data = json.loads(result.data)\n            self.assertIn('result', result_data)\n            self.assertNotIn('error', result_data)\n            person = result_data['result']\n\n            person_id, _type = person['id'].split(\"_\")\n            real_person = PrivatePersonDbObject.query.filter_by(id=person_id).first()\n            self.assertIsNotNone(real_person)\n            self.assertEqual(person, 
PrivatePerson.db_obj_to_field(real_person).get_api_structure())\n self.assertEqual(real_person.name, u\"Иван\")\n self.assertEqual(real_person.surname, u\"Иванов\")\n self.assertEqual(real_person.patronymic, u\"Иванович\")\n self.assertEqual(real_person.phone, None)\n # noinspection PyUnresolvedReferences\n person_doc = PrivatePerson.db_obj_to_field(real_person)\n self.assertEqual(unicode(person_doc.name), u\"Иван\")\n\n @authorized()\n def test_get_removed_person_by_id(self):\n with self.app.app_context():\n data = {\n 'name': u\"Поликарп\",\n 'surname': u\"Шариков\",\n 'patronymic': u\"Поликарпович\",\n 'deleted': True\n }\n\n person = PrivatePersonDbObject(**data)\n sqldb.session.add(person)\n sqldb.session.commit()\n\n result = self.test_client.get('/entity/person/?person_id=%s_person' % unicode(person.id))\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 404)\n\n result_data = json.loads(result.data)\n self.assertNotIn('result', result_data)\n self.assertIn('error', result_data)\n error = result_data['error']\n self.assertEqual(error['code'], 207)\n\n @authorized()\n def test_get_persons_after_removal_some_of_them(self):\n with self.app.app_context():\n data = {\n 'name': u\"Поликарп\",\n 'surname': u\"Шариков\",\n 'patronymic': u\"Поликарпович\",\n 'deleted': True,\n '_owner_id': self.user.id\n }\n person = PrivatePersonDbObject(**data)\n sqldb.session.add(person)\n sqldb.session.commit()\n\n data2 = {\n 'name': u\"Поликарп2\",\n 'surname': u\"Шариков2\",\n 'patronymic': u\"Поликарпович2\",\n '_owner_id': self.user.id\n }\n person2 = PrivatePersonDbObject(**data2)\n sqldb.session.add(person2)\n sqldb.session.commit()\n\n data3 = {\n 'name': u\"Поликарп3\",\n 'surname': u\"Шариков3\",\n 'patronymic': u\"Поликарпович3\",\n 'deleted': True,\n '_owner_id': self.user.id\n }\n person3 = PrivatePersonDbObject(**data3)\n sqldb.session.add(person3)\n sqldb.session.commit()\n\n result = self.test_client.get('/entity/person/')\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n self.assertEqual(len(result_data['result']['persons']), 1)\n self.assertEqual(result_data['result']['count'], 1)\n self.assertEqual(result_data['result']['total'], 1)\n person = result_data['result']['persons'][0]\n self.assertIn('id', person)\n person_id, _type = person['id'].split(\"_\")\n real_person = PrivatePersonDbObject.query.filter_by(id=person_id).first()\n self.assertIsNotNone(real_person)\n self.assertEqual(person, PrivatePerson.db_obj_to_field(real_person).get_api_structure())\n self.assertEqual(real_person.name, data2['name'])\n self.assertEqual(real_person.surname, data2['surname'])\n self.assertEqual(real_person.patronymic, data2['patronymic'])\n\n @authorized()\n def test_delete_person(self):\n with self.app.app_context():\n data_person = {\n 'name': u\"Поликарп\",\n 'surname': u\"Шариков\",\n 'patronymic': u\"Поликарпович\",\n '_owner_id': self.user.id\n }\n person = PrivatePersonDbObject(**data_person)\n sqldb.session.add(person)\n sqldb.session.commit()\n\n result = self.test_client.post('/entity/person/remove/', data={\n 'person_id': unicode(person.id) + \"_person\"\n })\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertNotIn('error', result_data)\n self.assertIn('result', result_data)\n self.assertTrue(result_data['result'])\n\n @authorized()\n def test_create_company(self):\n with 
self.app.app_context():\n data = {\n \"company\": json.dumps({\n 'full_name': u\"ООО Ромашка\",\n 'inn': \"781108730780\"\n })\n }\n result = self.test_client.post('/entity/company/create/', data=data)\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n company = result_data['result']\n self.assertIn('id', company)\n company_id, _type = company['id'].split('_')\n real_company = CompanyDbObject.query.filter_by(id=company_id).first()\n self.assertIsNotNone(real_company)\n self.assertEqual(company, CompanyObject.db_obj_to_field(real_company).get_api_structure())\n self.assertEqual(real_company.full_name, u\"ООО Ромашка\")\n self.assertEqual(real_company.inn, \"781108730780\")\n\n @authorized()\n def test_create_foreign_company(self):\n with self.app.app_context():\n data = {\n \"company\": json.dumps({\n 'full_name': u\"ООО Ромашка\",\n 'inn': \"781108730780\",\n 'company_type': CompanyTypeEnum.CT_FOREIGN\n })\n }\n result = self.test_client.post('/entity/company/create/', data=data)\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n company = result_data['result']\n self.assertIn('id', company)\n company_id, _type = company['id'].split('_')\n real_company = CompanyDbObject.query.filter_by(id=company_id).first()\n self.assertIsNotNone(real_company)\n self.assertEqual(company, CompanyObject.db_obj_to_field(real_company).get_api_structure())\n self.assertEqual(real_company.full_name, u\"ООО Ромашка\")\n self.assertEqual(real_company.inn, \"781108730780\")\n\n @authorized()\n def test_create_company_person_as_empty_string(self):\n with self.app.app_context():\n data = {\n \"company\": json.dumps({\n 'full_name': u\"ООО Ромашка\",\n 'inn': \"781108730780\",\n 'general_manager': \"\",\n 'registration_date': u\"\"\n })\n }\n result = self.test_client.post('/entity/company/create/', data=data)\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n company = result_data['result']\n self.assertIn('id', company)\n company_id, _type = company['id'].split('_')\n real_company = CompanyDbObject.query.filter_by(id=company_id).first()\n self.assertIsNotNone(real_company)\n self.assertEqual(company, CompanyObject.db_obj_to_field(real_company).get_api_structure())\n self.assertEqual(real_company.full_name, u\"ООО Ромашка\")\n self.assertEqual(real_company.inn, \"781108730780\")\n\n @authorized()\n def test_create_company_with_general_manager(self):\n with self.app.app_context():\n person = PrivatePersonDbObject(\n name=u\"Поликарп\",\n surname=u\"Шариков\",\n patronymic=u\"Поликарпович\",\n _owner=self.user,\n birthdate=datetime.now()\n )\n sqldb.session.add(person)\n sqldb.session.commit()\n data = {\n \"company\": json.dumps({\n 'full_name': u\"ООО Ромашка\",\n 'inn': \"781108730780\",\n 'general_manager': \"%s_person\" % person.id,\n 'registration_date': u\"\"\n })\n }\n result = self.test_client.post('/entity/company/create/', data=data)\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n company = result_data['result']\n 
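# Illustrative aside: the API in these tests serializes entity references
# as '<object id>_<entity type>' (see the split('_') calls nearby). A tiny
# hypothetical parser for that convention, not used by the suite itself:
def parse_entity_ref(ref):
    obj_id, _, entity_type = ref.partition('_')
    if not obj_id or entity_type not in ('person', 'company'):
        raise ValueError('malformed entity reference: %r' % ref)
    return obj_id, entity_type

assert parse_entity_ref('53b53446a726161b1ab019c0_person') == ('53b53446a726161b1ab019c0', 'person')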
self.assertIn('id', company)\n company_id, _type = company['id'].split('_')\n real_company = CompanyDbObject.query.filter_by(id=company_id).first()\n self.assertIsNotNone(real_company)\n self.assertEqual(company, CompanyObject.db_obj_to_field(real_company).get_api_structure())\n self.assertEqual(real_company.full_name, u\"ООО Ромашка\")\n self.assertEqual(real_company.inn, \"781108730780\")\n\n @authorized()\n def test_update_company(self):\n with self.app.app_context():\n data = {\n 'full_name': u\"ООО Ромашка\",\n 'inn': \"781108730780\",\n '_owner_id': self.user.id\n }\n company = CompanyDbObject(**data)\n sqldb.session.add(company)\n sqldb.session.commit()\n\n data = {\n 'company_id': company.id + \"_company\",\n 'company': json.dumps({\n 'full_name': u\"ООО Прогресс\",\n 'inn': \"781108730780\"\n })\n }\n result = self.test_client.post('/entity/company/update/', data=data)\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n company = result_data['result']\n self.assertEqual(company['full_name'], u\"ООО Прогресс\")\n self.assertEqual(company['inn'], \"781108730780\")\n\n company_id, _type = company['id'].split('_')\n real_company = CompanyObject.db_obj_to_field(CompanyDbObject.query.filter_by(id=company_id).first())\n self.assertEqual(unicode(real_company.full_name), u\"ООО Прогресс\")\n self.assertEqual(real_company.inn, \"781108730780\")\n\n @authorized()\n def test_get_company_by_id(self):\n with self.app.app_context():\n data = {\n 'full_name': u\"ООО Ромашка\",\n 'inn': \"781108730780\",\n '_owner_id': self.user.id\n }\n company = CompanyDbObject(**data)\n sqldb.session.add(company)\n sqldb.session.commit()\n\n result = self.test_client.get('/entity/company/?company_id=%s' % company.id + \"_company\")\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n self.assertEqual(len(result_data['result']['companies']), 1)\n self.assertEqual(result_data['result']['count'], 1)\n self.assertEqual(result_data['result']['total'], 1)\n company = result_data['result']['companies'][0]\n self.assertIn('id', company)\n company_id, _type = company['id'].split('_')\n real_company = CompanyDbObject.query.filter_by(id=company_id).first()\n self.assertIsNotNone(real_company)\n self.assertEqual(company, CompanyObject.db_obj_to_field(real_company).get_api_structure())\n self.assertEqual(real_company.full_name, data['full_name'])\n self.assertEqual(real_company.inn, data['inn'])\n\n @authorized()\n def test_get_all_companies(self):\n with self.app.app_context():\n data = {\n 'full_name': u\"ООО Ромашка1\",\n 'inn': '7826120135',\n 'kpp': '781701001',\n 'ogrn': '1027810273754',\n '_owner_id': self.user.id\n }\n company = CompanyDbObject(**data)\n sqldb.session.add(company)\n sqldb.session.commit()\n\n data2 = {\n 'full_name': u\"ООО Ромашка2\",\n 'inn': \"781108730780\",\n '_owner_id': self.user.id\n }\n company2 = CompanyDbObject(**data2)\n sqldb.session.add(company2)\n sqldb.session.commit()\n\n data3 = {\n 'full_name': u\"ООО Ромашка3\",\n 'inn': \"781108730780\",\n '_owner_id': self.user.id\n }\n company3 = CompanyDbObject(**data3)\n sqldb.session.add(company3)\n sqldb.session.commit()\n\n result = self.test_client.get('/entity/company/')\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n 
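# Illustrative aside: every list endpoint here returns the same envelope,
# {'result': {<plural key>: [...], 'count': <page size>, 'total': <all rows>}}.
# A hypothetical checker spelling that contract out (not part of the suite):
def check_page_envelope(result_data, key, expected_count, expected_total):
    page = result_data['result']
    assert len(page[key]) == page['count'] == expected_count
    assert page['total'] == expected_total

check_page_envelope({'result': {'companies': [{}], 'count': 1, 'total': 3}}, 'companies', 1, 3)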
result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n self.assertEqual(len(result_data['result']['companies']), 3)\n self.assertEqual(result_data['result']['count'], 3)\n self.assertEqual(result_data['result']['total'], 3)\n company = result_data['result']['companies'][0]\n self.assertIn('id', company)\n company_id, _type = company['id'].split('_')\n real_company = CompanyDbObject.query.filter_by(id=company_id).order_by(CompanyDbObject.id.asc()).first()\n self.assertIsNotNone(real_company)\n self.assertEqual(company, CompanyObject.db_obj_to_field(real_company).get_api_structure())\n self.assertEqual(real_company.full_name, data['full_name'])\n self.assertEqual(real_company.inn, str(data['inn']))\n\n @authorized()\n def test_get_all_companies_limit_offset(self):\n with self.app.app_context():\n data = {\n 'full_name': u\"ООО Ромашка1\",\n 'inn': \"781108730780\",\n '_owner_id': self.user.id\n }\n company = CompanyDbObject(**data)\n sqldb.session.add(company)\n sqldb.session.commit()\n\n data2 = {\n 'full_name': u\"ООО Ромашка2\",\n 'inn': \"781108730780\",\n '_owner_id': self.user.id\n }\n company2 = CompanyDbObject(**data2)\n sqldb.session.add(company2)\n sqldb.session.commit()\n\n data3 = {\n 'full_name': u\"ООО Ромашка3\",\n 'inn': \"781108730780\",\n '_owner_id': self.user.id\n }\n company3 = CompanyDbObject(**data3)\n sqldb.session.add(company3)\n sqldb.session.commit()\n\n result = self.test_client.get('/entity/company/?offset=1&count=1')\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n self.assertEqual(len(result_data['result']['companies']), 1)\n self.assertEqual(result_data['result']['count'], 1)\n self.assertEqual(result_data['result']['total'], 3)\n company = result_data['result']['companies'][0]\n self.assertIn('id', company)\n company_id, _type = company['id'].split('_')\n real_company = CompanyDbObject.query.filter_by(id=company_id).first()\n self.assertIsNotNone(real_company)\n self.assertEqual(company, CompanyObject.db_obj_to_field(real_company).get_api_structure())\n self.assertEqual(real_company.full_name, data2['full_name'])\n self.assertEqual(real_company.inn, data2['inn'])\n\n @authorized()\n def test_delete_company(self):\n with self.app.app_context():\n data = {\n 'full_name': u\"ООО Ромашка1\",\n 'inn': \"781108730780\",\n '_owner_id': self.user.id\n }\n company = CompanyDbObject(**data)\n sqldb.session.add(company)\n sqldb.session.commit()\n\n result = self.test_client.post('/entity/company/remove/', data={\n 'company_id': company.id + \"_company\"\n })\n company = CompanyDbObject.query.filter_by(id=company.id).first()\n self.assertEqual(company.deleted, True)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertNotIn('error', result_data)\n self.assertIn('result', result_data)\n self.assertTrue(result_data['result'])\n\n @authorized()\n def test_get_removed_company_by_id(self):\n with self.app.app_context():\n data = {\n 'full_name': u\"ООО Ромашка1\",\n 'inn': \"781108730780\",\n 'deleted': True\n }\n company = CompanyDbObject(**data)\n sqldb.session.add(company)\n sqldb.session.commit()\n\n result = self.test_client.get('/entity/company/?company_id=%s' % company.id + \"_company\")\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 404)\n result_data = json.loads(result.data)\n\n 
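# Illustrative aside: deleted or foreign objects are reported with HTTP 404
# and an application error envelope {'error': {'code': 207}}, so a client
# cannot tell a hidden object from a missing one. Hypothetical helper:
def is_not_found_error(status_code, result_data):
    return (status_code == 404
            and 'result' not in result_data
            and result_data.get('error', {}).get('code') == 207)

assert is_not_found_error(404, {'error': {'code': 207}})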
self.assertNotIn('result', result_data)\n self.assertIn('error', result_data)\n self.assertEqual(result_data['error']['code'], 207)\n\n @authorized()\n def test_get_companies_after_removal_some_of_them(self):\n with self.app.app_context():\n data = {\n 'full_name': u\"ООО Ромашка1\",\n 'inn': \"781108730780\",\n 'deleted': True,\n '_owner_id': self.user.id\n }\n company = CompanyDbObject(**data)\n sqldb.session.add(company)\n sqldb.session.commit()\n\n data2 = {\n 'full_name': u\"ООО Ромашка2\",\n 'inn': \"781108730780\",\n '_owner_id': self.user.id\n }\n company2 = CompanyDbObject(**data2)\n sqldb.session.add(company2)\n sqldb.session.commit()\n\n data3 = {\n 'full_name': u\"ООО Ромашка3\",\n 'inn': \"781108730780\",\n 'deleted': True,\n '_owner_id': self.user.id\n }\n company3 = CompanyDbObject(**data3)\n sqldb.session.add(company3)\n sqldb.session.commit()\n\n result = self.test_client.get('/entity/company/')\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n self.assertEqual(len(result_data['result']['companies']), 1)\n self.assertEqual(result_data['result']['count'], 1)\n self.assertEqual(result_data['result']['total'], 1)\n company = result_data['result']['companies'][0]\n self.assertIn('id', company)\n company_id, _type = company['id'].split('_')\n real_company = CompanyDbObject.query.filter_by(id=company_id).first()\n self.assertIsNotNone(real_company)\n self.assertEqual(company, CompanyObject.db_obj_to_field(real_company).get_api_structure())\n self.assertEqual(real_company.full_name, data2['full_name'])\n self.assertEqual(real_company.inn, data2['inn'])\n\n @authorized()\n def test_not_my_person(self):\n some_user = AuthUser()\n sqldb.session.add(some_user)\n sqldb.session.commit()\n with self.app.app_context():\n data = {\n 'name': u\"Поликарп\",\n 'surname': u\"Шариков\",\n 'patronymic': u\"Поликарпович\",\n '_owner': some_user\n }\n\n person = PrivatePersonDbObject(**data)\n sqldb.session.add(person)\n sqldb.session.commit()\n\n result = self.test_client.get('/entity/person/?person_id=%s_person' % unicode(person.id))\n self.assertEqual(result.status_code, 404)\n result_data = json.loads(result.data)\n self.assertNotIn('result', result_data)\n self.assertIn('error', result_data)\n self.assertEqual(result_data['error']['code'], 207)\n\n result = self.test_client.get('/entity/person/')\n\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n self.assertEqual(len(result_data['result']['persons']), 0)\n\n @authorized()\n def test_update_not_my_person(self):\n some_user = AuthUser()\n sqldb.session.add(some_user)\n sqldb.session.commit()\n with self.app.app_context():\n data = {\n 'name': u\"Поликарп\",\n 'surname': u\"Шариков\",\n 'patronymic': u\"Поликарпович\",\n '_owner': some_user\n }\n\n person = PrivatePersonDbObject(**data)\n sqldb.session.add(person)\n sqldb.session.commit()\n\n data = {\n 'person_id': unicode(person.id) + \"_person\",\n 'person': json.dumps({\n 'caption': u\"Труляля\",\n })\n }\n result = self.test_client.post('/entity/person/update/', data=data)\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 404)\n result_data = json.loads(result.data)\n self.assertNotIn('result', result_data)\n self.assertIn('error', result_data)\n self.assertEqual(result_data['error']['code'], 207)\n\n 
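# Illustrative aside: the rule these "not mine" tests pin down is that every
# entity read/update/delete is scoped to the authenticated owner. A
# hypothetical SQLAlchemy-style guard with the same effect (the model and
# _owner_id/deleted names are assumed from the fixtures above, not an API
# of this codebase):
def get_owned_or_none(model, obj_id, current_user):
    return model.query.filter_by(id=obj_id,
                                 _owner_id=current_user.id,
                                 deleted=False).first()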
@authorized()\n def test_remove_not_my_person(self):\n some_user = AuthUser()\n sqldb.session.add(some_user)\n sqldb.session.commit()\n with self.app.app_context():\n data = {\n 'name': u\"Поликарп\",\n 'surname': u\"Шариков\",\n 'patronymic': u\"Поликарпович\",\n '_owner': some_user\n }\n\n person = PrivatePersonDbObject(**data)\n sqldb.session.add(person)\n sqldb.session.commit()\n\n result = self.test_client.post('/entity/person/remove/', data={\n 'person_id': unicode(person.id) + \"_person\"\n })\n self.assertEqual(result.status_code, 404)\n result_data = json.loads(result.data)\n self.assertIn('error', result_data)\n self.assertNotIn('result', result_data)\n self.assertEqual(result_data['error']['code'], 207)\n\n @authorized()\n def test_not_my_company(self):\n with self.app.app_context():\n some_user = AuthUser()\n sqldb.session.add(some_user)\n sqldb.session.commit()\n data = {\n 'full_name': u\"ООО Лютик\",\n '_owner': some_user\n }\n\n company = CompanyDbObject(**data)\n sqldb.session.add(company)\n sqldb.session.commit()\n\n result = self.test_client.get('/entity/company/?company_id=%s_company' % company.id)\n self.assertEqual(result.status_code, 404)\n result_data = json.loads(result.data)\n self.assertNotIn('result', result_data)\n self.assertIn('error', result_data)\n self.assertEqual(result_data['error']['code'], 207)\n\n result = self.test_client.get('/entity/company/')\n\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n self.assertEqual(len(result_data['result']['companies']), 0)\n\n @authorized()\n def test_update_not_my_company(self):\n some_user = AuthUser()\n sqldb.session.add(some_user)\n sqldb.session.commit()\n with self.app.app_context():\n data = {\n 'full_name': u\"ООО Лютик\",\n '_owner': some_user\n }\n\n company = CompanyDbObject(**data)\n sqldb.session.add(company)\n sqldb.session.commit()\n\n data = {\n 'company_id': company.id + \"_company\",\n 'company': json.dumps({\n 'full_name': u\"Труляля\",\n })\n }\n result = self.test_client.post('/entity/company/update/', data=data)\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 404)\n result_data = json.loads(result.data)\n self.assertNotIn('result', result_data)\n self.assertIn('error', result_data)\n self.assertEqual(result_data['error']['code'], 207)\n\n @authorized()\n def test_delete_not_my_company(self):\n with self.app.app_context():\n some_user = AuthUser()\n sqldb.session.add(some_user)\n sqldb.session.commit()\n data = {\n 'full_name': u\"ООО Лютик\",\n '_owner': some_user\n }\n\n company = CompanyDbObject(**data)\n sqldb.session.add(company)\n sqldb.session.commit()\n\n result = self.test_client.post('/entity/company/remove/', data={\n 'company_id': company.id + \"_company\"\n })\n self.assertEqual(result.status_code, 404)\n result_data = json.loads(result.data)\n self.assertIn('error', result_data)\n self.assertNotIn('result', result_data)\n self.assertEqual(result_data['error']['code'], 207)\n\n @authorized()\n def test_update_company_set_not_mine_person(self):\n with self.app.app_context():\n data = {\n 'full_name': u\"ООО Ромашка\",\n 'inn': \"781108730780\",\n '_owner_id': self.user.id\n }\n company = CompanyDbObject(**data)\n sqldb.session.add(company)\n sqldb.session.commit()\n\n real_company = CompanyObject.db_obj_to_field(CompanyDbObject.query.filter_by(id=company.id).first())\n self.assertIsNotNone(real_company)\n\n some_user = AuthUser()\n 
sqldb.session.add(some_user)\n sqldb.session.commit()\n data = {\n 'name': u\"Поликарп\",\n 'surname': u\"Шариков\",\n 'patronymic': u\"Поликарпович\",\n '_owner': some_user\n }\n\n person = PrivatePersonDbObject(**data)\n sqldb.session.add(person)\n sqldb.session.commit()\n\n data = {\n 'company_id': company.id + \"_company\",\n 'company': json.dumps({\n 'full_name': u\"ООО Прогресс\",\n 'inn': \"781108730780\",\n 'general_manager': {\n \"id\": unicode(person.id)\n }\n })\n }\n result = self.test_client.post('/entity/company/update/', data=data)\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 400)\n result_data = json.loads(result.data)\n self.assertNotIn('result', result_data)\n self.assertIn('error', result_data)\n self.assertEqual(result_data['error']['code'], 5)\n\n @authorized()\n def test_update_person_set_not_mine_person(self):\n with self.app.app_context():\n data = {\n 'name': u\"Ромашка\",\n 'inn': \"781108730780\",\n '_owner_id': self.user.id\n }\n my_person = PrivatePersonDbObject(**data)\n sqldb.session.add(my_person)\n sqldb.session.commit()\n\n some_user = AuthUser()\n sqldb.session.add(some_user)\n sqldb.session.commit()\n data = {\n 'name': u\"Поликарп\",\n 'surname': u\"Шариков\",\n 'patronymic': u\"Поликарпович\",\n '_owner': some_user\n }\n\n not_mine_person = PrivatePersonDbObject(**data)\n sqldb.session.add(not_mine_person)\n sqldb.session.commit()\n\n data = {\n 'person_id': unicode(my_person.id) + \"_person\",\n 'person': json.dumps({\n 'name': u\"ООО Прогресс\",\n 'inn': \"781108730780\",\n 'spouse': {\n \"id\": unicode(not_mine_person.id)\n }\n })\n }\n result = self.test_client.post('/entity/person/update/', data=data)\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 400)\n result_data = json.loads(result.data)\n self.assertNotIn('result', result_data)\n self.assertIn('error', result_data)\n self.assertEqual(result_data['error']['code'], 5)\n\n @authorized()\n def test_get_batch_status(self):\n batch_db = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n _owner=self.user\n )\n sqldb.session.add(batch_db)\n sqldb.session.commit()\n batch_id = batch_db.id\n\n result = self.test_client.get('/batch/status/?batch_id=%s' % batch_id)\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIn('result', data)\n self.assertEqual(data['result'], {\n u'id': batch_id,\n u'batch_type': u'llc',\n u'creation_date': batch_db.creation_date.strftime('%Y-%m-%dT%H:%M:%S'),\n u\"name\": u\"Создание ООО\",\n u\"status\": u'new',\n u\"paid\": \"false\"\n })\n\n @authorized()\n def test_get_companies_without_copies(self):\n company_founder = CompanyDbObject(**{\n \"_owner_id\": self.user.id,\n \"ogrn\": \"1234567890123\",\n \"inn\": \"781108730780\",\n \"kpp\": \"999999999\",\n \"general_manager_caption\": u\"генеральный директор\",\n \"incorporation_form\": IncorporationFormEnum.IF_LLC,\n \"full_name\": u\"образовательное учреждение дополнительного образования детей специализированная детско-юношеская спортивная школа олимпийского резерва по боксу\",\n \"short_name\": u\"Бокс\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Седова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(company_founder)\n sqldb.session.commit()\n id1 = company_founder.id\n\n 
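# Illustrative aside: the clone-and-relink pattern exercised below; expunge a
# loaded row, make it transient, clear its primary key, then re-add it so
# SQLAlchemy INSERTs a fresh copy pointing back at the original via _copy.
# Minimal sketch with a hypothetical row object:
from sqlalchemy.orm import make_transient

def clone_row(session, row, original):
    session.expunge(row)     # detach from the session's identity map
    make_transient(row)      # forget the persistent identity
    row.id = None            # let a new primary key be generated
    row._copy = original     # remember which row this duplicates
    session.add(row)
    session.commit()
    return row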
company_founder2 = CompanyDbObject(**{\n \"_owner_id\": self.user.id,\n \"ogrn\": \"1234567890123\",\n \"inn\": \"781108730780\",\n \"kpp\": \"999999999\",\n \"general_manager_caption\": u\"генеральный директор\",\n \"incorporation_form\": IncorporationFormEnum.IF_LLC,\n \"full_name\": u\"образовательное учреждение дополнительного образования детей специализированная детско-юношеская спортивная школа олимпийского резерва по боксу\",\n \"short_name\": u\"Бокс\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Седова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n },\n \"phone\": \"+7(812)1234567\"\n })\n sqldb.session.add(company_founder2)\n sqldb.session.commit()\n id2 = company_founder2.id\n\n sqldb.session.expunge(company_founder)\n make_transient(company_founder)\n company1_copy = company_founder\n company1_copy.id = None\n sqldb.session.add(company1_copy)\n company1_copy._copy = CompanyDbObject.query.filter_by(id=id1).first()\n sqldb.session.commit()\n\n sqldb.session.expunge(company_founder2)\n make_transient(company_founder2)\n company2_copy = company_founder2\n company2_copy.id = None\n sqldb.session.add(company2_copy)\n company2_copy._copy = CompanyDbObject.query.filter_by(id=id2).first()\n sqldb.session.commit()\n\n result = self.test_client.get('/entity/company/')\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n self.assertEqual(len(result_data['result']['companies']), 2)\n self.assertEqual(result_data['result']['count'], 2)\n self.assertEqual(result_data['result']['total'], 2)\n person = result_data['result']['companies'][0]\n self.assertIn(person['id'].split('_')[0], (id1, id2))\n person = result_data['result']['companies'][1]\n self.assertIn(person['id'].split('_')[0], (id1, id2))\n\n @authorized()\n def test_get_persons_without_copies(self):\n founder_person = PrivatePersonDbObject(**{\n \"_owner_id\": self.user.id,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 14),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198259,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Тамбасова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"38\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"70\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 
705\"\n })\n sqldb.session.add(founder_person)\n sqldb.session.commit()\n founder_person_id = founder_person.id\n\n founder_person2 = PrivatePersonDbObject(**{\n \"_owner_id\": self.user.id,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"patronymic\": u\"Поликарпович\",\n \"inn\": \"781108730780\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now() - timedelta(days=365 * 14),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198259,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Тамбасова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"38\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"70\",\n },\n \"caption\": u\"Сантехник\",\n \"phone\": \"+79210001122\",\n \"email\": \"[email protected]\",\n \"living_country_code\": 3,\n \"living_address\": u\"г. Санкт-Петербург, д. Гадюкино, бульвар Мотоциклистов казарма 4, кв. 705\"\n })\n sqldb.session.add(founder_person2)\n sqldb.session.commit()\n\n founder_person2_id = founder_person2.id\n\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={},\n _owner=self.user,\n status=BatchStatusEnum.BS_NEW,\n paid=False,\n deleted=True\n )\n sqldb.session.add(new_batch)\n sqldb.session.commit()\n\n person1_copy = PrivatePersonDbObject.query.filter_by(id=founder_person_id).first()\n sqldb.session.expunge(person1_copy)\n make_transient(person1_copy)\n person1_copy.id = None\n person1_copy._copy = founder_person.id\n person1_copy._batch = new_batch\n sqldb.session.add(person1_copy)\n sqldb.session.commit()\n\n founder_person = PrivatePersonDbObject.query.filter_by(id=founder_person_id).first()\n\n person2_copy = PrivatePersonDbObject.query.filter_by(id=founder_person2_id).first()\n sqldb.session.expunge(person2_copy)\n make_transient(person2_copy)\n person2_copy.id = None\n person2_copy._copy = founder_person2.id\n person2_copy._batch = new_batch\n sqldb.session.add(person2_copy)\n sqldb.session.commit()\n founder_person2 = PrivatePersonDbObject.query.filter_by(id=founder_person2_id).first()\n\n result = self.test_client.get('/entity/person/')\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n self.assertEqual(len(result_data['result']['persons']), 2)\n self.assertEqual(result_data['result']['count'], 2)\n self.assertEqual(result_data['result']['total'], 2)\n person = result_data['result']['persons'][0]\n self.assertIn(person['id'].split('_')[0], (unicode(founder_person.id), unicode(founder_person2.id)))\n person = result_data['result']['persons'][1]\n self.assertIn(person['id'].split('_')[0], (unicode(founder_person.id), unicode(founder_person2.id)))\n\n @authorized()\n def test_upload_file(self):\n strIO = StringIO.StringIO()\n content = \"File content\"\n strIO.write(content)\n strIO.seek(0)\n result = self.test_client.post('/storage/put/', data={\n \"file\": (strIO, 'filename.pdf')\n })\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('id', result_data['result'])\n self.assertIn('size', 
result_data['result'])\n self.assertIn('url', result_data['result'])\n self.assertEqual(result_data['result']['size'], len(content))\n f_obj = FileObject.query.filter_by(id=result_data['result']['id']).first()\n self.assertIsNotNone(f_obj)\n self.assertEqual(f_obj._owner, self.user)\n url = FileStorage.get_url(f_obj, self.config)\n self.assertTrue(url.endswith('/filename.pdf'), url)\n self.assertIn(unicode(f_obj.id), url)\n path = FileStorage.get_path(f_obj, self.config)\n self.assertTrue(os.path.exists(path), u\"File %s does not exist\" % path)\n with open(path, 'r') as f:\n self.assertEqual(f.read(), content)\n\n def test_get_okvad_catalog(self):\n self.import_okvad_catalog()\n result = self.test_client.get('/get_okvad_catalog/')\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(len(result_data['result']), 2)\n\n o1 = result_data['result'][0]\n o2 = result_data['result'][1]\n\n del o1['_id']\n del o1['departments'][0]['_id']\n del o1['departments'][1]['_id']\n del o2['_id']\n del o2['departments'][0]['_id']\n del o2['departments'][1]['_id']\n self.maxDiff = None\n self.assertEqual(o1, {u'name': u'foo', u'departments': [{u'name': u'bar', u'main_okvad': u'1'},\n {u'name': u'baz', u'main_okvad': u'5'}]})\n self.assertEqual(o2, {u'name': u'foo2', u'departments': [{u'name': u'bar2', u'main_okvad': u'11'},\n {u'name': u'baz2', u'main_okvad': u'15'}]})\n\n @authorized()\n def test_send_paid_docs(self):\n data = {\n 'name': u\"Нейм\",\n 'surname': u\"Сёрнейм\",\n 'inn': \"781108730780\",\n 'phone': \"+79110010203\",\n 'email': \"[email protected]\",\n '_owner_id': self.user.id\n }\n person = PrivatePersonDbObject(**data)\n sqldb.session.add(person)\n sqldb.session.commit()\n person_id = person.id\n\n new_company = CompanyDbObject(**dict({\n \"_owner_id\": self.user.id,\n \"ogrn\": \"1234567890123\",\n \"inn\": \"781108730780\",\n \"full_name\": u\"Протон\",\n \"short_name\": u\"Про\",\n \"kpp\": \"999999999\",\n \"company_type\": CompanyTypeEnum.CT_RUSSIAN,\n \"general_manager\": {\n \"_id\" : person.id,\n \"type\": \"person\"\n }})\n )\n\n sqldb.session.add(new_company)\n sqldb.session.commit()\n new_company_id = new_company.id\n\n data = {\n u\"full_name\": u\"образовательное учреждение дополнительного образования детей специализированная \"\n u\"детско-юношеская спортивная школа олимпийского резерва по боксу\",\n u\"short_name\": u\"Парампампам\",\n u\"doc_date\": datetime.now(),\n u\"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Седова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n \"ifns\": 1234\n },\n u\"selected_secretary\": {\n \"type\": \"company\",\n \"_id\": new_company_id\n },\n }\n new_user_doc = UserDocument()\n new_user_doc.parse_raw_value(dict(document_type=DocumentTypeEnum.DT_GARANT_LETTER_SUBARENDA, data=data), None,\n False)\n doc_list = [\n new_user_doc.db_value()\n ]\n\n file1_id = FileObject(file_path='path', file_name='f.pdf', _owner=self.user)\n sqldb.session.add(file1_id)\n sqldb.session.commit()\n\n file2_id = FileObject(file_path='path', file_name='f2.pdf', _owner=self.user)\n sqldb.session.add(file2_id)\n sqldb.session.commit()\n\n new_batch_db_object = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW,\n _owner=self.user,\n paid=True,\n data=data,\n # rendered_docs=[{\n # 'document_type': 
DocumentTypeEnum.DT_ACCOUNTANT_ORDER,\n # 'file_link': 'http://asdfasdfasdfasdfasdf.ru/a.pdf',\n # 'caption': u'Приказ о приёме на работу бухгалтера',\n # 'file_id': file1_id.id,\n # 'document_id': ObjectId(),\n # 'status': UserDocumentStatus.DS_RENDERED\n # }, {\n # 'document_type': DocumentTypeEnum.DT_ARTICLES,\n # 'file_link': 'http://asdfhjklasdhfsf.ru/b.pdf',\n # 'caption': u'Устав',\n # 'file_id': file2_id.id,\n # 'document_id': ObjectId(),\n # 'status': UserDocumentStatus.DS_RENDERED\n # }]\n )\n sqldb.session.add(new_batch_db_object)\n sqldb.session.commit()\n\n BatchManager.send_batch_docs_to_user(new_batch_db_object.id, self.config)\n\n self.assertEqual(len(self.mailer.mails), 1)\n self.assertEqual(self.mailer.mails[0]['message']['subject'], u'Документы для регистрации ООО «Парампампам»')\n\n @authorized()\n def test_create_batch(self):\n data = {\n \"batch_type\": DocumentBatchTypeEnum.DBT_TEST_TYPE\n }\n result = self.test_client.post('/batch/create/', data=data)\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n\n self.assertEqual(DocumentBatchDbObject.query.count(), 1)\n batch = DocumentBatchDbObject.query.first()\n self.assertEqual(batch.status, BatchStatusEnum.BS_NEW)\n self.assertEqual(batch._owner, self.user)\n self.assertEqual(batch.batch_type, DocumentBatchTypeEnum.DBT_TEST_TYPE)\n self.assertIsNotNone(batch.creation_date)\n self.assertIsNone(batch.finalisation_date)\n self.assertEqual(batch.deleted, False)\n self.assertEqual(batch.data, {})\n self.assertIsNone(batch.error_info)\n self.assertIsNone(batch.result_fields)\n self.assertIsNone(batch._metadata)\n self.assertIsNone(batch.pay_info)\n self.assertEqual(batch.paid, False)\n self.assertIsNone(batch.last_change_dt)\n\n self.assertEqual(BatchDocumentDbObject.query.count(), 0)\n\n @authorized()\n def test_get_new_batch(self):\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={},\n _owner=self.user,\n status=BatchStatusEnum.BS_NEW,\n paid=False\n )\n sqldb.session.add(new_batch)\n sqldb.session.commit()\n\n result = self.test_client.get('/batch/?batch_id=%s' % new_batch.id)\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n\n self.assertIn('total', result_data['result'])\n self.assertIn('count', result_data['result'])\n self.assertIn('batches', result_data['result'])\n\n self.assertEqual(result_data['result']['total'], 1)\n self.assertEqual(result_data['result']['count'], 1)\n\n batches = result_data['result']['batches']\n\n self.assertEqual(len(batches), 1)\n\n self.assertIn('creation_date', batches[0])\n del batches[0]['creation_date']\n self.assertEqual(batches[0], {\n u'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n u'data': {},\n u'id': new_batch.id,\n u'paid': u'false',\n u'status': UserDocumentStatus.DS_NEW\n })\n\n @authorized()\n def test_get_batches_2_of_3(self):\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={},\n _owner=self.user,\n status=BatchStatusEnum.BS_NEW,\n paid=False\n )\n sqldb.session.add(new_batch)\n sqldb.session.commit()\n\n new_batch2 = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={},\n _owner=self.user,\n status=BatchStatusEnum.BS_NEW,\n paid=False\n )\n 
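# (annotation) a third batch follows; the request then passes ?count=2&offset=1, and the\n        # assertions imply newest-first ordering: the returned page holds new_batch2, then new_batch.\n        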
sqldb.session.add(new_batch2)\n sqldb.session.commit()\n\n new_batch3 = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={},\n _owner=self.user,\n status=BatchStatusEnum.BS_NEW,\n paid=False\n )\n sqldb.session.add(new_batch3)\n sqldb.session.commit()\n\n result = self.test_client.get('/batch/?count=2&offset=1')\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n\n self.assertIn('total', result_data['result'])\n self.assertIn('count', result_data['result'])\n self.assertIn('batches', result_data['result'])\n\n self.assertEqual(result_data['result']['total'], 3)\n self.assertEqual(result_data['result']['count'], 2)\n\n batches = result_data['result']['batches']\n\n self.assertEqual(len(batches), 2)\n\n self.assertIn('creation_date', batches[0])\n self.assertIn('creation_date', batches[1])\n\n del batches[0]['creation_date']\n del batches[1]['creation_date']\n\n self.assertEqual(batches[0], {\n u'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n u'data': {},\n u'id': new_batch2.id,\n u'paid': u'false',\n u'status': UserDocumentStatus.DS_NEW\n })\n\n self.assertEqual(batches[1], {\n u'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n u'data': {},\n u'id': new_batch.id,\n u'paid': u'false',\n u'status': UserDocumentStatus.DS_NEW\n })\n\n @authorized()\n def test_get_deleted_batch(self):\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={},\n _owner=self.user,\n status=BatchStatusEnum.BS_NEW,\n paid=False,\n deleted=True\n )\n sqldb.session.add(new_batch)\n sqldb.session.commit()\n\n result = self.test_client.get('/batch/?batch_id=%s' % new_batch.id)\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 404)\n\n result = self.test_client.get('/batch/')\n self.assertIsNotNone(result)\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n\n self.assertIn('total', result_data['result'])\n self.assertIn('count', result_data['result'])\n self.assertIn('batches', result_data['result'])\n\n self.assertEqual(result_data['result']['total'], 0)\n self.assertEqual(result_data['result']['count'], 0)\n\n batches = result_data['result']['batches']\n\n self.assertEqual(len(batches), 0)\n" }, { "alpha_fraction": 0.5515342354774475, "alphanum_fraction": 0.5546813607215881, "avg_line_length": 32.01298522949219, "blob_id": "f8dd9faa625735d546e14f254d7a81346ad5c057", "content_id": "f710a72a216a73d2bfa1b67ec1c315f86a3a6610", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2542, "license_type": "no_license", "max_line_length": 151, "num_lines": 77, "path": "/app/services/ifns/utils/process_okvad.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport json\nimport os\nfrom time import time\nfrom bson.objectid import ObjectId\nfrom fw.db.sql_base import db as sqldb\nfrom fw.catalogs.models import OkvadObject\n\n\ndef process_okvad():\n with open(os.path.normpath(os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../data/okvad.json'))), 'r') as f:\n okvad_data = json.loads(f.read())\n\n with open(os.path.normpath(os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../data/okvad_nalog_eshn.txt'))),\n 'r') as f:\n eshn_okvads = filter(lambda x: 
x, [i.strip() for i in f.read().strip().split('\\n')])\n\n with open(os.path.normpath(os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../data/okvad_nalog_general.txt'))),\n 'r') as f:\n gen_okvads = filter(lambda x: x, [i.strip() for i in f.read().strip().split('\\n')])\n\n print(\"a\")\n OkvadObject.query.delete()\n sqldb.session.commit()\n print(\"b\")\n\n PARENTS = {}\n for s in okvad_data:\n okved = s[\"okved\"]\n caption = s[\"name\"]\n parent = s.get(\"parent_okved\", None)\n if okved in eshn_okvads:\n nalog = 'eshn'\n elif okved in gen_okvads:\n nalog = 'general'\n else:\n nalog = 'usn'\n\n t = time()\n new_ok = OkvadObject(\n id=str(ObjectId()),\n okved=okved,\n caption=caption,\n nalog=nalog,\n parent=parent\n )\n sqldb.session.add(new_ok)\n sqldb.session.commit()\n print(time() - t)\n item_id = new_ok.id\n PARENTS[okved] = item_id\n print(\"c\")\n\n for obj in OkvadObject.query.filter():\n if obj.parent and len(obj.parent) > 15:\n continue\n if obj.parent:\n obj_parent = obj.parent\n if (len(obj_parent) > 1) and (len(obj_parent) < 9) and len(\n filter(lambda c: c == '.' or c.isdigit(), obj_parent)) == len(obj_parent) and obj_parent[0].isdigit() and obj_parent[-1].isdigit():\n obj_parent = obj_parent[:2]\n\n obj.parent = PARENTS[obj_parent]\n sqldb.session.commit()\n print(\"d\")\n\n for okvad in eshn_okvads:\n if not OkvadObject.query.filter_by(okved=okvad).scalar():\n print(u\"Invalid okved in eshn okvads: %s\" % okvad)\n\n for okvad in gen_okvads:\n if not OkvadObject.query.filter_by(okved=okvad).scalar():\n print(u\"Invalid okved in general okvads: %s\" % okvad)\n\n\nif __name__ == \"__main__\":\n process_okvad()\n" }, { "alpha_fraction": 0.45840901136398315, "alphanum_fraction": 0.46047669649124146, "avg_line_length": 53.14372253417969, "blob_id": "92ca1ea33b8d2be070600a875af11cd24ade1d9f", "content_id": "fbe590b59d8cb28c38c3d4dde03059bb28183fdb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 35943, "license_type": "no_license", "max_line_length": 161, "num_lines": 661, "path": "/app/manage_commands/batch_commands.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import timedelta, datetime\nimport json\nfrom bson.objectid import ObjectId\nfrom flask import current_app\nimport requests\nfrom common_utils import get_russian_month_skl\nfrom fw.auth.models import AuthUser\nfrom fw.async_tasks import send_email, rendering\nfrom fw.auth.user_manager import UserManager\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.batch_manager import BatchManager\nfrom fw.documents.db_fields import DocumentBatchDbObject, PrivatePersonDbObject, CompanyDbObject, BatchDocumentDbObject\nfrom fw.documents.enums import BatchStatusEnum, DocumentBatchTypeEnum, DocumentTypeEnum\nfrom fw.documents.fields.complex_doc_fields import ObjectRefField\nfrom fw.documents.fields.doc_fields import PrivatePerson, CompanyObject\nfrom fw.monitoring_utils.zabbix_sender import zabbixed\nfrom manage_commands import BaseManageCommand, get_single\nfrom services.ifns import ifns_manager\nfrom services.ifns.data_model.enums import IfnsRegStatusEnum\nfrom services.ip_reg.documents.enums import IPDocumentDeliveryTypeStrEnum\nfrom services.llc_reg.documents.enums import RegistrationWay, IfnsServiceEnum, DocumentDeliveryTypeStrEnum\nfrom services.llc_reg.documents.general_doc_fields import FounderObject\nimport html5lib\nfrom template_filters import utm_args\n\n\nclass 
CheckBatchIfnsRegStatusCommand(BaseManageCommand):\n NAME = \"check_batch_ifns_reg_status\"\n\n @zabbixed('check_ifns_reg', (1, 0))\n def run(self):\n # self.logger.info(u\"Проверка статуса регистрации компаний\")\n # self.logger.info(u'=' * 50)\n\n is_production = not self.config['STAGING'] and not self.config['DEBUG']\n days_30 = timedelta(days=30)\n # get list of testers\n from services.ifns.data_model.models import IfnsBookingObject\n\n # and exclude their batches\n query = DocumentBatchDbObject.query.filter(\n DocumentBatchDbObject.status == BatchStatusEnum.BS_FINALISED,\n DocumentBatchDbObject.paid == True,\n DocumentBatchDbObject.finalisation_date >= datetime.now() - days_30\n )\n if is_production:\n query = query.join(AuthUser).filter(AuthUser.is_tester==False)\n\n skip_statuses = (IfnsRegStatusEnum.IRS_REGISTERED, IfnsRegStatusEnum.IRS_REGISTRATION_DECLINED)\n for batch in query:\n status = (batch.result_fields or {}).get('ifns_reg_info', {}).get('status', 'unknown')\n if status in skip_statuses:\n continue\n\n batch_id = batch.id\n # self.logger.info(u\"%s\" % unicode(batch_id))\n\n ifns = (batch.result_fields or {}).get('ifns', None)\n\n #self.logger.info(u\"ifns: %s\" % ifns)\n ifns_data = ifns_manager.get_ifns_data(ifns)\n if ifns_data and ifns_data.rou and 'code' in ifns_data.rou:\n ifns = ifns_data.rou['code']\n\n full_name = \"\"\n short_name = \"\"\n if batch.batch_type == DocumentBatchTypeEnum.DBT_NEW_LLC:\n full_name = batch.data.get('full_name', None)\n short_name = batch.data.get('short_name', None)\n elif batch.batch_type == DocumentBatchTypeEnum.DBT_NEW_IP:\n try:\n reg_responsible_person = batch.data.get('person', None)\n person_field = ObjectRefField()\n person_field.parse_raw_value(reg_responsible_person, {}, api_data=False, update=False)\n full_name = person_field.full_name\n short_name = person_field.short_name\n except Exception:\n self.logger.exception(u\"Failed to parse IP person\")\n else:\n self.logger.exception(u\"Unknown batch type for batch %s\" % str(batch['_id']))\n\n doc_rec_type = batch.data.get('obtain_way', None)\n application_form = batch.data.get('registration_way', None)\n if batch.batch_type == DocumentBatchTypeEnum.DBT_NEW_LLC:\n applicant_fio = u\"\"\n elif batch.batch_type == DocumentBatchTypeEnum.DBT_NEW_IP:\n applicant_fio = full_name\n for doc in batch._documents:\n if doc.document_type == DocumentTypeEnum.DT_REGISTRATION_FEE_INVOICE:\n founder_applicant = doc.data.get('founder_applicant', None)\n if founder_applicant:\n try:\n founder_app_field = FounderObject()\n founder_app_field.parse_raw_value(founder_applicant, {}, api_data=False, update=False)\n applicant_fio = founder_app_field.full_name\n except Exception:\n self.logger.exception(u\"Failed to parse founder object\")\n break\n\n if application_form == RegistrationWay.RW_RESPONSIBLE_PERSON:\n reg_responsible_person = batch.data.get('reg_responsible_person', None)\n if reg_responsible_person:\n try:\n reg_responsible_person_field = ObjectRefField()\n reg_responsible_person_field.parse_raw_value(\n reg_responsible_person, {}, api_data=False, update=False)\n applicant_fio = reg_responsible_person_field.full_name\n except Exception:\n self.logger.exception(u\"Failed to parse person\")\n\n ifns_booking = IfnsBookingObject.query.filter_by(\n batch_id=batch_id,\n service_id=IfnsServiceEnum.IS_REG_COMPANY,\n _discarded=False\n ).first()\n if ifns_booking:\n dt = ifns_booking.date\n if dt and is_production and datetime.now() < dt:\n self.logger.info(u\"Too early - skip\")\n continue\n\n # 
self.logger.info(u\"Checking batch %s\" % unicode(batch_id))\n date_from = batch.finalisation_date - timedelta(days=5)\n date_to = date_from + timedelta(days=30)\n result = current_app.external_tools.get_ifns_registrations(\n full_name,\n date_from=date_from,\n date_to=date_to,\n ifns=ifns,\n service_nalog_ru_url=self.config['SERVICE_NALOG_RU_URL'],\n logger=self.logger) or []\n\n if not result:\n # self.logger.info(u\"No reservations for batch %s\" % unicode(batch_id))\n continue\n else:\n for item in result:\n try:\n self.logger.info(u\"Found reservation info for batch %s (%s)\" % (\n batch_id,\n json.dumps(item, default=lambda x: unicode(x))\n ))\n if 'status' not in item:\n continue\n status = item['status']\n batch_result_fields = batch.result_fields or {}\n\n if status == 'registered' and ('ogrn' in item or 'ogrnip' in item):\n ogrn = item.get('ogrn')\n ogrnip = item.get('ogrnip')\n reg_date = None\n try:\n reg_date = item.get('reg_date', None)\n if reg_date:\n reg_date = datetime.strptime(reg_date, \"%d.%m.%Y\")\n except Exception:\n self.logger.exception(u\"Failed to convert reg_date\")\n reg_info = {\n 'status': 'registered',\n 'reg_date': reg_date,\n }\n if ogrn:\n reg_info['ogrn'] = ogrn\n elif ogrnip:\n reg_info['ogrnip'] = ogrnip\n\n booking = IfnsBookingObject.query.filter_by(\n batch_id=batch_id,\n service_id=IfnsServiceEnum.IS_REG_COMPANY,\n _discarded=False\n ).first()\n if booking:\n booking.reg_info = reg_info\n sqldb.session.commit()\n\n batch_result_fields['ifns_reg_info'] = {\n 'status': 'registered',\n 'reg_date': reg_date,\n 'full_name': full_name\n }\n if ogrn:\n batch_result_fields['ifns_reg_info']['ogrn'] = ogrn\n elif ogrnip:\n batch_result_fields['ifns_reg_info']['ogrnip'] = ogrnip\n\n DocumentBatchDbObject.query.filter_by(id=batch.id).update({\n 'result_fields': batch_result_fields\n })\n sqldb.session.commit()\n recipient = batch._owner.email or u\"\"\n obtain_person_fio = u\"\"\n if not recipient:\n self.logger.warn(\n u\"Failed to send ifns reg notify to user %s - no email address\" % batch._owner_id)\n else:\n docs_recipient_fio = \"\"\n # Batch Type specific logic\n if batch.batch_type == DocumentBatchTypeEnum.DBT_NEW_LLC:\n # TODO: This should be incapsulated\n if doc_rec_type == DocumentDeliveryTypeStrEnum.DDT_ISSUE_TO_THE_APPLICANT:\n doc = BatchDocumentDbObject.query.filter_by(batch=batch,\n document_type=DocumentTypeEnum.DT_P11001).first()\n if doc:\n founders = doc.data['founders']\n for founder in founders:\n if founder.get('documents_recipient_type', '') != '':\n person = founder.get('person', None)\n if person and '_id' in person:\n person_obj = PrivatePersonDbObject.query.filter_by(\n id=person['_id']).scalar()\n if person_obj:\n pp = PrivatePerson.db_obj_to_field(person_obj)\n if pp:\n docs_recipient_fio = pp.full_name\n else:\n company = founder.get('company', None)\n if company:\n company_db_object = CompanyDbObject.query.filter_by(\n id=company['_id']).scalar()\n if company_db_object:\n cc = CompanyObject.db_obj_to_field(company_db_object)\n if cc and cc.general_manager and cc.general_manager.initialized:\n docs_recipient_fio = cc.general_manager.full_name\n elif doc_rec_type == DocumentDeliveryTypeStrEnum.DDT_ISSUE_TO_THE_APPLICANT_OR_AGENT:\n doc = BatchDocumentDbObject.query.filter_by(batch=batch,\n document_type=DocumentTypeEnum.DT_DOVERENNOST_OBTAIN).first()\n\n if doc:\n doc_obtain_person = doc.data.get('doc_obtain_person', None)\n if doc_obtain_person and '_id' in doc_obtain_person:\n person_obj = 
PrivatePersonDbObject.query.filter_by(\n id=doc_obtain_person['_id']).scalar()\n if person_obj:\n pp = PrivatePerson.db_obj_to_field(person_obj)\n if pp:\n docs_recipient_fio = pp.full_name\n\n doc = BatchDocumentDbObject.query.filter_by(batch=batch,\n document_type=DocumentTypeEnum.DT_P11001).first()\n if doc:\n founders = doc.data['founders']\n for founder in founders:\n if founder.get('documents_recipient_type', '') != '':\n person = founder.get('person', None)\n if person and '_id' in person:\n person_obj = PrivatePersonDbObject.query.filter_by(\n id=person['_id']).scalar()\n if person_obj:\n pp = PrivatePerson.db_obj_to_field(person_obj)\n if pp:\n obtain_person_fio = pp.full_name\n else:\n company = founder.get('company', None)\n if company:\n company_db_object = CompanyDbObject.query.filter_by(\n id=company['_id']).scalar()\n if company_db_object:\n cc = CompanyObject.db_obj_to_field(company_db_object)\n if cc and cc.general_manager and cc.general_manager.initialized:\n obtain_person_fio = cc.general_manager.full_name\n ifns_book_doc_receive_url = \"%s://%s/ooo/?id=%s\" % (self.config['WEB_SCHEMA'], self.config['DOMAIN'], unicode(batch_id))\n ifns_book_doc_receive_url = utm_args(ifns_book_doc_receive_url, \"ifns_llc_reg_success\", batch._owner_id) + u\"#page=obtaining\"\n ifns_book_doc_receive_url = UserManager.make_auth_url(ifns_book_doc_receive_url, batch._owner).get_url(self.config)\n\n send_email.send_email.delay(\n recipient,\n 'ifns_llc_reg_success',\n short_name=short_name,\n doc_rec_by_email=(doc_rec_type == DocumentDeliveryTypeStrEnum.DDT_SEND_BY_MAIL),\n doc_rec_by_responsible=(\n doc_rec_type == DocumentDeliveryTypeStrEnum.DDT_ISSUE_TO_THE_APPLICANT_OR_AGENT),\n ifns_book_doc_receive_url=ifns_book_doc_receive_url,\n schema=self.config['WEB_SCHEMA'],\n domain=self.config['DOMAIN'],\n ogrn=ogrn,\n docs_ready_date=u\"%d %s %s года\" % (\n reg_date.day, get_russian_month_skl(reg_date.month), reg_date.year),\n docs_recipient_fio=docs_recipient_fio,\n obtain_person_fio=obtain_person_fio,\n service_startup=datetime.now() < datetime(2015, 6, 1),\n user_id=str(batch._owner_id)\n )\n elif batch.batch_type == DocumentBatchTypeEnum.DBT_NEW_IP:\n\n if doc_rec_type == IPDocumentDeliveryTypeStrEnum.IP_DDT_RESPONSIBLE_PERSON:\n for doc in BatchDocumentDbObject.query.filter(\n BatchDocumentDbObject.batch == batch,\n BatchDocumentDbObject.document_type.in_((\n DocumentTypeEnum.DT_IP_DOV_FILING_RECEIVING_DOCS,\n DocumentTypeEnum.DT_IP_DOV_RECEIVING_DOCS))):\n person = doc.data.get('ip_responsible_person', None)\n if person and '_id' in person:\n person_obj = PrivatePersonDbObject.query.filter_by(\n id=person['_id']).scalar()\n if person_obj:\n pp = PrivatePerson.db_obj_to_field(person_obj)\n if pp:\n docs_recipient_fio = pp.full_name\n break\n\n ifns_book_doc_receive_url = \"%s://%s/ip/?id=%s\" % (self.config['WEB_SCHEMA'], self.config['DOMAIN'], batch_id)\n ifns_book_doc_receive_url = utm_args(ifns_book_doc_receive_url, 'ifns_ip_reg_success', batch._owner_id) + u\"#page=obtaining\"\n ifns_book_doc_receive_url = UserManager.make_auth_url(ifns_book_doc_receive_url, batch._owner).get_url(self.config)\n\n send_email.send_email.delay(\n recipient,\n 'ifns_ip_reg_success',\n short_name=short_name,\n doc_rec_by_email=(doc_rec_type == IPDocumentDeliveryTypeStrEnum.IP_DDT_MAIL),\n doc_rec_by_responsible=(\n doc_rec_type == IPDocumentDeliveryTypeStrEnum.IP_DDT_RESPONSIBLE_PERSON),\n ifns_book_doc_receive_url=ifns_book_doc_receive_url,\n schema=self.config['WEB_SCHEMA'],\n 
domain=self.config['DOMAIN'],\n ogrnip=ogrnip,\n docs_ready_date=u\"%d %s %s года\" % (\n reg_date.day, get_russian_month_skl(reg_date.month), reg_date.year),\n docs_recipient_fio=docs_recipient_fio,\n obtain_person_fio=obtain_person_fio,\n service_startup=datetime.now() < datetime(2015, 6, 1),\n user_id=str(batch._owner_id)\n )\n self.notify_admin(True, short_name, batch_id, application_form, doc_rec_type, applicant_fio,\n recipient, ogrn=ogrn, ogrnip=ogrnip, batch_type=batch.batch_type)\n elif status == 'registration_declined':\n reg_date = None\n try:\n reg_date = item.get('reg_date', None)\n if reg_date:\n reg_date = datetime.strptime(reg_date, \"%d.%m.%Y\")\n except Exception:\n self.logger.exception(u\"Failed to convert reg_date\")\n\n IfnsBookingObject.query.filter_by(\n batch_id=batch_id,\n service_id=IfnsServiceEnum.IS_REG_COMPANY,\n _discarded=False).update({\n 'reg_info': {\n 'status': 'registration_declined',\n 'reg_date': reg_date\n }\n })\n sqldb.session.commit()\n batch_result_fields['ifns_reg_info'] = {\n 'status': 'registration_declined',\n 'reg_date': reg_date,\n 'full_name': full_name\n }\n recipient = batch._owner.email or u\"\"\n if not recipient:\n self.logger.warn(\n u\"Failed to send ifns reg notify to user %s - no email address\" % batch._owner_id)\n else:\n if batch.batch_type == DocumentBatchTypeEnum.DBT_NEW_LLC:\n ifns_book_doc_receive_url = \"%s://%s/ooo/?id=%s\" % (self.config['WEB_SCHEMA'], self.config['DOMAIN'], batch_id)\n ifns_book_doc_receive_url = utm_args(ifns_book_doc_receive_url, 'ifns_llc_reg_fail', batch._owner_id) + u\"#page=refusing\"\n ifns_book_doc_receive_url = UserManager.make_auth_url(ifns_book_doc_receive_url, batch._owner).get_url(self.config)\n\n send_email.send_email.delay(\n recipient,\n 'ifns_llc_reg_fail',\n short_name=short_name,\n doc_rec_by_email=(doc_rec_type == DocumentDeliveryTypeStrEnum.DDT_SEND_BY_MAIL),\n ifns_book_doc_receive_url=ifns_book_doc_receive_url,\n schema=self.config['WEB_SCHEMA'],\n domain=self.config['DOMAIN'],\n user_id=batch._owner_id\n )\n elif batch.batch_type == DocumentBatchTypeEnum.DBT_NEW_IP:\n ifns_book_doc_receive_url = \"%s://%s/ip/?id=%s\" % (self.config['WEB_SCHEMA'], self.config['DOMAIN'], batch_id)\n ifns_book_doc_receive_url = utm_args(ifns_book_doc_receive_url, 'ifns_ip_reg_fail', batch._owner_id) + u\"#page=refusing\"\n ifns_book_doc_receive_url = UserManager.make_auth_url(ifns_book_doc_receive_url, batch._owner).get_url(self.config)\n send_email.send_email.delay(\n recipient,\n 'ifns_ip_reg_fail',\n short_name=short_name,\n doc_rec_by_email=(doc_rec_type == IPDocumentDeliveryTypeStrEnum.IP_DDT_MAIL),\n ifns_book_doc_receive_url=ifns_book_doc_receive_url,\n schema=self.config['WEB_SCHEMA'],\n domain=self.config['DOMAIN'],\n user_id=batch._owner_id\n )\n DocumentBatchDbObject.query.filter_by(id=batch.id).update({\n 'result_fields': batch_result_fields\n })\n sqldb.session.commit()\n self.notify_admin(False, short_name, batch_id, application_form, doc_rec_type,\n applicant_fio, recipient, batch_type=batch.batch_type)\n elif status == 'progress':\n IfnsBookingObject.query.filter_by(\n batch_id=batch_id,\n service_id=IfnsServiceEnum.IS_REG_COMPANY,\n _discarded=False\n ).update({'reg_info': {\n 'status': 'progress',\n 'reg_date': item.get('reg_date', None)\n }\n })\n batch_result_fields['ifns_reg_info'] = {\n 'status': 'progress',\n 'reg_date': item.get('reg_date', None),\n 'full_name': full_name\n }\n DocumentBatchDbObject.query.filter_by(id=batch.id).update({\n 'result_fields': 
batch_result_fields\n })\n sqldb.session.commit()\n else:\n raise Exception()\n\n break\n except Exception:\n self.logger.exception(u\"Failed to handle result\")\n\n def notify_admin(self, is_success_reg, company_short_name, batch_id, application_form, obtain_form, applicant_fio,\n user_email, ogrn=None, ogrnip=None, batch_type=None):\n application_form_rus = RegistrationWay._NAMES.get(application_form, u\"<неизвестно>\")\n obtain_form_rus = DocumentDeliveryTypeStrEnum._NAMES.get(obtain_form, u\"<неизвестно>\")\n recipient_list = self.config['YURIST_EMAIL_LIST']\n for recipient in recipient_list:\n send_email.send_email.delay(\n recipient,\n 'admin_notify_llc_ifns_reg' if batch_type == DocumentBatchTypeEnum.DBT_NEW_LLC else 'admin_notify_ip_ifns_reg',\n short_name=company_short_name,\n success_reg=is_success_reg,\n ogrn=ogrn,\n ogrnip=ogrnip,\n batch_id=batch_id,\n application_form_rus=application_form_rus,\n obtain_form_rus=obtain_form_rus,\n applicant_fio=applicant_fio,\n user_email=user_email\n )\n\n\nclass RebuildBatchCommand(BaseManageCommand):\n NAME = \"rebuild_batch\"\n\n def run(self):\n self.logger.info(u\"Перегенерация пакета документов\")\n self.logger.info(u'=' * 50)\n\n batch_id = get_single(u'batch id: ')\n try:\n ObjectId(batch_id)\n except Exception:\n self.logger.error(u\"Invalid batch id\")\n return False\n\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()\n if not batch:\n self.logger.error(u\"Batch not found\")\n return False\n\n if batch.status == BatchStatusEnum.BS_FINALISED:\n\n batch.status = BatchStatusEnum.BS_BEING_FINALISED\n sqldb.session.commit()\n async_result = rendering.render_batch.delay(batch_id)\n\n if not async_result.ready():\n batch.current_task_id = async_result.id\n sqldb.session.commit()\n\n return True\n elif batch.status == BatchStatusEnum.BS_EDITED:\n manager = BatchManager.init(batch)\n manager.finalize_batch(self.config, self.logger, batch)\n return True\n self.logger.error(u\"Invalid current batch status\")\n return False\n\n\nclass BuildDocumentCommand(BaseManageCommand):\n NAME = \"rebuild_document\"\n\n def run(self):\n self.logger.info(u\"Перегенерация документа\")\n self.logger.info(u'=' * 50)\n\n doc_id = get_single(u'document id: ')\n doc = BatchDocumentDbObject.query.filter_by(id=doc_id).scalar()\n if not doc:\n self.logger.error(u\"Failed to find document\")\n return False\n batch = doc.batch\n rendering.render_batch_document.delay(batch.id, doc_id)\n\n self.logger.error(u\"Started document render\")\n return False\n\n\nclass SendBatchDocumentsToEmailCommand(BaseManageCommand):\n NAME = \"email_batch_docs\"\n\n def run(self):\n self.logger.info(u\"Отправка пакета документов на email\")\n self.logger.info(u'=' * 50)\n\n batch_id = get_single(u'batch id: ')\n email = get_single(u'email: ')\n try:\n ObjectId(batch_id)\n except Exception:\n self.logger.error(u\"Invalid batch id\")\n return False\n\n batch = DocumentBatchDbObject.query.filter_by(id=batch_id).scalar()\n if not batch:\n self.logger.error(u\"Batch not found\")\n return False\n\n if batch.status == BatchStatusEnum.BS_FINALISED:\n total_attachments = BatchManager.get_batch_rendered_docs(batch, current_app.config, current_app.logger)\n send_email.send_email.delay(email, 'email_batch_docs', attach=total_attachments)\n return True\n self.logger.error(u\"Invalid current batch status\")\n return True\n\n\nclass GetFssNumberCommand(BaseManageCommand):\n NAME = \"get_fss_number\"\n\n @staticmethod\n def get_fss_number(logger):\n from copy import copy\n days_30 = 
timedelta(days=30)\n for batch in DocumentBatchDbObject.query.filter(\n DocumentBatchDbObject.batch_type == DocumentBatchTypeEnum.DBT_NEW_LLC,\n DocumentBatchDbObject.status == BatchStatusEnum.BS_FINALISED,\n DocumentBatchDbObject.paid == True,\n DocumentBatchDbObject.deleted == False,\n DocumentBatchDbObject.finalisation_date >= datetime.utcnow() - days_30\n ).join(AuthUser).filter(AuthUser.is_tester == False):\n if batch.result_fields and 'fss_number' in batch.result_fields and batch.result_fields['fss_number']:\n continue\n if batch.data and 'inn' not in batch.data:\n continue\n\n batch_id = batch.id\n inn = batch.data['inn']\n fss_response = requests.get(\"http://fz122.fss.ru/index.php?service=28&inn=%s\" % str(inn))\n if fss_response.status_code != 200:\n logger.warn(\"Failed to get fss data for batch %s with inn %s\" % (batch_id, str(inn)))\n continue\n else:\n # parse the data\n content = fss_response.text\n root = html5lib.parse(content, treebuilder='lxml', namespaceHTMLElements=False)\n tds = root.xpath('//tr[@bgcolor=\"#BFD6C8\"]/td')\n if len(tds) != 6:\n logger.warn(\n \"Failed to locate proper cell in table; batch %s with inn %s\" % (batch_id, inn))\n continue\n else:\n fss_number = tds[3].text\n if not fss_number.isdigit():\n logger.warn(\n \"Fss number is not digit: %s ; batch %s with inn %s\" % (fss_number, batch_id, inn))\n continue\n else:\n fss_number = int(fss_number)\n new_fields = copy(batch.result_fields)\n new_fields[\"fss_number\"] = fss_number\n DocumentBatchDbObject.query.filter_by(id=batch_id).update({'result_fields': new_fields})\n sqldb.session.commit()\n return True\n\n def run(self):\n self.logger.info(u\"Забор номеров ФСС\")\n self.logger.info(u'=' * 50)\n\n return GetFssNumberCommand.get_fss_number(self.logger)\n\n\nclass SetDateCommand(BaseManageCommand):\n NAME = \"set_date\"\n\n def run(self):\n self.logger.info(u\"Set date\")\n batch_id = get_single(u'batch id: ')\n for doc in BatchDocumentDbObject.query.filter_by(batch_id=batch_id, deleted=False):\n if doc.data and 'doc_date' in doc.data:\n d = doc.data\n d['doc_date'] = datetime(2015, 6, 13)\n BatchDocumentDbObject.query.filter_by(id=doc.id).update({'data':d})\n self.logger.info('updated %s' % doc.document_type)\n sqldb.session.commit()\n\n\ndef _check_id(obj_type, obj_id):\n type_cls_map = {\n 'person': PrivatePersonDbObject,\n 'company': CompanyDbObject\n }\n\n obj = type_cls_map[obj_type].query.filter_by(id=obj_id).scalar()\n return obj is not None\n\n\ndef _get_invalid_refs(data):\n result = []\n if isinstance(data, dict):\n if len(data) == 2 and '_id' in data and 'type' in data:\n if not _check_id(data['type'], data['_id']):\n result.append(data)\n for subkey, subval in data.items():\n result.extend(_get_invalid_refs(subval))\n elif isinstance(data, list) or isinstance(data, tuple):\n for i in data:\n result.extend(_get_invalid_refs(i))\n return result\n\n\nclass FindInvalidRefsCommand(BaseManageCommand):\n NAME = \"find_invalid_refs\"\n\n def run(self):\n self.logger.info(u\"Looking for invalid references to persons/companies\")\n invalid_refs = {}\n invalid_refs_set = set()\n for batch in DocumentBatchDbObject.query.filter_by(deleted=False, _broken=False):\n data = batch.data\n docs = batch._documents\n\n invalid_refs[batch.id] = {}\n r = _get_invalid_refs(data or {}) or []\n if r:\n invalid_refs[batch.id][None] = r\n for i in r:\n invalid_refs_set.add(i['_id'])\n for doc in docs:\n r = _get_invalid_refs(doc.data or {}) or []\n if r:\n invalid_refs[batch.id][doc.id] = r\n for i in r:\n 
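# (annotation) collect the ids of dangling person/company references so that a\n                    # de-duplicated summary can be printed once every batch has been scanned\n                    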
invalid_refs_set.add(i['_id'])\n if not invalid_refs[batch.id]:\n del invalid_refs[batch.id]\n\n print(json.dumps(invalid_refs, indent=1))\n print(json.dumps(list(invalid_refs_set), indent=1))\n\n\ndef _replace_ids(data, replace_map):\n if isinstance(data, basestring):\n if data in replace_map:\n return replace_map[data]\n return data\n if isinstance(data, list) or isinstance(data, tuple):\n result = []\n for i in data:\n result.append(_replace_ids(i, replace_map))\n return result\n if isinstance(data, dict):\n result = {}\n for k, v in data.items():\n result[k] = _replace_ids(v, replace_map)\n return result\n return data\n" }, { "alpha_fraction": 0.7313311696052551, "alphanum_fraction": 0.7573052048683167, "avg_line_length": 36.33333206176758, "blob_id": "435d359e3239418340498c413f7f18811541cce7", "content_id": "4907c619552e0648a7c3164a3f4fe0b7f8dde900", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1232, "license_type": "no_license", "max_line_length": 89, "num_lines": 33, "path": "/app/services/osago/documents/general_doc_fields.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom fw.documents.fields.complex_doc_fields import ObjectRefField\nfrom fw.documents.fields.general_doc_fields import general_doc_field, GeneralField\nfrom fw.documents.fields.simple_doc_fields import DocTextField, DocEnumField, DocIntField\n\n\n@general_doc_field\nclass DriveObject(GeneralField):\n name = DocTextField(max_length=70, required=False)\n surname = DocTextField(max_length=70, required=False)\n patronymic = DocTextField(max_length=70, required=False)\n sex = DocEnumField(enum_cls='GenderEnum')\n\n\n@general_doc_field\nclass CarWithDriver(GeneralField):\n driver = DriveObject()\n car_brand = DocTextField(max_length=100, required=False)\n car_number = DocTextField(max_length=10, required=False)\n\n@general_doc_field\nclass DocLawSuitDocPageCount(GeneralField):\n page = DocEnumField(enum_cls='OsagoDocTypeEnum')\n pagecount = DocIntField(min_val=0, max_val=1000, required=True)\n\n@general_doc_field\nclass CourtLawsuitDocInfo(GeneralField):\n doc_name = DocEnumField(enum_cls='CourtLawsuitDocEnum')\n originals = DocIntField(min_val=0, max_val=1000)\n copies = DocIntField(min_val=0, max_val=1000)\n title = DocTextField()\n pagecount = DocIntField(min_val=0, max_val=1000)\n" }, { "alpha_fraction": 0.5996252298355103, "alphanum_fraction": 0.605871319770813, "avg_line_length": 34.57777786254883, "blob_id": "f4aecdd2a49b704dd9cc4aeca33bfd549d97c1cd", "content_id": "954fe1ed317a6576f74cc1c680f7ca5031a4de29", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1601, "license_type": "no_license", "max_line_length": 115, "num_lines": 45, "path": "/app/deployment_migrations/migration_list/20151007_pay_for_current_osago.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.db_fields import DocumentBatchDbObject\nfrom fw.documents.enums import DocumentBatchTypeEnum\nfrom services.pay.models import PayInfoObject, PurchaseServiceType\n\n\ndef forward(config, logger):\n logger.debug(u\"Add payments for currently existing osago batches\")\n\n for batch in DocumentBatchDbObject.query.filter_by(_broken=False, deleted=False,\n batch_type=DocumentBatchTypeEnum.DBT_OSAGO, status=\"claim\"):\n new_pay_info = PayInfoObject(\n user=batch._owner,\n 
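# (annotation) pay_record_id=0 and payment_provider=0 below look like placeholder\n            # values: the migration only needs to mark OSAGO part 1 as already paid\n            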
batch=batch,\n pay_record_id=0,\n payment_provider=0,\n service_type=PurchaseServiceType.OSAGO_PART1\n )\n sqldb.session.add(new_pay_info)\n\n for batch in DocumentBatchDbObject.query.filter_by(_broken=False, deleted=False,\n batch_type=DocumentBatchTypeEnum.DBT_OSAGO, status=\"court\"):\n new_pay_info = PayInfoObject(\n user=batch._owner,\n batch=batch,\n pay_record_id=0,\n payment_provider=0,\n service_type=PurchaseServiceType.OSAGO_PART1\n )\n sqldb.session.add(new_pay_info)\n\n new_pay_info = PayInfoObject(\n user=batch._owner,\n batch=batch,\n pay_record_id=0,\n payment_provider=0,\n service_type=PurchaseServiceType.OSAGO_PART2\n )\n sqldb.session.add(new_pay_info)\n sqldb.session.commit()\n\ndef rollback(config, logger):\n pass\n" }, { "alpha_fraction": 0.6975945234298706, "alphanum_fraction": 0.7010309100151062, "avg_line_length": 25.454545974731445, "blob_id": "fd09dbfd60c8a78e8a901e93be175b5290412ea2", "content_id": "4adf485361fc1fef733cbd7d44f13cd85dbba6c5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 291, "license_type": "no_license", "max_line_length": 58, "num_lines": 11, "path": "/app/fw/plugins/__init__.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n_plugins_library = dict()\n\ndef get_plugin(plugin_id):\n return _plugins_library[plugin_id]\n\ndef register(plugin_id, plugin_module, class_loader=None):\n assert class_loader\n plugin_module.register(class_loader)\n _plugins_library[plugin_id] = plugin_module\n" }, { "alpha_fraction": 0.5147748589515686, "alphanum_fraction": 0.5263778567314148, "avg_line_length": 33.10679626464844, "blob_id": "57e317cbde145e4e171d4bdafe776ca6946ba071", "content_id": "1fe05bd129ac64629c7bb56b365495cdcd1f4e13", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 76643, "license_type": "no_license", "max_line_length": 139, "num_lines": 2163, "path": "/jb_tests/test_pack/test_docs.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom datetime import datetime, timedelta\nimport urllib\n\nfrom flask import json, current_app\n\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.address_enums import (RFRegionsEnum, StreetTypeEnum, HouseTypeEnum)\nfrom fw.documents.db_fields import (DocumentBatchDbObject, CompanyDbObject,\n PrivatePersonDbObject, BatchDocumentDbObject)\nfrom fw.documents.enums import DocumentBatchTypeEnum, BatchStatusEnum, PersonDocumentTypeEnum, UserDocumentStatus\nfrom fw.documents.enums import DocumentTypeEnum\nfrom fw.documents.fields.doc_fields import UserDocument\nfrom fw.documents.fields.simple_doc_fields import DocTextField\nfrom fw.storage.file_storage import FileStorage\nfrom fw.storage.models import FileObject\nfrom services.llc_reg.documents.enums import CompanyStarterCapitalTypeEnum\nfrom services.llc_reg.documents.enums import FounderTypeEnum\nfrom services.test_svc import TestSvcManager\nfrom test_api import authorized\nfrom test_pack.base_batch_test import BaseBatchTestCase\n\n\nclass DocsTestCase(BaseBatchTestCase):\n def setUp(self):\n super(DocsTestCase, self).setUp()\n self.maxDiff = None\n\n @authorized()\n def test_delete_batch_document(self):\n with self.app.app_context():\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n data={\n \"a\": 1\n },\n _owner=self.user\n )\n sqldb.session.add(batch)\n sqldb.session.commit()\n\n batch2 = 
DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n data={\n \"a\": 2\n },\n _owner=self.user\n )\n sqldb.session.add(batch2)\n sqldb.session.commit()\n batch2_id = batch2.id\n\n batch3 = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n data={\n \"a\": 3\n },\n _owner=self.user\n )\n sqldb.session.add(batch3)\n sqldb.session.commit()\n\n result = self.test_client.post('/batch/delete/', data={'batch_id': batch2.id})\n self.assertEqual(result.status_code, 200)\n\n data = json.loads(result.data)\n self.assertEqual(data['result'], True)\n\n self.assertEqual(DocumentBatchDbObject.query.filter_by(deleted=False).count(), 2)\n self.assertIsNotNone(DocumentBatchDbObject.query.filter_by(id=batch.id).first())\n self.assertIsNone(DocumentBatchDbObject.query.filter_by(id=batch2_id, deleted=False).first())\n self.assertIsNotNone(DocumentBatchDbObject.query.filter_by(id=batch3.id).first())\n\n @authorized()\n def test_delete_batch_document(self):\n with self.app.app_context():\n batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP,\n data={\n \"a\": 1\n },\n _owner=self.user\n )\n sqldb.session.add(batch)\n sqldb.session.commit()\n\n result = self.test_client.get('/batch/')\n self.assertEqual(result.status_code, 200)\n\n data = json.loads(result.data)\n a = 1\n\n @authorized()\n def test_make_docs_from_data(self):\n batch_manager = TestSvcManager()\n\n short_name = DocTextField()\n short_name.parse_raw_value(u\"Наименование\")\n current_batch_data_fields = {\n 'short_name': short_name\n }\n\n new_short_name = DocTextField()\n new_short_name.parse_raw_value(u\"Наименование 2\")\n new_batch_fields = {\n 'short_name': new_short_name\n }\n\n current_batch_db_model = DocumentBatchDbObject(\n _owner=self.user,\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={},\n )\n\n new_field_set, docs_list, changed_field_names = batch_manager.make_docs_for_new_data(\n current_batch_data_fields,\n new_batch_fields,\n current_batch_db_model\n )\n\n self.assertEqual(changed_field_names, {\"short_name\"})\n\n self.assertIn('short_name', new_field_set)\n self.assertIsInstance(new_field_set['short_name'], DocTextField)\n self.assertEqual(new_field_set['short_name'], u\"Наименование 2\")\n\n self.assertEqual(len(docs_list), 1)\n doc = docs_list[0]\n self.assertIsInstance(doc, UserDocument)\n fields = doc.data.value\n self.assertEqual(len(fields), 1)\n self.assertEqual(fields['short_name'], u\"Наименование 2\")\n\n def test_make_docs_from_unchanged(self):\n batch_manager = TestSvcManager()\n\n short_name = DocTextField()\n short_name.parse_raw_value(u\"Наименование\")\n current_batch_data_fields = {\n 'short_name': short_name\n }\n\n new_short_name = DocTextField()\n new_short_name.parse_raw_value(u\"Наименование\")\n new_batch_fields = {\n 'short_name': new_short_name\n }\n\n current_batch_db_model = DocumentBatchDbObject(\n _owner=self.user,\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={},\n )\n\n new_field_set, docs_list, changed_field_names = batch_manager.make_docs_for_new_data(\n current_batch_data_fields,\n new_batch_fields,\n current_batch_db_model\n )\n\n self.assertEqual(changed_field_names, set())\n\n self.assertIn('short_name', new_field_set)\n self.assertIsInstance(new_field_set['short_name'], DocTextField)\n self.assertEqual(new_field_set['short_name'], u\"Наименование\")\n\n self.assertEqual(len(docs_list), 1)\n doc = docs_list[0]\n self.assertIsInstance(doc, UserDocument)\n fields = doc.data.value\n 
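# (annotation) although no field changed, the manager is still expected to return\n        # exactly one rebuilt document carrying the unchanged value\n        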
self.assertEqual(len(fields), 1)\n self.assertEqual(fields['short_name'], u\"Наименование\")\n\n @authorized()\n def test_update_batch(self):\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={},\n _owner=self.user,\n status=BatchStatusEnum.BS_NEW,\n paid=False\n )\n sqldb.session.add(new_batch)\n sqldb.session.commit()\n\n data = {\n \"short_name\": u\"Новое наименование\"\n }\n\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': new_batch.id,\n 'batch': json.dumps({\n \"data\": data,\n \"metadata\": {\n u\"some\": \"data\"\n },\n })\n })\n self.assertEqual(result.status_code, 200)\n\n self.assertEqual(len(new_batch._documents), 1)\n doc = new_batch._documents[0]\n self.assertEqual(doc.data, {\n \"short_name\": u\"Новое наименование\"\n })\n self.assertEqual(doc.status, UserDocumentStatus.DS_NEW)\n\n @authorized()\n def test_add_docs_on_update(self):\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={\n \"short_name\": u\"Старое наименование\"\n },\n _owner=self.user,\n status=BatchStatusEnum.BS_NEW,\n paid=False\n )\n sqldb.session.add(new_batch)\n\n first_doc = BatchDocumentDbObject(\n batch=new_batch,\n document_type=DocumentTypeEnum.DT_TEST_DOC_1,\n data={\n \"short_name\": u\"Старое наименование\"\n }\n )\n sqldb.session.add(first_doc)\n sqldb.session.commit()\n\n data = {\n \"short_name\": u\"создай второй документ\"\n }\n\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': new_batch.id,\n 'batch': json.dumps({\n \"data\": data\n })\n })\n self.assertEqual(result.status_code, 200)\n\n self.assertEqual(len(new_batch._documents), 2)\n doc = new_batch._documents[0]\n self.assertEqual(doc.data, {\n \"short_name\": u\"создай второй документ\"\n })\n self.assertEqual(doc.status, UserDocumentStatus.DS_NEW)\n\n doc = new_batch._documents[1]\n self.assertEqual(doc.data, {\n \"short_name\": u\"создай второй документ\"\n })\n self.assertEqual(doc.status, UserDocumentStatus.DS_NEW)\n\n @authorized()\n def test_remove_docs_on_update(self):\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={\n \"short_name\": u\"создай второй документ\"\n },\n _owner=self.user,\n status=BatchStatusEnum.BS_NEW,\n paid=False\n )\n sqldb.session.add(new_batch)\n\n first_doc = BatchDocumentDbObject(\n batch=new_batch,\n document_type=DocumentTypeEnum.DT_TEST_DOC_1,\n data={\n \"short_name\": u\"создай второй документ\"\n }\n )\n sqldb.session.add(first_doc)\n\n second_doc = BatchDocumentDbObject(\n batch=new_batch,\n document_type=DocumentTypeEnum.DT_TEST_DOC_2,\n data={\n \"short_name\": u\"создай второй документ\"\n }\n )\n sqldb.session.add(second_doc)\n sqldb.session.commit()\n\n data = {\n \"short_name\": u\"не создавай второй документ\"\n }\n\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': new_batch.id,\n 'batch': json.dumps({\n \"data\": data\n })\n })\n self.assertEqual(result.status_code, 200)\n\n self.assertEqual(len(new_batch._documents), 1)\n doc = new_batch._documents[0]\n self.assertEqual(doc.data, {\n \"short_name\": u\"не создавай второй документ\"\n })\n self.assertEqual(doc.status, UserDocumentStatus.DS_NEW)\n\n @authorized()\n def test_update_docs_on_update(self):\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={\n \"short_name\": u\"Старое наименование\"\n },\n _owner=self.user,\n status=BatchStatusEnum.BS_NEW,\n paid=False\n )\n 
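# (annotation) the update posted below should re-use the existing document row; the\n        # final assertion checks that the rendered document keeps its original id\n        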
sqldb.session.add(new_batch)\n\n first_doc = BatchDocumentDbObject(\n batch=new_batch,\n document_type=DocumentTypeEnum.DT_TEST_DOC_1,\n data={\n \"short_name\": u\"Старое наименование\"\n }\n )\n sqldb.session.add(first_doc)\n sqldb.session.commit()\n obj_id = first_doc.id\n\n data = {\n \"short_name\": u\"Новое наименование\"\n }\n\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': new_batch.id,\n 'batch': json.dumps({\n \"data\": data\n })\n })\n self.assertEqual(result.status_code, 200)\n\n self.assertEqual(len(new_batch._documents), 1)\n doc = new_batch._documents[0]\n self.assertEqual(doc.data, {\n \"short_name\": u\"Новое наименование\"\n })\n self.assertEqual(doc.status, UserDocumentStatus.DS_NEW)\n self.assertEqual(doc.id, obj_id)\n\n @authorized()\n def test_update_edited_batch(self):\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={\n \"short_name\": u\"Старое наименование\"\n },\n _owner=self.user,\n status=BatchStatusEnum.BS_EDITED,\n paid=False\n )\n sqldb.session.add(new_batch)\n\n first_doc = BatchDocumentDbObject(\n batch=new_batch,\n document_type=DocumentTypeEnum.DT_TEST_DOC_1,\n data={\n \"short_name\": u\"Старое наименование\"\n }\n )\n sqldb.session.add(first_doc)\n sqldb.session.commit()\n\n data = {\n \"short_name\": u\"Новое наименование\"\n }\n\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': new_batch.id,\n 'batch': json.dumps({\n \"data\": data\n })\n })\n self.assertEqual(result.status_code, 200)\n\n self.assertEqual(len(new_batch._documents), 1)\n doc = new_batch._documents[0]\n self.assertEqual(doc.data, {\n \"short_name\": u\"Новое наименование\"\n })\n self.assertEqual(doc.status, UserDocumentStatus.DS_NEW)\n\n @authorized()\n def test_validation_errors_during_update_edited_batch(self):\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={\n \"short_name\": u\"Старое наименование\"\n },\n _owner=self.user,\n status=BatchStatusEnum.BS_EDITED,\n paid=False\n )\n sqldb.session.add(new_batch)\n\n first_doc = BatchDocumentDbObject(\n batch=new_batch,\n document_type=DocumentTypeEnum.DT_TEST_DOC_1,\n data={\n \"short_name\": u\"Старое наименование\"\n }\n )\n sqldb.session.add(first_doc)\n sqldb.session.commit()\n\n data = {\n \"short_name\": u\"Новое наименование, но очень очень длинное\"\n }\n\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': new_batch.id,\n 'batch': json.dumps({\n \"data\": data\n })\n })\n self.assertEqual(result.status_code, 200)\n\n self.assertEqual(new_batch.error_info, {\n u'error_ext': [{\n u'error_code': 5,\n u'field': u'short_name'\n }]\n })\n self.assertEqual(len(new_batch._documents), 1)\n doc = new_batch._documents[0]\n self.assertEqual(doc.data, {\n \"short_name\": u\"Новое наименование, но очень очень длинное\"\n })\n self.assertEqual(doc.status, UserDocumentStatus.DS_NEW)\n\n @authorized()\n def test_finalize(self):\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={\n \"short_name\": u\"Старое наименование\"\n },\n _owner=self.user,\n status=BatchStatusEnum.BS_EDITED,\n paid=False\n )\n sqldb.session.add(new_batch)\n\n first_doc = BatchDocumentDbObject(\n batch=new_batch,\n document_type=DocumentTypeEnum.DT_TEST_DOC_1,\n _owner=self.user,\n data={\n \"short_name\": u\"Старое наименование\"\n }\n )\n sqldb.session.add(first_doc)\n sqldb.session.commit()\n\n result = self.test_client.post('/batch/finalise/', data={\n 'batch_id': 
new_batch.id,\n })\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)['result']\n self.assertTrue(data)\n\n @authorized()\n def test_render_deferred_docs(self):\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={\n \"short_name\": u\"создай второй документ\"\n },\n _owner=self.user,\n status=BatchStatusEnum.BS_NEW,\n paid=False\n )\n sqldb.session.add(new_batch)\n\n first_doc = BatchDocumentDbObject(\n batch=new_batch,\n document_type=DocumentTypeEnum.DT_TEST_DOC_1,\n _owner=self.user,\n data={\n \"short_name\": u\"создай второй документ\"\n }\n )\n sqldb.session.add(first_doc)\n\n second_doc = BatchDocumentDbObject(\n batch=new_batch,\n document_type=DocumentTypeEnum.DT_TEST_DOC_2,\n _owner=self.user,\n data={\n \"short_name\": u\"создай второй документ\"\n }\n )\n sqldb.session.add(second_doc)\n sqldb.session.commit()\n\n result = self.test_client.post('/batch/finalise/', data={\n 'batch_id': new_batch.id,\n })\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)['result']\n self.assertTrue(data)\n\n @authorized()\n def test_render_with_finalized_entities(self):\n # todo: 1. create entities\n person = PrivatePersonDbObject(\n _owner=self.user,\n name=u\"СтароеИмя\",\n surname=u\"СтараяФамилия\",\n birthdate=datetime.now() - timedelta(days=365 * 30),\n sex='male',\n birthplace=u\"Неизвестно где\"\n )\n sqldb.session.add(person)\n\n company_person = PrivatePersonDbObject(\n _owner=self.user,\n name=u\"СтароеИмя2\",\n surname=u\"СтараяФамилия2\",\n birthdate=datetime.now() - timedelta(days=365 * 30),\n sex='female',\n birthplace=u\"Неизвестно где2\"\n )\n sqldb.session.add(company_person)\n sqldb.session.commit()\n\n company = CompanyDbObject(\n _owner=self.user,\n inn=u\"6514008400\",\n ogrn=u\"1086507000029\",\n kpp=u\"651401001\",\n full_name=u\"Полное наименование\",\n short_name=u\"Краткое наименование\",\n address={\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\"\n },\n general_manager={\n '_id': company_person.id,\n 'type': 'person'\n }\n )\n sqldb.session.add(company)\n sqldb.session.commit()\n\n pp1_id = person.id\n pp2_id = company_person.id\n co_id = company.id\n\n # todo: 2. 
finalize\n\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={\n \"short_name\": u\"Старое наименование\"\n },\n _owner=self.user,\n status=BatchStatusEnum.BS_NEW,\n paid=False\n )\n sqldb.session.add(new_batch)\n\n first_doc = BatchDocumentDbObject(\n batch=new_batch,\n document_type=DocumentTypeEnum.DT_TEST_DOC_1,\n data={\n \"short_name\": u\"Старое наименование\"\n }\n )\n sqldb.session.add(first_doc)\n sqldb.session.commit()\n\n data = {\n \"short_name\": u\"Новое наименование\",\n \"general_manager\": \"%s_person\" % person.id,\n \"some_db_object\": \"%s_company\" % company.id\n }\n\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': new_batch.id,\n 'batch': json.dumps({\n \"data\": data\n })\n })\n self.assertEqual(result.status_code, 200)\n\n self.assertEqual(len(new_batch._documents), 2)\n doc = new_batch._documents[0]\n self.assertEqual(doc.data, {\n \"short_name\": u\"Новое наименование\"\n })\n self.assertEqual(doc.status, UserDocumentStatus.DS_NEW)\n\n doc = new_batch._documents[1]\n self.assertEqual(doc.data, {\n u'general_manager': {\n u'_id': person.id,\n u'type': u'person'\n },\n u'some_db_object': {\n u'_id': company.id,\n u'type': u'company'\n }\n })\n self.assertEqual(doc.status, UserDocumentStatus.DS_NEW)\n\n result = self.test_client.post('/batch/finalise/', data={\n 'batch_id': new_batch.id,\n })\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)['result']\n self.assertTrue(data)\n\n # todo: 3. change entities\n\n person.name = u\"НовоеИмя\"\n company.full_name = u\"НовоеНаименование\"\n company_person.name = u\"НовоеИмя2\"\n\n sqldb.session.commit()\n\n # todo: 4. get batch\n result = self.test_client.get('/batch/?batch_id=%s' % new_batch.id)\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)['result']\n\n # todo: 5. 
check that batch data contain original entities\n pp_copy = PrivatePersonDbObject.query.filter_by(_copy_id=pp1_id).first()\n self.assertIsNone(pp_copy)\n\n pp2_copy = PrivatePersonDbObject.query.filter_by(_copy_id=pp2_id).first()\n self.assertIsNone(pp2_copy)\n\n cc_copy = CompanyDbObject.query.filter_by(_copy_id=co_id).first()\n self.assertIsNone(cc_copy)\n\n\n @authorized()\n def test_definalize(self):\n person = PrivatePersonDbObject(\n _owner=self.user,\n name=u\"СтароеИмя\",\n surname=u\"СтараяФамилия\",\n birthdate=datetime.now() - timedelta(days=365 * 30),\n sex='male',\n birthplace=u\"Неизвестно где\"\n )\n sqldb.session.add(person)\n\n company_person = PrivatePersonDbObject(\n _owner=self.user,\n name=u\"СтароеИмя2\",\n surname=u\"СтараяФамилия2\",\n birthdate=datetime.now() - timedelta(days=365 * 30),\n sex='female',\n birthplace=u\"Неизвестно где2\"\n )\n sqldb.session.add(company_person)\n sqldb.session.commit()\n\n company = CompanyDbObject(\n _owner=self.user,\n inn=u\"6514008400\",\n ogrn=u\"1086507000029\",\n kpp=u\"651401001\",\n full_name=u\"Полное наименование\",\n short_name=u\"Краткое наименование\",\n address={\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\"\n },\n general_manager= {\n '_id': company_person.id,\n 'type': 'person'\n }\n )\n sqldb.session.add(company)\n sqldb.session.commit()\n\n pp1_id = person.id\n co_id = company.id\n\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={\n \"short_name\": u\"Старое наименование\"\n },\n _owner=self.user,\n status=BatchStatusEnum.BS_EDITED,\n paid=False\n )\n sqldb.session.add(new_batch)\n sqldb.session.commit()\n\n data = {\n \"short_name\": u\"Новое наименование\",\n \"general_manager\": \"%s_person\" % person.id,\n \"some_db_object\": \"%s_company\" % company.id\n }\n\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': new_batch.id,\n 'batch': json.dumps({\n \"data\": data\n })\n })\n self.assertEqual(result.status_code, 200)\n\n result = self.test_client.post('/batch/finalise/', data={\n 'batch_id': new_batch.id,\n })\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)['result']\n self.assertTrue(data)\n\n self.assertEqual(CompanyDbObject.query.count(), 1)\n self.assertEqual(PrivatePersonDbObject.query.count(), 2)\n\n result = self.test_client.post('/batch/unfinalise/', data={\n 'batch_id': new_batch.id,\n })\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)['result']\n self.assertTrue(data)\n\n self.assertEqual(CompanyDbObject.query.count(), 1)\n self.assertEqual(PrivatePersonDbObject.query.count(), 2)\n\n result = self.test_client.get('/batch/?batch_id=%s' % new_batch.id)\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)['result']\n\n for key in ('metadata', 'name', 'all_docs'):\n del data['batches'][0][key]\n\n self.assertEqual(data, {\n u'batches': [{\n u'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n u'creation_date': new_batch.creation_date.strftime(\"%Y-%m-%dT%H:%M:%S\"),\n u'data': {\n u'general_manager': u'%s_person' % pp1_id,\n u'short_name': u'Новое наименование',\n u'some_db_object': u'%s_company' % co_id\n },\n u'finalisation_date': new_batch.finalisation_date.strftime(\"%Y-%m-%dT%H:%M:%S\"),\n u'id': new_batch.id,\n u'paid': u'false',\n u'result_fields': {\n u'name': u'Новое наименование'\n },\n 
u'status': u'edited'\n }],\n u'count': 1,\n u'total': 1\n })\n\n @authorized()\n def test_delete_old_files_on_rerender(self):\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={\n \"short_name\": u\"Старое наименование\"\n },\n _owner=self.user,\n status=BatchStatusEnum.BS_EDITED,\n paid=False\n )\n sqldb.session.add(new_batch)\n\n first_doc = BatchDocumentDbObject(\n batch=new_batch,\n document_type=DocumentTypeEnum.DT_TEST_DOC_1,\n _owner=self.user,\n data={\n \"short_name\": u\"Старое наименование\"\n }\n )\n sqldb.session.add(first_doc)\n sqldb.session.commit()\n\n result = self.test_client.post('/batch/finalise/', data={\n 'batch_id': new_batch.id,\n })\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)['result']\n self.assertTrue(data)\n\n self.assertEqual(FileObject.query.count(), 1)\n self.assertEqual(BatchDocumentDbObject.query.count(), 1)\n\n result = self.test_client.post('/batch/unfinalise/', data={\n 'batch_id': new_batch.id,\n })\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)['result']\n self.assertTrue(data)\n\n self.assertEqual(FileObject.query.count(), 1)\n self.assertEqual(BatchDocumentDbObject.query.count(), 1)\n\n result = self.test_client.post('/batch/finalise/', data={\n 'batch_id': new_batch.id,\n })\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)['result']\n self.assertTrue(data)\n\n self.assertEqual(FileObject.query.count(), 1)\n self.assertEqual(BatchDocumentDbObject.query.count(), 1)\n\n @authorized()\n def test_get_batch_data_according_to_api(self):\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={\n \"short_name\": u\"Старое наименование\"\n },\n _owner=self.user,\n status=BatchStatusEnum.BS_EDITED,\n paid=False\n )\n sqldb.session.add(new_batch)\n\n first_doc = BatchDocumentDbObject(\n batch=new_batch,\n document_type=DocumentTypeEnum.DT_TEST_DOC_1,\n _owner=self.user,\n data={\n \"short_name\": u\"Старое наименование\"\n }\n )\n sqldb.session.add(first_doc)\n sqldb.session.commit()\n\n result = self.test_client.post('/batch/finalise/', data={\n 'batch_id': new_batch.id,\n })\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)['result']\n self.assertTrue(data)\n\n self.assertEqual(FileObject.query.count(), 1)\n self.assertEqual(BatchDocumentDbObject.query.count(), 1)\n\n result = self.test_client.get('/batch/')\n result = self.test_client.get('/batch/?batch_id=%s' % new_batch.id)\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)['result']\n\n file_obj = FileObject.query.first()\n file_url = u\"http://service.zz/storage/%s\" % urllib.quote((unicode(file_obj.id) + '/' + u'Тестовый документ 1.pdf').encode('utf8'))\n self.assertEqual(data, {\n u'batches': [{\n u'id': new_batch.id,\n u'creation_date': new_batch.creation_date.strftime(\"%Y-%m-%dT%H:%M:%S\"),\n u'finalisation_date': new_batch.finalisation_date.strftime(\"%Y-%m-%dT%H:%M:%S\"),\n u'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n u'status': u'finalised',\n u'paid': u'false',\n u'all_docs': [{\n u\"file_link\": file_url,\n u\"document_type\": DocumentTypeEnum.DT_TEST_DOC_1,\n u\"document_id\": first_doc.id,\n u\"caption\": u'Тестовый документ 1'\n }],\n u'data': {\n u'short_name': u'Старое наименование',\n },\n u'name': u'Тестовый батч',\n u'metadata': {},\n u'result_fields': {}\n }],\n u'count': 1,\n u'total': 1\n })\n\n @authorized()\n def test_update_metadata(self):\n 
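# The 'metadata' section sent to /batch/update/ next to 'data' is expected to be persisted into the batch's _metadata field.\n        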
new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={},\n _owner=self.user,\n status=BatchStatusEnum.BS_NEW,\n paid=False\n )\n sqldb.session.add(new_batch)\n sqldb.session.commit()\n\n data = {\n \"short_name\": u\"Новое наименование\"\n }\n\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': new_batch.id,\n 'batch': json.dumps({\n \"data\": data,\n \"metadata\": {\n \"some\": \"data\"\n }\n })\n })\n self.assertEqual(result.status_code, 200)\n self.assertEqual(new_batch._metadata, {\n \"some\": \"data\"\n })\n\n @authorized()\n def test_update_only_metadata(self):\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={},\n _owner=self.user,\n status=BatchStatusEnum.BS_NEW,\n paid=False\n )\n sqldb.session.add(new_batch)\n sqldb.session.commit()\n\n data = {\n \"short_name\": u\"Новое наименование\"\n }\n\n result = self.test_client.post('/batch/update_metadata/', data={\n 'batch_id': new_batch.id,\n 'batch': json.dumps({\n \"data\": data,\n \"metadata\": {\n \"some\": \"data\"\n }\n })\n })\n self.assertEqual(result.status_code, 200)\n self.assertEqual(new_batch._metadata, {\n \"some\": \"data\"\n })\n\n @authorized()\n def test_definalize_forced(self):\n person = PrivatePersonDbObject(\n _owner=self.user,\n name=u\"СтароеИмя\",\n surname=u\"СтараяФамилия\",\n birthdate=datetime.now() - timedelta(days=365 * 30),\n sex='male',\n birthplace=u\"Неизвестно где\"\n )\n sqldb.session.add(person)\n\n company_person = PrivatePersonDbObject(\n _owner=self.user,\n name=u\"СтароеИмя2\",\n surname=u\"СтараяФамилия2\",\n birthdate=datetime.now() - timedelta(days=365 * 30),\n sex='male',\n birthplace=u\"Неизвестно где2\"\n )\n sqldb.session.add(company_person)\n sqldb.session.commit()\n\n company = CompanyDbObject(\n _owner=self.user,\n inn=u\"6514008400\",\n ogrn=u\"1086507000029\",\n kpp=u\"651401001\",\n full_name=u\"Полное наименование\",\n short_name=u\"Краткое наименование\",\n address={\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\"\n },\n general_manager= {\n '_id': company_person.id,\n 'type': 'person'\n }\n )\n sqldb.session.add(company)\n sqldb.session.commit()\n\n pp1_id = person.id\n co_id = company.id\n\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={\n \"short_name\": u\"Старое наименование\"\n },\n _owner=self.user,\n status=BatchStatusEnum.BS_EDITED,\n paid=False\n )\n sqldb.session.add(new_batch)\n sqldb.session.commit()\n\n data = {\n \"short_name\": u\"Новое наименование\",\n \"general_manager\": \"%s_person\" % person.id,\n \"some_db_object\": \"%s_company\" % company.id\n }\n\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': new_batch.id,\n 'batch': json.dumps({\n \"data\": data\n })\n })\n self.assertEqual(result.status_code, 200)\n\n result = self.test_client.post('/batch/finalise/', data={\n 'batch_id': new_batch.id,\n })\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)['result']\n self.assertTrue(data)\n\n self.assertEqual(CompanyDbObject.query.count(), 1)\n self.assertEqual(PrivatePersonDbObject.query.count(), 2)\n\n PrivatePersonDbObject.query.filter_by(_copy=None).update({\n 'name': u\"НовоеИмя\"\n })\n sqldb.session.commit()\n\n result = self.test_client.get('/batch/?batch_id=%s' % new_batch.id)\n 
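# The batch stays finalised, so the response below is expected to still reflect the data captured at finalisation, not the forced person update above.\n        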
self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)['result']\n\n for key in ('metadata', 'name', 'all_docs'):\n del data['batches'][0][key]\n\n self.assertEqual(data, {\n u'batches': [{\n u'batch_type': DocumentBatchTypeEnum.DBT_TEST_TYPE,\n u'creation_date': new_batch.creation_date.strftime(\"%Y-%m-%dT%H:%M:%S\"),\n u'data': {\n u'general_manager': u'%s_person' % person.id,\n u'short_name': u'Новое наименование',\n u'some_db_object': u'%s_company' % company.id\n },\n u'finalisation_date': new_batch.finalisation_date.strftime(\"%Y-%m-%dT%H:%M:%S\"),\n u'id': new_batch.id,\n u'paid': u'false',\n u'result_fields': {\n u'name': u'Новое наименование'\n },\n u'status': u'finalised'\n }],\n u'count': 1,\n u'total': 1\n })\n\n result = self.test_client.post('/batch/unfinalise/', data={\n 'batch_id': new_batch.id,\n })\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIn('result', data)\n self.assertNotIn('error', data)\n\n @authorized()\n def test_validation_errors_on_update(self):\n person = PrivatePersonDbObject(\n _owner=self.user,\n name=u\"СтароеИмя\",\n surname=u\"СтараяФамилия\",\n birthdate=datetime.now() - timedelta(days=365 * 30),\n sex='male',\n birthplace=u\"Неизвестно где\"\n )\n sqldb.session.add(person)\n\n company_person = PrivatePersonDbObject(\n _owner=self.user,\n name=u\"СтароеИмя2\",\n surname=u\"СтараяФамилия2\",\n birthdate=datetime.now() - timedelta(days=365 * 30),\n sex='female',\n birthplace=u\"Неизвестно где2\"\n )\n sqldb.session.add(company_person)\n sqldb.session.commit()\n\n company = CompanyDbObject(\n _owner=self.user,\n inn=u\"6514008400\",\n ogrn=u\"1086507000029\",\n kpp=u\"651401001\",\n full_name=u\"Полное наименование\",\n short_name=u\"Краткое наименование\",\n address={\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\"\n },\n general_manager= {\n '_id': company_person.id,\n 'type': 'person'\n }\n )\n sqldb.session.add(company)\n sqldb.session.commit()\n\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={\n \"short_name\": u\"Старое наименование\"\n },\n _owner=self.user,\n status=BatchStatusEnum.BS_EDITED,\n paid=False\n )\n sqldb.session.add(new_batch)\n\n first_doc = BatchDocumentDbObject(\n batch=new_batch,\n document_type=DocumentTypeEnum.DT_TEST_DOC_1,\n data={\n \"short_name\": u\"Старое наименование\"\n }\n )\n sqldb.session.add(first_doc)\n sqldb.session.commit()\n\n person.name = u\"\"\n person.address = {\n \"region\": u\"непонятно какой\",\n \"street\": u\"Мотоциклистов\",\n \"house\": \"4\"\n }\n\n company.full_name = u\"\"\n company.inn = 123\n\n sqldb.session.commit()\n\n data = {\n \"short_name\": u\"Новое наименование, но очень очень очень очень очень очень длинное\",\n \"general_manager\": \"%s_person\" % person.id,\n \"some_db_object\": \"%s_company\" % company.id\n }\n\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': new_batch.id,\n 'batch': json.dumps({\n \"data\": data\n })\n })\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIn('result', data)\n self.assertNotIn('error', data)\n for key in ('batch_type', 'creation_date', 'id', 'metadata', 'name', 'paid', 'status', 'all_docs'):\n del data['result'][key]\n self.assertEqual(data['result'], {\n u'data': {\n u'general_manager': u'%s_person' % person.id,\n u'short_name': 
u'Новое наименование, но очень очень очень очень очень очень длинное',\n u'some_db_object': u'%s_company' % company.id\n },\n u'error_info': {\n u'error_ext': [{\n u'error_code': 5,\n u'field': u'short_name'\n }, {\n u'error_code': 5,\n u'field': u'general_manager.name'\n }, {\n u'error_code': 4,\n u'field': u'general_manager.address.index'\n }, {\n u'error_code': 4,\n u'field': u'general_manager.address.house_type'\n }, {\n u'error_code': 4,\n u'field': u'general_manager.address.region'\n }, {\n u'error_code': 5,\n u'field': u'some_db_object.inn'\n }]\n },\n u'result_fields': {\n u'name': u'Новое наименование, но очень очень очень очень очень очень длинное'\n }\n })\n\n @authorized()\n def test_validation_errors_on_finalise(self):\n person = PrivatePersonDbObject(\n _owner=self.user,\n name=u\"\",\n surname=u\"\",\n birthdate=datetime.now() - timedelta(days=365 * 30),\n sex='male',\n birthplace=u\"Неизвестно где\"\n )\n sqldb.session.add(person)\n\n company_person = PrivatePersonDbObject(\n _owner=self.user,\n name=u\"СтароеИмя2\",\n surname=u\"СтараяФамилия2\",\n birthdate=datetime.now() - timedelta(days=365 * 30),\n sex='invalid'\n )\n sqldb.session.add(company_person)\n sqldb.session.commit()\n\n company = CompanyDbObject(\n _owner=self.user,\n inn=u\"6514008400\",\n ogrn=u\"1086507000029\",\n kpp=u\"651401001\",\n full_name=u\"Полное наименование\",\n short_name=u\"Краткое наименование\",\n address={\n \"index\": 198209,\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\"\n },\n general_manager= {\n '_id': company_person.id,\n 'type': 'person'\n }\n )\n sqldb.session.add(company)\n sqldb.session.commit()\n\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={\n \"short_name\": u\"Старое наименование и уже очень очень очень очень очень длинное\",\n \"general_manager\": {\n \"type\": \"person\",\n \"_id\": person.id\n },\n \"some_db_object\": {\n \"type\": \"company\",\n \"_id\": company.id\n }\n },\n _owner=self.user,\n status=BatchStatusEnum.BS_EDITED,\n paid=False\n )\n sqldb.session.add(new_batch)\n\n first_doc = BatchDocumentDbObject(\n batch=new_batch,\n document_type=DocumentTypeEnum.DT_TEST_DOC_1,\n data={\n \"short_name\": u\"Старое наименование и уже очень очень очень очень очень длинное\"\n }\n )\n sqldb.session.add(first_doc)\n\n second_doc = BatchDocumentDbObject(\n batch=new_batch,\n document_type=DocumentTypeEnum.DT_TEST_DOC_3,\n data={\n \"general_manager\": {\n \"type\": \"person\",\n \"_id\": person.id\n },\n \"some_db_object\": {\n \"type\": \"company\",\n \"_id\": company.id\n }\n }\n )\n sqldb.session.add(second_doc)\n sqldb.session.commit()\n\n result = self.test_client.post('/batch/finalise/', data={\n 'batch_id': new_batch.id\n })\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIn('result', data)\n self.assertNotIn('error', data)\n self.assertFalse(data['result'])\n\n result = self.test_client.get('/batch/?batch_id=%s' % new_batch.id)\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)['result']\n for key in ('batch_type', 'creation_date', 'id', 'metadata', 'name', 'paid', 'status', 'all_docs'):\n del data['batches'][0][key]\n self.assertEqual(data['batches'][0], {\n u'data': {\n u'general_manager': u'%s_person' % person.id,\n u'short_name': u'Старое наименование и уже очень очень очень очень очень длинное',\n u'some_db_object': u'%s_company' % company.id\n },\n u'error_info': {\n u'error_ext': 
[{\n                u'error_code': 5,\n                u'field': u'short_name'\n            }, {\n                u'error_code': 5,\n                u'field': u'general_manager.surname'\n            }, {\n                u'error_code': 5,\n                u'field': u'general_manager.name'\n            }, {\n                u'error_code': 4,\n                u'field': u'some_db_object.general_manager.birthplace'\n            }, {\n                u'error_code': 4,\n                u'field': u'some_db_object.general_manager.sex'\n            }, {\n                u'error_code': 4,\n                u'field': u'some_db_object.address.region'\n            }]\n            },\n            u'result_fields': {}\n        })\n\n    @authorized()\n    def test_do_not_update_service_fields(self):\n        new_batch = DocumentBatchDbObject(\n            batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n            data={},\n            _owner=self.user,\n            status=BatchStatusEnum.BS_NEW,\n            paid=False\n        )\n        sqldb.session.add(new_batch)\n        sqldb.session.commit()\n\n        original_creation_date = new_batch.creation_date\n        original_finalization_date = new_batch.finalisation_date\n\n        data = {\n            \"short_name\": u\"Новое наименование\",\n            \"creation_date\": datetime.utcnow(),\n            \"finalisation_date\": datetime.utcnow(),\n            \"paid\": True,\n            \"error_info\": {},\n            \"deleted\": True,\n            \"batch_type\": \"invalid\"\n        }\n\n        result = self.test_client.post('/batch/update/', data={\n            'batch_id': new_batch.id,\n            'batch': json.dumps({\n                \"data\": data\n            })\n        })\n        self.assertEqual(result.status_code, 200)\n\n        self.assertEqual(len(new_batch._documents), 1)\n        doc = new_batch._documents[0]\n        self.assertEqual(doc.data, {\n            \"short_name\": u\"Новое наименование\"\n        })\n        self.assertEqual(doc.status, UserDocumentStatus.DS_NEW)\n\n        new_batch = DocumentBatchDbObject.query.first()\n        self.assertEqual(new_batch.creation_date, original_creation_date)\n        self.assertEqual(new_batch.finalisation_date, original_finalization_date)\n        self.assertEqual(new_batch.paid, False)\n        self.assertEqual(new_batch.error_info, None)\n        self.assertEqual(new_batch.deleted, False)\n        self.assertEqual(new_batch.batch_type, DocumentBatchTypeEnum.DBT_TEST_TYPE)\n\n    @authorized()\n    def test_finalize_wrong_batch_status(self):\n        new_batch = DocumentBatchDbObject(\n            batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n            data={},\n            _owner=self.user,\n            status=BatchStatusEnum.BS_FINALISED,\n            paid=False\n        )\n        sqldb.session.add(new_batch)\n        sqldb.session.commit()\n\n        result = self.test_client.post('/batch/finalise/', data={\n            'batch_id': new_batch.id\n        })\n        self.assertEqual(result.status_code, 200)\n        data = json.loads(result.data)\n        self.assertIn('result', data)\n        self.assertTrue(data['result'])\n        self.assertNotIn('error', data)\n\n        new_batch_finalising = DocumentBatchDbObject(\n            batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n            data={},\n            _owner=self.user,\n            status=BatchStatusEnum.BS_BEING_FINALISED,\n            paid=False\n        )\n        sqldb.session.add(new_batch_finalising)\n        sqldb.session.commit()\n\n        result = self.test_client.post('/batch/finalise/', data={\n            'batch_id': new_batch_finalising.id\n        })\n        self.assertEqual(result.status_code, 200)\n        data = json.loads(result.data)\n        self.assertNotIn('result', data)\n        self.assertEqual(data['error'], {\n            u'message': u'Пакет документов не может быть финализирован',\n            u'code': 203\n        })\n\n        new_batch_defin = DocumentBatchDbObject(\n            batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n            data={},\n            _owner=self.user,\n            status=BatchStatusEnum.BS_DEFINALIZING,\n            paid=False\n        )\n        sqldb.session.add(new_batch_defin)\n        sqldb.session.commit()\n\n        result = self.test_client.post('/batch/finalise/', data={\n            'batch_id': new_batch_defin.id\n        })\n        self.assertEqual(result.status_code, 200)\n        data = json.loads(result.data)\n        self.assertNotIn('result', data)\n        self.assertEqual(data['error'], {\n            
u'message': u'Пакет документов не может быть финализирован',\n u'code': 203\n })\n\n @authorized()\n def test_render_single_document(self):\n person = PrivatePersonDbObject(\n _owner=self.user,\n name=u\"СтароеИмя\",\n surname=u\"СтараяФамилия\",\n birthdate=datetime.now() - timedelta(days=365 * 30),\n sex='male',\n birthplace=u\"Неизвестно где\"\n )\n sqldb.session.add(person)\n\n company_person = PrivatePersonDbObject(\n _owner=self.user,\n name=u\"СтароеИмя2\",\n surname=u\"СтараяФамилия2\",\n birthdate=datetime.now() - timedelta(days=365 * 30),\n sex='female',\n birthplace=u\"Неизвестно где2\"\n )\n sqldb.session.add(company_person)\n\n company = CompanyDbObject(\n _owner=self.user,\n inn=u\"6514008400\",\n ogrn=u\"1086507000029\",\n kpp=u\"651401001\",\n full_name=u\"Полное наименование\",\n short_name=u\"Краткое наименование\",\n address={\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\"\n },\n general_manager= {\n '_id': company_person.id,\n 'type': 'person'\n }\n )\n sqldb.session.add(company)\n sqldb.session.commit()\n\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={\n \"short_name\": u\"Старое наименование\",\n \"general_manager\": {\n \"type\": \"person\",\n \"_id\": person.id\n },\n \"some_db_object\": {\n \"type\": \"company\",\n \"_id\": company.id\n }\n },\n _owner=self.user,\n status=BatchStatusEnum.BS_NEW,\n paid=False\n )\n sqldb.session.add(new_batch)\n\n first_doc = BatchDocumentDbObject(\n batch=new_batch,\n document_type=DocumentTypeEnum.DT_TEST_DOC_1,\n data={\n \"short_name\": u\"Старое наименование\"\n },\n _owner = self.user\n )\n sqldb.session.add(first_doc)\n sqldb.session.commit()\n\n result = self.test_client.post('/batch/finalise/', data={\n 'batch_id': new_batch.id\n })\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertTrue(data['result'])\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': new_batch.id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_TEST_DOC_3])\n })\n self.assertEqual(result.status_code, 200)\n\n @authorized()\n def test_render_single_document_fail_on_make_document(self):\n person = PrivatePersonDbObject(\n _owner=self.user,\n name=u\"СтароеИмя\",\n surname=u\"СтараяФамилия\",\n birthdate=datetime.now() - timedelta(days=365 * 30),\n sex='male',\n birthplace=u\"Неизвестно где\",\n living_country_code=0\n )\n sqldb.session.add(person)\n\n company_person = PrivatePersonDbObject(\n _owner=self.user,\n name=u\"СтароеИмя2\",\n surname=u\"СтараяФамилия2\",\n birthdate=datetime.now() - timedelta(days=365 * 30),\n sex='female',\n birthplace=u\"Неизвестно где2\"\n )\n sqldb.session.add(company_person)\n\n company = CompanyDbObject(\n _owner=self.user,\n inn=u\"6514008400\",\n ogrn=u\"1086507000029\",\n kpp=u\"651401001\",\n full_name=u\"Полное наименование\",\n short_name=u\"Краткое наименование\",\n address={\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\"\n },\n general_manager= {\n '_id': company_person.id,\n 'type': 'person'\n }\n )\n sqldb.session.add(company)\n sqldb.session.commit()\n\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={\n \"short_name\": u\"Старое 
наименование\",\n \"general_manager\": {\n \"type\": \"person\",\n \"_id\": person.id\n },\n \"some_db_object\": {\n \"type\": \"company\",\n \"_id\": company.id\n }\n },\n _owner=self.user,\n status=BatchStatusEnum.BS_NEW,\n paid=False\n )\n sqldb.session.add(new_batch)\n\n first_doc = BatchDocumentDbObject(\n batch=new_batch,\n document_type=DocumentTypeEnum.DT_TEST_DOC_1,\n _owner=self.user,\n data={\n \"short_name\": u\"Старое наименование\"\n }\n )\n sqldb.session.add(first_doc)\n sqldb.session.commit()\n\n result = self.test_client.post('/batch/finalise/', data={\n 'batch_id': new_batch.id\n })\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertTrue(data['result'])\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': new_batch.id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_TEST_DOC_3])\n })\n self.assertEqual(result.status_code, 200)\n self.assertEqual(BatchDocumentDbObject.query.count(), 2)\n self.assertEqual(BatchDocumentDbObject.query.filter_by(batch=new_batch).count(), 2)\n new_doc = BatchDocumentDbObject.query.filter(BatchDocumentDbObject.id != first_doc.id).one()\n self.assertEqual(new_doc.status, UserDocumentStatus.DS_RENDERING_FAILED)\n self.assertEqual(new_doc._celery_task_id, None)\n self.assertEqual(new_doc._celery_task_started, None)\n\n @authorized()\n def test_render_single_document_that_can_not_be_created(self):\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={\n \"short_name\": u\"Старое наименование\"\n },\n _owner=self.user,\n status=BatchStatusEnum.BS_NEW,\n paid=False\n )\n sqldb.session.add(new_batch)\n\n first_doc = BatchDocumentDbObject(\n batch=new_batch,\n document_type=DocumentTypeEnum.DT_TEST_DOC_1,\n _owner=self.user,\n data={\n \"short_name\": u\"Старое наименование\"\n }\n )\n sqldb.session.add(first_doc)\n sqldb.session.commit()\n\n result = self.test_client.post('/batch/finalise/', data={\n 'batch_id': new_batch.id\n })\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertTrue(data['result'])\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': new_batch.id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_TEST_DOC_3])\n })\n self.assertEqual(result.status_code, 200)\n self.assertEqual(BatchDocumentDbObject.query.count(), 2)\n self.assertEqual(BatchDocumentDbObject.query.filter_by(batch=new_batch).count(), 2)\n new_doc = BatchDocumentDbObject.query.filter(BatchDocumentDbObject.id != first_doc.id).one()\n self.assertEqual(new_doc.status, UserDocumentStatus.DS_RENDERING_FAILED)\n self.assertEqual(new_doc._celery_task_id, None)\n self.assertEqual(new_doc._celery_task_started, None)\n\n @authorized()\n def test_render_single_document_fail_on_validate(self):\n person = PrivatePersonDbObject(\n _owner=self.user,\n name=u\"СтароеИмя\",\n surname=u\"СтараяФамилия\",\n birthdate=datetime.now() - timedelta(days=365 * 30),\n sex='female',\n birthplace=u\"Неизвестно где\"\n )\n sqldb.session.add(person)\n sqldb.session.commit()\n\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={\n \"short_name\": u\"Старое наименование\",\n \"general_manager\": {\n \"type\": \"person\",\n \"_id\": person.id\n }\n },\n _owner=self.user,\n status=BatchStatusEnum.BS_NEW,\n paid=False\n )\n sqldb.session.add(new_batch)\n\n first_doc = BatchDocumentDbObject(\n batch=new_batch,\n document_type=DocumentTypeEnum.DT_TEST_DOC_1,\n _owner=self.user,\n 
data={\n \"short_name\": u\"Старое наименование\"\n }\n )\n sqldb.session.add(first_doc)\n sqldb.session.commit()\n\n result = self.test_client.post('/batch/finalise/', data={\n 'batch_id': new_batch.id\n })\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertTrue(data['result'])\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': new_batch.id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_TEST_DOC_3])\n })\n self.assertEqual(result.status_code, 200)\n new_batch = DocumentBatchDbObject.query.filter_by(id=new_batch.id).one()\n self.assertEqual(BatchDocumentDbObject.query.count(), 2)\n self.assertEqual(BatchDocumentDbObject.query.filter_by(batch=new_batch).count(), 2)\n new_doc = BatchDocumentDbObject.query.filter(BatchDocumentDbObject.id != first_doc.id).one()\n self.assertEqual(new_doc.status, UserDocumentStatus.DS_RENDERING_FAILED)\n self.assertEqual(new_doc._celery_task_id, None)\n self.assertEqual(new_doc._celery_task_started, None)\n self.assertEqual(new_batch.error_info, {\n u'error_ext': [{\n u'error_code': 5,\n u'field': u'test_doc_validation'\n }]\n })\n\n @authorized()\n def test_render_single_document_merge_error_info(self):\n person = PrivatePersonDbObject(\n _owner=self.user,\n name=u\"СтароеИмя\",\n surname=u\"СтараяФамилия\",\n birthdate=datetime.now() - timedelta(days=365 * 30),\n sex='female',\n birthplace=u\"Неизвестно где\"\n )\n sqldb.session.add(person)\n sqldb.session.commit()\n\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={\n \"short_name\": u\"Старое наименование\",\n \"general_manager\": {\n \"type\": \"person\",\n \"_id\": person.id\n }\n },\n _owner=self.user,\n status=BatchStatusEnum.BS_NEW,\n paid=False\n )\n sqldb.session.add(new_batch)\n\n first_doc = BatchDocumentDbObject(\n batch=new_batch,\n document_type=DocumentTypeEnum.DT_TEST_DOC_1,\n _owner=self.user,\n data={\n \"short_name\": u\"Старое наименование\"\n }\n )\n sqldb.session.add(first_doc)\n sqldb.session.commit()\n\n result = self.test_client.post('/batch/finalise/', data={\n 'batch_id': new_batch.id\n })\n\n new_batch.error_info = {\n 'error_ext': [{\n 'field': 'short_name',\n 'error_code': 5\n }]\n }\n sqldb.session.commit()\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertTrue(data['result'])\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': new_batch.id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_TEST_DOC_3])\n })\n self.assertEqual(result.status_code, 200)\n new_batch = DocumentBatchDbObject.query.filter_by(id=new_batch.id).one()\n self.assertEqual(BatchDocumentDbObject.query.count(), 2)\n self.assertEqual(BatchDocumentDbObject.query.filter_by(batch=new_batch).count(), 2)\n new_doc = BatchDocumentDbObject.query.filter(BatchDocumentDbObject.id != first_doc.id).one()\n self.assertEqual(new_batch.error_info, {\n u'error_ext': [{\n u'error_code': 5,\n u'field': u'short_name'\n }, {\n u'error_code': 5,\n u'field': u'test_doc_validation'\n }]\n })\n self.assertEqual(new_doc.status, UserDocumentStatus.DS_RENDERING_FAILED)\n self.assertEqual(new_doc._celery_task_id, None)\n self.assertEqual(new_doc._celery_task_started, None)\n\n @authorized()\n def test_get_document_being_rendered(self):\n person = PrivatePersonDbObject(\n _owner=self.user,\n name=u\"СтароеИмя\",\n surname=u\"СтараяФамилия\",\n birthdate=datetime.now() - timedelta(days=365 * 30),\n sex='male',\n birthplace=u\"Неизвестно где\"\n )\n 
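# Persist the person that the batch references as 'general_manager' before kicking off rendering.\n        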
sqldb.session.add(person)\n sqldb.session.commit()\n\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={\n \"short_name\": u\"Старое наименование\",\n \"general_manager\": {\n \"type\": \"person\",\n \"_id\": person.id\n }\n },\n _owner=self.user,\n status=BatchStatusEnum.BS_NEW,\n paid=False\n )\n sqldb.session.add(new_batch)\n\n first_doc = BatchDocumentDbObject(\n batch=new_batch,\n document_type=DocumentTypeEnum.DT_TEST_DOC_1,\n _owner=self.user,\n data={\n \"short_name\": u\"Старое наименование\"\n }\n )\n sqldb.session.add(first_doc)\n sqldb.session.commit()\n\n result = self.test_client.post('/batch/finalise/', data={\n 'batch_id': new_batch.id\n })\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertTrue(data['result'])\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': new_batch.id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_TEST_DOC_3])\n })\n self.assertEqual(result.status_code, 200)\n\n result = self.test_client.get('/batch/render_document/state/?batch_id=%s&document_types=[\"%s\"]' %\n (new_batch.id, DocumentTypeEnum.DT_TEST_DOC_3))\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n file_obj = BatchDocumentDbObject.query.filter(BatchDocumentDbObject.id != first_doc.id).one().file\n self.assertIsNotNone(file_obj)\n self.assertEqual(data, [{\n u'document_type': u'test_doc_3',\n u'links': {\n u'jpeg': [],\n u'pdf': FileStorage.get_url(file_obj, self.config)\n },\n u'state': u'rendered'\n }])\n\n @authorized()\n def test_render_single_document_doc_builder_failure(self):\n pass # todo:\n\n @authorized()\n def test_render_single_document_soft_time_limit_exceeded(self):\n pass # todo:\n\n @authorized()\n def test_regenerate_document(self):\n person = PrivatePersonDbObject(\n _owner=self.user,\n name=u\"СтароеИмя\",\n surname=u\"СтараяФамилия\",\n birthdate=datetime.now() - timedelta(days=365 * 30),\n sex='male',\n birthplace=u\"Неизвестно где\"\n )\n sqldb.session.add(person)\n\n company_person = PrivatePersonDbObject(\n _owner=self.user,\n name=u\"СтароеИмя2\",\n surname=u\"СтараяФамилия2\",\n birthdate=datetime.now() - timedelta(days=365 * 30),\n sex='female',\n birthplace=u\"Неизвестно где2\"\n )\n sqldb.session.add(company_person)\n sqldb.session.commit()\n\n company = CompanyDbObject(\n _owner=self.user,\n inn=u\"6514008400\",\n ogrn=u\"1086507000029\",\n kpp=u\"651401001\",\n full_name=u\"Полное наименование\",\n short_name=u\"Краткое наименование\",\n address={\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n \"street_type\": StreetTypeEnum.STT_BOULEVARD,\n \"street\": u\"Мотоциклистов\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"4\"\n },\n general_manager= {\n '_id': company_person.id,\n 'type': 'person'\n }\n )\n sqldb.session.add(company)\n sqldb.session.commit()\n\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_TEST_TYPE,\n data={\n \"short_name\": u\"Старое наименование\",\n \"general_manager\": {\n \"type\": \"person\",\n \"_id\": person.id\n },\n \"some_db_object\": {\n \"type\": \"company\",\n \"_id\": company.id\n }\n },\n _owner=self.user,\n status=BatchStatusEnum.BS_NEW,\n paid=False\n )\n sqldb.session.add(new_batch)\n\n first_doc = BatchDocumentDbObject(\n batch=new_batch,\n document_type=DocumentTypeEnum.DT_TEST_DOC_1,\n _owner = self.user,\n data={\n \"short_name\": u\"Старое наименование\"\n }\n )\n sqldb.session.add(first_doc)\n sqldb.session.commit()\n\n 
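# Finalise the batch, render DT_TEST_DOC_3, then request the same render again: regeneration is expected to leave the document in DS_RENDERED.\n        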
result = self.test_client.post('/batch/finalise/', data={\n 'batch_id': new_batch.id\n })\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertTrue(data['result'])\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': new_batch.id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_TEST_DOC_3])\n })\n self.assertEqual(result.status_code, 200)\n\n self.assertEqual(BatchDocumentDbObject.query.count(), 2)\n self.assertEqual(BatchDocumentDbObject.query.filter_by(batch=new_batch).count(), 2)\n new_doc = BatchDocumentDbObject.query.filter(BatchDocumentDbObject.id != first_doc.id).one()\n self.assertEqual(new_doc.status, UserDocumentStatus.DS_RENDERED)\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': new_batch.id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_TEST_DOC_3])\n })\n self.assertEqual(result.status_code, 200)\n\n self.assertEqual(BatchDocumentDbObject.query.count(), 2)\n self.assertEqual(BatchDocumentDbObject.query.filter_by(batch=new_batch).count(), 2)\n new_doc = BatchDocumentDbObject.query.filter(BatchDocumentDbObject.id != first_doc.id).one()\n self.assertEqual(new_doc.status, UserDocumentStatus.DS_RENDERED)\n\n @authorized()\n def test_render_doc_preview(self):\n pass\n\n @authorized()\n def test_finalize_not_my_batch(self):\n pass\n\n @authorized()\n def test_finalize_incomplete_batch(self):\n pass\n\n @authorized()\n def test_update_batch_in_status_render_failed_(self):\n pass\n\n @authorized()\n def test_cancel_finalization(self):\n pass\n\n @authorized()\n def test_generate_new_document_batch_finalised(self):\n pass\n\n @authorized()\n def test_generate_new_document_already_rendering(self):\n pass\n\n @authorized()\n def _test_async_render_separate_document(self):\n founder = PrivatePersonDbObject(**{\n \"_owner\": self.user,\n \"name\": u\"Прокл\",\n \"surname\": u\"Поликарпов\",\n \"inn\": \"781108730780\",\n \"birthdate\": datetime.now() - timedelta(days=365 * 30),\n \"birthplace\": u\"Россия, деревня Гадюкино\",\n \"passport\": {\n \"document_type\": PersonDocumentTypeEnum.PD_INTERNAL,\n \"series\": u\"1123\",\n \"number\": u\"192837\",\n \"issue_date\": datetime.now(),\n \"issue_depart\": u\"УМВД Гадюкинского района Гадюкинской области\",\n \"depart_code\": u\"111987\"\n },\n \"ogrnip\": \"123456789012345\",\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 198209,\n },\n })\n founder.insert(self.db)\n\n doc_data = {\n \"full_name\": u\"образовательное учреждение дополнительного образования\",\n \"short_name\": u\"Бокс\",\n \"doc_date\": datetime.now(),\n \"selected_secretary\": founder.id,\n \"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 123131,\n \"ifns\": 1234\n },\n u\"starter_capital\": {\n u\"capital_type\": CompanyStarterCapitalTypeEnum.CSC_USTAVNOY_CAPITAL,\n u\"value\": {\n \"currency\": \"RUB\",\n \"value\": \"12312.234234\"\n }\n },\n \"general_manager\": founder.id,\n \"general_manager_caption\": u\"разнорабочий\",\n \"selected_moderator\": founder.id,\n \"reg_responsible_person\": founder.id,\n \"founders\": [{\n \"founder_type\": FounderTypeEnum.FT_PERSON,\n \"person\": founder.id,\n \"share\": {\n \"type\": \"percent\",\n \"value\": 100\n },\n \"nominal_capital\": 7466,\n }],\n \"founder_applicant\": {\n \"founder_type\": FounderTypeEnum.FT_PERSON,\n \"person\": founder.id,\n \"share\": {\n \"type\": \"percent\",\n \"value\": 100\n },\n \"nominal_capital\": 7466,\n }\n }\n\n new_user_doc = UserDocument()\n 
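# Wrap the raw protocol data in a UserDocument, store it in a new batch, then drive the separate-document state/render API.\n        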
new_user_doc.parse_raw_value(dict(document_type=DocumentTypeEnum.DT_PROTOCOL, data=doc_data), None, False)\n\n doc_list = [\n new_user_doc.db_value()\n ]\n new_batch_db_object = DocumentBatchDbObject(documents=doc_list, batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW, _owner=self.user)\n new_batch_db_object.insert(current_app.db)\n\n result = self.test_client.get('/batch/document/state/?batch_id=%s&document_id=%s' % (\n unicode(new_batch_db_object.id), unicode(new_user_doc.id.db_value())))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n del result_data['result']['document_id']\n self.assertEqual(result_data, {u'result': {u'state': u'new', u'links': {u'pdf': None, u'jpeg': []}}})\n\n result = self.test_client.post('/batch/document/render/', data={'batch_id': unicode(new_batch_db_object.id),\n 'document_id': unicode(\n new_user_doc.id.db_value())})\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data, {u'result': True})\n\n result = self.test_client.get('/batch/document/state/?batch_id=%s&document_id=%s' % (\n unicode(new_batch_db_object.id), unicode(new_user_doc.id.db_value())))\n self.assertEqual(result.status_code, 200)\n result_data = json.loads(result.data)\n self.assertEqual(result_data['result']['state'], 'rendered')\n self.assertTrue(result_data['result']['links']['pdf'].startswith(u'http://service.zz/storage/'))\n\n @authorized()\n def test_fin_defin_cyclic_references(self):\n pass # todo:\n\n @authorized()\n def test_get_batches_with_prefetched_items(self):\n with self.app.app_context():\n batch = self.create_batch('_test', self.user)\n p1 = self.create_person(self.user, batch_id=batch.id)\n c1 = self.create_company(self.user, batch_id=batch.id)\n data = {\n 'general_manager': {\n '_id': p1.id,\n 'type': 'person'\n },\n 'some_db_object': {\n '_id': c1.id,\n 'type': 'company'\n }\n }\n p2 = self.create_person(self.user, batch_id=batch.id)\n c2 = self.create_company(self.user, batch_id=batch.id)\n data2 = {\n 'general_manager': {\n '_id': p1.id,\n 'type': 'person'\n },\n 'some_db_object': {\n '_id': c1.id,\n 'type': 'company'\n }\n }\n doc1 = self.create_document('test_doc_3', batch, data=data)\n doc2 = self.create_document('test_doc_3', batch, data=data2)\n\n result = self.test_client.get('/batch/')\n self.assertEqual(result.status_code, 200)\n\n" }, { "alpha_fraction": 0.6821305751800537, "alphanum_fraction": 0.6838487982749939, "avg_line_length": 24.30434799194336, "blob_id": "58fbb1ead84b5449248f7649e283c3c5b7d99d72", "content_id": "63c5e5232d261966b8ba6e57b41959b57b8ea9b3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 582, "license_type": "no_license", "max_line_length": 65, "num_lines": 23, "path": "/app/deployment_migrations/migration_list/20150723_add_table_for_celery_scheduler.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom fw.db.sql_base import db as sqldb\n\n\ndef forward(config, logger):\n logger.debug(u\"Add table for celery scheduler\")\n\n sqldb.session.close()\n sqldb.engine.execute(u\"\"\"CREATE TABLE celery_scheduled_task (\n id VARCHAR NOT NULL,\n task_name VARCHAR,\n created TIMESTAMP WITHOUT TIME ZONE NOT NULL,\n eta TIMESTAMP WITHOUT TIME ZONE NOT NULL,\n sent BOOLEAN,\n args JSONB,\n kwargs JSONB,\n PRIMARY KEY (id)\n)\"\"\")\n\n\ndef rollback(config, logger):\n sqldb.session.close()\n sqldb.engine.execute(\"DROP 
table celery_scheduled_task;\")\n" }, { "alpha_fraction": 0.5726495981216431, "alphanum_fraction": 0.5925925970077515, "avg_line_length": 34.099998474121094, "blob_id": "99b785e54d6c7b4b16ef4a68ea66da0429fd4251", "content_id": "f1cc122b82c456d2b9e54e9dc103bb122fe469c0", "detected_licenses": [], "is_generated": false, "is_vendor": true, "language": "Python", "length_bytes": 1053, "license_type": "no_license", "max_line_length": 103, "num_lines": 30, "path": "/app/fw/cache/cache_wrapper.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport memcache\n\n\nclass CacheWrapper(object):\n\n def __init__(self):\n self._cache = None\n\n def _call(self, f, *args, **kwargs):\n from flask import current_app\n try:\n return f(*args, **kwargs)\n except Exception, ex:\n print(u\"Failed to call memcached function %s with\" % unicode(f))\n self._cache = memcache.Client(['%s:11211' % current_app.config['MEMCACHED_HOST']], debug=0)\n raise\n\n def get(self, key):\n from flask import current_app\n if not self._cache:\n self._cache = memcache.Client(['%s:11211' % current_app.config['MEMCACHED_HOST']], debug=0)\n return self._call(self._cache.get, key)\n\n def set(self, key, val, time=0, min_compress_len=0):\n from flask import current_app\n if not self._cache:\n self._cache = memcache.Client(['%s:11211' % current_app.config['MEMCACHED_HOST']], debug=0)\n return self._call(self._cache.set, key, val, time=time, min_compress_len=min_compress_len)\n" }, { "alpha_fraction": 0.5407457947731018, "alphanum_fraction": 0.5494047999382019, "avg_line_length": 40.40696334838867, "blob_id": "148ffd1b35d18ff1dd3cb0349fdb9665c202b789", "content_id": "ad141ef94aa7d155c23b85ac6371c19f8448e86f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 76862, "license_type": "no_license", "max_line_length": 173, "num_lines": 1838, "path": "/jb_tests/test_pack/test_api.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom datetime import datetime, timedelta\nimport pickle\nimport random\nimport string\n\nfrom flask import json, current_app\nfrom bson.objectid import ObjectId\n\nfrom base_test_case import BaseTestCase, authorized, registered_user\nfrom fw.api import errors\nfrom fw.api.errors import UserNotFound, ActivationCodeExpiredOrInvalid\nfrom fw.api.sql_session_storage import Session\nfrom fw.auth.encrypt import encrypt_password\nfrom fw.auth.models import AuthUser, UserActivationLink, ConfirmationLinkTypeEnum, AuthorizationUrl\nfrom fw.auth.user_manager import UserManager\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.address_enums import RFRegionsEnum, HouseTypeEnum, FlatTypeEnum\nfrom fw.documents.address_enums import StreetTypeEnum\nfrom fw.documents.db_fields import PrivatePersonDbObject, CompanyDbObject, DocumentBatchDbObject, BatchDocumentDbObject\nfrom fw.documents.enums import CompanyTypeEnum, DocumentTypeEnum, BatchStatusEnum, DocumentBatchTypeEnum\nfrom services.ifns.data_model.fields import IfnsBooking\nfrom services.llc_reg.documents.enums import IfnsServiceEnum\nfrom services.notarius.data_model.models import NotariusBookingObject, NotariusObject\nfrom services.yurist.data_model.enums import YuristBatchCheckStatus\nfrom services.yurist.data_model.fields import YuristBatchCheck\n\n\nclass ApiTestCase(BaseTestCase):\n def setUp(self):\n super(ApiTestCase, self).setUp()\n\n def tearDown(self):\n super(ApiTestCase, self).tearDown()\n\n def 
_make_ua_link(self, use_chars, link_type, user_id=None, new_mobile=None, new_email=None):\n max_activation_link_length = self.config['max_activation_link_length']\n digital_activation_link_length = self.config['digital_activation_link_length']\n\n if use_chars:\n link_code = u''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(max_activation_link_length))\n else:\n link_code = u''.join(random.choice(string.digits) for _ in range(digital_activation_link_length))\n recovery_link = UserActivationLink(\n link_code=link_code,\n auth_user_id=user_id,\n link_type=link_type\n )\n if new_mobile is not None:\n recovery_link.new_mobile = new_mobile\n if new_email is not None:\n recovery_link.new_email = new_email\n\n return recovery_link\n\n @authorized(is_temporal=True)\n def test_logout_temp_user(self):\n result = self.test_client.post('/account/logout/')\n self.assertEqual(result.status_code, 403)\n\n def test_authorize_temporary_user(self):\n result = self.test_client.post('/account/login/temporal/')\n self.assertEqual(result.status_code, 200)\n\n user = AuthUser.query.first()\n self.assertTrue(user.temporal)\n\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n\n del result_data['result']['id']\n del result_data['result']['registration_date']\n self.assertEqual(result_data['result'], {\n u'email': u'',\n u'email_confirmed': False,\n u'mobile': u'',\n u'mobile_confirmed': False,\n u'person': {\n u'name': u'',\n u'patronymic': u'',\n u'surname': u''\n },\n u'facebook': None,\n u'vk': None,\n u'subscription': None,\n u'temporal': True,\n u'password_set': False,\n u'role': [u'user']\n })\n\n result = self.test_client.get('/account/profile/')\n self.assertEqual(result.status_code, 200)\n\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n\n del result_data['result']['id']\n del result_data['result']['registration_date']\n self.assertEqual(result_data['result'], {\n u'email': u'',\n u'email_confirmed': False,\n u'mobile': u'',\n u'mobile_confirmed': False,\n u'person': {\n u'name': u'',\n u'patronymic': u'',\n u'surname': u''\n },\n u'facebook': None,\n u'vk': None,\n u'subscription': None,\n u'temporal': True,\n u'password_set': False,\n u'role': [u'user']\n })\n\n def test_authorize_temporary_user_while_authorized_temporary_user(self):\n result = self.test_client.post('/account/login/temporal/')\n self.assertEqual(result.status_code, 200)\n\n user = AuthUser.query.first()\n self.assertTrue(user.temporal)\n\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n\n id1 = result_data['result']['id']\n del result_data['result']['id']\n del result_data['result']['registration_date']\n self.assertEqual(result_data['result'], {\n u'email': u'',\n u'email_confirmed': False,\n u'mobile': u'',\n u'mobile_confirmed': False,\n u'person': {\n u'name': u'',\n u'patronymic': u'',\n u'surname': u''\n },\n u'facebook': None,\n u'vk': None,\n u'subscription': None,\n u'temporal': True,\n u'password_set': False,\n u'role': [u'user']\n })\n\n result = self.test_client.post('/account/login/temporal/')\n self.assertEqual(result.status_code, 200)\n\n user = AuthUser.query.first()\n self.assertTrue(user.temporal)\n\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n\n self.assertEqual(id1, result_data['result']['id'])\n del 
result_data['result']['id']\n del result_data['result']['registration_date']\n self.assertEqual(result_data['result'], {\n u'email': u'',\n u'email_confirmed': False,\n u'mobile': u'',\n u'mobile_confirmed': False,\n u'person': {\n u'name': u'',\n u'patronymic': u'',\n u'surname': u''\n },\n u'facebook': None,\n u'vk': None,\n u'subscription': None,\n u'temporal': True,\n u'password_set': False,\n u'role': [u'user']\n })\n\n @authorized()\n def test_authorize_temporary_user_while_authorized_persistent_user(self):\n result = self.test_client.post('/account/login/temporal/')\n self.assertEqual(result.status_code, 200)\n\n user = AuthUser.query.first()\n self.assertFalse(user.temporal)\n\n result_data = json.loads(result.data)\n self.assertIn('result', result_data)\n self.assertNotIn('error', result_data)\n\n del result_data['result']['id']\n del result_data['result']['registration_date']\n self.assertEqual(result_data['result'], {\n u'email': u'[email protected]',\n u'email_confirmed': True,\n u'mobile': u'+79001112233',\n u'mobile_confirmed': True,\n u'person': {\n u'name': u'Name',\n u'surname': u'Surname',\n u'patronymic': u'Patronymic'\n },\n u'facebook': None,\n u'vk': None,\n u'subscription': None,\n u'temporal': False,\n u'password_set': True,\n u'role': [u'user']\n })\n\n def test_sign_up_email(self):\n with self.app.app_context():\n args = {\n 'name': u'Станислав'.encode('utf8'),\n 'email': '[email protected]',\n 'password': 'TestPassword123'\n }\n result = self.test_client.post('/account/create/', data=args)\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n uid = data['result']['id']\n self.assertEqual(data['result']['email'], '[email protected]')\n user = AuthUser.query.filter_by(uuid=uid).first()\n self.assertIsNotNone(user)\n self.assertFalse(user.email_confirmed)\n self.assertLess((datetime.utcnow() - user.signup_date).total_seconds(), 10)\n self.assertIsNotNone(user.last_login_date)\n self.assertTrue(user.enabled)\n\n # user_profile_obj = UserProfile.find_one(current_app.db, {'auth_user_id': user.id})\n # self.assertIsNotNone(user_profile_obj)\n\n self.assertEqual(len(self.mailer.mails), 1)\n\n user_activation_link = UserActivationLink.query.first()\n self.assertIsNotNone(user_activation_link)\n self.assertEqual(user_activation_link.auth_user_id, user.id)\n self.assertEqual(len(user_activation_link.link_code), self.config['max_activation_link_length'])\n self.assertTrue(user_activation_link.link_code.isalnum())\n self.assertLess((datetime.utcnow() - user_activation_link.creation_date).total_seconds(), 10)\n self.assertIsNone(user_activation_link.used_date)\n self.assertEqual(user_activation_link.new_email, '[email protected]')\n\n result = self.test_client.get('/account/profile/')\n\n self.assertEqual(result.status_code, 200)\n\n def test_get_profile_unauthorized(self):\n with self.app.app_context():\n result = self.test_client.get('/account/profile/')\n\n self.assertEqual(result.status_code, 403)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('error', data)\n error = data['error']\n self.assertIn('code', error)\n self.assertEqual(error['code'], 100)\n\n @authorized()\n def test_get_profile_authorized(self):\n with self.app.app_context():\n result = self.test_client.get('/account/profile/')\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n\n def 
test_sign_up_short_password(self):\n with self.app.app_context():\n args = {\n 'name': 'ТестовоеИмя',\n 'surname': 'ТестоваяФамилия',\n 'patronymic': 'ТестовоеОтчество',\n 'email': '[email protected]',\n 'password': 'shrtp'\n }\n result = self.test_client.post('/account/create/', data=args)\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('error', data)\n error = data['error']\n self.assertIn('code', error)\n self.assertEqual(error['code'], 109)\n\n def test_sign_up_simple_password(self):\n with self.app.app_context():\n args = {\n 'name': 'ТестовоеИмя',\n 'surname': 'ТестоваяФамилия',\n 'patronymic': 'ТестовоеОтчество',\n 'email': '[email protected]',\n 'password': 'longbutverysimple'\n }\n result = self.test_client.post('/account/create/', data=args)\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertNotIn('error', data)\n\n def test_sign_up_no_uppercase_in_password(self):\n with self.app.app_context():\n args = {\n 'name': 'ТестовоеИмя',\n 'surname': 'ТестоваяФамилия',\n 'patronymic': 'ТестовоеОтчество',\n 'email': '[email protected]',\n 'password': 'longbutverysimple123_'\n }\n result = self.test_client.post('/account/create/', data=args)\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertNotIn('error', data)\n\n def test_sign_up_no_email_no_phone(self):\n with self.app.app_context():\n args = {\n 'name': 'ТестовоеИмя',\n 'surname': 'ТестоваяФамилия',\n 'patronymic': 'ТестовоеОтчество',\n 'password': 'shrtpw'\n }\n result = self.test_client.post('/account/create/', data=args)\n\n self.assertEqual(result.status_code, 400)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('error', data)\n error = data['error']\n self.assertIn('code', error)\n self.assertEqual(error['code'], 4)\n\n def test_sign_up_invalid_email(self):\n with self.app.app_context():\n args = {\n 'name': 'ТестовоеИмя',\n 'surname': 'ТестоваяФамилия',\n 'patronymic': 'ТестовоеОтчество',\n 'password': 'shrtpw',\n 'email': 'invalid'\n }\n result = self.test_client.post('/account/create/', data=args)\n\n self.assertEqual(result.status_code, 400)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('error', data)\n error = data['error']\n self.assertIn('code', error)\n self.assertEqual(error['code'], 5)\n\n def test_sign_up_duplicate_email(self):\n with self.app.app_context():\n args = {\n 'name': 'ТестовоеИмя',\n 'surname': 'ТестоваяФамилия',\n 'patronymic': 'ТестовоеОтчество',\n 'email': '[email protected]',\n 'password': 'TestPassword123'\n }\n result = self.test_client.post('/account/create/', data=args)\n\n self.assertEqual(result.status_code, 200)\n self.test_client.post('/account/logout/', data={})\n result = self.test_client.post('/account/create/', data=args)\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('error', data)\n\n def test_sign_up_duplicate_activated_email(self):\n with self.app.app_context():\n args = {\n 'name': 'ТестовоеИмя',\n 'surname': 'ТестоваяФамилия',\n 'patronymic': 'ТестовоеОтчество',\n 'email': '[email protected]',\n 'password': 'TestPassword123'\n }\n result = self.test_client.post('/account/create/', data=args)\n\n self.assertEqual(result.status_code, 200)\n\n user_activation_link = UserActivationLink.query.first()\n user = AuthUser.query.first()\n\n args = 
{\n 'user_id': user.id,\n 'type': 'email',\n 'code': user_activation_link.link_code\n }\n result = self.test_client.post('/account/confirm/', data=args)\n self.assertEqual(result.status_code, 200)\n\n self.test_client.post('/account/logout/', data={})\n args = {\n 'name': 'ТестовоеИмя',\n 'surname': 'ТестоваяФамилия',\n 'patronymic': 'ТестовоеОтчество',\n 'email': '[email protected]',\n 'password': 'TestPassword123'\n }\n result = self.test_client.post('/account/create/', data=args)\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('error', data)\n\n @registered_user()\n def test_login_email(self):\n args = {'email': '[email protected]',\n 'password': 'TestPassword123'}\n\n result = self.test_client.post('/account/login/', data=args)\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n\n def test_logout(self):\n with self.app.app_context():\n args = {\n 'email': '[email protected]',\n 'password': 'TestPassword123'\n }\n self.test_client.post('/account/create/', data=args)\n user = AuthUser.query.first()\n\n user_activation_link = UserActivationLink.query.first()\n args = {\n 'user_id': user.id,\n 'type': 'email',\n 'code': user_activation_link.link_code\n }\n\n result = self.test_client.post('/account/confirm/', data=args)\n\n self.assertEqual(result.status_code, 200)\n\n args = {'email': '[email protected]',\n 'password': 'TestPassword123'}\n\n result = self.test_client.post('/account/login/', data=args)\n\n self.assertEqual(result.status_code, 200)\n\n #cookie = result.headers['Set-Cookie'].split('=')[1].split(';')[0]\n #self.test_client.set_cookie('localhost', u'_jbuid_', cookie)\n result = self.test_client.post('/account/logout/')\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n self.assertEqual(data['result'], 'OK')\n\n def test_logout_unauthorized(self):\n result = self.test_client.post('/account/logout/')\n self.assertEqual(result.status_code, 403)\n\n def test_activate_by_email_link(self):\n with self.app.app_context():\n args = {\n 'email': '[email protected]',\n 'password': 'TestPassword123'\n }\n result = self.test_client.post('/account/create/', data=args)\n\n self.assertEqual(result.status_code, 200)\n user_activation_link = UserActivationLink.query.first()\n self.assertIsNotNone(user_activation_link)\n\n user = AuthUser.query.first()\n self.assertIsNotNone(user)\n self.assertTrue(user.enabled)\n\n# user_profile_obj = UserProfile.find_one(current_app.db, {'auth_user_id': user.id})\n\n self.assertFalse(user.mobile_confirmed)\n self.assertFalse(user.email_confirmed)\n\n args = {\n 'user_id': user.uuid,\n 'type': 'email',\n 'code': user_activation_link.link_code\n }\n result = self.test_client.post('/account/confirm/', data=args)\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n self.assertTrue(data['result'])\n user = AuthUser.query.first()\n self.assertTrue(user.enabled)\n user_activation_link = UserActivationLink.query.first()\n self.assertIsNotNone(user_activation_link.used_date)\n self.assertLess((datetime.utcnow() - user_activation_link.used_date).total_seconds(), 10)\n\n self.assertFalse(user.mobile_confirmed)\n self.assertTrue(user.email_confirmed)\n\n # def test_activate_by_email_link_get_method(self):\n # with 
self.app.app_context():\n # args = {\n # 'email' : '[email protected]',\n # 'password' : 'TestPassword123'\n # }\n # result = self.test_client.post('/account/create/', data = args)\n #\n # self.assertEqual(result.status_code, 200)\n # user_activation_link = UserActivationLink.query.first()\n # self.assertIsNotNone(user_activation_link)\n #\n # user = AuthUser.query.first()\n # self.assertIsNotNone(user)\n # self.assertFalse(user.enabled)\n #\n # user_profile_obj = UserProfile.find_one(current_app.db, {'auth_user_id' : user.id})\n # self.assertIsNotNone(user_profile_obj)\n #\n # self.assertFalse(user.mobile_confirmed)\n # self.assertFalse(user.email_confirmed)\n #\n # result = self.test_client.get('/account/confirm/?user_id=%s&type=%s&code=%s' % (user.id, 'email', user_activation_link.link_code))\n #\n # self.assertEqual(result.status_code, 200)\n # data = json.loads(result.data)\n # self.assertIsNotNone(data)\n # self.assertIn('result', data)\n # self.assertTrue(data['result'])\n # user = AuthUser.query.first()\n # self.assertTrue(user.enabled)\n # user_activation_link = UserActivationLink.query.first()\n # self.assertIsNotNone(user_activation_link.used_date)\n # self.assertLess((user_activation_link.used_date - datetime.utcnow()).total_seconds(), 10)\n #\n # self.assertFalse(user.mobile_confirmed)\n # self.assertTrue(user.email_confirmed)\n\n # def test_activation_code_not_found(self):\n # with self.app.app_context():\n # args = {\n # 'email' : '[email protected]',\n # 'password' : 'TestPassword123'\n # }\n # result = self.test_client.post('/account/create/', data = args)\n #\n # self.assertEqual(result.status_code, 200)\n # user_activation_link = UserActivationLink.query.first()\n # self.assertIsNotNone(user_activation_link)\n #\n # user = AuthUser.query.first()\n # self.assertIsNotNone(user)\n # self.assertFalse(user.enabled)\n #\n # args = {\n # 'user_id' : user.id,\n # 'type' : 'email',\n # 'code' : 'A' * self.config['max_activation_link_length']\n # }\n # result = self.test_client.post('/account/confirm/', data = args)\n #\n # self.assertEqual(result.status_code, 200)\n # data = json.loads(result.data)\n # self.assertIsNotNone(data)\n # self.assertIn('error', data)\n # error = data['error']\n # self.assertIn('code', error)\n # self.assertEqual(error['code'], ActivationCodeExpiredOrInvalid.ERROR_CODE)\n #\n # user = AuthUser.query.first()\n # self.assertFalse(user.enabled)\n # user_activation_link = UserActivationLink.query.first()\n # self.assertIsNone(user_activation_link.used_date)\n\n # def test_activation_code_malformed(self):\n # with self.app.app_context():\n # args = {\n # 'email' : '[email protected]',\n # 'password' : 'TestPassword123'\n # }\n # result = self.test_client.post('/account/create/', data = args)\n #\n # self.assertEqual(result.status_code, 200)\n # user_activation_link = UserActivationLink.query.first()\n # self.assertIsNotNone(user_activation_link)\n #\n # user = AuthUser.query.first()\n # self.assertIsNotNone(user)\n # self.assertFalse(user.enabled)\n #\n # args = {\n # 'user_id' : user.id,\n # 'type' : 'email',\n # 'code' : 'xyz'\n # }\n # result = self.test_client.post('/account/confirm/', data = args)\n #\n # self.assertEqual(result.status_code, 400)\n #\n # user = AuthUser.query.first()\n # self.assertFalse(user.enabled)\n # user_activation_link = UserActivationLink.query.first()\n # self.assertIsNone(user_activation_link.used_date)\n\n # def test_email_not_activated(self):\n # with self.app.app_context():\n # args = {\n # 'email' : '[email protected]',\n 
# 'password' : 'TestPassword123'\n # }\n # self.test_client.post('/account/create/', data = args)\n #\n # args = {'email' : '[email protected]',\n # 'password' : 'TestPassword123'}\n #\n # result = self.test_client.post('/account/login/', data = args)\n #\n # data = json.loads(result.data)\n # self.assertIn('error', data)\n # error = data['error']\n # self.assertIn('code', error)\n # self.assertEqual(error['code'], errors.EmailIsNotConfirmed.ERROR_CODE)\n\n # def test_activate_already_activated(self):\n # with self.app.app_context():\n # args = {\n # 'email' : '[email protected]',\n # 'password' : 'TestPassword123'\n # }\n # result = self.test_client.post('/account/create/', data = args)\n #\n #\n # self.assertEqual(result.status_code, 200)\n # user_activation_link = UserActivationLink.query.first()\n # self.assertIsNotNone(user_activation_link)\n #\n # user = AuthUser.query.first()\n # self.assertIsNotNone(user)\n # self.assertFalse(user.enabled)\n #\n # args = {\n # 'user_id' : user.id,\n # 'type' : 'email',\n # 'code' : user_activation_link.link_code\n # }\n # result = self.test_client.post('/account/confirm/', data = args)\n #\n # self.assertEqual(result.status_code, 200)\n # user = AuthUser.query.first()\n # self.assertTrue(user.enabled)\n # user_activation_link = UserActivationLink.query.first()\n # self.assertIsNotNone(user_activation_link.used_date)\n # self.assertLess((user_activation_link.used_date - datetime.utcnow()).total_seconds(), 10)\n # result = self.test_client.post('/account/confirm/', data = args)\n # self.assertEqual(result.status_code, 200)\n # user = AuthUser.query.first()\n # self.assertTrue(user.enabled)\n # data = json.loads(result.data)\n # self.assertIsNotNone(data)\n # self.assertIn('error', data)\n # self.assertEqual(data['error']['code'], 108)\n #\n # def test_activate_by_alien_code(self):\n # with self.app.app_context():\n # args = {\n # 'email' : '[email protected]',\n # 'password' : 'TestPassword123'\n # }\n # result = self.test_client.post('/account/create/', data = args)\n #\n # alien_user = AuthUser()\n # alien_user.email = u'asdfasdfasdf'\n # alien_user.password = u'asdfasdfasdfasdf'\n # alien_user.signup_date = datetime.utcnow()\n # alien_user.insert(current_app.db)\n #\n # self.assertEqual(result.status_code, 200)\n # user_activation_link = UserActivationLink.query.first()\n # self.assertIsNotNone(user_activation_link)\n # user_activation_link.auth_user_id = alien_user.id\n # user_activation_link.save(current_app.db)\n #\n # user = AuthUser.query.first()\n # self.assertIsNotNone(user)\n # self.assertFalse(user.enabled)\n #\n # args = {\n # 'user_id' : user.id,\n # 'type' : 'email',\n # 'code' : user_activation_link.link_code\n # }\n # result = self.test_client.post('/account/confirm/', data = args)\n #\n # self.assertEqual(result.status_code, 200)\n # data = json.loads(result.data)\n # self.assertIsNotNone(data)\n # self.assertIn('error', data)\n # error = data['error']\n # self.assertIn('code', error)\n # self.assertEqual(error['code'], ActivationCodeExpiredOrInvalid.ERROR_CODE)\n #\n # def test_activation_retries_count_exceeded(self):\n # with self.app.app_context():\n # args = {\n # 'email' : '[email protected]',\n # 'password' : 'TestPassword123'\n # }\n # result = self.test_client.post('/account/create/', data = args)\n #\n # self.assertEqual(result.status_code, 200)\n # user_activation_link = UserActivationLink.query.first()\n # correct_code = user_activation_link.link_code\n # self.assertIsNotNone(user_activation_link)\n #\n # user = 
AuthUser.query.first()\n # self.assertIsNotNone(user)\n # self.assertFalse(user.enabled)\n #\n # args = {\n # 'user_id' : user.id,\n # 'type' : 'email',\n # 'code' : 'A' * self.config['max_activation_link_length']\n # }\n # for _ in xrange(self.config['max_activation_attempts_count']):\n # result = self.test_client.post('/account/confirm/', data = args)\n #\n # self.assertEqual(result.status_code, 200)\n # data = json.loads(result.data)\n # self.assertIsNotNone(data)\n # self.assertIn('error', data)\n # error = data['error']\n # self.assertIn('code', error)\n # self.assertEqual(error['code'], ActivationCodeExpiredOrInvalid.ERROR_CODE)\n #\n # result = self.test_client.post('/account/confirm/', data = args)\n # self.assertEqual(result.status_code, 200)\n # data = json.loads(result.data)\n # self.assertIsNotNone(data)\n # self.assertIn('error', data)\n # error = data['error']\n # self.assertIn('code', error)\n # self.assertEqual(error['code'], ActivationAttemptsCountExceeded.ERROR_CODE)\n #\n # args = {\n # 'user_id' : user.id,\n # 'type' : 'email',\n # 'code' : correct_code\n # }\n # result = self.test_client.post('/account/confirm/', data = args)\n #\n # self.assertEqual(result.status_code, 200)\n # data = json.loads(result.data)\n # self.assertIsNotNone(data)\n # self.assertIn('error', data)\n # error = data['error']\n # self.assertIn('code', error)\n # self.assertEqual(error['code'], ActivationAttemptsCountExceeded.ERROR_CODE)\n\n def test_request_new_activation_code(self):\n with self.app.app_context():\n args = {\n 'email': '[email protected]',\n 'password': 'TestPassword123'\n }\n result = self.test_client.post('/account/create/', data=args)\n\n self.assertEqual(result.status_code, 200)\n user_activation_link = UserActivationLink.query.first()\n self.assertIsNotNone(user_activation_link)\n old_code = user_activation_link.link_code\n old_code_id = user_activation_link.id\n\n user = AuthUser.query.first()\n self.assertIsNotNone(user)\n self.assertTrue(user.enabled)\n\n args = {\n 'email': '[email protected]'\n }\n result = self.test_client.post('/account/send_activation_code/', data=args)\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n self.assertTrue(data['result'])\n\n user_activation_link = UserActivationLink.query.first()\n self.assertIsNotNone(user_activation_link)\n self.assertNotEqual(old_code, user_activation_link.link_code)\n self.assertNotEqual(old_code_id, user_activation_link.id)\n\n self.assertEqual(user_activation_link.auth_user_id, user.id)\n self.assertEqual(len(user_activation_link.link_code), self.config['max_activation_link_length'])\n self.assertTrue(user_activation_link.link_code.isalnum())\n self.assertLess((datetime.utcnow() - user_activation_link.creation_date).total_seconds(), 10)\n self.assertIsNone(user_activation_link.used_date)\n\n old_code = user_activation_link.link_code\n old_code_id = user_activation_link.id\n\n user = AuthUser.query.first()\n self.assertIsNotNone(user)\n self.assertTrue(user.enabled)\n\n result = self.test_client.post('/account/send_activation_code/', data=args)\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n self.assertTrue(data['result'])\n\n self.assertEqual(UserActivationLink.query.count(), 1)\n user_activation_link = UserActivationLink.query.first()\n self.assertIsNotNone(user_activation_link)\n self.assertNotEqual(old_code, 
user_activation_link.link_code)\n self.assertNotEqual(old_code_id, user_activation_link.id)\n\n self.assertEqual(user_activation_link.auth_user_id, user.id)\n self.assertEqual(len(user_activation_link.link_code), self.config['max_activation_link_length'])\n self.assertTrue(user_activation_link.link_code.isalnum())\n self.assertLess((datetime.utcnow() - user_activation_link.creation_date).total_seconds(), 10)\n self.assertIsNone(user_activation_link.used_date)\n\n def test_request_new_activation_code_mobile(self):\n with self.app.app_context():\n args = {\n 'email': '[email protected]',\n 'password': 'TestPassword123'\n }\n result = self.test_client.post('/account/create/', data=args)\n\n self.assertEqual(result.status_code, 200)\n user_activation_link = UserActivationLink.query.first()\n self.assertIsNotNone(user_activation_link)\n old_code = user_activation_link.link_code\n old_code_id = user_activation_link.id\n UserActivationLink.query.filter_by(id=user_activation_link.id).delete()\n\n user = AuthUser.query.first()\n new_link = self._make_ua_link(use_chars=False, new_mobile=\"+78881112233\", user_id=user.id,\n link_type=ConfirmationLinkTypeEnum.CLT_MOBILE)\n sqldb.session.add(new_link)\n sqldb.session.commit()\n\n user = AuthUser.query.first()\n self.assertIsNotNone(user)\n self.assertTrue(user.enabled)\n\n args = {\n 'mobile': '+78881112233'\n }\n result = self.test_client.post('/account/send_activation_code/', data=args)\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n self.assertTrue(data['result'])\n\n user_activation_link = UserActivationLink.query.first()\n self.assertIsNotNone(user_activation_link)\n self.assertNotEqual(old_code, user_activation_link.link_code)\n self.assertNotEqual(old_code_id, user_activation_link.id)\n\n self.assertEqual(user_activation_link.auth_user_id, user.id)\n self.assertEqual(len(user_activation_link.link_code), self.config['digital_activation_link_length'])\n self.assertTrue(user_activation_link.link_code.isalnum())\n self.assertLess((datetime.utcnow() - user_activation_link.creation_date).total_seconds(), 10)\n self.assertIsNone(user_activation_link.used_date)\n\n old_code = user_activation_link.link_code\n old_code_id = user_activation_link.id\n\n self.assertIsNotNone(user)\n self.assertTrue(user.enabled)\n\n result = self.test_client.post('/account/send_activation_code/', data=args)\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n self.assertTrue(data['result'])\n\n self.assertEqual(UserActivationLink.query.count(), 1)\n user_activation_link = UserActivationLink.query.first()\n self.assertIsNotNone(user_activation_link)\n self.assertNotEqual(old_code, user_activation_link.link_code)\n self.assertNotEqual(old_code_id, user_activation_link.id)\n\n self.assertEqual(user_activation_link.auth_user_id, user.id)\n self.assertEqual(len(user_activation_link.link_code), self.config['digital_activation_link_length'])\n self.assertTrue(user_activation_link.link_code.isalnum())\n self.assertLess((datetime.utcnow() - user_activation_link.creation_date).total_seconds(), 10)\n self.assertIsNone(user_activation_link.used_date)\n\n @authorized(is_temporal=True)\n def test_update_profile_for_temporary_user(self):\n with self.app.app_context():\n self.assertEqual(len(self.mailer.mails), 0)\n user = AuthUser.query.filter_by(id=self.user.id).first()\n self.assertEqual(user.email, None)\n 
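# note: updating a temporal (guest) account with an email promotes it to a regular account (user.temporal is asserted below)\n 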
new_email = '[email protected]'\n params = {\n 'email': new_email\n }\n result = self.test_client.post('/account/profile/update/', data=params)\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n user_obj = data['result']\n\n user = AuthUser.query.filter_by(id=self.user.id).first()\n self.assertEqual(user_obj['email'], user.email)\n self.assertEqual(user_obj['email'], new_email)\n self.assertEqual(user.email_confirmed, False)\n self.assertIsNotNone(user.password)\n self.assertNotEqual(user.password, None)\n\n self.assertEqual(len(self.mailer.mails), 2)\n\n mail1 = self.mailer.mails[0]\n self.assertEqual(mail1['message']['subject'], u'Регистрация на ЮРБЮРО')\n\n mail2 = self.mailer.mails[1]\n self.assertEqual(mail2['message']['subject'], u'Подтвердите почтовый адрес в «ЮРБЮРО»')\n\n self.assertEqual(user.temporal, False)\n self.assertEqual(user.id, self.user.id)\n\n @authorized()\n def test_update_email(self):\n with self.app.app_context():\n self.assertEqual(len(self.mailer.mails), 0)\n user = AuthUser.query.filter_by(id=self.user.id).first()\n self.assertEqual(user.email, '[email protected]')\n new_email = '[email protected]'\n params = {\n 'email': new_email\n }\n result = self.test_client.post('/account/profile/update/', data=params)\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n user_obj = data['result']\n\n user = AuthUser.query.filter_by(id=self.user.id).first()\n self.assertEqual(user_obj['email'], user.email)\n self.assertEqual(user_obj['email'], new_email)\n self.assertEqual(user.email_confirmed, False)\n\n self.assertEqual(len(self.mailer.mails), 1)\n\n @authorized()\n def test_update_mobile(self):\n with self.app.app_context():\n self.assertEqual(len(self.sms_sender.sms), 0)\n user = AuthUser.query.filter_by(id=self.user.id).first()\n self.assertEqual(user.mobile, '+79001112233')\n new_mobile = '+79003332211'\n params = {\n 'mobile': new_mobile\n }\n result = self.test_client.post('/account/profile/update/', data=params)\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n user_obj = data['result']\n\n user = AuthUser.query.filter_by(id=self.user.id).first()\n self.assertEqual(user_obj['mobile'], user.mobile)\n self.assertEqual(user_obj['mobile'], new_mobile)\n self.assertEqual(user.mobile_confirmed, False)\n\n self.assertEqual(len(self.sms_sender.sms), 1)\n\n @authorized()\n def test_update_email_several_times(self):\n with self.app.app_context():\n self.assertEqual(len(self.mailer.mails), 0)\n user = AuthUser.query.filter_by(id=self.user.id).first()\n self.assertEqual(user.email, '[email protected]')\n new_email = '[email protected]'\n params = {\n 'email': new_email\n }\n result = self.test_client.post('/account/profile/update/', data=params)\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n user_obj = data['result']\n\n user = AuthUser.query.filter_by(id=self.user.id).first()\n self.assertEqual(user_obj['email'], user.email)\n self.assertEqual(user_obj['email'], new_email)\n self.assertEqual(user.email_confirmed, False)\n\n self.assertEqual(len(self.mailer.mails), 1)\n self.assertEqual(\n UserActivationLink.query.filter_by(link_type=ConfirmationLinkTypeEnum.CLT_EMAIL).count(), 1)\n\n params = {\n 'email': '[email 
protected]'\n }\n result = self.test_client.post('/account/profile/update/', data=params)\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n\n self.assertEqual(\n UserActivationLink.query.filter_by(link_type=ConfirmationLinkTypeEnum.CLT_EMAIL).count(), 1)\n email_confirmation_link = UserActivationLink.query.filter_by(link_type=ConfirmationLinkTypeEnum.CLT_EMAIL).first()\n\n self.assertIsNotNone(email_confirmation_link)\n\n self.assertEqual(email_confirmation_link.new_mobile, None)\n self.assertEqual(email_confirmation_link.new_email, '[email protected]')\n self.assertEqual(email_confirmation_link.use_attempts, 0)\n self.assertEqual(email_confirmation_link.used_date, None)\n\n self.assertEqual(len(self.mailer.mails), 2)\n\n @authorized()\n def test_get_profile(self):\n with self.app.app_context():\n result = self.test_client.get('/account/profile/')\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n user_obj = data['result']\n\n user = AuthUser.query.filter_by(id=self.user.id).first()\n\n self.assertIn('id', user_obj)\n self.assertIn('email', user_obj)\n self.assertIn('registration_date', user_obj)\n\n self.assertEqual(user_obj['email'], u'' if not user.email else user.email)\n\n @registered_user()\n def test_request_password_recovery_email(self):\n with self.app.app_context():\n self.assertEqual(len(self.mailer.mails), 0)\n params = {\n 'email': '[email protected]'\n }\n result = self.test_client.post('/account/password_recovery/', data=params)\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n\n password_change_link = UserActivationLink.query.filter_by(\n auth_user_id=self.user.id,\n link_type=ConfirmationLinkTypeEnum.CLT_PASSWORD,\n used_date=None).first()\n self.assertIsNotNone(password_change_link)\n self.assertEqual(len(self.mailer.mails), 1)\n\n @registered_user()\n def test_request_password_recovery_mobile(self):\n with self.app.app_context():\n self.assertEqual(len(self.sms_sender.sms), 0)\n params = {\n 'mobile': '+79001112233'\n }\n result = self.test_client.post('/account/password_recovery/', data=params)\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n\n password_change_link = UserActivationLink.query.filter_by(\n auth_user_id=self.user.id,\n link_type=ConfirmationLinkTypeEnum.CLT_PASSWORD,\n used_date=None).first()\n self.assertIsNotNone(password_change_link)\n self.assertEqual(len(self.sms_sender.sms), 1)\n\n @registered_user()\n def test_request_password_recovery_max_attempts(self):\n with self.app.app_context():\n self.assertEqual(len(self.mailer.mails), 0)\n params = {\n 'email': '[email protected]'\n }\n # first two attempts will pass\n for i in xrange(2):\n result = self.test_client.post('/account/password_recovery/', data=params)\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n # the third one will fail\n result = self.test_client.post('/account/password_recovery/', data=params)\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('error', data)\n\n password_change_link = UserActivationLink.query.filter_by(\n auth_user_id=self.user.id,\n 
link_type=ConfirmationLinkTypeEnum.CLT_PASSWORD,\n used_date=None).first()\n # FF 2 days\n self.user.last_password_drop_attempts_date = datetime.utcnow() - timedelta(2)\n\n # now it should be ok\n result = self.test_client.post('/account/password_recovery/', data=params)\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n\n def test_request_password_recovery_invalid_email(self):\n with self.app.app_context():\n self.assertEqual(len(self.mailer.mails), 0)\n params = {\n 'email': '[email protected]'\n }\n result = self.test_client.post('/account/password_recovery/', data=params)\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('error', data)\n self.assertEqual(data['error']['code'], 105)\n\n password_change_link = UserActivationLink.query.filter_by(\n link_type=ConfirmationLinkTypeEnum.CLT_PASSWORD,\n used_date=None).first()\n self.assertIsNone(password_change_link)\n self.assertEqual(len(self.mailer.mails), 0)\n\n @registered_user()\n def test_recover_password(self):\n with self.app.app_context():\n recovery_link = self._make_ua_link(use_chars=True, user_id=self.user.id, link_type=ConfirmationLinkTypeEnum.CLT_PASSWORD)\n sqldb.session.add(recovery_link)\n\n params = {\n 'user_id': self.user.uuid,\n 'code': recovery_link.link_code,\n 'new_password': 'New_password2'\n }\n result = self.test_client.post('/account/password_change/', data=params)\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n\n recovery_link = UserActivationLink.query.filter_by(id=recovery_link.id).first()\n self.assertIsNotNone(recovery_link)\n self.assertIsNotNone(recovery_link.used_date)\n\n args = {'email': '[email protected]',\n 'password': 'New_password2'}\n\n result = self.test_client.post('/account/login/', data=args)\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n\n @authorized()\n def test_recover_password_logged_in(self):\n with self.app.app_context():\n recovery_link = self._make_ua_link(use_chars=True, user_id=self.user.id, link_type=ConfirmationLinkTypeEnum.CLT_PASSWORD)\n sqldb.session.add(recovery_link)\n\n params = {\n 'user_id': self.user.uuid,\n 'code': recovery_link.link_code,\n 'new_password': 'New_password2'\n }\n result = self.test_client.post('/account/password_change/', data=params)\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertNotIn('error', data)\n\n @registered_user()\n def test_set_new_password(self):\n params = {\n 'user_id': self.user.uuid,\n 'old_password': 'TestPassword123',\n 'new_password': 'New_password2'\n }\n result = self.test_client.post('/account/password_change/', data=params)\n self.assertEqual(result.status_code, 200, str(result.data))\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n\n args = {'email': '[email protected]',\n 'password': 'New_password2',\n 'client': 'android'}\n\n result = self.test_client.post('/account/login/', data=args)\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n\n @registered_user()\n def test_set_new_password_with_email(self):\n with self.app.app_context():\n params = {\n 'email': self.user.email,\n 
'old_password': 'TestPassword123',\n 'new_password': 'New_password2'\n }\n result = self.test_client.post('/account/password_change/', data=params)\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n\n args = {'email': self.user.email,\n 'password': 'New_password2',\n 'client': 'android'}\n\n result = self.test_client.post('/account/login/', data=args)\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n\n @registered_user()\n def test_set_new_password_invalid_old(self):\n with self.app.app_context():\n params = {\n 'user_id': self.user.uuid,\n 'old_password': 'TestPassword1234',\n 'new_password': 'New_password2'\n }\n result = self.test_client.post('/account/password_change/', data=params)\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('error', data)\n self.assertEqual(data['error']['code'], errors.InvalidCurrentPassword.ERROR_CODE)\n\n @registered_user()\n def test_set_new_password_invalid_user_id(self):\n with self.app.app_context():\n params = {\n 'user_id': str(ObjectId()),\n 'old_password': 'TestPassword123',\n 'new_password': 'New_password2'\n }\n result = self.test_client.post('/account/password_change/', data=params)\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('error', data)\n self.assertEqual(data['error']['code'], UserNotFound.ERROR_CODE)\n\n @registered_user()\n def test_set_new_password_unauthorized(self):\n with self.app.app_context():\n params = {\n 'user_id': str(ObjectId()),\n 'old_password': 'TestPassword123',\n 'new_password': 'New_password2'\n }\n result = self.test_client.post('/account/password_change/', data=params)\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('error', data)\n self.assertEqual(data['error']['code'], UserNotFound.ERROR_CODE)\n\n @registered_user()\n def test_set_new_password_invalid_code(self):\n with self.app.app_context():\n recovery_link = self._make_ua_link(use_chars=True, user_id=self.user.id, link_type=ConfirmationLinkTypeEnum.CLT_PASSWORD)\n sqldb.session.add(recovery_link)\n\n params = {\n 'user_id': self.user.uuid,\n 'code': u'u' * self.config['max_activation_link_length'],\n 'new_password': 'New_password2'\n }\n result = self.test_client.post('/account/password_change/', data=params)\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('error', data)\n self.assertEqual(data['error']['code'], ActivationCodeExpiredOrInvalid.ERROR_CODE)\n\n @registered_user()\n def test_set_new_password_invalid_code_max_retries(self):\n with self.app.app_context():\n recovery_link = self._make_ua_link(use_chars=True, user_id=self.user.id, link_type=ConfirmationLinkTypeEnum.CLT_PASSWORD)\n sqldb.session.add(recovery_link)\n\n valid_code = recovery_link.link_code\n\n params = {\n 'user_id': self.user.uuid,\n 'code': u'u' * self.config['max_activation_link_length'],\n 'new_password': 'New_password2'\n }\n for i in xrange(5):\n result = self.test_client.post('/account/password_change/', data=params)\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('error', data)\n self.assertEqual(data['error']['code'], 
ActivationCodeExpiredOrInvalid.ERROR_CODE)\n # now try valid code\n params = {\n 'user_id': self.user.uuid,\n 'code': valid_code,\n 'new_password': 'New_password2'\n }\n result = self.test_client.post('/account/password_change/', data=params)\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('error', data)\n self.assertEqual(data['error']['code'], ActivationCodeExpiredOrInvalid.ERROR_CODE)\n\n @registered_user()\n def _test_get_user_data_for_password_recovery_request_handler(self):\n recovery_link = self._make_ua_link(use_chars=True, user_id=self.user.id, link_type=ConfirmationLinkTypeEnum.CLT_PASSWORD)\n sqldb.session.add(recovery_link)\n\n result = self.test_client.get('/account/by/code/?user_id=%s&code=%s' % (self.user.id, recovery_link.link_code))\n self.assertEqual(result.status_code, 200)\n\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n user_json = data['result']\n self.assertEqual(ObjectId(user_json['id']), self.user.uuid)\n\n @registered_user()\n def test_get_user_data_for_password_recovery_request_handler_invalid_code(self):\n with self.app.app_context():\n recovery_link = self._make_ua_link(use_chars=True, user_id=self.user.id, link_type=ConfirmationLinkTypeEnum.CLT_PASSWORD)\n sqldb.session.add(recovery_link)\n\n result = self.test_client.get('/account/by/code/?user_id=%s&code=invalid' % self.user.id)\n\n self.assertEqual(result.status_code, 400)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('error', data)\n error_json = data['error']\n self.assertEqual(error_json['code'], 5)\n\n @registered_user()\n def test_get_user_data_for_password_recovery_request_handler_invalid_user_and_code(self):\n with self.app.app_context():\n recovery_link = self._make_ua_link(use_chars=True, user_id=self.user.id, link_type=ConfirmationLinkTypeEnum.CLT_PASSWORD)\n sqldb.session.add(recovery_link)\n\n result = self.test_client.get('/account/by/code/?user_id=%s&code=1234' % ObjectId())\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('error', data)\n error_json = data['error']\n self.assertEqual(error_json['code'], 108)\n\n @registered_user()\n def test_get_user_data_for_password_recovery_request_handler_invalid_user(self):\n with self.app.app_context():\n recovery_link = self._make_ua_link(use_chars=True, user_id=self.user.id, link_type=ConfirmationLinkTypeEnum.CLT_PASSWORD)\n sqldb.session.add(recovery_link)\n\n result = self.test_client.get(\n '/account/by/code/?user_id=%s&code=%s' % (ObjectId(), recovery_link.link_code))\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('error', data)\n error_json = data['error']\n self.assertEqual(error_json['code'], 108)\n\n @registered_user()\n def test_get_user_data_for_password_recovery_request_handler_used_code(self):\n with self.app.app_context():\n recovery_link = self._make_ua_link(use_chars=True, user_id=self.user.id, link_type=ConfirmationLinkTypeEnum.CLT_PASSWORD)\n recovery_link.used_date = datetime.now()\n sqldb.session.add(recovery_link)\n\n result = self.test_client.get(\n '/account/by/code/?user_id=%s&code=%s' % (self.user.id, recovery_link.link_code))\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('error', data)\n error_json = data['error']\n 
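# 108 is assumed to match ActivationCodeExpiredOrInvalid.ERROR_CODE used by the sibling invalid-code tests\n 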
self.assertEqual(error_json['code'], 108)\n\n @authorized(is_temporal=True)\n def test_keep_account_data_after_login_temporary_user(self):\n\n data = {\n 'name': u\"Нейм\",\n 'surname': u\"Сёрнейм\",\n 'inn': \"781108730780\",\n 'phone': \"+79110010203\",\n 'email': \"[email protected]\",\n '_owner': self.user\n }\n person = PrivatePersonDbObject(**data)\n sqldb.session.add(person)\n sqldb.session.commit()\n\n new_company = CompanyDbObject(**dict({\n \"_owner\": self.user,\n \"ogrn\": \"1234567890123\",\n \"inn\": \"781108730780\",\n \"full_name\": u\"Протон\",\n \"short_name\": u\"Про\",\n \"kpp\": \"999999999\",\n \"company_type\": CompanyTypeEnum.CT_RUSSIAN,\n \"general_manager\": {\n '_id': person.id,\n 'type': 'person'\n }\n }))\n\n sqldb.session.add(new_company)\n sqldb.session.commit()\n new_company_id = new_company.id\n\n data = {\n u\"full_name\": u\"образовательное учреждение дополнительного образования детей специализированная детско-юношеская спортивная школа олимпийского резерва по боксу\",\n u\"doc_date\": datetime.now(),\n u\"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Седова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n \"ifns\": 1234\n },\n u\"selected_secretary\": {\n \"type\": \"company\",\n \"_id\": new_company.id\n },\n }\n new_batch_db_object = DocumentBatchDbObject(batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW, _owner=self.user)\n sqldb.session.add(new_batch_db_object)\n\n new_doc = BatchDocumentDbObject(\n _owner = self.user,\n document_type = DocumentTypeEnum.DT_GARANT_LETTER_SUBARENDA,\n batch = new_batch_db_object,\n data = data,\n caption = \"Caption\"\n )\n sqldb.session.add(new_doc)\n sqldb.session.commit()\n\n new_booking = IfnsBooking.db_model(batch_id=new_batch_db_object.id,\n service_id=IfnsServiceEnum.IS_REG_COMPANY,\n id=ObjectId())\n new_booking_id = new_booking.insert(self.db)\n\n test_notarius = NotariusObject(**{\n \"id\": \"abc\",\n \"name\": u\"Петр\",\n \"surname\": u\"Мандельштейн\",\n \"title\": u\"Нотариус №1\",\n \"schedule\": {\n \"type\": \"cyclic\",\n \"start_working_day\": \"2014-08-20\",\n \"working_days_count\": 1,\n \"weekends_count\": 2,\n \"start_time\": \"10:00\",\n \"end_time\": \"13:00\"\n },\n \"address\": {\n \"index\": 199000,\n \"street_type\": u\"пр-кт\",\n \"street\": u\"Народного Ополчения\",\n \"house_type\": u\"д\",\n \"house\": \"15\"\n },\n \"region\": u\"Санкт-Петербург\"\n })\n sqldb.session.add(test_notarius)\n sqldb.session.commit()\n\n notarius_booking = NotariusBookingObject(batch=new_batch_db_object,\n owner=self.user,\n notarius=test_notarius,\n dt=datetime.now(),\n address=\"here\")\n sqldb.session.add(notarius_booking)\n sqldb.session.commit()\n notarius_booking_id = notarius_booking.id\n\n ybc = YuristBatchCheck.db_model(batch_id=new_batch_db_object.id,\n create_date=datetime.now(),\n typos_correction=False,\n status=YuristBatchCheckStatus.YBS_NEW)\n\n ybc_id = ybc.insert(self.db)\n\n args = {\n 'name': u'Станислав',\n 'email': '[email protected]',\n 'password': 'TestPassword123'\n }\n result = self.test_client.post('/account/create/', data=args)\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n self.assertIn('result', data)\n uid = data['result']['id']\n self.assertEqual(data['result']['email'], '[email protected]')\n user = 
AuthUser.query.filter_by(uuid=uid).first()\n self.assertIsNotNone(user)\n self.assertEqual(user.id, self.user.id)\n\n # profile = UserProfile.find_one(current_app.db, {'auth_user_id': ObjectId(uid)})\n # self.assertEqual(profile._id, self.user_profile._id)\n\n self.assertEqual(AuthUser.query.count(), 1)\n\n ifns_booking = IfnsBooking.db_model.find_one(self.db, {'_id': new_booking_id})\n self.assertEqual(ifns_booking.batch_id, new_batch_db_object.id)\n\n notarius_booking = NotariusBookingObject.query.filter_by(\n id=notarius_booking_id\n ).scalar()\n self.assertEqual(notarius_booking.owner_id, self.user.id)\n\n ybc = YuristBatchCheck.db_model.find_one(self.db, {'_id': ybc_id})\n self.assertEqual(ybc.batch_id, new_batch_db_object.id)\n\n batch = DocumentBatchDbObject.query.filter_by(id=new_batch_db_object.id).first()\n self.assertEqual(batch._owner_id, self.user.id)\n\n company = CompanyDbObject.query.filter_by(id=new_company_id).first()\n self.assertEqual(company._owner_id, self.user.id)\n self.assertEqual(company.general_manager['_id'], person.id)\n\n person = PrivatePersonDbObject.query.filter_by(id=person.id).first()\n self.assertEqual(person._owner_id, self.user.id)\n\n @authorized(is_temporal=True)\n def test_merge_account_data_after_login_temporary_user(self):\n new_user = AuthUser(password=encrypt_password('TestPassword123'),\n email='[email protected]',\n enabled=True,\n email_confirmed=True,\n mobile=\"+79001112233\",\n mobile_confirmed=True)\n sqldb.session.add(new_user)\n\n data = {\n 'name': u\"Нейм\",\n 'surname': u\"Сёрнейм\",\n 'inn': \"781108730780\",\n 'phone': \"+79110010203\",\n 'email': \"[email protected]\",\n '_owner': self.user\n }\n person = PrivatePersonDbObject(**data)\n sqldb.session.add(person)\n sqldb.session.commit()\n person_id = person.id\n\n new_company = CompanyDbObject(**dict({\n \"_owner\": self.user,\n \"ogrn\": \"1234567890123\",\n \"inn\": \"781108730780\",\n \"full_name\": u\"Протон\",\n \"short_name\": u\"Про\",\n \"kpp\": \"999999999\",\n \"company_type\": CompanyTypeEnum.CT_RUSSIAN,\n \"general_manager\": {\n '_id': person.id,\n 'type': 'person'\n }\n }))\n\n sqldb.session.add(new_company)\n sqldb.session.commit()\n new_company_id = new_company.id\n\n data = {\n u\"full_name\": u\"образовательное учреждение дополнительного образования детей специализированная детско-юношеская спортивная школа олимпийского резерва по боксу\",\n u\"doc_date\": datetime.now(),\n u\"address\": {\n \"region\": RFRegionsEnum.RFR_SPB,\n \"index\": 123131,\n \"street_type\": StreetTypeEnum.STT_STREET,\n \"street\": u\"Седова\",\n \"house_type\": HouseTypeEnum.HOT_HOUSE,\n \"house\": \"2\",\n \"flat_type\": FlatTypeEnum.FLT_OFFICE,\n \"flat\": \"2\",\n \"ifns\": 1234\n },\n u\"selected_secretary\": {\n \"type\": \"company\",\n \"_id\": new_company_id\n },\n }\n new_batch_db_object = DocumentBatchDbObject(batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC,\n status=BatchStatusEnum.BS_NEW, _owner=self.user)\n sqldb.session.add(new_batch_db_object)\n\n new_doc = BatchDocumentDbObject(\n _owner = self.user,\n document_type = DocumentTypeEnum.DT_GARANT_LETTER_SUBARENDA,\n batch = new_batch_db_object,\n data = data,\n caption = \"Caption\"\n )\n sqldb.session.add(new_doc)\n\n sqldb.session.commit()\n\n new_booking = IfnsBooking.db_model(batch_id=new_batch_db_object.id,\n service_id=IfnsServiceEnum.IS_REG_COMPANY,\n id=ObjectId())\n new_booking_id = new_booking.insert(self.db)\n\n notarius_booking = NotariusBooking.db_model(batch_id=new_batch_db_object.id,\n _owner=self.user.id,\n 
notarius_id=ObjectId(),\n dt=datetime.now(),\n address=\"here\")\n notarius_booking_id = notarius_booking.insert(self.db)\n\n ybc = YuristBatchCheck.db_model(batch_id=new_batch_db_object.id,\n create_date=datetime.now(),\n typos_correction=False,\n status=YuristBatchCheckStatus.YBS_NEW)\n\n ybc_id = ybc.insert(self.db)\n\n rsa_sid = self.test_client.cookie_jar._cookies['localhost.local']['/']['rsa_sid']\n print(rsa_sid.value)\n val = Session.query.get(rsa_sid.value)\n print(pickle.loads(val.data))\n\n args = {\n 'email': '[email protected]',\n 'password': 'TestPassword123'\n }\n #rsa_sid.value = \"ybf19fa423c0ed51cbc4c74c0b6564f92d\"\n result = self.test_client.post('/account/login/', data=args)\n\n rsa_sid = self.test_client.cookie_jar._cookies['localhost.local']['/']['rsa_sid']\n print(rsa_sid)\n val = Session.query.get(rsa_sid.value)\n print(pickle.loads(val.data))\n\n result = self.test_client.post('/account/login/temporal/')\n self.assertEqual(result.status_code, 200)\n\n print(rsa_sid)\n val = Session.query.get(rsa_sid.value)\n print(pickle.loads(val.data))\n\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n self.assertIsNotNone(data)\n\n self.assertEqual(AuthUser.query.count(), 1)\n\n ifns_booking = IfnsBooking.db_model.find_one(self.db, {'_id': new_booking_id})\n self.assertEqual(ifns_booking.batch_id, new_batch_db_object.id)\n\n notarius_booking = NotariusBooking.db_model.find_one(self.db, {'_id': notarius_booking_id})\n self.assertEqual(notarius_booking._owner, new_user.id)\n\n ybc = YuristBatchCheck.db_model.find_one(self.db, {'_id': ybc_id})\n self.assertEqual(ybc.batch_id, new_batch_db_object.id)\n\n batch = DocumentBatchDbObject.query.filter_by(id=new_batch_db_object.id).first()\n self.assertEqual(batch._owner_id, new_user.id)\n\n company = CompanyDbObject.query.filter_by(id=new_company_id).first()\n self.assertEqual(company._owner_id, new_user.id)\n self.assertEqual(company.general_manager['_id'], person.id)\n\n person = PrivatePersonDbObject.query.filter_by(id=person_id).first()\n self.assertEqual(person._owner_id, new_user.id)\n\n @registered_user()\n def test_create_authorization_url(self):\n auth_url_object = UserManager.make_auth_url(\n url=\"http://test/url\",\n owner=self.user,\n expiration_td=timedelta(seconds=3600 * 24 * 7),\n )\n self.assertEqual(AuthorizationUrl.query.count(), 1)\n self.assertEqual(auth_url_object.url, 'http://test/url')\n self.assertLessEqual((datetime.now() - auth_url_object.created).total_seconds(), 1)\n exp = auth_url_object.created + timedelta(seconds=3600 * 24 * 7)\n self.assertLessEqual((exp - auth_url_object.expire_at).total_seconds(), 1)\n self.assertEqual(auth_url_object.used_times, 0)\n self.assertEqual(auth_url_object.owner_id, self.user.id)\n\n @registered_user()\n def test_authorize_through_url(self):\n auth_url_object = UserManager.make_auth_url(\n url=\"http://test/url\",\n owner=self.user,\n expiration_td=timedelta(seconds=3600 * 24 * 7)\n )\n\n response = self.test_client.get(auth_url_object.get_url(self.config))\n self.assertEqual(response.status_code, 302)\n self.assertEqual(response.headers['Location'], \"http://test/url\")\n self.assertIn('rsa_sid', response.headers['Set-Cookie'])\n\n self.assertEqual(auth_url_object.used_times, 1)\n\n @registered_user()\n def test_authorize_through_expired_url(self):\n auth_url_object = UserManager.make_auth_url(\n url=\"http://test/url\",\n owner=self.user,\n expiration_td=timedelta(seconds=1)\n )\n\n auth_url_object.expire_at = datetime.utcnow() - 
timedelta(days=1)\n sqldb.session.commit()\n\n response = self.test_client.get(auth_url_object.get_url(self.config))\n self.assertEqual(response.status_code, 302)\n self.assertNotIn('Set-Cookie', response.headers)\n\n self.assertEqual(auth_url_object.used_times, 1)\n\n @registered_user()\n def test_authorize_through_url_several_times(self):\n auth_url_object = UserManager.make_auth_url(\n url=\"http://test/url\",\n owner=self.user,\n expiration_td=timedelta(seconds=3600 * 24 * 7)\n )\n\n response = self.test_client.get(auth_url_object.get_url(self.config))\n self.assertEqual(response.status_code, 302)\n self.assertEqual(response.headers['Location'], \"http://test/url\")\n self.assertIn('rsa_sid', response.headers['Set-Cookie'])\n\n self.assertEqual(auth_url_object.used_times, 1)\n\n response = self.test_client.get(auth_url_object.get_url(self.config))\n self.assertEqual(response.status_code, 302)\n self.assertEqual(response.headers['Location'], \"http://test/url\")\n self.assertIn('rsa_sid', response.headers['Set-Cookie'])\n\n self.assertEqual(auth_url_object.used_times, 2)\n\n response = self.test_client.get(auth_url_object.get_url(self.config))\n self.assertEqual(response.status_code, 302)\n self.assertEqual(response.headers['Location'], \"http://test/url\")\n self.assertIn('rsa_sid', response.headers['Set-Cookie'])\n\n self.assertEqual(auth_url_object.used_times, 3)\n\n @registered_user()\n def test_authorize_through_missing_url(self):\n auth_url_object = UserManager.make_auth_url(\n url=\"http://test/url\",\n owner=self.user,\n expiration_td=timedelta(seconds=1)\n )\n\n auth_url_object.expire_at = datetime.utcnow() - timedelta(days=1)\n sqldb.session.commit()\n\n url = auth_url_object.get_url(self.config)\n AuthorizationUrl.query.filter_by(id=auth_url_object.id).delete()\n sqldb.session.commit()\n response = self.test_client.get(url)\n self.assertEqual(response.status_code, 404)\n self.assertNotIn('Set-Cookie', response.headers)\n\n @authorized()\n def test_authorize_through_url_while_authorized(self):\n auth_url_object = UserManager.make_auth_url(\n url=\"http://test/url\",\n owner=self.user,\n expiration_td=timedelta(seconds=3600 * 24 * 7)\n )\n\n response = self.test_client.get(auth_url_object.get_url(self.config))\n self.assertEqual(response.status_code, 302)\n self.assertEqual(response.headers['Location'], \"http://test/url\")\n self.assertIn('rsa_sid', response.headers['Set-Cookie'])\n\n self.assertEqual(auth_url_object.used_times, 1)\n" }, { "alpha_fraction": 0.6759269833564758, "alphanum_fraction": 0.6806893944740295, "avg_line_length": 71.8843002319336, "blob_id": "1c08ff139b08833e10d57aecff54d32bbbb3a1d4", "content_id": "96e663a43c3a28530b10b5f4f0dbce0b7c20546a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8819, "license_type": "no_license", "max_line_length": 149, "num_lines": 121, "path": "/app/jb_config.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom fw.settings import Configuration\n\n\nclass JBConfiguration(Configuration):\n def __init__(self, service_description, default_config_path, **kwargs):\n super(JBConfiguration, self).__init__(service_description, default_config_path)\n self.init_general_settings()\n self.init_web_server_settings()\n self.init_db_settings()\n self.init_mail_settings()\n self.init_sms_settings()\n self.init_social_settings()\n\n def init_general_settings(self):\n self.settings['MEMCACHED_HOST'] = 
self.get_from_config('GENERAL:MEMCACHED_HOST')\n self.settings['log_file_path'] = self.get_from_config('GENERAL:LOG_FILE_PATH', '/var/log/jb/jb.log')\n self.settings['bind_addr'] = self.get_from_config('GENERAL:BIND_ADDR', '/var/run/jb/app.sock')\n\n self.settings['DEBUG'] = self.get_from_config('GENERAL:DEBUG', 'False') == 'True'\n self.settings['STAGING'] = self.get_from_config('GENERAL:STAGING', 'False') == 'True'\n self.settings['TEST'] = self.get_from_config('GENERAL:TEST', 'False') == 'True'\n\n self.settings['PROD'] = not self.settings['DEBUG'] and not self.settings['STAGING'] and not self.settings['TEST']\n self.settings['LOG_LEVEL'] = self.LEVEL_NAME_VALUE_DICT[self.get_from_config('GENERAL:FILE_LOGGING_LEVEL', 'ERROR')]\n self.settings['CELERY_LOG_LEVEL'] = self.LEVEL_NAME_VALUE_DICT[self.get_from_config('GENERAL:CELERY_LOG_LEVEL', 'WARN')]\n self.settings['resources_path'] = self.get_from_config('GENERAL:RESOURCES_PATH')\n self.settings['PDF_BUILDER_PATH'] = self.get_from_config('GENERAL:PDF_BUILDER_PATH')\n self.settings['PDFTK_PATH'] = self.get_from_config('GENERAL:PDFTK_PATH')\n self.settings['DOCUMENT_STORAGE'] = self.get_from_config('GENERAL:DOCUMENT_STORAGE')\n self.settings['PRIVATE_STORAGE'] = self.get_from_config('GENERAL:PRIVATE_STORAGE')\n yurist_email_list = self.get_from_config('GENERAL:YURIST_EMAIL_LIST')\n yurist_email_list = [item.strip() for item in yurist_email_list.split(',') if item]\n self.settings['YURIST_EMAIL_LIST'] = yurist_email_list\n notarius_email_list = self.get_from_config('GENERAL:NOTARIUS_EMAIL_LIST')\n notarius_email_list = [item.strip() for item in notarius_email_list.split(',') if item]\n self.settings['NOTARIUS_EMAIL_LIST'] = notarius_email_list\n self.settings['CELERY_CONFIG_MODULE'] = self.get_from_config('GENERAL:CELERY_CONFIG_MODULE')\n self.settings['pdf_preview_watermark'] = self.get_from_config('GENERAL:PDF_PREVIEW_WATERMARK')\n self.settings['PDF_STAMPER_PATH'] = self.get_from_config('GENERAL:PDF_STAMPER_PATH')\n self.settings['celery_tasks_dir'] = self.get_from_config('GENERAL:CELERY_TASKS_DIR')\n self.settings['service_name'] = self.get_from_config('GENERAL:SERVICE_NAME')\n self.settings['ifns_admin_email'] = self.get_from_config('GENERAL:IFNS_ADMIN_EMAIL')\n self.settings['SITE_ROOT'] = self.get_from_config('GENERAL:SITE_ROOT')\n admin_email_list = self.get_from_config('GENERAL:ADMIN_EMAIL_LIST')\n admin_email_list = [item.strip() for item in admin_email_list.split(',') if item]\n self.settings['ADMIN_EMAIL_LIST'] = admin_email_list\n self.settings['RAISE_RIGHT_OFF'] = self.get_from_config('GENERAL:RAISE_RIGHT_OFF', 'False') == 'True'\n\n self.settings['YAD_ESHOP_PASSWORD'] = self.get_from_config('GENERAL:YAD_ESHOP_PASSWORD')\n yad_ip_list = self.get_from_config('GENERAL:YAD_IP_LIST')\n yad_ip_list = [item.strip() for item in yad_ip_list.split(',') if item]\n self.settings['YAD_IP_LIST'] = yad_ip_list\n\n self.settings['SEND_DOCS_TO_YURIST_DELAY_SECONDS'] = self.get_int_from_config('GENERAL:SEND_DOCS_TO_YURIST_DELAY_SECONDS', 60 * 60 * 2)\n self.settings['NOT_PAID_BATCH_NOTIFY_TIMEOUT_SECONDS'] = self.get_int_from_config('GENERAL:NOT_PAID_BATCH_NOTIFY_TIMEOUT_SECONDS', 3600 * 24)\n self.settings['NOT_PAID_BATCH_NOTIFY_DESIRED_TIME'] = self.get_from_config('GENERAL:NOT_PAID_BATCH_NOTIFY_DESIRED_TIME', '')\n\n self.settings['RUSSIAN_POST_API_LOGIN'] = self.get_from_config('GENERAL:RUSSIAN_POST_API_LOGIN')\n self.settings['RUSSIAN_POST_API_PASSWORD'] = self.get_from_config('GENERAL:RUSSIAN_POST_API_PASSWORD')\n\n def 
init_web_server_settings(self):\n self.settings['secret_key'] = self.get_from_config('WEB_SERVER:SECRET_KEY')\n self.settings['cookie_name'] = self.get_from_config('WEB_SERVER:SESSION_COOKIE_NAME')\n self.settings['SESSION_COOKIE_NAME'] = self.settings['cookie_name']\n self.settings['auth_session_lifetime'] = self.get_int_from_config('WEB_SERVER:PERMANENT_SESSION_LIFETIME', 86400)\n self.settings['PERMANENT_SESSION_LIFETIME'] = self.settings['auth_session_lifetime']\n\n self.settings['domain'] = self.get_from_config('WEB_SERVER:DOMAIN')\n self.settings['site_domain'] = self.settings['domain']\n self.settings['DOMAIN'] = self.get_from_config('WEB_SERVER:DOMAIN')\n self.settings['api_url'] = self.get_from_config('WEB_SERVER:API_URL')\n self.settings['UPLOAD_FOLDER'] = self.get_from_config('WEB_SERVER:UPLOAD_FOLDER')\n self.settings['STORAGE_URL'] = self.get_from_config('WEB_SERVER:STORAGE_URL')\n self.settings['socks_version'] = self.get_from_config('WEB_SERVER:PROXY_SOCKS_VERSION', u\"5\")\n\n self.settings['max_activation_link_length'] = self.get_int_from_config('API:MAX_ACTIVATION_LINK_LENGTH', 20)\n self.settings['digital_activation_link_length'] = self.get_int_from_config('API:DIGITAL_ACTIVATION_LINK_LENGTH', 4)\n self.settings['digital_activation_code_timeout'] = self.get_int_from_config('API:DIGITAL_ACTIVATION_CODE_TIMEOUT', 900)\n self.settings['email_activation_code_timeout'] = self.get_int_from_config('API:EMAIL_ACTIVATION_CODE_TIMEOUT', 86400)\n self.settings['max_activation_attempts_count'] = self.get_int_from_config('API:MAX_ACTIVATION_ATTEMPTS_COUNT', 3)\n\n self.settings['user_by_code_tries_count'] = self.get_int_from_config('API:USER_BY_CODE_TRIES_COUNT', 5)\n\n self.settings['WEB_SCHEMA'] = self.get_from_config('WEB_SERVER:WEB_SCHEMA', u'http')\n\n self.settings['SERVICE_NALOG_RU_URL'] = self.get_from_config('WEB_SERVER:SERVICE_NALOG_RU_URL', u\"https://service.nalog.ru\")\n self.settings['ORDER_NALOG_RU_URL'] = self.get_from_config('WEB_SERVER:ORDER_NALOG_RU_URL', u\"http://order.nalog.ru\")\n\n self.settings['MAX_CONTENT_LENGTH'] = self.get_int_from_config('API:MAX_CONTENT_LENGTH', 20 * 1024 * 1024)\n\n def init_db_settings(self):\n self.settings['db_user_name'] = self.get_from_config('DB:USER_NAME')\n self.settings['db_user_password'] = self.get_from_config('DB:PASSWORD')\n self.settings['db_host'] = self.get_from_config('DB:HOST')\n self.settings['db_name'] = self.get_from_config('DB:NAME')\n self.settings['SQLALCHEMY_DATABASE_URI'] = self.get_from_config('DB:POSTGRES')\n\n def init_mail_settings(self):\n self.settings['mailer_server'] = self.get_from_config('MAIL:SERVER')\n self.settings['mailer_smtp_user'] = self.get_from_config('MAIL:SMTP_USER')\n self.settings['mailer_reply_to'] = self.get_from_config('MAIL:REPLY_TO', self.settings['mailer_smtp_user']).decode('utf-8')\n self.settings['mailer_smtp_password'] = self.get_from_config('MAIL:SMTP_PASSWORD')\n\n def init_sms_settings(self):\n self.settings['sms_gate_address'] = self.get_from_config('SMS_GATE:SMS_GATE_ADDRESS')\n self.settings['sms_gate_user'] = self.get_from_config('SMS_GATE:SMS_GATE_USER')\n self.settings['sms_gate_password'] = self.get_from_config('SMS_GATE:SMS_GATE_PASSWORD')\n self.settings['sms_gate_sender'] = self.get_from_config('SMS_GATE:SMS_GATE_SENDER')\n\n def init_social_settings(self):\n self.settings['vk_api_version'] = self.get_from_config('SOCIAL_NETWORKS:VK_API_VERSION')\n self.settings['vk_app_id'] = self.get_int_from_config('SOCIAL_NETWORKS:VK_APP_ID')\n self.settings['vk_app_secret'] = 
self.get_from_config('SOCIAL_NETWORKS:VK_APP_SECRET')\n self.settings['vk_app_permissions'] = self.get_int_from_config('SOCIAL_NETWORKS:VK_APP_PERMISSIONS')\n self.settings['vk_auth_redirect_url'] = self.get_from_config('SOCIAL_NETWORKS:VK_AUTH_REDIRECT_URL')\n\n self.settings['facebook_app_id'] = self.get_int_from_config('SOCIAL_NETWORKS:FACEBOOK_APP_ID')\n self.settings['facebook_app_secret'] = self.get_from_config('SOCIAL_NETWORKS:FACEBOOK_APP_SECRET')\n self.settings['facebook_app_permissions'] = self.get_from_config('SOCIAL_NETWORKS:FACEBOOK_APP_PERMISSIONS')\n self.settings['facebook_auth_redirect_url'] = self.get_from_config('SOCIAL_NETWORKS:FACEBOOK_AUTH_REDIRECT_URL')\n" }, { "alpha_fraction": 0.5993175506591797, "alphanum_fraction": 0.6031653881072998, "avg_line_length": 36.73698806762695, "blob_id": "68b3e51321640e2f5dfcdc69703198be551525c8", "content_id": "3e5c52b5651e24647d45f65040bb0e5a20ccacd5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13774, "license_type": "no_license", "max_line_length": 203, "num_lines": 365, "path": "/app/fw/transport/mail.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport smtplib\nimport codecs\nfrom email.utils import formataddr\nimport os\nimport shlex\nimport subprocess\nimport tempfile\n\nfrom flask.templating import render_template\nfrom dateutil.parser import parse as parse_date_time\nimport imaplib\nimport logging\nimport poplib\nimport uuid\n\nimport email\nfrom email.header import decode_header, Header\nfrom common_utils import is32bit\n\n\nclass EmailMessage(object):\n def __init__(self, content, mail_id, body, from_hdr, to_hdr, date_hdr, subject, attachments):\n self.content = content\n self.mail_id = unicode(mail_id)\n self.body = unicode(body)\n self.from_hdr = unicode(from_hdr)\n self.to_hdr = unicode(to_hdr)\n self.date_hdr = date_hdr\n self.subject = unicode(subject)\n self.attachments = attachments\n\n def save_to_filename(self, filename):\n with open(filename, 'wb') as f:\n f.write(self.content)\n\n\nclass ReceiptMessage(EmailMessage):\n def __init__(self, mail_id, original_mail_id, content, body, from_hdr, to_hdr, date_hdr, subject, attachments):\n super(ReceiptMessage, self).__init__(content, mail_id, body, from_hdr, to_hdr, date_hdr, subject, attachments)\n self.original_mail_id = original_mail_id\n\n\nclass ImapReader(object):\n def __init__(self, domain, port, ssl, user, password):\n self.domain = domain\n self.port = port\n self.ssl = ssl\n self.user = user\n self.password = password\n\n self._client = None\n\n def read(self, **kwargs):\n if not self._client:\n # internet connection\n self._client = imaplib.IMAP4(host=self.domain, port=self.port) if not self.ssl else imaplib.IMAP4_SSL(host=self.domain, port=self.port)\n self._client.login(self.user, self.password)\n self._client.select()\n\n typ, data = self._client.search(None, '(ALL)')\n\n for num in data[0].split():\n typ, data = self._client.fetch(num, '(BODY.PEEK[])')\n\n result = get_decoded_email_body(data[0][1])\n if not result:\n continue\n\n if result[0] == 'receipt':\n mail_id, original_mail_id, content, body, from_hdr, to_hdr, date_hdr, subject, attachments = result[1:]\n yield ReceiptMessage(mail_id, original_mail_id, content, body, from_hdr, to_hdr, date_hdr, subject, attachments)\n else:\n content, mail_id, body, from_hdr, to_hdr, date_hdr, subject, attachments = result\n yield EmailMessage(content, mail_id, body, from_hdr, to_hdr, date_hdr, 
subject, attachments)\n\n        self._client.close()\n        self._client.logout()\n        self._client = None\n\n\nclass Pop3Reader(object):\n    def __init__(self, domain, port, ssl, user, password, delete_on_receive, proxy):\n        self.domain = domain\n        self.port = port\n        self.ssl = ssl\n        self.user = user\n        self.password = password\n        self.delete_on_receive = delete_on_receive\n\n        self._client = None\n        self.proxy = proxy\n\n    def read(self, tester = None):\n        with self.proxy:\n            if not self._client:\n                # internet connection\n                self._client = poplib.POP3(host=self.domain, port=self.port) if not self.ssl else poplib.POP3_SSL(host=self.domain, port=self.port)\n                self._client.user(self.user)\n                self._client.pass_(self.password)\n\n            numMessages, msgBytes = self._client.stat()\n            for i in range(numMessages):\n                if tester is not None:\n                    hdr, message, octets = self._client.top(i + 1, 0)\n                    result = get_decoded_email_body('\\r\\n'.join(message))\n                    if not result:\n                        continue\n                    mail_id = result[1]\n                    if not tester(mail_id):\n                        continue\n\n                hdr, message, octets = self._client.retr(i + 1)\n                result = get_decoded_email_body('\\r\\n'.join(message))\n                if not result:\n                    continue\n                if self.delete_on_receive:\n                    self._client.dele(i + 1)\n                if result[0] == 'receipt':\n                    mail_id, original_mail_id, content, body, from_hdr, to_hdr, date_hdr, subject, attachments = result[1:]\n                    logging.info(u\"Got an email read receipt: mail_id:%s, original_mail_id:%s, from_hdr:%s, date_hdr:%s\" % (unicode(mail_id), unicode(original_mail_id), unicode(from_hdr), unicode(date_hdr)))\n                    yield ReceiptMessage(mail_id, original_mail_id, content, body, from_hdr, to_hdr, date_hdr, subject, attachments)\n                else:\n                    content, mail_id, body, from_hdr, to_hdr, date_hdr, subject, attachments = result\n                    logging.info(u\"Got an email: mail_id:%s, from_hdr:%s, date_hdr:%s\" % (unicode(mail_id), unicode(from_hdr), unicode(date_hdr)))\n                    yield EmailMessage(content, mail_id, body, from_hdr, to_hdr, date_hdr, subject, attachments)\n\n            self._client.quit()\n            self._client = None\n\n\ndef get_decoded_header(header, try_charset = None):\n\n    result_list = []\n    items = decode_header(header)\n    for subject, encoding in items:\n\n        if encoding is None:\n            try:\n                unicode(subject)\n                result_list.append(subject)\n            except Exception:\n                # fall back to the hinted charset; append raw bytes only when no hint is available\n                if try_charset:\n                    result_list.append(subject.decode(try_charset))\n                else:\n                    result_list.append(subject)\n        else:\n            result_list.append(subject.decode(encoding))\n    return u\" \".join(result_list)\n\nclass EmailAttachment(object):\n\n    def __init__(self, filename, content):\n        self.filename = filename\n        self.content = content\n\ndef get_email_attachment(message_part):\n    charset = message_part.get_content_charset()\n    content_type = message_part.get_content_type()\n    filename = message_part.get_filename('')\n\n    content_type, subtype = content_type.split('/')\n\n    try:\n        if content_type == 'text':\n            if not filename:\n                logging.warn(u'Skipping attachment without name')\n                return\n            if not charset:\n                return EmailAttachment(filename, message_part.get_payload(decode=True))\n            return EmailAttachment(filename, unicode(message_part.get_payload(decode=True), str(charset), \"ignore\"))\n        if content_type in ('application', 'audio', 'image', 'video'):\n            if not filename:\n                logging.warn(u'Skipping attachment without name')\n                return\n            return EmailAttachment(filename, message_part.get_payload(decode=True))\n        if content_type in ('multipart', 'message'):\n            filename = unicode(uuid.uuid1()) + '.eml'\n            return EmailAttachment(filename, message_part.as_string(unixfrom=True))\n    except Exception:\n        logging.exception(u"Failed 
to get mail attachment\")\n\ndef parse_simple_email(msg):\n    text = None\n    html = None\n\n    for part in msg.get_payload():\n        if part.get_content_charset() is None:\n            continue\n\n        charset = part.get_content_charset()\n        content_type = part.get_content_type()\n\n        if content_type == 'text/plain':\n            text = unicode(part.get_payload(decode=True), str(charset), \"ignore\")\n\n        if content_type == 'text/html':\n            html = unicode(part.get_payload(decode=True), str(charset), \"ignore\")\n\n    return text, html\n\ndef parse_mixed_email(msg):\n    text = \"\"\n    html = \"\"\n    attachments = []\n\n    for part in msg.get_payload():\n        content_disposition = (part.get('Content-Disposition') or '').strip()\n        content_type = part.get_content_type()\n        charset = part.get_content_charset()\n\n        if content_type == 'multipart/alternative':\n            text, html = parse_simple_email(part)\n        elif content_disposition.startswith('inline') or content_disposition.startswith('attachment'):\n            attachment = get_email_attachment(part)\n            if not attachment:\n                logging.warn(u'Failed to get attachment from part %s' % part.as_string())\n                continue\n            attachments.append(attachment)\n        elif content_type == 'text/plain':\n            text = unicode(part.get_payload(decode=True), str(charset), \"ignore\")\n        elif content_type == 'text/html':\n            html = unicode(part.get_payload(decode=True), str(charset), \"ignore\")\n\n    return text, html, attachments\n\ndef parse_report_email(msg):\n    for part in msg.get_payload():\n        content_type = part.get_content_type()\n\n        if content_type == 'message/disposition-notification':\n            notification_payload = part.get_payload()[0]\n            charset = notification_payload.get_content_charset()\n            for header_name, header_val in notification_payload._headers:\n                header_name = header_name.lower()\n                if header_name == 'original-message-id':\n                    receipt_message_id = get_decoded_header(header_val, charset)\n                    return receipt_message_id\n\n\ndef get_decoded_email_body(message_body):\n    msg = email.message_from_string(message_body)\n\n    if 'message-id' not in msg:\n        logging.error(u'Message without id!')\n        return\n    mail_id = unicode(msg['message-id'])\n\n    from_hdr = \"\"\n    to_hdr = \"\"\n    date_hdr = \"\"\n    subject = \"\"\n    attachments = []\n\n    #print('-' * 40 + ' ' + subject + ' ' + '-' * 40)\n    root_content_type = msg.get_content_type()\n    charset = msg.get_content_charset()\n\n    for header_name, header_val in msg._headers:\n        header_name = header_name.lower()\n        if header_name == 'date':\n            date_hdr = parse_date_time(header_val) # Fri, 21 Feb 2014 17:04:36 +0400\n        elif header_name == 'subject':\n            subject = get_decoded_header(header_val, charset)\n        elif header_name == 'from':\n            from_hdr = get_decoded_header(header_val, charset)\n        elif header_name == 'to':\n            to_hdr = get_decoded_header(header_val, charset)\n\n\n    text = ''\n    html = ''\n\n    if msg.is_multipart():\n\n        if root_content_type == 'multipart/alternative': # simple email (html + plain text)\n            text, html = parse_simple_email(msg)\n        elif root_content_type == 'multipart/mixed': # email with attachments\n            text, html, attachments = parse_mixed_email(msg)\n        elif root_content_type == 'multipart/related': # email with attachments\n            text, html, attachments = parse_mixed_email(msg)\n        elif root_content_type == 'multipart/report': # probably return receipt\n            receipt_original_message_id = parse_report_email(msg)\n            return 'receipt', mail_id, receipt_original_message_id, msg.as_string(True), '', from_hdr, to_hdr, date_hdr, subject, attachments\n        else:\n            logging.warn(u'Unsupported mail root content type: %s' % root_content_type)\n            return None\n\n    elif 
root_content_type == 'text/plain':\n text = unicode(msg.get_payload(decode=True), str(charset), \"ignore\")\n elif root_content_type == 'text/html':\n html = unicode(msg.get_payload(decode=True), str(charset), \"ignore\")\n\n body = text or html\n# print('body: %s\\r\\nnumber of attachments: %d' % (body[:400], len(attachments)))\n return msg.as_string(True), mail_id, body, from_hdr, to_hdr, date_hdr, subject, attachments\n\ndef create_temp_attachment(template, **template_data):\n html_text = render_template(template, **template_data)\n\n t_file = tempfile.NamedTemporaryFile(mode=\"w+\", delete=False, suffix=\".html\")\n t_file_name = t_file.name\n t_file.close()\n\n with codecs.open(t_file.name, 'wb', 'utf-8') as fo:\n fo.write(html_text)\n\n t_file_out = tempfile.NamedTemporaryFile(mode=\"w+\", delete=False, suffix=\".pdf\")\n full_name = t_file_out.name\n t_file_out.close()\n\n # convert html to pdf\n rasterize_path = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), '../rasterize.js'))\n phantom_path = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), '../phantomjs%s' % ('32' if is32bit() else '')))\n subprocess.call(shlex.split('%s %s %s %s A4' % (phantom_path, rasterize_path, t_file_name, full_name)))\n os.unlink(t_file_name)\n\n return full_name\n\ndef fix_email_addr(addr):\n if u'<' in addr and u'>' in addr:\n try:\n name, addr = addr.split(u'<')\n addr = addr.replace(u'>', u\"\").strip()\n name = name.strip()\n return formataddr((str(Header(name, 'utf-8')), addr))\n except Exception:\n pass\n return addr\n\ndef raw_email_address(addr):\n if u'<' in addr and '>' in addr:\n try:\n name, addr = addr.split(u'<')\n addr = addr.replace(u'>', u\"\").strip()\n return addr\n except Exception:\n pass\n return addr\n\ndef get_email_reader(settings, config, proxy):\n domain = settings.source_email_host\n port = settings.source_email_port\n ssl = settings.source_email_use_ssl\n user = settings.source_email_user\n password = settings.source_email_password\n delete_on_receive = settings.source_email_delete_on_receive\n\n # if config['source_email_is_imap']:\n # return ImapReader(domain, port, ssl, user, password)\n #\n\n return Pop3Reader(domain, port, ssl, user, password, delete_on_receive, proxy)\n\nclass Mailer(object):\n\n def __init__(self, server, user, password):\n self.server = server\n self.user = user\n self.password = password\n\n def send_email(self, addr_from, addr_to, message):\n s = smtplib.SMTP(self.server)\n s.starttls()\n s.login(self.user, self.password)\n s.sendmail(addr_from, addr_to, message)\n s.quit()\n" }, { "alpha_fraction": 0.5136363506317139, "alphanum_fraction": 0.5159090757369995, "avg_line_length": 30.404762268066406, "blob_id": "61216bb19ace0e313383fd86a6a03daf0addec77", "content_id": "cf81a57c89b7d185ad8a2337a995633bf8304cc6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1360, "license_type": "no_license", "max_line_length": 94, "num_lines": 42, "path": "/app/manage_commands/__init__.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport getpass\nimport sys\n\nclass BaseManageCommand(object):\n def __init__(self, config = None, logger = None):\n assert config\n assert logger\n\n self.config = config\n self.logger = logger\n\n def run(self):\n raise NotImplementedError()\n\n\ndef get_single(prompt, hide_echo = False, validator = None, error_hint = \"\", retry_count = 3):\n for _ in 
xrange(retry_count):\n        if not hide_echo:\n            value = raw_input(prompt).decode(sys.stdin.encoding)\n            if validator:\n                if not validator.validate(value):\n                    if not error_hint:\n                        error_hint = \"Некорректное значение\"\n                    print(error_hint)\n                else:\n                    return validator.get_value(unicode(value))\n            else:\n                return value\n        else:\n            value = getpass.getpass(prompt).decode(sys.stdin.encoding)\n            if validator:\n                if not validator.validate(unicode(value)):\n                    if not error_hint:\n                        error_hint = \"Некорректное значение\"\n                    print(error_hint)\n                else:\n                    return validator.get_value(unicode(value))\n            else:\n                return value\n    exit(-1)\n\n" }, { "alpha_fraction": 0.5668963193893433, "alphanum_fraction": 0.5694343447685242, "avg_line_length": 50.074073791503906, "blob_id": "a0f9bdd31c5bdab18decc8b5ccfc179f7fc57240", "content_id": "b15cad49e2903062a314c723a6f584ee61cb568c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5554, "license_type": "no_license", "max_line_length": 132, "num_lines": 108, "path": "/app/services/llc_reg/manage_commands/llc_commands.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\n\nfrom common_utils import get_russian_month_skl\nfrom fw.auth.user_manager import UserManager\nfrom fw.documents.db_fields import DocumentBatchDbObject, BatchDocumentDbObject, PrivatePersonDbObject, CompanyDbObject\nfrom fw.documents.enums import DocumentBatchTypeEnum, DocumentTypeEnum\nfrom fw.documents.fields.doc_fields import PrivatePerson, CompanyObject\nfrom manage_commands import BaseManageCommand, get_single\nfrom fw.async_tasks import send_email\nfrom services.llc_reg.documents.enums import DocumentDeliveryTypeStrEnum\nfrom template_filters import utm_args\n\n\nclass SendRegMailCommand(BaseManageCommand):\n    NAME = \"send_llc_reg_mail\"\n\n    def run(self):\n        self.logger.info(u\"Отправка письма о регистрации компании\")\n        self.logger.info(u'=' * 50)\n\n        batch_id = get_single(u'Batch id: ')\n\n        batch = DocumentBatchDbObject.query.filter_by(id=batch_id, batch_type=DocumentBatchTypeEnum.DBT_NEW_LLC).first()\n        if not batch:\n            self.logger.info(u'Batch not found')\n            return\n\n        if not batch.result_fields or 'ifns_reg_info' not in batch.result_fields:\n            self.logger.info(u'Company not registered')\n            return\n\n        reg_info = batch.result_fields['ifns_reg_info']\n        if 'status' not in reg_info or reg_info['status'] != 'registered' or not reg_info['reg_date'] or not reg_info['ogrn']:\n            self.logger.info(u'Company not registered')\n            return\n\n        ogrn = reg_info['ogrn']\n        reg_date = datetime.strptime(reg_info['reg_date'], \"%d.%m.%Y\")\n\n        recipient = batch._owner.email\n        if not recipient:\n            self.logger.info(u'Company owner has no email')\n            return\n\n        short_name = batch.data.get('short_name', u\"\")\n        doc_rec_type = batch.data.get('obtain_way', None)\n\n        ifns_book_doc_receive_url = \"%s://%s/ip/?id=%s\" % (self.config['WEB_SCHEMA'], self.config['DOMAIN'], batch_id)\n        ifns_book_doc_receive_url = utm_args(ifns_book_doc_receive_url, 'ifns_ip_reg_success', batch._owner_id) + u\"#page=obtaining\"\n        ifns_book_doc_receive_url = UserManager.make_auth_url(ifns_book_doc_receive_url, batch._owner).get_url(self.config)\n\n        docs_recipient_fio = u\"\"\n        if doc_rec_type == DocumentDeliveryTypeStrEnum.DDT_ISSUE_TO_THE_APPLICANT:\n            doc = BatchDocumentDbObject.query.filter_by(batch=batch,\n                                                        document_type=DocumentTypeEnum.DT_P11001).first()\n            if doc:\n                founders = doc.data['founders']\n                for founder in founders:\n                    if 
founder.get('documents_recipient_type', '') != '':\n person = founder.get('person', None)\n if person and '_id' in person:\n person_obj = PrivatePersonDbObject.query.filter_by(\n id=person['_id']).scalar()\n if person_obj:\n pp = PrivatePerson.db_obj_to_field(person_obj)\n if pp:\n docs_recipient_fio = pp.full_name\n else:\n company = founder.get('company', None)\n if company:\n company_db_object = CompanyDbObject.query.filter_by(\n id=company['_id']).scalar()\n if company_db_object:\n cc = CompanyObject.db_obj_to_field(company_db_object)\n if cc and cc.general_manager and cc.general_manager.initialized:\n docs_recipient_fio = cc.general_manager.full_name\n elif doc_rec_type == DocumentDeliveryTypeStrEnum.DDT_ISSUE_TO_THE_APPLICANT_OR_AGENT:\n doc = BatchDocumentDbObject.query.filter_by(batch=batch,\n document_type=DocumentTypeEnum.DT_DOVERENNOST_OBTAIN).first()\n\n if doc:\n doc_obtain_person = doc.data.get('doc_obtain_person', None)\n if doc_obtain_person and '_id' in doc_obtain_person:\n person_obj = PrivatePersonDbObject.query.filter_by(\n id=doc_obtain_person['_id']).scalar()\n if person_obj:\n pp = PrivatePerson.db_obj_to_field(person_obj)\n if pp:\n docs_recipient_fio = pp.full_name\n\n send_email.send_email(recipient,\n 'ifns_llc_reg_success',\n short_name=short_name,\n doc_rec_by_email=(doc_rec_type == DocumentDeliveryTypeStrEnum.DDT_SEND_BY_MAIL),\n doc_rec_by_responsible=(\n doc_rec_type == DocumentDeliveryTypeStrEnum.DDT_ISSUE_TO_THE_APPLICANT_OR_AGENT),\n ifns_book_doc_receive_url=ifns_book_doc_receive_url,\n schema=self.config['WEB_SCHEMA'],\n domain=self.config['DOMAIN'],\n ogrn=ogrn,\n docs_ready_date=u\"%d %s %s года\" % (\n reg_date.day, get_russian_month_skl(reg_date.month), reg_date.year),\n docs_recipient_fio=docs_recipient_fio,\n obtain_person_fio=u\"\",\n service_startup=datetime.utcnow() < datetime(2015, 6, 1),\n user_id=str(batch._owner_id))\n self.logger.info(u'Sent %s to %s' % ('ifns_llc_reg_success', recipient))\n" }, { "alpha_fraction": 0.42971062660217285, "alphanum_fraction": 0.4354771673679352, "avg_line_length": 43.0457649230957, "blob_id": "4d7cad6e23ac3c1be337b65d173514c52b5e7742", "content_id": "fbffd2946059d636f91944b03ebac7a10dd534b2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 38932, "license_type": "no_license", "max_line_length": 190, "num_lines": 874, "path": "/app/services/ip_reg/documents/initial_db_data_ip.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport copy\nimport os\nfrom fw.documents.enums import DocumentTypeEnum, BatchStatusEnum, DocumentKindEnum, DocumentBatchTypeEnum, TaxType\nfrom fw.documents.field_matchers import FieldSetMatcher, FieldAttributeMatcher, MultilineFieldMatcher, \\\n ConcatFieldAttributeMatcher, SimpleMatcher, ArrayAttributeMatcher, ConstantMatcher\nfrom fw.documents.common_schema_fields import (ADDRESS_FIELD,\n JOB_MAIN_CODE_FIELD, JOB_CODE_ARRAY_FIELD, DOC_DATE_FIELD, IP_FOUNDER_FIELD, TAXATION_TYPE_FIELD,\n USN_TAX_TYPE_FIELD, IP_OBTAIN_WAY_FIELD, IP_REGISTRATION_WAY_FIELD, IP_REGISTRATION_PERSON_FIELD,\n IP_OBTAIN_PERSON_FIELD, REGISTRATION_PERSON_FIELD, OBTAIN_PERSON_FIELD, IP_SAME_OBTAIN_TRUST_PERSON_FIELD)\nfrom services.ip_reg.documents.enums import IPRegistrationWayEnum\n\n\ndef get_test_resource_name(config, resource_rel_path):\n resources_path = config['resources_path']\n return os.path.join(resources_path, resource_rel_path)\n\n\ndef load_data(config):\n\n from 
services.ip_reg.documents.p21001 import get_p21001_template\n P21001_TEMPLATE = get_p21001_template(config)\n\n P21001_MATCHER ={\n \"doc_name\" : DocumentTypeEnum.DT_P21001,\n \"template_name\" : P21001_TEMPLATE['template_name'],\n \"fields\" : {\n \"%page_1-set-rus\": FieldSetMatcher(fields = { # RUSSIAN CITIZEN\n \"page1-surname\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"surname\"),\n \"page1-name\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"name\"),\n \"page1-patronymic\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"patronymic\"),\n \"page1-inn\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"inn\"),\n \"page1-birth_date__day\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"birthdate.day\"),\n \"page1-birth_date__month\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"birthdate.month\"),\n \"page1-birth_date__year\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"birthdate.year\"),\n \"page1-birth_place__line{{item}}\": MultilineFieldMatcher(field_name = \"person\", attr_name=\"birthplace\"),\n \"page1-citizenship\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"person_type.value\"),\n #\"page1-citizenship\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"living_country_code\", adapter=\"CountryCodeRusAdapter\"),\n \"page1-gender\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"sex\", adapter=\"GenderToNumberAdapter\"),\n # \"page1-surname_latin\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"surname\"),\n # \"page1-name_latin\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"name\"),\n # \"page1-patronymic_latin\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"patronymic\"),\n # \"page1-state_nationality\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"living_country_code\")\n }),\n \"%page_1-set-not_rus\": FieldSetMatcher(fields = { # NOT RUSSIAN CITIZEN\n # \"page1-surname\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"surname\"),\n # \"page1-name\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"name\"),\n # \"page1-patronymic\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"patronymic\"),\n \"page1-inn\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"inn\"),\n \"page1-birth_date__day\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"birthdate.day\"),\n \"page1-birth_date__month\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"birthdate.month\"),\n \"page1-birth_date__year\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"birthdate.year\"),\n \"page1-birth_place__line{{item}}\": MultilineFieldMatcher(field_name = \"person\", attr_name=\"birthplace\"),\n \"page1-citizenship\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"person_type.value\"),\n #\"page1-citizenship\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"living_country_code\", adapter=\"CountryCodeRusAdapter\"),\n \"page1-gender\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"sex\", adapter=\"GenderToNumberAdapter\"),\n \"page1-surname_latin\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"surname\"),\n \"page1-name_latin\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"name\"),\n \"page1-patronymic_latin\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"patronymic\"),\n \"page1-state_nationality\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"living_country_code\")\n }),\n # PAGE 2\n \"page2-subject_code\": 
FieldAttributeMatcher(field_name=\"person\", attr_name=\"address.region\", adapter = \"RFRegionNumberAdapter\"),\n \"page2-district_type\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"address.district_type\", adapter = \"ShortDistrictTypeAdapter\"),\n \"page2-postal_index\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"address.index\"),\n \"page2-city_type\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"address.city_type\", adapter = \"ShortCityTypeAdapter\"),\n \"page2-city_name\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"address.city\"),\n \"page2-nas_punkt_type\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"address.village_type\", adapter = \"ShortVillageTypeAdapter\"),\n \"page2-street_type\": FieldAttributeMatcher(field_name = \"person\", attr_name = \"address.street_type\", adapter = \"ShortStreetTypeAdapter\"),\n \"page2-building_type\": FieldAttributeMatcher(field_name = \"person\", attr_name = \"address.house_type\"),\n \"page2-building_number\": FieldAttributeMatcher(field_name = \"person\", attr_name = \"address.house\"),\n \"page2-korpus_type\": FieldAttributeMatcher(field_name = \"person\", attr_name = \"address.building_type\"),\n \"page2-korpus_number\": FieldAttributeMatcher(field_name = \"person\", attr_name = \"address.building\"),\n \"page2-flat_type\": FieldAttributeMatcher(field_name = \"person\", attr_name = \"address.flat_type\"),\n \"page2-flat_number\": FieldAttributeMatcher(field_name = \"person\", attr_name = \"address.flat\"),\n \"page2-doc_type\": FieldAttributeMatcher(field_name = \"person\", attr_name = \"passport.document_type\"),\n \"page2-doc_number\": ConcatFieldAttributeMatcher(field_name=\"person\", attributes = [ \"passport.series\", \"passport.number\"], adapter=\"InternalPassportAdapter\"),\n \"page2-issue_date__day\": FieldAttributeMatcher(field_name = \"person\", attr_name = \"passport.issue_date.day\"),\n \"page2-issue_date__month\": FieldAttributeMatcher(field_name = \"person\", attr_name = \"passport.issue_date.month\"),\n \"page2-issue_date__year\": FieldAttributeMatcher(field_name = \"person\", attr_name = \"passport.issue_date.year\"),\n \"page2-subdivision_code__left\": FieldAttributeMatcher(field_name = \"person\", attr_name = \"passport.depart_code\"),\n \"page2-subdivision_code__right\": FieldAttributeMatcher(field_name = \"person\", attr_name = \"passport.depart_code\"),\n \"page2-nas_punkt_name__line{{item}}\": MultilineFieldMatcher(field_name = \"person\", attr_name = \"address.village\"),\n \"page2-street_name__line{{item}}\": MultilineFieldMatcher(field_name = \"person\", attr_name = \"address.street\"),\n \"page2-district_name__line{{item}}\": MultilineFieldMatcher(field_name = \"person\", attr_name = \"address.district\"),\n \"page2-issuer__line{{item}}\": MultilineFieldMatcher(field_name = \"person\", attr_name = \"passport.issue_depart\"),\n # PAGE 3\n\n # PAGE 4\n \"page4-main_job_code__part1\" : SimpleMatcher(field_name = \"job_main_code\"),\n \"page4-main_job_code__part2\" : SimpleMatcher(field_name = \"job_main_code\"),\n \"page4-main_job_code__part3\" : SimpleMatcher(field_name = \"job_main_code\"),\n \"page4-job_code#{{item}}__part1\" : ArrayAttributeMatcher(field_name = \"job_code_array\", sorted = \"true\"),\n \"page4-job_code#{{item}}__part2\" : ArrayAttributeMatcher(field_name = \"job_code_array\", sorted = \"true\"),\n \"page4-job_code#{{item}}__part3\" : ArrayAttributeMatcher(field_name = \"job_code_array\", sorted = \"true\"),\n # PAGE 5\n 
\"page5-document_delivery_type\": SimpleMatcher(field_name = \"obtain_way\", adapter = \"DocumentObtainNumberAdapter\"),\n \"page5-phone_number\": FieldAttributeMatcher(field_name = \"person\", attr_name=\"phone.normalised\"),\n # \"page5-email\": FieldAttributeMatcher(field_name = \"person\", attr_name=\"email\")\n # \"page5-zaveritel_type\": FieldAttributeMatcher(field_name = \"\"),\n # \"page5-inn\": FieldAttributeMatcher(field_name = \"\"),\n }\n }\n P21001_SCHEMA = {\"doc_name\" : DocumentTypeEnum.DT_P21001,\n \"file_name_template\" : u\"Заявление на регистрацию ИП\",\n \"batch_statuses\" : [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"fields\" : [\n IP_FOUNDER_FIELD,\n IP_OBTAIN_WAY_FIELD,\n JOB_MAIN_CODE_FIELD,\n JOB_CODE_ARRAY_FIELD,\n ],\n \"validators\" : [{\n \"condition\" : {\n \"#or\" : [{\n \"person->initialized\" : {\n \"#ne\" : True\n }\n }, {\n \"person->address->initialized\" : {\n \"#ne\" : True\n }\n }, {\n \"person->address->region->initialized\" : {\n \"#ne\" : True\n }\n }, {\n \"region->initialized\" : {\n \"#ne\" : True\n }\n }, {\n \"person->address->region\" : \"@region\"\n }]\n },\n \"error\" : {\n \"field\" : \"person.address.region\",\n \"code\" : 5\n }\n }]\n }\n\n IP_STATE_DUTY_SCHEMA = {\n \"doc_name\" : DocumentTypeEnum.DT_IP_STATE_DUTY,\n \"doc_kind\" : DocumentKindEnum.DK_DOWNLOADABLE_FILE,\n \"file_name_template\" : u\"Квитанция на оплату регистрационной пошлины ИП {{person.short_name}}\",\n \"http_method\" : \"post\",\n \"data_template_name\" : get_test_resource_name(config, \"ip/reg_fee_invoice__data.txt\"),\n \"url_template_name\" : get_test_resource_name(config, \"reg_fee_invoice__url.txt\"),\n \"file_name_extension\" : 'pdf',\n \"batch_statuses\" : [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"conditions\" : {\n \"person\" : {\n \"#not_empty\" : True\n }\n },\n \"fields\" : [\n IP_FOUNDER_FIELD\n ]\n }\n\n IP_DOV_FILING_TEMPLATE = {\n \"doc_name\" : DocumentTypeEnum.DT_IP_DOV_FILING_DOCS,\n \"template_name\" : \"ip_dov_filing_docs\",\n \"file_name\" : get_test_resource_name(config, \"ip/dov_filing_receiving_docs.tex\"),\n \"is_strict\" : False,\n }\n\n IP_DOV_FILING_SCHEMA = {\n \"doc_name\" : DocumentTypeEnum.DT_IP_DOV_FILING_DOCS,\n \"doc_kind\" : DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\" : u\"Доверенность на подачу документов\",\n \"batch_statuses\" : [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"validators\" : [{\n \"condition\" : {\n \"ip_responsible_person\" : {\n \"#ne\" : \"@person\"\n }\n },\n \"error\" : {\n \"field\" : \"reg_responsible_person\",\n \"code\" : 5\n }\n }, {\n \"condition\" : {\n \"#or\": [{\n \"same_obtain_trust_person\": False,\n \"reg_responsible_person\" : {\n \"#not_empty\" : True\n }\n }, {\n \"same_obtain_trust_person\": True\n }]\n },\n \"error\" : {\n \"field\" : \"reg_responsible_person\",\n \"code\" : 4\n }\n }, {\n \"condition\" : {\n \"#or\" : [{\n \"obtain_way\" : {\n \"#ne\" : \"responsible_person\"\n },\n }, {\n \"obtain_way\" : \"responsible_person\",\n \"#or\": [{\n \"same_obtain_trust_person\": False,\n \"doc_obtain_person\" : {\n \"#not_empty\" : True\n }\n }, {\n \"same_obtain_trust_person\": True\n }]\n }]\n },\n \"error\" : {\n \"field\" : \"doc_obtain_person\",\n \"code\" : 4\n }\n }],\n \"conditions\": {\n \"same_obtain_trust_person\": False,\n \"registration_way\": \"responsible_person\",\n },\n \"fields\" : [\n IP_FOUNDER_FIELD,\n IP_REGISTRATION_WAY_FIELD,\n IP_REGISTRATION_PERSON_FIELD,\n IP_SAME_OBTAIN_TRUST_PERSON_FIELD,\n DOC_DATE_FIELD,\n {\n 
\"name\": \"reg_responsible_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"suppress_validation_errors\" : {\n \"registration_way\": {\n \"#ne\" : \"responsible_person\"\n }\n }\n },\n {\n \"name\": \"obtain_way\",\n \"type\" : \"DocEnumField\",\n \"enum_cls\" : \"IPDocumentDeliveryTypeStrEnum\",\n \"required\" : False\n }, {\n \"name\": \"doc_obtain_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"suppress_validation_errors\" : True\n }\n ]}\n\n IP_DOV_RECEIVING_TEMPLATE = {\n \"doc_name\" : DocumentTypeEnum.DT_IP_DOV_RECEIVING_DOCS,\n \"template_name\" : \"ip_dov_filing_docs\",\n \"file_name\" : get_test_resource_name(config, \"ip/dov_filing_receiving_docs.tex\"),\n \"is_strict\" : False,\n }\n\n IP_DOV_RECEIVING_SCHEMA = {\n \"doc_name\" : DocumentTypeEnum.DT_IP_DOV_RECEIVING_DOCS,\n \"doc_kind\" : DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\" : u\"Доверенность на получение документов\",\n \"batch_statuses\" : [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"conditions\": {\n \"#and\": [{\n \"#or\": [{\n \"same_obtain_trust_person\": False\n }, {\n \"registration_way\": {\n \"#ne\": \"responsible_person\"\n }\n }],\n }, {\n \"obtain_way\": \"responsible_person\",\n }, {\n \"#or\": [{\n \"reg_responsible_person\": {\n \"#ne\": \"@doc_obtain_person\"\n }\n\n }, {\n \"doc_obtain_person\": {\n \"#empty\": True\n }\n }]\n }]\n },\n \"fields\" : [\n IP_FOUNDER_FIELD,\n IP_OBTAIN_PERSON_FIELD,\n DOC_DATE_FIELD,\n IP_OBTAIN_WAY_FIELD,\n IP_SAME_OBTAIN_TRUST_PERSON_FIELD,\n {\n \"name\": \"reg_responsible_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"suppress_validation_errors\" : True\n },\n {\n \"name\": \"doc_obtain_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False\n }, {\n \"name\": \"registration_way\",\n \"type\": \"DocEnumField\",\n \"enum_cls\": \"IPRegistrationWayEnum\",\n \"required\": False\n }\n ],\n \"validators\" : [{\n \"condition\" : {\n \"#or\": [{\n \"same_obtain_trust_person\": False,\n \"doc_obtain_person\" : {\n \"#not_empty\" : True\n }\n }, {\n \"same_obtain_trust_person\": True,\n \"registration_way\": \"responsible_person\",\n \"reg_responsible_person\": {\n \"#empty\": True\n }\n }]\n },\n \"error\" : {\n \"field\" : \"doc_obtain_person\",\n \"code\" : 4\n }\n }]\n }\n\n IP_DOV_FILING_RECEIVING_TEMPLATE = {\n \"doc_name\" : DocumentTypeEnum.DT_IP_DOV_FILING_RECEIVING_DOCS,\n \"template_name\" : \"ip_dov_filing_docs\",\n \"file_name\" : get_test_resource_name(config, \"ip/dov_filing_receiving_docs.tex\"),\n \"is_strict\" : False,\n }\n\n IP_DOV_FILING_RECEIVING_SCHEMA = {\n \"doc_name\" : DocumentTypeEnum.DT_IP_DOV_FILING_RECEIVING_DOCS,\n \"doc_kind\" : DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\" : u\"Доверенность на получение и подачу документов\",\n \"batch_statuses\" : [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"validators\" : [{\n \"condition\" : {\n \"#or\": [{\n \"same_obtain_trust_person\": True,\n \"reg_responsible_person\" : {\n \"#not_empty\" : True\n }\n }, {\n \"same_obtain_trust_person\": False\n }]\n },\n \"error\" : {\n \"field\" : \"reg_responsible_person\",\n \"code\" : 4\n }\n }],\n \"conditions\": {\n \"same_obtain_trust_person\": True,\n \"registration_way\": \"responsible_person\",\n \"obtain_way\": \"responsible_person\",\n },\n \"fields\" : [\n IP_FOUNDER_FIELD,\n IP_OBTAIN_PERSON_FIELD,\n 
IP_SAME_OBTAIN_TRUST_PERSON_FIELD,\n DOC_DATE_FIELD,\n IP_OBTAIN_WAY_FIELD,\n IP_REGISTRATION_WAY_FIELD,\n {\n \"name\": \"reg_responsible_person\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False\n }\n ]}\n\n ################################################################################################################\n\n IP_REG_BATCH_SCHEMA = {\n \"doc_name\" : DocumentBatchTypeEnum.DBT_NEW_IP,\n \"fields\" : [\n JOB_MAIN_CODE_FIELD,\n JOB_CODE_ARRAY_FIELD,\n TAXATION_TYPE_FIELD,\n USN_TAX_TYPE_FIELD,\n IP_FOUNDER_FIELD,\n IP_REGISTRATION_WAY_FIELD,\n IP_OBTAIN_WAY_FIELD,\n IP_OBTAIN_PERSON_FIELD,\n IP_REGISTRATION_PERSON_FIELD,\n REGISTRATION_PERSON_FIELD,\n OBTAIN_PERSON_FIELD,\n IP_SAME_OBTAIN_TRUST_PERSON_FIELD\n ]\n }\n\n REGISTRATION_ADDRESS_FIELD = copy.copy(ADDRESS_FIELD)\n REGISTRATION_ADDRESS_FIELD['name'] = 'registration_address'\n\n IP_REG_RESULT_FIELDS = [\n {\n \"name\": \"ifns\",\n \"type\": \"calculated\",\n \"field_type\": \"DocIntField\",\n \"required\": False,\n \"value\": {\n \"#cases\" : {\n \"list\" : [{\n \"conditions\" : {\n \"person->initialized\" : True\n },\n \"value\" : {\n \"#field\": \"person->address->ifns\"\n }\n }],\n \"default\" : {\n \"value\" : {\n \"#value\": 0,\n }\n }\n }\n }\n },\n {\n \"name\" : \"ifns_reg_info\",\n \"type\" : \"calculated\",\n \"field_type\" : \"IfnsRegInfoField\",\n \"required\" : False,\n \"value\" : {\n \"#cases\" : {\n \"list\": [{\n \"conditions\" : {\n \"person->initialized\" : True\n },\n \"value\" : {\n \"#exec\" : {\n \"module\" : \"ip_reg_methods\",\n \"method\" : \"get_company_registration_info\",\n \"kwargs\" : {\n \"batch_id\" : {\n \"#field\" : \"<batch_id>\"\n }\n }\n }\n }\n }],\n \"default\" : {\n \"value\" : {\n \"#value\": \"\",\n }\n }\n }\n }\n },\n {\n \"name\": \"person_genitive\",\n \"type\": \"calculated\",\n \"field_type\": \"DocTextField\",\n \"required\": False,\n \"value\" : {\n \"#cases\" : {\n \"list\": [{\n \"conditions\" : {\n \"person->initialized\" : True\n },\n \"value\" : {\n \"#method\": {\n \"obj\" : \"person\",\n \"method\" : \"get_full_name\",\n \"kwargs\" : {\n \"declension\": \"gen\",\n }\n }\n }\n }],\n \"default\" : {\n \"value\" : {\n \"#value\": \"\",\n }\n }\n }\n },\n },\n {\n \"name\": \"person_dative\",\n \"type\": \"calculated\",\n \"field_type\": \"DocTextField\",\n \"required\": False,\n \"value\" : {\n \"#cases\" : {\n \"list\": [{\n \"conditions\" : {\n \"person->initialized\" : True\n },\n \"value\" : {\n \"#method\": {\n \"obj\" : \"person\",\n \"method\" : \"get_full_name\",\n \"kwargs\" : {\n \"declension\": \"dat\",\n }\n }\n }\n }],\n \"default\" : {\n \"value\" : {\n \"#value\": \"\",\n }\n }\n }\n },\n }\n ]\n\n IP_ESHN_SCHEMA = {\n \"doc_name\" : DocumentTypeEnum.DT_IP_ESHN_CLAIM,\n \"file_name_template\" : u\"Заявление на ЕСХН\",\n \"batch_statuses\" : [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"conditions\" : {\n \"taxation_type\" : TaxType.TT_ESHN\n },\n \"fields\" : [\n IP_FOUNDER_FIELD,\n DOC_DATE_FIELD,\n JOB_MAIN_CODE_FIELD\n ],\n \"external_validators\" : [\"ip_eshn_tax_type\"]\n }\n\n IP_ESHN_TEMPLATE = {\n \"doc_name\" : DocumentTypeEnum.DT_IP_ESHN_CLAIM,\n \"template_name\" : \"strict_template1__ip_eshn\",\n \"is_strict\" : True,\n \"pages\" : [{\n \"page_file\": [get_test_resource_name(config, \"new_eshn.pdf\")],\n \"array_fields\" : [\n {\n \"name\" : \"full_name__line{{item}}\",\n \"count\" : 4,\n \"field-length\" : 40,\n \"case\" : \"upper\"\n }, {\n \"name\" : \"applicant__line{{item}}\",\n \"count\" : 
3,\n \"field-length\" : 20,\n \"case\" : \"upper\"\n }, {\n \"name\" : \"applicant_doc__line{{item}}\",\n \"field-length\" : 20,\n \"case\" : \"upper\",\n \"count\" : 2,\n },\n ],\n \"fields\" : [\n {\n \"name\": \"inn\",\n \"field-length\": 12\n }, {\n \"name\": \"kpp\",\n \"field-length\": 9\n }, {\n \"name\" : \"ifns\",\n \"field-length\" : 4\n }, {\n \"name\" : \"priznak_zayavitelya\",\n \"field-length\" : 1\n }, {\n \"name\" : \"perehod\",\n \"field-length\" : 1\n }, {\n \"name\" : \"phone\",\n \"field-length\" : 20\n }, {\n \"name\" : \"applicant_type\",\n \"field-length\" : 1\n }, {\n \"name\" : \"doc_date__day\",\n \"field-length\" : 2,\n \"text-align\": \"right\",\n \"space-filler\" : u\"0\",\n }, {\n \"name\" : \"doc_date__month\",\n \"field-length\" : 2,\n \"text-align\": \"right\",\n \"space-filler\" : u\"0\",\n }, {\n \"name\" : \"doc_date__year\",\n \"field-length\" : 4\n }, {\n \"name\" : \"dohod_maj\",\n \"field-length\" : 3\n }, {\n \"name\" : \"dohod_min\",\n \"field-length\" : 2\n }, {\n \"name\" : \"dohod_percent\",\n \"field-length\" : 1\n }\n ]\n }]\n }\n\n IP_ESHN_MATCHER = {\n \"doc_name\" : DocumentTypeEnum.DT_IP_ESHN_CLAIM,\n \"template_name\" : IP_ESHN_TEMPLATE['template_name'],\n \"fields\" : {\n \"applicant__line{{item}}\" : MultilineFieldMatcher(field_name = \"person\", attr_name=\"full_name\"),\n # \"applicant_doc__line{{item}}\" : MultilineFieldMatcher(field_name = \"person\", attr_name=\"full_name\"),\n \"phone\" : FieldAttributeMatcher(field_name = \"person\", attr_name=\"phone.normalised\"),\n \"full_name__line{{item}}\" : MultilineFieldMatcher(field_name = \"person\", attr_name=\"full_name\"),\n \"inn\" : FieldAttributeMatcher(field_name = \"person\", attr_name=\"inn\", default_value=u\"————————————\"),\n \"kpp\" : ConstantMatcher(value=u\"————————————\"),\n \"ifns\" : FieldAttributeMatcher(field_name=\"person\", attr_name=\"address.ifns_number\"),\n \"priznak_zayavitelya\" : ConstantMatcher(value=1),\n \"perehod\" : ConstantMatcher(value=2),\n \"applicant_type\" : ConstantMatcher(value=1),\n \"doc_date__day\" : FieldAttributeMatcher(field_name=\"doc_date\", attr_name=\"day\"),\n \"doc_date__month\" : FieldAttributeMatcher(field_name=\"doc_date\", attr_name=\"month\"),\n \"doc_date__year\" : FieldAttributeMatcher(field_name=\"doc_date\", attr_name=\"year\"),\n \"dohod_maj\" : ConstantMatcher(value=u\"————————————\"),\n \"dohod_min\" : ConstantMatcher(value=u\"————————————\"),\n \"dohod_percent\" : ConstantMatcher(value=u\"————————————\")\n }\n }\n\n IP_USN_SCHEMA = {\n \"doc_name\" : DocumentTypeEnum.DT_IP_USN_CLAIM,\n \"file_name_template\" : u\"Заявление на УСН\",\n \"batch_statuses\" : [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"conditions\" : {\n \"taxation_type\" : 'usn'\n },\n \"fields\" : [\n IP_FOUNDER_FIELD,\n DOC_DATE_FIELD,\n JOB_MAIN_CODE_FIELD,\n {\n \"name\" : \"tax_type\",\n \"type\" : \"DocEnumField\",\n \"enum_cls\" : \"UsnTaxType\",\n \"required\" : True,\n }\n ],\n \"external_validators\" : [\"ip_usn_tax_type\"]\n }\n\n IP_USN_TEMPLATE = {\n \"doc_name\" : DocumentTypeEnum.DT_IP_USN_CLAIM,\n \"template_name\" : \"strict_template1__ip_usn\",\n \"is_strict\" : True,\n \"pages\" : [{\n \"page_file\": get_test_resource_name(config, \"new_usn.pdf\"),\n \"array_fields\" : [\n {\n \"name\" : \"name_line{{item}}\",\n \"count\" : 4,\n \"field-length\" : 40,\n \"case\" : \"upper\"\n }, {\n \"name\" : \"applicant-name__line{{item}}\",\n \"count\" : 3,\n \"field-length\" : 20,\n \"case\" : \"upper\"\n }, {\n \"name\" : 
\"agent-doc-name__line{{item}}\",\n \"field-length\" : 20,\n \"case\" : \"upper\",\n \"count\" : 2,\n }\n ],\n \"fields\" : [\n {\n \"name\": \"inn\",\n \"field-length\": 12\n }, {\n \"name\": \"kpp\",\n \"field-length\": 9\n }, {\n \"name\" : \"kod_nalog_organa\",\n \"field-length\" : 4\n }, {\n \"name\" : \"priznak_zayavitelya\",\n \"field-length\" : 1\n }, {\n \"name\" : \"perehod\",\n \"field-length\" : 1\n }, {\n \"name\" : \"god_zayavleniya\",\n \"text-align\": \"right\",\n \"field-length\" : 1\n }, {\n \"name\" : \"phone\",\n \"field-length\" : 20\n }, {\n \"name\" : \"applicant-type\",\n \"field-length\" : 1\n }, {\n \"name\" : \"current-date__day\",\n \"field-length\" : 2,\n \"text-align\": \"right\",\n \"space-filler\" : u\"0\",\n }, {\n \"name\" : \"current-date__month\",\n \"field-length\" : 2,\n \"text-align\": \"right\",\n \"space-filler\" : u\"0\",\n }, {\n \"name\" : \"current-date__year\",\n \"field-length\" : 4,\n \"text-align\": \"right\",\n }, {\n \"name\" : \"dohod\",\n \"field-length\" : 1\n }, {\n \"name\" : \"polucheno_dohodov\",\n \"field-length\" : 9\n }, {\n \"name\" : \"god_podachi_uvedomleniya\",\n \"field-length\" : 2,\n \"text-align\": \"right\"\n }, {\n \"name\" : \"ostatok\",\n \"field-length\" : 9\n }\n ]\n }],\n }\n\n IP_USN_MATCHER = {\n \"doc_name\": DocumentTypeEnum.DT_IP_USN_CLAIM,\n \"template_name\": IP_USN_TEMPLATE['template_name'],\n \"fields\": {\n \"applicant-name__line{{item}}\": MultilineFieldMatcher(field_name = \"person\", attr_name=\"full_name\"),\n # \"agent-doc-name__line{{item}}\": MultilineFieldMatcher(field_name = \"person\", attr_name=\"full_name\"),\n \"phone\": FieldAttributeMatcher(field_name = \"person\", attr_name=\"phone.normalised\"),\n \"name_line{{item}}\": MultilineFieldMatcher(field_name = \"person\", attr_name=\"full_name\"),\n \"inn\": FieldAttributeMatcher(field_name = \"person\", attr_name=\"inn\", default_value=u\"————————————\"),\n \"kpp\": ConstantMatcher(value=u\"————————————\"),\n \"polucheno_dohodov\" : ConstantMatcher(value=u\"————————————\"),\n \"ostatok\" : ConstantMatcher(value=u\"————————————\"),\n \"kod_nalog_organa\": FieldAttributeMatcher(field_name=\"person\", attr_name=\"address.ifns_number\"),\n \"priznak_zayavitelya\": ConstantMatcher(value=1),\n \"perehod\": ConstantMatcher(value=2),\n \"god_zayavleniya\": FieldAttributeMatcher(field_name=\"doc_date\", attr_name=\"year\"),\n\n \"applicant-type\": ConstantMatcher(value=1),\n \"current-date__day\": FieldAttributeMatcher(field_name=\"doc_date\", attr_name=\"day\"),\n \"current-date__month\": FieldAttributeMatcher(field_name=\"doc_date\", attr_name=\"month\"),\n \"current-date__year\": FieldAttributeMatcher(field_name=\"doc_date\", attr_name=\"year\"),\n\n \"dohod\": SimpleMatcher(field_name = \"tax_type\", adapter = \"UsnTaxTypeAdapter\"),\n \"god_podachi_uvedomleniya\" : FieldAttributeMatcher(field_name=\"doc_date\", attr_name=\"year\"),\n }\n }\n\n IP_LETTER_INVENTORY_SCHEMA = {\n \"doc_name\" : DocumentTypeEnum.DT_IP_LETTER_INVENTORY,\n \"doc_kind\" : DocumentKindEnum.DK_TEX_TEMPLATE,\n \"file_name_template\" : u\"Опись для ценного письма\",\n \"batch_statuses\" : [BatchStatusEnum.BS_EDITED, BatchStatusEnum.BS_NEW],\n \"fields\" : [\n IP_FOUNDER_FIELD,\n IP_REGISTRATION_WAY_FIELD,\n TAXATION_TYPE_FIELD,\n {\n \"name\" : \"ifns_name\",\n \"type\" : \"calculated\",\n \"field_type\" : \"DocTextField\",\n \"required\" : False,\n \"value\" : {\n \"#field\" : \"person->address->ifns_name_safe\"\n }\n }, {\n \"name\" : \"ifns_address\",\n \"type\" : 
\"calculated\",\n \"field_type\" : \"DocTextField\",\n \"required\" : False,\n \"value\" : {\n \"#field\" : \"person->address->ifns_address_safe\"\n }\n }, {\n \"name\" : \"usn\",\n \"type\" : \"calculated\",\n \"field_type\" : \"DocBoolField\",\n \"required\" : False,\n \"value\" : {\n \"#cases\" : {\n \"list\" : [{\n \"conditions\" : {\n \"taxation_type\" : TaxType.TT_USN\n },\n \"value\" : {\n \"#value\" : True\n }\n }],\n \"default\" : {\n \"value\" : {\n \"#value\" : False\n }\n }\n }\n }\n }, {\n \"name\" : \"eshn\",\n \"type\" : \"calculated\",\n \"field_type\" : \"DocBoolField\",\n \"required\" : False,\n \"value\" : {\n \"#cases\" : {\n \"list\" : [{\n \"conditions\" : {\n \"taxation_type\" : TaxType.TT_ESHN\n },\n \"value\" : {\n \"#value\" : True\n }\n }],\n \"default\" : {\n \"value\" : {\n \"#value\" : False\n }\n }\n }\n }\n }\n ],\n \"conditions\" : {\n \"registration_way\" : IPRegistrationWayEnum.IP_RW_MAIL\n }\n }\n\n IP_LETTER_INVENTORY_TEMPLATE = {\n \"doc_name\" : DocumentTypeEnum.DT_IP_LETTER_INVENTORY,\n \"template_name\" : \"ip_letter_inventory\",\n \"file_name\" : get_test_resource_name(config, \"ip/letter_inventory.tex\"),\n \"is_strict\" : False\n }\n\n IP_REG_DEFER_DOCS = [DocumentTypeEnum.DT_IP_STATE_DUTY]\n\n return {\n \"P21001_TEMPLATE\" : P21001_TEMPLATE,\n \"P21001_MATCHER\" : P21001_MATCHER,\n \"P21001_SCHEMA\" : P21001_SCHEMA,\n \"IP_STATE_DUTY_SCHEMA\": IP_STATE_DUTY_SCHEMA,\n \"IP_DOV_FILING_SCHEMA\": IP_DOV_FILING_SCHEMA,\n \"IP_DOV_FILING_TEMPLATE\": IP_DOV_FILING_TEMPLATE,\n \"IP_DOV_RECEIVING_SCHEMA\": IP_DOV_RECEIVING_SCHEMA,\n \"IP_DOV_RECEIVING_TEMPLATE\": IP_DOV_RECEIVING_TEMPLATE,\n \"IP_DOV_FILING_RECEIVING_SCHEMA\": IP_DOV_FILING_RECEIVING_SCHEMA,\n \"IP_DOV_FILING_RECEIVING_TEMPLATE\": IP_DOV_FILING_RECEIVING_TEMPLATE,\n \"IP_ESHN_SCHEMA\": IP_ESHN_SCHEMA,\n \"IP_ESHN_TEMPLATE\": IP_ESHN_TEMPLATE,\n \"IP_ESHN_MATCHER\": IP_ESHN_MATCHER,\n \"IP_USN_SCHEMA\": IP_USN_SCHEMA,\n \"IP_USN_TEMPLATE\": IP_USN_TEMPLATE,\n \"IP_USN_MATCHER\": IP_USN_MATCHER,\n \"IP_LETTER_INVENTORY_SCHEMA\" : IP_LETTER_INVENTORY_SCHEMA,\n \"IP_LETTER_INVENTORY_TEMPLATE\" : IP_LETTER_INVENTORY_TEMPLATE,\n\n \"IP_REG_BATCH_SCHEMA\" : IP_REG_BATCH_SCHEMA,\n \"IP_REG_RESULT_FIELDS\" : IP_REG_RESULT_FIELDS,\n \"IP_REG_DEFER_DOCS\" : IP_REG_DEFER_DOCS\n }\n\n\n" }, { "alpha_fraction": 0.7286356687545776, "alphanum_fraction": 0.730134904384613, "avg_line_length": 32.349998474121094, "blob_id": "a85eb8fdea66d78d830e52a0a5a5d3c403f9a706", "content_id": "53ada144ad7162122c64a8c7a206844b7f902d31", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 667, "license_type": "no_license", "max_line_length": 78, "num_lines": 20, "path": "/app/fw/async_tasks/models.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom bson.objectid import ObjectId\nfrom sqlalchemy.sql.functions import func\nfrom sqlalchemy import Column, String, Boolean, DateTime\nfrom sqlalchemy.dialects.postgresql import JSONB\n\nfrom fw.db.sql_base import db\n\n\nclass CeleryScheduledTask(db.Model):\n\n __tablename__ = 'celery_scheduled_task'\n\n id = Column(String, primary_key=True, default=lambda: unicode(ObjectId()))\n task_name = Column(String)\n created = Column(DateTime, default=func.now(), nullable=False)\n eta = Column(DateTime, nullable=False)\n sent = Column(Boolean, default=False)\n args = Column(JSONB, nullable=True)\n kwargs = Column(JSONB, nullable=True)\n" }, { "alpha_fraction": 
0.660260796546936, "alphanum_fraction": 0.6609471440315247, "avg_line_length": 35.42499923706055, "blob_id": "b1206a621b8ed4812cff543264dbd305f438cd9e", "content_id": "81ff354c4a978fd0e7b8c007df5b7dced4a288a5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1457, "license_type": "no_license", "max_line_length": 128, "num_lines": 40, "path": "/app/deployment_migrations/migration_list/20150821_migrate_lawsuit.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom copy import copy\n\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.db_fields import DocumentBatchDbObject, BatchDocumentDbObject\nfrom fw.documents.enums import DocumentBatchTypeEnum\n\n\ndef forward(config, logger):\n logger.debug(u\"Migrate lawsuit fields\")\n\n m = {\n 'yes': 'success',\n 'no': 'refuse',\n 'notAll': 'partial_success',\n 'tooEarly': 'unknown'\n }\n for batch in DocumentBatchDbObject.query.filter_by(deleted=False, batch_type=DocumentBatchTypeEnum.DBT_OSAGO):\n metadata = batch._metadata or {}\n if not metadata or '_isClaimSubmissionToInsurance' not in metadata or '_insuranceReturnDebtByClaimType' not in metadata:\n continue\n _isClaimSubmissionToInsurance = metadata['_isClaimSubmissionToInsurance']\n _insuranceReturnDebtByClaimType = metadata['_insuranceReturnDebtByClaimType']\n\n make_lawsuit = _isClaimSubmissionToInsurance\n pretension_result = m.get(_insuranceReturnDebtByClaimType, 'unknown')\n\n for doc in BatchDocumentDbObject.query.filter_by(batch=batch):\n dd = copy(doc.data)\n dd.update({\n 'make_lawsuit': make_lawsuit,\n 'pretension_result': pretension_result\n })\n BatchDocumentDbObject.query.filter_by(id=doc.id).update({\n 'data': dd\n })\n sqldb.session.commit()\n\ndef rollback(config, logger):\n pass\n" }, { "alpha_fraction": 0.5452937483787537, "alphanum_fraction": 0.5465571880340576, "avg_line_length": 42.3698616027832, "blob_id": "e9bafe395ded7f2c39107d38f8bfec9d0be9dd0c", "content_id": "703d117b0b93bc46d22fe8377d8d6b1bd00e17cf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 15830, "license_type": "no_license", "max_line_length": 129, "num_lines": 365, "path": "/app/async/views.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime, timedelta\nimport json\nfrom tornado import gen\nfrom async.async_dadata_provider import AsyncDadataProvider\nfrom async.async_ifns_provider import AsyncIfnsProvider, FailedToGetAppointmentData\nfrom async.vews_base import JsonRequestHandler, authorized\nfrom common_utils import int_to_ifns\nfrom custom_exceptions import CacheMiss\nfrom fw.api import errors\nfrom fw.api.args_validators import validate_arguments_tornado, IntValidator, EnumValidator, DateTypeValidator, \\\n JsonValidator, DateTimeValidator, ObjectIdValidator\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.db_fields import DocumentBatchDbObject\nfrom fw.documents.enums import DocumentTypeEnum\nfrom services.ifns.async_tasks import ifns_booking_tasks\nfrom services.ifns.data_model.models import IfnsBookingObject, IfnsBookingTaskStatus\nfrom services.llc_reg.documents.enums import IfnsServiceEnum\n\n\nclass IfnsGetScheduleView(JsonRequestHandler):\n @gen.coroutine\n @authorized\n @validate_arguments_tornado(\n ifns=IntValidator(required=True),\n service=EnumValidator(enum_cls=IfnsServiceEnum, required=True),\n 
datetime=DateTypeValidator(required=True),\n founder_applicant=JsonValidator(required=True)\n )\n def get_content_on_post(self, arguments=None, *args, **kwargs):\n\n logger = self.application.logger # todo: ifns logger!\n cache = self.application.cache\n\n ifns = arguments['ifns']\n service = arguments['service']\n dt = arguments['datetime']\n founder_applicant = arguments['founder_applicant']\n service_nalog_ru_url = self.application.config['SERVICE_NALOG_RU_URL']\n\n try:\n company_data, person_data = yield AsyncIfnsProvider.get_company_person_data_for_ifns(founder_applicant,\n self.user.email,\n self.application.db)\n except Exception:\n logger.exception(u\"Failed to collect data\")\n raise errors.InvalidParameterValue(\"founder_applicant\")\n\n try:\n reg_ifns = yield AsyncIfnsProvider.get_registration_ifns(int_to_ifns(ifns), cache,\n service_nalog_ru_url=service_nalog_ru_url)\n except Exception:\n logger.exception(u\"Failed to get registration ifns. Address ifns: %s\" % unicode(ifns))\n raise\n\n reg_ifns_name = reg_ifns['rou']['naimk']\n reg_ifns_addr = reg_ifns['adres']\n try:\n address = yield AsyncDadataProvider.get_detailed_address(reg_ifns_addr, cache)\n if not address:\n raise Exception()\n except Exception:\n logger.exception(u\"Failed to get detailed address. Reg ifns address: %s\" % unicode(reg_ifns_addr))\n raise\n region_name = address['suggestions'][0]['data']['region']\n\n try:\n result = yield AsyncIfnsProvider.get_nalog_ru_time_slots(person_data, company_data, reg_ifns_name, service,\n region_name, cache, logger)\n except errors.IfnsServiceUnavailable, ex:\n logger.exception(u\"Failed to get schedule from ifns. Trying to get cached value\")\n try:\n result = yield AsyncIfnsProvider.get_nalog_ru_time_slots_cached(not company_data, reg_ifns_name,\n service, region_name, cache, logger)\n if len(result) < 8:\n last_found_day = datetime.strptime(result[-1]['date'], \"%Y-%m-%d\")\n result += AsyncIfnsProvider.get_nalog_ru_default_time_slots(region_name, reg_ifns_name,\n not company_data,\n first_day=last_found_day,\n days_to_get=8 - len(result))\n except CacheMiss, ex:\n logger.exception(u\"Nothing in cache: returning defaults\")\n result = AsyncIfnsProvider.get_nalog_ru_default_time_slots(region_name, reg_ifns_name, not company_data)\n\n all_time_slots = result\n\n td_min = timedelta(seconds=99999999)\n nearest_time = datetime.strptime(all_time_slots[0]['date'], \"%Y-%m-%d\")\n slots = all_time_slots[0]['time_slots']\n\n for slot in all_time_slots:\n cur_date = datetime.strptime(slot['date'], \"%Y-%m-%d\")\n td_cur = cur_date - dt if (cur_date > dt) else (dt - cur_date)\n if td_cur < td_min:\n td_min = td_cur\n nearest_time = cur_date\n slots = slot['time_slots']\n\n logger.debug(u\"Ifns schedule - succeeded. 
Nearest time: %s, Slots: %s\" % (\n nearest_time.strftime(\"%Y-%m-%d\"), json.dumps(slots)))\n raise gen.Return({'result': {\n 'nearest_time': nearest_time.strftime(\"%Y-%m-%d\"),\n 'slots': slots\n }})\n\n\nclass IfnsMakeBookingView(JsonRequestHandler):\n @gen.coroutine\n @authorized\n @validate_arguments_tornado(\n ifns=IntValidator(required=True),\n service=EnumValidator(enum_cls=IfnsServiceEnum, required=True),\n datetime=DateTimeValidator(required=True),\n founder_applicant=JsonValidator(required=True),\n batch_id=ObjectIdValidator(required=True),\n reg_responsible_person=ObjectIdValidator(required=False)\n )\n def get_content_on_post(self, arguments=None, *args, **kwargs):\n\n logger = self.application.logger # todo: ifns logger!\n cache = self.application.cache\n\n ifns = arguments['ifns']\n service = arguments['service']\n dt = arguments['datetime']\n founder_applicant = arguments['founder_applicant']\n batch_id = arguments['batch_id']\n reg_responsible_person = arguments.get('reg_responsible_person', None)\n service_nalog_ru_url = self.application.config['SERVICE_NALOG_RU_URL']\n\n try:\n company_data, person_data = yield AsyncIfnsProvider.get_company_person_data_for_ifns(founder_applicant,\n self.user.email,\n self.application.db)\n except Exception:\n logger.exception(u\"Failed to collect data\")\n raise errors.InvalidParameterValue(\"founder_applicant\")\n\n try:\n reg_ifns = yield AsyncIfnsProvider.get_registration_ifns(int_to_ifns(ifns), cache,\n service_nalog_ru_url=service_nalog_ru_url)\n except Exception:\n logger.exception(u\"Failed to get registration ifns. Address ifns: %s\" % unicode(ifns))\n raise\n\n reg_ifns_name = reg_ifns['rou']['naimk']\n reg_ifns_addr = reg_ifns['adres']\n try:\n address = yield AsyncDadataProvider.get_detailed_address(reg_ifns_addr, cache)\n if not address:\n raise Exception()\n except Exception:\n logger.exception(u\"Failed to get detailed address. Reg ifns address: %s\" % unicode(reg_ifns_addr))\n raise\n region_name = address['suggestions'][0]['data']['region']\n\n # todo: remove booking tasks with the same batch_id:service. 
(remove objects (in statuses new & progress) & cancel tasks)\n try:\n result = yield AsyncIfnsProvider.book_ifns(person_data, company_data, reg_ifns_name, service, region_name,\n dt, reg_responsible_person, cache, logger)\n except errors.IfnsServiceUnavailable, ex:\n logger.exception(u\"Failed to book ifns\")\n booking_obj = IfnsBookingObject(**{\n \"batch_id\": batch_id,\n \"person_data\": person_data,\n \"company_data\": company_data,\n \"reg_ifns_name\": reg_ifns_name,\n \"service\": service,\n \"region_name\": region_name,\n \"reg_date\": dt.strftime(\"%Y-%m-%dT%H:%M:%S\"),\n \"reg_responsible_person\": reg_responsible_person,\n \"status\": IfnsBookingTaskStatus.BTS_NEW,\n \"error_info\": None,\n \"user_email\": self.user.email\n })\n sqldb.session.add(booking_obj)\n sqldb.session.commit()\n ifns_booking_task_id = booking_obj.id\n ifns_booking_tasks.book_ifns(str(ifns_booking_task_id))\n raise gen.Return({\n 'error': True,\n 'error_type': \"booking_queued\"\n })\n except FailedToGetAppointmentData, ex:\n ifns_booking_tasks.find_appointment_data(ex.apt_code, str(batch_id), dt.strftime(\"%Y-%m-%dT%H:%M:%S\"),\n self.user.email, service)\n raise gen.Return({\n 'error': True,\n 'error_type': \"reserved_but_no_data\"\n })\n\n result_value = None\n if result:\n try:\n booking = IfnsBookingObject(\n ifns=result['ifns'],\n service=result['service'],\n service_id=service,\n date=result['date'],\n window=result['window'],\n address=result['address'],\n phone=result['phone'],\n how_to_get=result['how_to_get'],\n code=result['code'],\n _discarded=False,\n batch_id=batch_id\n )\n sqldb.session.add(booking)\n sqldb.session.commit()\n\n logger.debug(u\"Reserverd ifns. \")\n result_value = {\n 'result': {\n \"ifns\": result['ifns'],\n \"id\": booking.id,\n \"service\": result['service'],\n \"service_id\": service,\n \"date\": result['date'],\n \"window\": result['window'],\n \"address\": result['address'],\n \"phone\": result['phone'],\n \"how_to_get\": result['how_to_get'],\n \"code\": result['code']\n }\n }\n except Exception:\n logger.exception(u\"Failed to save booking!\")\n raise errors.ServerError()\n\n if result_value:\n raise gen.Return(result_value)\n logger.error(u\"Failed to reserve ifns due to unknown reason.\")\n raise errors.ServerError()\n\n\nclass IfnsBookingView(JsonRequestHandler):\n @gen.coroutine\n @authorized\n @validate_arguments_tornado(\n batch_id=ObjectIdValidator(required=True)\n )\n def get_content_on_get(self, arguments=None, *args, **kwargs):\n result_values = []\n logger = self.application.logger # todo: ifns logger!\n batch_id = arguments['batch_id']\n\n booking_col = None # todo: IfnsBookingDbModel.get_collection(self.application.db)\n r, error = yield gen.Task(booking_col.find, {\n 'batch_id': batch_id,\n '_discarded': {\n \"$ne\": True}\n })\n booking_cursor = r[0]\n for book in booking_cursor:\n result_values.append({\n 'id': unicode(book['_id']),\n 'ifns': book['ifns'],\n 'service': book['service'],\n 'service_id': book['service_id'],\n 'date': book['date'],\n 'window': book['window'],\n 'address': book['address'],\n 'phone': book['phone'],\n 'how_to_get': book['how_to_get'],\n 'code': book['code']\n })\n raise gen.Return({'result': result_values})\n\n\nclass IfnsNameView(JsonRequestHandler):\n @gen.coroutine\n @authorized\n @validate_arguments_tornado(\n batch_id=ObjectIdValidator(required=True)\n )\n def get_content_on_get(self, arguments=None, *args, **kwargs):\n logger = self.application.logger # todo: ifns logger!\n cache = self.application.cache\n batch_id = 
arguments['batch_id']\n config = self.application.config\n\n null_res = {'result': \"\"}\n\n db = self.application.db\n batch_col = DocumentBatchDbObject.get_collection(db)\n\n r, error = yield gen.Task(batch_col.find_one, {'_id': batch_id})\n batch_db = r[0]\n\n if not batch_db:\n logger.error(u\"No such batch %s\" % unicode(batch_id))\n raise gen.Return(null_res)\n docs = batch_db.get('documents', [])\n if not docs:\n logger.error(u\"No documents in batch %s\" % unicode(batch_id))\n raise gen.Return(null_res)\n\n address = None\n for doc in docs:\n if 'document_type' not in doc:\n continue\n if doc['document_type'] == DocumentTypeEnum.DT_ARTICLES:\n address = doc['data'].get('address', None)\n break\n\n if not address:\n logger.error(u\"Failed to get address\")\n raise gen.Return(null_res)\n\n ifns = address.get('ifns', None)\n if not ifns:\n address_string = address.get('address_string')\n if not address_string:\n logger.error(u\"Empty address string\")\n raise gen.Return(null_res)\n detailed_address = yield AsyncDadataProvider.get_detailed_address(address_string, cache)\n if not detailed_address:\n logger.error(u\"Failed to get detailed address from address string %s\" % address_string)\n raise gen.Return(null_res)\n\n suggestions = detailed_address.get(\"suggestions\", [])\n if not suggestions:\n logger.error(u\"Empty suggestions\")\n raise gen.Return(null_res)\n ifns = suggestions[0]['data'].get(\"tax_office\", None)\n if not ifns:\n logger.error(u\"Null ifns\")\n raise gen.Return(null_res)\n\n ifns_data = yield AsyncIfnsProvider.get_ifns_by_code(ifns, config['SERVICE_NALOG_RU_URL'], cache, logger)\n if not ifns_data:\n logger.error(u\"Failed to get ifns %s data\" % unicode(ifns))\n raise gen.Return(null_res)\n\n raise gen.Return({'result': ifns_data.rou.naimk})\n\n\nclass IfnsDiscardBookingView(JsonRequestHandler):\n @gen.coroutine\n @authorized\n @validate_arguments_tornado(\n booking_id=ObjectIdValidator(required=True)\n )\n def get_content_on_post(self, arguments=None, *args, **kwargs):\n logger = self.application.logger # todo: ifns logger!\n booking_id = arguments['booking_id']\n logger.debug(u\"Trying to cancel ifns booking %s\" % unicode(booking_id))\n\n booking_col = None # todo: IfnsBookingDbModel.get_collection(self.application.db)\n r, error = yield gen.Task(booking_col.update, {\n '_id': booking_id,\n '_discarded': {\n \"$ne\": True\n }\n }, {'$set': {\n '_discarded': True\n }})\n # result = r[0]\n #\n # if not result:\n # logger.debug(u\"Ifns booking %s was not found\" % unicode(booking_id))\n # raise errors.IfnsBookingNotFound()\n\n logger.debug(u\"Ifns booking %s canceled\" % unicode(booking_id))\n raise gen.Return({'result': True})\n" }, { "alpha_fraction": 0.5300794839859009, "alphanum_fraction": 0.5323495864868164, "avg_line_length": 28.33333396911621, "blob_id": "cd4533f9cca1f7b46cd7a00100708a24024a1cd7", "content_id": "370124369e01dacf254198ca146dfcaa6dde98fb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 881, "license_type": "no_license", "max_line_length": 117, "num_lines": 30, "path": "/app/fw/monitoring_utils/zabbix_sender.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport shlex\nimport subprocess\n\ndef send(key, val):\n try:\n cmd = 'zabbix_sender -c /etc/zabbix/zabbix_agentd.conf -k %s -o %s' % (unicode(key),unicode(val))\n p = subprocess.Popen(shlex.split(cmd), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n 
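# communicate() blocks until zabbix_sender exits and returns (stdout, stderr);\n        # a non-zero exit status is surfaced to the caller by the check below.\n        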
output, err = p.communicate()\n        rc = p.returncode\n    except Exception:\n        return\n    if rc != 0:\n        raise Exception(err or output)\n\n    return output\n\ndef zabbixed(key, val):\n    def wrapper(f):\n        def deco(*args, **kwargs):\n            try:\n                return f(*args, **kwargs)\n            except:\n                if isinstance(val, list) or isinstance(val, tuple):\n                    for v in val:\n                        send(key, v)\n                else:\n                    send(key, val)\n        return deco\n    return wrapper\n\n" }, { "alpha_fraction": 0.6058317422866821, "alphanum_fraction": 0.6087686419487, "avg_line_length": 38.073768615722656, "blob_id": "0c8f8a3186c096dc4cca3713a47abcf34af1d96d", "content_id": "33ef401fdffb47b0386711914dfc39eb8e521b98", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4767, "license_type": "no_license", "max_line_length": 126, "num_lines": 122, "path": "/app/fw/auth/social_services/vk_backend.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport requests\nfrom fw.api import errors\nfrom fw.auth.social_services.social_backend import SocialBackend\nfrom fw.auth.social_services.social_models import SocialUserLink, SocialServiceEnum\nfrom fw.db.sql_base import db as sqldb\n\nVK_API_URL = 'https://api.vk.com/method/'\n\n\nclass VkBackend(SocialBackend):\n    @staticmethod\n    def is_token_expired(data):\n        return 'error' in data and 'error_code' in data['error'] and data['error']['error_code'] in (2, 4, 5, 7)\n\n    @staticmethod\n    def call_vk_method(method, **kwargs):\n        response = requests.get(VK_API_URL + method + u'?' + u'&'.join([u\"%s=%s\" % (k, v) for k, v in kwargs.items()]))\n        if response.status_code != 200:\n            raise errors.SocialAuthError()\n\n        data = response.json()\n        if not data:\n            if VkBackend.is_token_expired(data):\n                raise errors.RenewAuthTokenError()\n            else:\n                raise errors.SocialAuthError('Failed to decode server answer')\n\n        return data\n\n    @staticmethod\n    def get_user_data(config, access_token):\n        vk_api_version = \"3\"\n        data = VkBackend.call_vk_method('users.get', v=vk_api_version, access_token=access_token)\n\n        if 'response' not in data:\n            raise errors.SocialAuthError('Failed to decode server answer. data=%s; api version=%s; access_token=%s' % (\n                data, vk_api_version, access_token))\n\n        user_data = data['response'][0]\n        try:\n            user_id = int(user_data['uid'])\n        except ValueError:\n            raise errors.SocialAuthError('Failed to decode server answer. 
data=%s; api version=%s; access_token=%s' % (\n data, vk_api_version, access_token))\n\n return dict(id=user_id)\n\n @staticmethod\n def get_user_link(social_uid):\n return SocialUserLink.query.filter_by(uid=str(social_uid), service_id=SocialServiceEnum.SS_VK).first()\n\n @staticmethod\n def make_link(access_token, social_uid, auth_user, config):\n link = SocialUserLink(\n uid=social_uid,\n user=auth_user,\n service_id=SocialServiceEnum.SS_VK,\n access_token=access_token\n )\n sqldb.session.add(link)\n sqldb.session.commit()\n return link\n\n @staticmethod\n def get_token_url(config, next_page=\"/\"):\n if next_page.startswith(\"/\"):\n next_page = next_page[1:]\n vk_api_version = config['vk_api_version']\n permissions = config['vk_app_permissions']\n vk_app_id = config['vk_app_id']\n redirect_url = \"%s://%s%s\" % (\n config['WEB_SCHEMA'], config['api_url'], config['vk_auth_redirect_url'] + next_page)\n return \"https://oauth.vk.com/authorize?client_id=%d&scope=%d&redirect_uri=%s&display=page&v=%s&response_type=code\" % (\n vk_app_id, permissions, redirect_url, vk_api_version)\n\n @staticmethod\n def new_post(post_content, auth_user, config, link_to_attach=None):\n link = SocialUserLink.query.filter_by(user=auth_user, service_id=SocialServiceEnum.SS_VK).first()\n if not link or not link.access_token:\n raise errors.SocialAuthError()\n\n try:\n params = dict(v=config['vk_api_version'], access_token=link.access_token, message=post_content)\n if link_to_attach:\n params['attachments'] = link_to_attach\n\n response_data = VkBackend.call_vk_method('wall.post', **params)\n except errors.RenewAuthTokenError, exc:\n link.access_token = \"\"\n sqldb.session.commit()\n raise\n\n if 'response' not in response_data:\n raise errors.SocialAuthError('Failed to decode server answer')\n else:\n post_data = dict(id=response_data['response']['post_id'])\n\n return post_data\n\n @staticmethod\n def get_token(code, config, next_page):\n redirect_url = VkBackend.get_token_url(config, next_page=next_page)\n vk_app_id = config['vk_app_id']\n vk_app_secret = config['vk_app_secret']\n url = \"https://oauth.vk.com/access_token?client_id=%s&client_secret=%s&code=%s&redirect_uri=%s\" % (\n unicode(vk_app_id), vk_app_secret, unicode(code), redirect_url)\n result = requests.get(url)\n\n if result.status_code != 200:\n return None, None\n data = result.json()\n return data['access_token'], data\n\n @staticmethod\n def get_profile_url(social_link_object):\n if not social_link_object:\n return\n if not isinstance(social_link_object, dict):\n social_link_object = social_link_object.as_dict()\n if 'uid' in social_link_object:\n return u\"http://vk.com/id%s\" % unicode(social_link_object['uid'])\n" }, { "alpha_fraction": 0.6131707429885864, "alphanum_fraction": 0.6156097650527954, "avg_line_length": 41.93193817138672, "blob_id": "a42b5ced460986bb8c5f92414e97d9c0754c45c7", "content_id": "516d6e95fdb2ac79c18742c514a25258e6a1c8e7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8472, "license_type": "no_license", "max_line_length": 116, "num_lines": 191, "path": "/app/services/ip_reg/ip_reg_manager.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\nfrom fw.api import errors\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.batch_manager import BatchManager\nfrom fw.documents.db_fields import DocumentBatchDbObject, PrivatePersonDbObject, BatchDocumentDbObject\nfrom 
fw.documents.doc_requisites_storage import DocRequisitiesStorage\nfrom fw.documents.enums import DocumentTypeEnum, BatchStatusEnum, DocumentBatchTypeEnum\nfrom fw.documents.fields.doc_fields import DocumentBatch, PrivatePerson\nfrom fw.storage.file_storage import FileStorage\nfrom template_filters import declension\n\n\nclass IpRegBatchManager(BatchManager):\n\n DOC_TITLES = {\n DocumentTypeEnum.DT_P21001: u\"Заявление о государственной регистрации (форма Р21001)\",\n DocumentTypeEnum.DT_IP_STATE_DUTY: u'Квитанция на уплату госпошлины',\n DocumentTypeEnum.DT_IP_DOV_FILING_DOCS: u'Доверенность на подачу документов',\n DocumentTypeEnum.DT_IP_DOV_RECEIVING_DOCS: u'Доверенность на получение документов',\n DocumentTypeEnum.DT_IP_DOV_FILING_RECEIVING_DOCS: u'Доверенность на получение и подачу документов',\n DocumentTypeEnum.DT_IP_USN_CLAIM: u'Заявление о переходе на УСН',\n DocumentTypeEnum.DT_IP_ESHN_CLAIM: u'Заявление о переходе на ЕСХН',\n DocumentTypeEnum.DT_IP_LETTER_INVENTORY: u'Опись для ценного письма'\n }\n\n def update_batch(self, batch_id, new_batch, current_user_id, config, logger):\n current_batch_db_object = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner_id=current_user_id,\n deleted=False).first()\n if not current_batch_db_object:\n raise errors.BatchNotFound()\n\n batch_status = current_batch_db_object.status\n if batch_status not in (BatchStatusEnum.BS_NEW, BatchStatusEnum.BS_EDITED):\n logger.warn(u\"Can't update batch %s in status %s\" % (unicode(batch_id), unicode(batch_status)))\n raise errors.DocumentBatchUpdateError()\n\n try:\n current_batch = DocumentBatch.db_obj_to_field(current_batch_db_object)\n except Exception:\n logger.fatal(u\"Failed to validate batch from DB!\")\n raise\n\n current_fields = current_batch.data.value\n assert isinstance(current_fields, dict)\n\n # STEP 1: make new data and metadata\n # and collect changed fields names\n new_batch_db = new_batch.get_db_object()\n merged_fields, changed_field_names = self._merge_raw_fields(current_batch_db_object.data, new_batch_db.data)\n\n current_batch_db_object._metadata = new_batch_db._metadata\n current_batch_db_object.data = merged_fields\n sqldb.session.commit()\n\n # STEP 2: make document set from data and schema\n try:\n new_field_set, new_docs, _ = self.make_docs_for_new_data(\n current_batch.data.value,\n new_batch.data.value,\n current_batch_db_object,\n BatchManager.get_batch_document_fields(current_batch_db_object)\n )\n except errors.DocumentBatchUpdateError, ex:\n logger.exception(u\"Failed to update batch with new values\")\n current_batch_db_object.error_info = {\"error\": u\"unknown error (%s)\" % str(ex)}\n sqldb.session.commit()\n raise\n\n current_docs_db_models = [doc for doc in current_batch_db_object._documents]\n new_docs_db_models = [BatchDocumentDbObject(**new_doc.get_db_object_data()) for new_doc in new_docs]\n\n merged_docs, unused_db_docs = self.merge_docs(current_docs_db_models, new_docs_db_models, config)\n for doc in merged_docs:\n doc.batch = current_batch_db_object\n doc._owner_id = current_user_id\n\n for doc in unused_db_docs:\n BatchDocumentDbObject.query.filter_by(id=doc.id).delete()\n\n sqldb.session.commit()\n\n error_info = None\n try:\n current_batch_db_object._metadata = new_batch_db._metadata\n current_batch_db_object.data = merged_fields\n\n # STEP 4: make result fields\n current_batch_db_object.result_fields = self.make_result_fields(current_batch, new_field_set)\n current_batch_db_object.error_info = None\n sqldb.session.commit()\n\n except Exception, ex:\n 
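# any failure while rebuilding the result fields leaves only a generic\n            # error marker on the batch; the specific exception goes to the log.\n            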
logger.exception(u\"Failed to update batch with new values\")\n current_batch_db_object.error_info = {\"error\": u\"unknown error\"}\n sqldb.session.commit()\n raise errors.DocumentBatchUpdateError()\n\n if current_batch_db_object.status == BatchStatusEnum.BS_EDITED:\n error_ext = self.get_batch_errors(current_batch_db_object, logger)\n if error_ext:\n error_info = {'error_ext': error_ext}\n current_batch_db_object.error_info = error_info\n sqldb.session.commit()\n\n current_batch = DocumentBatch.db_obj_to_field(current_batch_db_object)\n struct = current_batch.get_api_structure()\n\n if error_info:\n struct['error_info'] = error_info\n elif 'error_info' in struct:\n del struct['error_info']\n\n return {'result': struct}\n\n def get_title(self, doc_type):\n return IpRegBatchManager.DOC_TITLES.get(doc_type, '')\n\n def get_last_modified_batch_caption(self, batch_db):\n if not batch_db:\n return u\"\"\n\n pp_data = batch_db.data.get('person')\n if pp_data and '_id' in pp_data:\n person_db = PrivatePersonDbObject.query.filter_by(id=pp_data['_id']).first()\n if person_db:\n person = PrivatePerson.db_obj_to_field(person_db)\n full_name_decl = declension(person.get_full_name(), 'gen')\n parts = full_name_decl.split(' ')\n if len(parts) in (2, 3):\n surname_decl = parts[0].strip()\n short_name = person.get_short_name()\n parts = short_name.split(' ')\n if len(parts) in (2, 3):\n return surname_decl + u\" \" + u\" \".join(parts[1:])\n return declension(person.get_short_name(), 'gen')\n\n return u\"\"\n\n def get_batch_caption(self, batch_db):\n if not batch_db:\n return u\"\"\n\n pp_data = batch_db.data.get('person')\n if pp_data and '_id' in pp_data:\n person_db = PrivatePersonDbObject.query.filter_by(id=pp_data['_id']).first()\n if person_db:\n person = PrivatePerson.db_obj_to_field(person_db)\n return u\"Создание ИП «%s»\" % person.get_short_name() if person else u\"Создание ИП\"\n\n return u\"Создание ИП\"\n\n def get_stage(self, batch_db):\n company_registered = False\n batch_data = batch_db.data\n if 'result_fields' in batch_data:\n result_fields = batch_data['result_fields']\n if 'ifns_reg_info' in result_fields:\n ifns_reg_info = result_fields['ifns_reg_info']\n if 'status' in ifns_reg_info and ifns_reg_info['status'] == 'registered':\n company_registered = True\n return 'preparation' if batch_db.status != BatchStatusEnum.BS_FINALISED else \\\n ('submission' if not company_registered else\n 'running')\n\n def definalize_batch(self, config, logger, batch, force):\n if batch.status != BatchStatusEnum.BS_FINALISED:\n return False\n\n for doc in BatchDocumentDbObject.query.filter_by(batch=batch):\n if doc.file:\n file_obj = doc.file\n doc.file = None\n FileStorage.remove_file(file_obj.id, config)\n\n batch.status = BatchStatusEnum.BS_EDITED\n batch.ifns_reg_info = None,\n batch.last_change_dt = datetime.utcnow()\n sqldb.session.commit()\n return True\n\n def create_batch(self, owner):\n new_batch = DocumentBatchDbObject(\n batch_type=DocumentBatchTypeEnum.DBT_NEW_IP,\n data={},\n _owner=owner,\n status=BatchStatusEnum.BS_NEW,\n paid=True\n )\n\n return new_batch\n" }, { "alpha_fraction": 0.6739811897277832, "alphanum_fraction": 0.6750261187553406, "avg_line_length": 29.870967864990234, "blob_id": "47e0341f16eb461fd53e3bbbeff416418b5abfd7", "content_id": "711acf0bb6e0d831dfcb29c60ed4f48301ead34e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 957, "license_type": "no_license", "max_line_length": 100, "num_lines": 31, "path": 
"/app/deployment_migrations/migration_list/20151005_add_payment_tables.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom fw.db.sql_base import db as sqldb\n\n\ndef forward(config, logger):\n logger.debug(u\"Create new models\")\n\n sqldb.session.close()\n sqldb.engine.execute(u\"\"\"CREATE TABLE pay_info (\n id SERIAL NOT NULL,\n user_id INTEGER,\n batch_id VARCHAR NOT NULL,\n pay_record_id INTEGER NOT NULL,\n payment_provider INTEGER NOT NULL,\n dt TIMESTAMP WITHOUT TIME ZONE,\n service_type VARCHAR NOT NULL,\n PRIMARY KEY (id),\n FOREIGN KEY(user_id) REFERENCES authuser (id),\n FOREIGN KEY(batch_id) REFERENCES doc_batch (id)\n);\"\"\")\n\n sqldb.engine.execute(u\"\"\"DROP INDEX IF EXISTS ix_pay_info_user_id;\"\"\")\n sqldb.engine.execute(u\"\"\"CREATE INDEX ix_pay_info_user_id ON pay_info (user_id);\"\"\")\n\n sqldb.engine.execute(u\"\"\"DROP INDEX IF EXISTS ix_pay_info_pay_record_id;\"\"\")\n sqldb.engine.execute(u\"\"\"CREATE INDEX ix_pay_info_pay_record_id ON pay_info (pay_record_id);\"\"\")\n\n\ndef rollback(config, logger):\n pass\n" }, { "alpha_fraction": 0.6045876741409302, "alphanum_fraction": 0.606590211391449, "avg_line_length": 40.60606002807617, "blob_id": "18b6c3c3d73699c7ee928e8406937e267a6ce897", "content_id": "c857f2c78be2137475c77d92bf8aa05a49f52072", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5553, "license_type": "no_license", "max_line_length": 118, "num_lines": 132, "path": "/app/services/test_svc/test_svc_manager.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime\n\nfrom fw.api import errors\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.batch_manager import BatchManager\nfrom fw.documents.db_fields import DocumentBatchDbObject, BatchDocumentDbObject\nfrom fw.documents.doc_requisites_storage import DocRequisitiesStorage\nfrom fw.documents.enums import BatchStatusEnum, DocumentBatchTypeEnum, DocumentTypeEnum\nfrom fw.documents.fields.doc_fields import DocumentBatch\n\n\nclass TestSvcManager(BatchManager):\n\n DOC_TITLES = {\n DocumentTypeEnum.DT_TEST_DOC_1: u'Тестовый документ 1',\n DocumentTypeEnum.DT_TEST_DOC_2: u'Тестовый документ 2',\n DocumentTypeEnum.DT_TEST_DOC_3: u'Тестовый документ 3'\n }\n\n BATCH_TYPE = DocumentBatchTypeEnum.DBT_TEST_TYPE\n\n # def update_batch(self, batch_id, new_batch, current_user_id, db, config, logger):\n # current_batch_db_object = DocumentBatchDbObject.query.filter_by(id=batch_id, _owner_id=current_user_id,\n # deleted=False).first()\n # if not current_batch_db_object:\n # raise errors.BatchNotFound()\n #\n # batch_status = current_batch_db_object.status\n # # if batch_status not in (BatchStatusEnum.BS_NEW, BatchStatusEnum.BS_EDITED): # todo\n # # logger.warn(u\"Can't update batch %s in status %s\" % (unicode(batch_id), unicode(batch_status)))\n # # raise errors.DocumentBatchUpdateError()\n #\n # try:\n # current_batch = DocumentBatch.db_obj_to_field(current_batch_db_object)\n # except Exception:\n # logger.fatal(u\"Failed to validate batch from DB!\")\n # raise\n #\n # current_fields = current_batch.data.value\n # assert isinstance(current_fields, dict)\n #\n # # STEP 1: make new data and metadata\n # # and collect changed fields names\n # new_batch_db = new_batch.get_db_object()\n # merged_fields, changed_field_names = self._merge_raw_fields(current_batch_db_object.data, new_batch_db.data)\n #\n # 
current_batch_db_object._metadata = new_batch_db._metadata\n # current_batch_db_object.data = merged_fields\n # sqldb.session.commit()\n #\n # # STEP 2: make document set from data and schema\n # try:\n # new_field_set, new_docs, _ = self.make_docs_for_new_data(\n # current_batch.data.value,\n # new_batch.data.value,\n # current_batch_db_object,\n # current_batch._documents.values\n # )\n # except errors.DocumentBatchUpdateError, ex:\n # logger.exception(u\"Failed to update batch with new values\")\n # current_batch_db_object.error_info = {\"error\": u\"unknown error (%s)\" % str(ex)}\n # sqldb.session.commit()\n # raise\n #\n # current_docs_db_models = [doc for doc in current_batch_db_object._documents]\n # new_docs_db_models = [BatchDocumentDbObject(**new_doc.get_db_object_data()) for new_doc in new_docs]\n #\n # merged_docs, unused_db_docs = self.merge_docs(current_docs_db_models, new_docs_db_models, config)\n # for doc in merged_docs:\n # doc.batch = current_batch_db_object\n # doc._owner_id = current_user_id\n #\n # for doc in unused_db_docs:\n # BatchDocumentDbObject.query.filter_by(id=doc.id).delete()\n #\n # sqldb.session.commit()\n #\n # # STEP 3: combine old and new documents\n # error_info = None\n # try:\n # current_batch_db_object.metadata = new_batch_db.metadata\n # current_batch_db_object.data = merged_fields\n #\n # # STEP 5: make result fields\n # current_batch_db_object.result_fields = self.make_result_fields(current_batch, new_field_set)\n # current_batch_db_object.error_info = None\n # sqldb.session.commit()\n # except Exception, ex:\n # logger.exception(u\"Failed to update batch with new values\")\n # current_batch_db_object.error_info = {\"error\": u\"unknown error\"}\n # sqldb.session.commit()\n # raise errors.DocumentBatchUpdateError()\n #\n # error_ext = self.get_batch_errors(current_batch_db_object, logger)\n # if error_ext:\n # error_info = {'error_ext': error_ext}\n # else:\n # error_info = None\n # current_batch_db_object.error_info = error_info\n # sqldb.session.commit()\n #\n # self.check_transition(current_batch_db_object)\n #\n # current_batch = DocumentBatch.db_obj_to_field(current_batch_db_object)\n # struct = current_batch.get_api_structure()\n #\n # if error_info:\n # struct['error_info'] = error_info\n # elif 'error_info' in struct:\n # del struct['error_info']\n #\n # return {'result': struct}\n\n def get_title(self, doc_type):\n return TestSvcManager.DOC_TITLES.get(doc_type, '')\n\n def get_batch_caption(self, batch):\n return u\"Тестовый батч\"\n\n def get_stage(self, batch):\n return 'unknown'\n\n def definalize_batch(self, config, logger, batch, force):\n if batch.status != BatchStatusEnum.BS_FINALISED:\n return False\n\n batch.status = BatchStatusEnum.BS_EDITED\n batch.ifns_reg_info = None,\n batch.last_change_dt = datetime.utcnow()\n sqldb.session.commit()\n return True\n\n" }, { "alpha_fraction": 0.6128905415534973, "alphanum_fraction": 0.6152485609054565, "avg_line_length": 35.349998474121094, "blob_id": "39db13c6b3cabc58ce71e11c2987eb41a34c5fa9", "content_id": "fb0ebf57494a6c4bd237ca39937e77507b876766", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5089, "license_type": "no_license", "max_line_length": 114, "num_lines": 140, "path": "/app/async/vews_base.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom bson.objectid import ObjectId\n\nfrom tornado import escape\nimport traceback\nfrom tornado.web import HTTPError\nfrom 
tornado import gen\nfrom tornado.web import asynchronous\n\nimport tornado.web\n\nfrom fw.api import errors\n# MongoSessionInterface, AuthUser and AnonymousUser are also referenced below;\n# their project-specific imports are assumed to be available in this package.\n\nclass AsyncConnectionRequestHandler(tornado.web.RequestHandler):\n\n    def initialize(self):\n        logger = self.application.logger\n        logger.debug(u\"Request data: \\n%s\" % unicode(self.request))\n        logger.debug(u\"Arguments data: \\n%s\" % unicode(self.request.arguments))\n\n    def _process_response(self, content):\n        return content\n\n    @asynchronous\n    @gen.coroutine\n    def get(self, *args, **kwargs):\n        yield self.get_content_on_method(self._get_content_on_get, *args, **kwargs)\n\n    @asynchronous\n    @gen.coroutine\n    def post(self, *args, **kwargs):\n        yield self.get_content_on_method(self._get_content_on_post, *args, **kwargs)\n\n    @asynchronous\n    @gen.coroutine\n    def patch(self, *args, **kwargs):\n        yield self.get_content_on_method(self._get_content_on_patch, *args, **kwargs)\n\n    @gen.coroutine\n    def get_content_on_method(self, method, *args, **kwargs):\n        config = self.application.config\n        try:\n            content = yield method(*args, **kwargs)\n            self.set_header('Access-Control-Allow-Credentials', 'true')\n            self.set_header('Access-Control-Allow-Origin', \"http://%s\" % config['site_domain'])\n            content = self._process_response(content)\n        except errors.ServerUnavailable, exc:\n            self.application.logger.exception(u\"Server Unavaliable error\")\n            trbk = traceback.format_exc()\n            self.application.logger.exception(trbk)\n            raise HTTPError(503, str(exc))\n        except errors.ApiBaseError, exc:\n            api_error_code = exc.get_error_code()\n            http_error_code = exc.get_http_error_code()\n            api_error_msg = exc.get_error_message()\n            data_json = escape.json_encode({\"error\" : {\"code\" : api_error_code, \"message\" : api_error_msg}})\n            self.set_header('Content-Type', 'application/json')\n            self.set_header('Access-Control-Allow-Credentials', 'true')\n            self.set_header('Access-Control-Allow-Origin', \"http://%s\" % config['site_domain'])\n\n            self.write(data_json)\n            self.set_status(http_error_code, reason=api_error_msg)\n            self.finish()\n            if self.application.settings.get('debug'):\n                self.application.logger.debug(\n                    \"API ERROR \" + str(exc.get_error_code()) + \": \"+ exc.get_error_message().encode(\"utf8\")\n                )\n            else:\n                self.application.logger.exception(\n                    \"API ERROR \" + str(exc.get_error_code()) + \": \"+ exc.get_error_message().encode(\"utf8\")\n                )\n            return\n        except NotImplementedError:\n            raise HTTPError(405)\n        except Exception, exc:\n            trbk = traceback.format_exc()\n            self.application.logger.error(trbk)\n            raise HTTPError(500, trbk)\n\n        self.write(content)\n        self.finish()\n\n    @gen.coroutine\n    def _get_content_on_get(self, *args, **kwargs):\n        result = yield self.get_content_on_get(*args, **kwargs)\n        raise gen.Return(result)\n\n    @gen.coroutine\n    def get_content_on_get(self, *args, **kwargs):\n        raise NotImplementedError()\n\n    @gen.coroutine\n    def _get_content_on_post(self, *args, **kwargs):\n        result = yield self.get_content_on_post(*args, **kwargs)\n        raise gen.Return(result)\n\n    @gen.coroutine\n    def get_content_on_post(self, *args, **kwargs):\n        raise NotImplementedError()\n\n    @gen.coroutine\n    def _get_content_on_patch(self, *args, **kwargs):\n        result = yield self.get_content_on_patch(*args, **kwargs)\n        raise gen.Return(result)\n\n    @gen.coroutine\n    def get_content_on_patch(self, *args, **kwargs):\n        raise NotImplementedError()\n\nclass SessionRequestHandler(AsyncConnectionRequestHandler):\n\n    def __init__(self, *args, **kwargs): # BLOCKING\n        super(SessionRequestHandler, self).__init__(*args, **kwargs)\n\n        config = self.application.config\n        session_interface = 
MongoSessionInterface(\n None,\n self.application.sync_db,\n 'sessions',\n config['cookie_name'],\n config['PERMANENT_SESSION_LIFETIME'], False, False)\n session = session_interface.open_session(None, self) # BLOCKING\n\n if 'user_id' in session:\n user = AuthUser.find_one(self.application.sync_db, {'_id' : ObjectId(session['user_id'])}) # BLOCKING\n else:\n user = AnonymousUser()\n self.user = user\n\n\nclass JsonRequestHandler(SessionRequestHandler):\n\n def _process_response(self, content):\n self.set_header('Content-Type', 'text/javascript')\n return escape.json_encode(content)\n\ndef authorized(func):\n def wrapper(self, *args, **kwargs):\n if self.user.is_anonymous:\n raise errors.NotAuthorized()\n return func(self, *args, **kwargs)\n return wrapper\n" }, { "alpha_fraction": 0.6521739363670349, "alphanum_fraction": 0.6530434489250183, "avg_line_length": 30.97222137451172, "blob_id": "61a4c780954b7e1478fedcebf8ef61afd7d22d85", "content_id": "804dd010a65e6f34442e087e9c5d50061d4ffe7c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1150, "license_type": "no_license", "max_line_length": 90, "num_lines": 36, "path": "/app/fw/auth/social_services/social_backend.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n\nclass SocialBackend(object):\n @staticmethod\n def get_user_data(config, access_token):\n \"\"\"\n Verify access token and return social service user data (dict)\n \"\"\"\n raise NotImplementedError(\"Must be implemented in derived classes\")\n\n @staticmethod\n def get_user_link(social_uid):\n \"\"\"\n Return instance of BaseSocialUserLink subclass corresponding to given user id.\n \"\"\"\n raise NotImplementedError(\"Must be implemented in derived classes\")\n\n @staticmethod\n def make_link(access_token, social_uid, auth_user, config):\n \"\"\"\n Make social service user link.\n \"\"\"\n raise NotImplementedError(\"Must be implemented in derived classes\")\n\n @staticmethod\n def get_token_url(config, next_page=\"/\"):\n raise NotImplementedError(\"Must be implemented in derived classes\")\n\n @staticmethod\n def new_post(post_content, auth_user, config, link_to_attach=None):\n return\n\n @staticmethod\n def get_profile_url(social_link_object):\n raise NotImplementedError(\"Must be implemented in derived classes\")" }, { "alpha_fraction": 0.7306272983551025, "alphanum_fraction": 0.7315497994422913, "avg_line_length": 32.75, "blob_id": "48f9d5315aabaf79c476196d0ac1441b12204e46", "content_id": "2142dd6070e0a440f2f4262feb1e837c6ff92145", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1084, "license_type": "no_license", "max_line_length": 84, "num_lines": 32, "path": "/app/services/yurist/data_model/models.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom bson import ObjectId\nfrom sqlalchemy import Column, Unicode, String, ForeignKey, DateTime, Boolean\nfrom sqlalchemy.orm import relationship\nfrom sqlalchemy.sql.functions import func\n\nfrom fw.db.sql_base import db as sqldb\n\n\nclass YuristBatchCheckObject(sqldb.Model):\n __tablename__ = \"yurist_batch_check\"\n\n id = Column(String, primary_key=True, default=lambda: str(ObjectId()))\n batch_id = Column(String, ForeignKey('doc_batch.id'), nullable=True)\n batch = relationship(\"DocumentBatchDbObject\", uselist=False)\n\n status = Column(Unicode, nullable=False, default=\"new\")\n\n 
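# creation timestamp of the check request and whether the lawyer was also\n    # asked to correct typos in the documents\n    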
create_date=Column(DateTime, nullable=False, default=func.now())\n typos_correction=Column(Boolean, nullable=False)\n\n attached_files = relationship(\"YuristCheckFilesObject\")\n\n\nclass YuristCheckFilesObject(sqldb.Model):\n __tablename__ = 'yurist_check_files'\n\n check_id = Column(String, ForeignKey('yurist_batch_check.id'), primary_key=True)\n files_id = Column(String, ForeignKey('files.id'), primary_key=True)\n\n child = relationship(\"FileObject\")\n\n\n\n\n" }, { "alpha_fraction": 0.4845817983150482, "alphanum_fraction": 0.5009856224060059, "avg_line_length": 32.5, "blob_id": "1df376479596b600d0091251c5c227f451b55085", "content_id": "8c7a717e16f2bf54d80a47db02159dab684e3b08", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 14960, "license_type": "no_license", "max_line_length": 112, "num_lines": 424, "path": "/app/services/partners/partners_manage_commands.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom datetime import datetime\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.address_enums import RFRegionsEnum\nfrom fw.documents.common_schema_fields import ADDRESS_FIELD\nfrom manage_commands import BaseManageCommand\nfrom services.partners.models import (\n AccountantPartnersObject,\n BankPartnersObject,\n BankPartnersServiceObject,\n StampPartnersObject,\n BankPartnerRequestObject)\n\n_BANK_PARTNER_SCHEMA = [{\n \"name\": \"contact_phone\",\n \"type\": \"calculated\",\n \"field_type\": \"DocPhoneNumberField\",\n \"required\": True,\n \"suppress_validation_errors\": True,\n \"value\": {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"bank_contact_phone_general_manager\": True\n },\n \"value\": {\n \"#field\": \"general_manager->phone\"\n }\n }],\n \"default\": {\n \"value\": {\n \"#field\": \"bank_contact_phone\"\n }\n }\n }\n }\n}, {\n \"name\": \"short_name\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"suppress_validation_errors\": {\n \"send_private_data\": False\n }\n}, {\n \"name\": \"ogrn\",\n \"type\": \"calculated\",\n \"field_type\": \"DocTextField\",\n \"value\": {\n \"#field\": \"<batch>->result_fields->ifns_reg_info->ogrn\"\n }\n}, {\n \"name\": \"inn\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"suppress_validation_errors\": {\n \"send_private_data\": False\n }\n}, ADDRESS_FIELD, {\n \"name\": \"general_manager_caption\",\n \"type\": \"DocTextField\",\n \"required\": False,\n \"suppress_validation_errors\": {\n \"send_private_data\": False\n }\n}, {\n \"name\": \"general_manager\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"phone\": {\n \"required\": True\n },\n \"address\": {\n \"required\": True\n }\n },\n \"suppress_validation_errors\": {\n \"send_private_data\": False\n }\n}, {\n \"name\": \"bank_title\",\n \"type\": \"DocTextField\",\n \"required\": True,\n \"suppress_validation_errors\": True\n}, {\n \"name\": \"schema\",\n \"type\": \"calculated\",\n \"field_type\": \"DocTextField\",\n \"required\": True,\n \"value\": {\n \"#field\": \"<config>->WEB_SCHEMA\"\n }\n}, {\n \"name\": \"domain\",\n \"type\": \"calculated\",\n \"field_type\": \"DocTextField\",\n \"required\": True,\n \"value\": {\n \"#field\": \"<config>->DOMAIN\"\n }\n}]\n\n#################################################################################################\n\n_BANK_PARTNER_SCHEMA2 = [\n {\n \"name\": \"contact_phone\",\n \"type\": \"calculated\",\n 
\"field_type\": \"DocPhoneNumberField\",\n \"required\": True,\n \"suppress_validation_errors\": True,\n \"value\": {\n \"#cases\": {\n \"list\": [{\n \"conditions\": {\n \"bank_contact_phone_general_manager\": True\n },\n \"value\": {\n \"#field\": \"general_manager->phone\"\n }\n }],\n \"default\": {\n \"value\": {\n \"#field\": \"bank_contact_phone\"\n }\n }\n }\n }\n },\n ADDRESS_FIELD,\n {\n \"name\": \"general_manager\",\n \"type\": \"db_object\",\n \"cls\": \"PrivatePerson\",\n \"required\": False,\n \"override_fields_kwargs\": {\n \"phone\": {\n \"required\": True\n },\n \"address\": {\n \"required\": True\n }\n },\n \"suppress_validation_errors\": {\n \"send_private_data\": False\n }\n }\n]\n\n\nclass ReinitPartnersCommand(BaseManageCommand):\n NAME = \"reinit_partners\"\n\n def run(self):\n schema = self.config['WEB_SCHEMA']\n domain = self.config['DOMAIN']\n\n make_link = lambda x: '%s://%s/res/%s' % (schema, domain, x)\n\n BankPartnerRequestObject.query.delete()\n BankPartnersServiceObject.query.delete()\n BankPartnersObject.query.delete()\n sqldb.session.commit()\n\n new_item = BankPartnersObject(\n id=\"553faf59bdffb5220faca395\",\n region=[RFRegionsEnum.RFR_SPB,\n RFRegionsEnum.RFR_MOSCOW,\n RFRegionsEnum.RFR_SVERDLOVSKAYA_REGION,\n RFRegionsEnum.RFR_CHELYABINSKAYA_REGION,\n RFRegionsEnum.RFR_TATARSTAN,\n RFRegionsEnum.RFR_NOVOSIBIRSKAYA_REGION,\n RFRegionsEnum.RFR_KRASNOYARSKIY_KRAY],\n city=[RFRegionsEnum.RFR_SPB,\n RFRegionsEnum.RFR_MOSCOW,\n u\"Екатеринбург\",\n u\"Челябинск\",\n u\"Казань\",\n u\"Новосибирск\",\n u\"Красноярск\"],\n enabled=True,\n sort_index=1,\n link='',\n banner=make_link('b1.png'),\n title=u'«Открытие»',\n created=datetime.utcnow(),\n conditions=[u\"Открытие расчетного счета — бесплатно\"]\n )\n sqldb.session.add(new_item)\n sqldb.session.commit()\n\n new_item = BankPartnersObject(\n id=\"55c9afab543ed837fea53db2\",\n region=[\n RFRegionsEnum.RFR_IRKUTSKAYA_REGION,\n RFRegionsEnum.RFR_ALTAYSKIY_KRAI,\n RFRegionsEnum.RFR_BELGORODSKAYA_REGION,\n RFRegionsEnum.RFR_PRIMORSKIY_KRAI,\n RFRegionsEnum.RFR_VOLGOGRADSKAYA_REGION,\n RFRegionsEnum.RFR_VORONEZHSKAYA_REGION,\n RFRegionsEnum.RFR_SVERDLOVSKAYA_REGION,\n RFRegionsEnum.RFR_UDMURTIYA,\n RFRegionsEnum.RFR_IRKUTSKAYA_REGION,\n RFRegionsEnum.RFR_TATARSTAN,\n RFRegionsEnum.RFR_KALININGRADSKAYA_REGION,\n RFRegionsEnum.RFR_KALUZHSKAYA_REGION,\n RFRegionsEnum.RFR_KEMEROVSKAYA_REGION,\n RFRegionsEnum.RFR_KRASNODARSKIY_KRAI,\n RFRegionsEnum.RFR_KRASNOYARSKIY_KRAY,\n RFRegionsEnum.RFR_LIPETSKAYA_REGION,\n RFRegionsEnum.RFR_CHELYABINSKAYA_REGION,\n RFRegionsEnum.RFR_MOSCOW,\n RFRegionsEnum.RFR_MURMANSKAYA_REGION,\n RFRegionsEnum.RFR_UGRA,\n RFRegionsEnum.RFR_NIZHEGORODSKAYA_REGION,\n RFRegionsEnum.RFR_NOVOSIBIRSKAYA_REGION,\n RFRegionsEnum.RFR_OMSKAYA_REGION,\n RFRegionsEnum.RFR_ORENBURGSKAYA_REGION,\n RFRegionsEnum.RFR_PENZENSKAYA_REGION,\n RFRegionsEnum.RFR_PERMSKIY_KRAI,\n RFRegionsEnum.RFR_ROSTOVSKAYA_REGION,\n RFRegionsEnum.RFR_RYAZANSKAYA_REGION,\n RFRegionsEnum.RFR_SAMARSKAYA_REGION,\n RFRegionsEnum.RFR_SPB,\n RFRegionsEnum.RFR_SARATOVSKAYA_REGION,\n RFRegionsEnum.RFR_STAVROPOLSKY_KRAI,\n RFRegionsEnum.RFR_TVERSKAYA_REGION,\n RFRegionsEnum.RFR_TOMSKAYA_REGION,\n RFRegionsEnum.RFR_TULSKAYA_REGION,\n RFRegionsEnum.RFR_TUMENSKAYA_REGION,\n RFRegionsEnum.RFR_ULYANOVSKAYA_REGION,\n RFRegionsEnum.RFR_BASHKARTOSTAN,\n RFRegionsEnum.RFR_HABAROVSKY_KRAI,\n RFRegionsEnum.RFR_CHUVASHIYA,\n RFRegionsEnum.RFR_YAROSLAVSKAYA_REGION\n ],\n city=[\n u\"Ангарск\"\n u\"Барнаул\",\n u\"Белгород\",\n u\"Владивосток\",\n 
u\"Волгоград\",\n u\"Волжский\",\n u\"Воронеж\",\n u\"Екатеринбург\",\n u\"Ижевск\",\n u\"Иркутск\",\n u\"Казань\",\n u\"Калининград\",\n u\"Калуга\",\n u\"Кемерово\",\n u\"Краснодар\",\n u\"Красноярск\",\n u\"Липецк\",\n u\"Магнитогорск\",\n u\"Москва\",\n u\"Мурманск\",\n u\"Набережные Челны\",\n u\"Нижневартовск\",\n u\"Нижний Новгород\",\n u\"Новокузнецк\",\n u\"Новосибирск\",\n u\"Омск\",\n u\"Оренбург\",\n u\"Орск\",\n u\"Пенза\",\n u\"Пермь\",\n u\"Ростов-на-Дону\",\n u\"Рязань\",\n u\"Самара\",\n u\"Санкт-Петербург\",\n u\"Саратов\",\n u\"Сочи\",\n u\"Ставрополь\",\n u\"Сургут\",\n u\"Тверь\",\n u\"Томск\",\n u\"Тула\",\n u\"Тюмень\",\n u\"Ульяновск\",\n u\"Уфа\",\n u\"Хабаровск\",\n u\"Чебоксары\",\n u\"Челябинск\",\n u\"Ярославль\"],\n enabled=True,\n sort_index=2,\n link='',\n banner=make_link('b2.png'),\n title=u'«Альфа-банк»',\n created=datetime.utcnow(),\n conditions=[\n u\"бесплатный выезд менеджера в офис\",\n u\"открытие расчетного счета за 2‒3 дня\",\n u\"3 месяца бесплатно при оплате сразу 9 месяцев\",\n u\"до 3000 рублей на поиск профессионалов на HH.ru\",\n u\"до 9000 рублей на Яндекс.Директ после открытия счета в подарок\"\n ]\n )\n sqldb.session.add(new_item)\n sqldb.session.commit()\n\n new_item = BankPartnersServiceObject(\n id=\"553faff4bdffb5220faca396\",\n type='email',\n email='[email protected]',\n fields=_BANK_PARTNER_SCHEMA,\n template_name='account_creation_consultation_request',\n bank_partner_id=\"553faf59bdffb5220faca395\"\n )\n sqldb.session.add(new_item)\n sqldb.session.commit()\n\n is_prod = not self.config['STAGING'] and not self.config['DEBUG']\n new_item = BankPartnersServiceObject(\n id=\"55c9eab75f7105f302fbfadc\",\n type='web',\n config={\n 'method': 'post',\n 'url': 'https://alfabank.ru/sme/invitation/' if is_prod\n else 'http://ifns.staging.legalcloud.ru/send_bank_request/',\n },\n fields=_BANK_PARTNER_SCHEMA2,\n template_name='alpha_bank_web_request',\n bank_partner_id=\"55c9afab543ed837fea53db2\"\n )\n sqldb.session.add(new_item)\n sqldb.session.commit()\n\n if self.config['STAGING'] or self.config['DEBUG']:\n new_item = BankPartnersObject(\n id=\"554b5d3fd045c98560aa9352\",\n region=[RFRegionsEnum.RFR_SPB,\n RFRegionsEnum.RFR_MOSCOW],\n city=[RFRegionsEnum.RFR_SPB,\n RFRegionsEnum.RFR_MOSCOW],\n enabled=True,\n sort_index=1,\n link='',\n banner=make_link('b1.png'),\n title=u'«Закрытие»',\n created=datetime.utcnow(),\n conditions=[u\"condition1\", u\"ждфыловаджо фывалджо фыджво\"]\n )\n sqldb.session.add(new_item)\n sqldb.session.commit()\n\n new_item = BankPartnersServiceObject(\n id=\"554b5d4dd045c98560aa9353\",\n type='email',\n email='[email protected]',\n fields=_BANK_PARTNER_SCHEMA,\n template_name='account_creation_consultation_request',\n bank_partner_id=\"554b5d3fd045c98560aa9352\"\n )\n sqldb.session.add(new_item)\n sqldb.session.commit()\n\n # accounts\n AccountantPartnersObject.query.delete()\n sqldb.session.commit()\n\n new_partner = AccountantPartnersObject(\n id=\"55424ad532dba9d4e53c990a\",\n region=None,\n enabled=True,\n sort_index=1,\n link='https://www.moedelo.org/Referal/Lead/12568?targetUrl=www.moedelo.org/Prices',\n banner=make_link('a1.png'),\n title=u'«Моё.Дело»',\n created=datetime.utcnow(),\n type='online'\n )\n sqldb.session.add(new_partner)\n\n new_partner = AccountantPartnersObject(\n id=\"554a408d8d807ba959e548a8\",\n region=None,\n enabled=True,\n sort_index=1,\n link='http://www.b-kontur.ru/?p=3020',\n banner=make_link('a2.png'),\n title=u'«Контур.Бухгалтерия»',\n created=datetime.utcnow(),\n type='online'\n 
)\n sqldb.session.add(new_partner)\n sqldb.session.commit()\n\n if self.config['STAGING'] or self.config['DEBUG']:\n new_partner = AccountantPartnersObject(\n id=\"554b5d11d045c98560aa9351\",\n region=[RFRegionsEnum.RFR_SPB, RFRegionsEnum.RFR_MOSCOW],\n enabled=True,\n sort_index=100,\n link='http://www.b-kontur.ru/?p=3020',\n banner=make_link('a2.png'),\n title=u'«Бухгалтерия.Контур»',\n created=datetime.utcnow(),\n type='offline'\n )\n sqldb.session.add(new_partner)\n sqldb.session.commit()\n\n StampPartnersObject.query.delete()\n sqldb.session.commit()\n\n if self.config['STAGING'] or self.config['DEBUG']:\n new_item = StampPartnersObject(\n id=\"55424a8432dba9d4e53c9909\",\n region=[RFRegionsEnum.RFR_SPB, RFRegionsEnum.RFR_MOSCOW, RFRegionsEnum.RFR_SVERDLOVSKAYA_REGION,\n RFRegionsEnum.RFR_CHELYABINSKAYA_REGION, RFRegionsEnum.RFR_TATARSTAN,\n RFRegionsEnum.RFR_NOVOSIBIRSKAYA_REGION, RFRegionsEnum.RFR_KRASNOYARSKIY_KRAY],\n enabled=True,\n sort_index=1,\n link='http://google.ru',\n banner='http://yastatic.net/morda-logo/i/logo.svg',\n title=u'«Закрытие»',\n created=datetime.utcnow()\n )\n sqldb.session.add(new_item)\n sqldb.session.commit()\n" }, { "alpha_fraction": 0.6719056963920593, "alphanum_fraction": 0.6758349537849426, "avg_line_length": 24.450000762939453, "blob_id": "42afef313318c24517c98c09ac5a0679ac93706f", "content_id": "a9faf6a3e74141956e2e7bcad4b06ffb6e96f506", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 509, "license_type": "no_license", "max_line_length": 69, "num_lines": 20, "path": "/app/deployment_migrations/migration_list/20151013_add_multi_files_table.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom fw.db.sql_base import db as sqldb\n\n\ndef forward(config, logger):\n logger.debug(u\"Add m2m table for document files\")\n\n sqldb.session.close()\n sqldb.engine.execute(\"\"\"CREATE TABLE doc_files (\n doc_id VARCHAR NOT NULL,\n files_id VARCHAR NOT NULL,\n PRIMARY KEY (doc_id, files_id),\n FOREIGN KEY(doc_id) REFERENCES batch_docs (id) ON DELETE cascade,\n FOREIGN KEY(files_id) REFERENCES files (id) ON DELETE cascade\n);\"\"\")\n\n\ndef rollback(config, logger):\n pass\n" }, { "alpha_fraction": 0.5330764055252075, "alphanum_fraction": 0.5350031852722168, "avg_line_length": 31.4375, "blob_id": "5c65c835c8f4f005143d6fe073eeb2fb4b25b0c3", "content_id": "548fc47278a442f8b517bcf55a4b23679331220f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3114, "license_type": "no_license", "max_line_length": 118, "num_lines": 96, "path": "/app/fw/db/base.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n\nclass BaseMongoModel(object):\n def __init__(self, **kwargs):\n self.__data = kwargs or {}\n\n def __getattr__(self, item):\n if item == '_BaseMongoModel__data':\n return object.__getattribute__(self, '__data')\n return self.__data.get(item, None)\n\n def __setattr__(self, key, value):\n if key.startswith('_'):\n object.__setattr__(self, key, value)\n return\n self.__data[key] = value\n\n def __getitem__(self, item):\n return self.__data[item]\n\n def __setitem__(self, item, value):\n self.__data[item] = value\n\n def get(self, item, default_value=None):\n return self.__data.get(item, default_value)\n\n def insert(self, db, prepared_data=None, **kwargs):\n prepared_data = prepared_data or self.__data\n\n result_data = {}\n for c in prepared_data:\n 
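# flatten nested models (and lists of models) into plain dicts so the\n            # mongo driver can serialize the document\n            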
val = prepared_data[c]\n if isinstance(val, BaseMongoModel):\n result_data[c] = val.as_dict()\n elif isinstance(val, list):\n tmp_list = []\n for i in val:\n if isinstance(i, BaseMongoModel):\n tmp_list.append(i.as_dict())\n else:\n tmp_list.append(i)\n result_data[c] = tmp_list\n else:\n result_data[c] = prepared_data[c]\n\n _id = self.get_collection(db).insert(result_data, **kwargs)\n self.__data['_id'] = _id\n return _id\n\n def __cmp__(self, other):\n if '_id' not in self.__data and '_id' not in other.__data:\n return 0 if self.__data == other.__data else -1\n if ('_id' not in self.__data and '_id' in other.__data) or '_id' in self.__data and '_id' not in other.__data:\n return -1\n return 0 if self.__data['_id'] == other.__data['_id'] else -1\n\n @classmethod\n def get_collection(cls, db):\n return db[cls.COLLECTION_NAME]\n\n @classmethod\n def find_one(cls, db, *args, **kwargs):\n col = cls.get_collection(db)\n result = col.find_one(*args, **kwargs)\n if not result:\n return\n new_obj = cls(**result)\n return new_obj\n\n @classmethod\n def find(cls, db, *args, **kwargs):\n col = cls.get_collection(db)\n return col.find(*args, **kwargs)\n\n def as_dict(self):\n return self.__data\n\n def update_attr(self, key, val):\n self.__data[key] = val\n\n @classmethod\n def update(cls, db, spec, document, upsert=False, manipulate=False, safe=None, multi=False, check_keys=True,\n **kwargs):\n col = cls.get_collection(db)\n return col.update(spec, document, upsert, manipulate, safe, multi, check_keys, **kwargs)\n\n def save(self, db):\n if '_id' not in self.__data:\n raise Exception(\"Can't save new model. Use insert method instead\")\n self.get_collection(db).update({\"_id\": self.__data[\"_id\"]}, self.__data)\n\n @classmethod\n def remove(cls, db, query):\n col = cls.get_collection(db)\n return col.remove(query)\n" }, { "alpha_fraction": 0.46866583824157715, "alphanum_fraction": 0.673317015171051, "avg_line_length": 90.7977523803711, "blob_id": "5d5d9891a55ed15c5bd87ee87793e6d32c3d3fa8", "content_id": "d7c70a6d08ac3a87d54b9009d24970a7760e8d6e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9860, "license_type": "no_license", "max_line_length": 238, "num_lines": 89, "path": "/app/services/car_assurance/ci_commands.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom fw.documents.address_enums import RFRegionsEnum\nfrom manage_commands import BaseManageCommand\nfrom fw.db.sql_base import db as sqldb\nfrom services.car_assurance.db_models import CarAssurance, CarAssuranceBranch\n\n\nclass InitCarInsurancesCommand(BaseManageCommand):\n NAME = \"create_car_insurances\"\n\n def run(self):\n self.logger.info(u'=' * 50)\n\n CarAssuranceBranch.query.filter().delete()\n sqldb.session.commit()\n CarAssurance.query.filter().delete()\n sqldb.session.commit()\n\n a_data = (\n (u'559a9b984056d47aa5c57c5b', u'РЕСО-Гарантия'),\n (u'559a9b984056d47aa5c57c5c', u'ООО \"СФ \"Адонис\"'),\n (u'559a9b994056d47aa5c57c5d', u'ООО \"СК АЛРОСА\"'),\n (u'559a9b994056d47aa5c57c5e', u'ОАО \"АльфаСтрахование\"'),\n (u'559a9b994056d47aa5c57c5f', u'ОАО СК \"Альянс\"'),\n (u'559a9b994056d47aa5c57c60', u'ООО \"Страховая компания \"Ангара\"'),\n (u'559a9b9a4056d47aa5c57c61', u'ООО \"Антал-Страхование\"'),\n (u'559a9b9a4056d47aa5c57c62', u'ООО \"СГ \"АСКО\"'),\n (u'559a9b9a4056d47aa5c57c63', u'ООО СК \"ВТБ Страхование\"'),\n (u'559a9b9a4056d47aa5c57c64', u'ООО Страховая Компания \"Гелиос\"')\n )\n\n for id_val, name in 
a_data:\n c = CarAssurance(id=id_val, full_name=name, short_name=name)\n sqldb.session.add(c)\n sqldb.session.commit()\n\n b_data = (\n (u'559a9b984056d47aa5c57c5b', u'РЕСО-Гарантия филиал 1', u'730-30-00; доб.1682 (факс)', u'Нагорный проезд д.6, г.Москва, 117105', RFRegionsEnum.RFR_MOSCOW, u'559a9cf64056d47aa5c57c69'),\n (u'559a9b984056d47aa5c57c5b', u'РЕСО-Гарантия филиал 2', u'730-30-00; доб.1682 (факс)', u'Нагорный проезд д.6, г.Москва, 117105', RFRegionsEnum.RFR_MOSCOW, u'559a9cf64056d47aa5c57c6a'),\n (u'559a9b984056d47aa5c57c5b', u'РЕСО-Гарантия филиал 3', u'730-30-00; доб.1682 (факс)', u'Нагорный проезд д.6, г.Москва, 117105', RFRegionsEnum.RFR_MOSCOW, u'559a9cf64056d47aa5c57c6b'),\n\n (u'559a9b984056d47aa5c57c5c', u'ООО \"СФ \"Адонис\" филиал 1', u\"(342) 241-02-87\", u'ул. Братьев Игнатовых, д. 3, оф.710, г. Пермь', RFRegionsEnum.RFR_PERMSKIY_KRAI, u'559a9e0964f10cabe6678653'),\n (u'559a9b984056d47aa5c57c5c', u'ООО \"СФ \"Адонис\" филиал 2', u\"(342) 241-02-87\", u'ул. Братьев Игнатовых, д. 3, оф.710, г. Пермь', RFRegionsEnum.RFR_PERMSKIY_KRAI, u'559a9e0a64f10cabe6678654'),\n (u'559a9b984056d47aa5c57c5c', u'ООО \"СФ \"Адонис\" филиал 3', u\"(342) 241-02-87\", u'ул. Братьев Игнатовых, д. 3, оф.710, г. Пермь', RFRegionsEnum.RFR_PERMSKIY_KRAI, u'559a9e0b64f10cabe6678655'),\n\n (u'559a9b994056d47aa5c57c5d', u'ООО \"СК АЛРОСА\" филиал 1', u\"(495) 664-28-81 (тел/факс)\", u\"Мукомольный пр-д, д.2, стр.1, г.Москва, 123290\", RFRegionsEnum.RFR_MOSCOW, u\"559a9e9c64f10cabe6678658\"),\n (u'559a9b994056d47aa5c57c5d', u'ООО \"СК АЛРОСА\" филиал 2', u\"(495) 664-28-81 (тел/факс)\", u\"Мукомольный пр-д, д.2, стр.1, г.Москва, 123290\", RFRegionsEnum.RFR_MOSCOW, u\"559a9e9c64f10cabe6678657\"),\n (u'559a9b994056d47aa5c57c5d', u'ООО \"СК АЛРОСА\" филиал 3', u\"(495) 664-28-81 (тел/факс)\", u\"Мукомольный пр-д, д.2, стр.1, г.Москва, 123290\", RFRegionsEnum.RFR_MOSCOW, u\"559a9e9c64f10cabe6678656\"),\n\n (u'559a9b994056d47aa5c57c5e', u'ОАО \"АльфаСтрахование\" филиал 1', u\"788-09-99; 785-08-88 (факс)\", u\"Ул. Шаболовка, д. 31, строение \\\"Б\\\", г.Москва, 115162\", RFRegionsEnum.RFR_MOSCOW, u\"559a9ee564f10cabe6678659\"),\n (u'559a9b994056d47aa5c57c5e', u'ОАО \"АльфаСтрахование\" филиал 2', u\"788-09-99; 785-08-88 (факс)\", u\"Ул. Шаболовка, д. 31, строение \\\"Б\\\", г.Москва, 115162\", RFRegionsEnum.RFR_MOSCOW, u\"559a9ee564f10cabe667865a\"),\n (u'559a9b994056d47aa5c57c5e', u'ОАО \"АльфаСтрахование\" филиал 3', u\"788-09-99; 785-08-88 (факс)\", u\"Ул. Шаболовка, д. 31, строение \\\"Б\\\", г.Москва, 115162\", RFRegionsEnum.RFR_MOSCOW, u\"559a9ee664f10cabe667865b\"),\n\n (u'559a9b994056d47aa5c57c5f', u'ОАО СК \"Альянс\" филиал 1', u\"956-21-05\", u\"Озерковская набережная, д. 30, г.Москва, 115184\", RFRegionsEnum.RFR_MOSCOW, u\"559a9f3364f10cabe667865c\"),\n (u'559a9b994056d47aa5c57c5f', u'ОАО СК \"Альянс\" филиал 2', u\"956-21-05\", u\"Озерковская набережная, д. 30, г.Москва, 115184\", RFRegionsEnum.RFR_MOSCOW, u\"559a9f3364f10cabe667865d\"),\n (u'559a9b994056d47aa5c57c5f', u'ОАО СК \"Альянс\" филиал 3', u\"956-21-05\", u\"Озерковская набережная, д. 
30, г.Москва, 115184\", RFRegionsEnum.RFR_MOSCOW, u\"559a9f3364f10cabe667865e\"),\n\n (u'559a9b994056d47aa5c57c60', u'ООО \"Страховая компания \"Ангара\" филиал 1', u\"(3953) 41-15-15 (тел/факс)\", u\"ул.Южная, д.23, г.Братск, Иркутская обл., 665717\", RFRegionsEnum.RFR_IRKUTSKAYA_REGION, u\"559a9f8764f10cabe667865f\"),\n (u'559a9b994056d47aa5c57c60', u'ООО \"Страховая компания \"Ангара\" филиал 2', u\"(3953) 41-15-15 (тел/факс)\", u\"ул.Южная, д.23, г.Братск, Иркутская обл., 665717\", RFRegionsEnum.RFR_IRKUTSKAYA_REGION, u\"559a9f8764f10cabe6678660\"),\n (u'559a9b994056d47aa5c57c60', u'ООО \"Страховая компания \"Ангара\" филиал 3', u\"(3953) 41-15-15 (тел/факс)\", u\"ул.Южная, д.23, г.Братск, Иркутская обл., 665717\", RFRegionsEnum.RFR_IRKUTSKAYA_REGION, u\"559a9f8864f10cabe6678661\"),\n\n (u'559a9b9a4056d47aa5c57c61', u'ООО \"Антал-Страхование\" филиал 1', u\"8-800-333-18-22\", u\"Петровский пер., д.10, строение 2, г. Москва, 107031\", RFRegionsEnum.RFR_MOSCOW, u\"559a9fd164f10cabe6678662\"),\n (u'559a9b9a4056d47aa5c57c61', u'ООО \"Антал-Страхование\" филиал 2', u\"8-800-333-18-22\", u\"Петровский пер., д.10, строение 2, г. Москва, 107031\", RFRegionsEnum.RFR_MOSCOW, u\"559a9fd164f10cabe6678663\"),\n (u'559a9b9a4056d47aa5c57c61', u'ООО \"Антал-Страхование\" филиал 3', u\"8-800-333-18-22\", u\"Петровский пер., д.10, строение 2, г. Москва, 107031\", RFRegionsEnum.RFR_MOSCOW, u\"559a9fd264f10cabe6678664\"),\n\n (u'559a9b9a4056d47aa5c57c62', u'ООО \"СГ \"АСКО\" филиал 1', u\"(8552) 39-23-9\", u\"Проспект Вахитова, дом 24, а/я 27, г.Набережные Челны, Республика Татарстан, 423815\", RFRegionsEnum.RFR_TATARSTAN, u\"559aa01364f10cabe6678665\"),\n (u'559a9b9a4056d47aa5c57c62', u'ООО \"СГ \"АСКО\" филиал 2', u\"(8552) 39-23-9\", u\"Проспект Вахитова, дом 24, а/я 27, г.Набережные Челны, Республика Татарстан, 423815\", RFRegionsEnum.RFR_TATARSTAN, u\"559aa01364f10cabe6678666\"),\n (u'559a9b9a4056d47aa5c57c62', u'ООО \"СГ \"АСКО\" филиал 3', u\"(8552) 39-23-9\", u\"Проспект Вахитова, дом 24, а/я 27, г.Набережные Челны, Республика Татарстан, 423815\", RFRegionsEnum.RFR_TATARSTAN, u\"559aa01364f10cabe6678667\"),\n\n (u'559a9b9a4056d47aa5c57c63', u'ООО СК \"ВТБ Страхование\" филиал 1', u\"(495) 644-44-40\", u\"Чистопрудный бульвар, д. 8, стр. 1, г. Москва\", RFRegionsEnum.RFR_MOSCOW, u\"559aa08e64f10cabe6678668\"),\n (u'559a9b9a4056d47aa5c57c63', u'ООО СК \"ВТБ Страхование\" филиал 2', u\"(495) 644-44-40\", u\"Чистопрудный бульвар, д. 8, стр. 1, г. Москва\", RFRegionsEnum.RFR_MOSCOW, u\"559aa08e64f10cabe6678669\"),\n (u'559a9b9a4056d47aa5c57c63', u'ООО СК \"ВТБ Страхование\" филиал 3', u\"(495) 644-44-40\", u\"Чистопрудный бульвар, д. 8, стр. 1, г. Москва\", RFRegionsEnum.RFR_MOSCOW, u\"559aa08f64f10cabe667866a\"),\n\n (u'559a9b9a4056d47aa5c57c64', u'ООО Страховая Компания \"Гелиос\" филиал 1', u\"(495)981-96-33\", u\"Бульвар Энтузиастов, д. 2, г. Москва, 109544\", RFRegionsEnum.RFR_MOSCOW, u\"559aa0bf64f10cabe667866b\"),\n (u'559a9b9a4056d47aa5c57c64', u'ООО Страховая Компания \"Гелиос\" филиал 2', u\"(495)981-96-33\", u\"Бульвар Энтузиастов, д. 2, г. Москва, 109544\", RFRegionsEnum.RFR_MOSCOW, u\"559aa0bf64f10cabe667866c\"),\n (u'559a9b9a4056d47aa5c57c64', u'ООО Страховая Компания \"Гелиос\" филиал 3', u\"(495)981-96-33\", u\"Бульвар Энтузиастов, д. 2, г. 
Москва, 109544\", RFRegionsEnum.RFR_MOSCOW, u\"559aa0c064f10cabe667866d\")\n )\n\n for ca_id, title, phone, address, region, id_val in b_data:\n cb = CarAssuranceBranch(\n car_assurance_id=ca_id,\n id=id_val,\n title=title,\n address=address,\n phone=phone,\n region=region\n )\n sqldb.session.add(cb)\n sqldb.session.commit()\n" }, { "alpha_fraction": 0.745312511920929, "alphanum_fraction": 0.75, "avg_line_length": 36.70588302612305, "blob_id": "57a223ce0d6eb73bda77780e32fda5fe831f4dfe", "content_id": "d609694915b44aa2c3f6faeb99b03fffc76b97d4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 640, "license_type": "no_license", "max_line_length": 106, "num_lines": 17, "path": "/app/services/yurist/__init__.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport os\n\nimport jinja2\n\nfrom services.yurist.api import yurist_bp\n\n\ndef register(app, jinja_env, class_loader, url_prefix=None):\n app.register_blueprint(yurist_bp, url_prefix=url_prefix)\n\n search_path = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), u\"templates\"))\n jinja_env.loader.loaders.append(jinja2.FileSystemLoader(search_path))\n\n class_loader.POSSIBLE_LOCATIONS.append('services.yurist.data_model.fields')\n class_loader.POSSIBLE_LOCATIONS.append('services.yurist.data_model.enums')\n class_loader.POSSIBLE_LOCATIONS.append('services.yurist.data_model.db_models')" }, { "alpha_fraction": 0.7474972009658813, "alphanum_fraction": 0.7486095428466797, "avg_line_length": 39.8636360168457, "blob_id": "4a890b6f1a62b924ddc6247c6f409484f357ad95", "content_id": "bad2d7c2065b0f6a4a2182b2e734e6cc31519081", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 899, "license_type": "no_license", "max_line_length": 114, "num_lines": 22, "path": "/app/services/yurist/data_model/fields.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom fw.documents.fields.complex_doc_fields import DocArrayField\nfrom fw.documents.fields.doc_fields import document_model, BaseDocField\nfrom fw.documents.fields.simple_doc_fields import DocMongoIdField, DocDateTimeField, DocBoolField, DocEnumField, \\\n DocTextField\nfrom services.yurist.data_model.enums import YuristBatchCheckStatus\n\n\n@document_model\nclass YuristBatchCheck(BaseDocField):\n\n id = DocMongoIdField(is_parse_from_api=False)\n batch_id = DocTextField(is_parse_from_api=False)\n\n create_date = DocDateTimeField()\n attached_files = DocArrayField(cls=u\"DocFileAttachField\")\n typos_correction = DocBoolField(required=False, default=False)\n\n status = DocEnumField(enum_cls=\"YuristBatchCheckStatus\", required=True, is_parse_from_api=False,\n default=YuristBatchCheckStatus.YBS_NEW)\n\n __api_to_db_mapping__ = {'id': '_id'}\n" }, { "alpha_fraction": 0.46767565608024597, "alphanum_fraction": 0.4915536642074585, "avg_line_length": 49.17869567871094, "blob_id": "a177f41236917598422b3577a12dc1cb24fa787f", "content_id": "b4fc764c1f0f9d49ca122fb47d59ad33a471da10", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 89496, "license_type": "no_license", "max_line_length": 207, "num_lines": 1746, "path": "/jb_tests/test_pack/test_osago.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime, timedelta\n\nfrom flask import json\nfrom 
fw.catalogs.models import BikCatalog\n\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.db_fields import DocumentBatchDbObject, BatchDocumentDbObject\nfrom fw.documents.doc_requisites_storage import DocRequisitiesStorage\nfrom fw.documents.enums import DocumentBatchTypeEnum, DocumentTypeEnum\nfrom fw.documents.schema.schema_transform import transform_with_schema\nfrom services.osago.documents.enums import OsagoDocTypeEnum, PretensionResultEnum, InsuranceLawsuitEnum, \\\n OsagoRefusalReasonEnum\nfrom services.pay.models import PayInfoObject, PurchaseServiceType\nfrom test_api import authorized\nfrom test_pack.base_batch_test import BaseBatchTestCase\n\n\nclass OsagoTestCase(BaseBatchTestCase):\n\n @authorized()\n def test_docs_claim(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_OSAGO]['doc_types'] = [DocumentTypeEnum.DT_OSAGO_DOCUMENTS_CLAIM]\n\n with self.app.app_context():\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_OSAGO, self.user)\n victim_car_owner = self.create_person(self.user, batch.id, phone=\"\")\n responsible_person = self.create_person(self.user, batch.id, name=u\"Арина\",\n surname=u\"Поганкина\", patronymic=u\"Мстиславовна\", age=22, phone=\"\")\n\n batch_json = json.dumps({\n \"data\": {\n 'policy_called': True,\n 'victim_owner': victim_car_owner.id + \"_person\",\n 'responsible_person': responsible_person.id + '_person',\n 'docs_got': [OsagoDocTypeEnum.ODT_INQUIRE_CRASH,\n OsagoDocTypeEnum.ODT_NOTICE_CRASH,\n #OsagoDocTypeEnum.ODT_INSURANCE_DENIAL,\n #OsagoDocTypeEnum.ODT_ACT_INSURANCE_CASE,\n OsagoDocTypeEnum.ODT_POLICE_STATEMENT,\n OsagoDocTypeEnum.ODT_POLICE_PROTOCOL,\n OsagoDocTypeEnum.ODT_CASE_INITIATION_REFUSAL],\n 'victim_car_brand': u\"Форд Фокус в кредит\",\n 'victim_car_number': u\"А000ОО98\",\n 'crash_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'police_case': True,\n 'independent_expertise_sum': 1500000,\n 'independent_expertise_cost': 200000,\n 'compensation_sum': 10000,\n 'problem_type': 'refusal',\n \"submission_way\": \"responsible_person\",\n 'obtain_way': 'responsible_person',\n 'own_insurance_company': True,\n 'obtain_address_type': 'responsible_person_address',\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': batch.id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_OSAGO_DOCUMENTS_CLAIM])\n })\n self.assertEqual(result.status_code, 200)\n doc = BatchDocumentDbObject.query.filter().scalar()\n print (json.dumps(doc.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n @authorized()\n def test_pretesion(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_OSAGO]['doc_types'] = [DocumentTypeEnum.DT_OSAGO_PRETENSION]\n\n with self.app.app_context():\n assurance = self.addCarAssurance(u\"РЕСО-ГАРАНТИЯ\")\n assurance_branch = self.addCarAssuranceBranch(assurance=assurance)\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_OSAGO, self.user)\n\n batch_payment = PayInfoObject(\n user=self.user,\n batch=batch,\n pay_record_id=0,\n payment_provider=1,\n service_type=PurchaseServiceType.OSAGO_PART1\n )\n sqldb.session.add(batch_payment)\n sqldb.session.commit()\n\n 
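# The PayInfoObject above appears to mark this batch as already paid for OSAGO part 1;\n            # the persons and the BikCatalog row created next are fixtures, so the person\n            # references and the 'bik_account' value in the payload below can resolve.\n            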
victim_car_owner = self.create_person(self.user, batch.id, phone=\"345345\")\n responsible_person = self.create_person(self.user, batch.id, name=u\"Арина\",\n surname=u\"Поганкина\", patronymic=u\"Мстиславовна\", age=22, phone=\"1231313\")\n victim_driver = self.create_person(self.user, batch.id, name=u\"Полина\",\n surname=u\"Ступицина\", patronymic=u\"Араровна\", age=18, phone=\"1231313\")\n guilty_driver = self.create_person(self.user, batch.id, name=u\"Прохор\",\n surname=u\"Иванов\", patronymic=u\"Иванович\", age=88, phone=\"123123\")\n guilty_owner = self.create_person(self.user, batch.id, name=u\"Бобр\",\n surname=u\"Лесной\", age=31, phone=\"123123\")\n\n bik = BikCatalog(\n id=\"040173745\",\n name=u\"банк\",\n okpo=\"12323\",\n bik=\"040173745\",\n phone=\"\",\n address=\"\",\n kor_account='2342342'\n )\n sqldb.session.add(bik)\n sqldb.session.commit()\n\n ppid = lambda x: x.id + '_person'\n batch_json = json.dumps({\n \"data\": {\n 'victim_owner': ppid(victim_car_owner),\n 'got_cash': True,\n 'have_osago': 'victim',\n 'obtain_way': 'responsible_person',\n 'other_date': False,\n 'responsible_person': ppid(responsible_person),\n 'victim_car_brand': u\"Форд Фокус в кредит\",\n 'victim_car_number': u\"А000ОО98\",\n 'crash_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'obtain_address_type': 'responsible_person_address',\n 'owner_as_victim_driver': True,\n 'use_other_submission_address': False,\n 'other_insurance': True,\n 'victim_driver': ppid(victim_driver),\n 'guilty_driver': ppid(guilty_driver),\n 'guilty_car_brand': u'Белаз',\n 'guilty_car_number': u'М100Е50',\n 'policy_series': u'ССС',\n 'policy_number': u'123456789',\n 'problem_type': 'underpay',\n 'compensation_sum': 1111.1,\n 'bik_account': u'040173745',\n 'account_number': u'12345678901234567890',\n 'independent_expertise_number': u'1111111',\n 'independent_expertise_sum': '0',\n 'independent_expertise_cost': '0',\n 'guilty_owner': ppid(guilty_owner),\n 'submission_branch_id': assurance_branch.id,\n \"submission_way\": \"oneself\",\n #'submission_branch_id': \"\",\n 'insurance_company_region': u\"Санкт-Петербург\",\n 'owner_as_guilty_driver': False,\n #'submission_address': u'Какой-то адрес',\n 'obtain_address': u\"Обтейн адрес\",\n 'insurance_name': u\"Суперстраховая №1\",\n 'police_case': True,\n 'policy_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'own_insurance_company': True,\n 'add_person_to_claim': True,\n 'first_claim_date': datetime.utcnow().strftime(\"%Y-%m-%d\"),\n 'court_include': False\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': batch.id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_OSAGO_PRETENSION])\n })\n self.assertEqual(result.status_code, 200)\n doc = BatchDocumentDbObject.query.filter().scalar()\n print (json.dumps(doc.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n @authorized()\n def test_mail_list(self):\n 
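# Restricting the batch descriptor to a single document type presumably makes\n        # this test validate and render only DT_OSAGO_MAIL_LIST.\n        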
DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_OSAGO]['doc_types'] = [DocumentTypeEnum.DT_OSAGO_MAIL_LIST]\n\n with self.app.app_context():\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_OSAGO, self.user, status='pretension')\n batch_json = json.dumps({\n \"data\": {\n 'other_insurance': True,\n 'insurance_name': u'ОБЩЕСТВО С ОГРАНИЧЕННОЙ ОТВЕТСТВЕННОСТЬЮ РОССИЙСКАЯ ГОСУДАРСТВЕННАЯ КОМПАНИЯ ПО ОЧИСТКЕ СТОЧНЫХ ВОД',\n 'use_other_submission_address': True,\n 'submission_address': u\"197342, Санкт-Петербург, Ушаковская наб., д. 5\",\n 'submission_way': \"mail\",\n 'docs_got': [\n OsagoDocTypeEnum.ODT_INQUIRE_CRASH,\n OsagoDocTypeEnum.ODT_NOTICE_CRASH,\n OsagoDocTypeEnum.ODT_INSURANCE_DENIAL,\n OsagoDocTypeEnum.ODT_POLICE_STATEMENT,\n OsagoDocTypeEnum.ODT_POLICE_PROTOCOL\n ],\n 'policy_called': True,\n 'court_include': True\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': batch.id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_OSAGO_MAIL_LIST])\n })\n self.assertEqual(result.status_code, 200)\n\n @authorized()\n def test_trust_submission_docs(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_OSAGO]['doc_types'] = [DocumentTypeEnum.DT_OSAGO_TRUST_SUBMISSION_DOCS]\n\n with self.app.app_context():\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_OSAGO, self.user)\n victim_car_owner = self.create_person(self.user, batch.id)\n responsible_person = self.create_person(self.user, batch.id, name=u\"Арина\",\n surname=u\"Поганкина\", patronymic=u\"Мстиславовна\", age=22)\n\n batch_json = json.dumps({\n \"data\": {\n 'victim_owner': victim_car_owner.id + \"_person\",\n 'responsible_person': responsible_person.id + '_person',\n 'victim_car_brand': u\"Форд Фокус в кредит\",\n 'victim_car_number': u\"А000ОО98\",\n 'crash_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n \"submission_way\": \"responsible_person\",\n 'obtain_way': 'mail',\n 'insurance_name': u'ООО ААА ОАО АОА',\n 'court_include': True\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': batch.id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_OSAGO_TRUST_SUBMISSION_DOCS])\n })\n self.assertEqual(result.status_code, 200)\n\n @authorized()\n def test_trust_obtain_docs(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_OSAGO]['doc_types'] = [DocumentTypeEnum.DT_OSAGO_TRUST_OBTAIN_DOCS]\n\n with self.app.app_context():\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_OSAGO, self.user)\n victim_car_owner = self.create_person(self.user, batch.id)\n responsible_person = self.create_person(self.user, batch.id, name=u\"Арина\",\n surname=u\"Поганкина\", patronymic=u\"Мстиславовна\", age=22)\n\n batch_json = json.dumps({\n \"data\": {\n 'victim_owner': victim_car_owner.id + \"_person\",\n 'responsible_person': 
responsible_person.id + '_person',\n 'victim_car_brand': u\"Форд Фокус в кредит\",\n 'victim_car_number': u\"А000ОО98\",\n 'crash_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n \"submission_way\": \"mail\",\n 'obtain_way': 'responsible_person',\n 'insurance_name': u'ООО ААА ОАО АОА'\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': batch.id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_OSAGO_TRUST_OBTAIN_DOCS])\n })\n self.assertEqual(result.status_code, 200)\n\n @authorized()\n def test_trust_submission_and_obtain_docs(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_OSAGO]['doc_types'] = [DocumentTypeEnum.DT_OSAGO_TRUST_SUBMISION_OBTAIN_DOCS]\n\n with self.app.app_context():\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_OSAGO, self.user)\n victim_car_owner = self.create_person(self.user, batch.id)\n responsible_person = self.create_person(self.user, batch.id, name=u\"Арина\",\n surname=u\"Поганкина\", patronymic=u\"Ивановна\", age=22)\n\n batch_json = json.dumps({\n \"data\": {\n 'victim_owner': victim_car_owner.id + \"_person\",\n 'responsible_person': responsible_person.id + '_person',\n 'victim_car_brand': u\"Форд Фокус в кредит\",\n 'victim_car_number': u\"А000ОО98\",\n 'crash_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n \"submission_way\": \"responsible_person\",\n 'obtain_way': 'responsible_person',\n 'insurance_name': u'ООО ААА ОАО АОА',\n 'court_include': True\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': batch.id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_OSAGO_TRUST_SUBMISION_OBTAIN_DOCS])\n })\n self.assertEqual(result.status_code, 200)\n\n @authorized()\n def test_find_osago_policy_on_id_change(self):\n with self.app.app_context():\n assurance = self.addCarAssurance(u\"РЕСО-ГАРАНТИЯ\")\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_OSAGO]['doc_types'] = [DocumentTypeEnum.DT_OSAGO_MAIL_LIST]\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_OSAGO, self.user)\n batch_json = json.dumps({\n \"data\": {\n 'policy_series': u'ААА',\n 'policy_number': '123111'\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n data = json.loads(result.data)\n del data['result']['creation_date']\n del data['result']['id']\n del data['result']['name']\n self.assertEqual(data, {\n u'result': {\n u'all_docs': [],\n u'batch_type': u'osago',\n u'data': {u'policy_number': u'123111',\n u'policy_series': u'ААА'},\n u'metadata': {},\n u'paid': u'false',\n u'result_fields': {\n u'insurance_id': assurance.id,\n u'insurance_name': assurance.full_name,\n u'policy_date': u'2013-10-20',\n u'region_prepositional': u'',\n 
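# the result_fields below are presumably recomputed server-side from the\n                    # policy series/number lookup against the seeded CarAssurance record\n                    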
u'responsible_person_dative': u'',\n u'underpay_sum': u'50000',\n u'above_limits_sum': u'50000'\n },\n u'status': u'pretension',\n u'status_data': {u'finalisation_count': u'0'}}\n })\n batch = DocumentBatchDbObject.query.filter_by(id=batch.id).scalar()\n self.assertEqual(batch.result_fields, {\n u'insurance_id': assurance.id,\n u'insurance_name': assurance.full_name,\n u'policy_date': u\"2013-10-20\",\n u'region_prepositional': u'',\n u'responsible_person_dative': u'',\n u'underpay_sum': u'50000',\n u'above_limits_sum': u'50000'\n })\n batch_json = json.dumps({\n \"data\": {\n 'insurance_name': u'CCC',\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n self.assertEqual(result.status_code, 200)\n\n @authorized()\n def test_underpay_sum(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_OSAGO]['doc_types'] = [DocumentTypeEnum.DT_OSAGO_MAIL_LIST]\n\n with self.app.app_context():\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_OSAGO, self.user, status='pretension')\n batch_json = json.dumps({\n \"data\": {\n 'policy_called': True,\n 'crash_date': datetime.utcnow().strftime(\"%Y-%m-%d\"),\n 'independent_expertise_sum': 500000,\n 'problem_type': 'refusal',\n 'compensation_sum': 100\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n self.assertEqual(db_batch.result_fields['underpay_sum'], u'400000')\n\n batch_json = json.dumps({\n \"data\": {\n 'policy_called': False,\n 'crash_date': datetime.utcnow().strftime(\"%Y-%m-%d\"),\n 'independent_expertise_sum': 100500,\n 'problem_type': 'refusal',\n 'compensation_sum': 100\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n self.assertEqual(db_batch.result_fields['underpay_sum'], u'50000')\n\n batch_json = json.dumps({\n \"data\": {\n 'policy_called': True,\n 'crash_date': datetime.utcnow().strftime(\"%Y-%m-%d\"),\n 'independent_expertise_sum': 100500,\n 'problem_type': 'underpay',\n 'compensation_sum': 100\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n self.assertEqual(db_batch.result_fields['underpay_sum'], u'100400')\n\n batch_json = json.dumps({\n \"data\": {\n 'policy_called': False,\n 'crash_date': datetime.utcnow().strftime(\"%Y-%m-%d\"),\n 'independent_expertise_sum': 100500,\n 'problem_type': 'underpay',\n 'compensation_sum': 100\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n self.assertEqual(db_batch.result_fields['underpay_sum'], u'49900')\n\n batch_json = json.dumps({\n \"data\": {\n 'policy_called': True,\n 'crash_date': datetime(2013, 1, 1).strftime(\"%Y-%m-%d\"),\n 'independent_expertise_sum': 300000,\n 'problem_type': 'refusal',\n 'compensation_sum': 100\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n 
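# The expected values below appear consistent with: underpay = min(expertise_sum, limit)\n            # - compensation_sum for 'underpay', with compensation ignored once the cap binds for\n            # 'refusal'; the limit seems to be 400000/50000 RUB (police called / europrotocol)\n            # for crashes from October 2014 on, and 120000/25000 RUB before that, e.g.\n            #   min(500000, 400000) == 400000 for the first case.\n            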
self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n self.assertEqual(db_batch.result_fields['underpay_sum'], u'120000')\n\n batch_json = json.dumps({\n \"data\": {\n 'policy_called': False,\n 'crash_date': datetime(2013, 1, 1).strftime(\"%Y-%m-%d\"),\n 'independent_expertise_sum': 100500,\n 'problem_type': 'refusal',\n 'compensation_sum': 100\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n self.assertEqual(db_batch.result_fields['underpay_sum'], u'25000')\n\n batch_json = json.dumps({\n \"data\": {\n 'policy_called': True,\n 'crash_date': datetime(2013, 1, 1).strftime(\"%Y-%m-%d\"),\n 'independent_expertise_sum': 10500,\n 'problem_type': 'underpay',\n 'compensation_sum': 100\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n self.assertEqual(db_batch.result_fields['underpay_sum'], u'10400')\n\n batch_json = json.dumps({\n \"data\": {\n 'policy_called': False,\n 'crash_date': datetime(2013, 1, 1).strftime(\"%Y-%m-%d\"),\n 'independent_expertise_sum': 100500,\n 'problem_type': 'underpay',\n 'compensation_sum': 100\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n self.assertEqual(db_batch.result_fields['underpay_sum'], u'24900')\n\n batch_json = json.dumps({\n \"data\": {\n 'policy_called': True,\n 'policy_case': False,\n 'crash_date': datetime(2014, 10, 2).strftime(\"%Y-%m-%d\"),\n 'independent_expertise_sum': 120001,\n 'independent_expertise_cost': 1000,\n 'problem_type': 'underpay',\n 'compensation_sum': 2222227\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n self.assertEqual(db_batch.result_fields['above_limits_sum'], u'0')\n\n @authorized()\n def test_go_ahead_in_pretesion(self):\n with self.app.app_context():\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_OSAGO, self.user, status=\"pretension\")\n victim_car_owner = self.create_person(self.user, batch.id, name=u\"ЖЖ\", surname=u\"ЖЖ\", patronymic=u\"ЖЖ\")\n guilty_car_owner = self.create_person(self.user, batch.id)\n responsible_person = self.create_person(self.user, batch.id, name=u\"Арина\",\n surname=u\"Поганкина\", patronymic=u\"Мстиславовна\", age=22)\n\n batch_json = json.dumps({\n \"data\": {\n 'crash_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'policy_called': True,\n 'all_have_osago': True,\n 'own_insurance_company': True,\n 'have_osago': 'both',\n 'problem_type': 'refusal',\n 'refusal_reason': 'wrong_docs',\n 'notice_has_mistakes': False,\n 'got_cash': False,\n 'victim_owner': victim_car_owner.id + \"_person\",\n 'owner_as_victim_driver': True,\n 'victim_car_brand': u\"Форд Фокус в кредит\",\n 'victim_car_number': u\"А000ОО98\",\n 'guilty_owner': guilty_car_owner.id + \"_person\",\n 'owner_as_guilty_driver': True,\n 'guilty_car_brand': u'Рено',\n 'guilty_car_number': u'В000ВВ50',\n 
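# with every required field filled in, go_ahead below is expected to render\n                    # all three claim-stage documents and advance the batch to 'claim'\n                    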
'other_victims': None,\n 'insurance_company_region': u'Санкт-Петербург',\n 'policy_series': u'ААА',\n 'policy_number': '0123456789',\n 'other_insurance': True,\n 'insurance_name': u\"РоСгосСтраХ\",\n 'insurance_id': None,\n 'other_date': True,\n 'policy_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'first_claim_date': (datetime.utcnow() - timedelta(days=90)).strftime(\"%Y-%m-%d\"),\n 'independent_expertise_number': u'01234567890123456789',\n 'independent_expertise_sum': '222000.50',\n 'independent_expertise_cost': 1000,\n 'compensation_sum': 1000.9,\n 'add_person_to_claim': True,\n 'docs_got': [OsagoDocTypeEnum.ODT_INQUIRE_CRASH,\n OsagoDocTypeEnum.ODT_BANK_STATEMENT],\n 'insurance_case_number': '01234567890123456789',\n 'submission_way': 'oneself',\n 'submission_branch_id': '',\n 'use_other_submission_address': True,\n 'submission_address': u'сабмишн адрес',\n 'obtain_way': 'responsible_person',\n 'responsible_person': responsible_person.id + '_person',\n 'court_include': True,\n 'obtain_address_type': 'other_address',\n 'obtain_address': 'аптейн адрес',\n 'bik_account': '012345678',\n 'account_number': '01234567890123456789',\n 'police_case': True\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n result = self.test_client.post('/batch/go_ahead/', data={\n 'batch_id': batch.id,\n })\n self.assertEqual(result.status_code, 200)\n self.assertEqual(BatchDocumentDbObject.query.count(), 3)\n self.assertEqual(BatchDocumentDbObject.query.filter_by(status=\"rendered\").count(), 3)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n self.assertEqual(db_batch.status, \"claim\")\n\n @authorized()\n def test_go_ahead_in_pretesion_wrong_data(self):\n with self.app.app_context():\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_OSAGO, self.user, status=\"pretension\")\n victim_car_owner = self.create_person(self.user, batch.id)\n guilty_car_owner = self.create_person(self.user, batch.id)\n responsible_person = self.create_person(self.user, batch.id, name=u\"Арина\",\n surname=u\"Поганкина\", patronymic=u\"Мстиславовна\", age=22)\n\n batch_json = json.dumps({\n \"data\": {\n 'crash_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'policy_called': True,\n 'all_have_osago': True,\n 'own_insurance_company': True,\n 'have_osago': 'both',\n 'problem_type': 'refusal',\n 'refusal_reason': 'wrong_docs',\n 'notice_has_mistakes': False,\n 'got_cash': False,\n 'victim_owner': victim_car_owner.id + \"_person\",\n 'owner_as_victim_driver': True,\n 'victim_car_brand': u\"Форд Фокус в кредит\",\n 'victim_car_number': u\"А000ОО98\",\n 'guilty_owner': guilty_car_owner.id + \"_person\",\n 'owner_as_guilty_driver': True,\n 'guilty_car_brand': u'Рено',\n 'guilty_car_number': u'В000ВВ50',\n 'other_victims': None,\n 'insurance_company_region': u'Санкт-Петербург',\n 'policy_series': u'ААА',\n 'policy_number': '0123456789',\n 'other_insurance': True,\n 'insurance_name': u\"РоСгосСтраХ\",\n 'insurance_id': None,\n 'other_date': True,\n 'policy_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'first_claim_date': (datetime.utcnow() - timedelta(days=90)).strftime(\"%Y-%m-%d\"),\n 'independent_expertise_number': u'01234567890123456789',\n 
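# this payload deliberately omits 'obtain_address' while 'obtain_address_type'\n                    # is 'other_address'; validation below is expected to fail with error_code 5\n                    # on that field, leaving the batch in 'pretension'\n                    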
'independent_expertise_sum': 1500000,\n 'independent_expertise_cost': 200000,\n 'compensation_sum': 0,\n 'add_person_to_claim': True,\n 'docs_got': [OsagoDocTypeEnum.ODT_INQUIRE_CRASH,\n OsagoDocTypeEnum.ODT_NOTICE_CRASH,\n OsagoDocTypeEnum.ODT_ACT_INSURANCE_CASE],\n 'insurance_case_number': '01234567890123456789',\n 'submission_way': 'responsible_person',\n 'submission_branch_id': '',\n 'use_other_submission_address': True,\n 'submission_address': u'сабмишн адрес',\n 'obtain_way': 'responsible_person',\n 'responsible_person': responsible_person.id + '_person',\n 'court_include': True,\n 'obtain_address_type': 'other_address',\n 'bik_account': '012345678',\n 'account_number': '01234567890123456789'\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n result = self.test_client.post('/batch/go_ahead/', data={\n 'batch_id': batch.id,\n })\n self.assertEqual(result.status_code, 200)\n self.assertEqual(BatchDocumentDbObject.query.count(), 3)\n self.assertEqual(BatchDocumentDbObject.query.filter_by(status=\"rendered\").count(), 2)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).scalar()\n self.assertEqual(db_batch.status, \"pretension\")\n self.assertEqual(db_batch.error_info, {\n u'error_ext': [{u'error_code': 5, u'field': u'obtain_address'}]\n })\n\n @authorized()\n def test_go_ahead_in_generating_pretension(self):\n with self.app.app_context():\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_OSAGO, self.user, status=\"generating_pretension\")\n victim_car_owner = self.create_person(self.user, batch.id)\n guilty_car_owner = self.create_person(self.user, batch.id)\n responsible_person = self.create_person(self.user, batch.id, name=u\"Арина\",\n surname=u\"Поганкина\", patronymic=u\"Мстиславовна\", age=22)\n doc = self.create_document(DocumentTypeEnum.DT_OSAGO_PRETENSION, batch, data={})\n\n batch_json = json.dumps({\n \"data\": {\n 'crash_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'policy_called': True,\n 'all_have_osago': True,\n 'own_insurance_company': True,\n 'have_osago': 'both',\n 'problem_type': 'refusal',\n 'refusal_reason': 'wrong_docs',\n 'notice_has_mistakes': False,\n 'got_cash': False,\n 'victim_owner': victim_car_owner.id + \"_person\",\n 'owner_as_victim_driver': True,\n 'victim_car_brand': u\"Форд Фокус в кредит\",\n 'victim_car_number': u\"А000ОО98\",\n 'guilty_owner': guilty_car_owner.id + \"_person\",\n 'owner_as_guilty_driver': True,\n 'guilty_car_brand': u'Рено',\n 'guilty_car_number': u'В000ВВ50',\n 'other_victims': None,\n 'insurance_company_region': u'Санкт-Петербург',\n 'policy_series': u'ААА',\n 'policy_number': '0123456789',\n 'other_insurance': True,\n 'insurance_name': u\"РоСгосСтраХ\",\n 'insurance_id': None,\n 'other_date': True,\n 'policy_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'first_claim_date': (datetime.utcnow() - timedelta(days=90)).strftime(\"%Y-%m-%d\"),\n 'independent_expertise_number': u'01234567890123456789',\n 'independent_expertise_sum': 1500000,\n 'independent_expertise_cost': 200000,\n 'compensation_sum': 0,\n 'add_person_to_claim': True,\n 'docs_got': [OsagoDocTypeEnum.ODT_INQUIRE_CRASH,\n OsagoDocTypeEnum.ODT_NOTICE_CRASH,\n OsagoDocTypeEnum.ODT_ACT_INSURANCE_CASE],\n 
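# go_ahead appears to be a no-op while the batch is in 'generating_pretension':\n                    # the assertions below expect the single pre-created document to stay\n                    # unrendered and the batch status to remain unchanged\n                    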
'insurance_case_number': '01234567890123456789',\n 'submission_way': 'responsible_person',\n 'submission_branch_id': '',\n 'use_other_submission_address': True,\n 'submission_address': u'сабмишн адрес',\n 'obtain_way': 'responsible_person',\n 'responsible_person': responsible_person.id + '_person',\n 'court_include': True,\n 'obtain_address_type': 'other_address',\n 'obtain_address': 'аптейн адрес',\n 'bik_account': '012345678',\n 'account_number': '01234567890123456789'\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n result = self.test_client.post('/batch/go_ahead/', data={\n 'batch_id': batch.id,\n })\n self.assertEqual(result.status_code, 200)\n self.assertEqual(BatchDocumentDbObject.query.count(), 1)\n self.assertEqual(BatchDocumentDbObject.query.filter_by(status=\"rendered\").count(), 0)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n self.assertEqual(db_batch.status, \"generating_pretension\")\n\n @authorized()\n def test_update_in_claim(self):\n with self.app.app_context():\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_OSAGO, self.user, status=\"pretension\")\n victim_car_owner = self.create_person(self.user, batch.id)\n guilty_car_owner = self.create_person(self.user, batch.id)\n responsible_person = self.create_person(self.user, batch.id, name=u\"Арина\",\n surname=u\"Поганкина\", patronymic=u\"Мстиславовна\", age=22)\n\n batch_json = json.dumps({\n \"data\": {\n 'crash_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'policy_called': True,\n 'all_have_osago': True,\n 'own_insurance_company': True,\n 'have_osago': 'both',\n 'problem_type': 'refusal',\n 'refusal_reason': 'wrong_docs',\n 'notice_has_mistakes': False,\n 'got_cash': False,\n 'victim_owner': victim_car_owner.id + \"_person\",\n 'owner_as_victim_driver': True,\n 'victim_car_brand': u\"Форд Фокус в кредит\",\n 'victim_car_number': u\"А000ОО98\",\n 'guilty_owner': guilty_car_owner.id + \"_person\",\n 'owner_as_guilty_driver': True,\n 'guilty_car_brand': u'Рено',\n 'guilty_car_number': u'В000ВВ50',\n 'other_victims': None,\n 'insurance_company_region': u'Санкт-Петербург',\n 'policy_series': u'ААА',\n 'policy_number': '0123456789',\n 'other_insurance': True,\n 'insurance_name': u\"РоСгосСтраХ\",\n 'insurance_id': None,\n 'other_date': True,\n 'policy_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'first_claim_date': (datetime.utcnow() - timedelta(days=90)).strftime(\"%Y-%m-%d\"),\n 'independent_expertise_number': u'01234567890123456789',\n 'independent_expertise_sum': 1500000,\n 'independent_expertise_cost': 200000,\n 'compensation_sum': 0,\n 'add_person_to_claim': True,\n 'docs_got': [OsagoDocTypeEnum.ODT_INQUIRE_CRASH,\n OsagoDocTypeEnum.ODT_NOTICE_CRASH,\n OsagoDocTypeEnum.ODT_ACT_INSURANCE_CASE],\n 'insurance_case_number': '01234567890123456789',\n 'submission_way': 'responsible_person',\n 'submission_branch_id': '',\n 'use_other_submission_address': True,\n 'submission_address': u'сабмишн адрес',\n 'obtain_way': 'responsible_person',\n 'responsible_person': responsible_person.id + '_person',\n 'court_include': True,\n 'obtain_address_type': 'other_address',\n 'obtain_address': 'аптейн адрес',\n 'bik_account': '012345678',\n 'account_number': 
'01234567890123456789'\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n result = self.test_client.post('/batch/go_ahead/', data={\n 'batch_id': batch.id,\n })\n self.assertEqual(result.status_code, 200)\n self.assertEqual(BatchDocumentDbObject.query.count(), 0)\n self.assertEqual(BatchDocumentDbObject.query.filter_by(status=\"rendered\").count(), 3)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n self.assertEqual(db_batch.status, \"claim\")\n\n batch_json = json.dumps({\n \"data\": {\n 'crash_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'policy_called': False,\n 'all_have_osago': False,\n 'own_insurance_company': True,\n 'have_osago': 'both',\n 'problem_type': 'refusal',\n 'refusal_reason': 'wrong_docs',\n 'notice_has_mistakes': False,\n 'got_cash': False,\n 'victim_owner': victim_car_owner.id + \"_person\",\n 'owner_as_victim_driver': True,\n 'victim_car_brand': u\"Форд Фокус в кредит\",\n 'victim_car_number': u\"А000ОО98\",\n 'guilty_owner': guilty_car_owner.id + \"_person\",\n 'owner_as_guilty_driver': True,\n 'guilty_car_brand': u'Рено',\n 'guilty_car_number': u'В000ВВ50',\n 'other_victims': None,\n 'insurance_company_region': u'Санкт-Петербург',\n 'policy_series': u'ААА',\n 'policy_number': '0123456789',\n 'other_insurance': True,\n 'insurance_name': u\"РоСгосСтраХ\",\n 'insurance_id': None,\n 'other_date': True,\n 'policy_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'first_claim_date': (datetime.utcnow() - timedelta(days=90)).strftime(\"%Y-%m-%d\"),\n 'independent_expertise_number': u'01234567890123456789',\n 'independent_expertise_sum': 1500000,\n 'independent_expertise_cost': 200000,\n 'compensation_sum': 0,\n 'add_person_to_claim': True,\n 'docs_got': [OsagoDocTypeEnum.ODT_INQUIRE_CRASH,\n OsagoDocTypeEnum.ODT_NOTICE_CRASH,\n OsagoDocTypeEnum.ODT_ACT_INSURANCE_CASE],\n 'insurance_case_number': '01234567890123456789',\n 'submission_way': 'responsible_person',\n 'submission_branch_id': '',\n 'use_other_submission_address': True,\n 'submission_address': u'сабмишн адрес2',\n 'obtain_way': 'responsible_person',\n 'responsible_person': responsible_person.id + '_person',\n 'court_include': True,\n 'obtain_address_type': 'other_address',\n 'obtain_address': 'аптейн адрес',\n 'bik_account': '012345678',\n 'account_number': '01234567890123456789'\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n self.assertEqual(db_batch.status, \"claim\")\n\n self.assertEqual(BatchDocumentDbObject.query.count(), 3)\n self.assertEqual(BatchDocumentDbObject.query.filter(\n BatchDocumentDbObject.status==\"rendered\",\n BatchDocumentDbObject.file_id!=None\n ).count(), 3)\n\n @authorized()\n def test_update_in_generating_pretension(self):\n with self.app.app_context():\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_OSAGO, self.user, status=\"pretension\")\n victim_car_owner = self.create_person(self.user, batch.id)\n guilty_car_owner = self.create_person(self.user, batch.id)\n responsible_person = self.create_person(self.user, batch.id, 
name=u\"Арина\",\n surname=u\"Поганкина\", patronymic=u\"Мстиславовна\", age=22)\n\n batch_json = json.dumps({\n \"data\": {\n 'crash_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'policy_called': True,\n 'all_have_osago': True,\n 'own_insurance_company': True,\n 'have_osago': 'both',\n 'problem_type': 'refusal',\n 'refusal_reason': 'wrong_docs',\n 'notice_has_mistakes': False,\n 'got_cash': False,\n 'victim_owner': victim_car_owner.id + \"_person\",\n 'owner_as_victim_driver': True,\n 'victim_car_brand': u\"Форд Фокус в кредит\",\n 'victim_car_number': u\"А000ОО98\",\n 'guilty_owner': guilty_car_owner.id + \"_person\",\n 'owner_as_guilty_driver': True,\n 'guilty_car_brand': u'Рено',\n 'guilty_car_number': u'В000ВВ50',\n 'other_victims': None,\n 'insurance_company_region': u'Санкт-Петербург',\n 'policy_series': u'ААА',\n 'policy_number': '0123456789',\n 'other_insurance': True,\n 'insurance_name': u\"РоСгосСтраХ\",\n 'insurance_id': None,\n 'other_date': True,\n 'policy_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'first_claim_date': (datetime.utcnow() - timedelta(days=90)).strftime(\"%Y-%m-%d\"),\n 'independent_expertise_number': u'01234567890123456789',\n 'independent_expertise_sum': 1500000,\n 'independent_expertise_cost': 200000,\n 'compensation_sum': 0,\n 'add_person_to_claim': True,\n 'docs_got': [OsagoDocTypeEnum.ODT_INQUIRE_CRASH,\n OsagoDocTypeEnum.ODT_NOTICE_CRASH,\n OsagoDocTypeEnum.ODT_ACT_INSURANCE_CASE],\n 'insurance_case_number': '01234567890123456789',\n 'submission_way': 'responsible_person',\n 'submission_branch_id': '',\n 'use_other_submission_address': True,\n 'submission_address': u'сабмишн адрес',\n 'obtain_way': 'responsible_person',\n 'responsible_person': responsible_person.id + '_person',\n 'court_include': True,\n 'obtain_address_type': 'other_address',\n 'obtain_address': 'аптейн адрес',\n 'bik_account': '012345678',\n 'account_number': '01234567890123456789'\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n result = self.test_client.post('/batch/go_ahead/', data={\n 'batch_id': batch.id,\n })\n self.assertEqual(result.status_code, 200)\n self.assertEqual(BatchDocumentDbObject.query.count(), 3)\n self.assertEqual(BatchDocumentDbObject.query.filter_by(status=\"rendered\").count(), 3)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n self.assertEqual(db_batch.status, \"claim\")\n\n DocumentBatchDbObject.query.filter_by(id=batch.id).update({\n 'status': 'generating_pretension'\n })\n sqldb.session.commit()\n\n batch_json = json.dumps({\n \"data\": {\n 'crash_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'policy_called': False,\n 'all_have_osago': False,\n 'own_insurance_company': True,\n 'have_osago': 'both',\n 'problem_type': 'refusal',\n 'refusal_reason': 'wrong_docs',\n 'notice_has_mistakes': False,\n 'got_cash': False,\n 'victim_owner': victim_car_owner.id + \"_person\",\n 'owner_as_victim_driver': True,\n 'victim_car_brand': u\"Форд Фокус в кредит\",\n 'victim_car_number': u\"А000ОО98\",\n 'guilty_owner': guilty_car_owner.id + \"_person\",\n 'owner_as_guilty_driver': True,\n 'guilty_car_brand': u'Рено',\n 'guilty_car_number': u'В000ВВ50',\n 'other_victims': None,\n 
'insurance_company_region': u'Санкт-Петербург',\n 'policy_series': u'ААА',\n 'policy_number': '0123456789',\n 'other_insurance': True,\n 'insurance_name': u\"РоСгосСтраХ\",\n 'insurance_id': None,\n 'other_date': True,\n 'policy_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'first_claim_date': (datetime.utcnow() - timedelta(days=90)).strftime(\"%Y-%m-%d\"),\n 'independent_expertise_number': u'01234567890123456789',\n 'independent_expertise_sum': 1500000,\n 'independent_expertise_cost': 200000,\n 'compensation_sum': 0,\n 'add_person_to_claim': True,\n 'docs_got': [OsagoDocTypeEnum.ODT_INQUIRE_CRASH,\n OsagoDocTypeEnum.ODT_NOTICE_CRASH,\n OsagoDocTypeEnum.ODT_ACT_INSURANCE_CASE],\n 'insurance_case_number': '01234567890123456789',\n 'submission_way': 'responsible_person',\n 'submission_branch_id': '',\n 'use_other_submission_address': True,\n 'submission_address': u'сабмишн адрес2',\n 'obtain_way': 'responsible_person',\n 'responsible_person': responsible_person.id + '_person',\n 'court_include': True,\n 'obtain_address_type': 'other_address',\n 'obtain_address': 'аптейн адрес',\n 'bik_account': '012345678',\n 'account_number': '01234567890123456789'\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n self.assertEqual(db_batch.status, \"generating_pretension\")\n\n self.assertEqual(BatchDocumentDbObject.query.count(), 3)\n self.assertEqual(BatchDocumentDbObject.query.filter(\n BatchDocumentDbObject.status==\"rendered\",\n BatchDocumentDbObject.file_id!=None\n ).count(), 3)\n\n @authorized()\n def test_go_back_in_claim(self):\n with self.app.app_context():\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_OSAGO, self.user, status=\"pretension\")\n victim_car_owner = self.create_person(self.user, batch.id)\n guilty_car_owner = self.create_person(self.user, batch.id)\n responsible_person = self.create_person(self.user, batch.id, name=u\"Арина\",\n surname=u\"Поганкина\", patronymic=u\"Мстиславовна\", age=22)\n\n batch_json = json.dumps({\n \"data\": {\n 'crash_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'policy_called': True,\n 'all_have_osago': True,\n 'own_insurance_company': True,\n 'have_osago': 'both',\n 'problem_type': 'refusal',\n 'refusal_reason': 'wrong_docs',\n 'notice_has_mistakes': False,\n 'got_cash': False,\n 'victim_owner': victim_car_owner.id + \"_person\",\n 'owner_as_victim_driver': True,\n 'victim_car_brand': u\"Форд Фокус в кредит\",\n 'victim_car_number': u\"А000ОО98\",\n 'guilty_owner': guilty_car_owner.id + \"_person\",\n 'owner_as_guilty_driver': True,\n 'guilty_car_brand': u'Рено',\n 'guilty_car_number': u'В000ВВ50',\n 'other_victims': None,\n 'insurance_company_region': u'Санкт-Петербург',\n 'policy_series': u'ААА',\n 'policy_number': '0123456789',\n 'other_insurance': True,\n 'insurance_name': u\"РоСгосСтраХ\",\n 'insurance_id': None,\n 'other_date': True,\n 'policy_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'first_claim_date': (datetime.utcnow() - timedelta(days=90)).strftime(\"%Y-%m-%d\"),\n 'independent_expertise_number': u'01234567890123456789',\n 'independent_expertise_sum': 1500000,\n 'independent_expertise_cost': 200000,\n 'compensation_sum': 0,\n 'add_person_to_claim': True,\n 'docs_got': [OsagoDocTypeEnum.ODT_INQUIRE_CRASH,\n OsagoDocTypeEnum.ODT_NOTICE_CRASH,\n 
OsagoDocTypeEnum.ODT_ACT_INSURANCE_CASE],\n 'insurance_case_number': '01234567890123456789',\n 'submission_way': 'responsible_person',\n 'submission_branch_id': '',\n 'use_other_submission_address': True,\n 'submission_address': u'сабмишн адрес',\n 'obtain_way': 'responsible_person',\n 'responsible_person': responsible_person.id + '_person',\n 'court_include': True,\n 'obtain_address_type': 'other_address',\n 'obtain_address': 'аптейн адрес',\n 'bik_account': '012345678',\n 'account_number': '01234567890123456789'\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n result = self.test_client.post('/batch/go_ahead/', data={\n 'batch_id': batch.id,\n })\n self.assertEqual(result.status_code, 200)\n self.assertEqual(BatchDocumentDbObject.query.count(), 3)\n self.assertEqual(BatchDocumentDbObject.query.filter_by(status=\"rendered\").count(), 3)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n self.assertEqual(db_batch.status, \"claim\")\n\n result = self.test_client.post('/batch/go_back/', data={\n 'batch_id': batch.id,\n })\n self.assertEqual(result.status_code, 200)\n self.assertEqual(BatchDocumentDbObject.query.count(), 0)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n self.assertEqual(db_batch.status, \"pretension\")\n\n @authorized()\n def test_result_fields_3_4_stage(self):\n with self.app.app_context():\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_OSAGO, self.user, status=\"pretension\")\n victim_car_owner = self.create_person(self.user, batch.id, name=u\"виктим\", surname=u\"пострадашко\", patronymic=u\"потерпешевич\")\n guilty_car_owner = self.create_person(self.user, batch.id)\n responsible_person = self.create_person(self.user, batch.id, name=u\"Арина\",\n surname=u\"Поганкина\", patronymic=u\"Мстиславовна\", age=22)\n\n batch_json = json.dumps({\n \"data\": {\n 'crash_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'all_have_osago': True,\n 'own_insurance_company': True,\n 'have_osago': 'both',\n 'refusal_reason': 'wrong_docs',\n 'notice_has_mistakes': False,\n 'got_cash': False,\n 'victim_owner': victim_car_owner.id + \"_person\",\n 'owner_as_victim_driver': True,\n 'victim_car_brand': u\"Форд Фокус в кредит\",\n 'victim_car_number': u\"А000ОО98\",\n 'guilty_owner': guilty_car_owner.id + \"_person\",\n 'owner_as_guilty_driver': True,\n 'guilty_car_brand': u'Рено',\n 'guilty_car_number': u'В000ВВ50',\n 'other_victims': None,\n 'insurance_company_region': u'Санкт-Петербург',\n 'policy_series': u'ААА',\n 'policy_number': '0123456789',\n 'other_insurance': True,\n 'insurance_name': u\"РоСгосСтраХ\",\n 'insurance_id': None,\n 'other_date': True,\n 'independent_expertise_number': u'01234567890123456789',\n 'independent_expertise_sum': '10.0',\n 'independent_expertise_cost': 1000,\n 'add_person_to_claim': True,\n 'docs_got': [OsagoDocTypeEnum.ODT_INQUIRE_CRASH,\n OsagoDocTypeEnum.ODT_NOTICE_CRASH,\n OsagoDocTypeEnum.ODT_ACT_INSURANCE_CASE],\n 'insurance_case_number': '01234567890123456789',\n 'submission_way': 'responsible_person',\n 'submission_branch_id': '',\n 'use_other_submission_address': True,\n 'submission_address': u'сабмишн адрес',\n 'obtain_way': 'responsible_person',\n 'responsible_person': 
responsible_person.id + '_person',\n 'court_include': True,\n 'obtain_address_type': 'other_address',\n 'obtain_address': 'аптейн адрес',\n 'bik_account': '012345678',\n 'account_number': '01234567890123456789',\n 'police_case': True,\n\n 'policy_called': True,\n 'problem_type': 'refusal',\n 'policy_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'compensation_sum': 1000.9,\n\n 'lawsuit_date': datetime(2014, 5, 1).strftime(\"%Y-%m-%d\"),\n 'first_claim_date': datetime(2014, 1, 1).strftime(\"%Y-%m-%d\"),\n 'compensation_date': datetime(2015, 1, 1).strftime(\"%Y-%m-%d\"),\n 'compensation_got': \"9.0\"\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n # формула для проверки: (0.01 * underpay_sum * ((lawsuit_date if lawsuit_date else datetime.now()) - first_claim_date - timedelta(days=20)).days)\n # if compensation_got is None\n # else (0.01 * underpay_sum * ((compensation_date if compensation_date else datetime.today()) - first_claim_date - timedelta(days=20)).days +\n # 0.01 * (underpay_sum - compensation_got) * ((lawsuit_date if lawsuit_date else datetime.today()) - compensation_date if compensation_date else datetime.today()).days)\n\n self.assertEqual(db_batch.result_fields, {\n u'above_limits_sum': u'0',\n u'attached_to_lawsuit_docs': [u'case_init_refusal', u'expertise_report'],\n u'insufficient_docs': [u'police_protocol', u'act_insurance_case'],\n u'insurance_penalty': u'50',\n u'lawsuit_cost': u'500000.5',\n u'legal_fee': u'1000.5',\n u'limits_sum': u'400000',\n u'region': u'Санкт-Петербург',\n u'region_prepositional': u'Санкт-Петербург',\n u'responsible_person_dative': u'Поганкина Арина Мстиславовна',\n u'underpay_sum': u'10.0'\n })\n\n @authorized()\n def test_trust_court(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_OSAGO]['doc_types'] = [DocumentTypeEnum.DT_OSAGO_TRUST_COURT_REPRESENTATION]\n\n with self.app.app_context():\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_OSAGO, self.user)\n victim_car_owner = self.create_person(self.user, batch.id)\n responsible_person = self.create_person(self.user, batch.id, name=u\"Арина\",\n surname=u\"Поганкина\", patronymic=u\"Ивановна\", age=22)\n\n batch_json = json.dumps({\n \"data\": { # todo: add fields\n 'victim_owner': victim_car_owner.id + \"_person\",\n 'lawsuit_submission_responsible_person': responsible_person.id + '_person',\n 'victim_car_brand': u\"Форд Фокус в кредит\",\n 'victim_car_number': u\"А000ОО98\",\n 'crash_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n \"submission_way\": \"responsible_person\",\n 'obtain_way': 'responsible_person',\n 'insurance_name': u'ООО ААА ОАО АОА',\n 'court_include': True\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': batch.id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_OSAGO_TRUST_COURT_REPRESENTATION])\n })\n self.assertEqual(result.status_code, 
200)\n\n @authorized()\n def test_trust_court_absence_claim(self):\n DocRequisitiesStorage._BATCH_DESCRIPTORS[DocumentBatchTypeEnum.DBT_OSAGO]['doc_types'] = [DocumentTypeEnum.DT_OSAGO_CLAIM_COURT_ABSENT]\n\n with self.app.app_context():\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_OSAGO, self.user)\n victim_car_owner = self.create_person(self.user, batch.id)\n responsible_person = self.create_person(self.user, batch.id, name=u\"Арина\",\n surname=u\"Поганкина\", patronymic=u\"Ивановна\", age=22)\n\n batch_json = json.dumps({\n \"data\": {\n 'victim_owner': victim_car_owner.id + \"_person\",\n 'lawsuit_submission_responsible_person': responsible_person.id + '_person',\n 'responsible_person': responsible_person.id + '_person',\n 'victim_car_brand': u\"Форд Фокус в кредит\",\n 'victim_car_number': u\"А000ОО98\",\n 'crash_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n \"submission_way\": \"responsible_person\",\n 'obtain_way': 'responsible_person',\n 'insurance_name': u'ООО ААА ОАО АОА',\n 'court_include': True,\n 'first_claim_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n \"court_name\": u'Наш суд, самый гуманный суд в мире',\n \"court_address\": u'Планета Обезьян',\n \"add_person_to_claim\": True,\n 'lawsuit_submission_way': 'oneself'\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n result = self.test_client.post('/batch/render_document/', data={\n 'batch_id': batch.id,\n 'document_type': json.dumps([DocumentTypeEnum.DT_OSAGO_CLAIM_COURT_ABSENT])\n })\n self.assertEqual(result.status_code, 200)\n\n @authorized()\n def test_go_ahead_in_claim(self):\n with self.app.app_context():\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_OSAGO, self.user, status=\"claim\")\n\n batch_payment = PayInfoObject(\n user=self.user,\n batch=batch,\n pay_record_id=0,\n payment_provider=1,\n service_type=PurchaseServiceType.OSAGO_PART1\n )\n sqldb.session.add(batch_payment)\n batch_payment = PayInfoObject(\n user=self.user,\n batch=batch,\n pay_record_id=1,\n payment_provider=1,\n service_type=PurchaseServiceType.OSAGO_PART2\n )\n sqldb.session.add(batch_payment)\n sqldb.session.commit()\n\n victim_car_owner = self.create_person(self.user, batch.id, name=u\"ЖЖ\", surname=u\"ЖЖ\", patronymic=u\"ЖЖ\")\n guilty_car_owner = self.create_person(self.user, batch.id)\n responsible_person = self.create_person(self.user, batch.id, name=u\"Арина\",\n surname=u\"Поганкина\", patronymic=u\"Мстиславовна\", age=22)\n\n batch_json = json.dumps({\n \"data\": {\n 'crash_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'all_have_osago': True,\n 'own_insurance_company': True,\n 'have_osago': 'both',\n 'notice_has_mistakes': False,\n 'got_cash': True,\n 'victim_owner': victim_car_owner.id + \"_person\",\n 'owner_as_victim_driver': True,\n 'victim_car_brand': u\"Форд Фокус в кредит\",\n 'victim_car_number': u\"А000ОО98\",\n 'guilty_owner': guilty_car_owner.id + \"_person\",\n 'owner_as_guilty_driver': True,\n 'guilty_car_brand': u'Рено',\n 'guilty_car_number': u'В000ВВ50',\n 'other_victims': None,\n 'insurance_company_region': u'Санкт-Петербург',\n 'policy_series': u'ААА',\n 'policy_number': '0123456789',\n 'other_insurance': True,\n 'insurance_name': 
u\"РоСгосСтраХ\",\n 'insurance_id': None,\n 'first_claim_date': (datetime(2014,5,1) - timedelta(days=90)).strftime(\"%Y-%m-%d\"),\n 'independent_expertise_number': u'01234567890123456789',\n 'independent_expertise_cost': 1000,\n 'add_person_to_claim': True,\n 'docs_got': [\n OsagoDocTypeEnum.ODT_INQUIRE_CRASH,\n OsagoDocTypeEnum.ODT_NOTICE_CRASH,\n OsagoDocTypeEnum.ODT_INSURANCE_DENIAL,\n ],\n 'insurance_case_number': '01234567890123456789',\n 'submission_branch_id': '',\n 'use_other_submission_address': True,\n 'submission_address': u'сабмишн адрес',\n 'obtain_way': 'responsible_person',\n 'responsible_person': responsible_person.id + '_person',\n 'obtain_address_type': 'other_address',\n 'obtain_address': 'аптейн адрес',\n 'bik_account': '012345678',\n 'account_number': '01234567890123456789',\n 'police_case': False,\n 'refusal_reason': OsagoRefusalReasonEnum.ORR_WRONG_DOCS,\n\n 'court_name': u\"Наименование суда\",\n 'court_address': u'Адрес суда',\n 'lawsuit_submission_way': 'mail',\n\n 'court_include': True,\n 'submission_way': 'responsible_person',\n\n 'lawsuit_submission_responsible_person': responsible_person.id + '_person',\n\n # --------------------------------------------------------------------------------------------------\n 'policy_called': True, # gibdd/euro\n 'other_date': True,\n 'policy_date': \"2015-01-01\",\n\n 'independent_expertise_sum': '500000',\n\n 'problem_type': 'refusal',\n 'compensation_sum': 10000, # до претензии\n \"pretension_result\": \"success\",\n \"compensation_got\": 5000, # после претензии\n\n # --------------------------------------------------------------------------------------------------\n \"compensation_date\": (datetime.utcnow() - timedelta(days=50)).strftime(\"%Y-%m-%d\"),\n \"insurance_returned_docs\": [\n OsagoDocTypeEnum.ODT_POLICE_PROTOCOL,\n OsagoDocTypeEnum.ODT_ACT_INSURANCE_CASE,\n OsagoDocTypeEnum.ODT_POLICE_STATEMENT,\n OsagoDocTypeEnum.ODT_CASE_INITIATION_REFUSAL\n ],\n \"pretension_answer_got\": True,\n \"lawsuit_date\": (datetime.utcnow() - timedelta(days=20)).strftime(\"%Y-%m-%d\"),\n \"make_lawsuit\": True,\n \"insurance_lawsuit\": [\n InsuranceLawsuitEnum.ILS_EXPERTISE_COST,\n InsuranceLawsuitEnum.ILS_FINE,\n InsuranceLawsuitEnum.ILS_PENALTY,\n InsuranceLawsuitEnum.ILS_UNDERPAY\n ],\n \"notary_costs\": 100000.12,\n \"moral_damages\": 2000000.34,\n 'court_attendance': 'responsible_person',\n \"attached_to_lawsuit_docs_pagecount\": [\n {\n \"page\": OsagoDocTypeEnum.ODT_INQUIRE_CRASH,\n \"pagecount\": 123\n }, {\n \"page\": OsagoDocTypeEnum.ODT_NOTICE_CRASH,\n \"pagecount\": 123\n }, {\n \"page\": OsagoDocTypeEnum.ODT_ACT_INSURANCE_CASE,\n \"pagecount\": 123\n }, {\n \"page\": OsagoDocTypeEnum.ODT_INSURANCE_DENIAL,\n \"pagecount\": 123\n }, {\n \"page\": OsagoDocTypeEnum.ODT_POLICE_STATEMENT,\n \"pagecount\": 123\n }, {\n \"page\": OsagoDocTypeEnum.ODT_POLICE_PROTOCOL,\n \"pagecount\": 123\n }, {\n \"page\": OsagoDocTypeEnum.ODT_CASE_INITIATION_REFUSAL,\n \"pagecount\": 123\n }, {\n \"page\": OsagoDocTypeEnum.ODT_EXPERTISE_REPORT,\n \"pagecount\": 123\n }, {\n \"page\": OsagoDocTypeEnum.ODT_EXPERTISE_CONTRACT,\n \"pagecount\": 123\n }, {\n \"page\": OsagoDocTypeEnum.ODT_PRETENSION_ANSWER_COPY,\n \"pagecount\": 123\n },{\n \"page\": OsagoDocTypeEnum.ODT_NOTARY_PAY_ACT,\n \"pagecount\": 123\n }, {\n \"page\": OsagoDocTypeEnum.ODT_POLICY_OSAGO,\n \"pagecount\": 123\n }, {\n \"page\": OsagoDocTypeEnum.ODT_BANK_STATEMENT,\n \"pagecount\": 321\n }]\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': 
batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n result = self.test_client.post('/batch/go_ahead/', data={\n 'batch_id': batch.id,\n })\n self.assertEqual(result.status_code, 200)\n self.assertEqual(BatchDocumentDbObject.query.count(), 3)\n self.assertEqual(BatchDocumentDbObject.query.filter_by(status=\"rendered\").count(), 3)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n self.assertEqual(db_batch.status, \"court\")\n\n def test_fields_order(self):\n field_values = {\n 'a': 'a',\n 'b': 'b'\n }\n schema = {\n 'fields': [{\n 'name': 'd',\n 'field_type': 'DocTextField',\n 'type': 'calculated',\n 'depends_on': ['b', 'c'],\n 'value': {\n '#sum': [{\n '#field': 'b'\n }, {\n \"#value\": \" + \"\n }, {\n '#field': 'c'\n }]\n }\n }, {\n 'name': 'e',\n 'field_type': 'DocTextField',\n 'type': 'calculated',\n 'depends_on': ['a', 'd'],\n 'value': {\n '#sum': [{\n '#field': 'a'\n }, {\n \"#value\": \" + \"\n }, {\n '#field': 'd'\n }]\n }\n }, {\n 'name': 'f',\n 'field_type': 'DocTextField',\n 'type': 'calculated',\n 'depends_on': ['b'],\n 'value': {\n '#field': 'b'\n }\n }, {\n 'name': 'a',\n 'type': 'DocTextField'\n }, {\n 'name': 'c',\n 'field_type': 'DocTextField',\n 'type': 'calculated',\n 'depends_on': ['a'],\n 'value': {\n '#field': 'a'\n }\n }, {\n 'name': 'b',\n 'type': 'DocTextField'\n }]\n }\n\n result = transform_with_schema(field_values, schema)\n for i in result:\n print(i + '=' + result[i].db_value())\n\n @authorized()\n def test_go_ahead_in_court(self):\n with self.app.app_context():\n batch = self.create_batch(DocumentBatchTypeEnum.DBT_OSAGO, self.user, status=\"court\")\n victim_car_owner = self.create_person(self.user, batch.id, name=u\"ЖЖ\", surname=u\"ЖЖ\", patronymic=u\"ЖЖ\")\n guilty_car_owner = self.create_person(self.user, batch.id)\n responsible_person = self.create_person(self.user, batch.id, name=u\"Арина\",\n surname=u\"Поганкина\", patronymic=u\"Мстиславовна\", age=22)\n\n batch_json = json.dumps({\n \"data\": {\n 'crash_date': (datetime.utcnow() - timedelta(days=100)).strftime(\"%Y-%m-%d\"),\n 'all_have_osago': True,\n 'own_insurance_company': True,\n 'have_osago': 'both',\n 'notice_has_mistakes': False,\n 'got_cash': True,\n 'victim_owner': victim_car_owner.id + \"_person\",\n 'owner_as_victim_driver': True,\n 'victim_car_brand': u\"Форд Фокус в кредит\",\n 'victim_car_number': u\"А000ОО98\",\n 'guilty_owner': guilty_car_owner.id + \"_person\",\n 'owner_as_guilty_driver': True,\n 'guilty_car_brand': u'Рено',\n 'guilty_car_number': u'В000ВВ50',\n 'other_victims': None,\n 'insurance_company_region': u'Санкт-Петербург',\n 'policy_series': u'ААА',\n 'policy_number': '0123456789',\n 'other_insurance': True,\n 'insurance_name': u\"РоСгосСтраХ\",\n 'insurance_id': None,\n 'first_claim_date': (datetime(2014,5,1) - timedelta(days=90)).strftime(\"%Y-%m-%d\"),\n 'independent_expertise_number': u'01234567890123456789',\n 'independent_expertise_cost': 1000,\n 'add_person_to_claim': True,\n 'docs_got': [\n OsagoDocTypeEnum.ODT_INQUIRE_CRASH\n ],\n 'insurance_case_number': '01234567890123456789',\n 'submission_branch_id': '',\n 'use_other_submission_address': True,\n 'submission_address': u'сабмишн адрес',\n 'obtain_way': 'responsible_person',\n 'responsible_person': responsible_person.id + '_person',\n 'obtain_address_type': 'other_address',\n 
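# the lawsuit, execution-act and court_attendance fields below presumably feed\n                    # the court-stage documents; go_ahead is expected to render three of them\n                    # while the batch stays in 'court'\n                    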
'obtain_address': 'аптейн адрес',\n 'bik_account': '012345678',\n 'account_number': '01234567890123456789',\n 'police_case': False,\n 'refusal_reason': OsagoRefusalReasonEnum.ORR_WRONG_DOCS,\n\n 'court_name': u\"Наименование суда\",\n 'court_address': u'Адрес суда',\n 'lawsuit_submission_way': 'oneself',\n\n 'court_include': True,\n 'submission_way': 'responsible_person',\n\n 'lawsuit_submission_responsible_person': responsible_person.id + '_person',\n 'lawsuit_number': '1234567890',\n\n # --------------------------------------------------------------------------------------------------\n 'policy_called': True, # gibdd/euro\n 'other_date': True,\n 'policy_date': \"2013-01-01\",\n\n 'independent_expertise_sum': '1200000',\n\n 'problem_type': 'refusal',\n 'compensation_sum': 10000, # до претензии\n \"pretension_result\": \"refuse\",\n \"compensation_got\": 10000, # после претензии\n\n # --------------------------------------------------------------------------------------------------\n \"compensation_date\": (datetime.utcnow() - timedelta(days=20)).strftime(\"%Y-%m-%d\"),\n \"insurance_returned_docs\": [OsagoDocTypeEnum.ODT_POLICE_PROTOCOL],\n \"pretension_answer_got\": True,\n \"lawsuit_date\": (datetime.utcnow() - timedelta(days=50)).strftime(\"%Y-%m-%d\"),\n \"make_lawsuit\": True,\n \"insurance_lawsuit\": [\n InsuranceLawsuitEnum.ILS_EXPERTISE_COST,\n InsuranceLawsuitEnum.ILS_FINE,\n InsuranceLawsuitEnum.ILS_PENALTY,\n InsuranceLawsuitEnum.ILS_UNDERPAY\n ],\n \"notary_costs\": 100000.12,\n \"moral_damages\": 2000000.34,\n 'court_attendance': 'responsible_person',\n\n 'insurance_execution_act_responsible_person': True,\n 'insurance_execution_act_obtain_way': 'no_obtain',\n\n 'guilty_execution_act_responsible_person': False,\n 'guilty_execution_act_obtain_way': 'no_obtain',\n\n \"attached_to_lawsuit_docs_pagecount\": [\n {\n \"page\": OsagoDocTypeEnum.ODT_INQUIRE_CRASH,\n \"pagecount\": 123\n }, {\n \"page\": OsagoDocTypeEnum.ODT_NOTICE_CRASH,\n \"pagecount\": 123\n }, {\n \"page\": OsagoDocTypeEnum.ODT_ACT_INSURANCE_CASE,\n \"pagecount\": 123\n }, {\n \"page\": OsagoDocTypeEnum.ODT_INSURANCE_DENIAL,\n \"pagecount\": 123\n }, {\n \"page\": OsagoDocTypeEnum.ODT_POLICE_STATEMENT,\n \"pagecount\": 123\n }, {\n \"page\": OsagoDocTypeEnum.ODT_POLICE_PROTOCOL,\n \"pagecount\": 123\n }, {\n \"page\": OsagoDocTypeEnum.ODT_CASE_INITIATION_REFUSAL,\n \"pagecount\": 123\n }, {\n \"page\": OsagoDocTypeEnum.ODT_EXPERTISE_REPORT,\n \"pagecount\": 123\n }, {\n \"page\": OsagoDocTypeEnum.ODT_EXPERTISE_CONTRACT,\n \"pagecount\": 123\n }, {\n \"page\": OsagoDocTypeEnum.ODT_PRETENSION_ANSWER_COPY,\n \"pagecount\": 123\n },{\n \"page\": OsagoDocTypeEnum.ODT_NOTARY_PAY_ACT,\n \"pagecount\": 123\n }, {\n \"page\": OsagoDocTypeEnum.ODT_POLICY_OSAGO,\n \"pagecount\": 123\n }]\n }\n })\n result = self.test_client.post('/batch/update/', data={\n 'batch_id': batch.id,\n 'batch': batch_json\n })\n\n self.assertEqual(result.status_code, 200)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n print (json.dumps(db_batch.__dict__, indent=1, default=lambda x: unicode(x), ensure_ascii=False))\n\n result = self.test_client.post('/batch/go_ahead/', data={\n 'batch_id': batch.id,\n })\n self.assertEqual(result.status_code, 200)\n self.assertEqual(BatchDocumentDbObject.query.count(), 3)\n self.assertEqual(BatchDocumentDbObject.query.filter_by(status=\"rendered\").count(), 3)\n db_batch = DocumentBatchDbObject.query.filter_by(id=batch.id).first()\n self.assertEqual(db_batch.status, \"court\")\n" }, { 
"alpha_fraction": 0.5278001427650452, "alphanum_fraction": 0.5350523591041565, "avg_line_length": 30.024999618530273, "blob_id": "6f53322b604d3798ebc4146027d161fe8d8f2496", "content_id": "fbc3a4d4c22e3e5c79cdc968b7928cd9db47dbfd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1241, "license_type": "no_license", "max_line_length": 85, "num_lines": 40, "path": "/app/common_utils/perf.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport StringIO\nimport cProfile\nimport pstats\nfrom time import time\n\nclass TimeCalculator(object):\n\n def __init__(self, name, logger = None, use_profile = False, min_time = None):\n self.logger = logger\n self.t1 = None\n self.t2 = None\n self.name = name\n self.use_profile = use_profile\n self.min_time = min_time\n if self.use_profile:\n self.pr = cProfile.Profile()\n\n def __enter__(self):\n self.t1 = time()\n if self.use_profile:\n self.pr.enable(builtins=False)\n return self\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n self.t2 = time()\n if self.use_profile:\n self.pr.disable()\n if self.min_time is not None and (self.t2 - self.t1) < self.min_time:\n return\n if self.use_profile:\n if self.logger:\n s = StringIO.StringIO()\n sortby = 'cumulative'\n ps = pstats.Stats(self.pr, stream=s).sort_stats(sortby)\n ps.print_stats()\n self.logger.debug(s.getvalue())\n else:\n if self.logger:\n self.logger.debug(\"%s: %s\" % (self.name, unicode(self.t2 - self.t1)))\n" }, { "alpha_fraction": 0.6484710574150085, "alphanum_fraction": 0.7035633325576782, "avg_line_length": 36.32075500488281, "blob_id": "93bf8bac317c7f3a060d107487864b9c9f85bab3", "content_id": "c1f22ff032afb79c36dfaec8fb6fa60e59192bc8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3957, "license_type": "no_license", "max_line_length": 98, "num_lines": 106, "path": "/app/services/pay/models.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom sqlalchemy.orm import relationship\n\nfrom sqlalchemy.sql.functions import func\nfrom sqlalchemy import Column, Unicode, DateTime, Integer, ForeignKey, DECIMAL, BigInteger, String\nfrom sqlalchemy.dialects.postgresql import JSONB\n\nfrom fw.db.sql_base import db as sqldb\n\n\nclass PaymentProvider(object):\n YAD = 1\n\n\nclass PurchaseServiceType(object):\n LLC_PURCHASE = \"llc_purchase\"\n LLC_AUTO_PURCHASE = \"llc_auto_purchase\"\n\n IP_PURCHASE = \"ip_purchase\"\n IP_AUTO_PURCHASE = \"ip_auto_purchase\"\n\n OSAGO_PART1 = \"osago_part1\"\n OSAGO_PART2 = \"osago_part2\"\n\n\nclass PaymentSubscriptionObject(sqldb.Model):\n __tablename__ = \"payment_subscription\"\n\n id = Column(Integer, primary_key=True)\n pay_info = Column(JSONB, nullable=False)\n created = Column(DateTime, nullable=False, default=func.now())\n end_dt = Column(DateTime, nullable=False)\n type = Column(Unicode, nullable=False)\n user_id = Column(Integer, ForeignKey('authuser.id'), index=True)\n user = relationship(\"AuthUser\", uselist=False)\n\n pay_record_id = Column(Integer, nullable=False)\n payment_provider = Column(Integer, nullable=False)\n\n\nclass YadRequestsObject(sqldb.Model):\n __tablename__ = \"yad_requests\"\n\n id = Column(Integer, primary_key=True)\n\n ip = Column(Unicode, nullable=False)\n created = Column(DateTime, nullable=False, default=func.now())\n request_datetime = Column(DateTime, nullable=False)\n md5 = Column(Unicode, nullable=False)\n 
shop_id = Column(BigInteger, nullable=False)\n shop_article_id = Column(BigInteger, nullable=False)\n invoice_id = Column(BigInteger, nullable=False, index=True)\n order_number = Column(Unicode, nullable=False, index=True)\n customer_number = Column(Unicode, nullable=False, index=True)\n order_created_datetime = Column(DateTime, nullable=False)\n order_sum_amount = Column(DECIMAL, nullable=False)\n order_sum_currency_paycash = Column(Unicode, nullable=False)\n order_sum_bank_paycash = Column(Unicode, nullable=False)\n shop_sum_amount = Column(DECIMAL, nullable=False)\n shop_sum_currency_paycash = Column(Unicode, nullable=False)\n shop_sum_bank_paycash = Column(Unicode, nullable=False)\n payment_payer_code = Column(Unicode, nullable=False)\n payment_type = Column(Unicode, nullable=False)\n action = Column(Unicode, nullable=False)\n payment_datetime = Column(DateTime, nullable=True)\n cps_user_country_code = Column(Unicode, nullable=True)\n\n# \"_id\" : ObjectId(\"5502f094e64bcf076f79bc87\"),\n# \"ip\" : \"77.75.157.170\",\n# \"cps_user_country_code\" : \"RU\",\n# \"request_datetime\" : \"2015-03-13T17:14:08.976+03:00\",\n# \"shop_id\" : \"29372\",\n# \"shop_sum_currency_paycash\" : \"10643\",\n# \"order_sum_currency_paycash\" : \"10643\",\n# \"order_created_datetime\" : \"2015-03-13T17:14:07.860+03:00\",\n# \"shop_sum_amount\" : \"434.25\",\n# \"order_sum_bank_paycash\" : \"1003\",\n# \"shop_article_id\" : \"139366\",\n# \"payment_datetime\" : \"2015-03-13T17:14:08.795+03:00\",\n# \"md5\" : \"5411413BADFD93ECF8BD3F7D4DF24CD6\",\n# \"shop_sum_bank_paycash\" : \"1003\",\n# \"order_sum_amount\" : \"450.00\",\n# \"payment_payer_code\" : \"4100322062290\",\n# \"created\" : ISODate(\"2015-03-13T14:13:40.197Z\"),\n# \"invoice_id\" : \"2000000424994\",\n# \"customer_number\" : \"5502efbde64bcf076f79bbcf\",\n# \"payment_type\" : \"AC\",\n# \"action\" : \"paymentAviso\",\n# \"order_number\" : \"5502efc0e64bcf076f79bbd1\"\n\n\nclass PayInfoObject(sqldb.Model):\n __tablename__ = \"pay_info\"\n\n id = Column(Integer, primary_key=True)\n user_id = Column(Integer, ForeignKey('authuser.id'), index=True)\n user = relationship(\"AuthUser\", uselist=False)\n\n batch_id = Column(String, ForeignKey('doc_batch.id'), nullable=False)\n batch = relationship(\"DocumentBatchDbObject\", uselist=False)\n\n pay_record_id = Column(Integer, index=True, nullable=False)\n payment_provider = Column(Integer, nullable=False)\n\n dt = Column(DateTime, nullable=True, default=func.now())\n service_type = Column(String, nullable=False)\n\n" }, { "alpha_fraction": 0.4852820336818695, "alphanum_fraction": 0.4887354075908661, "avg_line_length": 34.561405181884766, "blob_id": "5795a056155e9837f763fab45c26f1759d70122d", "content_id": "33edae351ca87c5b1b2dc76f42896632da0909c0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12162, "license_type": "no_license", "max_line_length": 113, "num_lines": 342, "path": "/app/manage_commands/periodic_commands.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom datetime import datetime, timedelta\nimport json\nimport requests\nfrom data.get_all_ifns import ALL_IFNS_NUMBERS\nfrom fw.db.sql_base import db as sqldb\nfrom fw.documents.fields.general_doc_fields import DocAddressField\nfrom fw.utils.address_utils import dadata_standardize_address\nfrom manage_commands import BaseManageCommand\nfrom common_utils import int_to_ifns\nfrom services.ifns.data_model.models import 
IfnsCatalogObject\n\n\nclass UpdateIfnsCatalogInitCommand(BaseManageCommand):\n NAME = \"update_ifns_catalog_init\"\n\n def run(self):\n \"\"\"\n Should be started once.\n \"\"\"\n\n updated = datetime.now() - timedelta(days=365)\n for ifns in ALL_IFNS_NUMBERS:\n res = IfnsCatalogObject.query.filter_by(code=ifns).first()\n if not res:\n new_item = IfnsCatalogObject(code=ifns, updated=updated)\n sqldb.session.add(new_item)\n sqldb.session.commit()\n\n\ndef get_phone_list(phone_str, logger):\n phone_list_str = phone_str.split(',')\n city_code = None\n phones = []\n phones_without_city_code = []\n for phone_item in phone_list_str:\n try:\n phone_item = phone_item.strip()\n phone_item = filter(lambda x: x.isdigit() or x in ('(', ')'), phone_item)\n if '(' in phone_item and ')' in phone_item:\n country_code, remaining = phone_item.split('(')\n if country_code not in ('', '8', '+7', '+38'):\n raise ValueError()\n if not remaining:\n raise ValueError()\n city_code, number = remaining.split(')')\n if not city_code or not number:\n raise ValueError()\n phones.append(u\"+7(%s)%s\" % (city_code, number))\n else:\n if len(phone_item) == 7:\n phones_without_city_code.append(phone_item)\n elif len(phone_item) == 11 and phone_item[0] == '8':\n city_code = phone_item[1:4]\n number = phone_item[4:]\n phones.append(u\"+7(%s)%s\" % (city_code, number))\n elif len(phone_item) == 12 and phone_item[0:2] == \"+7\":\n city_code = phone_item[2:5]\n number = phone_item[5:]\n phones.append(u\"+7(%s)%s\" % (city_code, number))\n elif len(phone_item) == 10:\n city_code = phone_item[0:3]\n number = phone_item[3:]\n phones.append(u\"+7(%s)%s\" % (city_code, number))\n\n except Exception:\n logger.exception(u\"Failed to parse number %s\" % phone_item)\n\n if city_code and phones_without_city_code:\n for phone in phones_without_city_code:\n phones.append(u\"+7(%s)%s\" % (city_code, phone))\n return phones\n\n\ndef str_address_to_struct(address):\n source_address = address\n address_json = dadata_standardize_address(address)\n if not address_json:\n return\n address_field = DocAddressField()\n address_json['address_string'] = address\n fields_map = {\n \"okato\": \"okato\",\n \"tax_office\": \"ifns\",\n \"qc_complete\": \"qc_complete\",\n \"region\": \"region\",\n \"postal_code\": \"index\",\n \"city_type\": \"city_type\",\n \"city\": \"city\",\n \"area\": \"district\",\n \"area_type\": \"district_type\",\n \"flat\": \"flat\",\n \"flat_type\": \"flat_type\",\n \"house\": \"house\",\n \"house_type\": \"house_type\",\n \"settlement\": \"village\",\n \"settlement_type\": \"village_type\",\n \"street\": \"street\",\n \"street_type\": \"street_type\",\n \"result\": \"address_string\",\n \"geo_lat\": \"coord_lat\",\n \"geo_lon\": \"coord_long\",\n \"block\": \"building\",\n \"block_type\": \"building_type\",\n \"qc\": \"qc\"\n }\n result_address = {}\n for field_name in fields_map:\n if field_name in address_json:\n val = address_json[field_name]\n if val is not None:\n result_address[fields_map[field_name]] = val\n\n address_field.parse_raw_value(result_address)\n address = address_field.db_value()\n address['source_address'] = source_address\n return address\n\n\nclass UpdateIfnsCatalogCommand(BaseManageCommand):\n NAME = \"update_ifns_catalog\"\n\n def run(self):\n \"\"\"\n Should be started every minute.\n \"\"\"\n\n result = IfnsCatalogObject.query.filter(\n IfnsCatalogObject.updated.__ne__(None)\n ).order_by(IfnsCatalogObject.updated.asc()).first()\n\n if not result:\n
self.logger.error(u\"Failed to find ifns to update\")\n return\n\n ifns_item = result\n ifns = ifns_item.code\n\n self.logger.info(u\"Updating ifns %s\" % unicode(ifns))\n\n ifns_item.updated = datetime.utcnow()\n sqldb.session.commit()\n\n s = requests.Session()\n s.get('https://service.nalog.ru/addrno.do', timeout=5)\n result = s.get('https://service.nalog.ru/addrno.do?l=6&g=%s' % int_to_ifns(ifns), timeout=5)\n if result.status_code != 200:\n self.logger.error(u\"Failed to get data for ifns %s\" % ifns)\n return\n\n data = {}\n try:\n data = result.json()\n res = data['res']\n\n required_fields = ('naimk', 'adres')\n if any([field not in res for field in required_fields]):\n if not ifns_item.naimk:\n sqldb.session.delete(ifns_item)\n sqldb.session.commit()\n raise Exception(u\"Empty data\")\n name = res['naimk']\n address = res['adres']\n old_address = ifns_item.address\n\n old_address_str = old_address['address_string'] if (\n isinstance(old_address, dict) and 'address_string' in old_address) else old_address\n\n # if address != old_address_str or isinstance(old_address_str, basestring):\n # address = str_address_to_struct(address)\n\n tel = get_phone_list(res['tel'], self.logger) if 'tel' in res else None\n comment = res.get('coment', '')\n\n plat_src = res.get('plat', None)\n plat = {}\n if plat_src:\n plat['recipient_name'] = plat_src['naimpol']\n plat['recipient_kpp'] = plat_src['kpppol']\n plat['recipient_inn'] = plat_src['innpol']\n\n if plat_src['found']:\n plat['bik'] = plat_src['bik']\n plat['bank_name'] = plat_src['naimbank']\n plat['recipient_account'] = plat_src['schetpol']\n\n rou = {}\n rou_src = res.get('rou', None)\n\n if rou_src:\n rou['name'] = rou_src['naimk']\n rou['code'] = rou_src['code']\n if 'tel' in rou_src:\n rou['tel'] = get_phone_list(rou_src['tel'], self.logger)\n rou_addr = rou_src['adres']\n rou['address_str'] = rou_addr\n old_rou_addr_str = (ifns_item.rou or {}).get('address_str', \"\")\n if rou_addr != old_rou_addr_str:\n rou['address'] = str_address_to_struct(rou_addr)\n else:\n rou['address'] = (ifns_item.rou or {}).get('address', {})\n\n rof = {}\n rof_src = res.get('rof', None)\n\n if rof_src:\n rof['name'] = rof_src['naimk']\n rof['code'] = rof_src['code']\n if 'tel' in rof_src:\n rof['tel'] = get_phone_list(rof_src['tel'], self.logger)\n rof['address'] = rof_src['adres']\n\n new_fields = {\n 'name': name,\n 'address': address,\n 'comment': comment\n }\n unset_fields = {}\n if tel:\n new_fields['tel'] = tel\n else:\n unset_fields['tel'] = \"\"\n\n if plat:\n new_fields['plat'] = plat\n else:\n unset_fields['plat'] = \"\"\n\n if rou:\n new_fields['rou'] = rou\n else:\n unset_fields['rou'] = \"\"\n\n if rof:\n new_fields['rof'] = rof\n else:\n unset_fields['rof'] = \"\"\n\n # TODO:\n # if not unset_fields:\n # col.update({'code': ifns}, {'$set': new_fields})\n # else:\n # col.update({'code': ifns}, {'$set': new_fields, '$unset': unset_fields})\n # self.logger.info(u\"ifns %s updated\" % str(ifns))\n except Exception:\n self.logger.exception(u\"Invalid data returned for ifns %s: \\r\\n %s\" %\n (ifns, json.dumps(data, default=lambda x: unicode(x),\n indent=1, ensure_ascii=False)))\n return\n\n\n# class ReloadBankInfo(BaseManageCommand):\n# NAME = \"reload_bank_info\"\n#\n# def run(self):\n# # TODO:\n# file_path = get_single(u'csv file path: ')\n# if not os.path.exists(file_path):\n# self.logger.error(u\"File %s not found\" % file_path)\n# return False\n#\n# col = self.db['bik_catalog']\n#\n# col.remove({})\n#\n# with open(file_path, 'r') as f:\n# 
content = f.read()\n#\n# data = json.loads(content)\n# for item in data:\n# if 'NAMEP' not in item or 'NEWNUM' not in item:\n# continue\n#\n# name = item['NAMEP']\n# bik = item['NEWNUM']\n#\n# if not name or not bik:\n# continue\n#\n# item_data = {\n# 'name': name,\n# 'bik': bik\n# }\n#\n# if 'ADR' in item and item['ADR']:\n# item_data['address'] = item['ADR']\n#\n# if 'TELEF' in item and item['TELEF']:\n# item_data['phone'] = item['TELEF']\n#\n# if 'OKPO' in item and item['OKPO']:\n# item_data['okpo'] = item['OKPO']\n#\n# if 'KSNP' in item and item['KSNP']:\n# item_data['kor_account'] = item['KSNP']\n#\n# col.insert(item_data)\n#\n# self.logger.info(\"%d items added\" % col.find({}).count())\n#\n#\n# class UpdateIfnsCatalogCommandAddresses(BaseManageCommand):\n# NAME = \"update_ifns_addresses\"\n#\n# def run(self):\n# \"\"\"\n# Should be started every minute.\n# \"\"\"\n# assert False\n#\n# # TODO:\n# col = self.db['ifns_catalog']\n#\n# addr_set = set()\n# result = col.find()\n# for r in result:\n# if 'address' in r and r['address']:\n# addr_set.add(r['address'])\n# if 'rou' in r and 'address' in r['rou'] and r['rou']['address'] and isinstance(r['rou']['address'],\n# basestring):\n# addr_set.add(r['rou']['address'])\n# if 'rof' in r and 'address' in r['rof'] and r['rof']['address'] and isinstance(r['rof']['address'],\n# basestring):\n# addr_set.add(r['rof']['address'])\n#\n# addr_map = {}\n#\n# for addr in addr_set:\n# self.logger.info(addr)\n# try:\n# norm_addr = str_address_to_struct(addr)\n# addr_map[addr] = norm_addr\n# self.logger.info(json.dumps(norm_addr, indent=1, ensure_ascii=False))\n# col.update({'address': addr}, {'$set': {'address': norm_addr}}, multi=True)\n# col.update({'rou.address': addr}, {'$set': {'rou.address': norm_addr}}, multi=True)\n# col.update({'rof.address': addr}, {'$set': {'rof.address': norm_addr}}, multi=True)\n# except Exception:\n# self.logger.exception(u\"Failed to clean address %s \" % addr)\n# continue\n#\n" }, { "alpha_fraction": 0.6043784022331238, "alphanum_fraction": 0.6129788756370544, "avg_line_length": 37.75757598876953, "blob_id": "0e01434410823d7e591b414bd7ec2ee89a58049c", "content_id": "0527570455ea28ef9157f4183e53a9b8b5642389", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1279, "license_type": "no_license", "max_line_length": 88, "num_lines": 33, "path": "/app/fw/transport/sms_gate.py", "repo_name": "StanislavKraev/jb_code", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nimport requests\n\nclass SmsSender(object):\n\n def __init__(self, sms_gate_url, sms_gate_login, sms_gate_password_md5, sender):\n self.sms_gate_url = sms_gate_url\n self.sms_gate_login = sms_gate_login\n self.sms_gate_password_md5 = sms_gate_password_md5\n self.sender = sender\n\n def get_sms_cost(self, data):\n values_cost = data.copy()\n values_cost['cost'] = 1\n values_cost['login'] = self.sms_gate_login\n values_cost['psw'] = self.sms_gate_password_md5\n\n result = requests.post(self.sms_gate_url, data=values_cost, timeout=20)\n json_result = result.json()\n if not json_result or 'cost' not in json_result:\n raise Exception('Invalid answer: %s' % str(result.text))\n return float(json_result['cost'])\n\n def send(self, data):\n values = data.copy()\n values['login'] = self.sms_gate_login\n values['psw'] = self.sms_gate_password_md5\n values['sender'] = self.sender\n result = requests.post(self.sms_gate_url, data=values, timeout=20)\n\n json_result = result.json()\n if \"error\" in json_result and 
\"error_code\" in json_result:\n raise RuntimeError(\"Error sending sms: %s\" % str(json_result['error_code']))\n" } ]
191
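A note on the `TimeCalculator` context manager in `app/common_utils/perf.py` above: it times the wrapped block and, when `use_profile=True`, logs a cProfile report sorted by cumulative time. A minimal usage sketch (Python 2, to match the file's `StringIO`/`unicode` usage; the import path, logger setup, and workload are illustrative assumptions, not part of the repo):

```python
# -*- coding: utf-8 -*-
# Hypothetical caller of TimeCalculator; the import path and logger
# configuration are assumptions for illustration.
import logging

from app.common_utils.perf import TimeCalculator

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("perf")

# Logs elapsed time (plus a cProfile dump) only when the block runs
# longer than 0.5 seconds, thanks to min_time.
with TimeCalculator("square_sum", logger=logger, use_profile=True, min_time=0.5):
    total = sum(i * i for i in xrange(10 ** 6))  # stand-in workload
```

Because `__exit__` returns early when the elapsed time is below `min_time`, fast iterations stay quiet while slow ones produce a full profile.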
ASudu/Numerical_Analysis
https://github.com/ASudu/Numerical_Analysis
56bc93da92dc1945cd2e9e5d4b381716a4846c68
03a1883b341be072e5775973c3feb85616ebcc2e
642b7629d6e892710c0f5cce37d7897f63e5ca49
refs/heads/main
2023-08-16T06:27:32.497849
2021-10-02T12:21:40
2021-10-02T12:21:40
408,797,539
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5362318754196167, "alphanum_fraction": 0.5662909150123596, "avg_line_length": 30.120689392089844, "blob_id": "bd00250874d8e129565bc32ffab7f19f3aaee6a7", "content_id": "8bd8269651c8acab853cd31f0c2387e7d6e4a832", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1863, "license_type": "no_license", "max_line_length": 117, "num_lines": 58, "path": "/Secant.py", "repo_name": "ASudu/Numerical_Analysis", "src_encoding": "UTF-8", "text": "import math\r\n\r\n\r\ndef f(x):\r\n \"\"\"Computes function value for given input x\r\n\r\n Args:\r\n x (float): The input value to compute value of function f at point x\r\n \"\"\"\r\n op = 0.0\r\n # This function is taken for demonstration purposes\r\n op = 3*x + math.sin(x) - math.exp(x)\r\n\r\n return op\r\n\r\ndef swap(x_0,x_1):\r\n if(abs(f(x_0)) < abs(f(x_1))):\r\n temp = x_1\r\n x_1 = x_0\r\n x_0 = temp\r\n else:\r\n pass\r\n return x_0, x_1\r\n\r\ndef secant(x_0,x_1,counter, x_prev = 0.0, tol = 0.0001,iterations = 10):\r\n \"\"\"This method computes the root of a non-linear equation (if exists) given the interval by halving the\r\n interval at each iteration to narrow down to the root\r\n\r\n Args:\r\n x_0 (float): First initial point of method\r\n x_1 (float): Second initial point of method\r\n counter (integer): Variable that keeps track of iterations\r\n x_prev (float, optional): Keeps track of previous midpoint to compute error (used only after first iteration)\r\n tol (float, optional): Tolerance value to stop the iterations(Taken as 0.01% by default)\r\n iterations (integer, optional): Number of iterations to be performed\r\n \"\"\"\r\n x_0, x_1 = swap(x_0,x_1)\r\n x_new = (x_0*f(x_1) - x_1*f(x_0))/(f(x_1) - f(x_0))\r\n counter += 1\r\n print(\"Iteration\",counter,\": x_0= \",x_1,\", x_1= \",x_1,\", x_new= \",x_new)\r\n \r\n # flag = 0 \r\n if((abs(x_prev - x_new)<tol) or (counter > iterations)):\r\n print(\"The root for the given function is: \",x_new)\r\n else:\r\n secant(x_1,x_new,counter,x_new,tol,iterations)\r\n \r\n\r\n \r\nprint(\"Enter two integers a an b\")\r\na,b = map(int, input().split())\r\n# a = 0\r\n# b = 1\r\ncounter = 0\r\nif(f(a)*f(b)>0):\r\n print(\"No root exists in the interval (\", a, \", \", b, \")\")\r\nelse:\r\n secant(a,b,counter,a-1,0.001,5)\r\n" }, { "alpha_fraction": 0.7819095253944397, "alphanum_fraction": 0.787939727306366, "avg_line_length": 81.91666412353516, "blob_id": "4597e4082f741ea45ef3a0a42e7b990520d9f196", "content_id": "4f7e67ffbfd81ee55ae1d332d170c4cfa67a1145", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 995, "license_type": "no_license", "max_line_length": 557, "num_lines": 12, "path": "/README.md", "repo_name": "ASudu/Numerical_Analysis", "src_encoding": "UTF-8", "text": "# Numerical_Analysis\n\nRemember solving quadratic equations in Grade 8, solving for \"x\" in an equation that we had no idea where it would be used? The whole field of mathematics and its applications is about solving equations or system of equations, not just linear, to get down to numbers to get a sense of the real system we are working with. Unfortunately, it is always not possible to find the exact solutions. But we have few methods that help us get a rough estimate of the solution using elegant iterative methods. In this repo, I add pyrhon code for each of these methods.\n\n## Solution to Non-linear Equation:\n1. 
[Bisection method](https://github.com/ASudu/Numerical_Analysis/blob/main/Bisection.py)\n2. [Secant method](https://github.com/ASudu/Numerical_Analysis/blob/main/Secant.py)\n3. [Regula-Falsi method](https://github.com/ASudu/Numerical_Analysis/blob/main/Regula-Falsi.py)\n4. Newton-Raphson method\n5. Fixed point method\n \nThe rest of the methods will be added as the course progresses.\n" } ]
2
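The `secant` routine above implements the update x_new = (x_0*f(x_1) - x_1*f(x_0)) / (f(x_1) - f(x_0)). A self-contained iterative sketch of the same idea on the repo's demonstration function f(x) = 3x + sin(x) - exp(x); the tolerance and iteration cap here are arbitrary illustration choices:

```python
import math

def f(x):
    return 3 * x + math.sin(x) - math.exp(x)

def secant_sketch(x0, x1, tol=1e-4, max_iter=20):
    # Each step takes the x-intercept of the secant line through
    # (x0, f(x0)) and (x1, f(x1)) as the next estimate.
    for _ in range(max_iter):
        x_new = (x0 * f(x1) - x1 * f(x0)) / (f(x1) - f(x0))
        if abs(x_new - x1) < tol:
            return x_new
        x0, x1 = x1, x_new
    return x1

print(secant_sketch(0.0, 1.0))  # converges to roughly 0.3604
```

Unlike bisection, the two starting points are not required to bracket the root, so the repo's `f(a)*f(b) > 0` check is a safeguard rather than a necessity for this method.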
kartikcr/React_vs_Vue_vs_Angular
https://github.com/kartikcr/React_vs_Vue_vs_Angular
230bae7379b5e1f1799556ae4eb147d1b9830363
cc3b835dc3aace83545dbf0d4ff1a8bea9d47e57
d79507960a1e0e00df102abf2b4a0c90e2dd9bba
refs/heads/master
2020-04-28T14:47:20.227441
2018-12-01T05:06:11
2018-12-01T05:06:11
175,348,979
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.771336555480957, "alphanum_fraction": 0.771336555480957, "avg_line_length": 27.227272033691406, "blob_id": "09007ee373c116092ad1c83cabe26bd4bf6ddd8f", "content_id": "f039e172968169fbde8e7aa801b1c9f5822713c4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 621, "license_type": "no_license", "max_line_length": 46, "num_lines": 22, "path": "/pybackend/pyapi/views.py", "repo_name": "kartikcr/React_vs_Vue_vs_Angular", "src_encoding": "UTF-8", "text": "from django.shortcuts import render\nfrom .serializer import *\nfrom .models import *\nfrom rest_framework import generics\n\n\nclass ArticleView(generics.ListAPIView):\n # ListAPIView provides GET method handlers\n queryset = Article.objects.all()\n serializer_class = ArticleSerializer\n\n\nclass CarView(generics.ListAPIView):\n # ListAPIView provides GET method handlers\n queryset = Car.objects.all()\n serializer_class = CarSerializer\n\n\nclass ImageTableView(generics.ListAPIView):\n # ListAPIView provides GET method handlers\n queryset = ImageTable.objects.all()\n serializer_class = ImageTableSerializer\n" }, { "alpha_fraction": 0.37368419766426086, "alphanum_fraction": 0.375, "avg_line_length": 23.516128540039062, "blob_id": "1ef8714b4e6d12132ab769e0142ed026eb7664b9", "content_id": "5a3157074010dc0d42044287b9f345336e08c535", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1520, "license_type": "no_license", "max_line_length": 121, "num_lines": 62, "path": "/todolist/src/ToDoList.js", "repo_name": "kartikcr/React_vs_Vue_vs_Angular", "src_encoding": "UTF-8", "text": "import React from 'react';\nimport Task from './Task';\n\nclass ToDoList extends React.Component{\n constructor(props){\n super(props);\n // Persist state\n this.state = {\n list: [\n {\n 'task':'Enter tasks here' \n }\n ]\n };\n }\n\n newTask = () => {\n this.setState(\n ({list,task}) => ({\n list:[\n ...list,{task}\n ],\n task:''\n })\n )\n };\n\n handleInput = ele => {\n this.setState({\n task: ele.target.value\n });\n };\n\n deleteTask = delIndex => {\n this.setState( \n ({list}) => ({\n list: list.filter((task, index) => index !== delIndex)\n })\n )\n };\n\n render(){\n return(\n <div>\n <h1> To do list </h1>\n <div className = \"ToDoList\">\n {this.state.list.map(\n (task,key)=>{\n return <Task key={key} taskcontent={task.task} taskdelete={this.deleteTask.bind(this,key)} />\n }\n )}\n </div>\n <div>\n <input type=\"text\" value={this.state.task} onChange={this.handleInput} />\n <button onClick={this.newTask} name=\"ToDoList_btn\"> + </button>\n </div>\n </div>\n )\n }\n\n}\nexport default ToDoList;\n" }, { "alpha_fraction": 0.7375504970550537, "alphanum_fraction": 0.8034993410110474, "avg_line_length": 32.772727966308594, "blob_id": "1d3ef0abdec5eb4fabcdc6b5aaf29a40ae2b46de", "content_id": "d4729ac86ca84fc2ae5955ca1407eb7d4526d3f1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 743, "license_type": "no_license", "max_line_length": 85, "num_lines": 22, "path": "/FrontEnd_Sel_Testing/ToDoSelenium/src/config/test1.properties", "repo_name": "kartikcr/React_vs_Vue_vs_Angular", "src_encoding": "UTF-8", "text": 
"app_url=http://10.252.135.102/essbase/ui\napp_host=10.252.135.102\ndefault_admin=admin\ndefault_admin_pwd=Welcome1\ndefault_power=power113\ndefault_power_pwd=Welcome1\ndownload_path=C:\\\\test\ndriver_path=D:/pltfrm_wrkspc/SelAuto/driver\nsv_path=C:\\\\Users\\\\prachauh\\\\Desktop\\\\main\\\\EssCS_12c\\\\SVPlay\nhost=slc12fze.us.oracle.com\nfirefox_path=FFX_PATH\nhost_user=prachauh\nhost_pwd=Systest1#\nmw_home=/scratch/prachauh/12esscs\ncli_loc=/scratch/prachauh/12esscs/dist/bi/products/Essbase/platform/utilities/cli.zip\nlcm_script=/net/slc12fze/scratch/scripts/lcmInitialize.sh\ncli_script=/net/slc12fze/scratch/scripts/CLISetup.sh\ndb_host=slc12fze.us.oracle.com\ndb_service=db7691.us.oracle.com\ndb_port=15083\ndb_user=prachauh2fze_biplatform\ndb_pwd=welcome1\n" }, { "alpha_fraction": 0.7055702805519104, "alphanum_fraction": 0.7055702805519104, "avg_line_length": 33.272727966308594, "blob_id": "8886880e3694e691957483a5b728c050c16bdaae", "content_id": "c759bb41c8524e539413028a6a4d589e005ed8ae", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 377, "license_type": "no_license", "max_line_length": 63, "num_lines": 11, "path": "/pybackend/pyapi/urls.py", "repo_name": "kartikcr/React_vs_Vue_vs_Angular", "src_encoding": "UTF-8", "text": "from django.conf.urls import url, include\nfrom rest_framework.urlpatterns import format_suffix_patterns\nfrom .views import *\n\nurlpatterns = {\n url(r'^articles/$', ArticleView.as_view(), name=\"create\"),\n url(r'^cars/$', CarView.as_view(), name=\"create\"),\n url(r'^images/$', ImageTableView.as_view(), name=\"create\"),\n}\n\nurlpatterns = format_suffix_patterns(urlpatterns)\n" }, { "alpha_fraction": 0.47076311707496643, "alphanum_fraction": 0.5104063153266907, "avg_line_length": 36.407405853271484, "blob_id": "dde1a0688e370a80f370bfbe3365e59b7b1e24cc", "content_id": "f46868dc8601715ffe4b028981c31a8ac182cb8c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1009, "license_type": "no_license", "max_line_length": 59, "num_lines": 27, "path": "/Cypress_test/cypress_combined_test.js", "repo_name": "kartikcr/React_vs_Vue_vs_Angular", "src_encoding": "UTF-8", "text": "describe('The Home Page', function () {\n it('successfully loads', function() {\n cy.visit('http://localhost:3000')\n cy.get('#b1').then(($btn) => {})\n cy.get('#t1').then(($t1) => {})\n //cy.get('d1').then(($divblc1) => {})\n cy.get('div').then(($random) =>{})\n cy.get(\"#A\"+\"1_\"+\"title\").then(($texty1) => {})\n cy.get(\"#A\"+\"1_\"+\"author\").then(($texty2) => {})\n cy.get(\"#A\"+\"1_\"+\"article\").then(($texty3) => {})\n cy.get(\"#A\"+\"2_\"+\"title\").then(($texty1) => {})\n cy.get(\"#A\"+\"2_\"+\"author\").then(($texty2) => {})\n cy.get(\"#A\"+\"2_\"+\"article\").then(($texty3) => {})\n\n/*cy.get('#d1').should(($d1) => {\nconst $divs = $d1.find('div')\nexpect($divs.eq(0)).to.contain('data.title')\n})*/\n//cy.get('#d1').get('#d2').contains('#txt1')\n//cy.get('#d1').then(($d1 => {})\n//cy.get('#d1').get('#d2').get('#txt1').then(($txt1) => {})\n//cy.get('div h2#txt2').then(($txt2) => {})\n//cy.get('div h3#txt3').then(($txt3) => {})\n//cy.get('button').click({ position: 'topLeft' }) \n//cy.get('input[name=TaskContent]').type(`${text}`)\n})\n})" }, { "alpha_fraction": 0.8387096524238586, "alphanum_fraction": 0.8387096524238586, "avg_line_length": 31, "blob_id": "a53edb9e4619ef848d6f71553429f6769a5f5969", "content_id": "65b637cbb57b26d3e0498497ad84acc91e478ca0", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 31, "license_type": "no_license", "max_line_length": 31, "num_lines": 1, "path": "/ToDoSelenium/test-output/old/MATS/todo_test.properties", "repo_name": "kartikcr/React_vs_Vue_vs_Angular", "src_encoding": "UTF-8", "text": "[SuiteResult context=todo_test]" }, { "alpha_fraction": 0.657975435256958, "alphanum_fraction": 0.657975435256958, "avg_line_length": 24.076923370361328, "blob_id": "6f5a66dc90b3395a4b4c592f0271b20ef2f4950c", "content_id": "289e4ab32ca01ae9c241ace25c6957dce5b66529", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 652, "license_type": "no_license", "max_line_length": 56, "num_lines": 26, "path": "/pybackend/pyapi/serializer.py", "repo_name": "kartikcr/React_vs_Vue_vs_Angular", "src_encoding": "UTF-8", "text": "from rest_framework import serializers\nfrom .models import *\n\n\nclass ArticleSerializer(serializers.ModelSerializer):\n class Meta:\n model = Article\n fields=('ID','title', 'author', 'article')\n\n\nclass CarSerializer(serializers.ModelSerializer):\n class Meta:\n model = Car\n fields = ('ID','company', 'model', 'price')\n\n\nclass ArticleSerializer(serializers.ModelSerializer):\n class Meta:\n model = Article\n fields = ('ID','title', 'author', 'article')\n\n\nclass ImageTableSerializer(serializers.ModelSerializer):\n class Meta:\n model = ImageTable\n fields = ('ID','image_url','image_alt_text')\n" }, { "alpha_fraction": 0.5992336869239807, "alphanum_fraction": 0.6053639650344849, "avg_line_length": 24.58823585510254, "blob_id": "82494d5a0a4703ba5462d49842202bf85f3bb055", "content_id": "043d8292c323b8aa23b64439a63542e7f689a55b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1305, "license_type": "no_license", "max_line_length": 115, "num_lines": 51, "path": "/FrontEnd_Sel_Testing/ToDoSelenium/src/com/test/ToDoTest.java", "repo_name": "kartikcr/React_vs_Vue_vs_Angular", "src_encoding": "UTF-8", "text": "package com.test;\n\nimport org.testng.annotations.Test;\n\nimport com.pages.main.Initialize;\nimport com.pages.main.ToDoMainPage;\n\nimport org.testng.annotations.BeforeClass;\nimport org.testng.annotations.Parameters;\nimport org.openqa.selenium.JavascriptExecutor;\nimport org.testng.annotations.AfterClass;\n\n/******************************************************************************************************************\nTest Case: To Do List Sanity\n\nMODIFICATION LOG\n11/29/2018\t\t\t\t\tInitial Development\t\t\t\t\tShraddha\n******************************************************************************************************************/\n\n@Test(groups = { \"mats\" })\npublic class ToDoTest extends Initialize\n{\n\t@BeforeClass\n\t@Parameters({\"browser\", \"platform\"})\n\tpublic void beforeClass(String browser, String platform) \n\t{\n\t\t//Change to @Parameters to be passed from testng.xml\n\t\tinit(browser,platform);\n\t}\n\t\n\tpublic void createLocationAlias() \n\t{\n\t\twdObj.get(propObj.getProperty(\"app_url\"));\n\t\twdObj.manage().window().maximize();\n\t\t((JavascriptExecutor) wdObj).executeScript(\"window.focus();\");\n\t\t\n\t\tToDoMainPage pgObj = new ToDoMainPage(wdObj);\n\t\t\n\t\tpgObj.setText(\"Testing\");\n\t\tpgObj.clickTaskdelete();\n\t\tpgObj.clickAdd();\n\t\t\n\t}\n\n\t\n\t@AfterClass(alwaysRun=true)\n\tpublic void afterClass() \n\t{\t\t\n\t\texitTest();\n\t}\n}\n" }, { "alpha_fraction": 0.7018633484840393, 
"alphanum_fraction": 0.8260869383811951, "avg_line_length": 22.14285659790039, "blob_id": "f82b55023f658a62ab50ede881d8d05453340c2f", "content_id": "6e594efd0687e27defc9c413a1d3b7cd874044bf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 161, "license_type": "no_license", "max_line_length": 48, "num_lines": 7, "path": "/FrontEnd_Sel_Testing/ToDoSelenium/src/config/console.properties", "repo_name": "kartikcr/React_vs_Vue_vs_Angular", "src_encoding": "UTF-8", "text": "email_host=slc07ibo.us.oracle.com\nscanner_host=slcn17vmf0062.c9dev1.oraclecorp.com\nsmtp_port=465\nimap_port=993\nscanner_port=1344\[email protected]\npassword=Welcome1" }, { "alpha_fraction": 0.40458938479423523, "alphanum_fraction": 0.4227053225040436, "avg_line_length": 24.090909957885742, "blob_id": "d1c16a9719ab013b812489651a40cb3781c9ee63", "content_id": "da0643c950bfda28fde6748ceb247834d09360fc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 828, "license_type": "no_license", "max_line_length": 79, "num_lines": 33, "path": "/todolist/src/Car.js", "repo_name": "kartikcr/React_vs_Vue_vs_Angular", "src_encoding": "UTF-8", "text": "import React, {Component} from 'react';\nclass CarRestDisplay extends Component{\n constructor(props){\n super(props);\n this.state={\n data: []\n };\n }\n \n componentDidMount(){\n const this_reference = this;\n fetch('http://152.46.17.4:8080/pyapi/cars')\n .then(results => {return results.json();})\n .then(jsonData => {\n this.setState({data: jsonData});\n })\n }\n \n render(){\n return(\n <div>\n {this.state.data.map(\n (data,index) => (\n <div>\n {data.company} <br/>{data.model}<br/>{data.price}<hr/>\n </div>\n )\n )}\n </div>\n )\n }\n} \nexport default CarRestDisplay;\n" }, { "alpha_fraction": 0.7192474603652954, "alphanum_fraction": 0.730824887752533, "avg_line_length": 16.274999618530273, "blob_id": "445730bf5581817ae4276ff218ed085a5ffc9515", "content_id": "7c483f7443e1bb2b17015d58c2daac3b748a2b00", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 691, "license_type": "no_license", "max_line_length": 68, "num_lines": 40, "path": "/README.md", "repo_name": "kartikcr/React_vs_Vue_vs_Angular", "src_encoding": "UTF-8", "text": "# Python backend\n\n### API reference\n\nClasses and attributes:\n\n1. Car \nGET: pyapi/cars \nAttributes: company (text), model (text), price (integer) \n \n2. Article \nGET: pyapi/articles \nAttributes: title(text), author(text), article(text) \n\n3. ImageTable \nGET: pyapi/images \nAttributes: image_alt_text (text), image_url(text) \n\n4. 
Admin\nAccess: /admin\n\n### Installation steps\nEnsure Django and DRF are installed: \n\n`pip install Django`\n\n`pip install djangorestframework`\n\nMake migrations:\n\n`python3 manage.py makemigrations`\n`python3 manage.py migrate`\n\nCreate superuser:\n\n`python3 manage.py createsuperuser --email [email protected] --username admin`\n\nrunserver: \n\n`python3 manage.py runserver`\n" }, { "alpha_fraction": 0.5674931406974792, "alphanum_fraction": 0.5674931406974792, "avg_line_length": 29.25, "blob_id": "652c6b64509cc7674b787c041e87ef1ede6d2e90", "content_id": "834f161a20fe9a616c90978706ab383dd7bba113", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 363, "license_type": "no_license", "max_line_length": 98, "num_lines": 12, "path": "/todolist/src/Task.js", "repo_name": "kartikcr/React_vs_Vue_vs_Angular", "src_encoding": "UTF-8", "text": "import React from 'react';\nclass Task extends React.Component{\n render() {\n return (\n <div>\n <input type='text' className=\"TaskContent\" value={this.props.taskcontent}></input>\n <button className=\"TaskDelete\" onClick={this.props.taskdelete}> x </button>\n </div>\n );\n }\n}\nexport default Task;\n" }, { "alpha_fraction": 0.7214285731315613, "alphanum_fraction": 0.7357142567634583, "avg_line_length": 20.538461685180664, "blob_id": "17c13b037f78a7dbc4e14a86c29f7a6222b5c2ea", "content_id": "d78da8a733ff5c700717b54aa84d736c500780bb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 280, "license_type": "no_license", "max_line_length": 68, "num_lines": 13, "path": "/todolist/README.md", "repo_name": "kartikcr/React_vs_Vue_vs_Angular", "src_encoding": "UTF-8", "text": "# React ToDoList app\nA simple to do list that allows adding, editing and deleting tasks. 
\n\n### Run \nGo to the src folder and run:\n`npm start`\nThis will run the app on port 3000.\n\n### Installation\n`sudo apt-get install nodejs npm`\n\nIf you get a node_modules error, run:\n`npm install`\n" }, { "alpha_fraction": 0.7584951519966125, "alphanum_fraction": 0.8143203854560852, "avg_line_length": 33.33333206176758, "blob_id": "8d2417b632db36e8a14af5b235f13bbc2e5bb761", "content_id": "9c1449f7c83b9cd12da2e167ca336c032ae62fec", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 824, "license_type": "no_license", "max_line_length": 85, "num_lines": 24, "path": "/FrontEnd_Sel_Testing/ToDoSelenium/src/config/test.properties", "repo_name": "kartikcr/React_vs_Vue_vs_Angular", "src_encoding": "UTF-8", "text": "app_url=localhost:3000\napp_host=slc14sav.us.oracle.com\ndefault_admin=weblogic\ncodecoverage=true\ndefault_admin_pwd=welcome1\ndefault_power=test_power\ndefault_power_pwd=welcome1\ndownload_path=C:\\\\test\ndriver_path=D:/pltfrm_wrkspc/SelAuto/driver\nsv_path=C:\\\\Users\\\\prachauh\\\\Desktop\\\\main\\\\EssCS_12c\\\\SVPlay\nhost=slc12fze.us.oracle.com\nfirefox_path=Macintosh HD\\u2069 \\u25B8 \\u2068Applications\\u2069\nhost_user=prachauh\nhost_pwd=Systest13#\nmw_home=/scratch/prachauh/12esscs\ncli_loc=/scratch/prachauh/12esscs/dist/bi/products/Essbase/platform/utilities/cli.zip\nlcm_script=/net/slc12fze/scratch/scripts/lcmInitialize.sh\ncli_script=/net/slc12fze/scratch/scripts/CLISetup.sh\ngecko_path=geckodriver \\3\ndb_host=slc12fze.us.oracle.com\ndb_service=db7691.us.oracle.com\ndb_port=15083\ndb_user=prachauh2fze_biplatform\ndb_pwd=welcome1\n" }, { "alpha_fraction": 0.6683937907218933, "alphanum_fraction": 0.7253885865211487, "avg_line_length": 37.599998474121094, "blob_id": "9c91b77c4e86b23764b601b5b8d61c844f34a00f", "content_id": "dea25d90e917e88b49f500073fa60bdabf5155d7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 193, "license_type": "no_license", "max_line_length": 86, "num_lines": 5, "path": "/venv/bin/django-admin.py", "repo_name": "kartikcr/React_vs_Vue_vs_Angular", "src_encoding": "UTF-8", "text": "#!/media/varun/0DC10EF30DC10EF3/_Studies/NCSU/Sem 1/SE/react_vs_vue/venv/bin/python3.6\nfrom django.core import management\n\nif __name__ == \"__main__\":\n management.execute_from_command_line()\n" }, { "alpha_fraction": 0.5521472096443176, "alphanum_fraction": 0.5521472096443176, "avg_line_length": 26.16666603088379, "blob_id": "652c6b64509cc7674b787c041e87ef1ede6d2e90", "content_id": "834f161a20fe9a616c90978706ab383dd7bba113", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 489, "license_type": "no_license", "max_line_length": 45, "num_lines": 18, "path": "/todolist/src/Main.js", "repo_name": "kartikcr/React_vs_Vue_vs_Angular", "src_encoding": "UTF-8", "text": "import React, {Component} from 'react'\nimport ToDoList from './ToDoList'\nimport ArticleRestDisplay from './Article'\nimport CarRestDisplay from './Car'\nimport ImageRestDisplay from './Image'\nclass Main extends Component{\n render(){\n return(\n <div>\n <ToDoList /> <hr />\n <ArticleRestDisplay /> <hr />\n <CarRestDisplay /> <hr />\n <ImageRestDisplay />\n </div> \n );\n }\n}\nexport default Main;\n" }, { "alpha_fraction": 0.6943573951721191, "alphanum_fraction": 0.6943573951721191, "avg_line_length": 23.538461685180664, "blob_id": "5f20ed8a784d5dbc9fc38e79d4a840c165b85d3d", "content_id": "c606070b5fed38bb955fc627e2d2268c8522dcd4",
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 638, "license_type": "no_license", "max_line_length": 43, "num_lines": 26, "path": "/pybackend/pyapi/models.py", "repo_name": "kartikcr/React_vs_Vue_vs_Angular", "src_encoding": "UTF-8", "text": "from django.db import models\n\n# Create your models here.\n\n\nclass Article(models.Model):\n # Simulate large text parts\n title = models.TextField()\n author = models.TextField()\n article = models.TextField()\n ID = models.AutoField(primary_key=True)\n\n\nclass Car(models.Model):\n # Simulate a form\n company = models.TextField()\n model = models.TextField()\n price = models.BigIntegerField()\n ID = models.AutoField(primary_key=True)\n\n\nclass ImageTable(models.Model):\n # Simulate image rendering\n image_alt_text = models.TextField()\n image_url = models.TextField()\n ID = models.AutoField(primary_key=True)\n" }, { "alpha_fraction": 0.5194507837295532, "alphanum_fraction": 0.5903890132904053, "avg_line_length": 22, "blob_id": "630fe68d4e3df5b2dca223edc6a2053fcf2285eb", "content_id": "16fb7a36552a8429e901e11cd7abce86f6a7f307", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 437, "license_type": "no_license", "max_line_length": 57, "num_lines": 19, "path": "/pybackend/pyapi/migrations/0003_imagetable_image_alt_text.py", "repo_name": "kartikcr/React_vs_Vue_vs_Angular", "src_encoding": "UTF-8", "text": "# Generated by Django 2.1.3 on 2018-11-25 01:44\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('pyapi', '0002_auto_20181125_0133'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='imagetable',\n name='image_alt_text',\n field=models.TextField(default='sadffassdf'),\n preserve_default=False,\n ),\n ]\n" }, { "alpha_fraction": 0.7862318754196167, "alphanum_fraction": 0.804347813129425, "avg_line_length": 24.18181800842285, "blob_id": "9b3eeaebab5300a6c3c0b9230a5a798f08f6d14f", "content_id": "620ddbc533d5411135bf21b970d1f8980116ead2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 276, "license_type": "no_license", "max_line_length": 43, "num_lines": 11, "path": "/ToDoSelenium/src/config/test.properties", "repo_name": "kartikcr/React_vs_Vue_vs_Angular", "src_encoding": "UTF-8", "text": "app_url=http://localhost:8080/#/todo\n\ndefault_admin=weblogic\ncodecoverage=true\ndefault_admin_pwd=welcome1\ndefault_power=test_power\ndefault_power_pwd=welcome1\ndownload_path=C:\\\\test\ndriver_path=D:/pltfrm_wrkspc/SelAuto/driver\nfirefox_path=FFX_PATH\ngecko_path=geckodriver\\ 3" }, { "alpha_fraction": 0.39168110489845276, "alphanum_fraction": 0.4090121388435364, "avg_line_length": 26.4761905670166, "blob_id": "a10db0077843b195951ad4afc27269cb3b5a5e44", "content_id": "a5bde9898b3cb71549599dc49b659c621b63fd9b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1154, "license_type": "no_license", "max_line_length": 84, "num_lines": 42, "path": "/todolist/src/Article.js", "repo_name": "kartikcr/React_vs_Vue_vs_Angular", "src_encoding": "UTF-8", "text": "import React, {Component} from 'react';\nclass ArticleRestDisplay extends Component{\n constructor(props){\n super(props);\n this.state={\n data: [],\n count: 0\n };\n }\n \n componentDidMount(){\n const this_reference = this;\n fetch('http://152.46.17.4:8080/pyapi/articles')\n .then(results => {return 
results.json();})\n .then(jsonData => {\n this.setState({data: jsonData});\n })\n }\n\n newName(data){\n return \"article_\"+data+\"_\"+this.state.count;\n }\n \n render(){\n return(\n <div>\n {this.state.data.map(\n (data,index) => (\n <div>\n <h1 id={\"A\"+data.ID+\"_title\"}> {data.title} </h1>\n <br/>\n <h2 id={\"A\"+data.ID+\"_author\"}>{data.author}</h2>\n <br/>\n <span id={\"A\"+data.ID+\"_article\"}>{data.article}</span><hr/>\n </div>\n )\n )}\n </div>\n )\n }\n} \nexport default ArticleRestDisplay;\n" } ]
20
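The pybackend README above lists three read-only endpoints (`pyapi/articles/`, `pyapi/cars/`, `pyapi/images/`), each backed by a DRF `ListAPIView`. A hedged client sketch using `requests`; the host and port are placeholders (the repo's React components point at `152.46.17.4:8080`, while a local `runserver` defaults to `127.0.0.1:8000`):

```python
import requests

BASE = "http://127.0.0.1:8000"  # placeholder; substitute your server's host:port

# Car records expose company (text), model (text) and price (integer),
# per the serializer definitions in pyapi/serializer.py.
resp = requests.get(BASE + "/pyapi/cars/")
resp.raise_for_status()
for car in resp.json():
    print("%s %s: %d" % (car["company"], car["model"], car["price"]))
```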
FredyCerron93/SOM
https://github.com/FredyCerron93/SOM
d583bcd0a4b515a3742b83d4fd71e5ee42c57ef0
2307265802647e3d2924097f31f256746311a606
5fd3cf4d39b6404b3032af5b8083f0806956c97b
refs/heads/master
2023-01-21T01:18:30.448413
2020-11-29T16:53:50
2020-11-29T16:53:50
316,997,771
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6373056769371033, "alphanum_fraction": 0.6459412574768066, "avg_line_length": 18.965517044067383, "blob_id": "532933831189c69e15d64c3e8e12c464280af0b3", "content_id": "9824ba9d84afc8fd0747dc405ca03fedb7cc4b59", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 579, "license_type": "no_license", "max_line_length": 61, "num_lines": 29, "path": "/src/neuron.py", "repo_name": "FredyCerron93/SOM", "src_encoding": "UTF-8", "text": "import numpy as np\n\nfrom distance import select_closest\n\ndef generate_network(size):\n\n return np.random.rand(size, 2)\n\ndef get_neighborhood(center, radix, domain):\n\n\n\n if radix < 1:\n radix = 1\n\n\n deltas = np.absolute(center - np.arange(domain))\n distances = np.minimum(deltas, domain - deltas)\n\n\n return np.exp(-(distances*distances) / (2*(radix*radix)))\n\ndef get_route(cities, network):\n\n cities['winner'] = cities[['x', 'y']].apply(\n lambda c: select_closest(network, c),\n axis=1, raw=True)\n\n return cities.sort_values('winner').index\n" }, { "alpha_fraction": 0.6875, "alphanum_fraction": 0.6960227489471436, "avg_line_length": 28.33333396911621, "blob_id": "096f6dcf88b516697299fe059c75eaa8c8f5f24a", "content_id": "bc68e82c1fc50be0de8971146c9daf073149ed52", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 352, "license_type": "no_license", "max_line_length": 70, "num_lines": 12, "path": "/src/distance.py", "repo_name": "FredyCerron93/SOM", "src_encoding": "UTF-8", "text": "import numpy as np\n\ndef select_closest(candidates, origin):\n return euclidean_distance(candidates, origin).argmin()\n\ndef euclidean_distance(a, b):\n return np.linalg.norm(a - b, axis=1)\n\ndef route_distance(cities):\n points = cities[['x', 'y']]\n distances = euclidean_distance(points, np.roll(points, 1, axis=0))\n return np.sum(distances)\n" } ]
2
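`neuron.py` and `distance.py` above provide the pieces of a self-organizing map for routing (a random 2-D neuron array, a Gaussian neighborhood, winner lookup and route extraction), but the training loop itself is not in this snapshot. A hypothetical single update step under the usual SOM formulation, run from the repo's src/ directory; the learning rate, radius and city coordinates are assumed values, not taken from the repo:

```python
import numpy as np

from neuron import generate_network, get_neighborhood
from distance import select_closest

network = generate_network(100)         # 100 neurons scattered on the unit square
city = np.array([0.3, 0.7])             # one (x, y) sample, assumed normalized
learning_rate, radius = 0.8, 10         # assumed schedule values

winner = select_closest(network, city)  # index of the neuron nearest the city
# Gaussian weights centered on the winner, wrapping around the neuron ring
gaussian = get_neighborhood(winner, radius, network.shape[0])
# Pull every neuron toward the city, scaled by its neighborhood weight
network += gaussian[:, np.newaxis] * learning_rate * (city - network)
```

Repeating this step while decaying `learning_rate` and `radius`, then calling `get_route`, would yield an ordering of the cities; that decay schedule is the part this snapshot leaves out.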
egmjr/gaffer
https://github.com/egmjr/gaffer
e9c9baf9b380ea642e5b597ac0471b70aefd231f
dbc644255623a032cf6cf66cd594fc33d7c5c2ba
7d69e7fa8cfd813da03ae6b7c9933b7f89b6b8a9
refs/heads/master
2020-04-03T12:00:06.089727
2018-05-25T15:42:45
2018-05-25T15:42:45
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5322718024253845, "alphanum_fraction": 0.5328404903411865, "avg_line_length": 30.972726821899414, "blob_id": "9f07f9ecf93781c24b624076f9d5c023c1ad0d3b", "content_id": "4323d79823d0582b62cb3ea96e194552c3890afb", "detected_licenses": [ "LicenseRef-scancode-unknown-license-reference", "Unlicense", "MIT", "LicenseRef-scancode-public-domain", "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3517, "license_type": "permissive", "max_line_length": 78, "num_lines": 110, "path": "/gaffer/cli/commands/load.py", "repo_name": "egmjr/gaffer", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -\n#\n# This file is part of gaffer. See the NOTICE for more information.\n\nimport os\n\nfrom .base import Command\nfrom ...httpclient import GafferConflict\nfrom ...process import ProcessConfig\n\n\nclass Load(Command):\n \"\"\"\n usage: gaffer load [-c concurrency|--concurrency concurrency]...\n [--nostart]\n [--app APP] [<file>]\n\n Args\n\n <file> Path to a job configuration or stdin ``-``\n Options\n\n -h, --help\n -c concurrency,--concurrency concurrency Specify the number processes\n to run.\n --nostart Don't start jobs execution\n --app APP application name\n \"\"\"\n\n name = \"load\"\n short_descr = \"load a Procfile application\"\n\n def run(self, config, args):\n if args['<file>']:\n self.load_file(config, args)\n elif config.use_procfile:\n self.load_procfile(config, args)\n else:\n raise RuntimeError(\"procfile or job file is missing\")\n\n def load_file(self, config, args):\n fname = args['<file>']\n server = config.get(\"server\")\n\n # default parameter for start\n start_default = not args[\"--nostart\"]\n\n # load configs\n configs = self.load_jsonconfig(fname)\n\n for conf in configs:\n try:\n name = conf.pop('name')\n cmd = conf.pop('cmd')\n except KeyError:\n raise ValueError(\"invalid job config\")\n\n # parse job name and eventually extract the appname\n appname, name = self.parse_name(name, self.default_appname(config,\n args))\n\n # always force the appname if specified\n if args['--app']:\n appname = args['--app']\n\n start = conf.get('start', start_default)\n\n # finally load the config\n pname = \"%s.%s\" % (appname, name)\n pconfig = ProcessConfig(name, cmd, **conf)\n try:\n server.load(pconfig, sessionid=appname, start=start)\n print(\"%r has been loaded in %s\" % (pname, server.uri))\n except GafferConflict:\n print(\"%r already loaded\" % pname)\n\n print(\"%r has been loaded\" % fname)\n\n def load_procfile(self, config, args):\n procfile, server = config.get(\"procfile\", \"server\")\n appname = self.default_appname(config, args)\n\n start = True\n if args[\"--nostart\"]:\n start = False\n\n # parse the concurrency settings\n concurrency = self.parse_concurrency(args)\n\n # finally send the processes\n for name, cmd_str in procfile.processes():\n if name in procfile.redirect_input:\n redirect_input = True\n else:\n redirect_input = False\n\n cmd, args = procfile.parse_cmd(cmd_str)\n params = dict(args=args, env=procfile.env,\n numprocesses=concurrency.get(name, 1),\n redirect_output=['out', 'err'],\n redirect_input=redirect_input,\n cwd=os.path.abspath(procfile.root))\n\n config = ProcessConfig(name, cmd, **params)\n try:\n server.load(config, sessionid=appname, start=start)\n except GafferConflict:\n print(\"%r already loaded\" % name)\n\n print(\"==> %r has been loaded in %s\" % (appname, server.uri))\n" }, { "alpha_fraction": 0.5805218815803528, "alphanum_fraction": 0.5836795568466187, 
"avg_line_length": 32.614524841308594, "blob_id": "907cc944c6daa07f77af3ca4c3664a1cf3bdc210", "content_id": "aa418d003cb07e8bf1eb7c1f0072ecde6612a9d5", "detected_licenses": [ "LicenseRef-scancode-unknown-license-reference", "Unlicense", "MIT", "LicenseRef-scancode-public-domain", "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6017, "license_type": "permissive", "max_line_length": 78, "num_lines": 179, "path": "/gaffer/httpclient/server.py", "repo_name": "egmjr/gaffer", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -\n#\n# This file is part of gaffer. See the NOTICE for more information.\n\nimport base64\nimport json\n\nfrom ..process import ProcessConfig\nfrom ..util import is_ssl, parse_ssl_options\nfrom .base import BaseClient\nfrom .process import Process\nfrom .job import Job\nfrom .util import make_uri\nfrom .websocket import GafferSocket\n\n\nclass Server(BaseClient):\n \"\"\" Server, main object to connect to a gaffer node. Most of the\n calls are blocking. (but running in the loop) \"\"\"\n\n def __init__(self, uri, loop=None, api_key=None, **options):\n super(Server, self).__init__(uri, loop=loop, **options)\n self.api_key = api_key\n\n def request(self, method, path, headers=None, body=None, **params):\n headers = headers or {}\n # if we have an api key, pass it to the headers\n if self.api_key is not None:\n headers['X-Api-Key'] = self.api_key\n\n # continue the request\n return super(Server, self).request(method, path, headers=headers,\n body=body, **params)\n\n def authenticate(self, username, password):\n \"\"\" authenticate against a gafferd node to retrieve an api key \"\"\"\n # set the basic auth header\n auth_hdr = \"%s:%s\" % (username, password)\n auth_hdr = b\"Basic \" + base64.b64encode(auth_hdr.encode(\"utf-8\"))\n headers = {\"Authorization\": auth_hdr.decode(\"utf-8\")}\n\n # make the request\n resp = self.request(\"get\", \"/auth\", headers=headers)\n\n # set the server api key\n self.api_key = self.json_body(resp)[\"api_key\"]\n\n # return the api key. 
useful for clients that store it for later.\n return self.api_key\n\n @property\n def version(self):\n \"\"\" get gaffer version \"\"\"\n resp = self.request(\"get\", \"/\")\n return self.json_body(resp)['version']\n\n def running(self):\n resp = self.request(\"get\", \"/pids\")\n return self.json_body(resp)['pids']\n\n pids = running\n\n def ping(self):\n resp = self.request(\"get\", \"/ping\")\n return resp.body == b'OK'\n\n def sessions(self):\n \"\"\" get list of current sessions \"\"\"\n resp = self.request(\"get\", \"/sessions\")\n obj = self.json_body(resp)\n return obj['sessions']\n\n def jobs(self, sessionid=None):\n if sessionid is None:\n resp = self.request(\"get\", \"/jobs\")\n else:\n resp = self.request(\"get\", \"/jobs/%s\" % sessionid)\n\n return self.json_body(resp)[\"jobs\"]\n\n def jobs_walk(self, callback, sessionid=None):\n jobs = self.jobs(sessionid)\n for job in jobs:\n sessionid, name = self._parse_name(job)\n callback(self, Job(self, config=name, sessionid=sessionid))\n\n def job_exists(self, name):\n sessionid, name = self._parse_name(name)\n resp = self.request(\"head\", \"/jobs/%s/%s\" % (sessionid, name))\n if resp.code == 200:\n return True\n return False\n\n\n def load(self, config, sessionid=None, start=True, force=False):\n \"\"\" load a process config object.\n\n Args:\n\n - **config**: dict or a ``process.ProcessConfig`` instance\n - **sessionid**: Some processes only make sense in certain contexts.\n this flag instructs gaffer to maintain this process in the sessionid\n context. A context can be for example an application. If no session\n is specified the config will be attached to the ``default`` session.\n - **start**: This flag instructs gaffer to start the loaded process.\n \"\"\"\n\n sessionid = self._sessionid(sessionid)\n headers = {\"Content-Type\": \"application/json\" }\n\n # build config body\n config_dict = config.to_dict()\n config_dict.update({'start': start})\n body = json.dumps(config_dict)\n\n name = \"%s.%s\" % (sessionid, config.name)\n\n if force:\n if self.job_exists(name):\n self.request(\"put\", \"/jobs/%s/%s\" % (sessionid, config.name),\n body=body, headers=headers)\n else:\n self.request(\"post\", \"/jobs/%s\" % sessionid, body=body,\n headers=headers)\n else:\n self.request(\"post\", \"/jobs/%s\" % sessionid, body=body,\n headers=headers)\n\n return Job(server=self, config=config, sessionid=sessionid)\n\n def unload(self, name, sessionid=None):\n sessionid = self._sessionid(sessionid)\n self.request(\"delete\", \"/jobs/%s/%s\" % (sessionid, name))\n return True\n\n def reload(self, name, sessionid=None):\n sessionid = self._sessionid(sessionid)\n self.request(\"post\", \"/jobs/%s/%s/state\" % (sessionid, name),\n body=\"2\")\n return True\n\n def get_job(self, name):\n sessionid, name = self._parse_name(name)\n resp = self.request(\"get\", \"/jobs/%s/%s\" % (sessionid, name))\n config_dict = self.json_body(resp)['config']\n return Job(server=self, config=ProcessConfig.from_dict(config_dict),\n sessionid=sessionid)\n\n def get_process(self, pid):\n return Process(server=self, pid=pid)\n\n def socket(self, heartbeat=None):\n \"\"\" return a direct websocket connection to gaffer \"\"\"\n url0 = make_uri(self.uri, '/channel/websocket')\n url = \"ws%s\" % url0.split(\"http\", 1)[1]\n\n options = {}\n if heartbeat and heartbeat is not None:\n options['heartbeat'] = heartbeat\n\n if is_ssl(url):\n options['ssl_options'] = parse_ssl_options(self.options)\n\n return GafferSocket(self.loop, url, api_key=self.api_key, **options)\n\n def 
_parse_name(self, name):\n        if \".\" in name:\n            sessionid, name = name.split(\".\", 1)\n        elif \"/\" in name:\n            sessionid, name = name.split(\"/\", 1)\n        else:\n            sessionid = \"default\"\n\n        return sessionid, name\n\n    def _sessionid(self, session=None):\n        if not session:\n            return \"default\"\n        return session\n" }, { "alpha_fraction": 0.6941870450973511, "alphanum_fraction": 0.7064027190208435, "avg_line_length": 29.81818199157715, "blob_id": "1b64ff2e837cdcbae258981d3791da3f23d8fec4", "content_id": "779b6a432649f159e5bc474386a991aa2f4f5ea3", "detected_licenses": [ "LicenseRef-scancode-unknown-license-reference", "Unlicense", "MIT", "LicenseRef-scancode-public-domain", "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 2374, "license_type": "permissive", "max_line_length": 84, "num_lines": 77, "path": "/docs/index.rst", "repo_name": "egmjr/gaffer", "src_encoding": "UTF-8", "text": ".. gaffer documentation master file, created by\n   sphinx-quickstart on Tue Oct 9 21:10:46 2012.\n   You can adapt this file completely to your liking, but it should at least\n   contain the root `toctree` directive.\n\nWelcome to gaffer's documentation!\n==================================\n\nGaffer\n======\n\nControl, Watch and Launch your applications and jobs over HTTP.\n\nGaffer is a set of Python modules and tools to easily maintain and\ninteract with your applications or jobs launched on different machines over\nHTTP and websockets.\n\nIt promotes distributed and decentralized topologies without single points of\nfailure, enabling fault tolerance and high availability.\n\n.. raw:: html\n\n    <iframe src=\"http://player.vimeo.com/video/51674172\" width=\"500\"\n    height=\"163\" frameborder=\"0\" webkitAllowFullScreen\n    mozallowfullscreen allowFullScreen></iframe>\n\nFeatures\n--------\n\n    - RESTful HTTP API\n    - Websockets and `SOCKJS <http://sockjs.org>`_ support to interact with\n      a gaffer node from any browser or SOCKJS client.\n    - Framework to manage and interact with your applications and jobs on\n      different machines\n    - Server and :doc:`command-line` tools to manage and interact with your\n      processes\n    - Manages topology information. Clients query gaffer_lookupd to discover\n      gaffer nodes for a specific job or application.\n    - Possibility to use STDIO and PIPES to interact with your\n      applications and processes\n    - Subscribe to process statistics per process or process template\n      and get them in near real-time.\n    - Procfile applications support (see :doc:`gaffer`) but also JSON config\n      support.\n    - Supervisor-like features.\n    - Fully evented. Uses the libuv event loop through the\n      `pyuv library <https://pyuv.readthedocs.io>`_\n    - Flapping: handle cases where your processes crash too much\n    - Easily extensible: add your own endpoint, create your client,\n      embed gaffer in your application, ...\n    - Compatible with Python 2.7.x and 3.x\n\n.. note::\n    gaffer source code is hosted on `Github <http://github.com/benoitc/gaffer.git>`_\n\nContents:\n---------\n\n.. 
toctree::\n   :titlesonly:\n\n   getting-started\n   overview\n   news\n   command-line\n   http\n   webhooks\n   processframework\n   httpclient\n   applications\n\nIndices and tables\n==================\n\n* :ref:`genindex`\n* :ref:`modindex`\n* :ref:`search`\n\n" }, { "alpha_fraction": 0.5728984475135803, "alphanum_fraction": 0.5783712863922119, "avg_line_length": 31.39716339111328, "blob_id": "0ff5e97404e89a04c9b961273329cac043bc6d1b", "content_id": "ae2af22b45d51e3cbb96ba283850b549f895a75f", "detected_licenses": [ "LicenseRef-scancode-unknown-license-reference", "Unlicense", "MIT", "LicenseRef-scancode-public-domain", "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 4568, "license_type": "permissive", "max_line_length": 79, "num_lines": 141, "path": "/docs/gaffer.rst", "repo_name": "egmjr/gaffer", "src_encoding": "UTF-8", "text": ".. _gaffer:\n\nGaffer\n======\n\nThe **gaffer** command line tool is an interface to the :doc:`gaffer\nHTTP api <http>` and includes support for loading/unloading Procfile\napplications, scaling them up and down, and more.\n\nIt can also be used as a manager for Procfile-based applications similar to\nforeman but using the :doc:`gaffer framework <processframework>`. It can run\nyour application directly using a Procfile, or export it to a\ngafferd configuration file or simply to a JSON file that you could send\nto gafferd using the :doc:`HTTP api <http>`.\n\nExample of use\n--------------\n\nFor example using the following **Procfile**::\n\n    dummy: python -u dummy_basic.py\n    dummy1: python -u dummy_basic.py\n\n\nYou can launch all the programs in this procfile using the following\ncommand line::\n\n    $ gaffer start\n\n\n.. image:: _static/gafferp.png\n\n\nOr load them on a gaffer node::\n\n    $ gaffer load\n\nand then scale them up and down::\n\n    $ gaffer scale dummy=3 dummy1+2\n    Scaling dummy processes... done, now running 3\n    Scaling dummy1 processes... done, now running 3\n\n\n.. image:: _static/gaffer_ps.png\n\nOPTIONS\n-------\n\n    -h --help                    show this help message and exit\n    --version                    show version and exit\n    -f procfile,--procfile procfile  Specify an alternate Procfile to load\n    -d root,--directory root     Specify an alternate application root.\n                                 This defaults to the directory\n                                 containing the Procfile [default: .]\n    -e k=v,--env k=v             Specify one or more .env files to load\n    --endpoint endpoint          gafferd node URL to connect to\n                                 [default: http://127.0.0.1:5000]\n\n\nSUBCOMMANDS\n-----------\n\n    **export** [-c concurrency|--concurrency concurrency]\n               [--format=format] [--out=filename] [<name>]\n\n        Export a Procfile\n\n        This command exports a Procfile to a gafferd process settings\n        format. It can be either a JSON file that you could send to gafferd\n        via the JSON API or an ini file that can be included in the\n        gafferd configuration.\n\n        <format>  ini or json\n        --out=filename  path of the file where the export will be saved\n\n    **load** [-c concurrency|--concurrency concurrency] [--nostart] [<name>]\n        Load a Procfile application into gafferd\n\n        <name>  is the name of the application recorded in\n                gafferd. By default it will be the name of your\n                project folder. You can use ``.`` to specify the current\n                folder.\n\n    **ps** [<appname>]\n        List information about your processes\n\n        <appname>  The name of the application (session) of the process\n                   recorded in gafferd. 
By default it will be the name of your\n                   project folder. You can use ``.`` to specify the current\n                   folder.\n\n    **run** [-c] [<args>]...\n        Run one-off commands using the same environment as your\n        defined processes\n\n        -c concurrency\n            Specify the number of each process type to run. The value\n            passed in should be in the format process=num,process=num\n        --concurrency concurrency\n            same as the -c option.\n\n    **scale** [<appname>] [process=value]...\n        Scale your processes\n\n        Procfile applications can scale up or down instantly from the\n        command line or API.\n\n        Scaling a process in an application is done using the scale\n        command:\n\n        ::\n\n            $ gaffer scale dummy=3\n            Scaling dummy processes... done, now running 3\n\n\n        Or both at once:\n\n        ::\n\n            $ gaffer scale dummy=3 dummy1+2\n            Scaling dummy processes... done, now running 3\n            Scaling dummy1 processes... done, now running 3\n\n\n\n\n    **start** [-c concurrency|--concurrency concurrency]\n\n        Start a process type or all process types from the Procfile.\n\n        -c concurrency\n            Specify the number of each process type to run. The value\n            passed in should be in the format process=num,process=num\n        --concurrency concurrency\n            same as the -c option.\n\n\n    **unload** [<name>]\n        Unload a Procfile application from a gafferd node
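\n\n        For example, assuming an application named ``myapp`` was previously\n        loaded (the application name here is only illustrative)::\n\n            $ gaffer unload myapp\n" } ]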
4
mjh09/DS-Unit-3-Sprint-2-SQL-and-Databases
https://github.com/mjh09/DS-Unit-3-Sprint-2-SQL-and-Databases
23d2fb99bf3639dc8b375b2435008ba25f5f2b2b
c2555706389e92ff72b5ae19be415c3cfdf9b796
841b711243c9ebeb6d1185e2bd6943d2cf3a24b4
refs/heads/master
2020-07-03T16:32:59.725261
2019-08-17T05:16:06
2019-08-17T05:16:06
201,970,171
0
0
MIT
2019-08-12T16:40:40
2019-03-25T17:58:26
2019-08-12T16:39:44
null
[ { "alpha_fraction": 0.6375266313552856, "alphanum_fraction": 0.6833688616752625, "avg_line_length": 20.813953399658203, "blob_id": "71a02ac95cc3d796cdeb3ba0164bf0c461b238f5", "content_id": "96a3fda534cfb0262277288a0f888b63ad5c4c41", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1885, "license_type": "permissive", "max_line_length": 71, "num_lines": 86, "path": "/SC/northwood.py", "repo_name": "mjh09/DS-Unit-3-Sprint-2-SQL-and-Databases", "src_encoding": "UTF-8", "text": "import sqlite3\n\n\n\"\"\" \ndocstring for answers to non-stretch questions\n\n1. Côte de Blaye\t263.5\n Thüringer Rostbratwurst\t123.79\n Mishi Kobe Niku\t97\n Sir Rodney's Marmalade\t81\n Carnarvon Tigers\t62.5\n Raclette Courdavault\t55\n Manjimup Dried Apples\t53\n Tarte au sucre\t49.3\n Ipoh Coffee\t46\n Rössle Sauerkraut\t45.6\n\n2. 37.2222222222222\n\n3. 263.5\tAux joyeux ecclésiastiques\n 123.79\tPlutzer Lebensmittelgroßmärkte AG\n 97\tTokyo Traders\n 81\tSpecialty Biscuits, Ltd.\n 62.5\tPavlova, Ltd.\n 55\tGai pâturage\n 53\tG'day, Mate\n 49.3\tForêts d'érables\n 46\tLeka Trading\n 45.6\tPlutzer Lebensmittelgroßmärkte AG\n\n4. Confections\n\n\"\"\"\n\n# Instantiate connection to database and initialize cursor\nconn = sqlite3.connect(\"northwind_small.sqlite3\")\ncurs = conn.cursor()\n\n\n# Query for top ten most expensive products\nquery = \"\"\"\n SELECT ProductName, UnitPrice\n FROM Product \n ORDER BY UnitPrice DESC\n LIMIT 10;\n\"\"\"\ncurs.execute(query)\nprint('Top ten most expensive projects:', curs.fetchall())\n\n\n# Query for average age of employees when hired\nquery = \"\"\"\n SELECT AVG(HireDate) - AVG(BirthDate)\n FROM Employee\n\"\"\"\ncurs.execute(query)\nprint('Average age of employees when hired:', curs.fetchall()[0][0])\n\n\n# Query for top ten expensive products and their suppliers\nquery = \"\"\"\n SELECT UnitPrice, Supplier.CompanyName\n FROM Product\n INNER JOIN Supplier\n ON Product.SupplierID = Supplier.ID\n ORDER BY UnitPrice DESC\n LIMIT 10;\n\"\"\"\ncurs.execute(query)\nprint('Ten most expensive by unit price/supplier:', curs.fetchall())\n\n\n# Query for the largest category\nquery = '''\n SELECT MAX(cnt), CategoryName\n FROM (SELECT CategoryName, COUNT(*) as cnt\n FROM Product\n INNER JOIN Category\n ON Product.CategoryID = Category.ID\n GROUP BY CategoryName);\n'''\ncurs.execute(query)\nprint('Category with the most unique products:', curs.fetchall()[0][1])\n\ncurs.close()\nconn.commit()\n" }, { "alpha_fraction": 0.7025745511054993, "alphanum_fraction": 0.7262872457504272, "avg_line_length": 26.351852416992188, "blob_id": "974b0f534804bb4ae87a4c1de5fb2cc70ffcb132", "content_id": "854d841f35386da4d4d94fbfcc6f2366f0f421ae", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1476, "license_type": "permissive", "max_line_length": 123, "num_lines": 54, "path": "/module1-introduction-to-sql/buddymove_holidayiq.py", "repo_name": "mjh09/DS-Unit-3-Sprint-2-SQL-and-Databases", "src_encoding": "UTF-8", "text": "import sqlite3\nimport pandas as pd \n\n\"\"\"\nIF the database didn't already exist\n\npath = 'C:/Users/Cactuar/Projects/DS-Unit-3-Sprint-2-SQL-and-Databases/module1-introduction-to-sql/buddymove_holidayiq.csv'\n\ndf = pd.read_csv(path)\n\nconn = sqlite3.connect('buddymove_holiday.sqlite3')\n\ndf.to_sql('review', con=conn)\n\n\"\"\"\n\npath = 'C:/Users/Cactuar/Projects/DS-Unit-3-Sprint-2-SQL-and-Databases/buddymove_holidayiq.sqlite3'\nconn = 
sqlite3.connect(path)\ncurs = conn.cursor()\n\nquery = 'SELECT COUNT(Sports) FROM review'\ncurs.execute(query)\nprint('Total rows:', curs.fetchall()[0][0])\n\nquery = '''SELECT User_id FROM review WHERE Nature >= 100\n        AND Shopping >=100'''\ncurs.execute(query)\nprint('Ten Users with >=100 in Nature & Shopping:', curs.fetchmany(10))\n\nquery = 'SELECT AVG(Sports) FROM review'\ncurs.execute(query)\nprint('Average Sports Rating:', curs.fetchall()[0][0])\n\nquery = 'SELECT AVG(Religious) FROM review'\ncurs.execute(query)\nprint('Average Religious Rating:', curs.fetchall()[0][0])\n\nquery = 'SELECT AVG(Nature) FROM review'\ncurs.execute(query)\nprint('Average Nature Rating:', curs.fetchall()[0][0])\n\nquery = 'SELECT AVG(Theatre) FROM review'\ncurs.execute(query)\nprint('Average Theatre Rating:', curs.fetchall()[0][0])\n\nquery = 'SELECT AVG(Shopping) FROM review'\ncurs.execute(query)\nprint('Average Shopping Rating:', curs.fetchall()[0][0])\n\nquery = 'SELECT AVG(Picnic) FROM review'\ncurs.execute(query)\nprint('Average Picnic Rating:', curs.fetchall()[0][0])\n\ncurs.close()" }, { "alpha_fraction": 0.6539365649223328, "alphanum_fraction": 0.6598119735717773, "avg_line_length": 24.402984619140625, "blob_id": "6de1cc88c4e96b23fb1a406689aaf5d2da9f7c14", "content_id": "3a1a13c45063b8c03b8c67854d73a8838c574a70", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1702, "license_type": "permissive", "max_line_length": 108, "num_lines": 67, "path": "/module2-sql-for-analysis/insert_titanic.py", "repo_name": "mjh09/DS-Unit-3-Sprint-2-SQL-and-Databases", "src_encoding": "UTF-8", "text": "import psycopg2\nimport pandas as pd \n\n\"\"\"\nThis script will take a local csv and insert the data into\na PostgreSQL table\n\"\"\"\n\n# Database info\ndbname = 'jehgnrff'\nuser = 'jehgnrff'\npassword = '21Al2FjnMu8SMSi6q505r4qfFZ7JLGTO'\nhost = 'otto.db.elephantsql.com'\n\n# Initialize connection and cursor\npg_conn = psycopg2.connect(dbname=dbname, user=user,\n                           password=password, host=host)\n\npg_curs = pg_conn.cursor()\n\n# Retrieve data\nPATH = 'C:/Users/Cactuar/Projects/DS-Unit-3-Sprint-2-SQL-and-Databases/module2-sql-for-analysis/titanic.csv'\n\ndf = pd.read_csv(PATH)\n\n# Add index column for SQL congruency and rename it\ndf = df.reset_index()\ndf = df.rename(columns={'index':'person_id'})\n\n# Remove apostrophes in 'Name' to avoid breaking the SQL insert string\ndf['Name'] = df['Name'].map(lambda x: x.replace(\"'\", ''))\n\n# Construct list of all the rows for later insertion\npeople = []\nfor i in range(len(df)):\n    people.append(tuple(df.loc[i]))\n\n# Store table creation string as variable\ncreate_table = '''\n    CREATE TABLE titanic (\n        person_id SERIAL PRIMARY KEY,\n        survived INT,\n        pclass INT,\n        name TEXT,\n        sex CHAR(7),\n        age FLOAT(2),\n        siblings_spouses_aboard INT,\n        parents_children_aboard INT,\n        fare NUMERIC(8,5)\n    );\n'''\n\n# Send instructions to create table\npg_curs.execute(create_table)\n\n# Send looped INSERT instructions to table\nfor person in people:\n    insert_data = '''\n    INSERT INTO titanic\n    (survived, pclass, name, sex, age, siblings_spouses_aboard,\n    parents_children_aboard, fare)\n    VALUES ''' + str(person[1:]) + ';'\n    pg_curs.execute(insert_data)\n\n# Close cursor and commit to database\npg_curs.close()\npg_conn.commit()\n" }, { "alpha_fraction": 0.6362323760986328, "alphanum_fraction": 0.6590179800987244, "avg_line_length": 23.34375, "blob_id": "fa4f9862aebd10e5a95107e4996e9108c7bb3a91", "content_id": "4fe17cd72f6d4a3f90fc54d74571e49c39531e47", 
"detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6232, "license_type": "permissive", "max_line_length": 104, "num_lines": 256, "path": "/module4-acid-and-database-scalability-tradeoffs/titanic_queries.py", "repo_name": "mjh09/DS-Unit-3-Sprint-2-SQL-and-Databases", "src_encoding": "UTF-8", "text": "import psycopg2\nimport pandas as pd \n\n\"\"\"\nScript queries titanic data from elephantSQL\n\"\"\"\n\n# Database info\ndbname = 'jehgnrff'\nuser = 'jehgnrff'\npassword = '21Al2FjnMu8SMSi6q505r4qfFZ7JLGTO'\nhost = 'otto.db.elephantsql.com'\n\n# Initalize connection and cursor to db \npg_conn = psycopg2.connect(dbname=dbname, user=user,\n password=password, host=host)\n\npg_curs = pg_conn.cursor()\n\n\n# Queries\nquery = 'SELECT COUNT(survived) FROM titanic WHERE survived = 1;'\npg_curs.execute(query)\nprint(pg_curs.fetchall()[0][0], 'passengers survived')\n\n\nquery = 'SELECT COUNT(survived) FROM titanic WHERE survived = 0;'\npg_curs.execute(query)\nprint(pg_curs.fetchall()[0][0], 'passengers died')\n\n\nquery = 'SELECT COUNT(pclass) FROM titanic WHERE pclass = 1;'\npg_curs.execute(query)\nprint(pg_curs.fetchall()[0][0], 'passengers in class 1')\n\n\nquery = 'SELECT COUNT(pclass) FROM titanic WHERE pclass = 2;'\npg_curs.execute(query)\nprint(pg_curs.fetchall()[0][0], 'passengers in class 2')\n\n\nquery = 'SELECT COUNT(pclass) FROM titanic WHERE pclass = 3;'\npg_curs.execute(query)\nprint(pg_curs.fetchall()[0][0], 'passengers in class 3')\n\n\nquery = '''\n SELECT COUNT(pclass) FROM titanic\n WHERE pclass = 1 AND survived = 1;\n '''\npg_curs.execute(query)\nprint(pg_curs.fetchall()[0][0], 'passengers in class 1 survied')\n\n\nquery = '''\n SELECT COUNT(pclass) FROM titanic\n WHERE pclass = 2 AND survived = 1;\n '''\npg_curs.execute(query)\nprint(pg_curs.fetchall()[0][0], 'passengers in class 2 survied')\n\n\nquery = '''\n SELECT COUNT(pclass) FROM titanic\n WHERE pclass = 3 AND survived = 1;\n '''\npg_curs.execute(query)\nprint(pg_curs.fetchall()[0][0], 'passengers in class 3 survied')\n\n\nquery = '''\n SELECT COUNT(pclass) FROM titanic\n WHERE pclass = 1 AND survived = 0;\n '''\npg_curs.execute(query)\nprint(pg_curs.fetchall()[0][0], 'passengers in class 1 died')\n\n\nquery = '''\n SELECT COUNT(pclass) FROM titanic\n WHERE pclass = 2 AND survived = 0;\n '''\npg_curs.execute(query)\nprint(pg_curs.fetchall()[0][0], 'passengers in class 2 died')\n\n\nquery = '''\n SELECT COUNT(pclass) FROM titanic\n WHERE pclass = 3 AND survived = 0;\n '''\npg_curs.execute(query)\nprint(pg_curs.fetchall()[0][0], 'passengers in class 3 died')\n\n\nquery = '''\n SELECT AVG(age) FROM titanic\n WHERE survived = 1;\n '''\npg_curs.execute(query)\nprint('Average age of survivors:', round(pg_curs.fetchall()[0][0], 2))\n\n\nquery = '''\n SELECT AVG(age) FROM titanic\n WHERE survived = 0;\n '''\npg_curs.execute(query)\nprint('Average age of casualties:', round(pg_curs.fetchall()[0][0], 2))\n\n\nquery = '''\n SELECT AVG(age) FROM titanic\n WHERE pclass = 1;\n '''\npg_curs.execute(query)\nprint('Average age of class 1:', round(pg_curs.fetchall()[0][0], 2))\n\n\nquery = '''\n SELECT AVG(age) FROM titanic\n WHERE pclass = 2;\n '''\npg_curs.execute(query)\nprint('Average age of class 2:', round(pg_curs.fetchall()[0][0], 2))\n\n\nquery = '''\n SELECT AVG(age) FROM titanic\n WHERE pclass = 3;\n '''\npg_curs.execute(query)\nprint('Average age of class 3:', round(pg_curs.fetchall()[0][0], 2))\n\nquery = '''\n SELECT AVG(fare) FROM titanic\n WHERE pclass = 1;\n 
'''\npg_curs.execute(query)\nprint('Average fare of class 1:', round(pg_curs.fetchall()[0][0], 2))\n\n\nquery = '''\n SELECT AVG(fare) FROM titanic\n WHERE pclass = 2;\n '''\npg_curs.execute(query)\nprint('Average fare of class 2:', round(pg_curs.fetchall()[0][0], 2))\n\n\nquery = '''\n SELECT AVG(fare) FROM titanic\n WHERE pclass = 3;\n '''\npg_curs.execute(query)\nprint('Average fare of class 3:', round(pg_curs.fetchall()[0][0], 2))\n\n\nquery = '''\n SELECT AVG(fare) FROM titanic\n WHERE survived = 0;\n '''\npg_curs.execute(query)\nprint('Average fare of deceased:', round(pg_curs.fetchall()[0][0], 2))\n\n\nquery = '''\n SELECT AVG(siblings_spouses_aboard) FROM titanic\n WHERE pclass = 1;\n '''\npg_curs.execute(query)\nprint('Average siblings/spouses aboard for class 1:', round(pg_curs.fetchall()[0][0], 2))\n\n\nquery = '''\n SELECT AVG(siblings_spouses_aboard) FROM titanic\n WHERE pclass = 2;\n '''\npg_curs.execute(query)\nprint('Average siblings/spouses aboard for class 2:', round(pg_curs.fetchall()[0][0], 2))\n\n\nquery = '''\n SELECT AVG(siblings_spouses_aboard) FROM titanic\n WHERE pclass = 3;\n '''\npg_curs.execute(query)\nprint('Average siblings/spouses aboard for class 3:', round(pg_curs.fetchall()[0][0], 2))\n\n\nquery = '''\n SELECT AVG(siblings_spouses_aboard) FROM titanic\n WHERE survived = 1;\n '''\npg_curs.execute(query)\nprint('Average siblings/spouses aboard for survivors:', round(pg_curs.fetchall()[0][0], 2))\n\n\nquery = '''\n SELECT AVG(siblings_spouses_aboard) FROM titanic\n WHERE survived = 0;\n '''\npg_curs.execute(query)\nprint('Average siblings/spouses aboard for deceased:', round(pg_curs.fetchall()[0][0], 2))\n\n\nquery = '''\n SELECT AVG(parents_children_aboard) FROM titanic\n WHERE pclass = 1;\n '''\npg_curs.execute(query)\nprint('Average parents/children aboard for class 1:', round(pg_curs.fetchall()[0][0], 2))\n\n\nquery = '''\n SELECT AVG(parents_children_aboard) FROM titanic\n WHERE pclass = 2;\n '''\npg_curs.execute(query)\nprint('Average parents/children aboard for class 2:', round(pg_curs.fetchall()[0][0], 2))\n\n\nquery = '''\n SELECT AVG(parents_children_aboard) FROM titanic\n WHERE pclass = 3;\n '''\npg_curs.execute(query)\nprint('Average parents/children aboard for class 3:', round(pg_curs.fetchall()[0][0], 2))\n\n\nquery = '''\n SELECT AVG(parents_children_aboard) FROM titanic\n WHERE survived = 1;\n '''\npg_curs.execute(query)\nprint('Average parents/children aboard for survivors:', round(pg_curs.fetchall()[0][0], 2))\n\n\nquery = '''\n SELECT AVG(parents_children_aboard) FROM titanic\n WHERE survived = 0;\n '''\npg_curs.execute(query)\nprint('Average parents/children aboard deceased:', round(pg_curs.fetchall()[0][0], 2))\n\nquery = '''\n SELECT COUNT(total.nm) FROM\n(SELECT a.first_name, count(*) as nm\nFROM (SELECT split_part(name, ' ', 2) AS first_name\nFROM titanic) as a\nGROUP BY a.first_name\nHAVING COUNT(*) > 1) as total\n'''\npg_curs.execute(query)\nprint('Number of passengers who share a first name with at least one other:', pg_curs.fetchall()[0][0])\n\npg_curs.close()\n" }, { "alpha_fraction": 0.6484687328338623, "alphanum_fraction": 0.6884154677391052, "avg_line_length": 29.059999465942383, "blob_id": "9ec65c76a4c9b1f7a2d25a36d012d3519c2eb61f", "content_id": "9637315208d9caf903c6197506e0eab3b21045d0", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1502, "license_type": "permissive", "max_line_length": 110, "num_lines": 50, "path": 
"/module3-nosql-and-document-oriented-databases/insert_rpg_mongoDB.py", "repo_name": "mjh09/DS-Unit-3-Sprint-2-SQL-and-Databases", "src_encoding": "UTF-8", "text": "import pymongo\nimport sqlite3\n\n'''\nThe purpose of this script is to load a local sqlite3 file and insert\nit's contents into a mongoDB 'NoSQL' database\n'''\n\n# File path to sqlite tables\nSOURCE = 'C:/Users/Cactuar/Projects/DS-Unit-3-Sprint-2-SQL-and-Databases'\npath = SOURCE + '/module1-introduction-to-sql/rpg_db.sqlite3'\n\n# Initializing the connection and cursor\nsl_conn = sqlite3.connect(path)\nsl_curs = sl_conn.cursor()\n\n# Generating a list of tuples for each character in table\ncharacters = sl_curs.execute(\n 'SELECT * FROM charactercreator_character;'\n ).fetchall()\n\n# Connecting to database\nSOURCE = 'mongodb://mjh09:[email protected]:27017,'\npath = SOURCE + 'cluster0-shard-00-01-ojdma.mongodb.net:27017,'\npath1 = path + 'cluster0-shard-00-02-ojdma.mongodb.net:27017/test?ssl=true&replicaSet'\npath2 = path1 + '27017/test?ssl=true&replicaSet=Cluster0-shard-0&authSource=admin&retryWrites=true&w=majority'\nclient = pymongo.MongoClient(path2)\ndb = client.test\n\n# For loop to insert\nfor character in characters:\n db.test.insert_one({\n 'sql_id' : character[0],\n 'name' : character[1],\n 'level' : character[2],\n 'exp' : character[3],\n 'hp' : character[4],\n 'strength' : character[5],\n 'intelligence' : character[6],\n 'dexterity' : character[7],\n 'wisdom' : character[8],\n })\n\n\n# Used to check\n#print(len(characters))\n#print(db)\n#print(db.test.find_one({'sql_id' : 4}))\nsl_curs.close()\nsl_conn.commit()" }, { "alpha_fraction": 0.5901886820793152, "alphanum_fraction": 0.6067924499511719, "avg_line_length": 16.66666603088379, "blob_id": "b5aae2ec023348cce84aae0bef8232636c5b0ec5", "content_id": "8b639d3d3d9302115ee32908f6fdf1743c7dd9c4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1325, "license_type": "permissive", "max_line_length": 61, "num_lines": 75, "path": "/SC/demo_data.py", "repo_name": "mjh09/DS-Unit-3-Sprint-2-SQL-and-Databases", "src_encoding": "UTF-8", "text": "import sqlite3\n\n'''\ndocsting for query results\nTotal number of rows : 3\nRows with x,y >=5: 2\nNumber of unique values in y: 2\n'''\n\n\n# Instaniate Connection to Database\nconn = sqlite3.connect(\"demo_data.sqlite3\")\ncurs = conn.cursor()\n\n\n# Variable to create a table schema\ncreate_table = \"\"\"\n CREATE TABLE demo (\n s TEXT,\n x INT,\n y INT\n);\n\"\"\"\ncurs.execute(create_table)\n\n\n# List of data to be inserted\ndata = [(\"g\", 3, 9), (\"v\", 5, 7), (\"f\", 8, 7)]\n\n\n# Loop to execute data insertion\nfor datum in data:\n insert_data = (\n \"\"\"\n INSERT INTO demo (s, x, y)\n VALUES \"\"\"\n + str(datum[:])\n + \";\"\n )\n curs.execute(insert_data)\n\nconn.commit()\n\n\n# Print query to find the number of rows in table\nquery = '''\n SELECT COUNT(s)\n FROM demo\n'''\ncurs.execute(query)\nprint('Total number of rows:', curs.fetchall()[0][0])\n\n\n# Print a conditional query from database\nquery = \"\"\"\n SELECT COUNT(s) \n FROM demo\n WHERE x >= 5 AND y >=5;\n \"\"\"\ncurs.execute(query)\nprint(\"Rows with x,y >=5:\", curs.fetchall()[0][0])\n\n\n# Print a aggregate query from database\nquery = \"\"\"\n SELECT COUNT(DISTINCT(y))\n FROM DEMO\n \"\"\"\ncurs.execute(query)\nprint(\"Number of unique values in y:\", curs.fetchall()[0][0])\n\n\n# Commit actions and close cursor\ncurs.close()\nconn.commit()\n" }, { "alpha_fraction": 0.7624810934066772, 
"alphanum_fraction": 0.7624810934066772, "avg_line_length": 49.88461685180664, "blob_id": "a2a8a484bab4baf4a01d1277ddd2e791eb0f9bd5", "content_id": "973253458a070b8b8bd334ce402f4f8b5242ffcf", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1322, "license_type": "permissive", "max_line_length": 126, "num_lines": 26, "path": "/SC/part4.md", "repo_name": "mjh09/DS-Unit-3-Sprint-2-SQL-and-Databases", "src_encoding": "UTF-8", "text": "In the Northwind database, what is the type of relationship between the\n `Employee` and `Territory` tables?\n\n - The type of relationship between the 'Employee' and 'Territory'\n tables can be described as many-to-one. This means that many \n employees can belong to a single territory. For the most part,\n the opposite is untru in that one person doesn't typically \n live in multiple territories.\n\nWhat is a situation where a document store (like MongoDB) is appropriate, and what is a situation where it is not appropriate?\n\n - MongoDB is usefull in scenarios where the schema can be flexible.\n This is especially usefull in start-up type scenarios where a team\n can prototype a project quickly. MongoDB wouldn't be appropriate\n for more secure types of databases that require strict adherance to\n a standard and rigid implementation: like a bank or public records\n office.\n\nWhat is \"NewSQL\", and what is it trying to achieve?\n\n - NewSQl is attempting to bridge the gap between SQL databases\n and NoSQL databases. It relaxes some constraints on standard\n practices such as availability and consistency in order to achieve\n a faster transaction times with large scale databases. NewSQL still \n lives in a space of innovation, spured by the growth of horizontal\n scalability." 
}, { "alpha_fraction": 0.7363710403442383, "alphanum_fraction": 0.7502034306526184, "avg_line_length": 37.40625, "blob_id": "adc2cae0a23dd9271065d8342b4e3cac137ccf8d", "content_id": "97fdbf4ebb74849ae51e76fdd10fa5bd8200e126", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2458, "license_type": "permissive", "max_line_length": 114, "num_lines": 64, "path": "/module1-introduction-to-sql/rpg_queries.py", "repo_name": "mjh09/DS-Unit-3-Sprint-2-SQL-and-Databases", "src_encoding": "UTF-8", "text": "import sqlite3\n\npath = \"C:/Users/Cactuar/Projects/DS-Unit-3-Sprint-2-SQL-and-Databases/module1-introduction-to-sql/rpg_db.sqlite3\"\nconn = sqlite3.connect(path)\ncurs = conn.cursor()\n\nquery = \"SELECT COUNT(character_id) FROM charactercreator_character;\"\ncurs.execute(query)\nprint(\"Total Characters:\", curs.fetchall()[0][0])\n\nquery = \"SELECT COUNT(character_ptr_id) FROM charactercreator_cleric\"\ncurs.execute(query)\nprint(\"Total Cleric Characters:\", curs.fetchall()[0][0])\n\nquery = \"SELECT COUNT(character_ptr_id) FROM charactercreator_fighter\"\ncurs.execute(query)\nprint(\"Total Fighter Characters:\", curs.fetchall()[0][0])\n\nquery = \"SELECT COUNT(character_ptr_id) FROM charactercreator_mage\"\ncurs.execute(query)\nprint(\"Total Mage Characters:\", curs.fetchall()[0][0])\n\nquery = \"SELECT COUNT(mage_ptr_id) FROM charactercreator_necromancer\"\ncurs.execute(query)\nprint(\"Total Necromancer Characters:\", curs.fetchall()[0][0])\n\nquery = \"SELECT COUNT(character_ptr_id) FROM charactercreator_thief\"\ncurs.execute(query)\nprint(\"Total Thief Characters:\", curs.fetchall()[0][0])\n\nquery = \"SELECT COUNT(item_id) FROM armory_item\"\ncurs.execute(query)\nprint(\"Total Items:\", curs.fetchall()[0][0])\n\nquery = '''SELECT COUNT(item_ptr_id) FROM armory_weapon\n INNER JOIN armory_item ON item_id = item_ptr_id'''\ncurs.execute(query)\nprint(\"Total Weapons in Items:\", curs.fetchall()[0][0])\n\n\nquery = \"SELECT item_id FROM armory_item EXCEPT SELECT item_ptr_id FROM armory_weapon\"\ncurs.execute(query)\nprint(\"Total Items minus Weapons:\", len(curs.fetchall()))\n\nquery = '''SELECT COUNT(item_id) FROM charactercreator_character_inventory\n GROUP BY character_id'''\ncurs.execute(query)\nprint(\"First 20 Items per Character:\", curs.fetchmany(20))\n\nquery = '''SELECT COUNT(item_id) FROM charactercreator_character_inventory\n INNER JOIN armory_weapon ON item_id = item_ptr_id GROUP BY character_id'''\ncurs.execute(query)\nprint(\"First 20 Weapons per Character:\", curs.fetchmany(20))\n\nquery = '''SELECT AVG(item_count) FROM (SELECT COUNT(item_id) \n AS item_count FROM charactercreator_character_inventory GROUP BY character_id);'''\ncurs.execute(query)\nprint('Average Items per Character:', curs.fetchall()[0][0])\n\nquery = '''SELECT AVG(weapon_count) FROM (SELECT COUNT(item_id)\n AS weapon_count FROM charactercreator_character_inventory \n INNER JOIN armory_weapon ON item_id = item_ptr_id GROUP BY character_id)'''\ncurs.execute(query)\nprint('Average Weapons per Character:', curs.fetchall()[0][0])\n" } ]
8
bingyinh/nanomine_xlsx2xml
https://github.com/bingyinh/nanomine_xlsx2xml
d49bbf4a40cd50b9a27857ad7d1eb72e6ec40f44
def8c962c30abd6c2a947127b5e12784262e4194
4a0984e337d969035943b1c960a78a0276c087c3
refs/heads/master
2023-07-22T03:57:25.537010
2022-10-06T00:56:12
2022-10-06T00:56:12
134,615,285
3
0
null
2018-05-23T19:17:15
2022-11-16T07:29:49
2023-07-05T21:40:57
Python
[ { "alpha_fraction": 0.5176470875740051, "alphanum_fraction": 0.7176470756530762, "avg_line_length": 16.200000762939453, "blob_id": "37859b805dc70a674af804cdfd8e85bfc067a61b", "content_id": "8ee9bab0649302dc0562b8075be1cefd806ad30a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 85, "license_type": "no_license", "max_line_length": 22, "num_lines": 5, "path": "/requirements.txt", "repo_name": "bingyinh/nanomine_xlsx2xml", "src_encoding": "UTF-8", "text": "MechanicalSoup==0.11.0\nbeautifulsoup4==4.7.1\ndicttoxml==1.7.4\nxlrd==1.2.0\nlxml==4.6.5" }, { "alpha_fraction": 0.600715160369873, "alphanum_fraction": 0.6076498031616211, "avg_line_length": 41.33945083618164, "blob_id": "aff020cb5e23554ea3dfba8bf2a34fb1935649ed", "content_id": "2fa488b9f6c4dc0b48282b78c633a047e035cd46", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9229, "license_type": "no_license", "max_line_length": 165, "num_lines": 218, "path": "/extract_verify_ID_callable.py", "repo_name": "bingyinh/nanomine_xlsx2xml", "src_encoding": "UTF-8", "text": "## Excel worksheet ID extraction script\n## By Bingyin Hu 05/25/2018\n## updated to use python 3 by Bingyin Hu 07/01/2019\n\nimport xlrd\nimport sys\nfrom doiretriever import mainDOIsoupFirst\nfrom customized_compiler_callable import sortSequence\nimport pickle\nfrom lxml import etree\nimport dicttoxml\nimport collections\nimport copy\nimport datetime\n\n# a helper method to find a blurry match regardless of # signs between two\n# strings, testant is the standard expression\ndef match(testee, testant):\n if (testant.lower() == testee.lower()):\n return True\n elif (testant.lower() == testee.lower().split(\"#\")[0].strip()):\n return True\n return False\n\n# the method to verify ID\ndef verifyID(ID_raw):\n message = '' # init\n SID = ID_raw\n if SID[0].isalpha():\n # SID starts with the wrong alphabet\n if SID[0] != 'S':\n message += '[SID Error] Sample ID format error: SID must start with \"S\" case-sensitive. Current upload starts with \"%s\". Example: \"S7\".\\n' % (SID[0])\n # SID length\n if len(SID) < 2:\n message += '[SID Error] Sample ID format error: SID must have at least a length of 2. Current upload has a length of \"%s\". Example: \"S7\".\\n' % (len(SID))\n # SID ends with non-digits\n elif not SID[1:].isdigit():\n message += '[SID Error] Sample ID format error: SID must end with numbers. Current upload ends with \"%s\". Example: \"S7\".\\n' % (SID[1:])\n else:\n # SID starts with non-alphabet\n message += '[SID Error] Sample ID format error: SID must start with \"S\". Current upload is missing the alphabet. 
Example: \"S7\".\\n'\n return message\n \n# the method to extract ID\ndef extractID(xlsxName, myXSDtree, jobDir, code_srcDir):\n # open xlsx\n # xlrd is the library used to read xlsx file\n # https://secure.simplistix.co.uk/svn/xlrd/trunk/xlrd/doc/xlrd.html?p=4966\n xlfile = xlrd.open_workbook(xlsxName)\n # find the sheet with ID\n sheet_sample = '' # init\n sheets = xlfile.sheets()\n for sheet in sheets:\n # check the header of the sheet to determine what it has inside\n if (sheet.nrows > 0 and sheet.row_values(0)[0].strip().lower() == \"sample info\"):\n sheet_sample = sheet\n # if the sheet with ID is not found, write error message in jobDir/ID.txt\n message = ''\n if sheet_sample == '':\n message += '[Excel Error] Excel template format error: Sample_Info sheet not found.\\n'\n with open(jobDir + '/error_message.txt', 'a') as fid:\n fid.write(message)\n return\n # special case for experimental data\n lab = False\n for row in range(sheet_sample.nrows):\n if match(sheet_sample.row_values(row)[0], 'Sample ID'):\n ID_raw = str(sheet_sample.row_values(row)[1])\n # if no ID is entered in the cell\n if len(ID_raw.strip()) == 0:\n message += '[Excel Error] Excel template value error: Sample ID is not entered in the uploaded Excel template.\\n'\n if match(sheet_sample.row_values(row)[0], 'Citation Type'):\n if sheet_sample.row_values(row)[1] == 'lab-generated':\n lab = True\n if lab:\n if message != '':\n # write the message in jobDir/error_message.txt\n with open(jobDir + '/error_message.txt', 'a') as fid:\n fid.write(message)\n else:\n # write the ID in jobDir/ID.txt\n with open(jobDir + '/ID.txt', 'w') as fid:\n fid.write(ID_raw.strip())\n return\n # otherwise, find and save the ID in jobDir/ID.txt\n for row in range(sheet_sample.nrows):\n # ID\n if match(sheet_sample.row_values(row)[0], 'Sample ID'):\n ID_raw = str(sheet_sample.row_values(row)[1])\n # if no ID is entered in the cell\n if len(ID_raw.strip()) == 0:\n message += '[Excel Error] Excel template value error: Sample ID is not entered in the uploaded Excel template.\\n'\n # else verify the entered ID\n else:\n message += verifyID(ID_raw)\n # DOI\n if match(sheet_sample.row_values(row)[0], 'DOI'):\n DOI = str(sheet_sample.row_values(row)[1]).strip()\n # if no error detected\n if message == '':\n # call localDOI here\n localdoiDict = localDOI(DOI, myXSDtree, code_srcDir)\n # if doi is not valid\n if localdoiDict is None:\n with open(jobDir + '/error_message.txt', 'a') as fid:\n fid.write('[DOI Error] Please check the reported DOI, it seems that DOI does not exist.\\n')\n return\n # special case, special issue made-up DOI, format: ma-SI-FirstName-LastName\n if 'ma-SI' in DOI:\n LastName = DOI.split('-')[-1]\n CurrentYear = str(datetime.datetime.now().year)\n newID = '_'.join([str(localdoiDict['paperID']), ID_raw, LastName, CurrentYear])\n else:\n # generate ID here\n newID = generateID(localdoiDict, ID_raw)\n # write the ID in jobDir/ID.txt\n with open(jobDir + '/ID.txt', 'w') as fid:\n fid.write(newID)\n else:\n # write the message in jobDir/error_message.txt\n with open(jobDir + '/error_message.txt', 'a') as fid:\n fid.write(message)\n return\n\n\n# check local dict for doi info\ndef localDOI(DOI, myXSDtree, code_srcDir):\n with open(code_srcDir + '/doi.pkl','rb') as f:\n alldoiDict = pickle.load(f)\n rollback = copy.deepcopy(alldoiDict)\n if DOI not in alldoiDict:\n # assign it 'nextPID', update 'nextPID', save it into alldoiDict, update\n # doi.pkl, fetching the metadata is slow, so we need to make sure the\n # paperID is updated in 
the doi.pkl first to avoid collision.\n PID = 'L' + str(alldoiDict['nextPID'])\n alldoiDict['nextPID'] += 1\n alldoiDict[DOI] = {'paperID': PID}\n with open(code_srcDir + '/doi.pkl', 'wb') as f:\n pickle.dump(alldoiDict, f)\n # special case, special issue madeup DOI\n if 'ma-SI' in DOI:\n return alldoiDict[DOI]\n # now fetch the metadata using doi-crawler and save to alldoiDict, doi.pkl\n crawlerDict = mainDOIsoupFirst(DOI)\n # if doi is not valid, mainDOIsoupFirst() returns {}\n if len(crawlerDict) == 0:\n with open(code_srcDir + '/doi.pkl', 'wb') as f:\n pickle.dump(rollback, f)\n return None\n # transfer the newdoiDict to an xml element\n xmlstring = dict2element(crawlerDict, myXSDtree) # an xml element string\n alldoiDict[DOI]['metadata'] = xmlstring\n # update the doi.pkl for the metadata field\n with open(code_srcDir + '/doi.pkl', 'wb') as f:\n pickle.dump(alldoiDict, f)\n return alldoiDict[DOI]\n else:\n return alldoiDict[DOI]\n\n# generate ID with format PID_SID_LastName_PubYear for users with DOI\ndef generateID(doiDict, SID):\n PID = doiDict['paperID']\n LastName = 'LastName'\n tree = etree.XML(doiDict['metadata'])\n Name = tree.find('.//Author')\n if Name is not None:\n LastName = Name.text.split(',')[0]\n PubYear = 'PubYear'\n PubYearRaw = tree.find('.//PublicationYear')\n if PubYearRaw is not None:\n PubYear = PubYearRaw.text\n return '_'.join([str(PID), SID, LastName, PubYear])\n\n# convert DOI crawler dict into an xml element string\ndef dict2element(crawlerDict, myXSDtree):\n # init\n CommonFields = []\n Journal = []\n Citation = collections.OrderedDict()\n CitationType = collections.OrderedDict()\n output = collections.OrderedDict()\n # port dict infos into lists\n for key in crawlerDict:\n if key == \"ISSN\" or key == \"Issue\":\n if len(crawlerDict[key]) > 0:\n Journal.append({key: crawlerDict[key][0]})\n elif key == \"Author\" or key == \"Keyword\":\n if len(crawlerDict[key]) > 0:\n for value in crawlerDict[key]:\n CommonFields.append({key: value})\n elif key == \"Institution\":\n if len(crawlerDict[key]) > 0:\n CommonFields.append({\"Location\": crawlerDict[key][0]})\n else:\n if len(crawlerDict[key]) > 0:\n CommonFields.append({key: crawlerDict[key][0]})\n # sort sequence\n CommonFields = sortSequence(CommonFields, 'CommonFields', myXSDtree)\n Journal = sortSequence(Journal, 'Journal', myXSDtree)\n # save to a dict\n if len(CommonFields) > 0:\n Citation['CommonFields'] = CommonFields\n if len(Journal) > 0:\n CitationType = collections.OrderedDict([('Journal',Journal)])\n if len(CitationType) > 0:\n Citation['CitationType'] = CitationType\n if len(Citation) > 0:\n output = collections.OrderedDict([('Citation', Citation)])\n # convert to an xml element\n assert (len(output) > 0)\n doi_xml = dicttoxml.dicttoxml(output,attr_type=False)\n doi_xml = doi_xml.replace(b'<item>',b'').replace(b'</item>',b'').replace(b'<item/>',b'').replace(b'<item >',b'')\n return doi_xml\n\ndef runEVI(jobDir, code_srcDir, xsdDir, templateName):\n myXSDtree = etree.parse(xsdDir)\n xlsxName = jobDir + '/' + templateName\n extractID(xlsxName, myXSDtree, jobDir, code_srcDir)" }, { "alpha_fraction": 0.7282525300979614, "alphanum_fraction": 0.7355658411979675, "avg_line_length": 27.866666793823242, "blob_id": "6011e6cf5350d8833b8cee6cce6969d28422a1e7", "content_id": "62200940785a4a29179d520f1474110de442c224", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2598, "license_type": "no_license", "max_line_length": 206, "num_lines": 90, "path": 
"/README.md", "repo_name": "bingyinh/nanomine_xlsx2xml", "src_encoding": "UTF-8", "text": "# NanoMine xlsx to xml conversion tool (python 3 and python 2.7)\n\nBy Bingyin Hu\n\n### Python 2.7 version see 'python2.7' branch. For Python 3+, use the 'master' branch directly.\n\n### 1. System preparations\n\nRequired packages:\n\n- glob\n - Python default package\n\n- os\n - Python default package\n\n- sys\n - Python default package\n\n- copy\n - Python default package\n\n- time\n - Python default package\n\n- datetime\n - Python default package\n\n- xlrd\n - https://github.com/python-excel/xlrd\n - Read the input Excel files.\n\n- dicttoxml\n - https://pypi.org/project/dicttoxml/\n - Convert python dictionary to xml.\n\n- collections\n - Python default package\n\n- pickle\n - Python default package\n\n- csv\n - Python default package\n\n- lxml\n - etree function generates xml ElementTree and saves to the final xml file.\n - http://lxml.de/\n\n- MechanicalSoup\n - https://mechanicalsoup.readthedocs.io/en/stable/\n - Used in the DOI modules. A python3 alternative for mechanize.\n\n- Beautiful Soup 4\n - https://www.crummy.com/software/BeautifulSoup/bs4/doc/index.html\n \n- ast\n - Python default package\n - Used in the DOI modules.\n\nOpen the command or terminal and run\n```\npip install -r requirements.txt\n```\n### 2. How to run\n\n1. Add the downloaded directory to the sys.path. Note that the NanoMine xml schema is not provided in this repository. It can be downloaded at https://github.com/Duke-MatSci/nanomine-schema/tree/master/xml.\n\n2. Apply for an account at https://apps.crossref.org/requestaccount/ for the Crossref Query Services and save the email address in `downloaded_directory/account.txt` as required by the DOI query module.\n\n2. Assign values to:\n - `jobDir`: the directory of the Excel files and other files that are to be converted.\n - `code_srcDir`: the directory of the downloaded codes (current directory).\n - `xsdDir`: the directory of the xml schema to be validated against.\n - `templateName`: the file name of the Excel template.\n\n3. In python, run the `extract_verify_ID_callable.py` by\n```\nfrom extract_verify_ID_callable import runEVI\nrunEVI(jobDir, code_srcDir, xsdDir, templateName)\n```\n\n4. If there is no `error_message.txt` generated in the `jobDir` and an `ID.txt` is generated, the conversion can be kicked off by\n```\nfrom customized_compiler_callable import compiler\nlogName = compiler(jobDir, code_srcDir, xsdDir, templateName)\n```\nwhere `logName` is the directory for the schema validation error log.\n\n5. Check the error log for potential schema validation error. There should be an `/xml` folder generated in the `jobDir`, and the converted xml file will be inside.\n" } ]
3
gotomy/convert-document
https://github.com/gotomy/convert-document
0bb5b7c9d68cdd6e91e30c0dab8153d6d7539fd0
c48bb70a4553809d3c6dcace37eceaa355e16ba7
f64bc7c737105790a56ba9159f34384e4e113e2d
refs/heads/master
2022-11-11T03:35:56.915260
2020-06-26T10:37:14
2020-06-26T10:37:14
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5901442170143127, "alphanum_fraction": 0.593215823173523, "avg_line_length": 34.32075500488281, "blob_id": "00f6a64c2323058cb617fe566b63cbdf876b4bcf", "content_id": "f723a142743554cae05c95a6df6d6ab5d6442707", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7488, "license_type": "permissive", "max_line_length": 158, "num_lines": 212, "path": "/convert/converter.py", "repo_name": "gotomy/convert-document", "src_encoding": "UTF-8", "text": "import os\nimport uno\nimport time\nimport shutil\nimport logging\nimport subprocess\nfrom threading import Timer\nfrom tempfile import gettempdir\nfrom psutil import process_iter\nfrom com.sun.star.beans import PropertyValue\nfrom com.sun.star.lang import DisposedException, IllegalArgumentException\nfrom com.sun.star.connection import NoConnectException\nfrom com.sun.star.io import IOException\nfrom com.sun.star.script import CannotConvertException\nfrom com.sun.star.uno import RuntimeException\n\nDESKTOP = 'com.sun.star.frame.Desktop'\nRESOLVER = 'com.sun.star.bridge.UnoUrlResolver'\nCONVERT_DIR = os.path.join(gettempdir(), 'convert')\nOUT_FILE = os.path.join(CONVERT_DIR, '/tmp/output.pdf')\nINSTANCE_DIR = os.path.join(gettempdir(), 'soffice')\nENV = '\"-env:UserInstallation=file:///%s\"' % INSTANCE_DIR\nCONNECTION = 'socket,host=localhost,port=2002,tcpNoDelay=1;urp;StarOffice.ComponentContext' # noqa\nACCEPT = '--accept=\"%s\"' % CONNECTION\nCOMMAND = ['/usr/bin/soffice', ENV, '--nologo', '--headless', '--nocrashreport', '--nodefault', '--norestore', '--nolockcheck', '--invisible', ACCEPT] # noqa\nCOMMAND = ' '.join(COMMAND)\n\nlog = logging.getLogger(__name__)\n\n\ndef flush_path(path):\n if os.path.exists(path):\n shutil.rmtree(path, ignore_errors=True)\n os.makedirs(path, exist_ok=True)\n\n\nclass ConversionFailure(Exception):\n # A failure related to the content or structure of the document\n # given, which is expected to re-occur with consecutive attempts\n # to process the document.\n pass\n\n\nclass SystemFailure(Exception):\n # A failure of the service that lead to a failed conversion of\n # the document which may or may not re-occur when the document\n # is processed again.\n pass\n\n\nclass Converter(object):\n \"\"\"Launch a background instance of LibreOffice and convert documents\n to PDF using it's filters.\n \"\"\"\n PDF_FILTERS = (\n ('com.sun.star.text.GenericTextDocument', 'writer_pdf_Export'),\n ('com.sun.star.text.WebDocument', 'writer_web_pdf_Export'),\n ('com.sun.star.presentation.PresentationDocument', 'impress_pdf_Export'), # noqa\n ('com.sun.star.drawing.DrawingDocument', 'draw_pdf_Export'),\n )\n\n def __init__(self):\n self.alive = False\n self.start()\n\n def kill(self):\n log.info('Disposing of LibreOffice.')\n while True:\n self.alive = False\n # The Alfred Hitchcock approach to task management:\n # https://www.youtube.com/watch?v=0WtDmbr9xyY\n try:\n for proc in process_iter():\n name = proc.name()\n if 'soffice' not in name and 'oosplash' not in name:\n continue\n self.alive = True\n log.warn(\"Killing process: %r\", name)\n proc.kill()\n time.sleep(2)\n except Exception as exc:\n log.warn(\"Failed to kill: %r (%s)\", name, exc)\n self.alive = True\n if not self.alive:\n return\n\n def start(self):\n self.kill()\n flush_path(INSTANCE_DIR)\n flush_path(CONVERT_DIR)\n log.info('Starting LibreOffice: %s', COMMAND)\n subprocess.Popen(COMMAND, shell=True)\n time.sleep(3)\n self.alive = True\n\n def prepare(self):\n if not self.alive:\n 
self.start()\n        flush_path(CONVERT_DIR)\n\n    def terminate(self):\n        # This gets executed in its own thread after `timeout` seconds.\n        log.error('Document conversion timed out.')\n        self.kill()\n\n    def _svc_create(self, ctx, clazz):\n        return ctx.ServiceManager.createInstanceWithContext(clazz, ctx)\n\n    def connect(self):\n        for attempt in range(10):\n            try:\n                context = uno.getComponentContext()\n                resolver = self._svc_create(context, RESOLVER)\n                context = resolver.resolve('uno:%s' % CONNECTION)\n                return self._svc_create(context, DESKTOP)\n            except NoConnectException:\n                log.warning(\"No connection to LibreOffice (%s)\", attempt)\n                time.sleep(2)\n        raise SystemFailure(\"No connection to LibreOffice\")\n\n    def check_health(self, desktop):\n        if desktop is None:\n            raise SystemFailure('Cannot connect to LibreOffice.')\n        if desktop.getFrames().getCount() != 0:\n            raise SystemFailure('LibreOffice has stray frames.')\n        if desktop.getTasks() is not None:\n            raise SystemFailure('LibreOffice has stray tasks.')\n\n    def convert_file(self, file_name, timeout):\n        timer = Timer(timeout * 0.99, self.terminate)\n        timer.start()\n        try:\n            return self._timed_convert_file(file_name)\n        finally:\n            timer.cancel()\n\n    def _timed_convert_file(self, file_name):\n        desktop = self.connect()\n        self.check_health(desktop)\n        # log.debug(\"[%s] connected.\", file_name)\n        try:\n            url = uno.systemPathToFileUrl(file_name)\n            props = self.property_tuple({\n                'Hidden': True,\n                'MacroExecutionMode': 0,\n                'ReadOnly': True,\n                'Overwrite': True,\n                'OpenNewView': True,\n                'StartPresentation': False,\n                'RepairPackage': False,\n            })\n            doc = desktop.loadComponentFromURL(url, '_blank', 0, props)\n        except IllegalArgumentException:\n            raise ConversionFailure('Cannot open document.')\n        except DisposedException:\n            raise SystemFailure('Bridge is disposed.')\n\n        if doc is None:\n            raise ConversionFailure('Cannot open document.')\n\n        # log.debug(\"[%s] opened.\", file_name)\n        try:\n            try:\n                doc.ShowChanges = False\n            except AttributeError:\n                pass\n\n            try:\n                doc.refresh()\n            except AttributeError:\n                pass\n\n            output_url = uno.systemPathToFileUrl(OUT_FILE)\n            prop = self.get_output_properties(doc)\n            # log.debug(\"[%s] refreshed.\", file_name)\n            doc.storeToURL(output_url, prop)\n            # log.debug(\"[%s] exported.\", file_name)\n            doc.dispose()\n            doc.close(True)\n            del doc\n            # log.debug(\"[%s] closed.\", file_name)\n        except (DisposedException, IOException,\n                CannotConvertException, RuntimeException):\n            raise ConversionFailure('Cannot generate PDF.')\n\n        # Check existence before stat, otherwise a failed export raises\n        # FileNotFoundError instead of ConversionFailure.\n        if not os.path.exists(OUT_FILE) or os.stat(OUT_FILE).st_size == 0:\n            raise ConversionFailure('Cannot generate PDF.')\n        return OUT_FILE\n\n    def get_output_properties(self, doc):\n        # https://github.com/unoconv/unoconv/blob/master/doc/filters.adoc\n        filter_name = 'writer_pdf_Export'\n        for (service, pdf) in self.PDF_FILTERS:\n            if doc.supportsService(service):\n                filter_name = pdf\n        return self.property_tuple({\n            'FilterName': filter_name,\n            'Overwrite': True,\n            'ReduceImageResolution': True,\n            'MaxImageResolution': 300,\n            'SelectPdfVersion': 1,\n        })\n\n    def property_tuple(self, propDict):\n        properties = []\n        for k, v in propDict.items():\n            prop = PropertyValue()\n            prop.Name = k\n            prop.Value = v\n            properties.append(prop)\n        return tuple(properties)\n
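\n\nif __name__ == '__main__':\n    # Illustrative driver only, not part of the original service: convert one\n    # file passed on the command line, assuming LibreOffice is installed and\n    # configured as above.\n    import sys\n    conv = Converter()\n    print(conv.convert_file(sys.argv[1], timeout=300))\n" } ]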
1
gbakie/similar_sentences
https://github.com/gbakie/similar_sentences
da21745284697f3fbf478a5bb65b1d858fcda47a
402d73ca6d260bb0be0e3fc9f16c1a5333287859
04c6c72d08842085ad93d071e14e4cac02593491
refs/heads/master
2016-08-08T01:56:15.615860
2015-03-14T22:26:01
2015-03-14T22:26:01
32,234,936
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.49750226736068726, "alphanum_fraction": 0.5256584882736206, "avg_line_length": 25.059171676635742, "blob_id": "f947729e2611f3308213f5a822a2c0341affc093", "content_id": "a785f26ffa0de29af32ecfa3362698a4b504cf0b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4404, "license_type": "no_license", "max_line_length": 90, "num_lines": 169, "path": "/similar_sentences.py", "repo_name": "gbakie/similar_sentences", "src_encoding": "UTF-8", "text": "__author__ = 'gbakiewicz'\n\nfrom collections import defaultdict\nfrom itertools import combinations\nimport crcmod\nfrom random import randint\nfrom sys import argv\nfrom sys import exit\nimport numpy as np\n\n# CONSTANTS\n#div = 3455275207\ndiv = 15485867\ndiv2 = 24310399\n#crc = 0x104c11db7\ncrc = 0x11EDC6F41\nnhashes = 150\nbands = 30\nrows = nhashes/bands\n\ndef init_hashes(n):\n hashes_attr = []\n for i in xrange(n):\n initv = randint(1, np.iinfo(np.uint32).max)\n xorv = randint(1, np.iinfo(np.uint32).max)\n hashes_attr.append(crcmod.mkCrcFun(crc, initCrc=initv, xorOut=xorv))\n\n return hashes_attr\n\n\ndef compute_word(word, hashes_attr):\n hashes = np.empty(nhashes, dtype=np.uint32)\n i = 0\n for attr in hashes_attr:\n hashes[i] = np.uint32(attr(word))\n #print word,hashes[i]\n i += 1\n\n return hashes\n\n# GIVEN: list of hashes for each word, list of sentences (bag of words with ids)\n# RETURNS: a matrix with minhash rows for each sentence\ndef minhashing(filename):\n fh = open(filename)\n tab = defaultdict(list)\n hashes_attr = init_hashes(nhashes)\n\n # read the number of sentences at the beginning of the file\n n = int(fh.readline().strip())\n # M has n columns and hlen rows\n M = np.full( (nhashes,n), np.iinfo(np.uint32).max, dtype=np.uint32 )\n\n print \"Starting minhashing: \" + str(n) + \" sentences\"\n\n c = 0\n for line in fh:\n tokens = line.strip().split(' ')[1:]\n for token in tokens:\n if token in tab:\n hashes = tab[token]\n else:\n hashes = compute_word(token, hashes_attr)\n tab[token] = hashes\n\n for i in xrange(nhashes):\n if hashes[i] < M[i,c]:\n M[i,c] = hashes[i]\n\n c += 1\n\n del tab\n\n return M\n\ndef init_lsh_hashes(n):\n hashes = []\n for i in xrange(n):\n hashes.append(randint(1,2**10))\n\n return hashes\n\n\n# GIVEN: matrix M with the minhashes for each sentence and number of bands\n# RETURNS: buckets with the sentence bands hashed\ndef lshashing(M):\n (n, m) = M.shape\n candidates = [np.empty(0, dtype=np.uint32)] * m\n #candidates = set()\n #candidates = {}\n h1 = init_lsh_hashes(rows)\n h2 = init_lsh_hashes(rows)\n\n print \"Bands: \" + str(bands)\n print \"Rows: \" + str(rows)\n\n for b in xrange(bands):\n lshash = {}\n #lshash = np.full(div, np.iinfo(np.uint32).max, dtype=np.uint32)\n for i in xrange(m):\n v1 = 0\n v2 = 0\n idx = b * rows\n for row in xrange(rows):\n v1 += (M[idx,i] * h1[row])\n v2 += (M[idx,i] * h2[row])\n idx += 1\n\n h1_val = np.uint32(v1 % div)\n h2_val = np.uint32(v2 % div2)\n\n if h1_val not in lshash:\n lshash[h1_val] = np.array((i,h2_val), ndmin=2, dtype=np.uint32)\n #print lshash[h1_val]\n else:\n lshash[h1_val] = np.append(lshash[h1_val], [(i, h2_val)], axis=0)\n #print lshash[h1_val]\n\n for v in lshash.itervalues():\n if v.shape[0] > 1:\n bins = defaultdict(list)\n for r1 in v:\n bins[r1[1]].append(r1[0])\n\n for vs in bins.itervalues():\n if len(vs) > 1:\n vs.sort()\n idx = vs[0]\n #candidates.update(combinations(vs,2))\n #for comb in combinations(vs,2):\n #candidates[comb] = 1\n #for a in vs:\n # 
print \"Sent \" + str(a)\n # print M[:,a]\n\n candidates[idx] = np.unique(np.concatenate((candidates[idx], vs)))\n\n del bins\n del lshash\n\n return candidates\n\n\ndef print_similar(lshash):\n for l in lshash:\n for v in l.values():\n if len(v) > 1:\n print \"similar sentences: \" + str(v)\n\ndef print_candidates(cand):\n print len(cand)\n for c in cand.iterkeys():\n print c\n\nif __name__ == \"__main__\":\n if len(argv) < 2:\n print \"Error: expecting input filename\"\n exit(0)\n\n M = minhashing(argv[1])\n #print M\n\n cand = lshashing(M)\n\n del M\n\n for c in cand:\n if len(c) > 1:\n print c\n #print_candidates(cand)\n" } ]
1
pirr/alt_couch
https://github.com/pirr/alt_couch
42e81c0f424d8f943fc50b2d002456070d966899
c690771f729c38ac39041d4fea981b44623a0748
a25f0f335e336fb47eb67b9ec05a4d480fbbbc49
refs/heads/master
2021-06-27T21:56:01.657168
2017-09-13T08:27:46
2017-09-13T08:27:46
103,374,847
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.468490868806839, "alphanum_fraction": 0.47097843885421753, "avg_line_length": 28.02409553527832, "blob_id": "025d55c00b4655d2e6ff5307cde657225e477188", "content_id": "5098c81a0cc9c8b2e3a0979cacc47b01f7d979de", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2412, "license_type": "no_license", "max_line_length": 75, "num_lines": 83, "path": "/_libs/alt/dict_.py", "repo_name": "pirr/alt_couch", "src_encoding": "UTF-8", "text": "import yaml, json, copy\n\nclass dict_(dict):\n \n def __init__(self, *args, **kwargs):\n super(dict_, self).__init__(*args, **kwargs)\n for name,item in self.items():\n if isinstance(item,dict):\n self[name] = dict_(item)\n if isinstance(item,list):\n for ilist,listitem in enumerate(item):\n if isinstance(listitem,dict):\n item[ilist]=dict_(listitem)\n self.__dict__ = self\n\n def set_(self, path, value):\n\n pos = path.find('.')\n if pos==-1:\n self[path] = value\n else:\n if path[:pos] not in self:\n self[path[:pos]] = dict_()\n self[path[:pos]].set_(path[pos+1:], value)\n\n def get_(self, path, default=None):\n\n pos = path.find('.')\n if pos==-1:\n if path in self:\n return self[path]\n else:\n return default\n else:\n if path[:pos] not in self:\n return default\n else:\n return self[path[:pos]].get_(path[pos+1:], default=default)\n\n def del_(self, path):\n\n pos = path.find('.')\n if pos==-1:\n if path in self:\n del self[path]\n else:\n if path[:pos] in self:\n self[path[:pos]].del_(path[pos+1:])\n if not self[path[:pos]]:\n del self[path[:pos]]\n\n def remove_empty(self):\n\n remove = []\n for name, value in self.items():\n if isinstance(value, (str, dict_, dict, list)) and not value:\n remove.append(name)\n continue\n if isinstance(value,dict):\n self[name].remove_empty()\n elif isinstance(value,list):\n for ilist, listvalue in enumerate(value):\n if isinstance(listvalue,dict):\n value[ilist].remove_empty()\n for name in remove:\n del self[name]\n\n def equal(self, doc, ignore=[]):\n\n self_doc = copy.deepcopy(self)\n for field in ignore:\n if field in self_doc:\n del self_doc[field]\n if field in doc:\n del doc[field]\n\n return json.dumps(self_doc) == json.dumps(doc)\n\n def to_yaml(self):\n\n self_doc = copy.deepcopy(self)\n\n return yaml.dump(self_doc)\n\n\n\n" }, { "alpha_fraction": 0.613043487071991, "alphanum_fraction": 0.613043487071991, "avg_line_length": 26.058822631835938, "blob_id": "79abf59f59c24130b794f5768ca756f7ed475244", "content_id": "ccddda5d194b977d839140af1e5b161dd40d79b6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 460, "license_type": "no_license", "max_line_length": 66, "num_lines": 17, "path": "/_libs/vb/www.py", "repo_name": "pirr/alt_couch", "src_encoding": "UTF-8", "text": "import cherrypy, urllib.parse\nfrom alt.dict_ import dict_\n\ndef form_params(kwargs, default=None):\n\n params = dict_()\n for key, value in kwargs.items():\n if value!='':\n params[key] = value\n\n if (cherrypy.request.method=='POST' and params) or not kwargs:\n if not kwargs:\n params = default\n cgi = urllib.parse.urlencode(params)\n raise cherrypy.HTTPRedirect(cherrypy.url() + '?' 
+ cgi)\n\n    return params\n" }, { "alpha_fraction": 0.48801475763320923, "alphanum_fraction": 0.5082975029945374, "avg_line_length": 21.27397346496582, "blob_id": "81f17b011817a698dae7b225b70794099f450d01", "content_id": "48d1dd0823851a5e9ecf3ac232d460eaa1bef702", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1627, "license_type": "no_license", "max_line_length": 69, "num_lines": 73, "path": "/_libs/alt/time.py", "repo_name": "pirr/alt_couch", "src_encoding": "UTF-8", "text": "from __future__ import absolute_import\nfrom datetime import datetime, timedelta\n\ndef today_utc_iso():\n    \n    return datetime.utcnow().strftime('%Y-%m-%d')\n    \ndef today_utc():\n    \n    return datetime.utcnow().date()\n    \ndef today_iso():\n    \n    return datetime.now().strftime('%Y-%m-%d')\n    \ndef iso(dt):\n    \n    if dt.__class__.__name__=='date':\n        return dt.strftime('%Y-%m-%d')\n    else:\n        return dt.strftime('%Y-%m-%d %H:%M:%S')\n\ndef from_iso(iso):\n    \n    if len(iso)==19:\n        return datetime.strptime(iso,'%Y-%m-%d %H:%M:%S')\n    \n    if len(iso)==10:\n        return datetime.strptime(iso,'%Y-%m-%d').date()\n\ndef now_iso():\n    \n    return datetime.now().strftime('%Y-%m-%d %H:%M:%S') \n    \ndef now_utc_iso():\n    \n    return datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S') \n\ndef mtime():\n    \n    return datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S') \n\ndef www_format_mtime(time_iso, error_time_diff=None, with_date=True):\n    \n    dt = from_iso(time_iso)\n    \n    dt = dt + timedelta(hours=8)\n    \n    st = datetime.strftime(dt,'%H:%M')\n\n    diff = datetime.now()-dt\n    diff = int(diff.total_seconds())\n    h = diff // 3600\n    m = (diff-h*3600) // 60\n    sd = '({0:02d}:{1:02d})'.format(h,m)\n    \n    if diff > 3600*24:\n        if with_date:\n            res = datetime.strftime(dt,'%d.%m %H:%M')\n        else:\n            res = st\n    elif diff > 60*30:\n        res = st + ' ' + sd\n    else:\n        res = sd\n    \n    if error_time_diff:\n        if diff/60 > error_time_diff:\n            return res, 'ERROR'\n        else:\n            return res, ''\n    else:\n        return res\n\n" }, { "alpha_fraction": 0.4943203628063202, "alphanum_fraction": 0.4972558915615082, "avg_line_length": 30.465864181518555, "blob_id": "e2fb42fc3963037a67d8f960596b8f159084fe5b", "content_id": "6d73fe19e851d31d6c6b376a2576fe62bbca2629", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8127, "license_type": "no_license", "max_line_length": 113, "num_lines": 249, "path": "/_libs/vb/couch.py", "repo_name": "pirr/alt_couch", "src_encoding": "UTF-8", "text": "import couchdb, glob, os, socket, cherrypy\nfrom functools import reduce\nfrom alt.dict_ import dict_\nimport alt.time, vb.cfg\n\n\ndef create(db_name):\n    server = couchdb.Server()\n    server.resource.credentials = ('admin', 'admin')\n    if db_name not in server:\n        server.create(db_name)\n\n\nclass Q:\n    '''\n    Main object for composing a query\n    '''\n    OR = '$or'\n    AND = '$and'\n\n    def __init__(self, **kwarg):\n        self.field_cond = list(kwarg.keys())[0]\n        self.val = list(kwarg.values())[0]\n\n    @property\n    def P(self):\n        field_cond_list = self.field_cond.split('__')\n        field = field_cond_list[:-1]\n        cond = '$' + field_cond_list[-1]\n        parsed_field_cond = reduce(lambda x, y: {y: x},\n                                   reversed(field + [cond, self.val]))\n        return parsed_field_cond\n\n    def _combine(self, other, cond):\n        query = dict()\n        query[cond] = [self.P, other.P]\n        return _FQ(query)\n\n    def __or__(self, other):\n        return self._combine(other, self.OR)\n\n    def __and__(self, other):\n        return self._combine(other, self.AND)\n\n\nclass _FQ(Q):\n    '''\n    Helper object for 
composing queries\n    '''\n    def __init__(self, val):\n        self.val = val\n\n    @property\n    def P(self):\n        return self.val\n\n\nclass DB:\n    def __init__(self, name, admin=False, user_id=None):\n\n        self.name = name\n        self.host = socket.gethostname()\n        self.server = couchdb.Server()\n        self.user_id = user_id\n        if admin:\n            self.server.resource.credentials = ('admin', 'admin')\n        self.db = self.server[name]\n\n    def __contains__(self, doc_id):\n\n        return doc_id in self.db\n\n    def __getitem__(self, doc_id):\n\n        return dict_(self.db[doc_id])\n\n    def __setitem__(self, doc_id, doc):\n\n        if 'type' not in doc:\n            raise Exception('No type')\n        doc.mhost = self.host\n        doc.mtime = alt.time.mtime()\n        self.db[doc_id] = doc\n\n    def __delitem__(self, key):\n\n        del self.db[key]\n\n    def check(self, doc, user_id=None):\n\n        if 'type' not in doc:\n            raise Exception('No type in doc')\n        if user_id is None:\n            if self.user_id is not None:\n                doc.user_id = self.user_id\n            else:\n                doc.user_id = cherrypy.session['user_id']\n        else:\n            doc.user_id = user_id\n        if 'user_id' not in doc:\n            raise Exception('No user_id in doc')\n        doc.mhost = self.host\n        doc.mtime = alt.time.mtime()\n\n    def new_id(self, doc, user_id=None):\n\n        self.check(doc, user_id=user_id)\n        doc_id, _ = self.db.save(doc)\n        print('Saved', doc_id)\n\n        return doc_id\n\n    def new(self, doc_id, doc, user_id=None):\n\n        self.check(doc, user_id=user_id)\n        self.db[doc_id] = doc\n        print('Saved', doc_id)\n\n    def save(self, doc_id, doc, user_id=None, exception=True):\n\n        self.check(doc, user_id=user_id)\n        doc.remove_empty()\n        try:\n            if doc_id in self.db:\n                if doc.equal(self.db[doc_id], ignore=['host', 'mtime', 'user_id']):\n                    if exception:\n                        raise Exception('Nothing changed')\n                    else:\n                        return 'SAME'\n            self.db[doc_id] = doc\n        except couchdb.ResourceConflict:\n            if exception:\n                raise Exception('Version conflict')\n            else:\n                return 'CONFLICT'\n\n        return ''\n\n    def view(self, name, **options):\n\n        return self.db.view(name + '/' + name, **options)\n\n    def force_save(self, doc_id, doc):\n\n        self.check(doc)\n        doc.remove_empty()\n        for itry in range(10):\n            try:\n                if doc_id in self.db:\n                    if doc.equal(self.db[doc_id], ignore=['host', 'mtime', 'user_id']):\n                        return 'SAME'\n                    rev = self.db[doc_id]['_rev']\n                    doc._rev = rev\n                self.db[doc_id] = doc\n            except couchdb.ResourceConflict:\n                continue\n\n    def install_views(self, prefix):\n\n        server = couchdb.Server()\n        server.resource.credentials = ('admin', 'admin')\n        db = server[self.name]\n        prefix = prefix + '-'\n\n        views_ids_to_delete = []\n        for row in db.view('_all_docs')[\"_design/\":\"_design0\"]:\n            if not row.id.split('/')[1].startswith(prefix):\n                continue\n            views_ids_to_delete.append(row.id)\n\n        name = 'heap_db' if self.name.endswith('_heap') else 'db'\n        views_dir = vb.cfg.main_dir() + '_views/' + name + '/'\n        if not os.path.exists(views_dir):\n            print(views_dir)\n            print('Skip %s views' % name)\n            return\n\n        view_files = glob.glob(views_dir + '*.py')\n        for view_file in view_files:\n            view_name = os.path.basename(view_file[:-3])\n            if view_name == 'install':\n                continue\n            view_name = prefix + view_name\n            view_id = '_design/' + view_name\n            view_doc = {'language': 'python', 'views': {view_name: {}}}\n            with open(view_file) as f:\n                text = f.read()\n            debug_pos = text.find('# debug')\n            if debug_pos != -1:\n                text = text[:debug_pos]\n            reduce_pos = text.find('def reduce(')\n            if reduce_pos == -1:\n                map_text = text\n                reduce_text = None\n            else:\n                map_text = text[:reduce_pos]\n                reduce_text = text[reduce_pos:]\n            view_doc['views'][view_name]['map'] = map_text\n            if 
reduce_text:\n                view_doc['views'][view_name]['reduce'] = reduce_text\n\n            if view_id in db:\n                old_view_doc = dict_(db[view_id])\n                del old_view_doc._rev\n                del old_view_doc._id\n                if old_view_doc == view_doc:\n                    views_ids_to_delete.remove(view_id)\n                    continue\n                else:\n                    print('Deleting', view_id)\n                    del db[view_id]\n\n            print('Saving', view_id)\n            db[view_id] = view_doc\n            if view_id in views_ids_to_delete:\n                views_ids_to_delete.remove(view_id)\n\n        for design_id in views_ids_to_delete:\n            print('Deleting', design_id)\n            del db[design_id]\n\n    def find(self, *args, **kwargs):\n        '''\n        Search documents in a CouchDB 2 database via /db/_find - http://docs.couchdb.org/en/2.0.0/api/database/find.html\n        :param args: Q objects holding the query conditions\n            Example query:\n            find(Q(reestr_name__eq=u'Test1') | Q(reestr_name__eq=u'Test2')\n                 & Q(pi__normal__in=[u'gold', u'silver']))\n            __eq, __in - selectors (see the CouchDB 2.0 selectors)\n            Nested fields are passed with a double underscore ('__'):\n            pi__normal = doc['pi']['normal']\n        :param kwargs:\n            options:\n            limit - number of documents\n            a simple query:\n            find(reestr_name__eq=u'Test1')\n        :return: the documents matching the query\n        '''\n        limit = kwargs.get('limit', 25)\n        if args:\n            query = args[0].P\n        elif kwargs:\n            query = Q(**kwargs).P\n        else:\n            raise Exception('Need query, for example: name__eq = \"Name\", or use Q for complex conditions')\n        _, _, data = self.db.resource.post_json('_find', body={\"selector\": query, \"limit\": limit}, headers={\n            'Content-Type': 'application/json'}\n                                                )\n        return data['docs']\n" }, { "alpha_fraction": 0.7915058135986328, "alphanum_fraction": 0.7953668236732483, "avg_line_length": 12, "blob_id": "5d5929e7e49da08a7f95e6ed154000cb0ef204ba", "content_id": "1041efd1592c0c37727f3b182deab8523cf09b25", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 259, "license_type": "no_license", "max_line_length": 29, "num_lines": 20, "path": "/header.py", "repo_name": "pirr/alt_couch", "src_encoding": "UTF-8", "text": "import os\nimport re\nimport sys\nimport cherrypy\nimport jinja2\nimport hashlib\nimport subprocess\nimport couchdb\nimport json\nimport copy\n\nimport alt.cfg\nfrom alt.dict_ import dict_\n\nimport vb.couch\n\nimport vb.script\n# script = vb.script.Script()\n\n# import install" }, { "alpha_fraction": 0.4954954981803894, "alphanum_fraction": 0.5, "avg_line_length": 19.18181800842285, "blob_id": "51a348d82946006a15c73fb6a5cc1ad56085d47b", "content_id": "11619d28ae4f9db075e655fb90b17312f5ab754c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 222, "license_type": "no_license", "max_line_length": 55, "num_lines": 11, "path": "/_libs/vb/cfg.py", "repo_name": "pirr/alt_couch", "src_encoding": "UTF-8", "text": "import os\n\ndef main_dir():\n\n    dir_ = os.path.dirname(__file__).replace('\\\\', '/')\n    pos = dir_.rfind('/main/')\n    if pos == -1:\n        raise Exception('No main dir')\n    dir_ = dir_[:pos] + '/main/'\n\n    return dir_\n" }, { "alpha_fraction": 0.53899085521698, "alphanum_fraction": 0.5412843823432922, "avg_line_length": 25.8125, "blob_id": "5a372ebf42bb556405d2a52a0ccad27d6167193c", "content_id": "58362b61d9d5b0bc824b9ac3985275c5b231cbca", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 436, "license_type": "no_license", "max_line_length": 62, "num_lines": 16, "path": "/_libs/alt/cfg.py", "repo_name": "pirr/alt_couch", "src_encoding": "UTF-8", "text": "import yaml, 
os\nfrom alt.dict_ import dict_\n\ndef read(cfg_file=None):\n\n    if not cfg_file:\n        this_dir = os.path.dirname(__file__).replace('\\\\','/')\n        main_pos = this_dir.rfind('/main/')\n        if main_pos==-1:\n            raise Exception('Can not find main dir')\n        cfg_file = this_dir[:main_pos] + '/main/_cfg/main.cfg'\n\n    with open(cfg_file) as f:\n        cfg = yaml.safe_load(f)\n    \n    return dict_(cfg) " }, { "alpha_fraction": 0.6327372789382935, "alphanum_fraction": 0.6492434740066528, "avg_line_length": 33.66666793823242, "blob_id": "665236448a29c38acfab09e43c03304f051ee9bf", "content_id": "cfb24a51c3ed2c7c3e6bc834412573c7806829c9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 912, "license_type": "no_license", "max_line_length": 109, "num_lines": 21, "path": "/test.py", "repo_name": "pirr/alt_couch", "src_encoding": "UTF-8", "text": "import alt_path\nfrom header import *\nfrom vb.couch import Q\n\n\ncfg = alt.cfg.read()\nroot_url = 'http://' + cfg.root_url\ndb = vb.couch.DB(cfg.db)\nheap_db = vb.couch.DB(cfg.db + '_heap')\n\ndata1 = db.find(reestr_name__eq=u'Иркутск', limit=50)\ndata2 = db.find(Q(reestr_name__eq=u'Иркутск')\n                & (Q(полезное_ископаемое__название_нормализованное__in=['золото россыпное', 'золото рудное'])\n                   | Q(полезное_ископаемое__название_нормализованное__eq=\"бром\")), limit=40)\nprint('data1 length', len(data1))\nprint('data2 length', len(data2))\nprint('data2 docs:')\nfor doc in data2:\n    print(doc['reestr_name'],\n          doc['название_объекта'],\n          doc['полезное_ископаемое']['название_нормализованное'])" }, { "alpha_fraction": 0.6279250979423523, "alphanum_fraction": 0.6287051439285278, "avg_line_length": 24.117647171020508, "blob_id": "efb507ceac3847c5111a701e3f7416915a18a02b", "content_id": "bacd5a133e7066a178a0f5db0ad867014a8ad3cf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1341, "license_type": "no_license", "max_line_length": 91, "num_lines": 51, "path": "/_libs/vb/script.py", "repo_name": "pirr/alt_couch", "src_encoding": "UTF-8", "text": "import alt_path\nfrom alt.dict_ import dict_\nimport alt.time\nimport psutil, subprocess, sys, os\n\ndef find_by_name(root, script_name):\n\n    view = root.db.view('www-scripts_by_name')\n    rows = view[script_name].rows\n    if not rows:\n        return dict_(error='Script does not exist')\n    script_doc = root.db[rows[0].id]\n\n    return script_doc\n\n# TODO: rewrite this more carefully, collisions are possible\ndef run(root, script_name, params=dict_()):\n\n    script_doc = find_by_name(root, script_name)\n    script_id = script_doc._id\n\n    run_doc = dict_(type='run', script_id=script_id, params=params, ctime=alt.time.mtime(),\n                    status='WAIT')\n    run_id = root.heap_db.new_id(run_doc)\n\n    return run_id\n\ndef os_pid_running(pid):\n\n    if not psutil.pid_exists(pid):\n        return False\n\n    proc = psutil.Process(pid)\n    if proc.is_running():\n        if proc.status() == psutil.STATUS_ZOMBIE:\n            return False\n\n    return proc.is_running()\n\ndef report(root, run_id):\n\n    report_id = run_id+'_report'\n    if report_id in root.heap_db:\n        report_doc = root.heap_db[report_id]\n        return report_doc\n\n    run_doc = root.heap_db[run_id]\n    if os_pid_running(run_doc.os_pid):\n        return dict_(status='RUN')\n    else:\n        return dict_(status='FATAL')\n\n" } ]
9
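The `vb/couch.py` file in the record above composes CouchDB Mango selectors from `Q` objects joined with `|` and `&`. Below is a self-contained sketch of just that selector-building logic (the `Q`/`_FQ` pair with the database plumbing stripped out), so the dict it produces can be inspected without a CouchDB server; the sample field names are illustrative assumptions:

```python
from functools import reduce

class Q:
    """Builds a nested Mango selector from field__cond keyword syntax."""
    OR, AND = '$or', '$and'

    def __init__(self, **kwarg):
        self.field_cond = list(kwarg.keys())[0]
        self.val = list(kwarg.values())[0]

    @property
    def P(self):
        # 'pi__normal__in' -> {'pi': {'normal': {'$in': value}}}
        parts = self.field_cond.split('__')
        field, cond = parts[:-1], '$' + parts[-1]
        return reduce(lambda acc, key: {key: acc},
                      reversed(field + [cond, self.val]))

    def _combine(self, other, cond):
        return _FQ({cond: [self.P, other.P]})

    def __or__(self, other):
        return self._combine(other, self.OR)

    def __and__(self, other):
        return self._combine(other, self.AND)

class _FQ(Q):
    """Wraps an already-built selector so combined queries keep composing."""
    def __init__(self, val):
        self.val = val

    @property
    def P(self):
        return self.val

if __name__ == '__main__':
    q = Q(reestr_name__eq='Test1') | Q(pi__normal__in=['gold', 'silver'])
    print(q.P)
    # {'$or': [{'reestr_name': {'$eq': 'Test1'}},
    #          {'pi': {'normal': {'$in': ['gold', 'silver']}}}]}
```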
yalmeidarj/blackjack
https://github.com/yalmeidarj/blackjack
0bf1aec1093d712538a75136f03a78a7644a6992
36d4bb07babb47198723e8fae1b59989497111da
041b538944fcd6ca5637d27a87e5e249c28d97f0
refs/heads/master
2023-08-05T16:51:08.593981
2021-10-11T11:52:46
2021-10-11T11:52:46
408,614,031
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5175005793571472, "alphanum_fraction": 0.5461592674255371, "avg_line_length": 33.77049255371094, "blob_id": "31925862ddb8607725b74cd6bfc06793ad2943f5", "content_id": "996d8883373b3f1d66c58535c0e7a5b5a3bcbbaa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4257, "license_type": "no_license", "max_line_length": 97, "num_lines": 122, "path": "/main.py", "repo_name": "yalmeidarj/blackjack", "src_encoding": "UTF-8", "text": "############### Our Blackjack House Rules #####################\n\n## The deck is unlimited in size. \n## There are no jokers. \n## The Jack/Queen/King all count as 10.\n## The the Ace can count as 11 or 1.\n## Use the following list as the deck of cards:\n## cards = [11, 2, 3, 4, 5, 6, 7, 8, 9, 10, 10, 10, 10]\n## The cards in the list have equal probability of being drawn.\n## Cards are not removed from the deck as they are drawn.\n## The computer is the dealer.\n\n##########################################\nimport random\nuser_score_list = []\ndealer_score_list = []\n\n\ndef get_card(player):\n \"\"\"Return a random card value\"\"\"\n cards = [11, 2, 3, 4, 5, 6, 7, 8, 9, 10, 10, 10, 10]\n card = random.choice(cards)\n player.append(card)\n return card\n\n\ndef check_for_11(player):\n '''check for the appearance of card\"11\" and, if total score > 21, replaces value to one.'''\n if 11 in player:\n if sum(player) > 21:\n for item in player:\n if item == 11:\n card_index = player.index(item)\n player[card_index] = 1\n return player\n\n\ndef check_winner(n1, n2):\n '''compare scores and and checks game logic to define winner'''\n user_score = sum(n1)\n dealer_score = sum(n2)\n if user_score > 21 and dealer_score < 21:\n winner = (\"dealer\")\n return winner\n elif user_score > 21 and dealer_score > 21:\n winner = (\"tie\")\n return winner\n elif user_score < 21 and dealer_score < 21:\n if user_score > dealer_score:\n winner = (\"user\")\n return winner\n elif user_score < dealer_score:\n winner = (\"dealer\")\n return winner\n elif user_score == dealer_score:\n winner = (\"tie\")\n return winner\n elif user_score < 21 and dealer_score > 21:\n winner = (\"user\")\n return winner\n elif user_score == 21 and dealer_score < 21:\n winner = (\"user\")\n return winner\n elif user_score == 21 and dealer_score == 21:\n winner = (\"tie\")\n return winner\n elif user_score == 21 and dealer_score > 21:\n winner = (\"user\")\n return winner\n elif user_score > 21 and dealer_score == 21:\n winner = (\"dealer\")\n return winner\n elif user_score < 21 and dealer_score == 21:\n winner = (\"dealer\")\n return winner\n\n\ndef keep_playing():\n while sum(user_score_list) < 21 and sum(dealer_score_list) < 21:\n print(\n f'Your total score is {sum(user_score_list)}, your cards are:{user_score_list}')\n print(f\"Dealer's first card is {dealer_first_card}\")\n should_continue = input(\n \"Type 'y' to get another card, type 'n' to pass: \")\n if should_continue == 'y':\n get_card(user_score_list)\n if sum(dealer_score_list) <= 17:\n get_card(dealer_score_list)\n else:\n if sum(dealer_score_list) <= 17:\n get_card(dealer_score_list)\n check_for_11(user_score_list)\n check_for_11(dealer_score_list)\n if sum(user_score_list) < 21 and sum(dealer_score_list) < 21:\n keep_playing()\n # print( f'The winner is {check_winner(user_score_list, dealer_score_list)}')\n else:\n if should_continue == 'n':\n check_for_11(user_score_list)\n check_for_11(dealer_score_list)\n print(\n f'The winner is {check_winner(user_score_list, 
dealer_score_list)}')\n                    break\n        else:\n            check_for_11(user_score_list)\n            check_for_11(dealer_score_list)\n            if sum(user_score_list) < 21 and sum(dealer_score_list) < 21:\n                keep_playing()\n            else:\n                return print(f'The winner is {check_winner(user_score_list, dealer_score_list)}')\n\n\nstart_game = input(\n    \"Do you want to play a game of Blackjack? Type 'y' or 'n': \")\nif start_game == 'y':\n    get_card(user_score_list)\n    dealer_first_card = get_card(dealer_score_list)\n    get_card(user_score_list)\n    keep_playing()\n\nprint(\n    f'User score list = {user_score_list}\\nDealer score list = {dealer_score_list}')\n" } ]
1
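The blackjack record above handles aces in `check_for_11` by flipping 11s to 1s once a hand busts. A standalone sketch of the same rule, written here as a variant that demotes one ace at a time rather than all at once (the original demotes all at once); the function name is illustrative:

```python
def soften_aces(hand):
    """Turn 11-valued aces into 1s, one at a time, while the hand busts."""
    hand = list(hand)
    while sum(hand) > 21 and 11 in hand:
        hand[hand.index(11)] = 1  # each demotion removes exactly ten points
    return hand

if __name__ == "__main__":
    print(soften_aces([11, 11, 10]))  # [1, 1, 10] -> 12, two aces softened
    print(soften_aces([11, 5]))       # [11, 5]   -> 16, no change needed
```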
DragosNicuDev/myhub
https://github.com/DragosNicuDev/myhub
51ad48bf2bc8694ca8fb2e7dbc5c934191377f3e
00f915b92b5cb7afd571811c5e020cfe9dcbb3ea
ce51ded676c92eaf490b87aa1d278a7a17a9bb9a
refs/heads/master
2023-01-10T16:03:38.800145
2018-02-10T13:51:57
2018-02-10T13:51:57
119,671,132
1
0
MIT
2018-01-31T10:16:43
2018-02-11T19:07:18
2022-12-26T20:36:58
CSS
[ { "alpha_fraction": 0.6789743304252625, "alphanum_fraction": 0.6789743304252625, "avg_line_length": 29.46875, "blob_id": "d64361692fe6156ce2c0185d281fb344bf037764", "content_id": "233b106c594ea3e2c8a536ae08e14255fb315142", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 975, "license_type": "permissive", "max_line_length": 81, "num_lines": 32, "path": "/events/tests.py", "repo_name": "DragosNicuDev/myhub", "src_encoding": "UTF-8", "text": "from django.test import TestCase\n\nfrom django.core.files.uploadedfile import SimpleUploadedFile\nfrom django.utils import timezone\n\nfrom .models import Event\nfrom myhub_events.users.models import User\n\n\n# Create your tests here.\nclass EventModelTest(TestCase):\n\n def setUp(self):\n Event.objects.create(\n event_title='My entry title',\n event_slug='my-entry-title',\n event_date_created=timezone.now(),\n event_user=User.objects.create(username='dragos'))\n\n def test_string_representation(self):\n event = Event.objects.first()\n self.assertEqual(str(event), event.event_title)\n\n def test_event_title(self):\n event = Event.objects.first()\n self.assertEqual(str(event), event.event_title)\n\n def test_event_slug(self):\n event = Event.objects.first()\n slug = Event(event_title=event.event_title, event_slug=event.event_title)\n\n self.assertEqual(str(slug), slug.event_slug)\n" }, { "alpha_fraction": 0.6137183904647827, "alphanum_fraction": 0.6371841430664062, "avg_line_length": 41.61538314819336, "blob_id": "c08fe8550e1d1e262cb3ae37513ebdf40ef337e6", "content_id": "f2604b0e5c5ecbb131a5814cb480910b961c007e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1108, "license_type": "permissive", "max_line_length": 140, "num_lines": 26, "path": "/events/migrations/0006_eventlocation.py", "repo_name": "DragosNicuDev/myhub", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.9 on 2018-02-06 13:14\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('events', '0005_eventdescription'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='EventLocation',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('event_location_name', models.CharField(blank=True, max_length=80, null=True, verbose_name='Name of the location')),\n ('event_location_address', models.CharField(blank=True, max_length=256, null=True, verbose_name='Address of the location')),\n ('event_location_datetime', models.DateTimeField(blank=True, null=True, verbose_name='Date and Time at the location')),\n ('event', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='event_location', to='events.Event')),\n ],\n ),\n ]\n" }, { "alpha_fraction": 0.592024564743042, "alphanum_fraction": 0.6134969592094421, "avg_line_length": 29.5625, "blob_id": "7b029b559645e57631dac98861e0a0490abdbb31", "content_id": "d8f0463e3dab8b7b7342e89685e7d825ace712bb", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 978, "license_type": "permissive", "max_line_length": 110, "num_lines": 32, "path": "/events/migrations/0004_auto_20180205_1704.py", "repo_name": "DragosNicuDev/myhub", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.9 on 
2018-02-05 17:04\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.utils.timezone\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('events', '0003_eventdateandtime'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='eventdateandtime',\n name='event_dateandtime',\n ),\n migrations.AddField(\n model_name='eventdateandtime',\n name='event_end_date',\n field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='End Date and Time'),\n preserve_default=False,\n ),\n migrations.AddField(\n model_name='eventdateandtime',\n name='event_start_date',\n field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='Start Date and Time'),\n preserve_default=False,\n ),\n ]\n" }, { "alpha_fraction": 0.5988737940788269, "alphanum_fraction": 0.6025173664093018, "avg_line_length": 25.716814041137695, "blob_id": "955e03a4e264e2adfdc80bfbd3dccbc44d2c7181", "content_id": "9dbc38c06a6bacb2686689510c4a062deb266cf9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3019, "license_type": "permissive", "max_line_length": 71, "num_lines": 113, "path": "/events/models.py", "repo_name": "DragosNicuDev/myhub", "src_encoding": "UTF-8", "text": "from django.conf import settings\nfrom django.db import models\nfrom django.utils import timezone\n\nfrom autoslug import AutoSlugField\nfrom imagekit.models import ProcessedImageField\n\n\n# Create your models here.\nclass Event(models.Model):\n '''The place where an organiser can create an event'''\n\n # Event Title\n event_title = models.CharField('Event Title', max_length=256)\n\n # Event slug\n event_slug = AutoSlugField(populate_from='event_title', default='')\n\n # Event creation date\n event_date_created = models.DateTimeField(default=timezone.now,\n auto_now=False,\n auto_now_add=False)\n\n # Event main image path\n def path_and_rename(instance, filename):\n extension = filename.split('.')[-1]\n\n return '{}.{}'.format(timezone.now(), extension)\n\n # Event main image\n event_main_image = ProcessedImageField(upload_to=path_and_rename,\n format='jpeg',\n options={'quality': 80},)\n\n # Event organiser\n event_user = models.ForeignKey(settings.AUTH_USER_MODEL,\n related_name='event_user')\n\n # Return string\n def __str__(self):\n return str(self.event_title)\n\n\nclass EventDateAndTime(models.Model):\n '''The Event organiser can add a date and a time to the event'''\n\n # The event attached to\n event = models.ForeignKey(\n Event,\n related_name='date_event',\n on_delete=models.CASCADE)\n\n # The start date and time of the event\n event_start_date = models.DateTimeField(\n 'Start Date and Time',\n auto_now=False,\n auto_now_add=False)\n\n # The end date and time of the event\n event_end_date = models.DateTimeField(\n 'End Date and Time',\n auto_now=False,\n auto_now_add=False)\n\n\nclass EventDescription(models.Model):\n '''The Event organiser can add a description'''\n\n # The event attached to\n event = models.ForeignKey(\n Event,\n related_name='event_description',\n on_delete=models.CASCADE\n )\n\n # Description field\n event_description = models.TextField()\n\n\nclass EventLocation(models.Model):\n '''The event location'''\n\n # The event attached to\n event = models.ForeignKey(\n Event,\n related_name='event_location',\n on_delete=models.CASCADE\n )\n\n # The event location name\n event_location_name = models.CharField(\n 'Name of the location',\n max_length=80,\n null=True,\n blank=True\n 
)\n\n # The location's address\n event_location_address = models.CharField(\n 'Address of the location',\n max_length=256,\n null=True,\n blank=True\n )\n\n # The location date and time (optional)\n event_location_datetime = models.DateTimeField(\n 'Date and Time at the location',\n auto_now=False,\n auto_now_add=False,\n blank=True,\n null=True\n )\n" }, { "alpha_fraction": 0.6978609561920166, "alphanum_fraction": 0.6978609561920166, "avg_line_length": 31.521739959716797, "blob_id": "7f00a0ebd4565adfb0091af195d07d91235c8e81", "content_id": "87c502591b996716ce27b4821dd5de7a402f067d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 748, "license_type": "permissive", "max_line_length": 67, "num_lines": 23, "path": "/events/views.py", "repo_name": "DragosNicuDev/myhub", "src_encoding": "UTF-8", "text": "from django.conf import settings\nfrom django.views.generic import DetailView, ListView, TemplateView\n\n# Create your views here.\nfrom .models import (\n Event,\n EventDateAndTime,\n EventDescription,\n EventLocation)\n\n\nclass EventTemplateView(TemplateView):\n model = Event\n template_name = 'events/event_detail.html'\n\n def get_context_data(self, **kwargs):\n context = super().get_context_data(**kwargs)\n context['event'] = Event.objects.first()\n context['date_time'] = EventDateAndTime.objects.first()\n context['description'] = EventDescription.objects.first()\n context['gmap_key'] = settings.EASY_MAPS_GOOGLE_MAPS_API_KEY\n context['location'] = EventLocation.objects.all()\n return context\n" }, { "alpha_fraction": 0.5693717002868652, "alphanum_fraction": 0.6125654578208923, "avg_line_length": 30.83333396911621, "blob_id": "33991dc95fe829356cd778bb9e26d63e1728866e", "content_id": "7f819e08aa209101d6f75c8874b3f2d0d021ac4d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 764, "license_type": "permissive", "max_line_length": 143, "num_lines": 24, "path": "/events/migrations/0005_eventdescription.py", "repo_name": "DragosNicuDev/myhub", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.9 on 2018-02-06 11:21\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('events', '0004_auto_20180205_1704'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='EventDescription',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('event_description', models.TextField()),\n ('event', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='event_description', to='events.Event')),\n ],\n ),\n ]\n" }, { "alpha_fraction": 0.6916578412055969, "alphanum_fraction": 0.6948257684707642, "avg_line_length": 19.586956024169922, "blob_id": "40bef491a4704794b0cb59783e448d1b01a43dbb", "content_id": "ab8e9c9c2d80477fa8d1bdb740cc94ec4da0d950", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 947, "license_type": "permissive", "max_line_length": 68, "num_lines": 46, "path": "/events/admin.py", "repo_name": "DragosNicuDev/myhub", "src_encoding": "UTF-8", "text": "from django.contrib import admin\nfrom django_summernote.admin import SummernoteModelAdmin\n\nfrom .models import (\n Event,\n EventDateAndTime,\n EventDescription,\n EventLocation)\n\n\n# Apply summernote to all 
TextField in model.\nclass SomeModelAdmin(SummernoteModelAdmin):  # instead of ModelAdmin\n    summernote_fields = '__all__'\n\n\nclass EventTime(admin.TabularInline):\n    model = EventDateAndTime\n    extra = 0\n\n\nclass EventDescAdmin(admin.TabularInline):\n    model = EventDescription\n    extra = 0\n\n\nclass EventLocationAdmin(admin.TabularInline):\n    model = EventLocation\n    extra = 0\n\n\nclass EventAdmin(admin.ModelAdmin):\n    list_display = (\n        'event_title',\n        'event_slug',\n        'event_user',\n        'event_date_created'\n    )\n\n    inlines = [\n        EventTime,\n        EventDescAdmin,\n        EventLocationAdmin\n    ]\n\nadmin.site.register(Event, EventAdmin)\nadmin.site.register(EventDescription, SomeModelAdmin)\n" }, { "alpha_fraction": 0.6578821539878845, "alphanum_fraction": 0.6636320352554321, "avg_line_length": 38.377357482910156, "blob_id": "2ffda285c7226ee308840484338e16826cfe711f", "content_id": "2c4a30855710d0f3a64eee11000415034c96be46", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2087, "license_type": "permissive", "max_line_length": 110, "num_lines": 53, "path": "/config/urls.py", "repo_name": "DragosNicuDev/myhub", "src_encoding": "UTF-8", "text": "from django.conf import settings\nfrom django.conf.urls import include, url\nfrom django.conf.urls.static import static\nfrom django.contrib import admin\nfrom django.views.generic import TemplateView\nfrom django.views import defaults as default_views\n\nfrom events.views import EventTemplateView\n\nurlpatterns = [\n    # url(r'^$', TemplateView.as_view(template_name='pages/home.html'), name='home'),\n    # url(r'^(?P<pk>\\d+)/$', EventDetailView.as_view(), name='home'),\n    url(r'^$', EventTemplateView.as_view(), name='home'),\n    url(r'^about/$', TemplateView.as_view(template_name='pages/about.html'), name='about'),\n\n    # Fobi View URLs\n    # url(r'^fobi/', include('fobi.urls.view')),\n\n    # Fobi Edit URLs\n    # url(r'^fobi/', include('fobi.urls.edit')),\n\n    url(r'^summernote/', include('django_summernote.urls')),\n\n    # Fobi DB Store plugin URLs\n    # url(r'^fobi/plugins/form-handlers/db-store/',\n    #     include('fobi.contrib.plugins.form_handlers.db_store.urls')),\n\n    # Django Admin, use {% url 'admin:index' %}\n    url(settings.ADMIN_URL, admin.site.urls),\n\n    # User management\n    url(r'^users/', include('myhub_events.users.urls', namespace='users')),\n    url(r'^accounts/', include('allauth.urls')),\n\n    # Your stuff: custom urls includes go here\n\n\n] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n\nif settings.DEBUG:\n    # This allows the error pages to be debugged during development, just visit\n    # these url in browser to see how these error pages look like.\n    urlpatterns += [\n        url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}),\n        url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception('Permission Denied')}),\n        url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception('Page not Found')}),\n        url(r'^500/$', default_views.server_error),\n    ]\n    if 'debug_toolbar' in settings.INSTALLED_APPS:\n        import debug_toolbar\n        urlpatterns = [\n            url(r'^__debug__/', include(debug_toolbar.urls)),\n        ] + urlpatterns\n" }, { "alpha_fraction": 0.5895807147026062, "alphanum_fraction": 0.616264283657074, "avg_line_length": 31.79166603088379, "blob_id": "84798991ff331c34cb9247eb8fc913cbdbccc435", "content_id": "2146005ba5ed41bdc84a65680387b1fe065fe7b6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", 
"length_bytes": 787, "license_type": "permissive", "max_line_length": 136, "num_lines": 24, "path": "/events/migrations/0003_eventdateandtime.py", "repo_name": "DragosNicuDev/myhub", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.9 on 2018-02-04 11:59\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('events', '0002_event_event_slug'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='EventDateAndTime',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('event_dateandtime', models.DateTimeField(verbose_name='Date and Time')),\n ('event', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='date_event', to='events.Event')),\n ],\n ),\n ]\n" }, { "alpha_fraction": 0.5236400365829468, "alphanum_fraction": 0.5322826504707336, "avg_line_length": 29.261537551879883, "blob_id": "d240e2bc6574a8c1b909cd51b0b5d954d7d933f2", "content_id": "a87d9667e20621b7964dcaccfcda4e593f7d84ac", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 1967, "license_type": "permissive", "max_line_length": 109, "num_lines": 65, "path": "/myhub_events/templates/events/event_detail.html", "repo_name": "DragosNicuDev/myhub", "src_encoding": "UTF-8", "text": "{% extends \"base.html\" %}\n\n{% load easy_maps_tags static %}\n\n{% if event %}\n {% block page-header %}\n <div class=\"page-header section-dark\" style=\"background-image: url('{{event.event_main_image.url}}')\">\n <div class=\"filter\"></div>\n <div class=\"content-center\">\n <div class=\"container\">\n <div class=\"title-brand\">\n <h1 class=\"presentation-title\">{{event.event_title}}</h1>\n </div>\n <h2 class=\"presentation-subtitle text-center\">\n From {{date_time.event_start_date | date:\"l, j F Y, H:i\"}}\n </h2>\n <h2 class=\"presentation-subtitle text-center\">\n Until {{date_time.event_end_date | date:\"l, j F Y, H:i\"}}\n </h2>\n </div>\n </div>\n <div class=\"moving-clouds\" style=\"background-image: url('{% static 'images/clouds.png' %}'); \">\n </div>\n </div>\n {% endblock %}\n{% endif %}\n\n{% if description %}\n {% block description %}\n <div class=\"main\" id=\"Description\">\n <div class=\"section\">\n <div class=\"container\">\n <div class=\"row example-page\">\n <div class=\"col-md-12\">\n {{description.event_description|safe}}\n </div>\n </div>\n </div>\n </div>\n </div>\n {% endblock %}\n{% endif %}\n\n{% if location %}\n {% block location %}\n <div class=\"main\" id=\"Location\">\n <div class=\"section section-dark\">\n <div class=\"container\">\n <div class=\"row example-page\">\n {% block api_js %}\n <script type=\"text/javascript\" src=\"https://maps.google.com/maps/api/js?key={{gmap_key}}\"></script>\n {% endblock %}\n {% for l in location %}\n <div class=\"col-md-{% widthratio 12 location.count 1 %} col-sm-12 text-center\">\n <h3>{{l.event_location_name}}</h3>\n <h6>{{l.event_location_address}}</h6>\n {% easy_map l.event_location_address %}\n </div>\n {% endfor %}\n </div>\n </div>\n </div>\n </div>\n {% endblock %}\n{% endif %}\n" }, { "alpha_fraction": 0.5764706134796143, "alphanum_fraction": 0.6176470518112183, "avg_line_length": 23.285715103149414, "blob_id": "da2b10bb3fa57ec5d587bd1bb8d43d9c23acd80b", "content_id": "b8e5445c382373e9b2c173323086005eca2b722b", "detected_licenses": [ "MIT" ], "is_generated": 
false, "is_vendor": false, "language": "Python", "length_bytes": 510, "license_type": "permissive", "max_line_length": 105, "num_lines": 21, "path": "/events/migrations/0002_event_event_slug.py", "repo_name": "DragosNicuDev/myhub", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.9 on 2018-02-03 21:36\nfrom __future__ import unicode_literals\n\nimport autoslug.fields\nfrom django.db import migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('events', '0001_initial'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='event',\n name='event_slug',\n field=autoslug.fields.AutoSlugField(default='', editable=False, populate_from='event_title'),\n ),\n ]\n" } ]
11
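The `Event` model in the record above stores its main image under a timestamp-based name via the `path_and_rename` callable passed to `upload_to`. A minimal sketch of that renaming rule outside Django, with `datetime.now()` standing in for Django's `timezone.now()` — an assumption so the snippet runs standalone:

```python
from datetime import datetime

def path_and_rename(filename):
    # Keep the extension, replace the basename with the current timestamp,
    # mirroring the upload_to callable on the Event model above.
    extension = filename.split('.')[-1]
    return '{}.{}'.format(datetime.now(), extension)

if __name__ == '__main__':
    print(path_and_rename('party.jpeg'))  # e.g. '2018-02-10 13:51:57.123456.jpeg'
```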
restran/mountains
https://github.com/restran/mountains
1b3844ad27d569c0ed21d382d8f85d4424c0e098
e95a2640074b82df6d5e8f3cf45e877dfcb0a648
e1cd3482db05af69c8aceaa2fec0bde6e043e5cf
refs/heads/master
2022-06-14T17:00:17.053156
2022-05-16T15:23:14
2022-05-16T15:23:14
101,150,874
5
0
null
null
null
null
null
[ { "alpha_fraction": 0.42583248019218445, "alphanum_fraction": 0.528422474861145, "avg_line_length": 25.54464340209961, "blob_id": "495aed644ec16c1aaabc20912788698ae0fdfbb3", "content_id": "9b8efdc08a7fad4003d32ccbada0b7ac6a7e6058", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2973, "license_type": "permissive", "max_line_length": 79, "num_lines": 112, "path": "/tests/test_encoding.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2018/3/7\nfrom __future__ import unicode_literals, absolute_import\n\nimport unittest\n\nfrom mountains.encoding import converter, is_base64, is_base32\n\n\nclass Test(unittest.TestCase):\n def setUp(self):\n pass\n\n def test_hex2dec(self):\n s = '1F'\n r = converter.hex2dec(s)\n self.assertEqual(r, 31)\n s = '1F1F'\n r = converter.hex2dec(s)\n self.assertEqual(r, 7967)\n\n def test_dec2hex(self):\n s = '31'\n r = converter.dec2hex(s)\n self.assertEqual(r.upper(), '1F')\n s = '7967'\n r = converter.dec2hex(s)\n self.assertEqual(r.upper(), '1F1F')\n\n def test_bin2dec(self):\n s = '11111'\n r = converter.bin2dec(s)\n self.assertEqual(r, 31)\n\n def test_dec2bin(self):\n s = '31'\n r = converter.dec2bin(s)\n self.assertEqual(r, '11111')\n\n def test_str2dec(self):\n s = 'abcdef'\n r = converter.str2dec(s)\n self.assertEqual(r, 107075202213222)\n\n def test_dec2str(self):\n s = '107075202213222'\n r = converter.dec2str(s)\n self.assertEqual(r, 'abcdef')\n\n def test_str2hex(self):\n s = 'abcdef'\n r = converter.str2hex(s)\n self.assertEqual(r, '616263646566')\n\n def test_hex2str(self):\n s = '616263646566'\n r = converter.hex2str(s)\n self.assertEqual(r, 'abcdef')\n\n def test_hex2bin(self):\n s = '616263646566'\n r = converter.hex2bin(s)\n self.assertEqual(r, '011000010110001001100011011001000110010101100110')\n s = '00'\n r = converter.hex2bin(s)\n self.assertEqual(r, '00000000')\n\n def test_bin2hex(self):\n s = '011000010110001001100011011001000110010101100110'\n r = converter.bin2hex(s)\n self.assertEqual(r, '616263646566')\n\n def test_to_digital(self):\n for i in range(10, 1000):\n for j in range(2, 10):\n r = converter.to_digital(i, j)\n x = converter.from_digital(r, j)\n self.assertEqual(str(i), x)\n\n def test_is_base64(self):\n data = [\n (\"Y2QgL2QgIkM6L2luZXRwdWIvd3d3cm9vdCImd2hvYW1pJmVjaG8gW1NdJmNkJmVjaG8gW0Vd\", True),\n (\"Qzov\", True),\n (\"123\", False),\n (\"Y21k\", True),\n (\"0000\", False),\n (\"QzovaW5ldHB1Yi93d3dyb290Lw==\", True),\n (\"QzovaW5ldHB1Yi8=\", True),\n (\"aHR0cDovLzExOC4zMS42Ni4yMy8yMDIwc2tpbGwvYS5leGU=\", True)\n ]\n\n for (s, real_r) in data:\n r = is_base64(s)\n self.assertEqual(r, real_r)\n\n def test_is_base32(self):\n data = [\n (\"GFQWCYLB\", True),\n (\"1\", False),\n (\"123\", False),\n (\"GE======\", True),\n (\"0000\", False),\n (\"GFQXGZDGMFZWIZTBONSGMYLTMRTGC43EMY======\", True)\n ]\n\n for (s, real_r) in data:\n r = is_base32(s)\n self.assertEqual(r, real_r)\n\n\nif __name__ == '__main__':\n unittest.main()\n" }, { "alpha_fraction": 0.7073884606361389, "alphanum_fraction": 0.7329919338226318, "avg_line_length": 26.059406280517578, "blob_id": "368e011d14530678c783bbf9ab8f41ca46069192", "content_id": "fd1c81d939a52e693c90fa276b651d1d322f0e02", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3326, "license_type": "permissive", "max_line_length": 377, "num_lines": 101, "path": "/README.md", "repo_name": 
"restran/mountains", "src_encoding": "UTF-8", "text": "# mountains\n\n[![travis-ci](https://travis-ci.org/restran/mountains.svg?branch=master)](https://travis-ci.org/restran/mountains) [![Coverage Status](https://coveralls.io/repos/github/restran/mountains/badge.svg?branch=master)](https://coveralls.io/github/restran/mountains?branch=master) [![pypi package](https://img.shields.io/pypi/v/mountains.svg)](https://pypi.python.org/pypi/mountains/)\n\n在开发Python的过程中经常会有一些常用的方法和工具类,因此将这些代码集成在一起,在开发新东西的时候就能直接调用,加速开发。\n\n<img src=\"docs/icon.png\" style=\"margin-left: auto; margin-right: auto; text-align: center; display: block;\">\n\n## 安装\n\n pip install mountains\n\n## 功能\n\n1. Python 2-3 兼容,大部分代码都尽可能做了兼容\n2. 日期转换,各种日期、字符串、时间戳直接的转换\n3. SSHClient\n4. Tornado 的异步请求\n5. Random HTTP User Agent\n6. 文件、Excel、json 读写 \n7. ...\n\n\n### 日期转换\n\ndatetime、time、时间戳、日期字符串之间的转换\n\n```python\n\nimport time\nfrom datetime import datetime\nfrom mountains.datetime import converter\n\ndate_str = '2016-10-30 12:30:30'\ndt = datetime(year=2016, month=10, day=30, hour=12, minute=30, second=30)\nt = dt.timetuple()\nts = int(time.mktime(t))\nts_ms = int(time.mktime(t) * 1000)\n\n# 字符串转 datetime\ndt = converter.str2datetime(date_str)\n# 字符串转 time\nconverter.str2time(date_str)\n# 日期字符串转时间戳,结果为秒\nconverter.str2timestamp(date_str)\n# 日期字符串转时间戳,结果为毫秒\nconverter.str2timestamp(date_str, millisecond=True)\n# datetime 转字符串,默认格式 %Y-%m-%d %H:%M:%S\nconverter.datetime2str(dt)\n# datetime 转字符串,指定格式\nconverter.datetime2str(dt, '%Y-%m-%d')\n```\n\n### 日志功能 \n\n对原生的 logging 进行了封装,使用起来更简单\n\n```python\nfrom mountains import logging\nfrom mountains.logging import StreamHandler, FileHandler, RotatingFileHandler, TimedRotatingFileHandler\n\n# 配置日志,输出到控制台、保存到文件、日志级别、输出格式等,文件默认保存到 log.txt\nlogging.init_log(StreamHandler(format=logging.FORMAT_SIMPLE), FileHandler(format=logging.FORMAT_VERBOSE, level=logging.DEBUG))\n# RotatingFileHandler 按文件大小分割日志文件\nlogging.init_log(StreamHandler(format=logging.FORMAT_SIMPLE), RotatingFileHandler(format=logging.FORMAT_VERBOSE, level=logging.DEBUG))\n# TimedRotatingFileHandler 按时间分割日志文件\nlogging.init_log(StreamHandler(format=logging.FORMAT_SIMPLE), TimedRotatingFileHandler(format=logging.FORMAT_VERBOSE, level=logging.DEBUG))\n\n# 使用方法与原生的 logging 一样\nlogger = logging.getLogger(__name__)\nlogger.debug('hello')\n```\n\n### Excel 读写\n\n```python\nfrom mountains.file.excel import read_excel, write_excel, edit_excel\n# 读 Excel 文件\ndata = read_excel('filename.xlsx')\n\n# 写新的 Excel\nexcel_data = [\n {\n 'col1': '123',\n 'col2': '456'\n },\n {\n 'col1': '123',\n 'col2': '456'\n },\n]\n\nheaders = ['col1', 'col2']\nwrite_excel(headers, excel_data, 'filename.xlsx')\n\n# 编辑 Excel,打开已有的 Excel,往里面填充数据\nedit_data = {\n 'I2': '123'\n}\nedit_excel('test.xlsx', sheet_index=0, data=edit_data, output_filename='new_test.xlsx')\n```\n\n" }, { "alpha_fraction": 0.5274426937103271, "alphanum_fraction": 0.5329714417457581, "avg_line_length": 32.7220344543457, "blob_id": "10516178119512f91c0669c1ac0628dfcff0d718", "content_id": "4e2663f229946dfd5acef86a42b4df9eb4e60436", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10150, "license_type": "permissive", "max_line_length": 97, "num_lines": 295, "path": "/mountains/tornado/__init__.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2017/12/11\nfrom __future__ import unicode_literals, absolute_import\n\nimport logging\nimport platform\nimport 
time\nfrom collections import deque\n\nfrom ..base import urlunparse, urlparse, urlencode\nfrom .. import json as json_util\nfrom .. import force_text\nfrom ..http import random_agent\n\ntry:\n    from tornado.gen import coroutine, is_future, Return\n    from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPError\nexcept ImportError:\n    raise Exception('tornado is not installed')\n\nlogger = logging.getLogger(__name__)\n\nDEFAULT_CONNECT_TIMEOUT = 20\nDEFAULT_REQUEST_TIMEOUT = 20\nASYNC_HTTP_MAX_CLIENTS = 100\n\n# pycurl crashes on Windows with 64-bit Python\n# https://github.com/pycurl/pycurl/issues/395\n# libcurl 7.48.0 and later fixed this bug\nif platform.architecture()[0] == '64bit' and platform.system() == 'Windows':\n    AsyncHTTPClient.configure(\n        'tornado.simple_httpclient.SimpleAsyncHTTPClient')\nelse:\n    try:\n        # curl_httpclient is faster than simple_httpclient\n        AsyncHTTPClient.configure(\n            'tornado.curl_httpclient.CurlAsyncHTTPClient',\n            max_clients=ASYNC_HTTP_MAX_CLIENTS)\n    except ImportError:\n        AsyncHTTPClient.configure(\n            'tornado.simple_httpclient.SimpleAsyncHTTPClient')\n\n\nclass AsyncHTTPExecutor(object):\n    \"\"\"\n    Asynchronous HTTP requests; supports concurrent access\n    \"\"\"\n\n    def __init__(self, task_queue, on_request,\n                 on_success, on_error, on_queue_empty=None,\n                 proxy_list=None,\n                 max_workers=20, timeout=5, verbose=False):\n        self.fn_on_queue_empty = on_queue_empty\n        self.fn_on_request = on_request\n        self.fn_on_success = on_success\n        self.fn_on_error = on_error\n        self.task_queue = deque()\n        self.task_queue.extend(task_queue)\n        self.timeout = timeout\n        self.max_workers = max_workers\n        self.count = 0\n        self.verbose = verbose\n        self.start_time = None\n        self.last_time = None\n        self.proxy_list = proxy_list\n        self.proxy_index = 0\n\n    def get_proxy(self):\n        \"\"\"\n        Get the next available proxy\n        :return: proxy_host, proxy_port, proxy_username, proxy_password\n        \"\"\"\n        if self.proxy_list is None or len(self.proxy_list) <= 0:\n            return None, None, None, None\n        self.proxy_index += 1\n        self.proxy_index = self.proxy_index % len(self.proxy_list)\n        item = self.proxy_list[self.proxy_index]\n        if len(item) == 2:\n            return item[0], item[1], None, None\n        else:\n            return item[0], item[1], item[2], item[3]\n\n    def get_next_task(self):\n        try:\n            item = self.task_queue.popleft()\n        except IndexError:\n            if self.fn_on_queue_empty is None:\n                return None\n            else:\n                item = self.fn_on_queue_empty(self.task_queue)\n                if isinstance(item, list) and len(item) > 0:\n                    self.task_queue.extend(item)\n                    item = self.task_queue.popleft()\n        return item\n\n    @coroutine\n    def do_request(self, item):\n        self.count += 1\n        current_time = time.time()\n        # Log progress every 10 seconds\n        if current_time - self.last_time > 10:\n            self.last_time = current_time\n            speed = self.count / (current_time - self.start_time)\n            past_time = current_time - self.start_time\n            logger.info('items, speed, time: %s, %.1f/s, %.1fs' % (self.count, speed, past_time))\n\n        base_headers = {\n            'User-Agent': random_agent()\n        }\n\n        url, method, headers, body, extra_params = self.fn_on_request(item)\n        if url is None:\n            return\n\n        if headers is None:\n            headers = {}\n\n        base_headers.update(headers)\n        headers = base_headers\n\n        url = force_text(url)\n        body = '' if method == 'POST' else None\n        proxy_host, proxy_port, proxy_username, proxy_password = self.get_proxy()\n        if proxy_port is not None:\n            proxy_port = int(proxy_port)\n\n        params_dict = {\n            'decompress_response': True,\n            'validate_cert': False,\n            'proxy_host': proxy_host,\n            'proxy_port': proxy_port,\n            'proxy_username': proxy_username,\n            'proxy_password': proxy_password,\n            'connect_timeout': 
self.timeout,\n            'request_timeout': self.timeout,\n            'follow_redirects': False\n        }\n        # Allow passing extra parameters through\n        if isinstance(extra_params, dict):\n            params_dict.update(extra_params)\n\n        try:\n            response = yield AsyncHTTPClient().fetch(\n                HTTPRequest(url=url,\n                            method=method,\n                            headers=headers,\n                            body=body,\n                            **params_dict))\n            try:\n                self.fn_on_success(url, item, method, response, self.task_queue)\n            except Exception as e:\n                logger.error(e)\n        except HTTPError as e:\n            if hasattr(e, 'response') and e.response:\n                try:\n                    self.fn_on_success(url, item, method, e.response, self.task_queue)\n                except Exception as ex:\n                    logger.error(ex)\n            else:\n                if self.verbose:\n                    logger.error(e)\n                    logger.error('%s, %s' % (method, item))\n                try:\n                    self.fn_on_error(url, item, method, e, self.task_queue)\n                except Exception as e:\n                    logger.error(e)\n        except Exception as e:\n            if self.verbose:\n                logger.error(e)\n                logger.error('%s, %s' % (method, item))\n\n            try:\n                self.fn_on_error(url, item, method, e, self.task_queue)\n            except Exception as e:\n                logger.error(e)\n\n    @coroutine\n    def fetch_url(self, i):\n        item = self.get_next_task()\n        while item is not None:\n            yield self.do_request(item)\n            item = self.get_next_task()\n\n    @coroutine\n    def run(self, *args, **kwargs):\n        logger.info('executor start')\n        self.start_time = time.time()\n        self.last_time = self.start_time\n        # Start workers, then wait for the work queue to be empty.\n        # Blocks here until all the workers have finished\n        yield [self.fetch_url(t) for t in range(self.max_workers)]\n        end_time = time.time()\n        logger.info('total count: %s' % self.count)\n        cost_time = end_time - self.start_time\n        if cost_time > 0:\n            speed = self.count / cost_time\n        else:\n            speed = 1\n\n        logger.info('executor done, %.3f, %.1f/s' % (cost_time, speed))\n\n\n@coroutine\ndef async_request(method='GET', url=None, params=None,\n                  headers=None, data=None, json=None,\n                  on_response=None, on_error=None,\n                  connect_timeout=DEFAULT_CONNECT_TIMEOUT,\n                  request_timeout=DEFAULT_REQUEST_TIMEOUT,\n                  follow_redirects=False,\n                  proxy_host=None, proxy_port=None,\n                  proxy_username=None, proxy_password=None):\n    try:\n        if url is None:\n            return\n\n        method = method.upper()\n\n        base_headers = {\n            'User-Agent': random_agent(),\n        }\n\n        if params is not None:\n            url_parsed = urlparse(url)\n            query = urlencode(params, doseq=True)\n            if url_parsed.query != '':\n                query = '%s&%s' % (query, url_parsed.query)\n\n            url = urlunparse((url_parsed.scheme, url_parsed.netloc,\n                              url_parsed.path, url_parsed.params,\n                              query, url_parsed.fragment))\n\n        if method == 'GET':\n            body = None\n        else:\n            if json is not None:\n                body = json_util.dumps(json)\n                base_headers['Content-Type'] = 'application/json;charset=utf-8'\n            elif isinstance(data, dict):\n                body = urlencode(data, doseq=True)\n                base_headers['Content-Type'] = 'application/x-www-form-urlencoded'\n            elif isinstance(data, list):\n                body = force_text(data)\n            else:\n                body = data\n\n        if isinstance(headers, dict):\n            base_headers.update(headers)\n\n        headers = base_headers\n        response = yield AsyncHTTPClient().fetch(\n            HTTPRequest(url=url,\n                        headers=headers,\n                        method=method,\n                        body=body,\n                        validate_cert=False,\n                        decompress_response=True,\n                        connect_timeout=connect_timeout,\n                        request_timeout=request_timeout,\n                        follow_redirects=follow_redirects,\n                        proxy_host=proxy_host,\n                        proxy_port=proxy_port,\n                        proxy_username=proxy_username,\n                        proxy_password=proxy_password))\n\n        if on_response is not None:\n            ret = on_response(response)\n            if is_future(ret):\n                yield ret\n\n        raise Return(response)\n    except Return as e:\n        # A Return exception was raised above; after catching it here, re-raise it,\n        # otherwise what is actually returned would not be the 
response\n raise e\n except HTTPError as e:\n if hasattr(e, 'response') and e.response:\n if on_response is not None:\n ret = on_response(e.response)\n if is_future(ret):\n yield ret\n\n raise Return(e.response)\n else:\n if on_error is not None:\n ret = on_error(e)\n if is_future(ret):\n yield ret\n raise Return(None)\n except Exception as e:\n if on_error is not None:\n ret = on_error(e)\n if is_future(ret):\n yield ret\n\n raise Return(None)\n" }, { "alpha_fraction": 0.561170220375061, "alphanum_fraction": 0.5828900933265686, "avg_line_length": 19.697248458862305, "blob_id": "c31fffff0fafece1fe83e8819e16ca8fbbf280ea", "content_id": "dc0533f6a160197c0f984669d572cf8f2666e899", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2256, "license_type": "permissive", "max_line_length": 75, "num_lines": 109, "path": "/mountains/datetime/converter.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2016/11/1\nfrom __future__ import unicode_literals, absolute_import\n\nimport time\nfrom datetime import datetime\n\n\"\"\"\ndate string, datetime, time and timestamp converter\n\"\"\"\n\n\ndef str2datetime(date_str, format='%Y-%m-%d %H:%M:%S'):\n try:\n return datetime.strptime(date_str, format)\n except:\n return None\n\n\ndef str2time(date_str, format='%Y-%m-%d %H:%M:%S'):\n try:\n return time.strptime(date_str, format)\n except:\n return None\n\n\ndef str2timestamp(date_str, format='%Y-%m-%d %H:%M:%S', millisecond=False):\n try:\n ts = time.mktime(time.strptime(date_str, format))\n if millisecond:\n ts = ts * 1000\n return int(ts)\n except:\n return None\n\n\ndef datetime2str(dt, format='%Y-%m-%d %H:%M:%S'):\n try:\n return dt.strftime(format)\n except:\n return None\n\n\ndef datetime2time(dt):\n try:\n return dt.timetuple()\n except:\n return None\n\n\ndef datetime2timestamp(dt, millisecond=False):\n try:\n ts = time.mktime(dt.timetuple())\n if millisecond:\n ts = ts * 1000\n return int(ts)\n except:\n return None\n\n\ndef time2str(time_tuple, format='%Y-%m-%d %H:%M:%S'):\n try:\n return time.strftime(format, time_tuple)\n except:\n return None\n\n\ndef time2datetime(time_tuple):\n try:\n return datetime(*time_tuple[0:6])\n except:\n return None\n\n\ndef time2timestamp(time_tuple, millisecond=False):\n try:\n ts = time.mktime(time_tuple)\n if millisecond:\n ts = ts * 1000\n return int(ts)\n except:\n return None\n\n\ndef timestamp2datetime(ts, millisecond=False):\n try:\n if millisecond:\n ts = int(ts / 1000.0)\n return datetime.fromtimestamp(ts)\n except:\n return None\n\n\ndef timestamp2time(ts, millisecond=False):\n try:\n if millisecond:\n ts = int(ts / 1000.0)\n return time.localtime(ts)\n except:\n return None\n\n\ndef timestamp2str(ts, format='%Y-%m-%d %H:%M:%S', millisecond=False):\n try:\n if millisecond:\n ts = int(ts / 1000.0)\n return datetime.fromtimestamp(ts).strftime(format)\n except:\n return None\n" }, { "alpha_fraction": 0.5537488460540771, "alphanum_fraction": 0.5709123611450195, "avg_line_length": 19.88679313659668, "blob_id": "c62b1f752eb25a40847411f36e35210357fcff58", "content_id": "931d2ce00a32c6bb6f2f2ff3e2820c551cf869e3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1141, "license_type": "permissive", "max_line_length": 66, "num_lines": 53, "path": "/mountains/database/__init__.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# 
created by restran on 2019/07/12\nfrom __future__ import unicode_literals, absolute_import\nimport math\n\ntry:\n    import records\nexcept Exception as e:\n    raise Exception('records is not installed')\n\n\ndef batch_insert(db, sql, data, batch_size):\n    \"\"\"\n    :type db: records.Database\n    :param db:\n    :param sql:\n    :param data:\n    :param batch_size:\n    :return:\n    \"\"\"\n\n    total = len(data)\n    if total <= 0:\n        return\n\n    times = math.ceil(total * 1.0 / batch_size)\n    for i in range(times):\n        data_list = data[i * batch_size:(i + 1) * batch_size]\n        if len(data_list) > 0:\n            db.bulk_query(sql, data_list)\n\n\ndef auto_batch_insert(db, sql, data, batch_size=500, force=False):\n    \"\"\"\n    Append data incrementally; full batches are inserted automatically\n    :type db: records.Database\n    :param db:\n    :param sql:\n    :param data:\n    :param batch_size:\n    :param force:\n    :return:\n    \"\"\"\n    total = len(data)\n    if total <= 0:\n        return data\n\n    if total >= batch_size or force:\n        if len(data) > 0:\n            db.bulk_query(sql, data)\n        data = []\n\n    return data\n" }, { "alpha_fraction": 0.57485032081604, "alphanum_fraction": 0.5948103666305542, "avg_line_length": 25.049999237060547, "blob_id": "fff96e2d940829bd21cb0283b524da4cf5201a14", "content_id": "8c9bc61501bef031b85743ae7d5b429e470b42f3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 513, "license_type": "permissive", "max_line_length": 62, "num_lines": 20, "path": "/mountains/django/utils.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# created by restran on 2018/04/06\nfrom __future__ import unicode_literals, absolute_import\n\n\ndef get_client_ip(request):\n    \"\"\"\n    Get the client's IP address\n    :param request:\n    :return:\n    \"\"\"\n    x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')\n    x_real_ip = request.META.get('HTTP_X_REAL_IP')\n    if x_real_ip:\n        ip = x_real_ip\n    elif x_forwarded_for:\n        ip = x_forwarded_for.split(',')[0]\n    else:\n        ip = request.META.get('REMOTE_ADDR')\n    return ip\n" }, { "alpha_fraction": 0.6835442781448364, "alphanum_fraction": 0.7120253443717957, "avg_line_length": 28.727272033691406, "blob_id": "d3e47ca058edb6c3915f91a6e68736b3d9f20052", "content_id": "e302e5913e65ad8056658c69f26c7d3dfa098fdd", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 316, "license_type": "permissive", "max_line_length": 59, "num_lines": 11, "path": "/convert_rst.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# created by restran on 2018/05/20\nfrom __future__ import unicode_literals, absolute_import\nimport pypandoc\n\n# converts markdown to reStructured\nz = pypandoc.convert('README.md', 'rst', format='markdown')\n\n# writes converted file\nwith open('README.rst', 'w') as outfile:\n    outfile.write(z)\n" }, { "alpha_fraction": 0.5649895071983337, "alphanum_fraction": 0.5706798434257507, "avg_line_length": 27.538461685180664, "blob_id": "fe788e5ece350cbc0ded7d7273accec378cbd670", "content_id": "0dceb594ef556557ab61197d62acf9ff75422d11", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6740, "license_type": "permissive", "max_line_length": 83, "num_lines": 234, "path": "/mountains/logging/__init__.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2017/8/23\nfrom __future__ import unicode_literals, absolute_import\n\nimport logging\nimport 
logging.config\n\nFORMAT_VERBOSE = \"[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s\"\n# Python的logging使用的是time的datefmt,并没有提供毫秒的datefmt\n# http://blog.csdn.net/arthur503/article/details/49359241\nDATE_FMT_VERBOSE = \"%Y-%m-%d %H:%M:%S\"\n\nFORMAT_SIMPLE = '%(levelname)s %(message)s'\nDATE_FMT_SIMPLE = \"%H:%M:%S\"\n\nDEBUG = 'DEBUG'\nINFO = 'INFO'\nWARNING = 'WARNING'\nERROR = 'ERROR'\nCRITICAL = 'CRITICAL'\n\n\ndef getLogger(name):\n \"\"\"\n 用于替代 logging.getLogger\n :param name:\n :return:\n \"\"\"\n return logging.getLogger(name)\n\n\ndef Logger(name):\n \"\"\"\n 用于替代 logging.getLogger\n :param name:\n :return:\n \"\"\"\n return logging.getLogger(name)\n\n\nclass BaseHandler(object):\n def __init__(self, level=DEBUG, format=None, datefmt=None):\n self.level = level\n self.format = format\n self.datefmt = datefmt\n\n if self.datefmt is None:\n self.datefmt = DATE_FMT_VERBOSE\n\n if self.format is None:\n self.format = FORMAT_VERBOSE\n\n self.handler_class = 'logging.handlers.StreamHandler'\n\n def get_formatter_name(self):\n return self.__class__.__name__\n\n def get_formatter(self):\n formatter = {\n self.get_formatter_name(): {\n 'format': self.format,\n 'datefmt': self.datefmt,\n }\n }\n\n return formatter\n\n def get_handler(self):\n handler = {\n self.get_formatter_name(): {\n 'level': self.level,\n 'class': self.handler_class,\n 'formatter': self.get_formatter_name()\n }\n }\n\n return handler\n\n\nclass StreamHandler(BaseHandler):\n def __init__(self, level=DEBUG, format=None, datefmt=None):\n super(StreamHandler, self).__init__(level, format, datefmt)\n self.handler_class = 'logging.StreamHandler'\n\n\nclass ColorStreamHandler(BaseHandler):\n def __init__(self, level=DEBUG, format=None, datefmt=None, log_colors=None):\n super(ColorStreamHandler, self).__init__(level, format, datefmt)\n self.format = '%(log_color)s' + self.format\n self.handler_class = 'colorlog.StreamHandler'\n try:\n import colorlog\n except:\n raise Exception('colorlog not installed')\n\n if log_colors is None:\n log_colors = {\n 'DEBUG': 'white',\n 'INFO': 'green',\n 'WARNING': 'yellow',\n 'ERROR': 'red',\n # 'CRITICAL': 'red',\n 'CRITICAL': 'red, bg_white',\n }\n self.log_colors = log_colors\n\n def get_formatter(self):\n formatter = {\n self.get_formatter_name(): {\n '()': 'colorlog.ColoredFormatter',\n 'format': self.format,\n 'datefmt': self.datefmt,\n 'log_colors': self.log_colors\n }\n }\n\n return formatter\n\n\nclass FileHandler(BaseHandler):\n def __init__(self, filename='log.txt', level=DEBUG, format=None, datefmt=None):\n super(FileHandler, self).__init__(level, format, datefmt)\n self.filename = filename\n self.handler_class = 'logging.FileHandler'\n\n def get_handler(self):\n handler = super(FileHandler, self).get_handler()\n new_params = {\n 'filename': self.filename\n }\n\n handler[self.get_formatter_name()].update(new_params)\n return handler\n\n\nclass RotatingFileHandler(BaseHandler):\n def __init__(self, filename='log.txt', max_bytes=1024 * 1024 * 10,\n backup_count=10, delay=True, level=DEBUG,\n format=None, datefmt=None):\n super(RotatingFileHandler, self).__init__(level, format, datefmt)\n self.filename = filename\n self.handler_class = 'logging.handlers.RotatingFileHandler'\n self.max_bytes = max_bytes\n self.backup_count = backup_count\n self.delay = delay\n\n def get_handler(self):\n handler = super(RotatingFileHandler, self).get_handler()\n new_params = {\n 'filename': self.filename,\n 'delay': self.delay,\n 'maxBytes': self.max_bytes,\n 'backupCount': 
self.backup_count\n }\n\n handler[self.get_formatter_name()].update(new_params)\n return handler\n\n\nclass TimedRotatingFileHandler(BaseHandler):\n def __init__(self, filename='log.txt', when='D', interval=1,\n backup_count=10, delay=True, level=DEBUG,\n format=None, datefmt=None):\n \"\"\"\n when 可以使用这些参数\n 'S' Seconds\n 'M' Minutes\n 'H' Hours\n 'D' Days\n 'W0'-'W6' Weekday (0=Monday)\n 'midnight' Roll over at midnight\n :param filename:\n :param when:\n :param interval:\n :param backup_count:\n :param delay:\n :param level:\n :param format:\n :param datefmt:\n \"\"\"\n super(TimedRotatingFileHandler, self).__init__(level, format, datefmt)\n self.filename = filename\n self.handler_class = 'logging.handlers.TimedRotatingFileHandler'\n self.when = when\n self.interval = interval\n self.delay = delay\n self.backup_count = backup_count\n\n def get_handler(self):\n handler = super(TimedRotatingFileHandler, self).get_handler()\n new_params = {\n 'filename': self.filename,\n 'delay': self.delay,\n 'when': self.when,\n 'interval': self.interval,\n 'backupCount': self.backup_count\n }\n\n handler[self.get_formatter_name()].update(new_params)\n return handler\n\n\ndef init_log(*handlers, **kwargs):\n \"\"\"\n :param handlers:\n :return:\n \"\"\"\n disable_existing_loggers = kwargs.get('disable_existing_loggers', False)\n\n handlers_config = [t.get_handler() for t in handlers]\n new_handlers_config = {}\n for t in handlers_config:\n new_handlers_config.update(t)\n\n formatter_config = [t.get_formatter() for t in handlers]\n new_formatter_config = {}\n for t in formatter_config:\n new_formatter_config.update(t)\n\n handler_name_list = [t.get_formatter_name() for t in handlers]\n dict_config = {\n 'version': 1,\n 'disable_existing_loggers': disable_existing_loggers,\n 'formatters': new_formatter_config,\n 'handlers': new_handlers_config,\n 'loggers': {\n '': {\n 'handlers': handler_name_list,\n 'level': 'DEBUG',\n }\n }\n }\n\n logging.config.dictConfig(dict_config)\n" }, { "alpha_fraction": 0.5018450021743774, "alphanum_fraction": 0.5633456110954285, "avg_line_length": 25.225807189941406, "blob_id": "1d3472315a954583c0608aa9b10d41ad37998c2c", "content_id": "e6aeaac2d116b2f413c36179e4e73ab130fb4f74", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 821, "license_type": "permissive", "max_line_length": 85, "num_lines": 31, "path": "/tests/test_json.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# created by restran on 2018/01/22\nfrom __future__ import unicode_literals, absolute_import\nfrom mountains import json\nimport unittest\nimport uuid\nfrom datetime import datetime\n\n\nclass Test(unittest.TestCase):\n def setUp(self):\n pass\n\n def test_json_loads(self):\n a = json.loads('{\"a\": 123, \"b\": \"456\", \"c\": \"中文\"}')\n self.assertEqual(a['a'], 123)\n self.assertEqual(a['b'], '456')\n self.assertEqual(a['c'], '中文')\n\n def test_json_dumps(self):\n data = {\n 'a': uuid.uuid4(),\n 'b': datetime(year=2016, month=10, day=30, hour=12, minute=30, second=30)\n }\n a = json.dumps(data)\n a = json.loads(a)\n self.assertEqual(a['b'], '2016-10-30 12:30:30')\n\n\nif __name__ == '__main__':\n unittest.main()\n" }, { "alpha_fraction": 0.5317646861076355, "alphanum_fraction": 0.5490196347236633, "avg_line_length": 20.428571701049805, "blob_id": "c969a966b6b42f8cdfccd895eb5f543bc527ffc6", "content_id": "b1ae833712798fbc34f9c38dec0b7e3d3b99a9f3", "detected_licenses": [ "MIT" ], "is_generated": 
false, "is_vendor": false, "language": "Python", "length_bytes": 2566, "license_type": "permissive", "max_line_length": 68, "num_lines": 119, "path": "/mountains/encoding/__init__.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2017/8/23\nfrom __future__ import unicode_literals, absolute_import\nimport re\nfrom ..base import text_type, string_types\nimport string\nfrom base64 import b64decode, b32decode\n\n_UTF8_TYPES = (bytes, type(None))\n\n_TO_UNICODE_TYPES = (text_type, type(None))\n\n\ndef utf8(value, encoding=None):\n \"\"\"Converts a string argument to a byte string.\n \"\"\"\n if isinstance(value, _UTF8_TYPES):\n return value\n if not isinstance(value, text_type):\n raise TypeError(\n \"Expected bytes, unicode, or None; got %r\" % type(value)\n )\n\n if encoding is not None:\n return value.encode(encoding)\n\n try:\n return value.encode('utf-8')\n except:\n return value.encode('gbk')\n\n\ndef to_unicode(value, encoding=None):\n \"\"\"Converts a string argument to a unicode string.\n \"\"\"\n if isinstance(value, text_type):\n return value\n elif isinstance(value, type(None)):\n return text_type(value)\n\n if not isinstance(value, bytes):\n raise TypeError(\n \"Expected bytes, unicode, or None; got %r\" % type(value)\n )\n\n if encoding is not None:\n return value.decode(encoding)\n else:\n try:\n value = value.decode('utf-8')\n except:\n try:\n value = value.decode('gbk')\n except:\n pass\n\n return value\n\n\ndef force_text(s, encoding=None):\n \"\"\"\n 强制转成 Unicode\n \"\"\"\n\n return to_unicode(s, encoding)\n\n\ndef force_bytes(s, encoding=None):\n \"\"\"\n 强制转成 bytes\n \"\"\"\n\n return utf8(s, encoding)\n\n\ndef is_base64(s, is_printable=True):\n if len(s) % 4 != 0:\n return False\n\n b64rex = re.compile('^[A-Za-z0-9+/]+[=]{0,2}$', re.MULTILINE)\n if not b64rex.match(s):\n return False\n\n if is_printable:\n try:\n a = b64decode(s.encode()).decode()\n for c in a:\n if c not in string.printable:\n return False\n except:\n return False\n return True\n\n\ndef is_base32(s, is_printable=True):\n if len(s) % 8 != 0:\n return False\n\n rex = re.compile('^[A-Z2-7]+[=]{0,7}$', re.MULTILINE)\n if not rex.match(s):\n return False\n\n if is_printable:\n try:\n a = b32decode(s.encode()).decode()\n for c in a:\n if c not in string.printable:\n return False\n except:\n return False\n return True\n\n\ndef main():\n pass\n\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.5414238572120667, "alphanum_fraction": 0.5459842681884766, "avg_line_length": 22.355030059814453, "blob_id": "8daa1a4f4b0febb3c82fc5fa6ccff07846c82da9", "content_id": "a48c219fde4023c589bf844ace8f4d76ec10e149", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4221, "license_type": "permissive", "max_line_length": 70, "num_lines": 169, "path": "/mountains/file/__init__.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2017/8/23\nfrom __future__ import unicode_literals, absolute_import\n\nimport os\nimport shutil\nfrom collections import deque\n\nfrom .. 
import json\nfrom ..datetime.converter import timestamp2datetime\nfrom ..encoding import force_text, force_bytes\n\n\ndef read_dict(file_name, clear_none=False, encoding='utf-8'):\n \"\"\"\n 读取字典文件\n :param encoding:\n :param clear_none:\n :param file_name:\n :return:\n \"\"\"\n with open(file_name, 'rb') as f:\n data = f.read()\n\n if encoding is not None:\n data = data.decode(encoding)\n\n line_list = data.splitlines()\n data = []\n i = 0\n for line in line_list:\n i += 1\n try:\n line = force_text(line).strip()\n data.append(line)\n except:\n print('read error line %s' % i)\n if clear_none:\n data = [t for t in data if t != '']\n data = deque(data)\n return data\n\n\ndef write_bytes_file(file_name, data):\n with open(file_name, 'wb') as f:\n f.write(force_bytes(data))\n\n\ndef read_bytes_file(file_name):\n with open(file_name, 'rb') as f:\n return f.read()\n\n\ndef write_file(file_name, data):\n \"\"\"\n 写文本文件\n :param file_name:\n :param data:\n :return:\n \"\"\"\n with open(file_name, 'w') as f:\n f.write(data)\n\n\ndef read_file(file_name, encoding='utf-8'):\n \"\"\"\n 读文本文件\n :param encoding:\n :param file_name:\n :return:\n \"\"\"\n with open(file_name, 'rb') as f:\n data = f.read()\n\n if encoding is not None:\n data = data.decode(encoding)\n\n return data\n\n\ndef read_json(file_name):\n try:\n return json.loads(read_bytes_file(file_name))\n except:\n return None\n\n\ndef write_json(file_name, data, indent=None,\n ensure_ascii=True, sort_keys=False, **kwargs):\n with open(file_name, 'wb') as f:\n try:\n data = json.dumps(data, indent=indent,\n ensure_ascii=ensure_ascii,\n sort_keys=sort_keys, **kwargs)\n f.write(force_bytes(data))\n return True\n except:\n return False\n\n\ndef get_file_size(file_path):\n \"\"\"\n 获取文件大小,返回的是字节\n :param file_path:\n :return:\n \"\"\"\n return os.path.getsize(file_path)\n\n\ndef get_file_access_time(file_path):\n \"\"\"\n 获取文件访问时间,返回 datetime 类型\n :param file_path:\n :return:\n \"\"\"\n return timestamp2datetime(os.path.getatime(file_path))\n\n\ndef get_file_create_time(file_path):\n \"\"\"\n 获取文件创建时间,返回 datetime 类型\n :param file_path:\n :return:\n \"\"\"\n return timestamp2datetime(os.path.getctime(file_path))\n\n\ndef get_file_modify_time(file_path):\n \"\"\"\n 获取文件修改时间,返回 datetime 类型\n :param file_path:\n :return:\n \"\"\"\n return timestamp2datetime(os.path.getmtime(file_path))\n\n\ndef copy_files(src_path, dst_path):\n \"\"\"\n :param src_path:\n :param dst_path:\n :return:\n \"\"\"\n abs_src_path = os.path.abspath(src_path)\n abs_dst_path = os.path.abspath(dst_path)\n # 遍历src_path目录下的所有文件\n for root, dirs, files in os.walk(src_path):\n try:\n for f in files:\n src_file_p = os.path.abspath(os.path.join(root, f))\n # 目标文件的完整路径\n dst_file_p = os.path.abspath(os.path.join(\n abs_dst_path, src_file_p[len(abs_src_path) + 1:]))\n # 判断目标文件是否已存在,已存在则改名\n if os.path.exists(dst_file_p):\n continue\n\n try:\n # 判断目标文件所在的文件夹是否存在,不存在则递归创建文件夹\n dst_p = os.path.dirname(dst_file_p)\n if not os.path.exists(dst_p):\n os.makedirs(dst_p)\n\n # 移动文件\n shutil.copy2(src_file_p, dst_file_p)\n except Exception as e:\n print('move file error: {}'.format(e))\n\n except Exception as ex:\n print('error: {}'.format(ex))\n" }, { "alpha_fraction": 0.5979809761047363, "alphanum_fraction": 0.6110451221466064, "avg_line_length": 25.73015785217285, "blob_id": "820f0c8022f19e57f3837ef03f85483d1d6b8314", "content_id": "f0ab8cf70d3cca7c7c25f2f2e71a3d51734aa6fb", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1766, 
"license_type": "permissive", "max_line_length": 82, "num_lines": 63, "path": "/mountains/django/api.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2017/10/13\nfrom __future__ import unicode_literals, absolute_import\n\nimport logging\n\nfrom .. import json\n\ntry:\n from django.http import HttpResponse\nexcept:\n class HttpResponse(object):\n def __init__(*args, **kwargs):\n raise Exception('django is not installed')\n\nlogger = logging.getLogger(__name__)\n\n\ndef http_response_json(dict_data):\n \"\"\"\n 返回json数据\n :param dict_data:\n :return:\n \"\"\"\n\n return HttpResponse(json.dumps(dict_data),\n content_type=\"application/json; charset=utf-8\")\n\n\nclass APIStatusCode(object):\n SUCCESS = 200 # 成功\n FAIL = 400 # 客户端的错误, 例如请求信息不正确\n ERROR = 500 # 服务端的错误, 例如出现异常\n LOGIN_REQUIRED = 401 # 需要登录才能访问\n\n\nclass APIHandler(object):\n @classmethod\n def return_json(cls, code, data, msg):\n try:\n return http_response_json({\n 'code': code, 'data': data, 'msg': msg})\n except Exception as e:\n logger.error(e)\n return http_response_json({\n 'code': APIStatusCode.ERROR, 'data': None,\n 'msg': msg})\n\n @classmethod\n def success(cls, data=None, msg='', code=APIStatusCode.SUCCESS):\n return cls.return_json(code, data, msg)\n\n @classmethod\n def fail(cls, data=None, msg='', code=APIStatusCode.FAIL):\n return cls.return_json(code, data, msg)\n\n @classmethod\n def login_required(cls, data=None, msg='', code=APIStatusCode.LOGIN_REQUIRED):\n return cls.return_json(code, data, msg)\n\n @classmethod\n def error(cls, data=None, msg='', code=APIStatusCode.ERROR):\n return cls.return_json(code, data, msg)\n" }, { "alpha_fraction": 0.6355576515197754, "alphanum_fraction": 0.6462902426719666, "avg_line_length": 26.126583099365234, "blob_id": "8d4300749c72427726712057e9c9d17963b92fcc", "content_id": "cd6aa11fae37a84d7792774b5382562ce8defa0c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2159, "license_type": "permissive", "max_line_length": 72, "num_lines": 79, "path": "/mountains/base.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2017/8/29\nfrom __future__ import unicode_literals, absolute_import\n\nimport os\nimport sys\nimport types\n\nPY2 = sys.version_info[0] == 2\nPY3 = sys.version_info[0] == 3\nPYPY = True if getattr(sys, 'pypy_version_info', None) else False\n\nif PY3:\n string_types = str,\n integer_types = int,\n class_types = type,\n text_type = str\n binary_type = bytes\n long_type = int\n from queue import Queue\n from io import BytesIO, StringIO\n from urllib.parse import urlencode, quote, \\\n quote_plus, urlparse, urlunparse\n\n MAXSIZE = sys.maxsize\nelse:\n string_types = basestring,\n integer_types = (int, long)\n class_types = (type, types.ClassType)\n text_type = unicode\n binary_type = str\n long_type = long\n from cStringIO import StringIO\n from io import BytesIO\n from urllib import urlencode, quote, quote_plus\n from urlparse import urlparse, urlunparse\n from Queue import Queue\n\n# 当前项目所在路径\n__base_path = os.path.dirname(os.path.abspath(__file__))\n\n\ndef iteritems(obj, **kwargs):\n \"\"\"Use this only if compatibility with Python versions before 2.7 is\n required. 
Otherwise, prefer viewitems().\n \"\"\"\n func = getattr(obj, \"iteritems\", None)\n if not func:\n func = obj.items\n return func(**kwargs)\n\n\ndef iterkeys(obj, **kwargs):\n \"\"\"Use this only if compatibility with Python versions before 2.7 is\n required. Otherwise, prefer viewkeys().\n \"\"\"\n func = getattr(obj, \"iterkeys\", None)\n if not func:\n func = obj.keys\n return func(**kwargs)\n\n\ndef itervalues(obj, **kwargs):\n \"\"\"Use this only if compatibility with Python versions before 2.7 is\n required. Otherwise, prefer viewvalues().\n \"\"\"\n func = getattr(obj, \"itervalues\", None)\n if not func:\n func = obj.values\n return func(**kwargs)\n\n\n__all__ = [\n 'PY2', 'PY3', 'PYPY', 'urlencode', 'quote', 'quote_plus',\n 'urlparse', 'urlunparse', 'StringIO',\n 'string_types', 'integer_types', 'class_types', 'text_type',\n 'binary_type', 'long_type', 'BytesIO', '__base_path', 'Queue',\n 'iteritems', 'iterkeys', 'itervalues'\n]\n" }, { "alpha_fraction": 0.6066433787345886, "alphanum_fraction": 0.632867157459259, "avg_line_length": 34.75, "blob_id": "7ae678f54c914e77652df6e881fd0200f0a2f668", "content_id": "2814eb86f389974bc85d74368fa892bd608eea4b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 572, "license_type": "permissive", "max_line_length": 76, "num_lines": 16, "path": "/mountains/__init__.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2017/7/26\nfrom __future__ import unicode_literals, absolute_import\n\nfrom .base import PY2, PY3, PYPY, string_types, integer_types, \\\n class_types, text_type, binary_type, long_type, BytesIO, StringIO\nfrom .encoding import force_text, force_bytes\n\n__author__ = \"restran <[email protected]>\"\n__version__ = \"0.8.7\"\n\n__all__ = [\n '__author__', '__version__', 'PY2', 'PY3', 'PYPY',\n 'string_types', 'integer_types', 'class_types', 'text_type', 'StringIO',\n 'binary_type', 'long_type', 'BytesIO', 'force_text', 'force_bytes',\n]\n" }, { "alpha_fraction": 0.6120996475219727, "alphanum_fraction": 0.6316726207733154, "avg_line_length": 20.615385055541992, "blob_id": "0f2f9bfbb949bfb63da9410fdbc82e001f0c82e0", "content_id": "ae5fe1a41f5e578d81874874c8352f854ff1e265", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 562, "license_type": "permissive", "max_line_length": 56, "num_lines": 26, "path": "/tests/test_file.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2018/11/13\nfrom __future__ import unicode_literals, absolute_import\nfrom mountains import file\nimport unittest\nimport uuid\nfrom datetime import datetime\n\n\nclass Test(unittest.TestCase):\n def setUp(self):\n pass\n\n # def test_json_loads(self):\n # file.read_json('data/test.json')\n\n def test_json_dumps(self):\n pass\n\n # def test_file_size(self):\n # size = file.get_file_size('data/test.txt')\n # self.assertEqual(size, 57)\n\n\nif __name__ == '__main__':\n unittest.main()\n" }, { "alpha_fraction": 0.4666211009025574, "alphanum_fraction": 0.5299612879753113, "avg_line_length": 24.36994171142578, "blob_id": "ad957af0dedc4ec5859161e44b58e3c18707483f", "content_id": "c39fe18287677f7f48e1ddb918c69cd84be6515a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4533, "license_type": "permissive", "max_line_length": 85, "num_lines": 173, 
"path": "/mountains/gis/evil_transform.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# created by restran on 2019/04/08\nfrom __future__ import unicode_literals, absolute_import\n\n\"\"\"\nOriginal from https://github.com/googollee/eviltransform\n地球坐标(WGS-84)\n火星坐标(GCJ-2)\nGCJ-02坐标用在谷歌地图,高德地图、腾讯地图等中国地图服务。\n百度地图要在GCJ-02基础上再加转换\n\"\"\"\n\nimport math\n\n__all__ = ['wgs2gcj', 'gcj2wgs', 'gcj2wgs_exact',\n 'distance', 'gcj2bd', 'bd2gcj', 'wgs2bd', 'bd2wgs']\n\nearth_r = 6378137.0\n\n\ndef out_of_china(lat, lng):\n return not (72.004 <= lng <= 137.8347 and 0.8293 <= lat <= 55.8271)\n\n\ndef transform(x, y):\n xy = x * y\n abs_x = math.sqrt(abs(x))\n x_pi = x * math.pi\n y_pi = y * math.pi\n d = 20.0 * math.sin(6.0 * x_pi) + 20.0 * math.sin(2.0 * x_pi)\n\n lat = d\n lng = d\n\n lat += 20.0 * math.sin(y_pi) + 40.0 * math.sin(y_pi / 3.0)\n lng += 20.0 * math.sin(x_pi) + 40.0 * math.sin(x_pi / 3.0)\n\n lat += 160.0 * math.sin(y_pi / 12.0) + 320 * math.sin(y_pi / 30.0)\n lng += 150.0 * math.sin(x_pi / 12.0) + 300.0 * math.sin(x_pi / 30.0)\n\n lat *= 2.0 / 3.0\n lng *= 2.0 / 3.0\n\n lat += -100.0 + 2.0 * x + 3.0 * y + 0.2 * y * y + 0.1 * xy + 0.2 * abs_x\n lng += 300.0 + x + 2.0 * y + 0.1 * x * x + 0.1 * xy + 0.1 * abs_x\n\n return lat, lng\n\n\ndef delta(lat, lng):\n ee = 0.00669342162296594323\n d_lat, d_lng = transform(lng - 105.0, lat - 35.0)\n rad_lat = lat / 180.0 * math.pi\n magic = math.sin(rad_lat)\n magic = 1 - ee * magic * magic\n sqrt_magic = math.sqrt(magic)\n d_lat = (d_lat * 180.0) / ((earth_r * (1 - ee)) / (magic * sqrt_magic) * math.pi)\n d_lng = (d_lng * 180.0) / (earth_r / sqrt_magic * math.cos(rad_lat) * math.pi)\n return d_lat, d_lng\n\n\ndef wgs2gcj(wgs_lat, wgs_lng):\n if out_of_china(wgs_lat, wgs_lng):\n return wgs_lat, wgs_lng\n else:\n d_lat, d_lng = delta(wgs_lat, wgs_lng)\n return wgs_lat + d_lat, wgs_lng + d_lng\n\n\ndef gcj2wgs(gcj_lat, gcj_lng):\n if out_of_china(gcj_lat, gcj_lng):\n return gcj_lat, gcj_lng\n else:\n d_lat, d_lng = delta(gcj_lat, gcj_lng)\n return gcj_lat - d_lat, gcj_lng - d_lng\n\n\ndef gcj2wgs_exact(gcj_lat, gcj_lng):\n init_delta = 0.01\n threshold = 0.000001\n d_lat = d_lng = init_delta\n m_lat = gcj_lat - d_lat\n m_lng = gcj_lng - d_lng\n p_lat = gcj_lat + d_lat\n p_lng = gcj_lng + d_lng\n for i in range(30):\n wgs_lat = (m_lat + p_lat) / 2\n wgs_lng = (m_lng + p_lng) / 2\n tmp_lat, tmp_lng = wgs2gcj(wgs_lat, wgs_lng)\n d_lat = tmp_lat - gcj_lat\n d_lng = tmp_lng - gcj_lng\n if abs(d_lat) < threshold and abs(d_lng) < threshold:\n return wgs_lat, wgs_lng\n if d_lat > 0:\n p_lat = wgs_lat\n else:\n m_lat = wgs_lat\n if d_lng > 0:\n p_lng = wgs_lng\n else:\n m_lng = wgs_lng\n return wgs_lat, wgs_lng\n\n\ndef distance(lat_a, lng_a, lat_b, lng_b):\n \"\"\"\n 计算两个经纬度之间的距离\n :param lat_a:\n :param lng_a:\n :param lat_b:\n :param lng_b:\n :return:\n \"\"\"\n pi180 = math.pi / 180\n arc_lat_a = lat_a * pi180\n arc_lat_b = lat_b * pi180\n x = (math.cos(arc_lat_a) * math.cos(arc_lat_b) *\n math.cos((lng_a - lng_b) * pi180))\n y = math.sin(arc_lat_a) * math.sin(arc_lat_b)\n s = x + y\n if s > 1:\n s = 1\n if s < -1:\n s = -1\n alpha = math.acos(s)\n d = alpha * earth_r\n return d\n\n\ndef gcj2bd(gcj_lat, gcj_lng):\n \"\"\"\n GCJ-2转百度\n :param gcj_lat:\n :param gcj_lng:\n :return:\n \"\"\"\n if out_of_china(gcj_lat, gcj_lng):\n return gcj_lat, gcj_lng\n\n x = gcj_lng\n y = gcj_lat\n z = math.hypot(x, y) + 0.00002 * math.sin(y * math.pi)\n theta = math.atan2(y, x) + 0.000003 * math.cos(x * math.pi)\n bd_lng = 
z * math.cos(theta) + 0.0065\n bd_lat = z * math.sin(theta) + 0.006\n return bd_lat, bd_lng\n\n\ndef bd2gcj(bd_lat, bd_lng):\n \"\"\"\n 百度转GCJ-2\n :param bd_lat:\n :param bd_lng:\n :return:\n \"\"\"\n if out_of_china(bd_lat, bd_lng):\n return bd_lat, bd_lng\n\n x = bd_lng - 0.0065\n y = bd_lat - 0.006\n z = math.hypot(x, y) - 0.00002 * math.sin(y * math.pi)\n theta = math.atan2(y, x) - 0.000003 * math.cos(x * math.pi)\n gcj_lng = z * math.cos(theta)\n gcj_lat = z * math.sin(theta)\n return gcj_lat, gcj_lng\n\n\ndef wgs2bd(wgs_lat, wgs_lng):\n return gcj2bd(*wgs2gcj(wgs_lat, wgs_lng))\n\n\ndef bd2wgs(bd_lat, bd_lng):\n return gcj2wgs(*bd2gcj(bd_lat, bd_lng))\n" }, { "alpha_fraction": 0.5761024355888367, "alphanum_fraction": 0.6187766790390015, "avg_line_length": 24.10714340209961, "blob_id": "ba0248d322e6f6296038baa0d7efc77177389279", "content_id": "800dad1ed55cb9e5b04a6a0bf16b4c57cd53c5f1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 703, "license_type": "permissive", "max_line_length": 61, "num_lines": 28, "path": "/tests/test_utils.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2017/10/13\nfrom __future__ import unicode_literals, absolute_import\n\nimport logging\nimport unittest\n\nfrom mountains.utils import any_in, any_none\n\nlogger = logging.getLogger(__name__)\n\n\nclass UtilsTest(unittest.TestCase):\n def setUp(self):\n pass\n\n def test_any_none(self):\n self.assertEqual(any_none(1, 2, 3, None), True)\n self.assertEqual(any_none(1, 2, 3), False)\n\n def test_any_in(self):\n self.assertEqual(any_in('123abc123', 'a', 'b'), True)\n self.assertEqual(any_in('123ab123', 'a', 'c'), True)\n self.assertEqual(any_in('123', 'a', 'b', 'c'), False)\n\n\nif __name__ == '__main__':\n unittest.main()\n" }, { "alpha_fraction": 0.6238806247711182, "alphanum_fraction": 0.6537313461303711, "avg_line_length": 26.91666603088379, "blob_id": "08ee1773f89f0c51bb5c86160c4bd82db439eaef", "content_id": "d7faa529aa62a5509b93fa453b5f78a8ff8bac58", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 335, "license_type": "permissive", "max_line_length": 56, "num_lines": 12, "path": "/mountains/utils/converter.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# created by restran on 2018/01/22\nfrom __future__ import unicode_literals, absolute_import\n\n\ndef str2int(number_str, default_value=None):\n if number_str is None or number_str == '':\n return default_value\n try:\n return int(number_str)\n except Exception as e:\n return default_value\n" }, { "alpha_fraction": 0.521558403968811, "alphanum_fraction": 0.5257156491279602, "avg_line_length": 30.181482315063477, "blob_id": "57d064b7af640d693233b156f90d5b3815e8ecf3", "content_id": "018f663e707c55ff6322a1176b2aca92e5266a81", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8865, "license_type": "permissive", "max_line_length": 92, "num_lines": 270, "path": "/mountains/ssh/__init__.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2018/6/5\nfrom __future__ import unicode_literals, absolute_import\n\nimport logging\nimport time\nfrom ..encoding import force_text\n\ntry:\n import paramiko\nexcept ImportError:\n raise Exception('paramiko is not installed')\n\nlogger = 
logging.getLogger(__name__)\n\n\nclass SSHClient(object):\n def __init__(self, host, port, username, password=None, key_file=None,\n key_pass=None, show_output=False, manual_connect=False, timeout=10):\n \"\"\"\n\n :param host:\n :param port:\n :param username:\n :param password:\n :param key_file:\n :param key_pass:\n :param show_output: 是否显示命令的执行结果\n :param manual_connect:\n :param timeout:\n \"\"\"\n self.is_root = False\n self.host = host\n self.port = int(port)\n self.username = username\n self.password = password\n self.ssh_session = paramiko.SSHClient()\n self.ssh_session.load_system_host_keys()\n self.key_file = key_file\n self.key_pass = key_pass\n self.ssh_session.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n self.private_key = None\n self.sftp = None\n self.show_output = show_output\n self.timeout = timeout\n if not manual_connect:\n use_key = self.key_file is not None\n self.ssh_connect(password, use_key)\n\n def ssh_connect(self, password=None, use_key=False):\n if not use_key:\n if password is None:\n password = self.password\n\n self.ssh_session.connect(hostname=self.host, port=self.port,\n username=self.username, password=password,\n look_for_keys=False, timeout=self.timeout)\n else:\n self.private_key = paramiko.RSAKey.from_private_key_file(\n self.key_file, self.key_pass)\n self.ssh_session.connect(hostname=self.host, port=self.port,\n username=self.username, pkey=self.private_key,\n timeout=self.timeout)\n\n def get_sftp(self, refresh=False):\n if self.sftp is None or refresh:\n self.sftp = paramiko.SFTPClient.from_transport(self.ssh_session.get_transport())\n\n return self.sftp\n\n def dispose(self):\n try:\n if self.sftp is not None:\n self.sftp.close()\n except:\n pass\n\n try:\n if self.ssh_session is not None:\n self.ssh_session.close()\n except:\n pass\n\n @classmethod\n def clean_ssh_line_output(cls, line):\n line = line.strip()\n split_list = [t for t in line.split(' ') if t != '']\n return split_list\n\n def run(self, cmd):\n stdin, stdout, stderr = self.ssh_session.exec_command(cmd)\n # stdin这个是输入的命令,stdout这个是命令的正确返回,stderr这个是命令的错误返回\n out = stdout.readlines()\n err = stderr.readlines()\n result = []\n if isinstance(out, list):\n result.extend(out)\n if isinstance(err, list):\n result.extend(err)\n\n if self.show_output:\n logger.info(''.join(result))\n else:\n logger.debug(''.join(result))\n return ''.join(result).strip()\n\n def interactive_run(self, cmd):\n stdin, stdout, stderr = self.ssh_session.exec_command(cmd)\n return stdin, stdout, stderr\n\n def run_expect_command(self, cmd, expect_end=None, timeout=3, wait_seconds=2):\n \"\"\"\n 执行 shell 命令并获取返回结果\n :param timeout:\n :param wait_seconds:\n :param cmd:\n :param expect_end:\n :return:\n \"\"\"\n shell = self.ssh_session.invoke_shell()\n last_time = int(time.time())\n\n if not cmd.endswith('\\n'):\n cmd += '\\n'\n\n def receive():\n buff = ''\n if expect_end is None:\n buff = shell.recv(9999)\n else:\n while not buff.endswith(expect_end):\n resp = shell.recv(9999)\n buff += force_text(resp)\n now = int(time.time())\n\n if now - last_time > timeout:\n break\n\n buff = force_text(buff)\n if self.show_output:\n logger.info(buff)\n return buff\n if self.show_output:\n logger.info(cmd)\n shell.send(cmd)\n time.sleep(wait_seconds)\n return receive()\n\n def run_nohup(self, cmd, working_dir=None):\n \"\"\"\n 使用重定向输出,正常输出和错误信息都不显示,不会创建 nuhup.out,>/dev/null 2>&1\n :param cmd:\n :param working_dir: 当前的工作目录,如果没有 home 目录,会因为一些原因导致运行失败,比如没有无法创建 nohup.out\n :return:\n \"\"\"\n cmd = 'nohup %s >/dev/null 
2>&1 &\\n\\n' % cmd\n if working_dir is not None:\n cmd = 'cd {}; {}'.format(working_dir, cmd)\n\n self.run_expect_command(cmd)\n\n def check_root(self):\n result = self.run('id')\n return \"uid=0\" in result, result\n\n def check_sudo(self):\n \"\"\"\n TODO 有问题,应该检查用户组\n :return:\n \"\"\"\n stdin, stdout, stderr = self.ssh_session.exec_command('sudo whoami')\n stdin.write(\"%s\\n\" % self.password)\n stdin.write(\"\\n\\n\\n\\n\\n\\n\\n\\n\")\n stdout.read()\n error_message = stderr.read()[:-1]\n if b\"not in the sudoers file\" in error_message:\n logger.info('当前用户不在 sudo 组')\n return False\n else:\n logger.info(error_message)\n return True\n\n def write_ssh_key(self, pub_key):\n logger.info('写 SSH 公钥')\n try:\n id_rsa = open(pub_key, 'r').read().rstrip('\\n')\n self.run(\"mkdir -p ~/.ssh\")\n time.sleep(1)\n self.run(\"chmod 700 ~/.ssh\")\n cmd = \"\"\"echo \"{}\" >> ~/.ssh/authorized_keys\"\"\".format(id_rsa)\n self.run(cmd)\n cmd = \"chmod 600 ~/.ssh/authorized_keys\"\n self.run(cmd)\n return True\n except Exception as e:\n logger.error(e)\n return False\n\n def change_password(self, new_password):\n \"\"\"\n 一句话修改密码,但是只有 root 才有权限 echo username:new_password | chpasswd\n :param new_password:\n :return:\n \"\"\"\n is_root = self.check_root()\n if is_root[0]:\n self.is_root = True\n logger.warning(\"[+] Root user detected!\")\n else:\n self.is_root = False\n logger.warning(\"[+] Not a root user! (%s)\" % is_root[1])\n\n stdin, stdout, stderr = self.interactive_run('passwd')\n stdin.write(\"%s\\n\" % self.password)\n stdin.write(\"%s\\n\" % new_password)\n stdin.write(\"%s\\n\" % new_password)\n # 通过不停的回车,跳过密码输入错误的重试,避免卡在 stdout.read()\n stdin.write(\"\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\")\n stdout.read()\n error_message = stderr.read()[:-1]\n if b'success' in error_message:\n self.password = new_password\n logger.info('密码修改成功')\n return True\n elif b'unchanged' in error_message:\n logger.info('密码未修改')\n self.password = new_password\n logger.info(error_message)\n return True\n elif b'choose a longer password' in error_message:\n logger.info('密码长度不符合要求')\n return False\n else:\n logger.info(error_message)\n logger.info('密码修改失败')\n return False\n\n def put(self, local_file, remote_file):\n \"\"\"\n 上传文件\n :param local_file:\n :param remote_file:\n :return:\n \"\"\"\n sftp = self.get_sftp()\n try:\n sftp.put(local_file, remote_file)\n return True\n except Exception as e:\n logger.error('上传文件失败')\n logger.error('remote: %s, local: %s' % (remote_file, local_file))\n logger.error(e)\n return False\n\n def get(self, remote_file, local_file):\n \"\"\"\n 下载文件\n :param remote_file:\n :param local_file:\n :return:\n \"\"\"\n sftp = self.get_sftp()\n try:\n sftp.get(remote_file, local_file)\n return True\n except Exception as e:\n logger.error('下载文件失败')\n logger.error('remote: %s, local: %s' % (remote_file, local_file))\n logger.error(e)\n return False\n" }, { "alpha_fraction": 0.6284796595573425, "alphanum_fraction": 0.6429336071014404, "avg_line_length": 30.6610164642334, "blob_id": "69bd76147f0b200f4de9142c09bd151859f85157", "content_id": "6b8e5fe9ebd5c99b1199b6203b4d5290f4d6de27", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2016, "license_type": "permissive", "max_line_length": 88, "num_lines": 59, "path": "/setup.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2017/7/27\nfrom __future__ import unicode_literals\n\nimport sys\n\nfrom future.utils import 
bytes_to_native_str\nfrom setuptools import setup, find_packages\n\nfrom mountains import __version__\n\nkwargs = {\n 'packages': find_packages(),\n # 还需要创建一个 MANIFEST.in 的文件,然后将这些数据也放在那里\n # package_data 添加了配置,Python2会报错\n 'package_data': {}\n}\n\ninstall_requires = [\n 'requests',\n 'simplejson'\n]\n\nif sys.version_info < (3, 0):\n install_requires.append('futures')\n\nkwargs['install_requires'] = install_requires\nreadme_file = 'README.md'\nlong_description = open(readme_file, 'rb').read()\n\nsetup(\n name='mountains', # 文件名\n version=__version__, # 版本(每次更新上传 pypi 需要修改)\n description=\"a util collection for python developing\",\n long_description=bytes_to_native_str(long_description), # 放README.md文件,方便在 pypi 页展示\n long_description_content_type='text/markdown',\n classifiers=[\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9',\n 'Programming Language :: Python :: Implementation :: CPython',\n 'Programming Language :: Python :: Implementation :: PyPy',\n ], # Get strings from http://pypi.python.org/pypi?:action=list_classifiers\n keywords='python utils', # 关键字\n author='restran', # 用户名\n author_email='[email protected]', # 邮箱\n url='https://github.com/restran/mountains', # github上的地址\n license='MIT', # 遵循的协议\n include_package_data=True,\n zip_safe=True,\n platforms='any',\n **kwargs\n)\n" }, { "alpha_fraction": 0.584330141544342, "alphanum_fraction": 0.5946969985961914, "avg_line_length": 33.83333206176758, "blob_id": "f9df58646b11bb2fbfded887515a461ef3eb6cfe", "content_id": "2f27c5d5a66ff6e37006ba1b0268e8616d1183e9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5516, "license_type": "permissive", "max_line_length": 122, "num_lines": 144, "path": "/mountains/utils/email.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# created by restran on 2016/07/02\nfrom __future__ import unicode_literals, absolute_import, print_function\n\nimport logging\nimport os\nimport smtplib\nfrom email.mime.multipart import MIMEMultipart\nfrom email.mime.text import MIMEText\nfrom email.utils import formatdate\nfrom email.header import make_header\n\nfrom .. 
import BytesIO, PY2\n\nlogger = logging.getLogger(__name__)\n\n\nclass EmailHandler(object):\n def __init__(self, mail_from, password, smtp_server, smtp_port=25):\n \"\"\"\n :param mail_from: 发件人\n :param password: 发件人密码\n :param smtp_server: SMTP服务器地址\n :param smtp_port: SMTP服务器端口,SSL 方式是 465\n :return:\n \"\"\"\n\n self.mail_from = mail_from\n self.password = password\n self.smtp_server = smtp_server\n self.smtp_port = smtp_port\n\n def send_mail_ssl(self, mail_to_list, subject, content, file_name_list):\n self.do_send_mail(True, mail_to_list, subject, content, file_name_list)\n\n def send_mail(self, mail_to_list, subject, content, file_name_list):\n self.do_send_mail(False, mail_to_list, subject, content, file_name_list)\n\n def do_send_mail(self, is_ssl, mail_to_list, subject, content, file_name_list):\n \"\"\"\n 发送邮件\n :param is_ssl: 使用SSL的方式发生\n :param mail_to_list: 收件人列表\n :param subject: 邮件主题\n :param content: 邮件正文\n :param file_name_list: 附近的文件路径列表\n :return:\n \"\"\"\n if is_ssl:\n smtp = smtplib.SMTP_SSL(self.smtp_server, self.smtp_port)\n else:\n smtp = smtplib.SMTP(self.smtp_server, self.smtp_port)\n smtp.ehlo(name='foxmail')\n # 调用login时,如果没有调用过 echlo 会自动调用该方法,但是默认使用的name为计算机名\n # 如果计算机名有中文,就会返回503方法未实现的异常\n smtp.login(self.mail_from, self.password)\n msg = MIMEMultipart()\n msg['From'] = self.mail_from\n msg['To'] = ', '.join(mail_to_list)\n msg['Date'] = formatdate(localtime=True)\n msg['Subject'] = subject\n # 如果 content 是 html,则需要设置 _subtype='html'\n # 默认情况下 _subtype='plain',即纯文本\n msg.attach(MIMEText(content, _charset='UTF-8'))\n for fn in file_name_list:\n part = MIMEText(open(fn, 'rb').read(), 'base64', 'UTF-8')\n part[\"Content-Type\"] = 'application/octet-stream'\n basename = os.path.basename(fn)\n # 解决一些邮箱在手机客户端上,附件文件名丢失的问题\n part[\"Content-Type\"] = 'application/octet-stream;name=\"%s\"'% make_header([(basename,'UTF-8')]).encode('UTF-8')\n part[\"Content-Disposition\"] = 'attachment;filename=\"%s\"' % make_header([(basename, 'UTF-8')]).encode('UTF-8')\n msg.attach(part)\n smtp.sendmail(self.mail_from, mail_to_list, msg.as_string())\n smtp.close()\n\n\nclass PostfixEmailHandler(object):\n def __init__(self, mail_from, smtp_server='localhost', smtp_port=25):\n self.mail_from = mail_from\n self.smtp_server = smtp_server\n self.smtp_port = smtp_port\n\n def send_email(self, mail_to, subject, content, content_type='plain', files=None):\n \"\"\"\n :param content_type: 如果 text 是html,则需要设置 _subtype='html'\n :param mail_to:\n :param subject:\n :param content:\n :param files: (f_name, f_data)\n :return:\n \"\"\"\n assert type(mail_to) == list\n server = self.smtp_server\n if files is None:\n files = []\n\n msg = MIMEMultipart()\n msg['From'] = self.mail_from\n msg['To'] = ', '.join(mail_to)\n msg['Date'] = formatdate(localtime=True)\n msg['Subject'] = subject\n # 如果 text 是html,则需要设置 _subtype='html'\n # 默认情况下 _subtype='plain',即纯文本\n msg.attach(MIMEText(content, _subtype=content_type, _charset='utf-8'))\n\n for fn, fd in files:\n part = MIMEText(fd, 'base64', 'utf-8')\n part[\"Content-Type\"] = 'application/octet-stream'\n basename = fn\n if PY2:\n basename = basename.encode('gb2312')\n # 文件名使用 gb2312 编码,否则会没有附件\n part.add_header('Content-Disposition', 'attachment', filename=('gb2312', '', basename))\n msg.attach(part)\n smtp = smtplib.SMTP(server, port=self.smtp_port)\n smtp.sendmail(self.mail_from, mail_to, msg.as_string())\n smtp.close()\n\n\ndef main():\n handler = EmailHandler('[email protected]', 'password', 'smtp.example.com', 465)\n mail_to_list = ['[email 
protected]']\n subject = 'Python 发送邮件测试'\n content = '这是用 Python 自动发送的邮件,请勿回复'\n # 附件存放在当前文件夹\n file_name_list = ['test.rar']\n handler.send_mail_ssl(mail_to_list, subject, content, file_name_list)\n print('邮件发送成功')\n\n mail_to_list = ['[email protected]']\n subject = 'Python 发送邮件测试'\n content = '这是用 Python 自动发送的邮件,请勿回复'\n bio = BytesIO()\n bio.write(b'123')\n bio.getvalue()\n files = [\n ('test中文3.txt', bio.getvalue()),\n ]\n handler = PostfixEmailHandler('[email protected]')\n handler.send_email(mail_to_list, subject, content, files=files)\n\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.5466509461402893, "alphanum_fraction": 0.5586329102516174, "avg_line_length": 25.10769271850586, "blob_id": "7726096cdafba8b878f1328098b8d1bd1d632147", "content_id": "983b9b78974213f54c3392798f9362663ca3d077", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5379, "license_type": "permissive", "max_line_length": 136, "num_lines": 195, "path": "/mountains/http/__init__.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2017/8/23\nfrom __future__ import unicode_literals, absolute_import\n\nimport os\nimport random\n\nimport requests\n\nfrom ..base import __base_path\nfrom ..file import read_dict\nfrom ..encoding import force_bytes\n\nDEFAULT_USER_AGENT = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2983.0 Safari/537.36'\n\nGLOBAL_USER_AGENTS = {}\n\nUSER_AGENT_DATA_PATH = os.path.join(__base_path, 'http/data/user_agents.txt')\nMOBILE_USER_AGENT_DATA_PATH = os.path.join(__base_path, 'http/data/mobile_ua.txt')\n\n\ndef random_agent(agent_type='pc'):\n \"\"\"\n 随机获取一个 User-Agent\n :return:\n \"\"\"\n agent_type = agent_type.lower().strip()\n if agent_type in ('wexin', 'wx'):\n agent_type = 'wechat'\n\n if agent_type in ('ios',):\n agent_type = 'iphone'\n\n global GLOBAL_USER_AGENTS\n if agent_type not in GLOBAL_USER_AGENTS:\n if agent_type == 'pc':\n GLOBAL_USER_AGENTS[agent_type] = read_dict(USER_AGENT_DATA_PATH)\n elif agent_type in ('mobile', 'wechat', 'android', 'iphone', 'alipay'):\n if 'mobile' not in GLOBAL_USER_AGENTS:\n GLOBAL_USER_AGENTS['mobile'] = list(read_dict(MOBILE_USER_AGENT_DATA_PATH))\n mobile_data = GLOBAL_USER_AGENTS['mobile']\n\n if agent_type == 'wechat':\n GLOBAL_USER_AGENTS[agent_type] = [t for t in mobile_data if 'MicroMessenger' in t]\n elif agent_type == 'alipay':\n GLOBAL_USER_AGENTS[agent_type] = [t for t in mobile_data if 'Alipay' in t]\n elif agent_type == 'android':\n GLOBAL_USER_AGENTS[agent_type] = [t for t in mobile_data if 'Android' in t]\n elif agent_type == 'iphone':\n GLOBAL_USER_AGENTS[agent_type] = [t for t in mobile_data if 'iPhone' in t]\n else:\n GLOBAL_USER_AGENTS[agent_type] = mobile_data\n else:\n return DEFAULT_USER_AGENT\n\n return random.choice(GLOBAL_USER_AGENTS[agent_type])\n\n\ndef random_wx_agent():\n \"\"\"\n 返回一个微信的UserAgent\n :return:\n \"\"\"\n return random_agent('wechat')\n\n\ndef random_mobile_agent():\n \"\"\"\n 返回一个手机端的UserAgent\n :return:\n \"\"\"\n return random_agent('mobile')\n\n\ndef request(method, url, headers=None, data=None, session=None):\n \"\"\"\n :type session requests.session\n :param method:\n :param url:\n :param headers:\n :param data:\n :param session:\n :return:\n \"\"\"\n base_headers = {\n 'User-Agent': random_agent()\n }\n if headers is None:\n headers = {}\n\n base_headers.update(headers)\n\n if 'Content-Length' in 
headers:\n del base_headers['Content-Length']\n\n headers = base_headers\n if session is not None:\n req = session.request\n else:\n req = requests.request\n\n r = req(method, url, headers=headers, data=data)\n return r\n\n\ndef read_request(file_name, **params):\n \"\"\"\n 从文件中读取请求头,并根据格式化字符串模板,进行字符串格式化\n :param file_name:\n :param params:\n :return:\n \"\"\"\n with open(file_name, 'r') as f:\n data = f.read()\n return read_request_from_str(data, **params)\n\n\ndef read_request_from_str(data, **params):\n \"\"\"\n 从字符串中读取请求头,并根据格式化字符串模板,进行字符串格式化\n :param data:\n :param params:\n :return:\n \"\"\"\n method, uri = None, None\n headers = {}\n host = ''\n\n try:\n split_list = data.split('\\n\\n')\n headers_text = split_list[0]\n body = '\\n\\n'.join(split_list[1:])\n except:\n headers_text = data\n body = ''\n\n body = force_bytes(body)\n for k, v in params.items():\n body = body.replace(b'{%s}' % force_bytes(k), force_bytes(v))\n\n header_list = headers_text.split('\\n')\n\n for i, line in enumerate(header_list):\n line = line.strip()\n if line.strip() == '':\n continue\n\n line = line.format(**params)\n if i == 0:\n # 至多3个\n split_line = line.strip().split(' ')\n method, uri, _ = split_line[0], ' '.join(split_line[1:-1]), split_line[-1]\n else:\n # 至多2个\n header, value = line.split(':', 1)\n header = header.strip()\n value = value.strip()\n headers[header] = value\n if header.lower() == 'host':\n host = value\n\n return headers, method, uri, host, body\n\n\ndef query_str_2_dict(query_str):\n \"\"\"\n 将查询字符串,转换成字典\n a=123&b=456\n {'a': '123', 'b': '456'}\n :param query_str:\n :return:\n \"\"\"\n if query_str:\n query_list = query_str.split('&')\n query_dict = {}\n for t in query_list:\n # 避免在非法查询字符串的情况下报错\n # 例如未对url参数转义\n try:\n x = t.split('=')\n query_dict[x[0]] = x[1]\n except:\n pass\n else:\n query_dict = {}\n return query_dict\n\n\ndef raw_headers_to_dict(raw_headers):\n \"\"\"\n 通过原生请求头获取请求头字典\n :param raw_headers: {str} 浏览器请求头\n :return: {dict} headers\n \"\"\"\n return dict(line.split(\": \", 1) for line in raw_headers.split(\"\\n\") if ': ' in line)\n" }, { "alpha_fraction": 0.5658578872680664, "alphanum_fraction": 0.5987868309020996, "avg_line_length": 22.5510196685791, "blob_id": "d54941cd524af36efbcffd460658a2e27ed2c53d", "content_id": "c06eae8d17a1cc77bcd9fb60a37c19f5eb20cb86", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1154, "license_type": "permissive", "max_line_length": 84, "num_lines": 49, "path": "/mountains/logging/terminal.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2018/9/9\nfrom __future__ import unicode_literals, absolute_import\nimport sys\nfrom .terminal_size import get_terminal_size\n\nTERMINAL_SIZE = get_terminal_size()\n\n\ndef terminal_pos(y, x):\n return '\\x1b[%d;%dH' % (y, x)\n\n\ndef print_on_terminal_bottom(text):\n print('%s%s' % (terminal_pos(TERMINAL_SIZE[0], TERMINAL_SIZE[1]), text), end='')\n\n\ndef print_on_terminal_fix(text):\n sys.stdout.write(\"%s\\r\" % text)\n sys.stdout.flush()\n\n\nclass ColorConsole(object):\n GREEN = \"\\033[92m\"\n BLUE = \"\\033[94m\"\n BOLD = \"\\033[1m\"\n YELLOW = \"\\033[93m\"\n RED = \"\\033[91m\"\n END = \"\\033[0m\"\n\n @classmethod\n def green(cls, message):\n return '%s%s%s' % (cls.GREEN, message, cls.END)\n\n @classmethod\n def blue(cls, message):\n return '%s%s%s' % (cls.BLUE, message, cls.END)\n\n @classmethod\n def red(cls, message):\n return '%s%s%s' % (cls.RED, 
message, cls.END)\n\n @classmethod\n def yellow(cls, message):\n return '%s%s%s' % (cls.YELLOW, message, cls.END)\n\n @classmethod\n def bold(cls, message):\n return '%s%s%s' % (cls.BOLD, message, cls.END)\n" }, { "alpha_fraction": 0.6209795475006104, "alphanum_fraction": 0.6527777910232544, "avg_line_length": 32.77777862548828, "blob_id": "dde2f9bb8b9e36c421d642151d7f09a2b6108b72", "content_id": "474f2c970944fa2ca21e292da46aa905db764629", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2736, "license_type": "permissive", "max_line_length": 86, "num_lines": 81, "path": "/tests/test_datetime.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# created by restran on 2018/01/22\nfrom __future__ import unicode_literals, absolute_import\n\nimport time\nimport unittest\nfrom datetime import datetime\nfrom mountains.datetime import converter\n\n\nclass DateTimeConverterTest(unittest.TestCase):\n def setUp(self):\n self.date_str = '2016-10-30 12:30:30'\n self.dt = datetime(year=2016, month=10, day=30, hour=12, minute=30, second=30)\n self.t = self.dt.timetuple()\n self.ts = int(time.mktime(self.t))\n self.ts_ms = int(time.mktime(self.t) * 1000)\n\n def test_str2datetime(self):\n dt = converter.str2datetime(self.date_str)\n self.assertEqual(dt, self.dt)\n\n def test_str2time(self):\n t = converter.str2time(self.date_str)\n self.assertEqual(t[0:6], self.t[0:6])\n\n def test_str2timestamp(self):\n ts = converter.str2timestamp(self.date_str)\n self.assertEqual(ts, self.ts)\n ts = converter.str2timestamp(self.date_str, millisecond=True)\n self.assertEqual(ts, self.ts_ms)\n\n def test_datetime2str(self):\n s = converter.datetime2str(self.dt)\n self.assertEqual(s, self.date_str)\n\n def test_datetime2time(self):\n t = converter.datetime2time(self.dt)\n self.assertEqual(t[0:6], self.t[0:6])\n\n def test_datetime2timestamp(self):\n ts = converter.datetime2timestamp(self.dt)\n self.assertEqual(ts, self.ts)\n ts = converter.datetime2timestamp(self.dt, millisecond=True)\n self.assertEqual(ts, self.ts_ms)\n\n def test_time2str(self):\n s = converter.time2str(self.t)\n self.assertEqual(s, self.date_str)\n\n def test_time2datetime(self):\n dt = converter.time2datetime(self.t)\n self.assertEqual(dt, self.dt)\n\n def test_time2timestamp(self):\n ts = converter.time2timestamp(self.t)\n self.assertEqual(ts, self.ts)\n ts = converter.time2timestamp(self.t, millisecond=True)\n self.assertEqual(ts, self.ts_ms)\n\n def test_timestamp2datetime(self):\n dt = converter.timestamp2datetime(self.ts)\n self.assertEqual(dt, self.dt)\n dt = converter.timestamp2datetime(self.ts_ms, millisecond=True)\n self.assertEqual(dt, self.dt)\n\n def test_timestamp2time(self):\n t = converter.timestamp2time(self.ts)\n self.assertEqual(t[0:6], self.t[0:6])\n t = converter.timestamp2time(self.ts_ms, millisecond=True)\n self.assertEqual(t[0:6], self.t[0:6])\n\n def test_timestamp2str(self):\n s = converter.timestamp2str(self.ts)\n self.assertEqual(s, self.date_str)\n s = converter.timestamp2str(self.ts_ms, millisecond=True)\n self.assertEqual(s, self.date_str)\n\n\nif __name__ == '__main__':\n unittest.main()\n" }, { "alpha_fraction": 0.5171695947647095, "alphanum_fraction": 0.5239334106445312, "avg_line_length": 30.508195877075195, "blob_id": "1a7dfa55f0cba137e549f9449fcda88ed0df21ba", "content_id": "1de000c53d8e7940dd353b99ffab5a1c9baa86b9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": 
"Python", "length_bytes": 2016, "license_type": "permissive", "max_line_length": 82, "num_lines": 61, "path": "/mountains/concurrent/__init__.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2017/8/23\nfrom __future__ import unicode_literals, absolute_import\n\nimport time\nfrom concurrent import futures\n\nfrom ..base import Queue\n\n\nclass TaskExecutor(object):\n \"\"\"\n 使用线程的执行器,可以并发执行任务\n \"\"\"\n\n def __init__(self, fn_task, task_params_list, max_workers=5):\n self.fn_task = fn_task\n self.max_workers = max_workers\n self.task_list = task_params_list\n self.task_queue = Queue()\n for t in task_params_list:\n self.task_queue.put(t)\n\n def get_next_tasks(self, max_num):\n output = []\n count = 0\n while not self.task_queue.empty() and count < max_num:\n t = self.task_queue.get()\n output.append(t)\n count += 1\n\n return output\n\n def run(self, *args, **kwargs):\n print('executor start')\n start_time = time.time()\n with futures.ThreadPoolExecutor(max_workers=self.max_workers) as executor:\n next_tasks = self.get_next_tasks(self.max_workers)\n shut_down = False\n while not shut_down and len(next_tasks) > 0:\n future_to_task = {\n executor.submit(self.fn_task, task, *args, **kwargs): task\n for task in next_tasks\n }\n\n # 这里 ThreadPoolExecutor 必须要等当前的所有任务都执行完成后,\n # 才能开始下一批的任务\n for future in futures.as_completed(future_to_task):\n _ = future_to_task[future]\n try:\n shut_down = future.result()\n except Exception as exc:\n print(exc)\n continue\n\n if shut_down:\n break\n\n next_tasks = self.get_next_tasks(self.max_workers)\n end_time = time.time()\n print('executor done, %.3fs' % (end_time - start_time))\n" }, { "alpha_fraction": 0.4576271176338196, "alphanum_fraction": 0.6101694703102112, "avg_line_length": 28.5, "blob_id": "4c54da006e12e18e6bc4989c6bff55fe56a88f7a", "content_id": "d2c69d8d5a13e6661b978ad0c826d33317e6db10", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 59, "license_type": "permissive", "max_line_length": 34, "num_lines": 2, "path": "/mountains/gis/__init__.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# created by restran on 2020/03/23\n" }, { "alpha_fraction": 0.6161971688270569, "alphanum_fraction": 0.6443662047386169, "avg_line_length": 22.66666603088379, "blob_id": "35df8e6d9619fbb6c99e643b6225939eea70ecdc", "content_id": "f571f45e52b8e1b086967e3377c531101ecc9dac", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 568, "license_type": "permissive", "max_line_length": 66, "num_lines": 24, "path": "/tests/test_concurrent.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2017/8/23\nfrom __future__ import unicode_literals, absolute_import\n\nfrom mountains.concurrent import TaskExecutor\nimport time\nimport random\nimport unittest\n\n\nclass DateTimeConverterTest(unittest.TestCase):\n\n def test_task_executor(self):\n def fn_task(item):\n print(item)\n time.sleep(random.choice(range(0, 10)) / 10.0)\n\n task_params_list = range(3)\n t = TaskExecutor(fn_task, task_params_list, max_workers=2)\n t.run()\n\n\nif __name__ == '__main__':\n unittest.main()\n" }, { "alpha_fraction": 0.5062549710273743, "alphanum_fraction": 0.5334916710853577, "avg_line_length": 18.92113494873047, "blob_id": "9e136efa9a0296b51bcec961aedd02b2be5c7ad1", "content_id": 
"0e5525293faf1666e7813b4815340782afe3ddf0", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6759, "license_type": "permissive", "max_line_length": 84, "num_lines": 317, "path": "/mountains/encoding/converter.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2018/3/7\nfrom __future__ import unicode_literals, absolute_import\n\nimport binascii\nimport struct\nfrom xml.sax.saxutils import escape as xml_escape_func\nfrom xml.sax.saxutils import unescape as xml_unescape_func\n\nfrom mountains.encoding import force_bytes, force_text\n\n\ndef to_uu(data):\n \"\"\"\n uu编码\n :param data: 字符串\n :return: 编码后的字符串\n \"\"\"\n r = binascii.b2a_uu(force_bytes(data))\n return force_text(r)\n\n\ndef from_uu(data):\n \"\"\"\n 解uu编码\n :param data: uu编码的字符串\n :return: 字符串\n \"\"\"\n r = binascii.a2b_uu(data)\n return force_text(r)\n\n\ndef str2hex(s):\n \"\"\"\n 把一个字符串转成其ASCII码的16进制表示\n :param s: 要转换的字符串\n :return: ASCII码的16进制表示字符串\n \"\"\"\n return force_text(binascii.b2a_hex(force_bytes(s)))\n\n\ndef hex2str(s):\n \"\"\"\n 把十六进制字符串转换成其ASCII表示字符串\n :param s: 十六进制字符串\n :return: 字符串\n \"\"\"\n return force_text(binascii.a2b_hex(s))\n\n\nbase = [str(x) for x in range(10)] + [chr(x) for x in range(ord('A'), ord('A') + 6)]\n\n\ndef bin2dec(s):\n \"\"\"\n bin2dec\n 二进制 to 十进制: int(str,n=10)\n :param s:\n :return:\n \"\"\"\n return int(s, 2)\n\n\ndef dec2bin(s):\n \"\"\"\n dec2bin\n 十进制 to 二进制: bin()\n :param s:\n :return:\n \"\"\"\n if not isinstance(s, int):\n num = int(s)\n else:\n num = s\n mid = []\n while True:\n if num == 0:\n break\n num, rem = divmod(num, 2)\n mid.append(base[rem])\n\n return ''.join([str(x) for x in mid[::-1]])\n\n\ndef hex2dec(s):\n \"\"\"\n hex2dec\n 十六进制 to 十进制\n :param s:\n :return:\n \"\"\"\n if not isinstance(s, str):\n s = str(s)\n return int(s.upper(), 16)\n\n\ndef dec2hex(s):\n \"\"\"\n dec2hex\n 十进制 to 八进制: oct()\n 十进制 to 十六进制: hex()\n :param s:\n :return:\n \"\"\"\n if not isinstance(s, int):\n num = int(s)\n else:\n num = s\n return hex(num)[2:]\n\n\ndef byte2hex(s):\n \"\"\"\n 字节类型的数据转成16进制字符串\n :param s:\n :return:\n \"\"\"\n\n result = binascii.b2a_hex(s)\n return force_text(result)\n\n\ndef hex2bin(s):\n \"\"\"\n hex2tobin\n 十六进制 to 二进制: bin(int(str,16))\n :param s:\n :return:\n \"\"\"\n if len(s) % 2 != 0:\n s = '0' + s\n\n result = []\n for i in range(len(s) // 2):\n t = s[i * 2:(i + 1) * 2]\n x = dec2bin(hex2dec(t.upper()))\n padding_length = (8 - len(x) % 8) % 8\n # 每个16进制值(2个字符)进行转码,不足8个的,在前面补0\n x = '%s%s' % ('0' * padding_length, x)\n result.append(x)\n\n result = ''.join(result)\n return result if result != '' else '00000000'\n\n\ndef bin2hex(s):\n \"\"\"\n bin2hex\n 二进制 to 十六进制: hex(int(str,2))\n :param s:\n :return:\n \"\"\"\n padding_length = (8 - len(s) % 8) % 8\n # 从前往后解码,不足8个的,在后面补0\n encode_str = '%s%s' % (s, '0' * padding_length)\n # 解码后是 0xab1234,需要去掉前面的 0x\n return hex(int(encode_str, 2))[2:].rstrip('L')\n\n\ndef str2dec(s):\n \"\"\"\n string to decimal number.\n \"\"\"\n if not len(s):\n return 0\n return int(str2hex(s), 16)\n\n\ndef dec2str(n):\n \"\"\"\n decimal number to string.\n \"\"\"\n s = hex(int(n))[2:].rstrip('L')\n if len(s) % 2 != 0:\n s = '0' + s\n return hex2str(s)\n\n\ndef str2bin(s):\n \"\"\"\n String to binary.\n \"\"\"\n ret = []\n for c in s:\n ret.append(bin(ord(c))[2:].zfill(8))\n return ''.join(ret)\n\n\ndef bin2str(b):\n \"\"\"\n Binary to string.\n \"\"\"\n ret = []\n for pos in range(0, len(b), 
8):\n ret.append(chr(int(b[pos:pos + 8], 2)))\n return ''.join(ret)\n\n\ndef from_digital(s, num):\n \"\"\"\n 进制转换,从指定机制转到10进制\n :param s:\n :param num:\n :return:\n \"\"\"\n if not 1 < num < 10:\n raise ValueError('digital num must between 1 and 10')\n return '%s' % int(s, num)\n\n\ndef to_digital(d, num):\n \"\"\"\n 进制转换,从10进制转到指定机制\n :param d:\n :param num:\n :return:\n \"\"\"\n if not isinstance(num, int) or not 1 < num < 10:\n raise ValueError('digital num must between 1 and 10')\n\n d = int(d)\n result = []\n x = d % num\n d = d - x\n result.append(str(x))\n while d > 0:\n d = d // num\n x = d % num\n d = d - x\n result.append(str(x))\n return ''.join(result[::-1])\n\n\ndef xml_escape(data):\n return xml_escape_func(data)\n\n\ndef xml_un_escape(data):\n return xml_unescape_func(data)\n\n\ndef str2int(number_str, default_value=None):\n if number_str is None or number_str == '':\n return default_value\n try:\n return int(number_str)\n except Exception as e:\n return default_value\n\n\ndef long2bytes(n, block_size=0):\n \"\"\"Convert an integer to a byte string.\n\n In Python 3.2+, use the native method instead::\n\n >>> n.to_bytes(block_size, 'big')\n\n For instance::\n\n >>> n = 80\n >>> n.to_bytes(2, 'big')\n b'\\x00P'\n\n If the optional :data:`blocksize` is provided and greater than zero,\n the byte string is padded with binary zeros (on the front) so that\n the total length of the output is a multiple of blocksize.\n\n If :data:`blocksize` is zero or not provided, the byte string will\n be of minimal length.\n \"\"\"\n # after much testing, this algorithm was deemed to be the fastest\n s = b''\n n = int(n)\n pack = struct.pack\n while n > 0:\n s = pack('>I', n & 0xffffffff) + s\n n = n >> 32\n # strip off leading zeros\n for i in range(len(s)):\n if s[i] != b'\\000'[0]:\n break\n else:\n # only happens when n == 0\n s = b'\\000'\n i = 0\n s = s[i:]\n # add back some pad bytes. this could be done more efficiently w.r.t. 
the\n    # de-padding being done above, but sigh...\n    if block_size > 0 and len(s) % block_size:\n        s = (block_size - len(s) % block_size) * b'\\000' + s\n    return s\n\n\ndef bytes2long(s):\n    \"\"\"Convert a byte string to a long integer (big endian).\n\n    In Python 3.2+, use the native method instead::\n\n        >>> int.from_bytes(s, 'big')\n\n    For instance::\n\n        >>> int.from_bytes(b'\\x00P', 'big')\n        80\n\n    This is (essentially) the inverse of :func:`long_to_bytes`.\n    \"\"\"\n    acc = 0\n    unpack = struct.unpack\n    length = len(s)\n    if length % 4:\n        extra = (4 - length % 4)\n        s = b'\\000' * extra + s\n        length = length + extra\n    for i in range(0, length, 4):\n        acc = (acc << 32) + unpack('>I', s[i:i + 4])[0]\n    return acc\n" }, { "alpha_fraction": 0.5543318390846252, "alphanum_fraction": 0.5668135285377502, "avg_line_length": 19.96923065185547, "blob_id": "d297045c449b77c26c7efd1b4a7194c5db84e1c8", "content_id": "71b76de3658008eeb5f88a3bd06721c24e8cfcb1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1566, "license_type": "permissive", "max_line_length": 72, "num_lines": 65, "path": "/mountains/decorator/__init__.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2017/9/15\nfrom __future__ import unicode_literals, absolute_import\n\nimport time\nfrom functools import wraps\n\n\n# A decorator essentially receives the function as an argument to the decorator function\n# A decorator is really a closure\ndef log(func):\n    \"\"\"\n    A decorator that logs function calls\n    :param func:\n    :return:\n    \"\"\"\n\n    # The wraps decorator copies func's doc and __name__ over;\n    # otherwise calling func.__name__ inside wrapper would print wrapper\n    @wraps(func)\n    def wrapper(*args, **kwargs):\n        print('before call %s' % func.__name__)\n        ret = func(*args, **kwargs)\n        print('after call %s' % func.__name__)\n        return ret\n\n    return wrapper\n\n\ndef log_with_message(message):\n    \"\"\"\n    A call-logging decorator that itself takes an argument\n    :param message:\n    :return:\n    \"\"\"\n\n    def decorator(func):\n        @wraps(func)\n        def wrapper(*args, **kwargs):\n            print('decorator log_with_message is running, %s' % message)\n            ret = func(*args, **kwargs)\n            return ret\n\n        return wrapper\n\n    return decorator\n\n\ndef time_elapsed(func):\n    \"\"\"\n    A decorator that records how long a function takes\n    :param func:\n    :return:\n    \"\"\"\n\n    @wraps(func)\n    def wrapper(*args, **kwargs):\n        timestamp = time.time() * 1000\n        ret = func(*args, **kwargs)\n        now_ts = time.time() * 1000\n        elapsed = now_ts - timestamp\n        print('%s costs time: %.2fms' % (func.__name__, elapsed))\n        return ret\n\n    return wrapper" }, { "alpha_fraction": 0.6009947657585144, "alphanum_fraction": 0.6043105721473694, "avg_line_length": 32.50925827026367, "blob_id": "7ad841efa85d12765bbb86f14a71170e67d0e71a", "content_id": "ac7e67a205b54411372851713d6fc405f8271199", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3727, "license_type": "permissive", "max_line_length": 100, "num_lines": 108, "path": "/mountains/file/csv.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2018/9/10\nfrom __future__ import unicode_literals, absolute_import\n\nimport csv\n\n__all__ = ['read_csv', 'read_csv_as_dict', 'write_csv', 'write_csv_from_dict']\n\n\ndef read_csv(filepath_or_buffer, encoding=None, delimiter=',', quotechar='\"'):\n    \"\"\"\n    Read a CSV file\n    :param encoding: the file encoding; for UTF-8 with BOM files, use utf-8-sig\n    :param filepath_or_buffer:\n    :param delimiter:\n    :param quotechar:\n    :return:\n    \"\"\"\n\n    if isinstance(filepath_or_buffer, str):\n        with open(filepath_or_buffer, 'r', encoding=encoding) as csv_file:\n            reader = csv.reader(csv_file, delimiter=delimiter, quotechar=quotechar)\n            for row in reader:\n                yield row\n\n    else:\n        reader = csv.reader(filepath_or_buffer, delimiter=delimiter, quotechar=quotechar)\n        for row in reader:\n            yield row\n\n\ndef read_csv_as_dict(filepath_or_buffer, headers=None, encoding=None, delimiter=',', quotechar='\"'):\n    \"\"\"\n\n    :param filepath_or_buffer:\n    :param headers: list of field names for the first row\n    :param encoding: the file encoding; for UTF-8 with BOM files, use utf-8-sig\n    :param delimiter: the field delimiter\n    :param quotechar:\n    :return:\n    \"\"\"\n    if isinstance(filepath_or_buffer, str):\n        with open(filepath_or_buffer, 'r', encoding=encoding) as csv_file:\n            reader = csv.DictReader(csv_file, fieldnames=headers,\n                                    delimiter=delimiter, quotechar=quotechar)\n            for row in reader:\n                yield dict(row)\n    else:\n        reader = csv.DictReader(filepath_or_buffer, fieldnames=headers,\n                                delimiter=delimiter, quotechar=quotechar)\n        for row in reader:\n            yield dict(row)\n\n\ndef write_csv(filepath_or_buffer=None, headers=None, data=None,\n              encoding=None, delimiter=',', quotechar='\"'):\n    \"\"\"\n    Write a CSV file\n    :param filepath_or_buffer:\n    :param headers:\n    :param data:\n    :param encoding:\n    :param delimiter:\n    :param quotechar:\n    :return:\n    \"\"\"\n    if isinstance(filepath_or_buffer, str):\n        with open(filepath_or_buffer, 'w', encoding=encoding) as csv_file:\n            writer = csv.writer(csv_file, delimiter=delimiter, quotechar=quotechar)\n            if headers is not None and isinstance(headers, list):\n                writer.writerow(headers)\n\n            writer.writerows(data)\n    else:\n        writer = csv.writer(filepath_or_buffer, delimiter=delimiter, quotechar=quotechar)\n        if headers is not None and isinstance(headers, list):\n            writer.writerow(headers)\n\n        writer.writerows(data)\n\n\ndef write_csv_from_dict(filepath_or_buffer=None, headers=None, data=None,\n                        write_header=True, encoding=None, delimiter=',',\n                        quotechar='\"'):\n    \"\"\"\n    data is a list whose items are dicts\n    :param write_header:\n    :param filepath_or_buffer:\n    :param headers:\n    :param data:\n    :param encoding:\n    :param delimiter:\n    :param quotechar:\n    :return:\n    \"\"\"\n    if isinstance(filepath_or_buffer, str):\n        with open(filepath_or_buffer, 'w', encoding=encoding) as csv_file:\n            writer = csv.DictWriter(csv_file, fieldnames=headers, delimiter=delimiter,\n                                    quotechar=quotechar)\n            if write_header:\n                writer.writeheader()\n            writer.writerows(data)\n    else:\n        writer = csv.DictWriter(filepath_or_buffer, fieldnames=headers, delimiter=delimiter,\n                                quotechar=quotechar)\n        if write_header:\n            writer.writeheader()\n        writer.writerows(data)\n" }, { "alpha_fraction": 0.6122807264328003, "alphanum_fraction": 0.6280701756477356, "avg_line_length": 21.799999237060547, "blob_id": "a3c97eafcb347100a0ad1e149b4f5c980387b86f", "content_id": "1fa937bddfc12592081821e36185bcf87ada0c54", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 570, "license_type": "permissive", "max_line_length": 77, "num_lines": 25, "path": "/tests/test_http.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2018/11/13\nfrom __future__ import unicode_literals, absolute_import\n\nimport unittest\n\nfrom mountains.http import random_agent, random_mobile_agent, random_wx_agent\n\n\nclass HTTPTest(unittest.TestCase):\n    def setUp(self):\n        pass\n\n    def test_random_agent(self):\n        random_agent()\n        random_agent('pc')\n        random_agent('wexin')\n        random_agent('mobile')\n        random_wx_agent()\n        
random_mobile_agent()\n        self.assertEqual(True, True)\n\n\nif __name__ == '__main__':\n    unittest.main()\n" }, { "alpha_fraction": 0.5322006940841675, "alphanum_fraction": 0.542868435382843, "avg_line_length": 25.50261688232422, "blob_id": "9097b4eb6e1317cd4fd0f2136784805c7a56f812", "content_id": "672cad9c0176c2a09a8560592aac54527f7cad22", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5280, "license_type": "permissive", "max_line_length": 113, "num_lines": 191, "path": "/mountains/file/excel.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# created by restran on 2018/04/16\nfrom __future__ import unicode_literals, absolute_import\n\nfrom datetime import datetime\n\nfrom ..base import BytesIO, text_type, iteritems\nfrom ..file import write_bytes_file\n\n__all__ = ['read_excel', 'write_excel', 'edit_excel']\n\n\ndef read_excel(file_name=None, file_contents=None, offset=1,\n               header_index=0, sheet_index=0, sheet_name=None,\n               dt2str=True, lower_header=True):\n    \"\"\"\n    Read an Excel file\n    :param sheet_name:\n    :param dt2str: convert date-typed cells to strings\n    :param header_index: which row holds the header\n    :param file_contents:\n    :param sheet_index:\n    :param file_name:\n    :param offset: row offset; usually the first row is the header and holds no data\n    :param lower_header: lowercase the header titles\n    :return:\n    \"\"\"\n    try:\n        import xlrd\n        from xlrd import xldate_as_tuple\n    except:\n        raise Exception('xlrd is not installed')\n\n    try:\n        workbook = xlrd.open_workbook(filename=file_name, file_contents=file_contents)\n    except Exception as e:\n        return None\n\n    if len(workbook.sheets()) <= 0:\n        return []\n\n    if sheet_name is not None:\n        sh = workbook.sheet_by_name(sheet_name)\n    else:\n        sh = workbook.sheet_by_index(sheet_index)\n\n    raw_data = []\n    n_rows = sh.nrows\n    row = sh.row_values(header_index)\n    header = []\n    for t in row:\n        t = text_type(t).strip()\n        if lower_header:\n            t = t.lower()\n        header.append(t)\n\n    # n_cols = sh.ncols\n    # row 0 holds hints and the title, so it is skipped\n    for i in range(offset, n_rows):\n        try:\n            # row = sh.row_values(i)\n            d = {}\n            # ctype: 0 empty,1 string, 2 number, 3 date, 4 boolean, 5 error\n            for j, t in enumerate(header):\n                ctype = sh.cell(i, j).ctype  # the cell's data type\n                cell = sh.cell_value(i, j)\n                if ctype == 2 and cell % 1 == 0:  # an integer value\n                    cell = int(cell)\n                elif ctype == 3:\n                    # convert to a datetime object\n                    cell = datetime(*xldate_as_tuple(cell, 0))\n                    if dt2str:\n                        cell = cell.strftime('%Y-%m-%d %H:%M:%S')\n                elif ctype == 4:\n                    cell = True if cell == 1 else False\n\n                d[t] = cell\n            raw_data.append(d)\n        except Exception as e:\n            pass\n\n    return raw_data\n\n\ndef write_excel(headers, data, file_name, file_io=None):\n    \"\"\"\n    Write data into a new Excel file\n    :param headers:\n    :param data:\n    :param file_name:\n    :param file_io:\n    :return:\n    \"\"\"\n    try:\n        import xlsxwriter\n    except:\n        raise Exception('xlsxwriter is not installed')\n\n    sio = BytesIO()\n    workbook = xlsxwriter.Workbook(sio)\n    worksheet = workbook.add_worksheet()\n    new_headers = []\n    if len(headers) > 0:\n        for t in headers:\n            if isinstance(t, dict):\n                new_headers.append(t.get('name', ''))\n            else:\n                new_headers.append(t)\n        headers = new_headers\n    for i, t in enumerate(headers):\n        worksheet.write(0, i, t)\n\n    index = 1\n    for row in data:\n        if isinstance(row, dict):\n            for i, name in enumerate(headers):\n                worksheet.write(index, i, row.get(name, ''))\n        else:\n            for i, x in enumerate(row):\n                worksheet.write(index, i, x)\n        index += 1\n    # close the workbook\n    workbook.close()\n    if file_io is not None:\n        if not isinstance(file_io, BytesIO):\n            raise Exception('output_fio should be BytesIO')\n        else:\n            file_io.write(sio.getvalue())\n    else:\n        write_bytes_file(file_name, sio.getvalue())\n\n\ndef edit_excel(file_name=None, sheet_index=0, sheet_name=None, data=None, output_filename=None, output_fio=None):\n    \"\"\"\n    Edit an Excel file: open an existing workbook and fill in data\n    :param file_name:\n    :param sheet_index:\n    :param sheet_name:\n    :param data: data = {'A2': '123', 'A3': '456'}\n    :param output_filename:\n    :param output_fio:\n    :type data: dict\n    \"\"\"\n    try:\n        from openpyxl import load_workbook\n    except:\n        raise Exception('openpyxl is not installed')\n\n    try:\n        wb = load_workbook(file_name)\n        if sheet_name is not None:\n            ws = wb[sheet_name]\n        else:\n            ws = wb.worksheets[sheet_index]\n    except Exception as e:\n        return None\n\n    for key, value in iteritems(data):\n        try:\n            ws[key] = value\n        except:\n            pass\n\n    if output_fio is not None:\n        if not isinstance(output_fio, BytesIO):\n            raise Exception('output_fio should be BytesIO')\n        else:\n            wb.save(output_fio)\n    elif output_filename is not None:\n        wb.save(output_filename)\n    else:\n        wb.save(file_name)\n\n    wb.close()\n\n\ndef to_excel_column_no(index, index_start_1=False):\n    \"\"\"\n    Excel column labels run A, B, ..., AA, ..., BA\n    index is 0-based unless index_start_1 is set\n    \"\"\"\n    if index_start_1:\n        index -= 1\n    y = ''\n    s = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'\n\n    while index >= 0:\n        y += s[index % 26]\n        index = index // 26 - 1\n\n    return y[::-1]\n" }, { "alpha_fraction": 0.6495726704597473, "alphanum_fraction": 0.6615384817123413, "avg_line_length": 33.411766052246094, "blob_id": "8542446a9691b4ba4836cb0a0d718a26850a9b1d", "content_id": "0af607ca26eeeba122fbe6ea79b64f49bec3dfb2", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1170, "license_type": "permissive", "max_line_length": 103, "num_lines": 34, "path": "/tests/test_logging.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2017/8/23\nfrom __future__ import unicode_literals, absolute_import\n\nfrom mountains import logging\nfrom mountains.logging import StreamHandler, FileHandler, RotatingFileHandler, TimedRotatingFileHandler\nimport unittest\n\n\nclass UtilsTest(unittest.TestCase):\n    def test_1(self):\n        logging.init_log(StreamHandler(format=logging.FORMAT_SIMPLE),\n                         FileHandler(format=logging.FORMAT_VERBOSE, level=logging.DEBUG))\n        logger = logging.getLogger(__name__)\n        print('123')\n        logger.debug('hello')\n\n    def test_2(self):\n        logging.init_log(StreamHandler(format=logging.FORMAT_SIMPLE),\n                         RotatingFileHandler(format=logging.FORMAT_VERBOSE, level=logging.DEBUG))\n        logger = logging.getLogger(__name__)\n\n        logger.debug('hello')\n\n    def test_3(self):\n        logging.init_log(StreamHandler(format=logging.FORMAT_SIMPLE),\n                         TimedRotatingFileHandler(format=logging.FORMAT_VERBOSE, level=logging.DEBUG))\n        logger = logging.getLogger(__name__)\n\n        logger.debug('hello')\n\n\nif __name__ == '__main__':\n    unittest.main()\n" }, { "alpha_fraction": 0.6130831837654114, "alphanum_fraction": 0.6211967468261719, "avg_line_length": 24.610389709472656, "blob_id": "20d50d23de25c18e23ffedd1eaae10a6c726ff72", "content_id": "ab332d7bd14c6761de2a0da9a2919d669fea022d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2168, "license_type": "permissive", "max_line_length": 75, "num_lines": 77, "path": "/mountains/json/__init__.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2017/8/23\nfrom __future__ import 
unicode_literals, absolute_import\n\nimport uuid\nfrom datetime import datetime\nfrom decimal import Decimal\nfrom .. import text_type, PY3, force_text\n\n# https://github.com/esnme/ultrajson\n# ujson is fast, but it lacks some parameters and is not fully compatible\n# with simplejson and json; it also seems to be unmaintained now\n\ntry:\n    import simplejson as json\n\n    simplejson_imported = True\nexcept ImportError:\n    import json\n\n    simplejson_imported = False\n\n\ndef json_default(obj):\n    \"\"\"\n    JSON serialization for a few extra data types.\n    By default json does not serialize datetime or Decimal,\n    and raises an exception if no default is given\n    :param obj:\n    :return:\n    \"\"\"\n    if isinstance(obj, datetime):\n        return obj.strftime(\"%Y-%m-%d %H:%M:%S\")\n    elif isinstance(obj, Decimal):\n        return float(obj)\n    elif isinstance(obj, uuid.UUID):\n        return text_type(obj)\n    else:\n        try:\n            return text_type(obj)\n        except:\n            pass\n\n    raise TypeError(\"%r is not JSON serializable\" % obj)\n\n\nload = json.load\ndump = json.dump\n\n\ndef loads(content, encoding='utf-8', **kwargs):\n    if PY3:\n        # On Python 3.5, json.loads requires str and raises on bytes\n        # On Python 3.7, json.loads accepts both str and bytes\n        content = force_text(content)\n    return json.loads(s=content, encoding=encoding, **kwargs)\n\n\ndef dumps(dict_data, ensure_ascii=True, indent=None,\n          sort_keys=False, encoding='utf-8', **kwargs):\n    \"\"\"\n    Return JSON data\n    :param encoding:\n    :param ensure_ascii:\n    :param sort_keys:\n    :param indent:\n    :param dict_data:\n    :return:\n    \"\"\"\n    if simplejson_imported:\n        return json.dumps(dict_data, default=json_default,\n                          ensure_ascii=ensure_ascii, indent=indent,\n                          sort_keys=sort_keys, encoding=encoding, **kwargs)\n    else:\n        return json.dumps(dict_data, default=json_default,\n                          ensure_ascii=ensure_ascii, indent=indent,\n                          sort_keys=sort_keys, **kwargs)\n" }, { "alpha_fraction": 0.5761091113090515, "alphanum_fraction": 0.5798714756965637, "avg_line_length": 26.734783172607422, "blob_id": "8f1e66665fbc182b0001d213be04138c2e60f655", "content_id": "fb7a803b13b91d19ebc8406e2df305bdd6dfd1a8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6473, "license_type": "permissive", "max_line_length": 94, "num_lines": 230, "path": "/mountains/django/model.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# created by restran on 2018/01/22\nfrom __future__ import unicode_literals, absolute_import\n\nimport logging\nimport traceback\nimport math\n\ntry:\n    from django.core.paginator import Paginator, EmptyPage\nexcept ImportError:\n    raise Exception('django is not installed')\n\nfrom ..base import iteritems\n\nMAX_PAGE_SIZE = 100\n\nDEFAULT_PAGE_SIZE = 20\n\nlogger = logging.getLogger(__name__)\n\n\ndef object_set_dict_data(model_class, dict_data):\n    obj = model_class()\n    for k, v in iteritems(dict_data):\n        setattr(obj, k, v)\n\n    return obj\n\n\ndef add_fields_2_json(obj, json_dict, fields):\n    for t in fields:\n        if hasattr(obj, t):\n            json_dict[t] = getattr(obj, t)\n\n\ndef set_dict_none_default(dict_item, default_value):\n    \"\"\"\n    Reset dict values that are None to a default value\n    :param dict_item:\n    :param default_value:\n    :return:\n    \"\"\"\n    for (k, v) in iteritems(dict_item):\n        if v is None:\n            dict_item[k] = default_value\n\n\ndef auto_model_name_recognize(model_name):\n    \"\"\"\n    Automatically recognize site-user as SiteUser\n    :param model_name:\n    :return:\n    \"\"\"\n    name_list = model_name.split('-')\n    return ''.join(['%s%s' % (name[0].upper(), name[1:]) for name in name_list])\n\n\ndef model_get_entry(model_class, entry_id=None, filter_dict=None, select_related_fields=None):\n    \"\"\"\n    \"\"\"\n    if filter_dict is None:\n        filter_dict = {}\n\n    if entry_id is not None:\n        filter_dict['id'] = entry_id\n\n    try:\n        if select_related_fields is None:\n            return model_class.objects.get(**filter_dict)\n        else:\n            fields = [t.attname for t in model_class._meta.fields]\n            fields.extend(select_related_fields)\n            obj = model_class.objects.values(*fields).get(**filter_dict)\n            return object_set_dict_data(model_class, obj)\n    except model_class.DoesNotExist:\n        return None\n    except Exception as e:\n        logger.error(traceback.format_exc())\n        logger.error(e)\n        return None\n\n\ndef model_delete_entry(model_class, entry_id):\n    model_class.objects.filter(id=entry_id).delete()\n\n\ndef model_total_count(model_class, filter_dict=None, q_filter=None):\n    if filter_dict is None:\n        filter_dict = {}\n\n    if q_filter is not None:\n        filter_list = [q_filter]\n    else:\n        filter_list = []\n\n    return model_class.objects.filter(\n        *filter_list, **filter_dict).count()\n\n\ndef model_to_select_list(model_class, filter_dict=None, q_filter=None):\n    \"\"\"\n    Select only id and name, for building selection lists\n    :param model_class:\n    :param filter_dict:\n    :param q_filter:\n    :return:\n    \"\"\"\n    if filter_dict is None:\n        filter_dict = {}\n\n    if q_filter is not None:\n        filter_list = [q_filter]\n    else:\n        filter_list = []\n\n    objects = model_class.objects.filter(\n        *filter_list, **filter_dict).values('id', 'name')\n\n    return list(objects)\n\n\ndef model_to_list(model_class, filter_dict=None, order_by_list=None,\n                  select_related_fields=None, q_filter=None,\n                  values=None,\n                  to_json_method='to_json'):\n    \"\"\"\n    No pagination\n    :param values:\n    :param to_json_method:\n    :param model_class:\n    :param filter_dict:\n    :param order_by_list:\n    :param select_related_fields:\n    :param q_filter:\n    :return:\n    \"\"\"\n    return model_to_page_list(model_class, page_num=None,\n                              filter_dict=filter_dict, order_by_list=order_by_list,\n                              select_related_fields=select_related_fields,\n                              q_filter=q_filter, values=values,\n                              to_json_method=to_json_method)\n\n\ndef model_to_page_list(model_class, page_num,\n                       page_size=DEFAULT_PAGE_SIZE,\n                       filter_dict=None, order_by_list=None,\n                       select_related_fields=None, q_filter=None,\n                       values=None, to_json_method='to_json',\n                       max_page_size=MAX_PAGE_SIZE):\n    \"\"\"\n    :param max_page_size:\n    :param model_class:\n    :param page_num:\n    :param page_size:\n    :param filter_dict:\n    :param order_by_list:\n    :param select_related_fields:\n    :param q_filter: Q(uuid__contains=keyword) | Q(memo__contains=keyword)\n    :param values:\n    :param to_json_method:\n    :return:\n    \"\"\"\n    if order_by_list is None:\n        order_by_list = ['-id']\n\n    if filter_dict is None:\n        filter_dict = {}\n\n    if q_filter is not None:\n        filter_list = [q_filter]\n    else:\n        filter_list = []\n\n    if select_related_fields is None:\n        if values is None:\n            objects = model_class.objects.filter(\n                *filter_list, **filter_dict).order_by(*order_by_list)\n        else:\n            objects = model_class.objects.filter(\n                *filter_list, **filter_dict).values(*values).order_by(*order_by_list)\n    else:\n        if values is None:\n            fields = [t.attname for t in model_class._meta.fields]\n        else:\n            fields = values\n        fields.extend(select_related_fields)\n        objects = model_class.objects.filter(\n            *filter_list, **filter_dict).values(*fields).order_by(*order_by_list)\n\n    if page_num is not None:\n        if page_size > max_page_size:\n            page_size = max_page_size\n\n        paginator = Paginator(objects, page_size)\n        try:\n            obj_list = paginator.page(page_num)\n        except EmptyPage as e:\n            obj_list = []\n    else:\n        obj_list = list(objects)\n\n    if select_related_fields is not None or values is not None:\n        obj_list = 
[object_set_dict_data(model_class, t) for t in obj_list]\n\n    if to_json_method is None:\n        return obj_list\n    else:\n        return [getattr(t, to_json_method)() for t in obj_list]\n\n\ndef bulk_delete(model, delete_id_list, id_name='id', batch_size=200):\n    \"\"\"\n    Bulk delete\n    :param model:\n    :param delete_id_list:\n    :param id_name: name of the id field, usually id\n    :param batch_size:\n    :return:\n    \"\"\"\n    total = len(delete_id_list)\n    times = math.ceil(total * 1.0 / batch_size)\n    for i in range(times):\n        data_list = delete_id_list[i * batch_size:(i + 1) * batch_size]\n        if len(data_list) > 0:\n            id_list = [t for t in data_list]\n            filter_params = {\n                '{}__in'.format(id_name): id_list\n            }\n            model.objects.filter(**filter_params).delete()\n" }, { "alpha_fraction": 0.6034843325614929, "alphanum_fraction": 0.6209059357643127, "avg_line_length": 33.16666793823242, "blob_id": "ad8fedafd68afe6b36d826500232ce4615b40e3f", "content_id": "cf3eb311743f42c3bd6f556748973aca1d847b87", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1435, "license_type": "permissive", "max_line_length": 95, "num_lines": 42, "path": "/tests/test_string_utils.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2018/3/22\nfrom __future__ import unicode_literals, absolute_import\n\nfrom mountains.utils import string_utils\nimport unittest\nimport random\nimport string\n\n\nclass UtilsTest(unittest.TestCase):\n    def test_line_break(self):\n        s = ''.join([random.choice(string.ascii_letters + string.digits) for _ in range(3000)])\n        break_s = string_utils.line_break(s)\n        self.assertEqual(s, break_s.replace('\\n', ''))\n\n        break_s = string_utils.line_break(s, 10)\n        self.assertEqual(s, break_s.replace('\\n', ''))\n\n        break_s = string_utils.line_break(s, 100)\n        self.assertEqual(s, break_s.replace('\\n', ''))\n\n        break_s = string_utils.line_break(s, 3001)\n        self.assertEqual(s, break_s.replace('\\n', ''))\n\n    def test_fixed_length_split(self):\n        s = 'aaaabbbbccccdddd'\n        r = string_utils.fixed_length_split(s, 4)\n        self.assertEqual(r, ['aaaa', 'bbbb', 'cccc', 'dddd'])\n        r = string_utils.fixed_length_split(s, 6)\n        self.assertEqual(r, ['aaaabb', 'bbcccc', 'dddd'])\n        r = string_utils.fixed_length_split(s, 16)\n        self.assertEqual(r, ['aaaabbbbccccdddd'])\n\n    def test_is_empty(self):\n        self.assertEqual(string_utils.is_empty(''), True)\n        self.assertEqual(string_utils.is_empty(None), True)\n        self.assertEqual(string_utils.is_empty('aaa'), False)\n\n\nif __name__ == '__main__':\n    unittest.main()\n" }, { "alpha_fraction": 0.4704282879829407, "alphanum_fraction": 0.4962610602378845, "avg_line_length": 20.31884002685547, "blob_id": "a3118f38455536da2c59b44a22cf25085c8681c4", "content_id": "cd6c17ff9880f5c92b6f287054f938632105ef9e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1553, "license_type": "permissive", "max_line_length": 74, "num_lines": 69, "path": "/mountains/utils/string_utils.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2017/11/7\nfrom __future__ import unicode_literals, absolute_import\nfrom ..encoding import force_bytes, text_type, force_text\nfrom .. import PY2, string_types\n\n\ndef fixed_length_split(s, width):\n    \"\"\"\n    Split a string into fixed-length chunks\n    :param s:\n    :param width:\n    :return:\n    \"\"\"\n    # the regex-based alternative:\n    # import re\n    # split = re.findall(r'.{%s}' % width, string)\n    return [s[x: x + width] for x in range(0, len(s), width)]\n\n\ndef line_break(s, length=76):\n    \"\"\"\n    Split a string into lines of the given length\n    :param s:\n    :param length:\n    :return:\n    \"\"\"\n    x = '\\n'.join(s[pos:pos + length] for pos in range(0, len(s), length))\n    return x\n\n\ndef is_empty(s):\n    if s == '' or s == b'' or s is None or not isinstance(s, text_type):\n        return True\n    else:\n        return False\n\n\ndef any_empty(*params):\n    for s in params:\n        if is_empty(s):\n            return True\n    else:\n        return False\n\n\ndef bytes_2_printable_strings(data):\n    data = force_bytes(data)\n    result = ['', '']\n    for c in data:\n        if PY2:\n            c = ord(c)\n\n        if 32 <= c <= 126 or c in (9, 10, 13):\n            if c == 9:\n                c = 32\n            elif c == 13:\n                c = 10\n\n            # drop consecutive spaces\n            if c == 32 and result[-1] == ' ':\n                continue\n            # drop consecutive newlines\n            elif c == 10 and result[-1] == '\\n':\n                continue\n\n            result.append(chr(c))\n\n    return ''.join(result)\n" }, { "alpha_fraction": 0.6206896305084229, "alphanum_fraction": 0.6896551847457886, "avg_line_length": 37.33333206176758, "blob_id": "b6c799c8078b2e79fb069feedda0335ae3fa6c89", "content_id": "cc41474e5061e426c232103cb860827d6c5d0718", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 116, "license_type": "permissive", "max_line_length": 56, "num_lines": 3, "path": "/mountains/datetime/__init__.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2017/8/23\nfrom __future__ import unicode_literals, absolute_import\n\n" }, { "alpha_fraction": 0.5171327590942383, "alphanum_fraction": 0.531705379486084, "avg_line_length": 19.92113494873047, "blob_id": "8ba8289c06066d5a9e3a43bde5ceff636fe4b15d", "content_id": "e3ee57e136a68c2f883f40424c7e266c8b17d97d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2539, "license_type": "permissive", "max_line_length": 94, "num_lines": 122, "path": "/mountains/utils/__init__.py", "repo_name": "restran/mountains", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# Created by restran on 2017/9/15\nfrom __future__ import unicode_literals, absolute_import, print_function\nfrom ..
 import force_text, text_type, binary_type\n\nimport itertools\n\n__all__ = ['grouper', 'any_none', 'any_in', 'text_type_dict', 'PrintCollector', 'ObjectDict']\n\n\ndef grouper(iterable, size):\n    \"\"\"\n    >>> a = grouper([1,2,3,4,5,6,7],2)\n    >>> list(a)\n    [(1, 2), (3, 4), (5, 6), (7,)]\n    :param iterable:\n    :param size:\n    :return:\n    \"\"\"\n    # http://stackoverflow.com/a/8991553\n    it = iter(iterable)\n    if size <= 0:\n        yield it\n        return\n    while True:\n        chunk = tuple(itertools.islice(it, size))\n        if not chunk:\n            return\n        yield chunk\n\n\ndef any_none(*params):\n    \"\"\"\n    >>> a = None\n    >>> b = 'abc'\n    >>> any_none(a, b)\n    True\n    >>> any_none(b)\n    False\n    \"\"\"\n    return any(map(lambda x: x is None, params))\n\n\ndef any_empty(*params):\n    \"\"\"\n    >>> a = None\n    >>> b = 'abc'\n    >>> any_empty(a, b)\n    True\n    >>> any_empty(b)\n    False\n    \"\"\"\n    return any(map(lambda x: x in (None, '', False) or (hasattr(x, '__len__') and len(x) == 0), params))\n\n\ndef any_in(obj, *params):\n    \"\"\"\n    >>> any_in(['a', ''], '')\n    True\n    >>> any_in(['a', None], '', None)\n    True\n    \"\"\"\n    return any(map(lambda x: x in obj, params))\n\n\ndef text_type_dict(dict_data):\n    if not isinstance(dict_data, dict):\n        raise TypeError\n\n    new_dict = {}\n    for k, v in dict_data.items():\n        if isinstance(k, binary_type):\n            k = k.decode('utf-8')\n        if isinstance(v, binary_type):\n            v = v.decode('utf-8')\n\n        new_dict[k] = v\n\n    return new_dict\n\n\nclass PrintCollector(object):\n    def __init__(self):\n        self.collector = []\n\n    def print(self, output):\n        self.collector.append(output)\n        print(text_type(output))\n\n    def all_output(self):\n        return '\\n'.join([force_text(t) for t in self.collector])\n\n    def smart_output(self, result=None, verbose=True):\n        if verbose:\n            return self.all_output()\n\n        if result is None:\n            result = self.collector\n\n        return result\n\n\nclass ObjectDict(dict):\n    \"\"\"Makes a dictionary behave like an object, with attribute-style access.\n    \"\"\"\n\n    def __getattr__(self, name):\n        try:\n            return self[name]\n        except KeyError:\n            return None\n\n    def __setattr__(self, name, value):\n        self[name] = value\n\n\ndef main():\n    print(any_none(1, 2, 3, None))\n\n\nif __name__ == '__main__':\n    main()\n" } ]
39
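The encoding helpers in /mountains/encoding/converter.py above come in inverse pairs (str2hex/hex2str, dec2bin/bin2dec, long2bytes/bytes2long). A minimal round-trip sketch, assuming the mountains package from this repo is installed and importable; the sample values are illustrative only:

from mountains.encoding import converter

# each pair converts forward and back without loss
assert converter.str2hex('abc') == '616263'
assert converter.hex2str(converter.str2hex('abc')) == 'abc'
assert converter.bin2dec(converter.dec2bin(10)) == 10
# big-endian byte packing; long2bytes(80) is b'P' when no block padding is requested
assert converter.bytes2long(converter.long2bytes(80)) == 80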
ismael094/Practicas
https://github.com/ismael094/Practicas
dbba987c273cc0f9a55abe67f5c98464d7a7583e
0b09c1435528e3e9edd021250baa56a0d2a5bd89
ef6d885b0ab90beb4f78f6e5cec1cfce5fc9c325
refs/heads/master
2021-01-01T05:10:31.209086
2016-04-12T05:05:31
2016-04-12T05:05:31
56,043,910
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6909090876579285, "alphanum_fraction": 0.6909090876579285, "avg_line_length": 10, "blob_id": "26e6f625fd9726b9eb463fe35b4574818f98ec02", "content_id": "cd857bd9c7b8e89d767476efc75c29e7e67719fb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "SQL", "length_bytes": 3410, "license_type": "no_license", "max_line_length": 26, "num_lines": 310, "path": "/Dump/prueba.sql", "repo_name": "ismael094/Practicas", "src_encoding": "UTF-8", "text": "delimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend 
$$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\ndelimiter $$ \n create procedure create()\nbegin\n\nend $$\n" }, { "alpha_fraction": 0.5972199440002441, "alphanum_fraction": 0.6014396548271179, "avg_line_length": 33.14124298095703, "blob_id": "6bef54c8821e3f264e5ec72528f5b4bc9f8c857f", "content_id": "dd3cdd6790103683f1e7c39abf0836f30715428d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12086, "license_type": "no_license", "max_line_length": 123, "num_lines": 354, "path": "/Dump/controllers.py", "repo_name": "ismael094/Practicas", "src_encoding": "UTF-8", "text": "\"\"\"\nModels:\n\"\"\"\n\nclass Camera(object):\n def __init__(self):\n self.id = None\n self.instrument = None\n self.camera = None\n self.gcs_component = None\n self.display_name = None\n self.archive_directory = None\n self.active = None\n\nclass ObservationMode(object):\n def __init__(self):\n id = None\n mode = None\n id_camera = None \n\nclass Frame(object):\n def __init__(self):\n id = None\n id_camera = None\n id_observation_mode = None\n observation_date = None\n observation_date_microsecond = None\n exposition_time = None\n state = None\n is_raw = None\n id_program = None\n id_observation_block = None\n path = None\n id_principal_investigator = None\n radeg = None\n decdeg = None\n\nclass HeaderDefinition(object):\n def __init__(self):\n id = None\n comment = None\n name = None\n data_type = None\n visible = None\n id_observation_mode = None\n\nclass Header(object):\n def __init__(self):\n id = None\n id_header_definition = None\n id_frame = None\n extension = None\n string_value = None\n long_value = None\n double_value = None\n\n\"\"\"\nDAO\n\"\"\" \nclass CameraDAO(object):\n def __init__(cursor):\n cursor = cursor\n def getId(camera):\n cursor.execute(\"select id from camera where instrument = '\"+camera.instrument+\"'\")\n results = cursor.fetchall()\n for row in results:\n id_camera = row[0]\n camera.id = id_camera\n\nclass ObservationModeDAO(object):\n def __init__(cursor):\n cursor = cursor\n def getId(observationMode):\n cursor.execute(\"select id, id_camera from observation_mode \\\n where mode = '\"+mode+\"'\")\n results = cursor.fetchall()\n for row1 in results:\n observationMode.id = row1[0]\n observationMode.mode = mode\n observationMode.id_camera = row1[1]\n return True\n break\n else:\n self.id = False\n def save(observationMode):\n if cursor.execute(\"insert into observation_mode(mode,id_camera) \\\n values (%s,%s)\", (observationMode.mode, observationMode.id_camera)):\n return True\n else:\n return False \n\nclass FrameDAO(object):\n def __init__(cursor):\n cursor = cursor\n def save(frame):\n if cursor.execute(\"insert into frame(id_camera, id_observation_mode, \\\n observation_date, observation_date_microsecond, exposition_time,state,is_raw, \\\n id_program, id_observation_block,path, id_principal_investigator, decdeg, \\\n radeg) values (%s,%s,%s,%s,%s,%s, %s, %s, %s, %s, %s, %s, %s)\", \\\n 
(frame.id_camera, frame.id_observation_mode, \\\n            frame.observation_date, frame.observation_date_microsecond, \\\n            frame.exposition_time, frame.state, frame.is_raw, \\\n            frame.program,frame.blockId,frame.path,\\\n            frame.id_principal_investigator, frame.decdeg,frame.radeg)):\n            frame.id = self.cursor.lastrowid\n            logging.debug('The frame %s has been inserted correctly '+now, frame.path)\n            return True\n        else:\n            logging.warning('Error while inserting frame %s '+now, frame.path)\n            return False\n\nclass HeaderDefinitionDAO(object):\n    def __init__(self, cursor):\n        self.cursor = cursor\n    def getId(self, headerDefinition):\n        self.cursor.execute(\"select id from header_definition where comment = %s and \\\n            name= %s and data_type = %s and id_observation_mode = %s\", \\\n            (headerDefinition.comment,headerDefinition.name, headerDefinition.data_type, headerDefinition.id_observation_mode))\n        results = self.cursor.fetchall()\n        if len(results) > 0:\n            for row in results:\n                headerDefinition.id = row[0]\n                break\n        else:\n            return False\n    def save(self, headerDefinition):\n        if self.cursor.execute(\"insert into header_definition(comment, name, data_type, \\\n            visible, id_observation_mode) values (%s,%s,%s,1,%s)\", (headerDefinition.comment, \\\n            headerDefinition.name, headerDefinition.data_type, headerDefinition.id_observation_mode)):\n            logging.debug('Created a new header definition called %s at %s' %\\\n                (headerDefinition.name,now))\n            return True\n        else:\n            logging.warning('Error while creating a new header definition %s'+now)\n            return False\n\nclass HeaderDAO(object):\n    def __init__(self, cursor):\n        self.cursor = cursor\n    def save(self, header):\n        if self.cursor.execute(\"insert into header(id_header_definition, id_frame, \\\n            extension, \"+header.type+\") values (%s,%s,%s,%s)\", \\\n            (header.id_header_definition, header.id_frame, header.extension, header.value)):\n            return True\n        else:\n            return False\n\n\"\"\"\nSet\n\"\"\"\ndef setCamera(camera,instrument):\n    camera.instrument = instrument\ndef setObservationMode(observationMode,mode,idCamera):\n    observationMode.id = None\n    observationMode.mode = mode\n    observationMode.id_camera = idCamera\ndef setFrame(frame,data,path,cursor):\n    mode = data[0]['OBSMODE'][0]\n    camera = CameraDAO(cursor)\n    camera.getId(data[0]['INSTRUME'][0])\n    observationMode = ObservationModeDAO(cursor)\n    observationMode.getId(mode)\n    if observationMode.id == False:\n        setObservationMode(observationMode, mode, camera.id)\n        observationMode.save()\n        observationMode.getId(mode)\n        logging.info('A new Observation Mode named %s has been created %s' % \\\n            (mode,now))\n    raw = isRaw(path)\n    program = checkProgramKey(data, path)\n    frame.id = None\n    frame.id_camera = camera.id\n    frame.id_observation_mode = observationMode.id\n    frame.observation_date = data[0]['DATE'][0]\n    frame.observation_date_microsecond = 0\n    frame.exposition_time = data[0]['ELAPSED'][0]\n    frame.state = 'COMMITED'\n    frame.is_raw = raw\n    frame.program = data[0][program][0]\n    frame.blockId = data[0]['GTCOBID'][0]\n    frame.path = path\n    frame.id_principal_investigator = data[0]['PI'][0]\n    frame.decdeg = data[0]['DECDEG'][0]\n    frame.radeg = data[0]['RADEG'][0]\n    if frame.blockId == '':\n        logging.warning('Block ID is empty in %s %s' % (path, now))\n    return frame\n\ndef setHeaderDefinition(headerDefinitionData, headerData):\n    headerDefinitionData.id = None\n    headerDefinitionData.comment = headerData[0]\n    headerDefinitionData.name = headerData[1]\n    headerDefinitionData.data_type = headerData[2]\n    headerDefinitionData.visible = 1\n    headerDefinitionData.id_observation_mode = headerData[3]\n\ndef setHeader(headerData, headerList):\n    
headerData.id = None\n    headerData.id_header_definition = headerList[0]\n    headerData.id_frame = headerList[1]\n    headerData.extension = headerList[2]\n    headerData.type = headerList[3]\n    headerData.value = headerList[4]\n\n\"\"\"\nChecks\n\"\"\"\n\ndef isRaw(path):\n    path_split = path.rsplit('/')[1:]\n    if path_split[6] == 'raw':\n        is_raw=1\n    else:\n        is_raw=0\n    return is_raw\n\ndef checkProgramKey(data, path):\n    try:\n        program_key = 'GTCPRGID'\n        data[0][program_key]\n    except KeyError:\n        program_key = 'GTCPROGI'\n        data[0][program_key]\n        logging.warning('The frame located in %s has an ambiguous ProgramId \\\n            key %s' % (path, now))\n    if data[0][program_key][0] == '':\n        logging.warning('%s empty in the frame located in %s %s' % \\\n            (program_key, path,now))\n    return program_key\n\ndef getKeywordType(data):\n    if type(data) == bool:\n        final_type = 'LONG'\n    elif type(data) == str:\n        final_type = 'STRING'\n    elif type(data) == float:\n        final_type = 'DOUBLE'\n    elif type(data) == long:\n        final_type = 'LONG'\n    elif type(data) == int:\n        final_type = 'LONG'\n    else:\n        final_type = 'LONG'\n    return final_type\ndef getInsertValue(data):\n    if type(data) == bool:\n        value = 'long_value'\n    elif type(data) == str:\n        value = 'string_value'\n    elif type(data) == float:\n        value = 'double_value'\n    elif type(data) == long:\n        value = 'long_value'\n    elif type(data) == int:\n        value = 'long_value'\n    else:\n        value = 'long_value'\n    return value\n\n\"\"\"\nControllers\n\"\"\"\nclass Controller(object):\n    def __init__(self, args):\n        self.connect()\n        self.DAO()\n        self.startScript(args)\n    def connect(self):\n        self.db = MySQLdb.connect(\"localhost\", \"root\", \"\", \"datafactory\")\n        self.cursor = self.db.cursor()\n    def DAO(self):\n        self.frameDAO = FrameDAO(self.cursor)\n        self.cameraDAO = CameraDAO(self.cursor)\n        self.observationModeDAO = ObservationModeDAO(self.cursor)\n        self.headerDefinitionDAO = HeaderDefinitionDAO(self.cursor)\n        self.headerDAO = HeaderDAO(self.cursor)\n    def startScript(self, args):\n        route = args.route\n        scan = args.scan\n        default = args.default\n        filepath, fileextension = os.path.splitext(route)\n        if scan == True and fileextension == '':\n            self.fileScaner(default, route)\n        elif scan != True and fileextension != '':\n            self.startDumpProcess(route)\n        self.db.commit()\n\n    def fileScaner(self, pathRoot, path1):\n        path = pathRoot+path1\n        yu = glob.glob(path)\n        for dire in yu:\n            for root, dirs, files in os.walk(dire):\n                for fil in files:\n                    if fil.endswith(\".fits\"):\n                        final_root = (os.path.join(root,fil))\n                        self.startDumpProcess(final_root)\n                        self.db.commit()\n\n    def startDumpProcess(self, path):\n        data = self.getDataFitsImages(path)\n        self.dataBasePopulator(data, path)\n\n    def getDataFitsImages(self, path):\n        image = fits.open(path)\n        data = []\n        for extension in range(len(image)):\n            datas = {}\n            for keyword in image[extension].header:\n                datas[keyword] = [image[extension].header[keyword],\\\n                    image[extension].header.comments[keyword]]\n            data.append(datas)\n        image.close()\n        return data\n\n    def dataBasePopulator(self, data, path):\n        frameData = Frame()\n        setFrame(frameData, data, path, self.cursor)\n        self.frameDAO.save(frameData)\n        headerDefinition = HeaderDefinition()\n        header = Header()\n        for extension in range(len(data)):\n            for keyword in data[extension]:\n                value = data[extension][keyword][0]\n                observationModeId = frameData.id_observation_mode\n                keywordType = getKeywordType(value)\n                headerDefList = [data[extension][keyword][1],keyword,\\\n                    keywordType,observationModeId]\n                setHeaderDefinition(headerDefinition,headerDefList)\n                if self.headerDefinitionDAO.getId(headerDefinition) == False:\n                    
self.headerDefinitionDAO.save(headerDefinition)\n                    self.headerDefinitionDAO.getId(headerDefinition)\n                keywordInsertValue = getInsertValue(value)\n                headerList = [headerDefinition.id, frameData.id, extension,\\\n                    keywordInsertValue, value]\n                setHeader(header,headerList)\n                self.headerDAO.save(header)\n\nif __name__ == '__main__':\n    from astropy.io import fits\n    import argparse\n    import os\n    import time\n    import logging\n    import sys\n    import MySQLdb\n    import glob\n\n    logging.basicConfig(filename='/home/log.log',level=logging.DEBUG)\n    now = time.strftime(\"%c\")\n    parser = argparse.ArgumentParser(description=\"Do you wish to scan?\")\n    parser.add_argument(\"-r\", dest='route', action='store',help='Route\\\n        of the instrument and the date that you want to dump')\n    parser.add_argument(\"-s\", dest='scan', action='store',\\\n        help='Scan files in a directory', default=True)\n    parser.add_argument(\"-d\", dest='default', action='store',\\\n        help='Use default route', default='/scidb/framedb/')\n    args = parser.parse_args()\n    p1 = Controller(args)\n    p1.db.close()" }, { "alpha_fraction": 0.5414012670516968, "alphanum_fraction": 0.5414012670516968, "avg_line_length": 23.647058486938477, "blob_id": "c5f1d1a0977fe5c70ab8c6a0d12e828bb1a11e51ccb0ee61", "content_id": "71c93e6f35c35d3a476ea283fdbc5f3166c32034", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1256, "license_type": "no_license", "max_line_length": 43, "num_lines": 51, "path": "/Dump/models.py", "repo_name": "ismael094/Practicas", "src_encoding": "UTF-8", "text": "class Camera(object):\n    def __init__(self):\n        self.id = None\n        self.instrument = None\n        self.camera = None\n        self.gcs_component = None\n        self.display_name = None\n        self.archive_directory = None\n        self.active = None\n\nclass Observation_mode(object):\n    def __init__(self):\n        self.id = None\n        self.mode = None\n        self.id_camera = None\n\nclass Frame(object):\n    def __init__(self):\n        self.id = None\n        self.id_camera = None\n        self.id_observation_mode = None\n        self.observation_date = None\n        self.observation_date_microsecond = None\n        self.exposition_time = None\n        self.state = None\n        self.is_raw = None\n        self.id_program = None\n        self.id_observation_block = None\n        self.path = None\n        self.id_principal_investigator = None\n        self.radeg = None\n        self.decdeg = None\n\nclass HeaderDefinition(object):\n    def __init__(self):\n        self.id = None\n        self.comment = None\n        self.name = None\n        self.data_type = None\n        self.visible = None\n        self.id_observation_mode = None\n\nclass Header(object):\n    def __init__(self):\n        self.id = None\n        self.id_header_definition = None\n        self.id_frame = None\n        self.extension = None\n        self.string_value = None\n        self.long_value = None\n        self.double_value = None" } ]
3
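The dump flow in /Dump/controllers.py above hinges on getDataFitsImages(), which walks every HDU of a FITS file and keeps each header keyword with its value and comment. A minimal standalone sketch of that walk, assuming astropy is installed; example.fits is a placeholder path, not a file from this repo:

from astropy.io import fits

data = []
with fits.open('example.fits') as image:  # placeholder input file
    for extension in range(len(image)):
        header = image[extension].header
        # one dict per HDU: keyword -> [value, comment], the shape dataBasePopulator expects
        data.append({kw: [header[kw], header.comments[kw]] for kw in header})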
Niranj1997/atm-application
https://github.com/Niranj1997/atm-application
694b7eb24107ee490aa8ed61e1b0f21fdefe7630
ba191e909ee88fd000ed8b61182d716fdcfb33b9
55e8c11f04e184d206a8436dc03535e1cd977e1d
refs/heads/master
2022-06-19T21:10:11.999727
2020-05-06T14:56:35
2020-05-06T14:56:35
261,789,873
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6666666865348816, "alphanum_fraction": 0.6666666865348816, "avg_line_length": 19, "blob_id": "d6da41d2394a49a0d12e828bb1a11e51ccb0ee61", "content_id": "b4e5bb76d26254f694c2d218d62641fdd30d95de", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 21, "license_type": "no_license", "max_line_length": 19, "num_lines": 1, "path": "/README.md", "repo_name": "Niranj1997/atm-application", "src_encoding": "UTF-8", "text": "\"# atm-application\" \n" }, { "alpha_fraction": 0.46318137645721436, "alphanum_fraction": 0.5113404393196106, "avg_line_length": 41.64677429199219, "blob_id": "4a2685dcc816d105dd3ea4be749e7bbf0d2946ef", "content_id": "c36aab9b102ca81943311cdd31d40d8e6deeb1e1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 26454, "license_type": "no_license", "max_line_length": 446, "num_lines": 620, "path": "/atm.py", "repo_name": "Niranj1997/atm-application", "src_encoding": "UTF-8", "text": "from flask import Flask,render_template,request\napp = Flask(__name__) #InitiateFlask\napp.config['TEMPLATES_AUTO_RELOAD'] = True\ntwok=0\nfiveh=0\nh=0\natm=0\np1=101\np2=102\np3=103\np4=104\np5=105\npin101=1000\npin102=1111\npin102=2222\npin103=3333\npin104=4444\npin105=5555\nbal101=100000\nbal102=200000\nbal103=150000\nbal103=175000\nbal104=75000\nbal105=125000\nrem=0\nx=0\ny=0\nz=0\namt1=0\namt2=0\namt3=0\nch=0\nacc=0\nf=open('templates/101.html','w')\nmessage=\"<center>TYPE----AMOUNT----DEBIT/CREDIT----BALANCE</center><br>\"\nf.write(message)\nf.close()\nf=open('templates/102.html','w')\nmessage=\"<center>TYPE----AMOUNT----DEBIT/CREDIT----BALANCE</center><br>\"\nf.write(message)\nf.close()\nf=open('templates/103.html','w')\nmessage=\"<center>TYPE----AMOUNT----DEBIT/CREDIT----BALANCE</center><br>\"\nf.write(message)\nf.close()\nf=open('templates/104.html','w')\nmessage=\"<center>TYPE----AMOUNT----DEBIT/CREDIT----BALANCE</center><br>\"\nf.write(message)\nf.close()\nf=open('templates/105.html','w')\nmessage=\"<center>TYPE----AMOUNT----DEBIT/CREDIT----BALANCE</center><br>\"\nf.write(message)\nf.close()\[email protected]('/') #WelcomeScreen\ndef Welcome(): \n return render_template('Welcome.html')\[email protected]('/Admin') #AdminPage\ndef Admin(): \n return render_template('Admin.html')\[email protected]('/Admin/LoadMoney')\ndef load():\n return render_template('LoadMoney.html')\[email protected]('/Load')\ndef Load():\n if 'den' in request.args:\n den=request.args['den']\n if 'qty' in request.args:\n qty=request.args['qty']\n LoadMoney(den,qty)\n return \"Successfully Loaded to ATM\"\ndef LoadMoney(den,qty): #AddStockFunction\n global twok,fiveh,h\n if den=='2000':\n twok+=int(qty)\n elif den=='500':\n fiveh+=int(qty)\n elif den=='100':\n h+=int(qty)\[email protected]('/Admin/ViewMoney')\ndef view():\n global atm\n atm=(2000*twok)+(500*fiveh)+(100*h)\n load = open('templates/ViewMoney.html','w')\n txt= \"<html><body><center><h1>Welcome</h1><br><h2>View Money Left</h2><br>2000 : \"+str(twok)+\"<br> 500 : \"+str(fiveh)+\"<br> 100 : \"+str(h)+\"<br> Net Amount : Rs.\"+str(atm)+\"</center></body></html>\"\n load.write(txt)\n load.close()\n return render_template('ViewMoney.html')\[email protected]('/Admin/Details')\ndef details():\n detail = open('templates/Details.html','w')\n txt=\"<html><body><center><h1>Welcome</h1><br><h2>View Customer 
Details</h2><br>AccNum----AccHolder----PIN----AccBalance<br>\"+str(p1)+\"---p1---\"+str(pin101)+\"---Rs.\"+str(bal101)+\"<br>\"+str(p2)+\"---p2---\"+str(pin102)+\"---Rs.\"+str(bal102)+\"<br>\"+str(p3)+\"---p3---\"+str(pin103)+\"---Rs.\"+str(bal103)+\"<br>\"+str(p4)+\"---p4---\"+str(pin104)+\"---Rs.\"+str(bal104)+\"<br>\"+str(p5)+\"---p5---\"+str(pin105)+\"---Rs.\"+str(bal105)+\"<br></center></body></html>\"\n detail.write(txt)\n detail.close()\n return render_template('Details.html')\[email protected]('/User')\ndef User():\n return render_template('User.html')\[email protected]('/User/CheckBalance')\ndef CheckBalance():\n return render_template('CheckBalance.html')\[email protected]('/Check')\ndef Check():\n if 'accnum' in request.args:\n accnum=request.args['accnum']\n if 'pin' in request.args:\n pin=request.args['pin']\n \n if accnum=='101':\n if pin=='1000':\n return \"Your Account Balance is \"+str(bal101)\n else:\n return \"Incorrect PIN\"\n if accnum=='102':\n if pin=='1111':\n return \"Your Account Balance is \"+str(bal102)\n else:\n return \"Incorrect PIN\"\n if accnum=='103':\n if pin=='2222':\n return \"Your Account Balance is \"+str(bal103)\n else:\n return \"Incorrect PIN\"\n if accnum=='104':\n if pin=='3333':\n return \"Your Account Balance is \"+str(bal104)\n else:\n return \"Incorrect PIN\"\n if accnum=='105':\n if pin=='4444':\n return \"Your Account Balance is \"+str(bal105)\n else:\n return \"Incorrect PIN\"\n \[email protected]('/User/WithdrawMoney')\ndef WithdrawMoney():\n return render_template('WithdrawMoney.html')\[email protected]('/Withdraw')\ndef Withdraw():\n if 'accnum' in request.args:\n accnum=request.args['accnum']\n if 'pin' in request.args:\n pin=request.args['pin']\n global acc\n if accnum=='101':\n if pin=='1000':\n acc=101\n return render_template('EnterAmount.html')\n else:\n return \"Incorrect PIN\"\n if accnum=='102':\n if pin=='1111':\n acc=102\n return render_template('EnterAmount.html')\n else:\n return \"Incorrect PIN\"\n if accnum=='103':\n if pin=='2222':\n acc=103\n return render_template('EnterAmount.html')\n else:\n return \"Incorrect PIN\"\n if accnum=='104':\n if pin=='3333':\n acc=104\n return render_template('EnterAmount.html')\n else:\n return \"Incorrect PIN\"\n if accnum=='105':\n if pin=='4444':\n acc=105\n return render_template('EnterAmount.html')\n else:\n return \"Incorrect PIN\"\[email protected]('/WithdrawConfirm')\ndef confirm():\n if 'withdraw' in request.args:\n withdraw=request.args['withdraw']\n ch=(int(withdraw)%100)\n int(ch)\n global twok,fiveh,h,acc,bal101,bal102,bal103,bal104,bal105\n if ch==0:\n rem=int(withdraw)\n x=int(int(rem)/2000)\n if int(x)<int(twok):\n amt1=x*2000\n rem=rem-int(amt1)\n else:\n x=0\n y=int(int(rem)/500)\n if int(y)<int(fiveh):\n amt2=y*500\n rem=rem-int(amt2)\n else:\n y=0\n z=int(int(rem)/100)\n if int(z)<int(h):\n amt3=z*100\n rem=rem-int(amt3)\n else:\n z=0\n \n if rem==0:\n if acc==101:\n if int(withdraw)<=int(bal101):\n twok=int(twok)-int(x)\n fiveh=int(fiveh)-int(y)\n h=int(h)-int(z)\n bal101=int(bal101)-int(withdraw)\n f=open('templates/101.html','a')\n message=\"<center>Withdraw---Rs.\"+str(withdraw)+\"---Debit---Rs.\"+str(bal101)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Transaction Done! 
Your Account Balance is Rs.\"+str(bal101)\n else:\n return \"No enough Money in Account\"\n elif acc==102:\n if int(withdraw)<=int(bal102):\n twok=int(twok)-int(x)\n fiveh=int(fiveh)-int(y)\n h=int(h)-int(z)\n bal102=int(bal102)-int(withdraw)\n f=open('templates/102.html','a')\n message=\"<center>Withdraw---Rs.\"+str(withdraw)+\"---Debit---Rs.\"+str(bal102)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Transaction Done! Your Account Balance is Rs.\"+str(bal102)\n else:\n return \"No enough Money in Account\"\n elif acc==103:\n if int(withdraw)<=int(bal103):\n twok=int(twok)-int(x)\n fiveh=int(fiveh)-int(y)\n h=int(h)-int(z)\n bal103=int(bal103)-int(withdraw)\n f=open('templates/103.html','a')\n message=\"<center>Withdraw---Rs.\"+str(withdraw)+\"---Debit---Rs.\"+str(bal103)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Transaction Done! Your Account Balance is Rs.\"+str(bal103)\n else:\n return \"No enough Money in Account\"\n elif acc==104:\n if int(withdraw)<=int(bal104):\n twok=int(twok)-int(x)\n fiveh=int(fiveh)-int(y)\n h=int(h)-int(z)\n bal104=int(bal104)-int(withdraw)\n f=open('templates/104.html','a')\n message=\"<center>Withdraw---Rs.\"+str(withdraw)+\"---Debit---Rs.\"+str(bal104)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Transaction Done! Your Account Balance is Rs.\"+str(bal104)\n else:\n return \"No enough Money in Account\"\n elif acc==105:\n if int(withdraw)<=int(bal105):\n twok=int(twok)-int(x)\n fiveh=int(fiveh)-int(y)\n h=int(h)-int(z)\n bal105=int(bal105)-int(withdraw)\n f=open('templates/105.html','a')\n message=\"<center>Withdraw---Rs.\"+str(withdraw)+\"---Debit---Rs.\"+str(bal105)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Transaction Done! Your Account Balance is Rs.\"+str(bal105)\n else:\n return \"No enough Money in Account\"\n else:\n return \"No enough Money in ATM. Try entering less Amount. Sorry for the Inconvenience.\"\n else:\n return \"Enter in Multiple of 100\"\[email protected]('/User/TransferMoney')\ndef TransferMoney():\n return render_template('TransferMoney.html')\[email protected]('/Transfer')\ndef Transfer():\n if 'accnum' in request.args:\n accnum=request.args['accnum']\n if 'pin' in request.args:\n pin=request.args['pin']\n global acc\n if accnum=='101':\n if pin=='1000':\n acc=101\n return render_template('Transfer.html')\n else:\n return \"Incorrect PIN\"\n if accnum=='102':\n if pin=='1111':\n acc=102\n return render_template('Transfer.html')\n else:\n return \"Incorrect PIN\"\n if accnum=='103':\n if pin=='2222':\n acc=103\n return render_template('Transfer.html')\n else:\n return \"Incorrect PIN\"\n if accnum=='104':\n if pin=='3333':\n acc=104\n return render_template('Transfer.html')\n else:\n return \"Incorrect PIN\"\n if accnum=='105':\n if pin=='4444':\n acc=105\n return render_template('Transfer.html')\n else:\n return \"Incorrect PIN\"\[email protected]('/TransferConfirm')\ndef TransferConfirm():\n global acc,bal101,bal102,bal103,bal104,bal105\n if 'accnum1' in request.args:\n acc1=request.args['accnum1']\n if 'money' in request.args:\n money=request.args['money']\n if int(acc)==int(acc1):\n return \"Sorry! 
Cannot Transfer to Same Account.\"\n if acc==101:\n if int(money)<=int(bal101):\n if acc1=='102':\n bal101=int(bal101)-int(money)\n bal102=int(bal102)+int(money)\n f=open('templates/101.html','a')\n message=\"<center>Transfer to \"+str(acc1)+\"---Rs.\"+str(money)+\"---Debit---Rs.\"+str(bal101)+\"</center><br>\"\n f.write(message)\n f.close()\n f=open('templates/102.html','a')\n message=\"<center>Transfer from \"+str(acc)+\"---Rs.\"+str(money)+\"---Credit---Rs.\"+str(bal102)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Successfully transferred Money!! Your Current Account Balance is Rs.\"+str(bal101)\n if acc1=='103':\n bal101=int(bal101)-int(money)\n bal103=int(bal103)+int(money)\n f=open('templates/101.html','a')\n message=\"<center>Transfer to \"+str(acc1)+\"---Rs.\"+str(money)+\"---Debit---Rs.\"+str(bal101)+\"</center><br>\"\n f.write(message)\n f.close()\n f=open('templates/103.html','a')\n message=\"<center>Transfer from \"+str(acc)+\"---Rs.\"+str(money)+\"---Credit---Rs.\"+str(bal103)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Successfully transferred Money!! Your Current Account Balance is Rs.\"+str(bal101)\n if acc1=='104':\n bal101=int(bal101)-int(money)\n bal104=int(bal104)+int(money)\n f=open('templates/101.html','a')\n message=\"<center>Transfer to \"+str(acc1)+\"---Rs.\"+str(money)+\"---Debit---Rs.\"+str(bal101)+\"</center><br>\"\n f.write(message)\n f.close()\n f=open('templates/104.html','a')\n message=\"<center>Transfer from \"+str(acc)+\"---Rs.\"+str(money)+\"---Credit---Rs.\"+str(bal104)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Successfully transferred Money!! Your Current Account Balance is Rs.\"+str(bal101)\n if acc1=='105':\n bal101=int(bal101)-int(money)\n bal105=int(bal105)+int(money)\n f=open('templates/101.html','a')\n message=\"<center>Transfer to \"+str(acc1)+\"---Rs.\"+str(money)+\"---Debit---Rs.\"+str(bal101)+\"</center><br>\"\n f.write(message)\n f.close()\n f=open('templates/105.html','a')\n message=\"<center>Transfer from \"+str(acc)+\"---Rs.\"+str(money)+\"---Credit---Rs.\"+str(bal105)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Successfully transferred Money!! Your Current Account Balance is Rs.\"+str(bal101)\n else:\n return \"Sorry! Your Account does not have Sufficient Balance to do the Transaction.\"\n if acc==102:\n if int(money)<=int(bal102):\n if acc1=='101':\n bal102=int(bal102)-int(money)\n bal101=int(bal101)+int(money)\n f=open('templates/102.html','a')\n message=\"<center>Transfer to \"+str(acc1)+\"---Rs.\"+str(money)+\"---Debit---Rs.\"+str(bal102)+\"</center><br>\"\n f.write(message)\n f.close()\n f=open('templates/101.html','a')\n message=\"<center>Transfer from \"+str(acc)+\"---Rs.\"+str(money)+\"---Credit---Rs.\"+str(bal101)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Successfully transferred Money!! Your Current Account Balance is Rs.\"+str(bal102)\n if acc1=='103':\n bal102=int(bal102)-int(money)\n bal103=int(bal103)+int(money)\n f=open('templates/102.html','a')\n message=\"<center>Transfer to \"+str(acc1)+\"---Rs.\"+str(money)+\"---Debit---Rs.\"+str(bal102)+\"</center><br>\"\n f.write(message)\n f.close()\n f=open('templates/103.html','a')\n message=\"<center>Transfer from \"+str(acc)+\"---Rs.\"+str(money)+\"---Credit---Rs.\"+str(bal103)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Successfully transferred Money!! 
Your Current Account Balance is Rs.\"+str(bal102)\n if acc1=='104':\n bal102=int(bal102)-int(money)\n bal104=int(bal104)+int(money)\n f=open('templates/102.html','a')\n message=\"<center>Transfer to \"+str(acc1)+\"---Rs.\"+str(money)+\"---Debit---Rs.\"+str(bal102)+\"</center><br>\"\n f.write(message)\n f.close()\n f=open('templates/104.html','a')\n message=\"<center>Transfer from \"+str(acc)+\"---Rs.\"+str(money)+\"---Credit---Rs.\"+str(bal104)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Successfully transferred Money!! Your Current Account Balance is Rs.\"+str(bal102)\n if acc1=='105':\n bal102=int(bal102)-int(money)\n bal105=int(bal105)+int(money)\n f=open('templates/102.html','a')\n message=\"<center>Transfer to \"+str(acc1)+\"---Rs.\"+str(money)+\"---Debit---Rs.\"+str(bal102)+\"</center><br>\"\n f.write(message)\n f.close()\n f=open('templates/105.html','a')\n message=\"<center>Transfer from \"+str(acc)+\"---Rs.\"+str(money)+\"---Credit---Rs.\"+str(bal105)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Successfully transferred Money!! Your Current Account Balance is Rs.\"+str(bal102)\n else:\n return \"Sorry! Your Account does not have Sufficient Balance to do the Transaction.\"\n if acc==103:\n if int(money)<=int(bal103):\n if acc1=='101':\n bal103=int(bal103)-int(money)\n bal101=int(bal101)+int(money)\n f=open('templates/103.html','a')\n message=\"<center>Transfer to \"+str(acc1)+\"---Rs.\"+str(money)+\"---Debit---Rs.\"+str(bal103)+\"</center><br>\"\n f.write(message)\n f.close()\n f=open('templates/101.html','a')\n message=\"<center>Transfer from \"+str(acc)+\"---Rs.\"+str(money)+\"---Credit---Rs.\"+str(bal101)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Successfully transferred Money!! Your Current Account Balance is Rs.\"+str(bal103)\n if acc1=='102':\n bal103=int(bal103)-int(money)\n bal102=int(bal102)+int(money)\n f=open('templates/103.html','a')\n message=\"<center>Transfer to \"+str(acc1)+\"---Rs.\"+str(money)+\"---Debit---Rs.\"+str(bal103)+\"</center><br>\"\n f.write(message)\n f.close()\n f=open('templates/102.html','a')\n message=\"<center>Transfer from \"+str(acc)+\"---Rs.\"+str(money)+\"---Credit---Rs.\"+str(bal102)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Successfully transferred Money!! Your Current Account Balance is Rs.\"+str(bal103)\n if acc1=='104':\n bal103=int(bal103)-int(money)\n bal104=int(bal104)+int(money)\n f=open('templates/103.html','a')\n message=\"<center>Transfer to \"+str(acc1)+\"---Rs.\"+str(money)+\"---Debit---Rs.\"+str(bal103)+\"</center><br>\"\n f.write(message)\n f.close()\n f=open('templates/104.html','a')\n message=\"<center>Transfer from \"+str(acc)+\"---Rs.\"+str(money)+\"---Credit---Rs.\"+str(bal104)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Successfully transferred Money!! Your Current Account Balance is Rs.\"+str(bal103)\n if acc1=='105':\n bal103=int(bal103)-int(money)\n bal105=int(bal105)+int(money)\n f=open('templates/103.html','a')\n message=\"<center>Transfer to \"+str(acc1)+\"---Rs.\"+str(money)+\"---Debit---Rs.\"+str(bal103)+\"</center><br>\"\n f.write(message)\n f.close()\n f=open('templates/105.html','a')\n message=\"<center>Transfer from \"+str(acc)+\"---Rs.\"+str(money)+\"---Credit---Rs.\"+str(bal105)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Successfully transferred Money!! Your Current Account Balance is Rs.\"+str(bal103)\n else:\n return \"Sorry! 
Your Account does not have Sufficient Balance to do the Transaction.\"\n if acc==104:\n if int(money)<=int(bal104):\n if acc1=='101':\n bal104=int(bal104)-int(money)\n bal101=int(bal101)+int(money)\n f=open('templates/104.html','a')\n message=\"<center>Transfer to \"+str(acc1)+\"---Rs.\"+str(money)+\"---Debit---Rs.\"+str(bal104)+\"</center><br>\"\n f.write(message)\n f.close()\n f=open('templates/101.html','a')\n message=\"<center>Transfer from \"+str(acc)+\"---Rs.\"+str(money)+\"---Credit---Rs.\"+str(bal101)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Successfully transferred Money!! Your Current Account Balance is Rs.\"+str(bal104)\n if acc1=='102':\n bal104=int(bal104)-int(money)\n bal102=int(bal102)+int(money)\n f=open('templates/104.html','a')\n message=\"<center>Transfer to \"+str(acc1)+\"---Rs.\"+str(money)+\"---Debit---Rs.\"+str(bal104)+\"</center><br>\"\n f.write(message)\n f.close()\n f=open('templates/102.html','a')\n message=\"<center>Transfer from \"+str(acc)+\"---Rs.\"+str(money)+\"---Credit---Rs.\"+str(bal102)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Successfully transferred Money!! Your Current Account Balance is Rs.\"+str(bal104)\n if acc1=='103':\n bal104=int(bal104)-int(money)\n bal103=int(bal103)+int(money)\n f=open('templates/104.html','a')\n message=\"<center>Transfer to \"+str(acc1)+\"---Rs.\"+str(money)+\"---Debit---Rs.\"+str(bal104)+\"</center><br>\"\n f.write(message)\n f.close()\n f=open('templates/103.html','a')\n message=\"<center>Transfer from \"+str(acc)+\"---Rs.\"+str(money)+\"---Credit---Rs.\"+str(bal103)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Successfully transferred Money!! Your Current Account Balance is Rs.\"+str(bal104)\n if acc1=='105':\n bal104=int(bal104)-int(money)\n bal105=int(bal105)+int(money)\n f=open('templates/104.html','a')\n message=\"<center>Transfer to \"+str(acc1)+\"---Rs.\"+str(money)+\"---Debit---Rs.\"+str(bal104)+\"</center><br>\"\n f.write(message)\n f.close()\n f=open('templates/105.html','a')\n message=\"<center>Transfer from \"+str(acc)+\"---Rs.\"+str(money)+\"---Credit---Rs.\"+str(bal105)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Successfully transferred Money!! Your Current Account Balance is Rs.\"+str(bal104)\n else:\n return \"Sorry! Your Account does not have Sufficient Balance to do the Transaction.\"\n if acc==105:\n if int(money)<=int(bal105):\n if acc1=='101':\n bal105=int(bal105)-int(money)\n bal101=int(bal101)+int(money)\n f=open('templates/105.html','a')\n message=\"<center>Transfer to \"+str(acc1)+\"---Rs.\"+str(money)+\"---Debit---Rs.\"+str(bal105)+\"</center><br>\"\n f.write(message)\n f.close()\n f=open('templates/101.html','a')\n message=\"<center>Transfer from \"+str(acc)+\"---Rs.\"+str(money)+\"---Credit---Rs.\"+str(bal101)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Successfully transferred Money!! Your Current Account Balance is Rs.\"+str(bal105)\n if acc1=='102':\n bal105=int(bal105)-int(money)\n bal102=int(bal102)+int(money)\n f=open('templates/105.html','a')\n message=\"<center>Transfer to \"+str(acc1)+\"---Rs.\"+str(money)+\"---Debit---Rs.\"+str(bal105)+\"</center><br>\"\n f.write(message)\n f.close()\n f=open('templates/102.html','a')\n message=\"<center>Transfer from \"+str(acc)+\"---Rs.\"+str(money)+\"---Credit---Rs.\"+str(bal102)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Successfully transferred Money!! 
Your Current Account Balance is Rs.\"+str(bal105)\n if acc1=='103':\n bal105=int(bal105)-int(money)\n bal103=int(bal103)+int(money)\n f=open('templates/105.html','a')\n message=\"<center>Transfer to \"+str(acc1)+\"---Rs.\"+str(money)+\"---Debit---Rs.\"+str(bal105)+\"</center><br>\"\n f.write(message)\n f.close()\n f=open('templates/103.html','a')\n message=\"<center>Transfer from \"+str(acc)+\"---Rs.\"+str(money)+\"---Credit---Rs.\"+str(bal103)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Successfully transferred Money!! Your Current Account Balance is Rs.\"+str(bal105)\n if acc1=='104':\n bal105=int(bal105)-int(money)\n bal104=int(bal104)+int(money)\n f=open('templates/105.html','a')\n message=\"<center>Transfer to \"+str(acc1)+\"---Rs.\"+str(money)+\"---Debit---Rs.\"+str(bal105)+\"</center><br>\"\n f.write(message)\n f.close()\n f=open('templates/104.html','a')\n message=\"<center>Transfer from \"+str(acc)+\"---Rs.\"+str(money)+\"---Credit---Rs.\"+str(bal104)+\"</center><br>\"\n f.write(message)\n f.close()\n return \"Successfully transferred Money!! Your Current Account Balance is Rs.\"+str(bal105)\n else:\n return \"Sorry! Your Account does not have Sufficient Balance to do the Transaction.\"\n \[email protected]('/User/MiniStatement')\ndef MiniStatement():\n return render_template('MiniStatement.html')\[email protected]('/MiniStatement')\ndef Statement():\n if 'accnum' in request.args:\n accnum=request.args['accnum']\n if 'pin' in request.args:\n pin=request.args['pin']\n global acc\n if accnum=='101':\n if pin=='1000':\n return render_template('101.html')\n else:\n return \"Incorrect PIN\"\n if accnum=='102':\n if pin=='1111':\n return render_template('102.html')\n else:\n return \"Incorrect PIN\"\n if accnum=='103':\n if pin=='2222':\n return render_template('103.html')\n else:\n return \"Incorrect PIN\"\n if accnum=='104':\n if pin=='3333':\n return render_template('104.html')\n else:\n return \"Incorrect PIN\"\n if accnum=='105':\n if pin=='4444':\n return render_template('105.html')\n else:\n return \"Incorrect PIN\"\n\n\nif __name__ ==\"__main__\":\n app.run(host='0.0.0.0',threaded=True)\n \n" } ]
2
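Note: the Flask ATM app above repeats a near-identical PIN-check branch for each of the five hardcoded accounts across /Check, /Withdraw and /Transfer. Below is a minimal sketch of collapsing those branches into one dictionary lookup. It is not part of the repository; the PINs mirror the values hardcoded in the routes, and the balances are placeholders (the real starting balances are defined earlier in the file and not shown here).

```python
# Sketch only: one lookup table instead of five copy-pasted branches.
# PINs mirror the hardcoded values above; balances are placeholders.
accounts = {
    '101': {'pin': '1000', 'balance': 10000},
    '102': {'pin': '1111', 'balance': 10000},
    '103': {'pin': '2222', 'balance': 10000},
    '104': {'pin': '3333', 'balance': 10000},
    '105': {'pin': '4444', 'balance': 10000},
}

def check_balance(accnum, pin):
    acct = accounts.get(accnum)
    if acct is None:
        return 'Unknown account number'
    if acct['pin'] != pin:
        return 'Incorrect PIN'
    return 'Your Account Balance is ' + str(acct['balance'])
```

The same table would let /Withdraw and /Transfer drop the per-account global variables as well.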
Anand-Sagar28/SpaceInvader
https://github.com/Anand-Sagar28/SpaceInvader
2a28a01cf1d959571f472521a8a9e02efe3c325d
b6f45d00b877af60e091cea0cd469a93b26380ff
fcebd9bf2fd87c90ed242f3efbc2afc1061ab0ce
refs/heads/main
2023-07-19T18:20:09.554187
2021-09-09T02:50:33
2021-09-09T02:50:33
403,198,961
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5698522329330444, "alphanum_fraction": 0.5934168696403503, "avg_line_length": 29.087209701538086, "blob_id": "9b32aa89e265cd9735527639180cef0f63329917", "content_id": "5c8b3181f1c48270178d7652c9dc9e1f105d771d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5347, "license_type": "no_license", "max_line_length": 115, "num_lines": 172, "path": "/PygameTRY.py", "repo_name": "Anand-Sagar28/SpaceInvader", "src_encoding": "UTF-8", "text": "import pygame\r\nimport random\r\nimport math\r\nfrom pygame import mixer\r\n\r\n# Initialize the pygame\r\npygame.init()\r\n\r\n# create the screen(WIDTH , HEIGHT)\r\nscreen = pygame.display.set_mode((800, 600))\r\n\r\n# Adding background image\r\nbackground = pygame.image.load('Background.png')\r\n\r\n# TITLE < LOGO\r\npygame.display.set_caption('Space Invader') # Title\r\nicon = pygame.image.load('ufo (1).png') # Icon of the game\r\npygame.display.set_icon(icon)\r\n\r\n# Adding background music\r\nmixer.music.load('background.wav')\r\nmixer.music.play(-1) # -1 is added to make it run continously from start to stop\r\n\r\n# Adding Player image\r\nplayer_img = pygame.image.load('spaceship.png')\r\nplayerX = 370\r\nplayerY = 500\r\nplayerX_change = 0\r\n\r\n# Adding Enemy randomly anywhere in the screen.\r\nenemy_img = []\r\nenemyX = []\r\nenemyY = []\r\nenemyX_change = []\r\nenemyY_change = []\r\nnum_of_enemies = 6\r\nfor i in range(num_of_enemies):\r\n enemy_img.append(pygame.image.load('enemy.png'))\r\n enemyX.append(random.randint(0, 800))\r\n enemyY.append(random.randint(10, 200))\r\n enemyX_change.append(4)\r\n enemyY_change.append(40)\r\n\r\n# Adding bullet in the game.\r\n# Ready = Bullet is in not fired yet Cant see on screen.\r\n# Fire = Bullet is moving\r\nbulletimg = pygame.image.load('bullet.png')\r\nbulletX = 0\r\nbulletY = 500\r\nbulletX_change = 0\r\nbulletY_change = 10\r\nbullet_state = \"ready\"\r\n\r\n# score\r\nscore_value = 0\r\nfont = pygame.font.Font('freesansbold.ttf', 32)\r\ntextX = 10\r\ntextY = 10\r\n\r\nover_font = pygame.font.Font('freesansbold.ttf', 64)\r\n\r\n\r\ndef show_score(x, y):\r\n score = font.render(\"Score: \" + str(score_value), True, (255, 255, 255))\r\n screen.blit(score, (x, y))\r\n\r\n\r\ndef game_over_text():\r\n game_overfont = over_font.render(\"GAME OVER\", True, (255, 255, 255))\r\n screen.blit(game_overfont, (200, 240))\r\n\r\n\r\ndef player(x, y):\r\n screen.blit(player_img, (x, y)) # This blit method is used to draw in the screen the image of player.\r\n\r\n\r\ndef enemy(x, y, i):\r\n screen.blit(enemy_img[i], (x, y))\r\n\r\n\r\ndef fire_bullet(x, y):\r\n global bullet_state\r\n bullet_state = \"fire\"\r\n screen.blit(bulletimg, (x + 16, y + 10))\r\n\r\n\r\n# Collision between enemy and bullet can be done using distance.\r\n# same distance == collision\r\ndef isCollision(enemyX, enemyY, bulletX, bulletY):\r\n distance = math.sqrt((math.pow(enemyX - bulletX, 2)) + (math.pow(enemyY - bulletY, 2)))\r\n if distance < 27:\r\n return True\r\n else:\r\n return False\r\n\r\n\r\n# Game LOOP\r\nrunning = True\r\nwhile running:\r\n # Screen Should appear above everything..\r\n screen.fill((0, 0, 0)) # Changing Colors to RGB = (RED, GREEN, BLUE)\r\n # Background image\r\n screen.blit(background, (0, 0))\r\n # playerX += 0.3\r\n for event in pygame.event.get():\r\n if event.type == pygame.QUIT:\r\n running = False\r\n if event.type == pygame.KEYDOWN:\r\n if event.key == pygame.K_LEFT:\r\n playerX_change += -4\r\n if event.key == 
pygame.K_RIGHT:\r\n playerX_change += 4\r\n # print('Right Key is pressed')\r\n if event.key == pygame.K_SPACE:\r\n if bullet_state == \"ready\":\r\n bullet_sound = mixer.Sound('laser.wav')\r\n bullet_sound.play()\r\n # getting the x coordinate of the bullet.\r\n bulletX = playerX # Assigning a fixed position for the origin of bullet from the spaceship.\r\n fire_bullet(bulletX, bulletY)\r\n if event.type == pygame.KEYUP:\r\n if event.key == pygame.K_LEFT or event.key == pygame.K_RIGHT:\r\n playerX_change = 0\r\n # Assigning Movement of the player of pressing the left and right key\r\n playerX += playerX_change\r\n if playerX <= 0:\r\n playerX = 0\r\n elif playerX >= 736:\r\n playerX = 736\r\n\r\n # enemy movement\r\n for i in range(num_of_enemies):\r\n if enemyY[i] > 440:\r\n for j in range(num_of_enemies):\r\n enemyY[j] = 2000\r\n game_over_text()\r\n break\r\n\r\n enemyX[i] += enemyX_change[i]\r\n if enemyX[i] <= 0:\r\n enemyX_change[i] = 3\r\n enemyY[i] += enemyY_change[i]\r\n elif enemyX[i] >= 736:\r\n enemyX_change[i] = -3\r\n enemyY[i] += enemyY_change[i]\r\n # If collision then respawning the enemy in diff location and the score is increased by1 in each collision.\r\n collision = isCollision(enemyX[i], enemyY[i], bulletX, bulletY)\r\n if collision:\r\n bulletY = 500\r\n bullet_state = 'ready'\r\n score_value += 1\r\n # print(score_value)\r\n # Respawning enemy in a random location after bullet hits the enemy.\r\n enemyX[i] = random.randint(0, 736)\r\n enemyY[i] = random.randint(10, 200)\r\n explosion_sound = mixer.Sound('explosion.wav')\r\n explosion_sound.play()\r\n\r\n enemy(enemyX[i], enemyY[i], i)\r\n\r\n # Creating multiple bullets to hit\r\n if bulletY <= 0:\r\n bulletY = 500\r\n bullet_state = \"ready\"\r\n\r\n if bullet_state == \"fire\":\r\n fire_bullet(bulletX, bulletY)\r\n bulletY -= bulletY_change\r\n\r\n player(playerX, playerY) # It Should be called inside while loop as for the game is running inside it.\r\n show_score(textX, textY)\r\n pygame.display.update()\r\n" }, { "alpha_fraction": 0.7572815418243408, "alphanum_fraction": 0.7572815418243408, "avg_line_length": 40.20000076293945, "blob_id": "99f8e99bdae500b86cfafcc5904f5f99031e5547", "content_id": "3b7aefa4803477389dac4679b02a7f544ae3872a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 206, "license_type": "no_license", "max_line_length": 144, "num_lines": 5, "path": "/README.md", "repo_name": "Anand-Sagar28/SpaceInvader", "src_encoding": "UTF-8", "text": "# SpaceInvader\nHi. I'm Anand Sagar Gupta. \nA B.tech student. \nThis is my mini project, I tried using Python and Pycharm. Feel free to share your views and your Pull request or any suggestion to the changes.\nThankYou.\n" } ]
2
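Note: the isCollision helper in PygameTRY.py above builds the Euclidean distance from math.sqrt and math.pow and compares it to a 27-pixel threshold. A tidier equivalent using math.hypot, shown here as a sketch rather than the repo's code:

```python
import math

def is_collision(enemy_x, enemy_y, bullet_x, bullet_y, radius=27):
    # math.hypot(dx, dy) computes sqrt(dx*dx + dy*dy) directly;
    # the 27-pixel radius matches the threshold used in the game loop.
    return math.hypot(enemy_x - bullet_x, enemy_y - bullet_y) < radius
```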
mrjm/whcll-py
https://github.com/mrjm/whcll-py
d574474c2e7cdc7a67b3dd879632ebb5769f885c
2a7b28d0f83438a4a9cb7de74d00dbd7624bf8df
552e87be4b468508acabd84488bae4841d74d4d3
refs/heads/master
2015-08-10T12:38:40.898203
2014-11-29T13:53:57
2014-11-29T13:53:57
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6337907314300537, "alphanum_fraction": 0.6492281556129456, "avg_line_length": 27.439023971557617, "blob_id": "a87f679a9d63fcec6fc9a1e7ec793f679f8ba15c", "content_id": "3e3229e21bc8b92655b32bef4ae20dca494e7eca", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1166, "license_type": "permissive", "max_line_length": 60, "num_lines": 41, "path": "/app.py", "repo_name": "mrjm/whcll-py", "src_encoding": "UTF-8", "text": "import json\nimport bottle\nfrom bottle import route, run, request, response, abort\nfrom pymongo import MongoClient\nfrom bson.json_util import dumps\n\n\nclient = MongoClient()\ndb = client.mrjm\ncollection = db.whisky\nprint \"MongoDB Connected successfully\"\n\n@route('/whisky/api/')\ndef index():\n #Display small talk description\n return '{ \"name\": \"WhiskyCellar API\", \\\n \"url\": \"http://mrjm.de/whisky/api/v1\" }'\n\n@route('/whisky/api/v1/', method='GET')\ndef get_all():\n response.content_type = 'application/json; charset=utf8'\n #get list of all items\n entity = collection.find().sort('id', -1)\n if not entity:\n abort(404, 'No documents')\n d = dumps(entity)\n return 'angular.callbacks._0(' + d + ');'\n\n@route('/whisky/api/v1/<name>', method='GET')\ndef get_document(name):\n response.content_type = 'application/json; charset=utf8'\n #get list of specific item by name\n name = name.replace(\"_\", \" \").title();\n entity = collection.find({'name':name})\n if not entity:\n abort(404, 'No document with name %s' % name)\n d = dumps(entity)\n return 'angular.callbacks._0(' + d + ');'\n\n\nrun(host='localhost', port=3000)\n" }, { "alpha_fraction": 0.5767984390258789, "alphanum_fraction": 0.5800389051437378, "avg_line_length": 31.14583396911621, "blob_id": "115a12bb76db43abd2a16cc877c0450fa0e3908e", "content_id": "1cf8a045fd0f4506fc8ff2da77f4df21904faeca", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1543, "license_type": "permissive", "max_line_length": 103, "num_lines": 48, "path": "/js/app.js", "repo_name": "mrjm/whcll-py", "src_encoding": "UTF-8", "text": "var whiskyApp = angular.module('whiskyApp', []);\n\n\nwhiskyApp.controller('ItemListCtrl', ['$scope', '$http', '$filter', function ($scope, $http, $filter) {\n\n // ajax request to get all items formatted in JSON\n $http.jsonp('http://localhost:3000/whisky/api/v1/?callback=JSON_CALLBACK')\n .success(function(data, status, headers, config) {\n $scope.items = data;\n $scope.predicate = '-rating'; // default sort is ID Desc\n $scope.search = '';\n //$scope.filteredItems = []; // used below for some filters\n $scope.filterExpr = { name: true };\n\n\n\n\n // watch search field for search results\n $scope.$watch('search', function(newVal, oldVal) {\n $scope.items = $filter('filter')($scope.items, $scope.search);\n });\n\n // watch region to filter. Used in sidebar links\n $scope.$watch('region', function(newVal, oldVal) {\n $scope.filterExpr = { region: newVal };\n });\n\n // watch type to filter. Used in sidebar links\n $scope.$watch('art', function(newVal, oldVal) {\n $scope.filterExpr = { art: newVal };\n });\n\n // watch country to filter. 
Used in sidebar links\n $scope.$watch('land', function(newVal, oldVal) {\n $scope.filterExpr = { land: newVal };\n });\n })\n .error(function(data, status, headers, config){\n console.log('failed loading resource'+data);\n }); \n}]);\n\n\n\n// // set focus for search field after page has loaded\n// $(function(){\n// $(\"#search\").focus();\n// });\n" }, { "alpha_fraction": 0.6179401874542236, "alphanum_fraction": 0.644518256187439, "avg_line_length": 13.949999809265137, "blob_id": "4071a1d1c45d3533b283dc057c7296784dc16081", "content_id": "a97ca736e37264d81dc08fac35adb54b44588c37", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 301, "license_type": "permissive", "max_line_length": 63, "num_lines": 20, "path": "/README.md", "repo_name": "mrjm/whcll-py", "src_encoding": "UTF-8", "text": "#WhiskyCellar \nPersonal Collection of Whiskies \n\n\nSee it online: [http://mrjm.de/whisky](http://mrjm.de/whisky) \n\n\n*Author:* Julian Metzger \n*Contact:* [http://mrjm.de](http://mrjm.de) \n*Version:* 1.0 \n*Last update:* 10/2014 \n\n##Backend \nPython \nbottle \npymongo \n\n##Frontend \nAngularJS \njQuery \n" } ]
3
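Note: the bottle API above answers every JSONP request with the hardcoded wrapper angular.callbacks._0, which only matches the first callback name Angular generates. A minimal sketch of echoing the client's own ?callback= parameter instead; this is an assumption about the intended behavior, not the repo's code:

```python
from bottle import request, response

def as_jsonp(payload):
    # Echo whatever callback name the client sent; fall back to plain
    # JSON when the request is not JSONP. 'payload' is assumed to be a
    # JSON string such as the output of bson.json_util.dumps().
    response.content_type = 'application/json; charset=utf8'
    callback = request.query.get('callback')
    if callback:
        return '%s(%s);' % (callback, payload)
    return payload
```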
ZEPORATH/faceRecognizer
https://github.com/ZEPORATH/faceRecognizer
406d9e8e5da4c635dc86c4b59c8f28fb169665e1
d832b786bc50fb9d33cf9afe1591ada83792ed2f
60596edfd447aaef4f9fe4fc0c38b4990dbfdfd6
refs/heads/master
2021-07-07T02:07:06.581506
2017-10-01T14:49:10
2017-10-01T14:49:10
105,448,955
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5440509915351868, "alphanum_fraction": 0.5633495450019836, "avg_line_length": 37.17307662963867, "blob_id": "cfd5a824534cf04207b22c6161ba81886b87be2b", "content_id": "80970cf60d7cfc59d0db351a2d5e428b19a5521c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5959, "license_type": "no_license", "max_line_length": 152, "num_lines": 156, "path": "/CollectDataForFace.py", "repo_name": "ZEPORATH/faceRecognizer", "src_encoding": "UTF-8", "text": "import cv2\nimport logging\nimport numpy as np\nimport threading\nimport math\nimport time\nfrom Queue import Queue\nimport configData\n\nbuffer_queue = Queue() # Create a buffer Queue of size 128 frames\n\n\n#Creates directoru structure for new user.\ndef create_new_Record(user_ID):\n import os\n recordPath = configData.PARENT_PATH+user_ID\n\n if not os.path.exists(recordPath):\n try:\n os.makedirs(recordPath)\n return os.path.dirname(recordPath)\n except OSError as exc:\n raise exc\n except Exception as e:\n raise e\n else:\n return configData.FOLDER_EXISTS\n\ndef DetectEyes(Image):\n glass_cas = cv2.CascadeClassifier(configData.HAAR_GLASS_EYE_CASCADE)\n face = cv2.CascadeClassifier(configData.HAAR_FACE_CASCADE)\n Theta = 0\n rows, cols = Image.shape\n glass = glass_cas.detectMultiScale(Image) # This ditects the eyes\n for (sx, sy, sw, sh) in glass:\n if glass.shape[0] == 2: # The Image should have 2 eyes\n if glass[1][0] > glass[0][0]:\n DY = ((glass[1][1] + glass[1][3] / 2) - (glass[0][1] + glass[0][3] / 2)) # Height diffrence between the glass\n DX = ((glass[1][0] + glass[1][2] / 2) - glass[0][0] + (glass[0][2] / 2)) # Width diffrance between the glass\n else:\n DY = (-(glass[1][1] + glass[1][3] / 2) + (glass[0][1] + glass[0][3] / 2)) # Height diffrence between the glass\n DX = (-(glass[1][0] + glass[1][2] / 2) + glass[0][0] + (glass[0][2] / 2)) # Width diffrance between the glass\n\n if (DX != 0.0) and (DY != 0.0): # Make sure the the change happens only if there is an angle\n Theta = math.degrees(math.atan(round(float(DY) / float(DX), 2))) # Find the Angle\n print \"Theta \" + str(Theta)\n\n M = cv2.getRotationMatrix2D((cols / 2, rows / 2), Theta, 1) # Find the Rotation Matrix\n Image = cv2.warpAffine(Image, M, (cols, rows))\n # cv2.imshow('ROTATED', Image) # UNCOMMENT IF YOU WANT TO SEE THE\n\n Face2 = face.detectMultiScale(Image, 1.3, 5) # This detects a face in the image\n for (FaceX, FaceY, FaceWidth, FaceHeight) in Face2:\n CroppedFace = Image[FaceY: FaceY + FaceHeight, FaceX: FaceX + FaceWidth]\n return CroppedFace\n\n\n\ndef captureFacePhoto(capture_device, position_ID):\n face_cascade = cv2.CascadeClassifier(configData.HAAR_FACE_CASCADE)\n eye_cascade = cv2.CascadeClassifier(configData.HAAR_EYE_CASCADE)\n print position_ID\n time.sleep(1)\n while True:\n # ret,image = capture_device.read()\n image = capture_device.get()\n # image = capture_device.frame.copy()\n gray = cv2.cvtColor(image,cv2.COLOR_BGR2GRAY)\n if np.average(gray)> 110:\n faces = face_cascade.detectMultiScale(gray, 1.3, 5) # Detect the faces and store the positions\n for (x, y, w, h) in faces: # Frames LOCATION X, Y WIDTH, HEIGHT\n FaceImage = gray[y - int(h / 2): y + int(h * 1.5),\n x - int(x / 2): x + int(w * 1.5)] # The Face is isolated and cropped\n\n ResultImage = DetectEyes(FaceImage)\n print 'Face detected'\n if ResultImage is not None:\n frame = ResultImage # Show the detected faces\n else:\n frame = gray[y: y + h, x: x + w]\n\n cv2.imwrite(position_ID+'.jpg',frame)\n img_path = 
position_ID+'.jpg'\n return (img_path,configData.SUCCESS)\n return configData.NO_FACE_FOUND\n else:\n return configData.BRIGHTNESS_LOW\n\ndef createCaptureDevice(source=0):\n cap = cv2.VideoCapture(source)\n return cap\n\ndef showLiveFeed(frame_buffer):\n while True:\n # ret,image = capture_device.read()\n # capture_device.stream()\n image = frame_buffer.get()\n cv2.namedWindow(\"LiveFeed\")\n cv2.imshow(\"LiveFeed\",image)\n if cv2.waitKey(1) & 0xFF == (ord('q')):\n break\n\n return\n\ndef daemonVideoCapture(capture_device,queue):\n success, image = capture_device.read()\n while success:\n success,image = capture_device.read()\n queue.put(image)\n\ndef init_threads():\n videoThread = threading.Thread(target=daemonVideoCapture,args = (captureDevice,buffer_queue,))\n videoThread.setDaemon(True)\n videoThread.start()\n\n liveStreamThread = threading.Thread(target=showLiveFeed, args=(buffer_queue,))\n liveStreamThread.start()\n\n\ndef Main():\n # from camera import VideoCamera\n captureDevice = createCaptureDevice(0)\n\n videoThread = threading.Thread(target=daemonVideoCapture,args = (captureDevice,buffer_queue,))\n videoThread.setDaemon(True)\n videoThread.start()\n\n try:\n ret = create_new_Record('UID_001')\n print ret\n except OSError as e:\n print e\n except Exception as e:\n print e\n\n t1 = threading.Thread(target=showLiveFeed, args=(buffer_queue,))\n t1.start()\n count = 0\n print 'Ready for capture faces \\nPress \"c\" for Capture'\n while True:\n\n ch = str(raw_input('Press \"c\" for capture'))\n if ch == 'c' and count<=10:\n print 'Wait for 1 second for capture of face features'\n path = configData.PARENT_PATH+'UID_001/'+str(count)\n # captureFacePhoto(captureDevice,path)\n captureFacePhoto(buffer_queue,path)\n count+=1\n elif count >10 :\n print 'DataSet complete, need only 10 photos'\n return\n\n\n\nif __name__==\"__main__\":\n Main()\n " }, { "alpha_fraction": 0.7508650422096252, "alphanum_fraction": 0.7508650422096252, "avg_line_length": 31.22222137451172, "blob_id": "980b54598b1f31c97cd5f2e9911ece97c32920f3", "content_id": "2c13ed2792f7005f09205c27910f4ac24f0cf97e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 289, "license_type": "no_license", "max_line_length": 67, "num_lines": 9, "path": "/configData.py", "repo_name": "ZEPORATH/faceRecognizer", "src_encoding": "UTF-8", "text": "FOLDER_EXISTS = True\nSUCCESS = True\nNO_FACE_FOUND = True\nBRIGHTNESS_LOW = True\n\nPARENT_PATH = './dataSets/'\nHAAR_FACE_CASCADE = './Haar/haarcascade_frontalface_default.xml'\nHAAR_EYE_CASCADE = './Haar/haarcascade_eye.xml'\nHAAR_GLASS_EYE_CASCADE = 'Haar/haarcascade_eye_tree_eyeglasses.xml'" } ]
2
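Note: configData.py above sets FOLDER_EXISTS, SUCCESS, NO_FACE_FOUND and BRIGHTNESS_LOW all to True, so the status values captureFacePhoto returns are indistinguishable to a caller. A sketch with distinct values follows; only the constant names come from the repo, while the values and the handler are illustrative:

```python
# Distinct status values so return codes can be told apart.
FOLDER_EXISTS  = 'folder_exists'
SUCCESS        = 'success'
NO_FACE_FOUND  = 'no_face_found'
BRIGHTNESS_LOW = 'brightness_low'

def describe_capture_result(result):
    # captureFacePhoto returns (img_path, SUCCESS) on success and a
    # bare status constant otherwise.
    if isinstance(result, tuple) and result[1] == SUCCESS:
        return 'saved ' + result[0]
    if result == NO_FACE_FOUND:
        return 'no face detected, try again'
    if result == BRIGHTNESS_LOW:
        return 'scene too dark, add more light'
    return 'unknown status'
```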
KamilB91/haccp-control-system
https://github.com/KamilB91/haccp-control-system
9b163faeb85137ddc5aa0f0b3eb029aad7aadc6d
382526b5f1e7cd95fa31bb3d6c3da7f616542784
657bca99c0fe7772bdd13317e9bad870a549068b
refs/heads/main
2023-02-05T08:29:34.316104
2020-12-09T05:46:43
2020-12-09T05:46:43
313,834,053
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6530612111091614, "alphanum_fraction": 0.6530612111091614, "avg_line_length": 48, "blob_id": "f0f574217106e6d14ba006354b8887b128aec573", "content_id": "72040dd2339c648d0d3be99b865bc966dbeedbe8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 49, "license_type": "no_license", "max_line_length": 48, "num_lines": 1, "path": "/config.py", "repo_name": "KamilB91/haccp-control-system", "src_encoding": "UTF-8", "text": "INGREDIENT_CATEGORY = ['Frozen', 'Raw', 'Spice']\n" }, { "alpha_fraction": 0.6188490390777588, "alphanum_fraction": 0.6209341287612915, "avg_line_length": 22.979999542236328, "blob_id": "6156b66b79e81d5e4f6ac2c8296bce9978c38743", "content_id": "661f7ded638e234189222816210ab9dff2eb5255", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2398, "license_type": "no_license", "max_line_length": 91, "num_lines": 100, "path": "/forms.py", "repo_name": "KamilB91/haccp-control-system", "src_encoding": "UTF-8", "text": "from flask_wtf import FlaskForm, Form\nfrom wtforms import (StringField, widgets, SelectMultipleField, FormField,\n SelectField)\nfrom wtforms.fields.html5 import DateField, TimeField, IntegerField\nfrom wtforms.validators import DataRequired, ValidationError, Length\nfrom config import INGREDIENT_CATEGORY\n\nfrom models import Product, Ingredient\n\n\ndef product_exists(form, field):\n if Product.select().where(Product.name == field.data).exists():\n raise ValidationError('This product already exists in database.')\n\n\ndef ingredient_exists(form, field):\n if Ingredient.select().where(Ingredient.name == field.data).exists():\n raise ValidationError('This ingredient already exists in database.')\n\n\nclass AddIngredientForm(FlaskForm):\n name = StringField(\n 'Ingredient name',\n validators=[\n DataRequired(),\n Length(min=3),\n ingredient_exists\n ]\n )\n batch_code = StringField(\n 'Batch code',\n validators=[\n DataRequired(),\n Length(min=3)\n ]\n )\n category = SelectField('Ingredient category', choices=[x for x in INGREDIENT_CATEGORY])\n\n\nclass MultiCheckboxField(SelectMultipleField):\n widget = widgets.ListWidget(prefix_label=False)\n option_widget = widgets.CheckboxInput()\n\n\nclass IngredientCheckboxForm(Form):\n ingredient_checkbox = MultiCheckboxField()\n\n\nclass AddProductForm(FlaskForm):\n name = StringField(\n 'Product name',\n validators=[\n DataRequired(),\n Length(min=3),\n product_exists\n ]\n )\n ingredients_checkbox = FormField(IngredientCheckboxForm)\n\n\nclass AddBatchCode(FlaskForm):\n batch_code = StringField(\n 'New Batch code',\n validators=[\n DataRequired(),\n Length(min=3)\n ]\n )\n\n\nclass PickProduct(FlaskForm):\n product = SelectField()\n\n\nclass ProcessDetails(FlaskForm):\n start_time = TimeField(\n 'start time',\n validators=[\n DataRequired()\n ]\n )\n finish_time = TimeField(\n 'finish time',\n validators=[\n DataRequired()\n ]\n )\n temperature = IntegerField(\n 'temp.',\n validators=[\n DataRequired()\n ]\n )\n\n\nclass SelectDateForm(FlaskForm):\n date = DateField(\n 'Select Date',\n format='%Y-%m-%d'\n )\n" }, { "alpha_fraction": 0.6134212613105774, "alphanum_fraction": 0.6134212613105774, "avg_line_length": 24.82962989807129, "blob_id": "a3d38975fdd6c92a890e9f75af29b66a706028ae", "content_id": "153f6dd75579588cf0dc9ae21c4c073756c7a69f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3487, "license_type": "no_license", 
"max_line_length": 109, "num_lines": 135, "path": "/models.py", "repo_name": "KamilB91/haccp-control-system", "src_encoding": "UTF-8", "text": "import datetime\n\nfrom peewee import *\n\nDB = SqliteDatabase('database.db')\n\n\nclass BaseModel(Model):\n class Meta:\n database = DB\n\n\nclass User(BaseModel):\n username = CharField()\n password = CharField()\n\n\n# models to create relations and control batch codes\nclass Product(BaseModel):\n name = CharField(unique=True)\n\n def get_ingredients(self):\n return (\n ProductIngredient.select().where(ProductIngredient.product == self)\n )\n\n @classmethod\n def create_product(cls, name):\n try:\n with DB.transaction():\n cls.create(\n name=name\n )\n except IntegrityError:\n raise ValueError('Product already exists')\n\n\nclass Ingredient(BaseModel):\n name = CharField(unique=True)\n category = CharField()\n\n def get_batch_codes(self):\n return BatchCode.select().where(BatchCode.ingredient == self)\n\n @classmethod\n def create_ingredient(cls, name, category):\n try:\n with DB.transaction():\n x = cls.create(\n name=name,\n category=category\n )\n return x\n except IntegrityError:\n raise ValueError('Product already exists')\n\n\nclass BatchCode(BaseModel):\n batch_code = CharField()\n ingredient = ForeignKeyField(Ingredient, backref='batch_codes')\n added_at = DateTimeField(default=datetime.datetime.now)\n active = BooleanField(default=True)\n\n class Meta:\n order_by = ('-added_at',)\n\n\nclass ProductIngredient(BaseModel):\n product = ForeignKeyField(Product, backref='products')\n ingredient = ForeignKeyField(Ingredient, backref='ingredients')\n\n\nclass Process(BaseModel):\n product_name = CharField()\n process_type = CharField(null=True)\n start_time = TimeField(null=True)\n finish_time = TimeField(null=True)\n completed= BooleanField(default=False)\n temperature = IntegerField(null=True)\n date = DateField()\n\n\nclass UsedIngredient(BaseModel):\n name = CharField()\n batch = CharField()\n type = CharField()\n date = DateField()\n\n @classmethod\n def create_used_ingredient(cls, name, batch, type, date):\n if not UsedIngredient.get_or_none(name=name, batch=batch, date=date):\n with DB.transaction():\n cls.create(\n name=name,\n batch=batch,\n type=type,\n date=date,\n )\n\n\nclass ProductionDay(BaseModel):\n date = DateField()\n batch = CharField()\n\n def cooked_products(self):\n return Process.select().where(Process.date == self.date)\n\n def used_ingredients(self):\n return UsedIngredient.select().where(UsedIngredient.date == self.date)\n\n\ndef initialize():\n if not DB.is_closed():\n DB.close()\n\n DB.connect()\n DB.create_tables([User, Product, Ingredient, BatchCode, ProductIngredient,\n Process, UsedIngredient, ProductionDay], safe=True)\n DB.close()\n\n\ndef test():\n \"\"\"\n product = Product.get(name='Lasagne')\n ingredients = product.get_ingredients()\n for i in ingredients:\n print(i.product.name, i.ingredient.name)\n \"\"\"\n \"\"\"\n DB.connect()\n processes = Process.select()\n for process in processes:\n print(process.id, process.process_type, process.start_time, process.finish_time, process.temperature,\n process.product.id, process.product.name)\n \"\"\"\n" }, { "alpha_fraction": 0.6078612208366394, "alphanum_fraction": 0.6082802414894104, "avg_line_length": 39.44745635986328, "blob_id": "04400bcee793b6ea4a3d497d40436e0db0bfd6dd", "content_id": "4a122356d8c3d3e6b3c217ef9a51bf684ce010d3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11932, "license_type": 
"no_license", "max_line_length": 133, "num_lines": 295, "path": "/app.py", "repo_name": "KamilB91/haccp-control-system", "src_encoding": "UTF-8", "text": "from flask import Flask, url_for, render_template, g, flash, redirect, request\n# from flask_login import LoginManager, login_user, logout_user, login_required, current_user\nimport datetime\nimport models\nimport forms\nfrom config import INGREDIENT_CATEGORY\nimport tables\n\napp = Flask(__name__)\napp.secret_key = 'nHJBKJhkj3uhun3enml,;LK@JiwnNme,,e2moke2e,l1moiouUu2m3oiIIOII(&&uh42*@'\n\nmodels.initialize()\nmodels.test()\n\n# login_manager = LoginManager()\n# login_manager.init_app(app)\n\n\n# @login_manager.user_loader\n# def load_user(userid):\n #try:\n #return models.User.get(models.User.id == userid)\n #except models.DoesNotExist:\n #return None\n\n\[email protected]_request\ndef before_request():\n g.db = models.DB\n g.db.connect()\n # g.user = current_user\n\n\[email protected]_request\ndef after_request(response):\n g.db.close()\n return response\n\n\[email protected]('/')\ndef index():\n return render_template('layout.html')\n\n\[email protected]('/production_day/<process_type>')\ndef production_day(process_type):\n models.ProductionDay.create(\n date=datetime.date.today(),\n batch=datetime.date.today().strftime('%y%j')\n )\n return redirect(url_for('process', process_type=process_type))\n\n\[email protected]('/add_ingredient', methods=('GET', 'POST'))\ndef add_ingredient():\n form = forms.AddIngredientForm()\n if form.validate_on_submit():\n flash(\"Ingredient added\")\n ingredient = models.Ingredient.create_ingredient(\n name=form.name.data,\n category=form.category.data\n )\n models.BatchCode.create(\n batch_code=form.batch_code.data,\n ingredient=ingredient\n )\n return redirect(url_for('add_ingredient'))\n return render_template('add_ingredient.html', form=form)\n\n\[email protected]('/add_product', methods=('GET', 'POST'))\ndef add_product():\n forms.IngredientCheckboxForm.ingredient_checkbox = forms.MultiCheckboxField(\n 'Ingredients', choices=[(item.name, item.name) for item in models.Ingredient.select()]\n )\n form = forms.AddProductForm()\n if form.validate_on_submit():\n flash(\"Product added\")\n models.Product.create_product(name=form.name.data)\n for checkbox_data in form.ingredients_checkbox.data['ingredient_checkbox']:\n models.ProductIngredient.create(\n product=models.Product.get(name=form.name.data),\n ingredient=models.Ingredient.get(name=checkbox_data)\n )\n return redirect(url_for('product_list'))\n return render_template('add_product.html', form=form)\n\n\[email protected]('/product_list')\[email protected]('/product_list/<product>')\ndef product_list(product=None):\n if product:\n product = models.Product.get(name=product)\n ingredients = (x.ingredient for x in product.get_ingredients())\n return render_template('product.html',\n product=product,\n ingredients=ingredients)\n else:\n products = models.Product.select()\n return render_template('products_list.html', products=products)\n\n\[email protected]('/ingredient/<ingredient_id>', methods=['GET', 'POST'])\ndef ingredient(ingredient_id):\n form = forms.AddBatchCode()\n active_batch_table = tables.ActiveBatchCodeTable\n deactive_batch_table = tables.DeactiveBatchCodeTable\n ingredient = models.Ingredient.get(id=ingredient_id)\n active_batch_codes = [dict(id=batch_code.id,\n ingredient_id=ingredient.id,\n batch_code=batch_code.batch_code) for batch_code in ingredient.batch_codes if batch_code.active]\n deactive_batch_codes = [dict(id=batch_code.id,\n 
batch_code=batch_code.batch_code) for batch_code in ingredient.batch_codes if not batch_code.active]\n if form.validate_on_submit():\n models.BatchCode.create(\n batch_code=form.batch_code.data,\n ingredient=ingredient\n )\n return redirect(url_for('ingredient',\n ingredient_id=ingredient_id,\n table=active_batch_table(active_batch_codes),\n not_active_table=deactive_batch_table(deactive_batch_codes),\n form=form,\n ingredient_name=ingredient.name\n )\n )\n return render_template('ingredient.html',\n table=active_batch_table(active_batch_codes),\n not_active_table=deactive_batch_table(deactive_batch_codes),\n form=form,\n ingredient_name=ingredient.name\n )\n\n\[email protected]('/ingredients_list')\ndef ingredients_list():\n frozen_table = tables.TraceTable\n raw_table = tables.TraceTable\n spice_table = tables.TraceTable\n ingredients = models.Ingredient.select()\n frozen_ingredients = []\n raw_ingredients = []\n spice_ingredients = []\n for ingredient in ingredients:\n batch_code = [batch_code.batch_code for batch_code in ingredient.batch_codes if batch_code.active]\n if ingredient.category == 'Frozen':\n frozen_ingredients.append(dict(id=ingredient.id, name=ingredient.name, batch=batch_code[0]))\n elif ingredient.category == 'Raw':\n raw_ingredients.append(dict(id=ingredient.id, name=ingredient.name, batch=batch_code[0]))\n elif ingredient.category == 'Spice':\n spice_ingredients.append(dict(id=ingredient.id, name=ingredient.name, batch=batch_code[0]))\n return render_template('ingredients_list.html',\n frozen_table=frozen_table(frozen_ingredients),\n raw_table=raw_table(raw_ingredients),\n spice_table=spice_table(spice_ingredients)\n )\n\n\[email protected]('/deactivate_batch_code/<batch_code_id>/<ingredient_id>', methods=['GET', 'POST'])\ndef deactivate_batch_code(batch_code_id, ingredient_id):\n batch_code = models.BatchCode.get(id=batch_code_id)\n batch_code.active = False\n batch_code.save()\n return redirect(url_for('ingredient', ingredient_id=ingredient_id))\n\n\[email protected]('/process/<process_type>', methods=['POST', 'GET'])\ndef process(process_type):\n today = models.ProductionDay.get_or_none(date=datetime.date.today())\n forms.PickProduct.product = forms.SelectField('Product',\n choices=[x.name for x in models.Product.select()])\n form = forms.PickProduct()\n process = None\n\n if form.validate_on_submit():\n # this form in process.html is not provided for Packing room(process_type=cooling)\n # creates a product to be cooked and its first process\n if process_type == 'assembly-cooking':\n process = models.Process.create(\n product_name=form.product.data,\n date=datetime.date.today()\n )\n elif process_type == 'cooking':\n # if process_type came from the kitchen then process is created with process_type of 'cooking'\n process = models.Process.create(\n product_name=form.product.data,\n process_type=process_type,\n date=datetime.date.today()\n )\n # TODO .join method\n used_ingredients = [x.ingredient for x in models.Product.get(name=process.product_name).get_ingredients()]\n for ingredient in used_ingredients:\n batch_code = [batch for batch in ingredient.get_batch_codes() if batch.active]\n print(batch_code[0].batch_code)\n models.UsedIngredient.create_used_ingredient(\n name=ingredient.name,\n batch=batch_code[0].batch_code,\n type=ingredient.category,\n date=datetime.date.today()\n )\n\n # if process_type comes from assembly, process creates without process type,\n # that will be chosen later in update_process via select part of form for each product process in 
processes.html\n\n processes = models.Process.select().where(models.Process.completed == False)\n for process in processes:\n print(process.product_name, process.process_type, process.completed)\n\n return render_template('process.html',\n form=form,\n processes=processes,\n process_type=process_type,\n production_day=today)\n\n\[email protected]('/update_process/<process_id>', methods=['POST', 'GET'])\ndef update_process(process_id):\n process_to_update = models.Process.get(id=process_id)\n if request.method == 'POST':\n process_to_update.start_time = request.form['start']\n process_to_update.finish_time = request.form['finish']\n process_to_update.temperature = request.form['temp']\n process_to_update.completed = True\n\n if process_to_update.process_type == 'cooking':\n models.Process.create(\n product_name=process_to_update.product_name,\n date=datetime.date.today()\n )\n elif process_to_update.process_type == 'assembly-cooking':\n models.Process.create(\n product_name=process_to_update.product_name,\n process_type='cooling',\n date=datetime.date.today()\n )\n return_to = process_to_update.process_type\n try:\n if request.form['process']:\n process_to_update.process_type = request.form['process']\n return_to = 'assembly-cooking'\n if request.form['process'] == 'assembly-assembly':\n models.Process.create(\n product_name=process_to_update.product_name,\n process_type='assembly-cooking',\n date=datetime.date.today()\n )\n elif request.form['process'] == 'cooling-cooling':\n models.Process.create(\n product_name=process_to_update.product_name,\n process_type='cooling',\n date=datetime.date.today()\n )\n except KeyError:\n pass\n\n process_to_update.save()\n\n return redirect(url_for('process', process_type=return_to))\n\n\[email protected]('/select_day', methods=['GET', 'POST'])\ndef select_day():\n form = forms.SelectDateForm()\n select_production_day = models.ProductionDay.select()\n if form.validate_on_submit():\n select_production_day = models.ProductionDay.select().where(models.ProductionDay.date == form.date.data)\n return render_template('select_day.html',\n form=form,\n select_production_day=select_production_day)\n\n\[email protected]('/show_day_details/<day_id>')\ndef show_day_details(day_id):\n selected_day = models.ProductionDay.get(id=day_id)\n cooked_products = models.Process.select().where(models.Process.date == selected_day.date)\n return render_template('show_day_details.html',\n selected_day=selected_day,\n cooked_products=cooked_products)\n\n\[email protected]('/ingredient_traceability/<day_id>')\ndef ingredient_traceability(day_id):\n table = tables.IngredientTraceabilityTable\n selected_day = models.ProductionDay.get(id=day_id)\n cooked_products = models.Process.select().where(models.Process.date == selected_day.date)\n used_ingredients = models.UsedIngredient.select().where(models.UsedIngredient.date == selected_day.date)\n return render_template('ingredient_traceability.html',\n selected_day=selected_day,\n cooked_products=cooked_products,\n categories=INGREDIENT_CATEGORY,\n table=table(used_ingredients))\n\n\nif __name__ == '__main__':\n app.run(threaded=True)\n" }, { "alpha_fraction": 0.686246395111084, "alphanum_fraction": 0.686246395111084, "avg_line_length": 25.846153259277344, "blob_id": "0c26daf83919eb11f125c8223a5f7e9dd34bc8a6", "content_id": "a090f142e64c54573c3eb5cee64cb86cdeaebeb3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 698, "license_type": "no_license", "max_line_length": 128, 
"num_lines": 26, "path": "/tables.py", "repo_name": "KamilB91/haccp-control-system", "src_encoding": "UTF-8", "text": "from flask_table import Table, Col, LinkCol, ButtonCol\n\n\nclass IngredientTraceabilityTable(Table):\n name = Col('Name')\n batch = Col('Batch')\n\n\nclass AssemblyTable(Table):\n product_name = Col('Name')\n start_time = Col('Start time')\n finish_time = Col('Assembly finish time')\n\n\nclass TraceTable(Table):\n name = LinkCol('Name', 'ingredient', url_kwargs=dict(ingredient_id='id'), attr='name')\n batch = Col('Batch code')\n\n\nclass ActiveBatchCodeTable(Table):\n batch_code = Col('New')\n spent_button = ButtonCol('Old', 'deactivate_batch_code', url_kwargs=dict(batch_code_id='id', ingredient_id='ingredient_id'))\n\n\nclass DeactiveBatchCodeTable(Table):\n batch_code = Col('Old')\n" } ]
5
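Note: app.py above carries a "# TODO .join method" comment where it gathers a product's ingredients by looping over ProductIngredient rows in Python. A minimal sketch of the peewee join that TODO points at, assuming the schema defined in models.py:

```python
from models import Product, Ingredient, ProductIngredient

def ingredients_for(product_name):
    # One query through the ProductIngredient join table instead of a
    # Python-side loop over its rows.
    return (Ingredient
            .select()
            .join(ProductIngredient, on=(ProductIngredient.ingredient == Ingredient.id))
            .join(Product, on=(ProductIngredient.product == Product.id))
            .where(Product.name == product_name))
```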
cybersg/seemp
https://github.com/cybersg/seemp
7e9546cd31b5cccec6218d38734735db0b4312d4
aa46f09933ad71f143e745cc2f127619a3a45c8c
c962d4a3bac2aa1537bd6981e4040c16c73eca9f
refs/heads/master
2015-08-23T06:23:21.799484
2015-04-14T21:53:39
2015-04-14T21:53:39
33,958,852
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7511211037635803, "alphanum_fraction": 0.7533632516860962, "avg_line_length": 21.299999237060547, "blob_id": "f30960a89efb9ae355b4f72466c743a57bd1694f", "content_id": "07561385e37fac9da0cf6ea64d2d9e8e4524d08c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 446, "license_type": "no_license", "max_line_length": 52, "num_lines": 20, "path": "/seemp/__init__.py", "repo_name": "cybersg/seemp", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n__author__ = 'cybersg'\n\nfrom flask import Flask\nfrom flask_sqlalchemy import SQLAlchemy\nfrom flask_debugtoolbar import DebugToolbarExtension\nfrom flask_util_js import FlaskUtilJs\n\napp = Flask(__name__)\n\napp.config.from_object('conf.Config')\napp.config.from_envvar('FLASK_CONFIG', silent=True)\n\nutiljs = FlaskUtilJs(app)\n#toolbar = DebugToolbarExtension(app)\n\ndb = SQLAlchemy(app)\n\nimport seemp.views\n" }, { "alpha_fraction": 0.6205673813819885, "alphanum_fraction": 0.6276595592498779, "avg_line_length": 24.636363983154297, "blob_id": "206fe927cb8061b6f19de18ab95b80b9302704e4", "content_id": "5d0d7e1dddbf306bb4bdea56147efa79dc8a547e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 282, "license_type": "no_license", "max_line_length": 85, "num_lines": 11, "path": "/conf.py", "repo_name": "cybersg/seemp", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n__author__ = 'cybersg'\n\nimport os\nbasedir = os.path.abspath(os.path.dirname(__file__))\n\nclass Config(object):\n DEBUG=True\n SECRET_KEY = \"SSZELDX3S2IS21WG\"\n SQLALCHEMY_DATABASE_URI = \"sqlite:///{0}\".format(os.path.join(basedir, \"app.db\"))\n" }, { "alpha_fraction": 0.5935727953910828, "alphanum_fraction": 0.6086956262588501, "avg_line_length": 24.190475463867188, "blob_id": "13c21ed9bbf4b77eb2d5cfe9d3171d386b19e993", "content_id": "791fad7f1fc3b58eb9c20b371b4c3b04913d9d31", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 529, "license_type": "no_license", "max_line_length": 58, "num_lines": 21, "path": "/seemp/models.py", "repo_name": "cybersg/seemp", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n__author__ = 'cybersg'\n\nfrom seemp import db\n\nclass Room(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n name = db.Column(db.String(length=64))\n\n def __repr__(self):\n return \"<Room {0}:{1}>\".format(self.id, self.name)\n\n\nclass Image(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n room_id = db.Column(db.ForeignKey('room.id'))\n path = db.Column(db.String(length=128))\n\n def __repr__(self):\n return \"<Image {}>\".format(self.path)\n" }, { "alpha_fraction": 0.6850000023841858, "alphanum_fraction": 0.6899999976158142, "avg_line_length": 15.75, "blob_id": "45de4dbcf8737621aef6645a4ccf07035c3842ee", "content_id": "a666d74375aac7c8a725bee7fa669336b7b290c2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 200, "license_type": "no_license", "max_line_length": 36, "num_lines": 12, "path": "/shell.py", "repo_name": "cybersg/seemp", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n__author__ = 'cybersg'\n\nimport os\nimport readline\nfrom pprint import pprint\n\nfrom flask import *\nfrom seemp import *\n\nos.environ['PYTHONINSPECT'] = 'True'" }, { "alpha_fraction": 
0.6515151262283325, "alphanum_fraction": 0.6565656661987305, "avg_line_length": 17, "blob_id": "1467fbc48c5f2cdcb0b3a0c6c63ae65da549a52b", "content_id": "57abf6db59df844f6dc794a912aeadc7d1d7fb2a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 198, "license_type": "no_license", "max_line_length": 40, "num_lines": 11, "path": "/seemp/views.py", "repo_name": "cybersg/seemp", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n__author__ = 'cybersg'\n\nfrom flask import render_template\n\nfrom seemp import app\n\[email protected](\"/\")\ndef index():\n return render_template(\"index.html\")\n" } ]
5
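Note: models.py above links Image to Room through a plain room_id foreign key but defines no relationship accessor, so fetching a room's images takes two explicit queries. A sketch using the Flask-SQLAlchemy query API; the helper name is introduced here for illustration and is not part of the repo:

```python
from seemp.models import Room, Image

def images_for(room_name):
    # Flask-SQLAlchemy attaches .query to every db.Model subclass.
    room = Room.query.filter_by(name=room_name).first()
    if room is None:
        return []
    return Image.query.filter_by(room_id=room.id).all()
```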
pertschuk/fairseq
https://github.com/pertschuk/fairseq
1a3f75a17de99b6c3d15305b02920b7773fa2f30
114056fcef3929cb19baa123eed17808fd269897
3b3f5545ca0e743c83462acc0ee33e08a5e75309
refs/heads/master
2020-07-05T09:52:04.432059
2019-09-05T15:50:27
2019-09-05T15:50:27
202,614,699
0
0
MIT
2019-08-15T21:37:02
2019-08-15T21:26:10
2019-08-15T21:26:04
null
[ { "alpha_fraction": 0.673389732837677, "alphanum_fraction": 0.6824983954429626, "avg_line_length": 39.47368240356445, "blob_id": "353f8868d7806a8bd715eb035f72b0c488b7296b", "content_id": "94789ae036bc7d73bde1536e2423d3790a73b5a3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1537, "license_type": "permissive", "max_line_length": 128, "num_lines": 38, "path": "/roberta.py", "repo_name": "pertschuk/fairseq", "src_encoding": "UTF-8", "text": "from fairseq.models.roberta import RobertaModel\nfrom fairseq.data.data_utils import collate_tokens\n\n\nMODEL_DIR = './checkpoints/'\nCHECKPOINT_FILE = 'checkpoint_best.pt'\nCLASSES = ['SUPPORTS', 'REFUTES', 'NOT ENOUGH INFO']\n\nclass Roberta (object):\n def __init__(self,model_dir=MODEL_DIR,ckpt_file=CHECKPOINT_FILE,\n use_gpu=False):\n self.model = RobertaModel.from_pretrained(model_dir, checkpoint_file=ckpt_file)\n self.model.eval() # disable dropout\n if use_gpu: self.model.cuda()\n\n def classify_fever(self, claims, evidences):\n roberta = self.model\n batch = collate_tokens([self.trim_sentence(roberta.encode(c, e), max_len=500) for c, e in zip(claims,evidences)], pad_idx=1)\n labels = roberta.predict('sentence_classification_head', batch).argmax(dim=1)\n labels = [CLASSES[label] for label in labels]\n return labels\n\n def trim_sentence(self, sent, max_len):\n return sent if len(sent) < max_len else sent[:max_len]\n\n def encode(self, sentences, pooling_strategy='cls', layer=-4, max_len=400):\n roberta = self.model\n batch = collate_tokens([self.trim_sentence(roberta.encode(sentence), max_len)\n for sentence in sentences], pad_idx=1)\n features = roberta.extract_features(batch,return_all_hiddens=True)[layer]\n if pooling_strategy == 'cls':\n return features[:,0]\n elif pooling_strategy == 'mean':\n return features.mean(dim=1)\n elif pooling_strategy == 'max':\n return features.max(dim=1)[0]\n else:\n raise NotImplementedError()" }, { "alpha_fraction": 0.7586206793785095, "alphanum_fraction": 0.7586206793785095, "avg_line_length": 36.42856979370117, "blob_id": "50f1b2af68e6dd253cadceafa42de76f23125e56", "content_id": "c1b21cde2a745b0e560e621492bf3bab1bf6402e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 261, "license_type": "permissive", "max_line_length": 88, "num_lines": 7, "path": "/roberta/download_fever.sh", "repo_name": "pertschuk/fairseq", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\n\nexport DATA_DIR=fever\nexport OUTPUT_DIR=FEVER-bin\nmkdir $DATA_DIR\nwget -O $DATA_DIR/test.tsv 'https://storage.googleapis.com/poloma-tpu/fever/test.tsv'\nwget -O $DATA_DIR/train.tsv 'https://storage.googleapis.com/poloma-tpu/fever/train.tsv'" }, { "alpha_fraction": 0.662200927734375, "alphanum_fraction": 0.6681339740753174, "avg_line_length": 31.25308609008789, "blob_id": "1279af8f7ff40c1c51c2dfb51db1308493897043", "content_id": "ddaf604aa275f602664bf74d6bedda251db71015", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5225, "license_type": "permissive", "max_line_length": 102, "num_lines": 162, "path": "/roberta/slice_airbnb.py", "repo_name": "pertschuk/fairseq", "src_encoding": "UTF-8", "text": "import pandas as pd\nfrom snorkel.preprocess.nlp import SpacyPreprocessor\nfrom snorkel.augmentation import transformation_function\nimport nltk\nfrom nltk.corpus import wordnet as wn\nfrom snorkel.augmentation import RandomPolicy\nfrom 
snorkel.augmentation import PandasTFApplier\nimport numpy as np\nimport random\n\nnltk.download(\"wordnet\")\n\nimport names\n\nspacy = SpacyPreprocessor(text_field=\"text\", doc_field=\"doc\", memoize=True)\n\n\ndef main():\n df = pd.read_csv('../airbnb/reviews.tsv', sep='\\t')\n\n newdf = df[['comments', 'Great (1) Not Great (0)']]\n newdf.columns = ['text', 'label']\n chunks = []\n labels = []\n buffer = []\n for i, row in newdf.iterrows():\n sents = nltk.sent_tokenize(row['text'])\n for sent in sents:\n buffer.append(sent)\n if (len(buffer)) % 3 == 0:\n chunks.append(\" \".join(buffer))\n labels.append(row['label'])\n buffer = [buffer[random.randint(0,2)]]\n if len(buffer) > 1:\n chunks.append(\" \".join(buffer))\n labels.append(row['label'])\n buffer = []\n\n chunkedDf = pd.DataFrame({'text' : chunks, 'label': labels})\n\n random_policy = RandomPolicy(\n len(tfs), sequence_length=4, n_per_original=1, keep_original=True\n )\n tf_applier = PandasTFApplier(tfs, random_policy)\n newdf_augmented = tf_applier.apply(chunkedDf)\n print(len(newdf))\n print(len(newdf_augmented))\n newdf_augmented.to_csv('airbnb_augmented.csv')\n\ndef train_split():\n df = pd.read_csv('airbnb_augmented.csv')\n msk = np.random.rand(len(df)) < 0.8\n train = df[msk]\n test = df[~msk]\n train.to_csv('train.tsv',sep='\\t')\n test.to_csv('test.tsv', sep='\\t')\n\n\n# Pregenerate some random person names to replace existing ones with\n# for the transformation strategies below\nreplacement_names = [names.get_full_name() for _ in range(50)]\n\n\n# Replace a random named entity with a different entity of the same type.\n@transformation_function(pre=[spacy])\ndef change_person(x):\n person_names = [ent.text for ent in x.doc.ents if ent.label_ == \"PERSON\"]\n # If there is at least one person name, replace a random one. Else return None.\n if person_names:\n name_to_replace = np.random.choice(person_names)\n replacement_name = np.random.choice(replacement_names)\n x.text = x.text.replace(name_to_replace, replacement_name)\n return x\n\n\n# Swap two adjectives at random.\n@transformation_function(pre=[spacy])\ndef swap_adjectives(x):\n adjective_idxs = [i for i, token in enumerate(x.doc) if token.pos_ == \"ADJ\"]\n # Check that there are at least two adjectives to swap.\n if len(adjective_idxs) >= 2:\n idx1, idx2 = sorted(np.random.choice(adjective_idxs, 2, replace=False))\n # Swap tokens in positions idx1 and idx2.\n x.text = \" \".join(\n [\n x.doc[:idx1].text,\n x.doc[idx2].text,\n x.doc[1 + idx1: idx2].text,\n x.doc[idx1].text,\n x.doc[1 + idx2:].text,\n ]\n )\n return x\n\n\ndef get_synonym(word, pos=None):\n \"\"\"Get synonym for word given its part-of-speech (pos).\"\"\"\n synsets = wn.synsets(word, pos=pos)\n # Return None if wordnet has no synsets (synonym sets) for this word and pos.\n if synsets:\n words = [lemma.name() for lemma in synsets[0].lemmas()]\n if words[0].lower() != word.lower(): # Skip if synonym is same as word.\n # Multi word synonyms in wordnet use '_' as a separator e.g. reckon_with. 
Replace it with space.\n return words[0].replace(\"_\", \" \")\n\n\ndef replace_token(spacy_doc, idx, replacement):\n \"\"\"Replace token in position idx with replacement.\"\"\"\n return \" \".join([spacy_doc[:idx].text, replacement, spacy_doc[1 + idx:].text])\n\n\n@transformation_function(pre=[spacy])\ndef replace_verb_with_synonym(x):\n # Get indices of verb tokens in sentence.\n verb_idxs = [i for i, token in enumerate(x.doc) if token.pos_ == \"VERB\"]\n if verb_idxs:\n # Pick random verb idx to replace.\n idx = np.random.choice(verb_idxs)\n synonym = get_synonym(x.doc[idx].text, pos=\"v\")\n # If there's a valid verb synonym, replace it. Otherwise, return None.\n if synonym:\n x.text = replace_token(x.doc, idx, synonym)\n return x\n\n\n@transformation_function(pre=[spacy])\ndef replace_noun_with_synonym(x):\n # Get indices of noun tokens in sentence.\n noun_idxs = [i for i, token in enumerate(x.doc) if token.pos_ == \"NOUN\"]\n if noun_idxs:\n # Pick random noun idx to replace.\n idx = np.random.choice(noun_idxs)\n synonym = get_synonym(x.doc[idx].text, pos=\"n\")\n # If there's a valid noun synonym, replace it. Otherwise, return None.\n if synonym:\n x.text = replace_token(x.doc, idx, synonym)\n return x\n\n\n@transformation_function(pre=[spacy])\ndef replace_adjective_with_synonym(x):\n # Get indices of adjective tokens in sentence.\n adjective_idxs = [i for i, token in enumerate(x.doc) if token.pos_ == \"ADJ\"]\n if adjective_idxs:\n # Pick random adjective idx to replace.\n idx = np.random.choice(adjective_idxs)\n synonym = get_synonym(x.doc[idx].text, pos=\"a\")\n # If there's a valid adjective synonym, replace it. Otherwise, return None.\n if synonym:\n x.text = replace_token(x.doc, idx, synonym)\n return x\n\n\ntfs = [\n change_person,\n replace_noun_with_synonym,\n replace_adjective_with_synonym\n]\n\nif __name__ == '__main__':\n main()\n train_split()\n" }, { "alpha_fraction": 0.6132295727729797, "alphanum_fraction": 0.6280155777931213, "avg_line_length": 26.95652198791504, "blob_id": "cfbee443b871410b189e1fc5a761427d7bd93c70", "content_id": "0e162fedf9de1f92df688f207fde406bbb8acd9c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 1285, "license_type": "permissive", "max_line_length": 70, "num_lines": 46, "path": "/roberta/encode.sh", "repo_name": "pertschuk/fairseq", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\n\nwget -N 'https://dl.fbaipublicfiles.com/fairseq/gpt2_bpe/encoder.json'\nwget -N 'https://dl.fbaipublicfiles.com/fairseq/gpt2_bpe/vocab.bpe'\n\nexport OUTPUT_DIR=fever_train\nexport DATA_DIR=fever\n\nfor SPLIT in train dev; do\n for INPUT in 0 1; do\n python -m examples.roberta.multiprocessing_bpe_encoder \\\n --encoder-json encoder.json \\\n --vocab-bpe vocab.bpe \\\n --inputs \"$DATA_DIR/$SPLIT.input$INPUT\" \\\n --outputs \"$DATA_DIR/$SPLIT.input$INPUT.bpe\" \\\n --workers 60 \\\n --keep-empty\n done\ndone\n\necho 'preprocessing data'\n\nwget -N 'https://dl.fbaipublicfiles.com/fairseq/gpt2_bpe/dict.txt'\n\nfairseq-preprocess \\\n --only-source \\\n --trainpref \"$DATA_DIR/train.input0.bpe\" \\\n --validpref \"$DATA_DIR/dev.input0.bpe\" \\\n --destdir \"$OUTPUT_DIR/input0\" \\\n --workers 60 \\\n --srcdict dict.txt\n\nfairseq-preprocess \\\n --only-source \\\n --trainpref \"$DATA_DIR/train.input1.bpe\" \\\n --validpref \"$DATA_DIR/dev.input1.bpe\" \\\n --destdir \"$OUTPUT_DIR/input1\" \\\n --workers 60 \\\n --srcdict dict.txt\n\nfairseq-preprocess \\\n --only-source \\\n --trainpref 
\"$DATA_DIR/train.label\" \\\n --validpref \"$DATA_DIR/dev.label\" \\\n --destdir \"$OUTPUT_DIR/label\" \\\n --workers 60" }, { "alpha_fraction": 0.774193525314331, "alphanum_fraction": 0.774193525314331, "avg_line_length": 30, "blob_id": "5d73bbeaccfbdbe0f89358812c309fbf1920cac8", "content_id": "77fd376f85533ffd3723a4bb7a2d220edaaeca54", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 124, "license_type": "permissive", "max_line_length": 71, "num_lines": 4, "path": "/roberta/download_model.sh", "repo_name": "pertschuk/fairseq", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\n\nwget https://dl.fbaipublicfiles.com/fairseq/models/roberta.large.tar.gz\ntar -xzvf roberta.large.tar.gz\n" }, { "alpha_fraction": 0.6723163723945618, "alphanum_fraction": 0.6820749640464783, "avg_line_length": 29.88888931274414, "blob_id": "ab654445dbded600569b91e8cf3e8240aa9633fa", "content_id": "1a46b94aa58a31f80a96dfae7aadaec605c9f9ad", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1947, "license_type": "permissive", "max_line_length": 96, "num_lines": 63, "path": "/run_inf.py", "repo_name": "pertschuk/fairseq", "src_encoding": "UTF-8", "text": "from fairseq.models.roberta import RobertaModel\nfrom fairseq.data.data_utils import collate_tokens\n\nimport nltk\nimport random\n\n# DOWNLOAD: wget https://storage.googleapis.com/poloma-models/airbnb_model.tar.gz\n# EXTRACT: tar -xvzf airbnb_model.tar.gz\n\n# MAKE SURE model directory points to where you downloaded the model\nMODEL_DIR = './airbnb_train/'\n\n# DEPENDENCIES:\n# pip install fairseq\n# pip install nltk\n# import nltk\n# nltk.download('punkt')\n\n## USAGE:\n# from run_inf import Roberta\n# model = Roberta(use_gpu=False, model_dir='./airbnb_train/')\n# label = model.classify(review)\n\nCHECKPOINT_FILE = 'checkpoint_best.pt'\nCLASSES = ['NOT_GREAT', 'GREAT']\n\n# how many sentences to run through at the same time. 
Tweak if running out of memory\nCHUNK_SIZE=4\n\n# set bias based on excel spreadsheet\nBIAS = 10\n\n\nclass Roberta (object):\n def __init__(self,model_dir=MODEL_DIR,ckpt_file=CHECKPOINT_FILE,\n use_gpu=False):\n self.model = RobertaModel.from_pretrained(model_dir, checkpoint_file=ckpt_file)\n self.model.eval() # disable dropout\n if use_gpu: self.model.cuda()\n\n def classify(self, review, logits=False):\n reviews = self.batch_review(review)\n roberta = self.model\n tokens = map(lambda x: x if len(x) < 512 else x[:511], [roberta.encode(r) for r in reviews])\n batch = collate_tokens(list(tokens), pad_idx=1)\n label = roberta.predict('sentence_classification_head', batch)\n if logits:\n return label.sum(dim=0).tolist()\n else:\n logits = label.sum(dim=0).tolist()\n return CLASSES[0] if logits[0] > logits[1] + BIAS else CLASSES[1]\n\n def batch_review(self, review):\n sents = nltk.sent_tokenize(review)\n buffer = []\n chunks = []\n for sent in sents:\n buffer.append(sent)\n if (len(buffer)) % CHUNK_SIZE == 0:\n chunks.append(\" \".join(buffer))\n buffer = [buffer[random.randint(0,CHUNK_SIZE-1)]]\n chunks.append(\" \".join(buffer))\n return chunks\n\n" }, { "alpha_fraction": 0.7446808218955994, "alphanum_fraction": 0.7446808218955994, "avg_line_length": 94, "blob_id": "6f3b231d66c15ec82ac042dabe9ee5657d396d79", "content_id": "bc3be55a731fe8acfa6fe9b47c1fa6cffa529f98", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "SQL", "length_bytes": 94, "license_type": "permissive", "max_line_length": 94, "num_lines": 1, "path": "/roberta/get_data.sql", "repo_name": "pertschuk/fairseq", "src_encoding": "UTF-8", "text": "\\copy (select claim, line_text as evidence, label from fever.train_set) to './fever/train.tsv'" }, { "alpha_fraction": 0.6274510025978088, "alphanum_fraction": 0.6713725328445435, "avg_line_length": 37.6363639831543, "blob_id": "9c6b03fcde9fc3ef99b645696d03ca919681fdcc", "content_id": "d9cd365ef79af68d7b6711bd38a7e5bf4e05c412", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 1275, "license_type": "permissive", "max_line_length": 117, "num_lines": 33, "path": "/roberta/train.sh", "repo_name": "pertschuk/fairseq", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\n\nexport TOTAL_NUM_UPDATES=100000\n\nexport WARMUP_UPDATES=7432 # 6 percent of the number of updates\nexport LR=1e-05 # Peak LR for polynomial LR scheduler.\nexport NUM_CLASSES=3\nexport MAX_SENTENCES=4 # Batch size.\nexport ROBERTA_PATH=roberta.large/model.pt\n\nexport DATA_DIR=fever_train\n\nCUDA_VISIBLE_DEVICES=0 python train.py $DATA_DIR \\\n --restore-file $ROBERTA_PATH \\\n --max-positions 512 \\\n --max-sentences $MAX_SENTENCES \\\n --max-tokens 4400 \\\n --task sentence_prediction \\\n --reset-optimizer --reset-dataloader --reset-meters \\\n --required-batch-size-multiple 1 \\\n --init-token 0 --separator-token 2 \\\n --arch roberta_large \\\n --criterion sentence_prediction \\\n --num-classes $NUM_CLASSES \\\n --dropout 0.1 --attention-dropout 0.1 \\\n --weight-decay 0.1 --optimizer adam --adam-betas \"(0.9, 0.98)\" --adam-eps 1e-06 \\\n --clip-norm 0.0 \\\n --lr-scheduler polynomial_decay --lr $LR --total-num-update $TOTAL_NUM_UPDATES --warmup-updates $WARMUP_UPDATES \\\n --fp16 --fp16-init-scale 4 --threshold-loss-scale 1 --fp16-scale-window 128 \\\n --max-epoch 10 \\\n --best-checkpoint-metric accuracy --maximize-best-checkpoint-metric \\\n --truncate-sequence \\\n --find-unused-parameters\n" }, { 
"alpha_fraction": 0.5806159377098083, "alphanum_fraction": 0.5969203114509583, "avg_line_length": 28.078947067260742, "blob_id": "d2d116c7cade7d31863a4dae17b29601213098f7", "content_id": "d1a3cdf4f0d32acd18e015602f5b0f38039b6f65", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1104, "license_type": "permissive", "max_line_length": 70, "num_lines": 38, "path": "/roberta/format_data_fever.py", "repo_name": "pertschuk/fairseq", "src_encoding": "UTF-8", "text": "import argparse\nimport os\nimport random\nimport csv\n\nrandom.seed(0)\n\ndef main(args):\n for split in ['train', 'test']:\n samples = []\n fname = os.path.join(args.datadir, split + '.tsv')\n labels = ['SUPPORTS', 'REFUTES', 'NOT ENOUGH INFO']\n labelMap = dict()\n for i, label in enumerate(labels):\n labelMap[label] = i\n with open(fname) as file:\n for row in csv.reader(file, delimiter='\\t'):\n samples.append((row[0], row[1], labelMap[row[2]]))\n\n random.shuffle(samples)\n out_fname = 'train' if split == 'train' else 'dev'\n f1 = open(os.path.join(args.datadir, out_fname + '.input0'), 'w+')\n f2 = open(os.path.join(args.datadir, out_fname + '.input1'), 'w+')\n f3 = open(os.path.join(args.datadir, out_fname + '.label'), 'w+')\n for sample in samples:\n f1.write(sample[0] + '\\n')\n f2.write(sample[1] + '\\n')\n f3.write(str(sample[2]) + '\\n')\n\n f1.close()\n f2.close()\n f3.close()\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser()\n parser.add_argument('--datadir', default='fever')\n args = parser.parse_args()\n main(args)" }, { "alpha_fraction": 0.570376455783844, "alphanum_fraction": 0.5900163650512695, "avg_line_length": 28.119047164916992, "blob_id": "87715c210874f6a4a3eee0262f4316491bbcd3dc", "content_id": "674f459c8f6ff71b9845cc34b18ea9ad796dd626", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1222, "license_type": "permissive", "max_line_length": 92, "num_lines": 42, "path": "/fever_preds.py", "repo_name": "pertschuk/fairseq", "src_encoding": "UTF-8", "text": "from roberta import Roberta\nfrom polomapy import PolomaConn, PolomaBuff\n\nPSQL_HOST = \"54.172.249.216\"\n\ndef get_claims(n=None):\n p = PolomaConn(host=PSQL_HOST)\n limit = \"limit \" + str(n) if n != None else \"\"\n rows = p.iter_rows('''\n select claim_id, claim, line_text as evidence from fever.test_set order by random() {}\n '''.format(limit))\n return rows\n\ndef main():\n model = Roberta(model_dir='./fever_train',use_gpu=True)\n b = PolomaBuff('fever.test_preds',\n workers=4, # set number of processes\n maxconn=8, # set maximum postgres connections\n maxbuff=50000, # set buffer size to be held in memory\n batchsize=100,\n host=PSQL_HOST) # set batchsize to send to postgres\n\n claims = []\n evidences = []\n ids = []\n for id, claim, evidence in get_claims(1000):\n ids.append(id)\n claims.append(claim)\n evidences.append(evidence)\n if len(claims) > 4:\n print(claims)\n print(evidences)\n labels = model.classify_fever(claims, evidences)\n for id, label in zip(ids, labels):\n b.append((id, label))\n claims = []\n evidences = []\n ids = []\n\n\nif __name__ == '__main__':\n main()" } ]
10
ArunMorampudi/employee_api
https://github.com/ArunMorampudi/employee_api
2ca105b8db939fcb1837530522e5d031c110353c
9723cf24e625607d419d4f7a7146a8d1cca48d71
2e7b97c568521d1e257698a842de7dffe66b8898
refs/heads/master
2020-04-07T01:00:39.501335
2018-11-16T22:18:07
2018-11-16T22:18:07
157,927,448
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6277204751968384, "alphanum_fraction": 0.6323024034500122, "avg_line_length": 26.3125, "blob_id": "2c17488ae42838ecf7e5e19f68e8d8db5422369f", "content_id": "10156fe34dba8340fbed42dc8d619ba48e58bcb1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 873, "license_type": "no_license", "max_line_length": 226, "num_lines": 32, "path": "/app.py", "repo_name": "ArunMorampudi/employee_api", "src_encoding": "UTF-8", "text": "from flask import Flask, request\n\nimport pymysql\n\nconn = pymysql.connect(host='localhost', port=3306, user='root', passwd='Alsbsdla@16', db='employee_details')\n\ncur = conn.cursor()\n\ncur.execute(\"SELECT * from employee_table\")\nk = cur.fetchall()\n\napp = Flask(__name__)\n\n\[email protected]('/')\ndef display():\n global cur\n cur.execute(\"SELECT * from employee_table\")\n k = cur.fetchall()\n return 'Data in Employee_database: ' + str(k)\n\n\[email protected]('/add_employee', methods= ['POST'])\ndef add_employee():\n global cur\n k = request.get_json()\n cur.execute(\"INSERT INTO employee_table (employee_id, employee_name, manager_id, employee_age) VALUES ('\"+str(k['employee_id'])+\"', '\"+str(k['employee_name'])+\"', '\"+str(k['manager_id'])+\"', '\"+str(k['employee_age'])+\"')\")\n conn.commit()\n return 'Inserted:'+str(k)\n\nif __name__ == '__main__':\n app.run(debug=True)" } ]
1
SpeedSourceLAB/Calculator-PyTestProject
https://github.com/SpeedSourceLAB/Calculator-PyTestProject
cbd5a35502e85bfe50ed952b1889cee526a6cd7c
79225f8515ad25859795f37405a1598324a81d21
110bdb12941c128da66f8e432da02f119f0560c2
refs/heads/main
2023-08-07T05:21:38.585765
2021-09-20T17:41:10
2021-09-20T17:41:10
408,537,989
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5261669158935547, "alphanum_fraction": 0.5685997009277344, "avg_line_length": 30.954545974731445, "blob_id": "dce73e218b477f877efc20b6b9e342aabe12c8cd", "content_id": "d1d6cdae260441e823ad1300729e416725213194", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 707, "license_type": "no_license", "max_line_length": 70, "num_lines": 22, "path": "/PyTest_Project/conftest_arithmetic_test.py", "repo_name": "SpeedSourceLAB/Calculator-PyTestProject", "src_encoding": "UTF-8", "text": "\n\"\"\"Use conftest.py while running this test\"\"\"\n\ndef test_add():\n a=2\n b=3\n assert a + b == 6, \"Expected: {}, Result: {}\".format(a+b,6)\n\ndef test_add(numbers):\n assert numbers[0] + numbers[1] == 5, \\\n \"Expected: {}, Result: {}\".format(numbers[0]+numbers[1],5)\n\ndef testsubtract(numbers):\n assert numbers[0] - numbers[1] == 3, \\\n \"Expected: {}, Result: {}\".format(numbers[0] - numbers[1], 3)\n\ndef testmultiply(numbers):\n assert numbers[0] * numbers[1] == 18, \\\n \"Expected: {}, Result: {}\".format(numbers[0] * numbers[1], 18)\n\ndef test_divide(numbers):\n assert (numbers[0] / numbers[1]) == 3, \\\n \"Expected: {}, Result: {}\".format(numbers[0] / numbers[1], 3)\n\n\n\n" }, { "alpha_fraction": 0.6326326131820679, "alphanum_fraction": 0.6556556820869446, "avg_line_length": 28.352941513061523, "blob_id": "ef35273260b9febd30937345185b0ff204e2870e", "content_id": "c31d28d18f8c8b28a6fa02e4234f9e65e417d5cf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 999, "license_type": "no_license", "max_line_length": 86, "num_lines": 34, "path": "/PyTest_Project/Calculator/tests/test_Calculator.py", "repo_name": "SpeedSourceLAB/Calculator-PyTestProject", "src_encoding": "UTF-8", "text": "import pytest\n\nfrom Calculator.Calculator.Calculator import Calculator\n\"\"\"Use conftest.py while running this test\"\"\"\n\nc= Calculator()\n\[email protected]\ndef test_input_value_is_integer(numbers):\n result =c.numbers_is_integer(numbers)\n assert result == True\n\ndef test_add(numbers):\n result = c.add(numbers[0], numbers[1])\n assert result == 12, \"Expected: {}, Result: {}\".format(result, 12)\n\ndef testsubtract(numbers):\n result = c.subtract(numbers[0], numbers[1])\n assert result == 6, \"Expected: {}, Result: {}\".format(result, 6)\n\ndef testmultiply(numbers):\n result = c.multiply(numbers[0], numbers[1])\n assert result == 27, \"Expected: {}, Result: {}\".format(result, 27)\n\ndef test_divide(numbers):\n result = c.divide(numbers[0], numbers[1])\n assert result == 6, \"Expected: {}, Result: {}\".format(result, 3)\n\n\ndef test_divide_by_zero():\n a= 10\n b =0\n result = c.divide(a,b)\n assert result == \"Infinity\", \"Expected: {}, Result: {}\".format(result, \"Infinity\")\n\n" }, { "alpha_fraction": 0.45736435055732727, "alphanum_fraction": 0.49870800971984863, "avg_line_length": 19.36842155456543, "blob_id": "68edb55be04e8ac03c1583a53099c7d0a531ce1d", "content_id": "0ed06e2a2e75f90358203ce9280c4886f9374f39", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 387, "license_type": "no_license", "max_line_length": 63, "num_lines": 19, "path": "/PyTest_Project/basic_arithmetic_test.py", "repo_name": "SpeedSourceLAB/Calculator-PyTestProject", "src_encoding": "UTF-8", "text": "def test_add():\n a=6\n b=3\n assert a + b == 9, \"Expected: {}, Result: {}\".format(a+b,9)\n\ndef testsubtract():\n a=6\n b=3\n assert a-b == 2, 
\"Expected:{}, Result: {}\".format(a-b,2)\n\ndef multiplytest():\n a=6\n b=3\n assert a * b == 6, \"Expected: {}, Result: {}\".format(a*b,6)\n\ndef divide():\n a=6\n b=3\n assert a/b == 2, \"Expected:{}, Result: {}\".format(a/b,2)\n" }, { "alpha_fraction": 0.5733590722084045, "alphanum_fraction": 0.57722008228302, "avg_line_length": 18.11111068725586, "blob_id": "570b0738b1dfe9cc813f704cee11c56dfd1cfb1f", "content_id": "a9036b30d59ffe885697b73231d67b97b9b513e3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 518, "license_type": "no_license", "max_line_length": 50, "num_lines": 27, "path": "/PyTest_Project/Calculator/tests/conftest.py", "repo_name": "SpeedSourceLAB/Calculator-PyTestProject", "src_encoding": "UTF-8", "text": "import pytest\nimport pandas as pd\n\n\[email protected]\ndef numbers():\n a=9\n b=3\n print(\"Herllo\")\n return a,b\n\n\n\n\n\n\[email protected](scope='module')\ndef numbers_csv():\n print(\"\\n=============SETUP==============\")\n data=pd.read_csv('numbers.csv')\n print( \"Initialization Size\",data.size)\n print(\"Initialization Shape\",data.shape)\n yield data\n print(\"\\n============TEAR DOWN==============\")\n data = pd.DataFrame()\n print(\"Teardown Size\",data.size)\n print(\"Teardown Shape\",data.shape)\n\n\n" }, { "alpha_fraction": 0.5860000252723694, "alphanum_fraction": 0.6200000047683716, "avg_line_length": 23.950000762939453, "blob_id": "2056f95d5dea1a326d68e85a60dedfbab0b3485e", "content_id": "39f754ed4f1757400dfa39ad869913b6427490ea", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 500, "license_type": "no_license", "max_line_length": 38, "num_lines": 20, "path": "/PyTest_Project/Calculator/Calculator/Calculator.py", "repo_name": "SpeedSourceLAB/Calculator-PyTestProject", "src_encoding": "UTF-8", "text": "\n# Function to add two numbers\nclass Calculator:\n\n def add(self,num1, num2):\n return num1+num2\n\n # Function to subtract two numbers\n def subtract(self,num1,num2):\n return num1 - num2\n\n # Function to multiply two numbers\n def multiply(self, num1, num2):\n return num1 * num2\n\n # Function to divide two numbers\n def divide(self,num1, num2):\n try:\n return(round(num1/num2,2))\n except ZeroDivisionError as e:\n return \"Infinity\"\n" }, { "alpha_fraction": 0.6192687153816223, "alphanum_fraction": 0.6285548210144043, "avg_line_length": 38.15909194946289, "blob_id": "e4d38c9a98b988f624d2c1fb85efac7b3f5e6c8d", "content_id": "4793b918e8154b631f49dd436600479fc7487ab1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1723, "license_type": "no_license", "max_line_length": 118, "num_lines": 44, "path": "/PyTest_Project/Calculator/tests/test_Calculator_Performance.py", "repo_name": "SpeedSourceLAB/Calculator-PyTestProject", "src_encoding": "UTF-8", "text": "from Calculator.Calculator.Calculator import Calculator\n\n\"\"\"Use conftest.py while running this test\"\"\"\n\nc= Calculator()\n\ndef test_add(numbers_csv):\n for set in numbers_csv.index:\n print(set)\n print(numbers_csv['num1'][set], numbers_csv['num2'][set])\n result = c.add(numbers_csv['num1'][set], numbers_csv['num2'][set])\n print(result, numbers_csv['add'][set])\n assert result== numbers_csv['add'][set], \"Actual: {}, Expected: {}\".format(result,numbers_csv['add'][set])\n\ndef test_subtract(numbers_csv):\n for set in numbers_csv.index:\n #print(numbers_csv['num1'][set], numbers_csv['num2'][set])\n result = 
c.subtract(numbers_csv['num1'][set], numbers_csv['num2'][set])\n assert result== numbers_csv['sub'][set], \"Actual: {}, Expected: {}\".format(result,numbers_csv['sub'][set])\n\ndef test_multiply(numbers_csv):\n for set in numbers_csv.index:\n #print(numbers_csv['num1'][set], numbers_csv['num2'][set])\n result = c.multiply(numbers_csv['num1'][set], numbers_csv['num2'][set])\n assert result== numbers_csv['multi'][set], \"Actual: {}, Expected: {}\".format(result,numbers_csv['multi'][set])\n\ndef test_divide(numbers_csv):\n for set in numbers_csv.index:\n #print(numbers_csv['num1'][set], numbers_csv['num2'][set])\n result = c.divide(numbers_csv['num1'][set], numbers_csv['num2'][set])\n assert result== numbers_csv['div'][set], \"Actual: {}, Expected: {}\".format(result,numbers_csv['div'][set])\n\n\n# def teardown_module():\n# print(\"teardown_module\")\n#\n# def setup_module():\n# print(\"setup_module\")\n#\n# def setup_function():\n# print(\" setup_function\")\n#\n# def teardown_function():\n# print(\" teardown_function\")\n" }, { "alpha_fraction": 0.6545454263687134, "alphanum_fraction": 0.6636363863945007, "avg_line_length": 21.200000762939453, "blob_id": "5df8de277ab57a0533017ab81ee9e377703a44bf", "content_id": "f3dc6af43532fdaead03dcb8012e5a1dc5c8b60b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 110, "license_type": "no_license", "max_line_length": 67, "num_lines": 5, "path": "/PyTest_Project/Calculator/tests/pytest.ini", "repo_name": "SpeedSourceLAB/Calculator-PyTestProject", "src_encoding": "UTF-8", "text": "[pytest]\n\n#addopts = --maxfail 2\nmarkers =\n DB: marks tests as database tests (deselect with '-m \"not DB\"')" }, { "alpha_fraction": 0.5696202516555786, "alphanum_fraction": 0.594936728477478, "avg_line_length": 10, "blob_id": "414e14ea6156a3c7c0c45e7ee1e1f8ef499ecf32", "content_id": "7fad088c4affd1ae96020e040f9a5af1a80e1853", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 79, "license_type": "no_license", "max_line_length": 15, "num_lines": 7, "path": "/PyTest_Project/conftest.py", "repo_name": "SpeedSourceLAB/Calculator-PyTestProject", "src_encoding": "UTF-8", "text": "import pytest\n\[email protected]\ndef numbers():\n a=6\n b=3\n return a,b\n\n\n" }, { "alpha_fraction": 0.6439393758773804, "alphanum_fraction": 0.6553030014038086, "avg_line_length": 21.08333396911621, "blob_id": "9d116162b6e421a28397fd75fc61c86c6731e730", "content_id": "e8233291a3ebd9f61ddff5f4ddf175c2a0e2c139", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 264, "license_type": "no_license", "max_line_length": 67, "num_lines": 12, "path": "/PyTest_Project/UserApp/pytest.ini", "repo_name": "SpeedSourceLAB/Calculator-PyTestProject", "src_encoding": "UTF-8", "text": "[pytest]\n\n#addopts = --maxfail 2\naddopts = --strict-markers --maxfail 20\nmarkers =\n DB: marks tests as database tests (deselect with '-m \"not DB\"')\n abc\n api\n ui\n#python_files = check_*.py *_check.py\n#python_classes = Check*\n#python_functions = *_check" }, { "alpha_fraction": 0.7310924530029297, "alphanum_fraction": 0.7310924530029297, "avg_line_length": 19, "blob_id": "8cd990b835c293e68b99d24517c9be1eab4eea4a", "content_id": "ae3ddea1f0233544e1e44415827d938240821082", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 119, "license_type": "no_license", "max_line_length": 39, "num_lines": 6, "path": 
"/PyTest_Project/pytest.ini", "repo_name": "SpeedSourceLAB/Calculator-PyTestProject", "src_encoding": "UTF-8", "text": "[pytest]\n\naddopts = --strict-markers\nmarkers =\n regression:Run the regression tests\n sanity: Run the sanity tests" }, { "alpha_fraction": 0.6806083917617798, "alphanum_fraction": 0.6882129311561584, "avg_line_length": 15.5, "blob_id": "af4e12884f93744a60892dbffe6aeff4dd501107", "content_id": "9b694b83b0ceed5d80cd45d5a9781de648003760", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 263, "license_type": "no_license", "max_line_length": 44, "num_lines": 16, "path": "/PyTest_Project/UserApp/conftest.py", "repo_name": "SpeedSourceLAB/Calculator-PyTestProject", "src_encoding": "UTF-8", "text": "import pytest\n\[email protected](scope='session')\ndef database_connection():\n print(\"Database Connection Established\")\n return True\n\[email protected](scope='session')\ndef ui_open():\n return True\n\[email protected]\ndef numbers():\n a=9\n b=3\n return a,b" } ]
11
mambocab/algorithms
https://github.com/mambocab/algorithms
f70f775e4b506bcedf38981b73f3ca957f82b75a
c90b97dee03e7399b75249d196992f9d9191546b
1b290bb49af52b8e2cc4fc046df0ddc9894064e1
refs/heads/master
2021-01-15T10:13:01.458501
2015-01-14T15:50:04
2015-01-14T15:50:04
28,716,830
0
0
null
2015-01-02T15:17:22
2015-01-02T14:29:14
2014-12-29T16:55:10
null
[ { "alpha_fraction": 0.5217044949531555, "alphanum_fraction": 0.5284746885299683, "avg_line_length": 31.610389709472656, "blob_id": "c9594d6f331d84fd23a20aece8349dd6bf6089c0", "content_id": "e34c1621a4d43b6f377b9cf07a862cd76f874587", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2511, "license_type": "no_license", "max_line_length": 78, "num_lines": 77, "path": "/dijkstra.py", "repo_name": "mambocab/algorithms", "src_encoding": "UTF-8", "text": "from collections import namedtuple\n\nclass Edge(namedtuple('Edge', 'start end cost')):\n __slots__ = ()\n\n def __contains__(self, v):\n return v == self.start or v == self.end\n\n def other(self, v):\n if v == self.start:\n return self.end\n if v == self.end:\n return self.start\n raise ValueError\n\n\nclass Graph:\n def __init__(self, vertices, edges):\n self.edges = edges\n self.vertices = vertices\n\n def connections(self, v):\n yield from [e for e in self.edges if v in e]\n\n def check_in_graph(self, v):\n if v not in self.vertices:\n template = \"'{v}' not in vertices: {vs}\"\n raise ValueError(template.format(v=v, vs=self.vertices))\n\n\n def shortest_distance(self, source, destination=None):\n self.check_in_graph(source)\n if destination:\n self.check_in_graph(destination)\n\n if source == destination:\n return 0\n\n distances = {v: float('inf') for v in self.vertices}\n distances[source] = 0\n distances.update(**{e.other(source): e.cost\n for e in self.connections(source)})\n\n unvisited = [v for v in self.vertices if v != source]\n\n while (destination is None and unvisited) or destination in unvisited:\n current = min(unvisited, key=distances.get)\n unvisited.remove(current)\n\n for e in self.connections(current):\n other = e.other(current)\n distances[other] = min(distances.get(other, float('inf')),\n distances[current] + e.cost)\n if destination:\n return distances[destination]\n return distances\n\n\n\n\nif __name__ == '__main__':\n # test data from http://rosettacode.org/wiki/Dijkstra%27s_algorithm\n rosetta_vertices = ('a', 'b', 'c', 'd', 'e', 'f')\n rosetta_edges = (Edge(start='a', end='b', cost=7),\n Edge(start='a', end='c', cost=9),\n Edge(start='a', end='f', cost=14),\n Edge(start='b', end='c', cost=10),\n Edge(start='b', end='d', cost=15),\n Edge(start='c', end='d', cost=11),\n Edge(start='c', end='f', cost=2),\n Edge(start='d', end='e', cost=6),\n Edge(start='e', end='f', cost=9))\n\n g = Graph(rosetta_vertices, rosetta_edges)\n\n print(g.shortest_distance('a', 'e'))\n print(g.shortest_distance('a'))\n" }, { "alpha_fraction": 0.5583333373069763, "alphanum_fraction": 0.5770833492279053, "avg_line_length": 24.263158798217773, "blob_id": "21567e534fb1357252a183be6b0a824173e77a56", "content_id": "bfdfde38973e4ece2e4f24ea3c5d7728f58b7b53", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 960, "license_type": "no_license", "max_line_length": 80, "num_lines": 38, "path": "/binary_search.py", "repo_name": "mambocab/algorithms", "src_encoding": "UTF-8", "text": "from itertools import product\nfrom rumble import rumble\n\n\[email protected]\ndef recursive(n, collection):\n if not collection:\n return None\n\n mid = len(collection) // 2\n\n if n < collection[mid]:\n return recursive(n, collection[:mid])\n elif collection[mid] == n:\n return mid\n elif collection[mid] < n:\n return recursive(n, collection[mid+1:])\n\[email protected]\ndef iterative(n, collection):\n candidates = collection[:]\n\n while candidates:\n mid = 
len(candidates) // 2\n if n < candidates[mid]:\n candidates = candidates[:mid]\n elif candidates[mid] == n:\n return mid\n elif candidates[mid] < n:\n candidates = candidates[mid+1:]\n return None\n\n\nif __name__ == '__main__':\n for t, n in product([tuple, list], [100, 1000, 10000]):\n xs = t(range(n))\n rumble.arguments(50, xs, _name=('len(' + t.__name__ + ') == ' + str(n)))\n rumble.run()\n" }, { "alpha_fraction": 0.6796537041664124, "alphanum_fraction": 0.6851370930671692, "avg_line_length": 35.86170196533203, "blob_id": "71f6046068589a45677adaacdbc131dcae5b2746", "content_id": "905598c3c8ae341cc311098cd429128171d0dcf1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3465, "license_type": "no_license", "max_line_length": 81, "num_lines": 94, "path": "/utils/getnamedtuple.py", "repo_name": "mambocab/algorithms", "src_encoding": "UTF-8", "text": "import sys\nfrom collections import namedtuple\n\ntry:\n from collections import OrderedDict\nexcept:\n from ordereddict import OrderedDict\n\nnamedtuple_has_rename_kwarg = sys.version_info[:2] >= (2, 7)\n\ndef get_namedtuple(name, data_arg=None, _verbose=False, _rename=False, **kw):\n '''\n Creates a one-off namedtuple with a single function call, without\n explicitly instantiating it as a new class. For example, what was once:\n\n >>> Point = namedtuple('Point', ['x', 'y'])\n >>> p = Point(3, 5)\n >>> p.x, p.y\n (3, 5)\n\n becomes:\n\n >>> p = get_namedtuple('Point', x=3, y=5)\n >>> p.x, p.y\n (3, 5)\n\n This function can also take a key-value collection as input:\n\n >>> g = {'informal': 'Hi there!', 'formal': 'Hello; nice to meet you.'}\n >>> greetings = get_namedtuple('Greetings', g)\n >>> greetings.informal\n 'Hi there!'\n >>> greetings.formal\n 'Hello; nice to meet you.'\n\n It raises a ValueError if passed both a collection and keyword arguments\n for namedtuple fields.\n\n Note that, in the above ways of calling get_namedtuple, the order of the\n arguments to get_namedtuple is not necessarily preserved in the order of\n the returned namedtuple's fields. In this case, to access particular\n fields by index rather than by name, you must first determine the index of\n the attribute you want by looking at the _fields attribute:\n\n >>> i = greetings._fields.index('informal')\n >>> greetings[i]\n 'Hi there!'\n\n Do not write code that depends on the order of the returned namedtuple's\n fields if you call the function in the above ways, as not all Python\n implementations guarantee deterministic order for dictionary keys and\n keyword arguments.\n\n You can also get namedtuples with guaranteed field order, sidestepping the\n problem entirely, by passing an ordered collection of key-value pairs:\n\n >>> get_namedtuple('Point', (('a', 5), ('b', 7)))._fields\n ('a', 'b')\n\n Internally, this is passed to the OrderedDict constructor, so inputs must\n conform to valid OrderedDict constructor arguments. You can also construct\n an OrderedDict yourself and pass it in if you prefer. 
See the\n documentation for OrderedDict in the collections module for more\n information.\n\n If _verbose and/or _rename are true, it calls namedtuple with verbose\n and/or rename, respectively; these arguments begin with an underscore to\n allow callers to specify fields called 'verbose' and 'rename'.\n\n This function performs no error handling around the instantiation of the\n namedtuple class or object, so it may raise any error that namedtuple\n does.\n\n For further documentation on verbose, rename, errors, and the returned\n namedtuple itself, see the documentation for namedtuple in the collections\n module.\n '''\n\n if data_arg is not None and kw != {}:\n msg = 'get_namedtuple() called with {} and {}, but '.format(data_arg, kw)\n msg += 'it takes a collection or keyword arguments, not both.'\n raise ValueError(msg)\n\n # constructing OrderedDict allows ordered inputs like\n # [('key1', 1), ('key2', 2)]\n kw = OrderedDict(data_arg) if data_arg is not None else kw\n\n # prepare keyword arguments for namedtuple() call\n nt_opts = {'verbose': _verbose}\n # rename kwarg introduced in 2.7\n if namedtuple_has_rename_kwarg:\n nt_opts['rename'] = _rename\n\n return namedtuple(name, kw.keys(), **nt_opts)(**kw)\n" }, { "alpha_fraction": 0.5052909851074219, "alphanum_fraction": 0.5449735522270203, "avg_line_length": 17, "blob_id": "63c5083e48f95383f5721e0d711dfd67dc101125", "content_id": "fe1eda9ee4dabb3c006bcce75703b412534ca020", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 756, "license_type": "no_license", "max_line_length": 53, "num_lines": 42, "path": "/sq_root.py", "repo_name": "mambocab/algorithms", "src_encoding": "UTF-8", "text": "from random import uniform\n\nfrom rumble import rumble\n\nepsilon = .000001\ninit_guess = 10\n\ndef close(a, b):\n return (b - epsilon) < a < (b + epsilon)\n\[email protected]\ndef newton(n):\n guess = init_guess\n guess_2 = guess ** 2\n\n while not close(guess_2, n):\n guess = guess - ((guess_2 - n) / (2 * guess))\n guess_2 = guess ** 2\n\n return guess\n\n\[email protected]\ndef binary(n):\n bottom, top = 0.0, float(n)\n guess = uniform(bottom, top)\n guess_2 = guess ** 2\n\n while not close(guess_2, n):\n if guess_2 < n:\n bottom = guess\n else:\n top = guess\n # bisect: without recomputing the guess this loop never terminates\n guess = (bottom + top) / 2\n guess_2 = guess ** 2\n\n return guess\n\n\nif __name__ == '__main__':\n for n in [400, 20000]:\n rumble.arguments(n)\n rumble.run()\n" }, { "alpha_fraction": 0.36599764227867126, "alphanum_fraction": 0.4203069806098938, "avg_line_length": 23.91176414489746, "blob_id": "c5ad667a2195f86283b9b959c216c403970daeec", "content_id": "88406a09ca55d1be04e886e22f92a914e8a5a5a5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 847, "license_type": "no_license", "max_line_length": 68, "num_lines": 34, "path": "/longest_increasing_subsequence.py", "repo_name": "mambocab/algorithms", "src_encoding": "UTF-8", "text": "def wiki(xs):\n # patience-style O(n log n) LIS: m[l] holds the index of the\n # smallest tail of any increasing subsequence of length l\n m = [0] * (len(xs) + 1)\n preds = [0 for x in xs]\n\n longest = 0\n\n for i, x in enumerate(xs):\n # binary search for the longest prefix length that x can extend\n lo, hi = 1, longest\n while lo <= hi:\n mid = (lo + hi) // 2\n if xs[m[mid]] < x:\n lo = mid + 1\n else:\n hi = mid - 1\n\n preds[i], m[lo] = m[lo - 1], i\n if lo > longest:\n longest = lo\n\n # walk the predecessor chain back from the tail of the longest run\n rv = []\n k = m[longest]\n for _ in range(longest):\n rv.append(xs[k])\n k = preds[k]\n return rv[::-1]\n\nif __name__ == '__main__':\n output = wiki([0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5,\n 13, 3, 11, 4, 5, 6, 7, 15, 8, 9])\n expected = 
[0, 2, 3, 4, 5, 6, 7, 8, 9]\n print(output)\n assert len(output) == len(expected)\n" } ]
5
heweiyou/Crawer
https://github.com/heweiyou/Crawer
3cc0da8a0ed273521531be2490e8e873f44a175a
d0579c8da9a0ba301353366c5a1ee6a137b03650
71e790dbc5c6de73e4ba48a16d9e2883a38d750a
refs/heads/master
2021-01-09T20:14:39.790706
2016-07-23T09:55:42
2016-07-23T09:55:42
64,010,005
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6630939841270447, "alphanum_fraction": 0.6680957674980164, "avg_line_length": 22.923076629638672, "blob_id": "8c5d4d52a58001481a62f51e9427213aa4290820", "content_id": "3800da77c5ee88f2a7fc27e071d34cf3ae6526b4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2885, "license_type": "no_license", "max_line_length": 77, "num_lines": 117, "path": "/MultiSpider.py", "repo_name": "heweiyou/Crawer", "src_encoding": "UTF-8", "text": "#coding=utf-8\n\nimport threading\nimport urllib\nimport re\nimport time\n\ng_mutex = threading.Condition()\ng_pages=[]#从中解析所有url连接\ng_queueURL=[]#等待爬取的url链接列表\ng_existURL=[]#已经爬取过得url连接列表\ng_failedURL=[]#下载失败的url连接列表\ng_totalcount=0#下载过得页面数\n\nclass CrawlerThread(threading.Thread):\n\tdef __init__(self,url,filename,tid):\n\t\tthreading.Thread.__init__(self)\n\t\tself.url = url\n\t\tself.filename = filename\n\t\tself.tid = tid\n\t\n\tdef run(self):\n\t\tglobal g_mutex\n\t\tglobal g_failedURL\n\t\tglobal g_queueURL\n\t\ttry:\n\t\t\tpage=urllib.urlopen(self.url)\n\t\t\thtml = page.read()\n\t\t\tfout = file(self.filename,'w')\n\t\t\tfout.write(html)\n\t\t\tfout.close()\n\t\texcept Exception,e:\n\t\t\tg_mutex.acquire()\n\t\t\tg_existURL.append(self.url)\n\t\t\tg_failedURL.append(self.url)\n\t\t\tg_mutex.release()\n\t\t\tprint 'Failed downloading and saving',self,url\n\t\t\tprint e\n\t\t\treturn None\n\n\t\tg_mutex.acquire()\n\t\tg_pages.append(html)\n\t\tg_existURL.append(self.url)\n\t\tg_mutex.release()\n\nclass Crawler:\n\tdef __init__(self,crawlername,url,threadnum):\n\t\tself.crawlername = crawlername\n\t\tself.url = url \n\t\tself.threadnum = threadnum\n\t\tself.threadpool=[]\n\t\tself.logfile = file(\"log.txt\",'w')\n\t\n\tdef getUrl(self,content):\n\t\treg=r'\"(http://.+?)\"'\n\t\tregob = re.compile(reg,re.DOTALL)\n\t\turllist = regob.findall(content)\n\t\treturn urllist\n\n\tdef updateQueueURL(self):\n\t\tglobal g_queueURL\n\t\tglobal g_existURL\n\t\tnewUrlList = []\n\t\tfor content in g_pages:\n\t\t\tnewUrlList += self.getUrl(content)\n\t\tg_queueURL = list(set(newUrlList)-set(g_existURL))\n\n\tdef download(self,url,filename,tid):\n\t\tcrawthread = CrawlerThread(url,filename,tid)\n\t\tself.threadpool.append(crawthread)\n\t\tcrawthread.start()\n\n\tdef downloadAll(self):\n\t\tglobal g_queueURL\n\t\tglobal g_totalcount\n\t\ti = 0\n\t\twhile i < len(g_queueURL):\n\t\t\tj = 0\n\t\t\twhile j < self.threadnum and i+j < len(g_queueURL):\n\t\t\t\tg_totalcount += 1\n\t\t\t\tthreadresult = self.download(g_queueURL[i+j],str(g_totalcount)+\".html\",j)\n\t\t\t\tif threadresult != None:\n\t\t\t\t\tprint 'Thread started:',i+j,'--File number =',g_totalcount\n\t\t\t\tj += 1\n\t\t\ti += j\n\t\t\tfor thread in self.threadpool:\n\t\t\t\tthread.join(30)\n\t\t\t\n\t\t\tthreadpool = []\n\t\tg_queueURL = []\n\t\n\tdef craw(self):\n\t\tglobal g_queueURL\n\t\tg_queueURL.append(url)\n\t\tdepth = 0\n\t\tprint self.crawlername+\" starting...\"\n\n\t\twhile len(g_queueURL)!= 0:\n\t\t\tdepth += 1\n\t\t\tprint 'Searching depth ',depth,'...\\n\\n'\n\t\t\tself.logfile.write(\"URL:\"+g_queueURL[0]+\".........\")\n\t\t\tself.downloadAll()\n\t\t\tself.updateQueueURL()\n\t\t\tcontent = '\\n>>>Depth '+str(depth)+':\\n'\n\t\t\tself.logfile.write(content)\n\t\t\ti = 0\n\t\t\twhile i < len(g_queueURL):\n\t\t\t\tcontent = str(g_totalcount+i)+'->'+g_queueURL[i]+'\\n'\n\t\t\t\tself.logfile.write(content)\n\t\t\t\ti += 1\n\nif __name__ == \"__main__\":\n\turl = raw_input(\"please input url:\")\n\tthreadnum = int(raw_input(\"set 
thread number:\"))\n\tcrawlername = \"crawler\"\n\tcrawler = Crawler(crawlername,url,threadnum)\n\tcrawler.craw()\n" } ]
1
erlinvan/oving6
https://github.com/erlinvan/oving6
9c58133340b578419561fd1cff9452711ca4ccea
de2efa81083aed7d1b0aadaefe83370e95e8051a
202caa4e5aba0059306c41eded30ee9a5f193773
refs/heads/master
2020-04-03T22:24:56.497538
2018-11-13T14:04:34
2018-11-13T14:04:34
155,599,653
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6265690326690674, "alphanum_fraction": 0.6276150345802307, "avg_line_length": 35.769229888916016, "blob_id": "c20627bc3a5448766d8bec2007bea0c999efc3d6", "content_id": "8e09df28c797daf22905bf2577f7fbaf5276f1e2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 956, "license_type": "no_license", "max_line_length": 73, "num_lines": 26, "path": "/arbitrator.py", "repo_name": "erlinvan/oving6", "src_encoding": "UTF-8", "text": "class Arbitrator:\n\n # Hensikten med denne klassen er aa velge den beste behavioren\n\n def choose_action(self, behaviors):\n winning_behavior = None\n max_weight = -1\n\n for behavior in behaviors:\n\n # Hvis behavioren skal stoppe returnerer vi umiddelbart denne\n if behavior.halt_request:\n print(behavior.name, \" will be recommended\")\n return behavior.motor_recommendations\n\n # Hvis den ikke skal stoppe velger behavior med hoyest weight\n elif behavior.weight > max_weight:\n max_weight = behavior.weight\n winning_behavior = behavior\n\n # Kjorer bare fremover hvis ingen behavior ble funnet,\n if winning_behavior is None:\n print(\"Found no behavior, driving forwards\")\n return [\"f\"]\n print(winning_behavior.name, \" will be recommended\")\n return winning_behavior.motor_recommendations\n" }, { "alpha_fraction": 0.5772839784622192, "alphanum_fraction": 0.5772839784622192, "avg_line_length": 27.125, "blob_id": "6be6835a4b0a8c63714127701c36b1323e708760", "content_id": "5d6d8eb1cbdc13c7356809e356e7047381c78d48", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2025, "license_type": "no_license", "max_line_length": 114, "num_lines": 72, "path": "/sensob.py", "repo_name": "erlinvan/oving6", "src_encoding": "UTF-8", "text": "from abc import abstractmethod\n\nfrom reflectance_sensors import *\nfrom ultrasonic import *\nfrom camera import *\n\n\nclass Sensob: # interface mellom en eller flere sensorer i bbcons 'behaviors'\n\n def __init__(self):\n self.sensors = []\n self.value = None\n\n def get_value(self):\n return self.value\n\n @abstractmethod\n def update(self): # tvinger sensorer til aa faa verdier en gang per iterasjon\n return\n\n def reset(self):\n for sensor in self.sensors:\n sensor.reset()\n\n\nclass ReflectanceSensob(Sensob):\n\n def __init__(self):\n super(ReflectanceSensob, self).__init__()\n self.sensor = ReflectanceSensors()\n self.sensors.append(self.sensor)\n\n def update(self): # returnerer list of values, [left, midleft, midright, right]\n self.sensor.update()\n self.value = self.sensor.get_value()\n return self.value\n\n def get_value(self): # returnerer list of values, [left, midleft, midright, right]\n return self.value\n\n\nclass UltrasonicSensob(Sensob):\n\n def __init__(self):\n super(UltrasonicSensob, self).__init__()\n self.sensor = Ultrasonic()\n self.sensors.append(self.sensor)\n # print(\"US-sensob created.\")\n\n def update(self):\n self.sensor.update()\n self.value = self.sensor.get_value()\n return self.value\n\n def get_value(self):\n return self.value # returnerer value som distanse i cm\n\n\nclass CameraSensob(Sensob):\n def __init__(self):\n super(CameraSensob, self).__init__()\n self.sensor = Camera()\n self.sensors.append(self.sensor)\n self.value = None\n\n def update(self):\n self.sensor.update()\n self.value = self.sensor.get_value()\n return self.value\n\n def get_value(self):\n return self.value # returnerer value som en RGB-array\n" }, { "alpha_fraction": 0.6451612710952759, 
"alphanum_fraction": 0.8064516186714172, "avg_line_length": 14.5, "blob_id": "7628753304b14369456f1619cedd07a02a9979b7", "content_id": "3694dc57ef9ecc3da8e845f7d754c8350e5eb745", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 31, "license_type": "no_license", "max_line_length": 18, "num_lines": 2, "path": "/README.md", "repo_name": "erlinvan/oving6", "src_encoding": "UTF-8", "text": "# Zumorobot\nProsjekt 6 TDT4113\n" }, { "alpha_fraction": 0.6262136101722717, "alphanum_fraction": 0.6284201145172119, "avg_line_length": 32.32352828979492, "blob_id": "386e67f89f8928016b00a33fac7e6fb8294930d7", "content_id": "91f234554db704e62570e0b2f37c218fe354f7e4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2266, "license_type": "no_license", "max_line_length": 94, "num_lines": 68, "path": "/bbcon.py", "repo_name": "erlinvan/oving6", "src_encoding": "UTF-8", "text": "from arbitrator import Arbitrator\nfrom time import sleep\nfrom motob import Motob\nfrom behavior import Photo\n\nclass Bbcon:\n\n def __init__(self):\n self.behaviors = [] #Liste over alle behaviors, aktive og inaktive\n self.active_behaviors = [] # Kun aktive behaviors\n self.sensobs = [] # Liste over sensorer\n self.motobs = Motob(self) # Liste med motorobjekter\n self.arbitrator = Arbitrator() # Arbitrator-objektet, velger winning behavior\n self.num_timesteps = 0 # Hvor mange timesteps som er kjort\n self.can_take_photo = False\n\n\n #Trivielt, legger til behavior i listen\n def add_behavior(self, behavior):\n if behavior not in self.behaviors:\n self.behaviors.append(behavior)\n\n # Trivielt, legger til sensor-objekt i listen\n def add_sensor(self, sensor):\n if sensor not in self.sensobs:\n self.sensobs.append(sensor)\n\n # Legger til behavior i listen over active-behaviors\n def activate_behavior(self, behavior):\n if behavior in self.behaviors:\n self.active_behaviors.append(behavior)\n\n # Fjerner aktive behaviors fra active-behaviors listen\n def deactivate_behavior(self, behavior):\n if behavior in self.active_behaviors:\n self.active_behaviors.remove(behavior)\n\n # Resetter dersom bilde allerede er tatt\n def photo_taken(self):\n self.can_take_photo = False\n self.motobs.photograph = False\n\n # loopen til klassen\n def run_one_timestep(self):\n # Oppdaterer behaviors\n for behaviour in self.behaviors:\n behaviour.update()\n\n # Henter ut motor-recommendations\n print(\"Active behaviors\", self.active_behaviors)\n motor_recoms = self.arbitrator.choose_action(self.active_behaviors)\n\n # Oppdaterer motobs\n self.motobs.update(motor_recoms)\n\n if self.motobs.photograph:\n self.can_take_photo = True\n\n # vent slik at motorene kan gjore tingen sin\n sleep(0.25)\n\n # Reset sensorverdiene\n for sensor in self.sensobs:\n sensor.reset()\n\n self.active_behaviors=[]\n\n self.num_timesteps += 1\n" }, { "alpha_fraction": 0.6575342416763306, "alphanum_fraction": 0.6575342416763306, "avg_line_length": 18.04347801208496, "blob_id": "e0f045391898475b44001890d9bf6cef46433c85", "content_id": "ffdd46f81fc61b13d0a9c465ff05ed8537b2a09f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 438, "license_type": "no_license", "max_line_length": 36, "num_lines": 23, "path": "/main.py", "repo_name": "erlinvan/oving6", "src_encoding": "UTF-8", "text": "from bbcon import Bbcon\nfrom behavior import *\nfrom zumo_button import ZumoButton\n\ndef main():\n\n bbcon = Bbcon()\n 
lineRider = FollowLine(bbcon)\n obstruction = Obstruction(bbcon)\n photo = Photo(bbcon)\n\n bbcon.add_behavior(lineRider)\n bbcon.add_behavior(obstruction)\n bbcon.add_behavior(photo)\n\n ZumoButton().wait_for_press()\n\n while True:\n bbcon.run_one_timestep()\n\n\nif __name__ == \"__main__\":\n main()\n" }, { "alpha_fraction": 0.5236160755157471, "alphanum_fraction": 0.5525647401809692, "avg_line_length": 32.37288284301758, "blob_id": "19a7a2d3656a87f6e2a1c7bd39a05462b82f75b", "content_id": "3c4a00b4b140d112bc6437f885d249367a241991", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1969, "license_type": "no_license", "max_line_length": 120, "num_lines": 59, "path": "/motob.py", "repo_name": "erlinvan/oving6", "src_encoding": "UTF-8", "text": "from motors import Motors\nfrom sensob import CameraSensob\nfrom time import sleep\n\n\nclass Motob:\n\n def __init__(self, bbcon):\n self.bbcon = bbcon\n self.values = []\n self.motor = Motors()\n self.photograph = False\n self.camera=CameraSensob()\n\n def update(self, motor_recommendation):\n # Receives a recommendation from the bbcon and the behaviors\n\n self.values = motor_recommendation\n self.operationlize()\n\n def operationlize(self):\n # Pulls the first value from the recommendations; the number of degrees is given as the second element of self.values\n # when the recommendation is 'l' or 'r'\n\n value=self.values[0]\n print(\"Motor Recommendation = \", value)\n if value == \"f\":\n print(\"Forward\")\n self.motor.set_value([0.5, 0.5],0.15)\n elif value == \"l\":\n print(\"Left\")\n self.motor.set_value([-1,1], self.turn_n_degrees(self.values[1]))\n elif value == \"r\":\n print(\"Right\")\n self.motor.set_value([1,-1], self.turn_n_degrees(self.values[1]))\n elif value == 'fl':\n print('Left and forward')\n self.motor.set_value([0.05, 0.35],0.15)\n elif value == 'fr':\n print('Right and forward')\n self.motor.set_value([0.35, 0.05],0.15)\n elif value == 't':\n self.motor.set_value([-0.5, 0.5], 0.25)\n self.motor.set_value([0.5, -0.5], 0.25)\n print(\"Found red!\")\n self.motor.set_value([-1, 1], self.turn_n_degrees(180))\n self.bbcon.photo_taken()\n elif value == \"s\":\n print(\"Stop\")\n self.motor.stop()\n self.photograph = True\n sleep(1)\n elif value == 'p':\n self.camera.update()\n\n @staticmethod\n def turn_n_degrees(deg):\n # Returns the number of seconds the motors must run at full speed, forwards and backwards respectively, to match the given degrees\n return 0.0028 * deg\n" }, { "alpha_fraction": 0.5473974943161011, "alphanum_fraction": 0.5584472417831421, "avg_line_length": 28.51931381225586, "blob_id": "2a9cb87befc2f18e8cca1ab3b48ecb39aa66b8d8", "content_id": "d621017fdfdc072c827c8c009b75c0c9729d3ed4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6878, "license_type": "no_license", "max_line_length": 123, "num_lines": 233, "path": "/behavior.py", "repo_name": "erlinvan/oving6", "src_encoding": "UTF-8", "text": "from abc import *\nfrom sensob import *\nfrom imager2 import Imager\n\n\nclass Behavior:\n\n def __init__(self, bbcon):\n\n self.bbcon = bbcon # The BBCON controller in which the behavior is used\n self.sensobs = [] # The sensobs this behavior uses\n self.motor_recommendations = [\"none\"] # Motor recommendation to be sent to the arbitrator\n self.active_flag = False # is the behavior active?\n self.halt_request = False # signals that the behavior wants to halt.\n self.priority = 0 # The behavior's priority\n self.match_degree = 0 # Either 0 or 1. 
Used together with weight and priority.\n self.weight = self.match_degree * self.priority # the behavior's weighting when it is used by the arbitrator.\n self.name = \"\"\n\n # Tests whether the behavior should be deactivated\n def consider_deactivation(self):\n pass\n\n # Tests whether the behavior should be activated\n def consider_activation(self):\n pass\n\n # Function that is run to update the behavior\n def update(self):\n pass\n\n def sense_and_act(self):\n pass\n\n\n# Stops the robot if the sensor detects an object\nclass Obstruction(Behavior):\n\n def __init__(self, bbcon):\n super(Obstruction,self).__init__(bbcon)\n self.name = \"Obstruction\"\n self.u_sensob = UltrasonicSensob()\n self.sensobs.append(self.u_sensob)\n\n # Activate the behavior if the sensor sees something closer than 10 centimeters\n def consider_activation(self):\n val=self.u_sensob.get_value()\n print(val)\n if val < 10:\n self.bbcon.activate_behavior(self)\n self.active_flag = True\n self.halt_request = True\n\n # Deactivate the behavior if the sensor does NOT see anything closer than 10 centimeters\n def consider_deactivation(self):\n val = self.u_sensob.get_value()\n print(val)\n if val > 10:\n self.bbcon.deactivate_behavior(self)\n self.active_flag = False\n self.halt_request = False\n\n def update(self):\n\n for sensor in self.sensobs:\n sensor.update()\n\n if self.active_flag:\n self.consider_deactivation()\n else:\n self.consider_activation()\n\n # set weight = 0 if not active\n if not self.active_flag:\n self.weight = 0\n return\n\n self.sense_and_act()\n self.weight = self.priority * self.match_degree\n\n def sense_and_act(self):\n self.motor_recommendations = [\"s\"]\n self.priority = 1\n self.match_degree = 1\n\n\n# Drives forwards\nclass DriveForward(Behavior):\n\n def __init__(self, bbcon):\n super(DriveForward, self).__init__(bbcon)\n self.name = \"DriveForward\"\n self.active_flag = True\n self.r_sensob = ReflectanceSensob()\n self.sensobs.append(self.r_sensob)\n self.treshold = 0.5\n\n def consider_activation(self):\n if self.active_flag:\n self.bbcon.activate_behavior(self)\n\n def consider_deactivation(self):\n return\n\n def update(self):\n self.r_sensob.update()\n self.consider_activation()\n self.sense_and_act()\n self.weight = self.priority * self.match_degree\n\n def sense_and_act(self):\n self.motor_recommendations = [\"s\"]\n self.priority = 0.5\n self.match_degree = 0.5\n\n\nclass FollowLine(Behavior):\n\n def __init__(self, bbcon):\n super(FollowLine, self).__init__(bbcon)\n self.name = \"FollowLine\"\n self.r_sensob = ReflectanceSensob()\n self.sensobs.append(self.r_sensob)\n self.treshold = 0.3\n\n def consider_activation(self):\n\n for value in self.r_sensob.update():\n if value < self.treshold:\n self.bbcon.activate_behavior(self)\n self.active_flag = True\n return\n\n # Deactivates the behavior\n self.weight = 0\n self.bbcon.deactivate_behavior(self)\n self.active_flag = False\n\n def consider_deactivation(self):\n self.consider_activation()\n\n def update(self):\n\n self.consider_activation()\n self.sense_and_act()\n self.weight = self.priority * self.match_degree\n\n def sense_and_act(self):\n\n self.r_sensob.update()\n\n if self.r_sensob.get_value()[0] < self.treshold:\n self.motor_recommendations = [\"l\",30]\n self.match_degree = 0.8\n\n elif self.r_sensob.get_value()[5] < self.treshold:\n self.motor_recommendations = [\"r\",30]\n self.match_degree = 0.8\n\n elif self.r_sensob.get_value()[1] < self.treshold:\n self.motor_recommendations = [\"l\", 15]\n self.match_degree = 0.8\n\n elif self.r_sensob.get_value()[4] < 
self.treshold:\n self.motor_recommendations = [\"r\",15]\n self.match_degree = 0.8\n\n else:\n self.motor_recommendations = [\"f\"]\n self.match_degree = 0.5\n\n self.priority = 0.5\n\n\nclass Photo(Behavior):\n def __init__(self, bbcon):\n super(Photo, self).__init__(bbcon)\n self.name = \"Photo\"\n self.c_sensob = CameraSensob()\n self.sensobs.append(self.c_sensob)\n\n def consider_activation(self):\n\n if self.bbcon.can_take_photo:\n self.bbcon.activate_behavior(self)\n self.halt_request = True\n self.active_flag = True\n\n def consider_deactivation(self):\n\n if not self.bbcon.can_take_photo:\n self.bbcon.deactivate_behavior(self)\n self.halt_request = False\n self.active_flag = False\n\n def update(self):\n\n if self.active_flag:\n self.consider_deactivation()\n else:\n self.consider_activation()\n\n self.sense_and_act()\n self.weight = self.priority * self.match_degree\n\n def sense_and_act(self):\n\n if self.bbcon.can_take_photo:\n print(\"Taking photo!\")\n image_obj = self.c_sensob.update()\n img = Imager(image=image_obj)\n img.dump_image('/')\n\n self.match_degree = 0.9\n\n triple2 = [0] * 3\n for x in range(img.xmax):\n for y in range(img.ymax):\n t = img.get_pixel(x, y)\n for i in range(len(triple2)):\n triple2[i] += t[i]\n\n print(\"RGB\", triple2)\n print(triple2[0] > triple2[1] and triple2[0] > triple2[2])\n\n if triple2[0] > triple2[1] and triple2[0] > triple2[2]:\n self.motor_recommendations = ['t']\n\n else:\n self.motor_recommendations = ['f']\n self.bbcon.photo_taken()\n\n self.priority = 0.9\n" }, { "alpha_fraction": 0.5449235439300537, "alphanum_fraction": 0.5586668848991394, "avg_line_length": 25.70183563232422, "blob_id": "979224da989e5ec6e5d8dcac5567b2a7b507d69c", "content_id": "f85f583dd935e50a071642757c4b06304fbf19ed", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11642, "license_type": "no_license", "max_line_length": 148, "num_lines": 436, "path": "/j/Bbconcop.py", "repo_name": "erlinvan/oving6", "src_encoding": "UTF-8", "text": "import random\nfrom camera import *\nfrom imager2 import *\nfrom motors import *\nfrom irproximity_sensor import *\nfrom reflectance_sensors import *\nfrom ultrasonic import *\nfrom zumo_button import *\nfrom time import sleep\n\nclass BBCON:\n\n def __init__(self):\n self.behaviors = [] #liste med alle behaviors\n self.active_behav = [] #list of active behaviors\n #self.inactive_behaviors = []\n self.sensobs = [] #liste med sensor objekter\n self.motobs = [] #liste med motor objekter\n self.arbitrator = Arbitrator(self, False)\n self.current_timestep = 0\n self.active_camera = False\n\n self.add_behavior(Approach(self))\n self.add_behavior(camera_behavior(self))\n self.add_behavior(IR_behavior(self))\n\n for behavior in self.behaviors:\n for sensob in behavior.sensobs:\n if sensob not in self.sensobs:\n self.add_sensob(sensob)\n\n self.motobs = [Motob()]\n\n def add_behavior(self,behavior):\n self.behaviors.append(behavior)\n\n def add_sensob(self, sensob):\n self.sensobs.append(sensob)\n\n def activate_behavior(self, behavior): #legger til behavior i active listen\n if behavior not in self.active_behav:\n self.active_behav.append(behavior)\n\n def deactivate_behavior(self, behavior): #fjerner behavior fra inactive listen\n if behavior in self.active_behav:\n self.active_behav.remove(behavior)\n\n\n def run_one_timestep(self):\n for i in range(len(self.sensobs)):\n self.sensobs[i].update()\n for j in range(len(self.behaviors)):\n self.behaviors[j].update()\n 
self.motobs[0].update(self.arbitrator.choose_action().motor_rec)\n sleep(0.4)\n for sensob in self.sensobs:\n sensob.reset()\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nclass Arbitrator:\n\n def __init__(self, bbcon, stochastic):\n self.bbcon = bbcon #skulle ha pointer til bbcon\n self.stochastic = stochastic #boolean som velger om vi velger hoyest vekt eller random\n\n def choose_action(self):\n if self.stochastic:\n return self.stochastic()\n else:\n return self.highest_weight()\n\n def highest_weight(self):\n\n highest = self.bbcon.active_behav[0].weight\n vinner = self.bbcon.active_behav[0]\n for behavior in self.bbcon.active_behav:\n if behavior.weight > highest:\n highest = behavior.weight\n vinner = behavior\n return vinner\n\n def stochastic(self):\n\n sum = 0\n behaviors = {}\n\n #lager liste men intervaller. intervallene til hver behavior er like stpr som vekten, saann at sannsynligheten skal gjenspeiles i dette\n #[0, 0.5], [0.5, 1.3], [1.3, 2]\n for behavior in self.bbcon.active_behav:\n behaviors[behavior] = [sum, sum + behavior.weight]\n sum += behavior.weight\n\n rand_num = random.uniform(0, sum) #finner random tall\n vinner = None\n #har naa en dictionary med behavior og et tilsvaende intervall, saa henter ut hoyeste verdien i intervallet (value[1]) og sjekker opp mot random\n for behavior, interval in behaviors.items():\n if interval[1] < rand_num:\n vinner = behavior\n return vinner\n\n\n\n\nclass Sensob:\n\n def __init__(self):\n self.sensors = []\n self.value = None\n\n def update(self): #skal oppdateres en gang hver timestep. Tror det gjores i en annen klasse\n return\n\n def get_value(self):\n return self.value\n\n def reset(self):\n for sensor in self.sensors:\n sensor.reset()\n\n\n\n\n\nclass IR(Sensob):\n\n def __init__(self):\n self.sensors = [IRProximitySensor()]\n self.value = None\n\n def update(self):\n self.sensors[0].update()\n self.value = self.sensors[0].get_value()\n\n\n def get_value(self):\n #True betyr at noe er naert\n return self.value\n\n def reset(self):\n self.sensors[0].reset()\n\n\n\n\nclass LookAhead(Sensob):\n def __init__(self):\n self.sensors = [Ultrasonic()]\n\n def update(self):\n self.sensors[0].update()\n self.value = self.sensors[0].get_value()\n\n\n def get_value(self): #trenger jeg aa lage denne naar den arver?\n return self.value\n\n def reset(self):\n self.sensors[0].reset()\n\n\n\n\n\n\n\n\nclass CameraSensob(Sensob):\n\n def __init__(self, threshold=0.4, CR=(0.5, 0.25, 0, 0.25)):\n self.threshold = threshold #tillatter saasaa mye slingringsmonn\n self.CR = CR # cutratio, hvor mye av bilde som skal kuttes for analyseringa\n self.sensors = [Camera()]\n self.value = []\n\n\n def update(self): #tar bilde med Camera og analyserer fargeverdiene.\n image = self.sensors[0].update() #henter fra Camera\n width, height = image.size\n\n def wta(p): #get largest pixel, p er en RGB tuppel\n #x = max(p) #henter ut hvilken som har hoyest verdi\n liste = list(p) #gjor den om til en liste\n #print(liste)\n index = liste.index(max(p))\n rgb = [0,0,0]\n #Setter den korrekte til max og de andre til 0\n rgb[index] = 255\n return tuple(rgb) #maa vaere en tuppel, ikke en liste\n\n\n for h in range(height):\n for w in range(width):\n p = image.getpixel((w,h))\n pwta = wta(p)\n image.putpixel((w,h), pwta)\n #teller hvor mange vi har av hver farge\n color_count = [0, 0, 0]\n for h in range(height):\n for w in range(width):\n pixel = list(image.getpixel((w,h)))\n color_count[pixel.index(255)] +=1 #legger til 1 paa der det er 255 i pixel\n #maa endre det til hvor mye av 
det totale bildet det er\n of_total = [0.0, 0.0, 0.0]\n for i in range(len(color_count)):\n total = width*height\n of_total[i] = color_count[i]/total\n self.value = of_total\n\n\n def get_value(self):\n return self.value\n\n def reset(self):\n self.sensors[0].reset()\n\n\n\n\n\n\n\n\n\n\n\n\n\nclass Behavior:\n\n # priority = static predefined value\n\n def __init__(self, bbcon):\n self.bbcon = bbcon #pointer to controller that uses this behaviour\n self.sensobs = [] #list of sensobs this behavour uses\n self.motor_rec = []\n self.active_flag = False\n self.halt_request = False\n self.priority = 1.0\n self.match_degree = 0 #et tall som sier noen om hvor mye de naavaerende tilstandene garanterer behavioren\n self.weight = self.priority * self.match_degree\n\n def consider_deactivation(self):\n pass\n\n def consider_activation(self):\n pass\n\n def update(self):\n if self.active_flag:\n self.consider_deactivation()\n else:\n self.consider_activation()\n self.sense_and_act()\n self.update_weight()\n\n def sense_and_act(self): #calculate\n pass\n\n def update_weight(self):\n self.weight=self.priority * self.match_degree\n\n\n\nclass Approach(Behavior): #UV-behaviour\n def __init__(self, bbcon):\n super().__init__(bbcon)\n self.sensobs.append(LookAhead()) #UV\n self.active_flag=True\n self.bbcon.active_behav.append(self)\n\n\n def sense_and_act(self):\n\n distance=self.sensobs[0].get_value() #hvorfor to linjer?\n if distance >= 10:\n self.bbcon.active_camera = True\n self.match_degree=0.6\n self.motor_rec = [(\"F\", 0.4, 0.6)]\n self.update_weight()\n print(\"kjor\")\n else:\n self.bbcon.active_camera = False\n self.match_degree = 0.3\n self.motor_rec = [(\"S\", 0, 0)]\n self.update_weight()\n print(\"stopp\")\n\nclass camera_behavior(Behavior):\n\n def __init__(self, bbcon):\n super().__init__(bbcon)\n self.active_flag=True\n self.bbcon.active_behav.append(self)\n self.sensobs.append(CameraSensob())\n\n def concider_deactivation(self):\n if (self.bbcon.active_camera):\n self.active_flag=True\n else:\n self.active_flag=False\n index=self.bbcon.active_behav.index(self)\n self.bbcon.active_behav.pop(index)\n\n def consider_activation(self):\n if self.bbcon.active_camera:\n self.active_flag=True\n if self not in self.bbcon.active_behav:#trengs denne og neste linje\n self.bbcon.active_behav.append(self)\n else:\n self.active_flag=False\n\n def sense_and_act(self):\n if self.active_flag:\n self.concider_deactivation()\n else:\n self.consider_activation()\n\n if self.active_flag:\n piece=self.sensobs[0].get_value()\n index = piece.index(max(piece))\n if index==0: #red\n #print(\"ROD\")\n #self.forward()\n self.match_degree = 0.5\n self.update_weight()\n elif index == 1: # green\n print(\"GRONN\")\n # self.motor_rec = [('R', 0.2, 1.0)]\n self.pull_back\n self.match_degree = 1.0\n self.update_weight()\n else: # blue\n print(\"BLA\")\n # self.motor_rec = [('L', 0.5, 1.0)]\n #self.turn\n self.match_degree = 1.0\n self.update_weight()\n else:\n self.motor_rec = [('B', 0.2, 0.5)]\n self.match_degree = 0.1\n self.update_weight()\n\n def forward(self):\n self.motor_rec = [(\"F\", 0.7,1.0)]\n\n def pull_back(self):\n self.motor_rec = [('B', 0.2, 1.0)]\n\n def turn(self):\n self.motor_rec = [('R', 0.5, 1.0), ('F', 0.2, 1.0)]\n\n\n\n\n\n\nclass IR_behavior(Behavior):\n\n def __init__(self, bbcon):\n super().__init__(bbcon)\n self.sensobs.append(IR())\n self.active_flag=True\n self.bbcon.active_behav.append(self)\n\n def sense_and_act(self):\n val = self.sensobs[0].get_value()\n if val[0] == True:\n 
self.motor_rec = [('R', 0.5, 1.5)]\r\n            self.match_degree = 1\r\n        elif val[1] == True:\r\n            self.motor_rec = [('L', 0.5, 1.5)]\r\n            self.match_degree = 1\r\n        else:\r\n            self.motor_rec = [('S', 0.25, 1)]\r\n            self.match_degree = 0\r\n\r\n\r\n\r\n\r\nclass Motob:\r\n    def __init__(self):\r\n        self.motors = [Motors()]\r\n        self.value = None\r\n\r\n    def update(self, vals):\r\n        self.value = vals\r\n        self.operationalize()\r\n\r\n    def operationalize(self):# convert a motor recommendation into one or more motor settings\r\n        for i in range (len(self.value)):\r\n            if (self.value[i][0] == \"F\" or self.value[i][0] == \"f\"):\r\n                self.motors[0].forward(self.value[i][1], self.value[i][2])\r\n            elif (self.value[i][0] == \"B\" or self.value[i][0]==\"b\"):\r\n                self.motors[0].backward(self.value[i][1], self.value[i][2])\r\n            elif (self.value[i][0] == \"R\" or self.value[i][0] == \"r\"):\r\n                self.motors[0].right(self.value[i][1], self.value[i][2])\r\n            elif (self.value[i][0] == \"L\" or self.value[i][0]==\"l\"):\r\n                self.motors[0].left(self.value[i][1], self.value[i][2])\r\n            elif (self.value[i][0]==\"S\" or self.value[i][0]==\"s\"):\r\n                self.motors[0].stop()\r\n            else:\r\n                print(\"Something wrong\")\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef main():\r\n    bbcon = BBCON()\r\n    x = False\r\n    ZumoButton().wait_for_press()\r\n    while x == False:\r\n        bbcon.run_one_timestep()\r\n\r\n\r\nif __name__ == '__main__':\r\n    main()\r\n" } ]
8
carolinedlu/test_host_name
https://github.com/carolinedlu/test_host_name
441b33f841a0030432564dbe4b7fa4d9f80b7a93
3829104c059b68bd882f516b41cce7600e43c580
901d6fa1d498bc465d2d3409e05ee9e17cc5136d
refs/heads/main
2023-06-11T22:05:20.342024
2021-07-09T18:33:35
2021-07-09T18:33:35
383,599,507
0
1
null
null
null
null
null
[ { "alpha_fraction": 0.7326202988624573, "alphanum_fraction": 0.7326202988624573, "avg_line_length": 14.583333015441895, "blob_id": "db80ab3a9f53462e9ba6d15d51c9952d47113f32", "content_id": "59a907f143ca19f616be260f68ec18e709a2c6eb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 187, "license_type": "no_license", "max_line_length": 40, "num_lines": 12, "path": "/streamlit_app.py", "repo_name": "carolinedlu/test_host_name", "src_encoding": "UTF-8", "text": "import streamlit as st\nimport os\nfrom streamlit import caching\n\nif os.getenv(\"HOSTNAME\") == \"streamlit\":\n st.write(\"welcome to the cloud\")\n\nst.write(os.environ)\n\n\n\ncaching.clear_cache()\n" } ]
1
DigimundoTesca/cristobal-poll
https://github.com/DigimundoTesca/cristobal-poll
c7644342924b73664ac4407e7917c12dd0509959
a5c7d180379a7d9ae3d5272409d4a37284c0649f
8d463d34a5a2e3dec6e25d2a99a1e057dba3ad0f
refs/heads/master
2021-01-18T16:57:11.622800
2017-08-16T11:31:57
2017-08-16T11:31:57
100,478,092
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6164102554321289, "alphanum_fraction": 0.6307692527770996, "avg_line_length": 31.5, "blob_id": "8ce72d5a1936cd5d1239e2fe428ca9831b4e65ff", "content_id": "6f9b3117e5a42d15f6af73ffd563c27dbbefd06b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 980, "license_type": "permissive", "max_line_length": 87, "num_lines": 30, "path": "/polls/models.py", "repo_name": "DigimundoTesca/cristobal-poll", "src_encoding": "UTF-8", "text": "from django.db import models\nfrom django.core.validators import RegexValidator\n\nclass Poll(models.Model):\n phone_regex = RegexValidator(\n regex=r'^\\d{10}$',\n message=\"Debe ingresar un número telefónico de 10 dígitos.\",\n )\n\n name = models.CharField(max_length=90, default='', blank=False, null=False)\n last_name = models.CharField(max_length=190, default='', blank=False, null=False)\n last_module = models.CharField(max_length=120, default='', blank=False, null=False)\n email = models.EmailField(default='', unique=True, blank=False, null=False)\n phone_number = models.CharField(\n blank=False,\n null=False,\n unique= True,\n max_length=10,\n validators=[phone_regex],\n error_messages={\n 'unique': \"Este número ya está registrado.\",\n },\n )\n\n def __str__(self):\n return self.name\n\n class Meta:\n verbose_name = 'encuesta'\n verbose_name_plural = 'encuestas'\n" }, { "alpha_fraction": 0.5310603380203247, "alphanum_fraction": 0.5357015132904053, "avg_line_length": 34.6815299987793, "blob_id": "038b03b9ddb8ebc217573d5153b44c8f20020e7a", "content_id": "7a8e869aab7ca5854a065c51b4a84ffc18021bfe", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 5609, "license_type": "permissive", "max_line_length": 149, "num_lines": 157, "path": "/polls/templates/poll.html", "repo_name": "DigimundoTesca/cristobal-poll", "src_encoding": "UTF-8", "text": "{% load static %}\n<!DOCTYPE html>\n<html lang=\"es\">\n<head>\n <!-- Required meta tags -->\n <meta charset=\"utf-8\">\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1, shrink-to-fit=no\">\n\n <!-- Bootstrap CSS -->\n <link rel=\"stylesheet\" href=\"https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0-beta/css/bootstrap.min.css\" integrity=\"sha384-/Y6pD6FV/Vv2HJnA6t+vslU6fwYXjCFtcEpHbNJ0lyAFsXTsjBbfaDjzALeQsN6M\" crossorigin=\"anonymous\">\n <link rel=\"stylesheet\" href=\"http://getbootstrap.com/docs/4.0/examples/sticky-footer-navbar/sticky-footer-navbar.css\">\n <title>Metamundo</title>\n <style media=\"screen\">\n #main-container {\n padding-top: 2rem;\n padding-bottom: 2rem;\n }\n #metamundo-banner {\n max-height: 200px;\n }\n .container-banner {\n display: flex;\n justify-content: center;\n }\n .invalid-feedback {\n display: initial;\n }\n </style>\n</head>\n<body>\n <!-- Image and text -->\n <nav class=\"navbar navbar-dark bg-dark\">\n <div class=\"container\">\n <a class=\"navbar-brand\" href=\"{% url 'polls:poll' %}\">Metamundo Online</a>\n </div>\n </nav>\n <div class=\"container\" id=\"main-container\">\n <div class=\"container-banner\">\n <img id=\"metamundo-banner\" src=\"{% static 'images/logo-meta.png' %}\" class=\"img-fluid\" alt=\"\">\n </div>\n <form method=\"post\">\n {% csrf_token %}\n <div class=\"form-group\">\n <label for=\"name\">Nombre</label>\n <input id=\"name\"\n class=\"form-control {% if form.name.errors %}is-invalid{% endif %}\"\n type=\"text\"\n name=\"name\"\n value=\"{%if form.name.value %}{{form.name.value}}{% endif %}\"\n 
maxlength=\"{{form.name.field.max_length}}\"\n placeholder=\"Nombre(s)\"\n required/>\n {% if form.name.errors %}\n <div class=\"invalid-feedback\">\n <ul>\n {% for error in form.name.errors %}\n <li>{{error}}</li>\n {% endfor %}\n </ul>\n </div>\n {% endif %}\n </div>\n <div class=\"form-group\">\n <label for=\"lastName\">Apellidos</label>\n <input id=\"lastName\"\n class=\"form-control {% if form.last_name.errors %}is-invalid{% endif %}\"\n type=\"text\"\n name=\"last_name\"\n value=\"{%if form.last_name.value %}{{form.last_name.value}}{% endif %}\"\n maxlength=\"{{form.last_name.field.max_length}}\"\n placeholder=\"Apellido Materno y Paterno\"\n required/>\n {% if form.last_name.errors %}\n <div class=\"invalid-feedback\">\n <ul>\n {% for error in form.last_name.errors %}\n <li>{{error}}</li>\n {% endfor %}\n </ul>\n </div>\n {% endif %}\n </div>\n <div class=\"form-group\">\n <label for=\"lastModule\">Último modulo</label>\n <input id=\"lastModule\"\n class=\"form-control {% if form.last_module.errors %}is-invalid{% endif %}\"\n type=\"text\"\n name=\"last_module\"\n value=\"{%if form.last_module.value %}{{form.last_module.value}}{% endif %}\"\n maxlength=\"{{form.last_module.field.max_length}}\"\n placeholder=\"Último módulo cursado online en Metamundo\"\n required/>\n {% if form.last_module.errors %}\n <div class=\"invalid-feedback\">\n <ul>\n {% for error in form.last_module.errors %}\n <li>{{error}}</li>\n {% endfor %}\n </ul>\n </div>\n {% endif %}\n </div>\n <div class=\"form-group\">\n <label for=\"email\">Email</label>\n <input id=\"email\"\n class=\"form-control {% if form.email.errors %}is-invalid{% endif %}\"\n type=\"email\"\n name=\"email\"\n value=\"{%if form.email.value %}{{form.email.value}}{% endif %}\"\n placeholder=\"Email\"\n required/>\n {% if form.email.errors %}\n <div class=\"invalid-feedback\">\n <ul>\n {% for error in form.email.errors %}\n <li>{{error}}</li>\n {% endfor %}\n </ul>\n </div>\n {% endif %}\n </div>\n <div class=\"form-group\">\n <label for=\"phoneNumber\">Teléfono</label>\n <input id=\"phoneNumber\"\n class=\"form-control {% if form.phone_number.errors %}is-invalid{% endif %}\"\n type=\"text\"\n name=\"phone_number\"\n value=\"{%if form.phone_number.value %}{{form.phone_number.value}}{% endif %}\"\n maxlength=\"{{form.phone_number.field.max_length}}\"\n placeholder=\"Número de Teléfono (De preferencia con Whatsapp)\"\n required/>\n {% if form.phone_number.errors %}\n <div class=\"invalid-feedback\">\n <ul>\n {% for error in form.phone_number.errors %}\n <li>{{error}}</li>\n {% endfor %}\n </ul>\n </div>\n {% endif %}\n </div>\n <button class='btn btn-primary btn-block' type=\"submit\">Enviar</button>\n </form>\n </div>\n <footer class=\"footer\">\n <div class=\"container \">\n <span class=\"text-muted\">\n © Metamundo 2017\n </span>\n </div>\n </footer>\n <!-- jQuery first, then Popper.js, then Bootstrap JS -->\n <script src=\"https://code.jquery.com/jquery-3.2.1.slim.min.js\" integrity=\"sha384-KJ3o2DKtIkvYIK3UENzmM7KCkRr/rE9/Qpg6aAZGJwFDMVNA/GpGFF93hXpG5KkN\" crossorigin=\"anonymous\"></script>\n <script src=\"https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.11.0/umd/popper.min.js\" integrity=\"sha384-b/U6ypiBEHpOf/4+1nzFpr53nxSS+GLCkfwBdFNTxtclqqenISfwAzpKaMNFNmj4\" crossorigin=\"anonymous\"></script>\n <script src=\"https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0-beta/js/bootstrap.min.js\" integrity=\"sha384-h0AbiXch4ZDo7tp9hKZ4TsHbi047NrKGLO3SEJAg45jXxnGIfYzk4Si90RDIqNm1\" crossorigin=\"anonymous\"></script>\n</body>\n</html>\n" }, { 
"alpha_fraction": 0.7008928656578064, "alphanum_fraction": 0.7008928656578064, "avg_line_length": 27, "blob_id": "7f1bc86b2c0fd3a802199250d88a434aa73ca3fb", "content_id": "90c04d187d8e8396db2c2039405791c3fa01604e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 448, "license_type": "permissive", "max_line_length": 74, "num_lines": 16, "path": "/polls/views.py", "repo_name": "DigimundoTesca/cristobal-poll", "src_encoding": "UTF-8", "text": "from django.shortcuts import render, reverse\nfrom django.views.generic.edit import CreateView\n\nfrom .models import Poll\n\nclass CreatePoll(CreateView):\n model = Poll\n template_name = 'poll.html'\n fields = ['name', 'last_name', 'last_module', 'email', 'phone_number']\n\n def get_success_url(self):\n return reverse('polls:thanks')\n\ndef thanks(request):\n template = 'thanks.html'\n return render(request, template, context=None)\n" }, { "alpha_fraction": 0.6033653616905212, "alphanum_fraction": 0.6057692170143127, "avg_line_length": 31, "blob_id": "79cca8e0783d3169e1286bfe80fed56f61c70f8b", "content_id": "e37443e7cf80040dbc5660be17b59c9423a2c4f5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 416, "license_type": "permissive", "max_line_length": 60, "num_lines": 13, "path": "/poll/settings/dev_postgres.py", "repo_name": "DigimundoTesca/cristobal-poll", "src_encoding": "UTF-8", "text": "from poll.settings.dev import *\n\n# Database\nDATABASES = {\n 'default': {\n 'ENGINE': 'django.db.backends.postgresql_psycopg2',\n 'NAME': os.getenv('CRISTOBAL_POLL_DB_NAME'),\n 'USER': os.getenv('CRISTOBAL_POLL_DB_USER'),\n 'PASSWORD': os.getenv('CRISTOBAL_POLL_DB_PASSWORD'),\n 'HOST': os.getenv('CRISTOBAL_POLL_DB_HOST'),\n 'PORT': os.getenv('CRISTOBAL_POLL_DB_PORT'),\n }\n}\n" }, { "alpha_fraction": 0.8484848737716675, "alphanum_fraction": 0.8484848737716675, "avg_line_length": 48.5, "blob_id": "db391b5bd4d1e2dc8a916490e53378ae3d362fce", "content_id": "d19b1f7395190c5af3b23c59952c325e49b29d51", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 100, "license_type": "permissive", "max_line_length": 81, "num_lines": 2, "path": "/README.md", "repo_name": "DigimundoTesca/cristobal-poll", "src_encoding": "UTF-8", "text": "# cristobal-poll\nRepositorio con pequeño proyecto para deplegar un formulario para los estudiantes\n" }, { "alpha_fraction": 0.7222222089767456, "alphanum_fraction": 0.7222222089767456, "avg_line_length": 27.285715103149414, "blob_id": "af6a763cec2385bcdc0faa1081e32998114700dc", "content_id": "1a40ca2077364156a7eb4b41dcdb2764c51d852b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 198, "license_type": "permissive", "max_line_length": 80, "num_lines": 7, "path": "/polls/admin.py", "repo_name": "DigimundoTesca/cristobal-poll", "src_encoding": "UTF-8", "text": "from django.contrib import admin\n\nfrom .models import Poll\n\[email protected](Poll)\nclass PollAdmin(admin.ModelAdmin):\n list_display = ('name', 'last_name', 'email', 'last_module', 'phone_number')\n" }, { "alpha_fraction": 0.616487443447113, "alphanum_fraction": 0.6200717091560364, "avg_line_length": 28.36842155456543, "blob_id": "b084cbd4632c51194d35837f4a882355085f3825", "content_id": "c303f34a8f88482b94c0de4ba72c1bdee4acd413", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", 
"length_bytes": 558, "license_type": "permissive", "max_line_length": 61, "num_lines": 19, "path": "/poll/settings/prod.py", "repo_name": "DigimundoTesca/cristobal-poll", "src_encoding": "UTF-8", "text": "from poll.settings.base import *\n\nDEBUG = False\n\nALLOWED_HOSTS = ['*']\nTEMPLATES[0]['OPTIONS']['debug'] = DEBUG\nSECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')\n\n# Database\nDATABASES = {\n 'default': {\n 'ENGINE': 'django.db.backends.postgresql_psycopg2',\n 'NAME': os.getenv('CRISTOBAL_POLL_DB_NAME'),\n 'USER': os.getenv('CRISTOBAL_POLL_DB_USER'),\n 'PASSWORD': os.getenv('CRISTOBAL_POLL_DB_PASSWORD'),\n 'HOST': os.getenv('CRISTOBAL_POLL_DB_HOST'),\n 'PORT': os.getenv('CRISTOBAL_POLL_DB_PORT'),\n }\n}\n" }, { "alpha_fraction": 0.6694915294647217, "alphanum_fraction": 0.6694915294647217, "avg_line_length": 20.454545974731445, "blob_id": "3348007e7d0dae27b32b42acf608981beecd39d2", "content_id": "effb885416581bb2fd12925ba7ed6d49b809da53", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 236, "license_type": "permissive", "max_line_length": 56, "num_lines": 11, "path": "/polls/urls.py", "repo_name": "DigimundoTesca/cristobal-poll", "src_encoding": "UTF-8", "text": "from django.conf import settings\nfrom django.conf.urls import url\n\nfrom . import views\n\napp_name = 'polls'\n\nurlpatterns = [\n url(r'^$', views.CreatePoll.as_view(), name='poll'),\n url(r'^gracias/$', views.thanks, name='thanks'),\n]\n" } ]
8
aayushdhakad/Fantasy-Cricket
https://github.com/aayushdhakad/Fantasy-Cricket
255088c52484f5eec147c5a128a2361e91ad5802
f41aa047482e43451d9322ed4f3a4db2d2848ee8
61360f1f34835e9ac1ef8eaa630c06e7c430470e
refs/heads/main
2023-03-02T19:45:39.174072
2021-02-14T10:31:02
2021-02-14T10:31:02
338,779,376
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5278246402740479, "alphanum_fraction": 0.5505902171134949, "avg_line_length": 31.904762268066406, "blob_id": "aab475fad5acad2d840c4c30fb803206a7d1d49c", "content_id": "b567356f91083236a376757cb861e4dff3d12211", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3558, "license_type": "no_license", "max_line_length": 123, "num_lines": 105, "path": "/live.py", "repo_name": "aayushdhakad/Fantasy-Cricket", "src_encoding": "UTF-8", "text": "from tkinter import *\r\nfrom fantasy import *\r\nfrom tkinter import messagebox\r\n\r\nplayers = new_team\r\nPlayers = new_team_\r\ndef batsman_check():\r\n count = 0\r\n for i in batsman:\r\n if players[i].get() != 'Off':\r\n count += 1\r\n if count < 5:\r\n messagebox.showerror('Error' , 'Select ' + str(5 - count) + ' more batsman')\r\n elif count > 5:\r\n messagebox.showerror('Error' , 'Deselect the ' + str(count - 5) + ' extra batsman')\r\n\r\ndef all_rounder_check():\r\n count = 0\r\n for i in all_rounders:\r\n if players[i].get() != 'Off':\r\n count += 1\r\n if count < 2:\r\n messagebox.showerror('Error' , 'Select ' + str(2 - count) + ' more all rounder')\r\n elif count > 2:\r\n messagebox.showerror('Error' , 'Deselect the ' + str(count - 2) + ' extra all rounder')\r\n\r\ndef bowler_check():\r\n count = 0\r\n for i in bowlers:\r\n if players[i].get() != 'Off':\r\n count += 1\r\n if count < 4:\r\n messagebox.showerror('Error' , 'Select ' + str(4 - count) + ' more bowler')\r\n elif count > 4:\r\n messagebox.showerror('Error' , 'Deselect the ' + str(count - 4) + ' extra bowler')\r\n\r\ndef point_calculation():\r\n window_2 = Toplevel(root)\r\n window_2.geometry('400x400')\r\n window_2.title('Your Score')\r\n frame = Frame(window_2)\r\n frame.pack() \r\n score = 0\r\n k = 2\r\n for j in Players:\r\n if(j!='Off'):\r\n\r\n \r\n if(players[j].get()!='Off'):\r\n score += Players[players[j].get()]\r\n player_name = Label(frame,text=j,font = ('consolas' ,12))\r\n player_name.grid(sticky = NW , row = k , column = 1)\r\n points = Label(frame,text=str(Players[players[j].get()]),font = ('consolas' ,12 ))\r\n points.grid(sticky = NW , row = k , column = 2)\r\n k += 1\r\n\r\n line = Label(frame , text = '___')\r\n line.grid( sticky=NW , column=2)\r\n total_line = Label(frame , text = score , font = ('consolas' ,12))\r\n total_line.grid(sticky = NW ,column = 2)\r\n\r\nroot = Tk() \r\nroot.geometry(\"540x600\") \r\n\r\n\r\n\r\nlabel = Label(root , text = \"select 5 out of 10 batsman....\" ,font = ('consolas' ,10 ,'bold') )\r\nlabel.grid()\r\nj = 0\r\nfor i in batsman:\r\n players[i] = StringVar()\r\n button = Checkbutton(root , text = i, font = ('consolas' ,12) , variable = players[i] ,onvalue = i , offvalue = 'Off' )\r\n button.deselect()\r\n button.grid(sticky = NW , row = j//2 + 1 , column = j % 2)\r\n j += 1\r\n\r\nButton(root, text=\"Done\",command = batsman_check).grid()\r\n\r\nlabel = Label(root , text = \"select 2 out of 4 all rounders....\" ,font = ('consolas' ,10 ,'bold') )\r\nlabel.grid()\r\nj += 4\r\nfor i in all_rounders:\r\n players[i] = StringVar()\r\n button = Checkbutton(root , text = i, font = ('consolas' ,12) , variable = players[i] ,onvalue = i , offvalue = 'Off' )\r\n button.deselect()\r\n button.grid(sticky = NW , row = j//2 + 1 , column = j % 2)\r\n j += 1\r\n\r\nButton(root, text=\"Done\",command = all_rounder_check).grid()\r\n\r\nlabel = Label(root , text = \"select 4 out of 8 bowlers....\" ,font = ('consolas' ,10 ,'bold') )\r\nlabel.grid()\r\nj += 4\r\nfor i in 
bowlers:\r\n players[i] = StringVar()\r\n button = Checkbutton(root , text = i, font = ('consolas' ,12) , variable = players[i] ,onvalue = i , offvalue = 'Off' )\r\n button.deselect()\r\n button.grid(sticky = NW , row = j//2 + 1 , column = j % 2)\r\n j += 1\r\n\r\nButton(root, text=\"Done\",command = bowler_check).grid()\r\n\r\nButton(root, text=\"Your Points\",command = point_calculation).grid()\r\n\r\nmainloop()" }, { "alpha_fraction": 0.45429208874702454, "alphanum_fraction": 0.47449833154678345, "avg_line_length": 33.52475357055664, "blob_id": "15ceaae42b869fce8d42e48470c680b1221ce4b0", "content_id": "5fb37655904759d2cfad727c65ada0a1298d1223", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7176, "license_type": "no_license", "max_line_length": 118, "num_lines": 202, "path": "/Live_Score (2).py", "repo_name": "aayushdhakad/Fantasy-Cricket", "src_encoding": "UTF-8", "text": "import requests\r\nimport bs4\r\nfrom tkinter import *\r\nfrom tkinter.ttk import *\r\nfrom tkinter import messagebox\r\nfrom functools import partial\r\n\r\ndef fun(stats):\r\n arr = [30, 8, 8, 8, 8, 8]\r\n text = ''\r\n for i in range(6):\r\n text += stats[i]\r\n text += ' ' * (arr[i] - len(stats[i]))\r\n return text\r\n\r\ndef live_score(url):\r\n res = requests.get(url)\r\n bs = bs4.BeautifulSoup(res.text, 'html.parser')\r\n\r\n scores = bs.select('.cb-col .cb-col-67 .cb-scrs-wrp')\r\n if not scores:\r\n messagebox.showinfo('', \"Match not started yet....\" )\r\n else:\r\n window_3 = Toplevel()\r\n window_3.geometry('500x500')\r\n frame = Frame(window_3)\r\n frame.pack()\r\n\r\n # print(scores[0])\r\n data = scores[0].getText().split(' ')\r\n # print(data)\r\n if len(data) > 1:\r\n data_1 = data[1].split(' ')\r\n\r\n first_line = Label( frame, text=data[0], font=['calibri', 15, 'underline'], foreground='gray' )\r\n first_line.pack()\r\n\r\n second_line = Label(frame, text=data_1[0], font=('calibri', 17, 'bold'))\r\n second_line.pack()\r\n\r\n if len(data_1) == 4:\r\n crr_line = data_1[1].split(\"\\xa0\")\r\n crr_line_1 = data_1[2].split(\"\\xa0\")\r\n crr_line.extend(crr_line_1)\r\n\r\n rates = ' '\r\n for text in crr_line:\r\n rates += text\r\n rates += ' '\r\n crr = Label(frame, text=rates, font=('calibri', 10))\r\n crr.pack()\r\n\r\n third_line = Label(frame, text=data_1[3], font=('calibri', 12), foreground='red')\r\n third_line.pack()\r\n else:\r\n crr_line = data_1[1].split(\"\\xa0\")\r\n rates = ' '\r\n for text in crr_line:\r\n rates += text\r\n rates += ' '\r\n crr = Label(frame, text=rates, font=('calibri', 10))\r\n crr.pack()\r\n\r\n third_line = Label(frame, text=data_1[2], font=('calibri', 12), foreground='red')\r\n third_line.pack()\r\n else:\r\n\r\n data_1 = data[0].split(' ')\r\n # print(data_1)\r\n innings_line = Label(frame, text=\"First Innings...\", font=('calibri', 15, 'underline'), foreground='gray')\r\n innings_line.pack()\r\n\r\n first_line = Label(frame, text=data_1[1], font=('calibri', 17, 'bold'))\r\n first_line.pack()\r\n\r\n crr_line = data_1[2].split(\"\\xa0\")\r\n rates = ''\r\n for text in crr_line:\r\n if text != ' ':\r\n rates += text\r\n crr = Label(frame, text=rates, font=('calibri', 10))\r\n crr.pack()\r\n\r\n second_line = Label(frame, text=data_1[3], font=('calibri', 12), foreground='red')\r\n second_line.pack()\r\n\r\n display = 'Batsman R B 4s 6s SR '\r\n empty_line = Label(frame)\r\n empty_line.pack()\r\n empty_line = Label(frame)\r\n empty_line.pack()\r\n first_line = Label(frame, text=display, font=('consolas', 
10, 'bold'), background='gray')\r\n first_line.pack()\r\n # print(url)\r\n new_url = url.split('-')\r\n\r\n for i in range(len(new_url)):\r\n if new_url[i] == 'cricket':\r\n break\r\n new_url[i + 1] = new_url[i + 1].split('/')\r\n new_url[i + 1][0] = 'scorecard'\r\n new_url[i + 1] = '/'.join(new_url[i + 1])\r\n new_url = '-'.join(new_url)\r\n\r\n # print(new_url)\r\n res = requests.get(new_url)\r\n bs_new = bs4.BeautifulSoup(res.text, 'html.parser')\r\n\r\n batting_stats = bs_new.find_all('div', class_=\"cb-col cb-col-100 cb-scrd-itms\")\r\n\r\n for i in batting_stats:\r\n l = i.getText().split(' ')\r\n # print(l)\r\n if len(l) == 2:\r\n data = [l[0]]\r\n data.extend(l[1].split(' '))\r\n # print(data)\r\n if data[1] == 'batting':\r\n stats = [data[0].strip()]\r\n stats.extend(data[2].split(' '))\r\n text = fun(stats)\r\n batsman = Label(frame, text=text, font=('consolas', 10))\r\n batsman.pack()\r\n else:\r\n break\r\n\r\n display = 'Bowler O M R W ECO '\r\n empty_line = Label(frame)\r\n empty_line.pack()\r\n first_line = Label(frame, text=display, font=('consolas', 10, 'bold'), background='gray')\r\n first_line.pack()\r\n\r\n bowlers = bs.find_all('div', class_=\"cb-col cb-col-50\")\r\n bls = []\r\n for i in bowlers:\r\n bls.append(i.getText())\r\n current_bowlers = bls[4:]\r\n\r\n bowling_stats = bs_new.find_all('div', class_=\"cb-col cb-col-100 cb-scrd-itms\")\r\n for i in bowling_stats:\r\n l = i.getText().strip().split(' ')\r\n if len(l) == 2 and l[0] in current_bowlers:\r\n data = l[1].split(' ')\r\n if len(data) == 7:\r\n stats = [l[0]]\r\n eco = data[-1]\r\n stats.extend(data[:4])\r\n stats.append(eco)\r\n text = fun(stats)\r\n bowlers = Label(frame, text=text, font=('consolas', 10))\r\n bowlers.pack()\r\n\r\n empty_line = Label(frame)\r\n empty_line.pack()\r\n\r\n comments = bs.find_all('p')\r\n # print(comments)\r\n live_comment = comments[3].getText()\r\n commentry = Message(frame, text=live_comment, font=('consolas', 12), foreground='blue', aspect='300')\r\n commentry.pack()\r\n\r\n\r\n reload_btn = Button(frame, text=\"RELOAD\", style='W.TButton',\r\n command=lambda: [window_3.destroy(), live_score(url)])\r\n reload_btn.pack()\r\n window_3.mainloop()\r\n\r\n\r\ndef live_matches(url):\r\n res = requests.get(url)\r\n bs = bs4.BeautifulSoup(res.text, 'html.parser')\r\n links = bs.find_all('a', class_='cb-lv-scrs-well cb-lv-scrs-well-live')\r\n\r\n window_2 = Toplevel(window)\r\n window_2.geometry('500x500')\r\n window_2.title('Matches')\r\n matches = []\r\n href_links = []\r\n if links == []:\r\n messagebox.showinfo(' ', \"Oops...\\nNo live match available\")\r\n else:\r\n for i in links:\r\n link = i.get('href')\r\n href_links.append(link)\r\n match = i.get('title')\r\n matches.append(match)\r\n btn = Button(window_2, text=match, style='W.TButton' ,\r\n command=partial(live_score, 'https://www.cricbuzz.com' + link))\r\n btn.pack()\r\n\r\n\r\nwindow = Tk()\r\nwindow.geometry('300x80')\r\nwindow.title(\"Live Matches\")\r\nstyle = Style()\r\nstyle.configure('W.TButton', font=\r\n('calibri', 12, 'bold'))\r\n\r\ninit_btn = Button(window, text='Available Matches', style='W.TButton')\r\ninit_btn.bind('<Button>', lambda x: live_matches('https://www.cricbuzz.com/cricket-match/live-scores'))\r\ninit_btn.place(relx=0.5, rely=0.2, anchor=CENTER)\r\nwindow.mainloop()\r\n" }, { "alpha_fraction": 0.47236084938049316, "alphanum_fraction": 0.5069097876548767, "avg_line_length": 21.89908218383789, "blob_id": "1f6ae569c8b361ca181d15811edda3135f4efabf", "content_id": 
"ee84e881576408a819be2fb9f874c529c6558b84", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5210, "license_type": "no_license", "max_line_length": 113, "num_lines": 218, "path": "/fantasy.py", "repo_name": "aayushdhakad/Fantasy-Cricket", "src_encoding": "UTF-8", "text": "import requests\r\nimport bs4\r\n\r\ndef calculate(data ,is_batsman):\r\n points = 0\r\n\r\n # Batting points\r\n runs_scored = int(data[0])\r\n points += runs_scored\r\n\r\n if runs_scored >= 100:\r\n points += 16\r\n\r\n elif runs_scored >= 50 and runs_scored < 100:\r\n points += 8\r\n\r\n balls_played = int(data[1])\r\n\r\n fours = int(data[2])\r\n points += fours\r\n\r\n sixes = int(data[3])\r\n points += 2*sixes\r\n\r\n\r\n strike_rate = float(data[4])\r\n if balls_played > 10 and is_batsman:\r\n if strike_rate < 50:\r\n points -= 6\r\n elif strike_rate > 50 and strike_rate < 60:\r\n points -= 4\r\n elif strike_rate > 60 and strike_rate < 70:\r\n points -= 2\r\n\r\n # Bowling points \r\n overs = float(data[5])\r\n\r\n maidens = int(data[6])\r\n points += maidens*8\r\n\r\n wickets = int(data[8])\r\n points += wickets*25\r\n\r\n if wickets == 4:\r\n points += 8\r\n\r\n elif wickets == 5:\r\n points += 16\r\n\r\n economy = float(data[11])\r\n if overs >= 2:\r\n if economy <= 4:\r\n points += 6\r\n elif economy > 4 and economy <= 5:\r\n points += 4\r\n elif economy > 5 and economy <= 6:\r\n points += 2\r\n elif economy > 9 and economy <= 10:\r\n points -= 2\r\n elif economy > 10 and economy <= 11:\r\n points -= 4\r\n elif economy >= 11:\r\n points -= 6\r\n\r\n # Feilding points\r\n\r\n catch = int(data[12])\r\n points += catch*8\r\n\r\n run_out = int(data[13])\r\n points += run_out*6\r\n\r\n stumping = int(data[14])\r\n points += stumping*12\r\n\r\n if points < 0:\r\n points = 0\r\n return points\r\n\r\n\r\nurl = 'https://www.cricbuzz.com/live-cricket-scorecard/30545/aus-vs-ind-3rd-t20i-india-tour-of-australia-2020-21'\r\n\r\nres = requests.get(url)\r\nbs = bs4.BeautifulSoup(res.text,'html.parser')\r\n\r\nteams = bs.find_all('div', class_=\"cb-col cb-col-100 cb-minfo-tm-nm\")\r\nfor i in range(len(teams)):\r\n data = teams[i].getText()\r\n if i == 1:\r\n team_1 = data\r\n if i == 3:\r\n team_2 = data\r\n\r\n\r\nteam_1 = team_1.split(' ')[1].split(', ')\r\nteam_2 = team_2.split(' ')[1].split(', ')\r\nteam = []\r\nfor i in range(11):\r\n team.append(team_1[i])\r\n team.append(team_2[i])\r\n\r\nstats = bs.find_all('div', class_=\"cb-col cb-col-100 cb-scrd-itms\")\r\nl = []\r\nfor i in stats:\r\n data = i.getText()\r\n if data[:3] == ' ':\r\n l.append(data.strip())\r\n\r\nbatting_stats = []\r\nbowling_stats = []\r\nfor i in l:\r\n if len(i.split(' ')) == 2:\r\n batting_stats.append(i)\r\n else:\r\n bowling_stats.append(i)\r\n\r\nwickets = []\r\nnew_batting_stats = []\r\nfor batsman in batting_stats:\r\n splits = batsman.split(' ')\r\n name = [(splits[0].strip()).split(' ')[0]]\r\n wickets.append((splits[1].split(' ')[0]).strip())\r\n score = (splits[1].split(' ')[1]).strip()\r\n name.extend(score.split(' '))\r\n new_batting_stats.append(name)\r\n\r\n\r\n\r\nnew_bowling_stats = []\r\nfor bowler in bowling_stats:\r\n splits = bowler.split(' ')\r\n name = [(splits[0].strip()).split(' ')[0]]\r\n score = splits[1].split(' ')\r\n name.extend(score)\r\n new_bowling_stats.append(name)\r\n\r\n\r\ncatches = []\r\nrun_outs = []\r\nstumpings = []\r\nfor wicket in wickets:\r\n splits = wicket.split(' ')\r\n if splits[0] == 'c':\r\n player = splits[1].split(' ')[0]\r\n 
catches.append(player)\r\n    if splits[0] == 'run':\r\n        players = splits[2]\r\n        players = players[1:len(players)].split('/')\r\n        if len(players) == 1:\r\n            player = players[0].split(' ')\r\n            run_outs.append(player)\r\n            run_outs.append(player)\r\n        else:\r\n            for p in players[:2]:\r\n                player = p.split(' ')[0]\r\n                run_outs.append(player)\r\n    if splits[0] == 'st':\r\n        player = splits[1].split(' ')[0]\r\n        stumpings.append(player)\r\n\r\nteam[9] , team[11] = team[11] , team[9]\r\n\r\nbatsman = team[:10]\r\nall_rounders = team[10:14]\r\nbowlers = team[14:]\r\n\r\nnew_team = {}\r\nfor i in range(22):\r\n    player = team[i].split(' ')\r\n\r\n    if '(wk)' in player or '(c)' in player:\r\n        team[i] = ' '.join(player[:-1])\r\n        if i in range(10):\r\n            batsman[i] = ' '.join(player[:-1])\r\n        elif i in range(10,14):\r\n            all_rounders[i-10] = ' '.join(player[:-1])\r\n        else:\r\n            bowlers[i-14] = ' '.join(player[:-1])\r\n    data = []\r\n\r\n\r\n    for stats in new_batting_stats:\r\n        if stats[0] in player:\r\n            data.extend(stats[1:])\r\n\r\n    if len(data) == 0:\r\n        data.extend(['0']*5)\r\n\r\n    for stats in new_bowling_stats:\r\n        if stats[0] in player:\r\n            data.extend(stats[1:])\r\n\r\n    if len(data) == 5:\r\n        data.extend(['0']*7)\r\n\r\n    for p in player:\r\n        if p in catches:\r\n            break\r\n    data.append(str(catches.count(p)))\r\n\r\n    for p in player:\r\n        if p in run_outs:\r\n            break\r\n    data.append(str(run_outs.count(p)))\r\n\r\n    for p in player:\r\n        if p in stumpings:\r\n            break\r\n    data.append(str(stumpings.count(p)))\r\n    \r\n    new_team[team[i]] = calculate(data ,team[i] not in bowlers)\r\n\r\n\r\nnew_team['Off'] = 0\r\n\r\nnew_team_ = {}\r\nfor i in new_team:\r\n    new_team_[i] = new_team[i]\r\n" } ]
3
olafurjohannsson/cpp-concurrency
https://github.com/olafurjohannsson/cpp-concurrency
71c3a8a518e4b46574ec999c299e66b2a561566d
47fa0967868ae6c6723f030f9b654373ae15155c
19337b42a299e5b27a91c167340ffd5f62181eaf
refs/heads/master
2021-05-03T13:32:25.790487
2017-01-22T11:59:42
2017-01-22T11:59:42
72,210,100
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5030156970024109, "alphanum_fraction": 0.5217128992080688, "avg_line_length": 19.469135284423828, "blob_id": "8937013ab24ca40670342ef74a9202f45a5a628d", "content_id": "375475e7f2fb19f219f74de36bc3a58a570366d3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1659, "license_type": "no_license", "max_line_length": 93, "num_lines": 81, "path": "/asyncnet/asyncnet/lambdastuff.h", "repo_name": "olafurjohannsson/cpp-concurrency", "src_encoding": "UTF-8", "text": "//\n// lambdastuff.h\n// asyncnet\n//\n// Created by Ólafur Jóhannsson on 06/11/16.\n// Copyright © 2016 Ólafur Jóhannsson. All rights reserved.\n//\n\n#ifndef lambdastuff_h\n#define lambdastuff_h\n\n\nauto filterDigit = [] (std::string value) -> bool {\n return !value.empty() && std::find_if(value.begin(), value.end(), [](char c) {\n return !std::isdigit(c);\n }) == value.end();\n};\n\nbool isdigit1(std::string s) {\n return false;\n}\n\nbool filterDigit2(std::string value) {\n std::string::const_iterator it;\n for (it = value.begin(); it != value.end() && std::isdigit(*it); ++it)\n ;\n return !value.empty() && it == value.end();\n}\n\nvector<int> vec;\n\ntemplate<typename Func>\nstd::vector<int> find(Func func)\n{\n std::vector<int> result;\n for_each(vec.begin(), vec.end(), [&] (int v) {\n if (func(v)) {\n result.push_back(v);\n }\n });\n return result;\n}\n\nint find2(std::function<bool (int)> func) {\n for_each(vec.begin(), vec.end(), [=] (int v) {\n \n });\n return 0;\n}\n\nint test() {\n \n // lambda to automatic var\n auto print = [] (int value) -> void { std::cout << std::to_string(value) << std::endl; };\n \n // lambda to function pointer\n typedef int (*fn)();\n fn f = [] () -> int32_t { return 1337; };\n \n // lambda to std::function\n std::function<int ()> function = [] () -> int32_t {\n return 1000;\n };\n \n \n vec.push_back(function());\n vec.push_back(f());\n \n for_each(vec.begin(), vec.end(), [&] (int v) {\n print(v);\n });\n auto fff = find([] (int v) {\n return v == 1000;\n });\n \n \n \n return 0;\n}\n\n#endif /* lambdastuff_h */\n" }, { "alpha_fraction": 0.4277879297733307, "alphanum_fraction": 0.4346435070037842, "avg_line_length": 19.828571319580078, "blob_id": "dcd577b12de0b854c96dbf3d694bf7f6ec7f5a12", "content_id": "f542cb1499fd9d117949f833daf2758d73bc2648", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2188, "license_type": "no_license", "max_line_length": 94, "num_lines": 105, "path": "/asyncnet/asyncnet/test.cpp", "repo_name": "olafurjohannsson/cpp-concurrency", "src_encoding": "UTF-8", "text": "\n/*\n C includes\n*/\n#include <ctype.h>\n#include <stdio.h>\n#include <stdlib.h>\n\n/*\n C++ STL includes\n*/\n#include <iostream>\n#include <thread>\n#include <chrono>\n#include <vector>\n#include <string>\n#include <functional>\n\n/*\n Local header includes\n*/\n#include \"socket.h\"\n\n\nstd::string displayOptions() {\n return std::string(\n \"1: Enter hostname\\n\" \\\n \"2: Write data\\n\" \\\n \"3: Read data\\n\" \\\n \"4: Get address information\\n\" \\\n \"\\n\"\n );\n}\n\n\n#include \"subscriber.h\"\n\nint main()\n{\n \n subscriber sub;\n \n return 0;\n /////////\n Socket s(80);\n if (s.Connect(\"visir.is\")) {\n s.Write(\"test\");\n std::cout << s.Read() << std::endl;\n }\n \n return 0;\n int32_t input, port;\n std::string hostname, address;\n bool connected = false;\n \n std::cout << \"Creating socket, please input port: \";\n std::cin >> port;\n std::cout << \"Setting 
up socket on port \" << port << std::endl;\n Socket sock(port);\n\n for (;;) {\n std::cout << displayOptions();\n std::cin >> input;\n std::cin.clear();\n \n \n\n switch (input) {\n case 1:\n std::cout << \"Enter hostname: \";\n std::cin >> hostname;\n \n if ((connected = sock.Connect(hostname.c_str()))) {\n std::cout << \"Successfully connected to \" << hostname << std::endl;\n }\n else { \n std::cout << \"Could not establish connection\\n\";\n }\n break;\n \n case 2:\n std::cout << \"Data: \";\n if (sock.Write(std::string(std::istreambuf_iterator<char>(std::cin), {})) > 0)\n {\n std::cout << \"Response: \" << sock.Read() << std::endl;\n }\n \n break;\n \n case 4:\n std::cout << \"Enter address name:\";\n \n std::cin >> address;\n sock.Getaddrinfo(address);\n\n\n\n break;\n\n default:\n continue;\n };\n }\n\n return 0;\n}\n" }, { "alpha_fraction": 0.45125818252563477, "alphanum_fraction": 0.4614083170890808, "avg_line_length": 28.012269973754883, "blob_id": "15afbd94412da4734a743627fc6b9d19d940db64", "content_id": "885daf43724e547aa12020b887b80231f348ee4e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4730, "license_type": "no_license", "max_line_length": 103, "num_lines": 163, "path": "/asyncnet/asyncnet/socket.h", "repo_name": "olafurjohannsson/cpp-concurrency", "src_encoding": "UTF-8", "text": "#include <stdio.h>\n#include <errno.h>\n#include <stdlib.h>\n#include <unistd.h>\n#include <string.h>\n#include <sys/types.h>\n#include <sys/socket.h>\n#include <netinet/in.h>\n#include <arpa/inet.h>\n#include <netdb.h>\n#include <sys/ioctl.h>\n#include <iostream>\n#include <string>\n#include <zmq.hpp>\n\nusing namespace std;\n\nclass SocketConnectException: public exception\n{\n virtual const char* what() const throw()\n {\n return \"Socket could not establish connection\";\n }\n} myex;\n\nclass Socket\n{\n int32_t sock_fd;\n struct sockaddr_in servername;\n\n public:\n\n Socket(uint32_t port) \n {\n // init socket file descriptor\n sock_fd = socket(PF_INET, SOCK_STREAM, 0);\n\n // file descriptor fail\n if (sock_fd < 0) {\n return;\n }\n\n /*\n AF_INET: Address Family\n htons: Host to network short\n */\n servername.sin_family = AF_INET;\n servername.sin_port = htons(port);\n };\n\n bool Connect(const char *hostname) {\n struct hostent *hostinfo = gethostbyname(hostname);\n \n // host not found\n if (hostinfo == NULL) {\n return false;\n }\n\n servername.sin_addr = *(struct in_addr *) hostinfo->h_addr;\n\n if (0 > connect(sock_fd, (struct sockaddr *)&servername, sizeof(servername))) {\n return false;\n }\n \n return true;\n }\n\n bool Close() \n { \n close(sock_fd); \n sock_fd = 0;\n return true; \n }\n\n ssize_t Write(std::string value)\n {\n // Write to socket file descriptor\n char buff[128];\n sprintf(buff, \"test\");\n ssize_t nbytes = ::write(sock_fd, buff, 128); //value.c_str(), strlen(value.c_str()) + 1);\n \n return nbytes < 0 ? 
0 : nbytes;\n    }\n\n    void Getaddrinfo(std::string address) {\n        int status;\n        struct addrinfo hints, *servinfo, *p;\n        char ipstr[INET6_ADDRSTRLEN];\n\n        memset(&hints, 0x00, sizeof(hints));\n        hints.ai_family = AF_UNSPEC; // ipv4 or ipv6\n        hints.ai_socktype = SOCK_STREAM; // tcp socket\n        hints.ai_flags = AI_PASSIVE; // fill in my ip for me\n\n        if ((status = getaddrinfo(address.c_str(), \"80\", &hints, &servinfo)) != 0) {\n            printf(\"getaddrinfo err: %s\\n\", gai_strerror(status));\n            exit(1); \n        }\n        // servinfo now points to a linked list of 1 or more struct addrinfos\n        for (p = servinfo; p != NULL; p = p->ai_next) {\n            void *addr;\n            const char *ipver;\n\n            if (p->ai_family == AF_INET) {\n                struct sockaddr_in *ipv4 = (struct sockaddr_in *)p->ai_addr;\n                addr = &(ipv4->sin_addr);\n                ipver = \"IPv4\";\n            }\n            else {\n                // IPv6\n                struct sockaddr_in6 *ipv6 = (struct sockaddr_in6 *)p->ai_addr;\n                addr = &(ipv6->sin6_addr);\n                ipver = \"IPv6\";\n            }\n            // convert IP to str\n            inet_ntop(p->ai_family, addr, ipstr, sizeof(ipstr));\n            \n            std::cout << \"IP: \" << ipver << \" Str: \" << ipstr << std::endl;\n        }\n        // ... do everything until you don't need servinfo anymore ....\n\n        freeaddrinfo(servinfo); // free the linked-list\n        std::cout << \"returning from Getaddrinfo()\\n\";\n        exit(0);\n    }\n\n    const std::string Read() \n    {\n        size_t chunk_size = 256;\n        ssize_t bytes_read;\n        char buff[chunk_size];\n        printf(\"starting to read\\n\");\n        while ((bytes_read = ::read(this->sock_fd, buff, chunk_size - 1)) > 0) {\n            buff[bytes_read] = '\\0'; // NUL-terminate so buff is printable as a C string\n            printf(\"bytes_read: %zd, buffer: %s\\n\", bytes_read, buff);\n        }\n        printf(\"done\\n\");\n        return \"\";\n        \n        ssize_t nbytes = 1024;\n\n        //ioctl(sock_fd, FIONREAD, &nbytes);\n        char *buffer = new char[nbytes];\n        memset(buffer, 0x00, nbytes);\n        \n        if (sock_fd > 0) {\n            nbytes = ::read(sock_fd, buffer, nbytes);\n            std::cout << \"nbytes read \" << nbytes << std::endl;\n        }\n        else {\n            std::cout << \"Socket descriptor is invalid\\n\";\n        }\n\n        if (nbytes < 0) {\n            printf(\"no bytes read\\n\");\n        }\n        else if (nbytes == 0) {\n            printf(\"EOF\\n\");\n        }\n        \n        std::string strBuffer(buffer);\n        delete[] buffer;\n\n        return strBuffer;\n    }\n};\n" }, { "alpha_fraction": 0.6236559152603149, "alphanum_fraction": 0.6487455368041992, "avg_line_length": 13.307692527770996, "blob_id": "a1d4a41aee400fa6b04b9be2e6cbb3917b72b85f", "content_id": "c64bf7158eaf164818950ffd5eaed9a2d42a1f7b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 559, "license_type": "no_license", "max_line_length": 56, "num_lines": 39, "path": "/asyncnet/asyncnet/connector.h", "repo_name": "olafurjohannsson/cpp-concurrency", "src_encoding": "UTF-8", "text": "//\n//  connector.h\n//  asyncnet\n//\n//  Created by Ólafur Jóhannsson on 06/11/16.\n//  Copyright © 2016 Ólafur Jóhannsson. 
All rights reserved.\n//\n\n#ifndef connector_h\n#define connector_h\n\n#include \"socket.h\"\n\n// functor\ntemplate<typename Func>\nbool doConnect(std::string port, Func fn)\n{\n    return fn(port);\n}\n\n// virtual function\nclass IConnector\n{\npublic:\n    virtual bool connect(std::string port) = 0;\n};\n\nbool doConnect (std::string port, IConnector *connector)\n{\n    return connector->connect(port);\n}\n\n\n// tests\nvoid test() {\n    \n    doConnect(\"123\", static_cast<IConnector *>(nullptr));\n}\n\n#endif /* connector_h */\n" }, { "alpha_fraction": 0.5625, "alphanum_fraction": 0.5992646813392639, "avg_line_length": 11.363636016845703, "blob_id": "3a6769115e8478c3c0358365d3837cdd640fd7d0", "content_id": "43a5b70c55eff75051d02cc264098cc0371e3ba8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 273, "license_type": "no_license", "max_line_length": 49, "num_lines": 22, "path": "/asyncnet/asyncnet/downloader.h", "repo_name": "olafurjohannsson/cpp-concurrency", "src_encoding": "UTF-8", "text": "//\n//  downloader.h\n//  asyncnet\n//\n//  Created by Ólafur Jóhannsson on 06/11/16.\n//  Copyright © 2016 Ólafur Jóhannsson. All rights reserved.\n//\n\n#ifndef downloader_h\n#define downloader_h\n\n\nclass FileDownloader\n{\npublic:\n    FileDownloader()\n    {\n        \n    }\n};\n\n#endif /* downloader_h */\n" }, { "alpha_fraction": 0.49136579036712646, "alphanum_fraction": 0.5039246678352356, "avg_line_length": 20.266666412353516, "blob_id": "e3f668e6808eef55c55292c12f65b5c7b7c99f03", "content_id": "fddc22346fa4713e06bf6fa6560230d007a955aa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 637, "license_type": "no_license", "max_line_length": 86, "num_lines": 30, "path": "/asyncnet/asyncnet/subscriber.h", "repo_name": "olafurjohannsson/cpp-concurrency", "src_encoding": "UTF-8", "text": "#include <zmq.hpp>\n#include <iostream>\n\nclass subscriber\n{\n    \npublic:\n    \n    subscriber()\n    {\n        zmq::context_t context(1);\n        zmq::socket_t subscriber(context, ZMQ_SUB);\n        \n        const char *loc = \"tcp://localhost:5556\";\n        printf(\"connecting to %s\\n\", loc);\n        subscriber.connect(loc);\n        subscriber.setsockopt(ZMQ_SUBSCRIBE, \"\", 0);\n        \n        \n        for (;;) {\n            zmq::message_t message(5);\n            subscriber.recv(&message, 0);\n            \n            printf(\"recv bytes %zu message: \\\"%s\\\"\\n\", message.size(), (const char *) message.data());\n        }\n        \n        subscriber.close();\n    }\n    \n};" }, { "alpha_fraction": 0.6188898086547852, "alphanum_fraction": 0.6263462901115417, "avg_line_length": 21.945453643798828, "blob_id": "cd584403451f2a9e2ac298ce305f444ed9b4c978", "content_id": "4ff27f117a6a1f5ed7d803bb936f55a19f44313b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1207, "license_type": "no_license", "max_line_length": 63, "num_lines": 55, "path": "/asyncnet/subscriber.py", "repo_name": "olafurjohannsson/cpp-concurrency", "src_encoding": "UTF-8", "text": "from threading import Thread\nimport zmq, socketio, eventlet\nfrom flask import Flask, render_template\nfrom flask_socketio import send\n\nsio = socketio.Server()\napp = Flask(__name__)\n\[email protected]('/')\ndef index():\n    return render_template('index.html')\n\[email protected]('connect')\ndef connect(socket_id, environment):\n    print 'connect sid: %s, env: %s' % (socket_id, environment)\n\[email protected]('message')\ndef message(socket_id, data):\n    print 'message sid: %s, data: %s' % (socket_id, data)\n\[email protected]('disconnect')\ndef disconnect(socket_id):\n    print 'disconnect sid: %s' % 
socket_id\n\n\ndef init_zeromq():\n \n print 'zmq version %s' % zmq.zmq_version()\n \n context = zmq.Context()\n subscriber = context.socket(zmq.SUB)\n subscriber.connect(\"tcp://localhost:5556\")\n subscriber.setsockopt(zmq.SUBSCRIBE, \"\")\n \n while True:\n r = subscriber.recv()\n print r\n \n \n \n subscriber.close()\n context.term()\n\n\nif __name__ == '__main__':\n \n # start 0mq, it blocks so we start a thread\n t = Thread(target=init_zeromq)\n t.start()\n \n # start flask\n app = socketio.Middleware(sio, app)\n eventlet.wsgi.server(eventlet.listen(('', 8000)), app)\n \n t.join()\n" }, { "alpha_fraction": 0.5330700874328613, "alphanum_fraction": 0.5508390665054321, "avg_line_length": 20.553192138671875, "blob_id": "fc05543daddd9b1b723809ded3227b1b32585269", "content_id": "acd34e479f3139ae4e2f682ef9af148751fdc180", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1015, "license_type": "no_license", "max_line_length": 112, "num_lines": 47, "path": "/asyncnet/asyncnet/publisher.h", "repo_name": "olafurjohannsson/cpp-concurrency", "src_encoding": "UTF-8", "text": "//\n// publisher.h\n// asyncnet\n//\n// Created by Ólafur Jóhannsson on 09/11/16.\n// Copyright © 2016 Ólafur Jóhannsson. All rights reserved.\n//\n\n#ifndef publisher_h\n#define publisher_h\n#include <iostream>\n#include <zmq.hpp>\n#include <stdio.h>\n#include <stdlib.h>\n#include <time.h>\n#include <unistd.h>\n#include <thread>\n#include <string>\n#include <functional>\n#include <vector>\n#include <map>\n\nclass publisher\n{\n publisher()\n {\n zmq::context_t context (1);\n zmq::socket_t publisher (context, ZMQ_PUB);\n publisher.bind(\"tcp://*:5556\");\n \n int major, minor, patch;\n zmq_version (&major, &minor, &patch); printf (\"Current ØMQ version is %d.%d.%d\\n\", major, minor, patch);\n \n for (;;) {\n zmq::message_t message(5);\n snprintf((char *) message.data(), 5, \"%s\", \"test\");\n printf(\"sending message %s\\n\", message.data());\n publisher.send(message);\n \n sleep(1);\n }\n \n }\n};\n\n\n#endif /* publisher_h */\n" } ]
8
EzraCerpac/SatLink
https://github.com/EzraCerpac/SatLink
582f6240d2813525fba76c9a624b2336748f8043
d5da25d8f287ea25a7da6e91eed8b435ed8416f1
db7a5051df721ff0f255df605380ad95d82abbcf
refs/heads/main
2023-08-25T20:32:38.885754
2021-09-02T19:04:22
2021-09-02T19:04:22
413,519,242
0
0
MIT
2021-10-04T17:23:41
2021-09-02T19:04:25
2021-09-02T19:04:22
null
[ { "alpha_fraction": 0.5815789699554443, "alphanum_fraction": 0.6013157963752747, "avg_line_length": 28.93877601623535, "blob_id": "4542f7436b28cdec271b192e68f1efec5febb73d", "content_id": "47b7d3a9e41c682e591a70a16ea860be53d9a2d6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1520, "license_type": "permissive", "max_line_length": 78, "num_lines": 49, "path": "/models/util.py", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "import numpy as np\r\nimport math\r\n\r\n\r\n# def curve_interpolation(x, interp, name_c1, c1, name_c2, c2):\r\n# c1_interp = np.interp(x, c1.index, c1)\r\n# c2_interp = np.interp(x, c2.index, c2)\r\n# delta_curve = abs(name_c1 - name_c2)\r\n# delta_y = abs(c1_interp - c2_interp)\r\n#\r\n# if c1_interp < c2_interp:\r\n# delta_val = interp - name_c1\r\n# interpolated_point = c1_interp + delta_y * (delta_val / delta_curve)\r\n# elif c1_interp > c2_interp:\r\n# delta_val = interp - name_c2\r\n# interpolated_point = c2_interp + delta_y * (delta_val / delta_curve)\r\n# else:\r\n# return c2_interp\r\n#\r\n# return interpolated_point\r\n\r\ndef truncate(number, decimals=0):\r\n \"\"\"\r\n Returns a value truncated to a specific number of decimal places.\r\n \"\"\"\r\n if not isinstance(decimals, int):\r\n raise TypeError(\"decimal places must be an integer.\")\r\n elif decimals < 0:\r\n raise ValueError(\"decimal places has to be 0 or more.\")\r\n elif decimals == 0:\r\n return math.trunc(number)\r\n\r\n factor = 10.0 ** decimals\r\n return math.trunc(number * factor) / factor\r\n\r\n\r\n\r\ndef curve_interpolation(x, interp, data):\r\n to_be_fitedx = []\r\n to_be_fitedy = []\r\n for i in data.columns:\r\n curve_interpol = np.interp(x, data.index.array, data[i].values)\r\n to_be_fitedx.append(float(i))\r\n to_be_fitedy.append(curve_interpol)\r\n\r\n intepolated_point = np.interp(interp, to_be_fitedx, to_be_fitedy)\r\n\r\n\r\n return intepolated_point\r\n\r\n\r\n" }, { "alpha_fraction": 0.5333994626998901, "alphanum_fraction": 0.5569356083869934, "avg_line_length": 43.628570556640625, "blob_id": "1cafdb7e6db7e238acdf8520cd68ca06e4e65c9f", "content_id": "840df9c30ce42e548f6b12c25b49b3efb609eca6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 19163, "license_type": "permissive", "max_line_length": 128, "num_lines": 420, "path": "/sat.py", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "import sys, warnings\r\nimport numpy as np\r\nimport pandas as pd\r\nimport itur\r\nfrom GrStat import GroundStation, Reception\r\nfrom models.FsAtt import FreeSpaceAtt as FsAtt\r\nimport astropy.units as u\r\nfrom models.util import truncate\r\n\r\n# class intended for calculation of parameters related to the geostationary satellite and his link to the ground station\r\n# can set the ground_station class and reception class to it and run all the functions\r\n# please be carefully with the p value when running availability calculations\r\n\r\nclass Satellite:\r\n # all lat/long parameters are in degrees when input or output variables\r\n\r\n def __init__(self, sat_long, freq, eirp_max=0, h_sat=35786, b_transp=36, b_util=36, back_off=0, contorno=0,\r\n modulation='', roll_off=None, fec=''):\r\n self.sat_long = np.radians(sat_long) # satellite longitude\r\n self.freq = freq # frequency, in GHz\r\n self.eirp_max = eirp_max # sattelite's eirp, in dBW\r\n self.h_sat = h_sat # satellite's height\r\n self.b_transp = b_transp # 
transponder's band, in GHz\r\n self.b_util = b_util # transponder's band that the carrier is using, in GHz\r\n self.back_off = back_off # transponder's back off ????????????????\r\n self.contorno = contorno # contour value - TODO: figure out what on earth this is ????????????????\r\n # self.tech = tech # technology (DVB-S, S2, S2X)\r\n self.modulation = modulation # modulation name\r\n self.fec = fec # FEC\r\n self.roll_off = roll_off\r\n self.eirp = eirp_max - back_off - contorno + 10 * np.log10(b_util / b_transp)\r\n\r\n # not initialized parameters that will be calculated in the atmospheric attenuation function\r\n self.a_g = None # gaseous attenuation\r\n self.a_c = None # cloud attenuation\r\n self.a_r = None # rain attenuation\r\n self.a_s = None # scintillation or tropospheric attenuation\r\n self.a_t = None # total atmospheric attenuation\r\n self.a_fs = None # free space attenuation\r\n self.a_x = None # cross-polar attenuation\r\n self.a_co = None # co-polar attenuation\r\n self.a_tot = None # total attenuation (atmospheric + free space)\r\n self.p = None # exceedance percentage - just to store the attenuation in reference to a p value\r\n\r\n # other parameters calculated in this class\r\n self.cross_pol_discrimination = None # attenuation due to depolarization effect\r\n self.power_flux_density = None # power flux density at earth station (W/m^2)\r\n self.antenna_noise_rain = None # antenna noise under rain conditions\r\n self.total_noise_temp = None # system's noise temperature (K)\r\n self.figure_of_merit = None # figure of merit - G/T\r\n self.c_over_n0 = None # calculated C/N0\r\n self.snr = None # calculated SNR\r\n self.snr_threshold = None # SNR threshold for a specific modulation scheme\r\n self.availability = None # availability for a specific SNR threshold\r\n self.symbol_rate = None # symbol rate based on the bandwidth and roll-off factor\r\n self.bitrate = None # bitrate based on the bandwidth and inforate efficiency\r\n\r\n # ground station and reception objects that can be set in the satellite class\r\n self.grstation = None # ground station object\r\n self.reception = None # reception object\r\n\r\n def set_grstation(self, grstation: GroundStation): # works as a way to use some variables from grstation\r\n self.grstation = grstation\r\n pass\r\n\r\n def set_reception(self, reception: Reception): # set the link's reception system\r\n reception.set_parameters(self.freq, self.get_elevation()) # parameters used in some functions in Reception\r\n self.reception = reception\r\n\r\n pass\r\n\r\n def get_elevation(self): # returns the elevation angle between satellite and ground station\r\n if self.grstation is None:\r\n sys.exit(\r\n 'Need to associate a ground station to a satellite first. Try satellite.set_grstation(GroundStation)!!!')\r\n\r\n site_lat = np.radians(self.grstation.site_lat)\r\n site_long = np.radians(self.grstation.site_long)\r\n E = np.arctan((np.cos(self.sat_long - site_long) * np.cos(site_lat) - 0.15116) /\r\n (np.sqrt(1 - (np.cos(self.sat_long - site_long) ** 2) * (np.cos(site_lat) ** 2))))\r\n\r\n return np.degrees(E)\r\n\r\n def get_azimuth(self): # returns the azimuth angle between satellite and ground station\r\n if self.grstation is None:\r\n sys.exit(\r\n 'Need to associate a ground station to a satellite first. 
Try satellite.set_reception(reception)!!!')\r\n\r\n site_lat = np.radians(self.grstation.site_lat)\r\n site_long = np.radians(self.grstation.site_long)\r\n azimuth = np.pi + np.arctan2(np.tan(self.sat_long - site_long), np.sin(site_lat))\r\n\r\n return np.degrees(azimuth)\r\n\r\n def get_distance(self): # returns the distance (km) between satellite and ground station\r\n if self.grstation is None:\r\n sys.exit(\r\n 'Need to associate a ground station to a satellite first. Try satellite.set_reception(reception)!!!')\r\n\r\n e = np.radians(self.get_elevation())\r\n earth_rad = self.grstation.get_earth_radius()\r\n dist = np.sqrt(((earth_rad + self.h_sat) ** 2) - ((earth_rad * np.cos(e)) ** 2)) - earth_rad * np.sin(e)\r\n return dist\r\n\r\n def get_reception_threshold(self): # returns the threshold for a given modulation scheme (modcod file)\r\n if self.modulation == '' or self.fec == '':\r\n sys.exit(\r\n 'You need to create a satellite class with a technology, modulation and FEC to use this function!!!')\r\n elif self.snr_threshold is not None:\r\n return self.snr_threshold\r\n\r\n data = pd.read_csv('models\\\\Modulation_dB.csv', sep=';')\r\n # line = data.loc[(data.Tech == self.tech) & (data.Modulation == self.modulation) & (data.FEC == self.fec)]\r\n line = data.loc[(data.Modulation == self.modulation) & (data.FEC == self.fec)]\r\n self.snr_threshold = line['C_over_N'].values[0]\r\n return self.snr_threshold\r\n\r\n def get_symbol_rate(self):\r\n if self.symbol_rate is not None:\r\n return self.symbol_rate\r\n if self.roll_off is None:\r\n sys.exit('You must define the roll off factor to calculate the symbol rate!!!')\r\n self.symbol_rate = self.b_util * 10 ** 6 / self.roll_off\r\n return self.symbol_rate\r\n\r\n def get_bitrate(self):\r\n if self.bitrate is not None:\r\n return self.bitrate\r\n if self.modulation == '' or self.fec == '':\r\n sys.exit(\r\n 'You need to create a satellite class with a technology, modulation and FEC to use this function!!!')\r\n data = pd.read_csv('models\\\\Modulation_dB.csv', sep=';')\r\n line = data.loc[(data.Modulation == self.modulation) & (data.FEC == self.fec)]\r\n self.bitrate = self.b_util * line['Inforate efficiency bps_Hz'].values[0]\r\n return self.bitrate\r\n\r\n def get_link_attenuation(self, p=0.001, method='approx'):\r\n if self.grstation is None:\r\n sys.exit(\r\n 'Need to associate a ground station to a satellite first. Try satellite.set_reception(reception)!!!')\r\n if self.reception is None:\r\n sys.exit('Need to associate a reception to a satellite first. Try satellite.set_reception(reception)!!!')\r\n if self.p is None:\r\n self.p = 0.001\r\n p = 0.001\r\n if self.a_tot is not None and p == self.p:\r\n return self.a_fs, self.reception.get_depoint_loss(), self.a_g, self.a_c, self.a_r, self.a_s, self.a_t, self.a_tot\r\n else:\r\n freq = self.freq * u.GHz\r\n e = self.get_elevation()\r\n diam = self.reception.ant_size * u.m\r\n a_fs = FsAtt(self.get_distance(), self.freq)\r\n a_g, a_c, a_r, a_s, a_t = itur.atmospheric_attenuation_slant_path(\r\n self.grstation.site_lat, self.grstation.site_long, freq, e, p, diam, return_contributions=True, mode=method)\r\n a_tot = a_fs + self.reception.get_depoint_loss() + a_t.value\r\n\r\n self.a_g = a_g\r\n self.a_c = a_c\r\n self.a_r = a_r\r\n self.a_s = a_s\r\n self.a_t = a_t\r\n self.a_fs = a_fs\r\n self.a_tot = a_tot\r\n self.p = p\r\n\r\n # erasing the dependent variables that will use link atten. 
for different p value\r\n self.power_flux_density = None\r\n self.antenna_noise_rain = None\r\n self.total_noise_temp = None\r\n self.figure_of_merit = None\r\n self.c_over_n0 = None\r\n self.snr = None\r\n self.cross_pol_discrimination = None\r\n\r\n return a_fs, self.reception.get_depoint_loss(), a_g, a_c, a_r, a_s, a_t, a_tot\r\n\r\n def get_total_attenuation(self, p = None):\r\n self.a_fs = FsAtt(self.get_distance(), self.freq)\r\n xpd = self.get_cross_pol_discrimination()\r\n self.a_x = 10 * np.log10(1 + 10 ** (0.1 * xpd))\r\n self.a_co = 10 * np.log10(1 + 10 ** (0.1 * xpd))\r\n\r\n self.a_tot = self.a_fs + self.a_x + self.reception.get_depoint_loss() + self.a_t\r\n return self.a_tot, self.a_t, self.reception.get_depoint_loss(),\r\n\r\n def get_cross_pol_discrimination(self, p=None):\r\n if self.cross_pol_discrimination is not None and p == self.p:\r\n return self.cross_pol_discrimination\r\n\r\n if p is not None:\r\n _, _, _, _, a_r, _, _, _ = self.get_link_attenuation(p)\r\n else:\r\n _, _, _, _, a_r, _, _, _ = self.get_link_attenuation(self.p)\r\n\r\n a_r = a_r.value\r\n if self.freq < 8: # frequency in Ghz\r\n f = 10 # dummy frequency used to convert XPD calculations to frequencies below 8 GHz\r\n if self.freq < 4:\r\n warnings.warn(' XPD calculations are suited for frequencies above 4 GHz')\r\n else:\r\n f = self.freq\r\n if self.freq > 35:\r\n warnings.warn(' XPD calculations are suited for frequencies below 35 GHz')\r\n\r\n cf = 20 * np.log10(f)\r\n\r\n if 8 <= f <= 20:\r\n v = 12.8 * (f ** 0.19)\r\n else:\r\n v = 22.6\r\n\r\n ca = v * np.log10(a_r)\r\n\r\n tau = 45 # NOT FORGET TO MAKE THIS CHOOSABLE !!!!\r\n c_tau = -10 * np.log10(1 - 0.484 * (1 + np.cos(4 * np.radians(tau))))\r\n c_teta = -40 * np.log10(np.cos(np.radians(self.get_elevation())))\r\n\r\n # if 0.001 <= self.p < 0.01:\r\n # sigma = 15\r\n # elif 0.01 <= self.p < 0.1:\r\n # sigma = 10\r\n # elif 0.1 <= self.p < 1:\r\n # sigma = 5\r\n # else:\r\n # sigma = 0\r\n\r\n sigma = np.interp(self.p, [0.001, 0.01, 0.1, 1], [15, 10, 5, 0]) # interpolating the standard deviation of\r\n # raindrop inclination angle distribution from given values\r\n\r\n c_sigma = 0.0052 * sigma\r\n\r\n xpd_rain = cf - ca + c_tau + c_teta + c_sigma\r\n c_ice = xpd_rain * (0.3 + 0.1 * np.log10(self.p)) / 2\r\n xpd = xpd_rain - c_ice\r\n\r\n tau2 = tau #MAKE THIS ALSO CHOOSABLE !!!\r\n\r\n if self.freq < 8:\r\n xpd = (xpd_rain - 20 * np.log((self.freq * (1 + 0.484 * np.cos(4 * np.radians(tau2)))) ** 0.5) /\r\n (f * (1 - 0.484 * (1 + np.cos(4 * np.radians(tau)))) ** 0.5)) # RECHECK THIS EQUATION !!!\r\n\r\n self.a_x = 10 * np.log10(1 + 10 ** (0.1 * xpd))\r\n self.a_co = 10 * np.log10(1 + 10 ** (-0.1 * xpd))\r\n\r\n self.cross_pol_discrimination = xpd\r\n return self.cross_pol_discrimination, self.a_co, self.a_x\r\n\r\n def get_power_flux_density(self, p=None):\r\n if self.grstation is None:\r\n sys.exit('Need to associate a grd. station to a satellite first. Try Satellite.set_grstation(Station)!!!')\r\n elif self.reception is None:\r\n sys.exit('Need to associate a reception to a satellite first. 
Try Satellite.set_reception(Reception)!!!')\r\n elif self.power_flux_density is not None and p == self.p:\r\n return self.power_flux_density\r\n\r\n if p is not None:\r\n _, _, _, _, _, _, a_t, _ = self.get_link_attenuation(p)\r\n else:\r\n _, _, _, _, _, _, a_t, _ = self.get_link_attenuation(self.p)\r\n\r\n a_t = a_t.value\r\n phi = (10 ** ((self.eirp - a_t)/10)) / (4 * np.pi * ((self.get_distance() * 1000 ) ** 2))\r\n\r\n self.power_flux_density = 10 * np.log10(phi)\r\n\r\n return self.power_flux_density\r\n\r\n def get_antenna_noise_rain(self, p=None):\r\n if self.reception is None:\r\n sys.exit('Need to associate a reception to a satellite first. Try satellite.set_reception(reception)!!!')\r\n elif self.antenna_noise_rain is not None and self.p == p:\r\n return self.antenna_noise_rain\r\n\r\n if p is not None:\r\n _, _, _, _, _, _, a_t, _ = self.get_link_attenuation(p)\r\n else:\r\n _, _, _, _, _, _, a_t, _ = self.get_link_attenuation(self.p)\r\n Tm = 275\r\n a_t = 10 ** (a_t.value/10)\r\n self.antenna_noise_rain = self.reception.get_brightness_temp()/a_t + (Tm * (1-1/a_t)) + self.reception.get_ground_temp()\r\n return self.antenna_noise_rain\r\n\r\n def get_total_noise_temp(self, p=None):\r\n if self.freq is None or self.reception.e is None:\r\n sys.exit('Need to associate a reception to a satellite first. Try satellite.set_reception(reception)!!!')\r\n elif self.total_noise_temp is not None and p == self.p:\r\n return self.total_noise_temp\r\n if p is not None:\r\n _, _, _, _, _, _, a_t, _ = self.get_link_attenuation(p)\r\n\r\n # self.total_noise_temp = (self.get_antenna_noise_rain() / (10 ** (self.reception.feeder_loss / 10))\r\n # + self.reception.feeder_noise_temp * (\r\n # 1 - 1 / (10 ** (self.reception.feeder_loss / 10))) + self.reception.lnbf_noise_temp)\r\n\r\n total_loss = self.reception.coupling_loss + self.reception.cable_loss\r\n loss = 10 ** (total_loss/10)\r\n t_loss = 290 * (loss - 1)\r\n self.total_noise_temp = self.get_antenna_noise_rain() +\\\r\n (self.reception.lnb_noise_temp + t_loss/(10 ** (self.reception.lnb_gain/10)))\r\n\r\n\r\n return self.total_noise_temp\r\n\r\n def get_figure_of_merit(self, p=None): # recommendation ITU BO790\r\n if self.figure_of_merit is not None:\r\n return self.figure_of_merit\r\n elif self.figure_of_merit is not None and p == self.p:\r\n return self.figure_of_merit\r\n if p is not None:\r\n _, _, _, _, _, _, _, _ = self.get_link_attenuation(p)\r\n\r\n # self.figure_of_merit = self.reception.get_antenna_gain() - \\\r\n # self.reception.get_depoint_loss() - self.reception.polarization_loss - \\\r\n # - self.reception.coupling_loss - self.reception.cable_loss - \\\r\n # 10 * np.log10(self.get_total_noise_temp())\r\n\r\n alfa = 10 ** ((self.reception.coupling_loss + self.reception.cable_loss)/10)\r\n beta = 10 ** (self.reception.get_depoint_loss()/10)\r\n gt = 10 ** (self.reception.get_antenna_gain()/10)\r\n ta = self.get_antenna_noise_rain()\r\n t0 = 290\r\n n = self.get_total_noise_temp()/t0 + 1\r\n\r\n self.figure_of_merit = 10* np.log10((alfa * beta * gt) / (alfa * ta + (1 - alfa) * t0 + (n - 1) * t0))\r\n\r\n return self.figure_of_merit\r\n\r\n def get_c_over_n0(self, p=None): # returns the C/N for the satellite link\r\n if self.reception is None:\r\n sys.exit('Need to associate a reception to a satellite first. 
Try satellite.set_reception(reception)!!!')\r\n if self.eirp_max == 0:\r\n sys.exit('Please set the satellite\\'s E.I.R.P before running this!!!')\r\n if self.c_over_n0 is not None and self.p == p:\r\n return self.c_over_n0\r\n\r\n if p is not None:\r\n _, _, _, _, _, _, _, a_tot = self.get_link_attenuation(p)\r\n else:\r\n _, _, _, _, _, _, _, a_tot = self.get_link_attenuation(self.p)\r\n\r\n figure_of_merit = self.get_figure_of_merit()\r\n self.c_over_n0 = self.eirp - a_tot + figure_of_merit + 228.6\r\n\r\n self.snr = None # erasing the dependent variables that will use C/N0 for different p value\r\n return self.c_over_n0\r\n\r\n def get_snr(self, p=None):\r\n if p == self.p and self.snr is not None:\r\n return self.snr\r\n if p is not None:\r\n _, _, _, _, _, _, _, _ = self.get_link_attenuation(p)\r\n else:\r\n _, _, _, _, _, _, _, _ = self.get_link_attenuation(self.p)\r\n\r\n self.snr = self.get_c_over_n0(p) - 10 * np.log10(self.b_util * (10 ** 6))\r\n\r\n return self.snr\r\n\r\n # tris function is just a simple way to iterate over a convex optimization problem\r\n # its a option besides the recommended ITU-R BO.1696 methodology\r\n def get_availability(self, margin=0, relaxation=0.1):\r\n target = self.get_reception_threshold() + margin\r\n p = 0.0012\r\n speed = 0.000005\r\n speed_old = 0\r\n delta_old = 1000000000\r\n p_old = 10000000\r\n delta = self.get_snr(0.001) - target\r\n\r\n if delta >= 0:\r\n return 99.999\r\n\r\n for i in range(1, 5000):\r\n delta = abs(self.get_snr(p) - target)\r\n if delta < relaxation:\r\n self.availability = 100 - p\r\n return truncate(self.availability, 3)\r\n\r\n if delta_old < delta:\r\n if (abs(p_old - p) < 0.001) and (speed_old * speed < 1):\r\n self.availability = 100 - p\r\n return truncate(self.availability, 3)\r\n\r\n speed_old = speed\r\n speed = -1 * speed / 10\r\n p_old = p\r\n p += speed\r\n else:\r\n speed_old = speed\r\n speed = speed * 1.5\r\n p_old = p\r\n p += speed\r\n\r\n if p < 0.001:\r\n p_old = 100\r\n p = 0.001 + np.random.choice(np.arange(0.001, 0.002, 0.000005))\r\n speed_old = 1\r\n speed = 0.000005\r\n delta = abs(self.get_snr(p) - target)\r\n if p > 50:\r\n p_old = 100\r\n p = 50 - np.random.choice(np.arange(0.01, 2, 0.01))\r\n speed_old = 1\r\n speed = 0.000005\r\n\r\n # if speed > 2:\r\n # speed = 2\r\n # speed_old = 1\r\n\r\n delta_old = delta\r\n\r\n # if i % 500 == 0:\r\n # relaxation += 0.1\r\n\r\n sys.exit(\r\n 'Can\\'t reach the required SNR. 
You can change the modulation settings or the required snr relaxation!!!')\r\n\r\n def get_wm_availability(self): # get worst month availability - via ITU recommendation P.841-4\r\n if self.availability != None:\r\n self.wm_availability = 100 - (2.84 * (100 - self.availability) ** 0.87)" }, { "alpha_fraction": 0.47333332896232605, "alphanum_fraction": 0.5866666436195374, "avg_line_length": 19.428571701049805, "blob_id": "4a0be79207293ca190160bf13cd72816b4b41d30", "content_id": "fd514a6546fafeee9b04ca5a033746980ee8ac87", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 150, "license_type": "permissive", "max_line_length": 54, "num_lines": 7, "path": "/models/FsAtt.py", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "from numpy import log10\r\n\r\ndef FreeSpaceAtt(d, f): #d em km e f em MHz\r\n\r\n AttFs = 32.4 + 20 * log10(d) + 20 * log10(f* 1000)\r\n\r\n return AttFs\r\n" }, { "alpha_fraction": 0.6628674268722534, "alphanum_fraction": 0.6784642934799194, "avg_line_length": 40.177215576171875, "blob_id": "9b5624af6b411c29016d022babbf0aa5dd115f5f", "content_id": "1693ba58e55821f04f89a4d5797043d2c8f0b609", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3334, "license_type": "permissive", "max_line_length": 112, "num_lines": 79, "path": "/single_point_example.py", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "from GrStat import GroundStation, Reception\r\nfrom sat import Satellite\r\nimport numpy as np\r\n\r\n# ground station parameters\r\nsite_lat = -3.7 # [decimal degrees]\r\nsite_long = -45.9 # [decimal degrees]\r\nstation = GroundStation(site_lat, site_long)\r\n\r\n# satellite parameters\r\nsat_long = -70 # [decimal degrees]\r\nfreq = 15 # [Ghz]\r\neirp = 54 # [dBW]\r\nhsat = 35800 # satellite's height [km]\r\ntau = 90 # H=0, V = 90, circ = 45\r\nb_transponder = 36 # transponder bandwidth [MHz]\r\nb_util = 9 # effective used bandwidth [MHz]\r\nbackoff = 0 # not used for now!\r\ncontour = 0 # not used for now!\r\nrolloff = 0.2 # roll-off factor (raised cosine filter)\r\nmod = '8PSK' # modulation (from modcod file)\r\nfec = '120/180' # FEC (from modcod file)\r\n\r\n# creating the satellite object\r\nsat = Satellite(sat_long, freq, eirp, hsat, b_transponder, b_util, backoff, contour, mod, rolloff, fec)\r\n\r\nsat.set_grstation(station) # relating the ground station object to a satellite one\r\n\r\n# reception parameters\r\nant_size = 1.2 # reception antenna diameter [m]\r\nant_eff = 0.6 # reception antenna efficiency\r\ncoupling_loss = 0 # [dB]\r\npolarization_loss = 3 # [dB]\r\nlnb_gain = 55 # [dB]\r\nlnb_noise_temp = 20 # [dB]\r\ncable_loss = 4 # [dB]\r\nmax_depoint = 0.1 # maximum depointing angle [degrees]\r\n\r\n# creating a reception object\r\nreception = Reception(ant_size, ant_eff, coupling_loss, polarization_loss, lnb_gain, lnb_noise_temp, cable_loss,\r\n max_depoint)\r\n\r\nsat.set_reception(reception) # relating the ground station object to a satellite one\r\n\r\np = np.random.rand() # creating a random percentage for the calculations\r\nprint('choosen p (%): ', p)\r\n\r\na_fs, a_dep, a_g, a_c, a_r, a_s, a_t, a_tot = sat.get_link_attenuation(p)\r\n\r\n# displaying some reception parameters/variables\r\nprint('reception SNR threshold ',sat.get_reception_threshold())\r\n\r\nprint('coordinates\\' earth\\'s radius: ', sat.grstation.get_earth_radius(), ' km')\r\nprint('elevation angle: ', sat.get_elevation(), ' 
degrees')\r\nprint('link distance: ', sat.get_distance(), ' km')\r\nprint('figure of merit: ', sat.get_figure_of_merit())\r\nprint('brightness temperature of the ground: ', sat.reception.get_ground_temp(), ' K')\r\nprint('sky brightness temperature', sat.reception.get_brightness_temp(), ' K')\r\nprint('antenna noise temperature: ', sat.reception.get_antenna_noise_temp(), ' K')\r\nprint('total noise temperature: ', sat.get_total_noise_temp(), ' K')\r\nprint('Rx antenna gain: ', sat.reception.get_antenna_gain(), ' dBi')\r\nprint('Rx antenna half power beamwidth: ', sat.reception.get_beamwidth(), ' degrees')\r\n\r\n# displaying some link budget calculation results\r\n\r\nprint(\"gaseous attenuation: \", a_g, ' dB')\r\nprint(\"cloud attenuation: \", a_c, ' dB')\r\nprint(\"rain attenuation: \", a_r, ' dB')\r\nprint(\"scintillation attenuation: \", a_s, ' dB')\r\nprint(\"total atmospheric attenuation: \", a_t, ' dB')\r\nprint('free-space attenuation: ', a_fs, ' dB')\r\nprint('depointing loss: ', a_dep, ' dB')\r\nprint('atmospheric + free-space attenuation: ', a_tot, ' dB')\r\nprint('link C/N0: ', sat.get_c_over_n0(p), ' dB-Hz')\r\nprint('link SNR: ', sat.get_snr(p), ' dB')\r\n\r\n# availability example calculation (this does not uses the variable p)\r\n# actually, this changes p to achieve the target the modulation target SNR\r\nprint('example link availability', sat.get_availability())\r\n\r\n" }, { "alpha_fraction": 0.7366666793823242, "alphanum_fraction": 0.7516666650772095, "avg_line_length": 38.13333511352539, "blob_id": "6fb29b1909fb8a5fabfa3006aead797595ea0ce4", "content_id": "fb121de6a0c73d9c9d943bec734afe54829e9ead", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 600, "license_type": "permissive", "max_line_length": 128, "num_lines": 15, "path": "/mkdocs/docs/contact.md", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "# Contacts\r\n\r\nAny new functionalities and improvements are welcome! Please report bugs as well. \r\n\r\nWant to collaborate? Please fell free to contact us!\r\n\r\nGithub: [https://github.com/cfragoas/Satlink](https://github.com/cfragoas/Satlink)\r\n\r\ne-mail: [[email protected]](mailto:[email protected]?subject=[GitHub]%20SatLink)\r\n\r\n[Linkedin](https://br.linkedin.com/in/christian-rodrigues-177a5024)\r\n\r\nTwitter: [https://twitter.com/cfragoas](https://twitter.com/cfragoas) (I only tweet in Portuguese, but fell free to message me!)\r\n\r\nPlease mind that Satlink is a side project and bugfixes and updates take time to implement." 
}, { "alpha_fraction": 0.5712549090385437, "alphanum_fraction": 0.5907019376754761, "avg_line_length": 32.62105178833008, "blob_id": "d47e8292a6f93813cd68defccb40d204e1d1abf2", "content_id": "20a057eec98bca2076618c0704f7f78b3f714c4d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3292, "license_type": "permissive", "max_line_length": 144, "num_lines": 95, "path": "/multi_point_example.py", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "import pandas as pd\r\nimport numpy as np\r\nfrom sat import Satellite\r\nfrom GrStat import GroundStation, Reception\r\nimport multiprocessing\r\nimport tqdm\r\nimport os\r\nimport datetime\r\n\r\n# this is a code example and very good approximation of the multi-point calculation used in link_performance.py\r\n\r\n\r\ndef point_availability(args): # function loop - return the availability to a given Lat/Long\r\n point = args[0]\r\n sat = args[1]\r\n reception = args[2]\r\n lat = point['Lat']\r\n long = point['Long']\r\n station = GroundStation(lat, long)\r\n sat.set_grstation(station)\r\n sat.set_reception(reception)\r\n point['availability'] = sat.get_availability()\r\n return point\r\n\r\n\r\nif __name__ == '__main__':\r\n\r\n # reading the input table\r\n location = 'input examples\\\\'\r\n file = 'list'\r\n point_list = pd.read_csv(location + file + '.csv', sep=';', encoding='latin1')\r\n point_list['availability'] = np.nan # creating an empty results column\r\n\r\n ##############################\r\n ### satellite parameters ###\r\n ##############################\r\n\r\n sat_long = -70 # [decimal degrees]\r\n freq = 18 # [Ghz]\r\n eirp = 54 # [dBW]\r\n hsat = 35800 # satellite's height [km]\r\n tau = 90 # H=0, V = 90, circ = 45\r\n b_transponder = 36 # transponder bandwidth [MHz]\r\n b_util = 9 # effective used bandwidth [MHz]\r\n backoff = 0 # not used for now!\r\n contour = 0 # not used for now!\r\n mod = '8PSK' # modulation (from modcod file)\r\n fec = '120/180' # FEC (from modcod file)\r\n rolloff = 0.2 # roll-off factor (raised cosine filter)\r\n\r\n # creating the satellite object\r\n sat = Satellite(sat_long, freq, eirp, hsat, b_transponder, b_util, backoff, contour, mod, rolloff, fec)\r\n\r\n ##############################\r\n ### reception parameters ###\r\n ##############################\r\n ant_size = 1.2 # reception antenna diameter [m]\r\n ant_eff = 0.6 # reception antenna efficiency\r\n coupling_loss = 0 # [dB]\r\n polarization_loss = 3 # [dB]\r\n lnb_gain = 55 # [dB]\r\n lnb_noise_temp = 20 # temperatura de ruído do LNBF\r\n cable_loss = 4 # [dB]\r\n max_depoint = 0.1 # maximum depointing angle [degrees]\r\n\r\n # creating a reception object\r\n reception = Reception(ant_size, ant_eff, coupling_loss, polarization_loss, lnb_gain, lnb_noise_temp, cable_loss,\r\n max_depoint)\r\n\r\n cores = multiprocessing.cpu_count() - 2\r\n\r\n p = multiprocessing.Pool(processes=cores)\r\n\r\n # calculation loop\r\n\r\n data = list(\r\n tqdm.tqdm(p.imap_unordered(point_availability, [(city, sat, reception) for index, city in point_list.iterrows()]),\r\n total=len(point_list)))\r\n p.close()\r\n\r\n point_list.drop(point_list.index, inplace=True)\r\n point_list = point_list.append(data, ignore_index=True)\r\n point_list['unavailability time'] = round(((100 - point_list['availability'])/100) * 525600, 0) # calculating the unavailability in minutes\r\n\r\n\r\n # saving the results into a csv file\r\n\r\n path = 'results'\r\n if not os.path.exists(path):\r\n 
os.makedirs(path)\r\n\r\n point_list.to_csv(path + '\\\\' + 'results ' + datetime.datetime.now().strftime('%y-%m-%d_%H-%M-%S') + '.csv', sep=';',\r\n encoding='latin1')\r\n\r\n print('Complete!!!')\r\n\r\n" }, { "alpha_fraction": 0.5763111114501953, "alphanum_fraction": 0.5966411232948303, "avg_line_length": 31.623762130737305, "blob_id": "4decac54183e68a485488372d2c360efb59da661", "content_id": "389f2d4bbb832b76154ae974355c52c743279be8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3394, "license_type": "permissive", "max_line_length": 118, "num_lines": 101, "path": "/example_multi_point_ant_size.py", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "from GrStat import GroundStation, Reception\r\nfrom sat import Satellite\r\nimport numpy as np\r\nimport multiprocessing\r\nimport pandas as pd\r\nimport tqdm\r\nimport time\r\n\r\n\r\ndef point_ant_size(args): # function loop - return the availability to a given Lat/Long\r\n min_ant_size = 0.5\r\n max_ant_size = 10\r\n step_ant_size = 0.2\r\n target_availability = 99.97\r\n\r\n point = args[0]\r\n sat = args[1]\r\n reception = args[2]\r\n lat = point['Lat']\r\n long = point['Long']\r\n station = GroundStation(lat, long)\r\n sat.set_grstation(station)\r\n sat.set_reception(reception)\r\n\r\n ant_size_vector = np.arange(min_ant_size, max_ant_size, step_ant_size)\r\n for ant_size in ant_size_vector:\r\n sat.reception.ant_size = ant_size\r\n # print(sat.get_availability())\r\n if sat.get_availability() >= target_availability:\r\n # print('ant_size ', ant_size)\r\n sat.reception.ant_size = ant_size - 0.1\r\n if sat.get_availability() >= target_availability:\r\n point['ant size'] = ant_size - 0.1\r\n else:\r\n point['ant size'] = ant_size\r\n break\r\n return point\r\n\r\n\r\nif __name__ == '__main__':\r\n\r\n # reading the input table\r\n location = 'input examples\\\\'\r\n file = 'list'\r\n point_list = pd.read_csv(location + file + '.csv', sep=';', encoding='latin1')\r\n point_list['ant size'] = np.nan # creating an empty results column\r\n\r\n # ground station parameters\r\n site_lat = -3.7 # [decimal degrees]\r\n site_long = -45.9 # [decimal degrees]\r\n station = GroundStation(site_lat, site_long)\r\n\r\n # satellite parameters\r\n sat_long = -70 # [decimal degrees]\r\n freq = 12 # [Ghz]\r\n eirp = 50 # [dBW]\r\n hsat = 35800 # satellite's height [km]\r\n tau = 90 # H=0, V = 90, circ = 45\r\n b_transponder = 36 # transponder bandwidth [MHz]\r\n b_util = 9 # effective used bandwidth [MHz]\r\n backoff = 0 # not used for now!\r\n contour = 0 # not used for now!\r\n rolloff = 0.2 # roll-off factor (raised cosine filter)\r\n mod = '8PSK' # modulation (from modcod file)\r\n fec = '120/180' # FEC (from modcod file)\r\n\r\n # creating the satellite object\r\n sat = Satellite(sat_long, freq, eirp, hsat, b_transponder, b_util, backoff, contour, mod, rolloff, fec)\r\n\r\n # reception parameters\r\n ant_size = 1.2 # reception antenna diameter [m]\r\n ant_eff = 0.6 # reception antenna efficiency\r\n coupling_loss = 0 # [dB]\r\n polarization_loss = 3 # [dB]\r\n lnb_gain = 55 # [dB]\r\n lnb_noise_temp = 20 # [dB]\r\n cable_loss = 4 # [dB]\r\n max_depoint = 0.1 # maximum depointing angle [degrees]\r\n\r\n # creating a reception object\r\n reception = Reception(ant_size, ant_eff, coupling_loss, polarization_loss, lnb_gain, lnb_noise_temp, cable_loss,\r\n max_depoint)\r\n\r\n cores = multiprocessing.cpu_count() - 2\r\n\r\n p = multiprocessing.Pool(processes=cores)\r\n\r\n # 
calculation loop\r\n start_time = time.time()\r\n\r\n data = list(\r\n tqdm.tqdm(p.imap_unordered(point_ant_size, [(city, sat, reception) for index, city in point_list.iterrows()]),\r\n total=len(point_list)))\r\n p.close()\r\n\r\n point_list.drop(point_list.index, inplace=True)\r\n point_list = point_list.append(data, ignore_index=True)\r\n\r\n print(\"--- %s seconds ---\" % (time.time() - start_time))\r\n\r\n print(point_list)" }, { "alpha_fraction": 0.5815232396125793, "alphanum_fraction": 0.5890843868255615, "avg_line_length": 34.73737335205078, "blob_id": "6138f1c214983edf545ed3820dce29163150d12a", "content_id": "5237cf1fdafafc7f3df3d20bbeac6dd2091828d7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7286, "license_type": "permissive", "max_line_length": 130, "num_lines": 198, "path": "/antenna_size.py", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "from GrStat import GroundStation, Reception\r\nfrom sat import Satellite\r\nfrom pathos.pools import ParallelPool\r\nfrom scipy import interpolate\r\nimport pandas as pd\r\nimport numpy as np\r\nimport pickle\r\nimport tqdm\r\nimport datetime\r\nimport sys, os\r\n\r\n\r\n# this file contains the functions used to estimate antenna sizes and display the results in the interface\r\n\r\n\r\n# STILL NEED TO CREATE A HEADER IN THE CSV OUTPUT (mp_mp_ant_size)\r\n\r\ndef loop_graph_ant_size(args):\r\n sat = args[0]\r\n margin = args[1]\r\n snr_relaxation = args[2]\r\n ant_size = args[3]\r\n\r\n sat.reception.ant_size = round(ant_size, 1)\r\n\r\n availability = sat.get_availability(margin, snr_relaxation)\r\n\r\n return availability\r\n\r\n\r\ndef point_ant_size(args): # function loop - return the availability to a given Lat/Long\r\n min_ant_size = 0.5\r\n max_ant_size = 10\r\n step_ant_size = 0.2\r\n target_availability = args[5]\r\n\r\n point = args[0]\r\n sat = args[1]\r\n reception = args[2]\r\n margin = args[3]\r\n snr_relaxation = args[4]\r\n lat = point['Lat']\r\n long = point['Long']\r\n station = GroundStation(lat, long)\r\n sat.set_grstation(station)\r\n sat.set_reception(reception)\r\n\r\n ant_size_vector = np.arange(min_ant_size, max_ant_size, step_ant_size)\r\n for ant_size in ant_size_vector:\r\n sat.reception.ant_size = round(ant_size, 1)\r\n if sat.get_availability(margin, snr_relaxation) >= target_availability:\r\n sat.reception.ant_size = round(round(ant_size, 1) - 0.1, 1)\r\n if sat.get_availability(margin, snr_relaxation) >= target_availability:\r\n point['ant size'] = round(round(ant_size, 1) - 0.1, 1)\r\n else:\r\n point['ant size'] = round(ant_size, 1)\r\n break\r\n return point\r\n\r\n\r\ndef sp_ant_size(): # this function runs the availability for a single point and shows a complete output\r\n with open('temp\\\\args.pkl', 'rb') as f:\r\n (site_lat, site_long, sat_long, freq, max_eirp, sat_height, max_bw, bw_util,\r\n modcod, pol, roll_off, ant_eff, lnb_gain, lnb_temp, aditional_losses,\r\n cable_loss, max_depoint, max_ant_size, min_ant_size, margin, cores) = pickle.load(f)\r\n f.close()\r\n\r\n #####################################\r\n ##### ground station parameters #####\r\n #####################################\r\n\r\n # creating a ground station object\r\n station = GroundStation(site_lat, site_long)\r\n\r\n ##############################\r\n ### satellite parameters ###\r\n ##############################\r\n\r\n data = pd.read_csv('models\\\\Modulation_dB.csv', sep=';')\r\n line = data.loc[(data.Modcod) == modcod]\r\n # tech = 
line['Tech'].values[0]\r\n mod = line['Modulation'].values[0]\r\n fec = line['FEC'].values[0]\r\n\r\n # creating the satellite object\r\n satellite = Satellite(sat_long, freq, max_eirp, sat_height, max_bw, bw_util, 0, 0, mod, roll_off, fec)\r\n\r\n # assigning a ground station to the satellite\r\n satellite.set_grstation(station)\r\n\r\n ##############################\r\n ### reception parameters ####\r\n ##############################\r\n\r\n polarization_loss = 3 # polarization loss, in dB\r\n\r\n # creating the receptor object\r\n reception = Reception(None, ant_eff, aditional_losses, polarization_loss, lnb_gain, lnb_temp, cable_loss,\r\n max_depoint)\r\n\r\n # assigning a reception to the satellite link\r\n satellite.set_reception(reception) # setting the satellite link's receptor\r\n\r\n ###################################\r\n ######### OUTPUTS #########\r\n ###################################\r\n\r\n ############ SNR target's calculation ################\r\n\r\n step = 0.2\r\n interp_step = int(round((max_ant_size - min_ant_size) * 100))\r\n ant_size_vector = np.arange(min_ant_size, max_ant_size, step)\r\n ant_size_vector_interp = np.linspace(min_ant_size, max_ant_size, interp_step)\r\n\r\n # parallel loop for each antenna size\r\n pool = ParallelPool(nodes=round(cores/2)) # TODO: fix here\r\n availability_vector = list(pool.imap(loop_graph_ant_size, [(satellite, margin, 1, ant_size) for ant_size in ant_size_vector]))\r\n pool.clear()\r\n\r\n ant_size_vector = np.array(ant_size_vector)\r\n availability_vector = np.array(availability_vector)\r\n ant_size_vector = ant_size_vector[availability_vector > 60]\r\n availability_vector = availability_vector[availability_vector > 60]\r\n\r\n # a_BSpline = interpolate.make_interp_spline(ant_size_vector, availability_vector, k=2)\r\n # availability_vector_interp = a_BSpline(ant_size_vector_interp)\r\n\r\n availability_vector_interp = 0\r\n with open('temp\\\\args.pkl', 'wb') as f:\r\n pickle.dump(\r\n [ant_size_vector, ant_size_vector_interp, availability_vector, availability_vector_interp], f)\r\n f.close()\r\n\r\n return\r\n\r\n\r\ndef mp_ant_size():\r\n with open('temp\\\\args.pkl', 'rb') as f: # opening the input variables in the temp file\r\n (gr_station_path, sat_long, freq, max_eirp, sat_height, max_bw, bw_util,\r\n modcod, pol, roll_off, ant_eff, lnb_gain, lnb_temp, aditional_losses,\r\n cable_loss, max_depoint, availability_target, snr_relaxation, margin, threads) = pickle.load(f)\r\n f.close()\r\n\r\n # reading the input table\r\n # dir = 'models\\\\'\r\n # file = 'CitiesBrazil'\r\n # cities = pd.read_csv(dir + file + '.csv', sep=';', encoding='latin1')\r\n # cities['availability'] = np.nan # creating an empty results column\r\n\r\n point_list = pd.read_csv(gr_station_path, sep=';', encoding='latin1') # creating a point dataframe from csv file\r\n\r\n data = pd.read_csv('models\\\\Modulation_dB.csv', sep=';')\r\n line = data.loc[data.Modcod == modcod]\r\n # tech = line['Tech'].values[0]\r\n mod = line['Modulation'].values[0]\r\n fec = line['FEC'].values[0]\r\n\r\n # creating the satellite object\r\n sat = Satellite(sat_long, freq, max_eirp, sat_height, max_bw, bw_util, 0, 0, mod, roll_off, fec)\r\n\r\n polarization_loss = 3\r\n\r\n reception = Reception(None, ant_eff, aditional_losses, polarization_loss, lnb_gain, lnb_temp, cable_loss,\r\n max_depoint) # creating the receptor object\r\n\r\n # ======================== PARALLEL POOL =============================\r\n\r\n pool = ParallelPool(nodes=threads) # creating the 
ParallelPool\r\n\r\n sys.stderr = open('temp\\\\out.txt', 'w') # to print the output dynamically\r\n\r\n print('initializing . . .', file=sys.stderr)\r\n\r\n # running the parallel pool\r\n data = list(\r\n tqdm.tqdm(pool.imap(point_ant_size,\r\n [(point, sat, reception, margin, snr_relaxation, availability_target) for index, point in\r\n point_list.iterrows()]),\r\n total=len(point_list)))\r\n pool.clear()\r\n\r\n point_list.drop(point_list.index, inplace=True)\r\n point_list = point_list.append(data, ignore_index=True)\r\n\r\n # saving the results into a csv file\r\n\r\n dir = 'results'\r\n if not os.path.exists(dir):\r\n os.makedirs(dir)\r\n\r\n point_list.to_csv(dir + '\\\\' + 'results_ant ' + datetime.datetime.now().strftime('%y-%m-%d_%H-%M-%S') + '.csv', sep=';',\r\n encoding='latin1')\r\n\r\n print('Complete!!!', file=sys.stderr)\r\n\r\n sys.stderr.close()\r\n\r\n return\r\n" }, { "alpha_fraction": 0.7476653456687927, "alphanum_fraction": 0.7568134069442749, "avg_line_length": 47, "blob_id": "1cdb713be2aff1f1f15e8c86954163de06f4a9f3", "content_id": "bae9ac919ed8e9fcaab8dfd4e293cb3780810d86", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1176, "license_type": "permissive", "max_line_length": 126, "num_lines": 24, "path": "/mkdocs/docs/faq.md", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "# FAQ\r\n\r\n* Will functionality X be implemented in Satlink?\r\n\r\nCurrently, the plans are to add some map visualisations and more robust XPD calculations. \r\nAny other functionalities or improvements are not in planning. Feel free to suggest any via [contacts](contact.md).\r\n\r\n* I've found a bug! What should I do?\r\n\r\nFirst, check the issues page on Github (https://github.com/cfragoas/SatLink/issues). If the issue is not there, \r\n you can add it or contact me (see [contacts](contact.md)).\r\n\r\n* What's the difference between using Satlink via the command line and the graphical interface?\r\n\r\nUsing Satlink via the command line can give the user a bit more freedom. The classes and their individual functions can be used to\r\ncreate new functionalities. Using Satlink via the graphical interface can be a lot easier but also limits these possibilities.\r\n\r\n* Can Satlink be used in Linux/Windows/Mac?\r\n\r\nYes. Satlink is implemented in Python 3 and can be used in any supported OS.\r\n\r\n* I don't know the parameters to simulate one or all the calculations! 
What should I do?\r\n\r\nThe parameters can be consulted in the [code description page](code_use.md) and the [GUI usage page](gui_use.md).\r\n" }, { "alpha_fraction": 0.5338929295539856, "alphanum_fraction": 0.5654101967811584, "avg_line_length": 37.2049674987793, "blob_id": "f1f606cb7333a5fa19b7304240b8c1a4f5efcbb9", "content_id": "e515dfdd44965419e8bba2a16e544257dbbf2cde", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6345, "license_type": "permissive", "max_line_length": 131, "num_lines": 161, "path": "/GrStat.py", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "import numpy as np\r\nimport pandas as pd\r\nfrom scipy import constants as const\r\nfrom models import util\r\nimport sys\r\n\r\n\r\n# returns tabulated parameters referenced to the ground station coordinates\r\n# lat/long in degrees format\r\n\r\nclass GroundStation:\r\n\r\n def __init__(self, site_lat, site_long):\r\n self.site_lat = site_lat\r\n self.site_long = site_long\r\n\r\n # variables calculated internally in the class\r\n\r\n def get_earth_radius(self):\r\n a = 6378137 # m\r\n b = 6356752.3142 # m\r\n phi = self.site_lat\r\n radius = (np.sqrt((((a ** 2) * np.cos(phi)) ** 2 + ((b ** 2) * np.cos(phi)) ** 2) / # km\r\n ((a * np.cos(phi)) ** 2 + (b * np.cos(phi)) ** 2))) / 1000\r\n return radius\r\n\r\n def getnearpos(self, array, value):\r\n # function to find the index nearest to the values sampled in the tables (h0 and R001)\r\n idx = (np.abs(array - value)).argmin()\r\n return idx\r\n\r\n def get_R001(self):\r\n # function that returns the R001 value given the ground station coordinates (ref. ITU 837-7)\r\n # R001 - rainfall rate exceeded in 0.01% of the year\r\n\r\n R001_table = pd.read_csv('R001.csv', sep=';', index_col=0) # row=lat, column=long\r\n\r\n linhas_R001 = R001_table.index.to_numpy()\r\n colunas_R001 = R001_table.columns.to_numpy().astype('int32')\r\n\r\n R001 = (R001_table.iloc[self.getnearpos(linhas_R001, self.site_lat * 1000), self.getnearpos(colunas_R001,\r\n self.site_long * 1000)]) / 1000\r\n # must divide by one thousand because of the way the spreadsheet data is formatted (no decimal places)\r\n\r\n return R001\r\n\r\n def get_h0(self):\r\n # function that returns the h0 value given the ground station coordinates (ref. ITU 839-4)\r\n # h0 - isothermal height above sea level\r\n\r\n h0_table = pd.read_csv('h0.csv', sep=';', index_col=0) # row=lat, column=long\r\n\r\n linhas_h0 = h0_table.index.to_numpy()\r\n colunas_h0 = h0_table.columns.to_numpy().astype('int32')\r\n\r\n h0 = h0_table.iloc[\r\n self.getnearpos(linhas_h0, self.site_lat * 1000), self.getnearpos(colunas_h0, self.site_long * 1000)]\r\n\r\n return h0\r\n\r\n def get_hR(self):\r\n # function that returns the hR value given the ground station coordinates (ref. 
ITU 839-4)\r\n # hR - mean annual rain height above sea level\r\n hR = self.get_h0() + 0.36\r\n return hR\r\n\r\n\r\nclass Reception:\r\n\r\n def __init__(self, ant_size=1.2, ant_eff=0.6, coupling_loss=0.5, polarization_loss=3, lnb_gain = 60, lnb_noise_temp=20,\r\n cable_loss=5, max_depoint = 0):\r\n\r\n self.ant_size = ant_size\r\n self.ant_eff = ant_eff\r\n self.coupling_loss = coupling_loss # feeder loss\r\n self.polarization_loss = polarization_loss # polarization loss (3dB from linear to circular or vice versa)\r\n self.lnb_gain = lnb_gain\r\n self.lnb_noise_temp = lnb_noise_temp\r\n self.cable_loss = cable_loss\r\n self.max_depoint = max_depoint # maximum depointing angle between transmission and reception\r\n\r\n # variables to store the parameters calculated internally in the class\r\n self.gain = None\r\n self.t_ground = None\r\n self.t_sky = None\r\n self.t_ant = None\r\n self.total_noise_temp = None\r\n self.figure_of_merit = None # G/T\r\n self.angle_3db = None # 3 dB or half-power angle\r\n self.a_dep = None\r\n\r\n # parameters from other classes, not set or calculated in the Reception class\r\n self.freq = None\r\n self.e = None\r\n self.a_rain = None\r\n\r\n def set_parameters(self, freq, e):\r\n self.freq = freq\r\n self.e = e\r\n pass\r\n\r\n def get_antenna_gain(self):\r\n if self.freq is None:\r\n sys.exit('Need to associate a reception to a satellite first. Try satellite.set_reception(reception)!!!')\r\n else:\r\n self.gain = (10 * np.log10(self.ant_eff * (np.pi * self.ant_size * self.freq * (10 ** 9) /\r\n (const.c)) ** 2))\r\n return self.gain\r\n\r\n def get_beamwidth(self):\r\n if self.angle_3db is not None:\r\n return self.angle_3db\r\n elif self.ant_size is None or self.freq is None:\r\n sys.exit('You need to set the antenna size and system frequency before this calculation!!!')\r\n else:\r\n self.angle_3db = 70 * (const.c / (self.freq * 10 ** 9 * self.ant_size))\r\n\r\n return self.angle_3db\r\n\r\n def get_depoint_loss(self):\r\n if self.a_dep is not None:\r\n return self.a_dep\r\n self.a_dep = 12 * ((self.max_depoint/self.get_beamwidth()) ** 2)\r\n\r\n return self.a_dep\r\n\r\n def get_ground_temp(self):\r\n if self.t_ground is not None:\r\n return self.t_ground\r\n else:\r\n if self.e < -10:\r\n self.t_ground = 290\r\n elif 0 > self.e > -10:\r\n self.t_ground = 150\r\n elif 10 > self.e > 0:\r\n self.t_ground = 50\r\n elif 90 > self.e > 10:\r\n self.t_ground = 10\r\n else:\r\n sys.exit('Ground temperature can only be calculated for elevation angles between -10 and 90!!!')\r\n return self.t_ground\r\n\r\n def get_brightness_temp(self, printer=False):\r\n if self.freq is None or self.e is None:\r\n sys.exit('Need to associate a reception to a satellite first. 
Try satellite.set_reception(reception)!!!')\r\n elif self.t_sky is not None:\r\n return self.t_sky\r\n else:\r\n data = pd.read_csv('models\\\\ClearSkyTemp ITU 372.csv', sep=';', index_col=0)\r\n self.t_sky = util.curve_interpolation(self.freq, self.e, data)\r\n if printer:\r\n print('elevation: ', self.e, ' freq: ', self.freq, ' Tsky (brightness temperature): ', self.t_sky)\r\n\r\n return self.t_sky\r\n\r\n def get_antenna_noise_temp(self):\r\n if self.t_ant is not None:\r\n return self.t_ant\r\n else:\r\n self.t_ant = self.get_brightness_temp() + self.get_ground_temp()\r\n return self.t_ant\r\n\r\n" }, { "alpha_fraction": 0.666991114616394, "alphanum_fraction": 0.6725528240203857, "avg_line_length": 30.89719581604004, "blob_id": "960f8c43b6474a077817ffd352e9806c22dc6add", "content_id": "18ea75577cb1dd73068c8108e8e3f460fabe7509", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 7193, "license_type": "permissive", "max_line_length": 145, "num_lines": 214, "path": "/mkdocs/docs/code_use.md", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "#Code-based usage\r\nSatLink is structured with three main classes [Satellite](#satellite), [Reception](#reception) and \r\n[GroundStation](#groundstation). For the most functions, \r\nthe user needs to instance an object of each class and relate these objects with a **set-type** function. \r\nPlease check the [example](#examples) files.\r\n\r\nFor example, to calculate one link availability, execute the following steps:\r\n\r\nImport the modules:\r\n\r\n from GrStat import GroundStation, Reception\r\n from sat import Satellite\r\n\r\nInstanciate the objects of each class:\r\n\r\n station = GroundStation(site_lat, site_long)\r\n sat = Satellite(sat_long, freq, eirp_max, hsat, b_transponder, b_util, _, _, mod, rolloff, fec)\r\n receptor = Reception(ant_size, ant_eff, coupling_loss, polarization_loss, lnb_gain, lnb_noise_temp, cable_loss, desfoc_max)\r\n\r\nUse the set functions:\r\n\r\n sat.set_grstation(station)\r\n sat.set_reception(receptor)\r\n\r\nUse the link availability function with default values:\r\n\r\n availability = sat.get_availability()\r\n print(availability)\r\n\r\n# Examples\r\nCode-based examples can be found in Satlink main folder as **single_point_example.py**, **multi_point_example.py**,\r\n**example_single_point_ant_size.py** and **example_multi_point_ant_size.py**. These examples use the majority of class functions\r\n and emulate the GUI calculations.\r\n\r\n#Classes\r\n##Satellite\r\nSatellite is the code's main class. It has the majority of functions. Its parameters are:\r\n\r\n###Parameters\r\n\r\n* sat_long: geostationary satellite longitude in degrees\r\n* freq: frequency in GHz\r\n* eirp_max: transponder maximun E.I.R.P. 
in dBW\r\n* h_sat: satellite height in Km\r\n* b_transp: transponder bandwidth in MHz\r\n* b_util: actual used transponder bandwidth\r\n* back_off: decrease in transmitted power to avoid non-linear amplification\r\n* modulation: please check **Modulation_dB.csv** in **models** folder to see the available modulations\r\n* fec: please check **Modulation_dB.csv** in **models** folder to see the available options\r\n* roll_off: factor of spectral efficiency, ROF (Roll-Off Factor)\r\n\r\n### Functions\r\nSatellite class functions are:\r\n\r\n* to set the GroundStation object of the link the user wants to compute\r\n\r\n .set_grstation(grstation: GroundStation)\r\n\r\n* to set the Reception object of the link the user wants to compute\r\n\r\n .set_reception(reception: Reception)\r\n\r\n* to return the elevation angle between satellite and ground station (degrees)\r\n \r\n .get_elevation() # need set_grstation()\r\n\r\n\r\n* to return the azimuth angle between satellite and ground station (degrees)\r\n \r\n .get_azimuth() # need set_grstation()\r\n\r\n\r\n * to return the distance between satellite and ground station (km)\r\n \r\n .get_distance() # need set_grstation()\r\n\r\n\r\n * to return the threshold for a given modulation and FEC\r\n \r\n .get_reception_treshold()\r\n\r\n\r\n* to return multiple attenuation values: a_fs, depointing_loss, a_g, a_c, a_r, a_s, a_t, a_tot\r\n \r\n .get_link_attenuation(p=0.001, method='approx') # need set_grstation() and set_reception()\r\n\r\n\r\n * a_fs: free-space attenuation (dB)\r\n * depointing_loss: depointing for a given max. align mismatch (dB)\r\n * a_g: gaseous attenuation (dB)\r\n * a_c: cloud attenuation (dB)\r\n * a_r: rain attenuation (dB)\r\n * a_s: scintillation or tropospheric attenuation (dB)\r\n * a_t: total atmospheric attenuation (dB)\r\n * a_tot: total attenuation (free-space + atmospheric) (dB)\r\n\r\n&NewLine;\r\n\r\n* to return total attenuation values: a_t, a_tot and depoint_loss\r\n \r\n .get_total_attenuation(p= None) # need set_grstation() and set_reception()\r\n\r\n * a_t: total atmospheric attenuation (dB)\r\n * a_tot: total attenuation (free-space + atmospheric) (dB)\r\n * depoint_loss: depointing for a given max. align mismatch (dB)\r\n\r\n&NewLine;\r\n\r\n* to return the power flux density at the reception (W/m²)\r\n \r\n .get_get_power_flux_density(p=None) # need set_grstation() and set_reception()\r\n\r\n\r\n* to return the antenna noise temperature under rain conditions (K)\r\n \r\n .get_antenna_noise_rain(p=None) # need set_grstation() and set_reception()\r\n\r\n* to return reception's total noise temperature (K)\r\n \r\n .get_total_noise_temp(p=None) # need set_grstation() and set_reception()\r\n\r\n* to return the link's figure of merit (G/T) (dB/K)\r\n \r\n .get_figure_of_merit(p=None) # need set_grstation() and set_reception()\r\n\r\n\r\n* to return the carrier over noise per bandwidth unit (dB-Hz)\r\n \r\n .get_c_over_n0(p=None) # need set_grstation() and set_reception()\r\n\r\n* to return the signal over noise (dB)\r\n \r\n .get_snr(p=None) # need set_grstation() and set_reception()\r\n\r\n* to return the link availability (%/year) for a SNR + a given margin (dB) with a relaxation (dB) over the target value\r\n \r\n .get_availability(margin=0, relaxation=0.1) # need set_grstation() and set_reception()\r\n \r\n * margin: summed value over the modulation threshold to be considered. 
If margin = 0, the calculation target equals the modulation threshold.\r\n * relaxation: (+-) relaxation over the error between the target value and actual SNR in availability calculation\r\n \r\n\r\n##Reception\r\n\r\n###Parameters\r\n\r\n* ant_size: antenna diameter in meters\r\n* ant_eff: antenna efficiency (between 0 and 1)\r\n* coupling_loss: coupling losses in dB\r\n* polarization_loss: polarization loss in dB\r\n* lnb_gain: LNB gain in dB\r\n* lnb_noise_temp: LNB noise temperature in Kelvin\r\n* cable_loss: cable loss in dB\r\n* max_depoint: maximum depointing angle between transmission and reception (degrees)\r\n\r\n###Functions\r\n\r\nReception class functions are:\r\n\r\n* to return the calculated antenna gain (dB)\r\n \r\n .get_antenna_gain()\r\n\r\n* to return the antenna 3dB beamwidth (degrees)\r\n \r\n .get_beamwidth():\r\n\r\n* to return the calculated maximum depoint loss based on the maximum depointing angle (dB)\r\n \r\n .get_depoint_loss()\r\n\r\n* to return the ground temperature received in the antenna (Kelvin)\r\n \r\n .get_ground_temp(self):\r\n\r\n* to return the sky brightness temperature received in the antenna (Kelvin)\r\n \r\n .get_brightness_temp():\r\n\r\n* to return the total antenna noise temperature (Kelvin)\r\n \r\n .get_antenna_noise_temp(self):\r\n\r\n\r\n##GroundStation\r\n\r\n###Parameters\r\n\r\n* site_lat: site latitude in decimal degrees\r\n* site_long: site longitude in decimal degrees\r\n\r\n###Functions\r\nReception class functions are:\r\n\r\n* to return the earth radius from lat, long (km)\r\n \r\n .get_earth_radius(self)\r\n\r\n* to return a nearest value from a point of a table. It is used in get_R001, get_h0 and get_hR.\r\n Its an auxiliary function and should be moved to another class in the future.\r\n \r\n getnearpos(self, array, value)\r\n\r\n* to return the R001 (rain exceedance in 0.01% of year) value from ITU 837-7\r\n \r\n get_R001(self)\r\n\r\n* to return h0 (isometric height) from ITU 839-4\r\n \r\n get_h0(self)\r\n\r\n* to return hR (annual rain height) from ITU 839-4\r\n \r\n get_hR(self)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n" }, { "alpha_fraction": 0.6500366926193237, "alphanum_fraction": 0.6551724076271057, "avg_line_length": 44.20338821411133, "blob_id": "c6638d962d7d40d6ceb7d3512da55627901ee747", "content_id": "23f7dd598d46a3b568d3f8085422fa73a696ea6d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2726, "license_type": "permissive", "max_line_length": 118, "num_lines": 59, "path": "/atm_atnn.py", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "from GrStat import GroundStation, Reception\r\nfrom sat import Satellite\r\nimport sys, os\r\nimport pickle\r\nimport numpy as np\r\nimport time\r\n\r\n# This function runs the atmospheric attenuation functions and prepare the interface outputs\r\n\r\ndef calc_atm_atnn():\r\n with open('temp\\\\args.pkl', 'rb') as f:\r\n p, site_lat, site_long, ant_size, ant_eff, sat_long, freq, method = pickle.load(f)\r\n f.close()\r\n\r\n sys.stdout = open('temp\\\\out.txt', 'w') # creating a output file to show in the interface\r\n gr_station = GroundStation(site_lat, site_long)\r\n satellite = Satellite(sat_long, freq)\r\n reception = 
\r\n    satellite.set_grstation(gr_station)\r\n    satellite.set_reception(reception)\r\n\r\n    start = time.time()\r\n    # running the atmospheric attenuation calculations and storing the results\r\n    a_fs, a_dep, a_g, a_c, a_r, a_s, a_t, a_tot = satellite.get_link_attenuation(p, method)\r\n\r\n    # preparing the outputs (all outputs are rounded to 3 decimals)\r\n    print('RESULTS', file=sys.stdout)\r\n    print('', file=sys.stdout)\r\n    print('Reception characteristics:', file=sys.stdout)\r\n    print('', file=sys.stdout)\r\n    print('Earth\'s radius in lat/long: ', np.round(satellite.grstation.get_earth_radius(), 3), ' km', file=sys.stdout)\r\n    print('Elevation angle: ', np.round(satellite.get_elevation(), 3), ' degrees', file=sys.stdout)\r\n    print('Link length: ', np.round(satellite.get_distance(), 3), ' km', file=sys.stdout)\r\n    print('Ground noise temperature: ', np.round(satellite.reception.get_ground_temp(), 3), ' K', file=sys.stdout)\r\n    print('Sky brightness temperature: ', np.round(satellite.reception.get_brightness_temp(), 3), ' K', file=sys.stdout)\r\n    print('', file=sys.stdout)\r\n    print('', file=sys.stdout)\r\n\r\n    print('Link budget analysis:', file=sys.stdout)\r\n    print('', file=sys.stdout)\r\n    print(\"Gaseous attenuation: \", np.round(a_g, 3), file=sys.stdout)\r\n    print(\"Cloud attenuation: \", np.round(a_c, 3), file=sys.stdout)\r\n    print(\"Rain attenuation: \", np.round(a_r, 3), file=sys.stdout)\r\n    print(\"Scintillation attenuation: \", np.round(a_s, 3), file=sys.stdout)\r\n    print(\"Total atmospheric attenuation: \", np.round(a_t, 3), file=sys.stdout)\r\n    print('Free space attenuation: ', np.round(a_fs, 3), file=sys.stdout)\r\n    print('Free space + atmospheric + depointing attenuation: ', np.round(a_tot, 3), ' dB', file=sys.stdout)\r\n\r\n    print('', file=sys.stdout)\r\n    print('Runtime: ', np.round(time.time() - start, 2), ' s', file=sys.stdout)\r\n\r\n    if os.path.exists('temp\\\\args.pkl'):  # deleting the input variables temp file\r\n        os.remove('temp\\\\args.pkl')\r\n\r\n    sys.stdout.close()\r\n\r\n    return\r\n" }, { "alpha_fraction": 0.7476653456687927, "alphanum_fraction": 0.7568134069442749, "avg_line_length": 38.42856979370117, "blob_id": "7fa0fa98b5600c36feb2350f5bdb54b14efcbbf3", "content_id": "748eb7171feb3ab819c14dc06583a78c3b619b78", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 5248, "license_type": "permissive", "max_line_length": 281, "num_lines": 133, "path": "/README.md", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "# SatLink\n\n<img src=\"pics/LogoSatLink225_225_white.png\" alt=\"logo\" width=\"150\"/>\n\nSatLink is a Python-based application that runs specific satellite downlink calculations. It has a GUI and its main functions are:\n\n - Atmospheric attenuation calculation (via [itur])\n - Single and multi-point downlink availability calculation (input and output csv file)\n - Antenna size estimation for a desired availability (single point graphic plot and multi point csv output)\n - Save and load parameters for satellites, ground stations and reception characteristics\n - Totally free!\n\nThis project is an attempt to simplify satellite link budget calculations and to create a tool for teaching purposes. 
Please check the [**documentation**](https://cfragoas.github.io/SatLink/) for more detailed information.\n\n# GUI Interface\nFor those that don't like writing code\n\n<img src=\"https://i.imgur.com/ZMpcxgH.png\" alt=\"screenshot\" width=\"500\"/>\n\n - Simple Qt GUI made for simple usage\n - Drop-down lists can be edited via the models folder\n\nTo run the calculations via the GUI, run the python file **main_window.py**. The main window will appear. Now, just choose the functions in the action menu\n\nDetailed information about the usage of the GUI can be found in the [**documentation**](https://cfragoas.github.io/SatLink/).\n\n# Using SatLink via python commands \n SatLink consists of three main classes \n \n - Satellite class\n - Ground Station class\n - Reception class\n\nYou need to define these three objects and set their relationship\n```sh\nfrom GrStat import GroundStation, Reception\nfrom sat import Satellite\n\n# creating the objects\nstation = GroundStation(site_lat, site_long)\nsat = Satellite(sat_long, freq, eirp_max, hsat, b_transponder, b_util, _, _, mod, rolloff, fec)\nreceptor = Reception(ant_size, ant_eff, coupling_loss, polarization_loss, lnb_gain, lnb_noise_temp, cable_loss, desfoc_max)\n\n# relating the objects to the satellite\nsat.set_grstation(station)\nsat.set_reception(receptor) \n\n# example - calculating the link availability\navailability = sat.get_availability()\nprint(availability) # 0 - 100 percentage\n\n# example - calculating the power flux density at the reception point and the antenna noise under rain conditions\npw_flx = sat.get_power_flux_density()\nprint(pw_flx) # watts/m²\nant_noise = sat.reception.get_antenna_noise_temp()\nprint(ant_noise) # Kelvin\nant_noise_rain = sat.get_antenna_noise_rain()\nprint(ant_noise_rain) # Kelvin\n```\n\nThe other functions are detailed in the [**documentation**](https://cfragoas.github.io/SatLink/).\n\n### Libraries\n\nSatLink uses a number of different open-source Python libraries\n\n* [itur] - A python implementation of the ITU-R P. Recommendations to compute atmospheric attenuation in slant and horizontal paths\n* [pyqt] - PyQt is a set of Python bindings for The Qt Company's Qt application framework\n* [tqdm] - Instantly make your loops show a smart progress meter\n* [pathos] - It provides a consistent high-level interface for configuring and launching parallel computations across heterogeneous resources.\n* [pandas] - Fast, powerful, flexible and easy to use open source data analysis and manipulation tool\n* [astropy] - Common core package for Astronomy in Python and foster an ecosystem of interoperable astronomy packages\n* [numpy] - The fundamental package for scientific computing with Python\n\n### Installation\n\nSatLink is currently tested only on Python 3.\nJust copy all the folders and files to any directory and make sure all packages are installed. To install the packages, just run **first_setup.py**, located in SatLink's main folder, for a fresh package installation, or run the following commands\n\n```sh\npip install -r requirements.txt\n```\n\nor\n\n```sh\npip install itur==0.2.1\npip install tqdm==4.56.0\npip install pandas==1.2.1\npip install pathos==0.2.7\npip install astropy==4.2\npip install pyqt5==5.15.2\npip install matplotlib==3.4.1\n```\n\nRun **main_window.py** to start the application via the interface.\n\n### Contributions\n\nSince this is still an early version of the code, some problems may be found. We are totally open to contributions and bug/problem reports. 
**Please tell us!**\n\n### Future Developments\n\nSome updates are planned for the future of SatLink\n\n* Worst-month availability calculation\n* More robust XPD calculations\n\n### Authorship\n\nAll the code development was done by Christian Rodrigues.\n\nContact: [email protected]\n\n### Credits\n\n[Globo] - For supporting the very first release version of the application\n\n[Caio Alexandre] - Logo designer\n\n\n[//]: # (These are reference links used in the body of this note and get stripped out when the markdown processor does its job. There is no need to format nicely because it shouldn't be seen. Thanks SO - http://stackoverflow.com/questions/4823468/store-comments-in-markdown-syntax)\n\n\n [Globo]: <https://globoplay.globo.com/>\n [itur]: <https://github.com/iportillo/ITU-Rpy>\n [pathos]: <https://github.com/uqfoundation/pathos>\n [tqdm]: <https://github.com/tqdm/tqdm>\n [pandas]: <https://pandas.pydata.org/>\n [astropy]: <https://www.astropy.org/>\n [numpy]: <https://numpy.org/>\n [pyqt]: <https://riverbankcomputing.com/software/pyqt/intro>\n [Caio Alexandre]: <https://www.instagram.com/caioalexandredasilva> \n" }, { "alpha_fraction": 0.3980582654476166, "alphanum_fraction": 0.6213592290878296, "avg_line_length": 13, "blob_id": "b37ab1dacca9a113db1d501dd0c538e0dcc718e0", "content_id": "d8bdaf041ab72a791702f8df4d91417a3b8ad55b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 103, "license_type": "permissive", "max_line_length": 17, "num_lines": 7, "path": "/requirements.txt", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "itur==0.2.1\r\ntqdm==4.56.0\r\npandas==1.2.1\r\npathos==0.2.7\r\nastropy==4.2\r\npyqt5==5.15.2\r\nmatplotlib==3.4.1" }, { "alpha_fraction": 0.7296631336212158, "alphanum_fraction": 0.7345932722091675, "avg_line_length": 37.16128921508789, "blob_id": "85e1f7bcffd7778fd2c651c7a65605a1fb8dd9e3", "content_id": "edf83fbdf6058b87c57e8cd71af036c61148cbb3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1217, "license_type": "permissive", "max_line_length": 111, "num_lines": 31, "path": "/mkdocs/docs/index.md", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "# SatLink Documentation\r\n\r\n<p align=\"center\">\r\n  <img src=\"..\\images\\home_logo.png\" alt=\"logo\" width=\"250\"/>\r\n</p>\r\n\r\nSatLink is a Python-based application that implements satellite downlink budget calculations. The GitHub page is [https://github.com/cfragoas/Satlink](https://github.com/cfragoas/Satlink). \r\n\r\nLink budget analysis involves different aspects: free-space and atmospheric attenuation, and transmitting\r\nand receiving system characteristics such as power, antenna gains, modulations, \r\ncorrection codes, among others.\r\n\r\nSatLink has a GUI that can be used to generate results involving link attenuation, \r\navailability and antenna size estimation for one or multiple points. It also has functions to compute multiple \r\nparameters involving an earth-space downlink link-budget.\r\n\r\n<p align=\"center\">\r\n  <img src=\"..\\images\\home_print.jpeg\" alt=\"gui example\" width=\"400\"/>\r\n</p>\r\n\r\n## Contents\r\n\r\n* [Installation](installation.md)\r\n* [GUI usage](gui_use.md)\r\n* [Code-based usage](code_use.md)\r\n* [F.A.Q.](faq.md)\r\n* [Contacts](contact.md)\r\n\r\n## Want to collaborate? Please [contact](contact.md) us!!!\r\nAny new functionalities and improvements are welcome! 
Please report bugs as well.\r\n" }, { "alpha_fraction": 0.3712972402572632, "alphanum_fraction": 0.4867211580276489, "avg_line_length": 42, "blob_id": "a45e9a38523dee8065f739628faff0f40add1275", "content_id": "7f56f115e866fbff91e8dedb4a37eacca1b71a9d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3943, "license_type": "permissive", "max_line_length": 107, "num_lines": 89, "path": "/models/spec_att.py", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "import numpy as np\r\n\r\n\r\n# computation of the rain specific attenuation\r\n# according to ITU-R P.838-3 (https://www.itu.int/rec/R-REC-P.838/en)\r\n\r\nclass specific_attenuation:\r\n    # coefficients for kH, from Table 1 of ITU-R P.838-3\r\n    vars_kH = {\"aj\": [-5.33980, -0.35351, -0.23789, -0.94158],\r\n               \"bj\": [-0.10008, 1.26970, 0.86036, 0.64552],\r\n               \"cj\": [1.13098, 0.45400, 0.15354, 0.16817],\r\n               \"mk\": -0.18961,\r\n               \"ck\": 0.71147}\r\n\r\n    # coefficients for kV, from Table 2 of ITU-R P.838-3\r\n    vars_kV = {\"aj\": [-3.80595, -3.44965, -0.39902, 0.50167],\r\n               \"bj\": [0.56934, -0.22911, 0.73042, 1.07319],\r\n               \"cj\": [0.81061, 0.51059, 0.11899, 0.27195],\r\n               \"mk\": -0.16398,\r\n               \"ck\": 0.63297}\r\n\r\n    # coefficients for alfaH, from Table 3 of ITU-R P.838-3\r\n    vars_alfaH = {\"aj\": [-0.14318, 0.29591, 0.32177, -5.37610, 16.1721],\r\n                  \"bj\": [1.82442, 0.77564, 0.63773, -0.96230, -3.29980],\r\n                  \"cj\": [-0.55187, 0.19822, 0.13164, 1.47828, 3.43990],\r\n                  \"m_alfa\": 0.67849,\r\n                  \"c_alfa\": -1.95537}\r\n\r\n    # coefficients for alfaV, from Table 4 of ITU-R P.838-3\r\n    vars_alfaV = {\"aj\": [-0.07771, 0.56727, -0.20238, -48.2991, 48.5833],\r\n                  \"bj\": [2.33840, 0.95545, 1.14520, 0.791669, 0.791459],\r\n                  \"cj\": [-0.76284, 0.54039, 0.26809, 0.116226, 0.116479],\r\n                  \"m_alfa\": -0.053739,\r\n                  \"c_alfa\": 0.83433}\r\n\r\n    def eq_k(self, aj, bj, cj, mk, ck, f):\r\n        # this function computes kH or kV, per equation (2) of ITU-R P.838-3\r\n        aj = np.array(aj)\r\n        bj = np.array(bj)\r\n        cj = np.array(cj)\r\n        summation = np.sum(aj * np.exp(-(((np.log10(f) - bj) / cj) ** 2))) + mk * np.log10(f) + ck\r\n\r\n        return 10 ** summation\r\n\r\n    def eq_alfa(self, aj, bj, cj, m_alfa, c_alfa, f):\r\n        # this function computes alfaH or alfaV, per equation (3) of ITU-R P.838-3\r\n        aj = np.array(aj)\r\n        bj = np.array(bj)\r\n        cj = np.array(cj)\r\n        summation = np.sum(aj * np.exp(-(((np.log10(f) - bj) / cj) ** 2))) + m_alfa * np.log10(f) + c_alfa\r\n\r\n        return summation\r\n\r\n    def get_k(self, f, E, tau):\r\n        # this function computes k, per equation (4) of ITU-R P.838-3\r\n        E = np.radians(E)\r\n        tau = np.radians(tau)\r\n\r\n        kH = self.eq_k(self.vars_kH['aj'], self.vars_kH['bj'], self.vars_kH['cj'], self.vars_kH['mk'],\r\n                       self.vars_kH['ck'], f)\r\n        kV = self.eq_k(self.vars_kV['aj'], self.vars_kV['bj'], self.vars_kV['cj'], self.vars_kV['mk'],\r\n                       self.vars_kV['ck'], f)\r\n\r\n        # ITU-R P.838-3 eq. (4): k = [kH + kV + (kH - kV) * cos^2(E) * cos(2*tau)] / 2\r\n        k = (kH + kV + (kH - kV) * (np.cos(E) ** 2) * (np.cos(2 * tau))) / 2\r\n        return k, kV, kH\r\n\r\n    def get_alfa(self, f, E, tau):\r\n        # this function computes alfa, per equation (5) of ITU-R P.838-3\r\n        E = np.radians(E)\r\n        tau = np.radians(tau)\r\n\r\n        alfaH = self.eq_alfa(self.vars_alfaH['aj'], self.vars_alfaH['bj'], self.vars_alfaH['cj'],\r\n                             self.vars_alfaH['m_alfa'],\r\n                             self.vars_alfaH['c_alfa'], f)\r\n        alfaV = self.eq_alfa(self.vars_alfaV['aj'], self.vars_alfaV['bj'], self.vars_alfaV['cj'],\r\n                             self.vars_alfaV['m_alfa'],\r\n                             self.vars_alfaV['c_alfa'], f)\r\n        k, kV, kH = self.get_k(f, E, np.degrees(tau))
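\r\n        # ITU-R P.838-3 eq. (5): alfa = [kH*alfaH + kV*alfaV + (kH*alfaH - kV*alfaV) * cos^2(E) * cos(2*tau)] / (2*k)\r\n        # (tau was converted to radians above, so it is converted back to degrees for get_k, which expects degrees)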
\r\n        alfa = ((kH * alfaH + kV * alfaV + (kH * alfaH - kV * alfaV) * (np.cos(E) ** 2) *\r\n                 (np.cos(2 * tau))) / (2 * k))\r\n\r\n        return alfa, alfaV, alfaH\r\n\r\n    def get_gamaR(self, R001, f, E, tau):\r\n        # this function computes the specific attenuation coefficient gamaR, per equation (1) of ITU-R P.838-3\r\n        k, _, _ = self.get_k(f, E, tau)\r\n        alfa, _, _ = self.get_alfa(f, E, tau)\r\n        gamaR = k * (R001) ** alfa\r\n        return gamaR\r\n" }, { "alpha_fraction": 0.7158253192901611, "alphanum_fraction": 0.7311400771141052, "avg_line_length": 43.25640869140625, "blob_id": "737f2a5a63de7d5c0cd47e80aa6df8de732371fd", "content_id": "da5270508140144bce2988bfa1adcacba5d06b01", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1763, "license_type": "permissive", "max_line_length": 129, "num_lines": 39, "path": "/mkdocs/docs/installation.md", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "# SatLink Installation\r\n\r\nSatLink needs a Python 3 installation to run. An environment with Python 3.9 is recommended.\r\n\r\nYou can download Python at [python.org](https://www.python.org/downloads/).\r\n\r\nTo install, just copy all the folders and files to any directory and make sure all packages/libraries are installed.\r\n\r\n## Packages\r\n\r\nSatLink has dependencies and needs some packages to run. They are: [itur](https://pypi.org/project/itur/#description),\r\n[Numpy](https://numpy.org/), [tqdm](https://github.com/tqdm/tqdm), [pathos](https://github.com/uqfoundation/pathos),\r\n[pandas](https://pandas.pydata.org/), [Astropy](https://www.astropy.org/).\r\n\r\nTo use the graphical user interface, it also needs [PyQt5](https://riverbankcomputing.com/software/pyqt/intro).\r\n\r\nOne can install SatLink simply by running **first_setup.py**. It will install all the packages, including PyQt for GUI usage.\r\n\r\nAlternatively, run the following command:\r\n\r\n    pip install -r requirements.txt\r\n\r\nIf an IDE, like [PyCharm](https://www.jetbrains.com/pt-br/pycharm/), \r\nis being used, it will automatically detect the requirements file and ask to install the packages (including PyQt).\r\n\r\nLastly, the packages can be installed individually. Here's the code with the currently tested versions:\r\n\r\n    pip install itur==0.2.1\r\n    pip install tqdm==4.56.0\r\n    pip install pandas==1.2.1\r\n    pip install pathos==0.2.7\r\n    pip install astropy==4.2\r\n    pip install pyqt5==5.15.2\r\n    pip install matplotlib==3.4.1\r\n\r\n## Using SatLink\r\nRun **main_window.py** to start the user interface. Please refer to [GUI usage](gui_use.md) for more information.\r\n\r\nFor more detailed information about the code-based functions and classes, please refer to [Code-based usage](code_use.md).
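\r\n\r\nTo quickly check that the required packages are importable, a minimal sanity sketch (not part of SatLink itself) can be run in the same environment:\r\n\r\n    import itur, tqdm, pandas, pathos, astropy, matplotlib\r\n    import PyQt5\r\n    print('SatLink dependencies found')\r\n"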
}, { "alpha_fraction": 0.653598964214325, "alphanum_fraction": 0.6805912852287292, "avg_line_length": 33.21977996826172, "blob_id": "74019029ebfe3b9df4baee197973742069e2150f", "content_id": "8da4f30b41db450a69c090c7591a8a38611dfd90", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3112, "license_type": "permissive", "max_line_length": 112, "num_lines": 91, "path": "/example_single_point_ant_size.py", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "from GrStat import GroundStation, Reception\r\nfrom sat import Satellite\r\nfrom scipy import interpolate\r\nimport matplotlib.pyplot as plt\r\nimport numpy as np\r\n\r\n# ground station parameters\r\nsite_lat = -3.7  # [decimal degrees]\r\nsite_long = -45.9  # [decimal degrees]\r\nstation = GroundStation(site_lat, site_long)\r\n\r\n# satellite parameters\r\nsat_long = -70  # [decimal degrees]\r\nfreq = 12  # [GHz]\r\neirp = 54  # [dBW]\r\nhsat = 35800  # satellite's height [km]\r\ntau = 90  # H=0, V = 90, circ = 45\r\nb_transponder = 36  # transponder bandwidth [MHz]\r\nb_util = 9  # effective used bandwidth [MHz]\r\nbackoff = 0  # not used for now!\r\ncontour = 0  # not used for now!\r\nrolloff = 0.2  # roll-off factor (raised cosine filter)\r\nmod = '8PSK'  # modulation (from modcod file)\r\nfec = '120/180'  # FEC (from modcod file)\r\n\r\n# creating the satellite object\r\nsat = Satellite(sat_long, freq, eirp, hsat, b_transponder, b_util, backoff, contour, mod, rolloff, fec)\r\n\r\nsat.set_grstation(station)  # relating the ground station object to the satellite one\r\n\r\n# reception parameters\r\nant_size = 1.2  # reception antenna diameter [m]\r\nant_eff = 0.6  # reception antenna efficiency\r\ncoupling_loss = 0  # [dB]\r\npolarization_loss = 3  # [dB]\r\nlnb_gain = 55  # [dB]\r\nlnb_noise_temp = 20  # [dB]\r\ncable_loss = 4  # [dB]\r\nmax_depoint = 0.1  # maximum depointing angle [degrees]\r\n\r\n# creating a reception object\r\nreception = Reception(ant_size, ant_eff, coupling_loss, polarization_loss, lnb_gain, lnb_noise_temp, cable_loss,\r\n                      max_depoint)\r\n\r\nsat.set_reception(reception)  # relating the reception object to the satellite one\r\n\r\navailability_vector = []\r\n\r\nant_min = 0.6\r\nant_max = 4.5\r\nstep = 0.2\r\ninterp_step = int(round((ant_max - ant_min) * 100))\r\n\r\nant_size_vector = np.arange(ant_min, ant_max, step)\r\nx_new = np.linspace(ant_min, ant_max, interp_step)\r\n\r\ncheck = False\r\nfor ant_size in ant_size_vector:\r\n    sat.reception.ant_size = ant_size\r\n    availability_vector.append(sat.get_availability())\r\n    # print(sat.get_availability())\r\n    if sat.get_availability() >= 99.999 and check is False:\r\n        max_availability_x = ant_size\r\n        max_availability_y = sat.get_availability()\r\n        check = True\r\n\r\na_BSpline = interpolate.make_interp_spline(ant_size_vector, availability_vector)\r\ny_new = a_BSpline(x_new)\r\nfig, (ax1, ax2) = plt.subplots(2)\r\nfig.suptitle('Antenna Size x Year availability')\r\navailability_time_vector = (1 - np.array(availability_vector)/100) * 8760\r\n\r\n# ax1.plot(ant_size_vector, availability_vector)\r\nax1.plot(x_new, y_new)\r\n\r\nax2.plot(ant_size_vector, availability_time_vector)\r\n# ax1.xlabel(' Antenna Size (m)')\r\n# ax1.ylabel(' Availability (%year)')\r\n# ax2.ylabel(' Availability (hours/year)')\r\n\r\nplt.setp(ax1, ylabel='Availability (%year)')\r\nplt.setp(ax2, xlabel='Antenna size')\r\nplt.setp(ax2, ylabel='Availability (hours/year)')\r\nplt.show()
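\r\n\r\n# NOTE: the commented-out block below is an earlier, single-axes version of the same\r\n# availability plot, kept for reference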
\r\n# plt.plot(ant_size_vector, availability_vector)\r\n# plt.plot(max_availability_x, max_availability_y)\r\n# plt.xlabel(' Antenna Size (m)')\r\n# plt.ylabel(' Availability (%year)')\r\n# plt.title('Antenna Size x Year availability')\r\n# plt.show()" }, { "alpha_fraction": 0.5568408370018005, "alphanum_fraction": 0.5624251961708069, "avg_line_length": 32.81944274902344, "blob_id": "09426af886165000c5577536c1583d45372e5268", "content_id": "6c903605451f9848f26f69993b95ad9dfc447d7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2507, "license_type": "permissive", "max_line_length": 101, "num_lines": 72, "path": "/file_dialog.py", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "import sys\r\nfrom PyQt5.QtWidgets import QApplication, QWidget, QInputDialog, QLineEdit, QFileDialog\r\nfrom PyQt5.QtGui import QIcon\r\nfrom PyQt5 import QtCore, QtGui\r\nimport pickle\r\n\r\n\r\n# file dialog window\r\n# used to save and load the ground station, satellite and reception files\r\n# it uses the function open_dialog(opt, type) to run the window\r\n# opt chooses between the 'save' and 'load' options\r\n# type is a string used to define the extension of the file\r\n\r\nclass Dialog(QWidget):\r\n\r\n    def __init__(self, opt, type):\r\n        super().__init__()\r\n        self.title = 'PyQt5 file dialogs - pythonspot.com'\r\n        self.left = 10\r\n        self.top = 10\r\n        self.width = 640\r\n        self.height = 480\r\n        self.opt = opt\r\n        self.type = type\r\n        self.initUI()\r\n        self.fileName = None\r\n\r\n    def initUI(self):\r\n        self.setWindowTitle(self.title)\r\n        self.setGeometry(self.left, self.top, self.width, self.height)\r\n\r\n        if self.opt == 'save':\r\n            self.saveFileDialog()\r\n        elif self.opt == 'load':\r\n            self.openFileNameDialog()\r\n        else:\r\n            sys.exit('Dialog box option not expected!')\r\n        # self.openFileNamesDialog()\r\n\r\n    def openFileNameDialog(self):\r\n        options = QFileDialog.Options()\r\n        options |= QFileDialog.DontUseNativeDialog\r\n        fileDialog = QFileDialog(self)\r\n        fileDialog.setAttribute(QtCore.Qt.WA_QuitOnClose, False)\r\n        self.fileName, _ = fileDialog.getOpenFileName(None, \"Open File\", \"\",\r\n                                                      self.type + \";;All Files (*)\", options=options)\r\n        if self.fileName:\r\n            with open('temp\\\\load.pkl', 'wb') as f:\r\n                pickle.dump(self.fileName, f)\r\n            f.close()\r\n            return\r\n        else:\r\n            return\r\n\r\n    def saveFileDialog(self):\r\n        options = QFileDialog.Options()\r\n        options |= QFileDialog.DontUseNativeDialog\r\n        fileDialog = QFileDialog(self)\r\n        fileDialog.setAttribute(QtCore.Qt.WA_QuitOnClose, False)\r\n        self.fileName, _ = fileDialog.getSaveFileName(None, \"Save File\", \"\",\r\n                                                      self.type + \";;All Files (*)\", options=options)\r\n        if self.fileName:\r\n            with open('temp\\\\save.pkl', 'wb') as f:\r\n                pickle.dump(self.fileName, f)\r\n            f.close()\r\n            return\r\n        else:\r\n            return\r\n\r\n\r\ndef open_dialog(opt, type):\r\n    Dialog(opt, type)\r\n" }, { "alpha_fraction": 0.5366364121437073, "alphanum_fraction": 0.6015352606773376, "avg_line_length": 24.766355514526367, "blob_id": "f8bc7170370e0966ae2bbc1cb1dbb376a37894b4", "content_id": "b9c3faf032eee9d715c7bb04816b1a3331adc86c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2894, "license_type": "permissive", "max_line_length": 114, "num_lines": 107, "path": "/models/RainAtt.py", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "import numpy as np\r\nfrom numpy import log as ln\r\nfrom models.spec_att import specific_attenuation\r\nfrom sat import satellite\r\nfrom GrStat import ground_station
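\r\n\r\n# slant-path rain attenuation following the step-by-step procedure of ITU-R P.618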
\r\n\r\n# INPUT VARIABLES\r\n\r\nsite_lat = -3.7\r\nsite_long = -45.9\r\nsat_long = -70\r\nf = 3.5\r\ntau = 90  # H=0, V = 90, circ = 45\r\nhS = 0.447  # ground station height\r\nant_diam = 1.2\r\np = 0.01\r\n\r\nstation = ground_station(site_lat, site_long, ant_diam)\r\n# first step - determine R0.01\r\n\r\nR001 = station.get_R001()\r\n\r\n# second step - compute the effective rain height hR\r\n\r\nhR = station.get_hR()\r\n\r\n# third step - compute the slant-path length through the rain, LS\r\n# hS - ground station height\r\n# E - elevation angle\r\n\r\nsat = satellite(sat_long, f)\r\nE = sat.get_elevation(site_lat, site_long)\r\nLS = (hR - hS) / np.sin(np.radians(E))\r\n\r\n# fourth step - compute the horizontal projection (LG) of the slant path\r\n\r\nLG = LS * np.cos(np.radians(E))\r\n\r\n# fifth step - compute the specific attenuation gamaR\r\n# this is done through the specific_attenuation class (ref. ITU-R P.838-3)\r\n\r\ngamaR = specific_attenuation().get_gamaR(R001, f, E, tau)\r\n\r\n# sixth step - compute the horizontal reduction factor r001 (ITU-R P.618)\r\n\r\nr001 = (1 + 0.78 * np.sqrt(LG * gamaR / f) - 0.38 * (1 - np.exp(-2 * LG))) ** (-1)\r\n\r\n# seventh step - compute the vertical adjustment factor v001\r\n# to obtain v001, other variables must be computed first - zeta, LR and chi\r\n\r\n# zeta (degrees)\r\nzeta = np.degrees(np.arctan((hR - hS) / (LG * r001)))\r\n\r\n# LR (km)\r\nif zeta > E:\r\n    LR = LG * r001 / np.cos(np.radians(E))\r\nelse:\r\n    LR = (hR - hS) / np.sin(np.radians(E))\r\n# chi\r\nif abs(site_lat) < 36:\r\n    chi = 36 - abs(site_lat)\r\nelse:\r\n    chi = 0\r\n\r\nv001 = (1 + np.sqrt(np.sin(np.radians(E))) * (\r\n        31 * (1 - np.exp(-E / (1 + chi))) * (np.sqrt(LR * gamaR) / f ** 2) - 0.45)) ** (-1)\r\n\r\n# eighth step - compute the effective path length LE (km)\r\n\r\nLE = LR * v001\r\n\r\n# ninth step - finally, the attenuation exceeded for 0.01% of an average year, A001\r\n\r\nA001 = gamaR * LE\r\n\r\n# CONVERSION TO RAIN EXCEEDANCE PROBABILITIES p OTHER THAN 0.01% (LESS THAN 5%)\r\n\r\nif p > 0.0001:\r\n    # determination of beta\r\n\r\n    if p >= 0.01 or abs(site_lat) > 36:\r\n        beta = 0\r\n    elif p <= 0.01 and abs(site_lat) < 36 and E > 25:\r\n        beta = -0.005 * (abs(site_lat) - 36)\r\n    else:\r\n        beta = -0.005 * (abs(site_lat) - 36) + 1.8 - 4.25 * np.sin(np.radians(E))\r\n\r\n    # converting the A001 value to the Ap value for a different p\r\n\r\n    Ap = A001 * (p / 0.01) ** -(0.655 + 0.033 * ln(p) - 0.045 * ln(A001) - beta * (1 - p) * np.sin(np.radians(E)))\r\n\r\nelse:\r\n\r\n    Ap = A001\r\nprint('E ', E)\r\nprint('hR ', hR)\r\nprint('LS ', LS)\r\nprint('LG ', LG)\r\nprint('zeta ', zeta)\r\nprint('chi ', chi)\r\nprint('gamaR ', gamaR)\r\nprint('r001 ', r001)\r\nprint('v001 ', v001)\r\nprint('LE ', LE)\r\nprint('AP ', Ap)\r\nprint('A001 ', A001)\r\n" }, { "alpha_fraction": 0.5985184907913208, "alphanum_fraction": 0.6340740919113159, "avg_line_length": 35.61111068725586, "blob_id": "932caf519d3c8e90d0f1a7621495ebb062aac26f", "content_id": "c178e8fc976423750fc2dd91b1dcbfe75b98d677", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 675, "license_type": "permissive", "max_line_length": 113, "num_lines": 18, "path": "/first_setup.py", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "import sys\r\nimport subprocess\r\n\r\n# package list must follow the installation guide in README.md\r\npackage_list = ('itur==0.2.1', 'tqdm==4.56.0', 'pandas==1.2.1', 'pathos==0.2.7', 'astropy==4.2', 'pyqt5==5.15.2',\r\n                'matplotlib==3.4.1')
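\r\n# NOTE: the pinned versions above mirror requirements.txt; keep the two lists in sync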
\r\n\r\nfor package in package_list:\r\n    # implement pip as a subprocess:\r\n    subprocess.check_call([sys.executable, '-m', 'pip', 'install', package])\r\n\r\n    # process output with an API in the subprocess module:\r\n    reqs = subprocess.check_output([sys.executable, '-m', 'pip', 'freeze'])\r\n    installed_packages = [r.decode().split('==')[0] for r in reqs.split()]\r\n\r\n    print(installed_packages)\r\n\r\nprint('Process Completed!!!')" }, { "alpha_fraction": 0.7215891480445862, "alphanum_fraction": 0.7234538197517395, "avg_line_length": 41.768707275390625, "blob_id": "bc3ee179bf5c5b80c52b29a4767274b4d27d6449", "content_id": "9b86d5cfef983304576cb40d0ef402192f0edc20", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 19306, "license_type": "permissive", "max_line_length": 156, "num_lines": 441, "path": "/mkdocs/docs/gui_use.md", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "# Graphical User Interface\r\n\r\nTo use SatLink's graphical interface, the user must run **main_window.py** via Python.\r\n\r\nFrom there, the user has four menu options: [File](#file), [Single Point Calculation](#single-point-calculation), \r\n[List Calculation](#list-calculation) and [Help](#help).\r\n\r\n![home](..\\images\\home.png)\r\n\r\n&NewLine;\r\n\r\n## File\r\nLike SatLink's main classes, the graphical user interface uses three types of parameters: [satellite parameters](#satellite), \r\n[reception parameters](#reception) and [ground station parameters](#ground-station). Each one has a unique file extension and can be saved/loaded. \r\nThe File menu lists page options to manage these files outside of the calculation windows.\r\n\r\nSee the [save/load dialog](#saveload-dialog-boxes) window for more details on these options and [file types](#file-types).\r\n\r\n![file menu](..\\images\\file_menu.png)\r\n\r\n### Satellite\r\n\r\nTo create a satellite, the user must fill in the following fields:\r\n\r\n* Name: satellite identification name \r\n* Longitude: geostationary satellite longitude in degrees\r\n* Frequency: frequency in GHz\r\n* E.I.R.P: transponder maximum E.I.R.P. in dBW\r\n* Altitude: satellite height in km\r\n* Transponder's max bandwidth: transponder bandwidth in MHz\r\n* Effective bandwidth: actual used transponder bandwidth\r\n* back_off: decrease in transmitted power to avoid non-linear amplification\r\n* Modulation: modulations and FEC combinations from the available list of modulations/FECs\r\n* Roll-off: factor of spectral efficiency, ROF (Roll-Off Factor)\r\n* Polarization: horizontal, vertical or circular polarization\r\n\r\n![satellite page](..\\images\\satellite_page.png)\r\n\r\nClick the \"Save\" button to save satellite parameters to a .sat file. \r\n\r\nClick the \"Load\" button to load satellite parameters from a .sat file.\r\n\r\nClick the \"Clear\" button to clear all fields on the screen.\r\n\r\n### Ground Station\r\n\r\nTo create a ground station, the user must fill in the following fields:\r\n\r\n* Name: ground station identification name \r\n* Latitude: site latitude in decimal degrees\r\n* Longitude: site longitude in decimal degrees\r\n\r\n![ground station page](..\\images\\grd_station_page.png)\r\n\r\nClick the \"Save\" button to save ground station parameters to a .gst file. 
\r\n\r\nClick on \"Load\" button to load satellite parameters from a .gst file.\r\n\r\nClick on \"Clear\" button to clear all fields on the screen.\r\n\r\n### Reception\r\n\r\nTo create a reception system, one user must fill the following fields:\r\n\r\n* Name: reception identification name \r\n* Antenna size: antenna diameter in meters\r\n* Antenna efficiency: antenna efficiency (between 0 and 1)\r\n* LNB gain: LNB (Low Noise Block) in dB\r\n* LNB noise temp.: LNB noise temperature in Kelvin\r\n* Additional losses: connection and any other losses considered at reception in dB\r\n* Cable loss: cable loss in dB\r\n* Maximum depointing: maximum depointing angle between transmission and reception (degrees)\r\n\r\n![reception page](..\\images\\reception_page.png)\r\n\r\nClick on \"Save\" button to save satellite parameters to a .rcp file. \r\n\r\nClick on \"Load\" button to load satellite parameters from a .rcp file.\r\n\r\nClick on \"Clear\" button to clear all fields on the screen.\r\n\r\n## Single Point Calculation\r\n\r\nSingle point calculations consists in 3 types of functionalities: [Atmospheric Attenuation](#atmospheric-attenuation), \r\n[Downlink Performance](#downlink-performance) and [Antenna Size](#antenna-size). This type of operation returns a complete analysis in a report type format.\r\n\r\n![single point menu](..\\images\\single_menu.png)\r\n\r\n### Atmospheric Attenuation\r\n\r\nIn this screen, the user can estimate complete atmospheric attenuation data with minimum parameters.\r\n\r\n#### Inputs\r\n\r\n* **Reception parameters** (ground station coordinates + reception antenna parameters):\r\n * Latitude: site longitude in decimal degrees\r\n * Longitude: site longitude in decimal degrees\r\n * Antenna size: antenna diameter in meters\r\n * Antenna efficiency: antenna efficiency (between 0 and 1) \r\n* **Satellite parameters**:\r\n * Longitude: geostationary satellite longitude in degrees\r\n * Frequency: frequency in GHz\r\n * Polarization: horizontal, vertical or circular polarization\r\n* Excess % of time per year: percentage of time the values are exceeded\r\n* Method: exact or approximate calculation methods from ITU 676\r\n \r\nDefault Satellites box has a satellite list that sets the longitude coordinates accordingly.\r\n\r\n#### Buttons\r\n\r\n* Load Ground Station: loads latitude and longitude from a .gst file.\r\n* Load Reception: loads antenna size and antenna efficiency from a .rcp file.\r\n* Load Satellite: loads satellite parameters from a .sat file.\r\n\r\n\r\n\r\n![single point atm](..\\images\\single_atm_calc.png)\r\n\r\n#### Outputs\r\n\r\n* Earth's radius in lat/long (km)\r\n* Elevation angle (degrees)\r\n* Link length (km)\r\n* Ground noise temperature (K)\r\n* Sky brightness temperature (K)\r\n* Antenna noise temperature (K)\r\n* Antenna noise temperature w/ rain (K)\r\n* Total noise temperature (K)\r\n* Reception antenna gain (dBi)\r\n* Reception antenna 3dB beamwidth (degrees)\r\n* Figure of Merit (dB/K)\r\n* Gaseous attenuation (dB)\r\n* Cloud attenuation (dB)\r\n* Rain attenuation (dB)\r\n* Scintillation attenuation (dB)\r\n* Total atmospheric attenuation (dB)\r\n* Free space attenuation (dB)\r\n* Free space + atmospheric + depointing attenuation (dB)\r\n\r\n\r\n### Downlink Performance\r\n\r\nIn this screen, the user can estimate a link budget and availability of a satellite downlink.\r\n\r\n#### Inputs\r\n\r\n* **Ground Station parameters**\r\n * Name: ground station identification name \r\n * Latitude: site latitude in decimal degrees\r\n * 
Longitude: site longitude in decimal degrees\r\n* **Satellite parameters**\r\n    * Name: satellite identification name \r\n    * Longitude: geostationary satellite longitude in degrees\r\n    * Frequency: frequency in GHz\r\n    * E.I.R.P: transponder maximum E.I.R.P. in dBW\r\n    * Altitude: satellite height in km\r\n    * Transponder's max bandwidth: transponder bandwidth in MHz\r\n    * Effective bandwidth: actual used transponder bandwidth\r\n    * back_off: decrease in transmitted power to avoid non-linear amplification\r\n    * Modulation: modulations and FEC combinations from the available list of modulations/FECs\r\n    * Roll-off: factor of spectral efficiency, ROF (Roll-Off Factor)\r\n    * Polarization: horizontal, vertical or circular polarization\r\n* **Reception parameters**\r\n    * Name: reception identification name \r\n    * Antenna size: antenna diameter in meters\r\n    * Antenna efficiency: antenna efficiency (between 0 and 1)\r\n    * LNB gain: LNB (Low Noise Block) gain in dB\r\n    * LNB noise temp.: LNB noise temperature in Kelvin\r\n    * Additional losses: connection and any other losses considered in the reception in dB\r\n    * Cable loss: cable loss in dB\r\n    * Maximum depointing: maximum depointing angle between transmission and reception (degrees)\r\n* **Calculation parameters**\r\n    * SNR target relaxation: (+-) relaxation over the error between the target value and the actual SNR in the availability\r\n    calculation\r\n    * Margin: summed value over the modulation threshold to be considered. If margin = 0, \r\n    the calculation target equals the modulation threshold.\r\n\r\nThe Default Satellites box has a satellite list that sets the longitude coordinates accordingly.\r\n\r\n#### Buttons\r\n\r\n* **Ground Station buttons**\r\n    * Clear: clears ground station fields\r\n    * Load: loads ground station parameters from a .gst file.\r\n    * Save: saves ground station parameters into a .gst file.\r\n* **Reception buttons**\r\n    * Clear: clears reception fields\r\n    * Load: loads reception parameters from a .rcp file.\r\n    * Save: saves reception parameters into a .rcp file.\r\n* **Satellite buttons**\r\n    * Clear: clears satellite fields\r\n    * Load: loads satellite parameters from a .sat file.\r\n    * Save: saves satellite parameters into a .sat file.\r\n* Calculate: runs the link performance calculation.\r\n\r\n![single point atm](..\\images\\single_down_perf.png)\r\n\r\n#### Outputs\r\n\r\n* Link budget at 0.001% of the year\r\n    * C/N0 (dB)\r\n    * SNR (dB)\r\n* Actual SNR target's availability (year%)\r\n* Reception characteristics:\r\n    * Earth's radius in lat/long (km)\r\n    * Elevation angle (degrees)\r\n    * Link length (km)\r\n    * Ground noise temperature (K)\r\n    * Sky brightness temperature (K)\r\n    * Antenna noise temperature (K)\r\n    * Antenna noise temperature w/ rain (K)\r\n    * Total noise temperature (K)\r\n    * Reception antenna gain (dBi)\r\n    * Reception antenna 3dB beamwidth (degrees)\r\n    * Figure of Merit (dB/K)\r\n* Link budget analysis:\r\n    * Gaseous attenuation (dB)\r\n    * Cloud attenuation (dB)\r\n    * Rain attenuation (dB)\r\n    * Scintillation attenuation (dB)\r\n    * Total atmospheric attenuation (dB)\r\n    * Free space attenuation (dB)\r\n    * Free space + atmospheric + depointing attenuation (dB)\r\n    * Reception threshold (SNR) (dB)\r\n\r\n### Antenna Size\r\n\r\nIn this screen, the user can estimate the availability for multiple antenna sizes of a satellite downlink.\r\n\r\n#### Inputs\r\n\r\n* **Ground Station parameters**\r\n    * Name: ground station identification name \r\n    * Latitude: site latitude in decimal 
degrees\r\n    * Longitude: site longitude in decimal degrees\r\n* **Satellite parameters**\r\n    * Name: satellite identification name \r\n    * Longitude: geostationary satellite longitude in degrees\r\n    * Frequency: frequency in GHz\r\n    * E.I.R.P: transponder maximum E.I.R.P. in dBW\r\n    * Altitude: satellite height in km\r\n    * Transponder's max bandwidth: transponder bandwidth in MHz\r\n    * Effective bandwidth: actual used transponder bandwidth\r\n    * back_off: decrease in transmitted power to avoid non-linear amplification\r\n    * Modulation: modulations and FEC combinations from the available list of modulations/FECs\r\n    * Roll-off: factor of spectral efficiency, ROF (Roll-Off Factor)\r\n    * Polarization: horizontal, vertical or circular polarization\r\n* **Reception parameters**\r\n    * Name: reception identification name\r\n    * Antenna efficiency: antenna efficiency (between 0 and 1)\r\n    * LNB gain: LNB (Low Noise Block) gain in dB\r\n    * LNB noise temp.: LNB noise temperature in Kelvin\r\n    * Additional losses: connection and any other losses considered in the reception in dB\r\n    * Cable loss: cable loss in dB\r\n    * Maximum depointing: maximum depointing angle between transmission and reception (degrees)\r\n* **Calculation parameters**\r\n    * Ant. min. size: minimum antenna diameter to be calculated in meters\r\n    * Ant. max. size: maximum antenna diameter to be calculated in meters\r\n    * Margin: summed value over the modulation threshold to be considered. If margin = 0, \r\n    the calculation target equals the modulation threshold.\r\n\r\nThe Default Satellites box has a satellite list that sets the longitude coordinates accordingly.\r\n\r\n#### Buttons\r\n\r\n* **Ground Station buttons**\r\n    * Clear: clears ground station fields\r\n    * Load: loads ground station parameters from a .gst file.\r\n    * Save: saves ground station parameters into a .gst file.\r\n* **Reception buttons**\r\n    * Clear: clears reception fields\r\n    * Load: loads reception parameters from a .rcp file.\r\n    * Save: saves reception parameters into a .rcp file.\r\n* **Satellite buttons**\r\n    * Clear: clears satellite fields\r\n    * Load: loads satellite parameters from a .sat file.\r\n    * Save: saves satellite parameters into a .sat file.\r\n* Calculate button: runs the link performance calculation.\r\n* Export graph button: saves the displayed graph as a .png image file.\r\n\r\n![single ant size](..\\images\\single_ant_sz_calc.png)\r\n\r\n#### Outputs\r\n\r\nThis operation has two outputs: a text list of antenna size versus availability, and \r\na matplotlib graph that is also displayed.\r\n\r\n## List Calculation \r\n\r\nMulti-point calculation consists of two functionalities: **Downlink Performance** and **Antenna Size**. \r\nThese operations use a .csv file as input and return a new .csv file with a result column.\r\n\r\n![multi point menu](..\\images\\list_menu.png)\r\n\r\n### Downlink Performance\r\n\r\nIn this screen, the user can estimate the availability of a satellite downlink for multiple points.\r\n\r\n#### Inputs\r\n* Path: path to the input .csv file \r\n    * .csv file columns: Name, Lat, Long, Delta Footprint (optional)\r\n    * Name: point name\r\n    * Lat: point latitude in decimal degrees\r\n    * Longitude: point longitude in decimal degrees\r\n    * Delta Footprint: optional argument. 
Represents the difference between the maximum received power and the power \r\n    received at the chosen coordinate (in case of footprint differences between coordinates)\r\n    * a .csv example can be found in the **input examples** folder\r\n* **Satellite parameters**\r\n    * Name: satellite identification name \r\n    * Longitude: geostationary satellite longitude in degrees\r\n    * Frequency: frequency in GHz\r\n    * E.I.R.P: transponder maximum E.I.R.P. in dBW\r\n    * Altitude: satellite height in km\r\n    * Transponder's max bandwidth: transponder bandwidth in MHz\r\n    * Effective bandwidth: actual used transponder bandwidth\r\n    * back_off: decrease in transmitted power to avoid non-linear amplification\r\n    * Modulation: modulations and FEC combinations from the available list of modulations/FECs\r\n    * Roll-off: factor of spectral efficiency, ROF (Roll-Off Factor)\r\n    * Polarization: horizontal, vertical or circular polarization\r\n* **Reception parameters**\r\n    * Name: reception identification name \r\n    * Antenna size: antenna diameter in meters\r\n    * Antenna efficiency: antenna efficiency (between 0 and 1)\r\n    * LNB gain: LNB (Low Noise Block) gain in dB\r\n    * LNB noise temp.: LNB noise temperature in Kelvin\r\n    * Additional losses: connection and any other losses considered in the reception in dB\r\n    * Cable loss: cable loss in dB\r\n    * Maximum depointing: maximum depointing angle between transmission and reception (degrees)\r\n* **Calculation parameters**\r\n    * SNR target relaxation: (+-) relaxation over the error between the target value and the actual SNR in the availability\r\n    calculation\r\n    * Margin: summed value over the modulation threshold to be considered. If margin = 0, \r\n    the calculation target equals the modulation threshold.\r\n    * Threads: number of simultaneously executed calculations. The maximum number of threads depends on the user's processor. \r\n\r\nThe Default Satellites box has a satellite list that sets the longitude coordinates accordingly.\r\n\r\n#### Buttons\r\n\r\n* Browse button: opens a dialog box to select the .csv file used as input for the calculations.\r\n* **Reception buttons**\r\n    * Clear: clears reception fields\r\n    * Load: loads reception parameters from a .rcp file.\r\n    * Save: saves reception parameters into a .rcp file.\r\n* **Satellite buttons**\r\n    * Clear: clears satellite fields\r\n    * Load: loads satellite parameters from a .sat file.\r\n    * Save: saves satellite parameters into a .sat file.\r\n* Calculate: runs the link performance calculation.\r\n\r\n![multi point menu](..\\images\\multi_down_perf.png)\r\n\r\n#### Outputs\r\n\r\nA progress bar will appear when the process is started. When the process is completed, the progress bar will \r\ndisappear, and a 'Complete!' message will show up. After that, the user can see the .csv output file in the **results** folder. \r\n\r\n### Antenna Size\r\n\r\nIn this screen, the user can estimate the minimum antenna size for a target availability for multiple points.\r\n\r\n#### Inputs\r\n* Path: path to the input .csv file \r\n    * .csv file columns: Name, Lat, Long, Delta Footprint (optional)\r\n    * Name: point name\r\n    * Lat: point latitude in decimal degrees\r\n    * Longitude: point longitude in decimal degrees\r\n    * Delta Footprint: optional argument. 
Represents the difference between the maximum received power and the power \r\n    received at the chosen coordinate (in case of footprint differences between coordinates)\r\n    * a .csv example can be found in the **input examples** folder\r\n* **Satellite parameters**\r\n    * Name: satellite identification name \r\n    * Longitude: geostationary satellite longitude in degrees\r\n    * Frequency: frequency in GHz\r\n    * E.I.R.P: transponder maximum E.I.R.P. in dBW\r\n    * Altitude: satellite height in km\r\n    * Transponder's max bandwidth: transponder bandwidth in MHz\r\n    * Effective bandwidth: actual used transponder bandwidth\r\n    * back_off: decrease in transmitted power to avoid non-linear amplification\r\n    * Modulation: modulations and FEC combinations from the available list of modulations/FECs\r\n    * Roll-off: factor of spectral efficiency, ROF (Roll-Off Factor)\r\n    * Polarization: horizontal, vertical or circular polarization\r\n* **Reception parameters**\r\n    * Name: reception identification name\r\n    * LNB gain: LNB (Low Noise Block) gain in dB\r\n    * LNB noise temp.: LNB noise temperature in Kelvin\r\n    * Additional losses: connection and any other losses considered in the reception in dB\r\n    * Cable loss: cable loss in dB\r\n    * Maximum depointing: maximum depointing angle between transmission and reception (degrees)\r\n    * Antenna efficiency: antenna efficiency (between 0 and 1)\r\n* **Calculation parameters**\r\n    * Availability target: availability to be achieved in the calculations (% - between 0 and 99.999). \r\n    SatLink will choose the minimum antenna size for the chosen availability.\r\n    * SNR target relaxation: (+-) relaxation over the error between the target value and the actual SNR in the availability\r\n    calculation\r\n    * Margin: summed value over the modulation threshold to be considered. If margin = 0, \r\n    the calculation target equals the modulation threshold.\r\n    * Threads: number of simultaneously executed calculations. The maximum number of threads depends on the user's processor. \r\n\r\nThe Default Satellites box has a satellite list that sets the longitude coordinates accordingly.\r\n\r\n#### Buttons\r\n\r\n* Browse button: opens a dialog box to select the .csv file used as input for the calculations.\r\n* **Reception buttons**\r\n    * Clear: clears reception fields\r\n    * Load: loads reception parameters from a .rcp file.\r\n    * Save: saves reception parameters into a .rcp file.\r\n* **Satellite buttons**\r\n    * Clear: clears satellite fields\r\n    * Load: loads satellite parameters from a .sat file.\r\n    * Save: saves satellite parameters into a .sat file.\r\n* Calculate: runs the link performance calculation.\r\n\r\n![multi point menu](..\\images\\multi_ant_sz_calc.png)\r\n\r\n#### Outputs\r\n\r\nA progress bar will appear when the process is started. When the process is completed, the progress bar will \r\ndisappear, and a 'Complete!' message will show up. 
After that, the user can see the .csv output file in the **results** folder.\r\n\r\n## Save/load dialog boxes\r\n\r\nWhenever a save/load/export button is clicked, a dialog box will open with file path, name and type as options.\r\n\r\n![dialog_save](..\\images\\dialog_save.png)\r\n\r\n![dialog_load](..\\images\\dialog_load.png)\r\n\r\nTrying to load a file with a different extension might crash the software.\r\n\r\n## File types\r\n\r\nFor file input examples, please check the **inputs_examples** folder.\r\n\r\n* **.sat** - satellite characteristics file\r\n* **.rcp** - reception characteristics file\r\n* **.gst** - ground station characteristics file\r\n* **.csv** - list calculation functions input/output format\r\n\r\n## Help\r\n" }, { "alpha_fraction": 0.5845065712928772, "alphanum_fraction": 0.6340740919113159, "avg_line_length": 57.262516021728516, "blob_id": "622c9d785d7f5250045c8ea1c36adbf47db04bef", "content_id": "e9b89ed92638f87f523e6b8fc0c7096c2471a1bf", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 363457, "license_type": "permissive", "max_line_length": 149, "num_lines": 6133, "path": "/main_window.py", "repo_name": "EzraCerpac/SatLink", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\r\n# Form implementation generated from reading ui file 'UI\\main.ui'\r\n#\r\n# Created by: PyQt5 UI code generator 5.15.2\r\n#\r\n# WARNING: Any manual changes made to this file will be lost when pyuic5 is\r\n# run again. Do not edit this file unless you know what you are doing.\r\n\r\n\r\nfrom PyQt5 import QtCore, QtGui, QtWidgets\r\n\r\n\r\nclass Ui_MainWindow(object):\r\n    def setupUi(self, MainWindow):\r\n        MainWindow.setObjectName(\"MainWindow\")\r\n        MainWindow.resize(820, 911)\r\n        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)\r\n        sizePolicy.setHorizontalStretch(0)\r\n        sizePolicy.setVerticalStretch(0)\r\n        sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())\r\n        MainWindow.setSizePolicy(sizePolicy)\r\n        icon = QtGui.QIcon()\r\n        icon.addPixmap(QtGui.QPixmap(\"UI\\\\icon.png\"), QtGui.QIcon.Normal, QtGui.QIcon.Off)\r\n        MainWindow.setWindowIcon(icon)\r\n        MainWindow.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n        MainWindow.setAnimated(True)\r\n        MainWindow.setTabShape(QtWidgets.QTabWidget.Rounded)\r\n        self.centralwidget = QtWidgets.QWidget(MainWindow)\r\n        self.centralwidget.setObjectName(\"centralwidget\")\r\n        self.stackedWidget = QtWidgets.QStackedWidget(self.centralwidget)\r\n        self.stackedWidget.setGeometry(QtCore.QRect(0, -1, 811, 861))\r\n        self.stackedWidget.setObjectName(\"stackedWidget\")\r\n        self.home_page = QtWidgets.QWidget()\r\n        self.home_page.setObjectName(\"home_page\")\r\n        self.label_5 = QtWidgets.QLabel(self.home_page)\r\n        self.label_5.setGeometry(QtCore.QRect(217, 270, 431, 391))\r\n        self.label_5.setText(\"\")\r\n        self.label_5.setPixmap(QtGui.QPixmap(\"UI\\\\home_logo.png\"))\r\n        self.label_5.setObjectName(\"label_5\")\r\n        self.stackedWidget.addWidget(self.home_page)\r\n        self.satellite_page = QtWidgets.QWidget()\r\n        self.satellite_page.setObjectName(\"satellite_page\")\r\n        self.groupBox_9 = QtWidgets.QGroupBox(self.satellite_page)\r\n        self.groupBox_9.setGeometry(QtCore.QRect(10, 10, 801, 851))\r\n        font = QtGui.QFont()\r\n        font.setPointSize(10)\r\n        self.groupBox_9.setFont(font)\r\n        self.groupBox_9.setAutoFillBackground(False)\r\n        self.groupBox_9.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n        
self.groupBox_9.setObjectName(\"groupBox_9\")\r\n self.label_31 = QtWidgets.QLabel(self.groupBox_9)\r\n self.label_31.setGeometry(QtCore.QRect(20, 38, 55, 16))\r\n self.label_31.setObjectName(\"label_31\")\r\n self.label_54 = QtWidgets.QLabel(self.groupBox_9)\r\n self.label_54.setGeometry(QtCore.QRect(20, 80, 161, 21))\r\n self.label_54.setObjectName(\"label_54\")\r\n self.name_sat = QtWidgets.QLineEdit(self.groupBox_9)\r\n self.name_sat.setGeometry(QtCore.QRect(100, 38, 511, 22))\r\n self.name_sat.setObjectName(\"name_sat\")\r\n self.long_sat = QtWidgets.QLineEdit(self.groupBox_9)\r\n self.long_sat.setGeometry(QtCore.QRect(200, 80, 101, 22))\r\n self.long_sat.setObjectName(\"long_sat\")\r\n self.save_sat = QtWidgets.QPushButton(self.groupBox_9)\r\n self.save_sat.setGeometry(QtCore.QRect(680, 810, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_sat.setFont(font)\r\n self.save_sat.setObjectName(\"save_sat\")\r\n self.load_sat = QtWidgets.QPushButton(self.groupBox_9)\r\n self.load_sat.setGeometry(QtCore.QRect(550, 810, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_sat.setFont(font)\r\n self.load_sat.setObjectName(\"load_sat\")\r\n self.default_sat = QtWidgets.QComboBox(self.groupBox_9)\r\n self.default_sat.setGeometry(QtCore.QRect(650, 70, 131, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(8)\r\n self.default_sat.setFont(font)\r\n self.default_sat.setObjectName(\"default_sat\")\r\n self.label_55 = QtWidgets.QLabel(self.groupBox_9)\r\n self.label_55.setGeometry(QtCore.QRect(650, 40, 131, 16))\r\n self.label_55.setObjectName(\"label_55\")\r\n self.label_56 = QtWidgets.QLabel(self.groupBox_9)\r\n self.label_56.setGeometry(QtCore.QRect(330, 78, 111, 16))\r\n self.label_56.setObjectName(\"label_56\")\r\n self.height_sat = QtWidgets.QLineEdit(self.groupBox_9)\r\n self.height_sat.setGeometry(QtCore.QRect(510, 78, 101, 22))\r\n self.height_sat.setObjectName(\"height_sat\")\r\n self.label_59 = QtWidgets.QLabel(self.groupBox_9)\r\n self.label_59.setGeometry(QtCore.QRect(330, 119, 121, 16))\r\n self.label_59.setObjectName(\"label_59\")\r\n self.eirp_sat = QtWidgets.QLineEdit(self.groupBox_9)\r\n self.eirp_sat.setGeometry(QtCore.QRect(510, 115, 101, 22))\r\n self.eirp_sat.setObjectName(\"eirp_sat\")\r\n self.label_60 = QtWidgets.QLabel(self.groupBox_9)\r\n self.label_60.setGeometry(QtCore.QRect(20, 148, 171, 51))\r\n self.label_60.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_60.setObjectName(\"label_60\")\r\n self.max_bw_sat = QtWidgets.QLineEdit(self.groupBox_9)\r\n self.max_bw_sat.setGeometry(QtCore.QRect(200, 158, 101, 22))\r\n self.max_bw_sat.setObjectName(\"max_bw_sat\")\r\n self.label_61 = QtWidgets.QLabel(self.groupBox_9)\r\n self.label_61.setGeometry(QtCore.QRect(330, 143, 171, 51))\r\n self.label_61.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_61.setObjectName(\"label_61\")\r\n self.bw_util_sat = QtWidgets.QLineEdit(self.groupBox_9)\r\n self.bw_util_sat.setGeometry(QtCore.QRect(510, 156, 101, 22))\r\n self.bw_util_sat.setObjectName(\"bw_util_sat\")\r\n self.label_64 = QtWidgets.QLabel(self.groupBox_9)\r\n self.label_64.setGeometry(QtCore.QRect(20, 203, 61, 16))\r\n self.label_64.setObjectName(\"label_64\")\r\n self.rolloff_sat = QtWidgets.QLineEdit(self.groupBox_9)\r\n self.rolloff_sat.setGeometry(QtCore.QRect(200, 203, 101, 22))\r\n self.rolloff_sat.setObjectName(\"rolloff_sat\")\r\n self.label_65 = 
QtWidgets.QLabel(self.groupBox_9)\r\n self.label_65.setGeometry(QtCore.QRect(330, 203, 91, 16))\r\n self.label_65.setObjectName(\"label_65\")\r\n self.modcod_sat = QtWidgets.QComboBox(self.groupBox_9)\r\n self.modcod_sat.setGeometry(QtCore.QRect(450, 203, 161, 22))\r\n self.modcod_sat.setObjectName(\"modcod_sat\")\r\n self.label_66 = QtWidgets.QLabel(self.groupBox_9)\r\n self.label_66.setGeometry(QtCore.QRect(20, 120, 131, 20))\r\n self.label_66.setObjectName(\"label_66\")\r\n self.freq_sat = QtWidgets.QLineEdit(self.groupBox_9)\r\n self.freq_sat.setGeometry(QtCore.QRect(200, 120, 101, 22))\r\n self.freq_sat.setObjectName(\"freq_sat\")\r\n self.label_7 = QtWidgets.QLabel(self.groupBox_9)\r\n self.label_7.setGeometry(QtCore.QRect(673, 100, 91, 20))\r\n self.label_7.setObjectName(\"label_7\")\r\n self.pol_sat = QtWidgets.QComboBox(self.groupBox_9)\r\n self.pol_sat.setGeometry(QtCore.QRect(667, 130, 101, 22))\r\n self.pol_sat.setObjectName(\"pol_sat\")\r\n self.pol_sat.addItem(\"\")\r\n self.pol_sat.addItem(\"\")\r\n self.pol_sat.addItem(\"\")\r\n self.clear_sat = QtWidgets.QPushButton(self.groupBox_9)\r\n self.clear_sat.setGeometry(QtCore.QRect(420, 810, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_sat.setFont(font)\r\n self.clear_sat.setObjectName(\"clear_sat\")\r\n self.stackedWidget.addWidget(self.satellite_page)\r\n self.ground_station_page = QtWidgets.QWidget()\r\n self.ground_station_page.setObjectName(\"ground_station_page\")\r\n self.groupBox = QtWidgets.QGroupBox(self.ground_station_page)\r\n self.groupBox.setGeometry(QtCore.QRect(10, 10, 801, 851))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox.setFont(font)\r\n self.groupBox.setAutoFillBackground(False)\r\n self.groupBox.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox.setObjectName(\"groupBox\")\r\n self.label = QtWidgets.QLabel(self.groupBox)\r\n self.label.setGeometry(QtCore.QRect(20, 36, 55, 16))\r\n self.label.setObjectName(\"label\")\r\n self.label_2 = QtWidgets.QLabel(self.groupBox)\r\n self.label_2.setGeometry(QtCore.QRect(20, 66, 151, 31))\r\n self.label_2.setObjectName(\"label_2\")\r\n self.label_3 = QtWidgets.QLabel(self.groupBox)\r\n self.label_3.setGeometry(QtCore.QRect(20, 111, 161, 21))\r\n self.label_3.setObjectName(\"label_3\")\r\n self.name_ground_station_grstat = QtWidgets.QLineEdit(self.groupBox)\r\n self.name_ground_station_grstat.setGeometry(QtCore.QRect(100, 36, 271, 22))\r\n self.name_ground_station_grstat.setObjectName(\"name_ground_station_grstat\")\r\n self.long_ground_station_grstat = QtWidgets.QLineEdit(self.groupBox)\r\n self.long_ground_station_grstat.setGeometry(QtCore.QRect(200, 111, 101, 22))\r\n self.long_ground_station_grstat.setObjectName(\"long_ground_station_grstat\")\r\n self.lat_ground_station_grstat = QtWidgets.QLineEdit(self.groupBox)\r\n self.lat_ground_station_grstat.setGeometry(QtCore.QRect(200, 76, 101, 22))\r\n self.lat_ground_station_grstat.setObjectName(\"lat_ground_station_grstat\")\r\n self.save_ground_station_gdstation = QtWidgets.QPushButton(self.groupBox)\r\n self.save_ground_station_gdstation.setGeometry(QtCore.QRect(700, 810, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_ground_station_gdstation.setFont(font)\r\n self.save_ground_station_gdstation.setObjectName(\"save_ground_station_gdstation\")\r\n self.load_ground_station_gdstation = 
QtWidgets.QPushButton(self.groupBox)\r\n self.load_ground_station_gdstation.setGeometry(QtCore.QRect(591, 811, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_ground_station_gdstation.setFont(font)\r\n self.load_ground_station_gdstation.setObjectName(\"load_ground_station_gdstation\")\r\n self.clear_ground_station_gdstation = QtWidgets.QPushButton(self.groupBox)\r\n self.clear_ground_station_gdstation.setGeometry(QtCore.QRect(481, 811, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_ground_station_gdstation.setFont(font)\r\n self.clear_ground_station_gdstation.setObjectName(\"clear_ground_station_gdstation\")\r\n self.stackedWidget.addWidget(self.ground_station_page)\r\n self.reception_page = QtWidgets.QWidget()\r\n self.reception_page.setObjectName(\"reception_page\")\r\n self.groupBox_3 = QtWidgets.QGroupBox(self.reception_page)\r\n self.groupBox_3.setGeometry(QtCore.QRect(10, 10, 801, 851))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_3.setFont(font)\r\n self.groupBox_3.setAutoFillBackground(False)\r\n self.groupBox_3.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_3.setObjectName(\"groupBox_3\")\r\n self.label_19 = QtWidgets.QLabel(self.groupBox_3)\r\n self.label_19.setGeometry(QtCore.QRect(20, 38, 55, 16))\r\n self.label_19.setObjectName(\"label_19\")\r\n self.label_20 = QtWidgets.QLabel(self.groupBox_3)\r\n self.label_20.setGeometry(QtCore.QRect(20, 68, 151, 31))\r\n self.label_20.setObjectName(\"label_20\")\r\n self.label_21 = QtWidgets.QLabel(self.groupBox_3)\r\n self.label_21.setGeometry(QtCore.QRect(20, 113, 161, 21))\r\n self.label_21.setObjectName(\"label_21\")\r\n self.name_reception_rcp = QtWidgets.QLineEdit(self.groupBox_3)\r\n self.name_reception_rcp.setGeometry(QtCore.QRect(100, 38, 511, 22))\r\n self.name_reception_rcp.setObjectName(\"name_reception_rcp\")\r\n self.lnb_gain_reception_rcp = QtWidgets.QLineEdit(self.groupBox_3)\r\n self.lnb_gain_reception_rcp.setGeometry(QtCore.QRect(200, 113, 101, 22))\r\n self.lnb_gain_reception_rcp.setObjectName(\"lnb_gain_reception_rcp\")\r\n self.ant_size_reception_rcp = QtWidgets.QLineEdit(self.groupBox_3)\r\n self.ant_size_reception_rcp.setGeometry(QtCore.QRect(200, 74, 101, 22))\r\n self.ant_size_reception_rcp.setObjectName(\"ant_size_reception_rcp\")\r\n self.save_reception_rcp = QtWidgets.QPushButton(self.groupBox_3)\r\n self.save_reception_rcp.setGeometry(QtCore.QRect(700, 810, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_reception_rcp.setFont(font)\r\n self.save_reception_rcp.setObjectName(\"save_reception_rcp\")\r\n self.load_reception_rcp = QtWidgets.QPushButton(self.groupBox_3)\r\n self.load_reception_rcp.setGeometry(QtCore.QRect(591, 811, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_reception_rcp.setFont(font)\r\n self.load_reception_rcp.setObjectName(\"load_reception_rcp\")\r\n self.label_23 = QtWidgets.QLabel(self.groupBox_3)\r\n self.label_23.setGeometry(QtCore.QRect(330, 80, 171, 16))\r\n self.label_23.setObjectName(\"label_23\")\r\n self.ant_eff_reception_rcp = QtWidgets.QLineEdit(self.groupBox_3)\r\n self.ant_eff_reception_rcp.setGeometry(QtCore.QRect(510, 78, 101, 22))\r\n self.ant_eff_reception_rcp.setObjectName(\"ant_eff_reception_rcp\")\r\n self.label_24 = 
QtWidgets.QLabel(self.groupBox_3)\r\n self.label_24.setGeometry(QtCore.QRect(330, 119, 161, 16))\r\n self.label_24.setObjectName(\"label_24\")\r\n self.lnb_temp_reception_rcp = QtWidgets.QLineEdit(self.groupBox_3)\r\n self.lnb_temp_reception_rcp.setGeometry(QtCore.QRect(510, 115, 101, 22))\r\n self.lnb_temp_reception_rcp.setObjectName(\"lnb_temp_reception_rcp\")\r\n self.label_25 = QtWidgets.QLabel(self.groupBox_3)\r\n self.label_25.setGeometry(QtCore.QRect(20, 138, 171, 51))\r\n self.label_25.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_25.setObjectName(\"label_25\")\r\n self.aditional_losses_reception_rcp = QtWidgets.QLineEdit(self.groupBox_3)\r\n self.aditional_losses_reception_rcp.setGeometry(QtCore.QRect(200, 153, 101, 22))\r\n self.aditional_losses_reception_rcp.setObjectName(\"aditional_losses_reception_rcp\")\r\n self.label_26 = QtWidgets.QLabel(self.groupBox_3)\r\n self.label_26.setGeometry(QtCore.QRect(20, 187, 171, 51))\r\n self.label_26.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_26.setObjectName(\"label_26\")\r\n self.max_depoint_reception_rcp = QtWidgets.QLineEdit(self.groupBox_3)\r\n self.max_depoint_reception_rcp.setGeometry(QtCore.QRect(200, 200, 101, 22))\r\n self.max_depoint_reception_rcp.setObjectName(\"max_depoint_reception_rcp\")\r\n self.clear_reception_rcp = QtWidgets.QPushButton(self.groupBox_3)\r\n self.clear_reception_rcp.setGeometry(QtCore.QRect(481, 811, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_reception_rcp.setFont(font)\r\n self.clear_reception_rcp.setObjectName(\"clear_reception_rcp\")\r\n self.label_62 = QtWidgets.QLabel(self.groupBox_3)\r\n self.label_62.setGeometry(QtCore.QRect(330, 140, 171, 51))\r\n self.label_62.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_62.setObjectName(\"label_62\")\r\n self.cable_loss_reception_rcp = QtWidgets.QLineEdit(self.groupBox_3)\r\n self.cable_loss_reception_rcp.setGeometry(QtCore.QRect(510, 155, 101, 22))\r\n self.cable_loss_reception_rcp.setObjectName(\"cable_loss_reception_rcp\")\r\n self.stackedWidget.addWidget(self.reception_page)\r\n self.single_point_ant_size_calc_page = QtWidgets.QWidget()\r\n self.single_point_ant_size_calc_page.setObjectName(\"single_point_ant_size_calc_page\")\r\n self.stackedWidget_2 = QtWidgets.QStackedWidget(self.single_point_ant_size_calc_page)\r\n self.stackedWidget_2.setGeometry(QtCore.QRect(0, 0, 811, 861))\r\n self.stackedWidget_2.setObjectName(\"stackedWidget_2\")\r\n self.empty_page_2 = QtWidgets.QWidget()\r\n self.empty_page_2.setObjectName(\"empty_page_2\")\r\n self.label_13 = QtWidgets.QLabel(self.empty_page_2)\r\n self.label_13.setGeometry(QtCore.QRect(217, 270, 431, 391))\r\n self.label_13.setText(\"\")\r\n self.label_13.setPixmap(QtGui.QPixmap(\"UI\\\\home_logo.png\"))\r\n self.label_13.setObjectName(\"label_13\")\r\n self.stackedWidget_2.addWidget(self.empty_page_2)\r\n self.satellite_page_2 = QtWidgets.QWidget()\r\n self.satellite_page_2.setObjectName(\"satellite_page_2\")\r\n self.groupBox_12 = QtWidgets.QGroupBox(self.satellite_page_2)\r\n self.groupBox_12.setGeometry(QtCore.QRect(10, 10, 801, 851))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_12.setFont(font)\r\n self.groupBox_12.setAutoFillBackground(False)\r\n self.groupBox_12.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_12.setObjectName(\"groupBox_12\")\r\n self.label_38 = QtWidgets.QLabel(self.groupBox_12)\r\n 
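# groupBox_12: satellite form (name, longitude, height, EIRP, bandwidth, roll-off, MODCOD) reused by the stackedWidget_2 calculator pages\r\n            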
self.label_38.setGeometry(QtCore.QRect(20, 38, 55, 16))\r\n self.label_38.setObjectName(\"label_38\")\r\n self.label_69 = QtWidgets.QLabel(self.groupBox_12)\r\n self.label_69.setGeometry(QtCore.QRect(20, 80, 161, 21))\r\n self.label_69.setObjectName(\"label_69\")\r\n self.name_sat_2 = QtWidgets.QLineEdit(self.groupBox_12)\r\n self.name_sat_2.setGeometry(QtCore.QRect(100, 38, 511, 22))\r\n self.name_sat_2.setObjectName(\"name_sat_2\")\r\n self.long_sat_2 = QtWidgets.QLineEdit(self.groupBox_12)\r\n self.long_sat_2.setGeometry(QtCore.QRect(200, 80, 101, 22))\r\n self.long_sat_2.setObjectName(\"long_sat_2\")\r\n self.save_sat_2 = QtWidgets.QPushButton(self.groupBox_12)\r\n self.save_sat_2.setGeometry(QtCore.QRect(680, 810, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_sat_2.setFont(font)\r\n self.save_sat_2.setObjectName(\"save_sat_2\")\r\n self.load_sat_2 = QtWidgets.QPushButton(self.groupBox_12)\r\n self.load_sat_2.setGeometry(QtCore.QRect(550, 810, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_sat_2.setFont(font)\r\n self.load_sat_2.setObjectName(\"load_sat_2\")\r\n self.default_sat_2 = QtWidgets.QComboBox(self.groupBox_12)\r\n self.default_sat_2.setGeometry(QtCore.QRect(650, 70, 131, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(8)\r\n self.default_sat_2.setFont(font)\r\n self.default_sat_2.setObjectName(\"default_sat_2\")\r\n self.label_70 = QtWidgets.QLabel(self.groupBox_12)\r\n self.label_70.setGeometry(QtCore.QRect(650, 40, 131, 16))\r\n self.label_70.setObjectName(\"label_70\")\r\n self.label_78 = QtWidgets.QLabel(self.groupBox_12)\r\n self.label_78.setGeometry(QtCore.QRect(330, 78, 111, 16))\r\n self.label_78.setObjectName(\"label_78\")\r\n self.height_sat_2 = QtWidgets.QLineEdit(self.groupBox_12)\r\n self.height_sat_2.setGeometry(QtCore.QRect(510, 78, 101, 22))\r\n self.height_sat_2.setObjectName(\"height_sat_2\")\r\n self.label_79 = QtWidgets.QLabel(self.groupBox_12)\r\n self.label_79.setGeometry(QtCore.QRect(330, 119, 121, 16))\r\n self.label_79.setObjectName(\"label_79\")\r\n self.eirp_sat_2 = QtWidgets.QLineEdit(self.groupBox_12)\r\n self.eirp_sat_2.setGeometry(QtCore.QRect(510, 115, 101, 22))\r\n self.eirp_sat_2.setObjectName(\"eirp_sat_2\")\r\n self.label_91 = QtWidgets.QLabel(self.groupBox_12)\r\n self.label_91.setGeometry(QtCore.QRect(20, 148, 171, 51))\r\n self.label_91.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_91.setObjectName(\"label_91\")\r\n self.max_bw_sat_2 = QtWidgets.QLineEdit(self.groupBox_12)\r\n self.max_bw_sat_2.setGeometry(QtCore.QRect(200, 158, 101, 22))\r\n self.max_bw_sat_2.setObjectName(\"max_bw_sat_2\")\r\n self.label_92 = QtWidgets.QLabel(self.groupBox_12)\r\n self.label_92.setGeometry(QtCore.QRect(330, 143, 171, 51))\r\n self.label_92.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_92.setObjectName(\"label_92\")\r\n self.bw_util_sat_2 = QtWidgets.QLineEdit(self.groupBox_12)\r\n self.bw_util_sat_2.setGeometry(QtCore.QRect(510, 156, 101, 22))\r\n self.bw_util_sat_2.setObjectName(\"bw_util_sat_2\")\r\n self.label_93 = QtWidgets.QLabel(self.groupBox_12)\r\n self.label_93.setGeometry(QtCore.QRect(20, 203, 61, 16))\r\n self.label_93.setObjectName(\"label_93\")\r\n self.rolloff_sat_2 = QtWidgets.QLineEdit(self.groupBox_12)\r\n self.rolloff_sat_2.setGeometry(QtCore.QRect(200, 203, 101, 22))\r\n self.rolloff_sat_2.setObjectName(\"rolloff_sat_2\")\r\n self.label_94 = 
QtWidgets.QLabel(self.groupBox_12)\r\n self.label_94.setGeometry(QtCore.QRect(330, 203, 91, 16))\r\n self.label_94.setObjectName(\"label_94\")\r\n self.modcod_sat_2 = QtWidgets.QComboBox(self.groupBox_12)\r\n self.modcod_sat_2.setGeometry(QtCore.QRect(450, 203, 161, 22))\r\n self.modcod_sat_2.setObjectName(\"modcod_sat_2\")\r\n self.modcod_sat_2.addItem(\"\")\r\n self.modcod_sat_2.addItem(\"\")\r\n self.modcod_sat_2.addItem(\"\")\r\n self.label_95 = QtWidgets.QLabel(self.groupBox_12)\r\n self.label_95.setGeometry(QtCore.QRect(20, 120, 131, 20))\r\n self.label_95.setObjectName(\"label_95\")\r\n self.freq_sat_2 = QtWidgets.QLineEdit(self.groupBox_12)\r\n self.freq_sat_2.setGeometry(QtCore.QRect(200, 120, 101, 22))\r\n self.freq_sat_2.setObjectName(\"freq_sat_2\")\r\n self.label_17 = QtWidgets.QLabel(self.groupBox_12)\r\n self.label_17.setGeometry(QtCore.QRect(673, 100, 91, 20))\r\n self.label_17.setObjectName(\"label_17\")\r\n self.pol_sat_3 = QtWidgets.QComboBox(self.groupBox_12)\r\n self.pol_sat_3.setGeometry(QtCore.QRect(667, 130, 101, 22))\r\n self.pol_sat_3.setObjectName(\"pol_sat_3\")\r\n self.pol_sat_3.addItem(\"\")\r\n self.pol_sat_3.addItem(\"\")\r\n self.pol_sat_3.addItem(\"\")\r\n self.clear_sat_2 = QtWidgets.QPushButton(self.groupBox_12)\r\n self.clear_sat_2.setGeometry(QtCore.QRect(420, 810, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_sat_2.setFont(font)\r\n self.clear_sat_2.setObjectName(\"clear_sat_2\")\r\n self.stackedWidget_2.addWidget(self.satellite_page_2)\r\n self.ground_station_page_2 = QtWidgets.QWidget()\r\n self.ground_station_page_2.setObjectName(\"ground_station_page_2\")\r\n self.groupBox_2 = QtWidgets.QGroupBox(self.ground_station_page_2)\r\n self.groupBox_2.setGeometry(QtCore.QRect(10, 10, 801, 851))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_2.setFont(font)\r\n self.groupBox_2.setAutoFillBackground(False)\r\n self.groupBox_2.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_2.setObjectName(\"groupBox_2\")\r\n self.label_18 = QtWidgets.QLabel(self.groupBox_2)\r\n self.label_18.setGeometry(QtCore.QRect(20, 36, 55, 16))\r\n self.label_18.setObjectName(\"label_18\")\r\n self.label_22 = QtWidgets.QLabel(self.groupBox_2)\r\n self.label_22.setGeometry(QtCore.QRect(20, 66, 151, 31))\r\n self.label_22.setObjectName(\"label_22\")\r\n self.label_39 = QtWidgets.QLabel(self.groupBox_2)\r\n self.label_39.setGeometry(QtCore.QRect(20, 111, 161, 21))\r\n self.label_39.setObjectName(\"label_39\")\r\n self.name_ground_station_grstat_2 = QtWidgets.QLineEdit(self.groupBox_2)\r\n self.name_ground_station_grstat_2.setGeometry(QtCore.QRect(100, 36, 271, 22))\r\n self.name_ground_station_grstat_2.setObjectName(\"name_ground_station_grstat_2\")\r\n self.long_ground_station_grstat_2 = QtWidgets.QLineEdit(self.groupBox_2)\r\n self.long_ground_station_grstat_2.setGeometry(QtCore.QRect(200, 111, 101, 22))\r\n self.long_ground_station_grstat_2.setObjectName(\"long_ground_station_grstat_2\")\r\n self.lat_ground_station_grstat_2 = QtWidgets.QLineEdit(self.groupBox_2)\r\n self.lat_ground_station_grstat_2.setGeometry(QtCore.QRect(200, 76, 101, 22))\r\n self.lat_ground_station_grstat_2.setObjectName(\"lat_ground_station_grstat_2\")\r\n self.save_ground_station_gdstation_2 = QtWidgets.QPushButton(self.groupBox_2)\r\n self.save_ground_station_gdstation_2.setGeometry(QtCore.QRect(700, 810, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n 
font.setBold(False)\r\n font.setWeight(50)\r\n self.save_ground_station_gdstation_2.setFont(font)\r\n self.save_ground_station_gdstation_2.setObjectName(\"save_ground_station_gdstation_2\")\r\n self.load_ground_station_gdstation_2 = QtWidgets.QPushButton(self.groupBox_2)\r\n self.load_ground_station_gdstation_2.setGeometry(QtCore.QRect(591, 811, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_ground_station_gdstation_2.setFont(font)\r\n self.load_ground_station_gdstation_2.setObjectName(\"load_ground_station_gdstation_2\")\r\n self.clear_ground_station_gdstation_2 = QtWidgets.QPushButton(self.groupBox_2)\r\n self.clear_ground_station_gdstation_2.setGeometry(QtCore.QRect(481, 811, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_ground_station_gdstation_2.setFont(font)\r\n self.clear_ground_station_gdstation_2.setObjectName(\"clear_ground_station_gdstation_2\")\r\n self.stackedWidget_2.addWidget(self.ground_station_page_2)\r\n self.reception_page_2 = QtWidgets.QWidget()\r\n self.reception_page_2.setObjectName(\"reception_page_2\")\r\n self.groupBox_13 = QtWidgets.QGroupBox(self.reception_page_2)\r\n self.groupBox_13.setGeometry(QtCore.QRect(10, 10, 801, 851))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_13.setFont(font)\r\n self.groupBox_13.setAutoFillBackground(False)\r\n self.groupBox_13.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_13.setObjectName(\"groupBox_13\")\r\n self.label_96 = QtWidgets.QLabel(self.groupBox_13)\r\n self.label_96.setGeometry(QtCore.QRect(20, 38, 55, 16))\r\n self.label_96.setObjectName(\"label_96\")\r\n self.label_97 = QtWidgets.QLabel(self.groupBox_13)\r\n self.label_97.setGeometry(QtCore.QRect(20, 68, 151, 31))\r\n self.label_97.setObjectName(\"label_97\")\r\n self.label_98 = QtWidgets.QLabel(self.groupBox_13)\r\n self.label_98.setGeometry(QtCore.QRect(20, 113, 161, 21))\r\n self.label_98.setObjectName(\"label_98\")\r\n self.name_reception_rcp_2 = QtWidgets.QLineEdit(self.groupBox_13)\r\n self.name_reception_rcp_2.setGeometry(QtCore.QRect(100, 38, 511, 22))\r\n self.name_reception_rcp_2.setObjectName(\"name_reception_rcp_2\")\r\n self.lnb_gain_reception_rcp_2 = QtWidgets.QLineEdit(self.groupBox_13)\r\n self.lnb_gain_reception_rcp_2.setGeometry(QtCore.QRect(200, 113, 101, 22))\r\n self.lnb_gain_reception_rcp_2.setObjectName(\"lnb_gain_reception_rcp_2\")\r\n self.ant_size_reception_rcp_2 = QtWidgets.QLineEdit(self.groupBox_13)\r\n self.ant_size_reception_rcp_2.setGeometry(QtCore.QRect(200, 74, 101, 22))\r\n self.ant_size_reception_rcp_2.setObjectName(\"ant_size_reception_rcp_2\")\r\n self.save_reception_rcp_2 = QtWidgets.QPushButton(self.groupBox_13)\r\n self.save_reception_rcp_2.setGeometry(QtCore.QRect(700, 810, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_reception_rcp_2.setFont(font)\r\n self.save_reception_rcp_2.setObjectName(\"save_reception_rcp_2\")\r\n self.load_reception_rcp_5 = QtWidgets.QPushButton(self.groupBox_13)\r\n self.load_reception_rcp_5.setGeometry(QtCore.QRect(591, 811, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_reception_rcp_5.setFont(font)\r\n self.load_reception_rcp_5.setObjectName(\"load_reception_rcp_5\")\r\n self.label_99 = QtWidgets.QLabel(self.groupBox_13)\r\n 
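# groupBox_13: reception form (antenna size/efficiency, LNB gain/temperature, losses, depointing) on reception_page_2\r\n            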
self.label_99.setGeometry(QtCore.QRect(330, 80, 171, 16))\r\n self.label_99.setObjectName(\"label_99\")\r\n self.ant_eff_reception_rcp_2 = QtWidgets.QLineEdit(self.groupBox_13)\r\n self.ant_eff_reception_rcp_2.setGeometry(QtCore.QRect(510, 78, 101, 22))\r\n self.ant_eff_reception_rcp_2.setObjectName(\"ant_eff_reception_rcp_2\")\r\n self.label_100 = QtWidgets.QLabel(self.groupBox_13)\r\n self.label_100.setGeometry(QtCore.QRect(330, 119, 161, 16))\r\n self.label_100.setObjectName(\"label_100\")\r\n self.lnb_temp_reception_rcp_2 = QtWidgets.QLineEdit(self.groupBox_13)\r\n self.lnb_temp_reception_rcp_2.setGeometry(QtCore.QRect(510, 115, 101, 22))\r\n self.lnb_temp_reception_rcp_2.setObjectName(\"lnb_temp_reception_rcp_2\")\r\n self.label_101 = QtWidgets.QLabel(self.groupBox_13)\r\n self.label_101.setGeometry(QtCore.QRect(20, 138, 171, 51))\r\n self.label_101.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_101.setObjectName(\"label_101\")\r\n self.aditional_losses_reception_rcp_2 = QtWidgets.QLineEdit(self.groupBox_13)\r\n self.aditional_losses_reception_rcp_2.setGeometry(QtCore.QRect(200, 153, 101, 22))\r\n self.aditional_losses_reception_rcp_2.setObjectName(\"aditional_losses_reception_rcp_2\")\r\n self.label_102 = QtWidgets.QLabel(self.groupBox_13)\r\n self.label_102.setGeometry(QtCore.QRect(20, 187, 171, 51))\r\n self.label_102.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_102.setObjectName(\"label_102\")\r\n self.max_depoint_reception_rcp_2 = QtWidgets.QLineEdit(self.groupBox_13)\r\n self.max_depoint_reception_rcp_2.setGeometry(QtCore.QRect(200, 200, 101, 22))\r\n self.max_depoint_reception_rcp_2.setObjectName(\"max_depoint_reception_rcp_2\")\r\n self.clear_reception_rcp_3 = QtWidgets.QPushButton(self.groupBox_13)\r\n self.clear_reception_rcp_3.setGeometry(QtCore.QRect(481, 811, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_reception_rcp_3.setFont(font)\r\n self.clear_reception_rcp_3.setObjectName(\"clear_reception_rcp_3\")\r\n self.label_103 = QtWidgets.QLabel(self.groupBox_13)\r\n self.label_103.setGeometry(QtCore.QRect(330, 140, 171, 51))\r\n self.label_103.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_103.setObjectName(\"label_103\")\r\n self.cable_loss_reception_rcp_2 = QtWidgets.QLineEdit(self.groupBox_13)\r\n self.cable_loss_reception_rcp_2.setGeometry(QtCore.QRect(510, 155, 101, 22))\r\n self.cable_loss_reception_rcp_2.setObjectName(\"cable_loss_reception_rcp_2\")\r\n self.stackedWidget_2.addWidget(self.reception_page_2)\r\n self.ant_size_single_point_calc_page_2 = QtWidgets.QWidget()\r\n self.ant_size_single_point_calc_page_2.setObjectName(\"ant_size_single_point_calc_page_2\")\r\n self.stackedWidget_2.addWidget(self.ant_size_single_point_calc_page_2)\r\n self.list_ant_size_calc_page_2 = QtWidgets.QWidget()\r\n self.list_ant_size_calc_page_2.setObjectName(\"list_ant_size_calc_page_2\")\r\n self.stackedWidget_2.addWidget(self.list_ant_size_calc_page_2)\r\n self.single_point_atm_calc_page_2 = QtWidgets.QWidget()\r\n self.single_point_atm_calc_page_2.setObjectName(\"single_point_atm_calc_page_2\")\r\n self.groupBox_14 = QtWidgets.QGroupBox(self.single_point_atm_calc_page_2)\r\n self.groupBox_14.setGeometry(QtCore.QRect(20, 20, 781, 131))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_14.setFont(font)\r\n self.groupBox_14.setObjectName(\"groupBox_14\")\r\n self.lat_ground_station_spatm_2 = QtWidgets.QLineEdit(self.groupBox_14)\r\n 
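# groupBox_14: ground-station inputs (latitude/longitude, antenna size and efficiency) for the single-point atmospheric attenuation page\r\n            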
self.lat_ground_station_spatm_2.setGeometry(QtCore.QRect(200, 30, 101, 22))\r\n self.lat_ground_station_spatm_2.setObjectName(\"lat_ground_station_spatm_2\")\r\n self.long_ground_station_spatm_2 = QtWidgets.QLineEdit(self.groupBox_14)\r\n self.long_ground_station_spatm_2.setGeometry(QtCore.QRect(500, 30, 101, 22))\r\n self.long_ground_station_spatm_2.setObjectName(\"long_ground_station_spatm_2\")\r\n self.label_104 = QtWidgets.QLabel(self.groupBox_14)\r\n self.label_104.setGeometry(QtCore.QRect(20, 23, 151, 31))\r\n self.label_104.setObjectName(\"label_104\")\r\n self.label_105 = QtWidgets.QLabel(self.groupBox_14)\r\n self.label_105.setGeometry(QtCore.QRect(319, 28, 161, 21))\r\n self.label_105.setObjectName(\"label_105\")\r\n self.label_106 = QtWidgets.QLabel(self.groupBox_14)\r\n self.label_106.setGeometry(QtCore.QRect(20, 60, 161, 21))\r\n self.label_106.setObjectName(\"label_106\")\r\n self.ant_size_reception_spatm_2 = QtWidgets.QLineEdit(self.groupBox_14)\r\n self.ant_size_reception_spatm_2.setGeometry(QtCore.QRect(200, 60, 101, 22))\r\n self.ant_size_reception_spatm_2.setObjectName(\"ant_size_reception_spatm_2\")\r\n self.ant_eff_reception_spatm_2 = QtWidgets.QLineEdit(self.groupBox_14)\r\n self.ant_eff_reception_spatm_2.setGeometry(QtCore.QRect(500, 60, 101, 22))\r\n self.ant_eff_reception_spatm_2.setObjectName(\"ant_eff_reception_spatm_2\")\r\n self.label_107 = QtWidgets.QLabel(self.groupBox_14)\r\n self.label_107.setGeometry(QtCore.QRect(320, 60, 141, 16))\r\n self.label_107.setObjectName(\"label_107\")\r\n self.clear_reception_rcp_4 = QtWidgets.QPushButton(self.groupBox_14)\r\n self.clear_reception_rcp_4.setGeometry(QtCore.QRect(660, 30, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_reception_rcp_4.setFont(font)\r\n self.clear_reception_rcp_4.setObjectName(\"clear_reception_rcp_4\")\r\n self.load_reception_rcp_6 = QtWidgets.QPushButton(self.groupBox_14)\r\n self.load_reception_rcp_6.setGeometry(QtCore.QRect(15, 91, 201, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_reception_rcp_6.setFont(font)\r\n self.load_reception_rcp_6.setObjectName(\"load_reception_rcp_6\")\r\n self.load_reception_rcp_7 = QtWidgets.QPushButton(self.groupBox_14)\r\n self.load_reception_rcp_7.setGeometry(QtCore.QRect(230, 90, 201, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_reception_rcp_7.setFont(font)\r\n self.load_reception_rcp_7.setObjectName(\"load_reception_rcp_7\")\r\n self.calc_spatm_2 = QtWidgets.QPushButton(self.single_point_atm_calc_page_2)\r\n self.calc_spatm_2.setGeometry(QtCore.QRect(666, 830, 141, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.calc_spatm_2.setFont(font)\r\n self.calc_spatm_2.setObjectName(\"calc_spatm_2\")\r\n self.textEdit_3 = QtWidgets.QTextEdit(self.single_point_atm_calc_page_2)\r\n self.textEdit_3.setGeometry(QtCore.QRect(6, 650, 801, 171))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.textEdit_3.setFont(font)\r\n self.textEdit_3.setObjectName(\"textEdit_3\")\r\n self.groupBox_15 = QtWidgets.QGroupBox(self.single_point_atm_calc_page_2)\r\n self.groupBox_15.setGeometry(QtCore.QRect(20, 150, 781, 131))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_15.setFont(font)\r\n self.groupBox_15.setObjectName(\"groupBox_15\")\r\n self.label_108 = QtWidgets.QLabel(self.groupBox_15)\r\n 
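# groupBox_15: satellite inputs (longitude, frequency, polarization, default-satellite picker) for the same page\r\n            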
self.label_108.setGeometry(QtCore.QRect(20, 60, 131, 16))\r\n self.label_108.setObjectName(\"label_108\")\r\n self.label_109 = QtWidgets.QLabel(self.groupBox_15)\r\n self.label_109.setGeometry(QtCore.QRect(20, 30, 161, 21))\r\n self.label_109.setObjectName(\"label_109\")\r\n self.long_sat_spatm_2 = QtWidgets.QLineEdit(self.groupBox_15)\r\n self.long_sat_spatm_2.setGeometry(QtCore.QRect(200, 30, 101, 22))\r\n self.long_sat_spatm_2.setObjectName(\"long_sat_spatm_2\")\r\n self.freq_sat_spatm_2 = QtWidgets.QLineEdit(self.groupBox_15)\r\n self.freq_sat_spatm_2.setGeometry(QtCore.QRect(200, 60, 101, 22))\r\n self.freq_sat_spatm_2.setObjectName(\"freq_sat_spatm_2\")\r\n self.default_sat_sp_perf_4 = QtWidgets.QComboBox(self.groupBox_15)\r\n self.default_sat_sp_perf_4.setGeometry(QtCore.QRect(634, 44, 131, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(8)\r\n self.default_sat_sp_perf_4.setFont(font)\r\n self.default_sat_sp_perf_4.setObjectName(\"default_sat_sp_perf_4\")\r\n self.label_110 = QtWidgets.QLabel(self.groupBox_15)\r\n self.label_110.setGeometry(QtCore.QRect(636, 14, 131, 16))\r\n self.label_110.setObjectName(\"label_110\")\r\n self.pol_sat_4 = QtWidgets.QComboBox(self.groupBox_15)\r\n self.pol_sat_4.setGeometry(QtCore.QRect(450, 30, 101, 22))\r\n self.pol_sat_4.setObjectName(\"pol_sat_4\")\r\n self.pol_sat_4.addItem(\"\")\r\n self.pol_sat_4.addItem(\"\")\r\n self.pol_sat_4.addItem(\"\")\r\n self.label_111 = QtWidgets.QLabel(self.groupBox_15)\r\n self.label_111.setGeometry(QtCore.QRect(350, 30, 91, 20))\r\n self.label_111.setObjectName(\"label_111\")\r\n self.load_reception_rcp_8 = QtWidgets.QPushButton(self.groupBox_15)\r\n self.load_reception_rcp_8.setGeometry(QtCore.QRect(10, 90, 201, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_reception_rcp_8.setFont(font)\r\n self.load_reception_rcp_8.setObjectName(\"load_reception_rcp_8\")\r\n self.p_year_spatm_2 = QtWidgets.QLineEdit(self.single_point_atm_calc_page_2)\r\n self.p_year_spatm_2.setGeometry(QtCore.QRect(550, 834, 101, 21))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.p_year_spatm_2.setFont(font)\r\n self.p_year_spatm_2.setToolTip(\"\")\r\n self.p_year_spatm_2.setObjectName(\"p_year_spatm_2\")\r\n self.label_112 = QtWidgets.QLabel(self.single_point_atm_calc_page_2)\r\n self.label_112.setGeometry(QtCore.QRect(340, 834, 201, 20))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_112.setFont(font)\r\n self.label_112.setObjectName(\"label_112\")\r\n self.label_113 = QtWidgets.QLabel(self.single_point_atm_calc_page_2)\r\n self.label_113.setGeometry(QtCore.QRect(10, 837, 71, 16))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_113.setFont(font)\r\n self.label_113.setObjectName(\"label_113\")\r\n self.method_spatm_2 = QtWidgets.QComboBox(self.single_point_atm_calc_page_2)\r\n self.method_spatm_2.setGeometry(QtCore.QRect(84, 835, 101, 21))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.method_spatm_2.setFont(font)\r\n self.method_spatm_2.setObjectName(\"method_spatm_2\")\r\n self.method_spatm_2.addItem(\"\")\r\n self.method_spatm_2.addItem(\"\")\r\n self.stackedWidget_2.addWidget(self.single_point_atm_calc_page_2)\r\n self.single_point_perf_calc_page_2 = QtWidgets.QWidget()\r\n self.single_point_perf_calc_page_2.setObjectName(\"single_point_perf_calc_page_2\")\r\n self.groupBox_16 = QtWidgets.QGroupBox(self.single_point_perf_calc_page_2)\r\n self.groupBox_16.setGeometry(QtCore.QRect(10, 10, 801, 121))\r\n 
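# groupBox_16: ground-station block of the single-point antenna-size calculation page\r\n            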
font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_16.setFont(font)\r\n self.groupBox_16.setAutoFillBackground(False)\r\n self.groupBox_16.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_16.setObjectName(\"groupBox_16\")\r\n self.label_114 = QtWidgets.QLabel(self.groupBox_16)\r\n self.label_114.setGeometry(QtCore.QRect(20, 36, 55, 16))\r\n self.label_114.setObjectName(\"label_114\")\r\n self.label_115 = QtWidgets.QLabel(self.groupBox_16)\r\n self.label_115.setGeometry(QtCore.QRect(20, 70, 151, 31))\r\n self.label_115.setObjectName(\"label_115\")\r\n self.label_116 = QtWidgets.QLabel(self.groupBox_16)\r\n self.label_116.setGeometry(QtCore.QRect(330, 78, 161, 21))\r\n self.label_116.setObjectName(\"label_116\")\r\n self.name_ground_station_sp_ant_size = QtWidgets.QLineEdit(self.groupBox_16)\r\n self.name_ground_station_sp_ant_size.setGeometry(QtCore.QRect(100, 36, 511, 22))\r\n self.name_ground_station_sp_ant_size.setObjectName(\"name_ground_station_sp_ant_size\")\r\n self.long_ground_station_sp_ant_size = QtWidgets.QLineEdit(self.groupBox_16)\r\n self.long_ground_station_sp_ant_size.setGeometry(QtCore.QRect(510, 80, 101, 22))\r\n self.long_ground_station_sp_ant_size.setObjectName(\"long_ground_station_sp_ant_size\")\r\n self.lat_ground_station_sp_ant_size = QtWidgets.QLineEdit(self.groupBox_16)\r\n self.lat_ground_station_sp_ant_size.setGeometry(QtCore.QRect(200, 76, 101, 22))\r\n self.lat_ground_station_sp_ant_size.setObjectName(\"lat_ground_station_sp_ant_size\")\r\n self.save_ground_station_sp_ant_size = QtWidgets.QPushButton(self.groupBox_16)\r\n self.save_ground_station_sp_ant_size.setGeometry(QtCore.QRect(652, 80, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_ground_station_sp_ant_size.setFont(font)\r\n self.save_ground_station_sp_ant_size.setObjectName(\"save_ground_station_sp_ant_size\")\r\n self.load_ground_station_sp_ant_size = QtWidgets.QPushButton(self.groupBox_16)\r\n self.load_ground_station_sp_ant_size.setGeometry(QtCore.QRect(652, 48, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_ground_station_sp_ant_size.setFont(font)\r\n self.load_ground_station_sp_ant_size.setObjectName(\"load_ground_station_sp_ant_size\")\r\n self.clear_ground_station_sp_ant_size = QtWidgets.QPushButton(self.groupBox_16)\r\n self.clear_ground_station_sp_ant_size.setGeometry(QtCore.QRect(652, 16, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_ground_station_sp_ant_size.setFont(font)\r\n self.clear_ground_station_sp_ant_size.setObjectName(\"clear_ground_station_sp_ant_size\")\r\n self.groupBox_17 = QtWidgets.QGroupBox(self.single_point_perf_calc_page_2)\r\n self.groupBox_17.setGeometry(QtCore.QRect(10, 129, 801, 251))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_17.setFont(font)\r\n self.groupBox_17.setAutoFillBackground(False)\r\n self.groupBox_17.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_17.setObjectName(\"groupBox_17\")\r\n self.label_117 = QtWidgets.QLabel(self.groupBox_17)\r\n self.label_117.setGeometry(QtCore.QRect(20, 29, 55, 16))\r\n self.label_117.setObjectName(\"label_117\")\r\n self.label_118 = QtWidgets.QLabel(self.groupBox_17)\r\n self.label_118.setGeometry(QtCore.QRect(20, 71, 161, 21))\r\n self.label_118.setObjectName(\"label_118\")\r\n self.name_sat_sp_ant_size = 
QtWidgets.QLineEdit(self.groupBox_17)\r\n self.name_sat_sp_ant_size.setGeometry(QtCore.QRect(100, 29, 511, 22))\r\n self.name_sat_sp_ant_size.setObjectName(\"name_sat_sp_ant_size\")\r\n self.long_sat_sp_ant_size = QtWidgets.QLineEdit(self.groupBox_17)\r\n self.long_sat_sp_ant_size.setGeometry(QtCore.QRect(200, 71, 101, 22))\r\n self.long_sat_sp_ant_size.setObjectName(\"long_sat_sp_ant_size\")\r\n self.save_sat_sp_ant_size = QtWidgets.QPushButton(self.groupBox_17)\r\n self.save_sat_sp_ant_size.setGeometry(QtCore.QRect(656, 207, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_sat_sp_ant_size.setFont(font)\r\n self.save_sat_sp_ant_size.setObjectName(\"save_sat_sp_ant_size\")\r\n self.load_sat_sp_ant_size = QtWidgets.QPushButton(self.groupBox_17)\r\n self.load_sat_sp_ant_size.setGeometry(QtCore.QRect(656, 175, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_sat_sp_ant_size.setFont(font)\r\n self.load_sat_sp_ant_size.setObjectName(\"load_sat_sp_ant_size\")\r\n self.default_sat_sp_ant_size = QtWidgets.QComboBox(self.groupBox_17)\r\n self.default_sat_sp_ant_size.setGeometry(QtCore.QRect(650, 42, 131, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(8)\r\n self.default_sat_sp_ant_size.setFont(font)\r\n self.default_sat_sp_ant_size.setObjectName(\"default_sat_sp_ant_size\")\r\n self.label_119 = QtWidgets.QLabel(self.groupBox_17)\r\n self.label_119.setGeometry(QtCore.QRect(648, 16, 131, 16))\r\n self.label_119.setObjectName(\"label_119\")\r\n self.label_120 = QtWidgets.QLabel(self.groupBox_17)\r\n self.label_120.setGeometry(QtCore.QRect(330, 69, 111, 16))\r\n self.label_120.setObjectName(\"label_120\")\r\n self.height_sat_sp_ant_size = QtWidgets.QLineEdit(self.groupBox_17)\r\n self.height_sat_sp_ant_size.setGeometry(QtCore.QRect(510, 69, 101, 22))\r\n self.height_sat_sp_ant_size.setObjectName(\"height_sat_sp_ant_size\")\r\n self.label_121 = QtWidgets.QLabel(self.groupBox_17)\r\n self.label_121.setGeometry(QtCore.QRect(330, 110, 121, 16))\r\n self.label_121.setObjectName(\"label_121\")\r\n self.eirp_sat_sp_ant_size = QtWidgets.QLineEdit(self.groupBox_17)\r\n self.eirp_sat_sp_ant_size.setGeometry(QtCore.QRect(510, 106, 101, 22))\r\n self.eirp_sat_sp_ant_size.setObjectName(\"eirp_sat_sp_ant_size\")\r\n self.label_122 = QtWidgets.QLabel(self.groupBox_17)\r\n self.label_122.setGeometry(QtCore.QRect(20, 139, 171, 51))\r\n self.label_122.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_122.setObjectName(\"label_122\")\r\n self.max_bw_sat_sp_ant_size = QtWidgets.QLineEdit(self.groupBox_17)\r\n self.max_bw_sat_sp_ant_size.setGeometry(QtCore.QRect(200, 149, 101, 22))\r\n self.max_bw_sat_sp_ant_size.setObjectName(\"max_bw_sat_sp_ant_size\")\r\n self.label_123 = QtWidgets.QLabel(self.groupBox_17)\r\n self.label_123.setGeometry(QtCore.QRect(330, 134, 171, 51))\r\n self.label_123.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_123.setObjectName(\"label_123\")\r\n self.bw_util_sat_sp_ant_size = QtWidgets.QLineEdit(self.groupBox_17)\r\n self.bw_util_sat_sp_ant_size.setGeometry(QtCore.QRect(510, 147, 101, 22))\r\n self.bw_util_sat_sp_ant_size.setObjectName(\"bw_util_sat_sp_ant_size\")\r\n self.label_124 = QtWidgets.QLabel(self.groupBox_17)\r\n self.label_124.setGeometry(QtCore.QRect(20, 191, 61, 16))\r\n self.label_124.setObjectName(\"label_124\")\r\n self.rolloff_sat_sp_ant_size = QtWidgets.QLineEdit(self.groupBox_17)\r\n 
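# Roll-off, MODCOD, carrier frequency and polarization round out the satellite form\r\n            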
self.rolloff_sat_sp_ant_size.setGeometry(QtCore.QRect(200, 190, 101, 22))\r\n self.rolloff_sat_sp_ant_size.setObjectName(\"rolloff_sat_sp_ant_size\")\r\n self.label_125 = QtWidgets.QLabel(self.groupBox_17)\r\n self.label_125.setGeometry(QtCore.QRect(330, 192, 91, 16))\r\n self.label_125.setObjectName(\"label_125\")\r\n self.modcod_sat_sp_ant_size = QtWidgets.QComboBox(self.groupBox_17)\r\n self.modcod_sat_sp_ant_size.setGeometry(QtCore.QRect(450, 190, 161, 22))\r\n self.modcod_sat_sp_ant_size.setObjectName(\"modcod_sat_sp_ant_size\")\r\n self.label_126 = QtWidgets.QLabel(self.groupBox_17)\r\n self.label_126.setGeometry(QtCore.QRect(20, 111, 131, 20))\r\n self.label_126.setObjectName(\"label_126\")\r\n self.freq_sat_sp_ant_size = QtWidgets.QLineEdit(self.groupBox_17)\r\n self.freq_sat_sp_ant_size.setGeometry(QtCore.QRect(200, 111, 101, 22))\r\n self.freq_sat_sp_ant_size.setObjectName(\"freq_sat_sp_ant_size\")\r\n self.label_127 = QtWidgets.QLabel(self.groupBox_17)\r\n self.label_127.setGeometry(QtCore.QRect(665, 73, 91, 20))\r\n self.label_127.setObjectName(\"label_127\")\r\n self.pol_sat_sp_ant_size = QtWidgets.QComboBox(self.groupBox_17)\r\n self.pol_sat_sp_ant_size.setGeometry(QtCore.QRect(660, 97, 101, 22))\r\n self.pol_sat_sp_ant_size.setObjectName(\"pol_sat_sp_ant_size\")\r\n self.pol_sat_sp_ant_size.addItem(\"\")\r\n self.pol_sat_sp_ant_size.addItem(\"\")\r\n self.pol_sat_sp_ant_size.addItem(\"\")\r\n self.clear_sat_sp_ant_size = QtWidgets.QPushButton(self.groupBox_17)\r\n self.clear_sat_sp_ant_size.setGeometry(QtCore.QRect(656, 145, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_sat_sp_ant_size.setFont(font)\r\n self.clear_sat_sp_ant_size.setObjectName(\"clear_sat_sp_ant_size\")\r\n self.groupBox_18 = QtWidgets.QGroupBox(self.single_point_perf_calc_page_2)\r\n self.groupBox_18.setGeometry(QtCore.QRect(10, 378, 801, 181))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_18.setFont(font)\r\n self.groupBox_18.setAutoFillBackground(False)\r\n self.groupBox_18.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_18.setObjectName(\"groupBox_18\")\r\n self.label_128 = QtWidgets.QLabel(self.groupBox_18)\r\n self.label_128.setGeometry(QtCore.QRect(20, 27, 55, 16))\r\n self.label_128.setObjectName(\"label_128\")\r\n self.label_130 = QtWidgets.QLabel(self.groupBox_18)\r\n self.label_130.setGeometry(QtCore.QRect(20, 60, 161, 21))\r\n self.label_130.setObjectName(\"label_130\")\r\n self.name_reception_sp_ant_size = QtWidgets.QLineEdit(self.groupBox_18)\r\n self.name_reception_sp_ant_size.setGeometry(QtCore.QRect(100, 27, 511, 22))\r\n self.name_reception_sp_ant_size.setObjectName(\"name_reception_sp_ant_size\")\r\n self.lnb_gain_reception_sp_ant_size = QtWidgets.QLineEdit(self.groupBox_18)\r\n self.lnb_gain_reception_sp_ant_size.setGeometry(QtCore.QRect(200, 60, 101, 22))\r\n self.lnb_gain_reception_sp_ant_size.setObjectName(\"lnb_gain_reception_sp_ant_size\")\r\n self.save_reception_sp_ant_size = QtWidgets.QPushButton(self.groupBox_18)\r\n self.save_reception_sp_ant_size.setGeometry(QtCore.QRect(654, 133, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_reception_sp_ant_size.setFont(font)\r\n self.save_reception_sp_ant_size.setObjectName(\"save_reception_sp_ant_size\")\r\n self.load_reception_sp_ant_size = QtWidgets.QPushButton(self.groupBox_18)\r\n 
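# groupBox_18: reception-chain inputs (LNB, cable and pointing losses) feeding the antenna-size calculation\r\n            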
self.load_reception_sp_ant_size.setGeometry(QtCore.QRect(654, 99, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_reception_sp_ant_size.setFont(font)\r\n self.load_reception_sp_ant_size.setObjectName(\"load_reception_sp_ant_size\")\r\n self.label_131 = QtWidgets.QLabel(self.groupBox_18)\r\n self.label_131.setGeometry(QtCore.QRect(330, 137, 141, 16))\r\n self.label_131.setObjectName(\"label_131\")\r\n self.ant_eff_reception_sp_ant_size = QtWidgets.QLineEdit(self.groupBox_18)\r\n self.ant_eff_reception_sp_ant_size.setGeometry(QtCore.QRect(510, 137, 101, 22))\r\n self.ant_eff_reception_sp_ant_size.setObjectName(\"ant_eff_reception_sp_ant_size\")\r\n self.label_132 = QtWidgets.QLabel(self.groupBox_18)\r\n self.label_132.setGeometry(QtCore.QRect(330, 65, 161, 16))\r\n self.label_132.setObjectName(\"label_132\")\r\n self.lnb_temp_reception_sp_ant_size = QtWidgets.QLineEdit(self.groupBox_18)\r\n self.lnb_temp_reception_sp_ant_size.setGeometry(QtCore.QRect(510, 62, 101, 22))\r\n self.lnb_temp_reception_sp_ant_size.setObjectName(\"lnb_temp_reception_sp_ant_size\")\r\n self.label_133 = QtWidgets.QLabel(self.groupBox_18)\r\n self.label_133.setGeometry(QtCore.QRect(17, 83, 171, 51))\r\n self.label_133.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_133.setObjectName(\"label_133\")\r\n self.aditional_losses_reception_sp_ant_size = QtWidgets.QLineEdit(self.groupBox_18)\r\n self.aditional_losses_reception_sp_ant_size.setGeometry(QtCore.QRect(200, 97, 101, 22))\r\n self.aditional_losses_reception_sp_ant_size.setObjectName(\"aditional_losses_reception_sp_ant_size\")\r\n self.label_134 = QtWidgets.QLabel(self.groupBox_18)\r\n self.label_134.setGeometry(QtCore.QRect(19, 119, 171, 51))\r\n self.label_134.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_134.setObjectName(\"label_134\")\r\n self.max_depoint_reception_sp_ant_size = QtWidgets.QLineEdit(self.groupBox_18)\r\n self.max_depoint_reception_sp_ant_size.setGeometry(QtCore.QRect(200, 139, 101, 22))\r\n self.max_depoint_reception_sp_ant_size.setObjectName(\"max_depoint_reception_sp_ant_size\")\r\n self.clear_reception_sp_ant_size = QtWidgets.QPushButton(self.groupBox_18)\r\n self.clear_reception_sp_ant_size.setGeometry(QtCore.QRect(654, 67, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_reception_sp_ant_size.setFont(font)\r\n self.clear_reception_sp_ant_size.setObjectName(\"clear_reception_sp_ant_size\")\r\n self.cable_loss_reception_sp_ant_size = QtWidgets.QLineEdit(self.groupBox_18)\r\n self.cable_loss_reception_sp_ant_size.setGeometry(QtCore.QRect(510, 98, 101, 22))\r\n self.cable_loss_reception_sp_ant_size.setObjectName(\"cable_loss_reception_sp_ant_size\")\r\n self.label_135 = QtWidgets.QLabel(self.groupBox_18)\r\n self.label_135.setGeometry(QtCore.QRect(330, 84, 171, 51))\r\n self.label_135.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_135.setObjectName(\"label_135\")\r\n self.calc_sp_ant_size = QtWidgets.QPushButton(self.single_point_perf_calc_page_2)\r\n self.calc_sp_ant_size.setGeometry(QtCore.QRect(670, 830, 141, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.calc_sp_ant_size.setFont(font)\r\n self.calc_sp_ant_size.setObjectName(\"calc_sp_ant_size\")\r\n self.margin_sp_ant_size = QtWidgets.QLineEdit(self.single_point_perf_calc_page_2)\r\n self.margin_sp_ant_size.setGeometry(QtCore.QRect(123, 834, 61, 22))\r\n font = QtGui.QFont()\r\n 
font.setPointSize(10)\r\n self.margin_sp_ant_size.setFont(font)\r\n self.margin_sp_ant_size.setToolTip(\"\")\r\n self.margin_sp_ant_size.setObjectName(\"margin_sp_ant_size\")\r\n self.label_174 = QtWidgets.QLabel(self.single_point_perf_calc_page_2)\r\n self.label_174.setGeometry(QtCore.QRect(23, 834, 91, 21))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_174.setFont(font)\r\n self.label_174.setObjectName(\"label_174\")\r\n self.groupBox_41 = QtWidgets.QGroupBox(self.single_point_perf_calc_page_2)\r\n self.groupBox_41.setGeometry(QtCore.QRect(10, 556, 801, 271))\r\n self.groupBox_41.setObjectName(\"groupBox_41\")\r\n self.ant_max_size_sp_ant_size = QtWidgets.QLineEdit(self.groupBox_41)\r\n self.ant_max_size_sp_ant_size.setGeometry(QtCore.QRect(436, 23, 101, 22))\r\n self.ant_max_size_sp_ant_size.setObjectName(\"ant_max_size_sp_ant_size\")\r\n self.label_323 = QtWidgets.QLabel(self.groupBox_41)\r\n self.label_323.setGeometry(QtCore.QRect(9, 8, 171, 51))\r\n self.label_323.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_323.setObjectName(\"label_323\")\r\n self.ant_min_size_sp_ant_size = QtWidgets.QLineEdit(self.groupBox_41)\r\n self.ant_min_size_sp_ant_size.setGeometry(QtCore.QRect(153, 23, 101, 22))\r\n self.ant_min_size_sp_ant_size.setObjectName(\"ant_min_size_sp_ant_size\")\r\n self.label_313 = QtWidgets.QLabel(self.groupBox_41)\r\n self.label_313.setGeometry(QtCore.QRect(287, 9, 171, 51))\r\n self.label_313.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_313.setObjectName(\"label_313\")\r\n self.output_sp_ant_size = QtWidgets.QTextEdit(self.groupBox_41)\r\n self.output_sp_ant_size.setGeometry(QtCore.QRect(10, 56, 781, 191))\r\n self.output_sp_ant_size.setObjectName(\"output_sp_ant_size\")\r\n self.label_136 = QtWidgets.QLabel(self.groupBox_41)\r\n self.label_136.setGeometry(QtCore.QRect(10, 250, 331, 20))\r\n self.label_136.setObjectName(\"label_136\")\r\n self.export_result_sp_ant_size = QtWidgets.QPushButton(self.single_point_perf_calc_page_2)\r\n self.export_result_sp_ant_size.setGeometry(QtCore.QRect(520, 830, 141, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.export_result_sp_ant_size.setFont(font)\r\n self.export_result_sp_ant_size.setObjectName(\"export_result_sp_ant_size\")\r\n self.stackedWidget_2.addWidget(self.single_point_perf_calc_page_2)\r\n self.list_perf_calc_page_2 = QtWidgets.QWidget()\r\n self.list_perf_calc_page_2.setObjectName(\"list_perf_calc_page_2\")\r\n self.browse_path_mp_perf_2 = QtWidgets.QPushButton(self.list_perf_calc_page_2)\r\n self.browse_path_mp_perf_2.setGeometry(QtCore.QRect(700, 11, 93, 21))\r\n self.browse_path_mp_perf_2.setObjectName(\"browse_path_mp_perf_2\")\r\n self.label_137 = QtWidgets.QLabel(self.list_perf_calc_page_2)\r\n self.label_137.setGeometry(QtCore.QRect(6, 13, 55, 16))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_137.setFont(font)\r\n self.label_137.setObjectName(\"label_137\")\r\n self.preview_mp_perf_2 = QtWidgets.QTableWidget(self.list_perf_calc_page_2)\r\n self.preview_mp_perf_2.setGeometry(QtCore.QRect(10, 61, 801, 131))\r\n self.preview_mp_perf_2.setObjectName(\"preview_mp_perf_2\")\r\n self.preview_mp_perf_2.setColumnCount(0)\r\n self.preview_mp_perf_2.setRowCount(0)\r\n self.label_138 = QtWidgets.QLabel(self.list_perf_calc_page_2)\r\n self.label_138.setGeometry(QtCore.QRect(10, 41, 91, 16))\r\n self.label_138.setObjectName(\"label_138\")\r\n self.groupBox_19 = QtWidgets.QGroupBox(self.list_perf_calc_page_2)\r\n 
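# groupBox_19: satellite form for the list (multi-point) performance calculation page\r\n            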
self.groupBox_19.setGeometry(QtCore.QRect(8, 199, 801, 261))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_19.setFont(font)\r\n self.groupBox_19.setAutoFillBackground(False)\r\n self.groupBox_19.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_19.setObjectName(\"groupBox_19\")\r\n self.label_139 = QtWidgets.QLabel(self.groupBox_19)\r\n self.label_139.setGeometry(QtCore.QRect(20, 27, 55, 16))\r\n self.label_139.setObjectName(\"label_139\")\r\n self.label_140 = QtWidgets.QLabel(self.groupBox_19)\r\n self.label_140.setGeometry(QtCore.QRect(20, 69, 161, 21))\r\n self.label_140.setObjectName(\"label_140\")\r\n self.name_sat_mp_perf_2 = QtWidgets.QLineEdit(self.groupBox_19)\r\n self.name_sat_mp_perf_2.setGeometry(QtCore.QRect(100, 27, 511, 22))\r\n self.name_sat_mp_perf_2.setObjectName(\"name_sat_mp_perf_2\")\r\n self.long_sat_mp_perf_2 = QtWidgets.QLineEdit(self.groupBox_19)\r\n self.long_sat_mp_perf_2.setGeometry(QtCore.QRect(200, 69, 101, 22))\r\n self.long_sat_mp_perf_2.setObjectName(\"long_sat_mp_perf_2\")\r\n self.save_sat_mp_perf_2 = QtWidgets.QPushButton(self.groupBox_19)\r\n self.save_sat_mp_perf_2.setGeometry(QtCore.QRect(658, 222, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_sat_mp_perf_2.setFont(font)\r\n self.save_sat_mp_perf_2.setObjectName(\"save_sat_mp_perf_2\")\r\n self.load_sat_mp_perf_2 = QtWidgets.QPushButton(self.groupBox_19)\r\n self.load_sat_mp_perf_2.setGeometry(QtCore.QRect(658, 190, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_sat_mp_perf_2.setFont(font)\r\n self.load_sat_mp_perf_2.setObjectName(\"load_sat_mp_perf_2\")\r\n self.default_sat_mp_perf_2 = QtWidgets.QComboBox(self.groupBox_19)\r\n self.default_sat_mp_perf_2.setGeometry(QtCore.QRect(650, 58, 131, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(8)\r\n self.default_sat_mp_perf_2.setFont(font)\r\n self.default_sat_mp_perf_2.setObjectName(\"default_sat_mp_perf_2\")\r\n self.label_141 = QtWidgets.QLabel(self.groupBox_19)\r\n self.label_141.setGeometry(QtCore.QRect(651, 29, 131, 16))\r\n self.label_141.setObjectName(\"label_141\")\r\n self.label_142 = QtWidgets.QLabel(self.groupBox_19)\r\n self.label_142.setGeometry(QtCore.QRect(330, 70, 111, 16))\r\n self.label_142.setObjectName(\"label_142\")\r\n self.height_sat_mp_perf_2 = QtWidgets.QLineEdit(self.groupBox_19)\r\n self.height_sat_mp_perf_2.setGeometry(QtCore.QRect(510, 67, 101, 22))\r\n self.height_sat_mp_perf_2.setObjectName(\"height_sat_mp_perf_2\")\r\n self.label_143 = QtWidgets.QLabel(self.groupBox_19)\r\n self.label_143.setGeometry(QtCore.QRect(330, 108, 121, 16))\r\n self.label_143.setObjectName(\"label_143\")\r\n self.eirp_sat_mp_perf_2 = QtWidgets.QLineEdit(self.groupBox_19)\r\n self.eirp_sat_mp_perf_2.setGeometry(QtCore.QRect(510, 104, 101, 22))\r\n self.eirp_sat_mp_perf_2.setObjectName(\"eirp_sat_mp_perf_2\")\r\n self.label_144 = QtWidgets.QLabel(self.groupBox_19)\r\n self.label_144.setGeometry(QtCore.QRect(20, 137, 171, 51))\r\n self.label_144.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_144.setObjectName(\"label_144\")\r\n self.max_bw_sat_mp_perf_2 = QtWidgets.QLineEdit(self.groupBox_19)\r\n self.max_bw_sat_mp_perf_2.setGeometry(QtCore.QRect(200, 147, 101, 22))\r\n self.max_bw_sat_mp_perf_2.setObjectName(\"max_bw_sat_mp_perf_2\")\r\n self.label_145 = QtWidgets.QLabel(self.groupBox_19)\r\n self.label_145.setGeometry(QtCore.QRect(330, 
132, 171, 51))\r\n self.label_145.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_145.setObjectName(\"label_145\")\r\n self.bw_util_sat_mp_perf_2 = QtWidgets.QLineEdit(self.groupBox_19)\r\n self.bw_util_sat_mp_perf_2.setGeometry(QtCore.QRect(510, 145, 101, 22))\r\n self.bw_util_sat_mp_perf_2.setObjectName(\"bw_util_sat_mp_perf_2\")\r\n self.label_146 = QtWidgets.QLabel(self.groupBox_19)\r\n self.label_146.setGeometry(QtCore.QRect(20, 191, 61, 16))\r\n self.label_146.setObjectName(\"label_146\")\r\n self.rolloff_sat_mp_perf_2 = QtWidgets.QLineEdit(self.groupBox_19)\r\n self.rolloff_sat_mp_perf_2.setGeometry(QtCore.QRect(200, 191, 101, 22))\r\n self.rolloff_sat_mp_perf_2.setObjectName(\"rolloff_sat_mp_perf_2\")\r\n self.label_147 = QtWidgets.QLabel(self.groupBox_19)\r\n self.label_147.setGeometry(QtCore.QRect(330, 191, 91, 16))\r\n self.label_147.setObjectName(\"label_147\")\r\n self.modcod_sat_mp_perf_2 = QtWidgets.QComboBox(self.groupBox_19)\r\n self.modcod_sat_mp_perf_2.setGeometry(QtCore.QRect(450, 191, 161, 22))\r\n self.modcod_sat_mp_perf_2.setObjectName(\"modcod_sat_mp_perf_2\")\r\n self.modcod_sat_mp_perf_2.addItem(\"\")\r\n self.modcod_sat_mp_perf_2.addItem(\"\")\r\n self.modcod_sat_mp_perf_2.addItem(\"\")\r\n self.label_148 = QtWidgets.QLabel(self.groupBox_19)\r\n self.label_148.setGeometry(QtCore.QRect(20, 109, 131, 20))\r\n self.label_148.setObjectName(\"label_148\")\r\n self.freq_sat_mp_perf_2 = QtWidgets.QLineEdit(self.groupBox_19)\r\n self.freq_sat_mp_perf_2.setGeometry(QtCore.QRect(200, 109, 101, 22))\r\n self.freq_sat_mp_perf_2.setObjectName(\"freq_sat_mp_perf_2\")\r\n self.label_149 = QtWidgets.QLabel(self.groupBox_19)\r\n self.label_149.setGeometry(QtCore.QRect(666, 89, 91, 20))\r\n self.label_149.setObjectName(\"label_149\")\r\n self.pol_sat_mp_perf_2 = QtWidgets.QComboBox(self.groupBox_19)\r\n self.pol_sat_mp_perf_2.setGeometry(QtCore.QRect(663, 119, 101, 22))\r\n self.pol_sat_mp_perf_2.setObjectName(\"pol_sat_mp_perf_2\")\r\n self.pol_sat_mp_perf_2.addItem(\"\")\r\n self.pol_sat_mp_perf_2.addItem(\"\")\r\n self.pol_sat_mp_perf_2.addItem(\"\")\r\n self.clear_satellite_mp_perf_2 = QtWidgets.QPushButton(self.groupBox_19)\r\n self.clear_satellite_mp_perf_2.setGeometry(QtCore.QRect(658, 156, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_satellite_mp_perf_2.setFont(font)\r\n self.clear_satellite_mp_perf_2.setObjectName(\"clear_satellite_mp_perf_2\")\r\n self.groupBox_20 = QtWidgets.QGroupBox(self.list_perf_calc_page_2)\r\n self.groupBox_20.setGeometry(QtCore.QRect(10, 458, 801, 221))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_20.setFont(font)\r\n self.groupBox_20.setAutoFillBackground(False)\r\n self.groupBox_20.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_20.setObjectName(\"groupBox_20\")\r\n self.label_150 = QtWidgets.QLabel(self.groupBox_20)\r\n self.label_150.setGeometry(QtCore.QRect(20, 38, 55, 16))\r\n self.label_150.setObjectName(\"label_150\")\r\n self.label_151 = QtWidgets.QLabel(self.groupBox_20)\r\n self.label_151.setGeometry(QtCore.QRect(20, 68, 151, 31))\r\n self.label_151.setObjectName(\"label_151\")\r\n self.label_152 = QtWidgets.QLabel(self.groupBox_20)\r\n self.label_152.setGeometry(QtCore.QRect(20, 113, 161, 21))\r\n self.label_152.setObjectName(\"label_152\")\r\n self.name_reception_mp_perf_2 = QtWidgets.QLineEdit(self.groupBox_20)\r\n self.name_reception_mp_perf_2.setGeometry(QtCore.QRect(100, 38, 511, 22))\r\n 
self.name_reception_mp_perf_2.setObjectName(\"name_reception_mp_perf_2\")\r\n self.lnb_gain_reception_mp_perf_2 = QtWidgets.QLineEdit(self.groupBox_20)\r\n self.lnb_gain_reception_mp_perf_2.setGeometry(QtCore.QRect(200, 113, 101, 22))\r\n self.lnb_gain_reception_mp_perf_2.setObjectName(\"lnb_gain_reception_mp_perf_2\")\r\n self.ant_size_reception_mp_perf_2 = QtWidgets.QLineEdit(self.groupBox_20)\r\n self.ant_size_reception_mp_perf_2.setGeometry(QtCore.QRect(200, 74, 101, 22))\r\n self.ant_size_reception_mp_perf_2.setObjectName(\"ant_size_reception_mp_perf_2\")\r\n self.save_reception_mp_perf_2 = QtWidgets.QPushButton(self.groupBox_20)\r\n self.save_reception_mp_perf_2.setGeometry(QtCore.QRect(656, 186, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_reception_mp_perf_2.setFont(font)\r\n self.save_reception_mp_perf_2.setObjectName(\"save_reception_mp_perf_2\")\r\n self.load_reception_mp_perf_2 = QtWidgets.QPushButton(self.groupBox_20)\r\n self.load_reception_mp_perf_2.setGeometry(QtCore.QRect(656, 151, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_reception_mp_perf_2.setFont(font)\r\n self.load_reception_mp_perf_2.setObjectName(\"load_reception_mp_perf_2\")\r\n self.label_153 = QtWidgets.QLabel(self.groupBox_20)\r\n self.label_153.setGeometry(QtCore.QRect(330, 78, 141, 16))\r\n self.label_153.setObjectName(\"label_153\")\r\n self.ant_eff_reception_mp_perf_2 = QtWidgets.QLineEdit(self.groupBox_20)\r\n self.ant_eff_reception_mp_perf_2.setGeometry(QtCore.QRect(510, 78, 101, 22))\r\n self.ant_eff_reception_mp_perf_2.setObjectName(\"ant_eff_reception_mp_perf_2\")\r\n self.label_154 = QtWidgets.QLabel(self.groupBox_20)\r\n self.label_154.setGeometry(QtCore.QRect(330, 118, 161, 16))\r\n self.label_154.setScaledContents(False)\r\n self.label_154.setObjectName(\"label_154\")\r\n self.lnb_temp_reception_mp_perf_2 = QtWidgets.QLineEdit(self.groupBox_20)\r\n self.lnb_temp_reception_mp_perf_2.setGeometry(QtCore.QRect(510, 115, 101, 22))\r\n self.lnb_temp_reception_mp_perf_2.setObjectName(\"lnb_temp_reception_mp_perf_2\")\r\n self.label_155 = QtWidgets.QLabel(self.groupBox_20)\r\n self.label_155.setGeometry(QtCore.QRect(20, 132, 171, 51))\r\n self.label_155.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_155.setObjectName(\"label_155\")\r\n self.aditional_losses_reception_mp_perf_2 = QtWidgets.QLineEdit(self.groupBox_20)\r\n self.aditional_losses_reception_mp_perf_2.setGeometry(QtCore.QRect(200, 146, 101, 22))\r\n self.aditional_losses_reception_mp_perf_2.setObjectName(\"aditional_losses_reception_mp_perf_2\")\r\n self.label_156 = QtWidgets.QLabel(self.groupBox_20)\r\n self.label_156.setGeometry(QtCore.QRect(20, 170, 171, 51))\r\n self.label_156.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_156.setObjectName(\"label_156\")\r\n self.max_depoint_reception_mp_perf_2 = QtWidgets.QLineEdit(self.groupBox_20)\r\n self.max_depoint_reception_mp_perf_2.setGeometry(QtCore.QRect(200, 190, 101, 22))\r\n self.max_depoint_reception_mp_perf_2.setObjectName(\"max_depoint_reception_mp_perf_2\")\r\n self.clear_reception_mp_perf_2 = QtWidgets.QPushButton(self.groupBox_20)\r\n self.clear_reception_mp_perf_2.setGeometry(QtCore.QRect(656, 115, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_reception_mp_perf_2.setFont(font)\r\n 
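# Remaining reception fields, then the batch-run controls: output pane, relaxation, margin and thread count\r\n            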
self.clear_reception_mp_perf_2.setObjectName(\"clear_reception_mp_perf_2\")\r\n self.cable_loss_reception_mp_perf_2 = QtWidgets.QLineEdit(self.groupBox_20)\r\n self.cable_loss_reception_mp_perf_2.setGeometry(QtCore.QRect(510, 150, 101, 22))\r\n self.cable_loss_reception_mp_perf_2.setObjectName(\"cable_loss_reception_mp_perf_2\")\r\n self.label_157 = QtWidgets.QLabel(self.groupBox_20)\r\n self.label_157.setGeometry(QtCore.QRect(330, 133, 171, 51))\r\n self.label_157.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_157.setObjectName(\"label_157\")\r\n self.output_sp_perf_4 = QtWidgets.QTextEdit(self.list_perf_calc_page_2)\r\n self.output_sp_perf_4.setGeometry(QtCore.QRect(10, 684, 801, 141))\r\n self.output_sp_perf_4.setObjectName(\"output_sp_perf_4\")\r\n self.relaxation_mp_perf_2 = QtWidgets.QLineEdit(self.list_perf_calc_page_2)\r\n self.relaxation_mp_perf_2.setGeometry(QtCore.QRect(170, 832, 51, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.relaxation_mp_perf_2.setFont(font)\r\n self.relaxation_mp_perf_2.setToolTip(\"\")\r\n self.relaxation_mp_perf_2.setObjectName(\"relaxation_mp_perf_2\")\r\n self.calc_mp_perf_2 = QtWidgets.QPushButton(self.list_perf_calc_page_2)\r\n self.calc_mp_perf_2.setGeometry(QtCore.QRect(670, 830, 141, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.calc_mp_perf_2.setFont(font)\r\n self.calc_mp_perf_2.setObjectName(\"calc_mp_perf_2\")\r\n self.label_158 = QtWidgets.QLabel(self.list_perf_calc_page_2)\r\n self.label_158.setGeometry(QtCore.QRect(10, 833, 151, 21))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_158.setFont(font)\r\n self.label_158.setObjectName(\"label_158\")\r\n self.label_159 = QtWidgets.QLabel(self.list_perf_calc_page_2)\r\n self.label_159.setGeometry(QtCore.QRect(420, 831, 61, 20))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_159.setFont(font)\r\n self.label_159.setObjectName(\"label_159\")\r\n self.n_threads_2 = QtWidgets.QComboBox(self.list_perf_calc_page_2)\r\n self.n_threads_2.setGeometry(QtCore.QRect(490, 831, 73, 22))\r\n self.n_threads_2.setObjectName(\"n_threads_2\")\r\n self.path_mp_perf_2 = QtWidgets.QLineEdit(self.list_perf_calc_page_2)\r\n self.path_mp_perf_2.setGeometry(QtCore.QRect(55, 10, 631, 22))\r\n self.path_mp_perf_2.setObjectName(\"path_mp_perf_2\")\r\n self.label_160 = QtWidgets.QLabel(self.list_perf_calc_page_2)\r\n self.label_160.setGeometry(QtCore.QRect(240, 831, 91, 21))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_160.setFont(font)\r\n self.label_160.setObjectName(\"label_160\")\r\n self.margin_mp_perf_2 = QtWidgets.QLineEdit(self.list_perf_calc_page_2)\r\n self.margin_mp_perf_2.setGeometry(QtCore.QRect(340, 831, 61, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.margin_mp_perf_2.setFont(font)\r\n self.margin_mp_perf_2.setToolTip(\"\")\r\n self.margin_mp_perf_2.setObjectName(\"margin_mp_perf_2\")\r\n self.stackedWidget_2.addWidget(self.list_perf_calc_page_2)\r\n self.stackedWidget.addWidget(self.single_point_ant_size_calc_page)\r\n self.list_ant_size_calc = QtWidgets.QWidget()\r\n self.list_ant_size_calc.setObjectName(\"list_ant_size_calc\")\r\n self.stackedWidget_3 = QtWidgets.QStackedWidget(self.list_ant_size_calc)\r\n self.stackedWidget_3.setGeometry(QtCore.QRect(0, 0, 811, 861))\r\n self.stackedWidget_3.setObjectName(\"stackedWidget_3\")\r\n self.home_page_2 = QtWidgets.QWidget()\r\n self.home_page_2.setObjectName(\"home_page_2\")\r\n self.label_129 = QtWidgets.QLabel(self.home_page_2)\r\n 
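# stackedWidget_3 pages for the list antenna-size calculator: home logo first, then a third copy of the satellite form\r\n            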
self.label_129.setGeometry(QtCore.QRect(217, 270, 431, 391))\r\n self.label_129.setText(\"\")\r\n self.label_129.setPixmap(QtGui.QPixmap(\"UI\\\\home_logo.png\"))\r\n self.label_129.setObjectName(\"label_129\")\r\n self.stackedWidget_3.addWidget(self.home_page_2)\r\n self.satellite_page_3 = QtWidgets.QWidget()\r\n self.satellite_page_3.setObjectName(\"satellite_page_3\")\r\n self.groupBox_21 = QtWidgets.QGroupBox(self.satellite_page_3)\r\n self.groupBox_21.setGeometry(QtCore.QRect(10, 10, 801, 851))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_21.setFont(font)\r\n self.groupBox_21.setAutoFillBackground(False)\r\n self.groupBox_21.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_21.setObjectName(\"groupBox_21\")\r\n self.label_161 = QtWidgets.QLabel(self.groupBox_21)\r\n self.label_161.setGeometry(QtCore.QRect(20, 38, 55, 16))\r\n self.label_161.setObjectName(\"label_161\")\r\n self.label_162 = QtWidgets.QLabel(self.groupBox_21)\r\n self.label_162.setGeometry(QtCore.QRect(20, 80, 161, 21))\r\n self.label_162.setObjectName(\"label_162\")\r\n self.name_sat_3 = QtWidgets.QLineEdit(self.groupBox_21)\r\n self.name_sat_3.setGeometry(QtCore.QRect(100, 38, 511, 22))\r\n self.name_sat_3.setObjectName(\"name_sat_3\")\r\n self.long_sat_3 = QtWidgets.QLineEdit(self.groupBox_21)\r\n self.long_sat_3.setGeometry(QtCore.QRect(200, 80, 101, 22))\r\n self.long_sat_3.setObjectName(\"long_sat_3\")\r\n self.save_sat_3 = QtWidgets.QPushButton(self.groupBox_21)\r\n self.save_sat_3.setGeometry(QtCore.QRect(680, 810, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_sat_3.setFont(font)\r\n self.save_sat_3.setObjectName(\"save_sat_3\")\r\n self.load_sat_3 = QtWidgets.QPushButton(self.groupBox_21)\r\n self.load_sat_3.setGeometry(QtCore.QRect(550, 810, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_sat_3.setFont(font)\r\n self.load_sat_3.setObjectName(\"load_sat_3\")\r\n self.default_sat_3 = QtWidgets.QComboBox(self.groupBox_21)\r\n self.default_sat_3.setGeometry(QtCore.QRect(650, 70, 131, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(8)\r\n self.default_sat_3.setFont(font)\r\n self.default_sat_3.setObjectName(\"default_sat_3\")\r\n self.label_163 = QtWidgets.QLabel(self.groupBox_21)\r\n self.label_163.setGeometry(QtCore.QRect(650, 40, 131, 16))\r\n self.label_163.setObjectName(\"label_163\")\r\n self.label_164 = QtWidgets.QLabel(self.groupBox_21)\r\n self.label_164.setGeometry(QtCore.QRect(330, 78, 111, 16))\r\n self.label_164.setObjectName(\"label_164\")\r\n self.height_sat_3 = QtWidgets.QLineEdit(self.groupBox_21)\r\n self.height_sat_3.setGeometry(QtCore.QRect(510, 78, 101, 22))\r\n self.height_sat_3.setObjectName(\"height_sat_3\")\r\n self.label_165 = QtWidgets.QLabel(self.groupBox_21)\r\n self.label_165.setGeometry(QtCore.QRect(330, 119, 121, 16))\r\n self.label_165.setObjectName(\"label_165\")\r\n self.eirp_sat_3 = QtWidgets.QLineEdit(self.groupBox_21)\r\n self.eirp_sat_3.setGeometry(QtCore.QRect(510, 115, 101, 22))\r\n self.eirp_sat_3.setObjectName(\"eirp_sat_3\")\r\n self.label_166 = QtWidgets.QLabel(self.groupBox_21)\r\n self.label_166.setGeometry(QtCore.QRect(20, 148, 171, 51))\r\n self.label_166.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_166.setObjectName(\"label_166\")\r\n self.max_bw_sat_3 = QtWidgets.QLineEdit(self.groupBox_21)\r\n self.max_bw_sat_3.setGeometry(QtCore.QRect(200, 158, 101, 
22))\r\n self.max_bw_sat_3.setObjectName(\"max_bw_sat_3\")\r\n self.label_167 = QtWidgets.QLabel(self.groupBox_21)\r\n self.label_167.setGeometry(QtCore.QRect(330, 143, 171, 51))\r\n self.label_167.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_167.setObjectName(\"label_167\")\r\n self.bw_util_sat_3 = QtWidgets.QLineEdit(self.groupBox_21)\r\n self.bw_util_sat_3.setGeometry(QtCore.QRect(510, 156, 101, 22))\r\n self.bw_util_sat_3.setObjectName(\"bw_util_sat_3\")\r\n self.label_168 = QtWidgets.QLabel(self.groupBox_21)\r\n self.label_168.setGeometry(QtCore.QRect(20, 203, 61, 16))\r\n self.label_168.setObjectName(\"label_168\")\r\n self.rolloff_sat_3 = QtWidgets.QLineEdit(self.groupBox_21)\r\n self.rolloff_sat_3.setGeometry(QtCore.QRect(200, 203, 101, 22))\r\n self.rolloff_sat_3.setObjectName(\"rolloff_sat_3\")\r\n self.label_169 = QtWidgets.QLabel(self.groupBox_21)\r\n self.label_169.setGeometry(QtCore.QRect(330, 203, 91, 16))\r\n self.label_169.setObjectName(\"label_169\")\r\n self.modcod_sat_3 = QtWidgets.QComboBox(self.groupBox_21)\r\n self.modcod_sat_3.setGeometry(QtCore.QRect(450, 203, 161, 22))\r\n self.modcod_sat_3.setObjectName(\"modcod_sat_3\")\r\n self.modcod_sat_3.addItem(\"\")\r\n self.modcod_sat_3.addItem(\"\")\r\n self.modcod_sat_3.addItem(\"\")\r\n self.label_170 = QtWidgets.QLabel(self.groupBox_21)\r\n self.label_170.setGeometry(QtCore.QRect(20, 120, 131, 20))\r\n self.label_170.setObjectName(\"label_170\")\r\n self.freq_sat_3 = QtWidgets.QLineEdit(self.groupBox_21)\r\n self.freq_sat_3.setGeometry(QtCore.QRect(200, 120, 101, 22))\r\n self.freq_sat_3.setObjectName(\"freq_sat_3\")\r\n self.label_171 = QtWidgets.QLabel(self.groupBox_21)\r\n self.label_171.setGeometry(QtCore.QRect(673, 100, 91, 20))\r\n self.label_171.setObjectName(\"label_171\")\r\n self.pol_sat_5 = QtWidgets.QComboBox(self.groupBox_21)\r\n self.pol_sat_5.setGeometry(QtCore.QRect(667, 130, 101, 22))\r\n self.pol_sat_5.setObjectName(\"pol_sat_5\")\r\n self.pol_sat_5.addItem(\"\")\r\n self.pol_sat_5.addItem(\"\")\r\n self.pol_sat_5.addItem(\"\")\r\n self.clear_sat_3 = QtWidgets.QPushButton(self.groupBox_21)\r\n self.clear_sat_3.setGeometry(QtCore.QRect(420, 810, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_sat_3.setFont(font)\r\n self.clear_sat_3.setObjectName(\"clear_sat_3\")\r\n self.stackedWidget_3.addWidget(self.satellite_page_3)\r\n self.ground_station_page_3 = QtWidgets.QWidget()\r\n self.ground_station_page_3.setObjectName(\"ground_station_page_3\")\r\n self.groupBox_22 = QtWidgets.QGroupBox(self.ground_station_page_3)\r\n self.groupBox_22.setGeometry(QtCore.QRect(10, 10, 801, 851))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_22.setFont(font)\r\n self.groupBox_22.setAutoFillBackground(False)\r\n self.groupBox_22.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_22.setObjectName(\"groupBox_22\")\r\n self.label_172 = QtWidgets.QLabel(self.groupBox_22)\r\n self.label_172.setGeometry(QtCore.QRect(20, 36, 55, 16))\r\n self.label_172.setObjectName(\"label_172\")\r\n self.label_175 = QtWidgets.QLabel(self.groupBox_22)\r\n self.label_175.setGeometry(QtCore.QRect(20, 66, 151, 31))\r\n self.label_175.setObjectName(\"label_175\")\r\n self.label_176 = QtWidgets.QLabel(self.groupBox_22)\r\n self.label_176.setGeometry(QtCore.QRect(20, 111, 161, 21))\r\n self.label_176.setObjectName(\"label_176\")\r\n self.name_ground_station_grstat_3 = QtWidgets.QLineEdit(self.groupBox_22)\r\n 
self.name_ground_station_grstat_3.setGeometry(QtCore.QRect(100, 36, 271, 22))\r\n self.name_ground_station_grstat_3.setObjectName(\"name_ground_station_grstat_3\")\r\n self.long_ground_station_grstat_3 = QtWidgets.QLineEdit(self.groupBox_22)\r\n self.long_ground_station_grstat_3.setGeometry(QtCore.QRect(200, 111, 101, 22))\r\n self.long_ground_station_grstat_3.setObjectName(\"long_ground_station_grstat_3\")\r\n self.lat_ground_station_grstat_3 = QtWidgets.QLineEdit(self.groupBox_22)\r\n self.lat_ground_station_grstat_3.setGeometry(QtCore.QRect(200, 76, 101, 22))\r\n self.lat_ground_station_grstat_3.setObjectName(\"lat_ground_station_grstat_3\")\r\n self.save_ground_station_gdstation_3 = QtWidgets.QPushButton(self.groupBox_22)\r\n self.save_ground_station_gdstation_3.setGeometry(QtCore.QRect(700, 810, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_ground_station_gdstation_3.setFont(font)\r\n self.save_ground_station_gdstation_3.setObjectName(\"save_ground_station_gdstation_3\")\r\n self.load_ground_station_gdstation_3 = QtWidgets.QPushButton(self.groupBox_22)\r\n self.load_ground_station_gdstation_3.setGeometry(QtCore.QRect(591, 811, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_ground_station_gdstation_3.setFont(font)\r\n self.load_ground_station_gdstation_3.setObjectName(\"load_ground_station_gdstation_3\")\r\n self.clear_ground_station_gdstation_3 = QtWidgets.QPushButton(self.groupBox_22)\r\n self.clear_ground_station_gdstation_3.setGeometry(QtCore.QRect(481, 811, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_ground_station_gdstation_3.setFont(font)\r\n self.clear_ground_station_gdstation_3.setObjectName(\"clear_ground_station_gdstation_3\")\r\n self.stackedWidget_3.addWidget(self.ground_station_page_3)\r\n self.reception_page_3 = QtWidgets.QWidget()\r\n self.reception_page_3.setObjectName(\"reception_page_3\")\r\n self.groupBox_23 = QtWidgets.QGroupBox(self.reception_page_3)\r\n self.groupBox_23.setGeometry(QtCore.QRect(10, 10, 801, 851))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_23.setFont(font)\r\n self.groupBox_23.setAutoFillBackground(False)\r\n self.groupBox_23.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_23.setObjectName(\"groupBox_23\")\r\n self.label_177 = QtWidgets.QLabel(self.groupBox_23)\r\n self.label_177.setGeometry(QtCore.QRect(20, 38, 55, 16))\r\n self.label_177.setObjectName(\"label_177\")\r\n self.label_178 = QtWidgets.QLabel(self.groupBox_23)\r\n self.label_178.setGeometry(QtCore.QRect(20, 68, 151, 31))\r\n self.label_178.setObjectName(\"label_178\")\r\n self.label_179 = QtWidgets.QLabel(self.groupBox_23)\r\n self.label_179.setGeometry(QtCore.QRect(20, 113, 161, 21))\r\n self.label_179.setObjectName(\"label_179\")\r\n self.name_reception_rcp_3 = QtWidgets.QLineEdit(self.groupBox_23)\r\n self.name_reception_rcp_3.setGeometry(QtCore.QRect(100, 38, 511, 22))\r\n self.name_reception_rcp_3.setObjectName(\"name_reception_rcp_3\")\r\n self.lnb_gain_reception_rcp_3 = QtWidgets.QLineEdit(self.groupBox_23)\r\n self.lnb_gain_reception_rcp_3.setGeometry(QtCore.QRect(200, 113, 101, 22))\r\n self.lnb_gain_reception_rcp_3.setObjectName(\"lnb_gain_reception_rcp_3\")\r\n self.ant_size_reception_rcp_3 = QtWidgets.QLineEdit(self.groupBox_23)\r\n self.ant_size_reception_rcp_3.setGeometry(QtCore.QRect(200, 74, 
101, 22))\r\n self.ant_size_reception_rcp_3.setObjectName(\"ant_size_reception_rcp_3\")\r\n self.save_reception_rcp_3 = QtWidgets.QPushButton(self.groupBox_23)\r\n self.save_reception_rcp_3.setGeometry(QtCore.QRect(700, 810, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_reception_rcp_3.setFont(font)\r\n self.save_reception_rcp_3.setObjectName(\"save_reception_rcp_3\")\r\n self.load_reception_rcp_9 = QtWidgets.QPushButton(self.groupBox_23)\r\n self.load_reception_rcp_9.setGeometry(QtCore.QRect(591, 811, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_reception_rcp_9.setFont(font)\r\n self.load_reception_rcp_9.setObjectName(\"load_reception_rcp_9\")\r\n self.label_180 = QtWidgets.QLabel(self.groupBox_23)\r\n self.label_180.setGeometry(QtCore.QRect(330, 80, 171, 16))\r\n self.label_180.setObjectName(\"label_180\")\r\n self.ant_eff_reception_rcp_3 = QtWidgets.QLineEdit(self.groupBox_23)\r\n self.ant_eff_reception_rcp_3.setGeometry(QtCore.QRect(510, 78, 101, 22))\r\n self.ant_eff_reception_rcp_3.setObjectName(\"ant_eff_reception_rcp_3\")\r\n self.label_181 = QtWidgets.QLabel(self.groupBox_23)\r\n self.label_181.setGeometry(QtCore.QRect(330, 119, 161, 16))\r\n self.label_181.setObjectName(\"label_181\")\r\n self.lnb_temp_reception_rcp_3 = QtWidgets.QLineEdit(self.groupBox_23)\r\n self.lnb_temp_reception_rcp_3.setGeometry(QtCore.QRect(510, 115, 101, 22))\r\n self.lnb_temp_reception_rcp_3.setObjectName(\"lnb_temp_reception_rcp_3\")\r\n self.label_182 = QtWidgets.QLabel(self.groupBox_23)\r\n self.label_182.setGeometry(QtCore.QRect(20, 138, 171, 51))\r\n self.label_182.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_182.setObjectName(\"label_182\")\r\n self.aditional_losses_reception_rcp_3 = QtWidgets.QLineEdit(self.groupBox_23)\r\n self.aditional_losses_reception_rcp_3.setGeometry(QtCore.QRect(200, 153, 101, 22))\r\n self.aditional_losses_reception_rcp_3.setObjectName(\"aditional_losses_reception_rcp_3\")\r\n self.label_183 = QtWidgets.QLabel(self.groupBox_23)\r\n self.label_183.setGeometry(QtCore.QRect(20, 187, 171, 51))\r\n self.label_183.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_183.setObjectName(\"label_183\")\r\n self.max_depoint_reception_rcp_3 = QtWidgets.QLineEdit(self.groupBox_23)\r\n self.max_depoint_reception_rcp_3.setGeometry(QtCore.QRect(200, 200, 101, 22))\r\n self.max_depoint_reception_rcp_3.setObjectName(\"max_depoint_reception_rcp_3\")\r\n self.clear_reception_rcp_5 = QtWidgets.QPushButton(self.groupBox_23)\r\n self.clear_reception_rcp_5.setGeometry(QtCore.QRect(481, 811, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_reception_rcp_5.setFont(font)\r\n self.clear_reception_rcp_5.setObjectName(\"clear_reception_rcp_5\")\r\n self.label_184 = QtWidgets.QLabel(self.groupBox_23)\r\n self.label_184.setGeometry(QtCore.QRect(330, 140, 171, 51))\r\n self.label_184.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_184.setObjectName(\"label_184\")\r\n self.cable_loss_reception_rcp_3 = QtWidgets.QLineEdit(self.groupBox_23)\r\n self.cable_loss_reception_rcp_3.setGeometry(QtCore.QRect(510, 155, 101, 22))\r\n self.cable_loss_reception_rcp_3.setObjectName(\"cable_loss_reception_rcp_3\")\r\n self.stackedWidget_3.addWidget(self.reception_page_3)\r\n self.ant_size_single_point_calc_page_3 = QtWidgets.QWidget()\r\n 
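# This page nests stackedWidget_4, which repeats the satellite / ground-station / reception forms for the antenna-size workflow.\r\n        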
self.ant_size_single_point_calc_page_3.setObjectName(\"ant_size_single_point_calc_page_3\")\r\n self.stackedWidget_4 = QtWidgets.QStackedWidget(self.ant_size_single_point_calc_page_3)\r\n self.stackedWidget_4.setGeometry(QtCore.QRect(0, 0, 811, 861))\r\n self.stackedWidget_4.setObjectName(\"stackedWidget_4\")\r\n self.empty_page_3 = QtWidgets.QWidget()\r\n self.empty_page_3.setObjectName(\"empty_page_3\")\r\n self.label_185 = QtWidgets.QLabel(self.empty_page_3)\r\n self.label_185.setGeometry(QtCore.QRect(217, 270, 431, 391))\r\n self.label_185.setText(\"\")\r\n self.label_185.setPixmap(QtGui.QPixmap(\"UI\\\\home_logo.png\"))\r\n self.label_185.setObjectName(\"label_185\")\r\n self.stackedWidget_4.addWidget(self.empty_page_3)\r\n self.satellite_page_4 = QtWidgets.QWidget()\r\n self.satellite_page_4.setObjectName(\"satellite_page_4\")\r\n self.groupBox_24 = QtWidgets.QGroupBox(self.satellite_page_4)\r\n self.groupBox_24.setGeometry(QtCore.QRect(10, 10, 801, 851))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_24.setFont(font)\r\n self.groupBox_24.setAutoFillBackground(False)\r\n self.groupBox_24.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_24.setObjectName(\"groupBox_24\")\r\n self.label_186 = QtWidgets.QLabel(self.groupBox_24)\r\n self.label_186.setGeometry(QtCore.QRect(20, 38, 55, 16))\r\n self.label_186.setObjectName(\"label_186\")\r\n self.label_187 = QtWidgets.QLabel(self.groupBox_24)\r\n self.label_187.setGeometry(QtCore.QRect(20, 80, 161, 21))\r\n self.label_187.setObjectName(\"label_187\")\r\n self.name_sat_4 = QtWidgets.QLineEdit(self.groupBox_24)\r\n self.name_sat_4.setGeometry(QtCore.QRect(100, 38, 511, 22))\r\n self.name_sat_4.setObjectName(\"name_sat_4\")\r\n self.long_sat_4 = QtWidgets.QLineEdit(self.groupBox_24)\r\n self.long_sat_4.setGeometry(QtCore.QRect(200, 80, 101, 22))\r\n self.long_sat_4.setObjectName(\"long_sat_4\")\r\n self.save_sat_4 = QtWidgets.QPushButton(self.groupBox_24)\r\n self.save_sat_4.setGeometry(QtCore.QRect(680, 810, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_sat_4.setFont(font)\r\n self.save_sat_4.setObjectName(\"save_sat_4\")\r\n self.load_sat_4 = QtWidgets.QPushButton(self.groupBox_24)\r\n self.load_sat_4.setGeometry(QtCore.QRect(550, 810, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_sat_4.setFont(font)\r\n self.load_sat_4.setObjectName(\"load_sat_4\")\r\n self.default_sat_4 = QtWidgets.QComboBox(self.groupBox_24)\r\n self.default_sat_4.setGeometry(QtCore.QRect(650, 70, 131, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(8)\r\n self.default_sat_4.setFont(font)\r\n self.default_sat_4.setObjectName(\"default_sat_4\")\r\n self.label_188 = QtWidgets.QLabel(self.groupBox_24)\r\n self.label_188.setGeometry(QtCore.QRect(650, 40, 131, 16))\r\n self.label_188.setObjectName(\"label_188\")\r\n self.label_189 = QtWidgets.QLabel(self.groupBox_24)\r\n self.label_189.setGeometry(QtCore.QRect(330, 78, 111, 16))\r\n self.label_189.setObjectName(\"label_189\")\r\n self.height_sat_4 = QtWidgets.QLineEdit(self.groupBox_24)\r\n self.height_sat_4.setGeometry(QtCore.QRect(510, 78, 101, 22))\r\n self.height_sat_4.setObjectName(\"height_sat_4\")\r\n self.label_190 = QtWidgets.QLabel(self.groupBox_24)\r\n self.label_190.setGeometry(QtCore.QRect(330, 119, 121, 16))\r\n self.label_190.setObjectName(\"label_190\")\r\n self.eirp_sat_4 = 
QtWidgets.QLineEdit(self.groupBox_24)\r\n self.eirp_sat_4.setGeometry(QtCore.QRect(510, 115, 101, 22))\r\n self.eirp_sat_4.setObjectName(\"eirp_sat_4\")\r\n self.label_191 = QtWidgets.QLabel(self.groupBox_24)\r\n self.label_191.setGeometry(QtCore.QRect(20, 148, 171, 51))\r\n self.label_191.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_191.setObjectName(\"label_191\")\r\n self.max_bw_sat_4 = QtWidgets.QLineEdit(self.groupBox_24)\r\n self.max_bw_sat_4.setGeometry(QtCore.QRect(200, 158, 101, 22))\r\n self.max_bw_sat_4.setObjectName(\"max_bw_sat_4\")\r\n self.label_192 = QtWidgets.QLabel(self.groupBox_24)\r\n self.label_192.setGeometry(QtCore.QRect(330, 143, 171, 51))\r\n self.label_192.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_192.setObjectName(\"label_192\")\r\n self.bw_util_sat_4 = QtWidgets.QLineEdit(self.groupBox_24)\r\n self.bw_util_sat_4.setGeometry(QtCore.QRect(510, 156, 101, 22))\r\n self.bw_util_sat_4.setObjectName(\"bw_util_sat_4\")\r\n self.label_193 = QtWidgets.QLabel(self.groupBox_24)\r\n self.label_193.setGeometry(QtCore.QRect(20, 203, 61, 16))\r\n self.label_193.setObjectName(\"label_193\")\r\n self.rolloff_sat_4 = QtWidgets.QLineEdit(self.groupBox_24)\r\n self.rolloff_sat_4.setGeometry(QtCore.QRect(200, 203, 101, 22))\r\n self.rolloff_sat_4.setObjectName(\"rolloff_sat_4\")\r\n self.label_194 = QtWidgets.QLabel(self.groupBox_24)\r\n self.label_194.setGeometry(QtCore.QRect(330, 203, 91, 16))\r\n self.label_194.setObjectName(\"label_194\")\r\n self.modcod_sat_4 = QtWidgets.QComboBox(self.groupBox_24)\r\n self.modcod_sat_4.setGeometry(QtCore.QRect(450, 203, 161, 22))\r\n self.modcod_sat_4.setObjectName(\"modcod_sat_4\")\r\n self.modcod_sat_4.addItem(\"\")\r\n self.modcod_sat_4.addItem(\"\")\r\n self.modcod_sat_4.addItem(\"\")\r\n self.label_195 = QtWidgets.QLabel(self.groupBox_24)\r\n self.label_195.setGeometry(QtCore.QRect(20, 120, 131, 20))\r\n self.label_195.setObjectName(\"label_195\")\r\n self.freq_sat_4 = QtWidgets.QLineEdit(self.groupBox_24)\r\n self.freq_sat_4.setGeometry(QtCore.QRect(200, 120, 101, 22))\r\n self.freq_sat_4.setObjectName(\"freq_sat_4\")\r\n self.label_196 = QtWidgets.QLabel(self.groupBox_24)\r\n self.label_196.setGeometry(QtCore.QRect(673, 100, 91, 20))\r\n self.label_196.setObjectName(\"label_196\")\r\n self.pol_sat_6 = QtWidgets.QComboBox(self.groupBox_24)\r\n self.pol_sat_6.setGeometry(QtCore.QRect(667, 130, 101, 22))\r\n self.pol_sat_6.setObjectName(\"pol_sat_6\")\r\n self.pol_sat_6.addItem(\"\")\r\n self.pol_sat_6.addItem(\"\")\r\n self.pol_sat_6.addItem(\"\")\r\n self.clear_sat_4 = QtWidgets.QPushButton(self.groupBox_24)\r\n self.clear_sat_4.setGeometry(QtCore.QRect(420, 810, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_sat_4.setFont(font)\r\n self.clear_sat_4.setObjectName(\"clear_sat_4\")\r\n self.stackedWidget_4.addWidget(self.satellite_page_4)\r\n self.ground_station_page_4 = QtWidgets.QWidget()\r\n self.ground_station_page_4.setObjectName(\"ground_station_page_4\")\r\n self.groupBox_25 = QtWidgets.QGroupBox(self.ground_station_page_4)\r\n self.groupBox_25.setGeometry(QtCore.QRect(10, 10, 801, 851))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_25.setFont(font)\r\n self.groupBox_25.setAutoFillBackground(False)\r\n self.groupBox_25.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_25.setObjectName(\"groupBox_25\")\r\n self.label_197 = QtWidgets.QLabel(self.groupBox_25)\r\n 
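# groupBox_25: ground-station form for page 4 (name, latitude, longitude) with save/load/clear buttons.\r\n        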
self.label_197.setGeometry(QtCore.QRect(20, 36, 55, 16))\r\n self.label_197.setObjectName(\"label_197\")\r\n self.label_198 = QtWidgets.QLabel(self.groupBox_25)\r\n self.label_198.setGeometry(QtCore.QRect(20, 66, 151, 31))\r\n self.label_198.setObjectName(\"label_198\")\r\n self.label_199 = QtWidgets.QLabel(self.groupBox_25)\r\n self.label_199.setGeometry(QtCore.QRect(20, 111, 161, 21))\r\n self.label_199.setObjectName(\"label_199\")\r\n self.name_ground_station_grstat_4 = QtWidgets.QLineEdit(self.groupBox_25)\r\n self.name_ground_station_grstat_4.setGeometry(QtCore.QRect(100, 36, 271, 22))\r\n self.name_ground_station_grstat_4.setObjectName(\"name_ground_station_grstat_4\")\r\n self.long_ground_station_grstat_4 = QtWidgets.QLineEdit(self.groupBox_25)\r\n self.long_ground_station_grstat_4.setGeometry(QtCore.QRect(200, 111, 101, 22))\r\n self.long_ground_station_grstat_4.setObjectName(\"long_ground_station_grstat_4\")\r\n self.lat_ground_station_grstat_4 = QtWidgets.QLineEdit(self.groupBox_25)\r\n self.lat_ground_station_grstat_4.setGeometry(QtCore.QRect(200, 76, 101, 22))\r\n self.lat_ground_station_grstat_4.setObjectName(\"lat_ground_station_grstat_4\")\r\n self.save_ground_station_gdstation_4 = QtWidgets.QPushButton(self.groupBox_25)\r\n self.save_ground_station_gdstation_4.setGeometry(QtCore.QRect(700, 810, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_ground_station_gdstation_4.setFont(font)\r\n self.save_ground_station_gdstation_4.setObjectName(\"save_ground_station_gdstation_4\")\r\n self.load_ground_station_gdstation_4 = QtWidgets.QPushButton(self.groupBox_25)\r\n self.load_ground_station_gdstation_4.setGeometry(QtCore.QRect(591, 811, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_ground_station_gdstation_4.setFont(font)\r\n self.load_ground_station_gdstation_4.setObjectName(\"load_ground_station_gdstation_4\")\r\n self.clear_ground_station_gdstation_4 = QtWidgets.QPushButton(self.groupBox_25)\r\n self.clear_ground_station_gdstation_4.setGeometry(QtCore.QRect(481, 811, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_ground_station_gdstation_4.setFont(font)\r\n self.clear_ground_station_gdstation_4.setObjectName(\"clear_ground_station_gdstation_4\")\r\n self.stackedWidget_4.addWidget(self.ground_station_page_4)\r\n self.reception_page_4 = QtWidgets.QWidget()\r\n self.reception_page_4.setObjectName(\"reception_page_4\")\r\n self.groupBox_26 = QtWidgets.QGroupBox(self.reception_page_4)\r\n self.groupBox_26.setGeometry(QtCore.QRect(10, 10, 801, 851))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_26.setFont(font)\r\n self.groupBox_26.setAutoFillBackground(False)\r\n self.groupBox_26.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_26.setObjectName(\"groupBox_26\")\r\n self.label_200 = QtWidgets.QLabel(self.groupBox_26)\r\n self.label_200.setGeometry(QtCore.QRect(20, 38, 55, 16))\r\n self.label_200.setObjectName(\"label_200\")\r\n self.label_201 = QtWidgets.QLabel(self.groupBox_26)\r\n self.label_201.setGeometry(QtCore.QRect(20, 68, 151, 31))\r\n self.label_201.setObjectName(\"label_201\")\r\n self.label_202 = QtWidgets.QLabel(self.groupBox_26)\r\n self.label_202.setGeometry(QtCore.QRect(20, 113, 161, 21))\r\n self.label_202.setObjectName(\"label_202\")\r\n self.name_reception_rcp_4 = 
QtWidgets.QLineEdit(self.groupBox_26)\r\n self.name_reception_rcp_4.setGeometry(QtCore.QRect(100, 38, 511, 22))\r\n self.name_reception_rcp_4.setObjectName(\"name_reception_rcp_4\")\r\n self.lnb_gain_reception_rcp_4 = QtWidgets.QLineEdit(self.groupBox_26)\r\n self.lnb_gain_reception_rcp_4.setGeometry(QtCore.QRect(200, 113, 101, 22))\r\n self.lnb_gain_reception_rcp_4.setObjectName(\"lnb_gain_reception_rcp_4\")\r\n self.ant_size_reception_rcp_4 = QtWidgets.QLineEdit(self.groupBox_26)\r\n self.ant_size_reception_rcp_4.setGeometry(QtCore.QRect(200, 74, 101, 22))\r\n self.ant_size_reception_rcp_4.setObjectName(\"ant_size_reception_rcp_4\")\r\n self.save_reception_rcp_4 = QtWidgets.QPushButton(self.groupBox_26)\r\n self.save_reception_rcp_4.setGeometry(QtCore.QRect(700, 810, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_reception_rcp_4.setFont(font)\r\n self.save_reception_rcp_4.setObjectName(\"save_reception_rcp_4\")\r\n self.load_reception_rcp_10 = QtWidgets.QPushButton(self.groupBox_26)\r\n self.load_reception_rcp_10.setGeometry(QtCore.QRect(591, 811, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_reception_rcp_10.setFont(font)\r\n self.load_reception_rcp_10.setObjectName(\"load_reception_rcp_10\")\r\n self.label_203 = QtWidgets.QLabel(self.groupBox_26)\r\n self.label_203.setGeometry(QtCore.QRect(330, 80, 171, 16))\r\n self.label_203.setObjectName(\"label_203\")\r\n self.ant_eff_reception_rcp_4 = QtWidgets.QLineEdit(self.groupBox_26)\r\n self.ant_eff_reception_rcp_4.setGeometry(QtCore.QRect(510, 78, 101, 22))\r\n self.ant_eff_reception_rcp_4.setObjectName(\"ant_eff_reception_rcp_4\")\r\n self.label_204 = QtWidgets.QLabel(self.groupBox_26)\r\n self.label_204.setGeometry(QtCore.QRect(330, 119, 161, 16))\r\n self.label_204.setObjectName(\"label_204\")\r\n self.lnb_temp_reception_rcp_4 = QtWidgets.QLineEdit(self.groupBox_26)\r\n self.lnb_temp_reception_rcp_4.setGeometry(QtCore.QRect(510, 115, 101, 22))\r\n self.lnb_temp_reception_rcp_4.setObjectName(\"lnb_temp_reception_rcp_4\")\r\n self.label_205 = QtWidgets.QLabel(self.groupBox_26)\r\n self.label_205.setGeometry(QtCore.QRect(20, 138, 171, 51))\r\n self.label_205.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_205.setObjectName(\"label_205\")\r\n self.aditional_losses_reception_rcp_4 = QtWidgets.QLineEdit(self.groupBox_26)\r\n self.aditional_losses_reception_rcp_4.setGeometry(QtCore.QRect(200, 153, 101, 22))\r\n self.aditional_losses_reception_rcp_4.setObjectName(\"aditional_losses_reception_rcp_4\")\r\n self.label_206 = QtWidgets.QLabel(self.groupBox_26)\r\n self.label_206.setGeometry(QtCore.QRect(20, 187, 171, 51))\r\n self.label_206.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_206.setObjectName(\"label_206\")\r\n self.max_depoint_reception_rcp_4 = QtWidgets.QLineEdit(self.groupBox_26)\r\n self.max_depoint_reception_rcp_4.setGeometry(QtCore.QRect(200, 200, 101, 22))\r\n self.max_depoint_reception_rcp_4.setObjectName(\"max_depoint_reception_rcp_4\")\r\n self.clear_reception_rcp_6 = QtWidgets.QPushButton(self.groupBox_26)\r\n self.clear_reception_rcp_6.setGeometry(QtCore.QRect(481, 811, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_reception_rcp_6.setFont(font)\r\n self.clear_reception_rcp_6.setObjectName(\"clear_reception_rcp_6\")\r\n self.label_207 = QtWidgets.QLabel(self.groupBox_26)\r\n 
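# groupBox_26: reception form for page 4 (antenna size/efficiency, LNB gain/temperature, additional losses, depointing, cable loss).\r\n        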
self.label_207.setGeometry(QtCore.QRect(330, 140, 171, 51))\r\n self.label_207.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_207.setObjectName(\"label_207\")\r\n self.cable_loss_reception_rcp_4 = QtWidgets.QLineEdit(self.groupBox_26)\r\n self.cable_loss_reception_rcp_4.setGeometry(QtCore.QRect(510, 155, 101, 22))\r\n self.cable_loss_reception_rcp_4.setObjectName(\"cable_loss_reception_rcp_4\")\r\n self.stackedWidget_4.addWidget(self.reception_page_4)\r\n self.ant_size_single_point_calc_page_4 = QtWidgets.QWidget()\r\n self.ant_size_single_point_calc_page_4.setObjectName(\"ant_size_single_point_calc_page_4\")\r\n self.stackedWidget_4.addWidget(self.ant_size_single_point_calc_page_4)\r\n self.list_ant_size_calc_page_3 = QtWidgets.QWidget()\r\n self.list_ant_size_calc_page_3.setObjectName(\"list_ant_size_calc_page_3\")\r\n self.stackedWidget_4.addWidget(self.list_ant_size_calc_page_3)\r\n self.single_point_atm_calc_page_3 = QtWidgets.QWidget()\r\n self.single_point_atm_calc_page_3.setObjectName(\"single_point_atm_calc_page_3\")\r\n self.groupBox_27 = QtWidgets.QGroupBox(self.single_point_atm_calc_page_3)\r\n self.groupBox_27.setGeometry(QtCore.QRect(20, 20, 781, 131))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_27.setFont(font)\r\n self.groupBox_27.setObjectName(\"groupBox_27\")\r\n self.lat_ground_station_spatm_3 = QtWidgets.QLineEdit(self.groupBox_27)\r\n self.lat_ground_station_spatm_3.setGeometry(QtCore.QRect(200, 30, 101, 22))\r\n self.lat_ground_station_spatm_3.setObjectName(\"lat_ground_station_spatm_3\")\r\n self.long_ground_station_spatm_3 = QtWidgets.QLineEdit(self.groupBox_27)\r\n self.long_ground_station_spatm_3.setGeometry(QtCore.QRect(500, 30, 101, 22))\r\n self.long_ground_station_spatm_3.setObjectName(\"long_ground_station_spatm_3\")\r\n self.label_208 = QtWidgets.QLabel(self.groupBox_27)\r\n self.label_208.setGeometry(QtCore.QRect(20, 23, 151, 31))\r\n self.label_208.setObjectName(\"label_208\")\r\n self.label_209 = QtWidgets.QLabel(self.groupBox_27)\r\n self.label_209.setGeometry(QtCore.QRect(319, 28, 161, 21))\r\n self.label_209.setObjectName(\"label_209\")\r\n self.label_210 = QtWidgets.QLabel(self.groupBox_27)\r\n self.label_210.setGeometry(QtCore.QRect(20, 60, 161, 21))\r\n self.label_210.setObjectName(\"label_210\")\r\n self.ant_size_reception_spatm_3 = QtWidgets.QLineEdit(self.groupBox_27)\r\n self.ant_size_reception_spatm_3.setGeometry(QtCore.QRect(200, 60, 101, 22))\r\n self.ant_size_reception_spatm_3.setObjectName(\"ant_size_reception_spatm_3\")\r\n self.ant_eff_reception_spatm_3 = QtWidgets.QLineEdit(self.groupBox_27)\r\n self.ant_eff_reception_spatm_3.setGeometry(QtCore.QRect(500, 60, 101, 22))\r\n self.ant_eff_reception_spatm_3.setObjectName(\"ant_eff_reception_spatm_3\")\r\n self.label_211 = QtWidgets.QLabel(self.groupBox_27)\r\n self.label_211.setGeometry(QtCore.QRect(320, 60, 141, 16))\r\n self.label_211.setObjectName(\"label_211\")\r\n self.clear_reception_rcp_7 = QtWidgets.QPushButton(self.groupBox_27)\r\n self.clear_reception_rcp_7.setGeometry(QtCore.QRect(660, 30, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_reception_rcp_7.setFont(font)\r\n self.clear_reception_rcp_7.setObjectName(\"clear_reception_rcp_7\")\r\n self.load_reception_rcp_11 = QtWidgets.QPushButton(self.groupBox_27)\r\n self.load_reception_rcp_11.setGeometry(QtCore.QRect(15, 91, 201, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n 
font.setWeight(50)\r\n self.load_reception_rcp_11.setFont(font)\r\n self.load_reception_rcp_11.setObjectName(\"load_reception_rcp_11\")\r\n self.load_reception_rcp_12 = QtWidgets.QPushButton(self.groupBox_27)\r\n self.load_reception_rcp_12.setGeometry(QtCore.QRect(230, 90, 201, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_reception_rcp_12.setFont(font)\r\n self.load_reception_rcp_12.setObjectName(\"load_reception_rcp_12\")\r\n self.calc_spatm_3 = QtWidgets.QPushButton(self.single_point_atm_calc_page_3)\r\n self.calc_spatm_3.setGeometry(QtCore.QRect(666, 830, 141, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.calc_spatm_3.setFont(font)\r\n self.calc_spatm_3.setObjectName(\"calc_spatm_3\")\r\n self.textEdit_4 = QtWidgets.QTextEdit(self.single_point_atm_calc_page_3)\r\n self.textEdit_4.setGeometry(QtCore.QRect(6, 650, 801, 171))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.textEdit_4.setFont(font)\r\n self.textEdit_4.setObjectName(\"textEdit_4\")\r\n self.groupBox_28 = QtWidgets.QGroupBox(self.single_point_atm_calc_page_3)\r\n self.groupBox_28.setGeometry(QtCore.QRect(20, 150, 781, 131))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_28.setFont(font)\r\n self.groupBox_28.setObjectName(\"groupBox_28\")\r\n self.label_212 = QtWidgets.QLabel(self.groupBox_28)\r\n self.label_212.setGeometry(QtCore.QRect(20, 60, 131, 16))\r\n self.label_212.setObjectName(\"label_212\")\r\n self.label_213 = QtWidgets.QLabel(self.groupBox_28)\r\n self.label_213.setGeometry(QtCore.QRect(20, 30, 161, 21))\r\n self.label_213.setObjectName(\"label_213\")\r\n self.long_sat_spatm_3 = QtWidgets.QLineEdit(self.groupBox_28)\r\n self.long_sat_spatm_3.setGeometry(QtCore.QRect(200, 30, 101, 22))\r\n self.long_sat_spatm_3.setObjectName(\"long_sat_spatm_3\")\r\n self.freq_sat_spatm_3 = QtWidgets.QLineEdit(self.groupBox_28)\r\n self.freq_sat_spatm_3.setGeometry(QtCore.QRect(200, 60, 101, 22))\r\n self.freq_sat_spatm_3.setObjectName(\"freq_sat_spatm_3\")\r\n self.default_sat_sp_perf_5 = QtWidgets.QComboBox(self.groupBox_28)\r\n self.default_sat_sp_perf_5.setGeometry(QtCore.QRect(634, 44, 131, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(8)\r\n self.default_sat_sp_perf_5.setFont(font)\r\n self.default_sat_sp_perf_5.setObjectName(\"default_sat_sp_perf_5\")\r\n self.label_214 = QtWidgets.QLabel(self.groupBox_28)\r\n self.label_214.setGeometry(QtCore.QRect(636, 14, 131, 16))\r\n self.label_214.setObjectName(\"label_214\")\r\n self.pol_sat_7 = QtWidgets.QComboBox(self.groupBox_28)\r\n self.pol_sat_7.setGeometry(QtCore.QRect(450, 30, 101, 22))\r\n self.pol_sat_7.setObjectName(\"pol_sat_7\")\r\n self.pol_sat_7.addItem(\"\")\r\n self.pol_sat_7.addItem(\"\")\r\n self.pol_sat_7.addItem(\"\")\r\n self.label_215 = QtWidgets.QLabel(self.groupBox_28)\r\n self.label_215.setGeometry(QtCore.QRect(350, 30, 91, 20))\r\n self.label_215.setObjectName(\"label_215\")\r\n self.load_reception_rcp_13 = QtWidgets.QPushButton(self.groupBox_28)\r\n self.load_reception_rcp_13.setGeometry(QtCore.QRect(10, 90, 201, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_reception_rcp_13.setFont(font)\r\n self.load_reception_rcp_13.setObjectName(\"load_reception_rcp_13\")\r\n self.p_year_spatm_3 = QtWidgets.QLineEdit(self.single_point_atm_calc_page_3)\r\n self.p_year_spatm_3.setGeometry(QtCore.QRect(550, 834, 101, 21))\r\n font = QtGui.QFont()\r\n 
font.setPointSize(10)\r\n self.p_year_spatm_3.setFont(font)\r\n self.p_year_spatm_3.setToolTip(\"\")\r\n self.p_year_spatm_3.setObjectName(\"p_year_spatm_3\")\r\n self.label_216 = QtWidgets.QLabel(self.single_point_atm_calc_page_3)\r\n self.label_216.setGeometry(QtCore.QRect(340, 834, 201, 20))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_216.setFont(font)\r\n self.label_216.setObjectName(\"label_216\")\r\n self.label_217 = QtWidgets.QLabel(self.single_point_atm_calc_page_3)\r\n self.label_217.setGeometry(QtCore.QRect(10, 837, 71, 16))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_217.setFont(font)\r\n self.label_217.setObjectName(\"label_217\")\r\n self.method_spatm_3 = QtWidgets.QComboBox(self.single_point_atm_calc_page_3)\r\n self.method_spatm_3.setGeometry(QtCore.QRect(84, 835, 101, 21))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.method_spatm_3.setFont(font)\r\n self.method_spatm_3.setObjectName(\"method_spatm_3\")\r\n self.method_spatm_3.addItem(\"\")\r\n self.method_spatm_3.addItem(\"\")\r\n self.stackedWidget_4.addWidget(self.single_point_atm_calc_page_3)\r\n self.single_point_perf_calc_page_3 = QtWidgets.QWidget()\r\n self.single_point_perf_calc_page_3.setObjectName(\"single_point_perf_calc_page_3\")\r\n self.groupBox_29 = QtWidgets.QGroupBox(self.single_point_perf_calc_page_3)\r\n self.groupBox_29.setGeometry(QtCore.QRect(10, 10, 801, 121))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_29.setFont(font)\r\n self.groupBox_29.setAutoFillBackground(False)\r\n self.groupBox_29.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_29.setObjectName(\"groupBox_29\")\r\n self.label_218 = QtWidgets.QLabel(self.groupBox_29)\r\n self.label_218.setGeometry(QtCore.QRect(20, 36, 55, 16))\r\n self.label_218.setObjectName(\"label_218\")\r\n self.label_219 = QtWidgets.QLabel(self.groupBox_29)\r\n self.label_219.setGeometry(QtCore.QRect(20, 66, 151, 31))\r\n self.label_219.setObjectName(\"label_219\")\r\n self.label_220 = QtWidgets.QLabel(self.groupBox_29)\r\n self.label_220.setGeometry(QtCore.QRect(330, 78, 161, 21))\r\n self.label_220.setObjectName(\"label_220\")\r\n self.name_ground_station_sp_perf_3 = QtWidgets.QLineEdit(self.groupBox_29)\r\n self.name_ground_station_sp_perf_3.setGeometry(QtCore.QRect(100, 36, 511, 22))\r\n self.name_ground_station_sp_perf_3.setObjectName(\"name_ground_station_sp_perf_3\")\r\n self.long_ground_station_sp_perf_3 = QtWidgets.QLineEdit(self.groupBox_29)\r\n self.long_ground_station_sp_perf_3.setGeometry(QtCore.QRect(510, 80, 101, 22))\r\n self.long_ground_station_sp_perf_3.setObjectName(\"long_ground_station_sp_perf_3\")\r\n self.lat_ground_station_sp_perf_3 = QtWidgets.QLineEdit(self.groupBox_29)\r\n self.lat_ground_station_sp_perf_3.setGeometry(QtCore.QRect(200, 76, 101, 22))\r\n self.lat_ground_station_sp_perf_3.setObjectName(\"lat_ground_station_sp_perf_3\")\r\n self.save_ground_station_sp_perf_3 = QtWidgets.QPushButton(self.groupBox_29)\r\n self.save_ground_station_sp_perf_3.setGeometry(QtCore.QRect(652, 80, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_ground_station_sp_perf_3.setFont(font)\r\n self.save_ground_station_sp_perf_3.setObjectName(\"save_ground_station_sp_perf_3\")\r\n self.load_ground_station_sp_perf_3 = QtWidgets.QPushButton(self.groupBox_29)\r\n self.load_ground_station_sp_perf_3.setGeometry(QtCore.QRect(652, 48, 111, 28))\r\n font = QtGui.QFont()\r\n 
font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_ground_station_sp_perf_3.setFont(font)\r\n self.load_ground_station_sp_perf_3.setObjectName(\"load_ground_station_sp_perf_3\")\r\n self.clear_ground_station_sp_perf_3 = QtWidgets.QPushButton(self.groupBox_29)\r\n self.clear_ground_station_sp_perf_3.setGeometry(QtCore.QRect(652, 16, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_ground_station_sp_perf_3.setFont(font)\r\n self.clear_ground_station_sp_perf_3.setObjectName(\"clear_ground_station_sp_perf_3\")\r\n self.groupBox_30 = QtWidgets.QGroupBox(self.single_point_perf_calc_page_3)\r\n self.groupBox_30.setGeometry(QtCore.QRect(10, 129, 801, 271))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_30.setFont(font)\r\n self.groupBox_30.setAutoFillBackground(False)\r\n self.groupBox_30.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_30.setObjectName(\"groupBox_30\")\r\n self.label_221 = QtWidgets.QLabel(self.groupBox_30)\r\n self.label_221.setGeometry(QtCore.QRect(20, 29, 55, 16))\r\n self.label_221.setObjectName(\"label_221\")\r\n self.label_222 = QtWidgets.QLabel(self.groupBox_30)\r\n self.label_222.setGeometry(QtCore.QRect(20, 71, 161, 21))\r\n self.label_222.setObjectName(\"label_222\")\r\n self.name_sat_sp_perf_3 = QtWidgets.QLineEdit(self.groupBox_30)\r\n self.name_sat_sp_perf_3.setGeometry(QtCore.QRect(100, 29, 511, 22))\r\n self.name_sat_sp_perf_3.setObjectName(\"name_sat_sp_perf_3\")\r\n self.long_sat_sp_perf_3 = QtWidgets.QLineEdit(self.groupBox_30)\r\n self.long_sat_sp_perf_3.setGeometry(QtCore.QRect(200, 71, 101, 22))\r\n self.long_sat_sp_perf_3.setObjectName(\"long_sat_sp_perf_3\")\r\n self.save_sat_sp_perf_3 = QtWidgets.QPushButton(self.groupBox_30)\r\n self.save_sat_sp_perf_3.setGeometry(QtCore.QRect(653, 231, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_sat_sp_perf_3.setFont(font)\r\n self.save_sat_sp_perf_3.setObjectName(\"save_sat_sp_perf_3\")\r\n self.load_sat_sp_perf_3 = QtWidgets.QPushButton(self.groupBox_30)\r\n self.load_sat_sp_perf_3.setGeometry(QtCore.QRect(653, 199, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_sat_sp_perf_3.setFont(font)\r\n self.load_sat_sp_perf_3.setObjectName(\"load_sat_sp_perf_3\")\r\n self.default_sat_sp_perf_6 = QtWidgets.QComboBox(self.groupBox_30)\r\n self.default_sat_sp_perf_6.setGeometry(QtCore.QRect(650, 61, 131, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(8)\r\n self.default_sat_sp_perf_6.setFont(font)\r\n self.default_sat_sp_perf_6.setObjectName(\"default_sat_sp_perf_6\")\r\n self.label_223 = QtWidgets.QLabel(self.groupBox_30)\r\n self.label_223.setGeometry(QtCore.QRect(648, 31, 131, 16))\r\n self.label_223.setObjectName(\"label_223\")\r\n self.label_224 = QtWidgets.QLabel(self.groupBox_30)\r\n self.label_224.setGeometry(QtCore.QRect(330, 69, 111, 16))\r\n self.label_224.setObjectName(\"label_224\")\r\n self.height_sat_sp_perf_3 = QtWidgets.QLineEdit(self.groupBox_30)\r\n self.height_sat_sp_perf_3.setGeometry(QtCore.QRect(510, 69, 101, 22))\r\n self.height_sat_sp_perf_3.setObjectName(\"height_sat_sp_perf_3\")\r\n self.label_225 = QtWidgets.QLabel(self.groupBox_30)\r\n self.label_225.setGeometry(QtCore.QRect(330, 110, 121, 16))\r\n self.label_225.setObjectName(\"label_225\")\r\n self.eirp_sat_sp_perf_3 = 
QtWidgets.QLineEdit(self.groupBox_30)\r\n self.eirp_sat_sp_perf_3.setGeometry(QtCore.QRect(510, 106, 101, 22))\r\n self.eirp_sat_sp_perf_3.setObjectName(\"eirp_sat_sp_perf_3\")\r\n self.label_226 = QtWidgets.QLabel(self.groupBox_30)\r\n self.label_226.setGeometry(QtCore.QRect(20, 139, 171, 51))\r\n self.label_226.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_226.setObjectName(\"label_226\")\r\n self.max_bw_sat_sp_perf_3 = QtWidgets.QLineEdit(self.groupBox_30)\r\n self.max_bw_sat_sp_perf_3.setGeometry(QtCore.QRect(200, 149, 101, 22))\r\n self.max_bw_sat_sp_perf_3.setObjectName(\"max_bw_sat_sp_perf_3\")\r\n self.label_227 = QtWidgets.QLabel(self.groupBox_30)\r\n self.label_227.setGeometry(QtCore.QRect(330, 134, 171, 51))\r\n self.label_227.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_227.setObjectName(\"label_227\")\r\n self.bw_util_sat_sp_perf_3 = QtWidgets.QLineEdit(self.groupBox_30)\r\n self.bw_util_sat_sp_perf_3.setGeometry(QtCore.QRect(510, 147, 101, 22))\r\n self.bw_util_sat_sp_perf_3.setObjectName(\"bw_util_sat_sp_perf_3\")\r\n self.label_228 = QtWidgets.QLabel(self.groupBox_30)\r\n self.label_228.setGeometry(QtCore.QRect(20, 191, 61, 16))\r\n self.label_228.setObjectName(\"label_228\")\r\n self.rolloff_sat_sp_perf_3 = QtWidgets.QLineEdit(self.groupBox_30)\r\n self.rolloff_sat_sp_perf_3.setGeometry(QtCore.QRect(200, 190, 101, 22))\r\n self.rolloff_sat_sp_perf_3.setObjectName(\"rolloff_sat_sp_perf_3\")\r\n self.label_229 = QtWidgets.QLabel(self.groupBox_30)\r\n self.label_229.setGeometry(QtCore.QRect(330, 192, 91, 16))\r\n self.label_229.setObjectName(\"label_229\")\r\n self.modcod_sat_sp_perf_3 = QtWidgets.QComboBox(self.groupBox_30)\r\n self.modcod_sat_sp_perf_3.setGeometry(QtCore.QRect(450, 190, 161, 22))\r\n self.modcod_sat_sp_perf_3.setObjectName(\"modcod_sat_sp_perf_3\")\r\n self.modcod_sat_sp_perf_3.addItem(\"\")\r\n self.modcod_sat_sp_perf_3.addItem(\"\")\r\n self.modcod_sat_sp_perf_3.addItem(\"\")\r\n self.label_230 = QtWidgets.QLabel(self.groupBox_30)\r\n self.label_230.setGeometry(QtCore.QRect(20, 111, 131, 20))\r\n self.label_230.setObjectName(\"label_230\")\r\n self.freq_sat_sp_perf_3 = QtWidgets.QLineEdit(self.groupBox_30)\r\n self.freq_sat_sp_perf_3.setGeometry(QtCore.QRect(200, 111, 101, 22))\r\n self.freq_sat_sp_perf_3.setObjectName(\"freq_sat_sp_perf_3\")\r\n self.label_231 = QtWidgets.QLabel(self.groupBox_30)\r\n self.label_231.setGeometry(QtCore.QRect(665, 97, 91, 20))\r\n self.label_231.setObjectName(\"label_231\")\r\n self.pol_sat_sp_perf_3 = QtWidgets.QComboBox(self.groupBox_30)\r\n self.pol_sat_sp_perf_3.setGeometry(QtCore.QRect(660, 121, 101, 22))\r\n self.pol_sat_sp_perf_3.setObjectName(\"pol_sat_sp_perf_3\")\r\n self.pol_sat_sp_perf_3.addItem(\"\")\r\n self.pol_sat_sp_perf_3.addItem(\"\")\r\n self.pol_sat_sp_perf_3.addItem(\"\")\r\n self.clear_sat_sp_perf_3 = QtWidgets.QPushButton(self.groupBox_30)\r\n self.clear_sat_sp_perf_3.setGeometry(QtCore.QRect(653, 169, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_sat_sp_perf_3.setFont(font)\r\n self.clear_sat_sp_perf_3.setObjectName(\"clear_sat_sp_perf_3\")\r\n self.groupBox_31 = QtWidgets.QGroupBox(self.single_point_perf_calc_page_3)\r\n self.groupBox_31.setGeometry(QtCore.QRect(10, 400, 801, 201))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_31.setFont(font)\r\n self.groupBox_31.setAutoFillBackground(False)\r\n self.groupBox_31.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n 
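# groupBox_31: reception parameters for the single-point performance page (LNB gain/temperature, antenna efficiency, losses, depointing).\r\n        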
self.groupBox_31.setObjectName(\"groupBox_31\")\r\n self.label_232 = QtWidgets.QLabel(self.groupBox_31)\r\n self.label_232.setGeometry(QtCore.QRect(20, 38, 55, 16))\r\n self.label_232.setObjectName(\"label_232\")\r\n self.label_233 = QtWidgets.QLabel(self.groupBox_31)\r\n self.label_233.setGeometry(QtCore.QRect(20, 71, 161, 21))\r\n self.label_233.setObjectName(\"label_233\")\r\n self.name_reception_sp_perf_3 = QtWidgets.QLineEdit(self.groupBox_31)\r\n self.name_reception_sp_perf_3.setGeometry(QtCore.QRect(100, 38, 511, 22))\r\n self.name_reception_sp_perf_3.setObjectName(\"name_reception_sp_perf_3\")\r\n self.lnb_gain_reception_sp_perf_3 = QtWidgets.QLineEdit(self.groupBox_31)\r\n self.lnb_gain_reception_sp_perf_3.setGeometry(QtCore.QRect(200, 71, 101, 22))\r\n self.lnb_gain_reception_sp_perf_3.setObjectName(\"lnb_gain_reception_sp_perf_3\")\r\n self.save_reception_sp_perf_3 = QtWidgets.QPushButton(self.groupBox_31)\r\n self.save_reception_sp_perf_3.setGeometry(QtCore.QRect(654, 147, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_reception_sp_perf_3.setFont(font)\r\n self.save_reception_sp_perf_3.setObjectName(\"save_reception_sp_perf_3\")\r\n self.load_reception_sp_perf_3 = QtWidgets.QPushButton(self.groupBox_31)\r\n self.load_reception_sp_perf_3.setGeometry(QtCore.QRect(654, 113, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_reception_sp_perf_3.setFont(font)\r\n self.load_reception_sp_perf_3.setObjectName(\"load_reception_sp_perf_3\")\r\n self.label_234 = QtWidgets.QLabel(self.groupBox_31)\r\n self.label_234.setGeometry(QtCore.QRect(330, 148, 141, 16))\r\n self.label_234.setObjectName(\"label_234\")\r\n self.ant_eff_reception_sp_perf_3 = QtWidgets.QLineEdit(self.groupBox_31)\r\n self.ant_eff_reception_sp_perf_3.setGeometry(QtCore.QRect(510, 148, 101, 22))\r\n self.ant_eff_reception_sp_perf_3.setObjectName(\"ant_eff_reception_sp_perf_3\")\r\n self.label_235 = QtWidgets.QLabel(self.groupBox_31)\r\n self.label_235.setGeometry(QtCore.QRect(330, 76, 161, 16))\r\n self.label_235.setObjectName(\"label_235\")\r\n self.lnb_temp_reception_sp_perf_3 = QtWidgets.QLineEdit(self.groupBox_31)\r\n self.lnb_temp_reception_sp_perf_3.setGeometry(QtCore.QRect(510, 73, 101, 22))\r\n self.lnb_temp_reception_sp_perf_3.setObjectName(\"lnb_temp_reception_sp_perf_3\")\r\n self.label_236 = QtWidgets.QLabel(self.groupBox_31)\r\n self.label_236.setGeometry(QtCore.QRect(17, 97, 171, 51))\r\n self.label_236.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_236.setObjectName(\"label_236\")\r\n self.aditional_losses_reception_sp_perf_3 = QtWidgets.QLineEdit(self.groupBox_31)\r\n self.aditional_losses_reception_sp_perf_3.setGeometry(QtCore.QRect(200, 111, 101, 22))\r\n self.aditional_losses_reception_sp_perf_3.setObjectName(\"aditional_losses_reception_sp_perf_3\")\r\n self.label_237 = QtWidgets.QLabel(self.groupBox_31)\r\n self.label_237.setGeometry(QtCore.QRect(19, 137, 171, 51))\r\n self.label_237.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_237.setObjectName(\"label_237\")\r\n self.max_depoint_reception_sp_perf_3 = QtWidgets.QLineEdit(self.groupBox_31)\r\n self.max_depoint_reception_sp_perf_3.setGeometry(QtCore.QRect(200, 150, 101, 22))\r\n self.max_depoint_reception_sp_perf_3.setObjectName(\"max_depoint_reception_sp_perf_3\")\r\n self.clear_reception_sp_perf_3 = QtWidgets.QPushButton(self.groupBox_31)\r\n 
self.clear_reception_sp_perf_3.setGeometry(QtCore.QRect(654, 81, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_reception_sp_perf_3.setFont(font)\r\n self.clear_reception_sp_perf_3.setObjectName(\"clear_reception_sp_perf_3\")\r\n self.cable_loss_reception_sp_perf_3 = QtWidgets.QLineEdit(self.groupBox_31)\r\n self.cable_loss_reception_sp_perf_3.setGeometry(QtCore.QRect(509, 112, 101, 22))\r\n self.cable_loss_reception_sp_perf_3.setObjectName(\"cable_loss_reception_sp_perf_3\")\r\n self.label_238 = QtWidgets.QLabel(self.groupBox_31)\r\n self.label_238.setGeometry(QtCore.QRect(330, 98, 171, 51))\r\n self.label_238.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_238.setObjectName(\"label_238\")\r\n self.calc_sp_perf_3 = QtWidgets.QPushButton(self.single_point_perf_calc_page_3)\r\n self.calc_sp_perf_3.setGeometry(QtCore.QRect(670, 830, 141, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.calc_sp_perf_3.setFont(font)\r\n self.calc_sp_perf_3.setObjectName(\"calc_sp_perf_3\")\r\n self.label_239 = QtWidgets.QLabel(self.single_point_perf_calc_page_3)\r\n self.label_239.setGeometry(QtCore.QRect(10, 830, 151, 21))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_239.setFont(font)\r\n self.label_239.setObjectName(\"label_239\")\r\n self.relaxation_sp_perf_3 = QtWidgets.QLineEdit(self.single_point_perf_calc_page_3)\r\n self.relaxation_sp_perf_3.setGeometry(QtCore.QRect(170, 830, 51, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.relaxation_sp_perf_3.setFont(font)\r\n self.relaxation_sp_perf_3.setToolTip(\"\")\r\n self.relaxation_sp_perf_3.setObjectName(\"relaxation_sp_perf_3\")\r\n self.margin_sp_perf_3 = QtWidgets.QLineEdit(self.single_point_perf_calc_page_3)\r\n self.margin_sp_perf_3.setGeometry(QtCore.QRect(340, 830, 61, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.margin_sp_perf_3.setFont(font)\r\n self.margin_sp_perf_3.setToolTip(\"\")\r\n self.margin_sp_perf_3.setObjectName(\"margin_sp_perf_3\")\r\n self.label_240 = QtWidgets.QLabel(self.single_point_perf_calc_page_3)\r\n self.label_240.setGeometry(QtCore.QRect(240, 830, 91, 21))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_240.setFont(font)\r\n self.label_240.setObjectName(\"label_240\")\r\n self.plot_area_2 = QtWidgets.QWidget(self.single_point_perf_calc_page_3)\r\n self.plot_area_2.setGeometry(QtCore.QRect(10, 609, 791, 211))\r\n self.plot_area_2.setObjectName(\"plot_area_2\")\r\n self.stackedWidget_4.addWidget(self.single_point_perf_calc_page_3)\r\n self.list_perf_calc_page_3 = QtWidgets.QWidget()\r\n self.list_perf_calc_page_3.setObjectName(\"list_perf_calc_page_3\")\r\n self.browse_path_mp_perf_3 = QtWidgets.QPushButton(self.list_perf_calc_page_3)\r\n self.browse_path_mp_perf_3.setGeometry(QtCore.QRect(700, 11, 93, 21))\r\n self.browse_path_mp_perf_3.setObjectName(\"browse_path_mp_perf_3\")\r\n self.label_241 = QtWidgets.QLabel(self.list_perf_calc_page_3)\r\n self.label_241.setGeometry(QtCore.QRect(6, 13, 55, 16))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_241.setFont(font)\r\n self.label_241.setObjectName(\"label_241\")\r\n self.preview_mp_perf_3 = QtWidgets.QTableWidget(self.list_perf_calc_page_3)\r\n self.preview_mp_perf_3.setGeometry(QtCore.QRect(10, 61, 801, 131))\r\n self.preview_mp_perf_3.setObjectName(\"preview_mp_perf_3\")\r\n self.preview_mp_perf_3.setColumnCount(0)\r\n self.preview_mp_perf_3.setRowCount(0)\r\n self.label_242 = 
QtWidgets.QLabel(self.list_perf_calc_page_3)\r\n self.label_242.setGeometry(QtCore.QRect(10, 41, 91, 16))\r\n self.label_242.setObjectName(\"label_242\")\r\n self.groupBox_32 = QtWidgets.QGroupBox(self.list_perf_calc_page_3)\r\n self.groupBox_32.setGeometry(QtCore.QRect(8, 199, 801, 261))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_32.setFont(font)\r\n self.groupBox_32.setAutoFillBackground(False)\r\n self.groupBox_32.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_32.setObjectName(\"groupBox_32\")\r\n self.label_243 = QtWidgets.QLabel(self.groupBox_32)\r\n self.label_243.setGeometry(QtCore.QRect(20, 27, 55, 16))\r\n self.label_243.setObjectName(\"label_243\")\r\n self.label_244 = QtWidgets.QLabel(self.groupBox_32)\r\n self.label_244.setGeometry(QtCore.QRect(20, 69, 161, 21))\r\n self.label_244.setObjectName(\"label_244\")\r\n self.name_sat_mp_perf_3 = QtWidgets.QLineEdit(self.groupBox_32)\r\n self.name_sat_mp_perf_3.setGeometry(QtCore.QRect(100, 27, 511, 22))\r\n self.name_sat_mp_perf_3.setObjectName(\"name_sat_mp_perf_3\")\r\n self.long_sat_mp_perf_3 = QtWidgets.QLineEdit(self.groupBox_32)\r\n self.long_sat_mp_perf_3.setGeometry(QtCore.QRect(200, 69, 101, 22))\r\n self.long_sat_mp_perf_3.setObjectName(\"long_sat_mp_perf_3\")\r\n self.save_sat_mp_perf_3 = QtWidgets.QPushButton(self.groupBox_32)\r\n self.save_sat_mp_perf_3.setGeometry(QtCore.QRect(658, 222, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_sat_mp_perf_3.setFont(font)\r\n self.save_sat_mp_perf_3.setObjectName(\"save_sat_mp_perf_3\")\r\n self.load_sat_mp_perf_3 = QtWidgets.QPushButton(self.groupBox_32)\r\n self.load_sat_mp_perf_3.setGeometry(QtCore.QRect(658, 190, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_sat_mp_perf_3.setFont(font)\r\n self.load_sat_mp_perf_3.setObjectName(\"load_sat_mp_perf_3\")\r\n self.default_sat_mp_perf_3 = QtWidgets.QComboBox(self.groupBox_32)\r\n self.default_sat_mp_perf_3.setGeometry(QtCore.QRect(650, 58, 131, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(8)\r\n self.default_sat_mp_perf_3.setFont(font)\r\n self.default_sat_mp_perf_3.setObjectName(\"default_sat_mp_perf_3\")\r\n self.label_245 = QtWidgets.QLabel(self.groupBox_32)\r\n self.label_245.setGeometry(QtCore.QRect(651, 29, 131, 16))\r\n self.label_245.setObjectName(\"label_245\")\r\n self.label_246 = QtWidgets.QLabel(self.groupBox_32)\r\n self.label_246.setGeometry(QtCore.QRect(330, 70, 111, 16))\r\n self.label_246.setObjectName(\"label_246\")\r\n self.height_sat_mp_perf_3 = QtWidgets.QLineEdit(self.groupBox_32)\r\n self.height_sat_mp_perf_3.setGeometry(QtCore.QRect(510, 67, 101, 22))\r\n self.height_sat_mp_perf_3.setObjectName(\"height_sat_mp_perf_3\")\r\n self.label_247 = QtWidgets.QLabel(self.groupBox_32)\r\n self.label_247.setGeometry(QtCore.QRect(330, 108, 121, 16))\r\n self.label_247.setObjectName(\"label_247\")\r\n self.eirp_sat_mp_perf_3 = QtWidgets.QLineEdit(self.groupBox_32)\r\n self.eirp_sat_mp_perf_3.setGeometry(QtCore.QRect(510, 104, 101, 22))\r\n self.eirp_sat_mp_perf_3.setObjectName(\"eirp_sat_mp_perf_3\")\r\n self.label_248 = QtWidgets.QLabel(self.groupBox_32)\r\n self.label_248.setGeometry(QtCore.QRect(20, 137, 171, 51))\r\n self.label_248.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_248.setObjectName(\"label_248\")\r\n self.max_bw_sat_mp_perf_3 = QtWidgets.QLineEdit(self.groupBox_32)\r\n 
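# groupBox_32: satellite parameters for the list (multi-point) performance page (longitude, height, EIRP, bandwidth, roll-off, modcod, frequency, polarization).\r\n        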
self.max_bw_sat_mp_perf_3.setGeometry(QtCore.QRect(200, 147, 101, 22))\r\n self.max_bw_sat_mp_perf_3.setObjectName(\"max_bw_sat_mp_perf_3\")\r\n self.label_249 = QtWidgets.QLabel(self.groupBox_32)\r\n self.label_249.setGeometry(QtCore.QRect(330, 132, 171, 51))\r\n self.label_249.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_249.setObjectName(\"label_249\")\r\n self.bw_util_sat_mp_perf_3 = QtWidgets.QLineEdit(self.groupBox_32)\r\n self.bw_util_sat_mp_perf_3.setGeometry(QtCore.QRect(510, 145, 101, 22))\r\n self.bw_util_sat_mp_perf_3.setObjectName(\"bw_util_sat_mp_perf_3\")\r\n self.label_250 = QtWidgets.QLabel(self.groupBox_32)\r\n self.label_250.setGeometry(QtCore.QRect(20, 191, 61, 16))\r\n self.label_250.setObjectName(\"label_250\")\r\n self.rolloff_sat_mp_perf_3 = QtWidgets.QLineEdit(self.groupBox_32)\r\n self.rolloff_sat_mp_perf_3.setGeometry(QtCore.QRect(200, 191, 101, 22))\r\n self.rolloff_sat_mp_perf_3.setObjectName(\"rolloff_sat_mp_perf_3\")\r\n self.label_251 = QtWidgets.QLabel(self.groupBox_32)\r\n self.label_251.setGeometry(QtCore.QRect(330, 191, 91, 16))\r\n self.label_251.setObjectName(\"label_251\")\r\n self.modcod_sat_mp_perf_3 = QtWidgets.QComboBox(self.groupBox_32)\r\n self.modcod_sat_mp_perf_3.setGeometry(QtCore.QRect(450, 191, 161, 22))\r\n self.modcod_sat_mp_perf_3.setObjectName(\"modcod_sat_mp_perf_3\")\r\n self.modcod_sat_mp_perf_3.addItem(\"\")\r\n self.modcod_sat_mp_perf_3.addItem(\"\")\r\n self.modcod_sat_mp_perf_3.addItem(\"\")\r\n self.label_252 = QtWidgets.QLabel(self.groupBox_32)\r\n self.label_252.setGeometry(QtCore.QRect(20, 109, 131, 20))\r\n self.label_252.setObjectName(\"label_252\")\r\n self.freq_sat_mp_perf_3 = QtWidgets.QLineEdit(self.groupBox_32)\r\n self.freq_sat_mp_perf_3.setGeometry(QtCore.QRect(200, 109, 101, 22))\r\n self.freq_sat_mp_perf_3.setObjectName(\"freq_sat_mp_perf_3\")\r\n self.label_253 = QtWidgets.QLabel(self.groupBox_32)\r\n self.label_253.setGeometry(QtCore.QRect(666, 89, 91, 20))\r\n self.label_253.setObjectName(\"label_253\")\r\n self.pol_sat_mp_perf_3 = QtWidgets.QComboBox(self.groupBox_32)\r\n self.pol_sat_mp_perf_3.setGeometry(QtCore.QRect(663, 119, 101, 22))\r\n self.pol_sat_mp_perf_3.setObjectName(\"pol_sat_mp_perf_3\")\r\n self.pol_sat_mp_perf_3.addItem(\"\")\r\n self.pol_sat_mp_perf_3.addItem(\"\")\r\n self.pol_sat_mp_perf_3.addItem(\"\")\r\n self.clear_satellite_mp_perf_3 = QtWidgets.QPushButton(self.groupBox_32)\r\n self.clear_satellite_mp_perf_3.setGeometry(QtCore.QRect(658, 156, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_satellite_mp_perf_3.setFont(font)\r\n self.clear_satellite_mp_perf_3.setObjectName(\"clear_satellite_mp_perf_3\")\r\n self.groupBox_33 = QtWidgets.QGroupBox(self.list_perf_calc_page_3)\r\n self.groupBox_33.setGeometry(QtCore.QRect(10, 458, 801, 221))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_33.setFont(font)\r\n self.groupBox_33.setAutoFillBackground(False)\r\n self.groupBox_33.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_33.setObjectName(\"groupBox_33\")\r\n self.label_254 = QtWidgets.QLabel(self.groupBox_33)\r\n self.label_254.setGeometry(QtCore.QRect(20, 38, 55, 16))\r\n self.label_254.setObjectName(\"label_254\")\r\n self.label_255 = QtWidgets.QLabel(self.groupBox_33)\r\n self.label_255.setGeometry(QtCore.QRect(20, 68, 151, 31))\r\n self.label_255.setObjectName(\"label_255\")\r\n self.label_256 = QtWidgets.QLabel(self.groupBox_33)\r\n 
self.label_256.setGeometry(QtCore.QRect(20, 113, 161, 21))\r\n self.label_256.setObjectName(\"label_256\")\r\n self.name_reception_mp_perf_3 = QtWidgets.QLineEdit(self.groupBox_33)\r\n self.name_reception_mp_perf_3.setGeometry(QtCore.QRect(100, 38, 511, 22))\r\n self.name_reception_mp_perf_3.setObjectName(\"name_reception_mp_perf_3\")\r\n self.lnb_gain_reception_mp_perf_3 = QtWidgets.QLineEdit(self.groupBox_33)\r\n self.lnb_gain_reception_mp_perf_3.setGeometry(QtCore.QRect(200, 113, 101, 22))\r\n self.lnb_gain_reception_mp_perf_3.setObjectName(\"lnb_gain_reception_mp_perf_3\")\r\n self.ant_size_reception_mp_perf_3 = QtWidgets.QLineEdit(self.groupBox_33)\r\n self.ant_size_reception_mp_perf_3.setGeometry(QtCore.QRect(200, 74, 101, 22))\r\n self.ant_size_reception_mp_perf_3.setObjectName(\"ant_size_reception_mp_perf_3\")\r\n self.save_reception_mp_perf_3 = QtWidgets.QPushButton(self.groupBox_33)\r\n self.save_reception_mp_perf_3.setGeometry(QtCore.QRect(656, 186, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_reception_mp_perf_3.setFont(font)\r\n self.save_reception_mp_perf_3.setObjectName(\"save_reception_mp_perf_3\")\r\n self.load_reception_mp_perf_3 = QtWidgets.QPushButton(self.groupBox_33)\r\n self.load_reception_mp_perf_3.setGeometry(QtCore.QRect(656, 151, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_reception_mp_perf_3.setFont(font)\r\n self.load_reception_mp_perf_3.setObjectName(\"load_reception_mp_perf_3\")\r\n self.label_257 = QtWidgets.QLabel(self.groupBox_33)\r\n self.label_257.setGeometry(QtCore.QRect(330, 78, 141, 16))\r\n self.label_257.setObjectName(\"label_257\")\r\n self.ant_eff_reception_mp_perf_3 = QtWidgets.QLineEdit(self.groupBox_33)\r\n self.ant_eff_reception_mp_perf_3.setGeometry(QtCore.QRect(510, 78, 101, 22))\r\n self.ant_eff_reception_mp_perf_3.setObjectName(\"ant_eff_reception_mp_perf_3\")\r\n self.label_258 = QtWidgets.QLabel(self.groupBox_33)\r\n self.label_258.setGeometry(QtCore.QRect(330, 118, 161, 16))\r\n self.label_258.setScaledContents(False)\r\n self.label_258.setObjectName(\"label_258\")\r\n self.lnb_temp_reception_mp_perf_3 = QtWidgets.QLineEdit(self.groupBox_33)\r\n self.lnb_temp_reception_mp_perf_3.setGeometry(QtCore.QRect(510, 115, 101, 22))\r\n self.lnb_temp_reception_mp_perf_3.setObjectName(\"lnb_temp_reception_mp_perf_3\")\r\n self.label_259 = QtWidgets.QLabel(self.groupBox_33)\r\n self.label_259.setGeometry(QtCore.QRect(20, 132, 171, 51))\r\n self.label_259.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_259.setObjectName(\"label_259\")\r\n self.aditional_losses_reception_mp_perf_3 = QtWidgets.QLineEdit(self.groupBox_33)\r\n self.aditional_losses_reception_mp_perf_3.setGeometry(QtCore.QRect(200, 146, 101, 22))\r\n self.aditional_losses_reception_mp_perf_3.setObjectName(\"aditional_losses_reception_mp_perf_3\")\r\n self.label_260 = QtWidgets.QLabel(self.groupBox_33)\r\n self.label_260.setGeometry(QtCore.QRect(20, 170, 171, 51))\r\n self.label_260.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_260.setObjectName(\"label_260\")\r\n self.max_depoint_reception_mp_perf_3 = QtWidgets.QLineEdit(self.groupBox_33)\r\n self.max_depoint_reception_mp_perf_3.setGeometry(QtCore.QRect(200, 190, 101, 22))\r\n self.max_depoint_reception_mp_perf_3.setObjectName(\"max_depoint_reception_mp_perf_3\")\r\n self.clear_reception_mp_perf_3 = QtWidgets.QPushButton(self.groupBox_33)\r\n 
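# After the clear button, the page-level run controls are built: output text area, relaxation and margin fields, thread-count selector and the calculate button.\r\n        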
self.clear_reception_mp_perf_3.setGeometry(QtCore.QRect(656, 115, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_reception_mp_perf_3.setFont(font)\r\n self.clear_reception_mp_perf_3.setObjectName(\"clear_reception_mp_perf_3\")\r\n self.cable_loss_reception_mp_perf_3 = QtWidgets.QLineEdit(self.groupBox_33)\r\n self.cable_loss_reception_mp_perf_3.setGeometry(QtCore.QRect(510, 150, 101, 22))\r\n self.cable_loss_reception_mp_perf_3.setObjectName(\"cable_loss_reception_mp_perf_3\")\r\n self.label_261 = QtWidgets.QLabel(self.groupBox_33)\r\n self.label_261.setGeometry(QtCore.QRect(330, 133, 171, 51))\r\n self.label_261.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_261.setObjectName(\"label_261\")\r\n self.output_sp_perf_5 = QtWidgets.QTextEdit(self.list_perf_calc_page_3)\r\n self.output_sp_perf_5.setGeometry(QtCore.QRect(10, 684, 801, 141))\r\n self.output_sp_perf_5.setObjectName(\"output_sp_perf_5\")\r\n self.relaxation_mp_perf_3 = QtWidgets.QLineEdit(self.list_perf_calc_page_3)\r\n self.relaxation_mp_perf_3.setGeometry(QtCore.QRect(170, 832, 51, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.relaxation_mp_perf_3.setFont(font)\r\n self.relaxation_mp_perf_3.setToolTip(\"\")\r\n self.relaxation_mp_perf_3.setObjectName(\"relaxation_mp_perf_3\")\r\n self.calc_mp_perf_3 = QtWidgets.QPushButton(self.list_perf_calc_page_3)\r\n self.calc_mp_perf_3.setGeometry(QtCore.QRect(670, 830, 141, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.calc_mp_perf_3.setFont(font)\r\n self.calc_mp_perf_3.setObjectName(\"calc_mp_perf_3\")\r\n self.label_262 = QtWidgets.QLabel(self.list_perf_calc_page_3)\r\n self.label_262.setGeometry(QtCore.QRect(10, 833, 151, 21))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_262.setFont(font)\r\n self.label_262.setObjectName(\"label_262\")\r\n self.label_263 = QtWidgets.QLabel(self.list_perf_calc_page_3)\r\n self.label_263.setGeometry(QtCore.QRect(420, 831, 61, 20))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_263.setFont(font)\r\n self.label_263.setObjectName(\"label_263\")\r\n self.n_threads_3 = QtWidgets.QComboBox(self.list_perf_calc_page_3)\r\n self.n_threads_3.setGeometry(QtCore.QRect(490, 831, 73, 22))\r\n self.n_threads_3.setObjectName(\"n_threads_3\")\r\n self.path_mp_perf_3 = QtWidgets.QLineEdit(self.list_perf_calc_page_3)\r\n self.path_mp_perf_3.setGeometry(QtCore.QRect(55, 10, 631, 22))\r\n self.path_mp_perf_3.setObjectName(\"path_mp_perf_3\")\r\n self.label_264 = QtWidgets.QLabel(self.list_perf_calc_page_3)\r\n self.label_264.setGeometry(QtCore.QRect(240, 831, 91, 21))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_264.setFont(font)\r\n self.label_264.setObjectName(\"label_264\")\r\n self.margin_mp_perf_3 = QtWidgets.QLineEdit(self.list_perf_calc_page_3)\r\n self.margin_mp_perf_3.setGeometry(QtCore.QRect(340, 831, 61, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.margin_mp_perf_3.setFont(font)\r\n self.margin_mp_perf_3.setToolTip(\"\")\r\n self.margin_mp_perf_3.setObjectName(\"margin_mp_perf_3\")\r\n self.stackedWidget_4.addWidget(self.list_perf_calc_page_3)\r\n self.stackedWidget_3.addWidget(self.ant_size_single_point_calc_page_3)\r\n self.page_2 = QtWidgets.QWidget()\r\n self.page_2.setObjectName(\"page_2\")\r\n self.stackedWidget_3.addWidget(self.page_2)\r\n self.list_ant_size_calc_page_4 = QtWidgets.QWidget()\r\n 
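# The remaining pages are registered on stackedWidget_3 below; single_point_atm_calc_page_4 hosts the single-point atmospheric (spatm) calculation form.\r\n        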
self.list_ant_size_calc_page_4.setObjectName(\"list_ant_size_calc_page_4\")\r\n self.stackedWidget_3.addWidget(self.list_ant_size_calc_page_4)\r\n self.single_point_atm_calc_page_4 = QtWidgets.QWidget()\r\n self.single_point_atm_calc_page_4.setObjectName(\"single_point_atm_calc_page_4\")\r\n self.groupBox_34 = QtWidgets.QGroupBox(self.single_point_atm_calc_page_4)\r\n self.groupBox_34.setGeometry(QtCore.QRect(20, 20, 781, 131))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_34.setFont(font)\r\n self.groupBox_34.setObjectName(\"groupBox_34\")\r\n self.lat_ground_station_spatm_4 = QtWidgets.QLineEdit(self.groupBox_34)\r\n self.lat_ground_station_spatm_4.setGeometry(QtCore.QRect(200, 30, 101, 22))\r\n self.lat_ground_station_spatm_4.setObjectName(\"lat_ground_station_spatm_4\")\r\n self.long_ground_station_spatm_4 = QtWidgets.QLineEdit(self.groupBox_34)\r\n self.long_ground_station_spatm_4.setGeometry(QtCore.QRect(500, 30, 101, 22))\r\n self.long_ground_station_spatm_4.setObjectName(\"long_ground_station_spatm_4\")\r\n self.label_265 = QtWidgets.QLabel(self.groupBox_34)\r\n self.label_265.setGeometry(QtCore.QRect(20, 23, 151, 31))\r\n self.label_265.setObjectName(\"label_265\")\r\n self.label_266 = QtWidgets.QLabel(self.groupBox_34)\r\n self.label_266.setGeometry(QtCore.QRect(319, 28, 161, 21))\r\n self.label_266.setObjectName(\"label_266\")\r\n self.label_267 = QtWidgets.QLabel(self.groupBox_34)\r\n self.label_267.setGeometry(QtCore.QRect(20, 60, 161, 21))\r\n self.label_267.setObjectName(\"label_267\")\r\n self.ant_size_reception_spatm_4 = QtWidgets.QLineEdit(self.groupBox_34)\r\n self.ant_size_reception_spatm_4.setGeometry(QtCore.QRect(200, 60, 101, 22))\r\n self.ant_size_reception_spatm_4.setObjectName(\"ant_size_reception_spatm_4\")\r\n self.ant_eff_reception_spatm_4 = QtWidgets.QLineEdit(self.groupBox_34)\r\n self.ant_eff_reception_spatm_4.setGeometry(QtCore.QRect(500, 60, 101, 22))\r\n self.ant_eff_reception_spatm_4.setObjectName(\"ant_eff_reception_spatm_4\")\r\n self.label_268 = QtWidgets.QLabel(self.groupBox_34)\r\n self.label_268.setGeometry(QtCore.QRect(320, 60, 141, 16))\r\n self.label_268.setObjectName(\"label_268\")\r\n self.clear_reception_rcp_8 = QtWidgets.QPushButton(self.groupBox_34)\r\n self.clear_reception_rcp_8.setGeometry(QtCore.QRect(660, 30, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_reception_rcp_8.setFont(font)\r\n self.clear_reception_rcp_8.setObjectName(\"clear_reception_rcp_8\")\r\n self.load_reception_rcp_14 = QtWidgets.QPushButton(self.groupBox_34)\r\n self.load_reception_rcp_14.setGeometry(QtCore.QRect(15, 91, 201, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_reception_rcp_14.setFont(font)\r\n self.load_reception_rcp_14.setObjectName(\"load_reception_rcp_14\")\r\n self.load_reception_rcp_15 = QtWidgets.QPushButton(self.groupBox_34)\r\n self.load_reception_rcp_15.setGeometry(QtCore.QRect(230, 90, 201, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_reception_rcp_15.setFont(font)\r\n self.load_reception_rcp_15.setObjectName(\"load_reception_rcp_15\")\r\n self.calc_spatm_4 = QtWidgets.QPushButton(self.single_point_atm_calc_page_4)\r\n self.calc_spatm_4.setGeometry(QtCore.QRect(666, 830, 141, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.calc_spatm_4.setFont(font)\r\n 
self.calc_spatm_4.setObjectName(\"calc_spatm_4\")\r\n self.textEdit_5 = QtWidgets.QTextEdit(self.single_point_atm_calc_page_4)\r\n self.textEdit_5.setGeometry(QtCore.QRect(6, 650, 801, 171))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.textEdit_5.setFont(font)\r\n self.textEdit_5.setObjectName(\"textEdit_5\")\r\n self.groupBox_35 = QtWidgets.QGroupBox(self.single_point_atm_calc_page_4)\r\n self.groupBox_35.setGeometry(QtCore.QRect(20, 150, 781, 131))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_35.setFont(font)\r\n self.groupBox_35.setObjectName(\"groupBox_35\")\r\n self.label_269 = QtWidgets.QLabel(self.groupBox_35)\r\n self.label_269.setGeometry(QtCore.QRect(20, 60, 131, 16))\r\n self.label_269.setObjectName(\"label_269\")\r\n self.label_270 = QtWidgets.QLabel(self.groupBox_35)\r\n self.label_270.setGeometry(QtCore.QRect(20, 30, 161, 21))\r\n self.label_270.setObjectName(\"label_270\")\r\n self.long_sat_spatm_4 = QtWidgets.QLineEdit(self.groupBox_35)\r\n self.long_sat_spatm_4.setGeometry(QtCore.QRect(200, 30, 101, 22))\r\n self.long_sat_spatm_4.setObjectName(\"long_sat_spatm_4\")\r\n self.freq_sat_spatm_4 = QtWidgets.QLineEdit(self.groupBox_35)\r\n self.freq_sat_spatm_4.setGeometry(QtCore.QRect(200, 60, 101, 22))\r\n self.freq_sat_spatm_4.setObjectName(\"freq_sat_spatm_4\")\r\n self.default_sat_sp_perf_7 = QtWidgets.QComboBox(self.groupBox_35)\r\n self.default_sat_sp_perf_7.setGeometry(QtCore.QRect(634, 44, 131, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(8)\r\n self.default_sat_sp_perf_7.setFont(font)\r\n self.default_sat_sp_perf_7.setObjectName(\"default_sat_sp_perf_7\")\r\n self.label_271 = QtWidgets.QLabel(self.groupBox_35)\r\n self.label_271.setGeometry(QtCore.QRect(636, 14, 131, 16))\r\n self.label_271.setObjectName(\"label_271\")\r\n self.pol_sat_8 = QtWidgets.QComboBox(self.groupBox_35)\r\n self.pol_sat_8.setGeometry(QtCore.QRect(450, 30, 101, 22))\r\n self.pol_sat_8.setObjectName(\"pol_sat_8\")\r\n self.pol_sat_8.addItem(\"\")\r\n self.pol_sat_8.addItem(\"\")\r\n self.pol_sat_8.addItem(\"\")\r\n self.label_272 = QtWidgets.QLabel(self.groupBox_35)\r\n self.label_272.setGeometry(QtCore.QRect(350, 30, 91, 20))\r\n self.label_272.setObjectName(\"label_272\")\r\n self.load_reception_rcp_16 = QtWidgets.QPushButton(self.groupBox_35)\r\n self.load_reception_rcp_16.setGeometry(QtCore.QRect(10, 90, 201, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_reception_rcp_16.setFont(font)\r\n self.load_reception_rcp_16.setObjectName(\"load_reception_rcp_16\")\r\n self.p_year_spatm_4 = QtWidgets.QLineEdit(self.single_point_atm_calc_page_4)\r\n self.p_year_spatm_4.setGeometry(QtCore.QRect(550, 834, 101, 21))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.p_year_spatm_4.setFont(font)\r\n self.p_year_spatm_4.setToolTip(\"\")\r\n self.p_year_spatm_4.setObjectName(\"p_year_spatm_4\")\r\n self.label_273 = QtWidgets.QLabel(self.single_point_atm_calc_page_4)\r\n self.label_273.setGeometry(QtCore.QRect(340, 834, 201, 20))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_273.setFont(font)\r\n self.label_273.setObjectName(\"label_273\")\r\n self.label_274 = QtWidgets.QLabel(self.single_point_atm_calc_page_4)\r\n self.label_274.setGeometry(QtCore.QRect(10, 837, 71, 16))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_274.setFont(font)\r\n self.label_274.setObjectName(\"label_274\")\r\n self.method_spatm_4 = 
QtWidgets.QComboBox(self.single_point_atm_calc_page_4)\r\n self.method_spatm_4.setGeometry(QtCore.QRect(84, 835, 101, 21))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.method_spatm_4.setFont(font)\r\n self.method_spatm_4.setObjectName(\"method_spatm_4\")\r\n self.method_spatm_4.addItem(\"\")\r\n self.method_spatm_4.addItem(\"\")\r\n self.stackedWidget_3.addWidget(self.single_point_atm_calc_page_4)\r\n self.single_point_perf_calc_page_4 = QtWidgets.QWidget()\r\n self.single_point_perf_calc_page_4.setObjectName(\"single_point_perf_calc_page_4\")\r\n self.groupBox_36 = QtWidgets.QGroupBox(self.single_point_perf_calc_page_4)\r\n self.groupBox_36.setGeometry(QtCore.QRect(10, 10, 801, 121))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_36.setFont(font)\r\n self.groupBox_36.setAutoFillBackground(False)\r\n self.groupBox_36.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_36.setObjectName(\"groupBox_36\")\r\n self.label_275 = QtWidgets.QLabel(self.groupBox_36)\r\n self.label_275.setGeometry(QtCore.QRect(20, 36, 55, 16))\r\n self.label_275.setObjectName(\"label_275\")\r\n self.label_276 = QtWidgets.QLabel(self.groupBox_36)\r\n self.label_276.setGeometry(QtCore.QRect(20, 66, 151, 31))\r\n self.label_276.setObjectName(\"label_276\")\r\n self.label_277 = QtWidgets.QLabel(self.groupBox_36)\r\n self.label_277.setGeometry(QtCore.QRect(330, 78, 161, 21))\r\n self.label_277.setObjectName(\"label_277\")\r\n self.name_ground_station_sp_perf_4 = QtWidgets.QLineEdit(self.groupBox_36)\r\n self.name_ground_station_sp_perf_4.setGeometry(QtCore.QRect(100, 36, 511, 22))\r\n self.name_ground_station_sp_perf_4.setObjectName(\"name_ground_station_sp_perf_4\")\r\n self.long_ground_station_sp_perf_4 = QtWidgets.QLineEdit(self.groupBox_36)\r\n self.long_ground_station_sp_perf_4.setGeometry(QtCore.QRect(510, 80, 101, 22))\r\n self.long_ground_station_sp_perf_4.setObjectName(\"long_ground_station_sp_perf_4\")\r\n self.lat_ground_station_sp_perf_4 = QtWidgets.QLineEdit(self.groupBox_36)\r\n self.lat_ground_station_sp_perf_4.setGeometry(QtCore.QRect(200, 76, 101, 22))\r\n self.lat_ground_station_sp_perf_4.setObjectName(\"lat_ground_station_sp_perf_4\")\r\n self.save_ground_station_sp_perf_4 = QtWidgets.QPushButton(self.groupBox_36)\r\n self.save_ground_station_sp_perf_4.setGeometry(QtCore.QRect(652, 80, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_ground_station_sp_perf_4.setFont(font)\r\n self.save_ground_station_sp_perf_4.setObjectName(\"save_ground_station_sp_perf_4\")\r\n self.load_ground_station_sp_perf_4 = QtWidgets.QPushButton(self.groupBox_36)\r\n self.load_ground_station_sp_perf_4.setGeometry(QtCore.QRect(652, 48, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_ground_station_sp_perf_4.setFont(font)\r\n self.load_ground_station_sp_perf_4.setObjectName(\"load_ground_station_sp_perf_4\")\r\n self.clear_ground_station_sp_perf_4 = QtWidgets.QPushButton(self.groupBox_36)\r\n self.clear_ground_station_sp_perf_4.setGeometry(QtCore.QRect(652, 16, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_ground_station_sp_perf_4.setFont(font)\r\n self.clear_ground_station_sp_perf_4.setObjectName(\"clear_ground_station_sp_perf_4\")\r\n self.groupBox_37 = QtWidgets.QGroupBox(self.single_point_perf_calc_page_4)\r\n 
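# groupBox_37: satellite parameters for the single-point performance page (longitude, height, EIRP, frequency, bandwidth, roll-off, modcod, polarization).\r\n        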
self.groupBox_37.setGeometry(QtCore.QRect(10, 129, 801, 271))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_37.setFont(font)\r\n self.groupBox_37.setAutoFillBackground(False)\r\n self.groupBox_37.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_37.setObjectName(\"groupBox_37\")\r\n self.label_278 = QtWidgets.QLabel(self.groupBox_37)\r\n self.label_278.setGeometry(QtCore.QRect(20, 29, 55, 16))\r\n self.label_278.setObjectName(\"label_278\")\r\n self.label_279 = QtWidgets.QLabel(self.groupBox_37)\r\n self.label_279.setGeometry(QtCore.QRect(20, 71, 161, 21))\r\n self.label_279.setObjectName(\"label_279\")\r\n self.name_sat_sp_perf_4 = QtWidgets.QLineEdit(self.groupBox_37)\r\n self.name_sat_sp_perf_4.setGeometry(QtCore.QRect(100, 29, 511, 22))\r\n self.name_sat_sp_perf_4.setObjectName(\"name_sat_sp_perf_4\")\r\n self.long_sat_sp_perf_4 = QtWidgets.QLineEdit(self.groupBox_37)\r\n self.long_sat_sp_perf_4.setGeometry(QtCore.QRect(200, 71, 101, 22))\r\n self.long_sat_sp_perf_4.setObjectName(\"long_sat_sp_perf_4\")\r\n self.save_sat_sp_perf_4 = QtWidgets.QPushButton(self.groupBox_37)\r\n self.save_sat_sp_perf_4.setGeometry(QtCore.QRect(653, 231, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_sat_sp_perf_4.setFont(font)\r\n self.save_sat_sp_perf_4.setObjectName(\"save_sat_sp_perf_4\")\r\n self.load_sat_sp_perf_4 = QtWidgets.QPushButton(self.groupBox_37)\r\n self.load_sat_sp_perf_4.setGeometry(QtCore.QRect(653, 199, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_sat_sp_perf_4.setFont(font)\r\n self.load_sat_sp_perf_4.setObjectName(\"load_sat_sp_perf_4\")\r\n self.default_sat_sp_perf_8 = QtWidgets.QComboBox(self.groupBox_37)\r\n self.default_sat_sp_perf_8.setGeometry(QtCore.QRect(650, 61, 131, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(8)\r\n self.default_sat_sp_perf_8.setFont(font)\r\n self.default_sat_sp_perf_8.setObjectName(\"default_sat_sp_perf_8\")\r\n self.label_280 = QtWidgets.QLabel(self.groupBox_37)\r\n self.label_280.setGeometry(QtCore.QRect(648, 31, 131, 16))\r\n self.label_280.setObjectName(\"label_280\")\r\n self.label_281 = QtWidgets.QLabel(self.groupBox_37)\r\n self.label_281.setGeometry(QtCore.QRect(330, 69, 111, 16))\r\n self.label_281.setObjectName(\"label_281\")\r\n self.height_sat_sp_perf_4 = QtWidgets.QLineEdit(self.groupBox_37)\r\n self.height_sat_sp_perf_4.setGeometry(QtCore.QRect(510, 69, 101, 22))\r\n self.height_sat_sp_perf_4.setObjectName(\"height_sat_sp_perf_4\")\r\n self.label_282 = QtWidgets.QLabel(self.groupBox_37)\r\n self.label_282.setGeometry(QtCore.QRect(330, 110, 121, 16))\r\n self.label_282.setObjectName(\"label_282\")\r\n self.eirp_sat_sp_perf_4 = QtWidgets.QLineEdit(self.groupBox_37)\r\n self.eirp_sat_sp_perf_4.setGeometry(QtCore.QRect(510, 106, 101, 22))\r\n self.eirp_sat_sp_perf_4.setObjectName(\"eirp_sat_sp_perf_4\")\r\n self.label_283 = QtWidgets.QLabel(self.groupBox_37)\r\n self.label_283.setGeometry(QtCore.QRect(20, 139, 171, 51))\r\n self.label_283.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_283.setObjectName(\"label_283\")\r\n self.max_bw_sat_sp_perf_4 = QtWidgets.QLineEdit(self.groupBox_37)\r\n self.max_bw_sat_sp_perf_4.setGeometry(QtCore.QRect(200, 149, 101, 22))\r\n self.max_bw_sat_sp_perf_4.setObjectName(\"max_bw_sat_sp_perf_4\")\r\n self.label_284 = QtWidgets.QLabel(self.groupBox_37)\r\n 
self.label_284.setGeometry(QtCore.QRect(330, 134, 171, 51))\r\n self.label_284.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_284.setObjectName(\"label_284\")\r\n self.bw_util_sat_sp_perf_4 = QtWidgets.QLineEdit(self.groupBox_37)\r\n self.bw_util_sat_sp_perf_4.setGeometry(QtCore.QRect(510, 147, 101, 22))\r\n self.bw_util_sat_sp_perf_4.setObjectName(\"bw_util_sat_sp_perf_4\")\r\n self.label_285 = QtWidgets.QLabel(self.groupBox_37)\r\n self.label_285.setGeometry(QtCore.QRect(20, 191, 61, 16))\r\n self.label_285.setObjectName(\"label_285\")\r\n self.rolloff_sat_sp_perf_4 = QtWidgets.QLineEdit(self.groupBox_37)\r\n self.rolloff_sat_sp_perf_4.setGeometry(QtCore.QRect(200, 190, 101, 22))\r\n self.rolloff_sat_sp_perf_4.setObjectName(\"rolloff_sat_sp_perf_4\")\r\n self.label_286 = QtWidgets.QLabel(self.groupBox_37)\r\n self.label_286.setGeometry(QtCore.QRect(330, 192, 91, 16))\r\n self.label_286.setObjectName(\"label_286\")\r\n self.modcod_sat_sp_perf_4 = QtWidgets.QComboBox(self.groupBox_37)\r\n self.modcod_sat_sp_perf_4.setGeometry(QtCore.QRect(450, 190, 161, 22))\r\n self.modcod_sat_sp_perf_4.setObjectName(\"modcod_sat_sp_perf_4\")\r\n self.modcod_sat_sp_perf_4.addItem(\"\")\r\n self.modcod_sat_sp_perf_4.addItem(\"\")\r\n self.modcod_sat_sp_perf_4.addItem(\"\")\r\n self.label_287 = QtWidgets.QLabel(self.groupBox_37)\r\n self.label_287.setGeometry(QtCore.QRect(20, 111, 131, 20))\r\n self.label_287.setObjectName(\"label_287\")\r\n self.freq_sat_sp_perf_4 = QtWidgets.QLineEdit(self.groupBox_37)\r\n self.freq_sat_sp_perf_4.setGeometry(QtCore.QRect(200, 111, 101, 22))\r\n self.freq_sat_sp_perf_4.setObjectName(\"freq_sat_sp_perf_4\")\r\n self.label_288 = QtWidgets.QLabel(self.groupBox_37)\r\n self.label_288.setGeometry(QtCore.QRect(665, 97, 91, 20))\r\n self.label_288.setObjectName(\"label_288\")\r\n self.pol_sat_sp_perf_4 = QtWidgets.QComboBox(self.groupBox_37)\r\n self.pol_sat_sp_perf_4.setGeometry(QtCore.QRect(660, 121, 101, 22))\r\n self.pol_sat_sp_perf_4.setObjectName(\"pol_sat_sp_perf_4\")\r\n self.pol_sat_sp_perf_4.addItem(\"\")\r\n self.pol_sat_sp_perf_4.addItem(\"\")\r\n self.pol_sat_sp_perf_4.addItem(\"\")\r\n self.clear_sat_sp_perf_4 = QtWidgets.QPushButton(self.groupBox_37)\r\n self.clear_sat_sp_perf_4.setGeometry(QtCore.QRect(653, 169, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_sat_sp_perf_4.setFont(font)\r\n self.clear_sat_sp_perf_4.setObjectName(\"clear_sat_sp_perf_4\")\r\n self.groupBox_38 = QtWidgets.QGroupBox(self.single_point_perf_calc_page_4)\r\n self.groupBox_38.setGeometry(QtCore.QRect(10, 400, 801, 231))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_38.setFont(font)\r\n self.groupBox_38.setAutoFillBackground(False)\r\n self.groupBox_38.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_38.setObjectName(\"groupBox_38\")\r\n self.label_289 = QtWidgets.QLabel(self.groupBox_38)\r\n self.label_289.setGeometry(QtCore.QRect(20, 38, 55, 16))\r\n self.label_289.setObjectName(\"label_289\")\r\n self.label_290 = QtWidgets.QLabel(self.groupBox_38)\r\n self.label_290.setGeometry(QtCore.QRect(20, 68, 151, 31))\r\n self.label_290.setObjectName(\"label_290\")\r\n self.label_291 = QtWidgets.QLabel(self.groupBox_38)\r\n self.label_291.setGeometry(QtCore.QRect(20, 113, 161, 21))\r\n self.label_291.setObjectName(\"label_291\")\r\n self.name_reception_sp_perf_4 = QtWidgets.QLineEdit(self.groupBox_38)\r\n 
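# groupBox_38: reception chain for the single-point performance page (antenna size and efficiency, LNB gain and temperature, cable and additional losses, maximum depointing).\r\n        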
self.name_reception_sp_perf_4.setGeometry(QtCore.QRect(100, 38, 511, 22))\r\n self.name_reception_sp_perf_4.setObjectName(\"name_reception_sp_perf_4\")\r\n self.lnb_gain_reception_sp_perf_4 = QtWidgets.QLineEdit(self.groupBox_38)\r\n self.lnb_gain_reception_sp_perf_4.setGeometry(QtCore.QRect(200, 113, 101, 22))\r\n self.lnb_gain_reception_sp_perf_4.setObjectName(\"lnb_gain_reception_sp_perf_4\")\r\n self.ant_size_reception_sp_perf_2 = QtWidgets.QLineEdit(self.groupBox_38)\r\n self.ant_size_reception_sp_perf_2.setGeometry(QtCore.QRect(200, 74, 101, 22))\r\n self.ant_size_reception_sp_perf_2.setObjectName(\"ant_size_reception_sp_perf_2\")\r\n self.save_reception_sp_perf_4 = QtWidgets.QPushButton(self.groupBox_38)\r\n self.save_reception_sp_perf_4.setGeometry(QtCore.QRect(654, 189, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_reception_sp_perf_4.setFont(font)\r\n self.save_reception_sp_perf_4.setObjectName(\"save_reception_sp_perf_4\")\r\n self.load_reception_sp_perf_4 = QtWidgets.QPushButton(self.groupBox_38)\r\n self.load_reception_sp_perf_4.setGeometry(QtCore.QRect(654, 155, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_reception_sp_perf_4.setFont(font)\r\n self.load_reception_sp_perf_4.setObjectName(\"load_reception_sp_perf_4\")\r\n self.label_292 = QtWidgets.QLabel(self.groupBox_38)\r\n self.label_292.setGeometry(QtCore.QRect(330, 78, 141, 16))\r\n self.label_292.setObjectName(\"label_292\")\r\n self.ant_eff_reception_sp_perf_4 = QtWidgets.QLineEdit(self.groupBox_38)\r\n self.ant_eff_reception_sp_perf_4.setGeometry(QtCore.QRect(510, 78, 101, 22))\r\n self.ant_eff_reception_sp_perf_4.setObjectName(\"ant_eff_reception_sp_perf_4\")\r\n self.label_293 = QtWidgets.QLabel(self.groupBox_38)\r\n self.label_293.setGeometry(QtCore.QRect(330, 118, 161, 16))\r\n self.label_293.setObjectName(\"label_293\")\r\n self.lnb_temp_reception_sp_perf_4 = QtWidgets.QLineEdit(self.groupBox_38)\r\n self.lnb_temp_reception_sp_perf_4.setGeometry(QtCore.QRect(510, 115, 101, 22))\r\n self.lnb_temp_reception_sp_perf_4.setObjectName(\"lnb_temp_reception_sp_perf_4\")\r\n self.label_294 = QtWidgets.QLabel(self.groupBox_38)\r\n self.label_294.setGeometry(QtCore.QRect(17, 139, 171, 51))\r\n self.label_294.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_294.setObjectName(\"label_294\")\r\n self.aditional_losses_reception_sp_perf_4 = QtWidgets.QLineEdit(self.groupBox_38)\r\n self.aditional_losses_reception_sp_perf_4.setGeometry(QtCore.QRect(200, 153, 101, 22))\r\n self.aditional_losses_reception_sp_perf_4.setObjectName(\"aditional_losses_reception_sp_perf_4\")\r\n self.label_295 = QtWidgets.QLabel(self.groupBox_38)\r\n self.label_295.setGeometry(QtCore.QRect(19, 179, 171, 51))\r\n self.label_295.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_295.setObjectName(\"label_295\")\r\n self.max_depoint_reception_sp_perf_4 = QtWidgets.QLineEdit(self.groupBox_38)\r\n self.max_depoint_reception_sp_perf_4.setGeometry(QtCore.QRect(200, 192, 101, 22))\r\n self.max_depoint_reception_sp_perf_4.setObjectName(\"max_depoint_reception_sp_perf_4\")\r\n self.clear_reception_sp_perf_4 = QtWidgets.QPushButton(self.groupBox_38)\r\n self.clear_reception_sp_perf_4.setGeometry(QtCore.QRect(654, 123, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_reception_sp_perf_4.setFont(font)\r\n 
self.clear_reception_sp_perf_4.setObjectName(\"clear_reception_sp_perf_4\")\r\n self.cable_loss_reception_sp_perf_4 = QtWidgets.QLineEdit(self.groupBox_38)\r\n self.cable_loss_reception_sp_perf_4.setGeometry(QtCore.QRect(509, 154, 101, 22))\r\n self.cable_loss_reception_sp_perf_4.setObjectName(\"cable_loss_reception_sp_perf_4\")\r\n self.label_296 = QtWidgets.QLabel(self.groupBox_38)\r\n self.label_296.setGeometry(QtCore.QRect(330, 140, 171, 51))\r\n self.label_296.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_296.setObjectName(\"label_296\")\r\n self.calc_sp_perf_4 = QtWidgets.QPushButton(self.single_point_perf_calc_page_4)\r\n self.calc_sp_perf_4.setGeometry(QtCore.QRect(670, 830, 141, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.calc_sp_perf_4.setFont(font)\r\n self.calc_sp_perf_4.setObjectName(\"calc_sp_perf_4\")\r\n self.output_sp_perf_3 = QtWidgets.QTextEdit(self.single_point_perf_calc_page_4)\r\n self.output_sp_perf_3.setGeometry(QtCore.QRect(10, 638, 801, 181))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.output_sp_perf_3.setFont(font)\r\n self.output_sp_perf_3.setObjectName(\"output_sp_perf_3\")\r\n self.label_297 = QtWidgets.QLabel(self.single_point_perf_calc_page_4)\r\n self.label_297.setGeometry(QtCore.QRect(10, 830, 151, 21))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_297.setFont(font)\r\n self.label_297.setObjectName(\"label_297\")\r\n self.relaxation_sp_perf_4 = QtWidgets.QLineEdit(self.single_point_perf_calc_page_4)\r\n self.relaxation_sp_perf_4.setGeometry(QtCore.QRect(170, 830, 51, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.relaxation_sp_perf_4.setFont(font)\r\n self.relaxation_sp_perf_4.setToolTip(\"\")\r\n self.relaxation_sp_perf_4.setObjectName(\"relaxation_sp_perf_4\")\r\n self.margin_sp_perf_4 = QtWidgets.QLineEdit(self.single_point_perf_calc_page_4)\r\n self.margin_sp_perf_4.setGeometry(QtCore.QRect(340, 830, 61, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.margin_sp_perf_4.setFont(font)\r\n self.margin_sp_perf_4.setToolTip(\"\")\r\n self.margin_sp_perf_4.setObjectName(\"margin_sp_perf_4\")\r\n self.label_298 = QtWidgets.QLabel(self.single_point_perf_calc_page_4)\r\n self.label_298.setGeometry(QtCore.QRect(240, 830, 91, 21))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_298.setFont(font)\r\n self.label_298.setObjectName(\"label_298\")\r\n self.stackedWidget_3.addWidget(self.single_point_perf_calc_page_4)\r\n self.list_perf_calc_page_4 = QtWidgets.QWidget()\r\n self.list_perf_calc_page_4.setObjectName(\"list_perf_calc_page_4\")\r\n self.browse_mp_ant_size = QtWidgets.QPushButton(self.list_perf_calc_page_4)\r\n self.browse_mp_ant_size.setGeometry(QtCore.QRect(700, 11, 93, 21))\r\n self.browse_mp_ant_size.setObjectName(\"browse_mp_ant_size\")\r\n self.label_299 = QtWidgets.QLabel(self.list_perf_calc_page_4)\r\n self.label_299.setGeometry(QtCore.QRect(6, 13, 55, 16))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_299.setFont(font)\r\n self.label_299.setObjectName(\"label_299\")\r\n self.preview_mp_ant_size = QtWidgets.QTableWidget(self.list_perf_calc_page_4)\r\n self.preview_mp_ant_size.setGeometry(QtCore.QRect(10, 61, 801, 131))\r\n self.preview_mp_ant_size.setObjectName(\"preview_mp_ant_size\")\r\n self.preview_mp_ant_size.setColumnCount(0)\r\n self.preview_mp_ant_size.setRowCount(0)\r\n self.label_300 = QtWidgets.QLabel(self.list_perf_calc_page_4)\r\n self.label_300.setGeometry(QtCore.QRect(10, 41, 91, 16))\r\n 
self.label_300.setObjectName(\"label_300\")\r\n self.groupBox_39 = QtWidgets.QGroupBox(self.list_perf_calc_page_4)\r\n self.groupBox_39.setGeometry(QtCore.QRect(8, 199, 801, 261))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_39.setFont(font)\r\n self.groupBox_39.setAutoFillBackground(False)\r\n self.groupBox_39.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_39.setObjectName(\"groupBox_39\")\r\n self.label_301 = QtWidgets.QLabel(self.groupBox_39)\r\n self.label_301.setGeometry(QtCore.QRect(20, 27, 55, 16))\r\n self.label_301.setObjectName(\"label_301\")\r\n self.label_302 = QtWidgets.QLabel(self.groupBox_39)\r\n self.label_302.setGeometry(QtCore.QRect(20, 69, 161, 21))\r\n self.label_302.setObjectName(\"label_302\")\r\n self.name_sat_mp_ant_size = QtWidgets.QLineEdit(self.groupBox_39)\r\n self.name_sat_mp_ant_size.setGeometry(QtCore.QRect(100, 27, 511, 22))\r\n self.name_sat_mp_ant_size.setObjectName(\"name_sat_mp_ant_size\")\r\n self.long_sat_mp_ant_size = QtWidgets.QLineEdit(self.groupBox_39)\r\n self.long_sat_mp_ant_size.setGeometry(QtCore.QRect(200, 69, 101, 22))\r\n self.long_sat_mp_ant_size.setObjectName(\"long_sat_mp_ant_size\")\r\n self.save_sat_mp_ant_size = QtWidgets.QPushButton(self.groupBox_39)\r\n self.save_sat_mp_ant_size.setGeometry(QtCore.QRect(659, 222, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_sat_mp_ant_size.setFont(font)\r\n self.save_sat_mp_ant_size.setObjectName(\"save_sat_mp_ant_size\")\r\n self.load_sat_mp_ant_size = QtWidgets.QPushButton(self.groupBox_39)\r\n self.load_sat_mp_ant_size.setGeometry(QtCore.QRect(659, 190, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_sat_mp_ant_size.setFont(font)\r\n self.load_sat_mp_ant_size.setObjectName(\"load_sat_mp_ant_size\")\r\n self.default_sat_mp_ant_size = QtWidgets.QComboBox(self.groupBox_39)\r\n self.default_sat_mp_ant_size.setGeometry(QtCore.QRect(650, 58, 131, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(8)\r\n self.default_sat_mp_ant_size.setFont(font)\r\n self.default_sat_mp_ant_size.setObjectName(\"default_sat_mp_ant_size\")\r\n self.label_303 = QtWidgets.QLabel(self.groupBox_39)\r\n self.label_303.setGeometry(QtCore.QRect(651, 29, 131, 16))\r\n self.label_303.setObjectName(\"label_303\")\r\n self.label_304 = QtWidgets.QLabel(self.groupBox_39)\r\n self.label_304.setGeometry(QtCore.QRect(330, 70, 111, 16))\r\n self.label_304.setObjectName(\"label_304\")\r\n self.height_sat_mp_ant_size = QtWidgets.QLineEdit(self.groupBox_39)\r\n self.height_sat_mp_ant_size.setGeometry(QtCore.QRect(510, 67, 101, 22))\r\n self.height_sat_mp_ant_size.setObjectName(\"height_sat_mp_ant_size\")\r\n self.label_305 = QtWidgets.QLabel(self.groupBox_39)\r\n self.label_305.setGeometry(QtCore.QRect(330, 108, 121, 16))\r\n self.label_305.setObjectName(\"label_305\")\r\n self.eirp_sat_mp_ant_size = QtWidgets.QLineEdit(self.groupBox_39)\r\n self.eirp_sat_mp_ant_size.setGeometry(QtCore.QRect(510, 104, 101, 22))\r\n self.eirp_sat_mp_ant_size.setObjectName(\"eirp_sat_mp_ant_size\")\r\n self.label_306 = QtWidgets.QLabel(self.groupBox_39)\r\n self.label_306.setGeometry(QtCore.QRect(20, 137, 171, 51))\r\n self.label_306.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_306.setObjectName(\"label_306\")\r\n self.max_bw_sat_mp_ant_size = QtWidgets.QLineEdit(self.groupBox_39)\r\n self.max_bw_sat_mp_ant_size.setGeometry(QtCore.QRect(200, 147, 
101, 22))\r\n self.max_bw_sat_mp_ant_size.setObjectName(\"max_bw_sat_mp_ant_size\")\r\n self.label_307 = QtWidgets.QLabel(self.groupBox_39)\r\n self.label_307.setGeometry(QtCore.QRect(330, 132, 171, 51))\r\n self.label_307.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_307.setObjectName(\"label_307\")\r\n self.bw_util_sat_mp_ant_size = QtWidgets.QLineEdit(self.groupBox_39)\r\n self.bw_util_sat_mp_ant_size.setGeometry(QtCore.QRect(510, 145, 101, 22))\r\n self.bw_util_sat_mp_ant_size.setObjectName(\"bw_util_sat_mp_ant_size\")\r\n self.label_308 = QtWidgets.QLabel(self.groupBox_39)\r\n self.label_308.setGeometry(QtCore.QRect(20, 191, 61, 16))\r\n self.label_308.setObjectName(\"label_308\")\r\n self.rolloff_sat_mp_ant_size = QtWidgets.QLineEdit(self.groupBox_39)\r\n self.rolloff_sat_mp_ant_size.setGeometry(QtCore.QRect(200, 191, 101, 22))\r\n self.rolloff_sat_mp_ant_size.setObjectName(\"rolloff_sat_mp_ant_size\")\r\n self.label_309 = QtWidgets.QLabel(self.groupBox_39)\r\n self.label_309.setGeometry(QtCore.QRect(330, 191, 91, 16))\r\n self.label_309.setObjectName(\"label_309\")\r\n self.modcod_sat_mp_ant_size = QtWidgets.QComboBox(self.groupBox_39)\r\n self.modcod_sat_mp_ant_size.setGeometry(QtCore.QRect(450, 191, 161, 22))\r\n self.modcod_sat_mp_ant_size.setObjectName(\"modcod_sat_mp_ant_size\")\r\n self.label_310 = QtWidgets.QLabel(self.groupBox_39)\r\n self.label_310.setGeometry(QtCore.QRect(20, 109, 131, 20))\r\n self.label_310.setObjectName(\"label_310\")\r\n self.freq_sat_mp_ant_size = QtWidgets.QLineEdit(self.groupBox_39)\r\n self.freq_sat_mp_ant_size.setGeometry(QtCore.QRect(200, 109, 101, 22))\r\n self.freq_sat_mp_ant_size.setObjectName(\"freq_sat_mp_ant_size\")\r\n self.label_311 = QtWidgets.QLabel(self.groupBox_39)\r\n self.label_311.setGeometry(QtCore.QRect(666, 89, 91, 20))\r\n self.label_311.setObjectName(\"label_311\")\r\n self.pol_sat_mp_ant_size = QtWidgets.QComboBox(self.groupBox_39)\r\n self.pol_sat_mp_ant_size.setGeometry(QtCore.QRect(663, 119, 101, 22))\r\n self.pol_sat_mp_ant_size.setObjectName(\"pol_sat_mp_ant_size\")\r\n self.pol_sat_mp_ant_size.addItem(\"\")\r\n self.pol_sat_mp_ant_size.addItem(\"\")\r\n self.pol_sat_mp_ant_size.addItem(\"\")\r\n self.clear_sat_mp_ant_size = QtWidgets.QPushButton(self.groupBox_39)\r\n self.clear_sat_mp_ant_size.setGeometry(QtCore.QRect(659, 156, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_sat_mp_ant_size.setFont(font)\r\n self.clear_sat_mp_ant_size.setObjectName(\"clear_sat_mp_ant_size\")\r\n self.groupBox_40 = QtWidgets.QGroupBox(self.list_perf_calc_page_4)\r\n self.groupBox_40.setGeometry(QtCore.QRect(10, 458, 801, 181))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_40.setFont(font)\r\n self.groupBox_40.setAutoFillBackground(False)\r\n self.groupBox_40.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_40.setObjectName(\"groupBox_40\")\r\n self.label_312 = QtWidgets.QLabel(self.groupBox_40)\r\n self.label_312.setGeometry(QtCore.QRect(20, 29, 55, 16))\r\n self.label_312.setObjectName(\"label_312\")\r\n self.label_314 = QtWidgets.QLabel(self.groupBox_40)\r\n self.label_314.setGeometry(QtCore.QRect(20, 61, 161, 21))\r\n self.label_314.setObjectName(\"label_314\")\r\n self.name_reception_mp_ant_size = QtWidgets.QLineEdit(self.groupBox_40)\r\n self.name_reception_mp_ant_size.setGeometry(QtCore.QRect(100, 29, 511, 22))\r\n self.name_reception_mp_ant_size.setObjectName(\"name_reception_mp_ant_size\")\r\n 
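# groupBox_40 continues with the reception inputs used by the antenna-size calculation: LNB gain and temperature, losses and maximum depointing.\r\n        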
self.lnb_gain_reception_mp_ant_size = QtWidgets.QLineEdit(self.groupBox_40)\r\n self.lnb_gain_reception_mp_ant_size.setGeometry(QtCore.QRect(200, 61, 101, 22))\r\n self.lnb_gain_reception_mp_ant_size.setObjectName(\"lnb_gain_reception_mp_ant_size\")\r\n self.save_reception_mp_ant_size = QtWidgets.QPushButton(self.groupBox_40)\r\n self.save_reception_mp_ant_size.setGeometry(QtCore.QRect(661, 131, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_reception_mp_ant_size.setFont(font)\r\n self.save_reception_mp_ant_size.setObjectName(\"save_reception_mp_ant_size\")\r\n self.load_reception_mp_ant_size = QtWidgets.QPushButton(self.groupBox_40)\r\n self.load_reception_mp_ant_size.setGeometry(QtCore.QRect(661, 96, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_reception_mp_ant_size.setFont(font)\r\n self.load_reception_mp_ant_size.setObjectName(\"load_reception_mp_ant_size\")\r\n self.label_315 = QtWidgets.QLabel(self.groupBox_40)\r\n self.label_315.setGeometry(QtCore.QRect(330, 138, 141, 16))\r\n self.label_315.setObjectName(\"label_315\")\r\n self.ant_eff_reception_mp_ant_size = QtWidgets.QLineEdit(self.groupBox_40)\r\n self.ant_eff_reception_mp_ant_size.setGeometry(QtCore.QRect(510, 136, 101, 22))\r\n self.ant_eff_reception_mp_ant_size.setObjectName(\"ant_eff_reception_mp_ant_size\")\r\n self.label_316 = QtWidgets.QLabel(self.groupBox_40)\r\n self.label_316.setGeometry(QtCore.QRect(330, 66, 161, 16))\r\n self.label_316.setScaledContents(False)\r\n self.label_316.setObjectName(\"label_316\")\r\n self.lnb_temp_reception_mp_ant_size = QtWidgets.QLineEdit(self.groupBox_40)\r\n self.lnb_temp_reception_mp_ant_size.setGeometry(QtCore.QRect(510, 63, 101, 22))\r\n self.lnb_temp_reception_mp_ant_size.setObjectName(\"lnb_temp_reception_mp_ant_size\")\r\n self.label_317 = QtWidgets.QLabel(self.groupBox_40)\r\n self.label_317.setGeometry(QtCore.QRect(20, 80, 171, 51))\r\n self.label_317.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_317.setObjectName(\"label_317\")\r\n self.aditional_losses_reception_mp_ant_size = QtWidgets.QLineEdit(self.groupBox_40)\r\n self.aditional_losses_reception_mp_ant_size.setGeometry(QtCore.QRect(200, 95, 101, 22))\r\n self.aditional_losses_reception_mp_ant_size.setObjectName(\"aditional_losses_reception_mp_ant_size\")\r\n self.label_318 = QtWidgets.QLabel(self.groupBox_40)\r\n self.label_318.setGeometry(QtCore.QRect(20, 118, 171, 51))\r\n self.label_318.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_318.setObjectName(\"label_318\")\r\n self.max_depoint_reception_mp_ant_size = QtWidgets.QLineEdit(self.groupBox_40)\r\n self.max_depoint_reception_mp_ant_size.setGeometry(QtCore.QRect(200, 138, 101, 22))\r\n self.max_depoint_reception_mp_ant_size.setObjectName(\"max_depoint_reception_mp_ant_size\")\r\n self.clear_reception_mp_ant_size = QtWidgets.QPushButton(self.groupBox_40)\r\n self.clear_reception_mp_ant_size.setGeometry(QtCore.QRect(661, 60, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_reception_mp_ant_size.setFont(font)\r\n self.clear_reception_mp_ant_size.setObjectName(\"clear_reception_mp_ant_size\")\r\n self.cable_loss_reception_mp_ant_size = QtWidgets.QLineEdit(self.groupBox_40)\r\n self.cable_loss_reception_mp_ant_size.setGeometry(QtCore.QRect(510, 95, 101, 22))\r\n 
self.cable_loss_reception_mp_ant_size.setObjectName(\"cable_loss_reception_mp_ant_size\")\r\n self.label_319 = QtWidgets.QLabel(self.groupBox_40)\r\n self.label_319.setGeometry(QtCore.QRect(330, 81, 171, 51))\r\n self.label_319.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_319.setObjectName(\"label_319\")\r\n self.output_mp_ant_size = QtWidgets.QTextEdit(self.list_perf_calc_page_4)\r\n self.output_mp_ant_size.setGeometry(QtCore.QRect(10, 674, 801, 151))\r\n self.output_mp_ant_size.setObjectName(\"output_mp_ant_size\")\r\n self.relaxation_mp_ant_size = QtWidgets.QLineEdit(self.list_perf_calc_page_4)\r\n self.relaxation_mp_ant_size.setGeometry(QtCore.QRect(177, 832, 51, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.relaxation_mp_ant_size.setFont(font)\r\n self.relaxation_mp_ant_size.setToolTip(\"\")\r\n self.relaxation_mp_ant_size.setObjectName(\"relaxation_mp_ant_size\")\r\n self.calc_mp_ant_size = QtWidgets.QPushButton(self.list_perf_calc_page_4)\r\n self.calc_mp_ant_size.setGeometry(QtCore.QRect(670, 830, 141, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.calc_mp_ant_size.setFont(font)\r\n self.calc_mp_ant_size.setObjectName(\"calc_mp_ant_size\")\r\n self.label_320 = QtWidgets.QLabel(self.list_perf_calc_page_4)\r\n self.label_320.setGeometry(QtCore.QRect(10, 833, 161, 21))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_320.setFont(font)\r\n self.label_320.setObjectName(\"label_320\")\r\n self.label_321 = QtWidgets.QLabel(self.list_perf_calc_page_4)\r\n self.label_321.setGeometry(QtCore.QRect(420, 831, 61, 20))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_321.setFont(font)\r\n self.label_321.setObjectName(\"label_321\")\r\n self.n_threads_mp_ant_size = QtWidgets.QComboBox(self.list_perf_calc_page_4)\r\n self.n_threads_mp_ant_size.setGeometry(QtCore.QRect(490, 831, 73, 22))\r\n self.n_threads_mp_ant_size.setObjectName(\"n_threads_mp_ant_size\")\r\n self.path_mp_ant_size = QtWidgets.QLineEdit(self.list_perf_calc_page_4)\r\n self.path_mp_ant_size.setGeometry(QtCore.QRect(55, 10, 631, 22))\r\n self.path_mp_ant_size.setObjectName(\"path_mp_ant_size\")\r\n self.label_322 = QtWidgets.QLabel(self.list_perf_calc_page_4)\r\n self.label_322.setGeometry(QtCore.QRect(246, 832, 91, 21))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_322.setFont(font)\r\n self.label_322.setObjectName(\"label_322\")\r\n self.margin_mp_ant_size = QtWidgets.QLineEdit(self.list_perf_calc_page_4)\r\n self.margin_mp_ant_size.setGeometry(QtCore.QRect(346, 832, 61, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.margin_mp_ant_size.setFont(font)\r\n self.margin_mp_ant_size.setToolTip(\"\")\r\n self.margin_mp_ant_size.setObjectName(\"margin_mp_ant_size\")\r\n self.label_324 = QtWidgets.QLabel(self.list_perf_calc_page_4)\r\n self.label_324.setGeometry(QtCore.QRect(15, 644, 141, 21))\r\n self.label_324.setObjectName(\"label_324\")\r\n self.availability_target_mp_ant_size = QtWidgets.QLineEdit(self.list_perf_calc_page_4)\r\n self.availability_target_mp_ant_size.setGeometry(QtCore.QRect(148, 643, 101, 22))\r\n self.availability_target_mp_ant_size.setObjectName(\"availability_target_mp_ant_size\")\r\n self.stackedWidget_3.addWidget(self.list_perf_calc_page_4)\r\n self.stackedWidget.addWidget(self.list_ant_size_calc)\r\n self.single_point_atm_calc_page = QtWidgets.QWidget()\r\n self.single_point_atm_calc_page.setObjectName(\"single_point_atm_calc_page\")\r\n self.groupBox_7 = 
QtWidgets.QGroupBox(self.single_point_atm_calc_page)\r\n self.groupBox_7.setGeometry(QtCore.QRect(20, 20, 781, 131))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_7.setFont(font)\r\n self.groupBox_7.setObjectName(\"groupBox_7\")\r\n self.lat_ground_station_spatm = QtWidgets.QLineEdit(self.groupBox_7)\r\n self.lat_ground_station_spatm.setGeometry(QtCore.QRect(200, 30, 101, 22))\r\n self.lat_ground_station_spatm.setObjectName(\"lat_ground_station_spatm\")\r\n self.long_ground_station_spatm = QtWidgets.QLineEdit(self.groupBox_7)\r\n self.long_ground_station_spatm.setGeometry(QtCore.QRect(500, 30, 101, 22))\r\n self.long_ground_station_spatm.setObjectName(\"long_ground_station_spatm\")\r\n self.label_51 = QtWidgets.QLabel(self.groupBox_7)\r\n self.label_51.setGeometry(QtCore.QRect(20, 23, 151, 31))\r\n self.label_51.setObjectName(\"label_51\")\r\n self.label_50 = QtWidgets.QLabel(self.groupBox_7)\r\n self.label_50.setGeometry(QtCore.QRect(319, 28, 161, 21))\r\n self.label_50.setObjectName(\"label_50\")\r\n self.label_53 = QtWidgets.QLabel(self.groupBox_7)\r\n self.label_53.setGeometry(QtCore.QRect(20, 60, 161, 21))\r\n self.label_53.setObjectName(\"label_53\")\r\n self.ant_size_reception_spatm = QtWidgets.QLineEdit(self.groupBox_7)\r\n self.ant_size_reception_spatm.setGeometry(QtCore.QRect(200, 60, 101, 22))\r\n self.ant_size_reception_spatm.setObjectName(\"ant_size_reception_spatm\")\r\n self.ant_eff_reception_spatm = QtWidgets.QLineEdit(self.groupBox_7)\r\n self.ant_eff_reception_spatm.setGeometry(QtCore.QRect(500, 60, 101, 22))\r\n self.ant_eff_reception_spatm.setObjectName(\"ant_eff_reception_spatm\")\r\n self.label_68 = QtWidgets.QLabel(self.groupBox_7)\r\n self.label_68.setGeometry(QtCore.QRect(320, 60, 141, 16))\r\n self.label_68.setObjectName(\"label_68\")\r\n self.clear_reception_spatm = QtWidgets.QPushButton(self.groupBox_7)\r\n self.clear_reception_spatm.setGeometry(QtCore.QRect(660, 30, 93, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_reception_spatm.setFont(font)\r\n self.clear_reception_spatm.setObjectName(\"clear_reception_spatm\")\r\n self.load_ground_station_spatm = QtWidgets.QPushButton(self.groupBox_7)\r\n self.load_ground_station_spatm.setGeometry(QtCore.QRect(15, 91, 201, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_ground_station_spatm.setFont(font)\r\n self.load_ground_station_spatm.setObjectName(\"load_ground_station_spatm\")\r\n self.load_reception_spatm = QtWidgets.QPushButton(self.groupBox_7)\r\n self.load_reception_spatm.setGeometry(QtCore.QRect(230, 90, 201, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_reception_spatm.setFont(font)\r\n self.load_reception_spatm.setObjectName(\"load_reception_spatm\")\r\n self.calc_spatm = QtWidgets.QPushButton(self.single_point_atm_calc_page)\r\n self.calc_spatm.setGeometry(QtCore.QRect(666, 830, 141, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.calc_spatm.setFont(font)\r\n self.calc_spatm.setObjectName(\"calc_spatm\")\r\n self.output_spatm = QtWidgets.QTextEdit(self.single_point_atm_calc_page)\r\n self.output_spatm.setGeometry(QtCore.QRect(6, 650, 801, 171))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.output_spatm.setFont(font)\r\n self.output_spatm.setObjectName(\"output_spatm\")\r\n self.groupBox_8 = QtWidgets.QGroupBox(self.single_point_atm_calc_page)\r\n 
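# groupBox_8: satellite inputs for the atmospheric page (orbital longitude, frequency, polarization and a default-satellite selector).\r\n        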
self.groupBox_8.setGeometry(QtCore.QRect(20, 150, 781, 131))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_8.setFont(font)\r\n self.groupBox_8.setObjectName(\"groupBox_8\")\r\n self.label_52 = QtWidgets.QLabel(self.groupBox_8)\r\n self.label_52.setGeometry(QtCore.QRect(20, 60, 131, 16))\r\n self.label_52.setObjectName(\"label_52\")\r\n self.label_57 = QtWidgets.QLabel(self.groupBox_8)\r\n self.label_57.setGeometry(QtCore.QRect(20, 30, 161, 21))\r\n self.label_57.setObjectName(\"label_57\")\r\n self.long_sat_spatm = QtWidgets.QLineEdit(self.groupBox_8)\r\n self.long_sat_spatm.setGeometry(QtCore.QRect(200, 30, 101, 22))\r\n self.long_sat_spatm.setObjectName(\"long_sat_spatm\")\r\n self.freq_sat_spatm = QtWidgets.QLineEdit(self.groupBox_8)\r\n self.freq_sat_spatm.setGeometry(QtCore.QRect(200, 60, 101, 22))\r\n self.freq_sat_spatm.setObjectName(\"freq_sat_spatm\")\r\n self.default_sat_spatm = QtWidgets.QComboBox(self.groupBox_8)\r\n self.default_sat_spatm.setGeometry(QtCore.QRect(634, 44, 131, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(8)\r\n self.default_sat_spatm.setFont(font)\r\n self.default_sat_spatm.setObjectName(\"default_sat_spatm\")\r\n self.label_63 = QtWidgets.QLabel(self.groupBox_8)\r\n self.label_63.setGeometry(QtCore.QRect(636, 14, 131, 16))\r\n self.label_63.setObjectName(\"label_63\")\r\n self.pol_sat_spatm = QtWidgets.QComboBox(self.groupBox_8)\r\n self.pol_sat_spatm.setGeometry(QtCore.QRect(450, 30, 101, 22))\r\n self.pol_sat_spatm.setObjectName(\"pol_sat_spatm\")\r\n self.pol_sat_spatm.addItem(\"\")\r\n self.pol_sat_spatm.addItem(\"\")\r\n self.pol_sat_spatm.addItem(\"\")\r\n self.label_8 = QtWidgets.QLabel(self.groupBox_8)\r\n self.label_8.setGeometry(QtCore.QRect(350, 30, 91, 20))\r\n self.label_8.setObjectName(\"label_8\")\r\n self.load_sat_spatm = QtWidgets.QPushButton(self.groupBox_8)\r\n self.load_sat_spatm.setGeometry(QtCore.QRect(10, 90, 201, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_sat_spatm.setFont(font)\r\n self.load_sat_spatm.setObjectName(\"load_sat_spatm\")\r\n self.p_year_spatm = QtWidgets.QLineEdit(self.single_point_atm_calc_page)\r\n self.p_year_spatm.setGeometry(QtCore.QRect(550, 834, 101, 21))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.p_year_spatm.setFont(font)\r\n self.p_year_spatm.setToolTip(\"\")\r\n self.p_year_spatm.setObjectName(\"p_year_spatm\")\r\n self.label_58 = QtWidgets.QLabel(self.single_point_atm_calc_page)\r\n self.label_58.setGeometry(QtCore.QRect(340, 834, 201, 20))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_58.setFont(font)\r\n self.label_58.setObjectName(\"label_58\")\r\n self.label_9 = QtWidgets.QLabel(self.single_point_atm_calc_page)\r\n self.label_9.setGeometry(QtCore.QRect(10, 837, 71, 16))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_9.setFont(font)\r\n self.label_9.setObjectName(\"label_9\")\r\n self.method_spatm = QtWidgets.QComboBox(self.single_point_atm_calc_page)\r\n self.method_spatm.setGeometry(QtCore.QRect(84, 835, 101, 21))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.method_spatm.setFont(font)\r\n self.method_spatm.setObjectName(\"method_spatm\")\r\n self.method_spatm.addItem(\"\")\r\n self.method_spatm.addItem(\"\")\r\n self.stackedWidget.addWidget(self.single_point_atm_calc_page)\r\n self.single_point_perf_calc_page = QtWidgets.QWidget()\r\n self.single_point_perf_calc_page.setObjectName(\"single_point_perf_calc_page\")\r\n self.groupBox_4 
= QtWidgets.QGroupBox(self.single_point_perf_calc_page)\r\n self.groupBox_4.setGeometry(QtCore.QRect(10, 10, 801, 121))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_4.setFont(font)\r\n self.groupBox_4.setAutoFillBackground(False)\r\n self.groupBox_4.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_4.setObjectName(\"groupBox_4\")\r\n self.label_27 = QtWidgets.QLabel(self.groupBox_4)\r\n self.label_27.setGeometry(QtCore.QRect(20, 36, 55, 16))\r\n self.label_27.setObjectName(\"label_27\")\r\n self.label_28 = QtWidgets.QLabel(self.groupBox_4)\r\n self.label_28.setGeometry(QtCore.QRect(20, 66, 151, 31))\r\n self.label_28.setObjectName(\"label_28\")\r\n self.label_29 = QtWidgets.QLabel(self.groupBox_4)\r\n self.label_29.setGeometry(QtCore.QRect(330, 78, 161, 21))\r\n self.label_29.setObjectName(\"label_29\")\r\n self.name_ground_station_sp_perf = QtWidgets.QLineEdit(self.groupBox_4)\r\n self.name_ground_station_sp_perf.setGeometry(QtCore.QRect(100, 36, 511, 22))\r\n self.name_ground_station_sp_perf.setObjectName(\"name_ground_station_sp_perf\")\r\n self.long_ground_station_sp_perf = QtWidgets.QLineEdit(self.groupBox_4)\r\n self.long_ground_station_sp_perf.setGeometry(QtCore.QRect(510, 80, 101, 22))\r\n self.long_ground_station_sp_perf.setObjectName(\"long_ground_station_sp_perf\")\r\n self.lat_ground_station_sp_perf = QtWidgets.QLineEdit(self.groupBox_4)\r\n self.lat_ground_station_sp_perf.setGeometry(QtCore.QRect(200, 76, 101, 22))\r\n self.lat_ground_station_sp_perf.setObjectName(\"lat_ground_station_sp_perf\")\r\n self.save_ground_station_sp_perf = QtWidgets.QPushButton(self.groupBox_4)\r\n self.save_ground_station_sp_perf.setGeometry(QtCore.QRect(652, 80, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_ground_station_sp_perf.setFont(font)\r\n self.save_ground_station_sp_perf.setObjectName(\"save_ground_station_sp_perf\")\r\n self.load_ground_station_sp_perf = QtWidgets.QPushButton(self.groupBox_4)\r\n self.load_ground_station_sp_perf.setGeometry(QtCore.QRect(652, 48, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_ground_station_sp_perf.setFont(font)\r\n self.load_ground_station_sp_perf.setObjectName(\"load_ground_station_sp_perf\")\r\n self.clear_ground_station_sp_perf = QtWidgets.QPushButton(self.groupBox_4)\r\n self.clear_ground_station_sp_perf.setGeometry(QtCore.QRect(652, 16, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_ground_station_sp_perf.setFont(font)\r\n self.clear_ground_station_sp_perf.setObjectName(\"clear_ground_station_sp_perf\")\r\n self.groupBox_5 = QtWidgets.QGroupBox(self.single_point_perf_calc_page)\r\n self.groupBox_5.setGeometry(QtCore.QRect(10, 129, 801, 271))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_5.setFont(font)\r\n self.groupBox_5.setAutoFillBackground(False)\r\n self.groupBox_5.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_5.setObjectName(\"groupBox_5\")\r\n self.label_30 = QtWidgets.QLabel(self.groupBox_5)\r\n self.label_30.setGeometry(QtCore.QRect(20, 29, 55, 16))\r\n self.label_30.setObjectName(\"label_30\")\r\n self.label_32 = QtWidgets.QLabel(self.groupBox_5)\r\n self.label_32.setGeometry(QtCore.QRect(20, 71, 161, 21))\r\n self.label_32.setObjectName(\"label_32\")\r\n self.name_sat_sp_perf = 
QtWidgets.QLineEdit(self.groupBox_5)\r\n self.name_sat_sp_perf.setGeometry(QtCore.QRect(100, 29, 511, 22))\r\n self.name_sat_sp_perf.setObjectName(\"name_sat_sp_perf\")\r\n self.long_sat_sp_perf = QtWidgets.QLineEdit(self.groupBox_5)\r\n self.long_sat_sp_perf.setGeometry(QtCore.QRect(200, 71, 101, 22))\r\n self.long_sat_sp_perf.setObjectName(\"long_sat_sp_perf\")\r\n self.save_sat_sp_perf = QtWidgets.QPushButton(self.groupBox_5)\r\n self.save_sat_sp_perf.setGeometry(QtCore.QRect(653, 231, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_sat_sp_perf.setFont(font)\r\n self.save_sat_sp_perf.setObjectName(\"save_sat_sp_perf\")\r\n self.load_sat_sp_perf = QtWidgets.QPushButton(self.groupBox_5)\r\n self.load_sat_sp_perf.setGeometry(QtCore.QRect(653, 199, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_sat_sp_perf.setFont(font)\r\n self.load_sat_sp_perf.setObjectName(\"load_sat_sp_perf\")\r\n self.default_sat_sp_perf = QtWidgets.QComboBox(self.groupBox_5)\r\n self.default_sat_sp_perf.setGeometry(QtCore.QRect(650, 61, 131, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(8)\r\n self.default_sat_sp_perf.setFont(font)\r\n self.default_sat_sp_perf.setObjectName(\"default_sat_sp_perf\")\r\n self.label_33 = QtWidgets.QLabel(self.groupBox_5)\r\n self.label_33.setGeometry(QtCore.QRect(648, 31, 131, 16))\r\n self.label_33.setObjectName(\"label_33\")\r\n self.label_34 = QtWidgets.QLabel(self.groupBox_5)\r\n self.label_34.setGeometry(QtCore.QRect(330, 69, 111, 16))\r\n self.label_34.setObjectName(\"label_34\")\r\n self.height_sat_sp_perf = QtWidgets.QLineEdit(self.groupBox_5)\r\n self.height_sat_sp_perf.setGeometry(QtCore.QRect(510, 69, 101, 22))\r\n self.height_sat_sp_perf.setObjectName(\"height_sat_sp_perf\")\r\n self.label_35 = QtWidgets.QLabel(self.groupBox_5)\r\n self.label_35.setGeometry(QtCore.QRect(330, 110, 121, 16))\r\n self.label_35.setObjectName(\"label_35\")\r\n self.eirp_sat_sp_perf = QtWidgets.QLineEdit(self.groupBox_5)\r\n self.eirp_sat_sp_perf.setGeometry(QtCore.QRect(510, 106, 101, 22))\r\n self.eirp_sat_sp_perf.setObjectName(\"eirp_sat_sp_perf\")\r\n self.label_36 = QtWidgets.QLabel(self.groupBox_5)\r\n self.label_36.setGeometry(QtCore.QRect(20, 139, 171, 51))\r\n self.label_36.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_36.setObjectName(\"label_36\")\r\n self.max_bw_sat_sp_perf = QtWidgets.QLineEdit(self.groupBox_5)\r\n self.max_bw_sat_sp_perf.setGeometry(QtCore.QRect(200, 149, 101, 22))\r\n self.max_bw_sat_sp_perf.setObjectName(\"max_bw_sat_sp_perf\")\r\n self.label_37 = QtWidgets.QLabel(self.groupBox_5)\r\n self.label_37.setGeometry(QtCore.QRect(330, 134, 171, 51))\r\n self.label_37.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_37.setObjectName(\"label_37\")\r\n self.bw_util_sat_sp_perf = QtWidgets.QLineEdit(self.groupBox_5)\r\n self.bw_util_sat_sp_perf.setGeometry(QtCore.QRect(510, 147, 101, 22))\r\n self.bw_util_sat_sp_perf.setObjectName(\"bw_util_sat_sp_perf\")\r\n self.label_40 = QtWidgets.QLabel(self.groupBox_5)\r\n self.label_40.setGeometry(QtCore.QRect(20, 191, 61, 16))\r\n self.label_40.setObjectName(\"label_40\")\r\n self.rolloff_sat_sp_perf = QtWidgets.QLineEdit(self.groupBox_5)\r\n self.rolloff_sat_sp_perf.setGeometry(QtCore.QRect(200, 190, 101, 22))\r\n self.rolloff_sat_sp_perf.setObjectName(\"rolloff_sat_sp_perf\")\r\n self.label_41 = QtWidgets.QLabel(self.groupBox_5)\r\n 
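# Remaining groupBox_5 inputs: modcod selector, frequency and polarization of the satellite.\r\n        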
self.label_41.setGeometry(QtCore.QRect(330, 192, 91, 16))\r\n self.label_41.setObjectName(\"label_41\")\r\n self.modcod_sat_sp_perf = QtWidgets.QComboBox(self.groupBox_5)\r\n self.modcod_sat_sp_perf.setGeometry(QtCore.QRect(450, 190, 161, 22))\r\n self.modcod_sat_sp_perf.setObjectName(\"modcod_sat_sp_perf\")\r\n self.label_42 = QtWidgets.QLabel(self.groupBox_5)\r\n self.label_42.setGeometry(QtCore.QRect(20, 111, 131, 20))\r\n self.label_42.setObjectName(\"label_42\")\r\n self.freq_sat_sp_perf = QtWidgets.QLineEdit(self.groupBox_5)\r\n self.freq_sat_sp_perf.setGeometry(QtCore.QRect(200, 111, 101, 22))\r\n self.freq_sat_sp_perf.setObjectName(\"freq_sat_sp_perf\")\r\n self.label_6 = QtWidgets.QLabel(self.groupBox_5)\r\n self.label_6.setGeometry(QtCore.QRect(665, 97, 91, 20))\r\n self.label_6.setObjectName(\"label_6\")\r\n self.pol_sat_sp_perf = QtWidgets.QComboBox(self.groupBox_5)\r\n self.pol_sat_sp_perf.setGeometry(QtCore.QRect(660, 121, 101, 22))\r\n self.pol_sat_sp_perf.setObjectName(\"pol_sat_sp_perf\")\r\n self.pol_sat_sp_perf.addItem(\"\")\r\n self.pol_sat_sp_perf.addItem(\"\")\r\n self.pol_sat_sp_perf.addItem(\"\")\r\n self.clear_sat_sp_perf = QtWidgets.QPushButton(self.groupBox_5)\r\n self.clear_sat_sp_perf.setGeometry(QtCore.QRect(653, 169, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_sat_sp_perf.setFont(font)\r\n self.clear_sat_sp_perf.setObjectName(\"clear_sat_sp_perf\")\r\n self.groupBox_6 = QtWidgets.QGroupBox(self.single_point_perf_calc_page)\r\n self.groupBox_6.setGeometry(QtCore.QRect(10, 400, 801, 231))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_6.setFont(font)\r\n self.groupBox_6.setAutoFillBackground(False)\r\n self.groupBox_6.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_6.setObjectName(\"groupBox_6\")\r\n self.label_43 = QtWidgets.QLabel(self.groupBox_6)\r\n self.label_43.setGeometry(QtCore.QRect(20, 38, 55, 16))\r\n self.label_43.setObjectName(\"label_43\")\r\n self.label_44 = QtWidgets.QLabel(self.groupBox_6)\r\n self.label_44.setGeometry(QtCore.QRect(20, 68, 151, 31))\r\n self.label_44.setObjectName(\"label_44\")\r\n self.label_45 = QtWidgets.QLabel(self.groupBox_6)\r\n self.label_45.setGeometry(QtCore.QRect(20, 113, 161, 21))\r\n self.label_45.setObjectName(\"label_45\")\r\n self.name_reception_sp_perf = QtWidgets.QLineEdit(self.groupBox_6)\r\n self.name_reception_sp_perf.setGeometry(QtCore.QRect(100, 38, 511, 22))\r\n self.name_reception_sp_perf.setObjectName(\"name_reception_sp_perf\")\r\n self.lnb_gain_reception_sp_perf = QtWidgets.QLineEdit(self.groupBox_6)\r\n self.lnb_gain_reception_sp_perf.setGeometry(QtCore.QRect(200, 113, 101, 22))\r\n self.lnb_gain_reception_sp_perf.setObjectName(\"lnb_gain_reception_sp_perf\")\r\n self.ant_size_reception_sp_perf = QtWidgets.QLineEdit(self.groupBox_6)\r\n self.ant_size_reception_sp_perf.setGeometry(QtCore.QRect(200, 74, 101, 22))\r\n self.ant_size_reception_sp_perf.setObjectName(\"ant_size_reception_sp_perf\")\r\n self.save_reception_sp_perf = QtWidgets.QPushButton(self.groupBox_6)\r\n self.save_reception_sp_perf.setGeometry(QtCore.QRect(654, 189, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_reception_sp_perf.setFont(font)\r\n self.save_reception_sp_perf.setObjectName(\"save_reception_sp_perf\")\r\n self.load_reception_sp_perf = QtWidgets.QPushButton(self.groupBox_6)\r\n 
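        # NOTE: the save_*/load_*/clear_* buttons created in this generated
        # block only receive behaviour further down, after retranslateUi(),
        # where each one is connected to a load_save_* handler via a lambda
        # that fixes its opt/item arguments.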
self.load_reception_sp_perf.setGeometry(QtCore.QRect(654, 155, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_reception_sp_perf.setFont(font)\r\n self.load_reception_sp_perf.setObjectName(\"load_reception_sp_perf\")\r\n self.label_46 = QtWidgets.QLabel(self.groupBox_6)\r\n self.label_46.setGeometry(QtCore.QRect(330, 78, 141, 16))\r\n self.label_46.setObjectName(\"label_46\")\r\n self.ant_eff_reception_sp_perf = QtWidgets.QLineEdit(self.groupBox_6)\r\n self.ant_eff_reception_sp_perf.setGeometry(QtCore.QRect(510, 78, 101, 22))\r\n self.ant_eff_reception_sp_perf.setObjectName(\"ant_eff_reception_sp_perf\")\r\n self.label_47 = QtWidgets.QLabel(self.groupBox_6)\r\n self.label_47.setGeometry(QtCore.QRect(330, 118, 161, 16))\r\n self.label_47.setObjectName(\"label_47\")\r\n self.lnb_temp_reception_sp_perf = QtWidgets.QLineEdit(self.groupBox_6)\r\n self.lnb_temp_reception_sp_perf.setGeometry(QtCore.QRect(510, 115, 101, 22))\r\n self.lnb_temp_reception_sp_perf.setObjectName(\"lnb_temp_reception_sp_perf\")\r\n self.label_48 = QtWidgets.QLabel(self.groupBox_6)\r\n self.label_48.setGeometry(QtCore.QRect(17, 139, 171, 51))\r\n self.label_48.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_48.setObjectName(\"label_48\")\r\n self.aditional_losses_reception_sp_perf = QtWidgets.QLineEdit(self.groupBox_6)\r\n self.aditional_losses_reception_sp_perf.setGeometry(QtCore.QRect(200, 153, 101, 22))\r\n self.aditional_losses_reception_sp_perf.setObjectName(\"aditional_losses_reception_sp_perf\")\r\n self.label_49 = QtWidgets.QLabel(self.groupBox_6)\r\n self.label_49.setGeometry(QtCore.QRect(19, 179, 171, 51))\r\n self.label_49.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_49.setObjectName(\"label_49\")\r\n self.max_depoint_reception_sp_perf = QtWidgets.QLineEdit(self.groupBox_6)\r\n self.max_depoint_reception_sp_perf.setGeometry(QtCore.QRect(200, 192, 101, 22))\r\n self.max_depoint_reception_sp_perf.setObjectName(\"max_depoint_reception_sp_perf\")\r\n self.clear_reception_sp_perf = QtWidgets.QPushButton(self.groupBox_6)\r\n self.clear_reception_sp_perf.setGeometry(QtCore.QRect(654, 123, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_reception_sp_perf.setFont(font)\r\n self.clear_reception_sp_perf.setObjectName(\"clear_reception_sp_perf\")\r\n self.cable_loss_reception_sp_perf = QtWidgets.QLineEdit(self.groupBox_6)\r\n self.cable_loss_reception_sp_perf.setGeometry(QtCore.QRect(509, 154, 101, 22))\r\n self.cable_loss_reception_sp_perf.setObjectName(\"cable_loss_reception_sp_perf\")\r\n self.label_67 = QtWidgets.QLabel(self.groupBox_6)\r\n self.label_67.setGeometry(QtCore.QRect(330, 140, 171, 51))\r\n self.label_67.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_67.setObjectName(\"label_67\")\r\n self.calc_sp_perf = QtWidgets.QPushButton(self.single_point_perf_calc_page)\r\n self.calc_sp_perf.setGeometry(QtCore.QRect(670, 830, 141, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.calc_sp_perf.setFont(font)\r\n self.calc_sp_perf.setObjectName(\"calc_sp_perf\")\r\n self.output_sp_perf = QtWidgets.QTextEdit(self.single_point_perf_calc_page)\r\n self.output_sp_perf.setGeometry(QtCore.QRect(10, 638, 801, 181))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.output_sp_perf.setFont(font)\r\n self.output_sp_perf.setObjectName(\"output_sp_perf\")\r\n self.label_4 = QtWidgets.QLabel(self.single_point_perf_calc_page)\r\n 
self.label_4.setGeometry(QtCore.QRect(10, 830, 161, 21))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_4.setFont(font)\r\n self.label_4.setObjectName(\"label_4\")\r\n self.relaxation_sp_perf = QtWidgets.QLineEdit(self.single_point_perf_calc_page)\r\n self.relaxation_sp_perf.setGeometry(QtCore.QRect(175, 830, 51, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.relaxation_sp_perf.setFont(font)\r\n self.relaxation_sp_perf.setToolTip(\"\")\r\n self.relaxation_sp_perf.setObjectName(\"relaxation_sp_perf\")\r\n self.margin_sp_perf = QtWidgets.QLineEdit(self.single_point_perf_calc_page)\r\n self.margin_sp_perf.setGeometry(QtCore.QRect(340, 830, 61, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.margin_sp_perf.setFont(font)\r\n self.margin_sp_perf.setToolTip(\"\")\r\n self.margin_sp_perf.setObjectName(\"margin_sp_perf\")\r\n self.label_173 = QtWidgets.QLabel(self.single_point_perf_calc_page)\r\n self.label_173.setGeometry(QtCore.QRect(240, 830, 91, 21))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_173.setFont(font)\r\n self.label_173.setObjectName(\"label_173\")\r\n self.stackedWidget.addWidget(self.single_point_perf_calc_page)\r\n self.list_perf_calc_page = QtWidgets.QWidget()\r\n self.list_perf_calc_page.setObjectName(\"list_perf_calc_page\")\r\n self.browse_path_mp_perf = QtWidgets.QPushButton(self.list_perf_calc_page)\r\n self.browse_path_mp_perf.setGeometry(QtCore.QRect(700, 11, 93, 21))\r\n self.browse_path_mp_perf.setObjectName(\"browse_path_mp_perf\")\r\n self.label_10 = QtWidgets.QLabel(self.list_perf_calc_page)\r\n self.label_10.setGeometry(QtCore.QRect(6, 13, 55, 16))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_10.setFont(font)\r\n self.label_10.setObjectName(\"label_10\")\r\n self.preview_mp_perf = QtWidgets.QTableWidget(self.list_perf_calc_page)\r\n self.preview_mp_perf.setGeometry(QtCore.QRect(10, 61, 801, 131))\r\n self.preview_mp_perf.setObjectName(\"preview_mp_perf\")\r\n self.preview_mp_perf.setColumnCount(0)\r\n self.preview_mp_perf.setRowCount(0)\r\n self.label_11 = QtWidgets.QLabel(self.list_perf_calc_page)\r\n self.label_11.setGeometry(QtCore.QRect(10, 41, 91, 16))\r\n self.label_11.setObjectName(\"label_11\")\r\n self.groupBox_10 = QtWidgets.QGroupBox(self.list_perf_calc_page)\r\n self.groupBox_10.setGeometry(QtCore.QRect(8, 199, 801, 261))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_10.setFont(font)\r\n self.groupBox_10.setAutoFillBackground(False)\r\n self.groupBox_10.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_10.setObjectName(\"groupBox_10\")\r\n self.label_71 = QtWidgets.QLabel(self.groupBox_10)\r\n self.label_71.setGeometry(QtCore.QRect(20, 27, 55, 16))\r\n self.label_71.setObjectName(\"label_71\")\r\n self.label_72 = QtWidgets.QLabel(self.groupBox_10)\r\n self.label_72.setGeometry(QtCore.QRect(20, 69, 161, 21))\r\n self.label_72.setObjectName(\"label_72\")\r\n self.name_sat_mp_perf = QtWidgets.QLineEdit(self.groupBox_10)\r\n self.name_sat_mp_perf.setGeometry(QtCore.QRect(100, 27, 511, 22))\r\n self.name_sat_mp_perf.setObjectName(\"name_sat_mp_perf\")\r\n self.long_sat_mp_perf = QtWidgets.QLineEdit(self.groupBox_10)\r\n self.long_sat_mp_perf.setGeometry(QtCore.QRect(200, 69, 101, 22))\r\n self.long_sat_mp_perf.setObjectName(\"long_sat_mp_perf\")\r\n self.save_sat_mp_perf = QtWidgets.QPushButton(self.groupBox_10)\r\n self.save_sat_mp_perf.setGeometry(QtCore.QRect(658, 222, 111, 28))\r\n font = QtGui.QFont()\r\n 
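        # The QFont setup here (QFont() / setPointSize / setBold / setWeight)
        # is repeated verbatim for almost every widget, as pyuic5 emits it
        # inline. A hand-maintained module could factor it into a helper such
        # as the hypothetical _make_font() sketched below (name and defaults
        # are illustrative, not part of the generated file):
        #
        #     def _make_font(point_size=10, bold=False):
        #         font = QtGui.QFont()
        #         font.setPointSize(point_size)
        #         font.setBold(bold)
        #         font.setWeight(75 if bold else 50)  # Qt5: 50 normal, 75 bold
        #         return font
        #
        #     self.save_sat_mp_perf.setFont(_make_font(10))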
font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_sat_mp_perf.setFont(font)\r\n self.save_sat_mp_perf.setObjectName(\"save_sat_mp_perf\")\r\n self.load_sat_mp_perf = QtWidgets.QPushButton(self.groupBox_10)\r\n self.load_sat_mp_perf.setGeometry(QtCore.QRect(658, 190, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_sat_mp_perf.setFont(font)\r\n self.load_sat_mp_perf.setObjectName(\"load_sat_mp_perf\")\r\n self.default_sat_mp_perf = QtWidgets.QComboBox(self.groupBox_10)\r\n self.default_sat_mp_perf.setGeometry(QtCore.QRect(650, 58, 131, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(8)\r\n self.default_sat_mp_perf.setFont(font)\r\n self.default_sat_mp_perf.setObjectName(\"default_sat_mp_perf\")\r\n self.label_73 = QtWidgets.QLabel(self.groupBox_10)\r\n self.label_73.setGeometry(QtCore.QRect(651, 29, 131, 16))\r\n self.label_73.setObjectName(\"label_73\")\r\n self.label_74 = QtWidgets.QLabel(self.groupBox_10)\r\n self.label_74.setGeometry(QtCore.QRect(330, 70, 111, 16))\r\n self.label_74.setObjectName(\"label_74\")\r\n self.height_sat_mp_perf = QtWidgets.QLineEdit(self.groupBox_10)\r\n self.height_sat_mp_perf.setGeometry(QtCore.QRect(510, 67, 101, 22))\r\n self.height_sat_mp_perf.setObjectName(\"height_sat_mp_perf\")\r\n self.label_75 = QtWidgets.QLabel(self.groupBox_10)\r\n self.label_75.setGeometry(QtCore.QRect(330, 108, 121, 16))\r\n self.label_75.setObjectName(\"label_75\")\r\n self.eirp_sat_mp_perf = QtWidgets.QLineEdit(self.groupBox_10)\r\n self.eirp_sat_mp_perf.setGeometry(QtCore.QRect(510, 104, 101, 22))\r\n self.eirp_sat_mp_perf.setObjectName(\"eirp_sat_mp_perf\")\r\n self.label_76 = QtWidgets.QLabel(self.groupBox_10)\r\n self.label_76.setGeometry(QtCore.QRect(20, 137, 171, 51))\r\n self.label_76.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_76.setObjectName(\"label_76\")\r\n self.max_bw_sat_mp_perf = QtWidgets.QLineEdit(self.groupBox_10)\r\n self.max_bw_sat_mp_perf.setGeometry(QtCore.QRect(200, 147, 101, 22))\r\n self.max_bw_sat_mp_perf.setObjectName(\"max_bw_sat_mp_perf\")\r\n self.label_77 = QtWidgets.QLabel(self.groupBox_10)\r\n self.label_77.setGeometry(QtCore.QRect(330, 132, 171, 51))\r\n self.label_77.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_77.setObjectName(\"label_77\")\r\n self.bw_util_sat_mp_perf = QtWidgets.QLineEdit(self.groupBox_10)\r\n self.bw_util_sat_mp_perf.setGeometry(QtCore.QRect(510, 145, 101, 22))\r\n self.bw_util_sat_mp_perf.setObjectName(\"bw_util_sat_mp_perf\")\r\n self.label_80 = QtWidgets.QLabel(self.groupBox_10)\r\n self.label_80.setGeometry(QtCore.QRect(20, 191, 61, 16))\r\n self.label_80.setObjectName(\"label_80\")\r\n self.rolloff_sat_mp_perf = QtWidgets.QLineEdit(self.groupBox_10)\r\n self.rolloff_sat_mp_perf.setGeometry(QtCore.QRect(200, 191, 101, 22))\r\n self.rolloff_sat_mp_perf.setObjectName(\"rolloff_sat_mp_perf\")\r\n self.label_81 = QtWidgets.QLabel(self.groupBox_10)\r\n self.label_81.setGeometry(QtCore.QRect(330, 191, 91, 16))\r\n self.label_81.setObjectName(\"label_81\")\r\n self.modcod_sat_mp_perf = QtWidgets.QComboBox(self.groupBox_10)\r\n self.modcod_sat_mp_perf.setGeometry(QtCore.QRect(450, 191, 161, 22))\r\n self.modcod_sat_mp_perf.setObjectName(\"modcod_sat_mp_perf\")\r\n self.label_82 = QtWidgets.QLabel(self.groupBox_10)\r\n self.label_82.setGeometry(QtCore.QRect(20, 109, 131, 20))\r\n self.label_82.setObjectName(\"label_82\")\r\n self.freq_sat_mp_perf = QtWidgets.QLineEdit(self.groupBox_10)\r\n 
self.freq_sat_mp_perf.setGeometry(QtCore.QRect(200, 109, 101, 22))\r\n self.freq_sat_mp_perf.setObjectName(\"freq_sat_mp_perf\")\r\n self.label_12 = QtWidgets.QLabel(self.groupBox_10)\r\n self.label_12.setGeometry(QtCore.QRect(666, 89, 91, 20))\r\n self.label_12.setObjectName(\"label_12\")\r\n self.pol_sat_mp_perf = QtWidgets.QComboBox(self.groupBox_10)\r\n self.pol_sat_mp_perf.setGeometry(QtCore.QRect(663, 119, 101, 22))\r\n self.pol_sat_mp_perf.setObjectName(\"pol_sat_mp_perf\")\r\n self.pol_sat_mp_perf.addItem(\"\")\r\n self.pol_sat_mp_perf.addItem(\"\")\r\n self.pol_sat_mp_perf.addItem(\"\")\r\n self.clear_satellite_mp_perf = QtWidgets.QPushButton(self.groupBox_10)\r\n self.clear_satellite_mp_perf.setGeometry(QtCore.QRect(658, 156, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_satellite_mp_perf.setFont(font)\r\n self.clear_satellite_mp_perf.setObjectName(\"clear_satellite_mp_perf\")\r\n self.groupBox_11 = QtWidgets.QGroupBox(self.list_perf_calc_page)\r\n self.groupBox_11.setGeometry(QtCore.QRect(10, 458, 801, 221))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.groupBox_11.setFont(font)\r\n self.groupBox_11.setAutoFillBackground(False)\r\n self.groupBox_11.setAlignment(QtCore.Qt.AlignJustify|QtCore.Qt.AlignVCenter)\r\n self.groupBox_11.setObjectName(\"groupBox_11\")\r\n self.label_83 = QtWidgets.QLabel(self.groupBox_11)\r\n self.label_83.setGeometry(QtCore.QRect(20, 38, 55, 16))\r\n self.label_83.setObjectName(\"label_83\")\r\n self.label_84 = QtWidgets.QLabel(self.groupBox_11)\r\n self.label_84.setGeometry(QtCore.QRect(20, 68, 151, 31))\r\n self.label_84.setObjectName(\"label_84\")\r\n self.label_85 = QtWidgets.QLabel(self.groupBox_11)\r\n self.label_85.setGeometry(QtCore.QRect(20, 113, 161, 21))\r\n self.label_85.setObjectName(\"label_85\")\r\n self.name_reception_mp_perf = QtWidgets.QLineEdit(self.groupBox_11)\r\n self.name_reception_mp_perf.setGeometry(QtCore.QRect(100, 38, 511, 22))\r\n self.name_reception_mp_perf.setObjectName(\"name_reception_mp_perf\")\r\n self.lnb_gain_reception_mp_perf = QtWidgets.QLineEdit(self.groupBox_11)\r\n self.lnb_gain_reception_mp_perf.setGeometry(QtCore.QRect(200, 113, 101, 22))\r\n self.lnb_gain_reception_mp_perf.setObjectName(\"lnb_gain_reception_mp_perf\")\r\n self.ant_size_reception_mp_perf = QtWidgets.QLineEdit(self.groupBox_11)\r\n self.ant_size_reception_mp_perf.setGeometry(QtCore.QRect(200, 74, 101, 22))\r\n self.ant_size_reception_mp_perf.setObjectName(\"ant_size_reception_mp_perf\")\r\n self.save_reception_mp_perf = QtWidgets.QPushButton(self.groupBox_11)\r\n self.save_reception_mp_perf.setGeometry(QtCore.QRect(656, 186, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.save_reception_mp_perf.setFont(font)\r\n self.save_reception_mp_perf.setObjectName(\"save_reception_mp_perf\")\r\n self.load_reception_mp_perf = QtWidgets.QPushButton(self.groupBox_11)\r\n self.load_reception_mp_perf.setGeometry(QtCore.QRect(656, 151, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.load_reception_mp_perf.setFont(font)\r\n self.load_reception_mp_perf.setObjectName(\"load_reception_mp_perf\")\r\n self.label_86 = QtWidgets.QLabel(self.groupBox_11)\r\n self.label_86.setGeometry(QtCore.QRect(330, 78, 141, 16))\r\n self.label_86.setObjectName(\"label_86\")\r\n self.ant_eff_reception_mp_perf = QtWidgets.QLineEdit(self.groupBox_11)\r\n 
self.ant_eff_reception_mp_perf.setGeometry(QtCore.QRect(510, 78, 101, 22))\r\n self.ant_eff_reception_mp_perf.setObjectName(\"ant_eff_reception_mp_perf\")\r\n self.label_87 = QtWidgets.QLabel(self.groupBox_11)\r\n self.label_87.setGeometry(QtCore.QRect(330, 118, 161, 16))\r\n self.label_87.setScaledContents(False)\r\n self.label_87.setObjectName(\"label_87\")\r\n self.lnb_temp_reception_mp_perf = QtWidgets.QLineEdit(self.groupBox_11)\r\n self.lnb_temp_reception_mp_perf.setGeometry(QtCore.QRect(510, 115, 101, 22))\r\n self.lnb_temp_reception_mp_perf.setObjectName(\"lnb_temp_reception_mp_perf\")\r\n self.label_88 = QtWidgets.QLabel(self.groupBox_11)\r\n self.label_88.setGeometry(QtCore.QRect(20, 132, 171, 51))\r\n self.label_88.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_88.setObjectName(\"label_88\")\r\n self.aditional_losses_reception_mp_perf = QtWidgets.QLineEdit(self.groupBox_11)\r\n self.aditional_losses_reception_mp_perf.setGeometry(QtCore.QRect(200, 146, 101, 22))\r\n self.aditional_losses_reception_mp_perf.setObjectName(\"aditional_losses_reception_mp_perf\")\r\n self.label_89 = QtWidgets.QLabel(self.groupBox_11)\r\n self.label_89.setGeometry(QtCore.QRect(20, 170, 171, 51))\r\n self.label_89.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_89.setObjectName(\"label_89\")\r\n self.max_depoint_reception_mp_perf = QtWidgets.QLineEdit(self.groupBox_11)\r\n self.max_depoint_reception_mp_perf.setGeometry(QtCore.QRect(200, 190, 101, 22))\r\n self.max_depoint_reception_mp_perf.setObjectName(\"max_depoint_reception_mp_perf\")\r\n self.clear_reception_mp_perf = QtWidgets.QPushButton(self.groupBox_11)\r\n self.clear_reception_mp_perf.setGeometry(QtCore.QRect(656, 115, 111, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n font.setBold(False)\r\n font.setWeight(50)\r\n self.clear_reception_mp_perf.setFont(font)\r\n self.clear_reception_mp_perf.setObjectName(\"clear_reception_mp_perf\")\r\n self.cable_loss_reception_mp_perf = QtWidgets.QLineEdit(self.groupBox_11)\r\n self.cable_loss_reception_mp_perf.setGeometry(QtCore.QRect(510, 150, 101, 22))\r\n self.cable_loss_reception_mp_perf.setObjectName(\"cable_loss_reception_mp_perf\")\r\n self.label_90 = QtWidgets.QLabel(self.groupBox_11)\r\n self.label_90.setGeometry(QtCore.QRect(330, 133, 171, 51))\r\n self.label_90.setLayoutDirection(QtCore.Qt.LeftToRight)\r\n self.label_90.setObjectName(\"label_90\")\r\n self.output_mp_perf = QtWidgets.QTextEdit(self.list_perf_calc_page)\r\n self.output_mp_perf.setGeometry(QtCore.QRect(10, 684, 801, 141))\r\n self.output_mp_perf.setObjectName(\"output_mp_perf\")\r\n self.relaxation_mp_perf = QtWidgets.QLineEdit(self.list_perf_calc_page)\r\n self.relaxation_mp_perf.setGeometry(QtCore.QRect(174, 832, 51, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.relaxation_mp_perf.setFont(font)\r\n self.relaxation_mp_perf.setToolTip(\"\")\r\n self.relaxation_mp_perf.setObjectName(\"relaxation_mp_perf\")\r\n self.calc_mp_perf = QtWidgets.QPushButton(self.list_perf_calc_page)\r\n self.calc_mp_perf.setGeometry(QtCore.QRect(670, 830, 141, 28))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.calc_mp_perf.setFont(font)\r\n self.calc_mp_perf.setObjectName(\"calc_mp_perf\")\r\n self.label_14 = QtWidgets.QLabel(self.list_perf_calc_page)\r\n self.label_14.setGeometry(QtCore.QRect(10, 833, 161, 21))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_14.setFont(font)\r\n self.label_14.setObjectName(\"label_14\")\r\n self.label_15 = 
QtWidgets.QLabel(self.list_perf_calc_page)\r\n self.label_15.setGeometry(QtCore.QRect(420, 831, 61, 20))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_15.setFont(font)\r\n self.label_15.setObjectName(\"label_15\")\r\n self.n_threads_mp_perf = QtWidgets.QComboBox(self.list_perf_calc_page)\r\n self.n_threads_mp_perf.setGeometry(QtCore.QRect(490, 831, 73, 22))\r\n self.n_threads_mp_perf.setObjectName(\"n_threads_mp_perf\")\r\n self.path_mp_perf = QtWidgets.QLineEdit(self.list_perf_calc_page)\r\n self.path_mp_perf.setGeometry(QtCore.QRect(55, 10, 631, 22))\r\n self.path_mp_perf.setObjectName(\"path_mp_perf\")\r\n self.label_16 = QtWidgets.QLabel(self.list_perf_calc_page)\r\n self.label_16.setGeometry(QtCore.QRect(240, 831, 91, 21))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.label_16.setFont(font)\r\n self.label_16.setObjectName(\"label_16\")\r\n self.margin_mp_perf = QtWidgets.QLineEdit(self.list_perf_calc_page)\r\n self.margin_mp_perf.setGeometry(QtCore.QRect(340, 831, 61, 22))\r\n font = QtGui.QFont()\r\n font.setPointSize(10)\r\n self.margin_mp_perf.setFont(font)\r\n self.margin_mp_perf.setToolTip(\"\")\r\n self.margin_mp_perf.setObjectName(\"margin_mp_perf\")\r\n self.stackedWidget.addWidget(self.list_perf_calc_page)\r\n MainWindow.setCentralWidget(self.centralwidget)\r\n self.menubar = QtWidgets.QMenuBar(MainWindow)\r\n self.menubar.setGeometry(QtCore.QRect(0, 0, 820, 26))\r\n self.menubar.setObjectName(\"menubar\")\r\n self.menuNew = QtWidgets.QMenu(self.menubar)\r\n self.menuNew.setObjectName(\"menuNew\")\r\n self.menuCalculation = QtWidgets.QMenu(self.menubar)\r\n self.menuCalculation.setObjectName(\"menuCalculation\")\r\n self.menuList_Calculation = QtWidgets.QMenu(self.menubar)\r\n self.menuList_Calculation.setObjectName(\"menuList_Calculation\")\r\n self.menuHelp = QtWidgets.QMenu(self.menubar)\r\n self.menuHelp.setObjectName(\"menuHelp\")\r\n MainWindow.setMenuBar(self.menubar)\r\n self.statusbar = QtWidgets.QStatusBar(MainWindow)\r\n self.statusbar.setObjectName(\"statusbar\")\r\n MainWindow.setStatusBar(self.statusbar)\r\n self.action_New_Satellite = QtWidgets.QAction(MainWindow)\r\n self.action_New_Satellite.setObjectName(\"action_New_Satellite\")\r\n self.actionReception = QtWidgets.QAction(MainWindow)\r\n self.actionReception.setObjectName(\"actionReception\")\r\n self.action_New_Ground_Station = QtWidgets.QAction(MainWindow)\r\n self.action_New_Ground_Station.setObjectName(\"action_New_Ground_Station\")\r\n self.action_Single_Atmospheric_Atenuation = QtWidgets.QAction(MainWindow)\r\n self.action_Single_Atmospheric_Atenuation.setObjectName(\"action_Single_Atmospheric_Atenuation\")\r\n self.action_Single_Downlink_Performance = QtWidgets.QAction(MainWindow)\r\n self.action_Single_Downlink_Performance.setObjectName(\"action_Single_Downlink_Performance\")\r\n self.action_List_Downlink_Performance = QtWidgets.QAction(MainWindow)\r\n self.action_List_Downlink_Performance.setObjectName(\"action_List_Downlink_Performance\")\r\n self.action_Single_Antenna_Size = QtWidgets.QAction(MainWindow)\r\n self.action_Single_Antenna_Size.setObjectName(\"action_Single_Antenna_Size\")\r\n self.action_List_Antenna_Size = QtWidgets.QAction(MainWindow)\r\n self.action_List_Antenna_Size.setObjectName(\"action_List_Antenna_Size\")\r\n self.menuNew.addAction(self.action_New_Satellite)\r\n self.menuNew.addAction(self.action_New_Ground_Station)\r\n self.menuNew.addAction(self.actionReception)\r\n 
self.menuCalculation.addAction(self.action_Single_Atmospheric_Atenuation)\r\n self.menuCalculation.addAction(self.action_Single_Downlink_Performance)\r\n self.menuCalculation.addAction(self.action_Single_Antenna_Size)\r\n self.menuList_Calculation.addAction(self.action_List_Downlink_Performance)\r\n self.menuList_Calculation.addAction(self.action_List_Antenna_Size)\r\n self.menubar.addAction(self.menuNew.menuAction())\r\n self.menubar.addAction(self.menuCalculation.menuAction())\r\n self.menubar.addAction(self.menuList_Calculation.menuAction())\r\n self.menubar.addAction(self.menuHelp.menuAction())\r\n\r\n self.retranslateUi(MainWindow)\r\n self.stackedWidget.setCurrentIndex(4)\r\n self.stackedWidget_2.setCurrentIndex(7)\r\n self.stackedWidget_3.setCurrentIndex(9)\r\n self.stackedWidget_4.setCurrentIndex(7)\r\n QtCore.QMetaObject.connectSlotsByName(MainWindow)\r\n\r\n # ===================================== HERE GOES MY CODE! ======================================================\r\n\r\n self.stackedWidget.setCurrentIndex(0) # selecting the initial application's page\r\n\r\n ######### change window actions buttons ##########\r\n import webbrowser\r\n self.action_New_Satellite.triggered.connect(\r\n lambda state, x=1, page_name='SaLink - Create Satellite': self.change_page(x, page_name))\r\n self.action_New_Ground_Station.triggered.connect(\r\n lambda state, x=2, page_name='SaLink - Create Ground Station': self.change_page(x, page_name))\r\n self.actionReception.triggered.connect(\r\n lambda state, x=3, page_name='SaLink - Create Reception': self.change_page(x, page_name))\r\n self.action_Single_Atmospheric_Atenuation.triggered.connect(\r\n lambda state, x=6, page_name='SaLink - Atmospheric Attenuation': self.change_page(x, page_name))\r\n self.action_Single_Downlink_Performance.triggered.connect(\r\n lambda state, x=7, page_name='SaLink - Downlink Performance': self.change_page(x, page_name))\r\n self.action_List_Downlink_Performance.triggered.connect(\r\n lambda state, x=8, page_name='SaLink - Multi Downlink Performance': self.change_page(x, page_name))\r\n self.action_Single_Antenna_Size.triggered.connect(\r\n lambda state, x=4, page_name='SaLink - Antenna Size': self.change_page(x, page_name))\r\n self.action_List_Antenna_Size.triggered.connect(\r\n lambda state, x=5, page_name='SaLink - Multi Antenna Size': self.change_page(x, page_name))\r\n # self.menuHelp.clicked.connect(webbrowser.open('https://cfragoas.github.io/SatLink/'))\r\n\r\n ################# calculation buttons #####################\r\n self.calc_spatm.clicked.connect(\r\n self.single_point_atm_atnn_calc) # Single point atmospheric attenuation calc button activation\r\n self.calc_sp_perf.clicked.connect(self.single_point_calc_perf)\r\n self.calc_mp_perf.clicked.connect(self.multi_point_calc_perf)\r\n self.calc_sp_ant_size.clicked.connect(self.single_point_ant_size)\r\n self.calc_mp_ant_size.clicked.connect(self.multi_point_ant_size)\r\n\r\n ########################saves and load boxes ##########################\r\n # main satellite save/load/clear screen\r\n self.load_sat.clicked.connect(lambda state, opt='load': self.load_save_sat(opt))\r\n self.save_sat.clicked.connect(lambda state, opt='save': self.load_save_sat(opt))\r\n self.clear_sat.clicked.connect(lambda state, opt='clear': self.load_save_sat(opt))\r\n\r\n # main ground station save/load/clear screen\r\n self.load_ground_station_gdstation.clicked.connect(lambda state, opt='load': self.load_save_gr_station(opt))\r\n 
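        # The connections above and below all use the same idiom: extra state
        # is bound through lambda *default arguments* (opt='load', item='sat',
        # ...) so each slot keeps its own copy instead of closing over a
        # shared name. A minimal sketch of the idiom (widget names as in this
        # file):
        #
        #     for opt in ('load', 'save', 'clear'):
        #         button = getattr(self, opt + '_sat')   # e.g. self.load_sat
        #         button.clicked.connect(
        #             lambda state, opt=opt: self.load_save_sat(opt))
        #
        # Without the opt=opt default, every lambda would see the value that
        # opt holds after the loop finishes.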
self.save_ground_station_gdstation.clicked.connect(lambda state, opt='save': self.load_save_gr_station(opt))\r\n self.clear_ground_station_gdstation.clicked.connect(lambda state, opt='clear': self.load_save_gr_station(opt))\r\n\r\n # main reception save/load/clear screen\r\n self.load_reception_rcp.clicked.connect(lambda state, opt='load': self.load_save_reception(opt))\r\n self.save_reception_rcp.clicked.connect(lambda state, opt='save': self.load_save_reception(opt))\r\n self.clear_reception_rcp.clicked.connect(lambda state, opt='clear': self.load_save_reception(opt))\r\n\r\n # list path browse\r\n self.browse_path_mp_perf.clicked.connect(\r\n lambda state, preview_window=self.preview_mp_perf, path_box=self.path_mp_perf: self.load_point_list(\r\n preview_window, path_box))\r\n self.browse_mp_ant_size.clicked.connect(\r\n lambda state, preview_window=self.preview_mp_ant_size, path_box=self.path_mp_ant_size: self.load_point_list(\r\n preview_window, path_box))\r\n\r\n # single point atmospheric attenuation screen\r\n self.load_ground_station_spatm.clicked.connect(\r\n lambda state, opt='load', item='gst': self.load_clear_atm_atnn_calc(opt, item))\r\n self.load_reception_spatm.clicked.connect(\r\n lambda state, opt='load', item='rcp': self.load_clear_atm_atnn_calc(opt, item))\r\n self.clear_reception_spatm.clicked.connect(\r\n lambda state, opt='clear', item='rcp': self.load_clear_atm_atnn_calc(opt, item))\r\n self.load_sat_spatm.clicked.connect(\r\n lambda state, opt='load', item='sat': self.load_clear_atm_atnn_calc(opt, item))\r\n\r\n # single point link performance screen\r\n self.load_ground_station_sp_perf.clicked.connect(\r\n lambda state, opt='load', item='gst': self.load_save_single_point_perf(opt, item))\r\n self.save_ground_station_sp_perf.clicked.connect(\r\n lambda state, opt='save', item='gst': self.load_save_single_point_perf(opt, item))\r\n self.clear_ground_station_sp_perf.clicked.connect(\r\n lambda state, opt='clear', item='gst': self.load_save_single_point_perf(opt, item))\r\n self.load_sat_sp_perf.clicked.connect(\r\n lambda state, opt='load', item='sat': self.load_save_single_point_perf(opt, item))\r\n self.save_sat_sp_perf.clicked.connect(\r\n lambda state, opt='save', item='sat': self.load_save_single_point_perf(opt, item))\r\n self.clear_sat_sp_perf.clicked.connect(\r\n lambda state, opt='clear', item='sat': self.load_save_single_point_perf(opt, item))\r\n self.load_reception_sp_perf.clicked.connect(\r\n lambda state, opt='load', item='rcp': self.load_save_single_point_perf(opt, item))\r\n self.save_reception_sp_perf.clicked.connect(\r\n lambda state, opt='save', item='rcp': self.load_save_single_point_perf(opt, item))\r\n self.clear_reception_sp_perf.clicked.connect(\r\n lambda state, opt='clear', item='rcp': self.load_save_single_point_perf(opt, item))\r\n\r\n # single point antenna size calculation screen\r\n self.load_ground_station_sp_ant_size.clicked.connect(\r\n lambda state, opt='load', item='gst': self.load_save_single_point_ant_size(opt, item))\r\n self.save_ground_station_sp_ant_size.clicked.connect(\r\n lambda state, opt='save', item='gst': self.load_save_single_point_ant_size(opt, item))\r\n self.clear_ground_station_sp_ant_size.clicked.connect(\r\n lambda state, opt='clear', item='gst': self.load_save_single_point_ant_size(opt, item))\r\n self.load_sat_sp_ant_size.clicked.connect(\r\n lambda state, opt='load', item='sat': self.load_save_single_point_ant_size(opt, item))\r\n self.save_sat_sp_ant_size.clicked.connect(\r\n lambda state, opt='save', 
item='sat': self.load_save_single_point_ant_size(opt, item))\r\n self.clear_sat_sp_ant_size.clicked.connect(\r\n lambda state, opt='clear', item='sat': self.load_save_single_point_ant_size(opt, item))\r\n self.load_reception_sp_ant_size.clicked.connect(\r\n lambda state, opt='load', item='rcp': self.load_save_single_point_ant_size(opt, item))\r\n self.clear_reception_sp_ant_size.clicked.connect(\r\n lambda state, opt='clear', item='rcp': self.load_save_single_point_ant_size(opt, item))\r\n self.export_result_sp_ant_size.clicked.connect(self.export_graph)\r\n\r\n # multi point performance screen\r\n self.load_sat_mp_perf.clicked.connect(\r\n lambda state, opt='load', item='sat': self.load_save_multi_point_perf(opt, item))\r\n self.save_sat_mp_perf.clicked.connect(\r\n lambda state, opt='save', item='sat': self.load_save_multi_point_perf(opt, item))\r\n self.clear_satellite_mp_perf.clicked.connect(\r\n lambda state, opt='clear', item='sat': self.load_save_multi_point_perf(opt, item))\r\n self.load_reception_mp_perf.clicked.connect(\r\n lambda state, opt='load', item='rcp': self.load_save_multi_point_perf(opt, item))\r\n self.save_reception_mp_perf.clicked.connect(\r\n lambda state, opt='save', item='rcp': self.load_save_multi_point_perf(opt, item))\r\n self.clear_reception_mp_perf.clicked.connect(\r\n lambda state, opt='clear', item='rcp': self.load_save_multi_point_perf(opt, item))\r\n\r\n # multi point antenna size calculation screen\r\n self.load_sat_mp_ant_size.clicked.connect(\r\n lambda state, opt='load', item='sat': self.load_save_multi_point_ant_size(opt, item))\r\n self.save_sat_mp_ant_size.clicked.connect(\r\n lambda state, opt='save', item='sat': self.load_save_multi_point_ant_size(opt, item))\r\n self.clear_sat_mp_ant_size.clicked.connect(\r\n lambda state, opt='clear', item='sat': self.load_save_multi_point_ant_size(opt, item))\r\n self.load_reception_mp_ant_size.clicked.connect(\r\n lambda state, opt='load', item='rcp': self.load_save_multi_point_ant_size(opt, item))\r\n self.clear_reception_mp_ant_size.clicked.connect(\r\n lambda state, opt='clear', item='rcp': self.load_save_multi_point_ant_size(opt, item))\r\n\r\n ########################################################################\r\n\r\n # creating the combo boxes options from files and changed event\r\n import pandas as pd\r\n\r\n # Modulation combo boxes\r\n data = pd.read_csv('models\\\\Modulation_dB.csv', sep=';')['Modcod']\r\n data = data.values.tolist()\r\n self.modcod_sat_mp_perf.addItems(data)\r\n self.modcod_sat_sp_perf.addItems(data)\r\n self.modcod_sat.addItems(data)\r\n self.modcod_sat_sp_ant_size.addItems(data)\r\n self.modcod_sat_mp_ant_size.addItems(data)\r\n # Default satellite list combo boxes\r\n data = pd.read_csv('models\\\\Default_Sat.csv', sep=';')['SAT']\r\n data = data.values.tolist()\r\n self.default_sat.addItems(data)\r\n self.default_sat_sp_ant_size.addItems(data)\r\n self.default_sat_mp_ant_size.addItems(data)\r\n self.default_sat_spatm.addItems(data)\r\n self.default_sat_sp_perf.addItems(data)\r\n self.default_sat_mp_perf.addItems(data)\r\n # Default satellite change event\r\n self.default_sat.currentIndexChanged.connect(lambda state,\r\n cb_box = self.default_sat,\r\n page = 'default_sat': self.on_combobox_changed(cb_box, page))\r\n self.default_sat_sp_ant_size.currentIndexChanged.connect(lambda state,\r\n cb_box = self.default_sat_sp_ant_size,\r\n page = 'sp_ant_size': self.on_combobox_changed(cb_box, page))\r\n self.default_sat_mp_ant_size.currentIndexChanged.connect(lambda state,\r\n 
cb_box = self.default_sat_mp_ant_size,\r\n page = 'mp_ant_size': self.on_combobox_changed(cb_box, page))\r\n self.default_sat_spatm.currentIndexChanged.connect(lambda state,\r\n cb_box = self.default_sat_spatm,\r\n page = 'spatm': self.on_combobox_changed(cb_box, page))\r\n self.default_sat_sp_perf.currentIndexChanged.connect(lambda state,\r\n cb_box = self.default_sat_sp_perf,\r\n page = 'sp_perf': self.on_combobox_changed(cb_box, page))\r\n self.default_sat_mp_perf.currentIndexChanged.connect(lambda state,\r\n cb_box = self.default_sat_mp_perf,\r\n page = 'mp_perf': self.on_combobox_changed(cb_box, page))\r\n\r\n # threads number combo box\r\n import multiprocessing\r\n self.cores = multiprocessing.cpu_count()\r\n print('Max threads: ', self.cores)\r\n if self.cores > 63:\r\n self.cores = 63\r\n n_thread_list = [1]\r\n n_thread_list.extend(range(2, self.cores - 1, round(self.cores / 4)))\r\n n_thread_list_map = map(str, n_thread_list)\r\n\r\n self.n_threads_mp_perf.addItems(n_thread_list_map)\r\n n_thread_list_map = map(str, n_thread_list)\r\n self.n_threads_mp_ant_size.addItems(n_thread_list_map)\r\n\r\n # deleting temp files\r\n import os\r\n if os.path.exists('temp\\\\save.pkl'):\r\n os.remove('temp\\\\save.pkl')\r\n if os.path.exists('temp\\\\load.pkl'):\r\n os.remove('temp\\\\load.pkl')\r\n if os.path.exists('temp\\\\args.pkl'):\r\n os.remove('temp\\\\args.pkl')\r\n if os.path.exists(\"temp\\\\out.txt\"):\r\n os.remove(\"temp\\\\out.txt\")\r\n\r\n # maintaing updatable texts\r\n self._update_timer = QtCore.QTimer()\r\n self._update_timer.timeout.connect(lambda index=3: self.update_label(index))\r\n self._update_timer.timeout.connect(lambda index=4: self.update_label(index))\r\n self._update_timer.start(1000) # milliseconds\r\n\r\n def on_combobox_changed(self, cb_box, page):\r\n import pandas as pd\r\n index = cb_box.currentIndex()\r\n if index != 0:\r\n data = pd.read_csv('models\\\\Default_Sat.csv', sep=';')['COORD']\r\n long = data[index]\r\n if page == 'default_sat':\r\n self.long_sat.setText(long)\r\n elif page == 'sp_ant_size':\r\n self.long_sat_sp_ant_size.setText(long)\r\n elif page == 'mp_ant_size':\r\n self.long_sat_mp_ant_size.setText(long)\r\n elif page == 'spatm':\r\n self.long_sat_spatm.setText(long)\r\n elif page == 'sp_perf':\r\n self.long_sat_sp_perf.setText(long)\r\n elif page == 'mp_perf':\r\n self.long_sat_mp_perf.setText(long)\r\n\r\n\r\n def load_save_sat(self, opt):\r\n import pickle\r\n import os\r\n from file_dialog import open_dialog\r\n type = 'Sat Files (*.sat)'\r\n if opt != 'clear':\r\n open_dialog(opt, type)\r\n try:\r\n with open('temp\\\\' + opt + '.pkl', 'rb') as f:\r\n folder = pickle.load(f)\r\n # print(folder)\r\n f.close()\r\n except:\r\n folder = ''\r\n if opt == 'save':\r\n if folder != '':\r\n folder = folder + '.sat'\r\n try:\r\n name_sat = str(self.name_sat.text())\r\n long_sat = float(self.long_sat.text())\r\n height_sat = float(self.height_sat.text())\r\n freq = float(self.freq_sat.text())\r\n eirp_sat = float(self.eirp_sat.text())\r\n max_bw_sat = float(self.max_bw_sat.text())\r\n bw_util = float(self.bw_util_sat.text())\r\n rolloff_sat = float(self.rolloff_sat.text())\r\n modulation = str(self.modcod_sat.currentText())\r\n pol = str(self.pol_sat.currentText())\r\n\r\n except:\r\n QtWidgets.QMessageBox.warning(MainWindow, 'WARNING', 'Empty or wrong type in the fields!!!')\r\n return\r\n\r\n with open(folder, 'wb') as f:\r\n pickle.dump([name_sat, long_sat, height_sat, freq, eirp_sat,\r\n max_bw_sat, bw_util, rolloff_sat, modulation, 
pol], f)\r\n f.close()\r\n\r\n if os.path.exists('temp\\\\save.pkl'):\r\n os.remove('temp\\\\save.pkl')\r\n\r\n elif opt == 'load':\r\n if folder != '':\r\n with open(folder, 'rb') as f:\r\n name_sat, long_sat, height_sat, freq, eirp_sat, max_bw_sat, bw_util, rolloff_sat, modulation, pol = pickle.load(\r\n f)\r\n f.close()\r\n self.name_sat.setText(str(name_sat))\r\n self.long_sat.setText(str(long_sat))\r\n self.height_sat.setText(str(height_sat))\r\n self.freq_sat.setText(str(freq))\r\n self.eirp_sat.setText(str(eirp_sat))\r\n self.max_bw_sat.setText(str(max_bw_sat))\r\n self.bw_util_sat.setText(str(bw_util))\r\n self.rolloff_sat.setText(str(rolloff_sat))\r\n index = self.pol_sat.findText(str(pol), QtCore.Qt.MatchFixedString)\r\n if index >= 0:\r\n self.pol_sat.setCurrentIndex(index)\r\n index = self.modcod_sat.findText(str(modulation), QtCore.Qt.MatchFixedString)\r\n if index >= 0:\r\n self.modcod_sat.setCurrentIndex(index)\r\n\r\n if os.path.exists('temp\\\\load.pkl'):\r\n os.remove('temp\\\\load.pkl')\r\n\r\n elif opt == 'clear':\r\n self.name_sat.clear()\r\n self.long_sat.clear()\r\n self.height_sat.clear()\r\n self.freq_sat.clear()\r\n self.eirp_sat.clear()\r\n self.max_bw_sat.clear()\r\n self.bw_util_sat.clear()\r\n self.rolloff_sat.clear()\r\n self.pol_sat.setCurrentIndex(0)\r\n self.modcod_sat.setCurrentIndex(0)\r\n\r\n def load_save_gr_station(self, opt):\r\n import pickle\r\n import os\r\n from file_dialog import open_dialog\r\n type = 'Ground Station Files (*.gst)'\r\n if opt != 'clear':\r\n open_dialog(opt, type)\r\n try:\r\n with open('temp\\\\' + opt + '.pkl', 'rb') as f:\r\n folder = pickle.load(f)\r\n # print(folder)\r\n f.close()\r\n except:\r\n folder = ''\r\n if opt == 'save':\r\n if folder != '':\r\n folder = folder + '.gst'\r\n try:\r\n name_grst = str(self.name_ground_station_grstat.text())\r\n lat_gst = float(self.lat_ground_station_grstat.text())\r\n long_gst = float(self.long_ground_station_grstat.text())\r\n except:\r\n QtWidgets.QMessageBox.warning(MainWindow, 'WARNING', 'Empty or wrong type in the fields!!!')\r\n return\r\n\r\n with open(folder, 'wb') as f:\r\n pickle.dump([name_grst, lat_gst, long_gst], f)\r\n f.close()\r\n\r\n if os.path.exists('temp\\\\save.pkl'):\r\n os.remove('temp\\\\save.pkl')\r\n\r\n elif opt == 'load':\r\n if folder != '':\r\n with open(folder, 'rb') as f:\r\n name_grst, lat_gst, long_gst = pickle.load(f)\r\n f.close()\r\n self.name_ground_station_grstat.setText(str(name_grst))\r\n self.lat_ground_station_grstat.setText(str(lat_gst))\r\n self.long_ground_station_grstat.setText(str(long_gst))\r\n\r\n if os.path.exists('temp\\\\load.pkl'):\r\n os.remove('temp\\\\load.pkl')\r\n\r\n elif opt == 'clear':\r\n self.name_ground_station_grstat.clear()\r\n self.lat_ground_station_grstat.clear()\r\n self.long_ground_station_grstat.clear()\r\n\r\n def load_save_reception(self, opt):\r\n import pickle\r\n import os\r\n from file_dialog import open_dialog\r\n type = 'Reception Setup Files (*.rcp)'\r\n if opt != 'clear':\r\n open_dialog(opt, type)\r\n try:\r\n with open('temp\\\\' + opt + '.pkl', 'rb') as f:\r\n folder = pickle.load(f)\r\n # print(folder)\r\n f.close()\r\n except:\r\n folder = ''\r\n if opt == 'save':\r\n if folder != '':\r\n folder = folder + '.rcp'\r\n try:\r\n name_rcp = str(self.name_reception_rcp.text())\r\n ant_size = float(self.ant_size_reception_rcp.text())\r\n ant_eff = float(self.ant_eff_reception_rcp.text())\r\n lnb_gain = float(self.lnb_gain_reception_rcp.text())\r\n lnb_temp = 
float(self.lnb_temp_reception_rcp.text())\r\n aditional_losses = float(self.aditional_losses_reception_rcp.text())\r\n cable_loss = float(self.cable_loss_reception_rcp.text())\r\n max_depoint = float(self.max_depoint_reception_rcp.text())\r\n except:\r\n QtWidgets.QMessageBox.warning(MainWindow, 'WARNING', 'Empty or wrong type in the fields!!!')\r\n return\r\n\r\n with open(folder, 'wb') as f:\r\n pickle.dump(\r\n [name_rcp, ant_size, ant_eff, lnb_gain, lnb_temp, aditional_losses, cable_loss, max_depoint], f)\r\n f.close()\r\n\r\n if os.path.exists('temp\\\\save.pkl'):\r\n os.remove('temp\\\\save.pkl')\r\n\r\n elif opt == 'load':\r\n if folder != '':\r\n with open(folder, 'rb') as f:\r\n name_rcp, ant_size, ant_eff, lnb_gain, lnb_temp, aditional_losses, cable_loss, max_depoint = pickle.load(\r\n f)\r\n f.close()\r\n self.name_reception_rcp.setText(str(name_rcp))\r\n self.ant_size_reception_rcp.setText(str(ant_size))\r\n self.ant_eff_reception_rcp.setText(str(ant_eff))\r\n self.lnb_gain_reception_rcp.setText(str(lnb_gain))\r\n self.lnb_temp_reception_rcp.setText(str(lnb_temp))\r\n self.aditional_losses_reception_rcp.setText(str(aditional_losses))\r\n self.cable_loss_reception_rcp.setText(str(cable_loss))\r\n self.max_depoint_reception_rcp.setText(str(max_depoint))\r\n\r\n if os.path.exists('temp\\\\load.pkl'):\r\n os.remove('temp\\\\load.pkl')\r\n\r\n elif opt == 'clear':\r\n self.name_reception_rcp.clear()\r\n self.ant_size_reception_rcp.clear()\r\n self.ant_eff_reception_rcp.clear()\r\n self.lnb_gain_reception_rcp.clear()\r\n self.lnb_temp_reception_rcp.clear()\r\n self.aditional_losses_reception_rcp.clear()\r\n self.cable_loss_reception_rcp.clear()\r\n self.max_depoint_reception_rcp.clear()\r\n\r\n def load_clear_atm_atnn_calc(self, opt, item):\r\n import pickle\r\n import os\r\n from file_dialog import open_dialog\r\n\r\n if item == 'gst':\r\n type = 'Ground Station Files (*.gst)'\r\n if opt != 'clear':\r\n open_dialog(opt, type)\r\n try:\r\n with open('temp\\\\' + opt + '.pkl', 'rb') as f:\r\n folder = pickle.load(f)\r\n # print(folder)\r\n f.close()\r\n except:\r\n folder = ''\r\n\r\n if opt == 'load':\r\n if folder != '':\r\n with open(folder, 'rb') as f:\r\n name_grst, lat_gst, long_gst = pickle.load(f)\r\n f.close()\r\n self.lat_ground_station_spatm.setText(str(lat_gst))\r\n self.long_ground_station_spatm.setText(str(long_gst))\r\n\r\n if os.path.exists('temp\\\\load.pkl'):\r\n os.remove('temp\\\\load.pkl')\r\n\r\n elif item == 'rcp':\r\n type = 'Reception Setup Files (*.rcp)'\r\n if opt != 'clear':\r\n open_dialog(opt, type)\r\n try:\r\n with open('temp\\\\' + opt + '.pkl', 'rb') as f:\r\n folder = pickle.load(f)\r\n # print(folder)\r\n f.close()\r\n except:\r\n folder = ''\r\n\r\n if opt == 'load':\r\n if folder != '':\r\n with open(folder, 'rb') as f:\r\n name_rcp, ant_size, ant_eff, lnb_gain, lnb_temp, aditional_losses, cable_loss, max_depoint = pickle.load(\r\n f)\r\n f.close()\r\n self.ant_size_reception_spatm.setText(str(ant_size))\r\n self.ant_eff_reception_spatm.setText(str(ant_eff))\r\n\r\n if os.path.exists('temp\\\\load.pkl'):\r\n os.remove('temp\\\\load.pkl')\r\n\r\n elif opt == 'clear':\r\n self.lat_ground_station_spatm.clear()\r\n self.long_ground_station_spatm.clear()\r\n self.ant_size_reception_spatm.clear()\r\n self.ant_eff_reception_spatm.clear()\r\n\r\n elif item == 'sat':\r\n type = 'Sat Files (*.sat)'\r\n if opt != 'clear':\r\n open_dialog(opt, type)\r\n try:\r\n with open('temp\\\\' + opt + '.pkl', 'rb') as f:\r\n folder = pickle.load(f)\r\n # 
print(folder)\r\n f.close()\r\n except:\r\n folder = ''\r\n\r\n if opt == 'load':\r\n if folder != '':\r\n with open(folder, 'rb') as f:\r\n name_sat, long_sat, height_sat, freq, eirp_sat, max_bw_sat, bw_util, rolloff_sat, modulation, pol = pickle.load(\r\n f)\r\n f.close()\r\n self.long_sat_spatm.setText(str(long_sat))\r\n self.freq_sat_spatm.setText(str(freq))\r\n index = self.pol_sat_sp_perf.findText(str(pol), QtCore.Qt.MatchFixedString)\r\n if index >= 0:\r\n self.pol_sat_spatm.setCurrentIndex(index)\r\n\r\n if os.path.exists('temp\\\\load.pkl'):\r\n os.remove('temp\\\\load.pkl')\r\n\r\n def load_save_single_point_perf(self, opt, item):\r\n import pickle\r\n import os\r\n from file_dialog import open_dialog\r\n\r\n if item == 'gst':\r\n type = 'Ground Station Files (*.gst)'\r\n if opt != 'clear':\r\n open_dialog(opt, type)\r\n try:\r\n with open('temp\\\\' + opt + '.pkl', 'rb') as f:\r\n folder = pickle.load(f)\r\n # print(folder)\r\n f.close()\r\n except:\r\n folder = ''\r\n\r\n if opt == 'save':\r\n if folder != '':\r\n folder = folder + '.gst'\r\n try:\r\n name_grst = str(self.name_ground_station_sp_perf.text())\r\n lat_gst = float(self.lat_ground_station_sp_perf.text())\r\n long_gst = float(self.long_ground_station_sp_perf.text())\r\n except:\r\n QtWidgets.QMessageBox.warning(MainWindow, 'WARNING', 'Empty or wrong type in the fields!!!')\r\n return\r\n\r\n with open(folder, 'wb') as f:\r\n pickle.dump([name_grst, lat_gst, long_gst], f)\r\n f.close()\r\n\r\n if os.path.exists('temp\\\\save.pkl'):\r\n os.remove('temp\\\\save.pkl')\r\n\r\n elif opt == 'load':\r\n if folder != '':\r\n with open(folder, 'rb') as f:\r\n name_grst, lat_gst, long_gst = pickle.load(f)\r\n f.close()\r\n self.name_ground_station_sp_perf.setText(str(name_grst))\r\n self.lat_ground_station_sp_perf.setText(str(lat_gst))\r\n self.long_ground_station_sp_perf.setText(str(long_gst))\r\n\r\n if os.path.exists('temp\\\\load.pkl'):\r\n os.remove('temp\\\\load.pkl')\r\n\r\n elif opt == 'clear':\r\n self.name_ground_station_sp_perf.clear()\r\n self.lat_ground_station_sp_perf.clear()\r\n self.long_ground_station_sp_perf.clear()\r\n\r\n elif item == 'sat':\r\n type = 'Sat Files (*.sat)'\r\n if opt != 'clear':\r\n open_dialog(opt, type)\r\n try:\r\n with open('temp\\\\' + opt + '.pkl', 'rb') as f:\r\n folder = pickle.load(f)\r\n # print(folder)\r\n f.close()\r\n except:\r\n folder = ''\r\n if opt == 'save':\r\n if folder != '':\r\n folder = folder + '.sat'\r\n try:\r\n name_sat = str(self.name_sat_sp_perf.text())\r\n long_sat = float(self.long_sat_sp_perf.text())\r\n height_sat = float(self.height_sat_sp_perf.text())\r\n freq = float(self.freq_sat_sp_perf.text())\r\n eirp_sat = float(self.eirp_sat_sp_perf.text())\r\n max_bw_sat = float(self.max_bw_sat_sp_perf.text())\r\n bw_util = float(self.bw_util_sat_sp_perf.text())\r\n rolloff_sat = float(self.rolloff_sat_sp_perf.text())\r\n modulation = str(self.modcod_sat_sp_perf.currentText())\r\n pol = str(self.pol_sat_sp_perf.currentText())\r\n\r\n except:\r\n QtWidgets.QMessageBox.warning(MainWindow, 'WARNING', 'Empty or wrong type in the fields!!!')\r\n return\r\n\r\n with open(folder, 'wb') as f:\r\n pickle.dump([name_sat, long_sat, height_sat, freq, eirp_sat,\r\n max_bw_sat, bw_util, rolloff_sat, modulation, pol], f)\r\n f.close()\r\n\r\n if os.path.exists('temp\\\\save.pkl'):\r\n os.remove('temp\\\\save.pkl')\r\n\r\n elif opt == 'load':\r\n if folder != '':\r\n with open(folder, 'rb') as f:\r\n name_sat, long_sat, height_sat, freq, eirp_sat, max_bw_sat, bw_util, rolloff_sat, 
modulation, pol = pickle.load(\r\n f)\r\n f.close()\r\n self.name_sat_sp_perf.setText(str(name_sat))\r\n self.long_sat_sp_perf.setText(str(long_sat))\r\n self.height_sat_sp_perf.setText(str(height_sat))\r\n self.freq_sat_sp_perf.setText(str(freq))\r\n self.eirp_sat_sp_perf.setText(str(eirp_sat))\r\n self.max_bw_sat_sp_perf.setText(str(max_bw_sat))\r\n self.bw_util_sat_sp_perf.setText(str(bw_util))\r\n self.rolloff_sat_sp_perf.setText(str(rolloff_sat))\r\n index = self.pol_sat_sp_perf.findText(str(pol), QtCore.Qt.MatchFixedString)\r\n if index >= 0:\r\n self.pol_sat_sp_perf.setCurrentIndex(index)\r\n index = self.modcod_sat_sp_perf.findText(str(modulation), QtCore.Qt.MatchFixedString)\r\n if index >= 0:\r\n self.modcod_sat_sp_perf.setCurrentIndex(index)\r\n\r\n if os.path.exists('temp\\\\load.pkl'):\r\n os.remove('temp\\\\load.pkl')\r\n\r\n elif opt == 'clear':\r\n self.name_sat_sp_perf.clear()\r\n self.long_sat_sp_perf.clear()\r\n self.height_sat_sp_perf.clear()\r\n self.freq_sat_sp_perf.clear()\r\n self.eirp_sat_sp_perf.clear()\r\n self.max_bw_sat_sp_perf.clear()\r\n self.bw_util_sat_sp_perf.clear()\r\n self.rolloff_sat_sp_perf.clear()\r\n self.pol_sat_sp_perf.setCurrentIndex(0)\r\n self.modcod_sat_sp_perf.setCurrentIndex(0)\r\n\r\n elif item == 'rcp':\r\n type = 'Reception Setup Files (*.rcp)'\r\n if opt != 'clear':\r\n open_dialog(opt, type)\r\n try:\r\n with open('temp\\\\' + opt + '.pkl', 'rb') as f:\r\n folder = pickle.load(f)\r\n # print(folder)\r\n f.close()\r\n except:\r\n folder = ''\r\n if opt == 'save':\r\n if folder != '':\r\n folder = folder + '.rcp'\r\n try:\r\n name_rcp = str(self.name_reception_sp_perf.text())\r\n ant_size = float(self.ant_size_reception_sp_perf.text())\r\n ant_eff = float(self.ant_eff_reception_sp_perf.text())\r\n lnb_gain = float(self.lnb_gain_reception_sp_perf.text())\r\n lnb_temp = float(self.lnb_temp_reception_sp_perf.text())\r\n aditional_losses = float(self.aditional_losses_reception_sp_perf.text())\r\n cable_loss = float(self.cable_loss_reception_sp_perf.text())\r\n max_depoint = float(self.max_depoint_reception_sp_perf.text())\r\n except:\r\n QtWidgets.QMessageBox.warning(MainWindow, 'WARNING', 'Empty or wrong type in the fields!!!')\r\n return\r\n\r\n with open(folder, 'wb') as f:\r\n pickle.dump(\r\n [name_rcp, ant_size, ant_eff, lnb_gain, lnb_temp, aditional_losses, cable_loss,\r\n max_depoint], f)\r\n f.close()\r\n\r\n if os.path.exists('temp\\\\save.pkl'):\r\n os.remove('temp\\\\save.pkl')\r\n\r\n elif opt == 'load':\r\n if folder != '':\r\n with open(folder, 'rb') as f:\r\n name_rcp, ant_size, ant_eff, lnb_gain, lnb_temp, aditional_losses, cable_loss, max_depoint = pickle.load(\r\n f)\r\n f.close()\r\n self.name_reception_sp_perf.setText(str(name_rcp))\r\n self.ant_size_reception_sp_perf.setText(str(ant_size))\r\n self.ant_eff_reception_sp_perf.setText(str(ant_eff))\r\n self.lnb_gain_reception_sp_perf.setText(str(lnb_gain))\r\n self.lnb_temp_reception_sp_perf.setText(str(lnb_temp))\r\n self.aditional_losses_reception_sp_perf.setText(str(aditional_losses))\r\n self.cable_loss_reception_sp_perf.setText(str(cable_loss))\r\n self.max_depoint_reception_sp_perf.setText(str(max_depoint))\r\n\r\n if os.path.exists('temp\\\\load.pkl'):\r\n os.remove('temp\\\\load.pkl')\r\n\r\n elif opt == 'clear':\r\n self.name_reception_sp_perf.clear()\r\n self.ant_size_reception_sp_perf.clear()\r\n self.ant_eff_reception_sp_perf.clear()\r\n self.lnb_gain_reception_sp_perf.clear()\r\n self.lnb_temp_reception_sp_perf.clear()\r\n 
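                # All load_save_* handlers share one persistence scheme: a
                # plain list is pickled on save and unpacked positionally on
                # load, so the dump order and the unpack order must stay in
                # sync. A minimal sketch of the round trip (names
                # illustrative):
                #
                #     with open(path, 'wb') as f:
                #         pickle.dump([name, lat, long], f)    # save
                #     with open(path, 'rb') as f:
                #         name, lat, long = pickle.load(f)     # load
                #
                # FIXME: in load_clear_atm_atnn_calc above, the 'sat' load
                # branch calls self.pol_sat_sp_perf.findText(...) but applies
                # the resulting index to self.pol_sat_spatm; it presumably
                # should search self.pol_sat_spatm itself.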
self.aditional_losses_reception_sp_perf.clear()\r\n self.cable_loss_reception_sp_perf.clear()\r\n self.max_depoint_reception_sp_perf.clear()\r\n\r\n def load_save_multi_point_perf(self, opt, item):\r\n import pickle\r\n import os\r\n from file_dialog import open_dialog\r\n\r\n if item == 'sat':\r\n type = 'Sat Files (*.sat)'\r\n if opt != 'clear':\r\n open_dialog(opt, type)\r\n try:\r\n with open('temp\\\\' + opt + '.pkl', 'rb') as f:\r\n folder = pickle.load(f)\r\n # print(folder)\r\n f.close()\r\n except:\r\n folder = ''\r\n if opt == 'save':\r\n if folder != '':\r\n folder = folder + '.sat'\r\n try:\r\n name_sat = str(self.name_sat_mp_perf.text())\r\n long_sat = float(self.long_sat_mp_perf.text())\r\n height_sat = float(self.height_sat_mp_perf.text())\r\n freq = float(self.freq_sat_mp_perf.text())\r\n eirp_sat = float(self.eirp_sat_mp_perf.text())\r\n max_bw_sat = float(self.max_bw_sat_mp_perf.text())\r\n bw_util = float(self.bw_util_sat_mp_perf.text())\r\n rolloff_sat = float(self.rolloff_sat_mp_perf.text())\r\n modulation = str(self.modcod_sat_mp_perf.currentText())\r\n pol = str(self.pol_sat_mp_perf.currentText())\r\n\r\n except:\r\n QtWidgets.QMessageBox.warning(MainWindow, 'WARNING', 'Empty or wrong type in the fields!!!')\r\n return\r\n\r\n with open(folder, 'wb') as f:\r\n pickle.dump([name_sat, long_sat, height_sat, freq, eirp_sat,\r\n max_bw_sat, bw_util, rolloff_sat, modulation, pol], f)\r\n f.close()\r\n\r\n if os.path.exists('temp\\\\save.pkl'):\r\n os.remove('temp\\\\save.pkl')\r\n\r\n elif opt == 'load':\r\n if folder != '':\r\n with open(folder, 'rb') as f:\r\n name_sat, long_sat, height_sat, freq, eirp_sat, max_bw_sat, bw_util, rolloff_sat, modulation, pol = pickle.load(\r\n f)\r\n f.close()\r\n self.name_sat_mp_perf.setText(str(name_sat))\r\n self.long_sat_mp_perf.setText(str(long_sat))\r\n self.height_sat_mp_perf.setText(str(height_sat))\r\n self.freq_sat_mp_perf.setText(str(freq))\r\n self.eirp_sat_mp_perf.setText(str(eirp_sat))\r\n self.max_bw_sat_mp_perf.setText(str(max_bw_sat))\r\n self.bw_util_sat_mp_perf.setText(str(bw_util))\r\n self.rolloff_sat_mp_perf.setText(str(rolloff_sat))\r\n index = self.pol_sat_mp_perf.findText(str(pol), QtCore.Qt.MatchFixedString)\r\n if index >= 0:\r\n self.pol_sat_mp_perf.setCurrentIndex(index)\r\n index = self.modcod_sat_mp_perf.findText(str(modulation), QtCore.Qt.MatchFixedString)\r\n if index >= 0:\r\n self.modcod_sat_mp_perf.setCurrentIndex(index)\r\n\r\n if os.path.exists('temp\\\\load.pkl'):\r\n os.remove('temp\\\\load.pkl')\r\n\r\n elif opt == 'clear':\r\n self.name_sat_mp_perf.clear()\r\n self.long_sat_mp_perf.clear()\r\n self.height_sat_mp_perf.clear()\r\n self.freq_sat_mp_perf.clear()\r\n self.eirp_sat_mp_perf.clear()\r\n self.max_bw_sat_mp_perf.clear()\r\n self.bw_util_sat_mp_perf.clear()\r\n self.rolloff_sat_mp_perf.clear()\r\n self.pol_sat_mp_perf.setCurrentIndex(0)\r\n self.modcod_sat_mp_perf.setCurrentIndex(0)\r\n\r\n elif item == 'rcp':\r\n type = 'Reception Setup Files (*.rcp)'\r\n if opt != 'clear':\r\n open_dialog(opt, type)\r\n try:\r\n with open('temp\\\\' + opt + '.pkl', 'rb') as f:\r\n folder = pickle.load(f)\r\n # print(folder)\r\n f.close()\r\n except:\r\n folder = ''\r\n if opt == 'save':\r\n if folder != '':\r\n folder = folder + '.rcp'\r\n try:\r\n name_rcp = str(self.name_reception_mp_perf.text())\r\n ant_size = float(self.ant_size_reception_mp_perf.text())\r\n ant_eff = float(self.ant_eff_reception_mp_perf.text())\r\n lnb_gain = float(self.lnb_gain_reception_mp_perf.text())\r\n lnb_temp = 
float(self.lnb_temp_reception_mp_perf.text())\r\n aditional_losses = float(self.aditional_losses_reception_mp_perf.text())\r\n cable_loss = float(self.cable_loss_reception_mp_perf.text())\r\n max_depoint = float(self.max_depoint_reception_mp_perf.text())\r\n except:\r\n QtWidgets.QMessageBox.warning(MainWindow, 'WARNING', 'Empty or wrong type in the fields!!!')\r\n return\r\n\r\n with open(folder, 'wb') as f:\r\n pickle.dump(\r\n [name_rcp, ant_size, ant_eff, lnb_gain, lnb_temp, aditional_losses, cable_loss,\r\n max_depoint], f)\r\n f.close()\r\n\r\n if os.path.exists('temp\\\\save.pkl'):\r\n os.remove('temp\\\\save.pkl')\r\n\r\n elif opt == 'load':\r\n if folder != '':\r\n with open(folder, 'rb') as f:\r\n name_rcp, ant_size, ant_eff, lnb_gain, lnb_temp, aditional_losses, cable_loss, max_depoint = pickle.load(\r\n f)\r\n f.close()\r\n self.name_reception_mp_perf.setText(str(name_rcp))\r\n self.ant_size_reception_mp_perf.setText(str(ant_size))\r\n self.ant_eff_reception_mp_perf.setText(str(ant_eff))\r\n self.lnb_gain_reception_mp_perf.setText(str(lnb_gain))\r\n self.lnb_temp_reception_mp_perf.setText(str(lnb_temp))\r\n self.aditional_losses_reception_mp_perf.setText(str(aditional_losses))\r\n self.cable_loss_reception_mp_perf.setText(str(cable_loss))\r\n self.max_depoint_reception_mp_perf.setText(str(max_depoint))\r\n\r\n if os.path.exists('temp\\\\load.pkl'):\r\n os.remove('temp\\\\load.pkl')\r\n\r\n elif opt == 'clear':\r\n self.name_reception_mp_perf.clear()\r\n self.ant_size_reception_mp_perf.clear()\r\n self.ant_eff_reception_mp_perf.clear()\r\n self.lnb_gain_reception_mp_perf.clear()\r\n self.lnb_temp_reception_mp_perf.clear()\r\n self.aditional_losses_reception_sp_perf.clear()\r\n self.cable_loss_reception_mp_perf.clear()\r\n self.max_depoint_reception_mp_perf.clear()\r\n\r\n def load_save_single_point_ant_size(self, opt, item):\r\n import pickle\r\n import os\r\n from file_dialog import open_dialog\r\n\r\n if item == 'gst':\r\n type = 'Ground Station Files (*.gst)'\r\n if opt != 'clear':\r\n open_dialog(opt, type)\r\n try:\r\n with open('temp\\\\' + opt + '.pkl', 'rb') as f:\r\n folder = pickle.load(f)\r\n # print(folder)\r\n f.close()\r\n except:\r\n folder = ''\r\n\r\n if opt == 'save':\r\n if folder != '':\r\n folder = folder + '.gst'\r\n try:\r\n name_grst = str(self.name_ground_station_sp_ant_size.text())\r\n lat_gst = float(self.lat_ground_station_sp_ant_size.text())\r\n long_gst = float(self.long_ground_station_sp_ant_size.text())\r\n except:\r\n QtWidgets.QMessageBox.warning(MainWindow, 'WARNING', 'Empty or wrong type in the fields!!!')\r\n return\r\n\r\n with open(folder, 'wb') as f:\r\n pickle.dump([name_grst, lat_gst, long_gst], f)\r\n f.close()\r\n\r\n if os.path.exists('temp\\\\save.pkl'):\r\n os.remove('temp\\\\save.pkl')\r\n\r\n elif opt == 'load':\r\n if folder != '':\r\n with open(folder, 'rb') as f:\r\n name_grst, lat_gst, long_gst = pickle.load(f)\r\n f.close()\r\n self.name_ground_station_sp_ant_size.setText(str(name_grst))\r\n self.lat_ground_station_sp_ant_size.setText(str(lat_gst))\r\n self.long_ground_station_sp_ant_size.setText(str(long_gst))\r\n\r\n if os.path.exists('temp\\\\load.pkl'):\r\n os.remove('temp\\\\load.pkl')\r\n\r\n elif opt == 'clear':\r\n self.name_ground_station_sp_ant_size.clear()\r\n self.lat_ground_station_sp_ant_size.clear()\r\n self.long_ground_station_sp_ant_size.clear()\r\n\r\n elif item == 'sat':\r\n type = 'Sat Files (*.sat)'\r\n if opt != 'clear':\r\n open_dialog(opt, type)\r\n try:\r\n with open('temp\\\\' + opt + '.pkl', 'rb') 
as f:\r\n folder = pickle.load(f)\r\n # print(folder)\r\n f.close()\r\n except:\r\n folder = ''\r\n if opt == 'save':\r\n if folder != '':\r\n folder = folder + '.sat'\r\n try:\r\n name_sat = str(self.name_sat_sp_ant_size.text())\r\n long_sat = float(self.long_sat_sp_ant_size.text())\r\n height_sat = float(self.height_sat_sp_ant_size.text())\r\n freq = float(self.freq_sat_sp_ant_size.text())\r\n eirp_sat = float(self.eirp_sat_sp_ant_size.text())\r\n max_bw_sat = float(self.max_bw_sat_sp_ant_size.text())\r\n bw_util = float(self.bw_util_sat_sp_ant_size.text())\r\n rolloff_sat = float(self.rolloff_sat_sp_ant_size.text())\r\n modulation = str(self.modcod_sat_sp_ant_size.currentText())\r\n pol = str(self.pol_sat_sp_ant_size.currentText())\r\n\r\n except:\r\n QtWidgets.QMessageBox.warning(MainWindow, 'WARNING', 'Empty or wrong type in the fields!!!')\r\n return\r\n\r\n with open(folder, 'wb') as f:\r\n pickle.dump([name_sat, long_sat, height_sat, freq, eirp_sat,\r\n max_bw_sat, bw_util, rolloff_sat, modulation, pol], f)\r\n f.close()\r\n\r\n if os.path.exists('temp\\\\save.pkl'):\r\n os.remove('temp\\\\save.pkl')\r\n\r\n elif opt == 'load':\r\n if folder != '':\r\n with open(folder, 'rb') as f:\r\n name_sat, long_sat, height_sat, freq, eirp_sat, max_bw_sat, bw_util, rolloff_sat, modulation, pol = pickle.load(\r\n f)\r\n f.close()\r\n self.name_sat_sp_ant_size.setText(str(name_sat))\r\n self.long_sat_sp_ant_size.setText(str(long_sat))\r\n self.height_sat_sp_ant_size.setText(str(height_sat))\r\n self.freq_sat_sp_ant_size.setText(str(freq))\r\n self.eirp_sat_sp_ant_size.setText(str(eirp_sat))\r\n self.max_bw_sat_sp_ant_size.setText(str(max_bw_sat))\r\n self.bw_util_sat_sp_ant_size.setText(str(bw_util))\r\n self.rolloff_sat_sp_ant_size.setText(str(rolloff_sat))\r\n index = self.pol_sat_sp_ant_size.findText(str(pol), QtCore.Qt.MatchFixedString)\r\n if index >= 0:\r\n self.pol_sat_sp_ant_size.setCurrentIndex(index)\r\n index = self.modcod_sat_sp_ant_size.findText(str(modulation), QtCore.Qt.MatchFixedString)\r\n if index >= 0:\r\n self.modcod_sat_sp_ant_size.setCurrentIndex(index)\r\n\r\n if os.path.exists('temp\\\\load.pkl'):\r\n os.remove('temp\\\\load.pkl')\r\n\r\n elif opt == 'clear':\r\n self.name_sat_sp_ant_size.clear()\r\n self.long_sat_sp_ant_size.clear()\r\n self.height_sat_sp_ant_size.clear()\r\n self.freq_sat_sp_ant_size.clear()\r\n self.eirp_sat_sp_ant_size.clear()\r\n self.max_bw_sat_sp_ant_size.clear()\r\n self.bw_util_sat_sp_ant_size.clear()\r\n self.rolloff_sat_sp_ant_size.clear()\r\n self.pol_sat_sp_ant_size.setCurrentIndex(0)\r\n self.modcod_sat_sp_ant_size.setCurrentIndex(0)\r\n\r\n elif item == 'rcp':\r\n type = 'Reception Setup Files (*.rcp)'\r\n if opt != 'clear':\r\n open_dialog(opt, type)\r\n try:\r\n with open('temp\\\\' + opt + '.pkl', 'rb') as f:\r\n folder = pickle.load(f)\r\n # print(folder)\r\n f.close()\r\n except:\r\n folder = ''\r\n\r\n if opt == 'load':\r\n if folder != '':\r\n with open(folder, 'rb') as f:\r\n name_rcp, ant_size, ant_eff, lnb_gain, lnb_temp, aditional_losses, cable_loss, max_depoint = pickle.load(\r\n f)\r\n f.close()\r\n self.name_reception_sp_ant_size.setText(str(name_rcp))\r\n self.ant_eff_reception_sp_ant_size.setText(str(ant_eff))\r\n self.lnb_gain_reception_sp_ant_size.setText(str(lnb_gain))\r\n self.lnb_temp_reception_sp_ant_size.setText(str(lnb_temp))\r\n self.aditional_losses_reception_sp_ant_size.setText(str(aditional_losses))\r\n self.cable_loss_reception_sp_ant_size.setText(str(cable_loss))\r\n 
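# --- editor's note -----------------------------------------------------------
# Every load_save_* handler in this class repeats the same persistence idea:
# gather the widget values into an ordered list, pickle.dump() it on 'save',
# and pickle.load() it back in the same order on 'load'.  Below is a minimal,
# self-contained sketch of that round trip; the file name and field values are
# illustrative placeholders, not SatLink's real data.  (The explicit f.close()
# calls in the handlers are redundant: the with-statement already closes f.)
import os
import pickle

def pickle_roundtrip_sketch():
    fields = ['SatX', -70.0, 35786.0]           # stand-ins for name/long/height
    os.makedirs('temp', exist_ok=True)          # the GUI assumes temp\ exists
    path = os.path.join('temp', 'example.sat')  # hypothetical file name
    with open(path, 'wb') as f:                 # 'save' branch: dump in order
        pickle.dump(fields, f)
    with open(path, 'rb') as f:                 # 'load' branch: unpack in the
        name_sat, long_sat, height_sat = pickle.load(f)  # same order
    return name_sat, long_sat, height_sat
# -----------------------------------------------------------------------------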
self.max_depoint_reception_sp_ant_size.setText(str(max_depoint))\r\n\r\n if os.path.exists('temp\\\\load.pkl'):\r\n os.remove('temp\\\\load.pkl')\r\n\r\n elif opt == 'clear':\r\n self.name_reception_sp_ant_size.clear()\r\n self.ant_eff_reception_sp_ant_size.clear()\r\n self.lnb_gain_reception_sp_ant_size.clear()\r\n self.lnb_temp_reception_sp_ant_size.clear()\r\n self.aditional_losses_reception_sp_ant_size.clear()\r\n self.cable_loss_reception_sp_ant_size.clear()\r\n self.max_depoint_reception_sp_ant_size.clear()\r\n\r\n def load_save_multi_point_ant_size(self, opt, item):\r\n import pickle\r\n import os\r\n from file_dialog import open_dialog\r\n\r\n if item == 'sat':\r\n type = 'Sat Files (*.sat)'\r\n if opt != 'clear':\r\n open_dialog(opt, type)\r\n try:\r\n with open('temp\\\\' + opt + '.pkl', 'rb') as f:\r\n folder = pickle.load(f)\r\n # print(folder)\r\n f.close()\r\n except:\r\n folder = ''\r\n if opt == 'save':\r\n if folder != '':\r\n folder = folder + '.sat'\r\n try:\r\n name_sat = str(self.name_sat_mp_ant_size.text())\r\n long_sat = float(self.long_sat_mp_ant_size.text())\r\n height_sat = float(self.height_sat_mp_ant_size.text())\r\n freq = float(self.freq_sat_mp_ant_size.text())\r\n eirp_sat = float(self.eirp_sat_mp_ant_size.text())\r\n max_bw_sat = float(self.max_bw_sat_mp_ant_size.text())\r\n bw_util = float(self.bw_util_sat_mp_ant_size.text())\r\n rolloff_sat = float(self.rolloff_sat_mp_ant_size.text())\r\n modulation = str(self.modcod_sat_mp_ant_size.currentText())\r\n pol = str(self.pol_sat_mp_ant_size.currentText())\r\n\r\n except:\r\n QtWidgets.QMessageBox.warning(MainWindow, 'WARNING', 'Empty or wrong type in the fields!!!')\r\n return\r\n\r\n with open(folder, 'wb') as f:\r\n pickle.dump([name_sat, long_sat, height_sat, freq, eirp_sat,\r\n max_bw_sat, bw_util, rolloff_sat, modulation, pol], f)\r\n f.close()\r\n\r\n if os.path.exists('temp\\\\save.pkl'):\r\n os.remove('temp\\\\save.pkl')\r\n\r\n elif opt == 'load':\r\n if folder != '':\r\n with open(folder, 'rb') as f:\r\n name_sat, long_sat, height_sat, freq, eirp_sat, max_bw_sat, bw_util, rolloff_sat, modulation, pol = pickle.load(\r\n f)\r\n f.close()\r\n self.name_sat_mp_ant_size.setText(str(name_sat))\r\n self.long_sat_mp_ant_size.setText(str(long_sat))\r\n self.height_sat_mp_ant_size.setText(str(height_sat))\r\n self.freq_sat_mp_ant_size.setText(str(freq))\r\n self.eirp_sat_mp_ant_size.setText(str(eirp_sat))\r\n self.max_bw_sat_mp_ant_size.setText(str(max_bw_sat))\r\n self.bw_util_sat_mp_ant_size.setText(str(bw_util))\r\n self.rolloff_sat_mp_ant_size.setText(str(rolloff_sat))\r\n index = self.pol_sat_mp_ant_size.findText(str(pol), QtCore.Qt.MatchFixedString)\r\n if index >= 0:\r\n self.pol_sat_mp_ant_size.setCurrentIndex(index)\r\n index = self.modcod_sat_mp_ant_size.findText(str(modulation), QtCore.Qt.MatchFixedString)\r\n if index >= 0:\r\n self.modcod_sat_mp_ant_size.setCurrentIndex(index)\r\n\r\n if os.path.exists('temp\\\\load.pkl'):\r\n os.remove('temp\\\\load.pkl')\r\n\r\n elif opt == 'clear':\r\n self.name_sat_mp_ant_size.clear()\r\n self.long_sat_mp_ant_size.clear()\r\n self.height_sat_mp_ant_size.clear()\r\n self.freq_sat_mp_ant_size.clear()\r\n self.eirp_sat_mp_ant_size.clear()\r\n self.max_bw_sat_mp_ant_size.clear()\r\n self.bw_util_sat_mp_ant_size.clear()\r\n self.rolloff_sat_mp_ant_size.clear()\r\n self.pol_sat_mp_ant_size.setCurrentIndex(0)\r\n self.modcod_sat_mp_ant_size.setCurrentIndex(0)\r\n\r\n elif item == 'rcp':\r\n type = 'Reception Setup Files (*.rcp)'\r\n if opt != 'clear':\r\n 
open_dialog(opt, type)\r\n try:\r\n with open('temp\\\\' + opt + '.pkl', 'rb') as f:\r\n folder = pickle.load(f)\r\n # print(folder)\r\n f.close()\r\n except:\r\n folder = ''\r\n\r\n if opt == 'load':\r\n if folder != '':\r\n with open(folder, 'rb') as f:\r\n name_rcp, ant_size, ant_eff, lnb_gain, lnb_temp, aditional_losses, cable_loss, max_depoint = pickle.load(\r\n f)\r\n f.close()\r\n self.name_reception_mp_ant_size.setText(str(name_rcp))\r\n self.ant_eff_reception_mp_ant_size.setText(str(ant_eff))\r\n self.lnb_gain_reception_mp_ant_size.setText(str(lnb_gain))\r\n self.lnb_temp_reception_mp_ant_size.setText(str(lnb_temp))\r\n self.aditional_losses_reception_mp_ant_size.setText(str(aditional_losses))\r\n self.cable_loss_reception_mp_ant_size.setText(str(cable_loss))\r\n self.max_depoint_reception_mp_ant_size.setText(str(max_depoint))\r\n\r\n if os.path.exists('temp\\\\load.pkl'):\r\n os.remove('temp\\\\load.pkl')\r\n\r\n elif opt == 'clear':\r\n self.name_reception_mp_ant_size.clear()\r\n self.ant_eff_reception_mp_ant_size.clear()\r\n self.lnb_gain_reception_mp_ant_size.clear()\r\n self.lnb_temp_reception_mp_ant_size.clear()\r\n self.aditional_losses_reception_mp_ant_size.clear()\r\n self.cable_loss_reception_mp_ant_size.clear()\r\n self.max_depoint_reception_mp_ant_size.clear()\r\n\r\n def load_point_list(self, preview_window, path_box):\r\n from file_dialog import open_dialog\r\n import pandas as pd\r\n import pickle\r\n\r\n opt = 'load'\r\n type = 'Comma-separated values (*.csv)'\r\n open_dialog(opt, type)\r\n\r\n try:\r\n with open('temp\\\\' + opt + '.pkl', 'rb') as f:\r\n folder = pickle.load(f)\r\n # print(folder)\r\n f.close()\r\n except:\r\n folder = ''\r\n\r\n preview_window.setColumnCount(4)\r\n preview_window.setRowCount(10)\r\n\r\n preview_window.setHorizontalHeaderLabels(['Name', 'Lat', 'Long', 'Delta Footprint'])\r\n\r\n # self.preview_mp_perf.setColumnCount(4)\r\n # self.preview_mp_perf.setRowCount(10)\r\n #\r\n # self.preview_mp_perf.setHorizontalHeaderLabels(['Name', 'Lat', 'Long', 'Delta Footprint'])\r\n\r\n if folder != '':\r\n path_box.setText(str(folder))\r\n data = pd.read_csv(folder, sep=';')\r\n # print(folder)\r\n for index, row in data.iterrows():\r\n\r\n preview_window.setItem(index, 0, QtWidgets.QTableWidgetItem(str(row['Name'])))\r\n preview_window.setItem(index, 1, QtWidgets.QTableWidgetItem(str(row['Lat'])))\r\n preview_window.setItem(index, 2, QtWidgets.QTableWidgetItem(str(row['Long'])))\r\n if 'Delta Footprint' in data.columns: # check in case Delta Footprint is not used\r\n preview_window.setItem(index, 3, QtWidgets.QTableWidgetItem(str(row['Delta Footprint'])))\r\n else:\r\n preview_window.setItem(index, 3, QtWidgets.QTableWidgetItem('0'))\r\n if index == 9:\r\n break\r\n\r\n preview_window.setAlternatingRowColors(True)\r\n preview_window.horizontalHeader().setSectionResizeMode(QtWidgets.QHeaderView.Stretch)\r\n\r\n preview_window.setSizeAdjustPolicy(QtWidgets.QAbstractScrollArea.AdjustToContents)\r\n preview_window.setSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)\r\n\r\n if os.path.exists('temp\\\\load.pkl'):\r\n os.remove('temp\\\\load.pkl')\r\n\r\n return\r\n\r\n # def on_combobox_changed(self, x):\r\n # if x == 1:\r\n # modcod = str(self.modcod_sat.currentText())\r\n # elif x == 2:\r\n # modcod = str(self.modcod_sat_sp_perf.currentText())\r\n # elif x == 3:\r\n # modcod = str(self.modcod_sat_mp_perf.currentText())\r\n\r\n def update_label(self, index):\r\n import os\r\n\r\n if os.path.exists('temp\\\\out.txt'):\r\n 
with open(\"temp\\\\out.txt\", \"r\") as myfile:\r\n # data = myfile.read()\r\n\r\n if index == 1:\r\n data = myfile.read()\r\n if len(data) == 0:\r\n data = ' '\r\n self.output_spatm.setText(data)\r\n\r\n if index == 2:\r\n data = myfile.read()\r\n if len(data) == 0:\r\n data = ' '\r\n self.output_sp_perf.setText(data)\r\n\r\n if index == 3 or index == 4:\r\n data = myfile.readlines()\r\n if len(data) != 0:\r\n del data[:-1]\r\n else:\r\n data = ' '\r\n self.output_mp_perf.setText(data[0])\r\n self.output_mp_ant_size.setText(data[0])\r\n # self.output_sp_perf_2.moveCursor(QtGui.QTextCursor.End)\r\n else:\r\n return\r\n\r\n def change_page(self, x, page_name):\r\n self.stackedWidget.setCurrentIndex(x)\r\n MainWindow.setWindowTitle(page_name)\r\n\r\n def single_point_atm_atnn_calc(self):\r\n from atm_atnn import calc_atm_atnn\r\n\r\n try:\r\n p = float(self.p_year_spatm.text())\r\n site_lat = float(self.lat_ground_station_spatm.text())\r\n site_long = float(self.long_ground_station_spatm.text())\r\n ant_size = float(self.ant_size_reception_spatm.text())\r\n ant_eff = float(self.ant_eff_reception_spatm.text())\r\n sat_long = float(self.long_sat_spatm.text())\r\n freq = float(self.freq_sat_spatm.text())\r\n method = str(self.method_spatm.currentText())\r\n\r\n import pickle\r\n\r\n with open('temp\\\\args.pkl', 'wb') as f:\r\n pickle.dump([p, site_lat, site_long, ant_size, ant_eff, sat_long, freq, method], f)\r\n f.close()\r\n self.threadpool = QtCore.QThreadPool()\r\n self.threadpool.start(calc_atm_atnn)\r\n self.threadpool.waitForDone()\r\n self.update_label(1)\r\n return\r\n\r\n except:\r\n self.output_spatm.setText('Please fill all the fields before running!!!')\r\n\r\n def single_point_calc_perf(self):\r\n from link_performance import sp_link_performance\r\n import pickle\r\n try:\r\n # ground station parameters\r\n site_lat = float(self.lat_ground_station_sp_perf.text())\r\n site_long = float(self.long_ground_station_sp_perf.text())\r\n\r\n # #satellite parameters\r\n sat_long = float(self.long_sat_sp_perf.text())\r\n freq = float(self.freq_sat_sp_perf.text())\r\n sat_height = float(self.height_sat_sp_perf.text())\r\n max_eirp = float(self.eirp_sat_sp_perf.text())\r\n max_bw = float(self.max_bw_sat_sp_perf.text())\r\n bw_util = float(self.bw_util_sat_sp_perf.text())\r\n roll_off = float(self.rolloff_sat_sp_perf.text())\r\n modcod = str(self.modcod_sat_sp_perf.currentText())\r\n pol = str(self.pol_sat_sp_perf.currentText())\r\n\r\n # #reception parameters\r\n\r\n ant_size = float(self.ant_size_reception_sp_perf.text())\r\n ant_eff = float(self.ant_eff_reception_sp_perf.text())\r\n lnb_gain = float(self.lnb_gain_reception_sp_perf.text())\r\n lnb_temp = float(self.lnb_temp_reception_sp_perf.text())\r\n aditional_losses = float(self.aditional_losses_reception_sp_perf.text())\r\n cable_loss = float(self.cable_loss_reception_sp_perf.text())\r\n max_depoint = float(self.max_depoint_reception_sp_perf.text())\r\n\r\n # #calculation parameters\r\n snr_relaxation = float(self.relaxation_sp_perf.text())\r\n margin = float(self.margin_sp_perf.text())\r\n\r\n with open('temp\\\\args.pkl', 'wb') as f:\r\n pickle.dump(\r\n [site_lat, site_long, sat_long, freq, max_eirp, sat_height, max_bw, bw_util,\r\n modcod, pol, roll_off, ant_size, ant_eff, lnb_gain, lnb_temp, aditional_losses,\r\n cable_loss, max_depoint, snr_relaxation, margin], f)\r\n f.close()\r\n except:\r\n self.output_sp_perf.setText('Please fill all the fields before running!!!')\r\n return\r\n\r\n self.output_sp_perf.setText('Starting 
....')\r\n\r\n        self.threadpool = QtCore.QThreadPool()\r\n        self.threadpool.start(sp_link_performance)\r\n        self.threadpool.waitForDone()\r\n        self.update_label(2)\r\n        return\r\n\r\n    def multi_point_calc_perf(self):\r\n        from link_performance import mp_link_performance\r\n        import pickle\r\n        try:\r\n\r\n            # ground station points path\r\n            gr_station_path = str(self.path_mp_perf.text())\r\n\r\n            # #satellite parameters\r\n            sat_long = float(self.long_sat_mp_perf.text())\r\n            freq = float(self.freq_sat_mp_perf.text())\r\n            sat_height = float(self.height_sat_mp_perf.text())\r\n            max_eirp = float(self.eirp_sat_mp_perf.text())\r\n            max_bw = float(self.max_bw_sat_mp_perf.text())\r\n            bw_util = float(self.bw_util_sat_mp_perf.text())\r\n            roll_off = float(self.rolloff_sat_mp_perf.text())\r\n            modcod = str(self.modcod_sat_mp_perf.currentText())\r\n            pol = str(self.pol_sat_mp_perf.currentText())\r\n\r\n            # #reception parameters\r\n\r\n            ant_size = float(self.ant_size_reception_mp_perf.text())\r\n            ant_eff = float(self.ant_eff_reception_mp_perf.text())\r\n            lnb_gain = float(self.lnb_gain_reception_mp_perf.text())\r\n            lnb_temp = float(self.lnb_temp_reception_mp_perf.text())\r\n            aditional_losses = float(self.aditional_losses_reception_mp_perf.text())\r\n            cable_loss = float(self.cable_loss_reception_mp_perf.text())\r\n            max_depoint = float(self.max_depoint_reception_mp_perf.text())\r\n\r\n            # #calculation parameters\r\n            snr_relaxation = float(self.relaxation_mp_perf.text())\r\n            margin = float(self.margin_mp_perf.text())\r\n            threads = int(self.n_threads_mp_perf.currentText())\r\n\r\n            with open('temp\\\\args.pkl', 'wb') as f:\r\n                pickle.dump(\r\n                    [gr_station_path, sat_long, freq, max_eirp, sat_height, max_bw, bw_util,\r\n                     modcod, pol, roll_off, ant_size, ant_eff, lnb_gain, lnb_temp, aditional_losses,\r\n                     cable_loss, max_depoint, snr_relaxation, margin, threads], f)\r\n                f.close()\r\n        except:\r\n            self.output_mp_perf.setText('Please fill all the fields before running!!!')\r\n            return\r\n\r\n        self.threadpool = QtCore.QThreadPool()\r\n        self.threadpool.start(mp_link_performance)\r\n\r\n        # self.threadtimer = QtCore.QTimer()\r\n        # self.threadtimer.timeout.connect(lambda x=self.threadpool.activeThreadCount(): self.kill_threadpoll(x))\r\n        # self.threadtimer.start(3000) # milliseconds\r\n\r\n        # while self.threadpool.activeThreadCount() != 0:\r\n        #     print(self.threadpool.activeThreadCount())\r\n        # self.threadpool.waitForDone()\r\n        # self._update_timer.stop()\r\n        return\r\n\r\n    def single_point_ant_size(self):\r\n        from antenna_size import sp_ant_size\r\n        import matplotlib.pyplot as plt\r\n        import numpy as np\r\n        import pickle\r\n\r\n        try:\r\n            # ground station parameters\r\n            site_lat = float(self.lat_ground_station_sp_ant_size.text())\r\n            site_long = float(self.long_ground_station_sp_ant_size.text())\r\n\r\n            # #satellite parameters\r\n            sat_long = float(self.long_sat_sp_ant_size.text())\r\n            freq = float(self.freq_sat_sp_ant_size.text())\r\n            sat_height = float(self.height_sat_sp_ant_size.text())\r\n            max_eirp = float(self.eirp_sat_sp_ant_size.text())\r\n            max_bw = float(self.max_bw_sat_sp_ant_size.text())\r\n            bw_util = float(self.bw_util_sat_sp_ant_size.text())\r\n            roll_off = float(self.rolloff_sat_sp_ant_size.text())\r\n            modcod = str(self.modcod_sat_sp_ant_size.currentText())\r\n            pol = str(self.pol_sat_sp_ant_size.currentText())\r\n\r\n            # #reception parameters\r\n            ant_eff = float(self.ant_eff_reception_sp_ant_size.text())\r\n            lnb_gain = float(self.lnb_gain_reception_sp_ant_size.text())\r\n            lnb_temp = float(self.lnb_temp_reception_sp_ant_size.text())\r\n            aditional_losses = float(self.aditional_losses_reception_sp_ant_size.text())\r\n            cable_loss = float(self.cable_loss_reception_sp_ant_size.text())\r\n            max_depoint = float(self.max_depoint_reception_sp_ant_size.text())\r\n\r\n            # #calculation parameters\r\n            max_ant_size = float(self.ant_max_size_sp_ant_size.text())\r\n            min_ant_size = float(self.ant_min_size_sp_ant_size.text())\r\n            margin = float(self.margin_sp_ant_size.text())\r\n\r\n            with open('temp\\\\args.pkl', 'wb') as f:\r\n                pickle.dump(\r\n                    [site_lat, site_long, sat_long, freq, max_eirp, sat_height, max_bw, bw_util,\r\n                     modcod, pol, roll_off, ant_eff, lnb_gain, lnb_temp, aditional_losses,\r\n                     cable_loss, max_depoint, max_ant_size, min_ant_size, margin, self.cores], f)\r\n                f.close()\r\n        except:\r\n            self.output_sp_ant_size.setText('Please fill all the fields before running!!!')\r\n            return\r\n\r\n        self.output_sp_ant_size.setText('Running...')\r\n        self.threadpool = QtCore.QThreadPool()\r\n        self.threadpool.start(sp_ant_size)\r\n        self.threadpool.waitForDone()\r\n\r\n        with open('temp\\\\args.pkl', 'rb') as f:\r\n            (ant_size_vector, ant_size_vector_interp, availability_vector, availability_vector_interp) = pickle.load(f)\r\n            f.close()\r\n\r\n        output = 'RESULTS \\n' \\\r\n                 'Antenna size (m)\\tAvailability(%)\\n'\r\n\r\n        for ant_size, availability in zip(ant_size_vector, availability_vector):\r\n            output += (str(round(ant_size, 1)) + '\\t\\t' + str(availability) + '\\n')\r\n\r\n        self.output_sp_ant_size.setText(output)\r\n\r\n        # plotting the availability / unavailability time graphs\r\n        fig, (ax1, ax2) = plt.subplots(2)\r\n        fig.suptitle('Antenna Size x Year availability')\r\n        # fig.canvas.set_window_title(self.name_ground_station_sp_ant_size.text())\r\n        fig.canvas.manager.set_window_title(self.name_ground_station_sp_ant_size.text())\r\n        unavailability_time_vector = (1 - np.array(availability_vector) / 100) * 8760\r\n        ax1.plot(ant_size_vector, availability_vector)\r\n        ax2.plot(ant_size_vector, unavailability_time_vector)\r\n        plt.setp(ax1, ylabel='Availability (%year)')\r\n        plt.setp(ax2, xlabel='Antenna size')\r\n        plt.setp(ax2, ylabel='Unavailability (hours/year)')\r\n        fig.savefig('temp\\\\fig.png')\r\n\r\n        plt.show()\r\n\r\n        return\r\n\r\n    def multi_point_ant_size(self):\r\n        from antenna_size import mp_ant_size\r\n        import pickle\r\n\r\n        try:\r\n\r\n            # ground station points path\r\n            gr_station_path = str(self.path_mp_ant_size.text())\r\n\r\n            # #satellite parameters\r\n            sat_long = float(self.long_sat_mp_ant_size.text())\r\n            freq = float(self.freq_sat_mp_ant_size.text())\r\n            sat_height = float(self.height_sat_mp_ant_size.text())\r\n            max_eirp = float(self.eirp_sat_mp_ant_size.text())\r\n            max_bw = float(self.max_bw_sat_mp_ant_size.text())\r\n            bw_util = float(self.bw_util_sat_mp_ant_size.text())\r\n            roll_off = float(self.rolloff_sat_mp_ant_size.text())\r\n            modcod = str(self.modcod_sat_mp_ant_size.currentText())\r\n            pol = str(self.pol_sat_mp_ant_size.currentText())\r\n\r\n            # #reception parameters\r\n\r\n            ant_eff = float(self.ant_eff_reception_mp_ant_size.text())\r\n            lnb_gain = float(self.lnb_gain_reception_mp_ant_size.text())\r\n            lnb_temp = float(self.lnb_temp_reception_mp_ant_size.text())\r\n            aditional_losses = float(self.aditional_losses_reception_mp_ant_size.text())\r\n            cable_loss = float(self.cable_loss_reception_mp_ant_size.text())\r\n            max_depoint = float(self.max_depoint_reception_mp_ant_size.text())\r\n\r\n            # #calculation parameters\r\n            
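# --- editor's note -----------------------------------------------------------
# The *_calc_* and *_ant_size handlers all hand work to the background the
# same way: pickle the argument list to temp\args.pkl, start the worker on a
# QThreadPool, and (on the single-point paths) block with waitForDone() before
# reading the results back.  A minimal sketch of that hand-off is below; it
# assumes `worker` is something QThreadPool.start() accepts (a QRunnable, or a
# plain callable on Qt >= 5.15) that reads args_path itself.  The names here
# are illustrative, not SatLink's API.
import pickle

def run_worker_sketch(threadpool, worker, args, args_path='temp\\args.pkl'):
    with open(args_path, 'wb') as f:  # hand the arguments to the worker
        pickle.dump(args, f)
    threadpool.start(worker)          # run off the GUI thread
    threadpool.waitForDone()          # single-point paths block until done
# -----------------------------------------------------------------------------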
availability_target = float(self.availability_target_mp_ant_size.text())\r\n snr_relaxation = float(self.relaxation_mp_ant_size.text())\r\n margin = float(self.margin_mp_ant_size.text())\r\n threads = int(self.n_threads_mp_ant_size.currentText())\r\n\r\n with open('temp\\\\args.pkl', 'wb') as f:\r\n pickle.dump(\r\n [gr_station_path, sat_long, freq, max_eirp, sat_height, max_bw, bw_util,\r\n modcod, pol, roll_off, ant_eff, lnb_gain, lnb_temp, aditional_losses,\r\n cable_loss, max_depoint, availability_target, snr_relaxation, margin, threads], f)\r\n f.close()\r\n except:\r\n self.output_mp_ant_size.setText('Please fill all the fields before running!!!')\r\n return\r\n\r\n self.threadpool = QtCore.QThreadPool()\r\n self.threadpool.start(mp_ant_size)\r\n return\r\n\r\n def export_graph(self):\r\n import pickle\r\n from file_dialog import open_dialog\r\n from PIL import Image\r\n # open a file dialog and make it saves an png figure\r\n type = 'Portable Graphics Format(PNG) (*.png)'\r\n opt = 'save'\r\n open_dialog(opt, type)\r\n try:\r\n with open('temp\\\\' + opt + '.pkl', 'rb') as f:\r\n folder = pickle.load(f)\r\n f.close()\r\n except:\r\n folder = ''\r\n\r\n if folder != '':\r\n img = Image.open('temp\\\\fig.png')\r\n print(folder)\r\n # display(img)\r\n img.save(folder + '.png')\r\n\r\n def retranslateUi(self, MainWindow):\r\n _translate = QtCore.QCoreApplication.translate\r\n MainWindow.setWindowTitle(_translate(\"MainWindow\", \"SatLink\"))\r\n self.groupBox_9.setTitle(_translate(\"MainWindow\", \"Satellite\"))\r\n self.label_31.setText(_translate(\"MainWindow\", \"Name\"))\r\n self.label_54.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n self.save_sat.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_sat.setText(_translate(\"MainWindow\", \"Load\"))\r\n # self.default_sat.setItemText(1, _translate(\"MainWindow\", \"StarOne C2/C4\"))\r\n # self.default_sat.setItemText(2, _translate(\"MainWindow\", \"Hispamar\"))\r\n # self.default_sat.setItemText(3, _translate(\"MainWindow\", \"Claro\"))\r\n # self.default_sat.setItemText(4, _translate(\"MainWindow\", \"Menino Ney\"))\r\n self.label_55.setText(_translate(\"MainWindow\", \"Default Satellites\"))\r\n self.label_56.setText(_translate(\"MainWindow\", \"Altitude (km)\"))\r\n self.label_59.setText(_translate(\"MainWindow\", \"E.I.R.P. 
(dBW)\"))\r\n self.label_60.setText(_translate(\"MainWindow\", \"Transpoder\\'s max.\\n\"\r\n\"bandwidith (MHz)\"))\r\n self.label_61.setText(_translate(\"MainWindow\", \"Effective \\n\"\r\n\"bandwidith (MHz)\"))\r\n self.label_64.setText(_translate(\"MainWindow\", \"Roll-off\"))\r\n self.label_65.setText(_translate(\"MainWindow\", \"Modulation\"))\r\n self.label_66.setText(_translate(\"MainWindow\", \"Frequency (GHz)\"))\r\n self.label_7.setText(_translate(\"MainWindow\", \"Polarization\"))\r\n self.pol_sat.setItemText(0, _translate(\"MainWindow\", \"horizontal\"))\r\n self.pol_sat.setItemText(1, _translate(\"MainWindow\", \"vertical\"))\r\n self.pol_sat.setItemText(2, _translate(\"MainWindow\", \"circular\"))\r\n self.clear_sat.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.groupBox.setTitle(_translate(\"MainWindow\", \"Ground Station\"))\r\n self.label.setText(_translate(\"MainWindow\", \"Name\"))\r\n self.label_2.setText(_translate(\"MainWindow\", \"Latitude (degrees)\"))\r\n self.label_3.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n self.save_ground_station_gdstation.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_ground_station_gdstation.setText(_translate(\"MainWindow\", \"Load\"))\r\n self.clear_ground_station_gdstation.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.groupBox_3.setTitle(_translate(\"MainWindow\", \"Reception Characteristics\"))\r\n self.label_19.setText(_translate(\"MainWindow\", \"Name:\"))\r\n self.label_20.setText(_translate(\"MainWindow\", \"Antenna size (m)\"))\r\n self.label_21.setText(_translate(\"MainWindow\", \"LNB gain (dB)\"))\r\n self.save_reception_rcp.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_reception_rcp.setText(_translate(\"MainWindow\", \"Load\"))\r\n self.label_23.setText(_translate(\"MainWindow\", \"Antenna efficiency (%)\"))\r\n self.label_24.setText(_translate(\"MainWindow\", \"LNB noise temp. (K)\"))\r\n self.label_25.setText(_translate(\"MainWindow\", \"Additional losses (dB)\"))\r\n self.label_26.setText(_translate(\"MainWindow\", \"Maximum \\n\"\r\n\"depointing (degrees)\"))\r\n self.clear_reception_rcp.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.label_62.setText(_translate(\"MainWindow\", \"Cable loss (dB)\"))\r\n self.groupBox_12.setTitle(_translate(\"MainWindow\", \"Satellite\"))\r\n self.label_38.setText(_translate(\"MainWindow\", \"Name\"))\r\n self.label_69.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n self.save_sat_2.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_sat_2.setText(_translate(\"MainWindow\", \"Load\"))\r\n # self.default_sat_2.setItemText(1, _translate(\"MainWindow\", \"StarOne C2/C4\"))\r\n # self.default_sat_2.setItemText(2, _translate(\"MainWindow\", \"Hispamar\"))\r\n # self.default_sat_2.setItemText(3, _translate(\"MainWindow\", \"Claro\"))\r\n # self.default_sat_2.setItemText(4, _translate(\"MainWindow\", \"Menino Ney\"))\r\n self.label_70.setText(_translate(\"MainWindow\", \"Default Satellites\"))\r\n self.label_78.setText(_translate(\"MainWindow\", \"Altitude (km)\"))\r\n self.label_79.setText(_translate(\"MainWindow\", \"E.I.R.P. 
(dBW)\"))\r\n self.label_91.setText(_translate(\"MainWindow\", \"Transpoder\\'s max.\\n\"\r\n\"bandwidith (MHz)\"))\r\n self.label_92.setText(_translate(\"MainWindow\", \"Effective \\n\"\r\n\"bandwidith (MHz)\"))\r\n self.label_93.setText(_translate(\"MainWindow\", \"Roll-off\"))\r\n self.label_94.setText(_translate(\"MainWindow\", \"Modulation\"))\r\n self.modcod_sat_2.setItemText(0, _translate(\"MainWindow\", \"QPSK 2/3\"))\r\n self.modcod_sat_2.setItemText(1, _translate(\"MainWindow\", \"8PSK 30/33\"))\r\n self.modcod_sat_2.setItemText(2, _translate(\"MainWindow\", \"64 QAM\"))\r\n self.label_95.setText(_translate(\"MainWindow\", \"Frequency (GHz)\"))\r\n self.label_17.setText(_translate(\"MainWindow\", \"Polarization\"))\r\n self.pol_sat_3.setItemText(0, _translate(\"MainWindow\", \"horizontal\"))\r\n self.pol_sat_3.setItemText(1, _translate(\"MainWindow\", \"vertical\"))\r\n self.pol_sat_3.setItemText(2, _translate(\"MainWindow\", \"circular\"))\r\n self.clear_sat_2.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.groupBox_2.setTitle(_translate(\"MainWindow\", \"Ground Station\"))\r\n self.label_18.setText(_translate(\"MainWindow\", \"Name\"))\r\n self.label_22.setText(_translate(\"MainWindow\", \"Latitude (degrees)\"))\r\n self.label_39.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n self.save_ground_station_gdstation_2.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_ground_station_gdstation_2.setText(_translate(\"MainWindow\", \"Load\"))\r\n self.clear_ground_station_gdstation_2.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.groupBox_13.setTitle(_translate(\"MainWindow\", \"Reception Characteristics\"))\r\n self.label_96.setText(_translate(\"MainWindow\", \"Name:\"))\r\n self.label_97.setText(_translate(\"MainWindow\", \"Antenna size (m)\"))\r\n self.label_98.setText(_translate(\"MainWindow\", \"LNB gain (dB)\"))\r\n self.save_reception_rcp_2.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_reception_rcp_5.setText(_translate(\"MainWindow\", \"Load\"))\r\n self.label_99.setText(_translate(\"MainWindow\", \"Antenna efficiency (%)\"))\r\n self.label_100.setText(_translate(\"MainWindow\", \"LNB noise temp. 
(K)\"))\r\n self.label_101.setText(_translate(\"MainWindow\", \"Additional losses (dB)\"))\r\n self.label_102.setText(_translate(\"MainWindow\", \"Maximum \\n\"\r\n\"depointing (degrees)\"))\r\n self.clear_reception_rcp_3.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.label_103.setText(_translate(\"MainWindow\", \"Cable loss (dB)\"))\r\n self.groupBox_14.setTitle(_translate(\"MainWindow\", \"Reception\"))\r\n self.label_104.setText(_translate(\"MainWindow\", \"Latitude (degrees)\"))\r\n self.label_105.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n self.label_106.setText(_translate(\"MainWindow\", \"Antenna size (m)\"))\r\n self.label_107.setText(_translate(\"MainWindow\", \"Antenna efficiency\"))\r\n self.clear_reception_rcp_4.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.load_reception_rcp_6.setText(_translate(\"MainWindow\", \"Load Ground Station\"))\r\n self.load_reception_rcp_7.setText(_translate(\"MainWindow\", \"Load Reception\"))\r\n self.calc_spatm_2.setText(_translate(\"MainWindow\", \"Calculate\"))\r\n self.groupBox_15.setTitle(_translate(\"MainWindow\", \"Satellite\"))\r\n self.label_108.setText(_translate(\"MainWindow\", \"Frequency (GHz)\"))\r\n self.label_109.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n # self.default_sat_sp_perf_4.setItemText(0, _translate(\"MainWindow\", \"StarOne C2/C4\"))\r\n # self.default_sat_sp_perf_4.setItemText(1, _translate(\"MainWindow\", \"Hispamar\"))\r\n # self.default_sat_sp_perf_4.setItemText(2, _translate(\"MainWindow\", \"Claro\"))\r\n # self.default_sat_sp_perf_4.setItemText(3, _translate(\"MainWindow\", \"Menino Ney\"))\r\n self.label_110.setText(_translate(\"MainWindow\", \"Default Satellites\"))\r\n self.pol_sat_4.setItemText(0, _translate(\"MainWindow\", \"horizontal\"))\r\n self.pol_sat_4.setItemText(1, _translate(\"MainWindow\", \"vertical\"))\r\n self.pol_sat_4.setItemText(2, _translate(\"MainWindow\", \"circular\"))\r\n self.label_111.setText(_translate(\"MainWindow\", \"Polarization\"))\r\n self.load_reception_rcp_8.setText(_translate(\"MainWindow\", \"Load Satellite\"))\r\n self.p_year_spatm_2.setStatusTip(_translate(\"MainWindow\", \"Values between 0.001 and 0.5\"))\r\n self.p_year_spatm_2.setText(_translate(\"MainWindow\", \"0.001\"))\r\n self.label_112.setText(_translate(\"MainWindow\", \"Excess % of time per year\"))\r\n self.label_113.setText(_translate(\"MainWindow\", \"Method\"))\r\n self.method_spatm_2.setStatusTip(_translate(\"MainWindow\", \"Mode calculation for gaseous attenuation\"))\r\n self.method_spatm_2.setItemText(0, _translate(\"MainWindow\", \"approx\"))\r\n self.method_spatm_2.setItemText(1, _translate(\"MainWindow\", \"exact\"))\r\n self.groupBox_16.setTitle(_translate(\"MainWindow\", \"Ground Station\"))\r\n self.label_114.setText(_translate(\"MainWindow\", \"Name\"))\r\n self.label_115.setText(_translate(\"MainWindow\", \"Latitude (degrees)\"))\r\n self.label_116.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n self.save_ground_station_sp_ant_size.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_ground_station_sp_ant_size.setText(_translate(\"MainWindow\", \"Load\"))\r\n self.clear_ground_station_sp_ant_size.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.groupBox_17.setTitle(_translate(\"MainWindow\", \"Satellite\"))\r\n self.label_117.setText(_translate(\"MainWindow\", \"Name\"))\r\n self.label_118.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n 
self.save_sat_sp_ant_size.setText(_translate(\"MainWindow\", \"Save\"))\r\n        self.load_sat_sp_ant_size.setText(_translate(\"MainWindow\", \"Load\"))\r\n        # self.default_sat_sp_ant_size.setItemText(0, _translate(\"MainWindow\", \"StarOne C2/C4\"))\r\n        # self.default_sat_sp_ant_size.setItemText(1, _translate(\"MainWindow\", \"Hispamar\"))\r\n        # self.default_sat_sp_ant_size.setItemText(2, _translate(\"MainWindow\", \"Claro\"))\r\n        # self.default_sat_sp_ant_size.setItemText(3, _translate(\"MainWindow\", \"Menino Ney\"))\r\n        self.label_119.setText(_translate(\"MainWindow\", \"Default Satellites\"))\r\n        self.label_120.setText(_translate(\"MainWindow\", \"Altitude (km)\"))\r\n        self.label_121.setText(_translate(\"MainWindow\", \"E.I.R.P. (dBW)\"))\r\n        self.label_122.setText(_translate(\"MainWindow\", \"Transponder\\'s max.\\n\"\r\n\"bandwidth (MHz)\"))\r\n        self.label_123.setText(_translate(\"MainWindow\", \"Effective \\n\"\r\n\"bandwidth (MHz)\"))\r\n        self.label_124.setText(_translate(\"MainWindow\", \"Roll-off\"))\r\n        self.label_125.setText(_translate(\"MainWindow\", \"Modulation\"))\r\n        self.label_126.setText(_translate(\"MainWindow\", \"Frequency (GHz)\"))\r\n        self.label_127.setText(_translate(\"MainWindow\", \"Polarization\"))\r\n        self.pol_sat_sp_ant_size.setItemText(0, _translate(\"MainWindow\", \"horizontal\"))\r\n        self.pol_sat_sp_ant_size.setItemText(1, _translate(\"MainWindow\", \"vertical\"))\r\n        self.pol_sat_sp_ant_size.setItemText(2, _translate(\"MainWindow\", \"circular\"))\r\n        self.clear_sat_sp_ant_size.setText(_translate(\"MainWindow\", \"Clear\"))\r\n        self.groupBox_18.setTitle(_translate(\"MainWindow\", \"Reception Characteristics\"))\r\n        self.label_128.setText(_translate(\"MainWindow\", \"Name:\"))\r\n        self.label_130.setText(_translate(\"MainWindow\", \"LNB gain (dB)\"))\r\n        self.save_reception_sp_ant_size.setText(_translate(\"MainWindow\", \"Save\"))\r\n        self.load_reception_sp_ant_size.setText(_translate(\"MainWindow\", \"Load\"))\r\n        self.label_131.setText(_translate(\"MainWindow\", \"Antenna efficiency\"))\r\n        self.label_132.setText(_translate(\"MainWindow\", \"LNB noise temp. (K)\"))\r\n        self.label_133.setText(_translate(\"MainWindow\", \"Additional losses (dB)\"))\r\n        self.label_134.setText(_translate(\"MainWindow\", \"Maximum \\n\"\r\n\"depointing (degrees)\"))\r\n        self.clear_reception_sp_ant_size.setText(_translate(\"MainWindow\", \"Clear\"))\r\n        self.label_135.setText(_translate(\"MainWindow\", \"Cable loss (dB)\"))\r\n        self.calc_sp_ant_size.setText(_translate(\"MainWindow\", \"Calculate\"))\r\n        self.margin_sp_ant_size.setStatusTip(_translate(\"MainWindow\", \"Goal (+ -) margin to achieve the required SNR\"))\r\n        self.margin_sp_ant_size.setText(_translate(\"MainWindow\", \"0\"))\r\n        self.label_174.setText(_translate(\"MainWindow\", \"Margin (dB)\"))\r\n        self.groupBox_41.setTitle(_translate(\"MainWindow\", \"Simulation inputs and results\"))\r\n        self.ant_max_size_sp_ant_size.setStatusTip(_translate(\"MainWindow\", \"Maximum antenna size to be considered in calculations\"))\r\n        self.ant_max_size_sp_ant_size.setText(_translate(\"MainWindow\", \"6\"))\r\n        self.label_323.setText(_translate(\"MainWindow\", \"Ant. min size (m)\"))\r\n        self.ant_min_size_sp_ant_size.setStatusTip(_translate(\"MainWindow\", \"Minimum antenna size to be considered in calculations\"))\r\n        self.ant_min_size_sp_ant_size.setText(_translate(\"MainWindow\", \"0.4\"))\r\n        self.label_313.setText(_translate(\"MainWindow\", \"Ant. max size (m)\"))\r\n        self.label_136.setText(_translate(\"MainWindow\", \"*availability values lower than 60% will be disregarded\"))\r\n        self.export_result_sp_ant_size.setText(_translate(\"MainWindow\", \"Export graph\"))\r\n        self.browse_path_mp_perf_2.setStatusTip(_translate(\"MainWindow\", \"Browse list\"))\r\n        self.browse_path_mp_perf_2.setText(_translate(\"MainWindow\", \"Browse\"))\r\n        self.label_137.setText(_translate(\"MainWindow\", \"Path\"))\r\n        self.label_138.setText(_translate(\"MainWindow\", \"List Preview\"))\r\n        self.groupBox_19.setTitle(_translate(\"MainWindow\", \"Satellite\"))\r\n        self.label_139.setText(_translate(\"MainWindow\", \"Name\"))\r\n        self.label_140.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n        self.save_sat_mp_perf_2.setText(_translate(\"MainWindow\", \"Save\"))\r\n        self.load_sat_mp_perf_2.setText(_translate(\"MainWindow\", \"Load\"))\r\n        # self.default_sat_mp_perf_2.setItemText(0, _translate(\"MainWindow\", \"StarOne C2/C4\"))\r\n        # self.default_sat_mp_perf_2.setItemText(1, _translate(\"MainWindow\", \"Hispamar\"))\r\n        # self.default_sat_mp_perf_2.setItemText(2, _translate(\"MainWindow\", \"Claro\"))\r\n        # self.default_sat_mp_perf_2.setItemText(3, _translate(\"MainWindow\", \"Menino Ney\"))\r\n        self.label_141.setText(_translate(\"MainWindow\", \"Default Satellites\"))\r\n        self.label_142.setText(_translate(\"MainWindow\", \"Altitude (km)\"))\r\n        self.label_143.setText(_translate(\"MainWindow\", \"E.I.R.P. (dBW)\"))\r\n        self.label_144.setText(_translate(\"MainWindow\", \"Transponder\\'s max.\\n\"\r\n\"bandwidth (MHz)\"))\r\n        self.label_145.setText(_translate(\"MainWindow\", \"Effective \\n\"\r\n\"bandwidth (MHz)\"))\r\n        self.label_146.setText(_translate(\"MainWindow\", \"Roll-off\"))\r\n        self.label_147.setText(_translate(\"MainWindow\", \"Modulation\"))\r\n        self.modcod_sat_mp_perf_2.setItemText(0, _translate(\"MainWindow\", \"QPSK 2/3\"))\r\n        self.modcod_sat_mp_perf_2.setItemText(1, _translate(\"MainWindow\", \"8PSK 30/33\"))\r\n        self.modcod_sat_mp_perf_2.setItemText(2, _translate(\"MainWindow\", \"64 QAM\"))\r\n        self.label_148.setText(_translate(\"MainWindow\", \"Frequency (GHz)\"))\r\n        self.label_149.setText(_translate(\"MainWindow\", \"Polarization\"))\r\n        self.pol_sat_mp_perf_2.setItemText(0, _translate(\"MainWindow\", \"horizontal\"))\r\n        self.pol_sat_mp_perf_2.setItemText(1, _translate(\"MainWindow\", \"vertical\"))\r\n        self.pol_sat_mp_perf_2.setItemText(2, _translate(\"MainWindow\", \"circular\"))\r\n        self.clear_satellite_mp_perf_2.setText(_translate(\"MainWindow\", \"Clear\"))\r\n        self.groupBox_20.setTitle(_translate(\"MainWindow\", \"Reception Characteristics\"))\r\n        self.label_150.setText(_translate(\"MainWindow\", \"Name:\"))\r\n        self.label_151.setText(_translate(\"MainWindow\", \"Antenna size (m)\"))\r\n        self.label_152.setText(_translate(\"MainWindow\", \"LNB gain (dB)\"))\r\n        self.save_reception_mp_perf_2.setText(_translate(\"MainWindow\", \"Save\"))\r\n        self.load_reception_mp_perf_2.setText(_translate(\"MainWindow\", \"Load\"))\r\n        self.label_153.setText(_translate(\"MainWindow\", \"Antenna efficiency\"))\r\n        self.label_154.setText(_translate(\"MainWindow\", \"LNB noise temp. 
(K)\"))\r\n self.label_155.setText(_translate(\"MainWindow\", \"Additional losses (dB)\"))\r\n self.label_156.setText(_translate(\"MainWindow\", \"Maximum \\n\"\r\n\"depointing (degrees)\"))\r\n self.clear_reception_mp_perf_2.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.label_157.setText(_translate(\"MainWindow\", \"Cable loss (dB)\"))\r\n self.relaxation_mp_perf_2.setStatusTip(_translate(\"MainWindow\", \"Goal (+ -) margin to achieve the required SNR\"))\r\n self.relaxation_mp_perf_2.setText(_translate(\"MainWindow\", \"0.2\"))\r\n self.calc_mp_perf_2.setText(_translate(\"MainWindow\", \"Calculate\"))\r\n self.label_158.setText(_translate(\"MainWindow\", \"SNR goal relaxation (dB)\"))\r\n self.label_159.setText(_translate(\"MainWindow\", \"Threads\"))\r\n self.n_threads_2.setStatusTip(_translate(\"MainWindow\", \"Thread number used in calcullations\"))\r\n self.label_160.setText(_translate(\"MainWindow\", \"Margin (dB)\"))\r\n self.margin_mp_perf_2.setStatusTip(_translate(\"MainWindow\", \"Effective link budget margin\"))\r\n self.margin_mp_perf_2.setText(_translate(\"MainWindow\", \"0\"))\r\n self.groupBox_21.setTitle(_translate(\"MainWindow\", \"Satellite\"))\r\n self.label_161.setText(_translate(\"MainWindow\", \"Name\"))\r\n self.label_162.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n self.save_sat_3.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_sat_3.setText(_translate(\"MainWindow\", \"Load\"))\r\n # self.default_sat_3.setItemText(1, _translate(\"MainWindow\", \"StarOne C2/C4\"))\r\n # self.default_sat_3.setItemText(2, _translate(\"MainWindow\", \"Hispamar\"))\r\n # self.default_sat_3.setItemText(3, _translate(\"MainWindow\", \"Claro\"))\r\n # self.default_sat_3.setItemText(4, _translate(\"MainWindow\", \"Menino Ney\"))\r\n self.label_163.setText(_translate(\"MainWindow\", \"Default Satellites\"))\r\n self.label_164.setText(_translate(\"MainWindow\", \"Altitude (km)\"))\r\n self.label_165.setText(_translate(\"MainWindow\", \"E.I.R.P. 
(dBW)\"))\r\n self.label_166.setText(_translate(\"MainWindow\", \"Transpoder\\'s max.\\n\"\r\n\"bandwidith (MHz)\"))\r\n self.label_167.setText(_translate(\"MainWindow\", \"Effective \\n\"\r\n\"bandwidith (MHz)\"))\r\n self.label_168.setText(_translate(\"MainWindow\", \"Roll-off\"))\r\n self.label_169.setText(_translate(\"MainWindow\", \"Modulation\"))\r\n self.modcod_sat_3.setItemText(0, _translate(\"MainWindow\", \"QPSK 2/3\"))\r\n self.modcod_sat_3.setItemText(1, _translate(\"MainWindow\", \"8PSK 30/33\"))\r\n self.modcod_sat_3.setItemText(2, _translate(\"MainWindow\", \"64 QAM\"))\r\n self.label_170.setText(_translate(\"MainWindow\", \"Frequency (GHz)\"))\r\n self.label_171.setText(_translate(\"MainWindow\", \"Polarization\"))\r\n self.pol_sat_5.setItemText(0, _translate(\"MainWindow\", \"horizontal\"))\r\n self.pol_sat_5.setItemText(1, _translate(\"MainWindow\", \"vertical\"))\r\n self.pol_sat_5.setItemText(2, _translate(\"MainWindow\", \"circular\"))\r\n self.clear_sat_3.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.groupBox_22.setTitle(_translate(\"MainWindow\", \"Ground Station\"))\r\n self.label_172.setText(_translate(\"MainWindow\", \"Name\"))\r\n self.label_175.setText(_translate(\"MainWindow\", \"Latitude (degrees)\"))\r\n self.label_176.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n self.save_ground_station_gdstation_3.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_ground_station_gdstation_3.setText(_translate(\"MainWindow\", \"Load\"))\r\n self.clear_ground_station_gdstation_3.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.groupBox_23.setTitle(_translate(\"MainWindow\", \"Reception Characteristics\"))\r\n self.label_177.setText(_translate(\"MainWindow\", \"Name:\"))\r\n self.label_178.setText(_translate(\"MainWindow\", \"Antenna size (m)\"))\r\n self.label_179.setText(_translate(\"MainWindow\", \"LNB gain (dB)\"))\r\n self.save_reception_rcp_3.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_reception_rcp_9.setText(_translate(\"MainWindow\", \"Load\"))\r\n self.label_180.setText(_translate(\"MainWindow\", \"Antenna efficiency (%)\"))\r\n self.label_181.setText(_translate(\"MainWindow\", \"LNB noise temp. (K)\"))\r\n self.label_182.setText(_translate(\"MainWindow\", \"Additional losses (dB)\"))\r\n self.label_183.setText(_translate(\"MainWindow\", \"Maximum \\n\"\r\n\"depointing (degrees)\"))\r\n self.clear_reception_rcp_5.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.label_184.setText(_translate(\"MainWindow\", \"Cable loss (dB)\"))\r\n self.groupBox_24.setTitle(_translate(\"MainWindow\", \"Satellite\"))\r\n self.label_186.setText(_translate(\"MainWindow\", \"Name\"))\r\n self.label_187.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n self.save_sat_4.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_sat_4.setText(_translate(\"MainWindow\", \"Load\"))\r\n # self.default_sat_4.setItemText(1, _translate(\"MainWindow\", \"StarOne C2/C4\"))\r\n # self.default_sat_4.setItemText(2, _translate(\"MainWindow\", \"Hispamar\"))\r\n # self.default_sat_4.setItemText(3, _translate(\"MainWindow\", \"Claro\"))\r\n # self.default_sat_4.setItemText(4, _translate(\"MainWindow\", \"Menino Ney\"))\r\n self.label_188.setText(_translate(\"MainWindow\", \"Default Satellites\"))\r\n self.label_189.setText(_translate(\"MainWindow\", \"Altitude (km)\"))\r\n self.label_190.setText(_translate(\"MainWindow\", \"E.I.R.P. 
(dBW)\"))\r\n self.label_191.setText(_translate(\"MainWindow\", \"Transpoder\\'s max.\\n\"\r\n\"bandwidith (MHz)\"))\r\n self.label_192.setText(_translate(\"MainWindow\", \"Effective \\n\"\r\n\"bandwidith (MHz)\"))\r\n self.label_193.setText(_translate(\"MainWindow\", \"Roll-off\"))\r\n self.label_194.setText(_translate(\"MainWindow\", \"Modulation\"))\r\n self.modcod_sat_4.setItemText(0, _translate(\"MainWindow\", \"QPSK 2/3\"))\r\n self.modcod_sat_4.setItemText(1, _translate(\"MainWindow\", \"8PSK 30/33\"))\r\n self.modcod_sat_4.setItemText(2, _translate(\"MainWindow\", \"64 QAM\"))\r\n self.label_195.setText(_translate(\"MainWindow\", \"Frequency (GHz)\"))\r\n self.label_196.setText(_translate(\"MainWindow\", \"Polarization\"))\r\n self.pol_sat_6.setItemText(0, _translate(\"MainWindow\", \"horizontal\"))\r\n self.pol_sat_6.setItemText(1, _translate(\"MainWindow\", \"vertical\"))\r\n self.pol_sat_6.setItemText(2, _translate(\"MainWindow\", \"circular\"))\r\n self.clear_sat_4.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.groupBox_25.setTitle(_translate(\"MainWindow\", \"Ground Station\"))\r\n self.label_197.setText(_translate(\"MainWindow\", \"Name\"))\r\n self.label_198.setText(_translate(\"MainWindow\", \"Latitude (degrees)\"))\r\n self.label_199.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n self.save_ground_station_gdstation_4.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_ground_station_gdstation_4.setText(_translate(\"MainWindow\", \"Load\"))\r\n self.clear_ground_station_gdstation_4.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.groupBox_26.setTitle(_translate(\"MainWindow\", \"Reception Characteristics\"))\r\n self.label_200.setText(_translate(\"MainWindow\", \"Name:\"))\r\n self.label_201.setText(_translate(\"MainWindow\", \"Antenna size (m)\"))\r\n self.label_202.setText(_translate(\"MainWindow\", \"LNB gain (dB)\"))\r\n self.save_reception_rcp_4.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_reception_rcp_10.setText(_translate(\"MainWindow\", \"Load\"))\r\n self.label_203.setText(_translate(\"MainWindow\", \"Antenna efficiency (%)\"))\r\n self.label_204.setText(_translate(\"MainWindow\", \"LNB noise temp. 
(K)\"))\r\n self.label_205.setText(_translate(\"MainWindow\", \"Additional losses (dB)\"))\r\n self.label_206.setText(_translate(\"MainWindow\", \"Maximum \\n\"\r\n\"depointing (degrees)\"))\r\n self.clear_reception_rcp_6.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.label_207.setText(_translate(\"MainWindow\", \"Cable loss (dB)\"))\r\n self.groupBox_27.setTitle(_translate(\"MainWindow\", \"Reception\"))\r\n self.label_208.setText(_translate(\"MainWindow\", \"Latitude (degrees)\"))\r\n self.label_209.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n self.label_210.setText(_translate(\"MainWindow\", \"Antenna size (m)\"))\r\n self.label_211.setText(_translate(\"MainWindow\", \"Antenna efficiency\"))\r\n self.clear_reception_rcp_7.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.load_reception_rcp_11.setText(_translate(\"MainWindow\", \"Load Ground Station\"))\r\n self.load_reception_rcp_12.setText(_translate(\"MainWindow\", \"Load Reception\"))\r\n self.calc_spatm_3.setText(_translate(\"MainWindow\", \"Calculate\"))\r\n self.groupBox_28.setTitle(_translate(\"MainWindow\", \"Satellite\"))\r\n self.label_212.setText(_translate(\"MainWindow\", \"Frequency (GHz)\"))\r\n self.label_213.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n # self.default_sat_sp_perf_5.setItemText(0, _translate(\"MainWindow\", \"StarOne C2/C4\"))\r\n # self.default_sat_sp_perf_5.setItemText(1, _translate(\"MainWindow\", \"Hispamar\"))\r\n # self.default_sat_sp_perf_5.setItemText(2, _translate(\"MainWindow\", \"Claro\"))\r\n # self.default_sat_sp_perf_5.setItemText(3, _translate(\"MainWindow\", \"Menino Ney\"))\r\n self.label_214.setText(_translate(\"MainWindow\", \"Default Satellites\"))\r\n self.pol_sat_7.setItemText(0, _translate(\"MainWindow\", \"horizontal\"))\r\n self.pol_sat_7.setItemText(1, _translate(\"MainWindow\", \"vertical\"))\r\n self.pol_sat_7.setItemText(2, _translate(\"MainWindow\", \"circular\"))\r\n self.label_215.setText(_translate(\"MainWindow\", \"Polarization\"))\r\n self.load_reception_rcp_13.setText(_translate(\"MainWindow\", \"Load Satellite\"))\r\n self.p_year_spatm_3.setStatusTip(_translate(\"MainWindow\", \"Values between 0.001 and 0.5\"))\r\n self.p_year_spatm_3.setText(_translate(\"MainWindow\", \"0.001\"))\r\n self.label_216.setText(_translate(\"MainWindow\", \"Excess % of time per year\"))\r\n self.label_217.setText(_translate(\"MainWindow\", \"Method\"))\r\n self.method_spatm_3.setStatusTip(_translate(\"MainWindow\", \"Mode calculation for gaseous attenuation\"))\r\n self.method_spatm_3.setItemText(0, _translate(\"MainWindow\", \"approx\"))\r\n self.method_spatm_3.setItemText(1, _translate(\"MainWindow\", \"exact\"))\r\n self.groupBox_29.setTitle(_translate(\"MainWindow\", \"Ground Station\"))\r\n self.label_218.setText(_translate(\"MainWindow\", \"Name\"))\r\n self.label_219.setText(_translate(\"MainWindow\", \"Latitude (degrees)\"))\r\n self.label_220.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n self.save_ground_station_sp_perf_3.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_ground_station_sp_perf_3.setText(_translate(\"MainWindow\", \"Load\"))\r\n self.clear_ground_station_sp_perf_3.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.groupBox_30.setTitle(_translate(\"MainWindow\", \"Satellite\"))\r\n self.label_221.setText(_translate(\"MainWindow\", \"Name\"))\r\n self.label_222.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n 
self.save_sat_sp_perf_3.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_sat_sp_perf_3.setText(_translate(\"MainWindow\", \"Load\"))\r\n # self.default_sat_sp_perf_6.setItemText(0, _translate(\"MainWindow\", \"StarOne C2/C4\"))\r\n # self.default_sat_sp_perf_6.setItemText(1, _translate(\"MainWindow\", \"Hispamar\"))\r\n # self.default_sat_sp_perf_6.setItemText(2, _translate(\"MainWindow\", \"Claro\"))\r\n # self.default_sat_sp_perf_6.setItemText(3, _translate(\"MainWindow\", \"Menino Ney\"))\r\n self.label_223.setText(_translate(\"MainWindow\", \"Default Satellites\"))\r\n self.label_224.setText(_translate(\"MainWindow\", \"Altitude (km)\"))\r\n self.label_225.setText(_translate(\"MainWindow\", \"E.I.R.P. (dBW)\"))\r\n self.label_226.setText(_translate(\"MainWindow\", \"Transpoder\\'s max.\\n\"\r\n\"bandwidith (MHz)\"))\r\n self.label_227.setText(_translate(\"MainWindow\", \"Effective \\n\"\r\n\"bandwidith (MHz)\"))\r\n self.label_228.setText(_translate(\"MainWindow\", \"Roll-off\"))\r\n self.label_229.setText(_translate(\"MainWindow\", \"Modulation\"))\r\n self.modcod_sat_sp_perf_3.setItemText(0, _translate(\"MainWindow\", \"QPSK 2/3\"))\r\n self.modcod_sat_sp_perf_3.setItemText(1, _translate(\"MainWindow\", \"8PSK 30/33\"))\r\n self.modcod_sat_sp_perf_3.setItemText(2, _translate(\"MainWindow\", \"64 QAM\"))\r\n self.label_230.setText(_translate(\"MainWindow\", \"Frequency (GHz)\"))\r\n self.label_231.setText(_translate(\"MainWindow\", \"Polarization\"))\r\n self.pol_sat_sp_perf_3.setItemText(0, _translate(\"MainWindow\", \"horizontal\"))\r\n self.pol_sat_sp_perf_3.setItemText(1, _translate(\"MainWindow\", \"vertical\"))\r\n self.pol_sat_sp_perf_3.setItemText(2, _translate(\"MainWindow\", \"circular\"))\r\n self.clear_sat_sp_perf_3.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.groupBox_31.setTitle(_translate(\"MainWindow\", \"Reception Characteristics\"))\r\n self.label_232.setText(_translate(\"MainWindow\", \"Name:\"))\r\n self.label_233.setText(_translate(\"MainWindow\", \"LNB gain (dB)\"))\r\n self.save_reception_sp_perf_3.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_reception_sp_perf_3.setText(_translate(\"MainWindow\", \"Load\"))\r\n self.label_234.setText(_translate(\"MainWindow\", \"Antenna efficiency\"))\r\n self.label_235.setText(_translate(\"MainWindow\", \"LNB noise temp. 
(K)\"))\r\n self.label_236.setText(_translate(\"MainWindow\", \"Additional losses (dB)\"))\r\n self.label_237.setText(_translate(\"MainWindow\", \"Maximum \\n\"\r\n\"depointing (degrees)\"))\r\n self.clear_reception_sp_perf_3.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.label_238.setText(_translate(\"MainWindow\", \"Cable loss (dB)\"))\r\n self.calc_sp_perf_3.setText(_translate(\"MainWindow\", \"Calculate\"))\r\n self.label_239.setText(_translate(\"MainWindow\", \"SNR goal relaxation (dB)\"))\r\n self.relaxation_sp_perf_3.setStatusTip(_translate(\"MainWindow\", \"Goal (+ -) margin to achieve the required SNR\"))\r\n self.relaxation_sp_perf_3.setText(_translate(\"MainWindow\", \"0.2\"))\r\n self.margin_sp_perf_3.setStatusTip(_translate(\"MainWindow\", \"Goal (+ -) margin to achieve the required SNR\"))\r\n self.margin_sp_perf_3.setText(_translate(\"MainWindow\", \"0\"))\r\n self.label_240.setText(_translate(\"MainWindow\", \"Margin (dB)\"))\r\n self.browse_path_mp_perf_3.setStatusTip(_translate(\"MainWindow\", \"Browse list\"))\r\n self.browse_path_mp_perf_3.setText(_translate(\"MainWindow\", \"Browse\"))\r\n self.label_241.setText(_translate(\"MainWindow\", \"Path\"))\r\n self.label_242.setText(_translate(\"MainWindow\", \"List Preview\"))\r\n self.groupBox_32.setTitle(_translate(\"MainWindow\", \"Satellite\"))\r\n self.label_243.setText(_translate(\"MainWindow\", \"Name\"))\r\n self.label_244.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n self.save_sat_mp_perf_3.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_sat_mp_perf_3.setText(_translate(\"MainWindow\", \"Load\"))\r\n # self.default_sat_mp_perf_3.setItemText(0, _translate(\"MainWindow\", \"StarOne C2/C4\"))\r\n # self.default_sat_mp_perf_3.setItemText(1, _translate(\"MainWindow\", \"Hispamar\"))\r\n # self.default_sat_mp_perf_3.setItemText(2, _translate(\"MainWindow\", \"Claro\"))\r\n # self.default_sat_mp_perf_3.setItemText(3, _translate(\"MainWindow\", \"Menino Ney\"))\r\n self.label_245.setText(_translate(\"MainWindow\", \"Default Satellites\"))\r\n self.label_246.setText(_translate(\"MainWindow\", \"Altitude (km)\"))\r\n self.label_247.setText(_translate(\"MainWindow\", \"E.I.R.P. 
(dBW)\"))\r\n self.label_248.setText(_translate(\"MainWindow\", \"Transpoder\\'s max.\\n\"\r\n\"bandwidith (MHz)\"))\r\n self.label_249.setText(_translate(\"MainWindow\", \"Effective \\n\"\r\n\"bandwidith (MHz)\"))\r\n self.label_250.setText(_translate(\"MainWindow\", \"Roll-off\"))\r\n self.label_251.setText(_translate(\"MainWindow\", \"Modulation\"))\r\n self.modcod_sat_mp_perf_3.setItemText(0, _translate(\"MainWindow\", \"QPSK 2/3\"))\r\n self.modcod_sat_mp_perf_3.setItemText(1, _translate(\"MainWindow\", \"8PSK 30/33\"))\r\n self.modcod_sat_mp_perf_3.setItemText(2, _translate(\"MainWindow\", \"64 QAM\"))\r\n self.label_252.setText(_translate(\"MainWindow\", \"Frequency (GHz)\"))\r\n self.label_253.setText(_translate(\"MainWindow\", \"Polarization\"))\r\n self.pol_sat_mp_perf_3.setItemText(0, _translate(\"MainWindow\", \"horizontal\"))\r\n self.pol_sat_mp_perf_3.setItemText(1, _translate(\"MainWindow\", \"vertical\"))\r\n self.pol_sat_mp_perf_3.setItemText(2, _translate(\"MainWindow\", \"circular\"))\r\n self.clear_satellite_mp_perf_3.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.groupBox_33.setTitle(_translate(\"MainWindow\", \"Reception Characteristics\"))\r\n self.label_254.setText(_translate(\"MainWindow\", \"Name:\"))\r\n self.label_255.setText(_translate(\"MainWindow\", \"Antenna size (m)\"))\r\n self.label_256.setText(_translate(\"MainWindow\", \"LNB gain (dB)\"))\r\n self.save_reception_mp_perf_3.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_reception_mp_perf_3.setText(_translate(\"MainWindow\", \"Load\"))\r\n self.label_257.setText(_translate(\"MainWindow\", \"Antenna efficiency\"))\r\n self.label_258.setText(_translate(\"MainWindow\", \"LNB noise temp. (K)\"))\r\n self.label_259.setText(_translate(\"MainWindow\", \"Additional losses (dB)\"))\r\n self.label_260.setText(_translate(\"MainWindow\", \"Maximum \\n\"\r\n\"depointing (degrees)\"))\r\n self.clear_reception_mp_perf_3.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.label_261.setText(_translate(\"MainWindow\", \"Cable loss (dB)\"))\r\n self.relaxation_mp_perf_3.setStatusTip(_translate(\"MainWindow\", \"Goal (+ -) margin to achieve the required SNR\"))\r\n self.relaxation_mp_perf_3.setText(_translate(\"MainWindow\", \"0.2\"))\r\n self.calc_mp_perf_3.setText(_translate(\"MainWindow\", \"Calculate\"))\r\n self.label_262.setText(_translate(\"MainWindow\", \"SNR goal relaxation (dB)\"))\r\n self.label_263.setText(_translate(\"MainWindow\", \"Threads\"))\r\n self.n_threads_3.setStatusTip(_translate(\"MainWindow\", \"Thread number used in calcullations\"))\r\n self.label_264.setText(_translate(\"MainWindow\", \"Margin (dB)\"))\r\n self.margin_mp_perf_3.setStatusTip(_translate(\"MainWindow\", \"Effective link budget margin\"))\r\n self.margin_mp_perf_3.setText(_translate(\"MainWindow\", \"0\"))\r\n self.groupBox_34.setTitle(_translate(\"MainWindow\", \"Reception\"))\r\n self.label_265.setText(_translate(\"MainWindow\", \"Latitude (degrees)\"))\r\n self.label_266.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n self.label_267.setText(_translate(\"MainWindow\", \"Antenna size (m)\"))\r\n self.label_268.setText(_translate(\"MainWindow\", \"Antenna efficiency\"))\r\n self.clear_reception_rcp_8.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.load_reception_rcp_14.setText(_translate(\"MainWindow\", \"Load Ground Station\"))\r\n self.load_reception_rcp_15.setText(_translate(\"MainWindow\", \"Load Reception\"))\r\n self.calc_spatm_4.setText(_translate(\"MainWindow\", 
\"Calculate\"))\r\n self.groupBox_35.setTitle(_translate(\"MainWindow\", \"Satellite\"))\r\n self.label_269.setText(_translate(\"MainWindow\", \"Frequency (GHz)\"))\r\n self.label_270.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n # self.default_sat_sp_perf_7.setItemText(0, _translate(\"MainWindow\", \"StarOne C2/C4\"))\r\n # self.default_sat_sp_perf_7.setItemText(1, _translate(\"MainWindow\", \"Hispamar\"))\r\n # self.default_sat_sp_perf_7.setItemText(2, _translate(\"MainWindow\", \"Claro\"))\r\n # self.default_sat_sp_perf_7.setItemText(3, _translate(\"MainWindow\", \"Menino Ney\"))\r\n self.label_271.setText(_translate(\"MainWindow\", \"Default Satellites\"))\r\n self.pol_sat_8.setItemText(0, _translate(\"MainWindow\", \"horizontal\"))\r\n self.pol_sat_8.setItemText(1, _translate(\"MainWindow\", \"vertical\"))\r\n self.pol_sat_8.setItemText(2, _translate(\"MainWindow\", \"circular\"))\r\n self.label_272.setText(_translate(\"MainWindow\", \"Polarization\"))\r\n self.load_reception_rcp_16.setText(_translate(\"MainWindow\", \"Load Satellite\"))\r\n self.p_year_spatm_4.setStatusTip(_translate(\"MainWindow\", \"Values between 0.001 and 0.5\"))\r\n self.p_year_spatm_4.setText(_translate(\"MainWindow\", \"0.001\"))\r\n self.label_273.setText(_translate(\"MainWindow\", \"Excess % of time per year\"))\r\n self.label_274.setText(_translate(\"MainWindow\", \"Method\"))\r\n self.method_spatm_4.setStatusTip(_translate(\"MainWindow\", \"Mode calculation for gaseous attenuation\"))\r\n self.method_spatm_4.setItemText(0, _translate(\"MainWindow\", \"approx\"))\r\n self.method_spatm_4.setItemText(1, _translate(\"MainWindow\", \"exact\"))\r\n self.groupBox_36.setTitle(_translate(\"MainWindow\", \"Ground Station\"))\r\n self.label_275.setText(_translate(\"MainWindow\", \"Name\"))\r\n self.label_276.setText(_translate(\"MainWindow\", \"Latitude (degrees)\"))\r\n self.label_277.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n self.save_ground_station_sp_perf_4.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_ground_station_sp_perf_4.setText(_translate(\"MainWindow\", \"Load\"))\r\n self.clear_ground_station_sp_perf_4.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.groupBox_37.setTitle(_translate(\"MainWindow\", \"Satellite\"))\r\n self.label_278.setText(_translate(\"MainWindow\", \"Name\"))\r\n self.label_279.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n self.save_sat_sp_perf_4.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_sat_sp_perf_4.setText(_translate(\"MainWindow\", \"Load\"))\r\n # self.default_sat_sp_perf_8.setItemText(0, _translate(\"MainWindow\", \"StarOne C2/C4\"))\r\n # self.default_sat_sp_perf_8.setItemText(1, _translate(\"MainWindow\", \"Hispamar\"))\r\n # self.default_sat_sp_perf_8.setItemText(2, _translate(\"MainWindow\", \"Claro\"))\r\n # self.default_sat_sp_perf_8.setItemText(3, _translate(\"MainWindow\", \"Menino Ney\"))\r\n self.label_280.setText(_translate(\"MainWindow\", \"Default Satellites\"))\r\n self.label_281.setText(_translate(\"MainWindow\", \"Altitude (km)\"))\r\n self.label_282.setText(_translate(\"MainWindow\", \"E.I.R.P. 
(dBW)\"))\r\n self.label_283.setText(_translate(\"MainWindow\", \"Transpoder\\'s max.\\n\"\r\n\"bandwidith (MHz)\"))\r\n self.label_284.setText(_translate(\"MainWindow\", \"Effective \\n\"\r\n\"bandwidith (MHz)\"))\r\n self.label_285.setText(_translate(\"MainWindow\", \"Roll-off\"))\r\n self.label_286.setText(_translate(\"MainWindow\", \"Modulation\"))\r\n self.modcod_sat_sp_perf_4.setItemText(0, _translate(\"MainWindow\", \"QPSK 2/3\"))\r\n self.modcod_sat_sp_perf_4.setItemText(1, _translate(\"MainWindow\", \"8PSK 30/33\"))\r\n self.modcod_sat_sp_perf_4.setItemText(2, _translate(\"MainWindow\", \"64 QAM\"))\r\n self.label_287.setText(_translate(\"MainWindow\", \"Frequency (GHz)\"))\r\n self.label_288.setText(_translate(\"MainWindow\", \"Polarization\"))\r\n self.pol_sat_sp_perf_4.setItemText(0, _translate(\"MainWindow\", \"horizontal\"))\r\n self.pol_sat_sp_perf_4.setItemText(1, _translate(\"MainWindow\", \"vertical\"))\r\n self.pol_sat_sp_perf_4.setItemText(2, _translate(\"MainWindow\", \"circular\"))\r\n self.clear_sat_sp_perf_4.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.groupBox_38.setTitle(_translate(\"MainWindow\", \"Reception Characteristics\"))\r\n self.label_289.setText(_translate(\"MainWindow\", \"Name:\"))\r\n self.label_290.setText(_translate(\"MainWindow\", \"Antenna size (m)\"))\r\n self.label_291.setText(_translate(\"MainWindow\", \"LNB gain (dB)\"))\r\n self.save_reception_sp_perf_4.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_reception_sp_perf_4.setText(_translate(\"MainWindow\", \"Load\"))\r\n self.label_292.setText(_translate(\"MainWindow\", \"Antenna efficiency\"))\r\n self.label_293.setText(_translate(\"MainWindow\", \"LNB noise temp. (K)\"))\r\n self.label_294.setText(_translate(\"MainWindow\", \"Additional losses (dB)\"))\r\n self.label_295.setText(_translate(\"MainWindow\", \"Maximum \\n\"\r\n\"depointing (degrees)\"))\r\n self.clear_reception_sp_perf_4.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.label_296.setText(_translate(\"MainWindow\", \"Cable loss (dB)\"))\r\n self.calc_sp_perf_4.setText(_translate(\"MainWindow\", \"Calculate\"))\r\n self.label_297.setText(_translate(\"MainWindow\", \"SNR goal relaxation (dB)\"))\r\n self.relaxation_sp_perf_4.setStatusTip(_translate(\"MainWindow\", \"Goal (+ -) margin to achieve the required SNR\"))\r\n self.relaxation_sp_perf_4.setText(_translate(\"MainWindow\", \"0.2\"))\r\n self.margin_sp_perf_4.setStatusTip(_translate(\"MainWindow\", \"Goal (+ -) margin to achieve the required SNR\"))\r\n self.margin_sp_perf_4.setText(_translate(\"MainWindow\", \"0\"))\r\n self.label_298.setText(_translate(\"MainWindow\", \"Margin (dB)\"))\r\n self.browse_mp_ant_size.setStatusTip(_translate(\"MainWindow\", \"Browse list\"))\r\n self.browse_mp_ant_size.setText(_translate(\"MainWindow\", \"Browse\"))\r\n self.label_299.setText(_translate(\"MainWindow\", \"Path\"))\r\n self.label_300.setText(_translate(\"MainWindow\", \"List Preview\"))\r\n self.groupBox_39.setTitle(_translate(\"MainWindow\", \"Satellite\"))\r\n self.label_301.setText(_translate(\"MainWindow\", \"Name\"))\r\n self.label_302.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n self.save_sat_mp_ant_size.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_sat_mp_ant_size.setText(_translate(\"MainWindow\", \"Load\"))\r\n # self.default_sat_mp_ant_size.setItemText(0, _translate(\"MainWindow\", \"StarOne C2/C4\"))\r\n # self.default_sat_mp_ant_size.setItemText(1, _translate(\"MainWindow\", \"Hispamar\"))\r\n # 
self.default_sat_mp_ant_size.setItemText(2, _translate(\"MainWindow\", \"Claro\"))\r\n # self.default_sat_mp_ant_size.setItemText(3, _translate(\"MainWindow\", \"Menino Ney\"))\r\n self.label_303.setText(_translate(\"MainWindow\", \"Default Satellites\"))\r\n self.label_304.setText(_translate(\"MainWindow\", \"Altitude (km)\"))\r\n self.label_305.setText(_translate(\"MainWindow\", \"E.I.R.P. (dBW)\"))\r\n self.label_306.setText(_translate(\"MainWindow\", \"Transpoder\\'s max.\\n\"\r\n\"bandwidith (MHz)\"))\r\n self.label_307.setText(_translate(\"MainWindow\", \"Effective \\n\"\r\n\"bandwidith (MHz)\"))\r\n self.label_308.setText(_translate(\"MainWindow\", \"Roll-off\"))\r\n self.label_309.setText(_translate(\"MainWindow\", \"Modulation\"))\r\n self.label_310.setText(_translate(\"MainWindow\", \"Frequency (GHz)\"))\r\n self.label_311.setText(_translate(\"MainWindow\", \"Polarization\"))\r\n self.pol_sat_mp_ant_size.setItemText(0, _translate(\"MainWindow\", \"horizontal\"))\r\n self.pol_sat_mp_ant_size.setItemText(1, _translate(\"MainWindow\", \"vertical\"))\r\n self.pol_sat_mp_ant_size.setItemText(2, _translate(\"MainWindow\", \"circular\"))\r\n self.clear_sat_mp_ant_size.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.groupBox_40.setTitle(_translate(\"MainWindow\", \"Reception Characteristics\"))\r\n self.label_312.setText(_translate(\"MainWindow\", \"Name:\"))\r\n self.label_314.setText(_translate(\"MainWindow\", \"LNB gain (dB)\"))\r\n self.save_reception_mp_ant_size.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_reception_mp_ant_size.setText(_translate(\"MainWindow\", \"Load\"))\r\n self.label_315.setText(_translate(\"MainWindow\", \"Antenna efficiency\"))\r\n self.label_316.setText(_translate(\"MainWindow\", \"LNB noise temp. 
(K)\"))\r\n self.label_317.setText(_translate(\"MainWindow\", \"Additional losses (dB)\"))\r\n self.label_318.setText(_translate(\"MainWindow\", \"Maximum \\n\"\r\n\"depointing (degrees)\"))\r\n self.clear_reception_mp_ant_size.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.label_319.setText(_translate(\"MainWindow\", \"Cable loss (dB)\"))\r\n self.relaxation_mp_ant_size.setStatusTip(_translate(\"MainWindow\", \"Goal (+ -) margin to achieve the required SNR\"))\r\n self.relaxation_mp_ant_size.setText(_translate(\"MainWindow\", \"0.1\"))\r\n self.calc_mp_ant_size.setText(_translate(\"MainWindow\", \"Calculate\"))\r\n self.label_320.setText(_translate(\"MainWindow\", \"SNR target relaxation (dB)\"))\r\n self.label_321.setText(_translate(\"MainWindow\", \"Threads\"))\r\n self.n_threads_mp_ant_size.setStatusTip(_translate(\"MainWindow\", \"Thread number used in calcullations\"))\r\n self.label_322.setText(_translate(\"MainWindow\", \"Margin (dB)\"))\r\n self.margin_mp_ant_size.setStatusTip(_translate(\"MainWindow\", \"Effective link budget margin\"))\r\n self.margin_mp_ant_size.setText(_translate(\"MainWindow\", \"0\"))\r\n self.label_324.setText(_translate(\"MainWindow\", \"Availability target\"))\r\n self.availability_target_mp_ant_size.setText(_translate(\"MainWindow\", \"99.999\"))\r\n self.groupBox_7.setTitle(_translate(\"MainWindow\", \"Reception\"))\r\n self.label_51.setText(_translate(\"MainWindow\", \"Latitude (degrees)\"))\r\n self.label_50.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n self.label_53.setText(_translate(\"MainWindow\", \"Antenna size (m)\"))\r\n self.label_68.setText(_translate(\"MainWindow\", \"Antenna efficiency\"))\r\n self.clear_reception_spatm.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.load_ground_station_spatm.setText(_translate(\"MainWindow\", \"Load Ground Station\"))\r\n self.load_reception_spatm.setText(_translate(\"MainWindow\", \"Load Reception\"))\r\n self.calc_spatm.setText(_translate(\"MainWindow\", \"Calculate\"))\r\n self.groupBox_8.setTitle(_translate(\"MainWindow\", \"Satellite\"))\r\n self.label_52.setText(_translate(\"MainWindow\", \"Frequency (GHz)\"))\r\n self.label_57.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n # self.default_sat_spatm.setItemText(0, _translate(\"MainWindow\", \"StarOne C2/C4\"))\r\n # self.default_sat_spatm.setItemText(1, _translate(\"MainWindow\", \"Hispamar\"))\r\n # self.default_sat_spatm.setItemText(2, _translate(\"MainWindow\", \"Claro\"))\r\n # self.default_sat_spatm.setItemText(3, _translate(\"MainWindow\", \"Menino Ney\"))\r\n self.label_63.setText(_translate(\"MainWindow\", \"Default Satellites\"))\r\n self.pol_sat_spatm.setItemText(0, _translate(\"MainWindow\", \"horizontal\"))\r\n self.pol_sat_spatm.setItemText(1, _translate(\"MainWindow\", \"vertical\"))\r\n self.pol_sat_spatm.setItemText(2, _translate(\"MainWindow\", \"circular\"))\r\n self.label_8.setText(_translate(\"MainWindow\", \"Polarization\"))\r\n self.load_sat_spatm.setText(_translate(\"MainWindow\", \"Load Satellite\"))\r\n self.p_year_spatm.setStatusTip(_translate(\"MainWindow\", \"Values between 0.001 and 0.5\"))\r\n self.p_year_spatm.setText(_translate(\"MainWindow\", \"0.001\"))\r\n self.label_58.setText(_translate(\"MainWindow\", \"Excess % of time per year\"))\r\n self.label_9.setText(_translate(\"MainWindow\", \"Method\"))\r\n self.method_spatm.setStatusTip(_translate(\"MainWindow\", \"Mode calculation for gaseous attenuation\"))\r\n self.method_spatm.setItemText(0, 
_translate(\"MainWindow\", \"approx\"))\r\n self.method_spatm.setItemText(1, _translate(\"MainWindow\", \"exact\"))\r\n self.groupBox_4.setTitle(_translate(\"MainWindow\", \"Ground Station\"))\r\n self.label_27.setText(_translate(\"MainWindow\", \"Name\"))\r\n self.label_28.setText(_translate(\"MainWindow\", \"Latitude (degrees)\"))\r\n self.label_29.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n self.save_ground_station_sp_perf.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_ground_station_sp_perf.setText(_translate(\"MainWindow\", \"Load\"))\r\n self.clear_ground_station_sp_perf.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.groupBox_5.setTitle(_translate(\"MainWindow\", \"Satellite\"))\r\n self.label_30.setText(_translate(\"MainWindow\", \"Name\"))\r\n self.label_32.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n self.save_sat_sp_perf.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_sat_sp_perf.setText(_translate(\"MainWindow\", \"Load\"))\r\n # self.default_sat_sp_perf.setItemText(0, _translate(\"MainWindow\", \"StarOne C2/C4\"))\r\n # self.default_sat_sp_perf.setItemText(1, _translate(\"MainWindow\", \"Hispamar\"))\r\n # self.default_sat_sp_perf.setItemText(2, _translate(\"MainWindow\", \"Claro\"))\r\n # self.default_sat_sp_perf.setItemText(3, _translate(\"MainWindow\", \"Menino Ney\"))\r\n self.label_33.setText(_translate(\"MainWindow\", \"Default Satellites\"))\r\n self.label_34.setText(_translate(\"MainWindow\", \"Altitude (km)\"))\r\n self.label_35.setText(_translate(\"MainWindow\", \"E.I.R.P. (dBW)\"))\r\n self.label_36.setText(_translate(\"MainWindow\", \"Transpoder\\'s max.\\n\"\r\n\"bandwidith (MHz)\"))\r\n self.label_37.setText(_translate(\"MainWindow\", \"Effective \\n\"\r\n\"bandwidith (MHz)\"))\r\n self.label_40.setText(_translate(\"MainWindow\", \"Roll-off\"))\r\n self.label_41.setText(_translate(\"MainWindow\", \"Modulation\"))\r\n self.label_42.setText(_translate(\"MainWindow\", \"Frequency (GHz)\"))\r\n self.label_6.setText(_translate(\"MainWindow\", \"Polarization\"))\r\n self.pol_sat_sp_perf.setItemText(0, _translate(\"MainWindow\", \"horizontal\"))\r\n self.pol_sat_sp_perf.setItemText(1, _translate(\"MainWindow\", \"vertical\"))\r\n self.pol_sat_sp_perf.setItemText(2, _translate(\"MainWindow\", \"circular\"))\r\n self.clear_sat_sp_perf.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.groupBox_6.setTitle(_translate(\"MainWindow\", \"Reception Characteristics\"))\r\n self.label_43.setText(_translate(\"MainWindow\", \"Name:\"))\r\n self.label_44.setText(_translate(\"MainWindow\", \"Antenna size (m)\"))\r\n self.label_45.setText(_translate(\"MainWindow\", \"LNB gain (dB)\"))\r\n self.save_reception_sp_perf.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_reception_sp_perf.setText(_translate(\"MainWindow\", \"Load\"))\r\n self.label_46.setText(_translate(\"MainWindow\", \"Antenna efficiency\"))\r\n self.label_47.setText(_translate(\"MainWindow\", \"LNB noise temp. 
(K)\"))\r\n self.label_48.setText(_translate(\"MainWindow\", \"Additional losses (dB)\"))\r\n self.label_49.setText(_translate(\"MainWindow\", \"Maximum \\n\"\r\n\"depointing (degrees)\"))\r\n self.clear_reception_sp_perf.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.label_67.setText(_translate(\"MainWindow\", \"Cable loss (dB)\"))\r\n self.calc_sp_perf.setText(_translate(\"MainWindow\", \"Calculate\"))\r\n self.label_4.setText(_translate(\"MainWindow\", \"SNR target relaxation (dB)\"))\r\n self.relaxation_sp_perf.setStatusTip(_translate(\"MainWindow\", \"Goal (+ -) margin to achieve the required SNR\"))\r\n self.relaxation_sp_perf.setText(_translate(\"MainWindow\", \"0.1\"))\r\n self.margin_sp_perf.setStatusTip(_translate(\"MainWindow\", \"Goal (+ -) margin to achieve the required SNR\"))\r\n self.margin_sp_perf.setText(_translate(\"MainWindow\", \"0\"))\r\n self.label_173.setText(_translate(\"MainWindow\", \"Margin (dB)\"))\r\n self.browse_path_mp_perf.setStatusTip(_translate(\"MainWindow\", \"Browse list\"))\r\n self.browse_path_mp_perf.setText(_translate(\"MainWindow\", \"Browse\"))\r\n self.label_10.setText(_translate(\"MainWindow\", \"Path\"))\r\n self.label_11.setText(_translate(\"MainWindow\", \"List Preview\"))\r\n self.groupBox_10.setTitle(_translate(\"MainWindow\", \"Satellite\"))\r\n self.label_71.setText(_translate(\"MainWindow\", \"Name\"))\r\n self.label_72.setText(_translate(\"MainWindow\", \"Longitude (degrees)\"))\r\n self.save_sat_mp_perf.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_sat_mp_perf.setText(_translate(\"MainWindow\", \"Load\"))\r\n # self.default_sat_mp_perf.setItemText(0, _translate(\"MainWindow\", \"StarOne C2/C4\"))\r\n # self.default_sat_mp_perf.setItemText(1, _translate(\"MainWindow\", \"Hispamar\"))\r\n # self.default_sat_mp_perf.setItemText(2, _translate(\"MainWindow\", \"Claro\"))\r\n # self.default_sat_mp_perf.setItemText(3, _translate(\"MainWindow\", \"Menino Ney\"))\r\n self.label_73.setText(_translate(\"MainWindow\", \"Default Satellites\"))\r\n self.label_74.setText(_translate(\"MainWindow\", \"Altitude (km)\"))\r\n self.label_75.setText(_translate(\"MainWindow\", \"E.I.R.P. 
(dBW)\"))\r\n self.label_76.setText(_translate(\"MainWindow\", \"Transpoder\\'s max.\\n\"\r\n\"bandwidith (MHz)\"))\r\n self.label_77.setText(_translate(\"MainWindow\", \"Effective \\n\"\r\n\"bandwidith (MHz)\"))\r\n self.label_80.setText(_translate(\"MainWindow\", \"Roll-off\"))\r\n self.label_81.setText(_translate(\"MainWindow\", \"Modulation\"))\r\n self.label_82.setText(_translate(\"MainWindow\", \"Frequency (GHz)\"))\r\n self.label_12.setText(_translate(\"MainWindow\", \"Polarization\"))\r\n self.pol_sat_mp_perf.setItemText(0, _translate(\"MainWindow\", \"horizontal\"))\r\n self.pol_sat_mp_perf.setItemText(1, _translate(\"MainWindow\", \"vertical\"))\r\n self.pol_sat_mp_perf.setItemText(2, _translate(\"MainWindow\", \"circular\"))\r\n self.clear_satellite_mp_perf.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.groupBox_11.setTitle(_translate(\"MainWindow\", \"Reception Characteristics\"))\r\n self.label_83.setText(_translate(\"MainWindow\", \"Name:\"))\r\n self.label_84.setText(_translate(\"MainWindow\", \"Antenna size (m)\"))\r\n self.label_85.setText(_translate(\"MainWindow\", \"LNB gain (dB)\"))\r\n self.save_reception_mp_perf.setText(_translate(\"MainWindow\", \"Save\"))\r\n self.load_reception_mp_perf.setText(_translate(\"MainWindow\", \"Load\"))\r\n self.label_86.setText(_translate(\"MainWindow\", \"Antenna efficiency\"))\r\n self.label_87.setText(_translate(\"MainWindow\", \"LNB noise temp. (K)\"))\r\n self.label_88.setText(_translate(\"MainWindow\", \"Additional losses (dB)\"))\r\n self.label_89.setText(_translate(\"MainWindow\", \"Maximum \\n\"\r\n\"depointing (degrees)\"))\r\n self.clear_reception_mp_perf.setText(_translate(\"MainWindow\", \"Clear\"))\r\n self.label_90.setText(_translate(\"MainWindow\", \"Cable loss (dB)\"))\r\n self.relaxation_mp_perf.setStatusTip(_translate(\"MainWindow\", \"Goal (+ -) margin to achieve the required SNR\"))\r\n self.relaxation_mp_perf.setText(_translate(\"MainWindow\", \"0.1\"))\r\n self.calc_mp_perf.setText(_translate(\"MainWindow\", \"Calculate\"))\r\n self.label_14.setText(_translate(\"MainWindow\", \"SNR target relaxation (dB)\"))\r\n self.label_15.setText(_translate(\"MainWindow\", \"Threads\"))\r\n self.n_threads_mp_perf.setStatusTip(_translate(\"MainWindow\", \"Thread number used in calcullations\"))\r\n self.label_16.setText(_translate(\"MainWindow\", \"Margin (dB)\"))\r\n self.margin_mp_perf.setStatusTip(_translate(\"MainWindow\", \"Effective link budget margin\"))\r\n self.margin_mp_perf.setText(_translate(\"MainWindow\", \"0\"))\r\n self.menuNew.setTitle(_translate(\"MainWindow\", \"File\"))\r\n self.menuCalculation.setTitle(_translate(\"MainWindow\", \"Single Point Calculation\"))\r\n self.menuList_Calculation.setTitle(_translate(\"MainWindow\", \"List Calculation\"))\r\n self.menuHelp.setTitle(_translate(\"MainWindow\", \"Help\"))\r\n self.action_New_Satellite.setText(_translate(\"MainWindow\", \"Satellite\"))\r\n self.action_New_Satellite.setStatusTip(_translate(\"MainWindow\", \"Define and save a new satellite to use in calculations\"))\r\n self.actionReception.setText(_translate(\"MainWindow\", \"Reception\"))\r\n self.actionReception.setStatusTip(_translate(\"MainWindow\", \"Define and save new reception characteristics to use in calculations\"))\r\n self.action_New_Ground_Station.setText(_translate(\"MainWindow\", \"Ground Station\"))\r\n self.action_New_Ground_Station.setStatusTip(_translate(\"MainWindow\", \"Define and save a new ground station to use in calculations\"))\r\n 
self.action_Single_Atmospheric_Atenuation.setText(_translate(\"MainWindow\", \"Atmospheric Attenuation\"))\r\n self.action_Single_Atmospheric_Atenuation.setStatusTip(_translate(\"MainWindow\", \"Complete single point atmospheric attenuation calculation\"))\r\n self.action_Single_Downlink_Performance.setText(_translate(\"MainWindow\", \"Downlink Performance\"))\r\n self.action_Single_Downlink_Performance.setStatusTip(_translate(\"MainWindow\", \"Complete single point downlink performance calculation\"))\r\n self.action_List_Downlink_Performance.setText(_translate(\"MainWindow\", \"Downlink Performance\"))\r\n self.action_List_Downlink_Performance.setStatusTip(_translate(\"MainWindow\", \"Complete multi point (list) downlink performance calculation\"))\r\n self.action_Single_Antenna_Size.setText(_translate(\"MainWindow\", \"Antenna Size\"))\r\n self.action_Single_Antenna_Size.setStatusTip(_translate(\"MainWindow\", \"Single point downlink antenna diameter estimation\"))\r\n self.action_List_Antenna_Size.setText(_translate(\"MainWindow\", \"Antenna Size\"))\r\n self.action_List_Antenna_Size.setStatusTip(_translate(\"MainWindow\", \"Multi point (list) downlink antenna diameter estimation\"))\r\n\r\n\r\nif __name__ == \"__main__\":\r\n import sys, os, glob\r\n if not os.path.exists('results'):\r\n os.makedirs('results')\r\n if not os.path.exists('temp'):\r\n os.makedirs('temp')\r\n files = glob.glob('temp/*')\r\n for f in files:\r\n os.remove(f)\r\n os.environ[\"QT_AUTO_SCREEN_SCALE_FACTOR\"] = '1'\r\n QtWidgets.QApplication.setAttribute(QtCore.Qt.AA_EnableHighDpiScaling, True) # enable high dpi scaling\r\n QtWidgets.QApplication.setAttribute(QtCore.Qt.AA_UseHighDpiPixmaps, True) # use high dpi icons\r\n app = QtWidgets.QApplication(sys.argv)\r\n MainWindow = QtWidgets.QMainWindow()\r\n ui = Ui_MainWindow()\r\n ui.setupUi(MainWindow)\r\n MainWindow.show() # show the window directly; the original wrapped show() inside QtCore.QThread(...), which only built an idle thread around show()'s None return value\r\n sys.exit(app.exec_())\r\n" } ]
23
Mik-el/Data-Mining-in-Python-Examples
https://github.com/Mik-el/Data-Mining-in-Python-Examples
3475f78c258c6f59caf924adf4155a5ca71f4d33
0e2de252107d79e1e44d9b0980d2e2c375869ed5
521ca19aa255fb1c7fceba08043ed54866c24fe2
refs/heads/master
2022-12-02T19:26:39.944543
2020-08-04T15:33:06
2020-08-04T15:33:06
243,300,935
1
0
null
null
null
null
null
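The first script in the files list below (Sia2.py) validates birthdates by catching parse exceptions row by row and imputing a mean date computed through an integer view. A minimal vectorized sketch of the same idea follows — an illustration, not part of the repository; it assumes the same SIA_DM.xls layout and a pandas version whose datetime Series support .mean() (otherwise fall back to the .view('i8') trick the script itself uses):

import pandas as pd

df = pd.read_excel("SIA_DM.xls")
# errors='coerce' turns unparseable birthdates into NaT instead of raising,
# so invalid rows can be located and imputed in a single pass
parsed = pd.to_datetime(df['Birthdate'], errors='coerce')
mean_date = parsed.dropna().mean()
df['Birthdate'] = parsed.fillna(mean_date)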
[ { "alpha_fraction": 0.6820825934410095, "alphanum_fraction": 0.6969168186187744, "avg_line_length": 40.44444274902344, "blob_id": "9366a53ac4861f0aeb332fcdbcb471b5008bc03b", "content_id": "ed2dd468c83c1fe9387400af510aa70ef881e518", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3440, "license_type": "no_license", "max_line_length": 136, "num_lines": 81, "path": "/siadm/Sia2.py", "repo_name": "Mik-el/Data-Mining-in-Python-Examples", "src_encoding": "UTF-8", "text": "\r\nimport pandas as pd\r\nimport matplotlib.pyplot as plt\r\nimport csv\r\n\r\npd.options.mode.chained_assignment = None #\r\n\r\nimport time #time e datetime per operazioni sulle date\r\nimport datetime\r\nimport numpy as np\r\n\r\n\r\ndataFrame = pd.read_excel(\"SIA_DM.xls\")\r\npd.set_option('display.expand_frame_repr', False)\r\n\r\n# RICHIESTA 2) verificare se le date di nascita contengono valori improbabili (possibili\r\n#valori sconosciuti!!)\r\nprint('\\n **********\\n Verificare se le date di nascita contengono valori improbabili \\n **********\\n ')\r\n\r\n# Come è possibile notare tra le informazioni del data frame, non sono presenti valori nulli.\r\n\r\n\r\n\r\n#****DATE NON VALIDE\r\n# Dunque si procede alla verifica di valori errati.\r\n# In caso di errore nel campo data, questo verrà rimpiazzato da una data media\r\nerr_count = 0\r\nidx = 0\r\nerr_idx_list = [] # lista utile a mantene in memoria gli indici corrispondenti ai valori errati\r\ndate_tmp_list = [] # lista utile a memorizzare le date per l'eventuale calcolo della media delle date\r\nfor tmp in dataFrame['Birthdate']:\r\n try:\r\n date_tmp_list.append(pd.to_datetime(tmp))\r\n idx = idx + 1\r\n except:\r\n print('Dato errato nell\\'istanza numero', idx)\r\n err_idx_list.append(idx)\r\n err_count = err_count + 1\r\n idx = idx + 1\r\n\r\n# Calcolo la data media per eventuali rimpiazzi\r\nmeanDate = (np.array(date_tmp_list, dtype='datetime64[s]').view('i8').mean().astype('datetime64[s]'))\r\n\r\n# In caso di date errate si procede al rimpiazzo delle stesse con la data media\r\nif err_count != 0:\r\n print('Esistono', err_count, 'date non corrette che verranno sostituite con la data media')\r\n for i in err_idx_list:\r\n # Stampa le istanze con le date non corrette\r\n display(dataFrame[dataFrame['Customer'] == i+1])\r\n dataFrame.loc[i,'Birthdate'] = meanDate\r\nelse:\r\n print('Tutte le date sono sintatticamente corrette')\r\n \r\n\r\n# Una volta azzerati i dati errati, si procede con la conversione in \"datetime64\" per poter effettuare\r\n# altre operazioni sulle date utili ai fini di altre verifiche\r\ndataFrame['Birthdate'] = pd.to_datetime(dataFrame['Birthdate'])\r\ndisplay(dataFrame.head())\r\n\r\n\r\n\r\n\r\n\r\n#****DATE IMPROBABILI\r\n# ***clienti che abbiano la data di nascita a partire dal 1900.\r\n# da 01-01-1900 ad oggi \r\n# Si crea una vista del data frame a tele scopo (df)\r\nprint('\\n **********\\n Si suppone di voler analizzare dati di clienti che abbiano la data di nascita a partire dal 1900.')\r\nprint('Dunque si assumono \"probabili\" solo date che ricadono nell\\'intervallo temporale che va dal 01-01-1900 ad oggi. 
\\n **********\\n')\r\ndf = dataFrame[(dataFrame['Birthdate'] > time.strftime(\"%d/%m/%Y\")) | (dataFrame['Birthdate'] < datetime.datetime(1900, 1, 1))]\r\ntotal_err_date = df['Birthdate'].size\r\n\r\n# Visualizza le istanze che non ricadono nell'intervallo di interesse\r\nprint('Le istanze che non ricadono nel range ricercato sono:', total_err_date)\r\ndisplay(df)\r\n# Rimpiazzo le date improbabili con una data media.\r\n# Essendo 'df' una vista sul data frame principale (dataFrame), i rimpiazzi effettuati hanno effetto su 'dataFrame'.\r\ndf.loc[((df['Birthdate'] > time.strftime(\"%d/%m/%Y\")) | (df['Birthdate'] < datetime.datetime(1900, 1, 1))), 'Birthdate'] = meanDate\r\nprint('Le istanze con i valori improbabili sono stati rimpiazzati con una data media')\r\ndisplay(df)\r\n\r\nprint('Si nota ovviamente che il campo \"\"age\" non combacia con la data di nascita')" }, { "alpha_fraction": 0.7027027010917664, "alphanum_fraction": 0.7227227091789246, "avg_line_length": 35.074073791503906, "blob_id": "9fe4827cbf42d0183fe12df26d30b4757ca40c59", "content_id": "4406d307c41e69e0e3b4c3dcb13b549539c5f90e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 999, "license_type": "no_license", "max_line_length": 108, "num_lines": 27, "path": "/Posti Letto/esPostiLetto4.py", "repo_name": "Mik-el/Data-Mining-in-Python-Examples", "src_encoding": "UTF-8", "text": "# NUMERO DI LETTI PER REGIONE (IN UN ANNO)\r\n#Raggruppo il conteggio dei letti per regione con il metodo groupBy, poi li sommo con sum.\r\n\r\nimport pandas as pd\r\nimport matplotlib.pyplot as plt\r\nimport csv\r\n\r\ndati = pd.read_csv('posti_letto.csv', sep=';')\r\ndati = dati.convert_objects(convert_numeric=True)\r\n\r\n#dataset anno contenente solo voci relative all' anno 2014\r\nanno2013 = dati[dati['Anno'] == 2013]\r\n\r\n#del dataframe ci interessa solo la regione e il numero di posti letto \r\nlettiPerRegione = anno2013[ ['Descrizione Regione', 'Totale posti letto'] ].groupby('Descrizione Regione')\r\n\r\n\r\n#dataframe ottenuto ordinando e sommando tutti i valori i postiletti relativi al dataset \r\nrisultato = lettiPerRegione.sum().sort_values('Totale posti letto')\r\n\r\n\r\nprint (\"\\n **********\\n Posti letto per regione nell' anno 2013: \\n *********\\n\")\r\n#invocando .plot.barh() sul dataset otteniamo un diagramma a barre orizziontali\r\nrisultato.plot.barh()\r\n\r\n#invocando .show() su plt lo stampiamo\r\n#plt.show()" }, { "alpha_fraction": 0.6897952556610107, "alphanum_fraction": 0.6981598138809204, "avg_line_length": 39.15737533569336, "blob_id": "11b9a1db6a442ca17f12eb388155789661ec7d59", "content_id": "059715ab3f6dd5bc69bc84aeb39dd6a883ae53a8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12577, "license_type": "no_license", "max_line_length": 136, "num_lines": 305, "path": "/Nursery/esNursery.py", "repo_name": "Mik-el/Data-Mining-in-Python-Examples", "src_encoding": "UTF-8", "text": "\r\nimport pandas as pd\r\nimport numpy as np\r\nimport seaborn as sns\r\nimport matplotlib.pyplot as plt\r\nimport time\r\n\r\nfrom sklearn.decomposition import PCA\r\nfrom sklearn.preprocessing import StandardScaler, LabelEncoder\r\n\r\nfrom sklearn.linear_model import LogisticRegression\r\nfrom sklearn.svm import SVC\r\nfrom sklearn.neighbors import KNeighborsClassifier\r\nfrom sklearn import tree\r\nfrom sklearn.neural_network import MLPClassifier\r\nfrom sklearn.ensemble import GradientBoostingClassifier\r\nfrom 
sklearn.tree import DecisionTreeClassifier\r\nfrom sklearn.gaussian_process.kernels import RBF\r\nfrom sklearn.ensemble import RandomForestClassifier\r\nfrom sklearn.naive_bayes import GaussianNB\r\n\r\n\r\ndata = pd.read_csv(\"nursery.csv\", sep=';')\r\npd.set_option('display.expand_frame_repr', False)\r\n\r\nprint (\"\\n **********\\n Stampa delle prime 5 righe del dataset: \\n *********\\n\")\r\ndisplay(data.head()) \r\nprint (\"\\n **********\\n Stampa delle statistiche descrittive: \\n *********\\n\")\r\ndisplay(data.describe())\r\nprint (\"\\n **********\\n Numero totale di dati e di attributi: \\n *********\\n\")\r\nprint(data.shape)\r\n\r\n\r\n#PREPROCESSING\r\n#data= data.convert_objects(convert_numeric=True)\r\n\r\n# non ci sono dati mancanti. Parò ci sono molti dati \r\n# categoriali che vanno trasformati in numerici mediante la discretizzazione.\r\ndef label_encode(df, columns):\r\n for col in columns:\r\n le = LabelEncoder()\r\n col_values_unique = list(df[col].unique())\r\n le_fitted = le.fit(col_values_unique)\r\n \r\n col_values = list(df[col].values)\r\n le.classes_\r\n col_values_transformed = le.transform(col_values)\r\n df[col] = col_values_transformed\r\n \r\ndata = data.copy(deep=True)\r\nto_be_encoded_cols = data.columns.values\r\nlabel_encode(data, to_be_encoded_cols)\r\n\r\nprint (\"\\n **********\\n Stampa delle prime 5 righe del dataset dopo il pre-processing: \\n *********\\n\")\r\ndisplay(data.head()) \r\nprint (\"\\n **********\\n Stampa delle statistiche descrittive dopo il pre-processing: \\n *********\\n\")\r\ndisplay(data.describe())\r\n\r\n# Creo un nuovo file csv dove ci copio il dataset processato\r\nf = open(\"nursery2.csv\", \"a\")\r\ndata.to_csv('nursery2.csv', index = False ) #false= non scrive i nomi delle righe\r\nf.close()\r\n\r\n\r\n\r\n#ANALISI GRAFICA\r\n\"\"\"print (\"\\n **********\\n Grafico dopo il preprocessing: \\n *********\\n\")\r\ndata.plot( kind='barh', x='children', y='has_nurs' )\r\nplt.show()\r\n\"\"\"\r\n\r\n\"\"\"\r\nprint (\"\\n **********\\n Stampa istogramma con 30 colonne \\n *********\\n\")\r\nistogramma = data.hist(bins=30)\r\n\r\nistogramma.set_title('Relazione tra numero di bambini e presenza infermiera')\r\nistogramma.set_xlabel('Numero letti')\r\nistogramma.set_ylabel('Numero ospedali')\r\n\r\nplt.show()\r\n\"\"\"\r\n\r\n\r\n\r\n# ANALISI DEI DATI\r\n# Per ottenere maggiori informazioni su come ogni caratteristica è correlata con la Variabile Target, \r\n# possiamo calcolare e tracciare la matrice di correlazione delle features per il dataset.\r\n# La matrice di correlazione mostra le variabili fortemente correlate con la variabile target, che nel nostro caso\r\n# è la variabile 'class', è 'healt' (condizione di salute).\r\n# uso il metodo .corr\r\ncorrelation_matrix = data.corr()\r\nplt.figure(figsize=(50,20))\r\nax = sns.heatmap(correlation_matrix, vmax=1, square=True, \r\n annot=True, fmt='.2f', cmap='GnBu', cbar_kws={\"shrink\":.5}, robust=True)\r\nplt.title('Matrice di correlazione delle features', fontsize=20)\r\nplt.show()\r\n \r\n\r\n# Una matrice di correlazione è un buon modo per ottenere un quadro generale di come tutte le funzionalità nel set di \r\n# dati siano correlate tra loro. 
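\r\n# (Illustrative aside, not part of the original script.) The same ranking can be read straight off the matrix: one column of data.corr(), taken in absolute value and sorted, matches what the bar-chart helper defined below visualizes.\r\ncorr_with_class = data.corr()['class'].abs().sort_values(ascending=False)\r\nprint(corr_with_class.drop('class')) # drop the trivial self-correlation of 1.0\r\n# 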
Per un set di dati con molte funzionalità potrebbe diventare molto grande e la \r\n# correlazione di una singola funzione con le altre caratteristiche diventa difficile da discernere.\r\n#Se vogliamo esaminare le correlazioni di una singola funzione, di solito è un'idea migliore visualizzarla sotto forma di grafico \r\n# a barre.\r\n\r\ndef display_corr_with_col(df, col):\r\n correlation_matrix = data.corr()\r\n correlation_type = correlation_matrix[col].copy()\r\n abs_correlation_type = correlation_type.apply(lambda x: abs(x))\r\n desc_corr_values = abs_correlation_type.sort_values(ascending=False)\r\n y_values = list(desc_corr_values.values)[1:]\r\n x_values = range(0, len(y_values))\r\n xlabels = list(desc_corr_values.keys())[1:]\r\n fig, ax = plt.subplots(figsize=(8,8))\r\n ax.bar(x_values, y_values)\r\n ax.set_title('Correlazione delle features con {}' .format(col), fontsize=20)\r\n ax.set_ylabel('Coefficiente di correlazione di Pearson', fontsize=16)\r\n plt.xticks(x_values, xlabels, rotation='vertical')\r\n plt.show()\r\n \r\ndisplay_corr_with_col(data, 'class')\r\n\r\n# Con il grafico a barre ritrovo gli stessi risultati della matrice di correlazione.\r\n\r\n\r\n\r\n\r\n# CLASSIFICAZIONE E VALIDAZIONE\r\n# Costruzione modello di apprendimento automatico: ora verranno formati diversi modelli di Machine Learning e verranno \r\n# confrontati i loro risultati.\r\n\r\n#Divisione tra Train test e Set Test\r\ndef get_train_test(df, y_col, x_cols, ratio):\r\n \"\"\"\r\n Questo metodo trasforma il dataframe in un TRAIN set e in un TEST set, per questo bisogna specificare:\r\n 1. il ratio train: test(in genere 0.7)\r\n 2. la colonna con le Y_values\r\n \"\"\"\r\n mask = np.random.rand(len(df)) < ratio\r\n df_train = df[mask]\r\n df_test = df[~mask]\r\n \r\n Y_train = df_train[y_col].values\r\n Y_test = df_test[y_col].values\r\n X_train = df_train[x_cols].values\r\n X_test = df_test[x_cols].values\r\n return df_train, df_test, X_train, Y_train, X_test, Y_test\r\n\r\ny_col_glass = 'class'\r\nx_cols_glass = list(data.columns.values)\r\nx_cols_glass.remove(y_col_glass)\r\n\r\ntrain_test_ratio = 0.7\r\ndf_train, df_test, X_train, Y_train, X_test, Y_test = get_train_test(data, y_col_glass, x_cols_glass, train_test_ratio)\r\n\r\n\r\n# Affinchè io possa testare + classificatori, creo un dizionario \r\n# esso è fatto in modo che CHIAVI= nome dei classificatori\r\n# VALORI = istanze dei classificatori.\r\ndict_classifier = {\r\n \"Logistic Regression\": LogisticRegression(),\r\n \"Nearest Neighbors\": KNeighborsClassifier(3),\r\n \"Linear SVM\": SVC(kernel=\"linear\", C=0.025),\r\n \"Gradient Boosting Classifier\": GradientBoostingClassifier(n_estimators=10),\r\n \"Decision Tree\": tree.DecisionTreeClassifier(max_depth=5),\r\n \"Random Forest\": RandomForestClassifier(max_depth=5, n_estimators=10, max_features=1),\r\n \"Naive Bayes\": GaussianNB() \r\n}\r\n\r\n\r\ndef batch_classify(X_train, Y_train, X_test, Y_test, no_classifiers=5, verbose=True):\r\n \r\n dict_models = {} #Scorro il dizionario dei classificatori\r\n for classifier_name, classifier in list(dict_classifier.items())[:no_classifiers]:\r\n t_start = time.clock() #Modulo temporale per tenere traccia del tempo necessario ad addestrare il classificatore\r\n classifier.fit(X_train, Y_train) #Allena il classificatore\r\n t_end = time.clock()\r\n \r\n t_diff = t_end - t_start\r\n train_score = classifier.score(X_train, Y_train) #Scelta del classificatore sul set di allenamento\r\n test_score = classifier.score(X_test, Y_test) #Esegue il 
classificatore sul set di test\r\n \r\n dict_models[classifier_name] = {'model': classifier, 'train_score': train_score, 'test_score': test_score, 'train_time': t_diff}\r\n if verbose:\r\n print(\"trained {c} in {f:.2f} s\" .format(c=classifier_name, f=t_diff))\r\n return dict_models\r\n\r\n\r\n\r\n#\r\ndef display_dict_models(dict_models, sort_by='test_score'):\r\n cls = [key for key in dict_models.keys()]\r\n test_s = [dict_models[key]['test_score'] for key in cls]\r\n training_s = [dict_models[key]['train_score'] for key in cls]\r\n training_t = [dict_models[key]['train_time'] for key in cls]\r\n \r\n df_ = pd.DataFrame(data = np.zeros(shape=(len(cls),4)), columns = ['classifier','train_score','test_score','train_time'])\r\n for ii in range(0,len(cls)):\r\n df_.loc[ii,'classifier'] = cls[ii]\r\n df_.loc[ii,'train_score'] = training_s[ii]\r\n df_.loc[ii,'test_score'] = test_s[ii]\r\n df_.loc[ii,'train_time'] = training_t[ii]\r\n \r\n display(df_.sort_values(by=sort_by, ascending=False))\r\n \r\n# VALUTAZIONE CLASSIFICATORI\r\n# Dopo la definizione dei singoli classificatori è stata fatta una valutazione degli stessi al fine di individuarne \r\n# il migliore.\r\n\r\n# CLASSIFICATORE K-NEIGHBORS\r\nknn = KNeighborsClassifier(n_neighbors = 3)\r\nknn.fit(X_train, Y_train)\r\nY_pred = knn.predict(X_test)\r\nacc_knn = round(knn.score(X_train, Y_train) * 100, 2)\r\n\r\n# CLASSIFICATORE DECISION TREE\r\ndecision_tree = DecisionTreeClassifier()\r\ndecision_tree.fit(X_train, Y_train)\r\nY_pred = decision_tree.predict(X_test)\r\nacc_decision_tree = round(decision_tree.score(X_train, Y_train) * 100, 2)\r\n\r\n# CLASSIFICATORE RANDOM FOREST\r\nrandom_forest = RandomForestClassifier(n_estimators=1000)\r\nrandom_forest.fit(X_train, Y_train)\r\nY_pred = random_forest.predict(X_test)\r\nrandom_forest.score(X_train, Y_train)\r\nacc_random_forest = round(random_forest.score(X_train, Y_train) * 100, 2)\r\n\r\n# CLASSIFICATORE GAUSSIAN (NAIVE BAYES)\r\ngaussian = GaussianNB()\r\ngaussian.fit(X_train, Y_train)\r\nY_pred = gaussian.predict(X_test)\r\nacc_gaussian = round(gaussian.score(X_train, Y_train) * 100, 2)\r\n\r\nresults = pd.DataFrame({\r\n \r\n 'Model':['Nearest Neighbors','Decision Tree',\r\n 'Random Forest','Naive Bayes'],\r\n 'Score':[acc_knn, acc_decision_tree,\r\n acc_random_forest, acc_gaussian]})\r\nresult_df = results.sort_values(by='Score', ascending=False)\r\nresult_df = result_df.set_index('Score')\r\nresult_df.head(8)\r\nprint (results)\r\n# Come è possibile notare i Classificatori Random Forest e Decision Tree hanno una percentutale del 100%, quindi sono \r\n# quelli più adatti a fare una valutazione\r\n#??? 
NON VEDO NESSUN OUTPUT\r\n\r\n\r\n\r\n#CROSS VALIDATION per Random Forest.\r\n#Per valutare il classificatore è stata fatta un’ulteriore verifica \r\n#applicando una convalida incrociata.\r\n#Il Cross Validation divide casualmente il train set in k sottoinsiemi chiamati folds.\r\n#Se dividessi i dati in 10 folds(K = 10),il modello del classificatore verrebbe \r\n# addestrato e valutato 10 volte, usando ogni volta 9 fold da train test e 1 fold da test set \r\n#Pertanto ottengo un array con 10 punteggi diversi.\r\nprint (\"\\n **********\\n Risultati cross validation \\n *********\\n\")\r\nfrom sklearn.model_selection import cross_val_score\r\nrf = RandomForestClassifier(n_estimators=100)\r\nscores = cross_val_score(rf, X_train, Y_train, cv=10, scoring = \"accuracy\")\r\nprint(\"Punteggio: \", scores)\r\nprint(\"Media: \", scores.mean())\r\nprint(\"Deviazione standard: \", scores.std())\r\n\r\n# Come si può osservare dall'output il modello usato (Random Forest) ha una precisione media del 94% con una deviazione \r\n# standard del 0.06%,che ci mostra, quanto precise sono le stime. Ciò significa che la precisione del nostro modello \r\n# può variare di +/- 0.06%. Dunque, a seguito di questa verifica, la precisione continua ad essere ancora buona per cui \r\n# nelle fasi successive si proverà a migliorare ulteriormente le prestazioni del Random Forest.\r\n\r\n\r\n# Matrice di Confusione per Random Forest\r\nfrom sklearn.model_selection import cross_val_predict\r\nfrom sklearn.metrics import confusion_matrix\r\npredictions = cross_val_predict(random_forest, X_train, Y_train, cv=3)\r\nconfusion_matrix(Y_train, predictions)\r\n\r\n# PRECISIONE E RECALL (per il random forest)\r\nfrom sklearn.metrics import precision_score, recall_score\r\nprint(\"Precision: \", precision_score(Y_train, predictions))\r\nprint(\"Recall: \", recall_score(Y_train, predictions))\r\n\r\n\r\n\"\"\"\r\n# Ora invece CROSS VALIDATION per Decision Tree\r\nfrom sklearn.model_selection import cross_val_score\r\nrf = DecisionTreeClassifier()\r\nscores = cross_val_score(rf, X_train, Y_train, cv=10, scoring = \"accuracy\")\r\nprint(\"Scores: \", scores)\r\nprint(\"Mean: \", scores.mean())\r\nprint(\"Standard deviation: \", scores.std())\r\n\r\n# Abbiamo all'incirca gli stessi risultati del Random Forest per quanto riguarda la deviazione standard (0.04%), mentre\r\n# la precisione media è del 96%, quindi molto più accurato del Random Forest.\r\n\r\n\r\n# Matrice di Confusione per Decision Tree\r\nfrom sklearn.model_selection import cross_val_predict\r\nfrom sklearn.metrics import confusion_matrix\r\npredictions = cross_val_predict(decision_tree, X_train, Y_train, cv=3)\r\nconfusion_matrix(Y_train, predictions)\r\n\r\n# PRECISION E RECALL\r\nfrom sklearn.metrics import precision_score, recall_score\r\nprint(\"Precision: \", precision_score(Y_train, predictions))\r\nprint(\"Recall: \", recall_score(Y_train, predictions))\r\n\"\"\"" }, { "alpha_fraction": 0.6689956188201904, "alphanum_fraction": 0.6742358207702637, "avg_line_length": 34.78125, "blob_id": "3aadf63ced96c3106a6a256622cc507a067cafd9", "content_id": "3d367159a827220d2011a33fea4c2238c090918b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1145, "license_type": "no_license", "max_line_length": 177, "num_lines": 32, "path": "/README.md", "repo_name": "Mik-el/Data-Mining-in-Python-Examples", "src_encoding": "UTF-8", "text": "# Data-Mining-in-Python-Examples\n## Disclaimer\n1) The codes I'm sharing contain comments 
and variable names in italian. (but maybe you'll result to understand since the codes are quite easy). \n2) The codes could have errors. If you find them you can use Github you can fix the code. Thank you!\n\n## What is Data Mining\nIt's the the set of techniques and methodologies which have as their object the extraction of useful information from large quantities of data (databases, data warehouses, etc.)\n\n## What you can find in this Repo\nYou'll learn how to work in Python on given datasets in \"excel\" format (.csv).\nYou'll mostly use the Pandas Library.\n\n## How to Support me\n| | |\n| ------ | ------ |\n| Telegram Channel | [link][tg] |\n| XDA Forum | [link][xda] |\n| Tech Blog | [link][cam] |\n| Instagram | [link][insta] |\n| Youtube | [link][yt] |\n\n<a href=\"https://paypal.me/donationMikel\">\n <img src=\"images/donate_icon.png\"\n alt=\"closeup\"\n width=\"250\"/></a>\n \n \n[xda]: <http://bit.ly/2NBnhqB>\n[insta]: <http://bit.ly/mikel_insta>\n[yt]: <http://bit.ly/mikel_YT>\n[cam]:<https://cam.tv/mik_el_tech>\n[tg]:<https://bit.ly/Mikel_TG>\n" }, { "alpha_fraction": 0.655978262424469, "alphanum_fraction": 0.664673924446106, "avg_line_length": 22.50666618347168, "blob_id": "75672d1d9e54000636173ff474380faea9b185d4", "content_id": "3378731e8fc8e4dcce8a79ff67cb0157ab6eaecf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1845, "license_type": "no_license", "max_line_length": 97, "num_lines": 75, "path": "/siadm/Sia1.py", "repo_name": "Mik-el/Data-Mining-in-Python-Examples", "src_encoding": "UTF-8", "text": "\r\n#Il file SIA_DM.XLS contiene i dati di 1500 clienti di un azienda\r\n\r\nimport pandas as pd\r\nimport matplotlib.pyplot as plt\r\nimport csv\r\n\r\npd.options.mode.chained_assignment = None #\r\n\r\nimport time #time e datetime per operazioni sulle date\r\nimport datetime\r\nimport numpy as np\r\n\r\n\r\n\r\n\r\n\r\n\r\ndataFrame = pd.read_excel(\"SIA_DM.xls\") \r\npd.set_option('display.expand_frame_repr', False)\r\n\r\nprint('\\n **********\\n Visualizzazione delle prime 5 righe del data frame \\n **********\\n')\r\ndisplay(dataFrame.head())\r\nprint('\\n')\r\n\r\nprint('\\n **********\\n Statistiche descrittive \\n **********\\n')\r\ndisplay(dataFrame.info()) #.describe()\r\n\r\nprint('\\n **********\\n Il numero di righe e di colonne è:\\n **********\\n', dataFrame.shape)\r\n\r\n#Stampa del tipo di attributi\r\nprint (\"\\n **********\\n Stampa del datatype di ogni colonna: \\n *********\\n\") \r\ndisplay(dataFrame.dtypes)\r\n\r\n\r\n\r\n# RICHIESTA 1) verificare che i Social Security Numbers siano tutti diversi\r\n# Verificare che Social Security Numbers siano tutti diversi\r\n\r\nprint('\\n **********\\n Verifica che Social Security Numbers siano tutti diversi \\n **********\\n')\r\n\r\nssn_totali = dataFrame['SSN'].size\r\nssn_unici = dataFrame['SSN'].unique().size\r\nssn_differenza = ssn_totali - ssn_unici\r\nif ssn_differenza != 0:\r\n print('Sono presenti', ssn_differenza,'SSN che si ripetono, ovvero i seguenti:')\r\n display(dataFrame['SSN'].value_counts().iloc[0:ssn_differenza])\r\nelse:\r\n print('Tutti i Social Security Numbers sono diversi')\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n#3)verificare se i valori del campo eta` (age) contengono valori\r\n#improbabili (problema “anno 2000”)\r\n\r\n\r\n\r\n#4)verificare il campo region (possibili errori di digitazione!!)\r\n\r\n\r\n\r\n#5)verificare il campo CredCardUser (possibile errore di formattazione)\r\n\r\n\r\n\r\n\r\n#6)Verificare i 
campi Income e Purchases per missing values e outliers\r\n" }, { "alpha_fraction": 0.6436525583267212, "alphanum_fraction": 0.6495916843414307, "avg_line_length": 27.19565200805664, "blob_id": "e8965fd3505512683c52105a142fe7002c400006", "content_id": "25ceae74950930c1bb643799c1cbbba46f987b19", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1355, "license_type": "no_license", "max_line_length": 114, "num_lines": 46, "path": "/siadm/Sia3.py", "repo_name": "Mik-el/Data-Mining-in-Python-Examples", "src_encoding": "UTF-8", "text": "import pandas as pd\r\nimport matplotlib.pyplot as plt\r\nimport csv\r\n\r\npd.options.mode.chained_assignment = None #\r\n\r\nimport time #time e datetime per operazioni sulle date\r\nimport datetime\r\nimport numpy as np\r\n\r\n\r\n\r\ndataFrame = pd.read_excel(\"SIA_DM.xls\")\r\npd.set_option('display.expand_frame_repr', False)\r\n\r\n# RICHIESTA 3\r\n#Verificare se i valori del campo età (age) contengono valori improbabili.\r\n\r\nprint('Sto verificando e rimpiazzando gli errori nel campo età...')\r\n\r\n# Cattura eccezioni sulla conversione ad intero per verificare la presenza di stringhe al posto di valori numerici\r\ni = 0\r\nfor tmp in dataFrame['Age']:\r\n try:\r\n int(tmp)\r\n i = i + 1\r\n except:\r\n print('Età non riconosciuta', tmp)\r\n # Rimpiazza l'età errata con il valore 0\r\n dataFrame.loc[i, 'Age'] = 0\r\n i = i + 1\r\n\r\n# Confronta l'età che dovrebbe avere nell'anno corrente rispetto all'età riportata nel data frame.\r\n\"\"\"\r\ni = 0\r\nanno_corrente = datetime.date.today().year\r\nfor eta_letta in dataFrame['Age']:\r\n eta_corretta = anno_corrente - dataFrame.loc[i].Birthdate.year\r\n if eta_corretta != eta_letta:\r\n # Rimpiazza l'età errata con l'età corretta\r\n dataFrame.loc[i, 'Age'] = eta_corretta\r\n i = i + 1\r\n\"\"\"\r\n \r\nprint('\\n **********\\n Stampa dataframe: \\n **********\\n') \r\ndisplay(dataFrame)\r\n " }, { "alpha_fraction": 0.6286472082138062, "alphanum_fraction": 0.6348364353179932, "avg_line_length": 22.688888549804688, "blob_id": "b9efb2ae8e8d62b22393a00da8c6382d4deeb7c3", "content_id": "8dd65f391cebea21316be9d5aefd41c465f39453", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1131, "license_type": "no_license", "max_line_length": 77, "num_lines": 45, "path": "/Posti Letto/esPostiLetto1.py", "repo_name": "Mik-el/Data-Mining-in-Python-Examples", "src_encoding": "UTF-8", "text": "\"\"\"\" commento multilinea\"\"\"\r\n\r\n\"\"\"\"\r\nESPLORARE UN DATASET di posti letto\r\n\"\"\"\r\n#pd.set_option('display.float_format', True)\r\n#pd.set_option('display.max_columns', None) \r\n\r\n#pd.set_option('display.expand_frame_repr', False)\r\n#pd.set_option('max_colwidth', -1)\r\n\r\n#pd.options.display.width = None\r\n\r\n#pd.options.display.max_rows = 1000\r\n\r\n\r\nimport pandas as pd\r\nimport matplotlib.pyplot as plt\r\nimport csv\r\n\r\n#pd.set_option('display.expand_frame_repr', False)\r\n\r\npd.options.display.max_columns = None\r\npd.options.display.width=None\r\n\r\ndati = pd.read_csv('posti_letto.csv', sep=';')\r\n\r\nprint(dati.info())\r\ndati = dati.convert_objects(convert_numeric=True)\r\n\r\nprint(dati.info())\r\nprint (\"\\n **********\\n Stampa le prime 5 righe del file \\n *********\\n\")\r\n#with pd.option_context('expand_frame_repr', False):\r\n# print(dati.head(5))\r\n\r\n\r\nprint (\"\\n **********\\n Stampa il data type per ogni colonna \\n 
*********\\n\")\r\n#print(dati.dtypes)\r\n\r\nprint (\"\\n **********\\n Stampa statistiche descrittive \\n *********\\n\")\r\n#print(dati.describe)\r\n\r\n\r\n\r\n#documentazione pandas https://pandas.pydata.org/pandas-docs/stable/\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n" }, { "alpha_fraction": 0.6658333539962769, "alphanum_fraction": 0.6708333492279053, "avg_line_length": 34.42424392700195, "blob_id": "79f85435c3134ad5fd91852eec944c5d723074d4", "content_id": "b0c8a5a0ffb7c5d6e14d0f565a2c0e4944005136", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1200, "license_type": "no_license", "max_line_length": 207, "num_lines": 33, "path": "/siadm/Sia4.py", "repo_name": "Mik-el/Data-Mining-in-Python-Examples", "src_encoding": "UTF-8", "text": "import pandas as pd\r\nimport matplotlib.pyplot as plt\r\nimport csv\r\n\r\npd.options.mode.chained_assignment = None #\r\n\r\nimport time #time e datetime per operazioni sulle date\r\nimport datetime\r\nimport numpy as np\r\n\r\n\r\ndataFrame = pd.read_excel(\"SIA_DM.xls\")\r\npd.set_option('display.expand_frame_repr', False)\r\n\r\n# RICHIESTA 4\r\n#Verificare il campo region, possibili errori di digitazione.\r\n\r\nprint('Sto Verificando il campo region nel caso in cui ci fossero errori di digitazione.')\r\n\r\n# Verifica se ogni istanza del data frame abbia un valore corretto nel campo 'Region'\r\n# .lstrip() serve a ignorare gli spazi a sinistra della stringa.\r\ni = 0\r\ncont_err = 0\r\nfor reg in dataFrame['Region']:\r\n if (dataFrame.loc[i, 'Region'].lstrip() != 'North') & (dataFrame.loc[i, 'Region'].lstrip() != 'South') & (dataFrame.loc[i, 'Region'].lstrip() != 'East') & (dataFrame.loc[i, 'Region'].lstrip() != 'West'):\r\n print('Riga errata: ', i+1)\r\n cont_err = cont_err + 1\r\n i = i + 1\r\nprint('Vi sono', cont_err,'istranze errate nell\\'attributo Region')\r\n\r\n\r\nprint('\\n **********\\n Stampa dataframe, dato che si tratta solo di una verifica non ho corretto i dati: \\n **********\\n') \r\ndisplay(dataFrame)" }, { "alpha_fraction": 0.6570680737495422, "alphanum_fraction": 0.6629580855369568, "avg_line_length": 30.255319595336914, "blob_id": "4c666cc673c24a09ea7d435bf7e580e2086e0c0b", "content_id": "846cd5a21fa8d0174bd5fa93598bb5c4fec7621e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1528, "license_type": "no_license", "max_line_length": 108, "num_lines": 47, "path": "/siadm/Sia5.py", "repo_name": "Mik-el/Data-Mining-in-Python-Examples", "src_encoding": "UTF-8", "text": "import pandas as pd\r\nimport matplotlib.pyplot as plt\r\nimport csv\r\n\r\npd.options.mode.chained_assignment = None #\r\n\r\nimport time #time e datetime per operazioni sulle date\r\nimport datetime\r\nimport numpy as np\r\n\r\n\r\ndataFrame = pd.read_excel(\"SIA_DM.xls\")\r\npd.set_option('display.expand_frame_repr', False)\r\n\r\n# RICHIESTA 5\r\n# Verificare il campo CredCardUser, possibili errori di formattazione.\r\nprint('\\n **********\\n Verificare il campo CredCardUser, possibili errori di formattazione \\n **********\\n')\r\n\r\n# Cattura l'eccezione durante la conversione ad intero per evidenziare errori di formattazione.\r\ni = 0\r\ncount_err = 0\r\nfor tmp in dataFrame['CredCardUser']:\r\n try:\r\n int(tmp)\r\n i = i + 1\r\n except:\r\n print('Valore CredCardUser non riconosciuto', tmp)\r\n print('Nell\\'istanza:', i+1)\r\n count_err = count_err + 1\r\n i = i + 1\r\n\r\nif count_err == 0:\r\n print('Non ci sono errori di 
formattazione nell\\'attributo CredCardUser')\r\n \r\n\r\n#RICHIESTA 6 \r\n# Verificare i campi Income e Purchases per missing values e outliers.\r\n\r\n\r\nprint('\\n **********\\n Elenco delle istanze con missing values per attributo Income: \\n **********\\n')\r\ndisplay(dataFrame[dataFrame['Income'].isnull()])\r\n\r\nprint('\\n **********\\n Elenco delle istanze missing values con attributo Purchases: \\n **********\\n')\r\ndisplay(dataFrame[dataFrame['Purchases'].isnull()])\r\n\r\n# Per identificare i possibili outliers si considera la media e la deviazione standard\r\n#print(dataFrame['Income'].mean())\r\n\r\n \r\n " }, { "alpha_fraction": 0.6947097778320312, "alphanum_fraction": 0.7002204060554504, "avg_line_length": 35.83333206176758, "blob_id": "cf39d9b38b8f6f749c48a896bbe8f72595f51a65", "content_id": "f841efd3427d2129f286365870341c25823c4c98", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2726, "license_type": "no_license", "max_line_length": 125, "num_lines": 72, "path": "/Posti Letto/da.py", "repo_name": "Mik-el/Data-Mining-in-Python-Examples", "src_encoding": "WINDOWS-1252", "text": "#Per fare una statica completa dei posti letto per regione\r\n#ci serve anche la popolazione per regione\r\n#che non è presente nel database originario. \r\n#Quindi creiamo un nuovo database con: codice regione e popolazione. \r\n#Dobbiamo combinazione di due database.\r\n\r\n\r\nimport pandas as pd\r\nimport matplotlib.pyplot as plt\r\n\r\n# Carica il primo csv 'postiletto1.csv'\r\ndati = pd.read_csv( 'posti_letto.csv', sep=';' )\r\ndati = dati.convert_objects( convert_numeric=True )\r\n\r\n# selezione dei dati per anno 2011\r\nanno = dati[ dati['Anno'] == 2011 ]\r\n\r\n#???\r\nletti = anno[ [ 'Codice Regione', 'Descrizione Regione', 'Totale posti letto' ] ]\r\n\r\n#raggruppamento dei dati per codice regione\r\nletti = letti.groupby( ['Codice Regione' ] )\r\n#MIK\r\nprint (\"\\n **********\\n Dati ragguppati per regione: \\n *********\\n\")\r\nprint(letti)\r\n\r\n#aggregazione dei dati (il numero di posti letto viene sommato)\r\nletti = letti.aggregate( { 'Descrizione Regione':'first', 'Totale posti letto':'sum' } )\r\n#MIK\r\nprint (\"\\n **********\\n Dati aggregati: \\n *********\\n\")\r\nprint(letti)\r\n\r\n\r\n\r\n\r\n\r\n# Carica il secondo csv 'popolazione.csv'\r\ndati2 = pd.read_csv( 'popolazione.csv', sep=';', thousands='.' )\r\ndati2 = dati2.convert_objects( convert_numeric=True )\r\ndati2n = dati2.rename( columns={'CODICE REGIONE': 'Codice Regione', 'TOTALE':'Popolazione'} )\r\n\r\n# Raggruppo i cittadin per Codice regione e sommo tutti i loro cvalori. \r\n# Il risultato e'¨ la lista delle regioni con associato il numero di abitanti\r\npopolazione = dati2[ ['Codice Regione', 'Popolazione' ] ].groupby( 'Codice Regione' ).sum()\r\n\r\n\r\n## join tra i dataframe popolazione e letti. 
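\r\n# (Illustrative aside, not part of the original script.) After the groupby, both frames carry 'Codice Regione' as their index, so the index join used below is equivalent to an explicit merge:\r\nlettiEPopolazione_alt = pd.merge(popolazione, letti, left_index=True, right_index=True)\r\n## 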
Serve almeno una variabile con lo stesso nome (nel nostro caso Codice Regione).\r\nlettiEPopolazione = popolazione.join( letti )\r\n\r\n# Aggiungiamo una colonna: 'Letti per Cittadino' che e' data da 'Totale posti letto'/'Popolazione'\r\nlettiEPopolazione['Letti per Cittadino'] = lettiEPopolazione['Totale posti letto'] / lettiEPopolazione['Popolazione']\r\n\r\n# Ordinamento del nuovo dataframe \r\nordinato = lettiEPopolazione.sort_values( 'Letti per Cittadino' )\r\nprint (\"\\n **********\\n Dataframe ottenuto dal join e ordinato: \\n *********\\n\")\r\nprint(ordinato)\r\n\r\nprint (\"\\n **********\\n Statistiche descrittive dell' istanza Letti per Cittadino dopo il join: \\n *********\\n\")\r\nprint (ordinato['Letti per Cittadino'].describe())\r\n\r\n\r\n\r\n\r\n# Grafico, specifico tipo e cosa visuallizare sugli assi\r\nordinato.plot( kind='barh', x='Descrizione Regione', y='Letti per Cittadino' )\r\nplt.show()\r\n\r\n\r\n# la Lombardia, che sembrava possedere un numero spropositato di posti letto\r\n#non è più neanche al primo posto. \r\n#Le differenze tra le varie regioni sono attenuate\r\n#Il molise da ultimo per posti letto diventa il primo" }, { "alpha_fraction": 0.6696633696556091, "alphanum_fraction": 0.6799002289772034, "avg_line_length": 35.53940963745117, "blob_id": "ba721a9447f2993faaa63de24ef538e9e99bcb3b", "content_id": "4d900e1e1bad7ba1823fe88cb8069fb35c8a4193", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 15310, "license_type": "no_license", "max_line_length": 148, "num_lines": 406, "path": "/Bank Data/esBankData.py", "repo_name": "Mik-el/Data-Mining-in-Python-Examples", "src_encoding": "UTF-8", "text": "# Il dataset Bank Data contiene informazioni bancarie e personali di alcuni clienti.\r\n#Si vogliono definire modelli di targeting per classificare i clienti e vedere la distribuzine del reddito, in base all'acquisto del PEP (Personal \r\n# Equity Plan)\r\n\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\nimport seaborn as sns\r\nimport matplotlib.pyplot as plt\r\nimport time\r\n\r\nfrom sklearn.impute import SimpleImputer\r\nfrom sklearn.decomposition import PCA\r\nfrom sklearn.preprocessing import StandardScaler, LabelEncoder\r\nfrom sklearn.impute import SimpleImputer\r\n\r\nfrom sklearn.linear_model import LogisticRegression\r\nfrom sklearn.svm import SVC\r\nfrom sklearn.neighbors import KNeighborsClassifier\r\nfrom sklearn import tree\r\nfrom sklearn.neural_network import MLPClassifier\r\nfrom sklearn.ensemble import GradientBoostingClassifier\r\nfrom sklearn.tree import DecisionTreeClassifier\r\nfrom sklearn.gaussian_process.kernels import RBF\r\nfrom sklearn.ensemble import RandomForestClassifier\r\nfrom sklearn.naive_bayes import GaussianNB\r\n\r\n# Caricamento del dataset \r\ndata = pd.read_csv(\"bank-data.csv\", sep=';')\r\npd.set_option('display.expand_frame_repr', False)\r\n\r\nprint (\"\\n **********\\n Stampa del n. tot di dati del n. 
print (\"\\n **********\\n Total number of rows and attributes: \\n *********\\n\")\r\nprint(data.shape)\r\nprint (\"\\n **********\\n First 5 rows of the dataset: \\n *********\\n\")\r\ndisplay(data.head()) \r\nprint (\"\\n **********\\n Descriptive statistics: \\n *********\\n\")\r\ndisplay(data.describe())\r\nprint (\"\\n **********\\n Datatype of each column: \\n *********\\n\") \r\ndisplay(data.dtypes)\r\n\r\n\r\n\r\n\r\n# PREPROCESSING\r\n# Normalization of the age bands (right=False makes the bins left-closed): \r\n# ● 0 = interval [18, 40) \r\n# ● 1 = interval [40, 60) \r\n# ● 2 = interval [60, 80) \r\n# ● 3 = interval [80, 100)\r\n\r\n# take the 'age' column from the dataset\r\nage = data['age']\r\n# series holding the column title\r\ntitle = pd.Series([\"age\"])\r\n# with .cut we specify the ranges for the attribute\r\nnew_age = pd.cut(age, [18, 40, 60, 80, 100], labels=False, retbins=False, right=False)\r\nnew_age = title.append(new_age)\r\nprint (\"\\n **********\\n New age band for each person: \\n *********\\n\") \r\nprint(new_age)\r\n\r\nprint (\"\\n **********\\n Histogram of the new age bands: \\n *********\\n\")\r\n\r\n \r\n\r\n# open age.csv, write the new new_age column into it and close it\r\nf = open(\"age.csv\", \"a\") #'a' creates a new file if it does not exist\r\n# write to age.csv (the original wrote to bank-data.csv, overwriting the source dataset)\r\nnew_age.to_csv('age.csv', index = False )\r\nf.close()\r\n\r\ndata_con_newage = pd.read_csv(\"bank-data.csv\", sep=';')\r\ndisplay(data_con_newage.head())\r\n\r\n\"\"\"\r\nist1 = data_con_newage['new_age'].hist(bins=5, figsize=(10,5))\r\nplt.show()\r\n#.plot.barh()\r\n\"\"\"\r\n\r\n\r\n# there are no missing values;\r\n# otherwise we would use, for example, data = data.fillna(0)\r\n\r\n# heterogeneous data, many categorical attributes. \r\n# DISCRETIZATION: make the categorical data uniform by turning it into numeric data\r\n# (conversion to integer values)\r\ndata1 = data.copy(deep = True)\r\nto_be_encoded_cols = data1.columns.values\r\n\r\n\r\ndef label_encode(df, columns):\r\n    for col in columns:\r\n        le = LabelEncoder()\r\n        col_values_unique = list(df[col].unique())\r\n        le_fitted = le.fit(col_values_unique)\r\n        \r\n        col_values = list(df[col].values)\r\n        le.classes_\r\n        col_values_transformed = le.transform(col_values)\r\n        df[col] = col_values_transformed\r\n        \r\nto_be_encoded_cols = data.columns.values\r\nlabel_encode(data, to_be_encoded_cols)\r\n\r\n\r\nprint (\"\\n **********\\n First 5 rows of the dataset after preprocessing: \\n *********\\n\")\r\ndisplay(data.head(5)) \r\nprint (\"\\n **********\\n Descriptive statistics of the dataset after preprocessing: \\n *********\\n\")\r\ndisplay(data.describe())\r\n\r\n\r\n#HISTOGRAM\r\n\"\"\"print (\"\\n **********\\n Histogram with 30 bins \\n *********\\n\")\r\nistogramma = data.hist(bins=30)\r\n\r\nistogramma.set_title('Relationship between client income and their region')\r\nistogramma.set_xlabel('region')\r\nistogramma.set_ylabel('income')\r\n\r\nplt.show()\r\n\"\"\"\r\n#HISTOGRAM 2\r\n#print (\"\\n **********\\n Histogram \\n *********\\n\")\r\n#data.plot.barh()\r\n\r\n\r\n\r\n# open the bank-data2 file and copy the discretized dataset into it\r\nf = open(\"bank-data2.csv\", \"a\")\r\ndata.to_csv('bank-data2.csv', index = False )\r\nf.close()\r\n\r\n# at this point the whole 'id' column is removed by hand, since it is not useful for our analysis,\r\n# and the 'age' column is replaced with the one just created.\r\n#PREPROCESSING FINISHED\r\n\r\n
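pd.cut as used above returns integer bin codes because labels=False; a tiny sketch showing the binning on hypothetical ages:

```python
import pandas as pd

ages = pd.Series([19, 45, 63, 85])
# right=False => left-closed bins: [18,40), [40,60), [60,80), [80,100)
bands = pd.cut(ages, bins=[18, 40, 60, 80, 100], labels=False, right=False)
print(bands.tolist())  # [0, 1, 2, 3]
```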
# GRAPHICAL ANALYSIS of the data\r\n# residential region: INNER_CITY (city centre), TOWN (town), RURAL (countryside), SUBURBAN (suburbs).\r\n# It is useful to compare the statistics (describe) on client income by region.\r\n# Then build a histogram to see how the 'income' variable is distributed.\r\nregion = data[['region', 'income']].groupby('region')\r\nsomma = region.sum().sort_values('income')\r\nsomma.plot.barh()\r\nprint (\"\\n **********\\n Statistics on client income by residential region: \\n *********\\n\")\r\nprint(somma.describe())\r\nprint (\"\\n **********\\n And the related histogram: \\n *********\\n\")\r\nplt.show()\r\n\r\n# The graphical analysis required a preliminary discretization,\r\n# so print a legend to interpret the histogram. \r\nprint (\" ● 0 = inner_city (city centre) \\n ● 1 = rural (countryside) \\n ● 2 = suburban (suburbs) \\n ● 3 = town (town)\")\r\n\r\n\r\n# Income ranges from 20 thousand to 80 thousand.\r\n# Out of about 600 clients, roughly half have an income of about 42000.00.\r\n# The histogram shows instead that the clients with \r\n# the highest income are concentrated in the city centre, while those with the lowest income live in the suburbs.\r\n\r\n \r\n# SIMILAR ANALYSES can be done to evaluate, for example, \r\n# the income distribution by marriage and by number of children,\r\n# or by the other attributes available in the dataset.\r\n\r\nchildren = data[ data['children'] == 0 ]\r\nsom = children[ [ 'region', 'income', 'married' ] ]\r\nsom = som.groupby( ['region' ] )\r\nsom = som.aggregate( { 'income':'first', 'married': 'sum' } )\r\nsom.plot( kind='barh', x='income', y='married' )\r\n\r\nprint (\"\\n **********\\n Statistics on the income of clients without children: \\n *********\\n\")\r\nprint(som.describe())\r\nprint (\"\\n **********\\n And the related histogram: \\n *********\\n\")\r\nplt.show()\r\n\r\n\r\n# Income distribution of married clients with two children.\r\nchildren = data[ data['children'] == 2 ]\r\nsom = children[ [ 'region', 'income', 'married' ] ]\r\nsom = som.groupby( ['region' ] )\r\nsom = som.aggregate( { 'income':'first', 'married': 'sum' } )\r\nsom.plot( kind='barh', x='income', y='married' )\r\nprint(som.describe())\r\nplt.show()\r\n\r\nprint (\"\\n **********\\n Statistics on the income of married clients with 2 children: \\n *********\\n\")\r\nprint(som.describe())\r\nprint (\"\\n **********\\n And the related histogram: \\n *********\\n\")\r\nplt.show()\r\n\r\n\r\n\r\n\r\n# DATA ANALYSIS\r\n# Correlation matrix, with Pep as the target variable\r\ncorrelation_matrix = data.corr()\r\nplt.figure(figsize=(50,20))\r\nax = sns.heatmap(correlation_matrix, vmax=1, square=True, \r\n                 annot=True, fmt='.2f', cmap='GnBu', cbar_kws={\"shrink\":.5}, robust=True)\r\nplt.title('Correlation matrix of the features', fontsize=20)\r\nprint (\"\\n **********\\n Correlation matrix: \\n *********\\n\")\r\nplt.show()\r\n\r\n# The correlation matrix shows that the only variable correlated with the target\r\n# is 'income' (client income), although the correlation is still low (0.21).\r\n# Note also that the next most correlated pair of variables is income and age (0.17).\r\n\r\n\r\n# For a dataset with many features the matrix can become very large, and the \r\n# correlation of a single feature with the others becomes hard to discern.\r\n# To examine the correlations of a single feature, a bar chart is usually better.\r\n
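The bar-chart helper defined next ranks absolute Pearson coefficients against one column; the same numbers can also be read directly from pandas. A toy sketch (made-up data, not the bank dataset):

```python
import pandas as pd

df = pd.DataFrame({'income': [20, 35, 50, 80],
                   'age':    [25, 40, 52, 70],
                   'pep':    [0, 0, 1, 1]})
# features ordered by |Pearson r| with the target, as in the bar chart
ranked = df.corr()['pep'].drop('pep').abs().sort_values(ascending=False)
print(ranked)
```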
def display_corr_with_col(df, col):\r\n    correlation_matrix = data.corr()\r\n    correlation_type = correlation_matrix[col].copy()\r\n    abs_correlation_type = correlation_type.apply(lambda x: abs(x))\r\n    desc_corr_values = abs_correlation_type.sort_values(ascending=False)\r\n    y_values = list(desc_corr_values.values)[1:]\r\n    x_values = range(0, len(y_values))\r\n    xlabels = list(desc_corr_values.keys())[1:]\r\n    fig, ax = plt.subplots(figsize=(8,8))\r\n    ax.bar(x_values, y_values)\r\n    ax.set_title('Correlation of the features with {}' .format(col), fontsize=20)\r\n    ax.set_ylabel('Pearson correlation coefficient', fontsize=16)\r\n    plt.xticks(x_values, xlabels, rotation='vertical')\r\n    plt.show()\r\n    \r\ndisplay_corr_with_col(data, 'pep')\r\n# The bar chart gives the same results as the correlation matrix,\r\n# plus one more correlated variable: married.\r\n# The analysis suggests that the decision to buy the\r\n# PEP generally depends on married, children and income, \r\n# which in turn is strongly influenced by the children and married factors.\r\n\r\n\r\n\r\n\r\n# CLASSIFICATION AND VALIDATION\r\n# Building the machine learning models: several Machine Learning models will now be trained and \r\n# their results compared.\r\n\r\n\r\ndef get_train_test(df, y_col, x_cols, ratio):\r\n    mask = np.random.rand(len(df)) < ratio \r\n    df_train = df[mask]\r\n    df_test = df[~mask]\r\n    y_train = df_train[y_col].values\r\n    y_test = df_test[y_col].values\r\n    X_train = df_train[x_cols].values\r\n    X_test = df_test[x_cols].values\r\n    \r\n    return df_train, df_test, X_train, y_train, X_test, y_test\r\n\r\ny_col_glass = 'pep'\r\nx_cols_glass = list(data.columns.values)\r\nx_cols_glass.remove(y_col_glass)\r\n\r\ntrain_test_ratio = 0.7\r\ndf_train, df_test, X_train, y_train, X_test, y_test = get_train_test(data, y_col_glass, x_cols_glass, train_test_ratio)\r\n\r\n# Build a dictionary whose \r\n# keys are the classifier names and whose values are classifier instances.\r\ndict_classifiers = {\r\n    \"Nearest Neighbors\": KNeighborsClassifier(),\r\n    \"Decision Tree\": tree.DecisionTreeClassifier(),\r\n    \"Random Forest\": RandomForestClassifier(n_estimators=1000),\r\n    \"Naive Bayes\": GaussianNB(),\r\n}\r\n\r\n\r\n\r\ndef batch_classify(X_train, y_train, X_test, y_test, no_classifiers=5, verbose=True):\r\n    \r\n    dict_models={}\r\n    \r\n    # the loop variable was misspelled 'classifiers_name' while the body used 'classifier_name'\r\n    for classifier_name, classifier in list(dict_classifiers.items())[:no_classifiers]:\r\n        # time.clock() was removed in Python 3.8; perf_counter() is the replacement\r\n        t_start = time.perf_counter()\r\n        classifier.fit(X_train,y_train)\r\n        t_end = time.perf_counter()\r\n        \r\n        t_diff = t_end - t_start\r\n        train_score = classifier.score(X_train, y_train)\r\n        test_score = classifier.score(X_test, y_test)\r\n        \r\n        # store the timing under 'train_time', the key display_dict_models reads\r\n        dict_models[classifier_name] = {'model': classifier, 'train_score':train_score, 'test_score':test_score, 'train_time':t_diff}\r\n        if verbose:\r\n            print(\"trained {c} in {f:.2f} s\".format(c = classifier_name, f=t_diff))\r\n    return dict_models\r\n\r\n\r\n
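get_train_test above splits with a random boolean mask, so the 0.7 ratio only holds in expectation and changes on every run. scikit-learn's splitter is an exact, reproducible alternative; a sketch under the same column layout (random_state and stratify are added assumptions, not from the source):

```python
from sklearn.model_selection import train_test_split

X = data[x_cols_glass].values
y = data[y_col_glass].values
X_train, X_test, y_train, y_test = train_test_split(
    X, y, train_size=0.7, random_state=42, stratify=y)
```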
def display_dict_models(dict_models, sort_by='test_score'):\r\n    cls = [key for key in dict_models.keys()]\r\n    test_s = [dict_models[key]['test_score'] for key in cls]\r\n    training_s = [dict_models[key]['train_score'] for key in cls]\r\n    training_t = [dict_models[key]['train_time'] for key in cls]\r\n    \r\n    df_ = pd.DataFrame(data = np.zeros(shape=(len(cls),4)), columns = ['classifier','train_score','test_score','train_time'])\r\n    for ii in range(0,len(cls)):\r\n        df_.loc[ii,'classifier'] = cls[ii]\r\n        df_.loc[ii,'train_score'] = training_s[ii]\r\n        df_.loc[ii,'test_score'] = test_s[ii]\r\n        df_.loc[ii,'train_time'] = training_t[ii]\r\n    \r\n    display(df_.sort_values(by=sort_by, ascending=False))\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n# EVALUATION OF THE CLASSIFIERS\r\n# After defining the individual classifiers, they are evaluated in order to identify \r\n# the best one.\r\n\r\n# K-NEIGHBORS CLASSIFIER\r\nknn = KNeighborsClassifier(n_neighbors = 3)\r\nknn.fit(X_train, y_train)\r\nY_pred = knn.predict(X_test)\r\nacc_knn = round(knn.score(X_train, y_train) * 100, 2)\r\n\r\n# DECISION TREE CLASSIFIER\r\ndecision_tree = DecisionTreeClassifier()\r\ndecision_tree.fit(X_train, y_train)\r\nY_pred = decision_tree.predict(X_test)\r\nacc_decision_tree = round(decision_tree.score(X_train, y_train) * 100, 2)\r\n\r\n# RANDOM FOREST CLASSIFIER\r\nrandom_forest = RandomForestClassifier(n_estimators=1000)\r\nrandom_forest.fit(X_train, y_train)\r\nY_pred = random_forest.predict(X_test)\r\nrandom_forest.score(X_train, y_train)\r\nacc_random_forest = round(random_forest.score(X_train, y_train) * 100, 2)\r\n\r\n# GAUSSIAN (NAIVE BAYES) CLASSIFIER\r\ngaussian = GaussianNB()\r\ngaussian.fit(X_train, y_train)\r\nY_pred = gaussian.predict(X_test)\r\nacc_gaussian = round(gaussian.score(X_train, y_train) * 100, 2)\r\n\r\nresults = pd.DataFrame({\r\n    \r\n    'Model':['Nearest Neighbors','Decision Tree',\r\n             'Random Forest','Naive Bayes'],\r\n    'Score':[acc_knn, acc_decision_tree,\r\n             acc_random_forest, acc_gaussian]})\r\nresult_df = results.sort_values(by='Score', ascending=False)\r\nresult_df = result_df.set_index('Score')\r\nresult_df.head(8)\r\nprint (\"\\n **********\\n Results of the different classifiers: \\n *********\\n\")\r\nprint(results)\r\n\r\n# As we can see, the Random Forest and Decision Tree classifiers score 100% on the training set, so they are \r\n# the ones best suited for an evaluation. The assignment, however, also asks for an evaluation of the \r\n# Bayesian classifiers, and to compare the results.\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n#CROSS VALIDATION\r\n# Now examine the Decision Tree classifier\r\nfrom sklearn.model_selection import cross_val_score\r\nrf = DecisionTreeClassifier()\r\nscores = cross_val_score(rf, X_train, y_train, cv=10, scoring = \"accuracy\")\r\nprint (\"\\n **********\\n Accuracy of the Decision Tree: \\n *********\\n\")\r\nprint(\"Scores: \", scores)\r\nprint(\"Mean: \", scores.mean())\r\nprint(\"Standard deviation: \", scores.std())\r\n\r\n# This model has a mean accuracy of 79% with a standard deviation \r\n# of about 0.10, which shows how precise the estimates are:\r\n# they can vary by about +/- 0.10.\r\n\r\n# Confusion matrix for the Decision Tree\r\nfrom sklearn.model_selection import cross_val_predict\r\nfrom sklearn.metrics import confusion_matrix\r\npredictions = cross_val_predict(decision_tree, X_train, y_train, cv=3)\r\nconfusion_matrix(y_train, predictions)\r\n\r\n# The first row of the confusion matrix concerns the predictions on clients not eligible for a PEP: 169 \r\n# clients were correctly classified as not eligible (true negatives), while 54 were wrongly \r\n# classified as eligible (false positives). The second row concerns the predictions on eligible clients: 41\r\n# were wrongly deemed not eligible (false negatives) and 144 were correctly classified as eligible (true positives).\r\n
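Those four counts determine the precision and recall reported next; a quick check of the arithmetic, using the counts quoted above:

```python
tn, fp = 169, 54   # first row: true negatives, false positives
fn, tp = 41, 144   # second row: false negatives, true positives
precision = tp / (tp + fp)   # 144/198 ~= 0.727 -> the ~72% reported below
recall = tp / (tp + fn)      # 144/185 ~= 0.778 -> the ~79% reported below
print(round(precision, 3), round(recall, 3))
```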
#PRECISION AND RECALL\r\n# Decision Tree\r\nfrom sklearn.metrics import precision_score, recall_score\r\nprint(\"Precision: \", precision_score(y_train, predictions))\r\nprint(\"Recall: \", recall_score(y_train, predictions))\r\n\r\n# The model predicts a correct classification of the client 72% of the time (precision).\r\n# The recall says it predicted the classification of 79% of the clients who are actually eligible." }, { "alpha_fraction": 0.6257668733596802, "alphanum_fraction": 0.666871190071106, "avg_line_length": 31.224489212036133, "blob_id": "5a16205b1cd9e3565d9b13fd8b80e9be86703840", "content_id": "1e605482fd31244a6e2320a0a5395b03fb3a69d6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1636, "license_type": "no_license", "max_line_length": 102, "num_lines": 49, "path": "/Posti Letto/esPostiLetto2.py", "repo_name": "Mik-el/Data-Mining-in-Python-Examples", "src_encoding": "UTF-8", "text": "#RELATIONSHIP BETWEEN NUMBER OF BEDS AND NUMBER OF HOSPITALS\r\n\r\nimport pandas as pd\r\nimport matplotlib.pyplot as plt\r\nimport csv\r\n\r\ndati = pd.read_csv('posti_letto.csv', sep=';')\r\n# convert_objects() was removed from pandas; to_numeric is the closest modern equivalent\r\ndati = dati.apply(pd.to_numeric, errors='ignore')\r\n\r\n#CHECK of the 2011 values\r\nanno2011 = [dati['Anno'] == 2011]\r\n# wrapped in [...] this is a plain Python list containing a boolean Series\r\nprint (\"\\n **********\\n Original dataset, hospitals surveyed in 2011 \\n *********\\n\")\r\ndisplay(anno2011)\r\nprint (\"\\n **********\\n The datatype of anno2011 is: \\n *********\\n\")\r\nprint(type(anno2011))\r\n\r\n\r\n\r\n#ASSIGNMENT\r\n# indexing the DataFrame with the boolean mask returns a filtered DataFrame\r\nanno2011_2 = dati[dati['Anno'] == 2011]\r\n\r\nprint (\"\\n **********\\n Hospitals surveyed in 2011, placed in a new dataframe \\n *********\\n\")\r\ndisplay(anno2011_2)\r\n\r\n\r\n# selecting a single column yields a Series\r\nletti = anno2011_2['Totale posti letto']\r\nprint (\"\\n **********\\n The datatype of letti is: \\n *********\\n\")\r\nprint(type(letti))\r\n\r\nprint (\"\\n **********\\n Descriptive statistics of letti \\n *********\\n\")\r\nprint(letti.describe())\r\n\r\nprint (\"\\n **********\\n Histogram with 30 bins \\n *********\\n\")\r\nistogramma = letti.hist(bins=30)\r\n\r\nistogramma.set_title('Relationship between number of beds and number of hospitals')\r\nistogramma.set_xlabel('Number of beds')\r\nistogramma.set_ylabel('Number of hospitals')\r\n\r\nplt.show()\r\n\r\n# many hospitals have few beds,\r\n# and few hospitals have many beds;\r\n# without the plot the mean is not \"reliable\":\r\n# the mean should always be read together with a graphical representation,\r\n# since on its own it is not very representative of reality.\r\n\r\n" }, { "alpha_fraction": 0.61869877576828, "alphanum_fraction": 0.6275143027305603, "avg_line_length": 32.12711715698242, "blob_id": "c54d977d77ffb6e0e533969914bd99bb469c59e3", "content_id": "d17eb40b456bc77f42fdb8b9ad44b5e7fe4a41ee", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8056, "license_type": "no_license", "max_line_length": 133, "num_lines": 236, "path": "/CRM/crm.py", "repo_name": "Mik-el/Data-Mining-in-Python-Examples", "src_encoding": "UTF-8", "text": "import numpy as np\r\nimport sklearn as sk\r\nimport pandas as pd\r\n
import seaborn as sns\r\nimport time\r\nimport matplotlib.pyplot as plt\r\n\r\nfrom sklearn.impute import SimpleImputer\r\nfrom sklearn.decomposition import PCA\r\nfrom sklearn.preprocessing import StandardScaler, LabelEncoder\r\nfrom sklearn.impute import SimpleImputer\r\n\r\n# classifiers\r\nfrom sklearn.linear_model import LogisticRegression\r\nfrom sklearn.ensemble import RandomForestClassifier\r\nfrom sklearn import tree\r\nfrom sklearn.ensemble import GradientBoostingClassifier\r\nfrom sklearn.neighbors import KNeighborsClassifier\r\nfrom sklearn.gaussian_process.kernels import RBF\r\nfrom sklearn.tree import DecisionTreeClassifier\r\nfrom sklearn.naive_bayes import GaussianNB\r\n\r\ndata = pd.read_csv(\"CRM.csv\", sep=\";\")\r\npd.set_option('display.expand_frame_repr', False)\r\n\r\nprint (\"\\n **********\\n Total number of rows and attributes: \\n *********\\n\")\r\nprint(data.shape)\r\nprint (\"\\n **********\\n First 5 rows of the dataset: \\n *********\\n\")\r\ndisplay(data.head()) \r\nprint (\"\\n **********\\n Descriptive statistics: \\n *********\\n\")\r\ndisplay(data.describe())\r\nprint (\"\\n **********\\n Datatype of each column: \\n *********\\n\") \r\ndisplay(data.dtypes)\r\n\r\n\r\n############## PREPROCESSING #######################\r\n# Fill the missing values with 0\r\ndata = data.fillna(0)\r\n\r\n####### DISCRETIZATION (categorical values -> numeric values) ######\r\n\r\nnewdata = dict() # dictionary the encoded columns are written into\r\nfor key in data.keys():\r\n    if data[key].dtypes == 'object':\r\n# declare a LabelEncoder() (it will turn the categorical attribute into a numeric one)\r\n        le = LabelEncoder()\r\n# fit the encoder\r\n        le.fit(data[key])\r\n\r\n        newdata[key] = le.transform(data[key])\r\n    else:\r\n        newdata[key] = data[key]\r\n    \r\n    \r\n# To apply the classification algorithm, \r\n# copy the dataset from a dictionary into a (new) dataframe\r\nres = pd.DataFrame.from_dict(newdata, orient='columns', dtype=None)\r\n\r\n\r\n###### if first_amount_spent and number_of_products = 0, substitute the MEAN ######\r\nnumpy_array = res.values\r\nX=numpy_array[:,[1,2]]\r\n#print(X)\r\n\r\nimp = SimpleImputer(missing_values=0, strategy=\"mean\")\r\nX = imp.fit_transform(X)\r\nprint(X[5])\r\n\r\n\r\nprint (\"\\n **********\\n First 5 rows of the dataset after pre-processing: \\n *********\\n\")\r\ndisplay(res.head())\r\nprint (\"\\n **********\\n Descriptive statistics of the dataset after preprocessing: \\n *********\\n\")\r\ndisplay(data.describe())\r\n\r\n\r\n\r\n\r\n\r\n###### GRAPHICAL ANALYSIS ########\r\n\r\n# the groupby key must be part of the selection (the original selected only two columns and then grouped by a third)\r\nprova = data[['first_amount_spend', 'center', 'age51_89']].groupby('age51_89')\r\nprova2 = prova.sum().sort_values('first_amount_spend')\r\nprova2.plot.barh()\r\nprint (\"\\n **********\\n Histogram of first_amount_spent in relation to age 51-89: \\n *********\\n\")\r\nplt.show()\r\n\r\n\r\n\r\n\r\n############### DATA ANALYSIS: CORRELATION MATRIX ####################\r\n\r\ndata_copy = res.copy(deep=True)\r\ncorrelation_matrix = data_copy.corr()\r\nplt.figure(figsize=(50,20))\r\n\r\nax = sns.heatmap(correlation_matrix,square=True, annot=True, fmt='.2f', cmap='PuBu', robust=True)\r\n\r\nplt.title('Correlation matrix', fontsize=20)\r\nplt.show()\r\n\r\n\r\n\r\n############### CLASSIFICATION AND VALIDATION ####################\r\n# the method turns a dataframe into a train and a test set; you must specify:\r\n    #1- the train:test ratio (usually 0.7)\r\n    #2- the column with the Y values\r\n
def get_train_test(df, y_col, x_cols, ratio):\r\n    \r\n    mask = np.random.rand(len(df)) < ratio \r\n    df_train = df[mask]\r\n    # ~ inverts the boolean mask (Alt + 0126 on Windows keyboards)\r\n    df_test = df[~mask]\r\n    \r\n    y_train = df_train[y_col].values\r\n    y_test = df_test[y_col].values\r\n    X_train = df_train[x_cols].values\r\n    X_test = df_test[x_cols].values\r\n    \r\n    return df_train, df_test, X_train, y_train, X_test, y_test\r\n\r\n\r\n\r\n######################################\r\ny_col_glass = 'Y'\r\nx_cols_glass = list(data_copy.columns.values)\r\nx_cols_glass.remove(y_col_glass)\r\n\r\ntrain_test_ratio = 0.7\r\ndf_train, df_test, X_train, y_train, X_test, y_test = get_train_test(data_copy, y_col_glass, x_cols_glass, train_test_ratio)\r\n\r\n\r\n#######################################\r\ndict_classifiers = {\r\n    \"Nearest Neighbors\": KNeighborsClassifier(),\r\n    \"Decision Tree\": tree.DecisionTreeClassifier(),\r\n    \"Random Forest\": RandomForestClassifier(n_estimators=1000),\r\n    \"Naive Bayes\": GaussianNB(),\r\n}\r\n\r\n\r\n\r\n#######################################\r\ndef batch_classify(X_train, y_train, X_test, y_test, no_classifiers=5, verbose=True):\r\n    \r\n    dict_models={}\r\n    \r\n    # the loop variable was misspelled 'classifiers_name' while the body used 'classifier_name'\r\n    for classifier_name, classifier in list(dict_classifiers.items())[:no_classifiers]:\r\n        # time.clock() was removed in Python 3.8; perf_counter() is the replacement\r\n        t_start = time.perf_counter()\r\n        classifier.fit(X_train,y_train)\r\n        t_end = time.perf_counter()\r\n        \r\n        t_diff = t_end - t_start\r\n        train_score = classifier.score(X_train, y_train)\r\n        test_score = classifier.score(X_test, y_test)\r\n        \r\n        # store the timing under 'train_time', the key display_dict_models reads\r\n        dict_models[classifier_name] = {'model': classifier, 'train_score':train_score, 'test_score':test_score, 'train_time':t_diff}\r\n        if verbose:\r\n            print(\"trained {c} in {f:.2f} s\".format(c = classifier_name, f=t_diff))\r\n    return dict_models\r\n\r\n\r\n#######################################\r\ndef display_dict_models(dict_models, sort_by='test_score'):\r\n    cls = [key for key in dict_models.keys()]\r\n    test_s = [dict_models[key]['test_score'] for key in cls]\r\n    training_s = [dict_models[key]['train_score'] for key in cls]\r\n    training_t = [dict_models[key]['train_time'] for key in cls]\r\n    \r\n    df_ = pd.DataFrame(data = np.zeros(shape=(len(cls),4)), columns = ['classifier','train_score','test_score','train_time'])\r\n    for ii in range(0,len(cls)):\r\n        df_.loc[ii,'classifier'] = cls[ii]\r\n        df_.loc[ii,'train_score'] = training_s[ii]\r\n        df_.loc[ii,'test_score'] = test_s[ii]\r\n        df_.loc[ii,'train_time'] = training_t[ii]\r\n    \r\n    display(df_.sort_values(by=sort_by, ascending=False))\r\n\r\n\r\n\r\n#****** EVALUATION OF THE CLASSIFIERS *******\r\n# accuracy score = knn.score(...)\r\n# K-NEIGHBORS CLASSIFIER\r\nknn = KNeighborsClassifier(n_neighbors = 3)\r\nknn.fit(X_train, y_train)\r\nY_pred = knn.predict(X_test)\r\nacc_knn = round(knn.score(X_train, y_train) * 100, 2)\r\n\r\n# DECISION TREE CLASSIFIER\r\ndecision_tree = DecisionTreeClassifier()\r\ndecision_tree.fit(X_train, y_train)\r\nY_pred = decision_tree.predict(X_test)\r\nacc_decision_tree = round(decision_tree.score(X_train, y_train) * 100, 2)\r\n\r\n# RANDOM FOREST CLASSIFIER\r\nrandom_forest = RandomForestClassifier(n_estimators=1000)\r\nrandom_forest.fit(X_train, y_train)\r\nY_pred = random_forest.predict(X_test)\r\nrandom_forest.score(X_train, y_train)\r\nacc_random_forest = round(random_forest.score(X_train, y_train) * 100, 2)\r\n\r\n# GAUSSIAN (NAIVE BAYES) CLASSIFIER\r\ngaussian = GaussianNB()\r\ngaussian.fit(X_train, y_train)\r\nY_pred = gaussian.predict(X_test)\r\nacc_gaussian = round(gaussian.score(X_train, y_train) * 100, 2)\r\n\r\nresults = pd.DataFrame({\r\n
\r\n    'Model':['Nearest Neighbors','Decision Tree',\r\n             'Random Forest','Naive Bayes'],\r\n    'Score':[acc_knn, acc_decision_tree,\r\n             acc_random_forest, acc_gaussian]})\r\n\r\n\r\nresult_df = results.sort_values(by='Score', ascending=False)\r\nresult_df = result_df.set_index('Score')\r\nresult_df.head(8)\r\nprint (\"\\n **********\\n Evaluation of the classifiers: \\n *********\\n\")\r\nprint (results)\r\n\r\n\r\n############ RANDOM FOREST: CROSS-VALIDATION ###############\r\nfrom sklearn.model_selection import cross_val_score\r\nprint (\"\\n **********\\n Cross validation/accuracy of the Random Forest: \\n *********\\n\")\r\nrf = RandomForestClassifier(n_estimators=100)\r\nscores = cross_val_score(rf, X_train, y_train, cv=10, scoring = \"accuracy\")\r\nprint(\"Scores: \", scores)\r\nprint(\"Mean: \", scores.mean())\r\nprint(\"Standard deviation: \", scores.std())\r\n\r\n\r\n############ PRECISION AND RECALL ###############\r\nfrom sklearn.model_selection import cross_val_predict\r\nfrom sklearn.metrics import precision_score, recall_score\r\n\r\n# precision/recall need per-sample predictions, not CV accuracy scores\r\n# (the original passed `scores` to precision_score, which is a shape and semantics error)\r\npredictions = cross_val_predict(rf, X_train, y_train, cv=10)\r\nprint(\"Precision: \", precision_score(y_train, predictions))\r\nprint(\"Recall: \", recall_score(y_train, predictions))\r\n" }, { "alpha_fraction": 0.6590509414672852, "alphanum_fraction": 0.6959578394889832, "avg_line_length": 28.842105865478516, "blob_id": "67fb893105b4c816ff45432a4010dbeab6b535af", "content_id": "a0e8c6325069388afa6a0479463bbff13c047b64", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 569, "license_type": "no_license", "max_line_length": 89, "num_lines": 19, "path": "/Posti Letto/esPostiLetto3.py", "repo_name": "Mik-el/Data-Mining-in-Python-Examples", "src_encoding": "UTF-8", "text": "#HOSPITAL WITH THE MOST BEDS\r\nimport pandas as pd\r\nimport matplotlib.pyplot as plt\r\nimport csv\r\n\r\n\r\n\r\ndati = pd.read_csv('posti_letto.csv', sep=';', encoding= \"UTF-8\")\r\n\r\nanno2011 = dati[dati['Anno'] == 2011]\r\n#anno2011 = dati['Anno'] == 2011\r\n#anno2011 = dati #OK\r\nanno2011 = anno2011.sort_values('Totale posti letto', ascending=False)\r\n\r\n# print only the 'Denominazione struttura' and 'Totale posti letto' attributes of the 2011 dataframe\r\n# (the original printed an undefined variable `anno` and called pd.option_context without `with`)\r\nwith pd.option_context('display.max_rows', None, 'display.max_columns', None):\r\n    print(anno2011[['Denominazione struttura', 'Totale posti letto']])\r\n\r\n" } ]
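esPostiLetto3.py sorts the whole 2011 frame just to read off the hospitals with the most beds; nlargest expresses the same query directly. A toy sketch (made-up rows):

```python
import pandas as pd

df = pd.DataFrame({'Denominazione struttura': ['A', 'B', 'C'],
                   'Totale posti letto': [120, 950, 430]})
# equivalent to sort_values('Totale posti letto', ascending=False).head(2)
print(df.nlargest(2, 'Totale posti letto'))
```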
14
dikshantraj09/Shutdown-Over-the-Internet-
https://github.com/dikshantraj09/Shutdown-Over-the-Internet-
4ecf3743a0df1ff8273dcff4d1fd15943129c714
79c0e9aa9febc4e1acec445a42b42644cf5086d6
be4c2c7ef4b535e66329863d215caad155f748dc
refs/heads/master
2022-11-10T13:03:58.716806
2020-06-22T14:07:53
2020-06-22T14:07:53
266,693,714
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.628654956817627, "alphanum_fraction": 0.6447368264198303, "avg_line_length": 19.375, "blob_id": "5ea47a5439bb030a1d72aa75a40a1a54fc9e5e67", "content_id": "cc90514ae63cb593ffcbac64735d8b7e40461105", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 684, "license_type": "no_license", "max_line_length": 50, "num_lines": 32, "path": "/Server.py", "repo_name": "dikshantraj09/Shutdown-Over-the-Internet-", "src_encoding": "UTF-8", "text": "#Run this on the Machine you want to shutdown\r\nimport os\r\nimport socket\r\nimport time\r\nimport sys\r\n\r\n\r\ns=socket.socket()\r\nprint(socket.gethostname())\r\nhost='0.0.0.0'\r\nport=96\r\ns.bind((host,port))\r\nprint(\"\\n Waiting For connection ...\\n\\n\")\r\ns.listen(9)\r\nconn,addr=s.accept()\r\nprint(\"\")\r\nprint(addr,\"-Connected\")\r\nprint(\"\")\r\n \r\ncommand=conn.recv(1024)\r\ncommand=command.decode()\r\nprint (\"\")\r\nif command == \"shutdown\" or command=='hibernate': \r\n print(\"\")\r\n print(\"Shutdown Command Recieved\")\r\n os.system(\"shutdown /s\")\r\nelif command=='hibernate': \r\n print(\"\")\r\n print(\"Hibernate Command Recieved\")\r\n os.system(\"shutdown /h\") \r\nelse:\r\n print(\"No or wrong Command\")\r\n" }, { "alpha_fraction": 0.767169177532196, "alphanum_fraction": 0.7755444049835205, "avg_line_length": 58.599998474121094, "blob_id": "44c8eb6b18927514120465ca19b8d3ce60800048", "content_id": "9de70aee115901c6fd937ed3271a56a97e555e5d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 597, "license_type": "no_license", "max_line_length": 104, "num_lines": 10, "path": "/README.md", "repo_name": "dikshantraj09/Shutdown-Over-the-Internet-", "src_encoding": "UTF-8", "text": "# Shutdown-Over-the-Internet-\nThis is a simple server-client project made using sockets from pythons built in library.\n# Installation-\n1. Run the batch or .bat file on the system you are trying to shutdown.\n (This batch file is used for port forwarding from the internet to allow connection to your local ip)\n2. Run the Server File in Python3.\n3. Run the Client File on the device from which you are trying to access the other system. \n4. Now, Enter the command 'Shutdown'or 'Hibernate' and the remote system will be shutdown or Hibernated.\n\n#The port in client and the batch file should match \n" }, { "alpha_fraction": 0.7302631735801697, "alphanum_fraction": 0.7434210777282715, "avg_line_length": 19.200000762939453, "blob_id": "36da55bdad94d4d9e26c000531948fb9495e917e", "content_id": "9e3a093290e596b3b9078c37c46a553caa518572", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 304, "license_type": "no_license", "max_line_length": 63, "num_lines": 15, "path": "/Client.py", "repo_name": "dikshantraj09/Shutdown-Over-the-Internet-", "src_encoding": "UTF-8", "text": "#Run this file on the system you want to send the command from.\nimport os\nimport socket\nimport time\nimport sys\n\ns=socket.socket()\nhost='serveo.net'\nport=9632\ns.connect((host,port))\nprint(\"\")\nprint(\"Connected To Server\")\ncommand=input(\"Enter the Command:\")\ns.send(command.encode())\nprint(\"Command Sent\")\n\n" } ]
3
Yugdeep1996/First
https://github.com/Yugdeep1996/First
8f7fe377895ffea32858e022c2fb2c1b6437974e
90df7671c0a2781e2c3faefeb5a994a67f50167e
73a2795b002e39f2281592364d68e3bcd393097e
refs/heads/master
2023-08-16T11:03:27.162483
2021-09-14T07:32:11
2021-09-14T07:32:11
406,264,378
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.627106249332428, "alphanum_fraction": 0.6329670548439026, "avg_line_length": 28.673913955688477, "blob_id": "029c00d7e5aed95d10b37a193b6adce881aa7b0b", "content_id": "7d2fab92398a4b4ff5097b7f8aee4eb3c8e311fa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1365, "license_type": "no_license", "max_line_length": 69, "num_lines": 46, "path": "/Django/Django_project/Django_app/function.py", "repo_name": "Yugdeep1996/First", "src_encoding": "UTF-8", "text": "# global variables used in Django_app/views.\nIFSC_Hit_Count = {}\nAPI_Hit_Count = {}\n\n\n# Create your functions here.\n\n\ndef api_count(request):\n '''\n The function counts all urls hits.\n\n Parameters:\n request: Required for retrieving complete requested url.\n '''\n \n # if url exists then increment its value by 1.\n if API_Hit_Count.get(str(request.build_absolute_uri())):\n val = API_Hit_Count.get(str(request.build_absolute_uri()))\n val += 1\n upgraded_dict = {str(request.build_absolute_uri()): val}\n API_Hit_Count.update(upgraded_dict)\n else:\n # update a new url with hit count as 1.\n API_Hit_Count.update({str(request.build_absolute_uri()): 1})\n return\n\n\ndef ifsc_count(request):\n '''\n The function counts all ifsc search urls hits.\n\n Parameters:\n request: Required for retrieving complete requested url.\n '''\n \n # if ifsc url exists then increment its value by 1.\n if IFSC_Hit_Count.get(str(request.build_absolute_uri())):\n val = IFSC_Hit_Count.get(str(request.build_absolute_uri()))\n val += 1\n upgraded_dict = {str(request.build_absolute_uri()): val}\n IFSC_Hit_Count.update(upgraded_dict)\n else:\n # update a new ifsc url with hit count as 1.\n IFSC_Hit_Count.update({str(request.build_absolute_uri()): 1})\n return\n" }, { "alpha_fraction": 0.5743885636329651, "alphanum_fraction": 0.5799931883811951, "avg_line_length": 36.272151947021484, "blob_id": "7a199854ecbd9c554ab1f88754f8e27b0fe64253", "content_id": "2310538bcae9d6b9e6309ba39717dccd5ef16507", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5888, "license_type": "no_license", "max_line_length": 100, "num_lines": 158, "path": "/Flask/Flask_project/app.py", "repo_name": "Yugdeep1996/First", "src_encoding": "UTF-8", "text": "from flask import Flask, request, jsonify \nimport pandas as pd\nimport time\nfrom path import workbook_path\n\nprint('Loading data...')\n# global variables declared.\nbank_data = []\nStatistics = []\nbank_leaderboard_data = []\n\n# storing xlsx data in memory.\n# reading xlsx Sheet1 data.\nworkbook1 = pd.read_excel(workbook_path, sheet_name = 'Sheet1')\nworkbook1.head()\n# extracting xlsx Sheet1 data. \nfor i in range(0, len(workbook1)):\n bank_data.append({\n \"BANK\": workbook1['BANK'][i],\n \"IFSC\": workbook1['IFSC'][i],\n \"MICR\": workbook1['MICR CODE'][i],\n \"CODE\": workbook1['BRANCH'][i],\n \"BRANCH\": workbook1['ADDRESS'][i],\n \"ADDRESS\": workbook1['STD CODE'][i],\n \"STD_CODE\": workbook1['CONTACT'][i],\n \"CITY\": workbook1['CITY'][i],\n \"DISTRICT\": workbook1['DISTRICT'][i],\n \"STATE\": workbook1['STATE'][i],\n })\n\n# reading xlsx Pivot Table_Sheet1_1 data.\nworkbook2 = pd.read_excel(workbook_path, sheet_name = 'Pivot Table_Sheet1_1')\nworkbook2.head()\n# extracting xlsx Pivot Table_Sheet1_1 data. 
\n# reading xlsx Pivot Table_Sheet1_1 data.\nworkbook2 = pd.read_excel(workbook_path, sheet_name = 'Pivot Table_Sheet1_1')\nworkbook2.head()\n# extracting xlsx Pivot Table_Sheet1_1 data. \nfor i in range(1, len(workbook2)):\n    bank_leaderboard_data.append({\n        workbook2['BANK'][i]: str(workbook2['Count - BANK'][i]),\n    })\n\n\napp = Flask(__name__) \napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\n\[email protected](\"/\")\ndef index(): \n    # renamed from Search_ifsc_viewset: Flask rejects two view functions with the same name\n    return jsonify({'MESSAGE': 'Welcome user'})\n\[email protected](\"/ifsc_search\")\ndef Search_ifsc_viewset():\n    '''\n    The function for the searched ifsc with timestamp.\n\n    Parameters:\n        ifsc_code (String): The ifsc code to be searched.\n    \n    Returns:\n        results: A list which contains the ifsc bank data.\n    '''\n    \n    # requested parameter.\n    if request.args.get('ifsc_code'):\n        # storing ifsc code matched results\n        results = [data for data in bank_data if data['IFSC'] == request.args.get('ifsc_code')]\n        if results:\n            # storing ifsc code with timestamp for statistics results.\n            Statistics.append({request.args.get('ifsc_code'): str(time.time())})\n            return jsonify(results)\n        else: \n            return jsonify({'MESSAGE': 'NO RESULTS FOUND'})\n    else: \n        return jsonify({'MESSAGE': 'ifsc_code parameter required'})\n\[email protected](\"/bank_leader_board\", methods=['GET', 'POST'])\ndef bank_leader_board_viewset():\n    '''\n    The function for bank leader board data.\n\n    Parameters:\n        sortorder (String): The sorting order ASC/DESC,\n        fetchcount (String): The counts to be fetched.\n    \n    Returns:\n        results: A list which contains the bank leader board data.\n    '''\n\n    if bank_leaderboard_data:\n        results = []\n        if int(request.args.get('fetchcount')) <= len(bank_leaderboard_data):\n            if request.args.get('sortorder') == 'ASC':\n                # if sortorder is ASC then get results in ascending order.\n                for i in range(0, int(request.args.get('fetchcount'))):\n                    results.append(bank_leaderboard_data[i]) \n            else:\n                # if sortorder is DESC then get results in descending order.\n                for i in reversed(range(int(request.args.get('fetchcount')))):\n                    results.append(bank_leaderboard_data[i])\n            return jsonify(results) \n        else:\n            if request.args.get('sortorder') == 'ASC':\n                # if sortorder is ASC then get results in ascending order.\n                for i in range(0, len(bank_leaderboard_data)):\n                    results.append(bank_leaderboard_data[i]) \n            else:\n                # if sortorder is DESC then get results in descending 
order.\n if request.args.get('sortorder') == 'DESC':\n for i in reversed(range(len(Statistics))):\n results.append(Statistics[i]) \n else:\n # if sortorder is ASC then get results in ascending order.\n for i in range(0, len(Statistics)):\n results.append(Statistics[i])\n return jsonify(results, {'MESSAGE': 'ONLY '+str(len(Statistics))+' RESULTS'})\n else: \n return jsonify({'MESSAGE': 'NO RESULTS'})\n\n\nif __name__ == '__main__': \n app.run(debug = False)" }, { "alpha_fraction": 0.6812682747840881, "alphanum_fraction": 0.6908635497093201, "avg_line_length": 37.66128921508789, "blob_id": "58c0636fdb0c549dc838f0421c84abaacff0be9b", "content_id": "0a1aa19a42c1c03fa9d520c7cb72a95eabb66ef0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2399, "license_type": "no_license", "max_line_length": 302, "num_lines": 62, "path": "/Django/README.md", "repo_name": "Yugdeep1996/First", "src_encoding": "UTF-8", "text": "PROJECT - GROUP DATA & ANALYTICS:\n In-memory analytics is an approach to querying data when it resides in a computer’s random access memory (RAM), as opposed to querying data that is stored on physical disks. This results in vastly shortened query response times, allowing analytic applications to support faster business decisions.\n\nREQUIREMENTS (Prerequisites):\n Python 3.6 and up.\n\nINSTALLATION:\n * for ubuntu:\n 1. Installing venv:\n sudo apt install python3-venv\n 2. Creating an environment (venv):\n python3 -m venv venv\n 3. Activating the venv:\n source venv/bin/activate\n\n * for windows:\n 1. Installing virtualenv:\n pip install virtualenv\n 2. Creating an environment (venv):\n virtualenv venv\n 3. Activating the venv:\n venv\\Scripts\\activate\n\n * Installing requirements in venv:\n for ubuntu:\n pip3 install -r requirements.txt\n for windows:\n pip install -r requirements.txt\n\nRUNNING THE TESTS:\n * for ubuntu:\n python3 manage.py test\n * for windows:\n python manage.py test\n\nRUNNING THE SERVER:\n * for ubuntu:\n 1. python3 manage.py migrate\n 2. python3 manage.py runserver\n * for windows:\n 1. python manage.py migrate\n 2. python manage.py runserver\n\nHOW TO CONTRIBUTE:\n Pull requests are welcome. For major changes, please open an issue first to discuss what you would like to change. Please make sure to update tests as appropriate. If you'd like to contribute, please fork the repository and make changes as you'd like. Pull requests are warmly welcome.\n\n Steps to contribute:\n 1. Fork this repository (link to repository)\n 2. Create your feature branch (git checkout -b feature/fooBar)\n 3. Commit your changes (git commit -am 'Add some fooBar')\n 4. Push to the branch (git push origin feature/fooBar)\n 5. 
Create a new Pull Request\n \n Additionally you can create another document called CONTRIBUTING.md which gives instructions about how to contribute.\n\n Please read CONTRIBUTING.md for details on our code of conduct, and the process for submitting pull requests to us.\n\nAUTHORS:\n Yugdeep Riar - [email protected]\n\nCREDITS:\n A heartfelt thank you to @Pranav_sir and @Trishant_sir for the encouragement I needed to get this idea off the ground and start writing!\n" }, { "alpha_fraction": 0.6460176706314087, "alphanum_fraction": 0.665486752986908, "avg_line_length": 34.34375, "blob_id": "7c8b946cc10905ac4a56cc543f9274da3f77e373", "content_id": "882f6a5ae6bf307b87b009c39075f59121ab4ac8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1130, "license_type": "no_license", "max_line_length": 79, "num_lines": 32, "path": "/Flask/Flask_project/test.py", "repo_name": "Yugdeep1996/First", "src_encoding": "UTF-8", "text": "from app import app\nimport unittest\n\nclass Test_routes(unittest.TestCase):\n\n # Check for response 200\n def test_ifsc_search(self):\n client = app.test_client(self)\n response = client.get('/ifsc_search')\n status_code = response.status_code\n self.assertEqual(status_code, 200)\n self.assertEqual(response.content_type, \"application/json\")\n\n # Check for response 200\n # Check id data returned is application/json\n def test_bank_leader_board(self):\n client = app.test_client(self)\n response = client.get('/bank_leader_board?sortorder=ASC&fetchcount=10')\n status_code = response.status_code\n self.assertEqual(status_code, 200)\n self.assertEqual(response.content_type, \"application/json\")\n\n # Check for response 200\n def test_stats(self):\n client = app.test_client(self)\n response = client.get('/stats?sortorder=ASC&fetchcount=10')\n status_code = response.status_code\n self.assertEqual(status_code, 200)\n self.assertEqual(response.content_type, \"application/json\")\n\nif __name__ == \"__main__\":\n unittest.main()" }, { "alpha_fraction": 0.6659685969352722, "alphanum_fraction": 0.681675374507904, "avg_line_length": 40.565216064453125, "blob_id": "4090ee1a0777f437d85557508131feb87f1fb331", "content_id": "6de59b04abeead6454dc6489d8cbf3a642454e28", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 955, "license_type": "no_license", "max_line_length": 72, "num_lines": 23, "path": "/Django/Django_project/Django_app/tests.py", "repo_name": "Yugdeep1996/First", "src_encoding": "UTF-8", "text": "from rest_framework import status\nfrom rest_framework.test import APITestCase\n\nclass TestApiMethods(APITestCase):\n def test_ifsc_search(self):\n response = self.client.get('/ifsc_search/', format='json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n \n def test_bank_leader_board(self):\n response = self.client.get('/bank_leader_board/', format='json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n def test_statistics(self):\n response = self.client.get('/statistics/', format='json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n def test_api_count(self):\n response = self.client.get('/api_count/', format='json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n def test_ifsc_count(self):\n response = self.client.get('/ifsc_count/', format='json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)" }, { "alpha_fraction": 0.5642755627632141, "alphanum_fraction": 0.5806108117103577, 
"avg_line_length": 44.65945816040039, "blob_id": "2399a2ffc4d62e965abe08739a9c2b4f6ad22258", "content_id": "4f57c5c0433a18dab655860407c8b6369af91061", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8448, "license_type": "no_license", "max_line_length": 182, "num_lines": 185, "path": "/Django/Django_project/Django_app/views.py", "repo_name": "Yugdeep1996/First", "src_encoding": "UTF-8", "text": "from rest_framework import viewsets\nfrom rest_framework.response import Response\nfrom rest_framework import status\nimport traceback\n\nfrom django.views.decorators.cache import cache_page\nfrom django.utils.decorators import method_decorator\n\nimport urllib.request\n\nimport json\n\n# local imports\nfrom .validations import *\nfrom .function import *\n\n\n# Create your views here.\n\n\nclass Ifsc_viewset(viewsets.ViewSet):\n # cache requested url for 20 seconds.\n @method_decorator(cache_page(20))\n def list(self, request):\n try:\n '''\n The function to search for related ifsc bank data.\n \n Parameters:\n ifsc_code (String): The code to be searched.\n \n Returns:\n list_of_data: A list which contains the ifsc related bank data.\n '''\n\n # function for all url hit counts.\n api_count(request)\n # function for ifsc url hit counts.\n ifsc_count(request)\n\n # required requested parameter.\n if request.GET.get('ifsc_code'):\n # validating parameter.\n if isValidIFSCode(request.GET.get('ifsc_code')):\n # source contain JSON data from API.\n source = urllib.request.urlopen('http://127.0.0.1:5000/ifsc_search?ifsc_code='+request.GET.get('ifsc_code')).read()\n # converting JSON data to a dictionary.\n list_of_data = json.loads(source)\n return Response({\"data\": {'IFSC RESULTS': list_of_data}, \"message\": \"IFSC SEARCHED DATA\", \"success\": True}, status=status.HTTP_200_OK)\n else:\n raise Exception(\"Ifsc code invalid !\")\n else:\n raise Exception(\"Ifsc code required !\")\n except Exception as error:\n traceback.print_exc()\n return Response({\"message\": str(error), \"success\": False}, status=status.HTTP_200_OK)\n\n\nclass Bank_leader_board_Viewset(viewsets.ViewSet):\n def list(self, request):\n try:\n '''\n The function for bank leader board data.\n \n Parameters:\n sortorder (String): The sorting order ASC/DESC,\n fetchcount (String): The counts to be fetched.\n \n Returns:\n list_of_data: A list which contains the bank leader board data.\n '''\n\n # function for all url hit counts.\n api_count(request)\n \n # validating parameters.\n if not isValidsortorder(request.GET.get('sortorder') or \"\"):\n raise Exception(\"Sort input must be 'ASC' or 'DESC' !\")\n if not isValidfetchcount(request.GET.get('fetchcount') or \"\"):\n raise Exception(\"fetchcount must be an +ve integer !\")\n\n # required requested parameters.\n if request.GET.get('sortorder') and request.GET.get('fetchcount'):\n # source contain JSON data from API.\n source = urllib.request.urlopen('http://127.0.0.1:5000/bank_leader_board?sortorder='+request.GET.get('sortorder')+'&fetchcount='+request.GET.get('fetchcount')).read()\n # converting JSON data to a dictionary.\n list_of_data = json.loads(source)\n return Response({\"BANK LEADER BOARD RESULTS\": list_of_data, \"message\": \"BANK LEADER BOARD DATA\", \"success\": True}, status=status.HTTP_200_OK)\n else:\n if request.GET.get('sortorder'):\n # source contain JSON data from API.\n source = urllib.request.urlopen('http://127.0.0.1:5000/bank_leader_board?sortorder='+request.GET.get('sortorder')+'&fetchcount=10').read()\n 
elif request.GET.get('fetchcount'):\n # source contain JSON data from API.\n source = urllib.request.urlopen('http://127.0.0.1:5000/bank_leader_board?sortorder=DESC&fetchcount='+request.GET.get('fetchcount')).read()\n else:\n # source contain JSON data from API.\n source = urllib.request.urlopen('http://127.0.0.1:5000/bank_leader_board?sortorder=DESC&fetchcount=10').read()\n # converting JSON data to a dictionary.\n list_of_data = json.loads(source)\n return Response({\"BANK LEADER BOARD RESULTS\": list_of_data, \"message\": \"BANK LEADER BOARD DATA\", \"success\": True}, status=status.HTTP_200_OK)\n\n except Exception as error:\n traceback.print_exc()\n return Response({\"message\":str(error),\"success\":False},status=status.HTTP_200_OK)\n\n\nclass Statistics_viewset(viewsets.ViewSet):\n def list(self, request):\n try:\n '''\n The function for searched ifsc code with timestamp statistics data.\n \n Parameters:\n sortorder (String): The sorting order ASC/DESC,\n fetchcount (String): The counts to be fetched.\n \n Returns:\n list_of_data: A list which contains the ifsc statistics data.\n '''\n\n # function for all url hit counts.\n api_count(request)\n\n # validating parameters data.\n if not isValidsortorder(request.GET.get('sortorder') or \"\"):\n raise Exception(\"Sort input must be 'ASC' or 'DESC' !\")\n if not isValidfetchcount(request.GET.get('fetchcount') or \"\"):\n raise Exception(\"fetchcount must be an +ve integer !\")\n\n # required requested parameters.\n if request.GET.get('sortorder') and request.GET.get('fetchcount'):\n # source contain JSON data from API.\n source = urllib.request.urlopen('http://127.0.0.1:5000/stats?sortorder='+request.GET.get('sortorder')+'&fetchcount='+request.GET.get('fetchcount')).read()\n # converting JSON data to a dictionary.\n list_of_data = json.loads(source)\n return Response({\"STATISTICS RESULTS\": list_of_data, \"message\": \"STATISTICS DATA\", \"success\": True}, status=status.HTTP_200_OK)\n else:\n if request.GET.get('sortorder'):\n # source contain JSON data from API.\n source = urllib.request.urlopen('http://127.0.0.1:5000/stats?sortorder='+request.GET.get('sortorder')+'&fetchcount=ALL').read()\n elif request.GET.get('fetchcount'):\n # source contain JSON data from API.\n source = urllib.request.urlopen('http://127.0.0.1:5000/stats?sortorder=ASC&fetchcount='+request.GET.get('fetchcount')).read()\n else:\n # source contain JSON data from API.\n source = urllib.request.urlopen('http://127.0.0.1:5000/stats?sortorder=ASC&fetchcount=ALL').read()\n # converting JSON data to a dictionary.\n list_of_data = json.loads(source)\n return Response({\"STATISTICS RESULTS\": list_of_data, \"message\": \"STATISTICS DATA\", \"success\": True}, status=status.HTTP_200_OK)\n except Exception as error:\n traceback.print_exc()\n return Response({\"message\": str(error), \"success\": False},status=status.HTTP_200_OK)\n\n\nclass Api_count_viewset(viewsets.ViewSet):\n def list(self, request):\n try:\n '''\n The function for all urls hit counts.\n \n Returns:\n API_Hit_Count: A list which contains all the url with total hits.\n '''\n\n return Response({\"RESULTS\": API_Hit_Count, \"message\": \"APIS COUNT DATA\", \"success\": True}, status=status.HTTP_200_OK)\n except Exception as error:\n traceback.print_exc()\n return Response({\"message\": str(error), \"success\": False},status=status.HTTP_200_OK)\n\n\nclass Ifsc_count_viewset(viewsets.ViewSet):\n def list(self, request):\n try:\n '''\n The function for ifsc urls hit counts.\n \n Returns:\n IFSC_Hit_Count: A list 
which contains only the ifsc url with total hits.\n '''\n\n return Response({\"RESULTS\": IFSC_Hit_Count, \"message\": \"IFSC COUNT DATA\", \"success\": True}, status=status.HTTP_200_OK)\n except Exception as error:\n traceback.print_exc()\n return Response({\"message\": str(error), \"success\": False},status=status.HTTP_200_OK)\n\n" }, { "alpha_fraction": 0.7747163772583008, "alphanum_fraction": 0.7747163772583008, "avg_line_length": 40.13333511352539, "blob_id": "d509d4b6dbc96f313223bbd45238c15f0c4512c9", "content_id": "d4a67235c55bd4ab71bc20ca1799b440f75889d8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 617, "license_type": "no_license", "max_line_length": 102, "num_lines": 15, "path": "/Django/Django_project/Django_app/urls.py", "repo_name": "Yugdeep1996/First", "src_encoding": "UTF-8", "text": "from django.urls import path, include\nfrom rest_framework.routers import DefaultRouter\nfrom Django_app.views import *\n\nrouter = DefaultRouter()\n\nrouter.register(r'ifsc_search', Ifsc_viewset, basename='Ifsc_viewset')\nrouter.register(r'bank_leader_board', Bank_leader_board_Viewset, basename='Bank_leader_board_Viewset')\nrouter.register(r'statistics', Statistics_viewset, basename='Statistics_viewset')\nrouter.register(r'api_count', Api_count_viewset, basename='Api_count_viewset')\nrouter.register(r'ifsc_count', Ifsc_count_viewset, basename='Ifsc_count_viewset')\n\nurlpatterns = [\n path('', include(router.urls)),\n]\n" }, { "alpha_fraction": 0.7868852615356445, "alphanum_fraction": 0.7868852615356445, "avg_line_length": 60.5, "blob_id": "3ddcc12a28a46cfa3025591a7eae4aec2ec00dcd", "content_id": "ccaf85bcc0d7a72ffdf6cd1e3508330aeb76bf5a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 122, "license_type": "no_license", "max_line_length": 79, "num_lines": 2, "path": "/Flask/Flask_project/path.py", "repo_name": "Yugdeep1996/First", "src_encoding": "UTF-8", "text": "# static path for RBI-IFSC-Data.xlsx file.\nworkbook_path = '/home/yugdeepriar/Desktop/Assignment/Flask/RBI-IFSC-Data.xlsx'" }, { "alpha_fraction": 0.8888888955116272, "alphanum_fraction": 0.8888888955116272, "avg_line_length": 8.199999809265137, "blob_id": "4b3e9c084f0635069a87da8c6a7ea98cfa320e11", "content_id": "8433dfb81dad3e16878a646034e4ca34802e6bcf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 45, "license_type": "no_license", "max_line_length": 16, "num_lines": 5, "path": "/Flask/requirements.txt", "repo_name": "Yugdeep1996/First", "src_encoding": "UTF-8", "text": "flask\nflask-sqlalchemy\npandas\nopenpyxl\npytest" }, { "alpha_fraction": 0.5447387099266052, "alphanum_fraction": 0.5490336418151855, "avg_line_length": 17.613332748413086, "blob_id": "49faf0b22014f0d048a676fad074138cb55756b6", "content_id": "2bdd4ccc1ac6f6762453ec4227c92ed493b5b3ce", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1397, "license_type": "no_license", "max_line_length": 55, "num_lines": 75, "path": "/Django/Django_project/Django_app/validations.py", "repo_name": "Yugdeep1996/First", "src_encoding": "UTF-8", "text": "# import regular exp.\nimport re\n\n\n# Create your validations here.\n\n\ndef isValidIFSCode(str):\n '''\n The function checks if ifsc code is valid or not.\n\n Parameters:\n str (String): The ifsc code.\n \n Returns:\n (bool) : A boolean value.\n '''\n \n # Regex to check valid IFSC Code.\n regex = 
\"^[A-Z]{4}0[A-Z0-9]{6}$\"\n \n # Compile the ReGex\n p = re.compile(regex)\n \n # If the string is empty return false.\n if (str == None):\n return False\n \n # Return if the string matched the ReGex.\n if (re.search(p, str)):\n return True\n else:\n return False\n\n\ndef isValidsortorder(data):\n '''\n The function checks if sort order is valid or not.\n\n Parameters:\n data (String): The sort order.\n \n Returns:\n (bool) : A boolean value.\n '''\n\n # If the string is empty return.\n if data == '':\n return True\n elif data == 'DESC':\n return True\n elif data == 'ASC':\n return True\n else:\n return False\n\n\ndef isValidfetchcount(data):\n '''\n The function checks if fetch count is valid or not.\n\n Parameters:\n data (String): The fetch count.\n \n Returns:\n (bool) : A boolean value.\n '''\n\n # If the string is empty return.\n if data == '':\n return True\n elif int(data) >= 0:\n return True\n else:\n return False\n\n" } ]
10
em-git/Games
https://github.com/em-git/Games
84248c2ccf8629c623de1ae32c3aa9349d67a728
e4f7eccc60ecc9dec38b2d3a6986851e2498d07f
292d2bf0bd65c28d127756b6389dd4761783f6b3
refs/heads/master
2020-05-03T03:21:14.939866
2019-03-29T14:11:00
2019-03-29T14:11:00
121,367,134
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7570332288742065, "alphanum_fraction": 0.7672634124755859, "avg_line_length": 47.875, "blob_id": "311cde8a953a773f55705f48babbdc4aa16be0f7", "content_id": "d472411a76abe4a9ddddea8d59ec2c4f6cba707d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 391, "license_type": "no_license", "max_line_length": 127, "num_lines": 8, "path": "/README.md", "repo_name": "em-git/Games", "src_encoding": "UTF-8", "text": "# Games\nVery simple games created using the [Pygame](https://www.pygame.org/wiki/about) library.\n\nAll projects in this repository are a work in progress and any updates to the code is based on my availability to work on them.\n\nCurrent projects:\n- [Space Invaders](https://github.com/em-git/Games/blob/master/space_invaders.py) Yes, yet another Space Invaders game.\n- [4972]() Arcade shooter\n" }, { "alpha_fraction": 0.47861436009407043, "alphanum_fraction": 0.5025804042816162, "avg_line_length": 32.758949279785156, "blob_id": "33b84deca70c39c3801ed74c23d1e943cceedf95", "content_id": "0af55738b2d9dfbe26881231d234d971fec57e42", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 14145, "license_type": "no_license", "max_line_length": 119, "num_lines": 419, "path": "/space_invaders.py", "repo_name": "em-git/Games", "src_encoding": "UTF-8", "text": "# Created by Eduardo Morais\n# Name: Space Invaders\n# Version: 0.1\n# Resources:\n# http://www.classicgaming.cc/classics/space - invaders/play - guide\n# https://spaceinvaders.fandom.com/wiki/Space_Invaders_wiki\n# http://computerarcheology.com/Arcade/SpaceInvaders/\n# imports\nimport os\nimport random\nimport pygame\nimport spritesheet\n\n\n# Initialization\npygame.init()\ndisplay_w, display_h = 800, 800\ndisplay = pygame.display.set_mode((display_w, display_h))\npygame.display.set_caption(\"Space Invaders\")\nclock = pygame.time.Clock()\n\n# Constants\nFONT = pygame.font.Font(\"fonts/space_invaders.ttf\", 20)\nWHITE = (255, 255, 255)\nBLACK = (0, 0, 0)\nGREEN = (0, 255, 0)\nFPS = 60\n\n\n# Classes\nclass Player(pygame.sprite.Sprite):\n def __init__(self):\n super(Player, self).__init__()\n self.image = pygame.image.load(\"sprites/player.png\").convert()\n self.width = 26\n self.x = display_w * 0.48\n self.y = display_h * 0.9\n self.move_x = 0\n self.rect = self.image.get_rect()\n self.rect.x = self.x\n self.rect.y = self.y\n self.shots = 0\n\n def update(self, *args):\n self.rect.x += self.move_x\n\n def control(self, x):\n self.move_x = x\n\n def lives(self, lives):\n x_coord = 0\n for var in range(lives-1):\n display.blit(self.image, (x_coord + 50, display_h - 45))\n x_coord += 30\n\n def score(self, score):\n value = FONT.render(str(score).zfill(4), False, WHITE)\n display.blit(value, (130, 40))\n\n\nclass Alien(pygame.sprite.Sprite):\n def __init__(self, strip, rect, x, y):\n super(Alien, self).__init__()\n self.frames = FPS/6\n self.strip = [spritesheet.SpriteStripAnim(strip, rect, 2, 1, True, self.frames)]\n self.image = self.strip[0].next()\n self.x = x\n self.y = y\n self.move_x = 2\n self.rect = self.image.get_rect()\n self.rect.x = self.x\n self.rect.y = self.y\n\n def update(self):\n self.image = self.strip[0].next()\n self.rect.x += self.move_x\n if self.rect.x > 776:\n self.move_x = -2\n self.rect.y += 24\n elif self.rect.x < 0:\n self.move_x = 2\n self.rect.y += 24\n\n\nclass AlienLow(Alien):\n def __init__(self, x, y):\n super(AlienLow, self).__init__(\"sprites/alien3.png\", (0, 0, 24, 16), 
x, y)\n\n\nclass AlienMid(Alien):\n    def __init__(self, x, y):\n        super(AlienMid, self).__init__(\"sprites/alien2.png\", (0, 0, 22, 16), x, y)\n\n\nclass AlienHigh(Alien):\n    def __init__(self, x, y):\n        super(AlienHigh, self).__init__(\"sprites/alien1.png\", (0, 0, 22, 16), x, y)\n\n\nclass Shield(pygame.sprite.Sprite):\n    def __init__(self, x, y):\n        super(Shield, self).__init__()\n        self.image = pygame.image.load(\"sprites/shield.png\")\n        self.x = x\n        self.y = y\n        self.rect = self.image.get_rect()\n        self.rect.x = self.x\n        self.rect.y = self.y\n\n\nclass Laser(pygame.sprite.Sprite):\n    def __init__(self):\n        super(Laser, self).__init__()\n        self.image = pygame.image.load(\"sprites/player_laser.png\")\n        self.rect = self.image.get_rect()\n\n    def update(self, *args):\n        self.rect.y -= 10\n\n\nclass AlienLaser(pygame.sprite.Sprite):\n    def __init__(self, center, size):\n        super(AlienLaser, self).__init__()\n        self.size = size\n        # NOTE: unfinished - the alien shot frames are not in the repo yet, so this\n        # load call still points at the bare directory and will fail if used.\n        self.animation = {\"lst\": [pygame.image.load(os.path.join(f\"sprites/shots/alien/\"))]}\n\n\nclass Hit(pygame.sprite.Sprite):\n    def __init__(self, center, size):\n        super(Hit, self).__init__()\n        self.size = size\n        self.rect = (0, 0, 44, 32)\n        self.animation = {\"lst\": [pygame.image.load(os.path.join(f\"sprites/shots/player/hit/hit{image}.png\")) for image\n                                  in range(1, 3)]}\n        self.image = self.animation[self.size][0]\n        self.rect = self.image.get_rect()\n        self.rect.center = center\n        self.frame = 0\n        self.last_update = pygame.time.get_ticks()\n        self.frame_rate = FPS\n\n    def update(self):\n        now = pygame.time.get_ticks()\n        if now - self.last_update > self.frame_rate:\n            self.last_update = now\n            self.frame += 1\n            if self.frame == len(self.animation[self.size]):\n                self.kill()\n            else:\n                center = self.rect.center\n                self.image = self.animation[self.size][self.frame]\n                self.rect = self.image.get_rect()\n                self.rect.center = center\n\n\nclass Miss(pygame.sprite.Sprite):\n    def __init__(self, center, size):\n        super(Miss, self).__init__()\n        self.size = size\n        self.rect = (0, 0, 44, 32)\n        self.animation = {\"lst\": [pygame.image.load(os.path.join(f\"sprites/shots/player/miss/miss{image}.png\")) for\n                                  image in range(1, 3)]}\n        self.image = self.animation[self.size][0]\n        self.rect = self.image.get_rect()\n        self.rect.center = center\n        self.frame = 0\n        self.last_update = pygame.time.get_ticks()\n        self.frame_rate = FPS\n\n    def update(self):\n        now = pygame.time.get_ticks()\n        if now - self.last_update > self.frame_rate:\n            self.last_update = now\n            self.frame += 1\n            # Fixed: compare against the frame list, as in Hit.update; the bare\n            # dict length (1) cut the miss animation off after its first frame.\n            if self.frame == len(self.animation[self.size]):\n                self.kill()\n            else:\n                center = self.rect.center\n                self.image = self.animation[self.size][self.frame]\n                self.rect = self.image.get_rect()\n                self.rect.center = center\n\n\nclass FlyingSaucer(pygame.sprite.Sprite):\n    def __init__(self, x, y, move):\n        super(FlyingSaucer, self).__init__()\n        self.image = pygame.image.load(\"sprites/ufo.png\")\n        self.rect = self.image.get_rect()\n        self.x = x\n        self.y = y\n        self.move_x = move\n        self.rect.x = self.x\n        self.rect.y = self.y\n\n    def update(self):\n        self.rect.x += self.move_x\n        if self.rect.x > display_w:\n            self.kill()\n        elif self.rect.x < -48:\n            self.kill()\n\n\n# Functions\n\n# Main menu\ndef intro():\n    done = False\n    while not done:\n        for event in pygame.event.get():\n            if event.type == pygame.QUIT:\n                done = True\n        display.fill(BLACK)\n        pygame.display.flip()\n        clock.tick(FPS)\n\n\n# Game\ndef game():\n\n    # Game variables\n    done = False # Controls the game\n    score = 0 # Player score\n    lives = 3 # Player lives\n    las_count = 0 # Laser count\n    tick_counter = 0 # Time 
count\n shield_count = 0 # Shield count\n ufo_count = 0 # Ufo count\n\n # Game sprites\n all_sprites = pygame.sprite.Group() # Stores all sprites\n laser_list = pygame.sprite.Group() # Stores Player laser\n shield_list = pygame.sprite.Group() # Stores all shields\n ufo_list = pygame.sprite.Group() # Stores UFO\n alien_list_0 = pygame.sprite.Group() # Stores Alien top row\n alien_list_1 = pygame.sprite.Group() # Stores Alien middle rows\n alien_list_2 = pygame.sprite.Group() # Stores Alien bottom rows\n\n # Player\n player = Player()\n all_sprites.add(player)\n\n # Shields\n x = 66\n while shield_count < 4:\n shield = Shield(x, display_h * 0.84)\n all_sprites.add(shield)\n shield_list.add(shield)\n shield_count += 1\n x += 200\n\n # Aliens\n alien_row_0, alien_row_1, alien_row_2 = [], [], []\n loc_x, loc_y = 158, 100 # Initial x and y position\n for row in range(5):\n if row < 1: # Top row\n for x in range(11):\n alien_row_0.append(AlienHigh(loc_x, loc_y))\n loc_x += 48 # Increments x by 48\n for al in alien_row_0:\n all_sprites.add(al)\n alien_list_0.add(al)\n loc_x = 158 # Resets x\n loc_y += 50 # Increments y by 50\n elif row < 3: # Middle rows\n for x in range(11):\n alien_row_1.append(AlienMid(loc_x, loc_y))\n loc_x += 48\n for al in alien_row_1:\n all_sprites.add(al)\n alien_list_1.add(al)\n loc_x = 158\n loc_y += 50\n elif row <= 4: # Bottom rows\n for x in range(11):\n alien_row_2.append(AlienLow(loc_x, loc_y))\n loc_x += 48\n for al in alien_row_2:\n all_sprites.add(al)\n alien_list_2.add(al)\n loc_x = 158\n loc_y += 50\n\n while not done:\n tick_counter += 1\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n done = True\n elif event.type == pygame.KEYDOWN:\n if event.key == pygame.K_RIGHT:\n player.control(3)\n elif event.key == pygame.K_LEFT:\n player.control(-3)\n elif event.key == pygame.K_UP:\n if las_count == 0:\n laser = Laser()\n laser.rect.x = player.rect.x + 11\n laser.rect.y = player.rect.y - 16\n all_sprites.add(laser)\n laser_list.add(laser)\n las_count += 1\n player.shots += 1\n else:\n continue\n elif event.type == pygame.KEYUP:\n if event.key == pygame.K_RIGHT:\n player.control(0)\n elif event.key == pygame.K_LEFT:\n player.control(0)\n else:\n continue\n\n all_sprites.update()\n\n # Ufo creation based on timer, direction based on player.shots\n if tick_counter in [301, 1000, 1701]: # Needs to be corrected\n if player.shots % 2 == 0:\n ufo = FlyingSaucer(-48, 70, 2)\n ufo_list.add(ufo)\n all_sprites.add(ufo)\n ufo_count += 1\n else:\n ufo = FlyingSaucer(800, 70, -2)\n ufo_list.add(ufo)\n all_sprites.add(ufo)\n ufo_count += 1\n\n # Player shots\n for laser in laser_list:\n alien_hit_list_0 = pygame.sprite.spritecollide(laser, alien_list_0, True)\n alien_hit_list_1 = pygame.sprite.spritecollide(laser, alien_list_1, True)\n alien_hit_list_2 = pygame.sprite.spritecollide(laser, alien_list_2, True)\n ufo_hit_list = pygame.sprite.spritecollide(laser, ufo_list, True)\n if alien_hit_list_0:\n blast = Hit(laser.rect.center, \"lst\")\n all_sprites.add(blast)\n all_sprites.remove(laser)\n laser_list.remove(laser)\n score += 30\n las_count = 0\n if alien_hit_list_1:\n blast = Hit(laser.rect.center, \"lst\")\n all_sprites.add(blast)\n all_sprites.remove(laser)\n laser_list.remove(laser)\n score += 20\n las_count = 0\n if alien_hit_list_2:\n blast = Hit(laser.rect.center, \"lst\")\n all_sprites.add(blast)\n all_sprites.remove(laser)\n laser_list.remove(laser)\n score += 10\n las_count = 0\n\n if ufo_hit_list:\n blast = Hit(laser.rect.center, \"lst\")\n 
all_sprites.add(blast)\n all_sprites.remove(laser)\n laser_list.remove(laser)\n las_count = 0\n if ufo_count > 1:\n if player.shots < 15 or player.shots > 15:\n score += random.choice([50, 100, 150])\n player.shots = 0\n else:\n score += 300\n player.shots = 0\n else:\n if player.shots < 24 or player.shots > 24:\n score += random.choice([50, 100, 150])\n player.shots = 0\n else:\n score += 300\n player.shots = 0\n\n elif laser.rect.y < 70:\n blast = Miss(laser.rect.center, \"lst\")\n all_sprites.add(blast)\n all_sprites.remove(laser)\n laser_list.remove(laser)\n las_count = 0\n\n # Player not allowed off-screen\n if player.rect.x > display_w - player.width:\n player.rect.x = display_w - player.width\n elif player.rect.x < 0:\n player.rect.x = 0\n\n # DISPLAY DRAW\n display.fill(BLACK)\n all_sprites.draw(display)\n\n # SCREEN TOP\n player1_text = FONT.render(\"SCORE < 1 >\", True, WHITE)\n player2_text = FONT.render(\"SCORE < 2 >\", True, WHITE)\n hi_score_text = FONT.render(\"HI-SCORE\", True, WHITE)\n player.score(score)\n player2_score = FONT.render(\"0000\", True, WHITE)\n hi_score_score = FONT.render(\"0000\", True, WHITE)\n display.blit(player1_text, (100, 0))\n display.blit(player2_text, (600, 0))\n display.blit(hi_score_text, (350, 0))\n display.blit(player2_score, (630, 40))\n display.blit(hi_score_score, (380, 40))\n # SCREEN BOTTOM\n pygame.draw.line(display, GREEN, (0, display_h - 50), (display_w, display_h - 50), 2)\n lives_text = FONT.render(str(lives), True, WHITE)\n credit_text = FONT.render(\"CREDIT 00\", True, WHITE)\n display.blit(lives_text, (20, display_h - 48))\n display.blit(credit_text, (600, display_h - 48))\n player.lives(lives)\n\n # Display update\n pygame.display.flip()\n clock.tick(FPS)\n\n\nif __name__ == '__main__':\n game()\n" } ]
2
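A minimal sketch (not a file from either repository in this dump) of the one-shot animation pattern that the Hit and Miss sprite classes in space_invaders.py above both implement: advance one frame every frame_rate milliseconds, and remove the sprite once the last frame has been shown. The class name OneShotAnimation and its arguments are illustrative assumptions, not part of the repo.

import pygame


class OneShotAnimation(pygame.sprite.Sprite):
    # Plays a list of frames once at a fixed millisecond interval, then removes
    # itself from every sprite group via kill(), as Hit and Miss do above.
    def __init__(self, center, frames, frame_rate_ms=60):
        super(OneShotAnimation, self).__init__()
        self.frames = frames  # assumed: a preloaded list of pygame.Surface objects
        self.frame = 0
        self.image = self.frames[0]
        self.rect = self.image.get_rect(center=center)
        self.frame_rate = frame_rate_ms
        self.last_update = pygame.time.get_ticks()

    def update(self):
        now = pygame.time.get_ticks()
        if now - self.last_update > self.frame_rate:
            self.last_update = now
            self.frame += 1
            if self.frame == len(self.frames):
                self.kill()  # animation finished: drop the sprite from all groups
            else:
                center = self.rect.center
                self.image = self.frames[self.frame]
                self.rect = self.image.get_rect(center=center)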
emebeiran/low-rank2020
https://github.com/emebeiran/low-rank2020
bda31e9da18392079a90cdd7167631c1efd1ef36
54d6ab9e9267d9a4cb39ff7ef1f7a6d6fac50fae
918cd1c49d904635ea82c47ae3aea2a63e549d33
refs/heads/main
2023-01-14T12:24:22.889944
2020-11-28T13:06:51
2020-11-28T13:06:51
313,571,907
2
0
null
null
null
null
null
[ { "alpha_fraction": 0.47091805934906006, "alphanum_fraction": 0.5047415494918823, "avg_line_length": 37.13508224487305, "blob_id": "be7df13b40cfc0fcc2dc63a996de0160804543c1", "content_id": "58f7f804a7a16095fb94068bdefdff947cec163e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 98807, "license_type": "no_license", "max_line_length": 211, "num_lines": 2591, "path": "/lib_rnns.py", "repo_name": "emebeiran/low-rank2020", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Tue Apr 2 16:50:55 2019\n\n@author: mbeiran\n\"\"\"\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport torch\n\n#%%\ndef set_plot(ll = 7):\n plt.style.use('ggplot')\n\n fig_width = 1.5*2.2 # width in inches\n fig_height = 1.5*2 # height in inches\n fig_size = [fig_width,fig_height]\n plt.rcParams['figure.figsize'] = fig_size\n plt.rcParams['figure.autolayout'] = True\n \n plt.rcParams['lines.linewidth'] = 1.2\n plt.rcParams['lines.markeredgewidth'] = 0.003\n plt.rcParams['lines.markersize'] = 3\n plt.rcParams['font.size'] = 14#9\n plt.rcParams['legend.fontsize'] = 11#7.\n plt.rcParams['axes.facecolor'] = '1'\n plt.rcParams['axes.edgecolor'] = '0'\n plt.rcParams['axes.linewidth'] = '0.7'\n \n plt.rcParams['axes.labelcolor'] = '0'\n plt.rcParams['axes.labelsize'] = 14#9\n plt.rcParams['xtick.labelsize'] = 11#7\n plt.rcParams['ytick.labelsize'] = 11#7\n plt.rcParams['xtick.color'] = '0'\n plt.rcParams['ytick.color'] = '0'\n plt.rcParams['xtick.major.size'] = 2\n plt.rcParams['ytick.major.size'] = 2\n \n plt.rcParams['font.sans-serif'] = 'Arial'\n \n cls = np.zeros((ll,3))\n \n cl11 = np.array((102, 153, 255))/255.\n cl12 = np.array((53, 153, 53))/255.\n \n cl21 = np.array((255, 204, 51))/255.\n cl22 = np.array((204, 0, 0))/255.\n \n if ll==7:\n cls[0,:] = 0.4*np.ones((3,))\n \n cls[1,:] = cl11\n cls[2,:] = 0.5*cl11+0.5*cl12\n cls[3,:] = cl12\n \n cls[4,:] = cl21\n cls[5,:] = 0.5*cl21+0.5*cl22\n cls[6,:] = cl22\n elif ll == 5:\n cls[0,:] = 0.4*np.ones((3,)) \n \n cls[2,:] = cl12\n \n cls[3,:] = cl21\n \n cls[4,:] = cl22 \n return(cls)\n \ndef set_plot2(ll = 7):\n plt.style.use('ggplot')\n\n fig_width = 1.5*2.2 # width in inches\n fig_height = 1.5*2 # height in inches\n fig_size = [fig_width,fig_height]\n plt.rcParams['figure.figsize'] = fig_size\n plt.rcParams['figure.autolayout'] = True\n \n plt.rcParams['lines.linewidth'] = 1.2\n plt.rcParams['lines.markeredgewidth'] = 0.003\n plt.rcParams['lines.markersize'] = 3\n plt.rcParams['font.size'] = 14#9\n plt.rcParams['legend.fontsize'] = 11#7.\n plt.rcParams['axes.facecolor'] = '1'\n plt.rcParams['axes.edgecolor'] = '0'\n plt.rcParams['axes.linewidth'] = '0.7'\n \n plt.rcParams['axes.labelcolor'] = '0'\n plt.rcParams['axes.labelsize'] = 14#9\n plt.rcParams['xtick.labelsize'] = 11#7\n plt.rcParams['ytick.labelsize'] = 11#7\n plt.rcParams['xtick.color'] = '0'\n plt.rcParams['ytick.color'] = '0'\n plt.rcParams['xtick.major.size'] = 2\n plt.rcParams['ytick.major.size'] = 2\n \n plt.rcParams['font.sans-serif'] = 'Arial'\n \n cls = np.zeros((ll,3))\n \n cl11 = np.array((102, 153, 255))/255.\n cl12 = np.array((53, 153, 53))/255.\n \n cl21 = np.array((255, 204, 51))/255.\n cl22 = np.array((204, 0, 0))/255.\n \n if ll==7:\n cls[0,:] = 0.4*np.ones((3,))\n \n cls[1,:] = cl11\n cls[2,:] = 0.5*cl11+0.5*cl12\n cls[3,:] = cl12\n \n cls[4,:] = cl21\n cls[5,:] = 0.5*cl21+0.5*cl22\n cls[6,:] = cl22\n \n cls = cls[1:]\n cls = cls[::-1]\n \n c2 = [67/256, 90/256, 162/256]\n c1 = 
[220/256, 70/256, 51/256]\n cls[0,:]=c1\n cls[5,:]=c2\n elif ll == 5:\n cls[0,:] = 0.4*np.ones((3,)) \n \n cls[2,:] = cl12\n \n cls[3,:] = cl21\n \n cls[4,:] = cl22 \n return(cls)\n# Create input \n \ndef plot_readout(net_low, time, Nt, time_com, amps, R_on, dt, string1, string2,\n trials=100, plot_trace=True, plot_psycho=True, pl_min = 60, pl_max = 1350, mean=False, png=False, new=False):\n cls = set_plot2()\n fig = plt.figure()\n ax = fig.add_subplot(111)\n \n Tp = np.zeros(len(amps))\n Tp_sd = np.zeros(len(amps)) \n thres = 0.35\n train_int = (0.5+thres)*time_com*dt\n for xx in range(len(amps)):\n \n trials = 100#100\n \n input_tr, output_tr, mask_tr, ct_tr, ct2_tr = create_inp_out2(trials, Nt, time_com, amps, R_on, 0., \n just=xx, perc = 0.0)\n \n outp, traj = net_low.forward(input_tr, return_dynamics=True)\n outp = outp.detach().numpy()\n traj = traj.detach().numpy()\n \n avg_outp = np.mean(outp[:,:,0],0)\n if new==False:\n ax.plot(-R_on*dt/1000+time*dt/1000, avg_outp, color=cls[xx,:])\n else:\n ax.plot(-R_on*dt/1000+time*dt/1000, avg_outp, color=cls[xx,:], lw=2.)\n \n if mean == False:\n if new ==False:\n ax.plot(-R_on*dt/1000+time*dt/1000, outp[:,:,0].T, color=cls[xx,:], lw= 0.1)\n else:\n ax.plot(-R_on*dt/1000+time*dt/1000, outp[:,:,0].T, color=cls[xx,:], lw= 1.2)\n if new==False:\n ax.plot(time[mask_tr.detach().numpy()[xx,:,0]>0.]*dt/1000-R_on*dt/1000, output_tr.detach().numpy()[xx,mask_tr.detach().numpy()[xx,:]>0.], '--k', lw=1.0)\n else:\n ax.plot(time[mask_tr.detach().numpy()[xx,:,0]>0.]*dt/1000-R_on*dt/1000, output_tr.detach().numpy()[xx,mask_tr.detach().numpy()[xx,:]>0.], '--', c = [0.3, 0.3, 0.3], lw=1., zorder=2)\n \n outp_thres = outp[:,:,0]-thres\n time_cross = np.zeros(trials)\n for t in range(trials):\n t_ser = outp_thres[t,:]\n if len(time[t_ser>0])>0:\n time_cross[t] = (time[t_ser>0][0]-R_on)*dt\n else:\n time_cross[t] = np.nan\n \n Tp[xx] = np.mean(time_cross)-R_on*10\n Tp_sd[xx] = np.std(time_cross)-R_on*10\n xl = np.min( [time[-1]*dt/1000, 1.4*(np.max(time_com)+R_on)*dt/1000])\n ax.set_xlim([-0.6, xl]) \n ax.spines['top'].set_visible(False)\n ax.spines['right'].set_visible(False)\n ax.yaxis.set_ticks_position('left')\n ax.xaxis.set_ticks_position('bottom')\n plt.xlabel('time after Set (s)')\n plt.ylabel('read out')\n if png==False:\n plt.savefig(string1)\n else:\n plt.savefig(string1, dpi=1200)\n plt.show()\n \n fig = plt.figure()\n ax = fig.add_subplot(111)\n for xx in range(len(amps)):\n plt.errorbar(train_int[xx], Tp[xx], yerr = Tp_sd[xx], fmt='o', markersize=10, color=cls[xx,:])\n plt.plot(train_int, train_int, 'k')\n ax.spines['top'].set_visible(False)\n ax.spines['right'].set_visible(False)\n ax.yaxis.set_ticks_position('left')\n ax.xaxis.set_ticks_position('bottom')\n plt.xlabel('trained interval [ms]')\n plt.ylabel(r'$t_p$ [ms]')\n if png==False:\n plt.savefig(string2)\n else:\n plt.savefig(string2, dpi=1200)\n plt.show()\n #plt.plot([pl_min, pl_max], [pl_min, pl_max], 'k')\n return(Tp, Tp_sd, train_int)\n \ndef plot_readout2(net_low, time, Nt, time_com, amps, R_on, dt, string1, string2,\n trials=100, plot_trace=True, plot_psycho=True, pl_min = 60, pl_max = 1350, mean=False):\n cls = set_plot()\n fig = plt.figure()\n ax = fig.add_subplot(111)\n \n #Tp = np.zeros(len(amps))\n #Tp_sd = np.zeros(len(amps)) \n #thres = 0.35\n #train_int = (0.5+thres)*time_com*dt\n for xx in range(len(amps)):\n \n trials = 100\n \n input_tr, output_tr, mask_tr, ct_tr, ct2_tr = create_inp_out2(trials, Nt, time_com, amps, R_on, 0., \n just=xx, perc = 0.0)\n \n outp, traj = 
net_low.forward(input_tr, return_dynamics=True)\n outp = outp.detach().numpy()\n traj = traj.detach().numpy()\n \n #dist = np.diff(traj,axis=1)\n speed = np.sqrt(np.sum(np.diff(traj,axis=1)**2,-1))\n avg_outp = np.mean(speed,0)#np.mean(np.diff(outp[:,:,0])/dt,0)\n ax.plot(time[:-1]*dt, avg_outp, color=cls[xx,:])\n if mean == False:\n #ax.plot(time[:-1]*dt, np.diff(outp[:,:,0]).T, color=cls[xx,:], lw= 0.1)\n ax.plot(time[:-1]*dt, speed[:,:,0].T, color=cls[xx,:], lw= 0.1)\n #ax.plot(time[mask_tr.detach().numpy()[xx,:,0]>0.]*dt, output_tr.detach().numpy()[xx,mask_tr.detach().numpy()[xx,:]>0.], '--k', lw=1.0)\n \n #outp_thres = outp[:,:,0]-thres\n #time_cross = np.zeros(trials)\n #for t in range(trials):\n # t_ser = outp_thres[t,:]\n # if len(time[t_ser>0])>0:\n # time_cross[t] = (time[t_ser>0][0]-R_on)*dt\n # else:\n # time_cross[t] = np.nan\n \n #Tp[xx] = np.mean(time_cross)\n #Tp_sd[xx] = np.std(time_cross)\n xl = np.min( [time[-1]*dt, 1.4*(np.max(time_com)+R_on)*dt])\n ax.set_xlim([0, xl]) \n ax.spines['top'].set_visible(False)\n ax.spines['right'].set_visible(False)\n ax.yaxis.set_ticks_position('left')\n ax.xaxis.set_ticks_position('bottom')\n plt.xlabel('time [ms]')\n ax.set_ylim([0,6])\n #plt.yscale('log')\n plt.ylabel('speed')\n plt.savefig(string1)\n plt.show()\n \n# fig = plt.figure()\n# ax = fig.add_subplot(111)\n# for xx in range(len(amps)):\n# plt.errorbar(train_int[xx], Tp[xx], yerr = Tp_sd[xx], fmt='o', markersize=10, color=cls[xx,:])\n# plt.plot(train_int, train_int, 'k')\n# ax.spines['top'].set_visible(False)\n# ax.spines['right'].set_visible(False)\n# ax.yaxis.set_ticks_position('left')\n# ax.xaxis.set_ticks_position('bottom')\n# plt.xlabel('trained interval [ms]')\n# plt.ylabel(r'$t_p$ [ms]')\n# plt.savefig(string2)\n# plt.show()\n #plt.plot([pl_min, pl_max], [pl_min, pl_max], 'k')\n return()\n\ndef create_inp_out(trials, Nt, tss, amps, R_on, SR_on, just=-1, perc = 0.2):\n \n n_ts = len(tss)\n time = np.arange(Nt)\n \n strt = -0.5\n inputt = np.zeros(( trials, Nt, 2))\n outputt = strt*np.ones((trials, Nt, 1))\n maskt = np.zeros((trials, Nt, 1))\n \n r_inp = np.ones((trials, Nt))\n #r2_inp = np.ones((trials, Nt))\n s_inp = np.zeros((trials, Nt))\n \n \n if just==-1: #all types of trials \n ct = np.random.randint(n_ts, size = trials)\n \n else:\n ct = just*np.ones(trials, dtype = np.int8)\n \n # Don't have the set cue in a set of inputs\n ct2 = np.random.rand(trials)<perc\n \n \n rnd = np.zeros(trials)\n if SR_on>0:\n rnd = np.random.randint(-SR_on, SR_on, trials)\n\n for itr in range(trials): \n if ct2[itr]:\n maskt[itr,:,0] = (time>R_on+1+rnd[itr])*(time<np.max(tss)+R_on+1+rnd[itr])\n else:\n maskt[itr,:,0] = (time>R_on+1+rnd[itr])*(time<tss[ct[itr]]+R_on+1+rnd[itr])\n s_inp[itr, time>R_on+rnd[itr]] = 100.\n s_inp[itr, time>1+R_on+rnd[itr]] = 0.\n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -0.9*strt, int(sum(maskt[itr,:,0])), endpoint=True)\n #Include zero read-out in cost function\n if ct2[itr]:\n maskt[itr,:,0] = (time<np.max(tss)+R_on+1+rnd[itr])\n else:\n maskt[itr,:,0] = (time<tss[ct[itr]]+R_on+1+rnd[itr])\n if just==-1:\n r_inp[itr, :] = amps[ct[itr]]*r_inp[itr,:] \n \n if just>-1:\n r_inp = amps[just]*r_inp\n \n inputt[:,:,0] = r_inp #cue\n inputt[:,:,1] = s_inp #set\n #inputt[:,:,2] = r2_inp\n #outputt = strt*np.ones((trials, Nt, 1))\n \n dtype = torch.FloatTensor \n inputt = torch.from_numpy(inputt).type(dtype)\n outputt = torch.from_numpy(outputt).type(dtype)\n maskt = torch.from_numpy(maskt).type(dtype)\n \n return(inputt, 
outputt, maskt, ct, ct2)\n \ndef create_inp_out2(trials, Nt, tss, amps, R_on, SR_on=80, just=-1, perc = 0.2):\n '''\n Missing\n '''\n \n n_ts = len(tss)\n time = np.arange(Nt)\n \n strt = -0.5\n inputt = np.zeros(( trials, Nt, 2))\n outputt = strt*np.ones((trials, Nt, 1))\n maskt = np.zeros((trials, Nt, 1))\n \n r_inp = np.ones((trials, Nt))\n #r2_inp = np.ones((trials, Nt))\n s_inp = np.zeros((trials, Nt))\n \n \n if just==-1: #all types of trials \n ct = np.random.randint(n_ts, size = trials)\n \n else:\n ct = just*np.ones(trials, dtype = np.int8)\n \n # Don't have the set cue in a set of inputs\n ct2 = np.random.rand(trials)<perc\n \n \n rnd = np.zeros(trials)\n if SR_on>0:\n rnd = np.random.randint(-SR_on, SR_on, trials)\n\n for itr in range(trials): \n if ct2[itr]:\n maskt[itr,:,0] = (time>R_on+1+rnd[itr])*(time<np.max(tss)+R_on+1+rnd[itr])\n else:\n maskt[itr,:,0] = (time>R_on+1+rnd[itr])*(time<tss[ct[itr]]+R_on+1+rnd[itr])\n mask_aft = time>=tss[ct[itr]]+R_on+1+rnd[itr]\n s_inp[itr, time>R_on+rnd[itr]] = 10.\n s_inp[itr, time>1+R_on+rnd[itr]] = 0.\n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n #Include zero read-out in cost function\n if ct2[itr]:\n maskt[itr,:,0] = (time<Nt)#np.max(tss)+R_on+1+rnd[itr])\n else:\n maskt[itr,:,0] = (time<Nt)#tss[ct[itr]]+R_on+1+rnd[itr])\n if just==-1:\n r_inp[itr, :] = amps[ct[itr]]*r_inp[itr,:] \n \n if just>-1:\n r_inp = amps[just]*r_inp\n \n inputt[:,:,0] = r_inp\n inputt[:,:,1] = s_inp\n #inputt[:,:,2] = r2_inp\n #outputt = strt*np.ones((trials, Nt, 1))\n \n dtype = torch.FloatTensor \n inputt = torch.from_numpy(inputt).type(dtype)\n outputt = torch.from_numpy(outputt).type(dtype)\n maskt = torch.from_numpy(maskt).type(dtype)\n \n return(inputt, outputt, maskt, ct, ct2)\n\ngaussian_norm = (1/np.sqrt(np.pi))\ngauss_points, gauss_weights = np.polynomial.hermite.hermgauss(200)\ngauss_points = gauss_points*np.sqrt(2)\n\ndef Phi (mu, delta0):\n integrand = np.tanh(mu+np.sqrt(delta0)*gauss_points)\n return gaussian_norm * np.dot (integrand,gauss_weights)\ndef Prime (mu, delta0):\n integrand = 1 - (np.tanh(mu+np.sqrt(delta0)*gauss_points))**2\n return gaussian_norm * np.dot (integrand,gauss_weights)\n\n\ndef self_con(kappas, rho, means_z, var_z, w_z, means_x, var_x, w_x):\n G = np.zeros(len(kappas))\n for ikap, kap in enumerate(kappas):\n tot_w = np.zeros(len(w_z)*len(w_x))\n tot_mu = np.zeros(len(tot_w))\n tot_delta = np.zeros(len(tot_w))\n tot_a = np.zeros(len(tot_w))\n tot_sig = np.zeros(len(tot_w))\n fact1 = np.zeros(len(tot_w))\n fact2 = np.zeros(len(tot_w))\n \n \n \n count= 0\n for k in range(len(w_z)):\n for l in range(len(w_x)):\n tot_w[count] = w_z[k]*w_x[l]\n tot_mu[count] = kap*(means_x[l]+rho*means_z[k])\n tot_delta[count] = kap**2*(var_x[l]+rho**2*var_z[k])\n tot_a[count] = means_z[k]\n tot_sig[count] = np.sqrt(var_z[k]) \n \n fact1 += tot_w[count]*tot_a[count] * Phi(tot_mu[count], tot_delta[count])\n fact2 += kap*rho*tot_w[count]*tot_sig[count]*Prime(tot_mu[count], tot_delta[count])\n count +=1\n G[ikap] = np.sum(fact1)+np.sum(fact2)\n\n return(G) \n\ndef self_condir(kappas, m, n, Icue, amp):\n G = np.zeros(len(kappas))\n for ikap, kap in enumerate(kappas):\n G[ikap] = np.mean(n*np.tanh(m*kap+Icue*amp))\n return(G) \n\ndef self_condir2(kappas1, kappas2, m1, n1, m2, n2, Icue, amp):\n G = np.zeros((len(kappas1), len(kappas2),2))\n Gm = np.zeros((len(kappas1), 
len(kappas2),2))\n \n E = np.zeros((len(kappas1), len(kappas2)))\n \n K1 = np.zeros((len(kappas1), len(kappas2)))\n K2 = np.zeros((len(kappas1), len(kappas2)))\n for ikap1, kap1 in enumerate(kappas1):\n for ikap2, kap2 in enumerate(kappas2): \n G[ikap1, ikap2,0] = np.mean(n1*np.tanh(m1*kap1+m2*kap2+Icue[:,0]*amp))\n G[ikap1, ikap2,1] = np.mean(n2*np.tanh(m1*kap1+m2*kap2+Icue[:,0]*amp))\n Gm[ikap1, ikap2,0] = G[ikap1, ikap2,0]-kap1\n Gm[ikap1, ikap2,1] = G[ikap1, ikap2,1]-kap2\n \n E[ikap1, ikap2] = (G[ikap1, ikap2,0]-kap1)**2+(G[ikap1, ikap2,1]-kap2)**2\n K1[ikap1, ikap2] = kap1\n K2[ikap1, ikap2] = kap2\n \n return(G, E, Gm, K1, K2) \n\ndef self_condir2s(kappas1, m1, n1, m2, n2, Icue, amp):\n G = np.zeros((len(kappas1)))\n \n for ikap1, kap1 in enumerate(kappas1):\n G[ikap1] = np.mean(n1*np.tanh(m1*kap1+m2*kap1+Icue[:,0]*amp))+np.mean(n2*np.tanh(m1*kap1+m2*kap1+Icue[:,0]*amp))\n return(G) \n \ndef angle2(vec1, vec2): \n norm1 = np.sqrt(np.dot(vec1.T, vec1))\n norm2 = np.sqrt(np.dot(vec2.T, vec2))\n factor= 180/np.pi\n ang = np.arccos(np.dot(vec1.T, vec2)/(norm1*norm2))*factor\n return(ang) \n \ndef find_kaps(E,Gm, kappas1, kappas2, tol_around = 0.1, tol = 0.05):\n \n Er = np.ravel(E)\n cand_ix0, cand_ix1 = np.unravel_index(np.argwhere(Er<tol), E.shape)\n \n real0 = []\n real1 = []\n stab = []\n eigs1 = []\n eigs2 = []\n c0s = []\n c1s = []\n dkap = kappas1[1]-kappas1[0]\n \n idcs_ar = (tol_around//dkap)+1\n \n for ic0, c0 in enumerate(cand_ix0):\n c0 = c0[0]\n c1 = cand_ix1[ic0][0]\n \n low1 = int(np.max((0, -idcs_ar+c0)))\n low2 = int(np.max((0, -idcs_ar+c1)))\n high1 = int(np.min((len(kappas1)-1, idcs_ar+c0)))\n high2 = int(np.min((len(kappas1)-1, idcs_ar+c1)))\n if E[c0,c1]==np.min(E[low1:high1, low2:high2]):\n if (Gm[c0+1,c1,0]*Gm[c0-1, c1,0])<0 and (Gm[c0,c1-1,1]*Gm[c0-1, c1+1,1])<0:\n real0.append(c0)\n real1.append(c1)\n M = np.zeros((2,2))\n M[0,0] = Gm[c0+1,c1,0]-Gm[c0-1,c1,0]\n M[0,1] = Gm[c0,c1+1,0]-Gm[c0,c1-1,0]\n M[1,1] = Gm[c0,c1+1,1]-Gm[c0,c1-1,1]\n M[1,0] = Gm[c0+1,c1,1]-Gm[c0-1,c1,1]\n largst, minst = np.linalg.eigvals(M/(2*dkap))\n eigs1.append(largst)\n eigs2.append(minst)\n c0s.append(c0)\n c1s.append(c1)\n \n \n if np.max(np.real(np.linalg.eigvals(M)))<0:\n stab.append(1)\n else:\n stab.append(0)\n kappa_x = np.array((kappas1[real0], kappas2[real1]))\n return(kappa_x, np.array(stab), np.array(eigs1), np.array(eigs2))#, M, c0s, c1s)\n \n#%%\ndef create_inp_out_rsg(trials, Nt, tss, R1_on, SR1_on, just=-1, perc = 0.1, perc1 = 0.1):\n '''\n trials: Number of trials\n Nt : Number of time points\n tss : Intervals between set and go\n R1_on: Time of ready\n SR1_on: Standard deviation of the temporal onset of \"Ready\".\n perc: Percentage of trials in which no transient inputs appear\n perc1: Percentage of trials in which only the ready cue appears\n '''\n \n n_ts = len(tss)\n time = np.arange(Nt)\n \n strt = -0.5\n inputt = np.zeros(( trials, Nt, 3))\n outputt = strt*np.ones((trials, Nt, 1))\n maskt = np.zeros((trials, Nt, 1))\n \n r_inp = np.ones((trials, Nt))\n s_inp_R = np.zeros((trials, Nt))\n s_inp_S = np.zeros((trials, Nt))\n \n \n \n if just==-1: #all types of trials \n ct = np.random.randint(n_ts, size = trials)\n \n else:\n ct = just*np.ones(trials, dtype = np.int8)\n \n # Don't have nor set nor ready cue in a set of inputs\n ct2 = np.random.rand(trials)<perc\n \n # Don't have a set cue\n ct3 = np.random.rand(trials)<perc1\n \n \n rnd = np.zeros(trials)\n if SR1_on>0:\n rnd = np.random.randint(-SR1_on, SR1_on, trials)\n\n for itr in range(trials): \n redset = 
tss[ct[itr]]\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>R1_on+1+rnd[itr]+redset)*(time<2*redset+R1_on+1+rnd[itr])\n mask_aft = time>=2*redset+R1_on+1+rnd[itr]\n s_inp_R[itr, time>R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n s_inp_S[itr, time>R1_on+rnd[itr]+redset] = 10.\n s_inp_S[itr, time>1+R1_on+rnd[itr]+redset] = 0.\n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n\n\n inputt[:,:,0] = r_inp\n inputt[:,:,1] = s_inp_R\n inputt[:,:,2] = s_inp_S\n \n \n dtype = torch.FloatTensor \n inputt = torch.from_numpy(inputt).type(dtype)\n outputt = torch.from_numpy(outputt).type(dtype)\n maskt = torch.from_numpy(maskt).type(dtype)\n \n return(inputt, outputt, maskt, ct, ct2, ct3)\n \n#%%\ndef create_inp_out_rsgC(trials, Nt, tss, R1_on, SR1_on, just=-1, perc = 0.1, perc1 = 0.1):\n '''\n trials: Number of trials\n Nt : Number of time points\n tss : Intervals between set and go\n R1_on: Time of ready\n SR1_on: Standard deviation of the temporal onset of \"Ready\".\n perc: Percentage of trials in which no transient inputs appear\n perc1: Percentage of trials in which only the ready cue appears\n '''\n \n n_ts = len(tss)\n time = np.arange(Nt)\n \n strt = -0.5\n inputt = np.zeros(( trials, Nt, 3))\n outputt = strt*np.ones((trials, Nt, 1))\n maskt = np.zeros((trials, Nt, 1))\n \n r_inp = np.ones((trials, Nt))\n s_inp_R = np.zeros((trials, Nt))\n s_inp_S = np.zeros((trials, Nt))\n \n \n \n if just==-1: #all types of trials \n ct = np.random.randint(n_ts, size = trials)\n \n else:\n ct = just*np.ones(trials, dtype = np.int8)\n \n # Don't have nor set nor ready cue in a set of inputs\n ct2 = np.random.rand(trials)<perc\n \n # Don't have a set cue\n ct3 = np.random.rand(trials)<perc1\n \n \n rnd = np.zeros(trials)\n if SR1_on>0:\n rnd = np.random.randint(-SR1_on, SR1_on, trials)\n\n for itr in range(trials): \n redset = tss[ct[itr]]\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>R1_on+rnd[itr]] = 1.\n #s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>R1_on+1+rnd[itr]+redset)*(time<2*redset+R1_on+1+rnd[itr])\n mask_aft = time>=2*redset+R1_on+1+rnd[itr]\n s_inp_R[itr, time>R1_on+rnd[itr]] = 1.\n #s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n s_inp_S[itr, time>R1_on+rnd[itr]+redset] = 10.\n s_inp_S[itr, time>1+R1_on+rnd[itr]+redset] = 0.\n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n\n\n inputt[:,:,0] = r_inp\n inputt[:,:,1] = s_inp_R\n inputt[:,:,2] = s_inp_S\n \n \n dtype = torch.FloatTensor \n inputt = torch.from_numpy(inputt).type(dtype)\n outputt = torch.from_numpy(outputt).type(dtype)\n maskt = torch.from_numpy(maskt).type(dtype)\n \n return(inputt, outputt, maskt, ct, ct2, ct3)\n \n#%%\ndef create_inp_out_rsg1(trials, Nt, tss, R1_on, SR1_on, just=-1, perc = 0.1, perc1 = 0.1):\n '''\n trials: Number of trials\n Nt : Number of time points\n tss : Intervals between set and go\n R1_on: Time of ready\n SR1_on: Standard deviation of the temporal onset of \"Ready\".\n 
perc: Percentage of trials in which no transient inputs appear\n perc1: Percentage of trials in which only the ready cue appears\n '''\n \n n_ts = len(tss)\n time = np.arange(Nt)\n \n strt = -0.5\n inputt = np.zeros(( trials, Nt, 3))\n outputt = strt*np.ones((trials, Nt, 1))\n maskt = np.zeros((trials, Nt, 1))\n inc_mask = 20\n \n r_inp = np.ones((trials, Nt))\n s_inp_R = np.zeros((trials, Nt))\n s_inp_S = np.zeros((trials, Nt))\n \n \n \n if just==-1: #all types of trials \n ct = np.random.randint(n_ts, size = trials)\n \n else:\n ct = just*np.ones(trials, dtype = np.int8)\n \n # Don't have nor set nor ready cue in a set of inputs\n ct2 = np.random.rand(trials)<perc\n \n # Don't have a set cue\n ct3 = np.random.rand(trials)<perc1\n \n \n rnd = np.zeros(trials)\n if SR1_on>0:\n rnd = np.random.randint(-SR1_on, SR1_on, trials)\n\n for itr in range(trials): \n redset = tss[ct[itr]]\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>R1_on+rnd[itr]] = 1.\n #s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>R1_on+1+rnd[itr]+redset)*(time<2*redset+R1_on+1+rnd[itr])\n mask_aft = time>=2*redset+R1_on+1+rnd[itr]\n s_inp_R[itr, time>R1_on+rnd[itr]] = 1.\n s_inp_R[itr, time>R1_on+rnd[itr]+redset] = 0.\n s_inp_S[itr, time>R1_on+rnd[itr]+redset] = 10.\n s_inp_S[itr, time>1+R1_on+rnd[itr]+redset] = 0.\n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n maskt[itr,:,0] = (time>R1_on+1+rnd[itr]+redset-inc_mask)*(time<2*redset+R1_on+1+rnd[itr]+inc_mask)\n\n inputt[:,:,0] = r_inp\n inputt[:,:,1] = s_inp_R\n inputt[:,:,2] = s_inp_S\n \n \n dtype = torch.FloatTensor \n inputt = torch.from_numpy(inputt).type(dtype)\n outputt = torch.from_numpy(outputt).type(dtype)\n maskt = torch.from_numpy(maskt).type(dtype)\n \n return(inputt, outputt, maskt, ct, ct2, ct3)\n \n#%%\ndef create_inp_out_rsg2(trials, Nt, tss, R1_on, SR1_on, just=-1, perc = 0.1, perc1 = 0.1):\n '''\n trials: Number of trials\n Nt : Number of time points\n tss : Intervals between set and go\n R1_on: Time of ready\n SR1_on: Standard deviation of the temporal onset of \"Ready\".\n perc: Percentage of trials in which no transient inputs appear\n perc1: Percentage of trials in which only the ready cue appears\n '''\n \n n_ts = len(tss)\n time = np.arange(Nt)\n \n strt = -0.5\n inputt = np.zeros(( trials, Nt, 3))\n outputt = strt*np.ones((trials, Nt, 1))\n maskt = np.zeros((trials, Nt, 1))\n inc_mask = 20\n \n r_inp = np.ones((trials, Nt))\n s_inp_R = np.zeros((trials, Nt))\n s_inp_S = np.zeros((trials, Nt))\n \n \n \n if just==-1: #all types of trials \n ct = np.random.randint(n_ts, size = trials)\n \n else:\n ct = just*np.ones(trials, dtype = np.int8)\n \n # Don't have nor set nor ready cue in a set of inputs\n ct2 = np.random.rand(trials)<perc\n \n # Don't have a set cue\n ct3 = np.random.rand(trials)<perc1\n \n \n rnd = np.zeros(trials)\n if SR1_on>0:\n rnd = np.random.randint(-SR1_on, SR1_on, trials)\n\n for itr in range(trials): \n redset = tss[ct[itr]]\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n \n #s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>R1_on+1+rnd[itr]+redset)*(time<2*redset+R1_on+1+rnd[itr])\n mask_aft = 
time>=2*redset+R1_on+1+rnd[itr]\n s_inp_R[itr, time>R1_on+rnd[itr]] = 1.\n s_inp_R[itr, time>R1_on+rnd[itr]+1] = 0.\n s_inp_S[itr, time>R1_on+rnd[itr]+redset] = 10.\n s_inp_S[itr, time>1+R1_on+rnd[itr]+redset] = 0.\n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n maskt[itr,:,0] = (time>R1_on+1+rnd[itr]+redset-inc_mask)*(time<2*redset+R1_on+1+rnd[itr]+inc_mask)\n\n inputt[:,:,0] = r_inp\n inputt[:,:,1] = s_inp_R\n inputt[:,:,2] = s_inp_S\n \n \n dtype = torch.FloatTensor \n inputt = torch.from_numpy(inputt).type(dtype)\n outputt = torch.from_numpy(outputt).type(dtype)\n maskt = torch.from_numpy(maskt).type(dtype)\n \n return(inputt, outputt, maskt, ct, ct2, ct3)\n \n#%%\ndef create_inp_out_rsg3(trials, Nt, tss, R1_on, SR1_on, fact = 5, just=-1, perc = 0.1, perc1 = 0.1):\n '''\n trials: Number of trials\n Nt : Number of time points\n tss : Intervals between set and go\n R1_on: Time of ready\n SR1_on: Standard deviation of the temporal onset of \"Ready\".\n perc: Percentage of trials in which no transient inputs appear\n perc1: Percentage of trials in which only the ready cue appears\n '''\n \n n_ts = len(tss)\n time = np.arange(Nt)\n \n tss_comp = np.round(tss/fact)\n \n strt = -0.5\n inputt = np.zeros(( trials, Nt, 3))\n outputt = strt*np.ones((trials, Nt, 1))\n maskt = np.zeros((trials, Nt, 1))\n inc_mask = 30\n \n r_inp = np.ones((trials, Nt))\n s_inp_R = np.zeros((trials, Nt))\n s_inp_S = np.zeros((trials, Nt))\n \n \n \n if just==-1: #all types of trials \n ct = np.random.randint(n_ts, size = trials)\n \n else:\n ct = just*np.ones(trials, dtype = np.int8)\n \n # Don't have nor set nor ready cue in a set of inputs\n ct2 = np.random.rand(trials)<perc\n \n # Don't have a set cue\n ct3 = np.random.rand(trials)<perc1\n \n \n rnd = np.zeros(trials)\n if SR1_on>0:\n rnd = np.random.randint(-SR1_on, SR1_on, trials)\n\n for itr in range(trials): \n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n \n #s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>R1_on+1+rnd[itr]+redset_comp)*(time<redset_comp+redset+R1_on+1+rnd[itr])\n mask_aft = time>=redset_comp+redset+R1_on+1+rnd[itr]\n s_inp_R[itr, time>R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>R1_on+rnd[itr]+1] = 0.\n s_inp_S[itr, time>R1_on+rnd[itr]+redset_comp] = 10.\n s_inp_S[itr, time>1+R1_on+rnd[itr]+redset_comp] = 0.\n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n maskt[itr,:,0] = (time>redset_comp+R1_on+1+rnd[itr]-inc_mask)*(time<redset_comp+redset+R1_on+1+rnd[itr]+inc_mask)\n\n inputt[:,:,0] = r_inp\n inputt[:,:,1] = s_inp_R\n inputt[:,:,2] = s_inp_S\n \n \n dtype = torch.FloatTensor \n inputt = torch.from_numpy(inputt).type(dtype)\n outputt = torch.from_numpy(outputt).type(dtype)\n maskt = torch.from_numpy(maskt).type(dtype)\n \n return(inputt, outputt, maskt, ct, ct2, ct3)\n \n#%%\ndef create_inp_out_rsg_2out(trials, Nt, tss, R1_on, SR1_on, fact = 5, just=-1, \n perc = 0.1, perc1 = 0.1, noset=False, noready=False, align_set = False, align_go = False, fixGo = True):\n 
'''\n trials: Number of trials\n Nt : Number of time points\n tss : Intervals between set and go\n R1_on: Time of ready\n SR1_on: Standard deviation of the temporal onset of \"Ready\".\n perc: Percentage of trials in which no transient inputs appear\n perc1: Percentage of trials in which only the ready cue appears\n '''\n \n n_ts = len(tss)\n time = np.arange(Nt)\n \n tss_comp = np.round(tss/fact)\n \n strt = -0.5\n inputt = np.zeros(( trials, Nt, 1))\n outputt = strt*np.ones((trials, Nt, 2))\n maskt = np.zeros((trials, Nt, 2))\n interval = np.min(tss_comp)//2\n inc_mask = 30\n \n s_inp_R = np.zeros((trials, Nt))\n s_inp_S = np.zeros((trials, Nt))\n \n \n \n if just==-1: #all types of trials \n ct = np.random.randint(n_ts, size = trials)\n \n else:\n ct = just*np.ones(trials, dtype = np.int8)\n \n # Don't have nor set nor ready cue in a set of inputs\n ct2 = np.random.rand(trials)<perc\n \n # Don't have a set cue\n ct3 = np.random.rand(trials)<perc1\n \n \n rnd = np.zeros(trials)\n if SR1_on>0:\n rnd = np.random.randint(-SR1_on, SR1_on, trials)\n if not align_set and not align_go:\n for itr in range(trials): \n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>R1_on+rnd[itr]-interval, time<R1_on+rnd[itr])\n mask22 = np.logical_and(time>R1_on+rnd[itr]+interval, time<R1_on+rnd[itr]+2*interval)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n #s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>R1_on+1+rnd[itr]+redset_comp)*(time<redset_comp+redset+R1_on+1+rnd[itr])\n mask_aft = time>=redset_comp+redset+R1_on+1+rnd[itr]\n s_inp_R[itr, time>R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>R1_on+rnd[itr]+1] = 0.\n s_inp_S[itr, time>R1_on+rnd[itr]+redset_comp] = 10.\n s_inp_S[itr, time>1+R1_on+rnd[itr]+redset_comp] = 0.\n \n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>R1_on+rnd[itr]-interval, time<R1_on+rnd[itr])\n mask22 = np.logical_and(time>R1_on+rnd[itr]+redset_comp-interval, time<R1_on+rnd[itr]+redset_comp)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n maskt[itr,:,0] = (time>redset_comp+R1_on+1+rnd[itr]-inc_mask)*(time<redset_comp+redset+R1_on+1+rnd[itr]+inc_mask)\n \n elif align_set:\n \n if fixGo==True:\n fixT = R1_on + np.max(tss)\n else:\n fixT = fixGo \n \n \n for itr in range(trials): \n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>fixT-R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+fixT-R1_on+rnd[itr]] = 0.\n\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>1+fixT)*(time<redset+fixT+1)\n mask_aft = time>=redset+1+fixT\n s_inp_R[itr, time>fixT-redset_comp-rnd[itr]] = 10.\n s_inp_R[itr, time>fixT-redset_comp+1-rnd[itr]] = 0.\n \n s_inp_S[itr, time>fixT] = 10.\n s_inp_S[itr, time>1+fixT] = 0.\n \n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>fixT-interval, time<fixT)\n mask22 = np.logical_and(time>fixT+redset, 
time<fixT+redset_comp+interval)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n else:\n if fixGo:\n fixT = R1_on + np.max(tss)\n fixT2 = R1_on + np.max(tss)*2\n else:\n fixT = fixGo - np.max(tss)\n fixT2 = fixGo\n \n \n for itr in range(trials): \n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>fixT-R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+fixT-R1_on+rnd[itr]] = 0.\n\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>fixT2-redset)*(time<fixT2)\n mask_aft = time>=fixT2\n s_inp_R[itr, time>fixT2-2*redset_comp-rnd[itr]] = 10.\n s_inp_R[itr, time>fixT2-2*redset_comp+1-rnd[itr]] = 0.\n \n s_inp_S[itr, time>fixT2-redset] = 10.\n s_inp_S[itr, time>1+fixT2-redset] = 0.\n \n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>fixT2-redset-interval, time<fixT2-redset)\n mask22 = np.logical_and(time>fixT2, time<fixT2+interval)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n \n #maskt[itr,:,0] = (time>set_time-inc_mask)*(time<1+set_time+redset_comp+inc_mask)\n \n if noready ==False:\n inputt[:,:,0] += s_inp_R\n if noset==False:\n inputt[:,:,0] += s_inp_S\n \n \n dtype = torch.FloatTensor \n inputt = torch.from_numpy(inputt).type(dtype)\n outputt = torch.from_numpy(outputt).type(dtype)\n maskt = torch.from_numpy(maskt).type(dtype)\n \n return(inputt, outputt, maskt, ct, ct2, ct3)\n \n#%%\ndef create_inp_out_rsg_2out2inp(trials, Nt, tss, R1_on, SR1_on, fact = 5, just=-1, \n perc = 0.1, perc1 = 0.1, noset=False, noready=False, align_set = False, align_go = False, fixGo = True):\n '''\n trials: Number of trials\n Nt : Number of time points\n tss : Intervals between set and go\n R1_on: Time of ready\n SR1_on: Standard deviation of the temporal onset of \"Ready\".\n perc: Percentage of trials in which no transient inputs appear\n perc1: Percentage of trials in which only the ready cue appears\n '''\n \n n_ts = len(tss)\n time = np.arange(Nt)\n \n tss_comp = np.round(tss/fact)\n \n strt = -0.5\n inputt = np.zeros(( trials, Nt, 2))\n outputt = strt*np.ones((trials, Nt, 2))\n maskt = np.zeros((trials, Nt, 2))\n interval = np.min(tss_comp)//2\n inc_mask = 30\n \n s_inp_R = np.zeros((trials, Nt))\n s_inp_S = np.zeros((trials, Nt))\n \n \n \n if just==-1: #all types of trials \n ct = np.random.randint(n_ts, size = trials)\n \n else:\n ct = just*np.ones(trials, dtype = np.int8)\n \n # Don't have nor set nor ready cue in a set of inputs\n ct2 = np.random.rand(trials)<perc\n \n # Don't have a set cue\n ct3 = np.random.rand(trials)<perc1\n \n \n rnd = np.zeros(trials)\n if SR1_on>0:\n rnd = np.random.randint(-SR1_on, SR1_on, trials)\n if not align_set and not align_go:\n for itr in range(trials): \n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n \n outputt[itr,:,1] = 0.\n mask11 = 
np.logical_and(time>R1_on+rnd[itr]-interval, time<R1_on+rnd[itr])\n mask22 = np.logical_and(time>R1_on+rnd[itr]+interval, time<R1_on+rnd[itr]+2*interval)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n #s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>R1_on+1+rnd[itr]+redset_comp)*(time<redset_comp+redset+R1_on+1+rnd[itr])\n mask_aft = time>=redset_comp+redset+R1_on+1+rnd[itr]\n s_inp_R[itr, time>R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>R1_on+rnd[itr]+1] = 0.\n s_inp_S[itr, time>R1_on+rnd[itr]+redset_comp] = 10.\n s_inp_S[itr, time>1+R1_on+rnd[itr]+redset_comp] = 0.\n \n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>R1_on+rnd[itr]-interval, time<R1_on+rnd[itr])\n mask22 = np.logical_and(time>R1_on+rnd[itr]+redset_comp-interval, time<R1_on+rnd[itr]+redset_comp)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n maskt[itr,:,0] = (time>redset_comp+R1_on+1+rnd[itr]-inc_mask)*(time<redset_comp+redset+R1_on+1+rnd[itr]+inc_mask)\n \n elif align_set:\n \n if fixGo==True:\n fixT = R1_on + np.max(tss)\n else:\n fixT = fixGo \n \n \n for itr in range(trials): \n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>fixT-R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+fixT-R1_on+rnd[itr]] = 0.\n\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>1+fixT)*(time<redset+fixT+1)\n mask_aft = time>=redset+1+fixT\n s_inp_R[itr, time>fixT-redset_comp-rnd[itr]] = 10.\n s_inp_R[itr, time>fixT-redset_comp+1-rnd[itr]] = 0.\n \n s_inp_S[itr, time>fixT] = 10.\n s_inp_S[itr, time>1+fixT] = 0.\n \n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>fixT-interval, time<fixT)\n mask22 = np.logical_and(time>fixT+redset, time<fixT+redset_comp+interval)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n else:\n if fixGo:\n fixT = R1_on + np.max(tss)\n fixT2 = R1_on + np.max(tss)*2\n else:\n fixT = fixGo - np.max(tss)\n fixT2 = fixGo\n \n \n for itr in range(trials): \n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>fixT-R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+fixT-R1_on+rnd[itr]] = 0.\n\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>fixT2-redset)*(time<fixT2)\n mask_aft = time>=fixT2\n s_inp_R[itr, time>fixT2-2*redset_comp-rnd[itr]] = 10.\n s_inp_R[itr, time>fixT2-2*redset_comp+1-rnd[itr]] = 0.\n \n s_inp_S[itr, time>fixT2-redset] = 10.\n s_inp_S[itr, time>1+fixT2-redset] = 0.\n \n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>fixT2-redset-interval, time<fixT2-redset)\n mask22 = np.logical_and(time>fixT2, time<fixT2+interval)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n if 
sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n \n #maskt[itr,:,0] = (time>set_time-inc_mask)*(time<1+set_time+redset_comp+inc_mask)\n \n if noready ==False:\n inputt[:,:,0] += s_inp_R\n if noset==False:\n inputt[:,:,1] += s_inp_S\n \n \n dtype = torch.FloatTensor \n inputt = torch.from_numpy(inputt).type(dtype)\n outputt = torch.from_numpy(outputt).type(dtype)\n maskt = torch.from_numpy(maskt).type(dtype)\n \n return(inputt, outputt, maskt, ct, ct2, ct3)\n \n \n#%%\ndef create_inp_out_rsg_cont(trials, Nt, tss, R1_on, SR1_on, fact = 1, just=-1, \n perc = 0.1, perc1 = 0.1, noset=False, noready=False, align_set = False, align_go = False, fixGo = True, bayes=False):\n '''\n trials: Number of trials\n Nt : Number of time points\n tss : Intervals between set and go\n R1_on: Time of ready\n SR1_on: Standard deviation of the temporal onset of \"Ready\".\n perc: Percentage of trials in which no transient inputs appear\n perc1: Percentage of trials in which only the ready cue appears\n '''\n \n n_ts = len(tss)\n time = np.arange(Nt)\n \n tss_comp = np.round(tss/fact)\n if len(tss_comp)>2:\n tss_comp = np.array((tss_comp[0], tss_comp[1]))\n \n strt = -0.5\n inputt = np.zeros(( trials, Nt, 1))\n outputt = strt*np.ones((trials, Nt, 2))\n maskt = np.zeros((trials, Nt, 2))\n interval = np.min(tss_comp)//2\n inc_mask = 30\n \n s_inp_R = np.zeros((trials, Nt))\n s_inp_S = np.zeros((trials, Nt))\n \n if just==-1: #all types of trials \n ct = np.random.randint(n_ts, size = trials)\n \n else:\n ct = just*np.ones(trials, dtype = np.int8)\n \n # Don't have nor set nor ready cue in a set of inputs\n ct2 = np.random.rand(trials)<perc\n \n # Don't have a set cue\n ct3 = np.random.rand(trials)<perc1\n time_shown = np.zeros(trials)\n \n rnd = np.zeros(trials)\n if SR1_on>0:\n rnd = np.random.randint(-SR1_on, SR1_on, trials)\n if not align_set and not align_go:\n for itr in range(trials): \n redset = np.random.randint(tss_comp[0], tss_comp[1])#tss[ct[itr]]\n if bayes==False:\n redset_real = redset\n else:\n redset_real = redset + np.round(bayes*redset*np.random.randn())\n ct[itr] = redset\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>R1_on+rnd[itr]-interval, time<R1_on+rnd[itr])\n mask22 = np.logical_and(time>R1_on+rnd[itr]+interval, time<R1_on+rnd[itr]+2*interval)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt \n else:\n maskt[itr,:,0] = (time>R1_on+1+rnd[itr]+redset_real)*(time<redset_real+redset+R1_on+1+rnd[itr])\n mask_aft = time>=redset_real+redset+R1_on+1+rnd[itr]\n s_inp_R[itr, time>R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>R1_on+rnd[itr]+1] = 0.\n s_inp_S[itr, time>R1_on+rnd[itr]+redset_real] = 10.\n s_inp_S[itr, time>1+R1_on+rnd[itr]+redset_real] = 0.\n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>R1_on+rnd[itr]-interval, time<R1_on+rnd[itr])\n mask22 = np.logical_and(time>R1_on+rnd[itr]+redset_real-interval, time<R1_on+rnd[itr]+redset_real)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, 
int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n maskt[itr,:,0] = (time>redset_real+R1_on+1+rnd[itr]-inc_mask)*(time<redset+redset+R1_on+1+rnd[itr]+inc_mask)\n time_shown[itr] = redset_real\n \n elif align_set:\n \n if fixGo==True:\n fixT = R1_on + np.max(tss)\n else:\n fixT = fixGo \n \n \n for itr in range(trials): \n redset = np.random.randint(tss_comp[0], tss_comp[1])#tss[ct[itr]]\n if bayes==False:\n redset_real = redset\n else:\n redset_real = redset + np.round(bayes*redset*np.random.randn())\n ct[itr] = redset\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>fixT-R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+fixT-R1_on+rnd[itr]] = 0.\n\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>1+fixT)*(time<redset+fixT+1)\n mask_aft = time>=redset+1+fixT\n s_inp_R[itr, time>fixT-redset_real-rnd[itr]] = 10.\n s_inp_R[itr, time>fixT-redset_real+1-rnd[itr]] = 0.\n \n s_inp_S[itr, time>fixT] = 10.\n s_inp_S[itr, time>1+fixT] = 0.\n \n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>fixT-interval, time<fixT)\n mask22 = np.logical_and(time>fixT+redset, time<fixT+redset+interval)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n time_shown[itr] = redset_real\n \n else:\n if fixGo:\n fixT = R1_on + np.max(tss)\n fixT2 = R1_on + np.max(tss)*2\n else:\n fixT = fixGo - np.max(tss)\n fixT2 = fixGo\n \n \n for itr in range(trials): \n redset = np.random.randint(tss_comp[0], tss_comp[1])#tss[ct[itr]]\n if bayes==False:\n redset_real = redset\n else:\n redset_real = redset + np.round(bayes*redset*np.random.randn())\n ct[itr] = redset\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>fixT-R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+fixT-R1_on+rnd[itr]] = 0.\n\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>fixT2-redset)*(time<fixT2)\n mask_aft = time>=fixT2\n s_inp_R[itr, time>fixT2-redset_real-redset-rnd[itr]] = 10.\n s_inp_R[itr, time>fixT2-redset_real-redset+1-rnd[itr]] = 0.\n \n s_inp_S[itr, time>fixT2-redset] = 10.\n s_inp_S[itr, time>1+fixT2-redset] = 0.\n \n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>fixT2-redset-interval, time<fixT2-redset)\n mask22 = np.logical_and(time>fixT2, time<fixT2+interval)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n time_shown[itr] = redset_real\n \n \n \n if noready ==False:\n inputt[:,:,0] += s_inp_R\n if noset==False:\n inputt[:,:,0] += s_inp_S\n \n \n dtype = torch.FloatTensor \n inputt = torch.from_numpy(inputt).type(dtype)\n outputt = torch.from_numpy(outputt).type(dtype)\n maskt = torch.from_numpy(maskt).type(dtype)\n return(inputt, outputt, maskt, ct, ct2, ct3, time_shown)\n \n\n#%%\ndef create_inp_out_rSSg(trials, Nt, tss, R1_on, SR1_on, fact = 5, just=-1, perc = 0.1, perc1 = 0.1, delayF = 0, delay_min = 20, delay_max = 250, noset=False, noready=False, align_set = False, align_go 
= False):\n '''\n trials: Number of trials\n Nt : Number of time points\n tss : Intervals between set and go\n R1_on: Time of ready\n SR1_on: Standard deviation of the temporal onset of \"Ready\".\n perc: Percentage of trials in which no transient inputs appear\n perc1: Percentage of trials in which only the ready cue appears\n '''\n \n n_ts = len(tss)\n time = np.arange(Nt)\n \n tss_comp = np.round(tss/fact)\n \n strt = -0.5\n inputt = np.zeros(( trials, Nt, 1))\n outputt = strt*np.ones((trials, Nt, 2))\n maskt = np.zeros((trials, Nt, 2))\n interval = np.min(tss_comp)//2\n inc_mask = 30\n \n s_inp_R = np.zeros((trials, Nt))\n s_inp_S1 = np.zeros((trials, Nt))\n s_inp_S2 = np.zeros((trials, Nt))\n \n \n if delayF==0:\n delayF = np.round(np.mean((delay_min, delay_max)))\n \n if just==-1: #all types of trials \n ct = np.random.randint(n_ts, size = trials)\n \n else:\n ct = just*np.ones(trials, dtype = np.int8)\n \n # Don't have nor set nor ready cue in a set of inputs\n ct2 = np.random.rand(trials)<perc\n \n # Don't have a set cue\n ct3 = np.random.rand(trials)<perc1\n \n \n rnd = np.zeros(trials)\n if SR1_on>0:\n rnd = np.random.randint(-SR1_on, SR1_on, trials)\n if not align_set and not align_go:\n for itr in range(trials): \n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n delay = np.random.randint(delay_min, delay_max)\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>R1_on+rnd[itr]+delay-interval, time<R1_on+rnd[itr]+delay)\n mask22 = np.logical_and(time>R1_on+rnd[itr]+delay+interval, time<R1_on+rnd[itr]+2*interval+delay)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n #s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>R1_on+1+rnd[itr]+redset_comp+delay)*(time<redset_comp+redset+R1_on+1+rnd[itr]+delay)\n mask_aft = time>=redset_comp+redset+R1_on+1+rnd[itr]+delay\n s_inp_R[itr, time>R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>R1_on+rnd[itr]+1] = 0.\n s_inp_S1[itr, time>R1_on+rnd[itr]+redset_comp] = 10.\n s_inp_S1[itr, time>1+R1_on+rnd[itr]+redset_comp] = 0.\n \n s_inp_S2[itr, time>R1_on+rnd[itr]+redset_comp+delay] = 10.\n s_inp_S2[itr, time>1+R1_on+rnd[itr]+redset_comp+delay] = 0.\n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>R1_on+rnd[itr]-interval+delay, time<R1_on+rnd[itr]+delay)\n mask22 = np.logical_and(time>R1_on+rnd[itr]+redset_comp-interval+delay, time<R1_on+rnd[itr]+redset_comp+delay)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n maskt[itr,:,0] = (time>redset_comp+R1_on+1+rnd[itr]-inc_mask+delay)*(time<redset_comp+redset+R1_on+1+rnd[itr]+inc_mask+delay)\n \n elif align_set:\n fixT = R1_on + np.max(tss)\n\n\n for itr in range(trials): \n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>fixT-R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+fixT-R1_on+rnd[itr]] = 0.\n\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>1+fixT)*(time<redset+fixT+1)\n mask_aft = time>=redset+1+fixT\n 
s_inp_R[itr, time>fixT-redset_comp-rnd[itr]-delayF] = 10.\n s_inp_R[itr, time>fixT-redset_comp+1-rnd[itr]-delayF] = 0.\n \n s_inp_S1[itr, time>fixT-delayF] = 10.\n s_inp_S1[itr, time>1+fixT-delayF] = 0.\n \n \n s_inp_S2[itr, time>fixT] = 10.\n s_inp_S2[itr, time>1+fixT] = 0.\n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>fixT-interval, time<fixT)\n mask22 = np.logical_and(time>fixT+redset, time<fixT+redset_comp+interval)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n else:\n fixT = R1_on + np.max(tss)\n fixT2 = R1_on + np.max(tss)*2\n \n for itr in range(trials): \n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>fixT-R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+fixT-R1_on+rnd[itr]] = 0.\n\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>fixT2-redset)*(time<fixT2)\n mask_aft = time>=fixT2\n s_inp_R[itr, time>fixT2-2*redset_comp-rnd[itr]-delayF] = 10.\n s_inp_R[itr, time>fixT2-2*redset_comp+1-rnd[itr]-delayF] = 0.\n \n s_inp_S1[itr, time>fixT2-redset-delayF] = 10.\n s_inp_S1[itr, time>1+fixT2-redset-delayF] = 0.\n \n s_inp_S2[itr, time>fixT2-redset] = 10.\n s_inp_S2[itr, time>1+fixT2-redset] = 0.\n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>fixT2-redset-interval, time<fixT2-redset)\n mask22 = np.logical_and(time>fixT2, time<fixT2+interval)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n \n #maskt[itr,:,0] = (time>set_time-inc_mask)*(time<1+set_time+redset_comp+inc_mask)\n \n if noready ==False:\n inputt[:,:,0] += s_inp_R\n if noset==False:\n inputt[:,:,0] += s_inp_S1\n inputt[:,:,0] += s_inp_S2\n \n \n \n dtype = torch.FloatTensor \n inputt = torch.from_numpy(inputt).type(dtype)\n outputt = torch.from_numpy(outputt).type(dtype)\n maskt = torch.from_numpy(maskt).type(dtype)\n \n return(inputt, outputt, maskt, ct, ct2, ct3)\n \ndef create_inp_out_rSSg_2in(trials, Nt, tss, R1_on, SR1_on, fact = 5, just=-1, perc = 0.1, perc1 = 0.1, delayF = 0, \n delay_min = 20, delay_max = 250, noset=False, noready=False, align_set = False, align_go = False):\n '''\n trials: Number of trials\n Nt : Number of time points\n tss : Intervals between set and go\n R1_on: Time of ready\n SR1_on: Standard deviation of the temporal onset of \"Ready\".\n perc: Percentage of trials in which no transient inputs appear\n perc1: Percentage of trials in which only the ready cue appears\n '''\n \n n_ts = len(tss)\n time = np.arange(Nt)\n \n tss_comp = np.round(tss/fact)\n \n strt = -0.5\n inputt = np.zeros(( trials, Nt, 2))\n outputt = strt*np.ones((trials, Nt, 2))\n maskt = np.zeros((trials, Nt, 2))\n interval = np.min(tss_comp)//2\n inc_mask = 30\n \n s_inp_R = np.zeros((trials, Nt))\n s_inp_S1 = np.zeros((trials, Nt))\n s_inp_S2 = np.zeros((trials, Nt))\n \n \n if delayF==0:\n delayF = np.round(np.mean((delay_min, delay_max)))\n \n if just==-1: #all types of trials \n ct = np.random.randint(n_ts, size = trials)\n 
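# just==-1 draws a random interval index per trial; any other value pins all trials to that index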
\n else:\n ct = just*np.ones(trials, dtype = np.int8)\n \n # Don't have nor set nor ready cue in a set of inputs\n ct2 = np.random.rand(trials)<perc\n \n # Don't have a set cue\n ct3 = np.random.rand(trials)<perc1\n \n \n rnd = np.zeros(trials)\n if SR1_on>0:\n rnd = np.random.randint(-SR1_on, SR1_on, trials)\n if not align_set and not align_go:\n for itr in range(trials): \n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n delay = np.random.randint(delay_min, delay_max)\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>R1_on+rnd[itr]+delay-interval, time<R1_on+rnd[itr]+delay)\n mask22 = np.logical_and(time>R1_on+rnd[itr]+delay+interval, time<R1_on+rnd[itr]+2*interval+delay)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n #s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>R1_on+1+rnd[itr]+redset_comp+delay)*(time<redset_comp+redset+R1_on+1+rnd[itr]+delay)\n mask_aft = time>=redset_comp+redset+R1_on+1+rnd[itr]+delay\n s_inp_R[itr, time>R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>R1_on+rnd[itr]+1] = 0.\n s_inp_S1[itr, time>R1_on+rnd[itr]+redset_comp] = 10.\n s_inp_S1[itr, time>1+R1_on+rnd[itr]+redset_comp] = 0.\n \n s_inp_S2[itr, time>R1_on+rnd[itr]+redset_comp+delay] = 10.\n s_inp_S2[itr, time>1+R1_on+rnd[itr]+redset_comp+delay] = 0.\n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>R1_on+rnd[itr]-interval+delay, time<R1_on+rnd[itr]+delay)\n mask22 = np.logical_and(time>R1_on+rnd[itr]+redset_comp-interval+delay, time<R1_on+rnd[itr]+redset_comp+delay)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n maskt[itr,:,0] = (time>redset_comp+R1_on+1+rnd[itr]-inc_mask+delay)*(time<redset_comp+redset+R1_on+1+rnd[itr]+inc_mask+delay)\n \n elif align_set:\n fixT = R1_on + np.max(tss)\n\n\n for itr in range(trials): \n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>fixT-R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+fixT-R1_on+rnd[itr]] = 0.\n\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>1+fixT)*(time<redset+fixT+1)\n mask_aft = time>=redset+1+fixT\n s_inp_R[itr, time>fixT-redset_comp-rnd[itr]-delayF] = 10.\n s_inp_R[itr, time>fixT-redset_comp+1-rnd[itr]-delayF] = 0.\n \n s_inp_S1[itr, time>fixT-delayF] = 10.\n s_inp_S1[itr, time>1+fixT-delayF] = 0.\n \n \n s_inp_S2[itr, time>fixT] = 10.\n s_inp_S2[itr, time>1+fixT] = 0.\n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>fixT-interval, time<fixT)\n mask22 = np.logical_and(time>fixT+redset, time<fixT+redset_comp+interval)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n else:\n fixT = R1_on + np.max(tss)\n fixT2 = R1_on + np.max(tss)*2\n \n for itr in range(trials): 
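# Go-aligned branch: cues are placed so that the Go time falls at fixT2 for every trial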
\n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>fixT-R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+fixT-R1_on+rnd[itr]] = 0.\n\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>fixT2-redset)*(time<fixT2)\n mask_aft = time>=fixT2\n s_inp_R[itr, time>fixT2-2*redset_comp-rnd[itr]-delayF] = 10.\n s_inp_R[itr, time>fixT2-2*redset_comp+1-rnd[itr]-delayF] = 0.\n \n s_inp_S1[itr, time>fixT2-redset-delayF] = 10.\n s_inp_S1[itr, time>1+fixT2-redset-delayF] = 0.\n \n s_inp_S2[itr, time>fixT2-redset] = 10.\n s_inp_S2[itr, time>1+fixT2-redset] = 0.\n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>fixT2-redset-interval, time<fixT2-redset)\n mask22 = np.logical_and(time>fixT2, time<fixT2+interval)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n \n if noready ==False:\n inputt[:,:,0] += s_inp_R\n if noset==False:\n inputt[:,:,0] += s_inp_S1\n inputt[:,:,1] += s_inp_S2\n \n \n \n dtype = torch.FloatTensor \n inputt = torch.from_numpy(inputt).type(dtype)\n outputt = torch.from_numpy(outputt).type(dtype)\n maskt = torch.from_numpy(maskt).type(dtype)\n \n return(inputt, outputt, maskt, ct, ct2, ct3)\n \n \n \ndef create_inp_out_rsg_justramp(trials, Nt, tss, R1_on, SR1_on, fact = 5, just=-1, \n perc = 0.1, perc1 = 0.1, inc_mask = 30, \n noset=False, noready=False, align_set = False, align_go = False):\n '''\n trials: Number of trials\n Nt : Number of time points\n tss : Intervals between set and go\n R1_on: Time of ready\n SR1_on: Standard deviation of the temporal onset of \"Ready\".\n perc: Percentage of trials in which no transient inputs appear\n perc1: Percentage of trials in which only the ready cue appears\n '''\n \n n_ts = len(tss)\n time = np.arange(Nt)\n \n tss_comp = np.round(tss/fact)\n \n strt = -0.5\n inputt = np.zeros(( trials, Nt, 1))\n outputt = strt*np.ones((trials, Nt, 2))\n maskt = np.zeros((trials, Nt, 2))\n interval = np.min(tss_comp)//2\n \n s_inp_R = np.zeros((trials, Nt))\n s_inp_S = np.zeros((trials, Nt))\n \n \n \n if just==-1: #all types of trials \n ct = np.random.randint(n_ts, size = trials)\n \n else:\n ct = just*np.ones(trials, dtype = np.int8)\n \n # Don't have nor set nor ready cue in a set of inputs\n ct2 = np.random.rand(trials)<perc\n \n # Don't have a set cue\n ct3 = np.random.rand(trials)<perc1\n \n \n rnd = np.zeros(trials)\n if SR1_on>0:\n rnd = np.random.randint(-SR1_on, SR1_on, trials)\n if not align_set and not align_go:\n for itr in range(trials): \n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>R1_on+rnd[itr]-interval, time<R1_on+rnd[itr])\n mask22 = np.logical_and(time>R1_on+rnd[itr]+interval, time<R1_on+rnd[itr]+2*interval)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n #s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>R1_on+1+rnd[itr]+redset_comp)*(time<redset_comp+2*redset+R1_on+1+rnd[itr])\n mask_aft = 
time>=redset_comp+2*redset+R1_on+1+rnd[itr]\n s_inp_R[itr, time>R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>R1_on+rnd[itr]+1] = 0.\n s_inp_S[itr, time>R1_on+rnd[itr]+redset_comp] = 10.\n s_inp_S[itr, time>1+R1_on+rnd[itr]+redset_comp] = 0.\n \n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>R1_on+rnd[itr]-interval, time<R1_on+rnd[itr])\n mask22 = np.logical_and(time>R1_on+rnd[itr]+redset_comp-interval, time<R1_on+rnd[itr]+redset_comp)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n maskt[itr,:,0] = (time>redset_comp+R1_on+1+rnd[itr]-inc_mask)*(time<redset_comp+redset+R1_on+2+rnd[itr]+inc_mask)\n \n elif align_set:\n fixT = R1_on + np.max(tss)\n\n \n for itr in range(trials): \n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>fixT-R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+fixT-R1_on+rnd[itr]] = 0.\n\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>1+fixT)*(time<redset+fixT+1)\n mask_aft = time>=redset+1+fixT\n s_inp_R[itr, time>fixT-redset_comp-rnd[itr]] = 10.\n s_inp_R[itr, time>fixT-redset_comp+1-rnd[itr]] = 0.\n \n s_inp_S[itr, time>fixT] = 10.\n s_inp_S[itr, time>1+fixT] = 0.\n \n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>fixT-interval, time<fixT)\n mask22 = np.logical_and(time>fixT+redset, time<fixT+redset_comp+interval)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n else:\n fixT = R1_on + np.max(tss)\n fixT2 = R1_on + np.max(tss)*2\n \n for itr in range(trials): \n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>fixT-R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+fixT-R1_on+rnd[itr]] = 0.\n\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>fixT2-redset)*(time<fixT2)\n mask_aft = time>=fixT2\n s_inp_R[itr, time>fixT2-2*redset_comp-rnd[itr]] = 10.\n s_inp_R[itr, time>fixT2-2*redset_comp+1-rnd[itr]] = 0.\n \n s_inp_S[itr, time>fixT2-redset] = 10.\n s_inp_S[itr, time>1+fixT2-redset] = 0.\n \n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>fixT2-redset-interval, time<fixT2-redset)\n mask22 = np.logical_and(time>fixT2, time<fixT2+interval)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, 0, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n \n #maskt[itr,:,0] = (time>set_time-inc_mask)*(time<1+set_time+redset_comp+inc_mask)\n \n if noready ==False:\n inputt[:,:,0] += s_inp_R\n if noset==False:\n inputt[:,:,0] += s_inp_S\n \n \n dtype = torch.FloatTensor \n inputt = torch.from_numpy(inputt).type(dtype)\n outputt = torch.from_numpy(outputt).type(dtype)\n maskt = torch.from_numpy(maskt).type(dtype)\n return(inputt, outputt, maskt, ct, 
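# ...plus the catch-trial flags ct2 and ct3 drawn above\n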
ct2, ct3)\n \ndef create_inp_out_rsg_justramp2(trials, Nt, tss, R1_on, SR1_on, fact = 5, just=-1, \n perc = 0.1, perc1 = 0.1, inc_mask = 30, FF = 1.25,\n noset=False, noready=False, align_set = False, align_go = False,conti = True):\n '''\n trials: Number of trials\n Nt : Number of time points\n tss : Intervals between set and go\n R1_on: Time of ready\n SR1_on: Standard deviation of the temporal onset of \"Ready\".\n perc: Percentage of trials in which no transient inputs appear\n perc1: Percentage of trials in which only the ready cue appears\n '''\n \n n_ts = len(tss)\n time = np.arange(Nt)\n \n tss_comp = np.round(tss/fact)\n \n strt = -0.5\n inputt = np.zeros(( trials, Nt, 1))\n outputt = strt*np.ones((trials, Nt, 2))\n maskt = np.zeros((trials, Nt, 2))\n interval = np.min(tss_comp)//2\n \n s_inp_R = np.zeros((trials, Nt))\n s_inp_S = np.zeros((trials, Nt))\n \n \n \n if just==-1: #all types of trials \n ct = np.random.randint(n_ts, size = trials)\n \n else:\n ct = just*np.ones(trials, dtype = np.int8)\n \n # Don't have nor set nor ready cue in a set of inputs\n ct2 = np.random.rand(trials)<perc\n \n # Don't have a set cue\n ct3 = np.random.rand(trials)<perc1\n \n \n rnd = np.zeros(trials)\n \n if SR1_on>0:\n rnd = np.random.randint(-SR1_on, SR1_on, trials)\n if not align_set and not align_go:\n for itr in range(trials): \n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>R1_on+rnd[itr]-interval, time<R1_on+rnd[itr])\n mask22 = np.logical_and(time>R1_on+rnd[itr]+interval, time<R1_on+rnd[itr]+2*interval)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n #s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>R1_on+1+rnd[itr]+redset_comp)*(time<redset_comp+FF*redset+R1_on+1+rnd[itr])\n mask_aft = time>=redset_comp+FF*redset+R1_on+1+rnd[itr]\n s_inp_R[itr, time>R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>R1_on+rnd[itr]+1] = 0.\n s_inp_S[itr, time>R1_on+rnd[itr]+redset_comp] = 10.\n s_inp_S[itr, time>1+R1_on+rnd[itr]+redset_comp] = 0.\n \n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>R1_on+rnd[itr]-interval, time<R1_on+rnd[itr])\n mask22 = np.logical_and(time>R1_on+rnd[itr]+redset_comp-interval, time<R1_on+rnd[itr]+redset_comp)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n if conti:\n maskt[itr,:,0] = (time>redset_comp+R1_on+1+rnd[itr]-inc_mask)*(time<redset_comp+FF*redset+R1_on+1+rnd[itr])\n else:\n maskt[itr,:,0] = (time>redset_comp+R1_on+1+rnd[itr]-inc_mask)*(time<redset_comp+redset+R1_on+1+rnd[itr])\n \n elif align_set:\n fixT = R1_on + np.max(tss)\n\n \n for itr in range(trials): \n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>fixT-R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+fixT-R1_on+rnd[itr]] = 0.\n\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>1+fixT)*(time<redset+fixT+1)\n mask_aft = time>=redset+1+fixT\n s_inp_R[itr, 
time>fixT-redset_comp-rnd[itr]] = 10.\n s_inp_R[itr, time>fixT-redset_comp+1-rnd[itr]] = 0.\n \n s_inp_S[itr, time>fixT] = 10.\n s_inp_S[itr, time>1+fixT] = 0.\n \n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>fixT-interval, time<fixT)\n mask22 = np.logical_and(time>fixT+redset, time<fixT+redset_comp+interval)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n else:\n fixT = R1_on + np.max(tss)\n fixT2 = R1_on + np.max(tss)*2\n \n for itr in range(trials): \n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>fixT-R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+fixT-R1_on+rnd[itr]] = 0.\n\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>fixT2-redset)*(time<fixT2)\n mask_aft = time>=fixT2\n s_inp_R[itr, time>fixT2-FF*redset_comp-rnd[itr]] = 10.\n s_inp_R[itr, time>fixT2-FF*redset_comp+1-rnd[itr]] = 0.\n \n s_inp_S[itr, time>fixT2-redset] = 10.\n s_inp_S[itr, time>1+fixT2-redset] = 0.\n \n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>fixT2-redset-interval, time<fixT2-redset)\n mask22 = np.logical_and(time>fixT2, time<fixT2+interval)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, 0, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n \n #maskt[itr,:,0] = (time>set_time-inc_mask)*(time<1+set_time+redset_comp+inc_mask)\n \n if noready ==False:\n inputt[:,:,0] += s_inp_R\n if noset==False:\n inputt[:,:,0] += s_inp_S\n \n \n dtype = torch.FloatTensor \n inputt = torch.from_numpy(inputt).type(dtype)\n outputt = torch.from_numpy(outputt).type(dtype)\n maskt = torch.from_numpy(maskt).type(dtype)\n return(inputt, outputt, maskt, ct, ct2, ct3)\n#%%\n#def create_inp_out_rsg_2out(trials, Nt, tss, R1_on, SR1_on, fact = 5, just=-1, perc = 0.1, perc1 = 0.1):\n# '''\n# trials: Number of trials\n# Nt : Number of time points\n# tss : Intervals between set and go\n# R1_on: Time of ready\n# SR1_on: Standard deviation of the temporal onset of \"Ready\".\n# perc: Percentage of trials in which no transient inputs appear\n# perc1: Percentage of trials in which only the ready cue appears\n# '''\n# \n# n_ts = len(tss)\n# time = np.arange(Nt)\n# \n# tss_comp = np.round(tss/fact)\n# \n# strt = -0.5\n# inputt = np.zeros(( trials, Nt, 1))\n# outputt = strt*np.ones((trials, Nt, 2))\n# maskt = np.zeros((trials, Nt, 2))\n# inc_mask = 30\n# \n# interval = 35\n# \n#\n# s_inp_R = np.zeros((trials, Nt))\n# s_inp_S = np.zeros((trials, Nt))\n# \n# fixT = R1_on + np.max(tss)\n# \n# if just==-1: #all types of trials \n# ct = np.random.randint(n_ts, size = trials)\n# \n# else:\n# ct = just*np.ones(trials, dtype = np.int8)\n# \n# # Don't have nor set nor ready cue in a set of inputs\n# ct2 = np.random.rand(trials)<perc\n# \n# # Don't have a set cue\n# ct3 = np.random.rand(trials)<perc1\n# \n# \n# rnd = np.zeros(trials)\n# if SR1_on>0:\n# rnd = np.random.randint(-SR1_on, SR1_on, trials)\n#\n# for itr in range(trials): \n# redset = tss[ct[itr]]\n# redset_comp = tss_comp[ct[itr]]\n# if 
ct2[itr]:\n# maskt[itr,:,0] = time<Nt\n# s_inp_R[itr, time>fixT-R1_on+rnd[itr]] = 10.\n# s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n# \n# \n# #s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n# if ct3[itr] and ~ct2[itr]:\n# maskt[itr,:,0] = time<Nt\n# else:\n# maskt[itr,:,0] = (time>1+fixT)*(time<redset+1+fixT)\n# mask_aft = time>=redset+1+fixT\n# s_inp_R[itr, time>fixT-redset_comp-rnd[itr]] = 10.\n# s_inp_R[itr, time>fixT-redset_comp+1-rnd[itr]] = 0.\n# \n# outputt[itr,:,1] = 0.\n# mask11 = np.logical_and(time>fixT-redset_comp-rnd[itr]-interval, time<fixT-redset_comp-rnd[itr]-1)\n# mask22 = np.logical_and(time>fixT-redset_comp-rnd[itr]+interval, time<fixT-1)\n# outputt[itr,mask11,1] = strt\n# outputt[itr,mask22,1] = -strt\n# maskt[itr,:,1] =np.logical_or(mask11, mask22)\n# \n# s_inp_S[itr, time>fixT] = 10. #R1_on+rnd[itr]+redset_comp\n# s_inp_S[itr, time>1+fixT] = 0.\n# \n# if sum(maskt[itr,:,0]):\n# outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n# outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n# \n# maskt[itr,:,0] = (time>fixT-inc_mask)*(time<redset+1+fixT+inc_mask)\n#\n# inputt[:,:,0] = s_inp_R+s_inp_S\n# \n# \n# dtype = torch.FloatTensor \n# inputt = torch.from_numpy(inputt).type(dtype)\n# outputt = torch.from_numpy(outputt).type(dtype)\n# maskt = torch.from_numpy(maskt).type(dtype)\n# \n# return(inputt, outputt, maskt, ct, ct2, ct3)\n \n#%%\ndef create_inp_out_rsg4(trials, Nt, tss, R1_on, SR1_on, fact = 5, just=-1, perc = 0.1, perc1 = 0.1):\n '''\n trials: Number of trials\n Nt : Number of time points\n tss : Intervals between set and go\n R1_on: Time of ready\n SR1_on: Standard deviation of the temporal onset of \"Ready\".\n perc: Percentage of trials in which no transient inputs appear\n perc1: Percentage of trials in which only the ready cue appears\n '''\n \n n_ts = len(tss)\n time = np.arange(Nt)\n \n tss_comp = np.round(tss/fact)\n \n strt = -0.5\n inputt = np.zeros(( trials, Nt, 3))\n outputt = strt*np.ones((trials, Nt, 1))\n maskt = np.zeros((trials, Nt, 1))\n inc_mask = 30\n \n r_inp = np.ones((trials, Nt))\n s_inp_R = np.zeros((trials, Nt))\n s_inp_S = np.zeros((trials, Nt))\n \n \n fixT = R1_on + np.max(tss)\n \n if just==-1: #all types of trials \n ct = np.random.randint(n_ts, size = trials)\n \n else:\n ct = just*np.ones(trials, dtype = np.int8)\n \n # Don't have nor set nor ready cue in a set of inputs\n ct2 = np.random.rand(trials)<perc\n \n # Don't have a set cue\n ct3 = np.random.rand(trials)<perc1\n \n \n rnd = np.zeros(trials)\n if SR1_on>0:\n rnd = np.random.randint(-SR1_on, SR1_on, trials)\n\n for itr in range(trials): \n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n s_inp_R[itr, time>fixT-R1_on+rnd[itr]] = 10.\n s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n \n #s_inp_R[itr, time>1+R1_on+rnd[itr]] = 0.\n if ct3[itr] and ~ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>1+fixT)*(time<redset+1+fixT)\n mask_aft = time>=redset+1+fixT\n s_inp_R[itr, time>fixT-redset_comp-rnd[itr]] = 10.\n s_inp_R[itr, time>fixT-redset_comp+1-rnd[itr]] = 0.\n s_inp_S[itr, time>fixT] = 10. 
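# Set pulse pinned to fixT; the trailing commented expression looks like the earlier, unaligned onset: 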
#R1_on+rnd[itr]+redset_comp\n s_inp_S[itr, time>1+fixT] = 0.\n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n maskt[itr,:,0] = (time>fixT-inc_mask)*(time<redset+1+fixT+inc_mask)\n\n inputt[:,:,0] = r_inp\n inputt[:,:,1] = s_inp_R\n inputt[:,:,2] = s_inp_S\n \n \n dtype = torch.FloatTensor \n inputt = torch.from_numpy(inputt).type(dtype)\n outputt = torch.from_numpy(outputt).type(dtype)\n maskt = torch.from_numpy(maskt).type(dtype)\n \n return(inputt, outputt, maskt, ct, ct2, ct3)\n#%%\ndef create_inp_out_rsg_step(trials, Nt, tss, R1_on, SR1_on, fact = 5, just=-1, perc = 0.1, perc1 = 0.0, noready=False):\n '''\n trials: Number of trials\n Nt : Number of time points\n tss : Intervals between set and go\n R1_on: Time of ready\n SR1_on: Standard deviation of the temporal onset of \"Ready\".\n perc: Percentage of trials in which no transient inputs appear\n perc1: Percentage of trials in which only the ready cue appears\n '''\n \n n_ts = len(tss)\n time = np.arange(Nt)\n \n tss_comp = np.round(tss/fact)\n \n strt = -0.5\n inputt = np.zeros(( trials, Nt, 1))\n outputt = strt*np.ones((trials, Nt, 2))\n maskt = np.zeros((trials, Nt, 2))\n interval = np.min(tss_comp)//2\n inc_mask = 30\n \n s_inp_R = np.zeros((trials, Nt))\n s_inp_S = np.zeros((trials, Nt))\n \n \n \n if just==-1: #all types of trials \n ct = np.random.randint(n_ts, size = trials)\n \n else:\n ct = just*np.ones(trials, dtype = np.int8)\n \n # Don't have nor set nor ready cue in a set of inputs\n ct2 = np.random.rand(trials)<perc\n \n # Don't have a set cue\n ct3 = np.random.rand(trials)<perc1\n \n \n rnd = np.zeros(trials)\n if SR1_on>0:\n rnd = np.random.randint(-SR1_on, SR1_on, trials)\n\n for itr in range(trials): \n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n\n if ct3[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>R1_on+1+rnd[itr]+redset_comp)*(time<redset_comp+redset+R1_on+1+rnd[itr])\n mask_aft = time>=redset_comp+redset+R1_on+1+rnd[itr]\n s_inp_R[itr, time>R1_on+rnd[itr]] = 1.\n #s_inp_R[itr, time>R1_on+rnd[itr]+1] = 0.\n #s_inp_S[itr, time>R1_on+rnd[itr]+redset_comp] = 10.\n s_inp_R[itr, time>1+R1_on+rnd[itr]+redset_comp] = 0.\n \n \n outputt[itr,:,1] = 0.\n mask11 = np.logical_and(time>R1_on+rnd[itr]-interval, time<R1_on+rnd[itr])\n mask22 = np.logical_and(time>R1_on+rnd[itr]+redset_comp-interval, time<R1_on+rnd[itr]+redset_comp)\n outputt[itr,mask11,1] = strt\n outputt[itr,mask22,1] = -strt\n maskt[itr,:,1] =np.logical_or(mask11, mask22)\n \n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n maskt[itr,:,0] = (time>redset_comp+R1_on+1+rnd[itr]-inc_mask)*(time<redset_comp+redset+R1_on+1+rnd[itr]+inc_mask)\n\n if noready ==False:\n inputt[:,:,0] += s_inp_R\n \n \n dtype = torch.FloatTensor \n inputt = torch.from_numpy(inputt).type(dtype)\n outputt = torch.from_numpy(outputt).type(dtype)\n maskt = torch.from_numpy(maskt).type(dtype)\n \n return(inputt, outputt, maskt, ct, ct2, ct3)\n \n#%%\ndef create_inp_out_csgdelay_2in(trials, Nt, tss, amps, R1_on, SR1_on, fact = 5, just=-1, perc = 0.1, \n delayF = 0, delay_min = 20, delay_max = 250, noset=False, \n noready=False, align_set = False, align_go = False):\n '''\n trials: Number of 
trials\n Nt : Number of time points\n tss : Intervals between set and go\n R1_on: Time of ready\n SR1_on: Standard deviation of the temporal onset of \"Ready\".\n perc: Percentage of trials in which no transient inputs appear\n perc1: Percentage of trials in which only the ready cue appears\n '''\n \n n_ts = len(tss)\n time = np.arange(Nt)\n \n tss_comp = np.round(tss/fact)\n \n strt = -0.5\n inputt = np.zeros(( trials, Nt, 2))\n outputt = strt*np.ones((trials, Nt, 1))\n maskt = np.zeros((trials, Nt, 1))\n inc_mask = 30\n \n s_inp_R = np.zeros((trials, Nt))\n s_inp_S1 = np.zeros((trials, Nt))\n s_inp_S2 = np.zeros((trials, Nt))\n \n length = 2*np.mean(tss) # I changed this in July20\n \n delays = np.zeros(trials)\n if delayF==0:\n delayF = np.round(np.mean((delay_min, delay_max)))\n \n if just==-1: #all types of trials \n ct = np.random.randint(n_ts, size = trials)\n \n else:\n ct = just*np.ones(trials, dtype = np.int8)\n \n # Don't have nor set nor ready cue in a set of inputs\n ct2 = np.random.rand(trials)<perc\n \n \n \n rnd = np.zeros(trials)\n if SR1_on>0:\n rnd = np.random.randint(-SR1_on, SR1_on, trials)\n if not align_set and not align_go:\n for itr in range(trials): \n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n amp_c = amps[ct[itr]]\n delay = np.random.randint(delay_min, delay_max)\n delays[itr] = delay\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>R1_on+1+rnd[itr]+length+delay)*(time<redset_comp+length+R1_on+1+rnd[itr]+delay)\n mask_aft = time>=redset_comp+length+R1_on+1+rnd[itr]+delay\n s_inp_R[itr, time>R1_on+rnd[itr]] = amp_c\n s_inp_R[itr, time>1+R1_on+rnd[itr]+length] = 0.\n \n s_inp_S2[itr, time>R1_on+rnd[itr]+length+delay] = 10.\n s_inp_S2[itr, time>1+R1_on+rnd[itr]+length+delay] = 0.\n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n maskt[itr,:,0] = (time>length+R1_on+1+rnd[itr]-inc_mask+delay)*(time<length+redset+R1_on+1+rnd[itr]+inc_mask+delay)\n elif align_set:\n fixT = R1_on + np.max(tss)\n for itr in range(trials): \n redset = tss[ct[itr]]\n redset_comp = tss_comp[ct[itr]]\n amp_c = amps[ct[itr]]\n\n if ct2[itr]:\n maskt[itr,:,0] = time<Nt\n else:\n maskt[itr,:,0] = (time>1+fixT)*(time<redset+fixT+1)\n mask_aft = time>=redset+1+fixT\n s_inp_R[itr, time>fixT-length-delayF] = amp_c\n s_inp_R[itr, time>1+fixT-delayF] = 0.\n \n \n s_inp_S2[itr, time>fixT] = 10.\n s_inp_S2[itr, time>1+fixT] = 0.\n\n \n if sum(maskt[itr,:,0]):\n outputt[itr, maskt[itr,:,0]==1.,0] = np.linspace(strt, -strt, int(sum(maskt[itr,:,0])), endpoint=True)\n outputt[itr, mask_aft==1,0] = np.linspace(-strt, -strt, int(sum(mask_aft)), endpoint=True)\n \n\n \n if noready ==False:\n inputt[:,:,0] += s_inp_R\n if noset==False:\n inputt[:,:,0] += s_inp_S1\n inputt[:,:,1] += s_inp_S2\n \n \n \n dtype = torch.FloatTensor \n inputt = torch.from_numpy(inputt).type(dtype)\n outputt = torch.from_numpy(outputt).type(dtype)\n maskt = torch.from_numpy(maskt).type(dtype)\n \n return(inputt, outputt, maskt, ct, ct2, delays)" }, { "alpha_fraction": 0.5019564628601074, "alphanum_fraction": 0.579774022102356, "avg_line_length": 30.17182159423828, "blob_id": "a509398de771374ce8c0c505226a8845cc81fd05", "content_id": "44d0875a7eb7cf58c63e421eb9cec38ae9b137e2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 18145, "license_type": "no_license", 
"max_line_length": 138, "num_lines": 582, "path": "/C_Fig3_EFGH.py", "repo_name": "emebeiran/low-rank2020", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Mar 30 10:49:59 2020\n\n@author: mbeiran\n\"\"\"\n#%%\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport lib_rnns as lr\nfrom matplotlib.gridspec import GridSpec\naa = lr.set_plot()\n\n\n \ngaussian_norm = (1/np.sqrt(np.pi))\ngauss_points, gauss_weights = np.polynomial.hermite.hermgauss(200)\ngauss_points = gauss_points*np.sqrt(2)\n\ndef Phi (mu, delta0):\n integrand = np.tanh(mu+np.sqrt(delta0)*gauss_points)\n return gaussian_norm * np.dot (integrand,gauss_weights)\ndef Prime (mu, delta0):\n integrand = 1 - (np.tanh(mu+np.sqrt(delta0)*gauss_points))**2\n return gaussian_norm * np.dot (integrand,gauss_weights)\n\n#%%\nN = 200\nnbins = 20\ns_mn1 = 0.5\ns_mn2 = 1.2\nm1 = np.random.randn(N)\nm1 = m1/np.std(m1)\nm2 = np.random.randn(N)\nm2 = m2/np.std(m2)\nms = np.linspace(-3, 3)\n\nn1 = s_mn1*m1+0.3*np.random.randn(N)\nn2 = s_mn2*m2+0.3*np.random.randn(N)\n\n\n#%%\n# =============================================================================\n# Fig 2\n# =============================================================================\nms = np.linspace(-5,5,100)\nSigma = np.zeros((2,2))\nSigma[0,0] = 1.6\nSigma[1,1] = 1.6\nSigma[0,1] = -0.8\nSigma[1,0] = 0.8\n\n\nN = 1000\nS=10\nM = np.random.randn(N,2)\nM = M/np.std(M,0)\nss2 = 0.3\n\nN = np.dot(Sigma, M.T)+ss2*np.random.randn(2,N)\n\nfig = plt.figure(figsize=[3.2, 3.2], dpi=600)\ngs = GridSpec(5,5)\n\nax_joint00 = fig.add_subplot(gs[1:3,0:2])\nax_joint01 = fig.add_subplot(gs[1:3,2:4])\nax_joint10 = fig.add_subplot(gs[3:5,0:2])\nax_joint11 = fig.add_subplot(gs[3:5,2:4])\n\nax_marg_x0 = fig.add_subplot(gs[0,0:2])\nax_marg_x1 = fig.add_subplot(gs[0,2:4])\n\nax_marg_y0 = fig.add_subplot(gs[1:3,4])\nax_marg_y1 = fig.add_subplot(gs[3:5,4])\n\nax_joint00.scatter(M[:,0], N[0,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nax_joint00.plot(ms, Sigma[0,0]*ms, '--', c='k', lw=1)\nax_joint00.set_xlim([-3,3])\nax_joint00.set_xticks([-2., 0, 2.])\nax_joint00.set_xticklabels(['','',''])\nax_joint00.set_ylim([-5.5,5.5])\nax_joint00.set_yticks([-5, 0, 5])\nax_joint00.set_ylabel(r'$n^{\\left(1\\right)}_i$')\nax_joint00.spines['top'].set_visible(False)\nax_joint00.spines['right'].set_visible(False)\nax_joint00.yaxis.set_ticks_position('left')\nax_joint00.xaxis.set_ticks_position('bottom')\n \nax_joint01.scatter(M[:,1], N[0,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nax_joint01.plot(ms, Sigma[0,1]*ms*np.std(N[0,:]), '--', c='k', lw=1)\nax_joint01.spines['top'].set_visible(False)\nax_joint01.spines['right'].set_visible(False)\nax_joint01.yaxis.set_ticks_position('left')\nax_joint01.xaxis.set_ticks_position('bottom')\nax_joint01.set_ylim([-5.5,5.5])\nax_joint01.set_yticks([-5, 0, 5])\nax_joint01.set_yticklabels(['','',''])\nax_joint01.set_xlim([-3,3])\nax_joint01.set_xticks([-2., 0, 2.])\nax_joint01.set_xticklabels(['','',''])\n\nax_joint10.scatter(M[:,0], N[1,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nax_joint10.plot(ms, Sigma[1,0]*ms*np.std(N[1,:]), '--', c='k', lw=1)\nax_joint10.set_xlim([-3,3])\nax_joint10.spines['top'].set_visible(False)\nax_joint10.spines['right'].set_visible(False)\nax_joint10.yaxis.set_ticks_position('left')\nax_joint10.xaxis.set_ticks_position('bottom')\nax_joint10.set_ylim([-5.5,5.5])\nax_joint10.set_yticks([-5, 0, 
5])\nax_joint10.set_xlim([-3,3])\nax_joint10.set_xticks([-2., 0, 2.])\nax_joint10.set_ylabel(r'$n^{\\left(2\\right)}_i$')\nax_joint10.set_xlabel(r'$m^{\\left(1\\right)}_i$')\n\nax_joint11.scatter(M[:,1], N[1,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nax_joint11.plot(ms, Sigma[1,1]*ms, '--', c='k', lw=1)\nax_joint11.set_xlim([-3,3])\nax_joint11.spines['top'].set_visible(False)\nax_joint11.spines['right'].set_visible(False)\nax_joint11.set_ylim([-5.5,5.5])\nax_joint11.set_yticks([-5, 0, 5])\nax_joint11.set_xticks([-2., 0, 2.])\nax_joint11.set_xlim([-3,3])\nax_joint11.set_yticklabels(['','',''])\nax_joint11.yaxis.set_ticks_position('left')\nax_joint11.xaxis.set_ticks_position('bottom')\nax_joint11.set_xlabel(r'$m^{\\left(2\\right)}_i$')\n\nax_marg_x0.hist(M[:,0], nbins, alpha=0.5, density=True)\nss = 1.\nax_marg_x0.plot(ms, (1/np.sqrt(2*np.pi*ss**2))*np.exp(-(ms)**2/(2*ss**2)), 'k')\n\nax_marg_x0.spines['top'].set_visible(False)\nax_marg_x0.spines['right'].set_visible(False)\nax_marg_x0.spines['left'].set_visible(False)\nax_marg_x0.yaxis.set_ticks_position('left')\nax_marg_x0.xaxis.set_ticks_position('bottom')\nax_marg_x0.set_xlim([-3,3])\nax_marg_x0.set_xticks([-2., 0, 2.])\nax_marg_x0.set_ylim([0,0.45])\nax_marg_x0.set_xticklabels(['','',''])\nax_marg_x0.set_yticks([1])\n\nax_marg_x1.hist(M[:,1], nbins, alpha=0.5, density=True)\nss = 1.\nax_marg_x1.plot(ms, (1/np.sqrt(2*np.pi*ss**2))*np.exp(-(ms)**2/(2*ss**2)), 'k')\nax_marg_x1.spines['top'].set_visible(False)\nax_marg_x1.spines['right'].set_visible(False)\nax_marg_x1.spines['left'].set_visible(False)\nax_marg_x1.yaxis.set_ticks_position('left')\nax_marg_x1.xaxis.set_ticks_position('bottom')\nax_marg_x1.set_xlim([-3,3])\nax_marg_x1.set_ylim([0,0.45])\nax_marg_x1.set_xticks([-2., 0, 2.])\nax_marg_x1.set_xticklabels(['','',''])\nax_marg_x1.set_yticks([1])\n\nax_marg_y0.hist(N[0,:], nbins, orientation=\"horizontal\", alpha=0.5, density=True)\nss = np.sqrt(Sigma[0,0]**2+ss2**2)\nax_marg_y0.plot((1/np.sqrt(2*np.pi*ss**2))*np.exp(-(ms)**2/(2*ss**2)), ms, 'k')\nax_marg_y0.spines['top'].set_visible(False)\nax_marg_y0.spines['right'].set_visible(False)\nax_marg_y0.spines['bottom'].set_visible(False)\nax_marg_y0.yaxis.set_ticks_position('left')\nax_marg_y0.xaxis.set_ticks_position('bottom')\nax_marg_y0.set_ylim([-5.5,5.5])\nax_marg_y0.set_xlim([0,0.45])\nax_marg_y0.set_yticks([-5., 0, 5.])\nax_marg_y0.set_yticklabels(['','',''])\nax_marg_y0.set_xticks([1])\nax_marg_y0.set_xticklabels([''])\n\nax_marg_y1.hist(N[1,:], nbins, orientation=\"horizontal\", alpha=0.5, density=True)\nss = np.sqrt(Sigma[1,1]**2+ss2**2)\nax_marg_y1.plot((1/np.sqrt(2*np.pi*ss**2))*np.exp(-(ms)**2/(2*ss**2)), ms, 'k')\nax_marg_y1.spines['top'].set_visible(False)\nax_marg_y1.spines['right'].set_visible(False)\nax_marg_y1.spines['bottom'].set_visible(False)\nax_marg_y1.yaxis.set_ticks_position('left')\nax_marg_y1.xaxis.set_ticks_position('bottom')\nax_marg_y1.set_ylim([-5.5,5.5])\nax_marg_y1.set_xlim([0,0.45])\nax_marg_y1.set_yticks([-5., 0, 5.])\nax_marg_y1.set_yticklabels(['','',''])\nax_marg_y1.set_xticks([1])\nax_marg_y1.set_xticklabels([''])\n\nplt.savefig('Th_Fig3_1_A.pdf')\n\n#%%\nplt.rcParams[\"axes.grid\"] = False\nfig = plt.figure(figsize = [2.0, 2.0])\nax = fig.add_subplot(111) \nplt.imshow(Sigma, cmap='coolwarm', vmin = -4, vmax = 4)\nax.tick_params(color='white')\n\n\nfor i in range(np.shape(Sigma)[0]):\n for j in range(np.shape(Sigma)[1]):\n ax.text(i, j, str(Sigma[j,i]), va='center', ha='center', 
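# write each covariance value into its cell of the imshow grid\n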
fontsize=16)\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.spines['left'].set_visible(False)\nax.spines['bottom'].set_visible(False)\n\nax.yaxis.set_ticks_position('right')\nax.xaxis.set_ticks_position('top') \nax.set_xticks([0, 1])\nax.set_yticks([0, 1])\n\n\nax.set_xticklabels([r'$m_i^{\\left(1\\right)}$', r'$m_i^{\\left(2\\right)}$'], fontsize=14)\nax.set_yticklabels([r'$n_i^{\\left(1\\right)}$', r'$n_i^{\\left(2\\right)}$'], fontsize=14)\n\nplt.savefig('Th_Fig3_1_B.pdf')\n\n\n#%%\nfig = plt.figure()\nax = fig.add_subplot(111) \n\nu, v = np.linalg.eig(Sigma)\nl1 = -0.5\nl2 = 2.\nl11 = -1.1\nl22 = 1.1\n\ncC = np.array((1, 1, 1,))*0.3\nplt.plot([l1, l2], [0,0], 'k', lw=0.5)\nplt.plot([-0.05, 0.05], [1,1], 'k', lw=0.5)\nplt.plot([-0.05, 0.05], [-1,-1], 'k', lw=0.5)\nplt.plot([1,1], [-0.05, 0.05], 'k', lw=0.5)\n\n\nplt.plot( [0,0],[l11, l22], 'k', lw=0.5)\nplt.plot( [1,1],[l11, l22], 'k', lw=6., alpha=0.1)\n\n#ax.arrow(0, 0, u[0]*v[0,0], u[0]*v[1,0], fc=cC, ec=cC, alpha =0.8, width=0.06,\n# head_width=0.2, head_length=0.2)\n#ax.arrow(0, 0, u[1]*v[0,1], u[1]*v[1,1], fc=cC, ec=cC, alpha =0.8, width=0.06,\n# head_width=0.2, head_length=0.2)\nplt.scatter(np.real(u), np.imag(u), s=90, facecolor=0.6*np.ones(3), edgecolor='k')\n\nplt.plot()\nax.text(1.3, 0.1, r'Re $ \\lambda$', fontsize = 18)\nax.text(0.1, 0.7, r'Im $\\lambda$', fontsize = 18)\n\n\nax.set_xlim([l1, l2])\nax.set_ylim([l11, l22])\n\nax.axis('off')\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.spines['left'].set_visible(False)\nax.spines['bottom'].set_visible(False)\n\nplt.savefig('Th_Fig3_1_B1.pdf')\n\n\n#%%\nfig = plt.figure()\nax = fig.add_subplot(111) \n\nu, v = np.linalg.eig(Sigma)\nl1 = -1.\nl2 = 1.\nl11 = -1.\nl22 = 1\n\ncC = np.array((1, 1, 1,))*0.3\nplt.plot([l1, l2], [0,0], 'k', lw=0.5)\n#plt.plot([-0.05, 0.05], [1,1], 'k', lw=0.5)\n#plt.plot([-0.05, 0.05], [-1,-1], 'k', lw=0.5)\n#plt.plot([1,1], [-0.05, 0.05], 'k', lw=0.5)\n\n\nplt.plot( [0,0],[l11, l22], 'k', lw=0.5)\n#plt.plot( [1,1],[l11, l22], 'k', lw=6., alpha=0.1)\n\n#ax.arrow(0, 0, u[0]*v[0,0], u[0]*v[1,0], fc=cC, ec=cC, alpha =0.8, width=0.06,\n# head_width=0.2, head_length=0.2)\nax.arrow(0, 0, np.real(v[0,0]), np.real(v[1,0]), fc=cC, ec=cC, alpha =0.8, width=0.06, head_width=0.2, head_length=0.2)\n#plt.scatter(np.real(u), np.imag(u), s=90, facecolor=0.6*np.ones(3), edgecolor='k')\n\nplt.plot()\nax.text(0.9, 0.05, r'$ \\kappa_1$', fontsize = 18)\nax.text(0.05, 0.9, r'$\\kappa_2$', fontsize = 18)\nax.text(0.1, 0.5, r'Re($\\bf{u}$)', fontsize = 18)\n\n\nax.set_xlim([l1, l2])\nax.set_ylim([l11, l22])\n\nax.axis('off')\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.spines['left'].set_visible(False)\nax.spines['bottom'].set_visible(False)\n\nplt.savefig('Th_Fig3_1_B2.pdf')\n\n\n\n\n\n#%%\nkaps1 = np.linspace(-1.3,1.3, 130)\nkaps2 = np.linspace(-1.3,1.3, 100)\nksol = np.zeros((len(kaps1), len(kaps2), 2))\n\nK1s, K2s = np.meshgrid(kaps1, kaps2)\ndef transf(K):\n return(K*Prime(0, np.dot(K.T, K)))\n \nE = np.zeros((len(kaps1), len(kaps2)))\nfor ik1 ,k1 in enumerate(kaps1):\n for ik2, k2 in enumerate(kaps2):\n K = np.array((k1, k2))\n ksol[ik1, ik2, :] = - K+ np.dot(Sigma, transf(K))\n E[ik1, ik2] = np.sqrt(np.sum(ksol[ik1,ik2,:]**2))\n \n\n\nsearch_kap1 = np.linspace(0.2, 1.3, 300)\nE_1 = np.zeros_like(search_kap1)\nfor ik1 ,k1 in enumerate(search_kap1):\n K = k1*np.array((1,0))\n kSS = - K+ np.dot(Sigma, transf(K))\n E_1[ik1] = np.sqrt(np.sum(kSS[0]**2))\nfp1 = 
search_kap1[np.argmin(E_1)]\n\nsearch_kap2 = np.linspace(0.2, 1.3, 300)\nE_2 = np.zeros_like(search_kap1)\nfor ik2 ,k2 in enumerate(search_kap2):\n K = v[:,1]*k2\n kSS = - K+ np.dot(Sigma, transf(K))\n E_2[ik2] = np.sqrt(np.sum(kSS**2))\nfp2 = search_kap2[np.argmin(E_2)]\n\nfig = plt.figure()\nax = fig.add_subplot(111) \nim = plt.pcolor(kaps1, kaps2, np.log10(E).T, cmap ='viridis', vmin = -2.,vmax=0)\n\n#cbar = ax.figure.colorbar(im, ax=ax)\n#cbar.set_ticks([-2, -1, 0])\nstrm = ax.streamplot(kaps1, kaps2, ksol[:,:,0].T, ksol[:,:,1].T, color='w', linewidth=1, cmap='autumn', density=0.6)\n#cbar.set_label(r'$\\log_{10}$ speed', rotation=270, labelpad=18)\nplt.xlabel('$\\kappa_1$')\nplt.ylabel('$\\kappa_2$')\nplt.scatter([ 0,], [0], s=50, edgecolor='k', facecolor='w', linewidth=1., zorder=4)\n#plt.scatter( [v[0,1]*fp2,-v[0,1]*fp2], [v[1,1]*fp2,-v[1,1]*fp2], s=100, edgecolor='w', facecolor='k', linewidth=1.5, zorder=4)\n\nth = np.linspace(0, 2*np.pi)\n\n#plt.plot(fp1*np.cos(th), fp1*np.sin(th), '--w', lw=3)\nplt.plot(fp1*np.cos(th), fp1*np.sin(th), '--k')\n\n#ax.arrow(0, 0, np.real(v[0,0]), np.real(v[1,0]), fc=cC, ec=cC, alpha =0.8, width=0.06, head_width=0.2, head_length=0.2, zorder=4)\n#ax.arrow(0, 0, -np.real(v[0,0]), -np.real(v[1,0]), fc=cC, ec=cC, alpha =0.8, width=0.06, head_width=0.2, head_length=0.2, zorder=4)\n\n\nax.set_xticks([-1, 0, 1])\nax.set_yticks([-1, 0, 1])\nax.set_ylim([np.min(kaps2), np.max(kaps2)])\nax.set_xlim([np.min(kaps1), np.max(kaps1)])\nplt.savefig('Th_Fig3_1_C1.pdf')\n\n\n\n#%%\n\nfig = plt.figure()\nax = fig.add_subplot(111) \n\nplt.xlabel('$\\kappa_1$')\nplt.ylabel('$\\kappa_2$')\nplt.scatter([ 0,], [0], s=50, edgecolor='k', facecolor='w', linewidth=1., zorder=4)\n#plt.scatter( [v[0,1]*fp2,-v[0,1]*fp2], [v[1,1]*fp2,-v[1,1]*fp2], s=100, edgecolor='w', facecolor='k', linewidth=1.5, zorder=4)\n\nim = plt.pcolor(kaps1, kaps2, np.log10(E).T, cmap ='viridis', vmin = -2.,vmax=0)\n\n#cbar = ax.figure.colorbar(im, ax=ax)\n#cbar.set_ticks([-2, -1, 0])\nstrm = ax.streamplot(kaps1, kaps2, ksol[:,:,0].T, ksol[:,:,1].T, color='w', linewidth=1, cmap='autumn', density=0.6)\n#cbar.set_label(r'$\\log_{10}$ speed', rotation=270, labelpad=18)\nplt.xlabel('$\\kappa_1$')\nplt.ylabel('$\\kappa_2$')\nplt.scatter([ 0,], [0], s=50, edgecolor='k', facecolor='w', linewidth=1., zorder=4)\n\n\n#plt.plot(fp1*np.cos(th), fp1*np.sin(th), c='w', lw=3)\n#plt.plot(fp1*np.cos(th), fp1*np.sin(th), c='k')\n\nax.set_xticks([-1, 0, 1])\nax.set_yticks([-1, 0, 1])\nax.set_ylim([np.min(kaps2), np.max(kaps2)])\nax.set_xlim([np.min(kaps1), np.max(kaps1)])\n\nNn = 1000\nSigmaTot = np.eye(4)\nSigmaTot[2,2] = 4\nSigmaTot[3,3] = 4\n\nSigmaTot[0,2] = Sigma[0,0]\nSigmaTot[0,3] = Sigma[1,0]\nSigmaTot[1,2] = Sigma[0,1]\nSigmaTot[1,3] = Sigma[1,1]\n\nSigmaTot[2,0] = SigmaTot[0,2]\nSigmaTot[3,0] = SigmaTot[0,3]\nSigmaTot[2,1] = SigmaTot[1,2]\nSigmaTot[3,1] = SigmaTot[1,3]\n\nMu= np.zeros((4,1))\n\ninkap1 = np.linspace(-1, 1, 4)\ninkap2 = np.linspace(-1.1, 1.1001, 5)\n\ndt = 0.1\ntime = np.arange(0, 120, dt)\n\nfor trials in range(1):\n try_s0 = 100\n for tr in range(2):\n XX = np.random.multivariate_normal(Mu[:,0], SigmaTot, size=Nn)\n try_s = np.sum((np.dot(XX.T,XX)/1000-SigmaTot)**2)\n if try_s < try_s0:\n #print(try_s)\n try_s0 = try_s\n XX_s = XX\n M = XX_s[:,0:2]\n N = XX_s[:,2:4]\n \n J = np.dot(M, N.T)/Nn\n \n cC = np.ones(3)*0.6#+trials*np.array((0.1, 0, 0))\n \n \n for ik1, ink1 in enumerate(inkap1):\n for ik2, ink2 in enumerate(inkap2):\n sk1 = np.zeros_like(time)\n sk2 = np.zeros_like(time)\n \n x0 = 
ink1*M[:,0] + ink2*M[:,1]\n sk1[0] = np.mean(M[:,0]*x0)\n sk2[0] = np.mean(M[:,1]*x0)\n \n for it, ti in enumerate(time[:-1]):\n x = x0 + dt*(-x0 + np.dot(J, np.tanh(x0)))\n sk1[it+1] = np.mean(M[:,0]*x)\n sk2[it+1] = np.mean(M[:,1]*x)\n x0 = x\n plt.plot(sk1, sk2, c=cC)\n plt.scatter(sk1[0], sk2[0], s=10, facecolor=cC)\n plt.scatter(sk1[-1], sk2[-1], s=25, facecolor=cC, edgecolor='k', zorder=4)\n \n#ax.arrow(0, 0, np.real(v[0,0]), np.real(v[1,0]), fc=cC, ec=cC, alpha =0.8, width=0.06, head_width=0.2, head_length=0.2, zorder=4)\n#ax.arrow(0, 0, -np.real(v[0,0]), -np.real(v[1,0]), fc=cC, ec=cC, alpha =0.8, width=0.06, head_width=0.2, head_length=0.2, zorder=4)\n\nplt.plot(fp1*np.cos(th), fp1*np.sin(th), '--k')\nax.set_xticks([-1, 0, 1])\nax.set_yticks([-1, 0, 1])\nax.set_ylim([np.min(kaps2), np.max(kaps2)])\nax.set_xlim([np.min(kaps1), np.max(kaps1)])\n \n#ax.spines['top'].set_visible(False)\n#ax.spines['right'].set_visible(False)\nax.yaxis.set_ticks_position('left')\nax.xaxis.set_ticks_position('bottom') \nplt.savefig('Th_Fig3_1_D.pdf') \n\n#%%\n\nfig = plt.figure()\nax = fig.add_subplot(111) \nplt.plot(time, sk2, '--', c=cC, label=r\"$\\kappa_2$\")\nplt.plot(time, sk1, c=cC, label=r\"$\\kappa_1$\")\nplt.legend(frameon=False, loc =2)\nplt.xlabel('time')\nplt.ylabel('$\\kappa$')\nplt.xlim([0,40])\n\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.yaxis.set_ticks_position('left')\nax.xaxis.set_ticks_position('bottom') \nplt.savefig('Th_Fig3_1_E.pdf') \n \n#%%\n\ns_w = Sigma[0,1]\nsS = Sigma[0,0]\nsigmas = np.linspace(0, 4, 300)\nfreq_ = np.zeros_like(sigmas)\nfor iS, sI in enumerate(sigmas):\n if sI>1:\n freq_[iS] = np.abs(s_w)/sI\nfig = plt.figure()\nax = fig.add_subplot(111) \nplt.plot(sigmas, freq_ , color=np.ones(3)*0.5, lw=2.)\n\n\ntry_s = np.linspace(1.05, 4, 8)\nfor iS, sI in enumerate(try_s):\n for trials in range(5):\n try_s0 = 100\n SigmaTot = np.eye(4)\n SigmaTot[2,2] = 4\n SigmaTot[3,3] = 4\n \n SigmaTot[0,2] = sI\n SigmaTot[0,3] = Sigma[1,0]\n SigmaTot[1,2] = Sigma[0,1]\n SigmaTot[1,3] = sI\n \n SigmaTot[2,0] = SigmaTot[0,2]\n SigmaTot[3,0] = SigmaTot[0,3]\n SigmaTot[2,1] = SigmaTot[1,2]\n SigmaTot[3,1] = SigmaTot[1,3]\n\n for tr in range(50):\n XX = np.random.multivariate_normal(Mu[:,0], SigmaTot, size=Nn)\n try_s = np.sum((np.dot(XX.T,XX)/1000-SigmaTot)**2)\n if try_s < try_s0:\n #print(try_s)\n try_s0 = try_s\n XX_s = XX\n M = XX_s[:,0:2]\n N = XX_s[:,2:4]\n \n J = np.dot(M, N.T)/Nn\n \n cC = np.ones(3)*0.6#+trials*np.array((0.1, 0, 0))\n \n ink1 = 0.1*np.random.rand()\n ink2 = 0.1*np.random.rand()\n \n sk1 = np.zeros_like(time)\n sk2 = np.zeros_like(time)\n \n x0 = ink1*M[:,0] + ink2*M[:,1]\n sk1[0] = np.mean(M[:,0]*x0)\n sk2[0] = np.mean(M[:,1]*x0)\n \n for it, ti in enumerate(time[:-100]):\n x = x0 + dt*(-x0 + np.dot(J, np.tanh(x0)))\n sk1[it+1] = np.mean(M[:,0]*x)\n sk2[it+1] = np.mean(M[:,1]*x)\n x0 = x\n \n sk1[0] = sk1[it+1]\n sk2[0] = sk2[it+1]\n \n for it, ti in enumerate(time[:-1]):\n x = x0 + dt*(-x0 + np.dot(J, np.tanh(x0)))\n sk1[it+1] = np.mean(M[:,0]*x)\n sk2[it+1] = np.mean(M[:,1]*x)\n x0 = x\n \n phase = np.arctan2(sk2, sk1)*180/np.pi\n\n mas = np.diff(phase)<-1\n phaseN = phase[0:-1]\n timeN = time[0:-1]\n \n \n freq = (2*np.pi)/np.mean(np.diff(timeN[mas]))\n plt.scatter(sI, freq, color=0.5*np.ones(3), s=30, edgecolor='k', alpha=0.7, zorder=3)\nplt.scatter(Sigma[1,1], 0.04, marker='^', facecolor= 0.2*np.ones(3), edgecolor='k', s= 60)\n#plt.plot(time, sk1, c=cC, 
label=r\"$\\kappa_1$\")\nplt.xlabel('$\\sigma_{mn}$')\nplt.ylabel(r'frequency')\n#plt.xlim([0,40])\n\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.yaxis.set_ticks_position('left')\nax.xaxis.set_ticks_position('bottom') \nplt.savefig('Th_Fig3_1_F.pdf') \n" }, { "alpha_fraction": 0.394023597240448, "alphanum_fraction": 0.43124741315841675, "avg_line_length": 36.03303527832031, "blob_id": "11f6a2d2f1e02622215ec1064e7831fd169e8a9b", "content_id": "77d3a9942eeb90a5be6f897892f60259631218d8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 29148, "license_type": "no_license", "max_line_length": 159, "num_lines": 787, "path": "/C_Fig7_allpanels.py", "repo_name": "emebeiran/low-rank2020", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu Apr 9 11:12:47 2020\n\n@author: mbeiran\n\"\"\"\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport lib_rnns as lr\nimport matplotlib.patches as patches\nfrom scipy.linalg import sqrtm\n#from matplotlib.gridspec import GridSpec\naa = lr.set_plot()\n\nverbose = False\n \ngaussian_norm = (1/np.sqrt(np.pi))\ngauss_points, gauss_weights = np.polynomial.hermite.hermgauss(200)\ngauss_points = gauss_points*np.sqrt(2)\n\ndef Phi (mu, delta0):\n integrand = np.tanh(mu+np.sqrt(delta0)*gauss_points)\n return gaussian_norm * np.dot (integrand,gauss_weights)\ndef Prime (mu, delta0):\n integrand = 1 - (np.tanh(mu+np.sqrt(delta0)*gauss_points))**2\n return gaussian_norm * np.dot (integrand,gauss_weights)\n\ndef func(sol, mu=1.):\n s = np.zeros_like(sol)\n s[0] = sol[1]\n s[1] = -sol[0]+mu*(1-sol[0]**2)*sol[1]\n return(s)\n \ndef funcG(sol, Sigma, sMx, sMy, sI, muMx, muMy, muN, muI):\n s = np.zeros_like(sol)\n S = np.shape(Sigma)[2]\n for iS in range(S):\n Mu = muI[iS] + sol[0]*muMx[iS] + sol[1]*muMy[iS]\n Delta = sI[iS] + sMx[iS]*sol[0]**2 + sMy[iS]*sol[1]**2\n \n P = Prime(Mu, Delta)\n P1 = muN[:,iS]*Phi(Mu, Delta) \n \n s += P1 + np.dot(Sigma[:,:,iS], np.array((sol[0],sol[1])))*P\n s[0] = s[0]/S-sol[0]\n s[1] = s[1]/S-sol[1]\n return(s)\n \ndef VP_field( xs, ys, mu=1.):\n X, Y = np.meshgrid(xs,ys)\n U = np.zeros_like(X)\n V = np.zeros_like(X)\n \n for ix, x in enumerate(xs):\n for iy, y in enumerate(ys):\n U[ix, iy] = y\n V[ix, iy] = -x+mu*(1-x**2)*y\n \n E = np.sqrt(U**2+V**2)\n return(X, Y, E, U, V)\n\ndef VP_func( x, y, mu=1.):\n\n U = y\n V = -x+mu*(1-x**2)*y\n \n return( U, V)\n\n\ndef VP_field2( xs, ys, mu=1.):\n X, Y = np.meshgrid(xs,ys)\n U = np.zeros_like(X)\n V = np.zeros_like(X)\n \n for ix, x in enumerate(xs):\n for iy, y in enumerate(ys):\n delta = x**2+y**2\n U[ix, iy] = -x+(2*x-0*y)*Prime(0,delta)\n V[ix, iy] = -y+(0*x+1.2*y)*Prime(0,delta)\n \n E = np.sqrt(U**2+V**2)\n return(X, Y, E, U, V)\n \ndef VP_func2( x, y, mu=1.):\n\n delta = x**2+y**2\n U = -x+(2*x-0*y)*Prime(0,delta)\n V = -y+(0*x+1.2*y)*Prime(0,delta)\n \n return(U, V)\n \n \ndef VP_approx( xs, ys, Sigma, sMx, sMy, sI, muMx, muMy, muN, muI):\n X, Y = np.meshgrid(xs,ys)\n U = np.zeros_like(X)\n V = np.zeros_like(X)\n \n S = np.shape(Sigma)[2]\n for ix, x in enumerate(xs):\n for iy, y in enumerate(ys):\n sol = 0\n for iS in range(S):\n Mu = muI[iS] + x*muMx[iS] + y*muMy[iS]\n Delta = sI[iS] + sMx[iS]*x**2 + sMy[iS]*y**2\n \n P = Prime(Mu, Delta)\n P1 = muN[:,iS]*Phi(Mu, Delta) \n \n sol += P1 + np.dot(Sigma[:,:,iS], np.array((x,y)))*P\n U[ix, iy] = sol[0]/S-x\n V[ix, iy] = sol[1]/S-y\n \n E = np.sqrt(U**2+V**2)\n return(X, Y, E, U, V)\n \ndef 
VP_approxnum( xs, ys, Mv, Nv, Iv):\n X, Y = np.meshgrid(xs,ys)\n U = np.zeros_like(X)\n V = np.zeros_like(X)\n \n for ix, x in enumerate(xs):\n for iy, y in enumerate(ys):\n U[ix, iy] = -x + np.mean(Nv[:,0]*np.tanh(Iv+x*Mv[:,0]+y*Mv[:,1]))\n V[ix, iy] = -y + np.mean(Nv[:,1]*np.tanh(Iv+x*Mv[:,0]+y*Mv[:,1]))\n \n E = np.sqrt(U**2+V**2)\n return(X, Y, E, U, V)\n\ndef VP_funcnum( x, y, Mv, Nv, Iv):\n U = -x + np.mean(Nv[:,0]*np.tanh(Iv+x*Mv[:,0]+y*Mv[:,1]))\n V = -y + np.mean(Nv[:,1]*np.tanh(Iv+x*Mv[:,0]+y*Mv[:,1]))\n \n return(U, V)\ndef get_network(Sigma, sMx, sMy, sI, muMx, muMy, MuN, muI, S, NperP):\n \n# =============================================================================\n# Initialize matrices\n# =============================================================================\n N = S*NperP\n rank = 2\n Mv = np.zeros((N, rank))\n Nv = np.zeros((N, rank))\n Iv = np.zeros((N))\n sMxP = np.zeros_like(sMx)\n sMyP = np.zeros_like(sMy)\n sIP = np.zeros_like(sI)\n muMxP = np.zeros_like(muMx)\n muMyP = np.zeros_like(muMy)\n MuNP = np.zeros_like(MuN)\n muIP = np.zeros_like(muI)\n SigmaP = np.zeros_like(Sigma)\n# =============================================================================\n# Go through populations\n# =============================================================================\n for iS in range(S): \n val = -1\n SS = Sigma[:,:,iS]\n rSS = np.zeros((5,5)) #BigSigma\n rSS[0,0] = sMx[iS] #m1^2\n rSS[1,1] = sMy[iS] #m2^2\n \n rSS[0,2] = SS[0,0]\n rSS[2,0] = rSS[0,2]\n rSS[0,3] = SS[1,0]\n rSS[3,0] = rSS[0,3]\n \n rSS[1,2] = SS[0,1]\n rSS[2,1] = rSS[1,2]\n rSS[1,3] = SS[1,1]\n rSS[3,1] = rSS[1,3]\n \n rSS[2,2] = 1.1\n rSS[3,3] = 1.1\n \n rSS[4,4] = sI[iS]\n val = np.min(np.linalg.eigvalsh(rSS))\n \n #Make BigSigma positive definite\n #mVal = np.max(np.abs(SS))\n cnt =0\n vals = []\n diag1 = []\n diag2 = []\n while val<1e-7:\n if cnt<200:\n rSS[2,2] = 1.2*rSS[2,2]\n rSS[3,3] = 1.2*rSS[3,3]\n else:\n rSS[2,2] = 1.02*rSS[2,2]\n rSS[3,3] = 1.02*rSS[3,3]\n val = np.min(np.linalg.eigvalsh(rSS))\n diag1.append(rSS[2,2])\n diag2.append(rSS[3,3])\n \n vals.append(val)\n cnt +=1\n \n Mean = np.array((muMx[iS], muMy[iS],MuN[0,iS], MuN[1,iS], muI[iS] ))\n \n error = 1e8\n counter =0\n \n #Take minimal finite-size out of 500 trials\n while error>0.1 and counter<500:\n counter+=1\n Sol = np.random.multivariate_normal(Mean, rSS, NperP) \n MeanP = np.mean(Sol,0)\n rSSP = np.cov(Sol.T)\n rSS_corr = np.zeros_like(rSS)\n rSS_corr[:,:] = rSS\n rSS_corr[np.abs(rSS_corr)<1e-10] = 1e-10\n sol = (rSSP-rSS_corr)/rSS_corr\n sol[np.abs(sol)>1e8] = 0.\n sol[2,2] = 0.\n sol[3,3] = 0.\n error2 = np.std(sol)+np.std(Mean-MeanP)\n if error2<error:\n error=error2\n Solsav = Sol\n\n Sol = Solsav\n Corr = np.cov(Sol.T)\n \n sMxP[iS] = Corr[0,0]\n sMyP[iS] = Corr[1,1]\n SigmaP[0,0,iS] = Corr[0,2]\n SigmaP[0,1,iS] = Corr[0,3]\n SigmaP[1,0,iS] = Corr[1,2]\n SigmaP[1,1,iS] = Corr[1,3]\n \n Mv[iS*NperP:(iS+1)*NperP, 0] = Sol[:,0]\n Mv[iS*NperP:(iS+1)*NperP, 1] = Sol[:,1]\n Nv[iS*NperP:(iS+1)*NperP, 0] = Sol[:,2]\n Nv[iS*NperP:(iS+1)*NperP, 1] = Sol[:,3]\n Iv[iS*NperP:(iS+1)*NperP] = Sol[:,4]\n #Iv[iS*NperP:(iS+1)*NperP] += muI[iS]-np.mean(Iv[iS*NperP:(iS+1)*NperP])\n \n #Quantify errors\n sIP[iS] = np.var(Iv[iS*NperP:(iS+1)*NperP]-muI[iS])\n muIP[iS] = np.mean(Iv[iS*NperP:(iS+1)*NperP])\n MuNP[0,iS] = np.mean(Nv[iS*NperP:(iS+1)*NperP, 0])\n MuNP[1,iS] = np.mean(Nv[iS*NperP:(iS+1)*NperP, 1])\n muMyP[iS] = np.mean(Mv[iS*NperP:(iS+1)*NperP, 1])\n muMxP[iS] = np.mean(Mv[iS*NperP:(iS+1)*NperP, 0])\n \n return(Mv, Nv, Iv, 
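# sampled loading vectors first, then the empirically realized population statistics\n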
sMxP, sMyP, sIP, muMxP, muMyP, MuNP, muIP, SigmaP)\n\ndef get_network2(Sigma, sMx, sMy, sI, muMx, muMy, MuN, muI, S, NperP):\n \n# =============================================================================\n# Initialize matrices\n# =============================================================================\n N = S*NperP\n rank = 2\n Mv = np.zeros((N, rank))\n Nv = np.zeros((N, rank))\n Iv = np.zeros((N))\n sMxP = np.zeros_like(sMx)\n sMyP = np.zeros_like(sMy)\n sIP = np.zeros_like(sI)\n muMxP = np.zeros_like(muMx)\n muMyP = np.zeros_like(muMy)\n MuNP = np.zeros_like(MuN)\n muIP = np.zeros_like(muI)\n SigmaP = np.zeros_like(Sigma)\n# =============================================================================\n# Go through populations\n# =============================================================================\n error = 1e8\n counter =0\n Mean = np.array((0., 0.,0., 0., 0.))\n rSS = np.eye(5)\n #Take minimal finite-size out of 500 trials\n while error>1e-5 and counter<5000:\n counter+=1\n Sol = np.random.multivariate_normal(Mean, rSS, NperP) \n MeanP = np.mean(Sol,0)\n rSSP = np.cov(Sol.T)\n error2 = np.std(rSSP-rSS)+np.std(MeanP)\n if error2<error:\n error=error2\n Solsav = Sol\n if verbose ==True:\n print(error)\n\n Sol = Solsav\n for iS in range(S): \n val = -1\n SS = Sigma[:,:,iS]\n rSS = np.zeros((5,5)) #BigSigma\n rSS[0,0] = sMx[iS] #m1^2\n rSS[1,1] = sMy[iS] #m2^2\n \n rSS[0,2] = SS[0,0]\n rSS[2,0] = rSS[0,2]\n rSS[0,3] = SS[1,0]\n rSS[3,0] = rSS[0,3]\n \n rSS[1,2] = SS[0,1]\n rSS[2,1] = rSS[1,2]\n rSS[1,3] = SS[1,1]\n rSS[3,1] = rSS[1,3]\n \n rSS[2,2] = 1.1\n rSS[3,3] = 1.1\n \n rSS[4,4] = sI[iS]\n val = np.min(np.linalg.eigvalsh(rSS))\n \n #Make BigSigma positive definite\n #mVal = np.max(np.abs(SS))\n cnt =0\n vals = []\n diag1 = []\n diag2 = []\n while val<1e-7:\n if cnt<200:\n rSS[2,2] = 1.2*rSS[2,2]\n rSS[3,3] = 1.2*rSS[3,3]\n else:\n rSS[2,2] = 1.02*rSS[2,2]\n rSS[3,3] = 1.02*rSS[3,3]\n val = np.min(np.linalg.eigvalsh(rSS))\n diag1.append(rSS[2,2])\n diag2.append(rSS[3,3])\n \n vals.append(val)\n cnt +=1\n \n Mean = np.array((muMx[iS], muMy[iS],MuN[0,iS], MuN[1,iS], muI[iS] ))\n \n error = 1e8\n counter =0\n \n# #Take minimal finite-size out of 500 trials\n# while error>0.1 and counter<500:\n# counter+=1\n# Sol = np.random.multivariate_normal(Mean, rSS, NperP) \n# MeanP = np.mean(Sol,0)\n# rSSP = np.cov(Sol.T)\n# rSS_corr = np.zeros_like(rSS)\n# rSS_corr[:,:] = rSS\n# rSS_corr[np.abs(rSS_corr)<1e-10] = 1e-10\n# sol = (rSSP-rSS_corr)/rSS_corr\n# sol[np.abs(sol)>1e8] = 0.\n# sol[2,2] = 0.\n# sol[3,3] = 0.\n# error2 = np.std(sol)+np.std(Mean-MeanP)\n# if error2<error:\n# error=error2\n# Solsav = Sol\n\n Sol = Mean[:,None] + np.dot(sqrtm(rSS), Solsav.T)\n Sol = Sol.T\n Corr = np.cov(Sol.T)\n \n sMxP[iS] = Corr[0,0]\n sMyP[iS] = Corr[1,1]\n SigmaP[0,0,iS] = Corr[0,2]\n SigmaP[0,1,iS] = Corr[0,3]\n SigmaP[1,0,iS] = Corr[1,2]\n SigmaP[1,1,iS] = Corr[1,3]\n \n Mv[iS*NperP:(iS+1)*NperP, 0] = Sol[:,0]\n Mv[iS*NperP:(iS+1)*NperP, 1] = Sol[:,1]\n Nv[iS*NperP:(iS+1)*NperP, 0] = Sol[:,2]\n Nv[iS*NperP:(iS+1)*NperP, 1] = Sol[:,3]\n Iv[iS*NperP:(iS+1)*NperP] = Sol[:,4]\n #Iv[iS*NperP:(iS+1)*NperP] += muI[iS]-np.mean(Iv[iS*NperP:(iS+1)*NperP])\n \n #Quantify errors\n sIP[iS] = np.var(Iv[iS*NperP:(iS+1)*NperP]-muI[iS])\n muIP[iS] = np.mean(Iv[iS*NperP:(iS+1)*NperP])\n MuNP[0,iS] = np.mean(Nv[iS*NperP:(iS+1)*NperP, 0])\n MuNP[1,iS] = np.mean(Nv[iS*NperP:(iS+1)*NperP, 1])\n muMyP[iS] = np.mean(Mv[iS*NperP:(iS+1)*NperP, 1])\n muMxP[iS] = np.mean(Mv[iS*NperP:(iS+1)*NperP, 0])\n \n 
return(Mv, Nv, Iv, sMxP, sMyP, sIP, muMxP, muMyP, MuNP, muIP, SigmaP)\n \ndef algorithm(dat_point, sigma, muMx, muMy, sMx, sMy, muI, sI):\n S = len(muMx) #number of populations\n K = 2 #rank\n p = np.shape(dat_point)[0] #number of points\n \n Phi0 = np.zeros((K*(K+1)*S, p*K))\n F0 = np.zeros((p*K, 1))\n\n for ip in range(p):\n k = dat_point[ip, :]\n k1 = dat_point[ip,0]\n k2 = dat_point[ip,1]\n SS = VP_func(k1,k2)\n F0[ip*K] = S*(SS[0] + k1)\n F0[ip*K+1] = S*(SS[1] + k2)\n \n for iS in range(S):\n Mu = muI[iS] + k[0]*muMx[iS] + k[1]*muMy[iS]\n Delta = sI[iS] + sMx[iS]*k[0]**2 + sMy[iS]*k[1]**2\n i0 = iS*K*(K+1)\n \n Phi0[i0: i0+K, ip*K] = k*Prime(Mu, Delta)\n Phi0[i0+K: i0+2*K, ip*K+1] = k*Prime(Mu, Delta)\n Phi0[i0+2*K: i0+2*K+1, ip*K] = Phi(Mu, Delta)\n Phi0[i0+2*K+1: i0+2*K+2, ip*K+1] = Phi(Mu, Delta)\n \n sol = np.dot(F0.T, Phi0.T)\n C= np.dot(Phi0, Phi0.T)+np.eye(K*(K+1)*S)*sigma**2\n \n sigmaU = np.dot(sol, np.linalg.pinv(C))\n sigmaU = sigmaU[0,:]\n \n Sigma = np.zeros((K, K, S))\n MuN = np.zeros((K, S)) \n for iS in range(S):\n Sigma[0,0,iS] = sigmaU[iS*K*(K+1)]\n Sigma[0,1,iS] = sigmaU[iS*K*(K+1)+1] #sigma_m2n1\n Sigma[1,0,iS] = sigmaU[iS*K*(K+1)+2] #sigma_m1n2\n Sigma[1,1,iS] = sigmaU[iS*K*(K+1)+3]\n MuN[0, iS] = sigmaU[iS*K*(K+1)+4]\n MuN[1, iS] = sigmaU[iS*K*(K+1)+5]\n E = np.mean(np.abs((np.dot(sigmaU, Phi0) - F0.T)))#/F0.T))\n reconst = np.dot(sigmaU, Phi0)\n return(Sigma, MuN, E, sigmaU,reconst, F0)\n\n #%%\ndt = 0.02\ntime = np.arange(0, 40, dt)\nsolT = np.zeros((len(time),2))\n\nsol0 = np.array((1.,1.))\nsolT[0,:] = sol0\n\nfor it, ti in enumerate(time[:-1]):\n solT[it+1,:] = solT[it,:]+dt*(func(solT[it,:]))\n \n#%%\nx = np.linspace(-4,4, 100)\ny = np.linspace(-4,4, 100)\nfig = plt.figure()\nax = fig.add_subplot(111)\nX, Y, E, U, V = VP_field(x,y, mu=1.)\n\nplt.plot( solT[1200:,0], solT[1200:,1], 'k', lw=1)\nplt.pcolor(x, y, np.log10(E).T, vmin = -2, vmax = 2., shading='auto')\nplt.colorbar()\nplt.streamplot(x, y, U.T, V.T, color='w')\nplt.xlim([np.min(x), np.max(x)])\nplt.ylim([np.min(y), np.max(y)])\nplt.xlabel(r'$x$')\nplt.ylabel(r'$y$')\n\nrect = patches.Rectangle((-3,3),6,-6,linewidth=1,edgecolor='r',facecolor='none', zorder=5)\nax.add_patch(rect)\n\nplt.savefig('Fig7_1n_VanderPol.pdf')\nplt.show()\n\n#%%\ndo_reg = False\ndo_fs_noreg = True\n#%%\nxp = np.linspace(-3, 3.01, 30)\nyp = np.linspace(-3, 3.01, 30)\n\nXp, Yp, Ep, Up, Vp = VP_field2(xp,yp, mu=1.)\ndat_point = np.vstack((np.ravel(Xp), np.ravel(Yp))).T\n\nf0 = np.vstack((np.ravel(Up), np.ravel(Vp))).T\nf0 += dat_point #Calculated without the leak\np = len(xp)*len(yp)\n\n\n#%%\nK = 2\npops = np.array((1, 2, 3, 4, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50))#np.round(np.linspace(1, 50,10)).astype(int)#np.round(np.linspace(1, 50, 20)).astype(int)\ntrials = 50;#20#20#20\n\nsigmas = [1e-8, 0.5]#[1e-8, 0.5]#[1e-8]\n\nEss = np.zeros((len(sigmas), trials, len(pops)))\nEss_r = np.zeros((len(sigmas), trials, len(pops)))\nEss_fs = np.zeros((len(sigmas), trials, len(pops)))\nEss_r_fs = np.zeros((len(sigmas), trials, len(pops)))\n\n\nL2n = np.zeros((len(sigmas), trials, len(pops)))\nMea = np.zeros((len(sigmas), trials, len(pops)))\nVar = np.zeros((len(sigmas), trials, len(pops)))\nMax = np.zeros((len(sigmas), trials, len(pops)))\n\nplott =False\n\nfor isi, sigma in enumerate(sigmas):\n for ipop, S in enumerate(pops):\n if verbose==True:\n print(' ')\n print('New pop')\n print('-.-')\n Sigma = np.zeros((K, K, S, trials))\n MuN = np.zeros((K, S, trials))\n for iti in range(trials):\n if np.min([np.abs(S-15), np.abs(S-35)])==0 and 
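# --- Editor's sketch (illustrative, not part of the archived file) -------------
# The core of `algorithm` above is a ridge-regularized least-squares solve: it
# finds the statistics vector sigmaU with sigmaU @ Phi0 ~ F0.T via
#   sigmaU = F0^T Phi0^T (Phi0 Phi0^T + beta^2 I)^(-1),
# which is exactly what the pinv-based lines compute (the script calls the
# regularizer `sigma`; the figure legends call it beta). Shapes below are
# hypothetical stand-ins.
import numpy as np

rng = np.random.default_rng(1)
n_params, n_obs = 12, 40
Phi0 = rng.standard_normal((n_params, n_obs))   # design matrix
F0 = rng.standard_normal((n_obs, 1))            # stacked target dynamics
beta = 0.5                                      # regularization strength

C = Phi0 @ Phi0.T + beta**2 * np.eye(n_params)
sigmaU = (F0.T @ Phi0.T) @ np.linalg.pinv(C)    # shape (1, n_params)

reconst = sigmaU @ Phi0                         # reconstruction of F0^T
print(np.mean(np.abs(reconst - F0.T)))          # reconstruction error, as in `algorithm`
# --------------------------------------------------------------------------------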
sigma>1e-3:\n if S==15:\n ITI = 11\n else:\n ITI = 15\n if iti==ITI:\n if verbose == True:\n print('pop: '+str(S)+' _Trial: '+str(iti))\n \n try:\n fl=np.load('DataFig7/paramMF_sig_'+str(sigma)+ '_pop_'+str(S)+'_trial_'+str(iti)+'.npz')\n Sigma = fl['name1']\n sMx = fl['name2']\n sMy = fl['name3']\n sI = fl['name4']\n muMx = fl['name5']\n muMy = fl['name6']\n MuN = fl['name7']\n muI = fl['name8']\n E = fl['name9']\n sigmaU = fl['name10']\n F0 = fl['name11']\n reconst = fl['name12']\n \n \n except:\n print('no paramMF')\n print('DataFig7/paramMF_sig_'+str(sigma)+ '_pop_'+str(S)+'_trial_'+str(iti)+'.npz')\n sI = np.random.exponential(1, S) \n sMx = np.random.exponential(1, S)\n if S>1:\n sMx = sMx/np.std(sMx)\n else:\n sMx[0] = 1.\n \n muMx = 4*np.random.rand(S)\n muMx = muMx-np.mean(muMx)\n \n muMy = 0*4*np.random.rand(S)\n muMy = muMy-np.mean(muMy)\n \n muI = 4*np.random.rand(S)\n muI = muI-np.mean(muI)\n \n sMy = np.random.exponential(1, S)\n if S>1:\n sMy = sMy/np.std(sMy)\n else:\n sMy[0] = 1.\n \n plott=False\n Sigma, MuN, E, sigmaU, reconst, F0 = algorithm(dat_point, sigma, muMx, muMy, sMx, sMy, muI, sI)\n import os\n if os.path.isdir('DataFig7/')==False:\n os.mkdir('DataFig7/')\n np.savez('DataFig7/paramMF_sig_'+str(sigma)+ '_pop_'+str(S)+'_trial_'+str(iti),name1=Sigma, name2=sMx, name3=sMy, name4=sI, \n name5=muMx, name6=muMy, name7=MuN, name8=muI, name9=E, name10=sigmaU, name11=F0, name12=reconst)\n\n if sigma>0.01:\n sol = np.zeros((len(time),2))\n \n sol0 = np.array((1.,1.))\n sol[0,:] = sol0\n \n for it, ti in enumerate(time[:-1]):\n sol[it+1,:] = sol[it,:]+dt*funcG(sol[it,:], Sigma, sMx, sMy, sI, muMx, muMy, MuN, muI)\n fig = plt.figure()\n ax = fig.add_subplot(211)\n plt.plot(time, sol[:,0], lw=2)\n plt.plot(time, solT[:,0], '--k')\n \n ax.set_ylabel(r'$\\kappa_1(t)$')\n ax.spines['top'].set_visible(False)\n ax.spines['right'].set_visible(False)\n ax.yaxis.set_ticks_position('left')\n ax.xaxis.set_ticks_position('bottom') \n ax.set_yticks([-2, 0, 2])\n ax = fig.add_subplot(212)\n plt.plot(time, sol[:,1], lw=2)\n plt.plot(time, solT[:,1], '--k')\n \n ax.set_xlabel('time')\n ax.set_ylabel(r'$\\kappa_2(t)$')\n ax.spines['top'].set_visible(False)\n ax.spines['right'].set_visible(False)\n ax.yaxis.set_ticks_position('left')\n ax.xaxis.set_ticks_position('bottom')\n ax.set_yticks([-2, 0, 2]) \n plt.savefig('Fig7_2_vanderPol_xy_MF_sig_'+str(sigma)+'_pop_'+str(S)+'_trial_'+str(iti)+'.pdf')\n plt.show()\n \n \n x = np.linspace(-5,5, 30)\n y = np.linspace(-5,5, 30) \n try:\n fl = np.load('DataFig7/dynMF_sig_'+str(sigma)+ '_pop_'+str(S)+'_trial_'+str(iti)+'.npz')\n Sigma = fl['name1']\n sMx = fl['name2']\n sMy = fl['name3']\n sI = fl['name4']\n muMx = fl['name5']\n muMy = fl['name6']\n MuN = fl['name7']\n muI = fl['name8']\n Xa = fl['name9']\n Ya = fl['name10']\n Ea = fl['name11']\n Ua = fl['name12']\n Va = fl['name13']\n except:\n print('no dynMF')\n print('DataFig7/dynMF_sig_'+str(sigma)+ '_pop_'+str(S)+'_trial_'+str(iti)+'.npz')\n Xa, Ya, Ea, Ua, Va = VP_approx(x,y, Sigma, sMx, sMy, sI, muMx, muMy, MuN, muI)\n np.savez('DataFig7/dynMF_sig_'+str(sigma)+ '_pop_'+str(S)+'_trial_'+str(iti), \n name1=Sigma, name2=sMx, name3=sMy, name4=sI, name5=muMx, name6=muMy, \n name7=MuN, name8=muI, name9=Xa, name10=Ya, name11=Ea, name12=Ua, name13=Va)\n \n \n plt.figure()\n plt.plot( sol[:,0], sol[:,1], lw=2)\n plt.plot( solT[:,0], solT[:,1], '--k', lw=1)\n plt.pcolor(x, y, np.log10(Ea.T), vmin = -2, vmax = 2., shading='auto')\n #plt.colorbar()\n plt.streamplot(x, y, Ua.T, Va.T, color='w')\n 
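# --- Editor's sketch (illustrative, not part of the archived file) -------------
# All trajectories in this script are integrated with the same explicit
# forward-Euler scheme, sol[it+1] = sol[it] + dt * F(sol[it]). The script's own
# target field is VP_func / func, defined earlier in the file (outside this
# excerpt); `vp_field` below is a hypothetical stand-in written in the standard
# Van der Pol form so the block is self-contained.
import numpy as np

def vp_field(k, mu=1.0):
    # assumed planar Van der Pol system: dx/dt = y, dy/dt = mu*(1-x^2)*y - x
    x, y = k
    return np.array([y, mu * (1.0 - x**2) * y - x])

dt = 0.02
time = np.arange(0, 40, dt)
sol = np.zeros((len(time), 2))
sol[0] = (1.0, 1.0)
for it in range(len(time) - 1):
    sol[it + 1] = sol[it] + dt * vp_field(sol[it])   # explicit Euler step

print(sol[-1])   # state after relaxing onto the limit cycle for this dt
# --------------------------------------------------------------------------------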
plt.xlim([np.min(x), np.max(x)])\n plt.ylim([np.min(y), np.max(y)])\n plt.xlabel(r'$\\kappa_1$')\n plt.ylabel(r'$\\kappa_2$')\n plt.xticks([-4, -2, 0, 2, 4]) \n if sigma<0.1:\n plt.savefig('Fig7_1n_Gauss_approx_pops_'+str(S)+'_trial_'+ str(iti)+'.pdf')\n plt.savefig('Fig7_1n_Gauss_approx_pops_'+str(S)+'_trial_'+ str(iti)+'.png')\n else:\n plt.savefig('Fig7_1Reg_Gauss_approx_pops_'+str(S)+'_trial_'+ str(iti)+'.pdf')\n plt.savefig('Fig7_1Reg_Gauss_approx_pops_'+str(S)+'_trial_'+ str(iti)+'.png')\n plt.show()\n \n\n\n NperP = 2000\n\n try:\n fl = np.load('DataFig7/dynamicsfinsiz_sig_'+str(sigma)+ '_pop_'+str(S)+'_trial_'+str(iti)+'.npz')\n Mv = fl['name1']\n Nv = fl['name2']\n Xr = fl['name3']\n Yr = fl['name4']\n Er = fl['name5']\n Ur = fl['name6']\n Vr = fl['name7']\n Iv = fl['name8']\n #hello = fl['name9']\n except:\n print('calculating vectors')\n print('DataFig7/dynamicsfinsiz_sig_'+str(sigma)+ '_pop_'+str(S)+'_trial_'+str(iti)+'.npz')\n Mv, Nv, Iv, sMxP, sMyP, sIP, muMxP, muMyP, MuNP, muIP, SigmaP = get_network(Sigma, sMx, sMy, sI, muMx, muMy, MuN, muI, S, NperP) \n Xr, Yr, Er, Ur, Vr = VP_approxnum(x,y, Mv, Nv, Iv) \n np.savez('dynamicsfinsiz_sig_'+str(sigma)+ '_pop_'+str(S)+'_trial_'+str(iti), name1=Mv, \n name2=Nv, name3=Xr, name4=Yr, name5=Er, name6=Ur, name7=Vr, name8=Iv)\n \n \n \n sol = np.zeros((len(time),2))\n sol[0,:] = sol0\n \n for it, ti in enumerate(time[:-1]):\n sol[it+1,:] = sol[it,:]+dt*np.array((VP_funcnum( sol[it,0], sol[it,1], Mv, Nv, Iv)))\n \n fig = plt.figure()\n ax = fig.add_subplot(211)\n plt.plot(time, sol[:,0], lw=2)\n plt.plot(time, solT[:,0], '--k')\n \n ax.set_ylabel(r'$\\kappa_1(t)$')\n ax.spines['top'].set_visible(False)\n ax.spines['right'].set_visible(False)\n ax.yaxis.set_ticks_position('left')\n ax.xaxis.set_ticks_position('bottom') \n ax.set_yticks([-2, 0, 2])\n ax = fig.add_subplot(212)\n plt.plot(time, sol[:,1], lw=2)\n plt.plot(time, solT[:,1], '--k')\n \n ax.set_xlabel('time')\n ax.set_ylabel(r'$\\kappa_2(t)$')\n ax.spines['top'].set_visible(False)\n ax.spines['right'].set_visible(False)\n ax.yaxis.set_ticks_position('left')\n ax.xaxis.set_ticks_position('bottom')\n ax.set_yticks([-2, 0, 2]) \n plt.savefig('Fig7_2_vanderPol_xy_FS_sig_'+str(sigma)+'_pop_'+str(S)+'_trial_'+str(iti)+'.pdf')\n plt.show()\n \n #%\n plt.figure()\n plt.plot( sol[:,0], sol[:,1], lw=2)\n plt.plot( solT[:,0], solT[:,1], '--k', lw=1)\n plt.pcolor(x, y, np.log10(Er.T), vmin = -2, vmax = 2., shading='auto')\n #plt.colorbar()\n plt.streamplot(x, y, Ur.T, Vr.T, color='w')\n plt.xlim([np.min(x), np.max(x)])\n plt.ylim([np.min(y), np.max(y)])\n plt.xticks([-4, -2, 0, 2, 4]) \n plt.xlabel(r'$\\kappa_1$')\n plt.ylabel(r'$\\kappa_2$')\n if sigma<0.1:\n plt.savefig('Fig7_1n_Gauss_approx_FS_pops_'+str(S)+'_trial_'+ str(iti)+'.pdf')\n plt.savefig('Fig7_1n_Gauss_approx_FS_pops_'+str(S)+'_trial_'+ str(iti)+'.png')\n else:\n plt.savefig('Fig7_1n_Gauss_approx_FSreg_pops_'+str(S)+'_trial_'+ str(iti)+'.pdf')\n plt.savefig('Fig7_1n_Gauss_approx_FSreg_pops_'+str(S)+'_trial_'+ str(iti)+'.png')\n plt.show()\n\n\n\n\n#%%\nfl = np.load('DataFig7/simulations.npz')\nEss=fl['name1']\nEss_fs = fl['name2']\nVar = fl['name3']\n#%%\nEss2 = np.mean(Ess, 1)/pops\nsEss2 = np.std(Ess, 1)/(pops*np.sqrt(trials))#/np.sqrt(trials)\n\nfig = plt.figure()\nax = fig.add_subplot(111)\nplt.fill_between(pops, Ess2[0]-sEss2[0], Ess2[0]+sEss2[0], alpha=0.4, color='k')\nplt.plot(pops, Ess2[0], lw=2, c =[0.2,0.2,0.2], label=r'$\\beta=10^{-8}$')\nplt.fill_between(pops, Ess2[1]-sEss2[1], Ess2[1]+sEss2[1], 
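# --- Editor's sketch (illustrative, not part of the archived file) -------------
# The finite-size flow fields above (VP_approxnum / VP_funcnum) are Monte Carlo
# estimates over the sampled loadings (Mv, Nv, Iv): on the latent plane,
#   dk_r/dt = -k_r + < Nv_r * tanh(Iv + k1*Mv_1 + k2*Mv_2) >_neurons.
# Minimal self-contained version with random placeholder loadings.
import numpy as np

rng = np.random.default_rng(2)
n_neurons = 1000
Mv = rng.standard_normal((n_neurons, 2))
Nv = rng.standard_normal((n_neurons, 2))
Iv = rng.standard_normal(n_neurons)

def field(k1, k2, Mv, Nv, Iv):
    phi = np.tanh(Iv + k1 * Mv[:, 0] + k2 * Mv[:, 1])
    u = -k1 + np.mean(Nv[:, 0] * phi)
    v = -k2 + np.mean(Nv[:, 1] * phi)
    return u, v

xs = np.linspace(-5, 5, 30)
ys = np.linspace(-5, 5, 30)
speed = np.array([[np.hypot(*field(x, y, Mv, Nv, Iv)) for y in ys] for x in xs])
print(speed.shape)   # (30, 30); low values flag fixed points of the latent flow
# --------------------------------------------------------------------------------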
alpha=0.5)\nplt.plot(pops, Ess2[1], lw=2, label=r'$\\beta=0.5$')\nplt.ylabel('error (mean field)')\nplt.xlabel(r'populations $P$')\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.yaxis.set_ticks_position('left')\nax.xaxis.set_ticks_position('bottom')\nplt.yscale('log')\nplt.legend(frameon=False)\nplt.savefig('Fig7_1n_Gauss_approx_pops_0.pdf')\nplt.show()\n\n\n#%%\nEss2 = np.mean(Ess_fs, 1)/pops\nsEss2 = np.std(Ess_fs, 1)/(pops*np.sqrt(trials))\n\nfig = plt.figure()\nax = fig.add_subplot(111)\nplt.fill_between(pops, Ess2[0]-sEss2[0], Ess2[0]+sEss2[0], alpha=0.4, color='k')\nplt.plot(pops, Ess2[0], lw=2, c =[0.2,0.2,0.2],label=r'$\\beta=10^{-8}$')\nplt.fill_between(pops, Ess2[1]-sEss2[1], Ess2[1]+sEss2[1], alpha=0.5)\nplt.plot(pops, Ess2[1], lw=2, label=r'$\\beta=0.5$')\nplt.yscale('log')\nplt.ylabel('error (finite size)')\nplt.xlabel(r'populations $P$')\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.yaxis.set_ticks_position('left')\nax.xaxis.set_ticks_position('bottom')\nplt.legend(frameon=False)\nplt.ylim([1., 100])\nplt.savefig('Fig7_1n_Gauss_approx_pops_2.pdf')\n\n#%%\nEss2 = np.mean(Var, 1)\nsEss2 = np.std(Var, 1)/np.sqrt(trials)\n\nfig = plt.figure()\nax = fig.add_subplot(111)\nplt.fill_between(pops, Ess2[0]-sEss2[0], Ess2[0]+sEss2[0], alpha=0.4, color='k')\nplt.plot(pops, Ess2[0], lw=2, c =[0.2,0.2,0.2],label=r'$\\beta=10^{-8}$')\nplt.fill_between(pops, Ess2[1]-sEss2[1], Ess2[1]+sEss2[1], alpha=0.5)\nplt.plot(pops, Ess2[1], lw=2, label=r'$\\beta=0.5$')\nplt.yscale('log')\nplt.ylabel(r'variance $X$')\nplt.xlabel(r'populations $P$')\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.yaxis.set_ticks_position('left')\nax.xaxis.set_ticks_position('bottom')\nplt.legend(frameon=False)\nplt.savefig('Fig7_1n_Gauss_approx_pops_3.pdf')\n\n\n\n" }, { "alpha_fraction": 0.4661540389060974, "alphanum_fraction": 0.5436587333679199, "avg_line_length": 36.49216079711914, "blob_id": "b7cc4c110e5883e8815c4572fa743b82c3481b45", "content_id": "6d7e87c7bd751562e1d72822ae1fdf9e22818bb1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 31082, "license_type": "no_license", "max_line_length": 149, "num_lines": 829, "path": "/C_Fig6_GHI.py", "repo_name": "emebeiran/low-rank2020", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Mar 30 10:49:59 2020\n\n@author: mbeiran\n\"\"\"\n#%%\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport lib_rnns as lr\nfrom matplotlib.gridspec import GridSpec\naa = lr.set_plot()\n\nnp.random.seed(20)\n \ngaussian_norm = (1/np.sqrt(np.pi))\ngauss_points, gauss_weights = np.polynomial.hermite.hermgauss(200)\ngauss_points = gauss_points*np.sqrt(2)\n\ndef Phi (mu, delta0):\n integrand = np.tanh(mu+np.sqrt(delta0)*gauss_points)\n return gaussian_norm * np.dot (integrand,gauss_weights)\ndef Prime (mu, delta0):\n integrand = 1 - (np.tanh(mu+np.sqrt(delta0)*gauss_points))**2\n return gaussian_norm * np.dot (integrand,gauss_weights)\n\n#%%\nN = 400\nnbins = 20\nval = 0.1\nolap = 10\na_m1 = np.sqrt((1-val**2)) *np.array((1, 1, 1, 1, -1, -1, -1, -1))\n#np.array((0., np.sqrt(8./9.), -np.sqrt(2./9.), -np.sqrt(2./9.), 0., -np.sqrt(8./9.), np.sqrt(2./9.), np.sqrt(2./9.)))\na_m2 = np.sqrt((1-val**2)) *np.array((1, -1, 1, -1, 1, -1, 1, -1))\n#*np.array((0., 0., np.sqrt(2./3.), -np.sqrt(2./3.), 0., 0., -np.sqrt(2./3.), np.sqrt(2./3.), ))\na_m3 = np.sqrt((1-val**2)) *np.array((1, -1, -1, 
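# --- Editor's sketch (illustrative, not part of the archived file) -------------
# The Phi/Prime helpers defined above evaluate Gaussian averages
#   < f(mu + sqrt(delta0) * z) >,  z ~ N(0, 1),
# with Gauss-Hermite quadrature: after the change of variables z = sqrt(2)*x,
# the average is sum_k w_k f(mu + sqrt(2*delta0) * x_k) / sqrt(pi), exact for
# polynomial f and very accurate for tanh. Cross-check against Monte Carlo:
import numpy as np

x_k, w_k = np.polynomial.hermite.hermgauss(200)

def gauss_mean(f, mu, delta0):
    return np.dot(w_k, f(mu + np.sqrt(2.0 * delta0) * x_k)) / np.sqrt(np.pi)

mu, delta0 = 0.3, 1.5
gh = gauss_mean(np.tanh, mu, delta0)
z = np.random.default_rng(3).standard_normal(200_000)
mc = np.mean(np.tanh(mu + np.sqrt(delta0) * z))
print(gh, mc)   # agree up to Monte Carlo error (~1e-3)
# --------------------------------------------------------------------------------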
1, 1, -1, -1, 1)) \n# *np.array((1., -1./3., -1./3., -1./3., -1., 1./3., 1./3., 1./3.))\n\na_n1 = olap/np.sqrt((1-val**2)) *np.array((1, 1, 1, 1, -1, -1, -1, -1)) \n#*np.array((0., np.sqrt(8./9.), -np.sqrt(2./9.), -np.sqrt(2./9.), 0., -np.sqrt(8./9.), np.sqrt(2./9.), np.sqrt(2./9.)))\na_n2 = olap/np.sqrt((1-val**2)) *np.array((1, -1, 1, -1, 1, -1, 1, -1)) \n#*np.array((0., 0., np.sqrt(2./3.), -np.sqrt(2./3.), 0., 0., -np.sqrt(2./3.), np.sqrt(2./3.), ))\na_n3 = olap/np.sqrt((1-val**2)) *np.array((1, -1, -1, 1, 1, -1, -1, 1)) \n#*np.array((1., -1./3., -1./3., -1./3., -1., 1./3., 1./3., 1./3.))\n\ndef give_vecs(val=0.1, olap=1.5, N = 400):\n pops = 8\n a_m1 = np.sqrt((1-val**2)) *np.array((1, 1, 1, 1, -1, -1, -1, -1))\n #np.array((0., np.sqrt(8./9.), -np.sqrt(2./9.), -np.sqrt(2./9.), 0., -np.sqrt(8./9.), np.sqrt(2./9.), np.sqrt(2./9.)))\n a_m2 = np.sqrt((1-val**2)) *np.array((1, -1, 1, -1, 1, -1, 1, -1))\n #*np.array((0., 0., np.sqrt(2./3.), -np.sqrt(2./3.), 0., 0., -np.sqrt(2./3.), np.sqrt(2./3.), ))\n a_m3 = np.sqrt((1-val**2)) *np.array((1, -1, -1, 1, 1, -1, -1, 1)) \n # *np.array((1., -1./3., -1./3., -1./3., -1., 1./3., 1./3., 1./3.))\n \n a_n1 = olap/np.sqrt((1-val**2)) *np.array((1, 1, 1, 1, -1, -1, -1, -1)) \n #*np.array((0., np.sqrt(8./9.), -np.sqrt(2./9.), -np.sqrt(2./9.), 0., -np.sqrt(8./9.), np.sqrt(2./9.), np.sqrt(2./9.)))\n a_n2 = olap/np.sqrt((1-val**2)) *np.array((1, -1, 1, -1, 1, -1, 1, -1)) \n #*np.array((0., 0., np.sqrt(2./3.), -np.sqrt(2./3.), 0., 0., -np.sqrt(2./3.), np.sqrt(2./3.), ))\n a_n3 = olap/np.sqrt((1-val**2)) *np.array((1, -1, -1, 1, 1, -1, -1, 1)) \n #*np.array((1., -1./3., -1./3., -1./3., -1., 1./3., 1./3., 1./3.))\n m1 = np.random.randn(N)\n n1 = np.random.randn(N)\n \n m2 = np.random.randn(N)\n n2 = np.random.randn(N)\n \n m3 = np.random.randn(N)\n n3 = np.random.randn(N)\n \n sels = 1000\n err0 = 50\n \n pops = len(a_m1)\n for t in range(sels):\n V = np.random.randn(N//pops, 50)\n CC = np.dot(V, V.T)\n for po in range(pops):\n CC[po,po] = 0.\n \n err = np.std(CC)\n if err<err0:\n err0 = err\n \n ix = 0\n \n for po in range(pops):\n m1[po*(N//pops):(po+1)*(N//pops)] = a_m1[po]+val*V[:, ix] \n ix += 1\n n1[po*(N//pops):(po+1)*(N//pops)] = a_n1[po]+val*V[:, ix]#+s_mn1[po]*V[:, ix]/val\n ix += 1 \n m2[po*(N//pops):(po+1)*(N//pops)] = a_m2[po]+val*V[:, ix]\n ix += 1\n n2[po*(N//pops):(po+1)*(N//pops)] = a_n2[po]+val*V[:, ix]#+s_mn2[po]*V[:, ix]/val\n ix += 1\n m3[po*(N//pops):(po+1)*(N//pops)] = a_m3[po]+val*V[:, ix]\n ix += 1\n n3[po*(N//pops):(po+1)*(N//pops)] = a_n3[po]+val*V[:, ix]#+s_mn2[po]*V[:, ix]/val\n ix += 1\n return(m1, m2, m3, n1, n2, n3)\npops = 8 \nm1, m2, m3, n1, n2, n3 = give_vecs(olap=olap)\n#%%\n# =============================================================================\n# Fig 2\n# =============================================================================\nms = np.linspace(-10.5,10.5,3000)\n\n\nN = 1000\nS=10\nM = np.vstack((m1, m2, m3)).T\nss2 = 0.3\n\nNNN = np.vstack((n1, n2, n3))\n\nfig = plt.figure(figsize=[4.2, 4.2])#, dpi=600\ngs = GridSpec(7,7)\n\nax_joint00 = fig.add_subplot(gs[1:3,0:2])\nax_joint01 = fig.add_subplot(gs[1:3,2:4])\nax_joint02 = fig.add_subplot(gs[1:3,4:6])\n\nax_joint10 = fig.add_subplot(gs[3:5,0:2])\nax_joint11 = fig.add_subplot(gs[3:5,2:4])\nax_joint12 = fig.add_subplot(gs[3:5,4:6])\n\nax_joint20 = fig.add_subplot(gs[5:7,0:2])\nax_joint21 = fig.add_subplot(gs[5:7,2:4])\nax_joint22 = fig.add_subplot(gs[5:7,4:6])\n\nax_marg_x0 = fig.add_subplot(gs[0,0:2])\nax_marg_x1 = fig.add_subplot(gs[0,2:4])\nax_marg_x2 = 
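# --- Editor's sketch (illustrative, not part of the archived file) -------------
# give_vecs above draws many candidate Gaussian pools and keeps the one whose
# empirical cross-correlations are smallest, reducing spurious finite-size
# overlaps between the loading vectors. A cleaned-up rendering of that intent;
# names and shapes are hypothetical.
import numpy as np

rng = np.random.default_rng(6)
n, k = 50, 6                       # samples per population, vectors per population
best_err, best_V = np.inf, None
for _ in range(1000):
    V = rng.standard_normal((n, k))
    C = V.T @ V / n                # empirical k x k Gram matrix of the columns
    err = np.std(C - np.diag(np.diag(C)))   # penalize only off-diagonal overlap
    if err < best_err:
        best_err, best_V = err, V
print(best_err)                    # the retained pool has the least cross-talk
# --------------------------------------------------------------------------------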
fig.add_subplot(gs[0,4:6])\n\nax_marg_y0 = fig.add_subplot(gs[1:3,6])\nax_marg_y1 = fig.add_subplot(gs[3:5,6])\nax_marg_y2 = fig.add_subplot(gs[5:7,6])\n\nyl = 12.5\nylt = 10.\nxl = 2.5\nxlt = 2.\nax_joint00.scatter(M[:,0], NNN[0,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\n\nfor ip in range(pops):\n ax_joint00.scatter(a_m1[ip], a_n1[ip], s=0.5*S, edgecolor='k', facecolor='w')\nax_joint00.set_xlim([-xl, xl])\nax_joint00.set_xticks([-xlt, 0, xlt])\nax_joint00.set_xticklabels(['','',''])\nax_joint00.set_ylim([-yl,yl])\nax_joint00.set_yticks([-ylt, 0, ylt])\nax_joint00.set_ylabel(r'$n^{\\left(1\\right)}_i$')\nax_joint00.spines['top'].set_visible(False)\nax_joint00.spines['right'].set_visible(False)\nax_joint00.yaxis.set_ticks_position('left')\nax_joint00.xaxis.set_ticks_position('bottom')\n\n \nax_joint01.scatter(M[:,1], NNN[0,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nfor ip in range(pops):\n ax_joint01.scatter(a_m2[ip], a_n1[ip], s=0.5*S, edgecolor='k', facecolor='w')\nax_joint01.spines['top'].set_visible(False)\nax_joint01.spines['right'].set_visible(False)\nax_joint01.yaxis.set_ticks_position('left')\nax_joint01.xaxis.set_ticks_position('bottom')\nax_joint01.set_ylim([-yl,yl])\nax_joint01.set_yticks([-ylt, 0, ylt])\nax_joint01.set_yticklabels(['','',''])\nax_joint01.set_xlim([-xl, xl])\nax_joint01.set_xticks([-xlt, 0, xlt])\nax_joint01.set_xticklabels(['','',''])\n\nax_joint02.scatter(M[:,2], NNN[0,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nfor ip in range(pops):\n ax_joint02.scatter(a_m3[ip], a_n1[ip], s=0.5*S, edgecolor='k', facecolor='w')\nax_joint02.spines['top'].set_visible(False)\nax_joint02.spines['right'].set_visible(False)\nax_joint02.yaxis.set_ticks_position('left')\nax_joint02.xaxis.set_ticks_position('bottom')\nax_joint02.set_ylim([-yl,yl])\nax_joint02.set_yticks([-ylt, 0, ylt])\nax_joint02.set_yticklabels(['','',''])\nax_joint02.set_xlim([-xl, xl])\nax_joint02.set_xticks([-xlt, 0, xlt])\nax_joint02.set_xticklabels(['','',''])\n\n\nax_joint10.scatter(M[:,0], NNN[1,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nfor ip in range(pops):\n ax_joint10.scatter(a_m1[ip], a_n2[ip], s=0.5*S, edgecolor='k', facecolor='w')\nax_joint10.set_xlim([-3,3])\nax_joint10.spines['top'].set_visible(False)\nax_joint10.spines['right'].set_visible(False)\nax_joint10.yaxis.set_ticks_position('left')\nax_joint10.xaxis.set_ticks_position('bottom')\nax_joint10.set_ylim([-yl,yl])\nax_joint10.set_yticks([-ylt, 0, ylt])\nax_joint10.set_xlim([-xl, xl])\nax_joint10.set_xticks([-xlt, 0, xlt])\nax_joint10.set_xticklabels(['','',''])\nax_joint10.set_ylabel(r'$n^{\\left(2\\right)}_i$')\n#ax_joint10.set_xlabel(r'$m^{\\left(1\\right)}_i$')\n\nax_joint11.scatter(M[:,1], NNN[1,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nfor ip in range(pops):\n ax_joint11.scatter(a_m2[ip], a_n2[ip], s=0.5*S, edgecolor='k', facecolor='w')\nax_joint11.spines['top'].set_visible(False)\nax_joint11.spines['right'].set_visible(False)\nax_joint11.set_ylim([-yl, yl])\nax_joint11.set_yticks([-ylt, 0, ylt])\nax_joint11.set_xlim([-xl, xl])\nax_joint11.set_xticks([-xlt, 0, xlt])\nax_joint11.set_xticklabels(['','',''])\nax_joint11.set_yticklabels(['','',''])\nax_joint11.yaxis.set_ticks_position('left')\nax_joint11.xaxis.set_ticks_position('bottom')\n#ax_joint11.set_xlabel(r'$m^{\\left(2\\right)}_i$')\n\nax_joint12.scatter(M[:,2], NNN[1,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nfor ip in 
range(pops):\n ax_joint12.scatter(a_m3[ip], a_n2[ip], s=0.5*S, edgecolor='k', facecolor='w')\nax_joint12.set_xlim([-3,3])\nax_joint12.spines['top'].set_visible(False)\nax_joint12.spines['right'].set_visible(False)\nax_joint12.set_ylim([-yl, yl])\nax_joint12.set_yticks([-ylt, 0, ylt])\nax_joint12.set_xlim([-xl, xl])\nax_joint12.set_xticks([-xlt, 0, xlt])\nax_joint12.set_xticklabels(['','',''])\nax_joint12.set_yticklabels(['','',''])\nax_joint12.yaxis.set_ticks_position('left')\nax_joint12.xaxis.set_ticks_position('bottom')\n#ax_joint12.set_xlabel(r'$m^{\\left(3\\right)}_i$')\n\nax_joint20.scatter(M[:,0], NNN[2,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nfor ip in range(pops):\n ax_joint20.scatter(a_m1[ip], a_n3[ip], s=0.5*S, edgecolor='k', facecolor='w')\nax_joint20.set_xlim([-3,3])\nax_joint20.spines['top'].set_visible(False)\nax_joint20.spines['right'].set_visible(False)\nax_joint20.yaxis.set_ticks_position('left')\nax_joint20.xaxis.set_ticks_position('bottom')\nax_joint20.set_ylim([-yl,yl])\nax_joint20.set_yticks([-ylt, 0, ylt])\nax_joint20.set_xlim([-xl, xl])\nax_joint20.set_xticks([-xlt, 0, xlt])\nax_joint20.set_ylabel(r'$n^{\\left(3\\right)}_i$')\nax_joint20.set_xlabel(r'$m^{\\left(1\\right)}_i$')\n\nax_joint21.scatter(M[:,1], NNN[2,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nfor ip in range(pops):\n ax_joint21.scatter(a_m2[ip], a_n3[ip], s=0.5*S, edgecolor='k', facecolor='w')\nax_joint21.set_xlim([-3,3])\nax_joint21.spines['top'].set_visible(False)\nax_joint21.spines['right'].set_visible(False)\nax_joint21.set_ylim([-yl, yl])\nax_joint21.set_yticks([-ylt, 0, ylt])\nax_joint21.set_xlim([-xl, xl])\nax_joint21.set_xticks([-xlt, 0, xlt])\nax_joint21.set_yticklabels(['','',''])\nax_joint21.yaxis.set_ticks_position('left')\nax_joint21.xaxis.set_ticks_position('bottom')\nax_joint21.set_xlabel(r'$m^{\\left(2\\right)}_i$')\n\nax_joint22.scatter(M[:,2], NNN[2,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nfor ip in range(pops):\n ax_joint22.scatter(a_m3[ip], a_n3[ip], s=0.5*S, edgecolor='k', facecolor='w')\nax_joint22.set_xlim([-3,3])\nax_joint22.spines['top'].set_visible(False)\nax_joint22.spines['right'].set_visible(False)\nax_joint22.set_ylim([-yl, yl])\nax_joint22.set_yticks([-ylt, 0, ylt])\nax_joint22.set_xlim([-xl, xl])\nax_joint22.set_xticks([-xlt, 0, xlt])\nax_joint22.set_yticklabels(['','',''])\nax_joint22.yaxis.set_ticks_position('left')\nax_joint22.xaxis.set_ticks_position('bottom')\nax_joint22.set_xlabel(r'$m^{\\left(3\\right)}_i$')\n\nax_marg_x0.hist(M[:,0], nbins, alpha=0.5, density=True)\nss = val\n#ax_marg_x0.plot(ms, (0.5/np.sqrt(2*np.pi*ss**2))*(np.exp(-(ms+np.sqrt(1-val**2))**2/(2*ss**2)) + np.exp(-(ms-np.sqrt(1-val**2))**2/(2*ss**2))), 'k')\nsol_n = np.zeros_like(ms)\nfor ip in range(pops):\n sol_n += (1/pops)*(1/np.sqrt(2*np.pi*ss**2))*np.exp(-(ms+a_m1[ip])**2/(2*ss**2))\nax_marg_x0.plot( ms, sol_n,'k', lw=0.5)\nax_marg_x0.spines['top'].set_visible(False)\nax_marg_x0.spines['right'].set_visible(False)\nax_marg_x0.spines['left'].set_visible(False)\nax_marg_x0.yaxis.set_ticks_position('left')\nax_marg_x0.xaxis.set_ticks_position('bottom')\nax_marg_x0.set_xlim([-3,3])\nax_marg_x0.set_xticks([-2., 0, 2.])\nax_marg_x0.set_ylim([0,1.4])\nax_marg_x0.set_xticklabels(['','',''])\nax_marg_x0.set_yticks([2])\n\nax_marg_x1.hist(M[:,1], nbins, alpha=0.5, density=True)\nsol_n = np.zeros_like(ms)\nfor ip in range(pops):\n sol_n += 
(1/pops)*(1/np.sqrt(2*np.pi*ss**2))*np.exp(-(ms+a_m2[ip])**2/(2*ss**2))\nax_marg_x1.plot( ms, sol_n,'k', lw=0.5)\n#ax_marg_x1.plot(ms, (0.5/np.sqrt(2*np.pi*ss**2))*(np.exp(-(ms+np.sqrt(1-val**2))**2/(2*ss**2)) + np.exp(-(ms-np.sqrt(1-val**2))**2/(2*ss**2))), 'k')\nax_marg_x1.spines['top'].set_visible(False)\nax_marg_x1.spines['right'].set_visible(False)\nax_marg_x1.spines['left'].set_visible(False)\nax_marg_x1.yaxis.set_ticks_position('left')\nax_marg_x1.xaxis.set_ticks_position('bottom')\nax_marg_x1.set_xlim([-3,3])\nax_marg_x1.set_ylim([0,1.4])\nax_marg_x1.set_xticks([-2., 0, 2.])\nax_marg_x1.set_xticklabels(['','',''])\nax_marg_x1.set_yticks([2])\n\nax_marg_x2.hist(M[:,2], nbins, alpha=0.5, density=True)\nsol_n = np.zeros_like(ms)\nfor ip in range(pops):\n sol_n += (1/pops)*(1/np.sqrt(2*np.pi*ss**2))*np.exp(-(ms+a_m3[ip])**2/(2*ss**2))\nax_marg_x2.plot( ms, sol_n,'k', lw=0.5)\n#ax_marg_x2.plot(ms, (0.5/np.sqrt(2*np.pi*ss**2))*(np.exp(-(ms+np.sqrt(1-val**2))**2/(2*ss**2)) + np.exp(-(ms-np.sqrt(1-val**2))**2/(2*ss**2))), 'k')\nax_marg_x2.spines['top'].set_visible(False)\nax_marg_x2.spines['right'].set_visible(False)\nax_marg_x2.spines['left'].set_visible(False)\nax_marg_x2.yaxis.set_ticks_position('left')\nax_marg_x2.xaxis.set_ticks_position('bottom')\nax_marg_x2.set_xlim([-3,3])\nax_marg_x2.set_ylim([0,1.4])\nax_marg_x2.set_xticks([-2., 0, 2.])\nax_marg_x2.set_xticklabels(['','',''])\nax_marg_x2.set_yticks([2])\n\nax_marg_y0.hist(NNN[0,:], nbins, orientation=\"horizontal\", alpha=0.5, density=True)\nss= val\nMu = olap*(1/np.sqrt(1-val**2))\nsol_n = np.zeros_like(ms)\nfor ip in range(pops):\n sol_n += (1/pops)*(1/np.sqrt(2*np.pi*ss**2))*np.exp(-(ms+a_n1[ip])**2/(2*ss**2))\nax_marg_y0.plot(sol_n, ms, 'k', lw=0.5)\nax_marg_y0.spines['top'].set_visible(False)\nax_marg_y0.spines['right'].set_visible(False)\nax_marg_y0.spines['bottom'].set_visible(False)\nax_marg_y0.yaxis.set_ticks_position('left')\nax_marg_y0.xaxis.set_ticks_position('bottom')\nax_marg_y0.set_ylim([-yl,yl])\nax_marg_y0.set_xlim([0,1.5])\nax_marg_y0.set_yticks([-ylt, 0, ylt])\nax_marg_y0.set_yticklabels(['','',''])\nax_marg_y0.set_xticks([2])\nax_marg_y0.set_xticklabels([''])\n\nax_marg_y1.hist(NNN[1,:], nbins, orientation=\"horizontal\", alpha=0.5, density=True)\nsol_n = np.zeros_like(ms)\nfor ip in range(pops):\n sol_n += (1/pops)*(1/np.sqrt(2*np.pi*ss**2))*np.exp(-(ms+a_n2[ip])**2/(2*ss**2))\nax_marg_y1.plot(sol_n, ms, 'k', lw=0.5)\n#ax_marg_y1.plot((0.5/np.sqrt(2*np.pi*ss**2))*(np.exp(-(ms+Mu)**2/(2*ss**2)) + np.exp(-(ms-Mu)**2/(2*ss**2))),ms, 'k')\nax_marg_y1.spines['top'].set_visible(False)\nax_marg_y1.spines['right'].set_visible(False)\nax_marg_y1.spines['bottom'].set_visible(False)\nax_marg_y1.yaxis.set_ticks_position('left')\nax_marg_y1.xaxis.set_ticks_position('bottom')\nax_marg_y1.set_ylim([-yl,yl])\nax_marg_y1.set_xlim([0,1.5])\nax_marg_y1.set_yticks([-ylt, 0, ylt])\nax_marg_y1.set_yticklabels(['','',''])\nax_marg_y1.set_xticks([2])\nax_marg_y1.set_xticklabels([''])\n\nax_marg_y2.hist(NNN[2,:], nbins, orientation=\"horizontal\", alpha=0.5, density=True)\n#ax_marg_y2.plot((0.5/np.sqrt(2*np.pi*ss**2))*(np.exp(-(ms+Mu)**2/(2*ss**2)) + np.exp(-(ms-Mu)**2/(2*ss**2))),ms, 'k')\nsol_n = np.zeros_like(ms)\nfor ip in range(pops):\n sol_n += (1/pops)*(1/np.sqrt(2*np.pi*ss**2))*np.exp(-(ms+a_n3[ip])**2/(2*ss**2))\nax_marg_y2.plot(sol_n, ms, 'k', 
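# --- Editor's sketch (illustrative, not part of the archived file) -------------
# The black curves overlaid on the marginal histograms above are equal-weight
# Gaussian mixtures: within population p the entries are N(a_p, val^2), so the
# marginal density averages the per-population Gaussians. Self-contained
# version; centers and std are placeholder values.
import numpy as np

def mixture_pdf(ms, centers, std):
    norm = len(centers) * np.sqrt(2 * np.pi) * std
    return sum(np.exp(-(ms - a) ** 2 / (2 * std ** 2)) for a in centers) / norm

ms = np.linspace(-3, 3, 601)
pdf = mixture_pdf(ms, centers=(-1.0, 1.0), std=0.1)
print(np.trapz(pdf, ms))           # ~1: a properly normalized density
# --------------------------------------------------------------------------------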
lw=0.5)\nax_marg_y2.spines['top'].set_visible(False)\nax_marg_y2.spines['right'].set_visible(False)\nax_marg_y2.spines['bottom'].set_visible(False)\nax_marg_y2.yaxis.set_ticks_position('left')\nax_marg_y2.xaxis.set_ticks_position('bottom')\nax_marg_y2.set_ylim([-yl,yl])\nax_marg_y2.set_xlim([0,1.5])\nax_marg_y2.set_yticks([-ylt, 0, ylt])\nax_marg_y2.set_yticklabels(['','',''])\nax_marg_y2.set_xticks([2])\nax_marg_y2.set_xticklabels([''])\nplt.savefig('Th_Fig6B_1_A_bgoverlap.pdf')\n\n\n\n#%%\n#kaps1 = np.linspace(-2.5,2.5, 50)\n#kaps2 = np.linspace(-2.5,2.5, 40)\n#kaps3 = np.linspace(-2.5,2.5, 40)\n#\n#ksol = np.zeros((len(kaps1), len(kaps2), len(kaps3), 3))\n#\n#K1s, K2s, K3s = np.meshgrid(kaps1, kaps2,kaps3)\n#def transf(K):\n# return(K*Prime(0, np.dot(K.T, K)))\n# \n#E = np.zeros((len(kaps1), len(kaps2), len(kaps3)))\n\na_m1 = np.sqrt((1-val**2)) *np.array((1, 1, 1, 1, -1, -1, -1, -1))\n#np.array((0., np.sqrt(8./9.), -np.sqrt(2./9.), -np.sqrt(2./9.), 0., -np.sqrt(8./9.), np.sqrt(2./9.), np.sqrt(2./9.)))\na_m2 = np.sqrt((1-val**2)) *np.array((1, -1, 1, -1, 1, -1, 1, -1))\n#*np.array((0., 0., np.sqrt(2./3.), -np.sqrt(2./3.), 0., 0., -np.sqrt(2./3.), np.sqrt(2./3.), ))\na_m3 = np.sqrt((1-val**2)) *np.array((1, -1, -1, 1, 1, -1, -1, 1)) \n# *np.array((1., -1./3., -1./3., -1./3., -1., 1./3., 1./3., 1./3.))\n\na_n1 = olap/np.sqrt((1-val**2)) *np.array((1, 1, 1, 1, -1, -1, -1, -1)) \n#*np.array((0., np.sqrt(8./9.), -np.sqrt(2./9.), -np.sqrt(2./9.), 0., -np.sqrt(8./9.), np.sqrt(2./9.), np.sqrt(2./9.)))\na_n2 = olap/np.sqrt((1-val**2)) *np.array((1, -1, 1, -1, 1, -1, 1, -1)) \n#*np.array((0., 0., np.sqrt(2./3.), -np.sqrt(2./3.), 0., 0., -np.sqrt(2./3.), np.sqrt(2./3.), ))\na_n3 = olap/np.sqrt((1-val**2)) *np.array((1, -1, -1, 1, 1, -1, -1, 1)) \nfps = np.zeros((3, 100))\nallfps = np.zeros((3,100))\neigvals = np.zeros((3,100))\n\niX = 0\n\ndef give_f(K, a_m1, a_m2, a_m3, a_n1, a_n2, a_n3, val):\n ksol = - K\n for ip in range(pops):\n ksol[0] += (1/pops)*a_n1[ip]*Phi(a_m1[ip]*K[0]+a_m2[ip]*K[1]+a_m3[ip]*K[2], val**2*np.sum(K**2))\n ksol[1] += (1/pops)*a_n2[ip]*Phi(a_m1[ip]*K[0]+a_m2[ip]*K[1]+a_m3[ip]*K[2], val**2*np.sum(K**2))\n ksol[2] += (1/pops)*a_n3[ip]*Phi(a_m1[ip]*K[0]+a_m2[ip]*K[1]+a_m3[ip]*K[2], val**2*np.sum(K**2))\n norm = np.sqrt(np.sum(ksol**2))\n return(ksol, norm)\npops = 8\n \nfor tr in range(200):\n #print(tr)\n k1 = 4*np.random.rand()-2\n k2 = 4*np.random.rand()-2\n k3 = 4*np.random.rand()-2\n norm = 10\n K = np.array((k1, k2, k3))\n while norm > 0.001:\n ksol = - K\n for ip in range(pops):\n ksol[0] += (1/pops)*a_n1[ip]*Phi(a_m1[ip]*K[0]+a_m2[ip]*K[1]+a_m3[ip]*K[2], val**2*np.sum(K**2))\n ksol[1] += (1/pops)*a_n2[ip]*Phi(a_m1[ip]*K[0]+a_m2[ip]*K[1]+a_m3[ip]*K[2], val**2*np.sum(K**2))\n ksol[2] += (1/pops)*a_n3[ip]*Phi(a_m1[ip]*K[0]+a_m2[ip]*K[1]+a_m3[ip]*K[2], val**2*np.sum(K**2))\n norm = np.sqrt(np.sum(ksol**2))\n K = K+0.1*ksol\n cand1 = K\n norm = 10\n K = K + 0.1*np.random.randn(3)\n while norm > 0.001:\n ksol = - K\n for ip in range(pops):\n ksol[0] += (1/pops)*a_n1[ip]*Phi(a_m1[ip]*K[0]+a_m2[ip]*K[1]+a_m3[ip]*K[2], val**2*np.sum(K**2))\n ksol[1] += (1/pops)*a_n2[ip]*Phi(a_m1[ip]*K[0]+a_m2[ip]*K[1]+a_m3[ip]*K[2], val**2*np.sum(K**2))\n ksol[2] += (1/pops)*a_n3[ip]*Phi(a_m1[ip]*K[0]+a_m2[ip]*K[1]+a_m3[ip]*K[2], val**2*np.sum(K**2))\n norm = np.sqrt(np.sum(ksol**2))\n K = K+0.1*ksol\n cand = K\n \n if np.sum((cand-cand1)**2)<0.02:\n cand = 0.5*(cand+cand1)\n if np.min(np.sum((fps.T-cand)**2,1))>0.01:\n fps[:,iX]= cand\n iX +=1\n fps[:,iX]= -cand\n iX +=1\n Jac = 
np.zeros((3,3))\n per = 0.005\n for ipp in range(3):\n pert = np.zeros(3)\n pert[ipp] = per\n nex, ccc = give_f(K+pert, a_m1, a_m2, a_m3, a_n1, a_n2, a_n3, val) \n fir, ccc = give_f(K-pert, a_m1, a_m2, a_m3, a_n1, a_n2, a_n3, val) \n \n Jac[ipp,:] = (nex-fir)/(2*per)\n eigvals[:,iX-2] = np.linalg.eigvals(Jac)\n eigvals[:,iX-1] = eigvals[:,iX-2]\n \nfps = fps[:,0:iX]\n\neigvals = eigvals[:,0:iX]\n\nRs = np.sqrt(np.sum(fps**2,0))\nprint(Rs)\n\n\n\nappart = Rs>np.mean(Rs)\nfp1s = fps[:,appart]\nfp2s = fps[:,~appart]\neigvals1 = eigvals[:,appart]\neigvals2 = eigvals[:,~appart]\n\niX = 0\nallfps = np.zeros((3,100))\nfor tr in range(100):\n #print(tr)\n k1 = 4*np.random.rand()-2\n k2 = 4*np.random.rand()-2\n k3 = 4*np.random.rand()-2\n norm = 10\n max_iter = 150\n ite = 0\n dx = 0.1\n K = np.array((k1, k2, k3))\n \n while norm>0.001 and ite<max_iter:\n \n Kold = K\n ksol, norm = give_f(K, a_m1, a_m2, a_m3, a_n1, a_n2, a_n3, val)\n \n\n K1 = K+dx*np.array((1,0,0))\n ksol1, norm1 = give_f(K1, a_m1, a_m2, a_m3, a_n1, a_n2, a_n3, val)\n K1m = K+dx*np.array((-1,0,0))\n ksol1m, norm1m = give_f(K1m, a_m1, a_m2, a_m3, a_n1, a_n2, a_n3, val)\n K2 = K+dx*np.array((0,1,0))\n ksol2, norm2 = give_f(K2, a_m1, a_m2, a_m3, a_n1, a_n2, a_n3, val)\n K2m = K+dx*np.array((0,-1,0))\n ksol2m, norm2m = give_f(K2m, a_m1, a_m2, a_m3, a_n1, a_n2, a_n3, val)\n K3 = K+dx*np.array((0,0,1))\n ksol3, norm3 = give_f(K3, a_m1, a_m2, a_m3, a_n1, a_n2, a_n3, val)\n K3m = K+dx*np.array((0,0,-1))\n ksol3m, norm3m = give_f(K3m, a_m1, a_m2, a_m3, a_n1, a_n2, a_n3, val)\n \n norms = np.array((norm1, norm1m, norm2, norm2m, norm3, norm3m))\n if np.min(norms)<norm: \n if norm1==np.min(norms):\n K = K1\n norm = norm1\n elif norm2 == np.min(norms):\n K = K2\n norm = norm2 \n elif norm3 == np.min(norms):\n K = K3\n norm = norm3\n elif norm1m == np.min(norms):\n K = K1m\n norm = norm1m\n elif norm2m == np.min(norms):\n K = K2m\n norm = norm2m\n elif norm3m == np.min(norms):\n K = K3m\n norm = norm3m\n else:\n if dx>0.0001:\n dx = dx*0.5\n ite +=1\n \n if norm<0.001: \n if np.min(np.sum((allfps.T-K)**2,1))>0.03:\n if np.min(np.sum((fps.T-K)**2,1))>0.03: \n allfps[:,iX]= K\n iX +=1\n allfps[:,iX]= -K\n iX +=1\nallfps = allfps[:,0:iX] \n\n#%%\nfrom mpl_toolkits.mplot3d import Axes3D\nimport mpl_toolkits.mplot3d as a3\nimport matplotlib.colors as colors\nimport scipy as sp\nfig = plt.figure()\nax = fig.add_subplot(111, projection='3d', azim=-59, elev=10)\n\nverts = fp1s.T\n#\n\ntop = 3\nbottom = 2\nno = 0\nne = 4\nso = 5\nse = 1\nfaces = np.array([ \n [no, top, ne], [so, top, se], [no, so, top], [ne, se, top], \n [no, bottom, ne], [so, bottom, se], [no,so, bottom], [ne,se,bottom]\n ])\n\n\n \n\nfne = 1\nfse = 3\nfno = 7\nfso = 4\nbne = 5\nbse = 6\nbno = 2\nbso = 0\nverts2 = fp2s.T\nfaces2 = np.array([ \n [fne, fno, fso,fse], [bne, bse, bso, bno], [fne, fno, bno, bne], [fse, fso, bso, bse], \n [fne, fse, bse, bne], [fno, fso, bso, bno]\n ])\n\nfor i in np.arange(len(faces)):\n square=[ verts[faces[i,0]], verts[faces[i,1]], verts[faces[i, 2]]]\n face = a3.art3d.Poly3DCollection([square], alpha=0.5)\n face.set_color(colors.rgb2hex(0.6*np.array((1., 0., 0.))+0.4*np.random.rand(3)))\n face.set_edgecolor('k')\n face.set_linewidth(1.)\n face.set_alpha(0.5)\n ax.add_collection3d(face)\nfor i in np.arange(len(faces2)):\n square2=[ verts2[faces2[i,0]], verts2[faces2[i,1]], verts2[faces2[i, 2]], verts2[faces2[i, 3]]]\n face = a3.art3d.Poly3DCollection([square2], alpha=0.5)\n face.set_color(colors.rgb2hex(0.6*np.array((0., 0., 1.))+0.4*np.random.rand(3)))\n 
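# --- Editor's sketch (illustrative, not part of the archived file) -------------
# After locating a fixed point, the code above estimates the Jacobian of the
# latent flow by central finite differences and stores its eigenvalues: all
# eigenvalues with negative real part means the point is linearly stable. (The
# script fills rows rather than columns, i.e. it builds the transpose; the
# eigenvalue spectrum is the same either way.) `F` below is a hypothetical
# planar flow with a stable fixed point at the origin.
import numpy as np

def F(k):
    return np.array([-k[0] + 0.5 * np.tanh(k[1]),
                     -k[1] - 0.5 * np.tanh(k[0])])

def jacobian(F, k, eps=1e-5):
    k = np.asarray(k, dtype=float)
    J = np.zeros((k.size, k.size))
    for i in range(k.size):
        d = np.zeros_like(k)
        d[i] = eps
        J[:, i] = (F(k + d) - F(k - d)) / (2 * eps)   # column i = dF/dk_i
    return J

eigs = np.linalg.eigvals(jacobian(F, np.zeros(2)))
print(eigs, np.all(eigs.real < 0))   # True -> linearly stable
# --------------------------------------------------------------------------------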
face.set_edgecolor('k')\n face.set_linewidth(1.)\n face.set_alpha(0.5)\n ax.add_collection3d(face)\n#ax.scatter(fp2s[0,iXX], fp2s[1,iXX], fp2s[2,iXX], s=100, c='w', edgecolor='k')\nif np.sum(allfps**2)>0:\n ax.scatter(allfps[0,:], allfps[1,:], allfps[2,:], s=10, c='w', edgecolor='k')\nif np.std(Rs)>0.01:\n\n for ip in range(np.shape(fp1s)[1]):\n ax.scatter(fp1s[0,ip], fp1s[1,ip], fp1s[2,ip], s=30, c='C0', edgecolor='k')\n #plt.plot([0,fp1s[0,ip]], [0,fp1s[1,ip]], [0,fp1s[2,ip]], c='C0')\n for ip2 in range(np.shape(fp1s)[1]):\n dist = np.sqrt(np.sum((fp1s[:,ip]-fp1s[:,ip2])**2))\n #if dist<10.:\n # plt.plot([fp1s[0,ip], fp1s[0,ip2]], [fp1s[1,ip], fp1s[1,ip2]], [fp1s[2,ip], fp1s[2,ip2]], c='C0')\n # \n ##%%\n #fig = plt.figure()\n #ax = fig.add_subplot(111, projection='3d') \n for ip in range(np.shape(fp2s)[1]):\n ax.scatter(fp2s[0,ip], fp2s[1,ip], fp2s[2,ip], s=30, c='C1', edgecolor='k')\n #plt.plot([0,fp2s[0,ip]], [0,fp2s[1,ip]], [0,fp2s[2,ip]], c='C1')\n for ip2 in range(np.shape(fp2s)[1]):\n dist = np.sqrt(np.sum((fp2s[:,ip]-fp2s[:,ip2])**2))\n# if dist<10.:\n# plt.plot([fp2s[0,ip], fp2s[0,ip2]], [fp2s[1,ip], fp2s[1,ip2]], [fp2s[2,ip], fp2s[2,ip2]], c='C1')\nelse:\n for ip in range(np.shape(fps)[1]):\n if np.sum(fps[:,ip]**2)>0.2:\n ax.scatter(fps[0,ip], fps[1,ip], fps[2,ip], s=30, c='k')\n #plt.plot([0,fp1s[0,ip]], [0,fp1s[1,ip]], [0,fp1s[2,ip]], c='C0')\n for ip2 in range(np.shape(fps)[1]):\n dist = np.sqrt(np.sum((fps[:,ip]-fps[:,ip2])**2))\n \n #if dist<1.:\n # plt.plot([fps[0,ip], fps[0,ip2]], [fps[1,ip], fps[1,ip2]], [fps[2,ip], fps[2,ip2]], c='C0')\nax.set_xlabel(r'$\\kappa_1$')\nax.set_ylabel(r'$\\kappa_2$')\nax.set_zlabel(r'$\\kappa_3$')\nax.dist=11\n#ax.w_xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))\n#ax.w_yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))\n#ax.w_zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))\nax.set_xticks([-5, 5])\nax.set_yticks([-5, 5])\nax.set_zticks([-5, 5])\n#\n\nplt.savefig('Th_Fig6B_1_B.pdf')\n#ip = 2\n##ax.scatter(fp2s[0,ip], fp2s[1,ip], fp2s[2,ip], s=150, c='k')\n#print(eigvals2[:,ip])\n\n#%%\nK = fps[:,0]\nksol = - K\nsteps = 100\nfor st in range(steps):\n ksol = -K\n for ip in range(pops):\n ksol[0] += (1/pops)*a_n1[ip]*Phi(a_m1[ip]*K[0]+a_m2[ip]*K[1]+a_m3[ip]*K[2], val**2*np.sum(K**2))\n ksol[1] += (1/pops)*a_n2[ip]*Phi(a_m1[ip]*K[0]+a_m2[ip]*K[1]+a_m3[ip]*K[2], val**2*np.sum(K**2))\n ksol[2] += (1/pops)*a_n3[ip]*Phi(a_m1[ip]*K[0]+a_m2[ip]*K[1]+a_m3[ip]*K[2], val**2*np.sum(K**2))\n norm = np.sqrt(np.sum(ksol**2))\n #print(norm)\n K = K+0.05*ksol\n#%%\n\nfig = plt.figure()\nax = fig.add_subplot(111, projection='3d', azim=-59, elev=10) \n\nplt.xlabel('$\\kappa_1$')\nplt.ylabel('$\\kappa_2$')\nax.set_zlabel('$\\kappa_3$')\n\n#plt.scatter([ 0, fp2/np.sqrt(2), fp2/np.sqrt(2), -fp2/np.sqrt(2), -fp2/np.sqrt(2)], \\\n# [0, fp2/np.sqrt(2), -fp2/np.sqrt(2), fp2/np.sqrt(2), -fp2/np.sqrt(2)], s=50, edgecolor='k', facecolor='w', linewidth=1., zorder=4)\n#plt.scatter([ 0, 0, fp1, -fp1], [fp11, -fp11, 0, 0], s=70, edgecolor='w', facecolor='k', linewidth=1., zorder=5)\n\nNn = 1200\n\ninkap1 = np.linspace(-10.2, 10.2, 4)\ninkap3 = np.linspace(-10.2, 10.2, 4)\ninkap2 = np.linspace(-10.2, 10.2, 4)\n\n\ndt = 0.15\ntime = np.arange(0, 34, dt)\n\nfor i in np.arange(len(faces)):\n square=[ verts[faces[i,0]], verts[faces[i,1]], verts[faces[i, 2]]]\n face = a3.art3d.Poly3DCollection([square], alpha=0.2)\n face.set_color(colors.rgb2hex(0.6*np.array((1., 0., 0.))+0.4*np.random.rand(3)))\n face.set_edgecolor('k')\n face.set_linewidth(1.)\n face.set_alpha(0.5)\n 
ax.add_collection3d(face)\n\nfor i in np.arange(len(faces2)):\n square2=[ verts2[faces2[i,0]], verts2[faces2[i,1]], verts2[faces2[i, 2]], verts2[faces2[i, 3]]]\n face = a3.art3d.Poly3DCollection([square2], alpha=0.25)\n face.set_color(colors.rgb2hex(0.6*np.array((0., 0., 1.))+0.4*np.random.rand(3)))\n face.set_edgecolor('k')\n face.set_linewidth(1.)\n face.set_alpha(0.5)\n ax.add_collection3d(face)\n \n\nfor trials in range(1):\n m1, m2, m3, n1, n2, n3 = give_vecs(N = Nn, olap=olap, val=val) \n\n M = np.vstack((m1, m2, m3)).T\n N = np.vstack((n1, n2, n3))\n\n J = np.dot(M, N)/Nn\n \n cC = np.ones(3)*0.6\n \n fps = np.zeros((3,len(inkap1)*len(inkap2)*len(inkap3)))\n iX = 0\n for ik1, ink1 in enumerate(inkap1):\n #print(ik1)\n for ik2, ink2 in enumerate(inkap2):\n for ik3, ink3 in enumerate(inkap3):\n sk1 = np.zeros_like(time)\n sk2 = np.zeros_like(time)\n sk3 = np.zeros_like(time)\n \n \n x0 = ink1*M[:,0] + ink2*M[:,1] +ink3*M[:,2]\n sk1[0] = np.mean(M[:,0]*x0)\n sk2[0] = np.mean(M[:,1]*x0)\n sk3[0] = np.mean(M[:,2]*x0)\n \n for it, ti in enumerate(time[:-1]):\n x = x0 + dt*(-x0 + np.dot(J, np.tanh(x0)))+np.sqrt(dt)*0.1*np.random.randn(len(x0))\n sk1[it+1] = np.mean(M[:,0]*x)/np.mean(M[:,0]**2)\n sk2[it+1] = np.mean(M[:,1]*x)/np.mean(M[:,1]**2)\n sk3[it+1] = np.mean(M[:,2]*x)/np.mean(M[:,2]**2)\n \n x0 = x\n ax.plot(sk1, sk2,sk3, c=cC)\n ax.scatter(sk1[0], sk2[0], sk3[0], s=10, facecolor=cC)\n ax.scatter(sk1[-1], sk2[-1], sk3[-1], s=25, facecolor=cC, edgecolor='k', zorder=3)\n cand = np.array((sk1[-1], sk2[-1], sk3[-1]))\n if np.min(np.mean((fps.T-cand)**2,1))>0.1:\n fps[:,iX] = cand\n iX+=1\n fps = fps[:,0:iX+1]\n \nfor ip in range(np.shape(fps)[1]):\n #ax.scatter(fps[0,ip], fps[1,ip], fps[2,ip], s=30, c='k')\n #plt.plot([0,fp1s[0,ip]], [0,fp1s[1,ip]], [0,fp1s[2,ip]], c='C0')\n for ip2 in range(np.shape(fps)[1]):\n dist = np.sqrt(np.sum((fps[:,ip]-fps[:,ip2])**2))\n #if dist<10.:\n # plt.plot([fps[0,ip], fps[0,ip2]], [fps[1,ip], fps[1,ip2]], [fps[2,ip], fps[2,ip2]], c='k', lw=0.5)\nax.set_xticks([-5, 0, 5])\nax.set_zticks([-5, 0, 5])\nax.set_yticks([-5, 0, 5])\n\n#ax.set_ylim([-8, 8])\n#ax.set_xlim([-8, 8])\n#ax.set_zlim([-8, 8]) \nax.dist=11\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\n#ax.yaxis.set_ticks_position('left')\n#ax.xaxis.set_ticks_position('bottom') \nplt.savefig('Th_Fig6B_1_D.pdf') \n\n##%%\n#m1, m2, m3, n1, n2, n3 = give_vecs(N = Nn, olap=4., val=0.) 
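# --- Editor's sketch (illustrative, not part of the archived file) -------------
# The full-network simulation above builds J = M N^T / N_neurons and reads out
# each latent variable as the regression coefficient of the state on the
# corresponding connectivity vector, kappa_r = <m_r x> / <m_r^2>, so that
# x ~ sum_r kappa_r m_r. Minimal self-contained version.
import numpy as np

rng = np.random.default_rng(4)
n = 1200
M = rng.standard_normal((n, 3))                      # m^(1), m^(2), m^(3)
true_kappa = np.array([1.5, -0.7, 0.2])
x = M @ true_kappa + 0.3 * rng.standard_normal(n)    # state near the M-subspace

kappa = np.array([np.mean(M[:, r] * x) / np.mean(M[:, r] ** 2) for r in range(3)])
print(np.round(kappa, 2))   # close to true_kappa up to O(1/sqrt(n)) cross-talk
# --------------------------------------------------------------------------------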
\n#\n#M = np.vstack((m1, m2, m3)).T\n#N = np.vstack((n1, n2, n3))\n#\n#J = np.dot(M, N)/Nn\n#Av, Vv = np.linalg.eig(J)\n#\n#plt.scatter(np.real(Av), np.imag(Av))\n\n#%%\nAm = np.array((a_m1, a_m2, a_m3))\nAn = np.array((a_n1, a_n2, a_n3))\nRn = np.sum(An**2,0)\n\n#%%\n#fig = plt.figure()\nfrom mpl_toolkits.mplot3d import Axes3D\n#ax = fig.add_subplot(111, projection='3d') \n#ax.scatter(fps[0,:], fps[1,:F], fps[2,:])\n#%%\nm1, m2, m3, n1, n2, n3 = give_vecs(N = Nn, olap=olap, val=val) \nPops = 8\nfig = plt.figure()\nax = fig.add_subplot(111, projection='3d', azim=-83, elev = 31)\nax.scatter(n1, n2, n3, c='r', rasterized=True, alpha=0.5) \n\nfor ip in range(Pops):\n for ip2 in range(Pops):\n dist = np.sqrt(np.sum((An[:,ip]-An[:,ip2])**2))\n if dist<28.:\n ax.plot([An[0,ip], An[0,ip2]], [An[1,ip], An[1,ip2]],[An[2,ip], An[2,ip2]],c='k')\nax.scatter(An[0,0:Pops], An[1,0:Pops], An[2,0:Pops], s=40, edgecolor='g', facecolor='k', zorder=4)\n \nax.set_xlabel(r'$n_i^{(1)}$')\nax.set_ylabel(r'$n_i^{(2)}$')\nax.set_zlabel(r'$n_i^{(3)}$')\nax.dist=11\n#ax.w_xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))\n#ax.w_yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))\n#sax.w_zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))\nax.set_xticks([-8, 8])\nax.set_yticks([-8, 8])\nax.set_zticks([-8, 8])\n\nplt.savefig('Th_Fig6B_1_C.pdf')\n\n" }, { "alpha_fraction": 0.4953177273273468, "alphanum_fraction": 0.5782608985900879, "avg_line_length": 29.82474136352539, "blob_id": "88bb7f94febfbf8ec43454da7e072a0bb170225b", "content_id": "186a18b1a33474387b760c2d512e597c576ca2e1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11960, "license_type": "no_license", "max_line_length": 163, "num_lines": 388, "path": "/C_Fig4.py", "repo_name": "emebeiran/low-rank2020", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Mar 30 10:49:59 2020\n\n@author: mbeiran\n\"\"\"\n#%%\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport lib_rnns as lr\nfrom matplotlib.gridspec import GridSpec\naa = lr.set_plot()\n\n\nnp.random.seed(2)\ngaussian_norm = (1/np.sqrt(np.pi))\ngauss_points, gauss_weights = np.polynomial.hermite.hermgauss(200)\ngauss_points = gauss_points*np.sqrt(2)\n\ndef Phi (mu, delta0):\n integrand = np.tanh(mu+np.sqrt(delta0)*gauss_points)\n return gaussian_norm * np.dot (integrand,gauss_weights)\ndef Prime (mu, delta0):\n integrand = 1 - (np.tanh(mu+np.sqrt(delta0)*gauss_points))**2\n return gaussian_norm * np.dot (integrand,gauss_weights)\n\n#%%\nN = 400\nnbins = 20\n#s_mn1 = 0.5\ns_mnI = np.array((-10., 4.5))\ns_m2 = np.array((1.98, 0.02))\ns_mn = s_mnI/s_m2\n\ncolor1 = np.array((31, 127, 17))/256\ncolor2 = np.array((129, 34, 141))/256\nclrs = np.zeros((3,2))\nclrs[:,0] = color1\nclrs[:,1] = color2\n\nm1 = np.random.randn(N)\nm1[0:N//2] = np.sqrt(s_m2[0])*m1[0:N//2]/np.std(m1[0:N//2])\nm1[N//2:] = np.sqrt(s_m2[1])*m1[N//2:]/np.std(m1[N//2:])\nn1 = np.zeros_like(m1)\nms = np.linspace(-80.5, 80.5, 4200)\n\nn1[0:N//2] = s_mn[0]*m1[0:N//2] +3*np.random.randn(N//2)\nn1[N//2:] = s_mn[1]*m1[N//2:] +3*np.random.randn(N//2)\n\nfig = plt.figure(figsize=[3.6, 3.6])\ngs = GridSpec(4,4)\n\nax_joint = fig.add_subplot(gs[1:4,0:3])\nax_marg_x = fig.add_subplot(gs[0,0:3])\nax_marg_y = fig.add_subplot(gs[1:4,3])\n\n#ax_joint.scatter(m2, n2, s=20, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$')\nax_joint.scatter(m1[0:N//2], n1[0:N//2], s=20, alpha=0.5, label=r'$\\sigma_{mn} =$'+str(s_mn[0]), color =color1)\nax_joint.scatter(m1[N//2:], n1[N//2:], 
s=20, alpha=0.5, label=r'$\\sigma_{mn} =$'+str(s_mn[1]), color =color2)\n\nax_joint.plot(ms, s_mn[0]*ms, '--', color ='k', lw=1)\nax_joint.plot(ms, s_mn[1]*ms, '--', color ='k', lw=1)\n\n#ax_joint.legend(locolor =6,frameon=False, handletextpad=-0.1)\nax_joint.spines['top'].set_visible(False)\nax_joint.spines['right'].set_visible(False)\nax_joint.yaxis.set_ticks_position('left')\nax_joint.xaxis.set_ticks_position('bottom')\n\nyl = 70.9\nax_joint.set_ylim([-yl, yl])\nax_joint.set_xlim([-4, 4])\n\nax_joint.set_xlabel(r'$m_i$')\nax_joint.set_ylabel(r'$n_i$')\n\nax_marg_x.hist(m1[0:N//2], nbins, alpha=0.5, density=True, color=color1)\nax_marg_x.hist(m1[N//2:], nbins, alpha=0.5, density=True, color=color2)\n\n#ax_marg_x.hist(m2, nbins, alpha=0.5, density=True)\n\nax_marg_x.spines['top'].set_visible(False)\nax_marg_x.spines['right'].set_visible(False)\nax_marg_x.yaxis.set_ticks_position('left')\nax_marg_x.xaxis.set_ticks_position('bottom')\n\na1 = np.exp(-ms**2/(2*s_m2[0]))/np.sqrt(2*np.pi*s_m2[0])\na2 = np.exp(-ms**2/(2*s_m2[1]))/np.sqrt(2*np.pi*s_m2[1])\n\n#ax_marg_x.plot(ms, 0.5*(a1+a2), lw=2, color ='k')\nax_marg_x.plot(ms, a1, lw=2, color =color1)\nax_marg_x.plot(ms, a2, lw=2, color =color2)\nax_marg_x.set_xlim([-4, 4])\n\nax_marg_y.hist(n1[0:N//2], nbins, orientation=\"horizontal\", alpha=0.5, density=True, color=color1)\nax_marg_y.hist(n1[N//2:], nbins, orientation=\"horizontal\", alpha=0.5, density=True, color=color2)\n\nss = s_mn[0]**2*np.var(m1[0:N//2])+3**2 #np.sqrt(s_m2[0])*m1[0:N//2]/np.std(m1[0:N//2])\na1 = np.exp(-ms**2/(2*ss))/np.sqrt(2*np.pi*ss)\n\nss = s_mn[1]**2*np.var(m1[N//2:])+3**2\na2 = np.exp(-ms**2/(2*ss))/np.sqrt(2*np.pi*ss)\n#ax_marg_y.plot( 0.5*np.exp(-ms**2/(2*s_mn[0]**2))/np.sqrt(2*np.pi*s_mn[0]**2)+ 0.5*np.exp(-ms**2/(2*s_mn[1]**2))/np.sqrt(2*np.pi*s_mn[1]**2),ms, lw=2, color ='k')\nax_marg_y.plot( a1,ms, lw=2, color =color1)\nax_marg_y.plot( a2,ms, lw=2, color =color2)\n\n\nax_marg_y.spines['top'].set_visible(False)\nax_marg_y.spines['right'].set_visible(False)\nax_marg_y.yaxis.set_ticks_position('left')\nax_marg_y.xaxis.set_ticks_position('bottom')\nax_marg_y.set_ylim([-yl, yl])\n\nax_marg_y.set_xticklabels(['0', '0.5'])\n\n## Turn off tick labels on marginals\nplt.setp(ax_marg_x.get_xticklabels(), visible=False)\nplt.setp(ax_marg_y.get_yticklabels(), visible=False)\n\n## Set labels on marginals\nax_marg_y.set_xlabel(r'$P(n_i)$')\nax_marg_x.set_ylabel(r'$P(m_i)$')\n\nplt.savefig('Th_Fig5_A.pdf')\nplt.show()\n\n#%%\ns_mnI = np.array((-10., 4.5))\ns_m2 = np.array((1.98, 0.02))\ns_mn = s_mnI/s_m2\n\n\nkappas = np.linspace(-10, 10, 500)\nFk1 = np.zeros_like(kappas)\nFk2 = np.zeros_like(kappas)\nFk1MC = np.zeros_like(kappas)\nFk2MC = np.zeros_like(kappas)\nNN = 100000\nfor ik, ka in enumerate(kappas):\n Fk1[ik]= s_mnI[0]*ka*Prime(0, s_m2[0]*ka**2) \n Fk2[ik]= s_mnI[1]*ka*Prime(0, s_m2[1]*ka**2)\n \n m1 = np.random.randn(NN)\n m1[0:NN//2] = np.sqrt(s_m2[0])*m1[0:NN//2]/np.std(m1[0:NN//2])\n m1[NN//2:] = np.sqrt(s_m2[1])*m1[NN//2:]/np.std(m1[NN//2:])\n n1 = np.zeros_like(m1)\n ms = np.linspace(-80.5, 80.5, 1200)\n \n n1[0:NN//2] = s_mn[0]*m1[0:NN//2] +0.3*np.random.randn(NN//2)\n n1[NN//2:] = s_mn[1]*m1[NN//2:] +0.3*np.random.randn(NN//2)\n Fk1MC[ik] = np.mean(n1[0:NN//2]*np.tanh(m1[0:NN//2]*ka)) \n Fk2MC[ik] = np.mean(n1[NN//2:]*np.tanh(m1[NN//2:]*ka))\n \nfig = plt.figure()\nax = fig.add_subplot(111)\n#plt.plot(kappas, Fk2, lw=2)\nplt.plot(kappas, -kappas+0.5*(Fk1MC+Fk2MC), color ='C3', lw=2)\n#plt.plot(kappas, -kappas+0.5*(Fk1MC+Fk2MC), '--', color ='C3', 
lw=2)\n\n\nFk = -kappas+0.5*(Fk1MC+Fk2MC)\nFk[Fk>0] = 10\nFk[Fk<0] = -10\nkappasm1 = kappas[:-1]\nsl = kappasm1[np.diff(Fk)>1]+0.5*(kappas[1]-kappas[0])\nsl2 = kappasm1[np.diff(Fk)<-1]+0.5*(kappas[1]-kappas[0])\n\nplt.plot(kappas, kappas*0, '--k')\nplt.scatter(sl,np.zeros_like(sl), edgecolor='C3', color='w', s=60, lw=2, zorder=4)\n\nk0s = kappas[np.argmin(np.abs(Fk[kappas<0.1]))]\nplt.scatter(sl2,np.zeros_like(sl2), edgecolor='k', color='C3', s=60, lw=1, zorder=4)\n\nplt.ylim([-1.5, 1.5])\nplt.ylabel(r'dynamics $d\\kappa / dt$')\nplt.xlabel(r'latent variable $\\kappa$')\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.yaxis.set_ticks_position('left')\nax.xaxis.set_ticks_position('bottom') \nplt.savefig('Th_Fig5_B.pdf')\nplt.show()\n\n\n#%%\ns_mnI = np.array((-10., 4.5))\ns_m2 = np.array((1.98, 0.02))\ns_mn = s_mnI/s_m2\n\n\nkappas = np.linspace(-10, 10, 500)\nFk1 = np.zeros_like(kappas)\nFk2 = np.zeros_like(kappas)\nFk1MC = np.zeros_like(kappas)\nFk2MC = np.zeros_like(kappas)\nFk1MC2 = np.zeros_like(kappas)\nFk2MC2 = np.zeros_like(kappas)\nNN = 100000\nfor ik, ka in enumerate(kappas):\n Fk1[ik]= s_mnI[0]*ka*Prime(0, s_m2[0]*ka**2) \n Fk2[ik]= s_mnI[1]*ka*Prime(0, s_m2[1]*ka**2)\n \n m1 = np.random.randn(NN)\n m1[0:NN//2] = np.sqrt(s_m2[0])*m1[0:NN//2]/np.std(m1[0:NN//2])\n m1[NN//2:] = np.sqrt(s_m2[1])*m1[NN//2:]/np.std(m1[NN//2:])\n n1 = np.zeros_like(m1)\n ms = np.linspace(-80.5, 80.5, 1200)\n \n n1[0:NN//2] = s_mn[0]*m1[0:NN//2] +0.3*np.random.randn(NN//2)\n n1[NN//2:] = s_mn[1]*m1[NN//2:] +0.3*np.random.randn(NN//2)\n Fk1MC[ik] = np.mean((1./np.cosh(m1[0:NN//2]*ka))**2) \n Fk1MC2[ik] = np.mean(n1[0:NN//2]*np.tanh(m1[0:NN//2]*ka)) \n Fk2MC[ik] = np.mean((1./np.cosh(m1[NN//2:]*ka))**2) #np.mean(n1[NN//2:]*np.tanh(m1[NN//2:]*ka))\n Fk2MC2[ik] = np.mean(n1[NN//2:]*np.tanh(m1[NN//2:]*ka))\n \nfig = plt.figure()\nax = fig.add_subplot(111)\n#plt.plot(kappas, -kappas, color ='C3', lw=2)\nplt.plot(kappas, Fk1MC, color =color1, lw=2)\nplt.plot(kappas, Fk2MC, color =color2, lw=2)\n\nFk = -kappas+0.5*(Fk1MC2+Fk2MC2)\nFk[Fk>0] = 10\nFk[Fk<0] = -10\n#%%\nkappasm1 = kappas[:-1]\nsl = kappasm1[np.diff(Fk)>1]+0.5*(kappas[1]-kappas[0])\nsl2 = kappasm1[np.diff(Fk)<-1]+0.5*(kappas[1]-kappas[0])\n\nplt.plot(kappas, kappas*0, '--k')\n#plt.scatter(sl,np.zeros_like(sl), edgecolor='C3', color='w', s=60, lw=2, zorder=4)\n\nk0s = kappas[np.argmin(np.abs(Fk[kappas<0.1]))]\n#plt.scatter(sl2,np.zeros_like(sl2), edgecolor='k', color='C3', s=60, lw=1, zorder=4)\n\nplt.ylim([0, 1.05])\nplt.yticks([0, 0.5, 1.])\nplt.ylabel(r'gain $\\left\\langle \\phi^\\prime \\right\\rangle$')\nplt.xlabel(r'latent variable $\\kappa$')\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.yaxis.set_ticks_position('left')\nax.xaxis.set_ticks_position('bottom') \nplt.savefig('Th_Fig5_C2.pdf')\nplt.show()\n\n\n\n#%%\nNN = 400\n \nm1 = np.random.randn(NN)\nm1[0:NN//2] = np.sqrt(s_m2[0])*m1[0:NN//2]/np.std(m1[0:NN//2])\nm1[NN//2:] = np.sqrt(s_m2[1])*m1[NN//2:]/np.std(m1[NN//2:])\nn1 = np.zeros_like(m1)\n \n#\nJ1 = np.dot(m1[:,None], n1[:,None].T)\n#J2 = np.dot(m2[:,None], n2[:,None].T)\n\ntime = np.linspace(0, 15, 200)\ndt = time[1]-time[0]\n\nnS = 10\nxs1 = np.zeros((len(time), nS))\nxs2 = np.zeros((len(time), nS))\n\nx0 = 0.5*np.random.randn(N)+10*np.random.randn()*m1\n#x02 = 0.5*np.random.randn(N)\nk1 = np.zeros(len(time))\n#k2 = np.zeros(len(time))\nfor it, t in enumerate(time[:-1]): \n xs1[it] = x0[0:nS]\n #xs2[it] = x02[0:nS]\n k1[it] = np.mean(m1*x0)\n #k2[it] = 
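# --- Editor's sketch (illustrative, not part of the archived file) -------------
# The thresholding of Fk above (set to +/-10, then np.diff) is a sign-change
# trick for locating and classifying 1-D fixed points: where F(kappa) crosses
# zero downward the point is stable, upward crossings are unstable. `F` below
# is a hypothetical bistable flow.
import numpy as np

kappas = np.linspace(-3, 3, 2000)            # even count, so 0 is not hit exactly
F = -kappas + 2 * np.tanh(kappas)            # fixed points near 0 and +/-1.92

s = np.sign(F)
step = kappas[1] - kappas[0]
stable = kappas[np.where(np.diff(s) < 0)[0]] + 0.5 * step    # downward crossings
unstable = kappas[np.where(np.diff(s) > 0)[0]] + 0.5 * step  # upward crossings
print(np.round(stable, 2), np.round(unstable, 2))   # ~[-1.92, 1.92] and ~[0.]
# --------------------------------------------------------------------------------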
np.mean(m2*x02)\n \n x = x0 + dt*(-x0+np.dot(J1, np.tanh(x0)/N))\n #x2 = x02 + dt*(-x02+np.dot(J2, np.tanh(x02)/N))\n \n x0=x\n #x02=x2\n \n xs1[it+1] = x0[0:nS]\n #xs2[it+1] = x02[0:nS]\n k1[it+1] = np.mean(m1*x0)\n #k2[it+1] = np.mean(m2*x02)\n#fig = plt.figure()\n#ax = fig.add_subplot(111)\n##plt.plot(time, xs2, color ='C0')\n#plt.plot(time, xs1, color ='C1')\n#\n#ax.spines['top'].set_visible(False)\n#ax.spines['right'].set_visible(False)\n#ax.yaxis.set_ticks_position('left')\n#ax.xaxis.set_ticks_position('bottom') \n#plt.ylabel(r'activation $x_i\\left(t\\right)$')\n#plt.xlabel(r'time')\n#plt.savefig('Th_Fig5_C2.pdf')\n\n#%%\n\nfig = plt.figure()\nax = fig.add_subplot(111)\n\ntrials = 20\nk0s = np.linspace(-10, 10, trials)\nfor rep in range(2):\n\n \n N = 2000\n \n try_Ms = 100\n \n s_mnI = np.array((-10., 4.5))\n s_m2 = np.array((1.98, 0.02))\n s_mn = s_mnI/s_m2\n \n targ0 = 10\n for try_M in range(try_Ms):\n m1 = np.random.randn(N)\n m1[0:N//2] = np.sqrt(s_m2[0])*m1[0:N//2]/np.std(m1[0:N//2])\n m1[N//2:] = np.sqrt(s_m2[1])*m1[N//2:]/np.std(m1[N//2:])\n n1 = np.zeros_like(m1)\n ms = np.linspace(-80.5, 80.5, 1200)\n \n n1[0:N//2] = s_mn[0]*m1[0:N//2] +0.3*np.random.randn(N//2)\n n1[N//2:] = s_mn[1]*m1[N//2:] +0.3*np.random.randn(N//2)\n \n targ = (np.mean(m1[0:N//2]*n1[0:N//2]) - s_mnI[0])**2+(np.mean(m1[N//2:]*n1[N//2:]) \\\n - s_mnI[1])**2+ (np.mean(m1[0:N//2]*n1[N//2:])) **2+ (np.mean(m1[N//2:]*n1[0:N//2]))**2\n if targ<targ0:\n m1S = m1\n n1S = n1\n print(targ)\n print('---')\n \n \n m1 = m1S\n n1 = n1S\n \n for tr in range(trials):\n \n m1 = m1S\n n1 = n1S\n \n \n J1 = np.dot(m1[:,None], n1[:,None].T)\n J1s1 = J1[0:N, 0:N]\n J1s2 = J1[N:, N:]\n # J2 = np.dot(m2[:,None], n2[:,None].T)\n key = (16*np.random.rand()-8)\n x0 = 0.5*np.random.randn(N)+m1*k0s[tr]\n # x02 = 0.5*np.random.randn(N)+0.05*m2*np.random.randn()\n k1 = np.zeros(len(time))\n # k2 = np.zeros(len(time))\n for it, t in enumerate(time[:-1]): \n xs1[it] = x0[0:nS]\n # xs2[it] = x02[0:nS]\n k1[it] = np.mean(m1*x0)\n # k2[it] = np.mean(m2*x02)\n \n x = x0 + dt*(-x0+np.dot(J1, np.tanh(x0)/N))\n # x2 = x02 + dt*(-x02+np.dot(J2, np.tanh(x02)/N))\n \n x0=x\n # x02=x2\n \n k1[it+1] = np.mean(m1*x0)\n # k2[it+1] = np.mean(m2*x02)\n plt.plot(time, k1, color =[0.4,0.4,0.4])\n # plt.plot(time, k2, color ='C0')\n \n plt.plot(time, sl2[0]*np.ones_like(time), lw=4, alpha=0.2, color ='C3')\n plt.plot(time, sl2[1]*np.ones_like(time), lw=4, alpha=0.2, color ='C3')\n plt.plot(time, sl2[2]*np.ones_like(time), lw=4, alpha=0.2, color ='C3')\n \n plt.plot(time, sl[0]*np.ones_like(time), '--', lw=4, alpha=0.2, color ='C3')\n plt.plot(time, sl[1]*np.ones_like(time), '--', lw=4, alpha=0.2, color ='C3')\n\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.yaxis.set_ticks_position('left')\nax.xaxis.set_ticks_position('bottom') \nplt.ylabel(r'latent variable $\\kappa$')\nplt.xlabel(r'time')\nplt.savefig('Th_Fig5_D.pdf')\n" }, { "alpha_fraction": 0.5201597213745117, "alphanum_fraction": 0.5951923727989197, "avg_line_length": 31.370647430419922, "blob_id": "1b9f59b0f20d292d8c254c230f6d87c744ff58ee", "content_id": "6a39047134d4dca9010af8f01c495b985ee0a9c7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13021, "license_type": "no_license", "max_line_length": 127, "num_lines": 402, "path": "/C_Fig3_IJKL.py", "repo_name": "emebeiran/low-rank2020", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Mar 30 
10:49:59 2020\n\n@author: mbeiran\n\"\"\"\n#%%\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport lib_rnns as lr\nfrom matplotlib.gridspec import GridSpec\naa = lr.set_plot()\n\n\n \ngaussian_norm = (1/np.sqrt(np.pi))\ngauss_points, gauss_weights = np.polynomial.hermite.hermgauss(200)\ngauss_points = gauss_points*np.sqrt(2)\n\ndef Phi (mu, delta0):\n integrand = np.tanh(mu+np.sqrt(delta0)*gauss_points)\n return gaussian_norm * np.dot (integrand,gauss_weights)\ndef Prime (mu, delta0):\n integrand = 1 - (np.tanh(mu+np.sqrt(delta0)*gauss_points))**2\n return gaussian_norm * np.dot (integrand,gauss_weights)\n\n#%%\nN = 200\nnbins = 20\ns_mn1 = 0.5\ns_mn2 = 1.2\nm1 = np.random.randn(N)\nm1 = m1/np.std(m1)\nm2 = np.random.randn(N)\nm2 = m2/np.std(m2)\nms = np.linspace(-3, 3)\n\nn1 = s_mn1*m1+0.3*np.random.randn(N)\nn2 = s_mn2*m2+0.3*np.random.randn(N)\n\n\n#%%\n# =============================================================================\n# Fig 2\n# =============================================================================\nms = np.linspace(-5,5,100)\nSigma = np.zeros((2,2))\nSigma[0,0] = 1.6\nSigma[1,1] = 1.6\nSigma[0,1] = 0.\nN = 1000\nS=10\nM = np.random.randn(N,2)\nM = M/np.std(M,0)\nss2 = 0.3\n\nN = np.dot(Sigma, M.T)+ss2*np.random.randn(2,N)\n\nfig = plt.figure(figsize=[3.2, 3.2], dpi=450)\ngs = GridSpec(5,5)\n\nax_joint00 = fig.add_subplot(gs[1:3,0:2])\nax_joint01 = fig.add_subplot(gs[1:3,2:4])\nax_joint10 = fig.add_subplot(gs[3:5,0:2])\nax_joint11 = fig.add_subplot(gs[3:5,2:4])\n\nax_marg_x0 = fig.add_subplot(gs[0,0:2])\nax_marg_x1 = fig.add_subplot(gs[0,2:4])\n\nax_marg_y0 = fig.add_subplot(gs[1:3,4])\nax_marg_y1 = fig.add_subplot(gs[3:5,4])\n\nax_joint00.scatter(M[:,0], N[0,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nax_joint00.plot(ms, Sigma[0,0]*ms, '--', c='k', lw=1)\nax_joint00.set_xlim([-3,3])\nax_joint00.set_xticks([-2., 0, 2.])\nax_joint00.set_xticklabels(['','',''])\nax_joint00.set_ylim([-5.5,5.5])\nax_joint00.set_yticks([-5, 0, 5])\nax_joint00.set_ylabel(r'$n^{\\left(1\\right)}_i$')\nax_joint00.spines['top'].set_visible(False)\nax_joint00.spines['right'].set_visible(False)\nax_joint00.yaxis.set_ticks_position('left')\nax_joint00.xaxis.set_ticks_position('bottom')\n \nax_joint01.scatter(M[:,1], N[0,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nax_joint01.plot(ms, Sigma[0,1]*ms, '--', c='k', lw=1)\nax_joint01.spines['top'].set_visible(False)\nax_joint01.spines['right'].set_visible(False)\nax_joint01.yaxis.set_ticks_position('left')\nax_joint01.xaxis.set_ticks_position('bottom')\nax_joint01.set_ylim([-5.5,5.5])\nax_joint01.set_yticks([-5, 0, 5])\nax_joint01.set_yticklabels(['','',''])\nax_joint01.set_xlim([-3,3])\nax_joint01.set_xticks([-2., 0, 2.])\nax_joint01.set_xticklabels(['','',''])\n\nax_joint10.scatter(M[:,0], N[1,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nax_joint10.plot(ms, Sigma[1,0]*ms, '--', c='k', lw=1)\nax_joint10.set_xlim([-3,3])\nax_joint10.spines['top'].set_visible(False)\nax_joint10.spines['right'].set_visible(False)\nax_joint10.yaxis.set_ticks_position('left')\nax_joint10.xaxis.set_ticks_position('bottom')\nax_joint10.set_ylim([-5.5,5.5])\nax_joint10.set_yticks([-5, 0, 5])\nax_joint10.set_xlim([-3,3])\nax_joint10.set_xticks([-2., 0, 2.])\nax_joint10.set_ylabel(r'$n^{\\left(2\\right)}_i$')\nax_joint10.set_xlabel(r'$m^{\\left(1\\right)}_i$')\n\nax_joint11.scatter(M[:,1], N[1,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nax_joint11.plot(ms, 
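# --- Editor's sketch (illustrative, not part of the archived file) -------------
# The rank-two vectors above are built with a linear-Gaussian rule,
# n = Sigma m + noise, so the empirical overlap matrix <n m^T> converges to
# Sigma, the effective 2x2 circuit whose eigenvalues shape the latent dynamics.
# Minimal version (the matrix is named Nmat here to keep it distinct from the
# sample count, which the script reuses the name N for).
import numpy as np

rng = np.random.default_rng(5)
n_neurons = 5000
Sigma = np.array([[1.6, 0.0],
                  [0.0, 1.6]])
M = rng.standard_normal((n_neurons, 2))
Nmat = (Sigma @ M.T + 0.3 * rng.standard_normal((2, n_neurons))).T

overlap = Nmat.T @ M / n_neurons   # estimates <n^(r) m^(s)> = Sigma[r, s]
print(np.round(overlap, 2))
# --------------------------------------------------------------------------------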
Sigma[1,1]*ms, '--', c='k', lw=1)\nax_joint11.set_xlim([-3,3])\nax_joint11.spines['top'].set_visible(False)\nax_joint11.spines['right'].set_visible(False)\nax_joint11.set_ylim([-5.5,5.5])\nax_joint11.set_yticks([-5, 0, 5])\nax_joint11.set_xticks([-2., 0, 2.])\nax_joint11.set_xlim([-3,3])\nax_joint11.set_yticklabels(['','',''])\nax_joint11.yaxis.set_ticks_position('left')\nax_joint11.xaxis.set_ticks_position('bottom')\nax_joint11.set_xlabel(r'$m^{\\left(2\\right)}_i$')\n\nax_marg_x0.hist(M[:,0], nbins, alpha=0.5, density=True)\nss = 1.\nax_marg_x0.plot(ms, (1/np.sqrt(2*np.pi*ss**2))*np.exp(-(ms)**2/(2*ss**2)), 'k')\n\nax_marg_x0.spines['top'].set_visible(False)\nax_marg_x0.spines['right'].set_visible(False)\nax_marg_x0.spines['left'].set_visible(False)\nax_marg_x0.yaxis.set_ticks_position('left')\nax_marg_x0.xaxis.set_ticks_position('bottom')\nax_marg_x0.set_xlim([-3,3])\nax_marg_x0.set_xticks([-2., 0, 2.])\nax_marg_x0.set_ylim([0,0.45])\nax_marg_x0.set_xticklabels(['','',''])\nax_marg_x0.set_yticks([1])\n\nax_marg_x1.hist(M[:,1], nbins, alpha=0.5, density=True)\nss = 1.\nax_marg_x1.plot(ms, (1/np.sqrt(2*np.pi*ss**2))*np.exp(-(ms)**2/(2*ss**2)), 'k')\nax_marg_x1.spines['top'].set_visible(False)\nax_marg_x1.spines['right'].set_visible(False)\nax_marg_x1.spines['left'].set_visible(False)\nax_marg_x1.yaxis.set_ticks_position('left')\nax_marg_x1.xaxis.set_ticks_position('bottom')\nax_marg_x1.set_xlim([-3,3])\nax_marg_x1.set_ylim([0,0.45])\nax_marg_x1.set_xticks([-2., 0, 2.])\nax_marg_x1.set_xticklabels(['','',''])\nax_marg_x1.set_yticks([1])\n\nax_marg_y0.hist(N[0,:], nbins, orientation=\"horizontal\", alpha=0.5, density=True)\nss = np.sqrt(Sigma[0,0]**2+ss2**2)\nax_marg_y0.plot((1/np.sqrt(2*np.pi*ss**2))*np.exp(-(ms)**2/(2*ss**2)), ms, 'k')\nax_marg_y0.spines['top'].set_visible(False)\nax_marg_y0.spines['right'].set_visible(False)\nax_marg_y0.spines['bottom'].set_visible(False)\nax_marg_y0.yaxis.set_ticks_position('left')\nax_marg_y0.xaxis.set_ticks_position('bottom')\nax_marg_y0.set_ylim([-5.5,5.5])\nax_marg_y0.set_xlim([0,0.45])\nax_marg_y0.set_yticks([-5., 0, 5.])\nax_marg_y0.set_yticklabels(['','',''])\nax_marg_y0.set_xticks([1])\nax_marg_y0.set_xticklabels([''])\n\nax_marg_y1.hist(N[1,:], nbins, orientation=\"horizontal\", alpha=0.5, density=True)\nss = np.sqrt(Sigma[1,1]**2+ss2**2)\nax_marg_y1.plot((1/np.sqrt(2*np.pi*ss**2))*np.exp(-(ms)**2/(2*ss**2)), ms, 'k')\nax_marg_y1.spines['top'].set_visible(False)\nax_marg_y1.spines['right'].set_visible(False)\nax_marg_y1.spines['bottom'].set_visible(False)\nax_marg_y1.yaxis.set_ticks_position('left')\nax_marg_y1.xaxis.set_ticks_position('bottom')\nax_marg_y1.set_ylim([-5.5,5.5])\nax_marg_y1.set_xlim([0,0.45])\nax_marg_y1.set_yticks([-5., 0, 5.])\nax_marg_y1.set_yticklabels(['','',''])\nax_marg_y1.set_xticks([1])\nax_marg_y1.set_xticklabels([''])\n\nplt.savefig('Th_Fig22_3_A.pdf')\n\n#%%\nplt.rcParams[\"axes.grid\"] = False\nfig = plt.figure(figsize = [2.0, 2.0])\nax = fig.add_subplot(111) \nplt.imshow(Sigma, cmap='OrRd', vmin = 0, vmax = 4)\nax.tick_params(color='white')\n\n\nfor i in range(np.shape(Sigma)[0]):\n for j in range(np.shape(Sigma)[1]):\n ax.text(i, j, str(Sigma[j,i]), va='center', ha='center', fontsize=16)\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.spines['left'].set_visible(False)\nax.spines['bottom'].set_visible(False)\n\nax.yaxis.set_ticks_position('right')\nax.xaxis.set_ticks_position('top') \nax.set_xticks([0, 1])\nax.set_yticks([0, 1])\n\n\nax.set_xticklabels([r'$m_i^{\\left(1\\right)}$', 
r'$m_i^{\\left(2\\right)}$'], fontsize=14)\nax.set_yticklabels([r'$n_i^{\\left(1\\right)}$', r'$n_i^{\\left(2\\right)}$'], fontsize=14)\n\nplt.savefig('Th_Fig22_3_B.pdf')\n\n\n#%%\nfig = plt.figure()\nax = fig.add_subplot(111) \n\nu, v = np.linalg.eig(Sigma)\nl1 = -0.5\nl2 = 2.\ncC = np.array((1, 1, 1,))*0.3\nplt.plot([l1, l2], [0,0], 'k', lw=0.5)\nplt.plot( [0,0],[l1, l2], 'k', lw=0.5)\nax.arrow(0, 0, u[0]*v[0,0], u[0]*v[1,0], fc=cC, ec=cC, alpha =0.8, width=0.06,\n head_width=0.2, head_length=0.2)\nax.arrow(0, 0, u[1]*v[0,1], u[1]*v[1,1], fc=cC, ec=cC, alpha =0.8, width=0.06,\n head_width=0.2, head_length=0.2)\n\nax.text(0.8, 0.2, r'$\\lambda_1 \\bf{u}_1$', fontsize = 18)\nax.text(0.55, 1., r'$\\lambda_2 \\bf{u}_2$', fontsize = 18)\n\n\nax.set_xlim([l1, l2])\nax.set_ylim([l1, l2])\n\n\nax.axis('off')\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.spines['left'].set_visible(False)\nax.spines['bottom'].set_visible(False)\n\nplt.savefig('Th_Fig22_3_B1.pdf')\n\n#%%\nkaps1 = np.linspace(-1.3,1.3, 130)\nkaps2 = np.linspace(-1.3,1.3, 100)\nksol = np.zeros((len(kaps1), len(kaps2), 2))\n\nK1s, K2s = np.meshgrid(kaps1, kaps2)\ndef transf(K):\n return(K*Prime(0, np.dot(K.T, K)))\n \nE = np.zeros((len(kaps1), len(kaps2)))\nfor ik1 ,k1 in enumerate(kaps1):\n for ik2, k2 in enumerate(kaps2):\n K = np.array((k1, k2))\n ksol[ik1, ik2, :] = - K+ np.dot(Sigma, transf(K))\n E[ik1, ik2] = np.sqrt(np.sum(ksol[ik1,ik2,:]**2))\n \n\n\nsearch_kap1 = np.linspace(0.2, 1.3, 300)\nE_1 = np.zeros_like(search_kap1)\nfor ik1 ,k1 in enumerate(search_kap1):\n K = v[:,0]*k1\n kSS = - K+ np.dot(Sigma, transf(K))\n E_1[ik1] = np.sqrt(np.sum(kSS**2))\nfp1 = search_kap1[np.argmin(E_1)]\n\nsearch_kap2 = np.linspace(0.2, 1.3, 300)\nE_2 = np.zeros_like(search_kap1)\nfor ik2 ,k2 in enumerate(search_kap2):\n K = v[:,1]*k2\n kSS = - K+ np.dot(Sigma, transf(K))\n E_2[ik2] = np.sqrt(np.sum(kSS**2))\nfp2 = search_kap2[np.argmin(E_2)]\n\nfig = plt.figure()\nax = fig.add_subplot(111) \nim = plt.pcolor(kaps1, kaps2, np.log10(E).T, cmap ='viridis', vmin = -2.,vmax=0, shading='auto')\n\n#cbar = ax.figure.colorbar(im, ax=ax)\n#cbar.set_ticks([-2, -1, 0])\nstrm = ax.streamplot(kaps1, kaps2, ksol[:,:,0].T, ksol[:,:,1].T, color='w', linewidth=1, cmap='autumn', density=0.6)\n#cbar.set_label(r'$\\log_{10}$ speed', rotation=270, labelpad=18)\nplt.xlabel('$\\kappa_1$')\nplt.ylabel('$\\kappa_2$')\nplt.scatter([ 0, ], [0,], s=50, edgecolor='k', facecolor='w', linewidth=1., zorder=4)\n#plt.scatter( [v[0,1]*fp2,-v[0,1]*fp2], [v[1,1]*fp2,-v[1,1]*fp2], s=100, edgecolor='w', facecolor='k', linewidth=1.5, zorder=4)\nths = np.linspace(0, np.pi*2)\n\nplt.plot(fp1*np.cos(ths), fp1*np.sin(ths), c='w', lw=3)\nplt.plot(fp1*np.cos(ths), fp1*np.sin(ths), c='k', lw=1)\n\nax.set_xticks([-1, 0, 1])\nax.set_yticks([-1, 0, 1])\nax.set_ylim([np.min(kaps2), np.max(kaps2)])\nax.set_xlim([np.min(kaps1), np.max(kaps1)])\nplt.savefig('Th_Fig22_3_C1.pdf')\n\n\n\n#%%\nnp.random.seed(66779)\ncC = np.array((1, 1, 1,))*0.3\n\nfig = plt.figure()\nax = fig.add_subplot(111) \n\nplt.xlabel('$\\kappa_1$')\nplt.ylabel('$\\kappa_2$')\nplt.scatter([ 0,], [0,], s=50, edgecolor='k', facecolor='w', linewidth=1., zorder=4)\n#plt.scatter( [v[0,1]*fp2,-v[0,1]*fp2], [v[1,1]*fp2,-v[1,1]*fp2], s=100, edgecolor='w', facecolor='k', linewidth=1.5, zorder=4)\n#plt.plot(fp1*np.cos(ths), fp1*np.sin(ths), c='w', lw=2)\nplt.plot(fp1*np.cos(ths), fp1*np.sin(ths), '--k', lw=1)\nax.set_xticks([-1, 0, 1])\nax.set_yticks([-1, 0, 1])\nax.set_ylim([np.min(kaps2), 
np.max(kaps2)])\nax.set_xlim([np.min(kaps1), np.max(kaps1)])\n\nNn = 1000\nSigmaTot = np.eye(4)\nSigmaTot[2,2] = 4\nSigmaTot[3,3] = 4\n\nSigmaTot[0,2] = Sigma[0,0]\nSigmaTot[0,3] = Sigma[1,0]\nSigmaTot[1,2] = Sigma[0,1]\nSigmaTot[1,3] = Sigma[1,1]\n\nSigmaTot[2,0] = SigmaTot[0,2]\nSigmaTot[3,0] = SigmaTot[0,3]\nSigmaTot[2,1] = SigmaTot[1,2]\nSigmaTot[3,1] = SigmaTot[1,3]\n\nMu= np.zeros((4,1))\n\ninkap1 = np.linspace(-1, 1, 4)\ninkap2 = np.linspace(-1.1, 1.1001, 5)\n\ndt = 0.8\ntime = np.arange(0, 280, dt)\n\nfor trials in range(2):\n try_s0 = 100\n for tr in range(20):\n XX = np.random.multivariate_normal(Mu[:,0], SigmaTot, size=Nn)\n try_s = np.sum((np.dot(XX.T,XX)/1000-SigmaTot)**2)\n if try_s < try_s0:\n #print(try_s)\n try_s0 = try_s\n XX_s = XX\n M = XX_s[:,0:2]\n N = XX_s[:,2:4]\n \n J = np.dot(M, N.T)/Nn\n \n cC = 0.6+0.2*np.random.rand()-0.1\n \n for ik1, ink1 in enumerate(inkap1):\n for ik2, ink2 in enumerate(inkap2):\n sk1 = np.zeros_like(time)\n sk2 = np.zeros_like(time)\n \n x0 = ink1*M[:,0] + ink2*M[:,1]\n sk1[0] = np.mean(M[:,0]*x0)\n sk2[0] = np.mean(M[:,1]*x0)\n \n for it, ti in enumerate(time[:-1]):\n x = x0 + dt*(-x0 + np.dot(J, np.tanh(x0)))\n sk1[it+1] = np.mean(M[:,0]*x)\n sk2[it+1] = np.mean(M[:,1]*x)\n x0 = x\n plt.plot(sk1, sk2, c=[cC, cC, cC])\n plt.scatter(sk1[0], sk2[0], s=15, facecolor=[cC, cC, cC])\n if trials==0:\n cC = 0.3\n plt.plot(sk1, sk2, c=[cC, cC, cC])\n plt.scatter(sk1[0], sk2[0], s=15, facecolor=[cC, cC, cC])\n plt.scatter(sk1[-1], sk2[-1], s=40, facecolor=[cC, cC, cC], edgecolor='k', linewidth=1., zorder=5)\n elif trials==1:\n cC = 0.6\n plt.plot(sk1, sk2, c=[cC, cC, cC])\n plt.scatter(sk1[0], sk2[0], s=15, facecolor=[cC, cC, cC])\n plt.scatter(sk1[-1], sk2[-1], s=40, facecolor=[cC, cC, cC], edgecolor='k', linewidth=1., zorder=5)\n \n \nax.set_xticks([-1, 0, 1])\nax.set_yticks([-1, 0, 1])\nax.set_ylim([np.min(kaps2), np.max(kaps2)])\nax.set_xlim([np.min(kaps1), np.max(kaps1)])\n \nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.yaxis.set_ticks_position('left')\nax.xaxis.set_ticks_position('bottom') \n\nplt.savefig('Th_Fig22_3_D.pdf') \n " }, { "alpha_fraction": 0.5509763360023499, "alphanum_fraction": 0.6148654818534851, "avg_line_length": 26.573200225830078, "blob_id": "1f5704326cb247fb78e071486c980b7d4da8bfb2", "content_id": "c9c7cfa294b0245882babb553db4fc5895469991", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11113, "license_type": "no_license", "max_line_length": 84, "num_lines": 403, "path": "/C_Fig2.py", "repo_name": "emebeiran/low-rank2020", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Mar 30 10:49:59 2020\n\n@author: mbeiran\n\"\"\"\n#%%\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport lib_rnns as lr\nfrom matplotlib.gridspec import GridSpec\naa = lr.set_plot()\n\n\n \ngaussian_norm = (1/np.sqrt(np.pi))\ngauss_points, gauss_weights = np.polynomial.hermite.hermgauss(200)\ngauss_points = gauss_points*np.sqrt(2)\n\ndef Phi (mu, delta0):\n integrand = np.tanh(mu+np.sqrt(delta0)*gauss_points)\n return gaussian_norm * np.dot (integrand,gauss_weights)\ndef Prime (mu, delta0):\n integrand = 1 - (np.tanh(mu+np.sqrt(delta0)*gauss_points))**2\n return gaussian_norm * np.dot (integrand,gauss_weights)\n\n#%%\nN = 200\nnbins = 20\ns_mn1 = 0.5\ns_mn2 = 1.2\nm1 = np.random.randn(N)\nm1 = m1/np.std(m1)\nm2 = np.random.randn(N)\nm2 = m2/np.std(m2)\nms = np.linspace(-3, 3)\n\nn1 = 
s_mn1*m1+0.3*np.random.randn(N)\nn2 = s_mn2*m2+0.3*np.random.randn(N)\n\nss1 = np.sqrt(s_mn1**2+0.3**2) # std of n1: n1 = s_mn1*m1 + 0.3*xi with Var(m1)=1, so Var(n1) = s_mn1**2 + 0.3**2\nss2 = np.sqrt(s_mn2**2+0.3**2) # std of n2, same reasoning\n\nfig = plt.figure(figsize=[3.6, 3.6])\n\ngs = GridSpec(4,4)\n\nax_joint = fig.add_subplot(gs[1:4,0:3])\nax_marg_x = fig.add_subplot(gs[0,0:3])\nax_marg_y = fig.add_subplot(gs[1:4,3])\n\n\n\nax_joint.scatter(m1, n1, s=20, alpha=0.5, label=r'$\\sigma_{mn} = 0.5$')\n\nax_joint.plot(ms, s_mn1*ms, '--', c='k', lw=1)\nax_joint.plot(ms, ms, c='k', lw=1)\n\n\nax_joint.legend(loc=2,frameon=False, handletextpad=-0.1)\nax_joint.spines['top'].set_visible(False)\nax_joint.spines['right'].set_visible(False)\nax_joint.yaxis.set_ticks_position('left')\nax_joint.xaxis.set_ticks_position('bottom')\n\nax_joint.set_ylim([-4.2, 4.2])\nax_joint.set_xlabel(r'$m_i$')\nax_joint.set_ylabel(r'$n_i$')\n\nax_marg_x.hist(m1, nbins, alpha=0.5, density=True) # marginal of m1, the loadings shown in this figure\n\nax_marg_x.spines['top'].set_visible(False)\nax_marg_x.spines['right'].set_visible(False)\nax_marg_x.yaxis.set_ticks_position('left')\nax_marg_x.xaxis.set_ticks_position('bottom')\nax_marg_x.plot( ms, np.exp(-ms**2/(2*1**2))/np.sqrt(2*np.pi*1**2), c='C0', lw=2)\n\nax_marg_y.hist(n1, nbins, orientation=\"horizontal\", alpha=0.5, density=True)\n\nax_marg_y.plot( np.exp(-ms**2/(2*ss1**2))/np.sqrt(2*np.pi*ss1**2),ms, lw=2, c='C0',)\n\n\nax_marg_y.spines['top'].set_visible(False)\nax_marg_y.spines['right'].set_visible(False)\nax_marg_y.yaxis.set_ticks_position('left')\nax_marg_y.xaxis.set_ticks_position('bottom')\nax_marg_y.set_ylim([-4.2, 4.2])\n\nax_marg_y.set_xticklabels(['0', '0.5'])\n\n## Turn off tick labels on marginals\nplt.setp(ax_marg_x.get_xticklabels(), visible=False)\nplt.setp(ax_marg_y.get_yticklabels(), visible=False)\n\n## Set labels on 
marginals\nax_marg_y.set_xlabel(r'$P(n_i)$')\nax_marg_x.set_ylabel(r'$P(m_i)$')\n\nplt.savefig('Th_Fig1_A_12.pdf')\n#%%\nkappas = np.linspace(-1, 1, 500)\nFk1 = np.zeros_like(kappas)\nFk2 = np.zeros_like(kappas)\n\nfor ik, ka in enumerate(kappas):\n Fk1[ik]= -ka + s_mn1*ka*Prime(0, ka**2)\n Fk2[ik]= -ka + s_mn2*ka*Prime(0, ka**2)\nfig = plt.figure()\nax = fig.add_subplot(111)\nplt.plot(kappas, Fk2, lw=2, c='C3')\n#plt.plot(kappas, Fk1, lw=2)\n\nplt.plot(kappas, kappas*0, '--k')\nplt.scatter(0,0, edgecolor='C3', color='w', s=60, lw=2, zorder=4)\n\nk0s = kappas[np.argmin(np.abs(Fk2[kappas<0.1]))]\nplt.scatter(k0s,0, edgecolor='k', color='C3', s=60, lw=1, zorder=4)\nplt.scatter(-k0s,0, edgecolor='k', color='C3', s=60, lw=1, zorder=4)\n\nplt.ylim([-0.5, 0.5])\nplt.ylabel(r'dynamics $d\\kappa / dt$')\nplt.xlabel(r'latent variable $\\kappa$')\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.yaxis.set_ticks_position('left')\nax.xaxis.set_ticks_position('bottom') \nplt.savefig('Th_Fig1_C_2.pdf')\n\n#%%\nkappas = np.linspace(-1, 1, 500)\nFk1 = np.zeros_like(kappas)\nFk2 = np.zeros_like(kappas)\n\nfor ik, ka in enumerate(kappas):\n Fk1[ik]= -ka + s_mn1*ka*Prime(0, ka**2)\n Fk2[ik]= -ka + s_mn2*ka*Prime(0, ka**2)\nfig = plt.figure()\nax = fig.add_subplot(111)\n#plt.plot(kappas, Fk2, lw=2)\nplt.plot(kappas, Fk1, lw=2, c='C3')\n\nplt.plot(kappas, kappas*0, '--k')\n#plt.scatter(0,0, edgecolor='C3', color='w', s=60, lw=2, zorder=4)\n\nk0s = kappas[np.argmin(np.abs(Fk2[kappas<0.1]))]\nplt.scatter(0,0, edgecolor='k', color='C3', s=60, lw=1, zorder=4)\n#plt.scatter(-k0s,0, edgecolor='k', color='C3', s=60, lw=1, zorder=4)\n\nplt.ylim([-0.5, 0.5])\nplt.ylabel(r'dynamics $d\\kappa / dt$')\nplt.xlabel(r'latent variable $\\kappa$')\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.yaxis.set_ticks_position('left')\nax.xaxis.set_ticks_position('bottom') \nplt.savefig('Th_Fig1_C_1.pdf')\n\n#%%\nJ1 = np.dot(m1[:,None], n1[:,None].T)\nJ2 = np.dot(m2[:,None], n2[:,None].T)\n\ntime = np.linspace(0, 40, 1000)\ndt = time[1]-time[0]\n\nnS = 10\nxs1 = np.zeros((len(time), nS))\nxs2 = np.zeros((len(time), nS))\n\nx0 = 0.5*np.random.randn(N)\nx02 = 0.5*np.random.randn(N)\nk1 = np.zeros(len(time))\nk2 = np.zeros(len(time))\nfor it, t in enumerate(time[:-1]): \n xs1[it] = x0[0:nS]\n xs2[it] = x02[0:nS]\n k1[it] = np.mean(m1*x0)\n k2[it] = np.mean(m2*x02)\n \n x = x0 + dt*(-x0+np.dot(J1, np.tanh(x0)/N))\n x2 = x02 + dt*(-x02+np.dot(J2, np.tanh(x02)/N))\n \n x0=x\n x02=x2\n \n xs1[it+1] = x0[0:nS]\n xs2[it+1] = x02[0:nS]\n k1[it+1] = np.mean(m1*x0)\n k2[it+1] = np.mean(m2*x02)\nfig = plt.figure()\nax = fig.add_subplot(111)\nplt.plot(time, xs2, c='C0')\n#plt.plot(time, xs1, c='C1')\n\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.yaxis.set_ticks_position('left')\nax.xaxis.set_ticks_position('bottom') \nplt.ylabel(r'activation $x_i\\left(t\\right)$')\nplt.xlabel(r'time')\nplt.savefig('Th_Fig1_B_2.pdf')\n\n#%%\nJ1 = np.dot(m1[:,None], n1[:,None].T)\nJ2 = np.dot(m2[:,None], n2[:,None].T)\n\ntime = np.linspace(0, 40, 1000)\ndt = time[1]-time[0]\n\nnS = 10\nxs1 = np.zeros((len(time), nS))\nxs2 = np.zeros((len(time), nS))\n\nx0 = 0.5*np.random.randn(N)\nx02 = 0.5*np.random.randn(N)\nk1 = np.zeros(len(time))\nk2 = np.zeros(len(time))\nfor it, t in enumerate(time[:-1]): \n xs1[it] = x0[0:nS]\n xs2[it] = x02[0:nS]\n k1[it] = np.mean(m1*x0)\n k2[it] = np.mean(m2*x02)\n \n x = x0 + dt*(-x0+np.dot(J1, np.tanh(x0)/N))\n x2 = x02 + dt*(-x02+np.dot(J2, 
np.tanh(x02)/N))\n \n x0=x\n x02=x2\n \n xs1[it+1] = x0[0:nS]\n xs2[it+1] = x02[0:nS]\n k1[it+1] = np.mean(m1*x0)\n k2[it+1] = np.mean(m2*x02)\nfig = plt.figure()\nax = fig.add_subplot(111)\n#plt.plot(time, xs2, c='C0')\nplt.plot(time, xs1, c='C0')\n\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.yaxis.set_ticks_position('left')\nax.xaxis.set_ticks_position('bottom') \nplt.ylabel(r'activation $x_i\\left(t\\right)$')\nplt.xlabel(r'time')\nplt.savefig('Th_Fig1_B_1.pdf')\n#%%\nfig = plt.figure()\nax = fig.add_subplot(111)\ntrials = 10\n\nN = 1000\nfor tr in range(trials):\n m1 = np.random.randn(N)\n m1 = m1/np.std(m1)\n m2 = np.random.randn(N)\n m2 = m2/np.std(m2)\n ms = np.linspace(-3, 3)\n \n n1 = s_mn1*m1+0.3*np.random.randn(N)\n n2 = s_mn2*m2+0.3*np.random.randn(N)\n\n J1 = np.dot(m1[:,None], n1[:,None].T)\n J2 = np.dot(m2[:,None], n2[:,None].T)\n x0 = 0.5*np.random.randn(N)+0.05*m1*np.random.randn()\n x02 = 0.5*np.random.randn(N)+0.05*m2*np.random.randn()\n k1 = np.zeros(len(time))\n k2 = np.zeros(len(time))\n for it, t in enumerate(time[:-1]): \n xs1[it] = x0[0:nS]\n xs2[it] = x02[0:nS]\n k1[it] = np.mean(m1*x0)\n k2[it] = np.mean(m2*x02)\n \n x = x0 + dt*(-x0+np.dot(J1, np.tanh(x0)/N))\n x2 = x02 + dt*(-x02+np.dot(J2, np.tanh(x02)/N))\n \n x0=x\n x02=x2\n \n k1[it+1] = np.mean(m1*x0)\n k2[it+1] = np.mean(m2*x02)\n plt.plot(time, k1, c='C3')\n #plt.plot(time, k2, c='C0')\n \nplt.plot(time, 0*time, lw=4, alpha=0.2, c='C3')\n#plt.plot(time, k0s*np.ones_like(time), lw=4, alpha=0.2, c='C3')\n#plt.plot(time, -k0s*np.ones_like(time), lw=4, alpha=0.2, c='C3')\n\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.yaxis.set_ticks_position('left')\nax.xaxis.set_ticks_position('bottom') \nplt.ylabel(r'latent variable $\\kappa$')\nplt.xlabel(r'time')\nplt.savefig('Th_Fig1_D_1.pdf')\n\n#%%\nfig = plt.figure()\nax = fig.add_subplot(111)\ntrials = 10\n\nN = 1000\nfor tr in range(trials):\n m1 = np.random.randn(N)\n m1 = m1/np.std(m1)\n m2 = np.random.randn(N)\n m2 = m2/np.std(m2)\n ms = np.linspace(-3, 3)\n \n n1 = s_mn1*m1+0.3*np.random.randn(N)\n n2 = s_mn2*m2+0.3*np.random.randn(N)\n\n J1 = np.dot(m1[:,None], n1[:,None].T)\n J2 = np.dot(m2[:,None], n2[:,None].T)\n x0 = 0.5*np.random.randn(N)+0.05*m1*np.random.randn()\n x02 = 0.5*np.random.randn(N)+0.05*m2*np.random.randn()\n k1 = np.zeros(len(time))\n k2 = np.zeros(len(time))\n for it, t in enumerate(time[:-1]): \n xs1[it] = x0[0:nS]\n xs2[it] = x02[0:nS]\n k1[it] = np.mean(m1*x0)\n k2[it] = np.mean(m2*x02)\n \n x = x0 + dt*(-x0+np.dot(J1, np.tanh(x0)/N))\n x2 = x02 + dt*(-x02+np.dot(J2, np.tanh(x02)/N))\n \n x0=x\n x02=x2\n \n k1[it+1] = np.mean(m1*x0)\n k2[it+1] = np.mean(m2*x02)\n #plt.plot(time, k1, c='C3')\n plt.plot(time, k2, c='C3')\n \n#plt.plot(time, 0*time, lw=4, alpha=0.2, c='C3')\nplt.plot(time, k0s*np.ones_like(time), lw=4, alpha=0.2, c='C3')\nplt.plot(time, -k0s*np.ones_like(time), lw=4, alpha=0.2, c='C3')\n\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.yaxis.set_ticks_position('left')\nax.xaxis.set_ticks_position('bottom') \nplt.ylabel(r'latent variable $\\kappa$')\nplt.xlabel(r'time')\nplt.savefig('Th_Fig1_D_2.pdf')\n\n" }, { "alpha_fraction": 0.48015499114990234, "alphanum_fraction": 0.5536705255508423, "avg_line_length": 28.750778198242188, "blob_id": "3443c91d90efc2615d69f09c2999017a1738871c", "content_id": "39bb5be8c40ea5b2dcb82f79ee2cb5c6ed7b42f2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": 
"Python", "length_bytes": 9549, "license_type": "no_license", "max_line_length": 136, "num_lines": 321, "path": "/C_Fig8_ABC.py", "repo_name": "emebeiran/low-rank2020", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Oct 7 11:01:38 2020\n\n@author: mbeiran\n\"\"\"\n\n#%%\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport lib_rnns as lr\nfrom matplotlib.gridspec import GridSpec\naa = lr.set_plot()\n\n\n \ngaussian_norm = (1/np.sqrt(np.pi))\ngauss_points, gauss_weights = np.polynomial.hermite.hermgauss(200)\ngauss_points = gauss_points*np.sqrt(2)\n\ndef Phi (mu, delta0):\n integrand = np.tanh(mu+np.sqrt(delta0)*gauss_points)\n return gaussian_norm * np.dot (integrand,gauss_weights)\ndef Prime (mu, delta0):\n integrand = 1 - (np.tanh(mu+np.sqrt(delta0)*gauss_points))**2\n return gaussian_norm * np.dot (integrand,gauss_weights)\ndef Trip (mu, delta0):\n integrand = 2*(np.cosh(2*(mu+np.sqrt(delta0)*gauss_points))-2 )/np.cosh(mu+np.sqrt(delta0)*gauss_points)**4\n return gaussian_norm * np.dot (integrand,gauss_weights)\n\n\n#%%\n\nepss = np.array((0, 0.1, 0.2, 0.4, 0.6, 1.0, 2.0))\n\nfp1s = np.zeros_like(epss)\nfp2s = np.zeros_like(epss)\nus = np.zeros((2,len(epss)))\nvs = np.zeros((2,2,len(epss)))\nu3s = np.zeros((2,len(epss)))\nv3s = np.zeros((2,2,len(epss)))\n\ndt = 0.1\nT = 40\ntime = np.arange(0, T, dt)\ntraj1s = np.zeros((2,len(time), len(epss)))\ntraj2s = np.zeros((2,len(time), len(epss)))\n\nclS = np.zeros((3, len(epss)))\n\ncl2 = np.array(( 225/256, 74/256, 51/256))\ncl1 = np.array(( 0.5, 0.5, 0.5))\n\nnL = len(epss)-1\nfor iep, eps in enumerate(epss):\n clS[:,iep] = cl1* (nL-iep)/nL + cl2* (iep)/nL\n\nfor iep, eps in enumerate(epss):\n ms = np.linspace(-5,5,100)\n Sigma = np.zeros((2,2))\n Sigma[0,0] = 1.2\n Sigma[1,1] = 1.6\n Sigma[0,1] = eps\n \n \n #%%\n\n \n u, v = np.linalg.eig(Sigma)\n us[:,iep] = u\n vs[:,:,iep] = v\n \n l1 = -0.5\n l2 = 2.\n cC = np.array((1, 1, 1,))*0.3\n\n \n #%%\n kaps1 = np.linspace(-1.3,1.3, 130)\n kaps2 = np.linspace(-1.3,1.3, 100)\n ksol = np.zeros((len(kaps1), len(kaps2), 2))\n \n K1s, K2s = np.meshgrid(kaps1, kaps2)\n def transf(K):\n return(K*Prime(0, np.dot(K.T, K)))\n \n E = np.zeros((len(kaps1), len(kaps2)))\n for ik1 ,k1 in enumerate(kaps1):\n for ik2, k2 in enumerate(kaps2):\n K = np.array((k1, k2))\n ksol[ik1, ik2, :] = - K+ np.dot(Sigma, transf(K))\n E[ik1, ik2] = np.sqrt(np.sum(ksol[ik1,ik2,:]**2))\n \n \n \n search_kap1 = np.linspace(0.2, 1.3, 300)\n E_1 = np.zeros_like(search_kap1)\n for ik1 ,k1 in enumerate(search_kap1):\n K = v[:,0]*k1\n kSS = - K+ np.dot(Sigma, transf(K))\n E_1[ik1] = np.sqrt(np.sum(kSS**2))\n fp1 = search_kap1[np.argmin(E_1)]\n fp1s[iep] = fp1\n \n search_kap2 = np.linspace(0.2, 1.3, 300)\n E_2 = np.zeros_like(search_kap1)\n for ik2 ,k2 in enumerate(search_kap2):\n K = v[:,1]*k2\n kSS = - K+ np.dot(Sigma, transf(K))\n E_2[ik2] = np.sqrt(np.sum(kSS**2))\n fp2 = search_kap2[np.argmin(E_2)]\n fp2s[iep] = fp2\n \n RR = np.array((v[0,0]*fp1, v[1,0]*fp1))\n RR = RR[:,None]\n Triple = Trip(0, fp1**2)\n S2 = -np.eye(2)+Sigma/u[0] + Triple*u[0]*np.dot(RR,RR.T)\n \n u2, v2 = np.linalg.eig(S2)\n print(u2)\n k1 = RR[:,0]\n\n traj1 = np.zeros((2,len(time)))\n traj2 = np.zeros((2,len(time)))\n traj1[:,0] = RR[:,0] + 0.01 * v2[:,1]\n traj2[:,0] = RR[:,0] - 0.01* v2[:,1]\n \n for it, ti in enumerate(time[:-1]):\n traj1[:,it+1] = traj1[:,it] + dt*(-traj1[:,it] + np.dot(Sigma,transf(traj1[:,it])))\n traj2[:,it+1] = traj2[:,it] + dt*(-traj2[:,it] + 
np.dot(Sigma,transf(traj2[:,it])))\n \n traj1s[:,:,iep] = traj1\n traj2s[:,:,iep] = traj2\n \n \n RR = np.array((v[0,1]*fp2, v[1,1]*fp2))\n RR = RR[:,None]\n Triple = Trip(0, fp2**2)\n S3 = -np.eye(2)+Sigma/u[1] + Triple*u[1]*np.dot(RR,RR.T)\n\n u3, v3 = np.linalg.eig(S3)\n print(u3)\n print('--')\n u3s[:,iep] = u3\n v3s[:,:,iep] = v3\n \n \n fig = plt.figure()\n ax = fig.add_subplot(111) \n im = plt.pcolor(kaps1, kaps2, np.log10(E).T, cmap ='viridis', vmin = -2.,vmax=0, shading= 'auto')\n \n\n strm = ax.streamplot(kaps1, kaps2, ksol[:,:,0].T, ksol[:,:,1].T, color='w', linewidth=1, cmap='autumn', density=0.6)\n plt.xlabel('$\\kappa_1$')\n plt.ylabel('$\\kappa_2$')\n# plt.scatter([fp1, 0, -fp1], [0,0,0], s=50, edgecolor='k', facecolor='w', linewidth=1., zorder=4)\n# plt.scatter( [v[0,1]*fp2,-v[0,1]*fp2], [v[1,1]*fp2,-v[1,1]*fp2], s=100, edgecolor='w', facecolor='k', linewidth=1.5, zorder=4)\n# \n plt.scatter([fp1, 0, -fp1], [0,0,0], s=50, edgecolor='k', facecolor='w', linewidth=1., zorder=4)\n plt.scatter( [v[0,1]*fp2,-v[0,1]*fp2], [v[1,1]*fp2,-v[1,1]*fp2], s=80, edgecolor='w', facecolor=clS[:,iep], linewidth=1.5, zorder=4)\n \n plt.plot(traj1s[0,:,iep], traj1s[1,:,iep], color=clS[:,iep], lw=1.5)\n plt.plot(-traj1s[0,:,iep], -traj1s[1,:,iep], color=clS[:,iep], lw=1.5)\n \n plt.plot(traj2s[0,:,iep], traj2s[1,:,iep], color=clS[:,iep], lw=1.5)\n plt.plot(-traj2s[0,:,iep], -traj2s[1,:,iep], color=clS[:,iep], lw=1.5)\n \n ax.set_xticks([-1, 0, 1])\n ax.set_yticks([-1, 0, 1])\n ax.set_ylim([np.min(kaps2), np.max(kaps2)])\n ax.set_xlim([np.min(kaps1), np.max(kaps1)])\n plt.savefig('Th_FigS2_2_C1_'+str(eps)+'.pdf')\n \n#%%\n \n\nfig = plt.figure()\nax = fig.add_subplot(111) \nfor iep, eps in enumerate(epss):\n fp1 = fp1s[iep]\n fp2 = fp2s[iep]\n u = us[:,iep]\n v = vs[:,:,iep]\n \n plt.scatter([fp1, 0, -fp1], [0,0,0], s=50, edgecolor='k', facecolor='w', linewidth=1., zorder=4)\n plt.scatter( [v[0,1]*fp2,-v[0,1]*fp2], [v[1,1]*fp2,-v[1,1]*fp2], s=80, edgecolor='w', facecolor=clS[:,iep], linewidth=1.5, zorder=4)\n \n plt.plot(traj1s[0,:,iep], traj1s[1,:,iep], color=clS[:,iep], lw=1.5)\n plt.plot(-traj1s[0,:,iep], -traj1s[1,:,iep], color=clS[:,iep], lw=1.5)\n \n plt.plot(traj2s[0,:,iep], traj2s[1,:,iep], color=clS[:,iep], lw=1.5)\n plt.plot(-traj2s[0,:,iep], -traj2s[1,:,iep], color=clS[:,iep], lw=1.5)\n \nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.set_xticks([-1, 0, 1])\nax.set_yticks([-1, 0, 1])\nax.yaxis.set_ticks_position('left')\nax.xaxis.set_ticks_position('bottom') \nplt.xlabel('$\\kappa_1$')\nplt.ylabel('$\\kappa_2$')\nplt.savefig('Th_FigS2_C.pdf')\n\n#%%\nfig = plt.figure()\nax = fig.add_subplot(111) \n\nplt.plot([l1, l2], [0,0], 'k', lw=0.5)\nplt.plot( [0,0],[l1, l2], 'k', lw=0.5)\nfor iep, eps in enumerate(epss):\n Sigma = np.zeros((2,2))\n Sigma[0,0] = 1.2\n Sigma[1,1] = 1.6\n Sigma[0,1] = eps\n \n \n u, v = np.linalg.eig(Sigma)\n l1 = -1.\n l2 = 2.\n cC = np.array((1, 1, 1,))*0.3\n\n ax.arrow(0, 0, u[0]*v[0,0], u[0]*v[1,0], fc=cC, ec=cC, alpha =0.8, width=0.06,\n head_width=0.2, head_length=0.2, zorder=3)\n ax.arrow(0, 0, u[1]*v[0,1], u[1]*v[1,1], fc=clS[:,iep], ec='k', alpha =0.8, width=0.06,\n head_width=0.2, head_length=0.2, zorder=3)\n \n ax.text(0.8, -0.4, r'$\\lambda_1 \\bf{u}_1$', fontsize = 15)\n ax.text(1., 1.5, r'$\\lambda_2 \\bf{u}_2$', fontsize = 15)\n \n \n ax.set_xlim([l1, l2])\n ax.set_ylim([l1, l2])\n \n ax.axis('off')\n ax.spines['top'].set_visible(False)\n ax.spines['right'].set_visible(False)\n ax.spines['left'].set_visible(False)\n 
ax.spines['bottom'].set_visible(False)\n \n plt.savefig('Th_FigS2_B.pdf')\n#%%\nSigma2 = np.zeros_like(Sigma)\nSigma2[:,:] = Sigma\n\n\nplt.rcParams[\"axes.grid\"] = False\nfig = plt.figure(figsize = [2.0, 2.0])\nax = fig.add_subplot(111) \nplt.imshow(Sigma2, cmap='OrRd', vmin = 0, vmax = 4)\nax.tick_params(color='white')\n\n\nfor i in range(np.shape(Sigma)[0]):\n for j in range(np.shape(Sigma)[1]):\n if i==1 and j==0:\n ax.text(i, j, ' ', va='center', ha='center', fontsize=16)\n else:\n ax.text(i, j, str(Sigma2[j,i]), va='center', ha='center', fontsize=16)\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.spines['left'].set_visible(False)\nax.spines['bottom'].set_visible(False)\n\nax.yaxis.set_ticks_position('right')\nax.xaxis.set_ticks_position('top') \nax.set_xticks([0, 1])\nax.set_yticks([0, 1])\n\n\nax.set_xticklabels([r'$m_i^{\\left(1\\right)}$', r'$m_i^{\\left(2\\right)}$'], fontsize=14)\nax.set_yticklabels([r'$n_i^{\\left(1\\right)}$', r'$n_i^{\\left(2\\right)}$'], fontsize=14)\n\nplt.savefig('Th_FigS2_A1.pdf')\nplt.show()\n\n#%%\nfig = plt.figure(figsize = [2.0, 1.1])\nax = fig.add_subplot(111) \n\nclS = np.zeros((3, len(epss)))\n\ncl2 = np.array(( 225/256, 74/256, 51/256))\ncl1 = np.array(( 0.5, 0.5, 0.5))\n\nnL = len(epss)-1\nfor iep, eps in enumerate(epss[:-1]):\n cl11 = cl1* (nL-iep)/nL + cl2* (iep)/nL\n cl22 = cl1* (nL-iep-1)/nL + cl2* (iep+1)/nL\n \n int_points = 20\n ps = np.linspace(eps, epss[iep+1], 10)\n nP = len(ps)-1\n plt.scatter(eps, 1.18, s=40, marker='v', color='k', edgecolor=[0.8, 0.8, 0.8], zorder=3)\n plt.scatter(epss[iep+1], 1.18, s=40, marker='v', color='k', edgecolor=[0.8, 0.8, 0.8], zorder=3)\n \n for ip , p in enumerate(ps[:-1]):\n \n cc = cl11* (nP-ip)/nP + cl22* (ip)/nP\n \n plt.fill_between([p, ps[ip+1]], [0, 0], [1, 1], color=cc)\n\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.spines['left'].set_visible(False)\nax.spines['bottom'].set_visible(False)\n \nax.set_yticks([])\nax.set_xticks([0, 1, 2])\nax.set_xlabel('')\n\nax.set_xlim([-0.1,2.1])\nax.set_ylim([-0.05,1.4])\nax.plot([0,2], [1,1], c='k')\nax.plot([0,0], [0,1], c='k')\nax.plot([2,2], [0,1], c='k')\nax.plot([0,2], [0.0,0.0], c='k', zorder=2)\n\nplt.tight_layout()\nplt.savefig('Th_FigS2_A2.pdf')\nplt.show()" }, { "alpha_fraction": 0.5054020881652832, "alphanum_fraction": 0.5895423293113708, "avg_line_length": 30.041139602661133, "blob_id": "0d272f4688b9fe184a011f2245cd5a47376d734f", "content_id": "87489c10de917a42028dbe0d6d4858c2dc952b7a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 19622, "license_type": "no_license", "max_line_length": 115, "num_lines": 632, "path": "/C_Fig9.py", "repo_name": "emebeiran/low-rank2020", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Mar 30 10:49:59 2020\n\n@author: mbeiran\n\"\"\"\n#%%\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport lib_rnns as lr\nfrom matplotlib.gridspec import GridSpec\naa = lr.set_plot()\n\n\n \ngaussian_norm = (1/np.sqrt(np.pi))\ngauss_points, gauss_weights = np.polynomial.hermite.hermgauss(200)\ngauss_points = gauss_points*np.sqrt(2)\n\ndef Phi (mu, delta0):\n integrand = np.tanh(mu+np.sqrt(delta0)*gauss_points)\n return gaussian_norm * np.dot (integrand,gauss_weights)\ndef Prime (mu, delta0):\n integrand = 1 - (np.tanh(mu+np.sqrt(delta0)*gauss_points))**2\n return gaussian_norm * np.dot (integrand,gauss_weights)\n\n#%%\nN = 200\nnbins = 20\ns_mn1 
= 0.5\ns_mn2 = 1.2\nm1 = np.random.randn(N)\nm1 = m1/np.std(m1)\nm2 = np.random.randn(N)\nm2 = m2/np.std(m2)\nms = np.linspace(-3, 3)\n\nn1 = s_mn1*m1+0.3*np.random.randn(N)\nn2 = s_mn2*m2+0.3*np.random.randn(N)\n\n\n#%%\n# =============================================================================\n# Fig 2\n# =============================================================================\nms = np.linspace(-5,5,100)\nSigma = np.zeros((3,3))\nSigma[0,0] = 1.2\nSigma[1,1] = 1.2\nSigma[2,2] = 1.6\nSigma[0,1] = -2.\nSigma[1,0] = 1.\nSigma[0,2] = 1.5\n\nvalN = 9\n\nBigSigma = np.zeros((6,6))\nBigSigma[0,0] = 1. #Sigma[0,0]\nBigSigma[1,1] = 1.#Sigma[1,1]\nBigSigma[2,2] = 1. #Sigma[2,2]\nBigSigma[3,3] = valN\nBigSigma[4,4] = valN\nBigSigma[5,5] = valN\n\nSigma2 = Sigma.T\n\n\nBigSigma[0,3] = Sigma2[0,0] #m1 - n1\nBigSigma[3,0] = Sigma2[0,0]\nBigSigma[0,4] = Sigma2[0,1] #m1 - n2\nBigSigma[4,0] = Sigma2[0,1]\nBigSigma[1,3] = Sigma2[1,0] #m2 - n1\nBigSigma[3,1] = Sigma2[1,0]\nBigSigma[1,4] = Sigma2[1,1] #m2 - n2\nBigSigma[4,1] = Sigma2[1,1]\nBigSigma[2,5] = Sigma2[2,2] #m3 - n3\nBigSigma[5,2] = Sigma2[2,2]\nBigSigma[2,3] = Sigma2[2,0] #m3 - n1\nBigSigma[3,2] = Sigma2[2,0]\nBigSigma[0,5] = Sigma2[0,2] #m1 - n3\nBigSigma[5,0] = Sigma2[0,2]\nBigSigma[1,5] = Sigma2[1,2] #m2 - n3\nBigSigma[5,1] = Sigma2[1,2]\nBigSigma[2,4] = Sigma2[2,1] #m3 - n2\nBigSigma[4,2] = Sigma2[2,1]\n\n\nmean = np.zeros(6)\n\nDat = np.random.multivariate_normal(mean, BigSigma, size=1000)\nM = Dat[:,0:3]\nN = Dat[:,3:].T\n\n\nS=10\n\nfig = plt.figure(figsize=[4.2, 4.2], dpi=450)\n\ngs = GridSpec(7,7)\n\nax_joint00 = fig.add_subplot(gs[1:3,0:2])\nax_joint01 = fig.add_subplot(gs[1:3,2:4])\nax_joint02 = fig.add_subplot(gs[1:3,4:6])\n\nax_joint10 = fig.add_subplot(gs[3:5,0:2])\nax_joint11 = fig.add_subplot(gs[3:5,2:4])\nax_joint12 = fig.add_subplot(gs[3:5,4:6])\n\nax_joint20 = fig.add_subplot(gs[5:7,0:2])\nax_joint21 = fig.add_subplot(gs[5:7,2:4])\nax_joint22 = fig.add_subplot(gs[5:7,4:6])\n\nax_marg_x0 = fig.add_subplot(gs[0,0:2])\nax_marg_x1 = fig.add_subplot(gs[0,2:4])\nax_marg_x2 = fig.add_subplot(gs[0,4:6])\n\nax_marg_y0 = fig.add_subplot(gs[1:3,6])\nax_marg_y1 = fig.add_subplot(gs[3:5,6])\nax_marg_y2 = fig.add_subplot(gs[5:7,6])\n\nmyl = -12\nmyl2 = 12\n\nax_joint00.scatter(M[:,0], N[0,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nax_joint00.plot(ms, Sigma[0,0]*ms, '--', c='k', lw=1)\nax_joint00.set_xlim([-3,3])\nax_joint00.set_xticks([-2., 0, 2.])\nax_joint00.set_xticklabels(['','',''])\nax_joint00.set_ylim([myl,myl2])\nax_joint00.set_yticks([-10, 0, 10])\nax_joint00.set_ylabel(r'$n^{\\left(1\\right)}_i$')\nax_joint00.spines['top'].set_visible(False)\nax_joint00.spines['right'].set_visible(False)\nax_joint00.yaxis.set_ticks_position('left')\nax_joint00.xaxis.set_ticks_position('bottom')\n \nax_joint01.scatter(M[:,1], N[0,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nax_joint01.plot(ms, Sigma[0,1]*ms, '--', c='k', lw=1)\nax_joint01.spines['top'].set_visible(False)\nax_joint01.spines['right'].set_visible(False)\nax_joint01.yaxis.set_ticks_position('left')\nax_joint01.xaxis.set_ticks_position('bottom')\nax_joint01.set_ylim([myl,myl2])\nax_joint01.set_yticks([-10, 0, 10])\nax_joint01.set_yticklabels(['','',''])\nax_joint01.set_xlim([-3,3])\nax_joint01.set_xticks([-2., 0, 2.])\nax_joint01.set_xticklabels(['','',''])\n\nax_joint02.scatter(M[:,2], N[0,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nax_joint02.plot(ms, Sigma[0,2]*ms, '--', c='k', 
lw=1)\nax_joint02.spines['top'].set_visible(False)\nax_joint02.spines['right'].set_visible(False)\nax_joint02.yaxis.set_ticks_position('left')\nax_joint02.xaxis.set_ticks_position('bottom')\nax_joint02.set_ylim([myl,myl2])\nax_joint02.set_yticks([-10, 0, 10])\nax_joint02.set_yticklabels(['','',''])\nax_joint02.set_xlim([-3,3])\nax_joint02.set_xticks([-2., 0, 2.])\n\nax_joint02.set_xticklabels(['','',''])\n\nax_joint10.scatter(M[:,0], N[1,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nax_joint10.plot(ms, Sigma[1,0]*ms, '--', c='k', lw=1)\nax_joint10.set_xlim([-3,3])\nax_joint10.spines['top'].set_visible(False)\nax_joint10.spines['right'].set_visible(False)\nax_joint10.yaxis.set_ticks_position('left')\nax_joint10.xaxis.set_ticks_position('bottom')\nax_joint10.set_ylim([myl,myl2])\nax_joint10.set_yticks([-10, 0, 10])\nax_joint10.set_xlim([-3,3])\nax_joint10.set_xticks([-2., 0, 2.])\nax_joint10.set_xticklabels(['','',''])\nax_joint10.set_ylabel(r'$n^{\\left(2\\right)}_i$')\n\nax_joint11.scatter(M[:,1], N[1,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nax_joint11.plot(ms, Sigma[1,1]*ms, '--', c='k', lw=1)\nax_joint11.set_xlim([-3,3])\nax_joint11.spines['top'].set_visible(False)\nax_joint11.spines['right'].set_visible(False)\nax_joint11.set_ylim([myl,myl2])\nax_joint11.set_yticks([-10, 0, 10])\nax_joint11.set_xticks([-2., 0, 2.])\nax_joint11.set_xlim([-3,3])\nax_joint11.set_yticklabels(['','',''])\nax_joint11.set_xticklabels(['','',''])\nax_joint11.yaxis.set_ticks_position('left')\nax_joint11.xaxis.set_ticks_position('bottom')\n\n\nax_joint12.scatter(M[:,2], N[1,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nax_joint12.plot(ms, Sigma[1,2]*ms, '--', c='k', lw=1)\nax_joint12.set_xlim([-3,3])\nax_joint12.spines['top'].set_visible(False)\nax_joint12.spines['right'].set_visible(False)\nax_joint12.set_ylim([myl,myl2])\nax_joint12.set_yticks([-10, 0, 10])\nax_joint12.set_xticks([-2., 0, 2.])\nax_joint12.set_xlim([-3,3])\nax_joint12.set_yticklabels(['','',''])\nax_joint12.set_xticklabels(['','',''])\nax_joint12.yaxis.set_ticks_position('left')\nax_joint12.xaxis.set_ticks_position('bottom')\n\nax_joint20.scatter(M[:,0], N[2,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nax_joint20.plot(ms, Sigma[2,0]*ms, '--', c='k', 
lw=1)\nax_joint22.set_xlim([-3,3])\nax_joint22.spines['top'].set_visible(False)\nax_joint22.spines['right'].set_visible(False)\nax_joint22.set_ylim([myl,myl2])\nax_joint22.set_yticks([-10, 0, 10])\nax_joint22.set_xticks([-2., 0, 2.])\nax_joint22.set_xlim([-3,3])\nax_joint22.set_yticklabels(['','',''])\nax_joint22.yaxis.set_ticks_position('left')\nax_joint22.xaxis.set_ticks_position('bottom')\nax_joint22.set_xlabel(r'$m^{\\left(3\\right)}_i$')\n\n\nax_marg_x0.hist(M[:,0], nbins, alpha=0.5, density=True)\nss = 1.\nax_marg_x0.plot(ms, (1/np.sqrt(2*np.pi*ss**2))*np.exp(-(ms)**2/(2*ss**2)), 'k')\n\nax_marg_x0.spines['top'].set_visible(False)\nax_marg_x0.spines['right'].set_visible(False)\nax_marg_x0.spines['left'].set_visible(False)\nax_marg_x0.yaxis.set_ticks_position('left')\nax_marg_x0.xaxis.set_ticks_position('bottom')\nax_marg_x0.set_xlim([-3,3])\nax_marg_x0.set_xticks([-2., 0, 2.])\nax_marg_x0.set_ylim([0,0.45])\nax_marg_x0.set_xticklabels(['','',''])\nax_marg_x0.set_yticks([1])\n\nax_marg_x1.hist(M[:,1], nbins, alpha=0.5, density=True)\nss = 1.\nax_marg_x1.plot(ms, (1/np.sqrt(2*np.pi*ss**2))*np.exp(-(ms)**2/(2*ss**2)), 'k')\nax_marg_x1.spines['top'].set_visible(False)\nax_marg_x1.spines['right'].set_visible(False)\nax_marg_x1.spines['left'].set_visible(False)\nax_marg_x1.yaxis.set_ticks_position('left')\nax_marg_x1.xaxis.set_ticks_position('bottom')\nax_marg_x1.set_xlim([-3,3])\nax_marg_x1.set_ylim([0,0.45])\nax_marg_x1.set_xticks([-2., 0, 2.])\nax_marg_x1.set_xticklabels(['','',''])\nax_marg_x1.set_yticks([1])\n\nax_marg_x2.hist(M[:,2], nbins, alpha=0.5, density=True)\nss = 1.\nax_marg_x2.plot(ms, (1/np.sqrt(2*np.pi*ss**2))*np.exp(-(ms)**2/(2*ss**2)), 'k')\nax_marg_x2.spines['top'].set_visible(False)\nax_marg_x2.spines['right'].set_visible(False)\nax_marg_x2.spines['left'].set_visible(False)\nax_marg_x2.yaxis.set_ticks_position('left')\nax_marg_x2.xaxis.set_ticks_position('bottom')\nax_marg_x2.set_xlim([-3,3])\nax_marg_x2.set_ylim([0,0.45])\nax_marg_x2.set_xticks([-2., 0, 2.])\nax_marg_x2.set_xticklabels(['','',''])\nax_marg_x2.set_yticks([1])\nss2 = 0\nms = np.linspace(-10,10,100)\nax_marg_y0.hist(N[0,:], nbins, orientation=\"horizontal\", alpha=0.5, density=True)\nss = np.sqrt(valN)\nax_marg_y0.plot((1/np.sqrt(2*np.pi*ss**2))*np.exp(-(ms)**2/(2*ss**2)), ms, 'k')\nax_marg_y0.spines['top'].set_visible(False)\nax_marg_y0.spines['right'].set_visible(False)\nax_marg_y0.spines['bottom'].set_visible(False)\nax_marg_y0.yaxis.set_ticks_position('left')\nax_marg_y0.xaxis.set_ticks_position('bottom')\nax_marg_y0.set_ylim([myl,myl2])\nax_marg_y0.set_xlim([0,0.45])\nax_marg_y0.set_yticks([-10, 0, 10])\nax_marg_y0.set_yticklabels(['','',''])\nax_marg_y0.set_xticks([1])\nax_marg_y0.set_xticklabels([''])\n\nax_marg_y1.hist(N[1,:], nbins, orientation=\"horizontal\", alpha=0.5, density=True)\nss = np.sqrt(valN)\nax_marg_y1.plot((1/np.sqrt(2*np.pi*ss**2))*np.exp(-(ms)**2/(2*ss**2)), ms, 'k')\nax_marg_y1.spines['top'].set_visible(False)\nax_marg_y1.spines['right'].set_visible(False)\nax_marg_y1.spines['bottom'].set_visible(False)\nax_marg_y1.yaxis.set_ticks_position('left')\nax_marg_y1.xaxis.set_ticks_position('bottom')\nax_marg_y1.set_ylim([myl,myl2])\nax_marg_y1.set_xlim([0,0.45])\nax_marg_y1.set_yticks([-10, 0, 10])\nax_marg_y1.set_yticklabels(['','',''])\nax_marg_y1.set_xticks([1])\nax_marg_y1.set_xticklabels([''])\n\nax_marg_y2.hist(N[1,:], nbins, orientation=\"horizontal\", alpha=0.5, density=True)\nss = np.sqrt(valN)\nax_marg_y2.plot((1/np.sqrt(2*np.pi*ss**2))*np.exp(-(ms)**2/(2*ss**2)), ms, 
'k')\nax_marg_y2.spines['top'].set_visible(False)\nax_marg_y2.spines['right'].set_visible(False)\nax_marg_y2.spines['bottom'].set_visible(False)\nax_marg_y2.yaxis.set_ticks_position('left')\nax_marg_y2.xaxis.set_ticks_position('bottom')\nax_marg_y2.set_ylim([myl,myl2])\nax_marg_y2.set_xlim([0,0.45])\nax_marg_y2.set_yticks([-10, 0, 10])\nax_marg_y2.set_yticklabels(['','',''])\nax_marg_y2.set_xticks([1])\nax_marg_y2.set_xticklabels([''])\n\nplt.savefig('Th_Fig2_4__A.pdf')\n\n#%%\nplt.rcParams[\"axes.grid\"] = False\nfig = plt.figure(figsize = [3.0, 3.0])\nax = fig.add_subplot(111) \nplt.imshow(Sigma, cmap='coolwarm', vmin = -4, vmax = 4)\nSigma22 = np.zeros_like(Sigma)\nSigma22[np.abs(Sigma)>0]=np.nan\nplt.imshow(Sigma22, cmap='OrRd', vmin = 0, vmax = 4)\n\n\nax.tick_params(color='white')\n\n\nfor i in range(np.shape(Sigma)[0]):\n for j in range(np.shape(Sigma)[1]):\n ax.text(i, j, str(Sigma[j,i]), va='center', ha='center', fontsize=16)\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.spines['left'].set_visible(False)\nax.spines['bottom'].set_visible(False)\n\nax.yaxis.set_ticks_position('right')\nax.xaxis.set_ticks_position('top') \nax.set_xticks([0, 1,2])\nax.set_yticks([0, 1,2])\n\n\nax.set_xticklabels([r'$m_i^{\\left(1\\right)}$', r'$m_i^{\\left(2\\right)}$', r'$m_i^{\\left(3\\right)}$' ], fontsize=14)\nax.set_yticklabels([r'$n_i^{\\left(1\\right)}$', r'$n_i^{\\left(2\\right)}$', r'$n_i^{\\left(3\\right)}$' ], fontsize=14)\n\nplt.savefig('Th_Fig2_4__B.pdf')\n\n#%%\nfig = plt.figure()\nfrom mpl_toolkits.mplot3d import Axes3D\nax = fig.add_subplot(111, projection='3d',azim=-42, elev=21)\n\n\nu, vv = np.linalg.eig(Sigma)\nvv = np.dot(vv, np.diag(u))\nl1 = -0.5\nl2 = 2.\ncC = np.array((1, 1, 1,))*0.3\n\nfrom matplotlib.patches import FancyArrowPatch\nfrom mpl_toolkits.mplot3d import proj3d\n\nclass Arrow3D(FancyArrowPatch):\n def __init__(self, xs, ys, zs, *args, **kwargs):\n FancyArrowPatch.__init__(self, (0,0), (0,0), *args, **kwargs)\n self._verts3d = xs, ys, zs\n\n def draw(self, renderer):\n xs3d, ys3d, zs3d = self._verts3d\n xs, ys, zs = proj3d.proj_transform(xs3d, ys3d, zs3d, renderer.M)\n self.set_positions((xs[0],ys[0]),(xs[1],ys[1]))\n FancyArrowPatch.draw(self, renderer)\n\n# mean values\nmean_x = 0\nmean_y = 0\nmean_z = 0\n\ncounter = 0\nfor v in vv[:,1:3].T:\n #ax.plot([mean_x,v[0]], [mean_y,v[1]], [mean_z,v[2]], color='red', alpha=0.8, lw=3)\n #I will replace this line with:\n v1 = np.real(v)\n v1 = v1/np.sqrt(np.sum(v1**2))\n if np.imag(u[counter])==0:\n v1 = v1*np.real(u[counter])\n \n a = Arrow3D([mean_x, v1[0]], [mean_y, v1[1]], \n [mean_z, v1[2]], mutation_scale=10, \n lw=2, arrowstyle=\"-|>\", color=cC)\n ax.add_artist(a)\n #ax.plot([0,v1[0]], [0,v1[1]], [0, v1[2]], 'C0')\n #ax.scatter(v1[0], v1[1], v1[2],s=100)\n if np.min(np.abs(v1))>0.:\n ax.plot([0,v1[0]], [0,v1[1]], [0, 0], 'k--', lw=0.5)\n ax.plot([v1[0],v1[0]], [v1[1],v1[1]], [0, v1[2]], 'k--', lw=0.5)\n print(v1)\n \n #ax.plot([v1[0], v1[1], 0], [v1[0], v1[1], v1[2]], '--k')\n \n v2 = np.imag(v)\n if np.sqrt(np.sum(v2**2))>0:\n print('hey')\n v2 = v2/np.sqrt(np.sum(v2**2))\n #v2 = v2*np.real(u[counter])\n print(v2)\n b = Arrow3D([mean_x, v2[0]], [mean_y, v2[1]], \n [mean_z, (v2[2])], mutation_scale=10, \n lw=2, arrowstyle=\"-|>\", color=cC)\n ax.add_artist(b)\n #ax.scatter(v2[0], v2[1], v2[2],s=100)\n if np.min(np.abs(v2))>0.:\n ax.plot([0,v2[0]], [0,v2[1]], [0, 0], 'k--')\n ax.plot([v2[0],v2[0]], [v2[1],v2[1]], [0, v2[2]], 'k--')\n #ax.plot([0,0,0], [v2[0], v2[1], v2[2]], 'C1')\n 
#print(v2)\n v1s = v1\n v2s = v2\n counter+=1\n \nxx, yy = np.meshgrid(np.linspace(-1.5, 1.5), np.linspace(-1.5, 1.5))\n\ncvs = np.cross(v1s, v2s)\n\nzz = 0*(1/cvs[2])*(-cvs[0]*xx-cvs[1]*yy)\n\nax.plot_surface(xx, yy, zz, alpha=0.2)\nax.set_xlabel('x_values')\nax.set_ylabel('y_values')\nax.set_zlabel('z_values')\n#ax.set_xlim([-0.5,0.5])\n#ax.set_ylim([-0.5,0.5])\n#ax.set_zlim([-0.5,1.])\nax.text(1.25, -0.6, 0., r'$ Re(\\bf{u}_2)$', fontsize = 12)\nax.text(-3., 0.2, 0.0, r'$Im(\\bf{u}_2)$', fontsize = 12)\n\nax.text(-1.9, 0.3, 0.5, r'$\\lambda_1 \\bf{u}_1$', fontsize = 12)\n\n\n\nax.axis('off')\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.spines['left'].set_visible(False)\nax.spines['bottom'].set_visible(False)\n\nplt.draw()\n\nplt.savefig('Th_Fig2_4__B1.pdf')\nplt.show()\n\n#%%\n#\nfig = plt.figure()\nax = fig.add_subplot(111, projection='3d',azim=-72, elev=7) \n\nplt.xlabel('$\\kappa_1$')\nplt.ylabel('$\\kappa_2$')\nax.set_zlabel('$\\kappa_3$')\n\n\n\n\nNn = 1000\n\n\nMu= np.zeros((6,1))\n\ninkap1 = np.linspace(-0.8, 0.8, 2)\ninkap2 = np.linspace(-0.8, 0.800, 2)\ninkap3 = np.linspace(-0.8, 0.800, 2)\n\n\ndt = 0.2\ntime = np.arange(0, 80, dt)\n\nins = np.array(([1,0,0],[1., 0, 0.2],[1., 0, -0.2]))\n\ncolors = np.zeros((3,3))\n\ncolors[1,:] = 0.6*(0.4+0.1*np.random.rand(3)-0.1)+0.4*np.array((1,0,0))\ncolors[0,:] = 0.4#0.8*(0.6+0.2*np.random.rand(3)-0.1)+np.array((1,0,0))\ncolors[2,:] = 0.6*(0.4+0.1*np.random.rand(3)-0.1)+0.4*np.array((0,0,1))\n\nfor trials in range(1):\n cC = 0.6+0.2*np.random.rand()-0.1\n \n cC2 = 0.1+0.2*np.random.rand()-0.1\n \n for rr in range(np.shape(ins)[0]):\n sk1 = np.zeros_like(time)\n sk2 = np.zeros_like(time)\n sk3 = np.zeros_like(time)\n \n \n sk1[0] = ins[rr,0]\n sk2[0] = ins[rr,1]\n sk3[0] = ins[rr,2]\n \n x0 = np.array((sk1[0], sk2[0], sk3[0]))\n print(sk3[0])\n for it, ti in enumerate(time[:-1]):\n \n x = x0 + dt*(-x0 + Prime(0, np.sum(x0**2))*np.dot(Sigma, x0))\n sk1[it+1] = x[0]\n sk2[it+1] = x[1]\n sk3[it+1] = x[2]\n \n x0 = x\n \n ax.plot(sk1, sk2, sk3, c=colors[rr,:])\n ax.scatter(sk1[0], sk2[0], sk3[0], s=10, facecolor=colors[rr,:], edgecolor=colors[rr,:], )\n if np.abs(sk3[-1])>0.1:\n ax.scatter(sk1[-1], sk2[-1], sk3[-1], s=50, facecolor=[0.6, 0.6, 0.6], edgecolor='k')\n save = np.array((sk1[-1], sk2[-1], sk3[-1]))\n\nax.scatter(save[0], save[1], save[2], s=70, facecolor=[0.6, 0.6, 0.6], edgecolor='k')\nax.scatter(-save[0], -save[1], -save[2], s=70, facecolor=[0.6, 0.6, 0.6], edgecolor='k')\n \nax.set_xticks([-1, 0, 1])\nax.set_yticks([-0.5, 0.5])\nax.set_zticks([-0.5, 0, 0.5])\n\nax.dist=11\nplt.savefig('Th_Fig2_4__C.pdf') \n\n#%%\n#\nfig = plt.figure()\nax = fig.add_subplot(111, projection='3d',azim=-72, elev=7) \n\nplt.xlabel('$\\kappa_1$')\nplt.ylabel('$\\kappa_2$')\nax.set_zlabel('$\\kappa_3$')\n\nNn = 1000\n\n\nMu= np.zeros((6,1))\n\ninkap1 = np.linspace(-0.8, 0.8, 2)\ninkap2 = np.linspace(-0.8, 0.800, 2)\ninkap3 = np.linspace(-0.8, 0.800, 2)\n\n\ndt = 0.2\ntime = np.arange(0, 80, dt)\n\nfor trials in range(2):\n try_s0 = 100\n for tr in range(2000):\n XX = np.random.multivariate_normal(Mu[:,0], BigSigma, size=Nn)\n try_s = np.sum((np.dot(XX.T,XX)/1000-BigSigma)**2)\n if try_s < try_s0:\n #print(try_s)\n try_s0 = try_s\n XX_s = XX\n M = XX_s[:,0:3]\n N = XX_s[:,3:6]\n \n J = np.dot(M, N.T)/Nn\n \n cC = 0.6+0.2*np.random.rand()-0.1\n \n cC2 = 0.1+0.2*np.random.rand()-0.1\n \n for rr in range(np.shape(ins)[0]):\n sk1 = np.zeros_like(time)\n sk2 = np.zeros_like(time)\n sk3 = np.zeros_like(time)\n \n \n sk1[0] = 
ins[rr,0]\n sk2[0] = ins[rr,1]\n sk3[0] = ins[rr,2]\n \n \n \n x0 = ins[rr,0]*M[:,0] + ins[rr,1]*M[:,1]+ins[rr,2]*M[:,2]\n sk1[0] = np.mean(M[:,0]*x0)\n sk2[0] = np.mean(M[:,1]*x0)\n sk3[0] = np.mean(M[:,2]*x0)\n \n for it, ti in enumerate(time[:-1]):\n x = x0 + dt*(-x0 + np.dot(J, np.tanh(x0)))\n sk1[it+1] = np.mean(M[:,0]*x)\n sk2[it+1] = np.mean(M[:,1]*x)\n sk3[it+1] = np.mean(M[:,2]*x)\n \n x0 = x\n ax.plot(sk1, sk2, sk3, c=colors[rr,:])\n ax.scatter(sk1[0], sk2[0], sk3[0], s=10, facecolor=colors[rr,:])\n ax.scatter(sk1[-1], sk2[-1], sk3[-1], s=50, facecolor=colors[rr,:])\n \n \nax.scatter(save[0], save[1], save[2], s=70, facecolor=[0.6, 0.6, 0.6], edgecolor='k')\nax.scatter(-save[0], -save[1], -save[2], s=70, facecolor=[0.6, 0.6, 0.6], edgecolor='k')\n \nax.set_xticks([-1, 0, 1])\nax.set_yticks([-0.5, 0.5])\nax.set_zticks([-0.5, 0, 0.5])\n\n\nax.dist=11\nplt.savefig('Th_Fig2_4__D.pdf') \n# \n" }, { "alpha_fraction": 0.5573328137397766, "alphanum_fraction": 0.6126583814620972, "avg_line_length": 31.008031845092773, "blob_id": "5dbfcfcee158be559db423e4dfc413ab7dad8fb3", "content_id": "96cbaa6ce7a3f10b0a73b43efc476a6acf633792", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 15942, "license_type": "no_license", "max_line_length": 131, "num_lines": 498, "path": "/C_Fig1B.py", "repo_name": "emebeiran/low-rank2020", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Mar 30 10:49:59 2020\n\n@author: mbeiran\n\"\"\"\n#%%\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport lib_rnns as lr\nfrom matplotlib.gridspec import GridSpec\naa = lr.set_plot()\n \ngaussian_norm = (1/np.sqrt(np.pi))\ngauss_points, gauss_weights = np.polynomial.hermite.hermgauss(200)\ngauss_points = gauss_points*np.sqrt(2)\n\ndef Phi (mu, delta0):\n integrand = np.tanh(mu+np.sqrt(delta0)*gauss_points)\n return gaussian_norm * np.dot (integrand,gauss_weights)\ndef Prime (mu, delta0):\n integrand = 1 - (np.tanh(mu+np.sqrt(delta0)*gauss_points))**2\n return gaussian_norm * np.dot (integrand,gauss_weights)\n\n\n#%%\n# =============================================================================\n# Fig 2\n# =============================================================================\nms = np.linspace(-8,8,300)\n\nMuMs = np.zeros((2,2))\nMuNs = np.zeros((2,2))\n\nSigma = np.zeros((2,2,2))+1e-10\nSigma[0,0,0] = 1.2\nSigma[1,1,0] = -1#1.6\nSigma[1,0,0] = 0.5\nSigma[0,1,0] = -0.5\n\n\nMuMs[0,0] = 0.5#1.\nMuMs[1,0] = -0.5#1.\nMuNs[0,0] = 1.5\nMuNs[1,0] = 3.\n\n\nSigma[0,0,1] = -0.3\nSigma[1,1,1] = 1.\nSigma[0,1,1] = 1.2\nSigma[1,1,1] = 0.8\n\nMuMs[0,1] = -0.5#0.5\nMuMs[1,1] = 0.5#-0.8\nMuNs[0,1] = -1.5#1.\nMuNs[1,1] = -3#-1.\n#\nfig = plt.figure(figsize=[3.2, 3.2], dpi=450)\ngs = GridSpec(5,5)\n\nax_joint00 = fig.add_subplot(gs[1:3,0:2])\nax_joint01 = fig.add_subplot(gs[1:3,2:4])\nax_joint10 = fig.add_subplot(gs[3:5,0:2])\nax_joint11 = fig.add_subplot(gs[3:5,2:4])\n\nax_marg_x0 = fig.add_subplot(gs[0,0:2])\nax_marg_x1 = fig.add_subplot(gs[0,2:4])\n\nax_marg_y0 = fig.add_subplot(gs[1:3,4])\nax_marg_y1 = fig.add_subplot(gs[3:5,4])\n\npops = 2\nNs = 100\nM = np.random.randn(Ns*pops,2)\nN = np.random.randn(2,Ns*pops)\npops=2\nBigSigma = np.zeros((4,4, pops))\nfor ip in range(pops):\n \n S=10\n \n M = M/np.std(M,0)\n ss2 = 0.3\n \n BigSigma[0,0,ip] = 1.-np.mean(MuMs[0, :]**2)\n BigSigma[1,1,ip] = 1.-np.mean(MuMs[1, :]**2)\n \n BigSigma[0,2,ip] = Sigma[0,0,ip]\n BigSigma[0,3,ip] = Sigma[0,1,ip]\n BigSigma[2,0,ip] = 
Sigma[0,0,ip]\n BigSigma[3,0,ip] = Sigma[0,1,ip]\n \n BigSigma[1,2,ip] = Sigma[1,0, ip]\n BigSigma[1,3,ip] = Sigma[1,1, ip]\n BigSigma[2,1,ip] = Sigma[1,0, ip]\n BigSigma[3,1,ip] = Sigma[1,1, ip]\n \n BigSigma[2,2,ip] = 1.\n BigSigma[3,3,ip] = 1.\n \n sol = np.min(np.real(np.linalg.eigvalsh(BigSigma[:,:,ip])))\n \n \n while sol<0:\n BigSigma[2,2] *= 1.01\n BigSigma[3,3] *= 1.01\n sol = np.min(np.real(np.linalg.eigvals(BigSigma[:,:,ip])))\n BigMean = 0*np.hstack((MuMs[:,ip], MuNs[:,ip]))\n \n err0 = 100.\n Dat = np.random.multivariate_normal(BigMean, BigSigma[:,:,ip], Ns)\n DatS = Dat\n for trials in range(200):\n Dat = np.random.multivariate_normal(BigMean, BigSigma[:,:,ip], Ns)\n err1 = np.std(np.cov(Dat.T)-BigSigma[:,:,ip])\n if err1<err0:\n print(err1)\n err0 = err1\n DatS = Dat\n \n Dat = DatS\n BigMean = np.hstack((MuMs[:,ip], MuNs[:,ip]))\n for i in range(4):\n Dat[:,i] = Dat[:,i]+BigMean[i]-np.mean(Dat[:,i])\n \n \n M[ip*Ns:(ip+1)*Ns,0] = Dat[:,0]\n M[ip*Ns:(ip+1)*Ns,1] = Dat[:,1]\n \n N[0,ip*Ns:(ip+1)*Ns] = Dat[:,2]\n N[1,ip*Ns:(ip+1)*Ns] = Dat[:,3]\n\n\nssiz = 30\n\ncolor1 = np.array((31, 127, 17))/256\ncolor2 = np.array((129, 34, 141))/256\nclrs = np.zeros((3,2))\nclrs[:,0] = color1\nclrs[:,1] = color2\npops=2\n\nnbins=20\nfor ip in range(pops):\n ax_joint00.scatter(M[ip*Ns:(ip+1)*Ns,0], N[0,ip*Ns:(ip+1)*Ns], s=S, color = clrs[:,ip], alpha=0.5, rasterized=True)\n ax_joint00.plot(ms, np.sqrt(BigSigma[2,2,ip]/BigSigma[0,0,ip])*Sigma[0,0,ip]*(ms- MuMs[0,ip]) +MuNs[0,ip], '--', c='k', lw=0.5)\n ax_joint00.scatter( MuMs[0,ip], +MuNs[0,ip], s=ssiz, edgecolor='k', facecolor= 'w',zorder=5)\n \n ax_joint00.set_xlim([-4.5,4.5])\n ax_joint00.set_xticks([-4., 0, 4.])\n ax_joint00.set_xticklabels(['','',''])\n ax_joint00.set_ylim([-6.5,6.5])\n ax_joint00.set_yticks([-6, 0, 6])\n ax_joint00.set_ylabel(r'$n^{\\left(1\\right)}_i$')\n ax_joint00.spines['top'].set_visible(False)\n ax_joint00.spines['right'].set_visible(False)\n ax_joint00.yaxis.set_ticks_position('left')\n ax_joint00.xaxis.set_ticks_position('bottom')\n \n ax_joint01.scatter(M[ip*Ns:(ip+1)*Ns,1], N[0,ip*Ns:(ip+1)*Ns], s=S, color = clrs[:,ip],alpha=0.5, rasterized=True)\n ax_joint01.plot(ms, np.sqrt(BigSigma[2,2,ip]/BigSigma[1,1,ip])*Sigma[1,0,ip]*(ms- MuMs[1,ip]) +MuNs[0,ip], '--', c='k', lw=0.5)\n ax_joint01.scatter( MuMs[1,ip], +MuNs[0,ip], s=ssiz, edgecolor='k', facecolor= 'w',zorder=5)\n ax_joint01.spines['top'].set_visible(False)\n ax_joint01.spines['right'].set_visible(False)\n ax_joint01.yaxis.set_ticks_position('left')\n ax_joint01.xaxis.set_ticks_position('bottom')\n ax_joint01.set_ylim([-6.5,6.5])\n ax_joint01.set_yticks([-6, 0, 6])\n ax_joint01.set_yticklabels(['','',''])\n ax_joint01.set_xlim([-4.5,4.5])\n ax_joint01.set_xticks([-4., 0, 4.])\n ax_joint01.set_xticklabels(['','',''])\n \n ax_joint10.scatter(M[ip*Ns:(ip+1)*Ns,0], N[1,ip*Ns:(ip+1)*Ns], s=S, color = clrs[:,ip],alpha=0.5, rasterized=True)\n ax_joint10.plot(ms, np.sqrt(BigSigma[3,3,ip]/BigSigma[0,0,ip])*Sigma[0,1,ip]*(ms- MuMs[0,ip]) +MuNs[1,ip], '--', c='k', lw=0.5)\n ax_joint10.scatter( MuMs[0,ip], +MuNs[1,ip], s=ssiz, edgecolor='k', facecolor= 'w',zorder=5)\n ax_joint10.spines['top'].set_visible(False)\n ax_joint10.spines['right'].set_visible(False)\n ax_joint10.yaxis.set_ticks_position('left')\n ax_joint10.xaxis.set_ticks_position('bottom')\n ax_joint10.set_ylim([-6.5,6.5])\n ax_joint10.set_yticks([-6, 0, 6])\n ax_joint10.set_xlim([-4.5,4.5])\n ax_joint10.set_xticks([-4., 0, 4.])\n\n ax_joint10.set_ylabel(r'$n^{\\left(2\\right)}_i$')\n 
ax_joint10.set_xlabel(r'$m^{\\left(1\\right)}_i$')\n \n ax_joint11.scatter(M[ip*Ns:(ip+1)*Ns,1], N[1,ip*Ns:(ip+1)*Ns], s=S, color = clrs[:,ip],alpha=0.5, rasterized=True)\n ax_joint11.plot(ms, np.sqrt(BigSigma[3,3,ip]/BigSigma[1,1,ip])*Sigma[1,1,ip]*(ms- MuMs[1,ip]) +MuNs[1,ip], '--', c='k', lw=0.5)\n ax_joint11.scatter( MuMs[1,ip], +MuNs[1,ip], s=ssiz, edgecolor='k', facecolor= 'w',zorder=5)\n ax_joint11.spines['top'].set_visible(False)\n ax_joint11.spines['right'].set_visible(False)\n ax_joint11.set_ylim([-6.5,6.5])\n ax_joint11.set_yticks([-6, 0, 6])\n ax_joint11.set_xlim([-4.5,4.5])\n ax_joint11.set_xticks([-4., 0, 4.])\n \n ax_joint11.set_yticklabels(['','',''])\n ax_joint11.yaxis.set_ticks_position('left')\n ax_joint11.xaxis.set_ticks_position('bottom')\n ax_joint11.set_xlabel(r'$m^{\\left(2\\right)}_i$')\n \n ax_marg_x0.hist(M[ip*Ns:(ip+1)*Ns,0], nbins, color = clrs[:,ip], alpha=0.5, density=True)\n ss = BigSigma[0,0,ip]\n ax_marg_x0.plot(ms, (1/np.sqrt(2*np.pi*ss))*np.exp(-(ms- MuMs[0,ip])**2/(2*ss)), color = clrs[:,ip],lw=0.5)\n \n ax_marg_x0.spines['top'].set_visible(False)\n ax_marg_x0.spines['right'].set_visible(False)\n ax_marg_x0.spines['left'].set_visible(False)\n ax_marg_x0.yaxis.set_ticks_position('left')\n ax_marg_x0.xaxis.set_ticks_position('bottom')\n ax_marg_x0.set_xlim([-4.5,4.5])\n ax_marg_x0.set_ylim([0,0.6])\n ax_marg_x0.set_xticks([-4., 0, 4.])\n ax_marg_x0.set_xticklabels(['','',''])\n ax_marg_x0.set_yticks([1])\n\n\n ax_marg_x1.hist(M[ip*Ns:(ip+1)*Ns,1], nbins,color = clrs[:,ip], alpha=0.5, density=True)\n ss = BigSigma[1,1,ip]\n ax_marg_x1.plot(ms, (1/np.sqrt(2*np.pi*ss))*np.exp(-(ms- MuMs[1,ip])**2/(2*ss)), color = clrs[:,ip],lw=0.5)\n ax_marg_x1.spines['top'].set_visible(False)\n ax_marg_x1.spines['right'].set_visible(False)\n ax_marg_x1.spines['left'].set_visible(False)\n ax_marg_x1.yaxis.set_ticks_position('left')\n ax_marg_x1.xaxis.set_ticks_position('bottom')\n ax_marg_x1.set_xlim([-4.5,4.5])\n ax_marg_x1.set_ylim([0,0.6])\n ax_marg_x1.set_xticks([-4., 0, 4.])\n ax_marg_x1.set_xticklabels(['','',''])\n ax_marg_x1.set_yticks([1])\n\n\n ax_marg_y0.hist(N[0,ip*Ns:(ip+1)*Ns], nbins, orientation=\"horizontal\", color = clrs[:,ip], alpha=0.5, density=True)\n ss = BigSigma[2,2,ip]\n ax_marg_y0.plot((1/np.sqrt(2*np.pi*ss))*np.exp(-(ms-MuNs[0,ip])**2/(2*ss)), ms, color = clrs[:,ip],lw=0.5)\n ax_marg_y0.spines['top'].set_visible(False)\n ax_marg_y0.spines['right'].set_visible(False)\n ax_marg_y0.spines['bottom'].set_visible(False)\n ax_marg_y0.yaxis.set_ticks_position('left')\n ax_marg_y0.xaxis.set_ticks_position('bottom')\n ax_marg_y0.set_ylim([-6.5,6.5])\n ax_marg_y0.set_xlim([0,0.45])\n ax_marg_y0.set_yticks([-6., 0, 6.])\n ax_marg_y0.set_yticklabels(['','',''])\n ax_marg_y0.set_xticks([1])\n ax_marg_y0.set_xticklabels([''])\n\n ax_marg_y1.hist(N[1,ip*Ns:(ip+1)*Ns], nbins, orientation=\"horizontal\", color = clrs[:,ip], alpha=0.5, density=True)\n ss = BigSigma[3,3,ip]\n ax_marg_y1.plot((1/np.sqrt(2*np.pi*ss))*np.exp(-(ms-MuNs[1,ip])**2/(2*ss)), ms, color = clrs[:,ip],lw=0.5)\n ax_marg_y1.spines['top'].set_visible(False)\n ax_marg_y1.spines['right'].set_visible(False)\n ax_marg_y1.spines['bottom'].set_visible(False)\n ax_marg_y1.yaxis.set_ticks_position('left')\n ax_marg_y1.xaxis.set_ticks_position('bottom')\n ax_marg_y1.set_ylim([-6.5,6.5])\n ax_marg_y1.set_xlim([0,0.45])\n ax_marg_y1.set_yticks([-6., 0, 6.])\n ax_marg_y1.set_yticklabels(['','',''])\n ax_marg_y1.set_xticks([1])\n ax_marg_y1.set_xticklabels([''])\n\nplt.savefig('Th_Fig0_A.pdf')\n\n#%%\nNneurs = 
6\n\nMadap = M[0:Nneurs*2,:]\nMadap[Nneurs:,:] = M[-Nneurs:,:]\n\nNadap = N[:,0:Nneurs*2]\nNadap[:,Nneurs:] = N[:, -Nneurs:]\nJ = np.dot(Madap, Nadap)/(Ns*pops)\nplt.rcParams[\"axes.grid\"] = False\nfig = plt.figure(figsize = [2.0, 2.0])\nax = fig.add_subplot(111) \n\nplt.imshow(J, cmap='coolwarm', vmin = -0.12, vmax = 0.12)\nJ22 = np.zeros_like(J)\nJ22[np.abs(J)>0]=np.nan\nplt.imshow(J22, cmap='OrRd', vmin = 0, vmax = 0.12)\n\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.spines['left'].set_visible(False)\nax.spines['bottom'].set_visible(False)\n\nax.yaxis.set_ticks_position('right')\nax.xaxis.set_ticks_position('top') \nax.set_xticks([])\nax.set_yticks([])\n\nplt.savefig('Th_Fig0_B_1.pdf')\n#%%\nfor ip in range(pops):\n fig = plt.figure(figsize = [0.2, 2.0])\n ax = fig.add_subplot(111) \n Madap1 = np.copy(Madap[:,ip,None])\n Madap2 = np.copy(Madap[:,ip,None])\n Madap1[Nneurs:,:] = np.nan\n Madap2[0:Nneurs,:] = np.nan\n Nadap1 = np.copy(Nadap[None,ip,:])\n Nadap2 = np.copy(Nadap[None,ip,:])\n Nadap1[:,Nneurs:] = np.nan\n Nadap2[:,0:Nneurs] = np.nan\n plt.imshow(Madap1, cmap='Greens')\n plt.imshow(Madap2, cmap='Purples')\n \n ax.spines['top'].set_visible(False)\n ax.spines['right'].set_visible(False)\n ax.spines['left'].set_visible(False)\n ax.spines['bottom'].set_visible(False)\n ax.yaxis.set_ticks_position('right')\n ax.xaxis.set_ticks_position('top') \n ax.set_xticks([])\n ax.set_yticks([])\n plt.savefig('Th_Fig0_B_m'+str(ip+1)+'.pdf')\n \n fig = plt.figure(figsize = [2.0, 0.2])\n ax = fig.add_subplot(111) \n plt.imshow(Nadap1, cmap='Greens')\n plt.imshow(Nadap2, cmap='Purples')\n \n \n ax.spines['top'].set_visible(False)\n ax.spines['right'].set_visible(False)\n ax.spines['left'].set_visible(False)\n ax.spines['bottom'].set_visible(False)\n ax.yaxis.set_ticks_position('right')\n ax.xaxis.set_ticks_position('top') \n ax.set_xticks([])\n ax.set_yticks([])\n plt.savefig('Th_Fig0_B_n'+str(ip+1)+'.pdf')\n\n#%%\n\n#%%\n\nfig = plt.figure(figsize = [ 2.0, 0.2])\nax = fig.add_subplot(111) \nMadap1 = np.copy(Madap[:,:])\nMadap2 = np.copy(Madap[:,:])\n\nMadap1[Nneurs:,:] = np.nan\nMadap2[0:Nneurs,:] = np.nan\nNadap1 = np.copy(Nadap)\nNadap2 = np.copy(Nadap)\nNadap1[:,Nneurs:] = np.nan\nNadap2[:,0:Nneurs] = np.nan\nplt.imshow(Madap1.T, cmap='Greens')\nplt.imshow(Madap2.T, cmap='Purples')\n\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.spines['left'].set_visible(False)\nax.spines['bottom'].set_visible(False)\nax.yaxis.set_ticks_position('right')\nax.xaxis.set_ticks_position('top') \nax.set_xticks([])\nax.set_yticks([])\nplt.savefig('Th_Fig0_C_m.pdf')\n\nfig = plt.figure(figsize = [0.2, 2.0])\nax = fig.add_subplot(111) \nplt.imshow(Nadap1.T, cmap='Greens')\nplt.imshow(Nadap2.T, cmap='Purples')\n\n \nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.spines['left'].set_visible(False)\nax.spines['bottom'].set_visible(False)\nax.yaxis.set_ticks_position('right')\nax.xaxis.set_ticks_position('top') \nax.set_xticks([])\nax.set_yticks([])\nplt.savefig('Th_Fig0_C_n.pdf')\n\n\n#%%\n\nfig = plt.figure(figsize = [ 2.0, 0.2])\nax = fig.add_subplot(111) \nMadap1 = np.copy(Madap[:,:])\nMadap2 = np.copy(Madap[:,:])\n\nMadap1[Nneurs:,:] = np.nan\nMadap2[0:Nneurs,:] = np.nan\nNadap1 = np.copy(Nadap)\nNadap2 = np.copy(Nadap)\nNadap1[:,Nneurs:] = np.nan\nNadap2[:,0:Nneurs] = np.nan\nplt.imshow(Madap1.T, cmap='Greens')\nplt.imshow(Madap2.T, 
cmap='Purples')\n\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.spines['left'].set_visible(False)\nax.spines['bottom'].set_visible(False)\nax.yaxis.set_ticks_position('right')\nax.xaxis.set_ticks_position('top') \nax.set_xticks([])\nax.set_yticks([])\nplt.savefig('Th_Fig0_C_m.pdf')\n#%%\nfig = plt.figure(figsize = [1., 2.0])\nax = fig.add_subplot(111) \nMadap1 = MuMs[:,0,None]\n\nplt.imshow(Madap1, cmap='Greens', vmin = -4, vmax=4)\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.spines['left'].set_visible(False)\nax.spines['bottom'].set_visible(False)\nax.yaxis.set_ticks_position('right')\nax.xaxis.set_ticks_position('top') \nax.set_xticks([])\nax.set_yticks([])\nplt.savefig('Th_Fig0_D_pop1_am.pdf')\n\nfig = plt.figure(figsize = [1,2.])\nax = fig.add_subplot(111) \nMadap1 = MuMs[:,1,None]\n\nplt.imshow(Madap1, cmap='Purples', vmin = -4, vmax=4)\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.spines['left'].set_visible(False)\nax.spines['bottom'].set_visible(False)\nax.yaxis.set_ticks_position('right')\nax.xaxis.set_ticks_position('top') \nax.set_xticks([])\nax.set_yticks([])\nplt.savefig('Th_Fig0_D_pop2_am.pdf')\n\nfig = plt.figure(figsize = [2.0,1.])\nax = fig.add_subplot(111) \nMadap1 = MuNs[:,0,None]\nplt.imshow(Madap1.T, cmap='Greens', vmin = -4, vmax=4)\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.spines['left'].set_visible(False)\nax.spines['bottom'].set_visible(False)\nax.yaxis.set_ticks_position('right')\nax.xaxis.set_ticks_position('top') \nax.set_xticks([])\nax.set_yticks([])\nplt.savefig('Th_Fig0_D_pop1_an.pdf')\n\nfig = plt.figure(figsize = [ 2.0, 1.])\nax = fig.add_subplot(111) \nMadap1 = MuNs[:,1,None]\n\nplt.imshow(Madap1.T, cmap='Purples', vmin = -4, vmax=4)\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.spines['left'].set_visible(False)\nax.spines['bottom'].set_visible(False)\nax.yaxis.set_ticks_position('right')\nax.xaxis.set_ticks_position('top') \nax.set_xticks([])\nax.set_yticks([])\nplt.savefig('Th_Fig0_D_pop2_an.pdf')\n\n#%%\nfig = plt.figure(figsize = [2., 2.0])\nax = fig.add_subplot(111) \nMadap1 = Sigma[:,:,0]\n\nplt.imshow(Madap1, cmap='Greens', vmin = -4, vmax=4)\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.spines['left'].set_visible(False)\nax.spines['bottom'].set_visible(False)\nax.yaxis.set_ticks_position('right')\nax.xaxis.set_ticks_position('top') \nax.set_xticks([])\nax.set_yticks([])\nplt.savefig('Th_Fig0_D_pop1_sigmas.pdf')\n\n#%%\nfig = plt.figure(figsize = [2., 2.0])\nax = fig.add_subplot(111) \nMadap1 = Sigma[:,:,1]\n\nplt.imshow(Madap1, cmap='Purples', vmin = -4, vmax=4)\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.spines['left'].set_visible(False)\nax.spines['bottom'].set_visible(False)\nax.yaxis.set_ticks_position('right')\nax.xaxis.set_ticks_position('top') \nax.set_xticks([])\nax.set_yticks([])\nplt.savefig('Th_Fig0_D_pop2_sigmas.pdf')\n\n\n" }, { "alpha_fraction": 0.49165141582489014, "alphanum_fraction": 0.5628568530082703, "avg_line_length": 27.922119140625, "blob_id": "4c223f88478dcfb15460bb43aeb6663377af0fce", "content_id": "1d5ea3140ff2cfb7c3d05105471548bcd9ff54f6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9283, "license_type": "no_license", "max_line_length": 134, "num_lines": 321, "path": "/C_Fig8_DEF.py", "repo_name": 
"emebeiran/low-rank2020", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Wed Oct 7 11:01:38 2020\n\n@author: mbeiran\n\"\"\"\n\n#%%\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport lib_rnns as lr\nfrom matplotlib.gridspec import GridSpec\naa = lr.set_plot()\n\n\n \ngaussian_norm = (1/np.sqrt(np.pi))\ngauss_points, gauss_weights = np.polynomial.hermite.hermgauss(200)\ngauss_points = gauss_points*np.sqrt(2)\n\ndef Phi (mu, delta0):\n integrand = np.tanh(mu+np.sqrt(delta0)*gauss_points)\n return gaussian_norm * np.dot (integrand,gauss_weights)\ndef Prime (mu, delta0):\n integrand = 1 - (np.tanh(mu+np.sqrt(delta0)*gauss_points))**2\n return gaussian_norm * np.dot (integrand,gauss_weights)\ndef Trip (mu, delta0):\n integrand = 2*(np.cosh(2*(mu+np.sqrt(delta0)*gauss_points))-2 )/np.cosh(mu+np.sqrt(delta0)*gauss_points)**4\n return gaussian_norm * np.dot (integrand,gauss_weights)\n\n\n#%%\n\nepss = np.linspace(1., 3, 7)#np.array((0, 0.1, 0.2, 0.4, 0.6, 1.0, 2.0))\n#epss = np.linspace(0., 2, 7)#np.array((0, 0.1, 0.2, 0.4, 0.6, 1.0, 2.0))\n\nfp1s = np.zeros_like(epss)\nfp2s = np.zeros_like(epss)\nus = np.zeros((2,len(epss)))*1j\nvs = np.zeros((2,2,len(epss)))*1j\n\nclS = np.zeros((3, len(epss)))\n\ncl2 = np.array(( 225/256, 74/256, 51/256))\ncl1 = np.array(( 0.5, 0.5, 0.5))\n\nnL = len(epss)-1\nfor iep, eps in enumerate(epss):\n clS[:,iep] = cl1* (nL-iep)/nL + cl2* (iep)/nL\n#%%\ndt = 0.05\nT = 40\ntime = np.arange(0, T, dt)\ntraj1s = np.zeros((2,len(time), len(epss)))\ntraj2s = np.zeros((2,len(time), len(epss)))\n\n\nfor iep, eps in enumerate(epss):\n ms = np.linspace(-5,5,100)\n Sigma = np.zeros((2,2))\n Sigma[0,0] = 1.4\n Sigma[1,1] = 1.4\n Sigma[0,1] = 1.*eps\n Sigma[1,0] = -1./eps\n \n# Sigma[0,0] = 2+eps\n# Sigma[1,1] = 2-eps\n# Sigma[0,1] = 1.\n# Sigma[1,0] = -(eps**2+1)\n \n u, v = np.linalg.eig(Sigma)\n us[:,iep] = u\n vs[:,:,iep] = v\n \n l1 = -0.5\n l2 = 2.\n cC = np.array((1, 1, 1,))*0.3\n\n \n kaps1 = np.linspace(-1.3,1.3, 130)\n kaps2 = np.linspace(-1.3,1.3, 100)\n ksol = np.zeros((len(kaps1), len(kaps2), 2))\n \n K1s, K2s = np.meshgrid(kaps1, kaps2)\n def transf(K):\n return(K*Prime(0, np.dot(K.T, K)))\n \n E = np.zeros((len(kaps1), len(kaps2)))\n for ik1 ,k1 in enumerate(kaps1):\n for ik2, k2 in enumerate(kaps2):\n K = np.array((k1, k2))\n ksol[ik1, ik2, :] = - K+ np.dot(Sigma, transf(K))\n E[ik1, ik2] = np.sqrt(np.sum(ksol[ik1,ik2,:]**2))\n\n time1 = np.arange(0, 10, dt)\n traj1 = np.zeros((2,len(time)))\n traj1[:,0] = np.array((0.5,0.5))\n \n for it, ti in enumerate(time[:-1]):\n traj1[:,it+1] = traj1[:,it] + dt*(-traj1[:,it] + np.dot(Sigma,transf(traj1[:,it])))\n A = traj1[:,it+1]\n traj1 = np.zeros((2,len(time)))\n traj2 = np.zeros((2,len(time)))\n traj1[:,0] = A\n \n for it, ti in enumerate(time[:-1]):\n traj1[:,it+1] = traj1[:,it] + dt*(-traj1[:,it] + np.dot(Sigma,transf(traj1[:,it])))\n \n traj1s[:,:,iep] = traj1\n\n \n fig = plt.figure()\n ax = fig.add_subplot(111) \n im = plt.pcolor(kaps1, kaps2, np.log10(E).T, cmap ='viridis', vmin = -2.,vmax=0, shading= 'auto')\n \n\n strm = ax.streamplot(kaps1, kaps2, ksol[:,:,0].T, ksol[:,:,1].T, color=[0.4, 0.4, 0.4], linewidth=0.8, cmap='autumn', density=0.6)\n plt.xlabel('$\\kappa_1$')\n plt.ylabel('$\\kappa_2$')\n plt.scatter([ 0, ], [0], s=50, edgecolor='k', facecolor='w', linewidth=1., zorder=4) \n plt.plot(traj1[0,:], traj1[1,:], color=clS[:,iep], lw=2.)\n ax.set_xticks([-1, 0, 1])\n ax.set_yticks([-1, 0, 1])\n ax.set_ylim([np.min(kaps2), np.max(kaps2)])\n 
ax.set_xlim([np.min(kaps1), np.max(kaps1)])\n    plt.savefig('Th_FigS3_2_C_'+str(eps)+'_1.pdf')\n\n#%%\nfig = plt.figure(figsize = [1.5*2.2 , 1.*2.])\nax = fig.add_subplot(111)  \nfreq = np.zeros_like(time)\npers = np.zeros_like(epss)\nfor iep, eps in enumerate(epss):\n    #plt.plot(time, np.arctan2(traj1s[0,:,iep],traj1s[1,:,iep]), color=clS[:,iep])\n    plt.plot(time, traj1s[1,:,iep], color=clS[:,iep])\n    freq = 2*(traj1s[0,:,iep]>0)-1\n    time_m1 = time[:-1]\n    pers[iep] = 2*np.mean(np.diff(time_m1[np.abs(np.diff(freq))>0]))\nprint(pers)\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.set_yticks([-1, 0, 1])\nax.set_xticks([0, 10, 20, 30])\nax.set_xlim([0, 32])\nax.yaxis.set_ticks_position('left')\nax.xaxis.set_ticks_position('bottom') \nplt.ylabel('$\\kappa_2$')\nplt.xlabel(r'time ($\\tau$)')\nplt.savefig('Th_FigS3_2_C_2.pdf')\n#%%\n\nfig = plt.figure()\nax = fig.add_subplot(111) \nfor iep, eps in enumerate(epss):\n    fp1 = fp1s[iep]\n    fp2 = fp2s[iep]\n    u = us[:,iep]\n    v = vs[:,:,iep]\n    \n    plt.scatter([ 0, ], [0], s=50, edgecolor='k', facecolor='w', linewidth=1., zorder=4)    \n    plt.plot(traj1s[0,:,iep], traj1s[1,:,iep],  color=clS[:,iep], lw=1.5)\n    plt.plot(-traj1s[0,:,iep], -traj1s[1,:,iep],  color=clS[:,iep], lw=1.5)\n    \n    plt.plot(traj2s[0,:,iep], traj2s[1,:,iep],  color=clS[:,iep], lw=1.5)\n    plt.plot(-traj2s[0,:,iep], -traj2s[1,:,iep],  color=clS[:,iep], lw=1.5)\n    \nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.set_xticks([-1, 0, 1])\nax.set_yticks([-1, 0, 1])\nax.yaxis.set_ticks_position('left')\nax.xaxis.set_ticks_position('bottom') \nplt.xlabel('$\\kappa_1$')\nplt.ylabel('$\\kappa_2$')\nplt.savefig('Th_FigS3_C.pdf')\n\n#%%\nfig = plt.figure()\nax = fig.add_subplot(111) \n\nl1 = -0.5\nl2 = 1.2\nplt.plot([l1, l2], [0,0], 'k', lw=0.5)\nplt.plot( [0,0],[l1, l2], 'k', lw=0.5)\n\nR = 1.13\ntheta= np.linspace(0,2*np.pi)\nplt.plot(R*np.cos(theta), R*np.sin(theta), color='k', lw=0.6)\nplt.plot()\nepss2 = epss[::-1]\nclS2 = clS[:,::-1]\nfor iep, eps in enumerate(epss):\n    Sigma[0,0] = 1.4\n    Sigma[1,1] = 1.4\n    Sigma[0,1] = 1.*eps\n    Sigma[1,0] = -1./eps\n    \n    u, v = np.linalg.eig(Sigma)\n    v1 = np.real(v[:,0])\n    fac = np.sqrt(np.sum(v1**2))\n    v1 = v1/fac\n    v2 = -np.imag(v[:,1])/fac\n    print(v1)\n    print(v2)\n    print('--')\n\n    cC = np.array((1, 1, 1,))*0.3\n\n    ax.arrow(0, 0, v1[0], v1[1],  fc=cC, ec='k', alpha =0.8, width=0.04,\n                head_width=0.13, head_length=0.13, zorder=3)\n    ax.arrow(0, 0, v2[0], v2[1],  fc=clS[:,iep], ec='k', alpha =0.8, width=0.04,\n                head_width=0.13, head_length=0.13, zorder=3)\n    \n#    ax.text(0.8, -0.4, r'Re$(\\bf{u})$', fontsize = 12)\n#    ax.text(0.2, 1.2, r'Im$(\\bf{u})$', fontsize = 12)\n    \n    ax.text(0.5, -0.2, r'Re$( \\bf{u})$', fontsize = 15)\n    ax.text(0.1, 0.8, r'Im$( \\bf{u})$', fontsize = 15)    \n    ax.set_xlim([l1, l2])\n    ax.set_ylim([l1, l2])\n    \n    ax.axis('off')\n    ax.spines['top'].set_visible(False)\n    ax.spines['right'].set_visible(False)\n    ax.spines['left'].set_visible(False)\n    ax.spines['bottom'].set_visible(False)\n    \n    plt.savefig('Th_FigS3_B.pdf')\n\n    \n#%%\nSigma2 = np.zeros_like(Sigma)\nSigma2[:,:] = Sigma\nSigma3 = np.zeros_like(Sigma)*np.nan\nSigma3[1,0] = Sigma[1,0]\n\nSigma2[1, 0] = np.nan\n\nplt.rcParams[\"axes.grid\"] = False\nfig = plt.figure(figsize = [2.0, 2.0])\nax = fig.add_subplot(111) \nplt.imshow(Sigma2, cmap='OrRd', vmin = 0, vmax = 7)\nplt.imshow(Sigma3, cmap='PuBu', vmin = -0.7, vmax = 0)\n\nax.tick_params(color='white')\n\n\nfor i in range(np.shape(Sigma)[0]):\n    for j in range(np.shape(Sigma)[1]):\n        if i==1 
and j==0:\n ax.text(i, j, ' ', va='center', ha='center', fontsize=16)\n elif i==0 and j==1:\n ax.text(i, j, ' ', va='center', ha='center', fontsize=16)\n \n else:\n ax.text(i, j, str(Sigma2[j,i]), va='center', ha='center', fontsize=16)\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.spines['left'].set_visible(False)\nax.spines['bottom'].set_visible(False)\n\nax.yaxis.set_ticks_position('right')\nax.xaxis.set_ticks_position('top') \nax.set_xticks([0, 1])\nax.set_yticks([0, 1])\n\n\nax.set_xticklabels([r'$m_i^{\\left(1\\right)}$', r'$m_i^{\\left(2\\right)}$'], fontsize=14)\nax.set_yticklabels([r'$n_i^{\\left(1\\right)}$', r'$n_i^{\\left(2\\right)}$'], fontsize=14)\n\nplt.savefig('Th_FigS3_A1.pdf')\nplt.show()\n\n\n#%%\nfig = plt.figure(figsize = [2.0, 1.1])\nax = fig.add_subplot(111) \n\nclS = np.zeros((3, len(epss)))\n\ncl2 = np.array(( 225/256, 74/256, 51/256))\ncl1 = np.array(( 0.5, 0.5, 0.5))\n\nnL = len(epss)-1\nfor iep, eps in enumerate(epss[:-1]):\n cl11 = cl1* (nL-iep)/nL + cl2* (iep)/nL\n cl22 = cl1* (nL-iep-1)/nL + cl2* (iep+1)/nL\n \n int_points = 20\n ps = np.linspace(eps, epss[iep+1], 10)\n nP = len(ps)-1\n plt.scatter(eps, 1.18, s=40, marker='v', color='k', edgecolor=[0.8, 0.8, 0.8], zorder=3)\n plt.scatter(epss[iep+1], 1.18, s=40, marker='v', color='k', edgecolor=[0.8, 0.8, 0.8], zorder=3)\n \n for ip , p in enumerate(ps[:-1]):\n \n cc = cl11* (nP-ip)/nP + cl22* (ip)/nP\n \n plt.fill_between([p, ps[ip+1]], [0, 0], [1, 1], color=cc)\n\nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.spines['left'].set_visible(False)\nax.spines['bottom'].set_visible(False)\n \nax.set_yticks([])\nax.set_xticks([1, 2, 3])\nax.set_xlabel('')\n\nax.set_xlim([0.9,3.1])\nax.set_ylim([-0.05,1.4])\nax.plot([1,3], [1,1], c='k')\nax.plot([1,1], [0,1], c='k')\nax.plot([3,3], [0,1], c='k')\nax.plot([1,3], [0.0,0.0], c='k', zorder=2)\n\nplt.tight_layout()\nplt.savefig('Th_FigS3_A2.pdf')\nplt.show()" }, { "alpha_fraction": 0.7658349275588989, "alphanum_fraction": 0.781190037727356, "avg_line_length": 85.83333587646484, "blob_id": "071db5e2c58114d6f125288b6e6b7e9957b9b8b1", "content_id": "f7d4a179091434ec5004260810893c7a5dbd0fbc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 521, "license_type": "no_license", "max_line_length": 272, "num_lines": 6, "path": "/README.md", "repo_name": "emebeiran/low-rank2020", "src_encoding": "UTF-8", "text": "# low-rank2020\nThese are the codes for the paper \"Shaping dynamics with multiple populations in low-rank recurrent networks\".\n\nAll figures of the publication can be reproduced by running the respective script. They take only a few seconds to run.\n\nFigure 7 includes some summary statistics (panels B, C and D) that take longer to run (~1h). The file \"C_Fig7_allpanels.py\" plots those panels based on a saved file (also provided). 
The file \"C_Fig7_ABCD.py\", which takes longer to run, calculates those summary statistics.\n" }, { "alpha_fraction": 0.48621466755867004, "alphanum_fraction": 0.5631593465805054, "avg_line_length": 32.36854553222656, "blob_id": "7ea9a192c799ae18b24afa543ddc89a9ba5d743a", "content_id": "98e971ee575529b467cd667c5a0f44aa98eb6525", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 14218, "license_type": "no_license", "max_line_length": 148, "num_lines": 426, "path": "/C_Fig6_ABC.py", "repo_name": "emebeiran/low-rank2020", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Mar 30 10:49:59 2020\n\n@author: mbeiran\n\"\"\"\n#%%\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport lib_rnns as lr\nfrom matplotlib.gridspec import GridSpec\naa = lr.set_plot()\n\n\n \ngaussian_norm = (1/np.sqrt(np.pi))\ngauss_points, gauss_weights = np.polynomial.hermite.hermgauss(200)\ngauss_points = gauss_points*np.sqrt(2)\n\ndef Phi (mu, delta0):\n integrand = np.tanh(mu+np.sqrt(delta0)*gauss_points)\n return gaussian_norm * np.dot (integrand,gauss_weights)\ndef Prime (mu, delta0):\n integrand = 1 - (np.tanh(mu+np.sqrt(delta0)*gauss_points))**2\n return gaussian_norm * np.dot (integrand,gauss_weights)\n\n#%%\nN = 400\nnbins = 20\n\n\ns_mn1 = 0*np.array((0.5, 0.5, 0.5, 0.5))\ns_mn2 = 0*np.array((0.5, 0.5, 0.5, 0.5))\n\nval = 0.5#np.sqrt(1-0.5**2)\n\npops = 1\nolap = 2.\n\na_m1 = np.sqrt(1-val**2)*np.array((1., -1., 1., -1.))\na_m2 = np.sqrt(1-val**2)*np.array((1., 1., -1., -1.))\na_n1 = olap*(1/np.sqrt(1-val**2))*np.array((1., -1., 1., -1.))\na_n2 = olap*(1/np.sqrt(1-val**2))*pops*np.array((1., 1., -1., -1.))\n\n\n\nm1 = np.random.randn(N)\nn1 = np.random.randn(N)\nm2 = np.random.randn(N)\nn2 = np.random.randn(N)\n\nsels = 1000\nerr0 = 50\n\npops = len(a_m1)\nfor t in range(sels):\n V = np.random.randn(N//pops, 20)\n CC = np.dot(V, V.T)\n for po in range(pops):\n CC[po,po] = 0.\n \n err = np.std(CC)\n if err<err0:\n Vs = V\n err0 = err\n \nix = 0\n\nfor po in range(pops):\n m1[po*(N//pops):(po+1)*(N//pops)] = a_m1[po]+val*V[:, ix] \n ix += 1\n n1[po*(N//pops):(po+1)*(N//pops)] = a_n1[po]+val*V[:, ix]#+s_mn1[po]*V[:, ix]/val\n ix += 1 \n m2[po*(N//pops):(po+1)*(N//pops)] = a_m2[po]+val*V[:, ix]\n ix += 1\n n2[po*(N//pops):(po+1)*(N//pops)] = a_n2[po]+val*V[:, ix]#+s_mn2[po]*V[:, ix]/val\n ix += 1\n\n#%%\n# =============================================================================\n# Fig 2\n# =============================================================================\nms = np.linspace(-5,5,100)\nSigma = np.zeros((2,2))\nSigma[0,0] = 1.3\nSigma[1,1] = 1.8\nSigma[0,1] = -0.5\nSigma[1,0] = 0.8\n\n\nN = 1000\nS=10\nM = np.vstack((m1, m2)).T\nss2 = 0.3\n\nNNN = np.vstack((n1, n2))\n\nfig = plt.figure(figsize=[3.2, 3.2])#, dpi=600\ngs = GridSpec(5,5)\n\nax_joint00 = fig.add_subplot(gs[1:3,0:2])\nax_joint01 = fig.add_subplot(gs[1:3,2:4])\nax_joint10 = fig.add_subplot(gs[3:5,0:2])\nax_joint11 = fig.add_subplot(gs[3:5,2:4])\n\nax_marg_x0 = fig.add_subplot(gs[0,0:2])\nax_marg_x1 = fig.add_subplot(gs[0,2:4])\n\nax_marg_y0 = fig.add_subplot(gs[1:3,4])\nax_marg_y1 = fig.add_subplot(gs[3:5,4])\n\nyl = 4.\nylt = 3.\nxl = 2.5\nxlt = 2.\nax_joint00.scatter(M[:,0], NNN[0,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nax_joint00.scatter(np.sqrt(1-val**2)*np.array((1., -1.)), olap*(1/np.sqrt(1-val**2))*np.array((1., -1.)),\n s=2*S, edgecolor='k', facecolor='w')\nax_joint00.set_xlim([-xl, 
xl])\nax_joint00.set_xticks([-xlt, 0, xlt])\nax_joint00.set_xticklabels(['','',''])\nax_joint00.set_ylim([-yl,yl])\nax_joint00.set_yticks([-ylt, 0, ylt])\nax_joint00.set_ylabel(r'$n^{\\left(1\\right)}_i$')\nax_joint00.spines['top'].set_visible(False)\nax_joint00.spines['right'].set_visible(False)\nax_joint00.yaxis.set_ticks_position('left')\nax_joint00.xaxis.set_ticks_position('bottom')\n \nax_joint01.scatter(M[:,1], NNN[0,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nax_joint01.scatter(np.sqrt(1-val**2)*np.array((1., -1., 1., -1.)), olap*(1/np.sqrt(1-val**2))*np.array((1., -1., -1, 1.)),\n s=2*S, edgecolor='k', facecolor='w')\nax_joint01.spines['top'].set_visible(False)\nax_joint01.spines['right'].set_visible(False)\nax_joint01.yaxis.set_ticks_position('left')\nax_joint01.xaxis.set_ticks_position('bottom')\nax_joint01.set_ylim([-yl,yl])\nax_joint01.set_yticks([-ylt, 0, ylt])\nax_joint01.set_yticklabels(['','',''])\nax_joint01.set_xlim([-xl, xl])\nax_joint01.set_xticks([-xlt, 0, xlt])\nax_joint01.set_xticklabels(['','',''])\n\nax_joint10.scatter(M[:,0], NNN[1,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nax_joint10.scatter(np.sqrt(1-val**2)*np.array((1., -1., 1., -1.)), olap*(1/np.sqrt(1-val**2))*np.array((1., -1., -1, 1.)),\n s=2*S, edgecolor='k', facecolor='w')\nax_joint10.set_xlim([-3,3])\nax_joint10.spines['top'].set_visible(False)\nax_joint10.spines['right'].set_visible(False)\nax_joint10.yaxis.set_ticks_position('left')\nax_joint10.xaxis.set_ticks_position('bottom')\nax_joint10.set_ylim([-yl,yl])\nax_joint10.set_yticks([-ylt, 0, ylt])\nax_joint10.set_xlim([-xl, xl])\nax_joint10.set_xticks([-xlt, 0, xlt])\nax_joint10.set_ylabel(r'$n^{\\left(2\\right)}_i$')\nax_joint10.set_xlabel(r'$m^{\\left(1\\right)}_i$')\n\nax_joint11.scatter(M[:,1], NNN[1,:], s=S, alpha=0.5, label=r'$\\sigma_{mn} = 1.2$', rasterized=True)\nax_joint11.scatter(np.sqrt(1-val**2)*np.array((1., -1.)), olap*(1/np.sqrt(1-val**2))*np.array((1., -1.)),\n s=2*S, edgecolor='k', facecolor='w')\nax_joint11.set_xlim([-3,3])\nax_joint11.spines['top'].set_visible(False)\nax_joint11.spines['right'].set_visible(False)\nax_joint11.set_ylim([-yl, yl])\nax_joint11.set_yticks([-ylt, 0, ylt])\nax_joint11.set_xlim([-xl, xl])\nax_joint11.set_xticks([-xlt, 0, xlt])\nax_joint11.set_yticklabels(['','',''])\nax_joint11.yaxis.set_ticks_position('left')\nax_joint11.xaxis.set_ticks_position('bottom')\nax_joint11.set_xlabel(r'$m^{\\left(2\\right)}_i$')\n\nax_marg_x0.hist(M[:,0], nbins, alpha=0.5, density=True)\nss = val\nax_marg_x0.plot(ms, (0.5/np.sqrt(2*np.pi*ss**2))*(np.exp(-(ms+np.sqrt(1-val**2))**2/(2*ss**2)) + np.exp(-(ms-np.sqrt(1-val**2))**2/(2*ss**2))), 'k')\nax_marg_x0.spines['top'].set_visible(False)\nax_marg_x0.spines['right'].set_visible(False)\nax_marg_x0.spines['left'].set_visible(False)\nax_marg_x0.yaxis.set_ticks_position('left')\nax_marg_x0.xaxis.set_ticks_position('bottom')\nax_marg_x0.set_xlim([-3,3])\nax_marg_x0.set_xticks([-2., 0, 2.])\nax_marg_x0.set_ylim([0,.8])\nax_marg_x0.set_xticklabels(['','',''])\nax_marg_x0.set_yticks([1])\n\nax_marg_x1.hist(M[:,1], nbins, alpha=0.5, density=True)\nax_marg_x1.plot(ms, (0.5/np.sqrt(2*np.pi*ss**2))*(np.exp(-(ms+np.sqrt(1-val**2))**2/(2*ss**2)) + np.exp(-(ms-np.sqrt(1-val**2))**2/(2*ss**2))), 
'k')\nax_marg_x1.spines['top'].set_visible(False)\nax_marg_x1.spines['right'].set_visible(False)\nax_marg_x1.spines['left'].set_visible(False)\nax_marg_x1.yaxis.set_ticks_position('left')\nax_marg_x1.xaxis.set_ticks_position('bottom')\nax_marg_x1.set_xlim([-3,3])\nax_marg_x1.set_ylim([0,0.8])\nax_marg_x1.set_xticks([-2., 0, 2.])\nax_marg_x1.set_xticklabels(['','',''])\nax_marg_x1.set_yticks([1])\n\nax_marg_y0.hist(NNN[0,:], nbins, orientation=\"horizontal\", alpha=0.5, density=True)\nss= val\nMu = olap*(1/np.sqrt(1-val**2))\nax_marg_y0.plot((0.5/np.sqrt(2*np.pi*ss**2))*(np.exp(-(ms+Mu)**2/(2*ss**2)) + np.exp(-(ms-Mu)**2/(2*ss**2))),ms, 'k')\nax_marg_y0.spines['top'].set_visible(False)\nax_marg_y0.spines['right'].set_visible(False)\nax_marg_y0.spines['bottom'].set_visible(False)\nax_marg_y0.yaxis.set_ticks_position('left')\nax_marg_y0.xaxis.set_ticks_position('bottom')\nax_marg_y0.set_ylim([-yl,yl])\nax_marg_y0.set_xlim([0,0.8])\nax_marg_y0.set_yticks([-ylt, 0, ylt])\nax_marg_y0.set_yticklabels(['','',''])\nax_marg_y0.set_xticks([1])\nax_marg_y0.set_xticklabels([''])\n\nax_marg_y1.hist(NNN[1,:], nbins, orientation=\"horizontal\", alpha=0.5, density=True)\nax_marg_y1.plot((0.5/np.sqrt(2*np.pi*ss**2))*(np.exp(-(ms+Mu)**2/(2*ss**2)) + np.exp(-(ms-Mu)**2/(2*ss**2))),ms, 'k')\nax_marg_y1.spines['top'].set_visible(False)\nax_marg_y1.spines['right'].set_visible(False)\nax_marg_y1.spines['bottom'].set_visible(False)\nax_marg_y1.yaxis.set_ticks_position('left')\nax_marg_y1.xaxis.set_ticks_position('bottom')\nax_marg_y1.set_ylim([-yl,yl])\nax_marg_y1.set_xlim([0,0.8])\nax_marg_y1.set_yticks([-ylt, 0, ylt])\nax_marg_y1.set_yticklabels(['','',''])\nax_marg_y1.set_xticks([1])\nax_marg_y1.set_xticklabels([''])\n\nplt.savefig('Th_Fig5_1_A.pdf')\n\n#%%\nkaps1 = np.linspace(-2.5,2.5, 150)\nkaps2 = np.linspace(-2.5,2.5, 140)\nksol = np.zeros((len(kaps1), len(kaps2), 2))\n\nK1s, K2s = np.meshgrid(kaps1, kaps2)\ndef transf(K):\n return(K*Prime(0, np.dot(K.T, K)))\n\n\nE = np.zeros((len(kaps1), len(kaps2)))\nfor ik1 ,k1 in enumerate(kaps1):\n for ik2, k2 in enumerate(kaps2):\n K = np.array((k1, k2))\n ksol[ik1, ik2, :] = - K\n for ip in range(pops):\n ksol[ik1, ik2, 0] += (1/pops)*a_n1[ip]*Phi(a_m1[ip]*k1+a_m2[ip]*k2, val**2*k1**2+val**2*k2**2)\n ksol[ik1, ik2, 1] += (1/pops)*a_n2[ip]*Phi(a_m1[ip]*k1+a_m2[ip]*k2, val**2*k1**2+val**2*k2**2)\n \n E[ik1, ik2] = np.sqrt(np.sum(ksol[ik1,ik2,:]**2))\n \nsearch_kap1 = np.linspace(0.2, 2.5, 300)\nE_1 = np.zeros_like(search_kap1)\nv = np.array((1,0))\nfor ik1 ,k1 in enumerate(search_kap1):\n K = v*k1\n kSS = - K[0]\n for ip in range(pops):\n kSS += (1/pops)*a_n1[ip]*Phi(a_m1[ip]*K[0], val**2*K[0]**2)\n \n E_1[ik1] = np.abs(kSS)\nfp1 = search_kap1[np.argmin(E_1)]\n\nsearch_kap1 = np.linspace(0.2, 3., 500)\nE_1 = np.zeros_like(search_kap1)\nv = np.array((0,1))\nE11 = np.zeros_like(E_1)\nE12 = np.zeros_like(E_1)\n\nfor ik1 ,k1 in enumerate(search_kap1):\n K = v*k1\n kSS = - K\n for ip in range(pops):\n kSS[0] += (1/pops)*a_n1[ip]*Phi(a_m1[ip]*K[0]+a_m2[ip]*K[1], val**2*K[0]**2+val**2*K[1]**2)\n kSS[1] += (1/pops)*a_n2[ip]*Phi(a_m1[ip]*K[0]+a_m2[ip]*K[1], val**2*K[0]**2+val**2*K[1]**2)\n E11[ik1] =kSS[0]\n E12[ik1] =kSS[1]\n \n E_1[ik1] = np.sum(kSS**2)\nfp11 = search_kap1[np.argmin(E_1)]\n\nsearch_kap2 = np.linspace(0.2, 2., 300)\ndkap = search_kap2[1]-search_kap2[0]\nE_2 = np.zeros_like(search_kap2)\nAn = np.vstack((a_n1, a_n2))\nAm = np.vstack((a_m1, a_m2))\n\nfor ik2 ,k2 in enumerate(search_kap2):\n W = np.array((1, 1))/np.sqrt(2)\n K = W*k2\n kSS = - K\n for ip in 
range(pops):\n kSS[0] += (1/pops)*a_n1[ip]*Phi(a_m1[ip]*K[0]+a_m2[ip]*K[0], val**2*K[0]**2+val**2*K[1]**2)\n kSS[1] += (1/pops)*a_n2[ip]*Phi(a_m1[ip]*K[0]+a_m2[ip]*K[0], val**2*K[0]**2+val**2*K[1]**2)\n \n E_2[ik2] = np.abs(np.sum(kSS**2))\nfp2 = search_kap2[np.argmin(E_2)]+dkap\n\nfig = plt.figure()\nax = fig.add_subplot(111) \nim = plt.pcolor(kaps1, kaps2, np.log10(E).T, cmap ='viridis', vmin = -2.,vmax=0, shading='auto')\n\n\nstrm = ax.streamplot(kaps1, kaps2, ksol[:,:,0].T, ksol[:,:,1].T, color='w', linewidth=1, cmap='autumn', density=0.6)\n\nplt.xlabel('$\\kappa_1$')\nplt.ylabel('$\\kappa_2$')\nplt.scatter([ 0, fp2/np.sqrt(2), fp2/np.sqrt(2), -fp2/np.sqrt(2), -fp2/np.sqrt(2)], \\\n [0, fp2/np.sqrt(2), -fp2/np.sqrt(2), fp2/np.sqrt(2), -fp2/np.sqrt(2)], s=50, edgecolor='k', facecolor='w', linewidth=1., zorder=4)\nplt.scatter([ 0, 0, fp1, -fp1], [fp11, -fp11, 0, 0], s=70, edgecolor='w', facecolor='k', linewidth=1., zorder=4)\n\nplt.plot([ fp1,0, -fp1,0, fp1], [0, fp11, 0, -fp11,0], c='k', lw=0.7)\n\nth = np.linspace(0, 2*np.pi)\n\nax.set_xticks([-2, 0, 2])\nax.set_yticks([-2, 0, 2])\nax.set_ylim([np.min(kaps2), np.max(kaps2)])\nax.set_xlim([np.min(kaps1), np.max(kaps1)])\nplt.savefig('Th_Fig5_1_C1.pdf')\n\n#%%\nfig = plt.figure()\nax = fig.add_subplot(111) \n\nplt.xlabel('$\\kappa_1$')\nplt.ylabel('$\\kappa_2$')\nplt.scatter([ 0, fp2/np.sqrt(2), fp2/np.sqrt(2), -fp2/np.sqrt(2), -fp2/np.sqrt(2)], \\\n [0, fp2/np.sqrt(2), -fp2/np.sqrt(2), fp2/np.sqrt(2), -fp2/np.sqrt(2)], s=50, edgecolor='k', facecolor='w', linewidth=1., zorder=4)\nplt.scatter([ 0, 0, fp1, -fp1], [fp11, -fp11, 0, 0], s=70, edgecolor='w', facecolor='k', linewidth=1., zorder=5)\nplt.plot([ fp1,0, -fp1,0, fp1], [0, fp11, 0, -fp11,0], c='k', lw=0.7)\n\n\nNn = 1000\n\nMu= np.zeros((4,1))\n\ninkap1 = np.linspace(-2.2, 2.2, 5)\ninkap2 = np.linspace(-2.2, 2.2, 6)\n\ndt = 0.1\ntime = np.arange(0, 120, dt)\n\nfor trials in range(2):\n s_mn1 = 0*np.array((0.5, 0.5, 0.5, 0.5))\n s_mn2 = 0*np.array((0.5, 0.5, 0.5, 0.5))\n \n val = 0.5#np.sqrt(1-0.5**2)\n \n pops = 1\n olap = 2.\n \n a_m1 = np.sqrt(1-val**2)*np.array((1., -1., 1., -1.))\n a_m2 = np.sqrt(1-val**2)*np.array((1., 1., -1., -1.))\n a_n1 = olap*(1/np.sqrt(1-val**2))*np.array((1., -1., 1., -1.))\n a_n2 = olap*(1/np.sqrt(1-val**2))*pops*np.array((1., 1., -1., -1.))\n \n m1 = np.random.randn(Nn)\n n1 = np.random.randn(Nn)\n m2 = np.random.randn(Nn)\n n2 = np.random.randn(Nn)\n \n sels = 1000\n err0 = 50\n \n pops = len(a_m1)\n for t in range(sels):\n V = np.random.randn(Nn//pops, 20)\n CC = np.dot(V, V.T)\n for po in range(pops):\n CC[po,po] = 0.\n \n err = np.std(CC)\n if err<err0:\n Vs = V\n err0 = err\n \n ix = 0\n \n for po in range(pops):\n m1[po*(Nn//pops):(po+1)*(Nn//pops)] = a_m1[po]+val*V[:, ix] \n ix += 1\n n1[po*(Nn//pops):(po+1)*(Nn//pops)] = a_n1[po]+val*V[:, ix]#+s_mn1[po]*V[:, ix]/val\n ix += 1 \n m2[po*(Nn//pops):(po+1)*(Nn//pops)] = a_m2[po]+val*V[:, ix]\n ix += 1\n n2[po*(Nn//pops):(po+1)*(Nn//pops)] = a_n2[po]+val*V[:, ix]#+s_mn2[po]*V[:, ix]/val\n ix += 1\n M = np.vstack((m1, m2)).T\n N = np.vstack((n1, n2))\n\n J = np.dot(M, N)/Nn\n \n cC = np.ones(3)*0.6\n \n for ik1, ink1 in enumerate(inkap1):\n for ik2, ink2 in enumerate(inkap2):\n sk1 = np.zeros_like(time)\n sk2 = np.zeros_like(time)\n \n x0 = ink1*M[:,0] + ink2*M[:,1]\n sk1[0] = np.mean(M[:,0]*x0)\n sk2[0] = np.mean(M[:,1]*x0)\n \n for it, ti in enumerate(time[:-1]):\n x = x0 + dt*(-x0 + np.dot(J, np.tanh(x0)))\n sk1[it+1] = np.mean(M[:,0]*x)\n sk2[it+1] = np.mean(M[:,1]*x)\n x0 = x\n plt.plot(sk1, sk2, 
c=cC)\n plt.scatter(sk1[0], sk2[0], s=10, facecolor=cC)\n plt.scatter(sk1[-1], sk2[-1], s=25, facecolor=cC, edgecolor='k', zorder=3)\n \nax.set_xticks([-2, 0, 2])\nax.set_yticks([-2, 0, 2])\nax.set_ylim([1.1*np.min(kaps2), 1.1*np.max(kaps2)])\nax.set_xlim([1.1*np.min(kaps1), 1.1*np.max(kaps1)])\n \nax.spines['top'].set_visible(False)\nax.spines['right'].set_visible(False)\nax.yaxis.set_ticks_position('left')\nax.xaxis.set_ticks_position('bottom') \nplt.savefig('Th_Fig5_1_D.pdf') " } ]
13
excursiones/payments
https://github.com/excursiones/payments
bbfd97255273967f90c0f065e71e1af2a10460b6
ce4215e599fb3240c40e0133899abbf78d09ea3a
daa6c31f5177f6622876ae59e67bc61a658e656e
refs/heads/master
2022-05-18T10:47:49.394075
2019-05-26T17:42:28
2019-05-26T17:42:28
188,713,869
0
0
null
2019-05-26T17:44:10
2019-05-26T17:44:45
2022-04-22T21:19:47
Python
[ { "alpha_fraction": 0.5563739538192749, "alphanum_fraction": 0.5699716806411743, "avg_line_length": 37.369564056396484, "blob_id": "841892913b4807289ee84150833c691f898d6b41", "content_id": "53257f8a8b2d520b773eadedea22082352edbb13", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1765, "license_type": "no_license", "max_line_length": 127, "num_lines": 46, "path": "/payments_ms/migrations/0001_initial.py", "repo_name": "excursiones/payments", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2 on 2019-05-20 20:56\n\nimport django.core.validators\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='CompanyPayment',\n fields=[\n ('id', models.AutoField(primary_key=True, serialize=False)),\n ('company_id', models.IntegerField()),\n ('price', models.FloatField(validators=[django.core.validators.MinValueValidator(1)])),\n ('date', models.DateTimeField(auto_now_add=True)),\n ('origin_account', models.CharField(max_length=30)),\n ('destination_account', models.CharField(max_length=30)),\n ],\n ),\n migrations.CreateModel(\n name='UserPayment',\n fields=[\n ('id', models.AutoField(primary_key=True, serialize=False)),\n ('user_id', models.IntegerField()),\n ('price', models.FloatField(validators=[django.core.validators.MinValueValidator(1)])),\n ('date', models.DateTimeField(auto_now_add=True)),\n ('origin_account', models.CharField(max_length=30)),\n ('destination_account', models.CharField(max_length=30)),\n ],\n ),\n migrations.CreateModel(\n name='PaymentRefund',\n fields=[\n ('id', models.AutoField(primary_key=True, serialize=False)),\n ('date', models.DateTimeField(auto_now_add=True)),\n ('user_payment', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments_ms.UserPayment')),\n ],\n ),\n ]\n" }, { "alpha_fraction": 0.6881463527679443, "alphanum_fraction": 0.6961018443107605, "avg_line_length": 37.121212005615234, "blob_id": "d7acd92c5d1d4725f92b48816bd8b4a2d8201eb5", "content_id": "4f5129b80d05c6e11ba32a33bc232c8a3e57cb22", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1257, "license_type": "no_license", "max_line_length": 75, "num_lines": 33, "path": "/payments_ms/models.py", "repo_name": "excursiones/payments", "src_encoding": "UTF-8", "text": "from djongo import models\nfrom django.core.validators import MinValueValidator\n# Create your models here.\n \nclass UserPayment(models.Model):\n id = models.AutoField(primary_key = True)\n user_id = models.IntegerField()\n price = models.FloatField(validators=[MinValueValidator(1)])\n date = models.DateTimeField(auto_now_add=True)\n origin_account = models.CharField(max_length = 30)\n destination_account = models.CharField(max_length = 30)\n\n def __str__(self):\n return '{} - ${}'.format(self.user_id, self.price)\n\nclass PaymentRefund(models.Model):\n id = models.AutoField(primary_key = True)\n user_payment = models.ForeignKey(UserPayment, on_delete=models.CASCADE)\n date = models.DateTimeField(auto_now_add=True)\n\n def __str__(self):\n return '{}'.format(self.user_payment)\n\nclass CompanyPayment(models.Model):\n id = models.AutoField(primary_key = True)\n company_id = models.IntegerField()\n price = models.FloatField(validators=[MinValueValidator(1)])\n date = models.DateTimeField(auto_now_add=True)\n origin_account = 
models.CharField(max_length = 30)\n destination_account = models.CharField(max_length = 30)\n\n def __str__(self):\n return '{} - ${}'.format(self.company_id, self.price)" }, { "alpha_fraction": 0.6669184565544128, "alphanum_fraction": 0.6737160086631775, "avg_line_length": 37.764705657958984, "blob_id": "948e534450520b4d4104b7e8e657c47853350778", "content_id": "0f28091c36daea1c30275e34e4a336f4e024fbcc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1324, "license_type": "no_license", "max_line_length": 85, "num_lines": 34, "path": "/payments_ms/views/user_payment_views.py", "repo_name": "excursiones/payments", "src_encoding": "UTF-8", "text": "from rest_framework.response import Response\nfrom rest_framework.views import APIView\nfrom django.shortcuts import get_object_or_404\n\nfrom ..models import UserPayment\nfrom ..serializers import UserPaymentSerializer\n\nclass UserPaymentView(APIView):\n def get(self, request):\n payments = UserPayment.objects.all()\n payments = list(payments.values())\n return Response({\"Users payments\": payments})\n \n def post(self, request):\n payment = request.data.get('user_payment')\n\n serializer = UserPaymentSerializer(data=payment)\n if serializer.is_valid(raise_exception=True):\n payment_saved = serializer.save()\n return Response({\"User payment creation\": \"success\"})\n\n def put(self, request, pk):\n payment = get_object_or_404(UserPayment.objects.all(), pk=pk)\n data = request.data.get('user_payment')\n \n serializer = UserPaymentSerializer(instance=payment, data=data, partial=True)\n if serializer.is_valid(raise_exception=True):\n payment_saved = serializer.save()\n return Response({\"User payment updated\": \"success\"})\n\n def delete(self, request, pk):\n payment = get_object_or_404(UserPayment.objects.all(), pk=pk)\n payment.delete()\n return Response({\"User payment deleted\": \"success\"})\n \n\n" }, { "alpha_fraction": 0.677395761013031, "alphanum_fraction": 0.6839795112609863, "avg_line_length": 39.02941131591797, "blob_id": "e84bb4d0f4c5b030c5282eb280e486df8a18ce5f", "content_id": "68a92555b87aeac3ad454cbe2bc9db4b19ff333c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1367, "license_type": "no_license", "max_line_length": 88, "num_lines": 34, "path": "/payments_ms/views/company_payment_views.py", "repo_name": "excursiones/payments", "src_encoding": "UTF-8", "text": "from rest_framework.response import Response\nfrom rest_framework.views import APIView\nfrom django.shortcuts import get_object_or_404\n\nfrom ..models import CompanyPayment\nfrom ..serializers import CompanyPaymentSerializer\n\nclass CompanyPaymentView(APIView):\n def get(self, request):\n payments = CompanyPayment.objects.all()\n payments = list(payments.values())\n return Response({\"Companies payments\": payments})\n \n def post(self, request):\n payment = request.data.get('company_payment')\n\n serializer = CompanyPaymentSerializer(data=payment)\n if serializer.is_valid(raise_exception=True):\n payment_saved = serializer.save()\n return Response({\"Company payment creation\": \"success\"})\n\n def put(self, request, pk):\n payment = get_object_or_404(CompanyPayment.objects.all(), pk=pk)\n data = request.data.get('company_payment')\n \n serializer = CompanyPaymentSerializer(instance=payment, data=data, partial=True)\n if serializer.is_valid(raise_exception=True):\n payment_saved = serializer.save()\n return Response({\"Company payment updated\": 
\"success\"})\n\n def delete(self, request, pk):\n payment = get_object_or_404(CompanyPayment.objects.all(), pk=pk)\n payment.delete()\n return Response({\"Company payment deleted\": \"success\"})\n \n\n" }, { "alpha_fraction": 0.5043227672576904, "alphanum_fraction": 0.5074927806854248, "avg_line_length": 41.32926940917969, "blob_id": "be6bab7a230f858e3aec6aa1c2671ffae4cc675d", "content_id": "77f5cdc5c00bc0a13523f2c3dbf08c391740a415", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3470, "license_type": "no_license", "max_line_length": 88, "num_lines": 82, "path": "/payments_ms/serializers.py", "repo_name": "excursiones/payments", "src_encoding": "UTF-8", "text": "from rest_framework import serializers\nfrom rest_framework.validators import UniqueValidator\nfrom django.shortcuts import get_object_or_404\nfrom .models import *\n\nclass UserPaymentSerializer(serializers.Serializer):\n user_id = serializers.IntegerField()\n price = serializers.FloatField()\n origin_account = serializers.CharField(max_length = 30)\n destination_account = serializers.CharField(max_length = 30)\n\n def create(self, validated_data):\n return UserPayment.objects.create(**validated_data)\n\n def update(self, instance, validated_data):\n instance.user_id = validated_data.get('user_id', instance.user_id)\n instance.price = validated_data.get('price', instance.price)\n instance.origin_account = validated_data.get(\n 'origin_account',\n instance.origin_account\n )\n instance.destination_account = validated_data.get(\n 'destination_account',\n instance.destination_account\n )\n\n instance.save()\n return instance\n\n class Meta:\n model = UserPayment\n fields = ('__all__')\n\nclass PaymentRefundSerializer(serializers.Serializer):\n user_payment_id = serializers.IntegerField(validators=[\n UniqueValidator(\n queryset=PaymentRefund.objects.all()\n )\n ])\n\n def create(self, validated_data):\n return PaymentRefund.objects.create(**validated_data)\n\n def update(self, instance, validated_data):\n instance.user_payment_id = validated_data.get(\n 'user_payment_id',\n instance.user_payment_id\n )\n instance.save()\n return instance\n\n class Meta:\n model = PaymentRefund\n fields = ('__all__')\n\nclass CompanyPaymentSerializer(serializers.Serializer):\n company_id = serializers.IntegerField()\n price = serializers.FloatField()\n origin_account = serializers.CharField(max_length = 30)\n destination_account = serializers.CharField(max_length = 30)\n\n def create(self, validated_data):\n return CompanyPayment.objects.create(**validated_data)\n\n def update(self, instance, validated_data):\n instance.company_id = validated_data.get('company_id', instance.company_id)\n instance.price = validated_data.get('price', instance.price)\n instance.origin_account = validated_data.get(\n 'origin_account',\n instance.origin_account\n )\n instance.destination_account = validated_data.get(\n 'destination_account',\n instance.destination_account\n )\n\n instance.save()\n return instance\n\n class Meta:\n model = CompanyPayment\n fields = ('__all__')" }, { "alpha_fraction": 0.6940726637840271, "alphanum_fraction": 0.6940726637840271, "avg_line_length": 31.75, "blob_id": "fe2a70b038a9a45ca3b34bad2b68296e7ff5e0d7", "content_id": "a31ac6707832dfb7f8269c7569ab71c917f732c6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 523, "license_type": "no_license", "max_line_length": 68, "num_lines": 16, "path": "/payments_ms/urls.py", 
"repo_name": "excursiones/payments", "src_encoding": "UTF-8", "text": "from django.urls import path\n\nfrom .views import *\n\n\napp_name = \"payments_ms\"\n\n# app_name will help us do a reverse look-up latter.\nurlpatterns = [\n path('user_payments/', UserPaymentView.as_view()),\n path('user_payments/<int:pk>', UserPaymentView.as_view()),\n path('company_payments/', CompanyPaymentView.as_view()),\n path('company_payments/<int:pk>', CompanyPaymentView.as_view()),\n path('payment_refunds/', PaymentRefundView.as_view()),\n path('payment_refunds/<int:pk>', PaymentRefundView.as_view()),\n]" }, { "alpha_fraction": 0.6087470650672913, "alphanum_fraction": 0.6176123023033142, "avg_line_length": 41.32500076293945, "blob_id": "2a85ea2fa169f2a1c7ae8d1b01cab544de9de420", "content_id": "b8f10f2063d121ab0498d52cc669548500923b7d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1692, "license_type": "no_license", "max_line_length": 86, "num_lines": 40, "path": "/payments_ms/views/payment_refund_views.py", "repo_name": "excursiones/payments", "src_encoding": "UTF-8", "text": "from rest_framework.response import Response\nfrom rest_framework.views import APIView\nfrom django.shortcuts import get_object_or_404\n\nfrom ..models import PaymentRefund, UserPayment\nfrom ..serializers import PaymentRefundSerializer\n\nclass PaymentRefundView(APIView):\n def get(self, request):\n refunds = PaymentRefund.objects.all()\n refunds = list(refunds.values())\n return Response({\"Payment refunds\": refunds})\n \n def post(self, request):\n data = request.data.get('payment_refund')\n user_payment = get_object_or_404(UserPayment.objects.all(),\n id=data['user_payment_id']\n )\n\n serializer = PaymentRefundSerializer(data=data)\n if serializer.is_valid(raise_exception=True):\n refund_saved = serializer.save()\n return Response({\"Payment refund creation\": \"success\"})\n\n def put(self, request, pk):\n refund = get_object_or_404(PaymentRefund.objects.all(), pk=pk)\n data = request.data.get('payment_refund')\n user_payment = get_object_or_404(UserPayment.objects.all(),\n id=data['user_payment_id']\n )\n\n serializer = PaymentRefundSerializer(instance=refund, data=data, partial=True)\n if serializer.is_valid(raise_exception=True):\n payment_saved = serializer.save()\n return Response({\"Payment refund updated\": \"success\"})\n\n def delete(self, request, pk):\n refund = get_object_or_404(PaymentRefund.objects.all(), pk=pk)\n refund.delete()\n return Response({\"Payment refund deleted\": \"success\"})" }, { "alpha_fraction": 0.7757009267807007, "alphanum_fraction": 0.7757009267807007, "avg_line_length": 34.66666793823242, "blob_id": "344f8f2dbe6d02d929f3159d93dcfaf2192d1993", "content_id": "46c1bbb080b6c2e07c41c40afdb56033ba122d0f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 107, "license_type": "no_license", "max_line_length": 36, "num_lines": 3, "path": "/payments_ms/views/__init__.py", "repo_name": "excursiones/payments", "src_encoding": "UTF-8", "text": "from .user_payment_views import *\nfrom .company_payment_views import *\nfrom .payment_refund_views import *\n" }, { "alpha_fraction": 0.7350427508354187, "alphanum_fraction": 0.7606837749481201, "avg_line_length": 13.75, "blob_id": "6f19ab9721e91e0bb401fabc041acb09dd9ca3f4", "content_id": "b5f7239b3b3b8843c55d0683e625aa214e63e7d6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Dockerfile", "length_bytes": 117, 
"license_type": "no_license", "max_line_length": 35, "num_lines": 8, "path": "/Dockerfile", "repo_name": "excursiones/payments", "src_encoding": "UTF-8", "text": "FROM python:3.6\n\nENV PYTHONUNBUFFERED 1\n\nRUN mkdir /app\nWORKDIR /app\nCOPY . /app/\nRUN pip install -r requirements.txt" } ]
9