"""Place HLA calls into group for validation and presentation. Uses p-groups with identical protein sequences in the antigen binding domains: http://hla.alleles.org/alleles/p_groups.html HLA allele nomenclature: https://www.ebi.ac.uk/ipd/imgt/hla/ https://github.com/jrob119/IMGTHLA HLA sequences are from the 1000 genomes build 38 reference: ftp://ftp.1000genomes.ebi.ac.uk/vol1/ftp/technical/reference/GRCh38_reference_genome/ based on IMGT/HLA-3.18.0 """<import_stmt>toolz<as>tz<def_stmt>hla_protein name data<block_start>group=tz.get_in([data["genome_build"] name] pgroups)<if_stmt>group<block_start>base=name.split("*")[0]<line_sep>group="%s*%s"%(base group)<block_end><else_stmt><block_start>group=_from_name(name)<block_end><return>group<block_end><def_stmt>_from_name name<block_start>"""Parse the HLA base name (group + protein) from a full name. Separates out synonymous and non-coding indicators. http://hla.alleles.org/nomenclature/naming.html """<line_sep><return>":".join(name.split(":")[:2])<block_end>pgroups={}<line_sep>pgroups["hg38"]={'HLA-A*01:01:01:01':'01:01P' 'HLA-A*01:01:01:02N':'' 'HLA-A*01:01:38L':'01:01P' 'HLA-A*01:02':'' 'HLA-A*01:03':'' 'HLA-A*01:04N':'' 'HLA-A*01:09':'' 'HLA-A*01:11N':'' 'HLA-A*01:14':'' 'HLA-A*01:16N':'' 'HLA-A*01:20':'' 'HLA-A*02:01:01:01':'02:01P' 'HLA-A*02:01:01:02L':'02:01P' 'HLA-A*02:01:01:03':'02:01P' 'HLA-A*02:01:01:04':'02:01P' 'HLA-A*02:02:01':'' 'HLA-A*02:03:01':'02:03P' 'HLA-A*02:03:03':'02:03P' 'HLA-A*02:05:01':'02:05P' 'HLA-A*02:06:01':'' 'HLA-A*02:07:01':'02:07P' 'HLA-A*02:10':'02:10P' 'HLA-A*02:251':'' 'HLA-A*02:259':'' 'HLA-A*02:264':'02:03P' 'HLA-A*02:265':'02:07P' 'HLA-A*02:266':'02:01P' 'HLA-A*02:269':'' 'HLA-A*02:279':'' 'HLA-A*02:32N':'' 'HLA-A*02:376':'' 'HLA-A*02:43N':'' 'HLA-A*02:455':'02:01P' 'HLA-A*02:48':'' 'HLA-A*02:51':'' 'HLA-A*02:533':'' 'HLA-A*02:53N':'' 'HLA-A*02:57':'' 'HLA-A*02:60:01':'02:60P' 'HLA-A*02:65':'02:65P' 'HLA-A*02:68':'' 'HLA-A*02:77':'' 'HLA-A*02:81':'02:81P' 'HLA-A*02:89':'02:01P' 'HLA-A*02:95':'' 'HLA-A*03:01:01:01':'03:01P' 'HLA-A*03:01:01:02N':'' 'HLA-A*03:01:01:03':'03:01P' 'HLA-A*03:02:01':'03:02P' 'HLA-A*03:11N':'' 'HLA-A*03:21N':'' 'HLA-A*03:36N':'' 'HLA-A*11:01:01':'' 'HLA-A*11:01:18':'11:01P' 'HLA-A*11:02:01':'11:02P' 'HLA-A*11:05':'' 'HLA-A*11:110':'11:02P' 'HLA-A*11:25':'' 'HLA-A*11:50Q':'' 'HLA-A*11:60':'' 'HLA-A*11:69N':'' 'HLA-A*11:74':'' 'HLA-A*11:75':'' 'HLA-A*11:77':'11:02P' 'HLA-A*23:01:01':'23:01P' 'HLA-A*23:09':'' 'HLA-A*23:38N':'' 'HLA-A*24:02:01:01':'24:02P' 'HLA-A*24:02:01:02L':'24:02P' 'HLA-A*24:02:01:03':'24:02P' 'HLA-A*24:02:03Q':'24:02P' 'HLA-A*24:02:10':'24:02P' 'HLA-A*24:03:01':'24:03P' 'HLA-A*24:07:01':'24:07P' 'HLA-A*24:08':'' 'HLA-A*24:09N':'' 'HLA-A*24:10:01':'24:10P' 'HLA-A*24:11N':'' 'HLA-A*24:152':'' 'HLA-A*24:20':'' 'HLA-A*24:215':'' 'HLA-A*24:61':'' 'HLA-A*24:86N':'' 'HLA-A*25:01:01':'25:01P' 'HLA-A*26:01:01':'' 'HLA-A*26:11N':'' 'HLA-A*26:15':'' 'HLA-A*26:50':'' 'HLA-A*29:01:01:01':'29:01P' 'HLA-A*29:01:01:02N':'' 'HLA-A*29:02:01:01':'29:02P' 'HLA-A*29:02:01:02':'29:02P' 'HLA-A*29:46':'29:02P' 'HLA-A*30:01:01':'30:01P' 'HLA-A*30:02:01:01':'30:02P' 'HLA-A*30:02:01:02':'30:02P' 'HLA-A*30:04:01':'30:04P' 'HLA-A*30:89':'' 'HLA-A*31:01:02':'' 'HLA-A*31:01:23':'31:01P' 'HLA-A*31:04':'' 'HLA-A*31:14N':'' 'HLA-A*31:46':'31:01P' 'HLA-A*32:01:01':'32:01P' 'HLA-A*32:06':'' 'HLA-A*33:01:01':'33:01P' 'HLA-A*33:03:01':'33:03P' 'HLA-A*33:07':'' 'HLA-A*34:01:01':'34:01P' 'HLA-A*34:02:01':'34:02P' 'HLA-A*36:01':'' 'HLA-A*43:01':'' 'HLA-A*66:01:01':'66:01P' 'HLA-A*66:17':'66:01P' 
'HLA-A*68:01:01:01':'68:01P' 'HLA-A*68:01:01:02':'68:01P' 'HLA-A*68:01:02:01':'68:01P' 'HLA-A*68:01:02:02':'68:01P' 'HLA-A*68:02:01:01':'68:02P' 'HLA-A*68:02:01:02':'68:02P' 'HLA-A*68:02:01:03':'68:02P' 'HLA-A*68:02:02':'68:02P' 'HLA-A*68:03:01':'68:03P' 'HLA-A*68:08:01':'68:08P' 'HLA-A*68:113':'' 'HLA-A*68:17':'' 'HLA-A*68:18N':'' 'HLA-A*68:22':'' 'HLA-A*68:71':'' 'HLA-A*69:01':'' 'HLA-A*74:01':'' 'HLA-A*74:02:01:01':'74:01P' 'HLA-A*74:02:01:02':'74:01P' 'HLA-A*80:01:01:01':'80:01P' 'HLA-A*80:01:01:02':'80:01P' 'HLA-B*07:02:01':'07:02P' 'HLA-B*07:05:01':'07:05P' 'HLA-B*07:06':'07:05P' 'HLA-B*07:156':'07:02P' 'HLA-B*07:33:01':'07:33P' 'HLA-B*07:41':'' 'HLA-B*07:44':'07:02P' 'HLA-B*07:50':'' 'HLA-B*08:01:01':'08:01P' 'HLA-B*08:08N':'' 'HLA-B*08:132':'' 'HLA-B*08:134':'' 'HLA-B*08:19N':'' 'HLA-B*08:20':'' 'HLA-B*08:33':'' 'HLA-B*08:79':'' 'HLA-B*13:01:01':'13:01P' 'HLA-B*13:02:01':'13:02P' 'HLA-B*13:02:03':'13:02P' 'HLA-B*13:02:09':'13:02P' 'HLA-B*13:08':'' 'HLA-B*13:15':'' 'HLA-B*13:25':'' 'HLA-B*14:01:01':'14:01P' 'HLA-B*14:02:01':'' 'HLA-B*14:07N':'' 'HLA-B*15:01:01:01':'15:01P' 'HLA-B*15:01:01:02N':'' 'HLA-B*15:01:01:03':'15:01P' 'HLA-B*15:02:01':'15:02P' 'HLA-B*15:03:01':'' 'HLA-B*15:04:01':'15:04P' 'HLA-B*15:07:01':'15:07P' 'HLA-B*15:108':'' 'HLA-B*15:10:01':'15:10P' 'HLA-B*15:11:01':'15:11P' 'HLA-B*15:13:01':'15:13P' 'HLA-B*15:16:01':'15:16P' 'HLA-B*15:17:01:01':'15:17P' 'HLA-B*15:17:01:02':'15:17P' 'HLA-B*15:18:01':'15:18P' 'HLA-B*15:220':'15:03P' 'HLA-B*15:25:01':'15:25P' 'HLA-B*15:27:01':'15:27P' 'HLA-B*15:32:01':'15:32P' 'HLA-B*15:42':'' 'HLA-B*15:58':'' 'HLA-B*15:66':'' 'HLA-B*15:77':'' 'HLA-B*15:83':'' 'HLA-B*18:01:01:01':'18:01P' 'HLA-B*18:01:01:02':'18:01P' 'HLA-B*18:02':'' 'HLA-B*18:03':'' 'HLA-B*18:17N':'' 'HLA-B*18:26':'' 'HLA-B*18:94N':'' 'HLA-B*27:04:01':'27:04P' 'HLA-B*27:05:02':'27:05P' 'HLA-B*27:05:18':'27:05P' 'HLA-B*27:06':'' 'HLA-B*27:07:01':'27:07P' 'HLA-B*27:131':'' 'HLA-B*27:24':'' 'HLA-B*27:25':'' 'HLA-B*27:32':'' 'HLA-B*35:01:01:01':'35:01P' 'HLA-B*35:01:01:02':'35:01P' 'HLA-B*35:01:22':'35:01P' 'HLA-B*35:02:01':'35:02P' 'HLA-B*35:03:01':'35:03P' 'HLA-B*35:05:01':'35:05P' 'HLA-B*35:08:01':'35:08P' 'HLA-B*35:14:02':'35:14P' 'HLA-B*35:241':'35:01P' 'HLA-B*35:41':'' 'HLA-B*37:01:01':'37:01P' 'HLA-B*37:01:05':'37:01P' 'HLA-B*38:01:01':'38:01P' 'HLA-B*38:02:01':'38:02P' 'HLA-B*38:14':'' 'HLA-B*39:01:01:01':'39:01P' 'HLA-B*39:01:01:02L':'39:01P' 'HLA-B*39:01:01:03':'39:01P' 'HLA-B*39:01:03':'39:01P' 'HLA-B*39:01:16':'39:01P' 'HLA-B*39:01:21':'39:01P' 'HLA-B*39:05:01':'39:05P' 'HLA-B*39:06:02':'39:06P' 'HLA-B*39:10:01':'39:10P' 'HLA-B*39:13:02':'39:13P' 'HLA-B*39:14':'' 'HLA-B*39:34':'' 'HLA-B*39:38Q':'' 'HLA-B*40:01:01':'40:01P' 'HLA-B*40:01:02':'40:01P' 'HLA-B*40:02:01':'40:02P' 'HLA-B*40:03':'40:03P' 'HLA-B*40:06:01:01':'40:06P' 'HLA-B*40:06:01:02':'40:06P' 'HLA-B*40:10:01':'' 'HLA-B*40:150':'40:01P' 'HLA-B*40:40':'40:40P' 'HLA-B*40:72:01':'40:72P' 'HLA-B*40:79':'' 'HLA-B*41:01:01':'41:01P' 'HLA-B*41:02:01':'41:02P' 'HLA-B*42:01:01':'42:01P' 'HLA-B*42:02':'' 'HLA-B*42:08':'' 'HLA-B*44:02:01:01':'44:02P' 'HLA-B*44:02:01:02S':'44:02P' 'HLA-B*44:02:01:03':'44:02P' 'HLA-B*44:02:17':'44:02P' 'HLA-B*44:02:27':'44:02P' 'HLA-B*44:03:01':'' 'HLA-B*44:03:02':'44:03P' 'HLA-B*44:04':'' 'HLA-B*44:09':'' 'HLA-B*44:138Q':'' 'HLA-B*44:150':'' 'HLA-B*44:23N':'' 'HLA-B*44:26':'' 'HLA-B*44:46':'' 'HLA-B*44:49':'' 'HLA-B*44:56N':'' 'HLA-B*45:01:01':'45:01P' 'HLA-B*45:04':'' 'HLA-B*46:01:01':'46:01P' 'HLA-B*46:01:05':'46:01P' 'HLA-B*47:01:01:01':'47:01P' 'HLA-B*47:01:01:02':'47:01P' 
'HLA-B*48:01:01':'48:01P' 'HLA-B*48:03:01':'48:03P' 'HLA-B*48:04':'' 'HLA-B*48:08':'' 'HLA-B*49:01:01':'49:01P' 'HLA-B*49:32':'' 'HLA-B*50:01:01':'50:01P' 'HLA-B*51:01:01':'' 'HLA-B*51:01:02':'51:01P' 'HLA-B*51:02:01':'51:02P' 'HLA-B*51:07:01':'51:07P' 'HLA-B*51:42':'' 'HLA-B*52:01:01:01':'52:01P' 'HLA-B*52:01:01:02':'52:01P' 'HLA-B*52:01:01:03':'52:01P' 'HLA-B*52:01:02':'52:01P' 'HLA-B*53:01:01':'53:01P' 'HLA-B*53:11':'' 'HLA-B*54:01:01':'54:01P' 'HLA-B*54:18':'' 'HLA-B*55:01:01':'55:01P' 'HLA-B*55:01:03':'55:01P' 'HLA-B*55:02:01':'' 'HLA-B*55:12':'' 'HLA-B*55:24':'' 'HLA-B*55:48':'' 'HLA-B*56:01:01':'' 'HLA-B*56:03':'' 'HLA-B*56:04':'' 'HLA-B*57:01:01':'57:01P' 'HLA-B*57:03:01':'57:03P' 'HLA-B*57:06':'' 'HLA-B*57:11':'' 'HLA-B*57:29':'57:01P' 'HLA-B*58:01:01':'' 'HLA-B*58:31N':'' 'HLA-B*59:01:01:01':'59:01P' 'HLA-B*59:01:01:02':'59:01P' 'HLA-B*67:01:01':'67:01P' 'HLA-B*67:01:02':'67:01P' 'HLA-B*67:02':'' 'HLA-B*73:01':'' 'HLA-B*78:01:01':'78:01P' 'HLA-B*81:01':'81:01P' 'HLA-B*82:02:01':'82:02P' 'HLA-C*01:02:01':'01:02P' 'HLA-C*01:02:11':'01:02P' 'HLA-C*01:02:29':'01:02P' 'HLA-C*01:02:30':'01:02P' 'HLA-C*01:03':'01:03P' 'HLA-C*01:06':'' 'HLA-C*01:08':'' 'HLA-C*01:14':'' 'HLA-C*01:21':'' 'HLA-C*01:30':'' 'HLA-C*01:40':'01:02P' 'HLA-C*02:02:02:01':'02:02P' 'HLA-C*02:02:02:02':'02:02P' 'HLA-C*02:10':'02:02P' 'HLA-C*02:11':'' 'HLA-C*02:16:02':'02:16P' 'HLA-C*02:69':'02:02P' 'HLA-C*02:85':'' 'HLA-C*02:86':'' 'HLA-C*02:87':'' 'HLA-C*03:02:01':'03:02P' 'HLA-C*03:02:02:01':'03:02P' 'HLA-C*03:02:02:02':'03:02P' 'HLA-C*03:02:02:03':'03:02P' 'HLA-C*03:03:01':'03:03P' 'HLA-C*03:04:01:01':'03:04P' 'HLA-C*03:04:01:02':'03:04P' 'HLA-C*03:04:02':'03:04P' 'HLA-C*03:04:04':'03:04P' 'HLA-C*03:05':'' 'HLA-C*03:06':'' 'HLA-C*03:100':'03:04P' 'HLA-C*03:13:01':'03:13P' 'HLA-C*03:20N':'' 'HLA-C*03:219':'03:04P' 'HLA-C*03:261':'' 'HLA-C*03:40:01':'03:40P' 'HLA-C*03:41:02':'03:41P' 'HLA-C*03:46':'' 'HLA-C*03:61':'' 'HLA-C*04:01:01:01':'04:01P' 'HLA-C*04:01:01:02':'04:01P' 'HLA-C*04:01:01:03':'04:01P' 'HLA-C*04:01:01:04':'04:01P' 'HLA-C*04:01:01:05':'04:01P' 'HLA-C*04:01:62':'04:01P' 'HLA-C*04:03:01':'04:03P' 'HLA-C*04:06':'' 'HLA-C*04:09N':'' 'HLA-C*04:128':'' 'HLA-C*04:161':'04:01P' 'HLA-C*04:177':'' 'HLA-C*04:70':'' 'HLA-C*04:71':'' 'HLA-C*05:01:01:01':'05:01P' 'HLA-C*05:01:01:02':'05:01P' 'HLA-C*05:08':'' 'HLA-C*05:09:01':'05:09P' 'HLA-C*05:93':'05:01P' 'HLA-C*06:02:01:01':'06:02P' 'HLA-C*06:02:01:02':'06:02P' 'HLA-C*06:02:01:03':'06:02P' 'HLA-C*06:23':'' 'HLA-C*06:24':'' 'HLA-C*06:46N':'' 'HLA-C*07:01:01:01':'07:01P' 'HLA-C*07:01:01:02':'07:01P' 'HLA-C*07:01:02':'07:01P' 'HLA-C*07:01:19':'07:01P' 'HLA-C*07:01:27':'07:01P' 'HLA-C*07:01:45':'07:01P' 'HLA-C*07:02:01:01':'07:02P' 'HLA-C*07:02:01:02':'07:02P' 'HLA-C*07:02:01:03':'07:02P' 'HLA-C*07:02:01:04':'07:02P' 'HLA-C*07:02:01:05':'07:02P' 'HLA-C*07:02:05':'07:02P' 'HLA-C*07:02:06':'07:02P' 'HLA-C*07:02:64':'07:02P' 'HLA-C*07:04:01':'07:04P' 'HLA-C*07:04:02':'07:04P' 'HLA-C*07:06':'07:01P' 'HLA-C*07:149':'' 'HLA-C*07:18':'07:01P' 'HLA-C*07:19':'' 'HLA-C*07:26':'' 'HLA-C*07:30':'' 'HLA-C*07:32N':'' 'HLA-C*07:384':'' 'HLA-C*07:385':'' 'HLA-C*07:386':'' 'HLA-C*07:391':'' 'HLA-C*07:392':'' 'HLA-C*07:49':'' 'HLA-C*07:56:02':'07:56P' 'HLA-C*07:66':'07:02P' 'HLA-C*07:67':'' 'HLA-C*08:01:01':'08:01P' 'HLA-C*08:01:03':'08:01P' 'HLA-C*08:02:01:01':'08:02P' 'HLA-C*08:02:01:02':'08:02P' 'HLA-C*08:03:01':'08:03P' 'HLA-C*08:04:01':'08:04P' 'HLA-C*08:112':'' 'HLA-C*08:20':'08:01P' 'HLA-C*08:21':'' 'HLA-C*08:22':'08:01P' 'HLA-C*08:24':'08:01P' 'HLA-C*08:27':'' 
'HLA-C*08:36N':'' 'HLA-C*08:40':'08:03P' 'HLA-C*08:41':'' 'HLA-C*08:62':'' 'HLA-C*12:02:02':'12:02P' 'HLA-C*12:03:01:01':'12:03P' 'HLA-C*12:03:01:02':'12:03P' 'HLA-C*12:08':'' 'HLA-C*12:13':'' 'HLA-C*12:19':'' 'HLA-C*12:22':'' 'HLA-C*12:99':'' 'HLA-C*14:02:01':'14:02P' 'HLA-C*14:03':'' 'HLA-C*14:21N':'' 'HLA-C*14:23':'14:02P' 'HLA-C*15:02:01':'' 'HLA-C*15:05:01':'15:05P' 'HLA-C*15:05:02':'15:05P' 'HLA-C*15:13':'15:02P' 'HLA-C*15:16':'' 'HLA-C*15:17':'' 'HLA-C*15:96Q':'' 'HLA-C*16:01:01':'' 'HLA-C*16:02:01':'16:02P' 'HLA-C*16:04:01':'16:04P' 'HLA-C*17:01:01:01':'17:01P' 'HLA-C*17:01:01:02':'17:01P' 'HLA-C*17:01:01:03':'17:01P' 'HLA-C*17:03':'17:01P' 'HLA-C*18:01':'18:01P' 'HLA-DQA1*01:01:02':'01:01P' 'HLA-DQA1*01:02:01:01':'01:02P' 'HLA-DQA1*01:02:01:02':'01:02P' 'HLA-DQA1*01:02:01:03':'01:02P' 'HLA-DQA1*01:02:01:04':'01:02P' 'HLA-DQA1*01:03:01:01':'01:03P' 'HLA-DQA1*01:03:01:02':'01:03P' 'HLA-DQA1*01:04:01:01':'01:01P' 'HLA-DQA1*01:04:01:02':'01:01P' 'HLA-DQA1*01:05:01':'01:01P' 'HLA-DQA1*01:07':'' 'HLA-DQA1*01:10':'' 'HLA-DQA1*01:11':'01:02P' 'HLA-DQA1*02:01':'' 'HLA-DQA1*03:01:01':'03:01P' 'HLA-DQA1*03:02':'03:01P' 'HLA-DQA1*03:03:01':'03:01P' 'HLA-DQA1*04:01:02:01':'04:01P' 'HLA-DQA1*04:01:02:02':'04:01P' 'HLA-DQA1*04:02':'04:01P' 'HLA-DQA1*05:01:01:01':'05:01P' 'HLA-DQA1*05:01:01:02':'05:01P' 'HLA-DQA1*05:03':'05:01P' 'HLA-DQA1*05:05:01:01':'05:01P' 'HLA-DQA1*05:05:01:02':'05:01P' 'HLA-DQA1*05:05:01:03':'05:01P' 'HLA-DQA1*05:11':'05:01P' 'HLA-DQA1*06:01:01':'06:01P' 'HLA-DQB1*02:01:01':'02:01P' 'HLA-DQB1*02:02:01':'' 'HLA-DQB1*03:01:01:01':'03:01P' 'HLA-DQB1*03:01:01:02':'03:01P' 'HLA-DQB1*03:01:01:03':'03:01P' 'HLA-DQB1*03:02:01':'03:02P' 'HLA-DQB1*03:03:02:01':'03:03P' 'HLA-DQB1*03:03:02:02':'03:03P' 'HLA-DQB1*03:03:02:03':'03:03P' 'HLA-DQB1*03:05:01':'03:05P' 'HLA-DQB1*05:01:01:01':'05:01P' 'HLA-DQB1*05:01:01:02':'05:01P' 'HLA-DQB1*05:03:01:01':'05:03P' 'HLA-DQB1*05:03:01:02':'05:03P' 'HLA-DQB1*06:01:01':'06:01P' 'HLA-DQB1*06:02:01':'06:02P' 'HLA-DQB1*06:03:01':'06:03P' 'HLA-DQB1*06:09:01':'06:09P' 'HLA-DRB1*01:01:01':'01:01P' 'HLA-DRB1*01:02:01':'01:02P' 'HLA-DRB1*03:01:01:01':'03:01P' 'HLA-DRB1*03:01:01:02':'03:01P' 'HLA-DRB1*04:03:01':'04:03P' 'HLA-DRB1*07:01:01:01':'07:01P' 'HLA-DRB1*07:01:01:02':'07:01P' 'HLA-DRB1*08:03:02':'08:03P' 'HLA-DRB1*09:21':'09:01P' 'HLA-DRB1*10:01:01':'10:01P' 'HLA-DRB1*11:01:01':'11:01P' 'HLA-DRB1*11:01:02':'11:01P' 'HLA-DRB1*11:04:01':'11:04P' 'HLA-DRB1*12:01:01':'12:01P' 'HLA-DRB1*12:17':'12:01P' 'HLA-DRB1*13:01:01':'13:01P' 'HLA-DRB1*13:02:01':'13:02P' 'HLA-DRB1*14:05:01':'14:05P' 'HLA-DRB1*14:54:01':'14:01P' 'HLA-DRB1*15:01:01:01':'15:01P' 'HLA-DRB1*15:01:01:02':'15:01P' 'HLA-DRB1*15:01:01:03':'15:01P' 'HLA-DRB1*15:01:01:04':'15:01P' 'HLA-DRB1*15:02:01':'15:02P' 'HLA-DRB1*15:03:01:01':'15:03P' 'HLA-DRB1*15:03:01:02':'15:03P' 'HLA-DRB1*16:02:01':'16:02P'}<line_sep>
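# --- Hedged usage sketch (not part of the original module) ---------------------
# Shows how hla_protein() resolves an allele to its p-group for the hg38 build,
# falling back to the two-field protein name when no p-group is recorded. The
# shape of `data` (a dict carrying "genome_build") is taken from the code above.
if __name__ == "__main__":
    data = {"genome_build": "hg38"}
    print(hla_protein("HLA-A*01:01:01:01", data))  # -> "HLA-A*01:01P"
    print(hla_protein("HLA-A*01:02", data))        # empty p-group entry -> falls back to "HLA-A*01:02"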
# Copyright (c) The OpenTracing Authors. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. <import_from_stmt>threading Lock<import_stmt>time<import_stmt>opentracing<import_from_stmt>opentracing Format Tracer<import_from_stmt>opentracing UnsupportedFormatException<import_from_stmt>opentracing.scope_managers ThreadLocalScopeManager<import_from_stmt>.context SpanContext<import_from_stmt>.span MockSpan<class_stmt>MockTracer(Tracer)<block_start>"""MockTracer makes it easy to test the semantics of OpenTracing instrumentation. By using a MockTracer as a :class:`~opentracing.Tracer` implementation for tests, a developer can assert that :class:`~opentracing.Span` properties and relationships with other **Spans** are defined as expected by instrumentation code. By default, MockTracer registers propagators for :attr:`Format.TEXT_MAP`, :attr:`Format.HTTP_HEADERS` and :attr:`Format.BINARY`. The user should call :func:`register_propagator()` for each additional inject/extract format. """<def_stmt>__init__ self scope_manager=<none><block_start>"""Initialize a MockTracer instance."""<line_sep>scope_manager=ThreadLocalScopeManager()<if>scope_manager<is><none><else>scope_manager<line_sep>super(MockTracer self).__init__(scope_manager)<line_sep>self._propagators={}<line_sep>self._finished_spans=[]<line_sep>self._spans_lock=Lock()<line_sep># Simple-as-possible (consecutive for repeatability) id generation. self._next_id=0<line_sep>self._next_id_lock=Lock()<line_sep>self._register_required_propagators()<block_end><def_stmt>register_propagator self format propagator<block_start>"""Register a propagator with this MockTracer. :param string format: a :class:`~opentracing.Format` identifier like :attr:`~opentracing.Format.TEXT_MAP` :param **Propagator** propagator: a **Propagator** instance to handle inject/extract calls involving `format` """<line_sep>self._propagators[format]=propagator<block_end><def_stmt>_register_required_propagators self<block_start><import_from_stmt>.text_propagator TextPropagator<import_from_stmt>.binary_propagator BinaryPropagator<line_sep>self.register_propagator(Format.TEXT_MAP TextPropagator())<line_sep>self.register_propagator(Format.HTTP_HEADERS TextPropagator())<line_sep>self.register_propagator(Format.BINARY BinaryPropagator())<block_end><def_stmt>finished_spans self<block_start>"""Return a copy of all finished **Spans** started by this MockTracer (since construction or the last call to :meth:`~MockTracer.reset()`) :rtype: list :return: a copy of the finished **Spans**. 
"""<with_stmt>self._spans_lock<block_start><return>list(self._finished_spans)<block_end><block_end><def_stmt>reset self<block_start>"""Clear the finished **Spans** queue. Note that this does **not** have any effect on **Spans** created by MockTracer that have not finished yet; those will still be enqueued in :meth:`~MockTracer.finished_spans()` when they :func:`finish()`. """<with_stmt>self._spans_lock<block_start>self._finished_spans=[]<block_end><block_end><def_stmt>_append_finished_span self span<block_start><with_stmt>self._spans_lock<block_start>self._finished_spans.append(span)<block_end><block_end><def_stmt>_generate_id self<block_start><with_stmt>self._next_id_lock<block_start>self._next_id<augadd>1<line_sep><return>self._next_id<block_end><block_end><def_stmt>start_active_span self operation_name child_of=<none> references=<none> tags=<none> start_time=<none> ignore_active_span=<false> finish_on_close=<true># create a new Span <block_start>span=self.start_span(operation_name=operation_name child_of=child_of references=references tags=tags start_time=start_time ignore_active_span=ignore_active_span )<line_sep><return>self.scope_manager.activate(span finish_on_close)<block_end><def_stmt>start_span self operation_name=<none> child_of=<none> references=<none> tags=<none> start_time=<none> ignore_active_span=<false><block_start>start_time=time.time()<if>start_time<is><none><else>start_time<line_sep># See if we have a parent_ctx in `references` parent_ctx=<none><if_stmt>child_of<is><not><none><block_start>parent_ctx=(child_of<if>isinstance(child_of opentracing.SpanContext)<else>child_of.context)<block_end><elif_stmt>references<is><not><none><and>len(references)<g>0# TODO only the first reference is currently used <block_start>parent_ctx=references[0].referenced_context<block_end># retrieve the active SpanContext <if_stmt><not>ignore_active_span<and>parent_ctx<is><none><block_start>scope=self.scope_manager.active<if_stmt>scope<is><not><none><block_start>parent_ctx=scope.span.context<block_end><block_end># Assemble the child ctx ctx=SpanContext(span_id=self._generate_id())<if_stmt>parent_ctx<is><not><none><block_start><if_stmt>parent_ctx._baggage<is><not><none><block_start>ctx._baggage=parent_ctx._baggage.copy()<block_end>ctx.trace_id=parent_ctx.trace_id<block_end><else_stmt><block_start>ctx.trace_id=self._generate_id()<block_end># Tie it all together <return>MockSpan(self operation_name=operation_name context=ctx parent_id=(<none><if>parent_ctx<is><none><else>parent_ctx.span_id) tags=tags start_time=start_time)<block_end><def_stmt>inject self span_context format carrier<block_start><if_stmt>format<in>self._propagators<block_start>self._propagators[format].inject(span_context carrier)<block_end><else_stmt><block_start><raise>UnsupportedFormatException()<block_end><block_end><def_stmt>extract self format carrier<block_start><if_stmt>format<in>self._propagators<block_start><return>self._propagators[format].extract(carrier)<block_end><else_stmt><block_start><raise>UnsupportedFormatException()<block_end><block_end><block_end>
from .drawing import *
from .general import *
from .multi_exchange import create_multi_exchange_graph, create_weighted_multi_exchange_digraph, multi_graph_to_log_graph
from .single_exchange import load_exchange_graph, create_exchange_graph, FeesNotAvailable
from .misc import last_index_in_list, next_to_each_other
from .data_structures import StackSet, PrioritySet, Collections
from .graph_utils import get_greatest_edge_in_bunch, get_least_edge_in_bunch
from .wss_graph_builder import *
# -*- coding: utf-8 -*- """ Sources: Croatian Counties: http://en.wikipedia.org/wiki/ISO_3166-2:HR Croatia doesn't have official abbreviations for counties. The ones provided are in common use. """<import_from_future_stmt> unicode_literals<import_from_stmt>django.utils.translation ugettext_lazy<as>_<line_sep>HR_COUNTY_CHOICES=(('GZG' _('Grad Zagreb')) ('BBŽ' _('Bjelovarsko-bilogorska županija')) ('BPŽ' _('Brodsko-posavska županija')) ('DNŽ' _('Dubrovačko-neretvanska županija')) ('IŽ' _('Istarska županija')) ('KŽ' _('Karlovačka županija')) ('KKŽ' _('Koprivničko-križevačka županija')) ('KZŽ' _('Krapinsko-zagorska županija')) ('LSŽ' _('Ličko-senjska županija')) ('MŽ' _('Međimurska županija')) ('OBŽ' _('Osječko-baranjska županija')) ('PSŽ' _('Požeško-slavonska županija')) ('PGŽ' _('Primorsko-goranska županija')) ('SMŽ' _('Sisačko-moslavačka županija')) ('SDŽ' _('Splitsko-dalmatinska županija')) ('ŠKŽ' _('Šibensko-kninska županija')) ('VŽ' _('Varaždinska županija')) ('VPŽ' _('Virovitičko-podravska županija')) ('VSŽ' _('Vukovarsko-srijemska županija')) ('ZDŽ' _('Zadarska županija')) ('ZGŽ' _('Zagrebačka županija')) )<line_sep>""" Sources: http://hr.wikipedia.org/wiki/Dodatak:Popis_registracijskih_oznaka_za_cestovna_vozila_u_Hrvatskoj Only common license plate prefixes are provided. Special cases and obsolete prefixes are omitted. """<line_sep>HR_LICENSE_PLATE_PREFIX_CHOICES=(('BJ' 'BJ') ('BM' 'BM') ('ČK' 'ČK') ('DA' 'DA') ('DE' 'DE') ('DJ' 'DJ') ('DU' 'DU') ('GS' 'GS') ('IM' 'IM') ('KA' 'KA') ('KC' 'KC') ('KR' 'KR') ('KT' 'KT') ('KŽ' 'KŽ') ('MA' 'MA') ('NA' 'NA') ('NG' 'NG') ('OG' 'OG') ('OS' 'OS') ('PU' 'PU') ('PŽ' 'PŽ') ('RI' 'RI') ('SB' 'SB') ('SK' 'SK') ('SL' 'SL') ('ST' 'ST') ('ŠI' 'ŠI') ('VK' 'VK') ('VT' 'VT') ('VU' 'VU') ('VŽ' 'VŽ') ('ZD' 'ZD') ('ZG' 'ZG') ('ŽU' 'ŽU') )<line_sep>""" The list includes county and cellular network phone number prefixes. """<line_sep>HR_PHONE_NUMBER_PREFIX_CHOICES=(('1' '01') ('20' '020') ('21' '021') ('22' '022') ('23' '023') ('31' '031') ('32' '032') ('33' '033') ('34' '034') ('35' '035') ('40' '040') ('42' '042') ('43' '043') ('44' '044') ('47' '047') ('48' '048') ('49' '049') ('51' '051') ('52' '052') ('53' '053') ('91' '091') ('92' '092') ('95' '095') ('97' '097') ('98' '098') ('99' '099') )<line_sep>
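# --- Hedged usage sketch (not part of the original module) ---------------------
# These choice tuples are meant to plug into standard Django form or model fields;
# the form below is illustrative only and assumes a configured Django project.
#
#     from django import forms
#
#     class HRAddressForm(forms.Form):
#         county = forms.ChoiceField(choices=HR_COUNTY_CHOICES)
#         plate_prefix = forms.ChoiceField(choices=HR_LICENSE_PLATE_PREFIX_CHOICES)
#         phone_prefix = forms.ChoiceField(choices=HR_PHONE_NUMBER_PREFIX_CHOICES)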
<import_stmt>dash<import_stmt>dash_core_components<as>dcc<import_stmt>dash_html_components<as>html<import_stmt>dash_table_experiments<as>dt<import_stmt>pandas<as>pd<import_stmt>plotly<import_from_stmt>dash.dependencies Input Output State<import_from_stmt>plotly graph_objs<as>go<import_from_stmt>OnePy.sys_module.metabase_env OnePyEnvBase<line_sep>TRADE_LOG=OnePyEnvBase.full_trade_log<line_sep>APP=dash.Dash()<line_sep>APP.scripts.config.serve_locally=<true><line_sep>APP.layout=html.Div([html.H4('OnePy Trade Log Analysis') dt.DataTable(rows=TRADE_LOG.to_dict('records') row_selectable=<true> filterable=<true> sortable=<true> selected_row_indices=[] id='trade_log') dcc.Graph(id='drawdown_pnl') dcc.Graph(id='run_up_pnl') ] className="container")<line_sep>@APP.callback(Output('trade_log' 'selected_row_indices') [Input('drawdown_pnl' 'clickData')] [State('trade_log' 'selected_row_indices')])<def_stmt>update_selected_row_indices clickData selected_row_indices<block_start><if_stmt>clickData<block_start><for_stmt>point clickData['points']<block_start><if_stmt>point['pointNumber']<in>selected_row_indices<block_start>selected_row_indices.remove(point['pointNumber'])<block_end><else_stmt><block_start>selected_row_indices.append(point['pointNumber'])<block_end><block_end><block_end><return>selected_row_indices<block_end>@APP.callback(Output('drawdown_pnl' 'figure') [Input('trade_log' 'rows') Input('trade_log' 'selected_row_indices')])<def_stmt>update_run_up_figure rows selected_row_indices<block_start>dff=pd.DataFrame(rows)<line_sep>profit_diff=dff.loc[dff.returns_diff<g>0]<line_sep>loss_diff=dff.loc[dff.returns_diff<l>0]<line_sep>fig=plotly.tools.make_subplots(rows=1 cols=1 shared_xaxes=<true>)<line_sep>fig['layout'].update(dict(title='Profit & Loss vs Run-up'))<line_sep>fig['layout']['xaxis'].update(dict(title='Run-up(%)'))<line_sep>fig['layout']['yaxis'].update(dict(title='Profit & Loss(%)'))<line_sep>fig.append_trace({'x':profit_diff['run_up']<times>100 'y':profit_diff['returns_diff']<times>100 'text':profit_diff.entry_date+' to '+profit_diff.exit_date 'type':'scatter' 'marker':dict(color='black') 'mode':'markers' 'name':'win' 'line':{'width':1}} 1 1)<line_sep>fig.append_trace({'x':loss_diff['run_up']<times>100 'y':-loss_diff['returns_diff']<times>100 'type':'scatter' 'text':loss_diff.entry_date+' to '+loss_diff.exit_date 'marker':dict(color='red') 'mode':'markers' 'name':'lose' 'line':{'width':1}} 1 1)<line_sep>fig.append_trace({'x':[0 10] 'y':[0 10] 'type':'scatter' 'mode':'lines' 'name':'Win diagonal' 'line':{'width':1}} 1 1)<line_sep><return>fig<block_end>@APP.callback(Output('run_up_pnl' 'figure') [Input('trade_log' 'rows') Input('trade_log' 'selected_row_indices')])<def_stmt>update__drawdown_figure rows selected_row_indices<block_start>dff=pd.DataFrame(rows)<line_sep>profit_diff=dff.loc[dff.returns_diff<g>0]<line_sep>loss_diff=dff.loc[dff.returns_diff<l>0]<line_sep>fig=plotly.tools.make_subplots(rows=1 cols=1 shared_xaxes=<true>)<line_sep>fig['layout'].update(dict(title='Profit & Loss vs Drawdown'))<line_sep>fig['layout']['xaxis'].update(dict(title='Drawdown(%)'))<line_sep>fig['layout']['yaxis'].update(dict(title='Profit & Loss(%)'))<line_sep>fig.append_trace({'x':profit_diff['drawdown']<times>100 'y':profit_diff['returns_diff']<times>100 'type':'scatter' 'marker':dict(color='black') 'text':profit_diff.entry_date+' to '+profit_diff.exit_date 'mode':'markers' 'name':'win' 'line':{'width':1}} 1 1)<line_sep>fig.append_trace({'x':loss_diff['drawdown']<times>100 
'y':-loss_diff['returns_diff']<times>100 'text':loss_diff.entry_date+' to '+loss_diff.exit_date 'type':'scatter' 'marker':dict(color='red') 'mode':'markers' 'name':'lose' 'line':{'width':1}} 1 1)<line_sep>fig.append_trace({'x':[0 10] 'y':[0 10] 'type':'scatter' 'mode':'lines' 'name':'Loss diagonal' 'line':{'width':1}} 1 1)<line_sep><return>fig<block_end><if_stmt>__name__<eq>'__main__'<block_start>APP.run_server(debug=<true>)<block_end>
<import_stmt>os<import_stmt>tempfile<import_stmt>unittest<import_stmt>gtirb<line_sep>IR_FILE=tempfile.mktemp(suffix=".gtirb")<class_stmt>IRTest(unittest.TestCase)<block_start><def_stmt>__init__ self *args **kwargs<block_start>super().__init__(*args **kwargs)<line_sep>ir=gtirb.IR()<line_sep>m=gtirb.Module(binary_path="binary_path" file_format=gtirb.Module.FileFormat.RAW isa=gtirb.Module.ISA.ValidButUnsupported name="name" preferred_addr=1 rebase_delta=2 ir=ir )<line_sep>s=gtirb.Section(name="name" flags=(gtirb.Section.Flag.Executable gtirb.Section.Flag.Readable gtirb.Section.Flag.Loaded gtirb.Section.Flag.Initialized ) module=m )<line_sep>bi=gtirb.ByteInterval(address=0 size=10 contents=b"abcd" section=s)<line_sep>cb=gtirb.CodeBlock(size=4 offset=0 decode_mode=1 byte_interval=bi)<line_sep>_=gtirb.DataBlock(size=6 offset=4 byte_interval=bi)<line_sep>sym=gtirb.Symbol(name="name" payload=cb module=m)<line_sep>sac=gtirb.SymAddrConst(0 sym {gtirb.SymbolicExpression.Attribute.Part1})<line_sep>bi.symbolic_expressions[2]=sac<line_sep>p=gtirb.ProxyBlock(module=m)<line_sep>ir.cfg.add(gtirb.Edge(cb p gtirb.Edge.Label(type=gtirb.Edge.Type.Branch conditional=<false> direct=<true>) ))<line_sep>ir.cfg.add(gtirb.Edge(p p))<line_sep>m.aux_data["key"]=gtirb.AuxData(gtirb.Offset(s 777) "Offset")<line_sep>ir.aux_data["key"]=gtirb.AuxData("value" "string")<line_sep>self.ir=ir<block_end><def_stmt>setUp self<block_start>self.ir.save_protobuf(IR_FILE)<block_end><def_stmt>tearDown self<block_start>os.remove(IR_FILE)<block_end><def_stmt>test_ir_protobuf_load self<block_start>new_ir=gtirb.IR.load_protobuf(IR_FILE)<line_sep>self.assertTrue(self.ir.deep_eq(new_ir))<line_sep>self.assertNotEqual(self.ir.modules[0].aux_data["key"].data new_ir.modules[0].aux_data["key"].data )<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>unittest.main()<block_end>
<import_stmt>numpy<as>np<import_stmt>devito<as>dv<import_from_stmt>devito.builtins.utils MPIReduction<line_sep>__all__=['norm' 'sumall' 'inner' 'mmin' 'mmax']<line_sep>@dv.switchconfig(log_level='ERROR')<def_stmt>norm f order=2<block_start>""" Compute the norm of a Function. Parameters ---------- f : Function Input Function. order : int, optional The order of the norm. Defaults to 2. """<line_sep>Pow=dv.finite_differences.differentiable.Pow<line_sep>kwargs={}<if_stmt>f.is_TimeFunction<and>f._time_buffering<block_start>kwargs[f.time_dim.max_name]=f._time_size-1<block_end># Protect SparseFunctions from accessing duplicated (out-of-domain) data, # otherwise we would eventually be summing more than expected p,eqns=f.guard()<if>f.is_SparseFunction<else>(f [])<line_sep>s=dv.types.Symbol(name='sum' dtype=f.dtype)<with_stmt>MPIReduction(f)<as>mr<block_start>op=dv.Operator([dv.Eq(s 0.0)]+eqns+[dv.Inc(s dv.Abs(Pow(p order))) dv.Eq(mr.n[0] s)] name='norm%d'%order)<line_sep>op.apply(**kwargs)<block_end>v=np.power(mr.v 1/order)<line_sep><return>f.dtype(v)<block_end><def_stmt>sumall f<block_start>""" Compute the sum of all Function data. Parameters ---------- f : Function Input Function. """<line_sep>kwargs={}<if_stmt>f.is_TimeFunction<and>f._time_buffering<block_start>kwargs[f.time_dim.max_name]=f._time_size-1<block_end># Protect SparseFunctions from accessing duplicated (out-of-domain) data, # otherwise we would eventually be summing more than expected p,eqns=f.guard()<if>f.is_SparseFunction<else>(f [])<line_sep>s=dv.types.Symbol(name='sum' dtype=f.dtype)<with_stmt>MPIReduction(f)<as>mr<block_start>op=dv.Operator([dv.Eq(s 0.0)]+eqns+[dv.Inc(s p) dv.Eq(mr.n[0] s)] name='sum')<line_sep>op.apply(**kwargs)<block_end><return>f.dtype(mr.v)<block_end><def_stmt>inner f g<block_start>""" Inner product of two Functions. Parameters ---------- f : Function First input operand g : Function Second input operand Raises ------ ValueError If the two input Functions are defined over different grids, or have different dimensionality, or their dimension-wise sizes don't match. If in input are two SparseFunctions and their coordinates don't match, the exception is raised. Notes ----- The inner product is the sum of all dimension-wise products. For 1D Functions, the inner product corresponds to the dot product. """<line_sep># Input check <if_stmt>f.is_TimeFunction<and>f._time_buffering<ne>g._time_buffering<block_start><raise>ValueError("Cannot compute `inner` between save/nosave TimeFunctions")<block_end><if_stmt>f.shape<ne>g.shape<block_start><raise>ValueError("`f` and `g` must have same shape")<block_end><if_stmt>f._data<is><none><or>g._data<is><none><block_start><raise>ValueError("Uninitialized input")<block_end><if_stmt>f.is_SparseFunction<and><not>np.all(f.coordinates_data<eq>g.coordinates_data)<block_start><raise>ValueError("Non-matching coordinates")<block_end>kwargs={}<if_stmt>f.is_TimeFunction<and>f._time_buffering<block_start>kwargs[f.time_dim.max_name]=f._time_size-1<block_end># Protect SparseFunctions from accessing duplicated (out-of-domain) data, # otherwise we would eventually be summing more than expected rhs,eqns=f.guard(f<times>g)<if>f.is_SparseFunction<else>(f<times>g [])<line_sep>s=dv.types.Symbol(name='sum' dtype=f.dtype)<with_stmt>MPIReduction(f g)<as>mr<block_start>op=dv.Operator([dv.Eq(s 0.0)]+eqns+[dv.Inc(s rhs) dv.Eq(mr.n[0] s)] name='inner')<line_sep>op.apply(**kwargs)<block_end><return>f.dtype(mr.v)<block_end><def_stmt>mmin f<block_start>""" Retrieve the minimum. 
Parameters ---------- f : array_like or Function Input operand. """<if_stmt>isinstance(f dv.Constant)<block_start><return>f.data<block_end><elif_stmt>isinstance(f dv.types.dense.DiscreteFunction)<block_start><with_stmt>MPIReduction(f op=dv.mpi.MPI.MIN)<as>mr<block_start>mr.n.data[0]=np.min(f.data_ro_domain).item()<block_end><return>mr.v.item()<block_end><else_stmt><block_start><raise>ValueError("Expected Function, not `%s`"%type(f))<block_end><block_end><def_stmt>mmax f<block_start>""" Retrieve the maximum. Parameters ---------- f : array_like or Function Input operand. """<if_stmt>isinstance(f dv.Constant)<block_start><return>f.data<block_end><elif_stmt>isinstance(f dv.types.dense.DiscreteFunction)<block_start><with_stmt>MPIReduction(f op=dv.mpi.MPI.MAX)<as>mr<block_start>mr.n.data[0]=np.max(f.data_ro_domain).item()<block_end><return>mr.v.item()<block_end><else_stmt><block_start><raise>ValueError("Expected Function, not `%s`"%type(f))<block_end><block_end>
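# --- Hedged usage sketch (not part of the original module) ---------------------
# Illustrates the reductions defined above on a small constant Function. Assumes
# Devito's public Grid/Function API; the values in the comments are the expected
# results for a 4x4 field filled with 2.0.
if __name__ == "__main__":
    grid = dv.Grid(shape=(4, 4))
    f = dv.Function(name='f', grid=grid)
    f.data[:] = 2.0
    print(norm(f))            # L2 norm: sqrt(16 * 2**2) = 8.0
    print(sumall(f))          # 32.0
    print(inner(f, f))        # 64.0
    print(mmin(f), mmax(f))   # 2.0 2.0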
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import math

import paddle
import paddle.nn as nn
import paddle.nn.functional as F


class PairwiseCosface(nn.Layer):
    def __init__(self, margin, gamma):
        super(PairwiseCosface, self).__init__()
        self.margin = margin
        self.gamma = gamma

    def forward(self, embedding, targets):
        if isinstance(embedding, dict):
            embedding = embedding['features']
        # Normalize embedding features
        embedding = F.normalize(embedding, axis=1)
        dist_mat = paddle.matmul(embedding, embedding, transpose_y=True)

        N = dist_mat.shape[0]
        is_pos = targets.reshape([N, 1]).expand([N, N]).equal(
            paddle.t(targets.reshape([N, 1]).expand([N, N]))).astype('float')
        is_neg = targets.reshape([N, 1]).expand([N, N]).not_equal(
            paddle.t(targets.reshape([N, 1]).expand([N, N]))).astype('float')

        # Mask scores related to itself
        is_pos = is_pos - paddle.eye(N, N)

        s_p = dist_mat * is_pos
        s_n = dist_mat * is_neg

        logit_p = -self.gamma * s_p + (-99999999.) * (1 - is_pos)
        logit_n = self.gamma * (s_n + self.margin) + (-99999999.) * (1 - is_neg)

        loss = F.softplus(
            paddle.logsumexp(logit_p, axis=1) +
            paddle.logsumexp(logit_n, axis=1)).mean()

        return {"PairwiseCosface": loss}
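# --- Hedged usage sketch (not part of the original module) ---------------------
# Random embeddings and integer identity labels, purely to show the expected input
# shapes and the dict the loss returns; the margin/gamma values are illustrative.
if __name__ == "__main__":
    loss_fn = PairwiseCosface(margin=0.35, gamma=64)
    embedding = paddle.randn([8, 128])
    targets = paddle.randint(0, 4, shape=[8])
    out = loss_fn(embedding, targets)
    print(out["PairwiseCosface"])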
# Copyright 2018 The GraphicsFuzz Project Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_stmt>subprocess<import_stmt>os<line_sep>HERE=os.path.abspath(__file__)<line_sep>path=os.path.join<def_stmt>get_tool_path <block_start><return>path(get_bin_jar_dirs()[1] "tool-1.0.jar")<block_end><def_stmt>get_bin_jar_dirs <block_start><def_stmt>try_get_jar_bin_dirs install_root<block_start>bin_dir=path(install_root "bin")<line_sep>jar_dir=path(install_root "jar")<if_stmt>os.path.isdir(bin_dir)<and>os.path.isdir(jar_dir)<block_start><return>os.path.abspath(bin_dir) os.path.abspath(jar_dir)<block_end><return><none><block_end># Perhaps we are running from the IDE. Check this first, since the deployed files are likely also present if # running from the IDE. res=try_get_jar_bin_dirs(path(os.path.dirname(HERE) os.pardir os.pardir os.pardir os.pardir "graphicsfuzz" "target" "graphicsfuzz"))<if_stmt>res<is><not><none><block_start><return>res<block_end># Perhaps we are running from the zip. res=try_get_jar_bin_dirs(path(os.path.dirname(HERE) os.pardir))<if_stmt>res<is><not><none><block_start><return>res<block_end><raise>Exception("Could not find bin and jar directories")<block_end><def_stmt>get_shaders_dir # Perhaps we are running from the IDE. Check this first, since the deployed files are likely also present if # running from the IDE. <block_start>res=path(os.path.dirname(HERE) os.pardir os.pardir os.pardir os.pardir "shaders" "src" "main" "glsl")<if_stmt>os.path.isdir(res)<block_start><return>os.path.abspath(res)<block_end># Perhaps we are running from the zip. res=path(os.path.dirname(HERE) os.pardir "shaders")<if_stmt>os.path.isdir(res)<block_start><return>os.path.abspath(res)<block_end><raise>Exception("Could not find shaders directory")<block_end><def_stmt>execute cmd verbose<block_start><if_stmt>verbose<block_start>print("Validator command: "+" ".join(cmd))<block_end>proc=subprocess.Popen(cmd stdout=subprocess.PIPE stderr=subprocess.PIPE)<line_sep>validator_stdout,validator_stderr=proc.communicate()<assert_stmt>(proc.returncode<is><not><none>)<line_sep><return>{"returncode":proc.returncode "stdout":validator_stdout "stderr":validator_stderr}<block_end><def_stmt>validate_frag frag_file validator verbose<block_start>cmd=[validator frag_file]<line_sep><return>execute(cmd verbose)<block_end>
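# --- Hedged usage sketch (not part of the original module) ---------------------
# Runs an external shader validator on a fragment shader. The validator binary name
# and the shader path below are placeholders, not values taken from this file.
if __name__ == "__main__":
    result = validate_frag("shader.frag", "glslangValidator", verbose=True)
    print(result["returncode"], result["stderr"])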
from django.conf import settings
from django.core.management import call_command
from django.test.runner import DiscoverRunner

from elasticsearch.exceptions import NotFoundError

from feedhq import es


class ESTestSuiteRunner(DiscoverRunner):
    def setup_test_environment(self):
        super().setup_test_environment()
        try:
            es.client.indices.delete(settings.ES_INDEX)
        except NotFoundError:
            pass
        call_command('create_index')
        es.wait_for_yellow()

    def teardown_test_environment(self):
        super().teardown_test_environment()
        es.client.indices.delete(settings.ES_INDEX)
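# --- Hedged wiring sketch (not part of the original module) --------------------
# Points Django at this runner; the dotted path and index name are assumptions,
# since the project's settings layout is not shown here.
#
#     # settings.py
#     TEST_RUNNER = "feedhq.test_runner.ESTestSuiteRunner"
#     ES_INDEX = "feedhq-test"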
<import_stmt>bleach<import_from_stmt>django.contrib messages<import_from_stmt>django.contrib.auth.decorators login_required user_passes_test<import_from_stmt>django.core.exceptions PermissionDenied<import_from_stmt>django.http HttpResponse<import_from_stmt>django.shortcuts get_object_or_404 redirect render<import_from_stmt>django.utils timezone<import_from_stmt>todo.forms AddEditTaskForm<import_from_stmt>todo.models Task TaskList<import_from_stmt>todo.utils send_notify_mail staff_check<line_sep>@login_required@user_passes_test(staff_check)<def_stmt>list_detail request list_id=<none> list_slug=<none> view_completed=<false><arrow>HttpResponse<block_start>"""Display and manage tasks in a todo list. """<line_sep># Defaults task_list=<none><line_sep>form=<none><line_sep># Which tasks to show on this list view? <if_stmt>list_slug<eq>"mine"<block_start>tasks=Task.objects.filter(assigned_to=request.user)<block_end><else_stmt># Show a specific list, ensuring permissions. <block_start>task_list=get_object_or_404(TaskList id=list_id)<if_stmt>task_list.group<not><in>request.user.groups.all()<and><not>request.user.is_superuser<block_start><raise>PermissionDenied<block_end>tasks=Task.objects.filter(task_list=task_list.id)<block_end># Additional filtering <if_stmt>view_completed<block_start>tasks=tasks.filter(completed=<true>)<block_end><else_stmt><block_start>tasks=tasks.filter(completed=<false>)<block_end># ###################### # Add New Task Form # ###################### <if_stmt>request.POST.getlist("add_edit_task")<block_start>form=AddEditTaskForm(request.user request.POST initial={"assigned_to":request.user.id "priority":999 "task_list":task_list} )<if_stmt>form.is_valid()<block_start>new_task=form.save(commit=<false>)<line_sep>new_task.created_by=request.user<line_sep>new_task.note=bleach.clean(form.cleaned_data["note"] strip=<true>)<line_sep>form.save()<line_sep># Send email alert only if Notify checkbox is checked AND assignee is not same as the submitter <if_stmt>("notify"<in>request.POST<and>new_task.assigned_to<and>new_task.assigned_to<ne>request.user)<block_start>send_notify_mail(new_task)<block_end>messages.success(request 'New task "{t}" has been added.'.format(t=new_task.title))<line_sep><return>redirect(request.path)<block_end><block_end><else_stmt># Don't allow adding new tasks on some views <block_start><if_stmt>list_slug<not><in>["mine" "recent-add" "recent-complete"]<block_start>form=AddEditTaskForm(request.user initial={"assigned_to":request.user.id "priority":999 "task_list":task_list} )<block_end><block_end>context={"list_id":list_id "list_slug":list_slug "task_list":task_list "form":form "tasks":tasks "view_completed":view_completed }<line_sep><return>render(request "todo/list_detail.html" context)<block_end>
<import_from_stmt>PyQt5.QtGui *<import_from_stmt>PyQt5.QtCore *<import_from_stmt>PyQt5 QtWidgets<import_stmt>socket<import_stmt>re<class_stmt>SettingDialog(QtWidgets.QDialog)<block_start>enable_color_map=<false><line_sep>label_font_size=10<line_sep>task_mode=0#0=det, 1=seg, 2=cls signal_conn=pyqtSignal(list)<def_stmt>__init__ self parent config<block_start>QtWidgets.QDialog.__init__(self parent)<line_sep>self.resize(320 240)<line_sep>self.__class__.task_mode=config['task_mode']<line_sep>self.__class__.label_font_size=config['label_font_size']<line_sep>self.init_UI()<block_end><def_stmt>createModeGroup self<block_start>''' set the trask mode setting group :return: mode group '''<line_sep>self.modegroupBox=QtWidgets.QGroupBox("& Task Mode")<line_sep>self.modegroupBox.setCheckable(<true>)<line_sep>self.modegroupBox.setChecked(<true>)<line_sep>self.CLS_mode_rb=QtWidgets.QRadioButton("CLS Mode")<line_sep>self.CLS_mode_rb.clicked.connect(self.CLS_model_selected)<line_sep>self.DET_mode_rb=QtWidgets.QRadioButton("DET Mode")<line_sep>self.DET_mode_rb.clicked.connect(self.DET_model_selected)<line_sep>self.SEG_mode_rb=QtWidgets.QRadioButton("SEG Mode")<line_sep>self.SEG_mode_rb.clicked.connect(self.SEG_model_selected)<line_sep>vbox=QtWidgets.QVBoxLayout()<line_sep>vbox.addWidget(self.CLS_mode_rb)<line_sep>vbox.addWidget(self.DET_mode_rb)<line_sep>vbox.addWidget(self.SEG_mode_rb)<line_sep>vbox.addStretch(<true>)<line_sep>self.modegroupBox.setLayout(vbox)<line_sep><return>self.modegroupBox<block_end><def_stmt>createDEToptGroup self<block_start>self.detgroupBox=QtWidgets.QGroupBox("& DET options")<line_sep>self.enable_show_label_cb=QtWidgets.QCheckBox('enable show label name')<line_sep>self.label_font_size_sl=QtWidgets.QSlider(Qt.Horizontal)<line_sep>self.label_font_size_sl.setRange(5 50)<line_sep>self.label_font_size_sp=QtWidgets.QSpinBox()<line_sep>self.label_font_size_sp.setRange(5 50)<line_sep>self.signal_conn.connect(self.font_conn)<line_sep>self.label_font_size_sl.valueChanged.connect(self.change_label_font_size)<line_sep>self.label_font_size_sl.setValue(self.__class__.label_font_size)<line_sep>vbox=QtWidgets.QVBoxLayout()<line_sep>vbox.addWidget(self.enable_show_label_cb)<line_sep>vbox.addWidget(QtWidgets.QLabel('label font size'))<line_sep>vbox.addWidget(self.label_font_size_sl)<line_sep>vbox.addWidget(self.label_font_size_sp)<line_sep>vbox.addStretch()<line_sep>self.detgroupBox.setLayout(vbox)<line_sep><return>self.detgroupBox<block_end><def_stmt>font_conn self<block_start>self.label_font_size_sl=QtWidgets.QSlider(Qt.Horizontal)<line_sep>self.label_font_size_sl.setRange(5 50)<line_sep>self.label_font_size_sp=QtWidgets.QSpinBox()<line_sep>self.label_font_size_sp.setRange(5 50)<block_end><def_stmt>createCLSoptGroup self<block_start>self.clsgroupBox=QtWidgets.QGroupBox("& CLS options")<line_sep>#self.single_label_rb = QtGui.QRadioButton("single label") #self.multi_label_rb = QtGui.QRadioButton("multi label") vbox=QtWidgets.QVBoxLayout()<line_sep>#vbox.addWidget(self.single_label_rb) #vbox.addWidget(self.multi_label_rb) vbox.addStretch(<true>)<line_sep>self.clsgroupBox.setLayout(vbox)<line_sep><return>self.clsgroupBox<block_end><def_stmt>createSEGoptGroup self<block_start>self.seggroupBox=QtWidgets.QGroupBox("& SEG options")<line_sep>self.enable_color_map_cb=QtWidgets.QCheckBox('enable color 
map')<if_stmt>self.__class__.enable_color_map<block_start>self.enable_color_map_cb.toggle()<block_end>self.enable_color_map_cb.stateChanged.connect(self.change_color_enable_state)<if_stmt>self.__class__.enable_color_map<block_start>self.enable_color_map_cb.setChecked(<true>)<block_end>vbox=QtWidgets.QVBoxLayout()<line_sep>vbox.addWidget(self.enable_color_map_cb)<line_sep>vbox.addStretch(<true>)<line_sep>self.seggroupBox.setLayout(vbox)<line_sep><return>self.seggroupBox<block_end><def_stmt>init_UI self<block_start>main_v_layout=QtWidgets.QVBoxLayout()<line_sep>grid=QtWidgets.QGridLayout()<line_sep>grid.addWidget(self.createModeGroup() 0 0)<line_sep>grid.addWidget(self.createDEToptGroup() 1 0)<line_sep>grid.addWidget(self.createCLSoptGroup() 2 0)<line_sep>grid.addWidget(self.createSEGoptGroup() 3 0)<if_stmt>self.__class__.task_mode<eq>0<block_start>self.DET_mode_rb.setChecked(<true>)<line_sep>self.DET_model_selected()<block_end><elif_stmt>self.__class__.task_mode<eq>1<block_start>self.SEG_mode_rb.setChecked(<true>)<line_sep>self.SEG_model_selected()<block_end><elif_stmt>self.__class__.task_mode<eq>2<block_start>self.CLS_mode_rb.setChecked(<true>)<line_sep>self.CLS_model_selected()<block_end>buttonBox=QtWidgets.QDialogButtonBox(parent=self)<line_sep>buttonBox.setOrientation(Qt.Horizontal)<line_sep>buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)<line_sep>buttonBox.accepted.connect(self.accept)<line_sep>buttonBox.rejected.connect(self.reject)<line_sep>main_v_layout.addLayout(grid)<line_sep>spacerItem=QtWidgets.QSpacerItem(20 48 QtWidgets.QSizePolicy.Minimum QtWidgets.QSizePolicy.Expanding)<line_sep>main_v_layout.addItem(spacerItem)<line_sep>main_v_layout.addWidget(buttonBox)<line_sep>self.setLayout(main_v_layout)<block_end><def_stmt>CLS_model_selected self<block_start>self.__class__.task_mode=2<line_sep>self.clsgroupBox.setDisabled(<false>)<line_sep>self.detgroupBox.setDisabled(<true>)<line_sep>self.seggroupBox.setDisabled(<true>)<block_end><def_stmt>DET_model_selected self<block_start>self.__class__.task_mode=0<line_sep>self.detgroupBox.setDisabled(<false>)<line_sep>self.clsgroupBox.setDisabled(<true>)<line_sep>self.seggroupBox.setDisabled(<true>)<block_end><def_stmt>SEG_model_selected self<block_start>self.__class__.task_mode=1<line_sep>self.seggroupBox.setDisabled(<false>)<line_sep>self.detgroupBox.setDisabled(<true>)<line_sep>self.clsgroupBox.setDisabled(<true>)<block_end><def_stmt>change_color_enable_state self state<block_start><if_stmt>state<eq>QtWidgets.Qt.Checked<block_start>self.__class__.enable_color_map=<true><block_end><else_stmt><block_start>self.__class__.enable_color_map=<false><block_end><block_end><def_stmt>change_label_font_size self value<block_start>self.__class__.label_font_size=value<block_end><def_stmt>get_color_map_state self<block_start><return>self.__class__.enable_color_map<block_end><def_stmt>get_setting_state self<block_start><if_stmt>self.__class__.task_mode<eq>0<block_start><return>{'mode':0 'enable_color_map':self.__class__.enable_color_map 'label_font_size':self.__class__.label_font_size}<block_end><elif_stmt>self.__class__.task_mode<eq>1<block_start><return>{'mode':1 'enable_color_map':self.__class__.enable_color_map}<block_end><elif_stmt>self.__class__.task_mode<eq>2<block_start><return>{'mode':2}<block_end><block_end><block_end>
# Copyright 2019 The Dreamer Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow as tf

from dreamer.tools import nested


def chunk_sequence(sequence, chunk_length, randomize=True, num_chunks=None):
  if 'length' in sequence:
    length = sequence.pop('length')
  else:
    length = tf.shape(nested.flatten(sequence)[0])[0]
  if randomize:
    if not num_chunks:
      num_chunks = tf.maximum(1, length // chunk_length - 1)
    else:
      num_chunks = num_chunks + 0 * length
    used_length = num_chunks * chunk_length
    max_offset = length - used_length
    offset = tf.random_uniform((), 0, max_offset + 1, dtype=tf.int32)
  else:
    if num_chunks is None:
      num_chunks = length // chunk_length
    else:
      num_chunks = num_chunks + 0 * length
    used_length = num_chunks * chunk_length
    offset = 0
  clipped = nested.map(
      lambda tensor: tensor[offset: offset + used_length], sequence)
  chunks = nested.map(
      lambda tensor: tf.reshape(
          tensor, [num_chunks, chunk_length] + tensor.shape[1:].as_list()),
      clipped)
  return chunks
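# --- Hedged usage sketch (not part of the original module) ---------------------
# TF1-style graph-mode example (matching tf.random_uniform above): split a
# length-100 sequence into ten non-overlapping chunks of ten steps each.
if __name__ == "__main__":
    sequence = {'obs': tf.reshape(tf.range(300.0), [100, 3]),
                'length': tf.constant(100)}
    chunks = chunk_sequence(sequence, chunk_length=10, randomize=False)
    with tf.Session() as sess:
        print(sess.run(chunks)['obs'].shape)  # (10, 10, 3)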
###################################################################### # Software License Agreement (BSD License) # # Copyright (c) 2013, Rice University # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # * Neither the name of the Rice University nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. ###################################################################### # Author: <NAME> ## \brief Information about this addon bl_info={"name":"OMPL Interface" "category":"Game Engine" "description":"Planning with OMPL (requires MORSE)" "location":"Game > OMPL" "author":"<NAME>"}<import_stmt>configparser<import_stmt>os<import_stmt>socket<import_stmt>subprocess<import_stmt>sys<import_stmt>time<import_stmt>bpy<import_stmt>ompl.morse.environment<line_sep>OMPL_DIR=ompl.morse.__path__[0]<line_sep>inf=float('inf')<line_sep># # # Addon operators (actions in the menu the user can execute) # # ## # \brief Invoke OMPL Planning <class_stmt>Plan(bpy.types.Operator)<block_start>bl_idname="ompl.plan"<line_sep>bl_label="Plan..."<line_sep>## \brief File where planner should save solution path filepath=bpy.props.StringProperty(subtype="FILE_PATH")<line_sep>## # \brief Called when the dialogs finish; starts up the simulation <def_stmt>execute self _<block_start>print('Starting planner...')<line_sep>print("Planning on %s, saving to %s"%(bpy.data.filepath self.filepath))<line_sep>subprocess.Popen(['morse' '-c' 'run' 'ompl' OMPL_DIR+'/builder.py' '--' bpy.data.filepath self.filepath 'PLAN'])<line_sep><return>{'FINISHED'}<block_end>## # \brief Called when the button is pressed; double-check configuration and # ask for a file to save the path to <def_stmt>invoke self context _# Double-check goal object properties to make sure they're out of the way and # connected properly <block_start><for_stmt>obj bpy.data.objects<block_start><if_stmt>[<true><for>goalStr ['.goalPose' '.goalRegion' 
'.goalRot']<if>obj.name.endswith(goalStr)]<block_start>obj.hide_render=<true><if_stmt>obj.name.endswith('.goalRegion')<block_start>obj.game.physics_type='SENSOR'<line_sep>body=bpy.data.objects.get(obj.name[:-11])<if_stmt><not>body<block_start><continue><block_end>collider=obj.game.sensors.get("__collision")<if_stmt><not>collider# Link up a collision sensor <block_start>bpy.ops.logic.sensor_add(type='COLLISION' name="__collision" object=obj.name)<line_sep>collider=obj.game.sensors.get("__collision")<block_end>collider.property=body.name.replace('.' '_')<line_sep># Just to make the sensor active dummy=obj.game.controllers.get("__dummy")<if_stmt><not>dummy<block_start>bpy.ops.logic.controller_add(type='EXPRESSION' name="__dummy" object=obj.name)<line_sep>dummy=obj.game.controllers["__dummy"]<block_end>dummy.expression='TRUE'<line_sep>collider.link(dummy)<block_end><else_stmt><block_start>obj.game.physics_type='NO_COLLISION'<block_end><block_end><block_end><if_stmt><not>context.scene.objects.get('ompl_settings')# Bounds Configuration hasn't been setup for this file yet <block_start>bpy.ops.ompl.boundsconfig('INVOKE_DEFAULT')<block_end><else_stmt><block_start>settings=context.scene.objects['ompl_settings']<if_stmt>settings.get('autopb')<is><none># Bounds Configuration hasn't been setup for this file yet <block_start>bpy.ops.ompl.boundsconfig('INVOKE_DEFAULT')<block_end><block_end># Save any changes so MORSE sees them when it loads the file bpy.ops.wm.save_mainfile()<line_sep># File selector for the path output file context.window_manager.fileselect_add(self)<line_sep><return>{'RUNNING_MODAL'}<block_end><block_end>@bpy.app.handlers.persistent<def_stmt>import_and_resave animpath<block_start>bpy.app.handlers.load_post.clear()<line_sep>animtmppath=animpath+".tmp"<line_sep>print("OMPL: appending animation data")<with_stmt>bpy.data.libraries.load(filepath=animtmppath)<as>(_ t)<block_start>t.scenes=['S.MORSE_LOGIC']<block_end>print("OMPL: deleting tmp file")<line_sep>os.remove(animtmppath)<line_sep>bpy.data.scenes.remove(bpy.data.scenes['Scene'])<line_sep>bpy.data.scenes['S.MORSE_LOGIC'].name='Scene'<line_sep>bpy.context.screen.scene=bpy.data.scenes['Scene']<line_sep>bpy.ops.wm.save_mainfile(filepath=animpath)<block_end>## # \brief Invoke Path Playback <class_stmt>Play(bpy.types.Operator)<block_start>bl_idname="ompl.play"<line_sep>bl_label="Playback and save"<line_sep>## \brief File where the planner wrote the solution path filepath=bpy.props.StringProperty(name="Solution file" description="File where where the OMPL planner saved a solution path" subtype="FILE_PATH")<line_sep>## # \brief Called when the dialogs finish; starts up the simulation <def_stmt>execute self context<block_start>animpath=context.scene.objects['ompl_settings']['Animpath']<if_stmt>animpath<eq>''<block_start>self.report({'ERROR'} "Choose animation save file first!")<line_sep><return>{'FINISHED'}<block_end>self.report({'WARNING'} "Switching to .blend file: '"+animpath+"'")<line_sep>print('Starting player...')<line_sep>print("Playing %s with %s"%(bpy.data.filepath self.filepath))<line_sep>subprocess.run(['morse' '-c' 'run' 'ompl' OMPL_DIR+'/builder.py' '--' bpy.data.filepath self.filepath 'PLAY'])<line_sep># Load blank file. Append animated objects. Re-save. 
print("OMPL: Will save animation data to '"+animpath+"'")<line_sep>cont=bpy.app.handlers.persistent(<lambda>_:import_and_resave(animpath))<line_sep>bpy.app.handlers.load_post.append(cont)<line_sep>blankpath=OMPL_DIR+'/resources/blank.blend'<line_sep>print("OMPL: Loading blank file")<line_sep>bpy.ops.wm.open_mainfile(filepath=blankpath)<line_sep><return>{'FINISHED'}<block_end>## # \brief Called when the button is pressed; prompts for the path file <def_stmt>invoke self context _<block_start><if_stmt><not>context.scene.objects.get('ompl_settings')# Select an animation save file <block_start>bpy.ops.ompl.animfile('INVOKE_DEFAULT')<block_end><elif_stmt><not>context.scene.objects['ompl_settings'].get('Animpath')# Select an animation save file <block_start>bpy.ops.ompl.animfile('INVOKE_DEFAULT')<block_end># Save any changes so MORSE sees them when it loads the file bpy.ops.wm.save_mainfile()<line_sep># File selector for the path file context.window_manager.fileselect_add(self)<line_sep><return>{'RUNNING_MODAL'}<block_end><block_end>## # \brief Compile a list of usable MORSE robots <def_stmt>getRobots <block_start><import_stmt>morse.builder<line_sep># This is a list of incompatible robots (e.g., some use controllers that require you to explicitly # name the internal variable you want to change instead of merely accepting a list of control values). # If you write your own controller that is compatible, feel free to take the robot out of this blacklist excluded_robots=['B21' 'BarePR2' 'BasePR2' 'Human' 'Hummer' 'Jido' 'LocalizedPR2' 'NavPR2' 'Victim']<line_sep>robotEnum=[]<line_sep>i=0<for_stmt>cname dir(morse.builder.robots)<block_start>c=getattr(morse.builder.robots cname)<line_sep># Is c a class? <if_stmt>isinstance(c type)# Does it inherit from Robot and is it neither Robot nor WheeledRobot? <block_start><if_stmt>issubclass(c morse.builder.Robot)<and>c<ne>morse.builder.Robot<and>c<ne>morse.builder.WheeledRobot# Is is not in our exlusions list? <block_start><if_stmt>cname<not><in>excluded_robots# Add an entry for it <block_start>robotEnum.append((cname cname 'morse.builder.robots.'+cname i))<line_sep>i<augadd>1<block_end><block_end><block_end><block_end># Put then in alphabetical order robotEnum.reverse()<line_sep><return>robotEnum<block_end>## # \brief Compile list of controllers <def_stmt>getControllers <block_start><import_stmt>morse.builder<line_sep># Exclude controllers that require non-numeric parameters, don't have a socket interface, or are irrelevant; # you may be able to rewrite some of these (e.g., SteerForce) with little modification so that they do # accept purely numeric inputs excluded_controllers=['Armature' 'Destination' 'ForceTorque' 'Gripper' 'Joystick' 'Keyboard' 'KukaLWR' 'Light' 'Mocap' 'MocapControl' 'MotionXYW' 'Orientation' 'PTU' 'RotorcraftAttitude' 'Sound' 'SteerForce']<line_sep>controllerEnum=[]<line_sep>i=0<for_stmt>cname dir(morse.builder.actuators)<block_start>c=getattr(morse.builder.actuators cname)<line_sep># Is c a class? <if_stmt>isinstance(c type)# Does it inherit from ActuatorCreator and is it not ActuatorCreator? <block_start><if_stmt>issubclass(c morse.builder.creator.ActuatorCreator)<and>c<ne>morse.builder.creator.ActuatorCreator# Is it not in our exclusions list? 
<block_start><if_stmt>cname<not><in>excluded_controllers# Add an entry for it <block_start>controllerEnum.append((cname cname 'morse.builder.actuators.'+cname i))<line_sep>i<augadd>1<block_end><block_end><block_end><block_end>controllerEnum.reverse()<line_sep><return>controllerEnum<block_end>## # \brief Add a MORSE Robot to the scene <class_stmt>AddRobot(bpy.types.Operator)<block_start>bl_idname="ompl.addrobot"<line_sep>bl_label="Add Robot..."<line_sep># Set up the robot and controller selection menus robotEnum=[('' '' '')]<line_sep>controllerEnum=[('' '' '')]<line_sep>robot_type=bpy.props.EnumProperty(items=robotEnum name="MORSE robot" description="A robot from the MORSE components library" default=robotEnum[-1][0])<line_sep>controller_type=bpy.props.EnumProperty(items=controllerEnum name="MORSE actuator" description="The actuator to control the robot" default=controllerEnum[-1][0])<line_sep>## # \brief Operator refuses to run if this returns false; requires # Blender to be in Object Mode @classmethod<def_stmt>poll cls context<block_start><return>context.mode<eq>'OBJECT'<block_end>## # \brief Add the model to the scene and set up some properties <def_stmt>execute self context<block_start><import_stmt>morse.builder<line_sep># Add model for robot_type robot=getattr(morse.builder self.robot_type)()<line_sep>robotObj=context.object<line_sep># Make visible in a render robotObj.hide_render=<false><line_sep># Remove unnecessary game properties <while_stmt>robotObj.game.properties<block_start>bpy.ops.object.game_property_remove()<block_end># Add properties for robot_type and controller_type robotObj['RobotType']=self.robot_type<line_sep>robotObj['ControllerType']=self.controller_type<line_sep><return>{'FINISHED'}<block_end>## # \brief Prompt for robot and controller selection <def_stmt>invoke self context _<block_start><return>context.window_manager.invoke_props_dialog(self)<block_end><block_end>## # \brief Recursively add children to the selection <def_stmt>_recurseSelectChildren obj<block_start><for_stmt>child obj.children<block_start>_recurseSelectChildren(child)<block_end>bpy.ops.object.select_pattern(pattern=obj.name case_sensitive=<true>)<block_end>## # \brief Add a goal to the Scene <class_stmt>AddGoal(bpy.types.Operator)<block_start>bl_idname="ompl.addgoal"<line_sep>bl_label="Add Goal..."<line_sep># Parameters are the type of goal and the name of the object we define the goal for body=bpy.props.StringProperty(name="Rigid Body" description="The body to define a goal for" default="")<line_sep>goal_type=bpy.props.EnumProperty(items=[('goalRot' 'Rotation only' 'Rotation') ('goalPose' 'Pose' 'Position and Rotation')] name="Goal Type" description="The kind of goal specification" default='goalPose')<line_sep>## # \brief Operator refuses to run if this returns false; requires # Blender to be in Object mode @classmethod<def_stmt>poll cls context<block_start><return>context.mode<eq>'OBJECT'<block_end>## # \brief Create the goal object and set up its properties <def_stmt>execute self context# Check that the object exists <block_start><if_stmt><not>bpy.data.objects.get(self.body)<block_start>self.report({'ERROR'} "No such object: '%s'"%self.body)<line_sep><return>{'FINISHED'}<block_end>goalname=self.body+'.'+self.goal_type<line_sep>bpy.ops.object.select_all(action='DESELECT')<line_sep># Duplicate object bpy.ops.object.select_pattern(pattern=self.body 
case_sensitive=<true>)<line_sep>_recurseSelectChildren(bpy.data.objects.get(self.body))<line_sep>bpy.ops.object.duplicate()<line_sep>goalobj=context.selected_objects[0]<line_sep># Remove old custom properties <for_stmt>prop goalobj.keys()<block_start><del_stmt>goalobj[prop]<block_end><if_stmt>self.goal_type<eq>'goalPose'# Add default locTol <block_start>goalobj['locTol']=0.5<block_end># Add default rotTol goalobj['rotTol']=0.2<line_sep># Rename goal object goalobj.name=goalname<line_sep># Move object to cursor goalobj.location=context.scene.cursor_location<line_sep><return>{'FINISHED'}<block_end>## # \brief Prompt for the object name and goal type <def_stmt>invoke self context _<block_start><return>context.window_manager.invoke_props_dialog(self)<block_end><block_end>## # \brief Choose animation save file <class_stmt>AnimFile(bpy.types.Operator)<block_start>bl_idname="ompl.animfile"<line_sep>bl_label="Choose animation save file..."<line_sep>## \brief Second *.blend to save the animation data filepath=bpy.props.StringProperty(name="Animation Save file" description="*.blend file where the animation curves should be saved to" subtype="FILE_PATH")<line_sep>## # \brief Save the name of the file for later <def_stmt>execute self context<block_start>context.scene.objects['ompl_settings']['Animpath']=self.filepath<line_sep><return>{'FINISHED'}<block_end>## # \brief Prompt for the animation save file <def_stmt>invoke self context _# Add the settings object if it doesn't exist <block_start><if_stmt><not>context.scene.objects.get('ompl_settings')<block_start>bpy.ops.object.add()<line_sep>context.object.name='ompl_settings'<block_end>settings=context.scene.objects['ompl_settings']<line_sep># Get the settings object out of the way settings.hide=<true><line_sep>settings.hide_render=<true><line_sep>settings.hide_select=<true><if_stmt><not>settings.get('Animpath')<block_start>settings['Animpath']=self.filepath<block_end># Prompt for the name of the file to save to context.window_manager.fileselect_add(self)<line_sep><return>{'RUNNING_MODAL'}<block_end><block_end>## # \brief Configure the state and control bounds <class_stmt>BoundsConfiguration(bpy.types.Operator)<block_start>bl_idname="ompl.boundsconfig"<line_sep>bl_label="Bounds Configuration..."<line_sep># Properties displayed in the dialog; p=position,l=linear,a=angular,c=control; # x,y,z,m=min, X,Y,Z,M=max; handles up to 16 control inputs autopb=bpy.props.BoolProperty(name="Automatic position bounds" description="Overrides user-provided numbers by analyzing the scene" default=<true>)<line_sep>pbx=bpy.props.FloatProperty(name="Min" default=-1000 min=-1000 max=1000)<line_sep>pbX=bpy.props.FloatProperty(name="Max" default=1000 min=-1000 max=1000)<line_sep>pby=bpy.props.FloatProperty(name="Min" default=-1000 min=-1000 max=1000)<line_sep>pbY=bpy.props.FloatProperty(name="Max" default=1000 min=-1000 max=1000)<line_sep>pbz=bpy.props.FloatProperty(name="Min" default=-1000 min=-1000 max=1000)<line_sep>pbZ=bpy.props.FloatProperty(name="Max" default=1000 min=-1000 max=1000)<line_sep>lbm=bpy.props.FloatProperty(name="Min" default=-1000 min=-1000 max=1000)<line_sep>lbM=bpy.props.FloatProperty(name="Max" default=1000 min=-1000 max=1000)<line_sep>abm=bpy.props.FloatProperty(name="Min" default=-1000 min=-1000 max=1000)<line_sep>abM=bpy.props.FloatProperty(name="Max" default=1000 min=-1000 max=1000)<line_sep>cbm0=bpy.props.FloatProperty(name="Min" default=-10 min=-1000 max=1000)<line_sep>cbM0=bpy.props.FloatProperty(name="Max" default=10 min=-1000 
max=1000)<line_sep>cbm1=bpy.props.FloatProperty(name="Min" default=-10 min=-1000 max=1000)<line_sep>cbM1=bpy.props.FloatProperty(name="Max" default=10 min=-1000 max=1000)<line_sep>cbm2=bpy.props.FloatProperty(name="Min" default=-10 min=-1000 max=1000)<line_sep>cbM2=bpy.props.FloatProperty(name="Max" default=10 min=-1000 max=1000)<line_sep>cbm3=bpy.props.FloatProperty(name="Min" default=-10 min=-1000 max=1000)<line_sep>cbM3=bpy.props.FloatProperty(name="Max" default=10 min=-1000 max=1000)<line_sep>cbm4=bpy.props.FloatProperty(name="Min" default=-10 min=-1000 max=1000)<line_sep>cbM4=bpy.props.FloatProperty(name="Max" default=10 min=-1000 max=1000)<line_sep>cbm5=bpy.props.FloatProperty(name="Min" default=-10 min=-1000 max=1000)<line_sep>cbM5=bpy.props.FloatProperty(name="Max" default=10 min=-1000 max=1000)<line_sep>cbm6=bpy.props.FloatProperty(name="Min" default=-10 min=-1000 max=1000)<line_sep>cbM6=bpy.props.FloatProperty(name="Max" default=10 min=-1000 max=1000)<line_sep>cbm7=bpy.props.FloatProperty(name="Min" default=-10 min=-1000 max=1000)<line_sep>cbM7=bpy.props.FloatProperty(name="Max" default=10 min=-1000 max=1000)<line_sep>cbm8=bpy.props.FloatProperty(name="Min" default=-10 min=-1000 max=1000)<line_sep>cbM8=bpy.props.FloatProperty(name="Max" default=10 min=-1000 max=1000)<line_sep>cbm9=bpy.props.FloatProperty(name="Min" default=-10 min=-1000 max=1000)<line_sep>cbM9=bpy.props.FloatProperty(name="Max" default=10 min=-1000 max=1000)<line_sep>cbm10=bpy.props.FloatProperty(name="Min" default=-10 min=-1000 max=1000)<line_sep>cbM10=bpy.props.FloatProperty(name="Max" default=10 min=-1000 max=1000)<line_sep>cbm11=bpy.props.FloatProperty(name="Min" default=-10 min=-1000 max=1000)<line_sep>cbM11=bpy.props.FloatProperty(name="Max" default=10 min=-1000 max=1000)<line_sep>cbm12=bpy.props.FloatProperty(name="Min" default=-10 min=-1000 max=1000)<line_sep>cbM12=bpy.props.FloatProperty(name="Max" default=10 min=-1000 max=1000)<line_sep>cbm13=bpy.props.FloatProperty(name="Min" default=-10 min=-1000 max=1000)<line_sep>cbM13=bpy.props.FloatProperty(name="Max" default=10 min=-1000 max=1000)<line_sep>cbm14=bpy.props.FloatProperty(name="Min" default=-10 min=-1000 max=1000)<line_sep>cbM14=bpy.props.FloatProperty(name="Max" default=10 min=-1000 max=1000)<line_sep>cbm15=bpy.props.FloatProperty(name="Min" default=-10 min=-1000 max=1000)<line_sep>cbM15=bpy.props.FloatProperty(name="Max" default=10 min=-1000 max=1000)<line_sep>## # \brief Save all the settings and reset dialogs to new defaults <def_stmt>execute self context# Save settings to the scene <block_start>settings=context.scene.objects['ompl_settings']<line_sep>settings['autopb']=self.autopb<line_sep>settings['pbx']=self.pbx<line_sep>settings['pbX']=self.pbX<line_sep>settings['pby']=self.pby<line_sep>settings['pbY']=self.pbY<line_sep>settings['pbz']=self.pbz<line_sep>settings['pbZ']=self.pbZ<line_sep>settings['lbm']=self.lbm<line_sep>settings['lbM']=self.lbM<line_sep>settings['abm']=self.abm<line_sep>settings['abM']=self.abM<for_stmt>i range(16)<block_start>settings['cbm%i'%i]=getattr(self 'cbm%i'%i)<line_sep>settings['cbM%i'%i]=getattr(self 'cbM%i'%i)<block_end># Allow dialog defaults to be changed by resetting the properties <del_stmt>BoundsConfiguration.autopb BoundsConfiguration.pbx BoundsConfiguration.pbX BoundsConfiguration.pby BoundsConfiguration.pbY BoundsConfiguration.pbz BoundsConfiguration.pbZ BoundsConfiguration.lbm BoundsConfiguration.lbM BoundsConfiguration.abm BoundsConfiguration.abM<for_stmt>i 
range(16)<block_start>delattr(BoundsConfiguration 'cbm%i'%i)<line_sep>delattr(BoundsConfiguration 'cbM%i'%i)<block_end>BoundsConfiguration.autopb=bpy.props.BoolProperty(name="Automatic position bounds" description="Overrides user-provided numbers by analyzing the scene" default=settings['autopb'])<line_sep>BoundsConfiguration.pbx=bpy.props.FloatProperty(name="Min" default=settings['pbx'] min=-1000 max=1000)<line_sep>BoundsConfiguration.pbX=bpy.props.FloatProperty(name="Max" default=settings['pbX'] min=-1000 max=1000)<line_sep>BoundsConfiguration.pby=bpy.props.FloatProperty(name="Min" default=settings['pby'] min=-1000 max=1000)<line_sep>BoundsConfiguration.pbY=bpy.props.FloatProperty(name="Max" default=settings['pbY'] min=-1000 max=1000)<line_sep>BoundsConfiguration.pbz=bpy.props.FloatProperty(name="Min" default=settings['pbz'] min=-1000 max=1000)<line_sep>BoundsConfiguration.pbZ=bpy.props.FloatProperty(name="Max" default=settings['pbZ'] min=-1000 max=1000)<line_sep>BoundsConfiguration.lbm=bpy.props.FloatProperty(name="Min" default=settings['lbm'] min=-1000 max=1000)<line_sep>BoundsConfiguration.lbM=bpy.props.FloatProperty(name="Max" default=settings['lbM'] min=-1000 max=1000)<line_sep>BoundsConfiguration.abm=bpy.props.FloatProperty(name="Min" default=settings['abm'] min=-1000 max=1000)<line_sep>BoundsConfiguration.abM=bpy.props.FloatProperty(name="Max" default=settings['abM'] min=-1000 max=1000)<for_stmt>i range(16)<block_start>setattr(BoundsConfiguration 'cbm%i'%i bpy.props.FloatProperty(name="Min" default=settings['cbm%i'%i] min=-1000 max=1000))<line_sep>setattr(BoundsConfiguration 'cbM%i'%i bpy.props.FloatProperty(name="Max" default=settings['cbM%i'%i] min=-1000 max=1000))<block_end># Refresh bpy.utils.unregister_class(BoundsConfiguration)<line_sep>bpy.utils.register_class(BoundsConfiguration)<line_sep><return>{'FINISHED'}<block_end>## # \brief Query MORSE for control description, then open the dialog <def_stmt>invoke self context _# If the settings have not been set before, initialize them <block_start><if_stmt><not>context.scene.objects.get('ompl_settings')<block_start>bpy.ops.object.add()<line_sep>settings=context.object<line_sep>settings.name='ompl_settings'<line_sep>settings['autopb']=<true><line_sep>settings['pbx']=-1000<line_sep>settings['pbX']=1000<line_sep>settings['pby']=-1000<line_sep>settings['pbY']=1000<line_sep>settings['pbz']=-1000<line_sep>settings['pbZ']=1000<line_sep>settings['lbm']=-1000<line_sep>settings['lbM']=1000<line_sep>settings['abm']=-1000<line_sep>settings['abM']=1000<for_stmt>i range(16)<block_start>settings['cbm%i'%i]=-10<line_sep>settings['cbM%i'%i]=10<block_end><block_end># Save any changes so MORSE sees them when it loads the file bpy.ops.wm.save_mainfile()<line_sep># Query MORSE for cdesc by starting it up temporarily (clunky, but it needs to be done) subprocess.Popen(['morse' '-c' 'run' 'ompl' OMPL_DIR+'/builder.py' '--' bpy.data.filepath "." 
'QUERY'])<line_sep># Wait for a connection sockS=socket.socket(socket.AF_INET socket.SOCK_STREAM)<line_sep>sockC=socket.socket(socket.AF_INET socket.SOCK_STREAM)<while_stmt><true><block_start><try_stmt><block_start>print("Waiting for port 50007 to connect.")<line_sep>sockS.connect(('localhost' 50007))<block_end><except_stmt><block_start>time.sleep(0.5)<line_sep><continue><block_end><break><block_end><while_stmt><true><block_start><try_stmt><block_start>print("Waiting for port 4000 to connect.")<line_sep>sockC.connect(('localhost' 4000))<block_end><except_stmt><block_start>time.sleep(0.5)<line_sep><continue><block_end><break><block_end># Retrieve the control description self.cdesc=ompl.morse.environment.MyEnvironment(sockS sockC <true>).cdesc<if_stmt>self.cdesc[0]<g>16<block_start>self.report({'ERROR'} "OMPL Error: Control dimension exceeds 16! This dialog won't be able to accomdate that many.")<line_sep><return>{'FINISHED'}<block_end># Invoke bounds dialog <return>context.window_manager.invoke_props_dialog(self width=1100)<block_end>## # \brief <def_stmt>draw self _<block_start>mainlayout=self.layout.row()<line_sep># 3 sections in first column: sections=mainlayout.column()<line_sep>sections.label(text="Position Bounds:")<line_sep>sections.prop(self 'autopb')<line_sep>pb=sections.row()<line_sep>sections.separator()<line_sep>sections.label(text="Linear Velocity Bounds:")<line_sep>lb=sections.row()<line_sep>sections.separator()<line_sep>sections.label(text="Angular Velocity Bounds:")<line_sep>ab=sections.row()<line_sep># 1 section in second column cb=mainlayout.column()<line_sep>cb.label(text="Control Input Bounds:")<line_sep>cbrow1=cb.row()<line_sep>cbrow2=cb.row()<line_sep>cbrow3=cb.row()<line_sep>cbrow4=cb.row()<line_sep># In positional bounds sections, make 3 columns for X,Y,Z, with Min,Max in each X=pb.column()<line_sep>X.label(text="X")<line_sep>X.prop(self 'pbx' text="Min")<line_sep>X.prop(self 'pbX' text="Max")<line_sep>Y=pb.column()<line_sep>Y.label(text="Y")<line_sep>Y.prop(self 'pby' text="Min")<line_sep>Y.prop(self 'pbY' text="Max")<line_sep>Z=pb.column()<line_sep>Z.label(text="Z")<line_sep>Z.prop(self 'pbz' text="Min")<line_sep>Z.prop(self 'pbZ' text="Max")<line_sep># Linear velocity bounds Min,Max lb.prop(self 'lbm' text="Min")<line_sep>lb.prop(self 'lbM' text="Max")<line_sep># Angular ab.prop(self 'abm' text="Min")<line_sep>ab.prop(self 'abM' text="Max")<line_sep># Control Input last_component=<none><line_sep>i=0<line_sep>k=0<line_sep>cbrow=[cbrow1 cbrow2 cbrow3 cbrow4]<for_stmt>control self.cdesc[2:]<block_start><if_stmt>control[0]<ne>last_component# Only allow 4 robots per row <block_start>robot=cbrow[int(k/4)].column()<line_sep>k<augadd>1<line_sep># Print the robot name robot.label(text=control[0][:-6]+":")<line_sep>services=robot.box()<block_end># Print the controller function name services.label(text=control[1]+":")<line_sep>args=services.row()<for_stmt>j range(control[2])# Print the argument number <block_start>con=args.column()<line_sep>con.label(text="Arg %i"%j)<line_sep>con.prop(self 'cbm%i'%i text="Min")<line_sep>con.prop(self 'cbM%i'%i text="Max")<line_sep>i<augadd>1<block_end><block_end><block_end><block_end># # # Addon house-keeping # # ## # \brief Class describing the layout of the OMPL menu <class_stmt>OMPLMenu(bpy.types.Menu)<block_start>bl_idname="INFO_MT_game_ompl"<line_sep>bl_label="OMPL"<line_sep>## # \brief Add operators to the menu <def_stmt>draw self 
_<block_start>self.layout.operator_context='INVOKE_DEFAULT'<line_sep>self.layout.operator(AddRobot.bl_idname)<line_sep>self.layout.operator(AddGoal.bl_idname)<line_sep>self.layout.operator(BoundsConfiguration.bl_idname)<line_sep>self.layout.operator(Plan.bl_idname)<line_sep>self.layout.operator(AnimFile.bl_idname)<line_sep>self.layout.operator(Play.bl_idname)<block_end><block_end>## # \brief Function called to initialize the menu <def_stmt>menu_func self _<block_start>self.layout.menu(OMPLMenu.bl_idname)<block_end>## # \brief Deferred import of morse.builder (whenever a new file is loaded) @bpy.app.handlers.persistent<def_stmt>handler_scene_update_post _# A little hackish, but now is a good time to import morse.builder <block_start><if_stmt>'morse.builder'<not><in>sys.modules<block_start><del_stmt>AddRobot.robot_type<del_stmt>AddRobot.controller_type<line_sep>robotEnum=getRobots()<line_sep>controllerEnum=getControllers()<line_sep>AddRobot.robot_type=bpy.props.EnumProperty(items=robotEnum name="MORSE robot" description="A robot from the MORSE components library" default=robotEnum[-1][0])<line_sep>AddRobot.controller_type=bpy.props.EnumProperty(items=controllerEnum name="MORSE actuator" description="The actuator to control the robot" default=controllerEnum[-1][0])<line_sep>bpy.utils.unregister_class(AddRobot)<line_sep>bpy.utils.register_class(AddRobot)<block_end><block_end>## # \brief Called when the addon is enabled or Blender starts <def_stmt>register # Ensure that MORSE environment 'ompl' is registered in ~/.morse/config <block_start>config_path=os.path.expanduser("~/.morse")<if_stmt><not>os.path.exists(config_path)<block_start>os.mkdir(config_path)<block_end>config_file=os.path.join(config_path "config")<line_sep>conf=configparser.SafeConfigParser()<line_sep>conf.read(config_file)<if_stmt><not>conf.has_section("sites")<block_start>conf.add_section("sites")<block_end>conf.set('sites' 'ompl' OMPL_DIR)<with_stmt>open(config_file 'w')<as>configfile<block_start>conf.write(configfile)<block_end># Register all the operators, menu, and handler bpy.utils.register_class(Plan)<line_sep>bpy.utils.register_class(AnimFile)<line_sep>bpy.utils.register_class(Play)<line_sep>bpy.utils.register_class(AddRobot)<line_sep>bpy.utils.register_class(AddGoal)<line_sep>bpy.utils.register_class(BoundsConfiguration)<line_sep>bpy.utils.register_class(OMPLMenu)<line_sep>bpy.types.INFO_MT_game.prepend(menu_func)<line_sep>bpy.app.handlers.scene_update_post.append(handler_scene_update_post)<block_end>## # \brief Called when operator is uninstalled <def_stmt>unregister # Undo all the registering <block_start>bpy.utils.unregister_class(Plan)<line_sep>bpy.utils.unregister_class(AnimFile)<line_sep>bpy.utils.unregister_class(Play)<line_sep>bpy.utils.unregister_class(AddRobot)<line_sep>bpy.utils.unregister_class(AddGoal)<line_sep>bpy.utils.unregister_class(BoundsConfiguration)<line_sep>bpy.utils.unregister_class(OMPLMenu)<line_sep>bpy.types.INFO_MT_game.remove(menu_func)<line_sep>bpy.app.handlers.scene_update_post.remove(handler_scene_update_post)<block_end>
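# Editor's note: register()/unregister() above are the standard Blender addon hooks.
# Blender calls register() when the addon is enabled (which writes the MORSE 'ompl'
# site into ~/.morse/config, registers the operators, and prepends the OMPL menu to
# the Game menu) and unregister() when it is disabled, undoing those steps.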
# pylint: skip-file <import_stmt>os<import_stmt>tensorflow<as>tf<import_from_stmt>open_seq2seq.data Text2SpeechDataLayer<import_from_stmt>open_seq2seq.decoders CentaurDecoder<import_from_stmt>open_seq2seq.encoders CentaurEncoder<import_from_stmt>open_seq2seq.losses Text2SpeechLoss<import_from_stmt>open_seq2seq.models Text2SpeechCentaur<import_from_stmt>open_seq2seq.optimizers.lr_policies poly_decay<import_from_stmt>open_seq2seq.optimizers.novograd NovoGrad<line_sep>base_model=Text2SpeechCentaur<line_sep>dataset="LJ"<line_sep>dataset_location="/data/LJSpeech"<line_sep>output_type="both"<line_sep>trim=<false><line_sep>exp_mag=<true><line_sep>mag_num_feats=513<line_sep>train="train.csv"<line_sep>valid="test.csv"<line_sep>batch_size=32<line_sep>num_audio_features={"mel":80 "magnitude":mag_num_feats}<line_sep>data_min={"mel":1e-2 "magnitude":1e-5 }<line_sep>debug=<false><line_sep>num_gpus=8<if><not>debug<else>1<line_sep>reduction_factor=2<line_sep>attention_layers=4<line_sep>encoder_hidden_size=256<line_sep>decoder_hidden_size=512<line_sep>base_params={"random_seed":0 "use_horovod":<true><if><not>debug<else><false> "max_steps":1000000 "bench_start":0 "num_gpus":num_gpus "batch_size_per_gpu":batch_size "save_summaries_steps":1000<if><not>debug<else>10 "print_loss_steps":1000<if><not>debug<else>10 "print_samples_steps":1000<if><not>debug<else>10 "eval_steps":5000<if><not>debug<else>50 "save_checkpoint_steps":5000 "save_to_tensorboard":<true> "logdir":"result/centaur-float" "max_grad_norm":1. "optimizer":NovoGrad "optimizer_params":{"beta1":0.95 "beta2":0.98 "epsilon":1e-08 "weight_decay":0.001 } "lr_policy":poly_decay "lr_policy_params":{"learning_rate":0.02 "power":2.0 } "dtype":tf.float32 "initializer":tf.contrib.layers.xavier_initializer "summaries":["learning_rate" "variables" "gradients" "larc_summaries" "variable_norm" "gradient_norm" "global_gradient_norm"] "encoder":CentaurEncoder "encoder_params":{"src_vocab_size":94 "embedding_size":encoder_hidden_size "output_size":encoder_hidden_size "pad_embeddings_2_eight":<true> "cnn_dropout_prob":0.1 "conv_layers":[{"kernel_size":[3] "stride":[1] "num_channels":encoder_hidden_size "padding":"SAME" "activation_fn":tf.nn.relu} {"kernel_size":[3] "stride":[1] "num_channels":encoder_hidden_size "padding":"SAME" "activation_fn":tf.nn.relu} {"kernel_size":[3] "stride":[1] "num_channels":encoder_hidden_size "padding":"SAME" "activation_fn":tf.nn.relu} {"kernel_size":[3] "stride":[1] "num_channels":encoder_hidden_size "padding":"SAME" "activation_fn":tf.nn.relu}]} "decoder":CentaurDecoder "decoder_params":{"attention_layers":attention_layers "self_attention_conv_params":{"kernel_size":[5] "stride":[1] "num_channels":decoder_hidden_size "padding":"VALID" "is_causal":<true> "activation_fn":tf.nn.relu} "window_size":4 "back_step_size":0 "force_layers":[1 3] "hidden_size":decoder_hidden_size "reduction_factor":reduction_factor "prenet_layers":2 "prenet_hidden_size":decoder_hidden_size "prenet_use_inference_dropout":<false> "cnn_dropout_prob":0.1 "prenet_dropout":0.5 "conv_layers":[{"kernel_size":[5] "stride":[1] "num_channels":decoder_hidden_size "padding":"VALID" "is_causal":<true> "activation_fn":tf.nn.relu}]<times>4 "mag_conv_layers":[{"kernel_size":[5] "stride":[1] "num_channels":decoder_hidden_size "padding":"VALID" "is_causal":<true> "activation_fn":tf.nn.relu}]<times>4 "attention_dropout":0.1 "layer_postprocess_dropout":0.1} "loss":Text2SpeechLoss "loss_params":{"use_mask":<true> "l1_norm":<true>} "data_layer":Text2SpeechDataLayer 
"data_layer_params":{"dataset":dataset "use_cache":<true> "num_audio_features":num_audio_features "output_type":output_type "vocab_file":"open_seq2seq/test_utils/vocab_tts.txt" "dataset_location":dataset_location "mag_power":1 "pad_EOS":<true> "feature_normalize":<false> "feature_normalize_mean":0. "feature_normalize_std":1. "data_min":data_min "mel_type":"htk" "trim":trim "duration_max":1024 "duration_min":24 "exp_mag":exp_mag} }<line_sep>train_params={"data_layer_params":{"dataset_files":[os.path.join(dataset_location train) ] "shuffle":<true> } }<line_sep>eval_params={"data_layer_params":{"dataset_files":[os.path.join(dataset_location valid) ] "duration_max":1000 "duration_min":0 "shuffle":<false> } }<line_sep>infer_params={"data_layer_params":{"dataset_files":[os.path.join(dataset_location "infer.csv") ] "duration_max":1000 "duration_min":0 "shuffle":<false> } }<line_sep>interactive_infer_params={"data_layer_params":{"dataset_files":[] "duration_max":1000 "duration_min":0 "shuffle":<false> } }<line_sep>
# Generated by Django 3.2.12 on 2022-03-25 14:58 <import_from_stmt>django.conf settings<import_from_stmt>django.db migrations models<import_stmt>django.db.models.deletion<import_stmt>django_lifecycle.mixins<class_stmt>Migration(migrations.Migration)<block_start>initial=<true><line_sep>dependencies=[migrations.swappable_dependency(settings.AUTH_USER_MODEL) ('environments' '0018_add_minimum_change_request_approvals_to_environment') ]<line_sep>operations=[migrations.CreateModel(name='ChangeRequest' fields=[('id' models.AutoField(auto_created=<true> primary_key=<true> serialize=<false> verbose_name='ID')) ('created_at' models.DateTimeField(auto_now_add=<true>)) ('updated_at' models.DateTimeField(auto_now=<true>)) ('title' models.CharField(max_length=500)) ('description' models.TextField(blank=<true> null=<true>)) ('deleted_at' models.DateTimeField(null=<true>)) ('committed_at' models.DateTimeField(null=<true>)) ('committed_by' models.ForeignKey(null=<true> on_delete=django.db.models.deletion.SET_NULL related_name='committed_change_requests' to=settings.AUTH_USER_MODEL)) ('environment' models.ForeignKey(on_delete=django.db.models.deletion.CASCADE related_name='change_requests' to='environments.environment')) ('user' models.ForeignKey(on_delete=django.db.models.deletion.CASCADE related_name='change_requests' to=settings.AUTH_USER_MODEL)) ] options={'abstract':<false> } bases=(django_lifecycle.mixins.LifecycleModelMixin models.Model) ) migrations.CreateModel(name='ChangeRequestApproval' fields=[('id' models.AutoField(auto_created=<true> primary_key=<true> serialize=<false> verbose_name='ID')) ('created_at' models.DateTimeField(auto_now_add=<true>)) ('approved_at' models.DateTimeField(null=<true>)) ('change_request' models.ForeignKey(on_delete=django.db.models.deletion.CASCADE related_name='approvals' to='workflows_core.changerequest')) ('user' models.ForeignKey(on_delete=django.db.models.deletion.CASCADE to=settings.AUTH_USER_MODEL)) ] options={'unique_together':{('user' 'change_request')} } ) ]<block_end>
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_stmt>os<import_stmt>unittest<import_stmt>parl<import_stmt>time<import_stmt>threading<import_from_stmt>parl.remote.master Master<import_from_stmt>parl.remote.worker Worker<import_from_stmt>parl.remote.client disconnect<import_from_stmt>parl.utils get_free_tcp_port<class_stmt>TestImport(unittest.TestCase)<block_start><def_stmt>tearDown self<block_start>disconnect()<block_end><def_stmt>test_import_local_module self<block_start><import_from_stmt>Module2 B<line_sep>port=get_free_tcp_port()<line_sep>master=Master(port=port)<line_sep>th=threading.Thread(target=master.run)<line_sep>th.start()<line_sep>time.sleep(1)<line_sep>worker=Worker('localhost:{}'.format(port) 1)<line_sep>time.sleep(10)<line_sep>parl.connect("localhost:{}".format(port))<line_sep>obj=B()<line_sep>res=obj.add_sum(10 5)<line_sep>self.assertEqual(res 15)<line_sep>worker.exit()<line_sep>master.exit()<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>unittest.main()<block_end>
# <Copyright 2019, Argo AI, LLC. Released under the MIT license.> """Tracking Loader unit tests"""<import_stmt>pathlib<import_stmt>numpy<as>np<import_stmt>pytest<import_from_stmt>argoverse.data_loading.argoverse_tracking_loader ArgoverseTrackingLoader<import_from_stmt>argoverse.utils.camera_stats CAMERA_LIST<line_sep>TEST_DATA_LOC=str(pathlib.Path(__file__).parent.parent/"tests"/"test_data"/"tracking")<line_sep>@pytest.fixture# type: ignore <def_stmt>data_loader <arrow>ArgoverseTrackingLoader<block_start><return>ArgoverseTrackingLoader(TEST_DATA_LOC)<block_end><def_stmt>test_get_city_name data_loader:ArgoverseTrackingLoader<arrow><none><block_start><assert_stmt>data_loader.city_name<eq>"PIT"<block_end><def_stmt>test_calib data_loader:ArgoverseTrackingLoader<arrow><none><block_start><assert_stmt>data_loader.calib<line_sep>camera="ring_front_center"<line_sep>calib=data_loader.get_calibration(camera)<line_sep>pc=data_loader.get_lidar(0)<line_sep>uv=calib.project_ego_to_image(pc)<line_sep>uv_cam=calib.project_ego_to_cam(pc)<assert_stmt>(uv<eq>calib.project_cam_to_image(uv_cam)).all<assert_stmt>(uv_cam<eq>calib.project_image_to_cam(uv)).all<assert_stmt>(pc<eq>calib.project_image_to_ego(uv)).all<block_end><def_stmt>test_log_list data_loader:ArgoverseTrackingLoader<arrow><none><block_start><assert_stmt>data_loader.log_list[0]<eq>"1"<block_end><def_stmt>test_image_list data_loader:ArgoverseTrackingLoader<arrow><none><block_start><assert_stmt>set(data_loader.image_list.keys())<eq>set(CAMERA_LIST)<block_end><def_stmt>test_image_list_sync data_loader:ArgoverseTrackingLoader<arrow><none><block_start><assert_stmt>set(data_loader.image_list_sync.keys())<eq>set(CAMERA_LIST)<block_end><def_stmt>test_image_timestamp_sync data_loader:ArgoverseTrackingLoader<arrow><none><block_start><assert_stmt>set(data_loader.image_timestamp_list_sync.keys())<eq>set(CAMERA_LIST)<for_stmt>camera CAMERA_LIST<block_start><assert_stmt>3<not><in>data_loader.image_timestamp_list_sync[camera]<block_end><block_end><def_stmt>test_lidar_list data_loader:ArgoverseTrackingLoader<arrow><none><block_start><assert_stmt>len(data_loader.lidar_list)<eq>3<block_end><def_stmt>test_labale_list data_loader:ArgoverseTrackingLoader<arrow><none><block_start><assert_stmt>len(data_loader.label_list)<eq>3<block_end><def_stmt>test_image_timestamp_list data_loader:ArgoverseTrackingLoader<arrow><none><block_start><assert_stmt>set(data_loader.image_timestamp_list.keys())<eq>set(CAMERA_LIST)<for_stmt>camera CAMERA_LIST<block_start><assert_stmt>3<in>data_loader.image_timestamp_list[camera]<block_end><block_end><def_stmt>test_timestamp_image_dict data_loader:ArgoverseTrackingLoader<arrow><none><block_start><assert_stmt>set(data_loader.timestamp_image_dict.keys())<eq>set(CAMERA_LIST)<for_stmt>camera CAMERA_LIST<block_start><assert_stmt>len(data_loader.timestamp_image_dict[camera])<eq>4<block_end><block_end><def_stmt>test_timestamp_lidar_map data_loader:ArgoverseTrackingLoader<arrow><none><block_start><assert_stmt>len(data_loader.timestamp_lidar_dict)<eq>3<assert_stmt>len(data_loader.lidar_timestamp_list)<eq>3<block_end><def_stmt>test_data_loader_get data_loader:ArgoverseTrackingLoader<arrow><none><block_start>data_0=data_loader[0].current_log<line_sep>data_1=data_loader.get("1").current_log<assert_stmt>data_0<eq>data_1<block_end><def_stmt>test_loading_image_lidar data_loader:ArgoverseTrackingLoader<arrow><none><block_start>camera=CAMERA_LIST[0]<line_sep>log="1"<line_sep>image_1=data_loader.get_image_at_timestamp(0 camera 
log)<line_sep>image_2=data_loader.get_image_list_sync(camera log load=<true>)[0]<line_sep>image_3=data_loader.get_image(0 camera log)<line_sep>image_4=data_loader.get_image_sync(0 camera log)<assert_stmt>np.array_equal(image_1 image_2)<and>np.array_equal(image_1 image_3)<and>np.array_equal(image_1 image_4)<line_sep>lidar_0=data_loader.get_lidar(0 log)<line_sep>lidar_gt=np.array([[0.0 0.0 5.0] [1.0 0.0 5.0] [2.0 0.0 5.0] [3.0 0.0 5.0] [4.0 0.0 5.0] [5.0 0.0 5.0] [6.0 0.0 5.0] [7.0 0.0 5.0] [8.0 0.0 5.0] [9.0 0.0 5.0] ])<assert_stmt>np.array_equal(lidar_0 lidar_gt)<block_end><def_stmt>test_label_object data_loader:ArgoverseTrackingLoader<arrow><none><block_start>label_at_frame_0=data_loader.get_label_object(0)<for_stmt>label label_at_frame_0<block_start><assert_stmt>label.label_class<eq>"VEHICLE"<assert_stmt>label.width<eq>2<assert_stmt>label.height<eq>2<assert_stmt>label.length<eq>2<block_end><block_end><def_stmt>test_calibration data_loader:ArgoverseTrackingLoader<arrow><none><block_start><for_stmt>camera CAMERA_LIST<block_start>calib=data_loader.get_calibration(camera "1")<assert_stmt>calib.camera<eq>camera<block_end><block_end><def_stmt>test_pose data_loader:ArgoverseTrackingLoader<arrow><none><block_start><for_stmt>idx range(len(data_loader.lidar_list))<block_start>pose=data_loader.get_pose(idx)<if_stmt>pose<is><not><none><block_start><assert_stmt>np.array_equal(pose.inverse().transform_point_cloud(np.array([[0 0 0]])) pose.inverse_transform_point_cloud(np.array([[0 0 0]])) )<block_end><else_stmt><block_start><assert_stmt><false><block_end><block_end><block_end><def_stmt>test_idx_from_timestamp data_loader:ArgoverseTrackingLoader<arrow><none><block_start><for_stmt>i range(len(data_loader.lidar_list))<block_start><assert_stmt>data_loader.get_idx_from_timestamp(i)<eq>i<block_end><block_end>
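# Editor's note: a short usage sketch, grounded only in the calls exercised by
# the tests above, showing how ArgoverseTrackingLoader is driven outside pytest.
# TEST_DATA_LOC and the log id "1" come from the bundled fixture data.
if __name__ == "__main__":
    loader = ArgoverseTrackingLoader(TEST_DATA_LOC)
    print(loader.city_name)                      # "PIT" for the test log
    lidar_0 = loader.get_lidar(0, "1")           # (N, 3) point cloud for frame 0
    calib = loader.get_calibration("ring_front_center", "1")
    uv = calib.project_ego_to_image(lidar_0)     # project ego points into the image
    print(lidar_0.shape, uv.shape)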
# test bignum unary operations i=1<lshift>65<line_sep>print(bool(i))<line_sep>print(+i)<line_sep>print(-i)<line_sep>print(~i)<line_sep>
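# Editor's note: expected output of the checks above, for reference.
# 1 << 65 == 36893488147419103232, so the four prints yield:
#   True
#   36893488147419103232
#   -36893488147419103232
#   -36893488147419103233   (since ~i == -(i + 1))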
<import_stmt>torch<import_stmt>torch.nn<as>nn<import_from_stmt>torchvision.models vgg16<import_from_stmt>collections namedtuple<import_stmt>os<import_stmt>hashlib<import_stmt>requests<import_from_stmt>tqdm tqdm<line_sep>URL_MAP={"vgg_lpips":"https://heibox.uni-heidelberg.de/f/607503859c864bc1b30b/?dl=1"}<line_sep>CKPT_MAP={"vgg_lpips":"vgg.pth"}<line_sep>MD5_MAP={"vgg_lpips":"d507d7349b931f0638a25a48a722f98a"}<def_stmt>download url local_path chunk_size=1024<block_start>os.makedirs(os.path.split(local_path)[0] exist_ok=<true>)<with_stmt>requests.get(url stream=<true>)<as>r<block_start>total_size=int(r.headers.get("content-length" 0))<with_stmt>tqdm(total=total_size unit="B" unit_scale=<true>)<as>pbar<block_start><with_stmt>open(local_path "wb")<as>f<block_start><for_stmt>data r.iter_content(chunk_size=chunk_size)<block_start><if_stmt>data<block_start>f.write(data)<line_sep>pbar.update(chunk_size)<block_end><block_end><block_end><block_end><block_end><block_end><def_stmt>md5_hash path<block_start><with_stmt>open(path "rb")<as>f<block_start>content=f.read()<block_end><return>hashlib.md5(content).hexdigest()<block_end><def_stmt>get_ckpt_path name root check=<false><block_start><assert_stmt>name<in>URL_MAP<line_sep>path=os.path.join(root CKPT_MAP[name])<if_stmt><not>os.path.exists(path)<or>(check<and><not>md5_hash(path)<eq>MD5_MAP[name])<block_start>print(f"Downloading {name} model from {URL_MAP[name]} to {path}")<line_sep>download(URL_MAP[name] path)<line_sep>md5=md5_hash(path)<assert_stmt>md5<eq>MD5_MAP[name] md5<block_end><return>path<block_end><class_stmt>LPIPS(nn.Module)<block_start><def_stmt>__init__ self<block_start>super(LPIPS self).__init__()<line_sep>self.scaling_layer=ScalingLayer()<line_sep>self.channels=[64 128 256 512 512]<line_sep>self.feature_net=VGG16()<line_sep>self.lins=nn.ModuleList([NetLinLayer(self.channels[0] use_dropout=<true>) NetLinLayer(self.channels[1] use_dropout=<true>) NetLinLayer(self.channels[2] use_dropout=<true>) NetLinLayer(self.channels[3] use_dropout=<true>) NetLinLayer(self.channels[4] use_dropout=<true>)])<line_sep>self.load_from_pretrained()<for_stmt>param self.parameters()<block_start>param.requires_grad=<false><block_end><block_end><def_stmt>load_from_pretrained self name="vgg_lpips"<block_start>ckpt=get_ckpt_path(name "vgg_lpips")<line_sep>self.load_state_dict(torch.load(ckpt map_location=torch.device("cpu")) strict=<false>)<block_end><def_stmt>forward self real_x fake_x<block_start>features_real=self.feature_net(self.scaling_layer(real_x))<line_sep>features_fake=self.feature_net(self.scaling_layer(fake_x))<line_sep>diffs={}<line_sep># calc MSE differences between features <for_stmt>i range(len(self.channels))<block_start>diffs[i]=(norm_tensor(features_real[i])-norm_tensor(features_fake[i]))<power>2<block_end><return>sum([spatial_average(self.lins[i].model(diffs[i]))<for>i range(len(self.channels))])<block_end><block_end><class_stmt>ScalingLayer(nn.Module)<block_start><def_stmt>__init__ self<block_start>super(ScalingLayer self).__init__()<line_sep>self.register_buffer("shift" torch.Tensor([-.030 -.088 -.188])[<none> : <none> <none>])<line_sep>self.register_buffer("scale" torch.Tensor([.458 .448 .450])[<none> : <none> <none>])<block_end><def_stmt>forward self x<block_start><return>(x-self.shift)/self.scale<block_end><block_end><class_stmt>NetLinLayer(nn.Module)<block_start><def_stmt>__init__ self in_channels out_channels=1 use_dropout=<false><block_start>super(NetLinLayer 
self).__init__()<line_sep>self.model=nn.Sequential(nn.Dropout()<if>use_dropout<else><none> nn.Conv2d(in_channels out_channels 1 1 0 bias=<false>))<block_end><block_end><class_stmt>VGG16(nn.Module)<block_start><def_stmt>__init__ self<block_start>super(VGG16 self).__init__()<line_sep>vgg_pretrained_features=vgg16(pretrained=<true>).features<line_sep>slices=[vgg_pretrained_features[i]<for>i range(30)]<line_sep>self.slice1=nn.Sequential(*slices[0:4])<line_sep>self.slice2=nn.Sequential(*slices[4:9])<line_sep>self.slice3=nn.Sequential(*slices[9:16])<line_sep>self.slice4=nn.Sequential(*slices[16:23])<line_sep>self.slice5=nn.Sequential(*slices[23:30])<for_stmt>param self.parameters()<block_start>param.requires_grad=<false><block_end><block_end><def_stmt>forward self x<block_start>h=self.slice1(x)<line_sep>h_relu1=h<line_sep>h=self.slice2(h)<line_sep>h_relu2=h<line_sep>h=self.slice3(h)<line_sep>h_relu3=h<line_sep>h=self.slice4(h)<line_sep>h_relu4=h<line_sep>h=self.slice5(h)<line_sep>h_relu5=h<line_sep>vgg_outputs=namedtuple("VGGOutputs" ['relu1_2' 'relu2_2' 'relu3_3' 'relu4_3' 'relu5_3'])<line_sep><return>vgg_outputs(h_relu1 h_relu2 h_relu3 h_relu4 h_relu5)<block_end><block_end><def_stmt>norm_tensor x<block_start>""" Normalize images by their length to make them unit vector? :param x: batch of images :return: normalized batch of images """<line_sep>norm_factor=torch.sqrt(torch.sum(x<power>2 dim=1 keepdim=<true>))<line_sep><return>x/(norm_factor+1e-10)<block_end><def_stmt>spatial_average x<block_start>""" imgs have: batch_size x channels x width x height --> average over width and height channel :param x: batch of images :return: averaged images along width and height """<line_sep><return>x.mean([2 3] keepdim=<true>)<block_end><if_stmt>__name__<eq>'__main__'<block_start>real=torch.randn(10 3 256 256)<line_sep>fake=torch.randn(10 3 256 256)<line_sep>loss=LPIPS().eval()<line_sep>print(loss(real fake).shape)<block_end>
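# Editor's note: a small self-contained check (random inputs only, no pretrained
# weights needed) illustrating the two helpers above: norm_tensor rescales the
# channel vector at every spatial location to unit length, and spatial_average
# collapses the H and W axes. Call _demo_helpers() to see the values.
def _demo_helpers():
    x = torch.randn(2, 8, 4, 4)
    unit = norm_tensor(x)
    # channel-wise L2 norm is ~1 at every pixel after normalization
    print(torch.sqrt((unit ** 2).sum(dim=1)).mean())   # ~1.0
    print(spatial_average(x).shape)                     # torch.Size([2, 8, 1, 1])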
<import_from_stmt>django.contrib messages<import_from_stmt>django.templatetags.static static<import_from_stmt>django.utils.translation ugettext_lazy<as>_<import_from_stmt>django.views.generic RedirectView<import_from_stmt>stronghold.views StrongholdPublicMixin<import_from_stmt>mayan.apps.views.generics ConfirmView SimpleView<import_from_stmt>mayan.apps.views.mixins ExternalContentTypeObjectViewMixin ObjectNameViewMixin <import_from_stmt>.classes ModelCopy<import_from_stmt>.forms LicenseForm<import_from_stmt>.icons icon_setup<import_from_stmt>.menus menu_tools menu_setup<import_from_stmt>.permissions permission_object_copy<import_from_stmt>.settings setting_home_view<class_stmt>AboutView(SimpleView)<block_start>extra_context={'title':_('About')}<line_sep>template_name='appearance/about.html'<block_end><class_stmt>FaviconRedirectView(RedirectView)<block_start>permanent=<true><def_stmt>get_redirect_url self *args **kwargs<block_start>""" Hide the static tag import to avoid errors with static file processors. """<line_sep><return>static(path='appearance/images/favicon.ico')<block_end><block_end><class_stmt>HomeView(SimpleView)<block_start>extra_context={'title':_('Home') }<line_sep>template_name='appearance/home.html'<block_end><class_stmt>LicenseView(SimpleView)<block_start>extra_context={'form':LicenseForm() 'read_only':<true> 'title':_('License') }<line_sep>template_name='appearance/generic_form.html'<block_end><class_stmt>ObjectCopyView(ExternalContentTypeObjectViewMixin ObjectNameViewMixin ConfirmView)<block_start>external_object_permission=permission_object_copy<def_stmt>get_extra_context self<block_start>model_copy=ModelCopy.get(model=self.external_object._meta.model)<line_sep>context={'object':self.external_object 'subtitle':_('Fields to be copied: %s')%', '.join(sorted(map(str model_copy.get_fields_verbose_names())))}<line_sep>context['title']=_('Make a copy of %(object_name)s "%(object)s"?')%{'object_name':self.get_object_name(context=context) 'object':self.external_object}<line_sep><return>context<block_end><def_stmt>view_action self<block_start>self.external_object.copy_instance()<line_sep>messages.success(message=_('Object copied successfully.') request=self.request)<block_end><block_end><class_stmt>RootView(StrongholdPublicMixin SimpleView)<block_start>extra_context={'home_view':setting_home_view.value}<line_sep>template_name='appearance/root.html'<block_end><class_stmt>SetupListView(SimpleView)<block_start>template_name='appearance/generic_list_horizontal.html'<def_stmt>get_extra_context self **kwargs<block_start><return>{'no_results_icon':icon_setup 'no_results_text':_('No results here means that don\'t have the required '<concat>'permissions to perform administrative task.') 'no_results_title':_('No setup options available.') 'resolved_links':menu_setup.resolve(request=self.request sort_results=<true>) 'title':_('Setup items') 'subtitle':_('Here you can configure all aspects of the system.')}<block_end><block_end><class_stmt>ToolsListView(SimpleView)<block_start>template_name='appearance/generic_list_horizontal.html'<def_stmt>get_extra_context self<block_start><return>{'resolved_links':menu_tools.resolve(request=self.request sort_results=<true>) 'title':_('Tools') 'subtitle':_('These modules are used to do system maintenance.')}<block_end><block_end>
<def_stmt>create_xml_doc text<block_start>JS(""" try //Internet Explorer { var xmlDoc=new ActiveXObject("Microsoft['XMLDOM']"); xmlDoc['async']="false"; xmlDoc['loadXML'](@{{text}}); } catch(e) { try //Firefox, Mozilla, Opera, etc. { var parser=new DOMParser(); xmlDoc=parser['parseFromString'](@{{text}},"text/xml"); } catch(e) { return null; } } return xmlDoc; """)<block_end>
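# Editor's note: create_xml_doc is pyjs-style code -- the JS() body only runs
# after translation to JavaScript in the browser, so it cannot be exercised in
# plain CPython. A hypothetical call looks like:
#   doc = create_xml_doc("<note><to>user</to></note>")
# where `doc` is an XML DOM document (via ActiveX on old IE, DOMParser elsewhere),
# or None when the markup cannot be parsed.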
# python3 # Copyright 2021 InstaDeep Ltd. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """VDN system implementation."""<import_stmt>functools<import_from_stmt>typing Any Callable Dict Optional Type Union<import_stmt>dm_env<import_stmt>reverb<import_stmt>sonnet<as>snt<import_from_stmt>acme specs<as>acme_specs<import_from_stmt>acme.utils counting<import_stmt>mava<import_from_stmt>mava core<import_from_stmt>mava specs<as>mava_specs<import_from_stmt>mava.components.tf.architectures DecentralisedValueActor<import_from_stmt>mava.components.tf.modules mixing<import_from_stmt>mava.environment_loop ParallelEnvironmentLoop<import_from_stmt>mava.systems.tf executors<import_from_stmt>mava.systems.tf.madqn.system MADQN<import_from_stmt>mava.systems.tf.vdn builder execution training<import_from_stmt>mava.types EpsilonScheduler<import_from_stmt>mava.utils.loggers MavaLogger logger_utils<line_sep># TODO Implement recurrent VDN <class_stmt>VDN(MADQN)<block_start>"""VDN system."""<def_stmt>__init__ self environment_factory:Callable[[bool] dm_env.Environment] network_factory:Callable[[acme_specs.BoundedArray] Dict[str snt.Module]] exploration_scheduler_fn:Union[EpsilonScheduler Dict[str EpsilonScheduler] Dict[str Dict[str EpsilonScheduler]] ] logger_factory:Callable[[str] MavaLogger]=<none> architecture:Type[DecentralisedValueActor]=DecentralisedValueActor trainer_fn:Type[training.VDNTrainer]=training.VDNTrainer executor_fn:Type[core.Executor]=execution.VDNFeedForwardExecutor mixer:Type[mixing.BaseMixingModule]=mixing.AdditiveMixing num_executors:int=1 num_caches:int=0 environment_spec:mava_specs.MAEnvironmentSpec=<none> shared_weights:bool=<true> agent_net_keys:Dict[str str]={} batch_size:int=256 prefetch_size:int=4 min_replay_size:int=1000 max_replay_size:int=1000000 samples_per_insert:Optional[float]=32.0 n_step:int=5 sequence_length:int=20 importance_sampling_exponent:Optional[float]=<none> max_priority_weight:float=0.9 period:int=20 max_gradient_norm:float=<none> discount:float=0.99 optimizer:Union[snt.Optimizer Dict[str snt.Optimizer]]=snt.optimizers.Adam(learning_rate=1e-4) target_update_period:int=100 executor_variable_update_period:int=1000 max_executor_steps:int=<none> checkpoint:bool=<true> checkpoint_subpath:str="~/mava/" checkpoint_minute_interval:int=5 logger_config:Dict={} train_loop_fn:Callable=ParallelEnvironmentLoop eval_loop_fn:Callable=ParallelEnvironmentLoop train_loop_fn_kwargs:Dict={} eval_loop_fn_kwargs:Dict={} evaluator_interval:Optional[dict]=<none> learning_rate_scheduler_fn:Optional[Callable[[int] <none>]]=<none> seed:Optional[int]=<none> <block_start>"""Initialise the system Args: environment_factory (Callable[[bool], dm_env.Environment]): function to instantiate an environment. network_factory (Callable[[acme_specs.BoundedArray], Dict[str, snt.Module]]): function to instantiate system networks. logger_factory (Callable[[str], MavaLogger], optional): function to instantiate a system logger. Defaults to None. 
architecture (Type[DecentralisedValueActor], optional): system architecture, e.g. decentralised or centralised. Defaults to DecentralisedValueActor. trainer_fn (Type[training.VDNTrainer], optional): training type associated with executor and architecture, e.g. centralised training. Defaults to training.VDNTrainer. executor_fn (Type[core.Executor], optional): executor type, e.g. feedforward or recurrent. Defaults to execution.VDNFeedForwardExecutor. mixer (Type[mixing.BaseMixingModule], optional): mixer module type, e.g. additive or monotonic mixing. Defaults to mixing.AdditiveMixing. exploration_scheduler_fn (Type[ LinearExplorationScheduler ], optional): function specifying a decaying scheduler for epsilon exploration. See mava/systems/tf/madqn/system.py for details. num_executors (int, optional): number of executor processes to run in parallel. Defaults to 1. num_caches (int, optional): number of trainer node caches. Defaults to 0. environment_spec (mava_specs.MAEnvironmentSpec, optional): description of the action, observation spaces etc. for each agent in the system. Defaults to None. shared_weights (bool, optional): whether agents should share weights or not. When agent_net_keys are provided the value of shared_weights is ignored. Defaults to True. agent_net_keys: (dict, optional): specifies what network each agent uses. Defaults to {}. batch_size (int, optional): sample batch size for updates. Defaults to 256. prefetch_size (int, optional): size to prefetch from replay. Defaults to 4. min_replay_size (int, optional): minimum replay size before updating. Defaults to 1000. max_replay_size (int, optional): maximum replay size. Defaults to 1000000. samples_per_insert (Optional[float], optional): number of samples to take from replay for every insert that is made. Defaults to 32.0. n_step (int, optional): number of steps to include prior to boostrapping. Defaults to 5. sequence_length (int, optional): recurrent sequence rollout length. Defaults to 20. importance_sampling_exponent: (float): Not implemented yet. max_priority_weight(float): Not implemented yet. period (int, optional): The period with which we add sequences. See `period` in `acme.SequenceAdder.period` for more info. Defaults to 20. max_gradient_norm (float, optional): maximum allowed norm for gradients before clipping is applied. Defaults to None. discount (float, optional): discount factor to use for TD updates. Defaults to 0.99. optimizer (Union[snt.Optimizer, Dict[str, snt.Optimizer]], optional): type of optimizer to use to update network parameters. Defaults to snt.optimizers.Adam( learning_rate=1e-4 ). target_update_period (int, optional): number of steps before target networks are updated. Defaults to 100. executor_variable_update_period (int, optional): number of steps before updating executor variables from the variable source. Defaults to 1000. max_executor_steps (int, optional): maximum number of steps and executor can in an episode. Defaults to None. checkpoint (bool, optional): whether to checkpoint models. Defaults to False. checkpoint_subpath (str, optional): subdirectory specifying where to store checkpoints. Defaults to "~/mava/". checkpoint_minute_interval (int): The number of minutes to wait between checkpoints. logger_config (Dict, optional): additional configuration settings for the logger factory. Defaults to {}. train_loop_fn (Callable, optional): function to instantiate a train loop. Defaults to ParallelEnvironmentLoop. eval_loop_fn (Callable, optional): function to instantiate an evaluation loop. 
Defaults to ParallelEnvironmentLoop. train_loop_fn_kwargs (Dict, optional): possible keyword arguments to send to the training loop. Defaults to {}. eval_loop_fn_kwargs (Dict, optional): possible keyword arguments to send to the evaluation loop. Defaults to {}. learning_rate_scheduler_fn: function/class that takes in a trainer step t and returns the current learning rate. seed: seed for reproducible sampling (for epsilon greedy action selection). evaluator_interval: An optional condition that is used to evaluate/test system performance after [evaluator_interval] condition has been met. If None, evaluation will happen at every timestep. E.g. to evaluate a system after every 100 executor episodes, evaluator_interval = {"executor_episodes": 100}. """<line_sep>self._mixer=mixer<line_sep># set default logger if no logger provided <if_stmt><not>logger_factory<block_start>logger_factory=functools.partial(logger_utils.make_logger directory="~/mava" to_terminal=<true> time_delta=10 )<block_end>super(VDN self).__init__(architecture=architecture environment_factory=environment_factory network_factory=network_factory logger_factory=logger_factory environment_spec=environment_spec shared_weights=shared_weights agent_net_keys=agent_net_keys num_executors=num_executors num_caches=num_caches max_executor_steps=max_executor_steps checkpoint_subpath=checkpoint_subpath checkpoint=checkpoint checkpoint_minute_interval=checkpoint_minute_interval train_loop_fn=train_loop_fn train_loop_fn_kwargs=train_loop_fn_kwargs eval_loop_fn=eval_loop_fn eval_loop_fn_kwargs=eval_loop_fn_kwargs logger_config=logger_config exploration_scheduler_fn=exploration_scheduler_fn learning_rate_scheduler_fn=learning_rate_scheduler_fn seed=seed evaluator_interval=evaluator_interval )<if_stmt>issubclass(executor_fn executors.RecurrentExecutor)<block_start>extra_specs=self._get_extra_specs()<block_end><else_stmt><block_start>extra_specs={}<block_end>self._builder=builder.VDNBuilder(builder.VDNConfig(environment_spec=self._environment_spec agent_net_keys=self._agent_net_keys discount=discount batch_size=batch_size prefetch_size=prefetch_size target_update_period=target_update_period executor_variable_update_period=executor_variable_update_period min_replay_size=min_replay_size max_replay_size=max_replay_size samples_per_insert=samples_per_insert n_step=n_step sequence_length=sequence_length importance_sampling_exponent=importance_sampling_exponent max_priority_weight=max_priority_weight period=period max_gradient_norm=max_gradient_norm checkpoint=checkpoint optimizer=optimizer checkpoint_subpath=checkpoint_subpath checkpoint_minute_interval=checkpoint_minute_interval evaluator_interval=evaluator_interval learning_rate_scheduler_fn=learning_rate_scheduler_fn ) trainer_fn=trainer_fn executor_fn=executor_fn extra_specs=extra_specs )<block_end><def_stmt>trainer self replay:reverb.Client counter:counting.Counter <arrow>mava.core.Trainer<block_start>"""System trainer Args: replay (reverb.Client): replay data table to pull data from. counter (counting.Counter): step counter object. Returns: mava.core.Trainer: system trainer. 
"""<line_sep># Create the networks to optimize (online) networks=self._network_factory(# type: ignore environment_spec=self._environment_spec agent_net_keys=self._agent_net_keys )<line_sep># Create system architecture architecture=self._architecture(environment_spec=self._environment_spec value_networks=networks["q_networks"] agent_net_keys=self._agent_net_keys )<line_sep># Augment network architecture by adding mixing layer network. system_networks=self._mixer(architecture=architecture ).create_system()<line_sep># create logger trainer_logger_config={}<if_stmt>self._logger_config<and>"trainer"<in>self._logger_config<block_start>trainer_logger_config=self._logger_config["trainer"]<block_end>trainer_logger=self._logger_factory(# type: ignore "trainer" **trainer_logger_config)<line_sep>dataset=self._builder.make_dataset_iterator(replay)<line_sep>counter=counting.Counter(counter "trainer")<line_sep><return>self._builder.make_trainer(networks=system_networks dataset=dataset counter=counter logger=trainer_logger )<block_end><def_stmt>build self name:str="vdn"<arrow>Any<block_start>"""Build the distributed system as a graph program. Args: name (str, optional): system name. Defaults to "vdn". Returns: Any: graph program for distributed system training. """<line_sep><return>super().build(name=name)<block_end><block_end>
# test exception matching against a tuple <try_stmt><block_start>fail<block_end><except_stmt>(Exception )<block_start>print('except 1')<block_end><try_stmt><block_start>fail<block_end><except_stmt>(Exception Exception)<block_start>print('except 2')<block_end><try_stmt><block_start>fail<block_end><except_stmt>(TypeError NameError)<block_start>print('except 3')<block_end><try_stmt><block_start>fail<block_end><except_stmt>(TypeError ValueError Exception)<block_start>print('except 4')<block_end>
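# Editor's note: each bare `fail` raises NameError, which is a subclass of
# Exception and is listed explicitly in the last two tuples, so every handler
# matches and the script prints 'except 1' through 'except 4' in order.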
# Copyright (C) 2018-2021 Intel Corporation # SPDX-License-Identifier: Apache-2.0 <def_stmt>tf_pack_ext pb<block_start><assert_stmt>(pb.attr["N"].i<eq>len(pb.input))<line_sep><return>{'axis':pb.attr["axis"].i 'N':pb.attr["N"].i 'infer':<none>}<block_end>
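# Editor's note: an illustrative check of the extractor above using a stand-in
# for the TensorFlow NodeDef protobuf (the real `pb` comes from the Model
# Optimizer front end; the tiny classes below are assumptions made only so the
# example is self-contained).
class _FakeAttr:
    def __init__(self, i):
        self.i = i

class _FakePackNode:
    def __init__(self, axis, inputs):
        self.attr = {"N": _FakeAttr(len(inputs)), "axis": _FakeAttr(axis)}
        self.input = inputs

if __name__ == "__main__":
    node = _FakePackNode(axis=0, inputs=["a", "b", "c"])
    print(tf_pack_ext(node))  # {'axis': 0, 'N': 3, 'infer': None}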
# Copyright 2019 kubeflow.org. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_stmt>os<line_sep># PyTorchJob K8S constants PYTORCHJOB_GROUP='kubeflow.org'<line_sep>PYTORCHJOB_KIND='PyTorchJob'<line_sep>PYTORCHJOB_PLURAL='pytorchjobs'<line_sep>PYTORCHJOB_VERSION=os.environ.get('PYTORCHJOB_VERSION' 'v1')<line_sep>PYTORCH_LOGLEVEL=os.environ.get('PYTORCHJOB_LOGLEVEL' 'INFO').upper()<line_sep># How long to wait in seconds for requests to the ApiServer APISERVER_TIMEOUT=120<line_sep>#PyTorchJob Labels Name PYTORCHJOB_CONTROLLER_LABEL='controller-name'<line_sep>PYTORCHJOB_GROUP_LABEL='group-name'<line_sep>PYTORCHJOB_NAME_LABEL='pytorch-job-name'<line_sep>PYTORCHJOB_TYPE_LABEL='pytorch-replica-type'<line_sep>PYTORCHJOB_INDEX_LABEL='pytorch-replica-index'<line_sep>PYTORCHJOB_ROLE_LABEL='job-role'<line_sep>
<import_from_stmt>vue VueComponent<class_stmt>Modal(VueComponent)<block_start>template="#modal-template"<block_end>Modal.register()<class_stmt>App(VueComponent)<block_start>template="#main"<line_sep>show_modal=<false><block_end>App("#app")<line_sep>
# coding: utf-8 <import_stmt>numpy<as>np<import_stmt>chainer<import_stmt>chainer.functions<as>F<import_stmt>chainer.links<as>L<import_from_stmt>chainer_compiler.elichika testtools<class_stmt>Simple(chainer.Chain)<block_start><def_stmt>__init__ self func<block_start>super().__init__()<line_sep>self.func=func<block_end><def_stmt>forward self v<block_start><return>self.func(v)<block_end><block_end><class_stmt>Axis(chainer.Chain)<block_start><def_stmt>__init__ self func<block_start>super().__init__()<line_sep>self.func=func<block_end><def_stmt>forward self v<block_start><return>self.func(v axis=1)<block_end><block_end><class_stmt>KeepDims(chainer.Chain)<block_start><def_stmt>__init__ self func<block_start>super().__init__()<line_sep>self.func=func<block_end><def_stmt>forward self v<block_start><return>self.func(v keepdims=<true>)<block_end><block_end><class_stmt>AxisKeepDims(chainer.Chain)<block_start><def_stmt>__init__ self func<block_start>super().__init__()<line_sep>self.func=func<block_end><def_stmt>forward self v<block_start><return>self.func(v axis=1 keepdims=<true>)<block_end><block_end># ====================================== <def_stmt>main <block_start>np.random.seed(314)<line_sep>a1=np.random.rand(6 2 3).astype(np.float32)<def_stmt>test func name<block_start>testtools.generate_testcase(Simple(func) [a1] subname=name+'_simple')<line_sep>testtools.generate_testcase(Axis(func) [a1] subname=name+'_axis')<line_sep>testtools.generate_testcase(KeepDims(func) [a1] subname=name+'_keepdims')<line_sep>testtools.generate_testcase(AxisKeepDims(func) [a1] subname=name+'_axiskeepdims')<block_end>test(F.min 'min')<line_sep>test(F.max 'max')<block_end><if_stmt>__name__<eq>'__main__'<block_start>main()<block_end>
<import_stmt>os<import_from_stmt>flask Flask redirect url_for session render_template flash<import_from_stmt>flask_script Manager<import_from_stmt>flask_bootstrap Bootstrap<import_from_stmt>flask_moment Moment<import_from_stmt>flask_sqlalchemy SQLAlchemy<import_from_stmt>flask_login LoginManager<import_from_stmt>config config<import_from_stmt>werkzeug.routing BaseConverter<class_stmt>RegexConverter(BaseConverter)<block_start><def_stmt>__init__ self url_map *items<block_start>super(RegexConverter self).__init__(url_map)<line_sep>self.regex=items[0]<block_end><block_end>manager=Manager()<line_sep>bootstrap=Bootstrap()<line_sep>moment=Moment()<line_sep>db=SQLAlchemy()<line_sep>login_manager=LoginManager()<line_sep>login_manager.session_protection='strong'<line_sep>login_manager.login_view="auth.login"<def_stmt>create_app config_name<block_start>app=Flask(__name__)<line_sep>app.config.from_object(config[config_name])<line_sep>config[config_name].init_app(app)<line_sep>app.url_map.converters['regex']=RegexConverter<line_sep>bootstrap.init_app(app)<line_sep>moment.init_app(app)<line_sep>db.init_app(app)<line_sep>login_manager.init_app(app)<import_from_stmt>.main main<as>main_blueprint<line_sep>app.register_blueprint(main_blueprint)<import_from_stmt>.auth auth<as>auth_blueprint<line_sep>app.register_blueprint(auth_blueprint url_prefix='/auth')<line_sep><return>app<block_end>
<import_stmt>typing<import_from_stmt>.context mango<import_from_stmt>.fakes fake_context fake_wallet<import_from_stmt>decimal Decimal<def_stmt>test_trade_executor_constructor <arrow><none><block_start>succeeded=<false><try_stmt><block_start>mango.TradeExecutor()# type: ignore[abstract] <block_end><except_stmt>TypeError# Can't instantiate the abstract base class. <block_start>succeeded=<true><block_end><assert_stmt>succeeded<block_end><def_stmt>test_null_trade_executor_constructor <arrow><none><block_start><def_stmt>reporter x:typing.Any<arrow><none><block_start><return><none><block_end>actual=mango.NullTradeExecutor(reporter)<assert_stmt>actual<is><not><none><assert_stmt>actual.reporter<eq>reporter<block_end><def_stmt>test_serum_trade_executor_constructor <arrow><none><block_start>context:mango.Context=fake_context()<line_sep>wallet:mango.Wallet=fake_wallet()<line_sep>price_adjustment_factor:Decimal=Decimal(0.05)<def_stmt>reporter x:typing.Any<arrow><none><block_start><return><none><block_end>actual=mango.ImmediateTradeExecutor(context wallet <none> price_adjustment_factor reporter)<assert_stmt>actual<is><not><none><assert_stmt>actual.context<eq>context<assert_stmt>actual.wallet<eq>wallet<assert_stmt>actual.price_adjustment_factor<eq>price_adjustment_factor<assert_stmt>actual.reporter<is><not><none><block_end>
<import_stmt>librosa.display<import_from_stmt>voicefixer.tools.pytorch_util *<import_from_stmt>voicefixer.tools.wav *<import_from_stmt>voicefixer.restorer.model VoiceFixer<as>voicefixer_fe<import_stmt>os<line_sep>EPS=1e-8<class_stmt>VoiceFixer(nn.Module)<block_start><def_stmt>__init__ self<block_start>super(VoiceFixer self).__init__()<line_sep>self._model=voicefixer_fe(channels=2 sample_rate=44100)<line_sep># print(os.path.join(os.path.expanduser('~'), ".cache/voicefixer/analysis_module/checkpoints/epoch=15_trimed_bn.ckpt")) self._model.load_state_dict(torch.load(os.path.join(os.path.expanduser("~") ".cache/voicefixer/analysis_module/checkpoints/vf.ckpt" )))<line_sep>self._model.eval()<block_end><def_stmt>_load_wav_energy self path sample_rate threshold=0.95<block_start>wav_10k,_=librosa.load(path sr=sample_rate)<line_sep>stft=np.log10(np.abs(librosa.stft(wav_10k))+1.0)<line_sep>fbins=stft.shape[0]<line_sep>e_stft=np.sum(stft axis=1)<for_stmt>i range(e_stft.shape[0])<block_start>e_stft[-i-1]=np.sum(e_stft[:-i-1])<block_end>total=e_stft[-1]<for_stmt>i range(e_stft.shape[0])<block_start><if_stmt>e_stft[i]<l>total<times>threshold<block_start><continue><block_end><else_stmt><block_start><break><block_end><block_end><return>wav_10k int((sample_rate<floordiv>2)<times>(i/fbins))<block_end><def_stmt>_load_wav self path sample_rate threshold=0.95<block_start>wav_10k,_=librosa.load(path sr=sample_rate)<line_sep><return>wav_10k<block_end><def_stmt>_amp_to_original_f self mel_sp_est mel_sp_target cutoff=0.2<block_start>freq_dim=mel_sp_target.size()[-1]<line_sep>mel_sp_est_low,mel_sp_target_low=(mel_sp_est[<ellipsis> 5:int(freq_dim<times>cutoff)] mel_sp_target[<ellipsis> 5:int(freq_dim<times>cutoff)] )<line_sep>energy_est,energy_target=torch.mean(mel_sp_est_low dim=(2 3)) torch.mean(mel_sp_target_low dim=(2 3))<line_sep>amp_ratio=energy_target/energy_est<line_sep><return>mel_sp_est<times>amp_ratio[<ellipsis> <none> <none>] mel_sp_target<block_end><def_stmt>_trim_center self est ref<block_start>diff=np.abs(est.shape[-1]-ref.shape[-1])<if_stmt>est.shape[-1]<eq>ref.shape[-1]<block_start><return>est ref<block_end><elif_stmt>est.shape[-1]<g>ref.shape[-1]<block_start>min_len=min(est.shape[-1] ref.shape[-1])<line_sep>est,ref=est[<ellipsis> int(diff<floordiv>2):-int(diff<floordiv>2)] ref<line_sep>est,ref=est[<ellipsis> :min_len] ref[<ellipsis> :min_len]<line_sep><return>est ref<block_end><else_stmt><block_start>min_len=min(est.shape[-1] ref.shape[-1])<line_sep>est,ref=est ref[<ellipsis> int(diff<floordiv>2):-int(diff<floordiv>2)]<line_sep>est,ref=est[<ellipsis> :min_len] ref[<ellipsis> :min_len]<line_sep><return>est ref<block_end><block_end><def_stmt>_pre self model input cuda<block_start>input=input[<none> <none> <ellipsis>]<line_sep>input=torch.tensor(input)<line_sep>input=try_tensor_cuda(input cuda=cuda)<line_sep>sp,_,_=model.f_helper.wav_to_spectrogram_phase(input)<line_sep>mel_orig=model.mel(sp.permute(0 1 3 2)).permute(0 1 3 2)<line_sep># return models.to_log(sp), models.to_log(mel_orig) <return>sp mel_orig<block_end><def_stmt>remove_higher_frequency self wav ratio=0.95<block_start>stft=librosa.stft(wav)<line_sep>real,img=np.real(stft) np.imag(stft)<line_sep>mag=(real<power>2+img<power>2)<power>0.5<line_sep>cos,sin=real/(mag+EPS) img/(mag+EPS)<line_sep>spec=np.abs(stft)# [1025,T] feature=spec.copy()<line_sep>feature=np.log10(feature+EPS)<line_sep>feature[feature<l>0]=0<line_sep>energy_level=np.sum(feature axis=1)<line_sep>threshold=np.sum(energy_level)<times>ratio<line_sep>curent_level,i=energy_level[0] 
0<while_stmt>i<l>energy_level.shape[0]<and>curent_level<l>threshold<block_start>curent_level<augadd>energy_level[i+1 <ellipsis>]<line_sep>i<augadd>1<block_end>spec[i: <ellipsis>]=np.zeros_like(spec[i: <ellipsis>])<line_sep>stft=spec<times>cos+1j<times>spec<times>sin<line_sep><return>librosa.istft(stft)<block_end>@torch.no_grad()<def_stmt>restore_inmem self wav_10k cuda=<false> mode=0 your_vocoder_func=<none><block_start>check_cuda_availability(cuda=cuda)<line_sep>self._model=try_tensor_cuda(self._model cuda=cuda)<if_stmt>mode<eq>0<block_start>self._model.eval()<block_end><elif_stmt>mode<eq>1<block_start>self._model.eval()<block_end><elif_stmt>mode<eq>2<block_start>self._model.train()# More effective on seriously demaged speech <block_end>res=[]<line_sep>seg_length=44100<times>30<line_sep>break_point=seg_length<while_stmt>break_point<l>wav_10k.shape[0]+seg_length<block_start>segment=wav_10k[break_point-seg_length:break_point]<if_stmt>mode<eq>1<block_start>segment=self.remove_higher_frequency(segment)<block_end>sp,mel_noisy=self._pre(self._model segment cuda)<line_sep>out_model=self._model(sp mel_noisy)<line_sep>denoised_mel=from_log(out_model["mel"])<if_stmt>your_vocoder_func<is><none><block_start>out=self._model.vocoder(denoised_mel cuda=cuda)<block_end><else_stmt><block_start>out=your_vocoder_func(denoised_mel)<block_end># unify energy <if_stmt>torch.max(torch.abs(out))<g>1.0<block_start>out=out/torch.max(torch.abs(out))<line_sep>print("Warning: Exceed energy limit," input)<block_end># frame alignment out,_=self._trim_center(out segment)<line_sep>res.append(out)<line_sep>break_point<augadd>seg_length<block_end>out=torch.cat(res -1)<line_sep><return>tensor2numpy(out.squeeze(0))<block_end><def_stmt>restore self input output cuda=<false> mode=0 your_vocoder_func=<none><block_start>wav_10k=self._load_wav(input sample_rate=44100)<line_sep>out_np_wav=self.restore_inmem(wav_10k cuda=cuda mode=mode your_vocoder_func=your_vocoder_func)<line_sep>save_wave(out_np_wav fname=output sample_rate=44100)<block_end><block_end>
<import_from_stmt>marshmallow fields<import_from_stmt>CTFd.models Submissions ma<import_from_stmt>CTFd.schemas.challenges ChallengeSchema<import_from_stmt>CTFd.utils string_types<class_stmt>SubmissionSchema(ma.ModelSchema)<block_start>challenge=fields.Nested(ChallengeSchema only=["name" "category" "value"])<class_stmt>Meta<block_start>model=Submissions<line_sep>include_fk=<true><line_sep>dump_only=("id" )<block_end>views={"admin":["provided" "ip" "challenge_id" "challenge" "user" "team" "date" "type" "id" ] "user":["challenge_id" "challenge" "user" "team" "date" "type" "id"] }<def_stmt>__init__ self view=<none> *args **kwargs<block_start><if_stmt>view<block_start><if_stmt>isinstance(view string_types)<block_start>kwargs["only"]=self.views[view]<block_end><elif_stmt>isinstance(view list)<block_start>kwargs["only"]=view<block_end><block_end>super(SubmissionSchema self).__init__(*args **kwargs)<block_end><block_end>
<import_from_stmt>talon.voice Key Context<line_sep>ctx=Context("iterm" bundle="com.googlecode.iterm2")<line_sep>keymap={"broadcaster":Key("cmd-alt-i") "password":Key("cmd-alt-f") # Pane creation and navigation "split horizontal":Key("cmd-shift-d") "split vertical":Key("cmd-d") "pane next":Key("cmd-]") "pane last":Key("cmd-[") }<line_sep>ctx.keymap(keymap)<line_sep>
#-*- coding:utf-8 -*- # &Author AnFany # Works with multi-dimensional outputs <import_stmt>numpy<as>np<import_stmt>tensorflow<as>tf<line_sep>'''Training function built on TensorFlow'''<line_sep># Create the activation function <def_stmt>activate input_layer weights biases actfunc<block_start>layer=tf.add(tf.matmul(input_layer weights) biases)<if_stmt>actfunc<eq>'relu'<block_start><return>tf.nn.relu(layer)<block_end><elif_stmt>actfunc<eq>'tanh'<block_start><return>tf.nn.tanh(layer)<block_end><elif_stmt>actfunc<eq>'sigmoid'<block_start><return>tf.nn.sigmoid(layer)<block_end><block_end># The weight initialization scheme is closely tied to the activation function # sigmoid: xavier tanh: xavier relu: he # Build the training function <def_stmt>Ten_train xdata ydata prexdata preydata hiddenlayers=3 hiddennodes=100 learn_rate=0.05 itertimes=100000 batch_size=200 activate_func='sigmoid' break_error=0.0043# Start building the neural network <block_start>Input_Dimen=len(xdata[0])<line_sep>Unit_Layers=[Input_Dimen]+[hiddennodes]<times>hiddenlayers+[len(ydata[0])]# input dimension, hidden-layer neuron counts, output dimension # Create placeholders x_data=tf.placeholder(shape=[<none> Input_Dimen] dtype=tf.float32 name='x_data')<line_sep>y_target=tf.placeholder(shape=[<none> len(ydata[0])] dtype=tf.float32)<line_sep># Dynamically named variables VAR_NAME=locals()<for_stmt>jj range(hiddenlayers+1)<block_start>VAR_NAME['weight%s'%jj]=tf.Variable(np.random.rand(Unit_Layers[jj] Unit_Layers[jj+1]) dtype=tf.float32 name='weight%s'%jj)/np.sqrt(Unit_Layers[jj])<line_sep># sigmoid tanh # VAR_NAME['weight%s'%jj] = tf.Variable(np.random.rand(Unit_Layers[jj], Unit_Layers[jj + 1]), dtype=tf.float32,name='weight%s' % jj) \/ np.sqrt(Unit_Layers[jj] / 2) # relu VAR_NAME['bias%s'%jj]=tf.Variable(tf.random_normal([Unit_Layers[jj+1]] stddev=10 name='bias%s'%jj) dtype=tf.float32)<if_stmt>jj<eq>0<block_start>VAR_NAME['ooutda%s'%jj]=activate(x_data eval('weight%s'%jj) eval('bias%s'%jj) actfunc=activate_func)<block_end><else_stmt><block_start>VAR_NAME['ooutda%s'%jj]=activate(eval('ooutda%s'%(jj-1)) eval('weight%s'%jj) eval('bias%s'%jj) actfunc=activate_func)<block_end><block_end># Mean squared error loss=tf.reduce_mean(tf.reduce_sum(tf.square(y_target-eval('ooutda%s'%(hiddenlayers))) reduction_indices=[1]))<line_sep># Optimization method my_opt=tf.train.AdamOptimizer(learn_rate)<line_sep>train_step=my_opt.minimize(loss)<line_sep># Initialization init=tf.global_variables_initializer()<line_sep># Dictionary for storing errors accudict={}<line_sep>loss_vec=[]# training error loss_pre=[]# validation error accunum=np.inf<with_stmt>tf.Session()<as>sess<block_start>saver=tf.train.Saver()<line_sep>sess.run(init)<for_stmt>i range(itertimes)<block_start>rand_index=np.random.choice(len(xdata) size=batch_size replace=<false>)<line_sep>rand_x=xdata[rand_index]<line_sep>rand_y=ydata[rand_index]<line_sep>sess.run(train_step feed_dict={x_data:rand_x y_target:rand_y})<line_sep>temp_loss=sess.run(loss feed_dict={x_data:xdata y_target:ydata})<line_sep>temmp_losspre=sess.run(loss feed_dict={x_data:prexdata y_target:preydata})<line_sep>loss_vec.append(temp_loss)<line_sep>loss_pre.append(temmp_losspre)<line_sep>accudict[i]=[temp_loss temmp_losspre]<line_sep># Judge the training progress from the reported errors <if_stmt>(i+1)%20<eq>0<block_start>print('Generation: '+str(i+1)+'. Normalized training error: Loss = '+str(temp_loss)+'. Normalized validation error: Loss = '+str(temmp_losspre))<block_end># Early-exit check <if_stmt>temp_loss<l>break_error# Empirically chosen threshold; with stochastic descent the error may fluctuate early on <block_start><break><block_end># Across all iterations, find the one with the lowest combined error and save its parameters zongheaccu=0.01<times>temp_loss+0.99<times>temmp_losspre<if_stmt>zongheaccu<l>accunum<block_start>accunum=zongheaccu<line_sep># Save the model saver.save(sess './pm25' global_step=i)<block_end><block_end># Note the path sign=min(accudict.items() key=<lambda>d:0.01<times>d[1][0]+0.99<times>d[1][1])[0]<line_sep># Return the training and validation errors xunlian_error,adderror=loss_vec[sign] loss_pre[sign]<line_sep><return>sign hiddenlayers xunlian_error adderror<block_end><block_end>
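# --- Hedged usage sketch (not part of the original module) ---
# Illustrates how Ten_train above might be called on toy data; the array shapes,
# the small itertimes value, and the assumption of a TensorFlow 1.x runtime
# (tf.placeholder / tf.Session) are mine, not the author's. Note the function
# also writes './pm25' checkpoint files as a side effect.
import numpy as np

x_train = np.random.rand(500, 8).astype(np.float32)   # 8 input features
y_train = np.random.rand(500, 2).astype(np.float32)   # 2 output dimensions
x_valid = np.random.rand(100, 8).astype(np.float32)
y_valid = np.random.rand(100, 2).astype(np.float32)

best_iter, n_layers, train_err, valid_err = Ten_train(
    x_train, y_train, x_valid, y_valid,
    hiddenlayers=2, hiddennodes=32, itertimes=200, batch_size=50)
print(best_iter, n_layers, train_err, valid_err)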
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) <import_from_stmt>spack *<class_stmt>NvptxTools(AutotoolsPackage)<block_start>"""nvptx-tools: A collection of tools for use with nvptx-none GCC toolchains. These tools are necessary when building a version of GCC that enables offloading of OpenMP/OpenACC code to NVIDIA GPUs."""<line_sep>homepage="https://github.com/MentorEmbedded/nvptx-tools"<line_sep>git="https://github.com/MentorEmbedded/nvptx-tools"<line_sep>version('2021-05-21' commit='<PASSWORD>')<line_sep>version('2018-03-01' commit='<PASSWORD>')<line_sep>depends_on('binutils')<line_sep>depends_on('cuda')<def_stmt>configure_args self<block_start>cuda_dir=self.spec['cuda'].prefix<line_sep>config_args=["--with-cuda-driver-include={0}".format(cuda_dir.include) "--with-cuda-driver-lib={0}".format(cuda_dir.lib64)]<line_sep><return>config_args<block_end><block_end>
# Copyright FuseSoC contributors # Licensed under the 2-Clause BSD License, see LICENSE for details. # SPDX-License-Identifier: BSD-2-Clause <import_stmt>logging<import_stmt>os.path<import_stmt>sys<import_stmt>tarfile<import_from_stmt>fusesoc.provider.provider Provider<line_sep>logger=logging.getLogger(__name__)<if_stmt>sys.version_info[0]<ge>3<block_start><import_stmt>urllib.request<as>urllib<import_from_stmt>urllib.error URLError<block_end><else_stmt><block_start><import_stmt>urllib<import_from_stmt>urllib2 URLError<block_end>URL="https://github.com/{user}/{repo}/archive/{version}.tar.gz"<class_stmt>Github(Provider)<block_start><def_stmt>_checkout self local_dir<block_start>user=self.config.get("user")<line_sep>repo=self.config.get("repo")<line_sep>version=self.config.get("version" "master")<line_sep># TODO : Sanitize URL url=URL.format(user=user repo=repo version=version)<line_sep>logger.info(f"Downloading {user}/{repo} from github")<try_stmt><block_start>(filename headers)=urllib.urlretrieve(url)<block_end><except_stmt>URLError<as>e<block_start><raise>RuntimeError(f"Failed to download '{url}'. '{e.reason}'")<block_end>t=tarfile.open(filename)<line_sep>(cache_root core)=os.path.split(local_dir)<line_sep># Ugly hack to get the first part of the directory name of the extracted files tmp=t.getnames()[0]<line_sep>t.extractall(cache_root)<line_sep>os.rename(os.path.join(cache_root tmp) os.path.join(cache_root core))<block_end><block_end>
<import_stmt>requests<import_from_stmt>.graph *<import_from_stmt>.template Template<class_stmt>BaseClient(object)<block_start><def_stmt>__init__ self base_url<block_start><if_stmt>base_url.startswith("http")<block_start>self.base_url=base_url<block_end><else_stmt><block_start>self.base_url="http://"+base_url<block_end><block_end><def_stmt>request self method path **kwargs<block_start>url=self.base_url+path<line_sep><return>requests.request(method=method url=url **kwargs)<block_end><def_stmt>get self path **kwargs<block_start>kwargs.setdefault('allow_redirects' <true>)<line_sep><return>self.request('get' path **kwargs)<block_end><def_stmt>post self path data=<none> **kwargs<block_start><return>self.request('post' path data=data **kwargs)<block_end><def_stmt>put self path data=<none> **kwargs<block_start><return>self.request('put' path data=data **kwargs)<block_end><block_end><class_stmt>Client(BaseClient)<block_start>job_path='/v1/job'<line_sep>template_path='/template'<def_stmt>get self graph_id<block_start>path='%s/%s'%(Client.job_path graph_id)<line_sep>response=super(Client self).get(path)<if_stmt>(response.ok)<block_start><return>graph(response.json())<block_end><else_stmt><block_start>response.raise_for_status()<block_end><block_end><def_stmt>list self<block_start>response=super(Client self).get(Client.job_path)<if_stmt>(response.ok)<block_start><return>response.json()<block_end><else_stmt><block_start>response.raise_for_status()<block_end><block_end><def_stmt>save self graph<block_start><return>self.post(Client.job_path data=graph.json() headers={'content-type':'application/json'})<block_end><def_stmt>update self graph<block_start>path='%s/%s'%(Client.job_path graph.id())<line_sep><return>self.put(path data=graph.json())<block_end><def_stmt>create_template self template<block_start>path='%s/%s'%(Client.template_path template.name)<line_sep><return>self.post(path data=template.json())<block_end><def_stmt>get_template self name<block_start>path='%s/%s'%(Client.template_path name)<line_sep>response=super(Client self).get(path)<if_stmt>(response.ok)<block_start><return>template(response.json())<block_end><else_stmt><block_start>response.raise_for_status()<block_end><block_end><def_stmt>list_templates self<block_start>response=super(Client self).get(Client.template_path)<if_stmt>(response.ok)<block_start><return>response.json()<block_end><else_stmt><block_start>response.raise_for_status()<block_end><block_end><block_end>
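# --- Hedged usage sketch (not part of the original module) ---
# Shows how the Client defined above could be exercised; the host/port and the
# graph id are placeholders, and a running job service is assumed.
client = Client('localhost:8080')
all_jobs = client.list()                  # GET /v1/job
one_graph = client.get('example-graph')   # GET /v1/job/example-graph
templates = client.list_templates()       # GET /template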
""" Format: [sent_bit] [w0] [w1] ... """<def_stmt>remove_ids fname trunc=50<block_start><with_stmt>open(fname 'r' errors='ignore')<as>fd<block_start>lines=fd.read().splitlines()<block_end>bag=[]<for_stmt>l lines<block_start>l=l.replace(" <sssss>" "")<line_sep>tokens=l.split("\t")<assert_stmt>len(tokens)<eq>7<line_sep>sent_bit=str(int(tokens[4])-1)<line_sep>words=tokens[6]<line_sep>txt=words.split(" ")<if_stmt>len(txt)<g>trunc<block_start>txt=txt[:trunc]<block_end>words=" ".join(txt)<line_sep>seq=sent_bit+" "+words<line_sep>bag.append(seq)<block_end><with_stmt>open(fname[5:] 'w')<as>fd<block_start>fd.write("\n".join(bag))<block_end><block_end><import_stmt>os<line_sep>os.chdir("../../data/yelp")<line_sep>remove_ids("yelp-test.txt")<line_sep>remove_ids("yelp-train.txt")<line_sep>remove_ids("yelp-valid.txt")<def_stmt>check_num_words fname<block_start><with_stmt>open(fname 'r')<as>fd<block_start>lines=fd.read().splitlines()<block_end>bag=[]<for_stmt>l lines<block_start>words=l.split(" ")[1:]<line_sep># words = words.split(" ") bag.append(len(words))<block_end>print("{} {}".format(fname sum(bag)/len(bag)))<block_end>check_num_words("train.txt")<line_sep>check_num_words("test.txt")<line_sep>check_num_words("valid.txt")<line_sep># from NVLL.util.util import Dictionary <def_stmt>count dic fname<block_start><with_stmt>open(fname 'r')<as>fd<block_start>lines=fd.read().splitlines()<line_sep>lines=" ".join(lines)<line_sep>words=lines.split(" ")<for_stmt>w words<block_start><if_stmt>w<in>dic<block_start>dic[w]<augadd>1<block_end><else_stmt><block_start>dic[w]=1<block_end><block_end><block_end><return>dic<block_end><def_stmt>reduce_vocab_sz vocab_sz=15000# pad eos unk <block_start>d={}<line_sep>d=count(d "train.txt")<line_sep>d=count(d "valid.txt")<line_sep>d=count(d "test.txt")<line_sep>s=[(k d[k])<for>k sorted(d key=d.get reverse=<true>)][:vocab_sz]<line_sep>rt=[]<for_stmt>k,v s<block_start>rt.append(k)<line_sep># print(k, v) <block_end><return>rt<block_end>word_list=reduce_vocab_sz()<def_stmt>replace wlist fname<block_start><with_stmt>open(fname 'r')<as>fd<block_start>lines=fd.read().splitlines()<line_sep>new_lines=[]<for_stmt>l lines<block_start>raw_words=l.split(" ")<line_sep>new_words=[]<for_stmt>w raw_words<block_start><if_stmt>w<in>wlist<block_start>new_words.append(w)<block_end><else_stmt><block_start>new_words.append("<unk>")<block_end><block_end>new_lines.append(" ".join(new_words))<block_end><block_end><with_stmt>open(fname 'w')<as>fd<block_start>fd.write("\n".join(new_lines))<block_end><block_end>replace(word_list "train.txt")<line_sep>replace(word_list "valid.txt")<line_sep>replace(word_list "test.txt")<line_sep>
<import_stmt>os<import_from_stmt>pathlib Path<import_from_stmt>bauh.api.constants CACHE_PATH CONFIG_PATH TEMP_DIR<import_from_stmt>bauh.commons resource<line_sep>ROOT_DIR=os.path.dirname(os.path.abspath(__file__))<line_sep>BUILD_DIR='{}/arch'.format(TEMP_DIR)<line_sep>ARCH_CACHE_PATH=CACHE_PATH+'/arch'<line_sep>CATEGORIES_FILE_PATH=ARCH_CACHE_PATH+'/categories.txt'<line_sep>URL_CATEGORIES_FILE='https://raw.githubusercontent.com/vinifmor/bauh-files/master/arch/categories.txt'<line_sep>URL_GPG_SERVERS='https://raw.githubusercontent.com/vinifmor/bauh-files/master/arch/gpgservers.txt'<line_sep>CONFIG_DIR='{}/.config/bauh/arch'.format(str(Path.home()))<line_sep>CUSTOM_MAKEPKG_FILE='{}/makepkg.conf'.format(CONFIG_DIR)<line_sep>AUR_INDEX_FILE='{}/aur/index.txt'.format(ARCH_CACHE_PATH)<line_sep>AUR_INDEX_TS_FILE='{}/aur/index.ts'.format(ARCH_CACHE_PATH)<line_sep>CONFIG_FILE='{}/arch.yml'.format(CONFIG_PATH)<line_sep>SUGGESTIONS_FILE='https://raw.githubusercontent.com/vinifmor/bauh-files/master/arch/aur_suggestions.txt'<line_sep>UPDATES_IGNORED_FILE='{}/updates_ignored.txt'.format(CONFIG_DIR)<line_sep>EDITABLE_PKGBUILDS_FILE='{}/aur/editable_pkgbuilds.txt'.format(CONFIG_DIR)<line_sep>IGNORED_REBUILD_CHECK_FILE='{}/aur/ignored_rebuild_check.txt'.format(CONFIG_DIR)<def_stmt>get_icon_path <arrow>str<block_start><return>resource.get_path('img/arch.svg' ROOT_DIR)<block_end><def_stmt>get_repo_icon_path <arrow>str<block_start><return>resource.get_path('img/repo.svg' ROOT_DIR)<block_end>
<import_stmt>unittest<import_from_stmt>reppy.robots Agent Robots<class_stmt>AgentTest(unittest.TestCase)<block_start>'''Tests about the Agent.'''<def_stmt>parse self content name<block_start>'''Parse the robots.txt in content and return the agent of the provided name.'''<line_sep><return>Robots.parse('http://example.com' content).agent(name)<block_end><def_stmt>test_length self<block_start>'''An agent knows how many directives it has.'''<line_sep>agent=Agent().disallow('/path').allow('/path/')<line_sep>self.assertEqual(len(agent) 2)<block_end><def_stmt>test_make_allowed self<block_start>'''Make an agent that allows a path.'''<line_sep>agent=Agent().disallow('/path').allow('/path/')<line_sep>self.assertTrue(agent.allowed('/path/'))<line_sep>self.assertFalse(agent.allowed('/path'))<block_end><def_stmt>test_make_disallowed self<block_start>'''Make an agent that disallows a path.'''<line_sep>agent=Agent().disallow('/path')<line_sep>self.assertFalse(agent.allowed('/path'))<block_end><def_stmt>test_checks_allowed self<block_start>'''Answers the allowed question.'''<line_sep>agent=self.parse(''' User-agent: agent Allow: /path ''' 'agent')<line_sep>self.assertTrue(agent.allowed('/path'))<line_sep>self.assertTrue(agent.allowed('/elsewhere'))<block_end><def_stmt>test_honors_longest_first_priority self<block_start>'''The longest matching rule takes priority.'''<line_sep>agent=self.parse(''' User-agent: agent Disallow: /path Allow: /path/exception ''' 'agent')<line_sep>self.assertTrue(agent.allowed('/path/exception'))<line_sep>self.assertFalse(agent.allowed('/path'))<block_end><def_stmt>test_robots_txt_allowed self<block_start>'''Robots.txt is always allowed.'''<line_sep>agent=self.parse(''' User-agent: agent Disallow: /robots.txt ''' 'agent')<line_sep>self.assertTrue(agent.allowed('/robots.txt'))<block_end><def_stmt>test_disallow_none self<block_start>'''Recognizes the "Disallow:" form of "Allow: /"'''<line_sep>agent=self.parse(''' User-agent: agent Disallow: ''' 'agent')<line_sep>self.assertTrue(agent.allowed('/anything'))<block_end><def_stmt>test_escaped_rule self<block_start>'''Handles an escaped rule.'''<line_sep>agent=self.parse(''' User-agent: agent Disallow: /a%3cd.html ''' 'agent')<line_sep>self.assertFalse(agent.allowed('/a<d.html'))<line_sep>self.assertFalse(agent.allowed('/a%3cd.html'))<block_end><def_stmt>test_unescaped_rule self<block_start>'''Handles an unescaped rule.'''<line_sep>agent=self.parse(''' User-agent: agent Disallow: /a<d.html ''' 'agent')<line_sep>self.assertFalse(agent.allowed('/a<d.html'))<line_sep>self.assertFalse(agent.allowed('/a%3cd.html'))<block_end><def_stmt>test_escaped_rule_wildcard self<block_start>'''Handles the case where the wildcard rule is escaped.'''<line_sep>agent=self.parse(''' User-agent: agent Disallow: /a%3c* ''' 'agent')<line_sep>self.assertFalse(agent.allowed('/a<d.html'))<line_sep>self.assertFalse(agent.allowed('/a%3cd.html'))<block_end><def_stmt>test_unescaped_rule_wildcard self<block_start>'''Handles the case where the wildcard rule is unescaped.'''<line_sep>agent=self.parse(''' User-agent: agent Disallow: /a<* ''' 'agent')<line_sep>self.assertFalse(agent.allowed('/a<d.html'))<line_sep>self.assertFalse(agent.allowed('/a%3cd.html'))<block_end><def_stmt>test_accepts_full_url self<block_start>'''Accepts a full URL.'''<line_sep>agent=self.parse(''' User-agent: agent Disallow: /path;params?query ''' 'agent')<line_sep>self.assertFalse(agent.allowed('http://exmaple.com/path;params?query'))<block_end><def_stmt>test_query_only 
self<block_start>'''Recognized query-only rules.'''<line_sep>agent=self.parse(''' User-agent: agent Disallow: /? ''' 'agent')<line_sep>self.assertFalse(agent.allowed('/?'))<line_sep>self.assertTrue(agent.allowed('/'))<block_end><def_stmt>test_params_only self<block_start>'''Recognized params-only rules.'''<line_sep>agent=self.parse(''' User-agent: agent Disallow: /; ''' 'agent')<line_sep>self.assertFalse(agent.allowed('/;'))<line_sep>self.assertTrue(agent.allowed('/'))<block_end><def_stmt>test_str self<block_start>'''str() shows directives.'''<line_sep>agent=self.parse(''' User-agent: agent Disallow: / ''' 'agent')<line_sep>self.assertEquals(str(agent) '[Directive(Disallow: /)]')<block_end><def_stmt>test_str_crawl_delay self<block_start>'''str() shows crawl-delay.'''<line_sep>agent=self.parse(''' User-agent: agent Crawl-Delay: 1 Disallow: / ''' 'agent')<line_sep>self.assertEquals(str(agent) 'Crawl-Delay: 1 [Directive(Disallow: /)]')<block_end><block_end>
# Copyright (c) OpenMMLab. All rights reserved. <import_from_stmt>typing Any Sequence<import_from_stmt>mmcv.runner HOOKS IterBasedRunner LrUpdaterHook<line_sep>@HOOKS.register_module()<class_stmt>MultiStageLrUpdaterHook(LrUpdaterHook)<block_start>"""Multi-Stage Learning Rate Hook. Args: milestone_lrs (Sequence[float]): The base LR for multi-stages. milestone_iters (Sequence[int]): The first iterations in different stages. steps (Sequence[Sequence[int]]): The steps to decay the LR in stages. gammas (Sequence[float]): The list of decay LR ratios. kwargs (any): The arguments of LrUpdaterHook. """<def_stmt>__init__ self milestone_lrs:Sequence[float] milestone_iters:Sequence[int] steps:Sequence[Sequence[int]] gammas:Sequence[float] **kwargs:Any<arrow><none><block_start><assert_stmt>len(milestone_lrs)<eq>len(milestone_iters)<eq>len(steps)<eq>len(gammas) ('For MultiStageLr, lengths of milestones_lr and steps and gammas' f'must be equal, but got {len(milestone_lrs)}, ' f'{len(milestone_iters)}, {len(steps)}, and {len(gammas)}')<for_stmt>i range(len(milestone_iters))<block_start><assert_stmt>milestone_iters[i]<l>steps[i][0] ('miltestone step must be, '<concat>'less than step')<block_end>self.milestone_lrs=milestone_lrs<line_sep>self.steps=steps<line_sep>self.gammas=gammas<line_sep>self.milestone_iters=milestone_iters<line_sep>super().__init__(**kwargs)<block_end><def_stmt>get_lr self runner:IterBasedRunner base_lr:float<arrow>float<block_start>"""Get current LR. Args: runner (IterBasedRunner): The runner to control the training workflow. base_lr (float): The base LR in training workflow. Returns: float: The current LR. """<line_sep>progress=runner.epoch<if>self.by_epoch<else>runner.iter<if_stmt>progress<l>self.milestone_iters[0]<block_start><return>base_lr<block_end>milestone=-1<for_stmt>i,milestone_iter enumerate(self.milestone_iters[1:])<block_start><if_stmt>progress<l>milestone_iter<block_start>milestone=i<line_sep><break><block_end><block_end>exp=len(self.steps[milestone])<for_stmt>ii,s enumerate(self.steps[milestone])<block_start><if_stmt>progress<l>s<block_start>exp=ii<line_sep><break><block_end><block_end>lr=self.milestone_lrs[milestone]<times>(self.gammas[milestone]<power>exp)<line_sep><return>lr<block_end><block_end>
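# --- Hedged usage sketch (not part of the original module) ---
# One way the hook above might be configured through an mmcv-style config dict;
# the numbers are illustrative only, and how lr_config gets wired to this hook
# depends on the surrounding project, so treat this as a sketch rather than the API.
lr_config = dict(
    type='MultiStageLrUpdaterHook',
    by_epoch=False,
    milestone_lrs=[1e-3, 1e-4],       # base LR of each stage
    milestone_iters=[0, 40000],       # first iteration of each stage
    steps=[[20000, 30000], [60000]],  # decay points inside each stage
    gammas=[0.1, 0.1],                # per-stage decay ratios
)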
<import_from_stmt>boxdraw *<import_from_stmt>pprint pprint<line_sep># --------- Test utilities -------- <def_stmt>assert_cmd cmd cur1 cur2 lines *args<block_start><assert_stmt>len(lines)%2<eq>0<line_sep>input_lines=[lines[i]<for>i range(0 len(lines) 2)]<line_sep>expected=[lines[i]<for>i range(1 len(lines) 2)]<line_sep># Determine coordinates from '1' / '2' strings y1=[i<for>i range(len(input_lines))<if>'1'<in>input_lines[i]][0]<line_sep>y2=[i<for>i range(len(input_lines))<if>'2'<in>input_lines[i]][0]<line_sep>x1=[l.index('1')<for>l input_lines<if>'1'<in>l][0]<line_sep>x2=[l.index('2')<for>l input_lines<if>'2'<in>l][0]<line_sep>input_lines=[l.replace('1' cur1).replace('2' cur2)<for>l input_lines]<if_stmt>callable(cmd)<block_start>actual=list(cmd(input_lines y1 x1 y2 x2 *args))<block_end><else_stmt><block_start>actual=list(run_command(cmd input_lines y1 x1 y2 x2 *args))<block_end><if_stmt>expected<ne>actual<block_start>print("Expected:")<line_sep>pprint(expected width=1)<line_sep>print("Actual:")<line_sep>pprint(actual width=1)<block_end><assert_stmt>expected<eq>actual<block_end># -------- Utility functions -------- <def_stmt>test_expand_line <block_start><assert_stmt>expand_line('' 0)<eq>''<assert_stmt>expand_line('\n' 0)<eq>'\n'<assert_stmt>expand_line('xx' 1)<eq>'xx'<assert_stmt>expand_line('xx\n' 1)<eq>'xx\n'<assert_stmt>expand_line('xxx\n' 6)<eq>'xxx \n'<assert_stmt>expand_line('xxx' 6)<eq>'xxx '<block_end><def_stmt>test_replace_at <block_start><assert_stmt>replace_at('----' 0 'xx')<eq>'xx--'<assert_stmt>replace_at('----' 1 'xx')<eq>'-xx-'<assert_stmt>replace_at('----' 3 'xx')<eq>'---xx'<assert_stmt>replace_at('----' 3 'xxx')<eq>'---xxx'<assert_stmt>replace_at('----' 3 'xx\n')<eq>'---xx\n'<block_end><def_stmt>test_overwrite_at <block_start><assert_stmt>overwrite_at('----' 0 'x x ')<eq>'x-x-'<assert_stmt>overwrite_at('----' 1 'x x ')<eq>'-x-x'<assert_stmt>overwrite_at('----' 2 'x x ')<eq>'--x-x'<assert_stmt>overwrite_at('----' 3 'x x ')<eq>'---x x'<assert_stmt>overwrite_at('----\n' 3 'x x ')<eq>'---x x\n'<assert_stmt>overwrite_at('---' 0 ' | ')<eq>'-+-'<block_end><def_stmt>test_replace_block <block_start>lines=['foo' 'bar' 'b' ]<line_sep>block=['1234' '5678' ]<assert_stmt>list(replace_block(lines 1 2 block))<eq>['foo' 'ba1234' 'b 5678' ]<block_end><def_stmt>test_line <block_start><assert_stmt>line('<->' 0)<eq>''<assert_stmt>line('<->' 1)<eq>'<'<assert_stmt>line('<->' 2)<eq>'<-'<assert_stmt>line('<->' 3)<eq>'<->'<assert_stmt>line('<->' 4)<eq>'<-->'<assert_stmt>line('<->' 5)<eq>'<--->'<assert_stmt>line([['+---+'] ['| |'] ['+---+']] 4)<eq>['+---+' '| |' '| |' '+---+' ]<block_end># -------- Box drawing -------- <def_stmt>test_basic_box_drawing <block_start>assert_cmd('+o' '.' '.' ['........' '........' '..1.....' '..+---+.' '........' '..| |.' '......2.' '..+---+.' '........' '........' ])<block_end><def_stmt>test_box_drawing_after_line_end <block_start>assert_cmd('+o' '.' '.' ['........' '........' '..1.' '..+---+' '' ' | |' '......2' '..+---+' ])<block_end><def_stmt>test_fill_box_alignments <block_start>assert_cmd('+{[c' ' ' ' ' ['+------------+' '+------------+' '|1...........|' '|This is a |' '|....FOO.....|' '|test. |' '|............|' '| |' '|...........2|' '| |' '+------------+' '+------------+' ] 'This is a test.')<line_sep>assert_cmd('+{c' ' ' ' ' ['+------------+' '+------------+' '|1...........|' '| This is a |' '|....FOO.....|' '| test. 
|' '|............|' '| |' '|...........2|' '| |' '+------------+' '+------------+' ] 'This is a test.')<line_sep>assert_cmd('+{]c' ' ' ' ' ['+------------+' '+------------+' '|1...........|' '| This is a|' '|....FOO.....|' '| test.|' '|............|' '| |' '|...........2|' '| |' '+------------+' '+------------+' ] 'This is a test.')<line_sep>assert_cmd('+c' ' ' ' ' ['+------------+' '+------------+' '|1...........|' '| |' '|....FOO.....|' '| This is a |' '|............|' '| test. |' '|...........2|' '| |' '+------------+' '+------------+' ] 'This is a test.')<line_sep>assert_cmd('+}]c' ' ' ' ' ['+------------+' '+------------+' '|1...........|' '| |' '|....FOO.....|' '| |' '|............|' '| This is a|' '|...........2|' '| test.|' '+------------+' '+------------+' ] 'This is a test.')<block_end><def_stmt>test_fill_box_too_small <block_start>assert_cmd('+{[c' ' ' ' ' ['+-----+' '+-----+' '|1 |' '|not |' '| 2|' '|enoug|' '+-----+' '+-----+' ] 'not enough space')<line_sep>assert_cmd('+{[c' ' ' ' ' ['+-+' '+-+' '|1|' '|n|' '|.|' '|e|' '|2|' '|s|' '+-+' '+-+' ] 'not enough space')<block_end><def_stmt>test_draw_box_with_label <block_start>assert_cmd('+O' '.' '.' ['.........' '.........' '.1.......' '.+-----+.' '.........' '.| foo |.' '.........' '.| bar |.' '.......2.' '.+-----+.' '.........' '.........' ] 'foo bar')<block_end># -------- Line drawing -------- <def_stmt>test_arrow_reverse <block_start><assert_stmt>arrow_reverse('---')<eq>'---'<assert_stmt>arrow_reverse('<->')<eq>'<->'<assert_stmt>arrow_reverse('-->')<eq>'<--'<assert_stmt>arrow_reverse('<--')<eq>'-->'<block_end><def_stmt>test_draw_ling_hv <block_start>assert_cmd(draw_line_hv ' ' ' ' [' 1 2 ' ' o----> ' ] 'o->')<line_sep>assert_cmd(draw_line_hv ' ' ' ' [' 2 1 ' ' <----o ' ] 'o->')<line_sep>assert_cmd(draw_line_hv ' ' ' ' [' ' ' ' '1' 'o' ' ' '|' ' ' '|' '2' 'v' ] 'o->')<line_sep>assert_cmd(draw_line_hv ' ' ' ' [' ' ' ' '2' '^' ' ' '|' ' ' '|' '1' 'o' ] 'o->')<line_sep>assert_cmd(draw_line_hv ' ' ' ' [' ' ' ' ' 1 ' ' o----+ ' ' ' ' | ' ' 2 ' ' v ' ' ' ' ' ] 'o->')<line_sep>assert_cmd(draw_line_hv ' ' ' ' [' ' ' ' ' 2 ' ' ^ ' ' ' ' | ' ' 1 ' ' +----o ' ' ' ' ' ] 'o->')<line_sep>assert_cmd(draw_line_hv ' ' ' ' [' ' ' ' ' 1 ' ' +----o ' ' ' ' | ' ' 2 ' ' v ' ' ' ' ' ] 'o->')<line_sep>assert_cmd(draw_line_hv ' ' ' ' [' ' ' ' ' 2 ' ' ^ ' ' ' ' | ' ' 1 ' ' o----+ ' ' ' ' ' ] 'o->')<block_end><def_stmt>test_draw_ling_vh <block_start>assert_cmd(draw_line_vh ' ' ' ' [' 1 2 ' ' o----> ' ] 'o->')<line_sep>assert_cmd(draw_line_vh ' ' ' ' [' 2 1 ' ' <----o ' ] 'o->')<line_sep>assert_cmd(draw_line_vh ' ' ' ' [' ' ' ' '1' 'o' ' ' '|' ' ' '|' '2' 'v' ] 'o->')<line_sep>assert_cmd(draw_line_vh ' ' ' ' [' ' ' ' '2' '^' ' ' '|' ' ' '|' '1' 'o' ] 'o->')<line_sep>assert_cmd(draw_line_vh ' ' ' ' [' ' ' ' ' 1 ' ' o ' ' ' ' | ' ' 2 ' ' +----> ' ' ' ' ' ] 'o->')<line_sep>assert_cmd(draw_line_vh ' ' ' ' [' ' ' ' ' 2 ' ' <----+ ' ' ' ' | ' ' 1 ' ' o ' ' ' ' ' ] 'o->')<line_sep>assert_cmd(draw_line_vh ' ' ' ' [' ' ' ' ' 1 ' ' o ' ' ' ' | ' ' 2 ' ' <----+ ' ' ' ' ' ] 'o->')<line_sep>assert_cmd(draw_line_vh ' ' ' ' [' ' ' ' ' 2 ' ' +----> ' ' ' ' | ' ' 1 ' ' o ' ' ' ' ' ] 'o->')<block_end><def_stmt>test_line_plus_connections <block_start>assert_cmd(draw_line_vh '-' ' ' [' |' ' |' ' 2|' ' +---->|' ' |' ' | |' '-1- ' '-+- ' ' ' ' ' ] '-->')<line_sep>assert_cmd(draw_line_vh '-' '|' [' |' ' |' ' 2' ' +----->' ' |' ' | |' '-1- ' '-+- ' ' ' ' ' ] '-->')<line_sep>assert_cmd(draw_line_vh '-' '|' [' |' ' |' ' 2' ' +-----+' ' |' ' | |' '-1- ' '-+- ' ' ' ' ' ] 
'---')<block_end># -------- Selection -------- <def_stmt>test_select_outer_box <block_start>sel=select_outer_box([' ' ' +-------+ ' ' | # | ' ' +-------+ ' ' ' ' ' ] 2 5 2 5)<assert_stmt>sel<eq>["1,3,3,11"]<block_end><def_stmt>test_select_inner_box <block_start>sel=select_inner_box([' ' ' +-------+ ' ' | # | ' ' +-------+ ' ' ' ' ' ] 2 5 2 5)<assert_stmt>sel<eq>["2,4,2,10"]<block_end>
<import_from_future_stmt> absolute_import division print_function<import_from_stmt>six.moves range<def_stmt>exercise <block_start><import_stmt>boost_adaptbx.boost.python<as>bp<line_sep>csr=bp.ext.string_representation<import_from_stmt>libtbx.str_utils py_string_representation<as>psr<for_stmt>sr [csr psr]<block_start><assert_stmt>sr("a" '"' "'")<eq>'"a"'<assert_stmt>sr("b" "'" '"')<eq>"'b'"<block_end><def_stmt>check s<block_start>c=csr(s '"' "'")<line_sep>p=psr(s '"' "'")<assert_stmt>c<eq>p<line_sep>r=eval(c)<assert_stmt>r<eq>s<block_end>iset=list(range(130))+list(range(250 256))<for_stmt>i iset<block_start>s=chr(i)<line_sep>check(s)<for_stmt>j iset<block_start>t=s+chr(j)<line_sep>check(t)<block_end><block_end><block_end><def_stmt>run args<block_start><assert_stmt>args<in>[[] ["--forever"]]<while_stmt><true><block_start>exercise()<if_stmt>(len(args)<eq>0)<block_start><break><block_end><block_end>print("OK")<block_end><if_stmt>(__name__<eq>"__main__")<block_start><import_stmt>sys<line_sep>run(args=sys.argv[1:])<block_end>
<import_from_stmt>debug_me backchannel<line_sep>backchannel.send("ok")<line_sep>
<import_stmt>logging<import_stmt>os<import_stmt>subprocess<import_stmt>tempfile<import_stmt>pytest<line_sep>__all__=['with_layout_comparison' 'compare_output']<import_from_stmt>pyhanko.pdf_utils.writer BasePdfFileWriter<line_sep>logger=logging.getLogger(__name__)<line_sep>SKIP_LAYOUT=<false><line_sep>SKIP_LAYOUT_REASON="pdftoppm or compare tool path not specified"<line_sep>pdftoppm_path=os.environ.get('PDFTOPPM_PATH' <none>)<line_sep>compare_path=os.environ.get('IM_COMPARE_PATH' <none>)<if_stmt><not>pdftoppm_path<or><not>compare_path<block_start>logger.warning(f"Skipping layout tests --- {SKIP_LAYOUT_REASON}")<line_sep>SKIP_LAYOUT=<true><block_end>with_layout_comparison=pytest.mark.skipif(SKIP_LAYOUT reason=SKIP_LAYOUT_REASON)<def_stmt>_render_pdf pdf_file out_file_prefix# render the first page of a PDF to PNG file using pdftoppm <block_start>result=subprocess.run([pdftoppm_path '-singlefile' '-png' pdf_file out_file_prefix])<if_stmt>result.returncode<ne>0<block_start><raise>RuntimeError(f"Failed to convert {pdf_file} to {out_file_prefix}.png using "<concat>f"pdftoppm (executable: {pdftoppm_path}).")<block_end><return>f"{out_file_prefix}.png"<block_end><def_stmt>compare_output writer:BasePdfFileWriter expected_output_path<block_start><with_stmt>tempfile.TemporaryDirectory()<as>working_dir<block_start>output_path=os.path.join(working_dir 'output.pdf')<with_stmt>open(output_path 'wb')<as>outf<block_start>writer.write(outf)<block_end>expected_png=_render_pdf(expected_output_path os.path.join(working_dir 'expected'))<line_sep>actual_png=_render_pdf(output_path os.path.join(working_dir 'actual'))<line_sep>result=subprocess.run(# use the Absolute Error metric, since it's a single number # and hence very easy to process [compare_path '-metric' 'ae' expected_png actual_png os.path.join(working_dir 'diff.png')] capture_output=<true>)<line_sep># TODO maintain a directory of failed test outputs? <if_stmt>result.stderr<ne>b'0'<block_start><raise>RuntimeError(f"Output compare test failed --- absolute error: "<concat>f"{result.stderr.decode('utf8')}")<block_end><block_end><block_end>
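# --- Hedged usage sketch (not part of the original module) ---
# How a layout test might combine the helpers above; PdfFileWriter and the
# reference PDF path are assumptions made for illustration, not part of this
# module, and the test only runs when PDFTOPPM_PATH / IM_COMPARE_PATH are set.
from pyhanko.pdf_utils.writer import PdfFileWriter


@with_layout_comparison
def test_render_matches_reference():
    w = PdfFileWriter()  # build or load the document under test here
    compare_output(w, 'tests/data/expected-output.pdf')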
<import_stmt>argparse<import_stmt>fnmatch<import_stmt>os.path<import_stmt>re<import_stmt>sys<import_stmt>unicodedata<import_from_stmt>collections namedtuple<import_from_stmt>enum Enum<import_from_stmt>. shellpattern<import_from_stmt>.helpers clean_lines<import_from_stmt>.helpers.errors Error<def_stmt>parse_patternfile_line line roots ie_commands fallback<block_start>"""Parse a pattern-file line and act depending on which command it represents."""<line_sep>ie_command=parse_inclexcl_command(line fallback=fallback)<if_stmt>ie_command.cmd<is>IECommand.RootPath<block_start>roots.append(ie_command.val)<block_end><elif_stmt>ie_command.cmd<is>IECommand.PatternStyle<block_start>fallback=ie_command.val<block_end><else_stmt># it is some kind of include/exclude command <block_start>ie_commands.append(ie_command)<block_end><return>fallback<block_end><def_stmt>load_pattern_file fileobj roots ie_commands fallback=<none><block_start><if_stmt>fallback<is><none><block_start>fallback=ShellPattern# ShellPattern is defined later in this module <block_end><for_stmt>line clean_lines(fileobj)<block_start>fallback=parse_patternfile_line(line roots ie_commands fallback)<block_end><block_end><def_stmt>load_exclude_file fileobj patterns<block_start><for_stmt>patternstr clean_lines(fileobj)<block_start>patterns.append(parse_exclude_pattern(patternstr))<block_end><block_end><class_stmt>ArgparsePatternAction(argparse.Action)<block_start><def_stmt>__init__ self nargs=1 **kw<block_start>super().__init__(nargs=nargs **kw)<block_end><def_stmt>__call__ self parser args values option_string=<none><block_start>parse_patternfile_line(values[0] args.paths args.patterns ShellPattern)<block_end><block_end><class_stmt>ArgparsePatternFileAction(argparse.Action)<block_start><def_stmt>__init__ self nargs=1 **kw<block_start>super().__init__(nargs=nargs **kw)<block_end><def_stmt>__call__ self parser args values option_string=<none><block_start>"""Load and parse patterns from a file. Lines empty or starting with '#' after stripping whitespace on both line ends are ignored. """<line_sep>filename=values[0]<try_stmt><block_start><with_stmt>open(filename)<as>f<block_start>self.parse(f args)<block_end><block_end><except_stmt>FileNotFoundError<as>e<block_start><raise>Error(str(e))<block_end><block_end><def_stmt>parse self fobj args<block_start>load_pattern_file(fobj args.paths args.patterns)<block_end><block_end><class_stmt>ArgparseExcludeFileAction(ArgparsePatternFileAction)<block_start><def_stmt>parse self fobj args<block_start>load_exclude_file(fobj args.patterns)<block_end><block_end><class_stmt>PatternMatcher<block_start>"""Represents a collection of pattern objects to match paths against. *fallback* is a boolean value that *match()* returns if no matching patterns are found. """<def_stmt>__init__ self fallback=<none><block_start>self._items=[]<line_sep># Value to return from match function when none of the patterns match. self.fallback=fallback<line_sep># optimizations self._path_full_patterns={}# full path -> return value # indicates whether the last match() call ended on a pattern for which # we should recurse into any matching folder. Will be set to True or # False when calling match(). self.recurse_dir=<none><line_sep># whether to recurse into directories when no match is found # TODO: allow modification as a config option? self.recurse_dir_default=<true><line_sep>self.include_patterns=[]<line_sep># TODO: move this info to parse_inclexcl_command and store in PatternBase subclass? 
self.is_include_cmd={IECommand.Exclude:<false> IECommand.ExcludeNoRecurse:<false> IECommand.Include:<true>}<block_end><def_stmt>empty self<block_start><return><not>len(self._items)<and><not>len(self._path_full_patterns)<block_end><def_stmt>_add self pattern cmd<block_start>"""*cmd* is an IECommand value. """<if_stmt>isinstance(pattern PathFullPattern)<block_start>key=pattern.pattern# full, normalized path self._path_full_patterns[key]=cmd<block_end><else_stmt><block_start>self._items.append((pattern cmd))<block_end><block_end><def_stmt>add self patterns cmd<block_start>"""Add list of patterns to internal list. *cmd* indicates whether the pattern is an include/exclude pattern, and whether recursion should be done on excluded folders. """<for_stmt>pattern patterns<block_start>self._add(pattern cmd)<block_end><block_end><def_stmt>add_includepaths self include_paths<block_start>"""Used to add inclusion-paths from args.paths (from commandline). """<line_sep>include_patterns=[parse_pattern(p PathPrefixPattern)<for>p include_paths]<line_sep>self.add(include_patterns IECommand.Include)<line_sep>self.fallback=<not>include_patterns<line_sep>self.include_patterns=include_patterns<block_end><def_stmt>get_unmatched_include_patterns self<block_start>"Note that this only returns patterns added via *add_includepaths*."<line_sep><return>[p<for>p self.include_patterns<if>p.match_count<eq>0]<block_end><def_stmt>add_inclexcl self patterns<block_start>"""Add list of patterns (of type CmdTuple) to internal list. """<for_stmt>pattern,cmd patterns<block_start>self._add(pattern cmd)<block_end><block_end><def_stmt>match self path<block_start>"""Return True or False depending on whether *path* is matched. If no match is found among the patterns in this matcher, then the value in self.fallback is returned (defaults to None). """<line_sep>path=normalize_path(path).lstrip(os.path.sep)<line_sep># do a fast lookup for full path matches (note: we do not count such matches): non_existent=object()<line_sep>value=self._path_full_patterns.get(path non_existent)<if_stmt>value<is><not>non_existent# we have a full path match! <block_start>self.recurse_dir=command_recurses_dir(value)<line_sep><return>self.is_include_cmd[value]<block_end># this is the slow way, if we have many patterns in self._items: <for_stmt>(pattern cmd) self._items<block_start><if_stmt>pattern.match(path normalize=<false>)<block_start>self.recurse_dir=pattern.recurse_dir<line_sep><return>self.is_include_cmd[cmd]<block_end><block_end># by default we will recurse if there is no match self.recurse_dir=self.recurse_dir_default<line_sep><return>self.fallback<block_end><block_end><def_stmt>normalize_path path<block_start>"""normalize paths for MacOS (but do nothing on other platforms)"""<line_sep># HFS+ converts paths to a canonical form, so users shouldn't be required to enter an exact match. # Windows and Unix filesystems allow different forms, so users always have to enter an exact match. <return>unicodedata.normalize('NFD' path)<if>sys.platform<eq>'darwin'<else>path<block_end><class_stmt>PatternBase<block_start>"""Shared logic for inclusion/exclusion patterns. """<line_sep>PREFIX=NotImplemented<def_stmt>__init__ self pattern recurse_dir=<false><block_start>self.pattern_orig=pattern<line_sep>self.match_count=0<line_sep>pattern=normalize_path(pattern)<line_sep>self._prepare(pattern)<line_sep>self.recurse_dir=recurse_dir<block_end><def_stmt>match self path normalize=<true><block_start>"""Return a boolean indicating whether *path* is matched by this pattern. 
If normalize is True (default), the path will get normalized using normalize_path(), otherwise it is assumed that it already is normalized using that function. """<if_stmt>normalize<block_start>path=normalize_path(path)<block_end>matches=self._match(path)<if_stmt>matches<block_start>self.match_count<augadd>1<block_end><return>matches<block_end><def_stmt>__repr__ self<block_start><return>'%s(%s)'%(type(self) self.pattern)<block_end><def_stmt>__str__ self<block_start><return>self.pattern_orig<block_end><def_stmt>_prepare self pattern<block_start>"Should set the value of self.pattern"<line_sep><raise>NotImplementedError<block_end><def_stmt>_match self path<block_start><raise>NotImplementedError<block_end><block_end><class_stmt>PathFullPattern(PatternBase)<block_start>"""Full match of a path."""<line_sep>PREFIX="pf"<def_stmt>_prepare self pattern<block_start>self.pattern=os.path.normpath(pattern).lstrip(os.path.sep)<block_end># sep at beginning is removed <def_stmt>_match self path<block_start><return>path<eq>self.pattern<block_end><block_end># For PathPrefixPattern, FnmatchPattern and ShellPattern, we require that the pattern either match the whole path # or an initial segment of the path up to but not including a path separator. To unify the two cases, we add a path # separator to the end of the path before matching. <class_stmt>PathPrefixPattern(PatternBase)<block_start>"""Literal files or directories listed on the command line for some operations (e.g. extract, but not create). If a directory is specified, all paths that start with that path match as well. A trailing slash makes no difference. """<line_sep>PREFIX="pp"<def_stmt>_prepare self pattern<block_start>sep=os.path.sep<line_sep>self.pattern=(os.path.normpath(pattern).rstrip(sep)+sep).lstrip(sep)<block_end># sep at beginning is removed <def_stmt>_match self path<block_start><return>(path+os.path.sep).startswith(self.pattern)<block_end><block_end><class_stmt>FnmatchPattern(PatternBase)<block_start>"""Shell glob patterns to exclude. A trailing slash means to exclude the contents of a directory, but not the directory itself. """<line_sep>PREFIX="fm"<def_stmt>_prepare self pattern<block_start><if_stmt>pattern.endswith(os.path.sep)<block_start>pattern=os.path.normpath(pattern).rstrip(os.path.sep)+os.path.sep+'*'+os.path.sep<block_end><else_stmt><block_start>pattern=os.path.normpath(pattern)+os.path.sep+'*'<block_end>self.pattern=pattern.lstrip(os.path.sep)# sep at beginning is removed # fnmatch and re.match both cache compiled regular expressions. # Nevertheless, this is about 10 times faster. self.regex=re.compile(fnmatch.translate(self.pattern))<block_end><def_stmt>_match self path<block_start><return>(self.regex.match(path+os.path.sep)<is><not><none>)<block_end><block_end><class_stmt>ShellPattern(PatternBase)<block_start>"""Shell glob patterns to exclude. A trailing slash means to exclude the contents of a directory, but not the directory itself. 
"""<line_sep>PREFIX="sh"<def_stmt>_prepare self pattern<block_start>sep=os.path.sep<if_stmt>pattern.endswith(sep)<block_start>pattern=os.path.normpath(pattern).rstrip(sep)+sep+"**"+sep+"*"+sep<block_end><else_stmt><block_start>pattern=os.path.normpath(pattern)+sep+"**"+sep+"*"<block_end>self.pattern=pattern.lstrip(sep)# sep at beginning is removed self.regex=re.compile(shellpattern.translate(self.pattern))<block_end><def_stmt>_match self path<block_start><return>(self.regex.match(path+os.path.sep)<is><not><none>)<block_end><block_end><class_stmt>RegexPattern(PatternBase)<block_start>"""Regular expression to exclude. """<line_sep>PREFIX="re"<def_stmt>_prepare self pattern<block_start>self.pattern=pattern# sep at beginning is NOT removed self.regex=re.compile(pattern)<block_end><def_stmt>_match self path# Normalize path separators <block_start><if_stmt>os.path.sep<ne>'/'<block_start>path=path.replace(os.path.sep '/')<block_end><return>(self.regex.search(path)<is><not><none>)<block_end><block_end>_PATTERN_CLASSES={FnmatchPattern PathFullPattern PathPrefixPattern RegexPattern ShellPattern }<line_sep>_PATTERN_CLASS_BY_PREFIX=dict((i.PREFIX i)<for>i _PATTERN_CLASSES)<line_sep>CmdTuple=namedtuple('CmdTuple' 'val cmd')<class_stmt>IECommand(Enum)<block_start>"""A command that an InclExcl file line can represent. """<line_sep>RootPath=1<line_sep>PatternStyle=2<line_sep>Include=3<line_sep>Exclude=4<line_sep>ExcludeNoRecurse=5<block_end><def_stmt>command_recurses_dir cmd# TODO?: raise error or return None if *cmd* is RootPath or PatternStyle <block_start><return>cmd<not><in>[IECommand.ExcludeNoRecurse]<block_end><def_stmt>get_pattern_class prefix<block_start><try_stmt><block_start><return>_PATTERN_CLASS_BY_PREFIX[prefix]<block_end><except_stmt>KeyError<block_start><raise>ValueError("Unknown pattern style: {}".format(prefix))<from><none><block_end><block_end><def_stmt>parse_pattern pattern fallback=FnmatchPattern recurse_dir=<true><block_start>"""Read pattern from string and return an instance of the appropriate implementation class. """<if_stmt>len(pattern)<g>2<and>pattern[2]<eq>":"<and>pattern[:2].isalnum()<block_start>(style pattern)=(pattern[:2] pattern[3:])<line_sep>cls=get_pattern_class(style)<block_end><else_stmt><block_start>cls=fallback<block_end><return>cls(pattern recurse_dir)<block_end><def_stmt>parse_exclude_pattern pattern_str fallback=FnmatchPattern<block_start>"""Read pattern from string and return an instance of the appropriate implementation class. 
"""<line_sep>epattern_obj=parse_pattern(pattern_str fallback recurse_dir=<false>)<line_sep><return>CmdTuple(epattern_obj IECommand.ExcludeNoRecurse)<block_end><def_stmt>parse_inclexcl_command cmd_line_str fallback=ShellPattern<block_start>"""Read a --patterns-from command from string and return a CmdTuple object."""<line_sep>cmd_prefix_map={'-':IECommand.Exclude '!':IECommand.ExcludeNoRecurse '+':IECommand.Include 'R':IECommand.RootPath 'r':IECommand.RootPath 'P':IECommand.PatternStyle 'p':IECommand.PatternStyle }<if_stmt><not>cmd_line_str<block_start><raise>argparse.ArgumentTypeError("A pattern/command must not be empty.")<block_end>cmd=cmd_prefix_map.get(cmd_line_str[0])<if_stmt>cmd<is><none><block_start><raise>argparse.ArgumentTypeError("A pattern/command must start with anyone of: %s"%', '.join(cmd_prefix_map))<block_end># remaining text on command-line following the command character remainder_str=cmd_line_str[1:].lstrip()<if_stmt><not>remainder_str<block_start><raise>argparse.ArgumentTypeError("A pattern/command must have a value part.")<block_end><if_stmt>cmd<is>IECommand.RootPath# TODO: validate string? <block_start>val=remainder_str<block_end><elif_stmt>cmd<is>IECommand.PatternStyle# then remainder_str is something like 're' or 'sh' <block_start><try_stmt><block_start>val=get_pattern_class(remainder_str)<block_end><except_stmt>ValueError<block_start><raise>argparse.ArgumentTypeError("Invalid pattern style: {}".format(remainder_str))<block_end><block_end><else_stmt># determine recurse_dir based on command type <block_start>recurse_dir=command_recurses_dir(cmd)<line_sep>val=parse_pattern(remainder_str fallback recurse_dir)<block_end><return>CmdTuple(val cmd)<block_end>
<import_from_stmt>collections OrderedDict<import_stmt>math<import_from_stmt>jet_bridge_base.exceptions.missing_argument_error MissingArgumentError<import_from_stmt>jet_bridge_base.paginators.pagination Pagination<import_from_stmt>jet_bridge_base.responses.json JSONResponse<import_from_stmt>jet_bridge_base.utils.http replace_query_param remove_query_param<import_from_stmt>jet_bridge_base.utils.queryset queryset_count_optimized<class_stmt>PageNumberPagination(Pagination)<block_start>default_page_size=25<line_sep>page_query_param='page'<line_sep>page_size_query_param='_per_page'<line_sep>max_page_size=10000<line_sep>count=<none><line_sep>page_number=<none><line_sep>page_size=<none><line_sep>handler=<none><def_stmt>paginate_queryset self request queryset handler<block_start>page_number=self.get_page_number(request handler)<if_stmt><not>page_number<block_start><return><none><block_end>page_size=self.get_page_size(request handler)<if_stmt><not>page_size<block_start><return><none><block_end>self.count=queryset_count_optimized(request queryset)<line_sep>self.page_number=page_number<line_sep>self.page_size=page_size<line_sep>self.handler=handler<line_sep><return>queryset.offset((page_number-1)<times>page_size).limit(page_size)<block_end><def_stmt>get_pages_count self<block_start><return>int(math.ceil(self.count/self.page_size))<block_end><def_stmt>get_paginated_response self request data<block_start><return>JSONResponse(OrderedDict([('count' self.count) ('next' self.get_next_link(request)) ('previous' self.get_previous_link(request)) ('results' data) ('num_pages' self.get_pages_count()) ('per_page' self.page_size) ]))<block_end><def_stmt>get_page_number self request handler<block_start><try_stmt><block_start>result=int(request.get_argument(self.page_query_param))<line_sep><return>max(result 1)<block_end><except_stmt>(MissingArgumentError ValueError)<block_start><return>1<block_end><block_end><def_stmt>get_page_size self request handler<block_start><if_stmt>self.page_size_query_param<block_start><try_stmt><block_start>result=int(request.get_argument(self.page_size_query_param))<line_sep>result=max(result 1)<if_stmt>self.max_page_size<block_start>result=min(result self.max_page_size)<block_end><return>result<block_end><except_stmt>(MissingArgumentError ValueError)<block_start><pass><block_end><block_end><return>self.default_page_size<block_end><def_stmt>has_next self<block_start><return>self.page_number<l>self.get_pages_count()<block_end><def_stmt>has_previous self<block_start><return>self.page_number<g>1<block_end><def_stmt>next_page_number self<block_start><return>self.page_number+1<block_end><def_stmt>previous_page_number self<block_start><return>self.page_number-1<block_end><def_stmt>get_next_link self request<block_start><if_stmt><not>self.has_next()<block_start><return><none><block_end>url=request.full_url()<line_sep>page_number=self.next_page_number()<line_sep><return>replace_query_param(url self.page_query_param page_number)<block_end><def_stmt>get_previous_link self request<block_start><if_stmt><not>self.has_previous()<block_start><return><none><block_end>url=request.full_url()<line_sep>page_number=self.previous_page_number()<if_stmt>page_number<eq>1<block_start><return>remove_query_param(url self.page_query_param)<block_end><return>replace_query_param(url self.page_query_param page_number)<block_end><block_end>
# BSD 3-Clause License # # Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved. # # Redistribution and use in source and binary forms, with or without modification, # are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # * Neither the name of the psutil authors nor the names of its contributors # may be used to endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR # ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON # ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. <import_stmt>os<import_stmt>time<import_stmt>torch<import_from_stmt>torch.multiprocessing Process<line_sep># Worker timeout *after* the first worker has completed. UNIT_WORKER_TIMEOUT=120<def_stmt>distributed_test world_size=2 backend="nccl" use_fake_dist=<false><block_start>r"""A decorator for executing a function (e.g., a unit test) in a distributed manner. This decorator manages the spawning and joining of processes, initialization of torch.distributed, and catching of errors. Usage example: @distributed_test(worker_size=[2,3]) def my_test(): rank = dist.get_rank() world_size = dist.get_world_size() assert(rank < world_size) Args: world_size (int or list): number of ranks to spawn. Can be a list to spawn multiple tests. """<def_stmt>dist_wrap run_func<block_start>"""Second-level decorator for dist_test. This actually wraps the function."""<def_stmt>dist_init local_rank num_procs *func_args **func_kwargs<block_start>"""Initialize torch.distributed and execute the user function."""<line_sep>os.environ["MASTER_ADDR"]="127.0.0.1"<line_sep>os.environ["MASTER_PORT"]="29503"<line_sep>os.environ["LOCAL_RANK"]=str(local_rank)<line_sep># NOTE: unit tests don't support multi-node so local_rank == global rank os.environ["RANK"]=str(local_rank)<line_sep>os.environ["WORLD_SIZE"]=str(num_procs)<line_sep>torch.distributed.init_process_group(backend=backend)<if_stmt>torch.cuda.is_available()<block_start><if_stmt>use_fake_dist<block_start>torch.cuda.set_device(0)<block_end><else_stmt><block_start>torch.cuda.set_device(local_rank)<block_end><block_end>run_func(*func_args **func_kwargs)<block_end><def_stmt>dist_launcher num_procs *func_args **func_kwargs<block_start>r"""Launch processes and gracefully handle failures."""<line_sep># Spawn all workers on subprocesses. 
processes=[]<for_stmt>local_rank range(num_procs)<block_start>p=Process(target=dist_init args=(local_rank num_procs *func_args) kwargs=func_kwargs )<line_sep>p.start()<line_sep>processes.append(p)<block_end># Now loop and wait for a test to complete. The spin-wait here isn't a big # deal because the number of processes will be O(#GPUs) << O(#CPUs). any_done=<false><while_stmt><not>any_done<block_start><for_stmt>p processes<block_start><if_stmt><not>p.is_alive()<block_start>any_done=<true><line_sep><break><block_end><block_end><block_end># Wait for all other processes to complete <for_stmt>p processes<block_start>p.join(UNIT_WORKER_TIMEOUT)<block_end>failed=[(rank p)<for>rank,p enumerate(processes)<if>p.exitcode<ne>0]<for_stmt>_,p failed# If it still hasn't terminated, kill it because it hung. <block_start><if_stmt>p.exitcode<is><none><block_start>p.terminate()<block_end><if_stmt>p.exitcode<ne>0<block_start>p.terminate()<block_end><block_end><block_end><def_stmt>run_func_decorator *func_args **func_kwargs<block_start>r"""Entry point for @distributed_test()."""<if_stmt>isinstance(world_size int)<block_start>dist_launcher(world_size *func_args **func_kwargs)<block_end><elif_stmt>isinstance(world_size list)<block_start><for_stmt>procs world_size<block_start>dist_launcher(procs *func_args **func_kwargs)<line_sep>time.sleep(0.5)<block_end><block_end><else_stmt><block_start><raise>TypeError("world_size must be an integer or a list of integers.")<block_end><block_end><return>run_func_decorator<block_end><return>dist_wrap<block_end>
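# Hedged usage sketch for the decorator above. Note that the keyword is
# world_size (the docstring example spells it worker_size); backend="gloo" is
# assumed here so the sketch also runs on CPU-only Linux machines where fork()
# is the default process start method.
import torch
import torch.distributed as dist

@distributed_test(world_size=2, backend="gloo")
def test_allreduce_sums_ranks():
    t = torch.ones(1) * dist.get_rank()
    dist.all_reduce(t)  # defaults to SUM across ranks
    assert t.item() == sum(range(dist.get_world_size()))

test_allreduce_sums_ranks()  # spawns 2 worker processes and joins them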
""" File generation for catalog signing non-binary contents. """<line_sep>__author__="<NAME> <<EMAIL>>"<line_sep>__version__="3.8"<import_stmt>sys<line_sep>__all__=["PYTHON_CAT_NAME" "PYTHON_CDF_NAME"]<def_stmt>public f<block_start>__all__.append(f.__name__)<line_sep><return>f<block_end>PYTHON_CAT_NAME="python.cat"<line_sep>PYTHON_CDF_NAME="python.cdf"<line_sep>CATALOG_TEMPLATE=r"""[CatalogHeader] Name={target.stem}.cat ResultDir={target.parent} PublicVersion=1 CatalogVersion=2 HashAlgorithms=SHA256 PageHashes=false EncodingType= [CatalogFiles] """<def_stmt>can_sign file<block_start><return>file.is_file()<and>file.stat().st_size<block_end>@public<def_stmt>write_catalog target files<block_start><with_stmt>target.open("w" encoding="utf-8")<as>cat<block_start>cat.write(CATALOG_TEMPLATE.format(target=target))<line_sep>cat.writelines("<HASH>{}={}\n".format(n f)<for>n,f files<if>can_sign(f))<block_end><block_end>
<import_stmt>numpy<as>np<import_stmt>tensorflow<as>tf<line_sep>conv1d=tf.layers.conv1d<def_stmt>attn_head seq out_sz bias_mat activation in_drop=0.0 coef_drop=0.0 residual=<false><block_start><with_stmt>tf.name_scope('my_attn')<block_start><if_stmt>in_drop<ne>0.0<block_start>seq=tf.nn.dropout(seq 1.0-in_drop)<block_end>seq_fts=tf.layers.conv1d(seq out_sz 1 use_bias=<false>)<line_sep># simplest self-attention possible f_1=tf.layers.conv1d(seq_fts 1 1)<line_sep>f_2=tf.layers.conv1d(seq_fts 1 1)<line_sep>logits=f_1+tf.transpose(f_2 [0 2 1])<line_sep>coefs=tf.nn.softmax(tf.nn.leaky_relu(logits)+bias_mat)<if_stmt>coef_drop<ne>0.0<block_start>coefs=tf.nn.dropout(coefs 1.0-coef_drop)<block_end><if_stmt>in_drop<ne>0.0<block_start>seq_fts=tf.nn.dropout(seq_fts 1.0-in_drop)<block_end>vals=tf.matmul(coefs seq_fts)<line_sep>ret=tf.contrib.layers.bias_add(vals)<line_sep># residual connection <if_stmt>residual<block_start><if_stmt>seq.shape[-1]<ne>ret.shape[-1]<block_start>ret=ret+conv1d(seq ret.shape[-1] 1)# activation <block_end><else_stmt><block_start>ret=ret+seq<block_end><block_end><return>activation(ret)<block_end><block_end># activation # Experimental sparse attention head (for running on datasets such as Pubmed) # N.B. Because of limitations of current TF implementation, will work _only_ if batch_size = 1! <def_stmt>sp_attn_head seq out_sz adj_mat activation nb_nodes in_drop=0.0 coef_drop=0.0 residual=<false><block_start><with_stmt>tf.name_scope('sp_attn')<block_start><if_stmt>in_drop<ne>0.0<block_start>seq=tf.nn.dropout(seq 1.0-in_drop)<block_end>seq_fts=tf.layers.conv1d(seq out_sz 1 use_bias=<false>)<line_sep># simplest self-attention possible f_1=tf.layers.conv1d(seq_fts 1 1)<line_sep>f_2=tf.layers.conv1d(seq_fts 1 1)<line_sep>f_1=tf.reshape(f_1 (nb_nodes 1))<line_sep>f_2=tf.reshape(f_2 (nb_nodes 1))<line_sep>f_1=adj_mat<times>f_1<line_sep>f_2=adj_mat<times>tf.transpose(f_2 [1 0])<line_sep>logits=tf.sparse_add(f_1 f_2)<line_sep>lrelu=tf.SparseTensor(indices=logits.indices values=tf.nn.leaky_relu(logits.values) dense_shape=logits.dense_shape)<line_sep>coefs=tf.sparse_softmax(lrelu)<if_stmt>coef_drop<ne>0.0<block_start>coefs=tf.SparseTensor(indices=coefs.indices values=tf.nn.dropout(coefs.values 1.0-coef_drop) dense_shape=coefs.dense_shape)<block_end><if_stmt>in_drop<ne>0.0<block_start>seq_fts=tf.nn.dropout(seq_fts 1.0-in_drop)<block_end># As tf.sparse_tensor_dense_matmul expects its arguments to have rank-2, # here we make an assumption that our input is of batch size 1, and reshape appropriately. # The method will fail in all other cases! coefs=tf.sparse_reshape(coefs [nb_nodes nb_nodes])<line_sep>seq_fts=tf.squeeze(seq_fts)<line_sep>vals=tf.sparse_tensor_dense_matmul(coefs seq_fts)<line_sep>vals=tf.expand_dims(vals axis=0)<line_sep>vals.set_shape([1 nb_nodes out_sz])<line_sep>ret=tf.contrib.layers.bias_add(vals)<line_sep># residual connection <if_stmt>residual<block_start><if_stmt>seq.shape[-1]<ne>ret.shape[-1]<block_start>ret=ret+conv1d(seq ret.shape[-1] 1)# activation <block_end><else_stmt><block_start>ret=ret+seq<block_end><block_end><return>activation(ret)<block_end><block_end># activation <def_stmt>sp_hete_attn_head seq out_sz adj_mat adj_type edge_list activation nb_nodes in_drop=0.0 coef_drop=0.0 residual=<false># input adjacency matrices are TRANSPOSED before feeding! 
<block_start><with_stmt>tf.name_scope('sp_hete_attn')<block_start><if_stmt>in_drop<ne>0.0<block_start>seq=[tf.nn.dropout(seq_i 1.0-in_drop)<for>seq_i seq]<block_end># seq_fts[j][i]: hidden features from group i to group j, center node is j # 1 * nb_nodes_i * out_sz_j seq_fts=[[tf.layers.conv1d(seq_i out_sz # out_sz_j 1 use_bias=<false>)<for>seq_i seq]<for>_ seq]<line_sep>attn_biases=[<none><for>_ adj_type]<for_stmt>dir_edge edge_list<block_start>attn_bias=tf.Variable(tf.random_normal(shape=(1 out_sz)))<line_sep>attn_biases[dir_edge[0]]=attn_bias<if_stmt>len(dir_edge)<eq>2<block_start>attn_biases[dir_edge[1]]=-attn_bias<block_end><block_end># for out_sz_j in out_sz coefs_lists=[[]<for>_ range(len(seq))]<line_sep>seq_fts_lists=[[]<for>_ range(len(seq))]<line_sep># simplest self-attention possible <for_stmt>adj_ij,type_ij,attn_bias zip(adj_mat adj_type attn_biases)# transposed, # nb_nodes_j * nb_nodes_i <block_start>i,j=type_ij<line_sep>f_1=tf.reshape(seq_fts[j][j] (nb_nodes[j] out_sz))<line_sep>f_1=tf.gather(f_1 adj_ij.indices[: 0])<line_sep>f_2=tf.reshape(seq_fts[j][i] (nb_nodes[i] out_sz))<if_stmt>attn_bias<is><not><none><block_start>f_2=f_2+attn_bias<block_end>f_2=tf.gather(f_2 adj_ij.indices[: 1])<line_sep>f=tf.reduce_sum(tf.multiply(f_1 f_2) 1)<line_sep>coefs=tf.SparseTensor(indices=adj_ij.indices values=tf.nn.leaky_relu(f) dense_shape=adj_ij.dense_shape)<if_stmt>coef_drop<ne>0.0<block_start>coefs=tf.SparseTensor(indices=coefs.indices values=tf.nn.dropout(coefs.values 1.0-coef_drop) dense_shape=coefs.dense_shape)<block_end>coefs_lists[j].append(coefs)# transposed, nb_nodes_j * nb_nodes_i <if_stmt>in_drop<ne>0.0<block_start>seq_fts_ij=tf.nn.dropout(seq_fts[j][i] 1.0-in_drop)<block_end>seq_fts_lists[j].append(tf.squeeze(seq_fts_ij))<block_end># nb_nodes_i * out_sz_j # As tf.sparse_tensor_dense_matmul expects its arguments to have rank-2, # here we make an assumption that our input is of batch size 1, and reshape appropriately. # The method will fail in all other cases! coefs=[tf.sparse_concat(1 coefs_list)<for>coefs_list coefs_lists]<line_sep>coefs=[tf.sparse_softmax(coef)<for>coef coefs]<line_sep>seq_fts=[tf.concat(seq_fts_list 0)<for>seq_fts_list seq_fts_lists]<line_sep>vals=[tf.sparse_tensor_dense_matmul(coef seq_ft)<for>coef,seq_ft zip(coefs seq_fts)]<line_sep># nb_nodes_j * out_sz_j vals=[tf.expand_dims(val axis=0)<for>val vals]<for_stmt>i,val enumerate(vals)<block_start>val.set_shape([1 nb_nodes[i] out_sz])<block_end>ret=[tf.contrib.layers.bias_add(val)<for>val vals]<line_sep># residual connection <if_stmt>residual<block_start>ret2=[]<for_stmt>r,s zip(ret seq)<block_start><if_stmt>s.shape[-1]<ne>r.shape[-1]<block_start>ret2.append(r+tf.layers.conv1d(s r.shape[-1] 1))<block_end><else_stmt><block_start>ret2.append(r+s)<block_end><block_end>ret=ret2<block_end>ret=[activation(r)<for>r ret]<line_sep><return>ret<block_end><block_end># activation <def_stmt>full_connection seq out_sz target_node activation in_drop=0.0 use_bias=<true><block_start><with_stmt>tf.name_scope('full_connection_layer')<block_start><if_stmt>in_drop<ne>0.0<block_start>seq=[tf.nn.dropout(seq_i 1.0-in_drop)<for>seq_i seq]<block_end>seq_fc=[tf.layers.conv1d(seq[target_node[i]] out_sz[i] 1 use_bias=use_bias)<for>i range(len(target_node))]<line_sep>seq_fc=[tf.squeeze(seq_i)<for>seq_i seq_fc]# remove the bach_size which is set as 1 ret=[activation(s)<for>s seq_fc]<line_sep><return>ret<block_end><block_end>
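# Hedged sketch of wiring the dense head above in TF 1.x graph mode (1.x is
# assumed since the code uses tf.layers/tf.contrib). Shapes are
# batch x nodes x features; bias_mat is the additive mask GAT uses
# (0 on edges, a large negative value elsewhere).
seq_in = tf.placeholder(tf.float32, shape=(1, 100, 16))
bias_in = tf.placeholder(tf.float32, shape=(1, 100, 100))
head = attn_head(seq_in, out_sz=8, bias_mat=bias_in, activation=tf.nn.elu,
                 in_drop=0.4, coef_drop=0.4)   # -> shape (1, 100, 8)
# A multi-head layer is usually the concatenation of several such heads.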
'''OpenGL extension NV.present_video This module customises the behaviour of the OpenGL.raw.WGL.NV.present_video to provide a more Python-friendly API Overview (from the spec) This extension provides a mechanism for displaying textures and renderbuffers on auxiliary video output devices. It allows an application to specify separate buffers for the individual fields used with interlaced output. It also provides a way to present frames or field pairs simultaneously in two separate video streams. It also allows an application to request when images should be displayed, and to obtain feedback on exactly when images are actually first displayed. This specification attempts to avoid language that would tie it to any particular hardware or vendor. However, it should be noted that it has been designed specifically for use with NVIDIA SDI products and the features and limitations of the spec compliment those of NVIDIA's line of SDI video output devices. The official definition of this extension is available here: http://www.opengl.org/registry/specs/NV/present_video.txt '''<import_from_stmt>OpenGL platform constant arrays<import_from_stmt>OpenGL extensions wrapper<import_stmt>ctypes<import_from_stmt>OpenGL.raw.WGL _types _glgets<import_from_stmt>OpenGL.raw.WGL.NV.present_video *<import_from_stmt>OpenGL.raw.WGL.NV.present_video _EXTENSION_NAME<def_stmt>glInitPresentVideoNV <block_start>'''Return boolean indicating whether this extension is available'''<import_from_stmt>OpenGL extensions<line_sep><return>extensions.hasGLExtension(_EXTENSION_NAME)<block_end>### END AUTOGENERATED SECTION
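# Hedged sketch: the generated helper only reports whether the extension is
# exposed, so call it after a GL context has been created and made current.
if glInitPresentVideoNV():
    print("WGL_NV_present_video is available")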
"""RoboMaker component for deleting a simulation application."""<line_sep># Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_stmt>logging<import_from_stmt>typing Dict<import_from_stmt>delete_simulation_app.src.robomaker_delete_simulation_app_spec RoboMakerDeleteSimulationAppSpec RoboMakerDeleteSimulationAppInputs RoboMakerDeleteSimulationAppOutputs <import_from_stmt>common.sagemaker_component SageMakerComponent ComponentMetadata SageMakerJobStatus <import_from_stmt>common.boto3_manager Boto3Manager<import_from_stmt>common.common_inputs SageMakerComponentCommonInputs<line_sep>@ComponentMetadata(name="RoboMaker - Delete Simulation Application" description="Delete a simulation application." spec=RoboMakerDeleteSimulationAppSpec )<class_stmt>RoboMakerDeleteSimulationAppComponent(SageMakerComponent)<block_start>"""RoboMaker component for deleting a simulation application."""<def_stmt>Do self spec:RoboMakerDeleteSimulationAppSpec<block_start>self._arn=spec.inputs.arn<line_sep>self._version=spec.inputs.version<line_sep>super().Do(spec.inputs spec.outputs spec.output_paths)<block_end><def_stmt>_get_job_status self<arrow>SageMakerJobStatus<block_start><try_stmt><block_start>response=self._rm_client.describe_simulation_application(application=self._arn)<line_sep>status=response["arn"]<if_stmt>status<is><not><none><block_start><return>SageMakerJobStatus(is_completed=<false> raw_status=status )<block_end><else_stmt><block_start><return>SageMakerJobStatus(is_completed=<true> raw_status="Item deleted")<block_end><block_end><except_stmt>Exception<as>ex<block_start><return>SageMakerJobStatus(is_completed=<true> raw_status=str(ex))<block_end><block_end><def_stmt>_configure_aws_clients self inputs:SageMakerComponentCommonInputs<block_start>"""Configures the internal AWS clients for the component. Args: inputs: A populated list of user inputs. 
"""<line_sep>self._rm_client=Boto3Manager.get_robomaker_client(self._get_component_version() inputs.region endpoint_url=inputs.endpoint_url assume_role_arn=inputs.assume_role )<line_sep>self._cw_client=Boto3Manager.get_cloudwatch_client(inputs.region assume_role_arn=inputs.assume_role)<block_end><def_stmt>_after_job_complete self job:Dict request:Dict inputs:RoboMakerDeleteSimulationAppInputs outputs:RoboMakerDeleteSimulationAppOutputs <block_start>outputs.arn=self._arn<line_sep>logging.info("Simulation Application {} has been deleted".format(outputs.arn))<block_end><def_stmt>_on_job_terminated self<block_start>logging.info("Simulation Application {} failed to delete".format(self._arn))<block_end><def_stmt>_create_job_request self inputs:RoboMakerDeleteSimulationAppInputs outputs:RoboMakerDeleteSimulationAppOutputs <arrow>Dict<block_start>""" Documentation: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/robomaker.html#RoboMaker.Client.delete_simulation_application """<line_sep>request=self._get_request_template("robomaker.delete.simulation.app")<line_sep>request["application"]=self._arn<line_sep># If we have a version then use it, else remove it from request object <if_stmt>inputs.version<block_start>request["applicationVersion"]=inputs.version<block_end><else_stmt><block_start>request.pop("applicationVersion")<block_end><return>request<block_end><def_stmt>_submit_job_request self request:Dict<arrow>Dict<block_start><return>self._rm_client.delete_simulation_application(**request)<block_end><def_stmt>_after_submit_job_request self job:Dict request:Dict inputs:RoboMakerDeleteSimulationAppInputs outputs:RoboMakerDeleteSimulationAppOutputs <block_start>logging.info(f"Deleted Robomaker Simulation Application with arn: {self._arn}")<block_end><def_stmt>_print_logs_for_job self<block_start><pass><block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start><import_stmt>sys<line_sep>spec=RoboMakerDeleteSimulationAppSpec(sys.argv[1:])<line_sep>component=RoboMakerDeleteSimulationAppComponent()<line_sep>component.Do(spec)<block_end>
<import_stmt>pandas<as>pd<import_stmt>os<import_stmt>gzip<import_from_stmt>logomaker.src.error_handling check handle_errors<line_sep># load directory of file matrix_dir=os.path.dirname(os.path.abspath(__file__))+'/../examples/matrices'<line_sep># load directory of file data_dir=os.path.dirname(os.path.abspath(__file__))+'/../examples/datafiles'<line_sep>@handle_errors<def_stmt>list_example_matrices <block_start>""" Return list of available matrices. """<line_sep># List of supported distributions by name valid_matrices=['.'.join(name.split('.')[:-1])<for>name os.listdir(matrix_dir)<if>'.txt'<in>name]<line_sep><return>valid_matrices<block_end>@handle_errors<def_stmt>list_example_datafiles <block_start>""" Return list of available data files. """<line_sep># List of supported distributions by name valid_datafiles=[name<for>name os.listdir(data_dir)<if>len(name.split('.'))<ge>2<and>len(name.split('.')[0])<g>0]<line_sep><return>valid_datafiles<block_end>@handle_errors<def_stmt>get_example_matrix name=<none> print_description=<true><block_start>""" Returns an example matrix from which a logo can be made. parameters ---------- name: (None or str) Name of example matrix. print_description: (bool) If true, a description of the example matrix will be printed returns ------- df: (data frame) A data frame containing an example matrix. """<line_sep># get list of valid matrices valid_matrices=list_example_matrices()<line_sep># check that matrix name is valid check(name<in>valid_matrices 'Matrix "%s" not recognized. Please choose from: \n%s'%(name '\n'.join([repr(x)<for>x valid_matrices])))<line_sep># check that print_description is boolean check(isinstance(print_description bool) 'type(print_description) = %s; must be of type bool '%type(print_description))<line_sep># set matrix file file_name='%s/%s.txt'%(matrix_dir name)<assert_stmt>os.path.isfile(file_name) 'File %s does not exist!'%file_name<line_sep># if user wants a description of the example matrix, provide it <if_stmt>print_description<block_start>print('Description of example matrix "%s":'%name)<with_stmt>open(file_name 'r')<as>f<block_start>lines=f.readlines()<line_sep>lines=[l<for>l lines<if>len(l)<g>0<and>l[0]<eq>'#']<line_sep>description="".join(lines)<line_sep>print(description)<block_end><block_end># return matrix data frame <return>pd.read_csv(file_name sep='\t' index_col=0 comment='#')<block_end>@handle_errors<def_stmt>open_example_datafile name=<none> print_description=<true><block_start>""" Returns a file handle to an example dataset parameters ---------- name: (None or str) Name of example matrix. print_description: (bool) If true, a description of the example matrix will be printed returns ------- f: (file handle) A handle to the requested file """<line_sep># get list of valid data files valid_datafiles=list_example_datafiles()<line_sep># check that specified datafile is valid check(name<in>valid_datafiles 'Matrix "%s" not recognized. 
Please choose from: \n%s'%(name '\n'.join([repr(x)<for>x valid_datafiles])))<line_sep># check that print_description is boolean check(isinstance(print_description bool) 'type(print_description) = %s; must be of type bool '%type(print_description))<line_sep># set datafile file name file_name='%s/%s'%(data_dir name)<assert_stmt>os.path.isfile(file_name) 'File %s does not exist!'%file_name<line_sep># if user wants a description of the datafile, provide it <if_stmt>print_description<block_start>print('Description of example datafile "%s":'%name)<with_stmt>open(file_name 'r')<as>f<block_start>lines=f.readlines()<line_sep>lines=[l<for>l lines<if>len(l)<g>0<and>l[0]<eq>'#']<line_sep>description="".join(lines)<line_sep>print(description)<block_end><block_end># if file is a gzipped file, use gzip.open() <if_stmt>len(file_name)<ge>3<and>file_name[-3:]<eq>'.gz'<block_start>f=gzip.open(file_name 'r')<block_end># otherwise, use regular open() <else_stmt><block_start>f=open(file_name 'r')<block_end># return file handle to user <return>f<block_end>
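# Hedged usage sketch for the helpers above: list the bundled matrices, load
# the first one, and hand it to logomaker.Logo (assumed to be the installed
# package's logo-drawing class).
names = list_example_matrices()
df = get_example_matrix(names[0], print_description=False)
import logomaker
logomaker.Logo(df)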
""" COBRA Visualisations -------------------- This notebook will cover the visulaisation and plotting offered by pycobra. """<line_sep># %matplotlib inline <import_stmt>numpy<as>np<import_from_stmt>pycobra.cobra Cobra<import_from_stmt>pycobra.ewa Ewa<import_from_stmt>pycobra.visualisation Visualisation<import_from_stmt>pycobra.diagnostics Diagnostics<line_sep># setting up our random data-set rng=np.random.RandomState(42)<line_sep># D1 = train machines; D2 = create COBRA; D3 = calibrate epsilon, alpha; D4 = testing n_features=2<line_sep>D1,D2,D3,D4=200 200 200 200<line_sep>D=D1+D2+D3+D4<line_sep>X=rng.uniform(-1 1 D<times>n_features).reshape(D n_features)<line_sep># Y = np.power(X[:,1], 2) + np.power(X[:,3], 3) + np.exp(X[:,10]) Y=np.power(X[: 0] 2)+np.power(X[: 1] 3)<line_sep># training data-set X_train=X[:D1+D2]<line_sep>X_test=X[D1+D2+D3:D1+D2+D3+D4]<line_sep>X_eps=X[D1+D2:D1+D2+D3]<line_sep># for testing Y_train=Y[:D1+D2]<line_sep>Y_test=Y[D1+D2+D3:D1+D2+D3+D4]<line_sep>Y_eps=Y[D1+D2:D1+D2+D3]<line_sep># set up our COBRA machine with the data cobra=Cobra(epsilon=0.5)<line_sep>cobra.fit(X_train Y_train)<line_sep>###################################################################### # Plotting COBRA # ~~~~~~~~~~~~~~ # # We use the visualisation class to plot our results, and for various # visualisations. # cobra_vis=Visualisation(cobra X_test Y_test)<line_sep># to plot our machines, we need a linspace as input. This is the 'scale' to plot and should be the range of the results # since our data ranges from -1 to 1 it is such - and we space it out to a hundred points cobra_vis.plot_machines(machines=["COBRA"])<line_sep>cobra_vis.plot_machines()<line_sep>###################################################################### # Plots and Visualisations of Results # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # # QQ and Boxplots! # cobra_vis.QQ()<line_sep>cobra_vis.boxplot()<line_sep>###################################################################### # Plotting EWA! # ~~~~~~~~~~~~~ # # We can use the same visualisation class for seeing how EWA works. Let's # demonstrate this! # ewa=Ewa()<line_sep>ewa.set_beta(X_beta=X_eps y_beta=Y_eps)<line_sep>ewa.fit(X_train Y_train)<line_sep>ewa_vis=Visualisation(ewa X_test Y_test)<line_sep>ewa_vis.QQ("EWA")<line_sep>ewa_vis.boxplot()<line_sep>###################################################################### # Plotting ClassifierCobra # ~~~~~~~~~~~~~~~~~~~~~~~~ # <import_from_stmt>sklearn datasets<import_from_stmt>sklearn.metrics accuracy_score<import_from_stmt>pycobra.classifiercobra ClassifierCobra<line_sep>bc=datasets.load_breast_cancer()<line_sep>X_cc=bc.data[:-40]<line_sep>y_cc=bc.target[:-40]<line_sep>X_cc_test=bc.data[-40:]<line_sep>y_cc_test=bc.target[-40:]<line_sep>cc=ClassifierCobra()<line_sep>cc.fit(X_cc y_cc)<line_sep>cc_vis=Visualisation(cc X_cc_test y_cc_test)<line_sep>cc_vis.boxplot()<line_sep>###################################################################### # Remember that all the estimators in the Pycobra package are scikit-learn # compatible - we can also use the scikit-learn metrics and tools to # analyse our machines! # <import_from_stmt>sklearn.metrics classification_report<line_sep>print(classification_report(y_cc_test cc.predict(X_cc_test)))<line_sep>###################################################################### # Plotting COBRA colors! # ~~~~~~~~~~~~~~~~~~~~~~ # # We're now going to experiment with plotting colors and data. 
After we # get information about which indices are used by which machines the best # for a fixed epsilon (or not, we can toggle this option), we can plot the # distribution of machines. # # Why is this useful? Since we're dealing with a 2-D space now, we're # attempting to see if there are some parts in the input space which are # picked up by certain machines. This could lead to interesting # experiments and # # We first present a plot where the machine colors are mixed depending on # which machines were selected; after which we plot one machine at a time. # indices,MSE=cobra_vis.indice_info(X_test=X_eps[0:50] y_test=Y_eps[0:50] epsilon=0.50)<line_sep>cobra_vis.color_cobra(X_test=X_eps[0:50] indice_info=indices single=<true>)<line_sep>cobra_vis.color_cobra(X_test=X_eps[0:50] indice_info=indices)<line_sep>###################################################################### # Voronoi Tesselation # ~~~~~~~~~~~~~~~~~~~ # # We present a variety of Voronoi Tesselation based plots - the purpose of # this is to help in visualising the pattern of points which tend to be # picked up. # cobra_vis.voronoi(X_test=X_eps[0:50] indice_info=indices single=<true>)<line_sep>cobra_vis.voronoi(X_test=X_eps[0:50] indice_info=indices)<line_sep>###################################################################### # Gradient-Colored Based Voronoi # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # cobra_vis.voronoi(X_test=X_eps[0:50] indice_info=indices MSE=MSE gradient=<true>)<line_sep>###################################################################### # Licensed under the MIT License - https://opensource.org/licenses/MIT #
# coding=utf-8 # Copyright 2021 The Google Research Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for checkpoint_utils."""<import_stmt>tempfile<import_from_stmt>typing Text Tuple<import_stmt>tensorflow.compat.v1<as>tf<import_from_stmt>readtwice.models checkpoint_utils<def_stmt>_create_test_variables outer_scope inner_scope var_c_name var_e_name var_n_name# Keras layers can cause problems for `tf.train.init_from_checkpoint` # if not handled properly. Here we intentionally use Dense layers # to test whether the ckpt loading logic works. <block_start>dense_layer=tf.keras.layers.Dense(10 name="dense")<with_stmt>tf.variable_scope(outer_scope)<block_start>var_c=tf.get_variable(var_c_name shape=[2 4] initializer=tf.truncated_normal_initializer())<line_sep>var_d=dense_layer(var_c)<with_stmt>tf.variable_scope(inner_scope)<block_start>var_e=tf.get_variable(var_e_name shape=[2 3] initializer=tf.truncated_normal_initializer())<line_sep>_=tf.get_variable(var_n_name shape=[3 5] initializer=tf.truncated_normal_initializer())<block_end><block_end><return>var_c var_d var_e<block_end><class_stmt>CheckpointUtilsTest(tf.test.TestCase)<block_start><def_stmt>_create_test_checkpoint self outer_scope inner_scope var_c_name var_e_name var_n_name<block_start><with_stmt>tempfile.NamedTemporaryFile(suffix="ckpt_test")<as>ckpt_file<block_start><with_stmt>self.session()<as>sess<block_start>var_c,var_d,var_e=_create_test_variables(outer_scope inner_scope var_c_name var_e_name var_n_name)<line_sep>sess.run(tf.global_variables_initializer())<line_sep>saver=tf.train.Saver()<line_sep>saver.save(sess ckpt_file.name)<block_end><block_end><return>ckpt_file.name var_c var_d var_e<block_end><def_stmt>test_get_assignment_map_from_checkpoint self<block_start>ckpt_path,expected_c,expected_d,expected_e=(self._create_test_checkpoint("scope_a" "scope_b" "var_c" "var_e" "var_f"))<with_stmt>self.cached_session()<as>sess<block_start>var_c,var_d,var_e=_create_test_variables("another_scope_a" "scope_b" "var_c" "var_e" "var_g")<line_sep>(assignment_map initialized_variable_names)=checkpoint_utils.get_assignment_map_from_checkpoint(variables=sess.graph.get_collection(tf.GraphKeys.GLOBAL_VARIABLES) ckpt_path=ckpt_path variable_scope="another_scope_a/" ckpt_variable_scope="scope_a/")<line_sep>self.assertCountEqual(initialized_variable_names ["another_scope_a/var_c:0" "another_scope_a/dense/bias:0" "another_scope_a/dense/kernel:0" "another_scope_a/scope_b/var_e:0"])<line_sep>tf.train.init_from_checkpoint(ckpt_path assignment_map)<line_sep>sess.run(tf.global_variables_initializer())<line_sep>self.assertAllClose(var_c expected_c)<line_sep>self.assertAllClose(var_d expected_d)<line_sep>self.assertAllClose(var_e expected_e)<line_sep># When require_all_variables_initialized = True, an error is raised # since a checkpoint variable corresponding to the variable # `another_scope_a/scope_b/var_g` cannot be found # in the ckpt_variable_scope `scope_a/`. 
<with_stmt>self.assertRaisesRegex(ValueError "cannot be mapped")<block_start>(assignment_map initialized_variable_names)=checkpoint_utils.get_assignment_map_from_checkpoint(variables=sess.graph.get_collection(tf.GraphKeys.GLOBAL_VARIABLES) ckpt_path=ckpt_path variable_scope="another_scope_a/" ckpt_variable_scope="scope_a/" require_all_variables_initialized=<true>)<block_end><block_end><block_end><def_stmt>test_init_from_checkpoint_init_checkpoint_none self<block_start>self.assertIsNone(checkpoint_utils.get_scaffold_fn(<none> <true>))<block_end><def_stmt>test_init_from_checkpoint_single_scope_pair self<block_start>ckpt_path,expected_c,expected_d,expected_e=(self._create_test_checkpoint("scope_a" "scope_b" "var_c" "var_e" "var_f"))<with_stmt>self.cached_session()<as>sess<block_start>var_c,var_d,var_e=_create_test_variables("scope_a_1" "scope_b" "var_c" "var_e" "var_g")<line_sep>scaffold_fn=checkpoint_utils.get_scaffold_fn(ckpt_path <true> variable_scope_pairs=[("scope_a_1/" "scope_a/")])<line_sep>scaffold=scaffold_fn()<line_sep>self.assertIsInstance(scaffold tf.train.Scaffold)<line_sep>sess.run(tf.global_variables_initializer())<line_sep>self.assertAllClose(var_c expected_c)<line_sep>self.assertAllClose(var_d expected_d)<line_sep>self.assertAllClose(var_e expected_e)<block_end><block_end><def_stmt>test_init_from_checkpoint_multiple_scope_pairs self<block_start>ckpt_path,expected_c,expected_d,expected_e=(self._create_test_checkpoint("scope_a" "scope_b" "var_c" "var_e" "var_f"))<with_stmt>self.cached_session()<as>sess<block_start>var_c_1,var_d_1,var_e_1=_create_test_variables("scope_a_1" "scope_b" "var_c" "var_e" "var_g")<line_sep>var_c_2,var_d_2,var_e_2=_create_test_variables("scope_a_2" "scope_b" "var_c" "var_e" "var_g")<line_sep>scaffold_fn=checkpoint_utils.get_scaffold_fn(ckpt_path <true> variable_scope_pairs=[("scope_a_1/" "scope_a/") ("scope_a_2/" "scope_a/")])<line_sep>scaffold=scaffold_fn()<line_sep>self.assertIsInstance(scaffold tf.train.Scaffold)<line_sep>sess.run(tf.global_variables_initializer())<line_sep>self.assertAllClose(var_c_1 expected_c)<line_sep>self.assertAllClose(var_d_1 expected_d)<line_sep>self.assertAllClose(var_e_1 expected_e)<line_sep>self.assertAllClose(var_c_2 expected_c)<line_sep>self.assertAllClose(var_d_2 expected_d)<line_sep>self.assertAllClose(var_e_2 expected_e)<block_end><block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>tf.compat.v1.disable_eager_execution()<line_sep>tf.test.main()<block_end>
# Adopted from https://github.com/KiroSummer/A_Syntax-aware_MTL_Framework_for_Chinese_SRL # Inference functions for the SRL model. <import_stmt>numpy<as>np<def_stmt>decode_spans span_starts span_ends span_scores labels_inv<block_start>""" Args: span_starts: [num_candidates,] span_scores: [num_candidates, num_labels] span_ends: labels_inv: Returns: """<line_sep>pred_spans=[]<line_sep>span_labels=np.argmax(span_scores axis=1)# [num_candidates] spans_list=list(zip(span_starts span_ends span_labels span_scores))<line_sep>spans_list=sorted(spans_list key=<lambda>x:x[3][x[2]] reverse=<true>)<line_sep>predicted_spans={}<for_stmt>start,end,label,_ spans_list# Skip invalid span. <block_start><if_stmt>label<eq>0<or>(start end)<in>predicted_spans<block_start><continue><block_end>pred_spans.append((start end labels_inv[label]))<line_sep>predicted_spans[(start end)]=label<block_end><return>pred_spans<block_end><def_stmt>greedy_decode predict_dict srl_labels_inv<block_start>"""Greedy decoding for SRL predicate-argument structures. Args: predict_dict: Dictionary of name to numpy arrays. srl_labels_inv: SRL label id to string name. suppress_overlap: Whether to greedily suppress overlapping arguments for the same predicate. Returns: """<line_sep>arg_starts=predict_dict["arg_starts"]<line_sep>arg_ends=predict_dict["arg_ends"]<line_sep>predicates=predict_dict["predicates"]<line_sep>arg_labels=predict_dict["arg_labels"]<line_sep>scores=predict_dict["srl_scores"]<line_sep>num_suppressed_args=0<line_sep># Map from predicates to a list of labeled spans. pred_to_args={}<if_stmt>len(arg_ends)<g>0<and>len(predicates)<g>0<block_start>max_len=max(np.max(arg_ends) np.max(predicates))+1<block_end><else_stmt><block_start>max_len=1<block_end><for_stmt>j,pred_id enumerate(predicates)<block_start>args_list=[]<for_stmt>i,(arg_start arg_end) enumerate(zip(arg_starts arg_ends))# If label is not null. <block_start><if_stmt>arg_labels[i][j]<eq>0<block_start><continue><block_end>label=srl_labels_inv[arg_labels[i][j]]<line_sep># if label not in ["V", "C-V"]: args_list.append((arg_start arg_end label scores[i][j][arg_labels[i][j]]))<block_end># Sort arguments by highest score first. args_list=sorted(args_list key=<lambda>x:x[3] reverse=<true>)<line_sep>new_args_list=[]<line_sep>flags=[<false><for>_ range(max_len)]<line_sep># Predicate will not overlap with arguments either. flags[pred_id]=<true><for_stmt>(arg_start arg_end label score) args_list# If none of the tokens has been covered: <block_start><if_stmt><not>max(flags[arg_start:arg_end+1])<block_start>new_args_list.append((arg_start arg_end label))<for_stmt>k range(arg_start arg_end+1)<block_start>flags[k]=<true><block_end><block_end><block_end># Only add predicate if it has any argument. 
<if_stmt>new_args_list<block_start>pred_to_args[pred_id]=new_args_list<block_end>num_suppressed_args<augadd>len(args_list)-len(new_args_list)<block_end><return>pred_to_args num_suppressed_args<block_end>_CORE_ARGS={"ARG0":1 "ARG1":2 "ARG2":4 "ARG3":8 "ARG4":16 "ARG5":32 "ARGA":64 "A0":1 "A1":2 "A2":4 "A3":8 "A4":16 "A5":32 "AA":64}<def_stmt>get_predicted_clusters top_span_starts top_span_ends predicted_antecedents<block_start>mention_to_predicted={}<line_sep>predicted_clusters=[]<for_stmt>i,predicted_index enumerate(predicted_antecedents)<block_start><if_stmt>predicted_index<l>0<block_start><continue><block_end><assert_stmt>i<g>predicted_index<line_sep>predicted_antecedent=(int(top_span_starts[predicted_index]) int(top_span_ends[predicted_index]))<if_stmt>predicted_antecedent<in>mention_to_predicted<block_start>predicted_cluster=mention_to_predicted[predicted_antecedent]<block_end><else_stmt><block_start>predicted_cluster=len(predicted_clusters)<line_sep>predicted_clusters.append([predicted_antecedent])<line_sep>mention_to_predicted[predicted_antecedent]=predicted_cluster<block_end>mention=(int(top_span_starts[i]) int(top_span_ends[i]))<line_sep>predicted_clusters[predicted_cluster].append(mention)<line_sep>mention_to_predicted[mention]=predicted_cluster<block_end>predicted_clusters=[tuple(pc)<for>pc predicted_clusters]<line_sep>mention_to_predicted={m:predicted_clusters[i]<for>m,i list(mention_to_predicted.items())}<line_sep><return>predicted_clusters mention_to_predicted<block_end><def_stmt>_decode_non_overlapping_spans starts ends scores max_len labels_inv pred_id<block_start>labels=np.argmax(scores axis=1)<line_sep>spans=[]<for_stmt>i,(start end label) enumerate(zip(starts ends labels))<block_start><if_stmt>label<le>0<block_start><continue><block_end>label_str=labels_inv[label]<if_stmt>pred_id<is><not><none><and>label_str<eq>"V"<block_start><continue><block_end>spans.append((start end label_str scores[i][label]))<block_end>spans=sorted(spans key=<lambda>x:x[3] reverse=<true>)<line_sep>flags=np.zeros([max_len] dtype=bool)<if_stmt>pred_id<is><not><none><block_start>flags[pred_id]=<true><block_end>new_spans=[]<for_stmt>start,end,label_str,score spans<block_start><if_stmt><not>max(flags[start:end+1])<block_start>new_spans.append((start end label_str))# , score)) <for_stmt>k range(start end+1)<block_start>flags[k]=<true><block_end><block_end><block_end><return>new_spans<block_end><def_stmt>_dp_decode_non_overlapping_spans starts ends scores max_len labels_inv pred_id u_constraint=<false><block_start>num_roles=scores.shape[1]# [num_arg, num_roles] labels=np.argmax(scores axis=1).astype(np.int64)<line_sep>spans=list(zip(starts ends list(range(len(starts)))))<line_sep>spans=sorted(spans key=<lambda>x:(x[0] x[1]))# sort according to the span start index <if_stmt>u_constraint<block_start>f=np.zeros([max_len+1 128] dtype=float)-0.1<block_end><else_stmt># This one <block_start>f=np.zeros([max_len+1 1] dtype=float)-0.1<block_end>f[0 0]=0<line_sep>states={0:set([0])}# A dictionary from id to list of binary core-arg states. 
pointers={}# A dictionary from states to (arg_id, role, prev_t, prev_rs) best_state=[(0 0)]<def_stmt>_update_state t0 rs0 t1 rs1 delta arg_id role<block_start><if_stmt>f[t0][rs0]+delta<g>f[t1][rs1]<block_start>f[t1][rs1]=f[t0][rs0]+delta<if_stmt>t1<not><in>states<block_start>states[t1]=set()<block_end>states[t1].update([rs1])<line_sep>pointers[(t1 rs1)]=(arg_id role t0 rs0)# the pointers store <if_stmt>f[t1][rs1]<g>f[best_state[0][0]][best_state[0][1]]<block_start>best_state[0]=(t1 rs1)<block_end><block_end><block_end><for_stmt>start,end,i spans# [arg_start, arg_end, arg_span_id] <block_start><assert_stmt>scores[i][0]<eq>0# dummy score # The extra dummy score should be same for all states, so we can safely skip arguments overlap # with the predicate. <if_stmt>pred_id<is><not><none><and>start<le>pred_id<and>pred_id<le>end# skip the span contains the predicate <block_start><continue><block_end>r0=labels[i]# Locally best role assignment. # Strictly better to incorporate a dummy span if it has the highest local score. <if_stmt>r0<eq>0# labels_inv[r0] == "O" <block_start><continue><block_end>r0_str=labels_inv[r0]<line_sep># Enumerate explored states. t_states=[t<for>t list(states.keys())<if>t<le>start]# collect the state which is before the current span <for_stmt>t t_states# for each state <block_start>role_states=states[t]<line_sep># Update states if best role is not a core arg. <if_stmt><not>u_constraint<or>r0_str<not><in>_CORE_ARGS# True; this one <block_start><for_stmt>rs role_states# the set type in the value in the state dict <block_start>_update_state(t rs end+1 rs scores[i][r0] i r0)# update the state <block_end><block_end><else_stmt><block_start><for_stmt>rs role_states<block_start><for_stmt>r range(1 num_roles)<block_start><if_stmt>scores[i][r]<g>0<block_start>r_str=labels_inv[r]<line_sep>core_state=_CORE_ARGS.get(r_str 0)<line_sep># print start, end, i, r_str, core_state, rs <if_stmt>core_state&rs<eq>0<block_start>_update_state(t rs end+1 rs|core_state scores[i][r] i r)<block_end><block_end><block_end><block_end><block_end><block_end><block_end># Backtrack to decode. new_spans=[]<line_sep>t,rs=best_state[0]<while_stmt>(t rs)<in>pointers<block_start>i,r,t0,rs0=pointers[(t rs)]<line_sep>new_spans.append((int(starts[i]) int(ends[i]) labels_inv[r]))<line_sep>t=t0<line_sep>rs=rs0<block_end><return>new_spans[::-1]<block_end><def_stmt>srl_decode sentence_lengths predict_dict srl_labels_inv config# decode the predictions. # Decode sentence-level tasks. <block_start>num_sentences=len(sentence_lengths)<line_sep>predictions=[{}<for>_ range(num_sentences)]<line_sep># Sentence-level predictions. 
<for_stmt>i range(num_sentences)# for each sentences # if predict_dict["No_arg"] is True: # predictions["srl"][i][predict_dict["predicates"][i]] = [] # continue <block_start>predict_dict_num_args_=predict_dict["num_args"].cpu().numpy()<line_sep>predict_dict_num_preds_=predict_dict["num_preds"].cpu().numpy()<line_sep>predict_dict_predicates_=predict_dict["predicates"].cpu().numpy()<line_sep>predict_dict_arg_starts_=predict_dict["arg_starts"].cpu().numpy()<line_sep>predict_dict_arg_ends_=predict_dict["arg_ends"].cpu().numpy()<line_sep>predict_dict_srl_scores_=predict_dict["srl_scores"].detach().cpu().numpy()<line_sep>num_args=predict_dict_num_args_[i]# the number of the candidate argument spans num_preds=predict_dict_num_preds_[i]# the number of the candidate predicates # for each predicate id, exec the decode process <for_stmt>j,pred_id enumerate(predict_dict_predicates_[i][:num_preds])# sorted arg_starts and arg_ends and srl_scores ? should be??? enforce_srl_constraint = False <block_start>arg_spans=_dp_decode_non_overlapping_spans(predict_dict_arg_starts_[i][:num_args] predict_dict_arg_ends_[i][:num_args] predict_dict_srl_scores_[i :num_args j :] sentence_lengths[i] srl_labels_inv pred_id config.enforce_srl_constraint)<line_sep># To avoid warnings in the eval script. <if_stmt>config.use_gold_predicates# false <block_start>arg_spans.append((pred_id pred_id "V"))<block_end><if_stmt>arg_spans<block_start>predictions[i][int(pred_id)]=sorted(arg_spans key=<lambda>x:(x[0] x[1]))<block_end><block_end><block_end><return>predictions<block_end>
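# A tiny worked sketch of decode_spans above: label index 0 means "no
# argument", so the third candidate span is dropped.
import numpy as np

labels_inv = {0: "O", 1: "ARG0", 2: "ARG1"}
starts = np.array([0, 2, 2])
ends = np.array([1, 4, 3])
scores = np.array([[0.1, 0.7, 0.2],
                   [0.2, 0.1, 0.7],
                   [0.6, 0.3, 0.1]])
print(decode_spans(starts, ends, scores, labels_inv))
# spans (0, 1, 'ARG0') and (2, 4, 'ARG1') survive; (2, 3) argmaxes to label 0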
# -*- coding: utf-8 -*- <import_stmt>os<import_stmt>shlex<import_stmt>subprocess<import_from_stmt>os.path abspath dirname join<import_stmt>pytest<line_sep>SKIP_EXAMPLES=["Example 4"]<line_sep>@pytest.mark.skipif(os.name<eq>"nt" reason="No make.bat specified for Windows")<def_stmt>test_build_documentation <block_start>docroot=join(dirname(dirname(abspath(__file__))) "docs")<line_sep>cmd=shlex.split("sphinx-build -aE . _build")<line_sep>proc=subprocess.Popen(cmd cwd=docroot stdout=subprocess.PIPE stderr=subprocess.PIPE)<line_sep>status=proc.wait()<assert_stmt>status<eq>0<line_sep>issues=[]<for_stmt>output proc.communicate()<block_start><for_stmt>line str(output).split("\\n")<block_start>line=line.lower().strip()<if_stmt>"warning"<in>line<or>"error"<in>line<or>"traceback"<in>line<block_start>issues.append(line)<block_end><block_end><block_end><for_stmt>line issues<block_start>print(line)<block_end><assert_stmt><not>issues<block_end><def_stmt>test_readme_examples plex<block_start>failed=0<line_sep>examples=_fetch_examples()<assert_stmt>len(examples) "No examples found in README"<for_stmt>title,example examples<block_start><if_stmt>_check_run_example(title)<block_start><try_stmt><block_start>print("\n%s\n%s"%(title "-"<times>len(title)))<line_sep>exec("\n".join(example))<block_end><except_stmt>Exception<as>err<block_start>failed<augadd>1<line_sep>print("Error running test: %s\nError: %s"%(title err))<block_end><block_end><block_end><assert_stmt><not>failed "%s examples raised an exception."%failed<block_end><def_stmt>_fetch_examples <block_start>parsing=<false><line_sep>examples=[]<line_sep>filepath=join(dirname(dirname(abspath(__file__))) "README.rst")<with_stmt>open(filepath "r")<as>handle<block_start><for_stmt>line handle.read().split("\n")<block_start>line=line[4:]<if_stmt>line.startswith("# Example ")<block_start>parsing=<true><line_sep>title=line.lstrip("# ")<line_sep>examples.append([title []])<block_end><elif_stmt>parsing<and>line<eq>""<block_start>parsing=<false><block_end><elif_stmt>parsing<block_start>examples[-1][1].append(line)<block_end><block_end><block_end><return>examples<block_end><def_stmt>_check_run_example title<block_start><for_stmt>skip_example SKIP_EXAMPLES<block_start><if_stmt>skip_example<in>title<block_start><return><false><block_end><block_end><return><true><block_end>
# This code is part of Qiskit. # # (C) Copyright IBM 2020. # # This code is licensed under the Apache License, Version 2.0. You may # obtain a copy of this license in the LICENSE.txt file in the root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of this code must retain this # copyright notice, and modified files need to carry a notice indicating # that they have been altered from the originals. """Module for utility functions."""<import_from_stmt>typing List<import_stmt>numpy<as>np<def_stmt>bytes_to_bitarray the_bytes:bytes num_bits:int<arrow>List[int]<block_start>"""Convert input bytes into an array of bits. Args: the_bytes: Bytes to be converted. num_bits: Number of bits to return. Returns: An array of bits. """<line_sep><return>[(the_bytes[i<rshift>3]<rshift>(i&7))&1<for>i range(num_bits)]<block_end><def_stmt>bitarray_to_bytes bitarray:List[int]<arrow>bytes<block_start>"""Convert an array of bits to bytes. Args: bitarray: Bit array to be converted. Returns: Input array in bytes. """<line_sep>n_bits=len(bitarray)<line_sep>n_bytes=(n_bits+7)<rshift>3<line_sep>int_array=[0]<times>n_bytes<for_stmt>i range(n_bits)<block_start>int_array[i<rshift>3]<augor>bitarray[i]<lshift>(i&7)<block_end><return>bytes(int_array)<block_end><def_stmt>generate_wsr num_bits:int<arrow>List<block_start>"""Generate a list of WSR bits. Args: num_bits: Number of bits needed. Returns: A list of random binary numbers. """<line_sep><return>list(np.random.randint(2 size=num_bits))<block_end>
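# Round-trip sketch for the helpers above (bits are LSB-first within each byte).
data = bytes([0b10110001, 0b00000011])
bits = bytes_to_bitarray(data, 12)        # 12 bits -> [1,0,0,0,1,1,0,1, 1,1,0,0]
assert bitarray_to_bytes(bits) == data    # packs back into the same two bytes
wsr = generate_wsr(8)                     # e.g. [0, 1, 1, 0, 1, 0, 0, 1]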
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** <import_stmt>warnings<import_stmt>pulumi<import_stmt>pulumi.runtime<import_from_stmt>typing Any Mapping Optional Sequence Union overload<import_from_stmt>.. _utilities<import_from_stmt>. outputs<import_from_stmt>._inputs *<line_sep>__all__=['ServicePerimetersArgs' 'ServicePerimeters']<line_sep>@pulumi.input_type<class_stmt>ServicePerimetersArgs<block_start><def_stmt>__init__ __self__ * parent:pulumi.Input[str] service_perimeters:Optional[pulumi.Input[Sequence[pulumi.Input['ServicePerimetersServicePerimeterArgs']]]]=<none><block_start>""" The set of arguments for constructing a ServicePerimeters resource. :param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in. Format: accessPolicies/{policy_id} :param pulumi.Input[Sequence[pulumi.Input['ServicePerimetersServicePerimeterArgs']]] service_perimeters: The desired Service Perimeters that should replace all existing Service Perimeters in the Access Policy. Structure is documented below. """<line_sep>pulumi.set(__self__ "parent" parent)<if_stmt>service_perimeters<is><not><none><block_start>pulumi.set(__self__ "service_perimeters" service_perimeters)<block_end><block_end>@[email protected]<def_stmt>parent self<arrow>pulumi.Input[str]<block_start>""" The AccessPolicy this ServicePerimeter lives in. Format: accessPolicies/{policy_id} """<line_sep><return>pulumi.get(self "parent")<block_end>@parent.setter<def_stmt>parent self value:pulumi.Input[str]<block_start>pulumi.set(self "parent" value)<block_end>@[email protected](name="servicePerimeters")<def_stmt>service_perimeters self<arrow>Optional[pulumi.Input[Sequence[pulumi.Input['ServicePerimetersServicePerimeterArgs']]]]<block_start>""" The desired Service Perimeters that should replace all existing Service Perimeters in the Access Policy. Structure is documented below. """<line_sep><return>pulumi.get(self "service_perimeters")<block_end>@service_perimeters.setter<def_stmt>service_perimeters self value:Optional[pulumi.Input[Sequence[pulumi.Input['ServicePerimetersServicePerimeterArgs']]]]<block_start>pulumi.set(self "service_perimeters" value)<block_end><block_end>@pulumi.input_type<class_stmt>_ServicePerimetersState<block_start><def_stmt>__init__ __self__ * parent:Optional[pulumi.Input[str]]=<none> service_perimeters:Optional[pulumi.Input[Sequence[pulumi.Input['ServicePerimetersServicePerimeterArgs']]]]=<none><block_start>""" Input properties used for looking up and filtering ServicePerimeters resources. :param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in. Format: accessPolicies/{policy_id} :param pulumi.Input[Sequence[pulumi.Input['ServicePerimetersServicePerimeterArgs']]] service_perimeters: The desired Service Perimeters that should replace all existing Service Perimeters in the Access Policy. Structure is documented below. """<if_stmt>parent<is><not><none><block_start>pulumi.set(__self__ "parent" parent)<block_end><if_stmt>service_perimeters<is><not><none><block_start>pulumi.set(__self__ "service_perimeters" service_perimeters)<block_end><block_end>@[email protected]<def_stmt>parent self<arrow>Optional[pulumi.Input[str]]<block_start>""" The AccessPolicy this ServicePerimeter lives in. 
Format: accessPolicies/{policy_id} """<line_sep><return>pulumi.get(self "parent")<block_end>@parent.setter<def_stmt>parent self value:Optional[pulumi.Input[str]]<block_start>pulumi.set(self "parent" value)<block_end>@[email protected](name="servicePerimeters")<def_stmt>service_perimeters self<arrow>Optional[pulumi.Input[Sequence[pulumi.Input['ServicePerimetersServicePerimeterArgs']]]]<block_start>""" The desired Service Perimeters that should replace all existing Service Perimeters in the Access Policy. Structure is documented below. """<line_sep><return>pulumi.get(self "service_perimeters")<block_end>@service_perimeters.setter<def_stmt>service_perimeters self value:Optional[pulumi.Input[Sequence[pulumi.Input['ServicePerimetersServicePerimeterArgs']]]]<block_start>pulumi.set(self "service_perimeters" value)<block_end><block_end><class_stmt>ServicePerimeters(pulumi.CustomResource)<block_start>@overload<def_stmt>__init__ __self__ resource_name:str opts:Optional[pulumi.ResourceOptions]=<none> parent:Optional[pulumi.Input[str]]=<none> service_perimeters:Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServicePerimetersServicePerimeterArgs']]]]]=<none> __props__=<none><block_start>""" Replace all existing Service Perimeters in an Access Policy with the Service Perimeters provided. This is done atomically. This is a bulk edit of all Service Perimeters and may override existing Service Perimeters created by `accesscontextmanager.ServicePerimeter`, thus causing a permadiff if used alongside `accesscontextmanager.ServicePerimeter` on the same parent. To get more information about ServicePerimeters, see: * [API documentation](https://cloud.google.com/access-context-manager/docs/reference/rest/v1/accessPolicies.servicePerimeters) * How-to Guides * [Service Perimeter Quickstart](https://cloud.google.com/vpc-service-controls/docs/quickstart) ## Example Usage ### Access Context Manager Service Perimeters Basic ```python import pulumi import pulumi_gcp as gcp access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy", parent="organizations/123456789", title="my policy") service_perimeter = gcp.accesscontextmanager.ServicePerimeters("service-perimeter", parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"), service_perimeters=[ gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs( name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"), status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs( restricted_services=["storage.googleapis.com"], ), title="", ), gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs( name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"), status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs( restricted_services=["bigtable.googleapis.com"], ), title="", ), ]) access_level = gcp.accesscontextmanager.AccessLevel("access-level", basic=gcp.accesscontextmanager.AccessLevelBasicArgs( conditions=[gcp.accesscontextmanager.AccessLevelBasicConditionArgs( device_policy=gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyArgs( os_constraints=[gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyOsConstraintArgs( os_type="DESKTOP_CHROME_OS", )], require_screen_lock=False, ), regions=[ "CH", "IT", "US", ], )], ), parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"), title="chromeos_no_lock") ``` ## Import ServicePerimeters can be imported using any of these accepted formats ```sh $ pulumi 
import gcp:accesscontextmanager/servicePerimeters:ServicePerimeters default {{parent}}/servicePerimeters ``` ```sh $ pulumi import gcp:accesscontextmanager/servicePerimeters:ServicePerimeters default {{parent}} ``` :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in. Format: accessPolicies/{policy_id} :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServicePerimetersServicePerimeterArgs']]]] service_perimeters: The desired Service Perimeters that should replace all existing Service Perimeters in the Access Policy. Structure is documented below. """<line_sep><ellipsis><block_end>@overload<def_stmt>__init__ __self__ resource_name:str args:ServicePerimetersArgs opts:Optional[pulumi.ResourceOptions]=<none><block_start>""" Replace all existing Service Perimeters in an Access Policy with the Service Perimeters provided. This is done atomically. This is a bulk edit of all Service Perimeters and may override existing Service Perimeters created by `accesscontextmanager.ServicePerimeter`, thus causing a permadiff if used alongside `accesscontextmanager.ServicePerimeter` on the same parent. To get more information about ServicePerimeters, see: * [API documentation](https://cloud.google.com/access-context-manager/docs/reference/rest/v1/accessPolicies.servicePerimeters) * How-to Guides * [Service Perimeter Quickstart](https://cloud.google.com/vpc-service-controls/docs/quickstart) ## Example Usage ### Access Context Manager Service Perimeters Basic ```python import pulumi import pulumi_gcp as gcp access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy", parent="organizations/123456789", title="my policy") service_perimeter = gcp.accesscontextmanager.ServicePerimeters("service-perimeter", parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"), service_perimeters=[ gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs( name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"), status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs( restricted_services=["storage.googleapis.com"], ), title="", ), gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs( name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"), status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs( restricted_services=["bigtable.googleapis.com"], ), title="", ), ]) access_level = gcp.accesscontextmanager.AccessLevel("access-level", basic=gcp.accesscontextmanager.AccessLevelBasicArgs( conditions=[gcp.accesscontextmanager.AccessLevelBasicConditionArgs( device_policy=gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyArgs( os_constraints=[gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyOsConstraintArgs( os_type="DESKTOP_CHROME_OS", )], require_screen_lock=False, ), regions=[ "CH", "IT", "US", ], )], ), parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"), title="chromeos_no_lock") ``` ## Import ServicePerimeters can be imported using any of these accepted formats ```sh $ pulumi import gcp:accesscontextmanager/servicePerimeters:ServicePerimeters default {{parent}}/servicePerimeters ``` ```sh $ pulumi import gcp:accesscontextmanager/servicePerimeters:ServicePerimeters default {{parent}} ``` :param str resource_name: The name of the resource. 
:param ServicePerimetersArgs args: The arguments to use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource. """<line_sep><ellipsis><block_end><def_stmt>__init__ __self__ resource_name:str *args **kwargs<block_start>resource_args,opts=_utilities.get_resource_args_opts(ServicePerimetersArgs pulumi.ResourceOptions *args **kwargs)<if_stmt>resource_args<is><not><none><block_start>__self__._internal_init(resource_name opts **resource_args.__dict__)<block_end><else_stmt><block_start>__self__._internal_init(resource_name *args **kwargs)<block_end><block_end><def_stmt>_internal_init __self__ resource_name:str opts:Optional[pulumi.ResourceOptions]=<none> parent:Optional[pulumi.Input[str]]=<none> service_perimeters:Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServicePerimetersServicePerimeterArgs']]]]]=<none> __props__=<none><block_start><if_stmt>opts<is><none><block_start>opts=pulumi.ResourceOptions()<block_end><if_stmt><not>isinstance(opts pulumi.ResourceOptions)<block_start><raise>TypeError('Expected resource options to be a ResourceOptions instance')<block_end><if_stmt>opts.version<is><none><block_start>opts.version=_utilities.get_version()<block_end><if_stmt>opts.id<is><none><block_start><if_stmt>__props__<is><not><none><block_start><raise>TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')<block_end>__props__=ServicePerimetersArgs.__new__(ServicePerimetersArgs)<if_stmt>parent<is><none><and><not>opts.urn<block_start><raise>TypeError("Missing required property 'parent'")<block_end>__props__.__dict__["parent"]=parent<line_sep>__props__.__dict__["service_perimeters"]=service_perimeters<block_end>super(ServicePerimeters __self__).__init__('gcp:accesscontextmanager/servicePerimeters:ServicePerimeters' resource_name __props__ opts)<block_end>@staticmethod<def_stmt>get resource_name:str id:pulumi.Input[str] opts:Optional[pulumi.ResourceOptions]=<none> parent:Optional[pulumi.Input[str]]=<none> service_perimeters:Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServicePerimetersServicePerimeterArgs']]]]]=<none><arrow>'ServicePerimeters'<block_start>""" Get an existing ServicePerimeters resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in. Format: accessPolicies/{policy_id} :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServicePerimetersServicePerimeterArgs']]]] service_perimeters: The desired Service Perimeters that should replace all existing Service Perimeters in the Access Policy. Structure is documented below. """<line_sep>opts=pulumi.ResourceOptions.merge(opts pulumi.ResourceOptions(id=id))<line_sep>__props__=_ServicePerimetersState.__new__(_ServicePerimetersState)<line_sep>__props__.__dict__["parent"]=parent<line_sep>__props__.__dict__["service_perimeters"]=service_perimeters<line_sep><return>ServicePerimeters(resource_name opts=opts __props__=__props__)<block_end>@[email protected]<def_stmt>parent self<arrow>pulumi.Output[str]<block_start>""" The AccessPolicy this ServicePerimeter lives in. 
Format: accessPolicies/{policy_id} """<line_sep><return>pulumi.get(self "parent")<block_end>@[email protected](name="servicePerimeters")<def_stmt>service_perimeters self<arrow>pulumi.Output[Optional[Sequence['outputs.ServicePerimetersServicePerimeter']]]<block_start>""" The desired Service Perimeters that should replace all existing Service Perimeters in the Access Policy. Structure is documented below. """<line_sep><return>pulumi.get(self "service_perimeters")<block_end><block_end>
<import_from_future_stmt> absolute_import<import_stmt>sys<import_stmt>unittest<import_stmt>re<import_stmt>os<line_sep>sys.path.insert(0 os.path.join(os.path.dirname(__file__) '..' '..'))<import_from_stmt>.ServerTest ServerTest<import_from_stmt>Exscript.servers SSHd<import_from_stmt>Exscript.protocols SSH2<class_stmt>SSHdTest(ServerTest)<block_start>CORRELATE=SSHd<def_stmt>_create_daemon self<block_start>self.daemon=SSHd(self.host self.port self.device)<block_end><def_stmt>_create_client self<block_start><return>SSH2()<block_end><block_end><def_stmt>suite <block_start><return>unittest.TestLoader().loadTestsFromTestCase(SSHdTest)<block_end><if_stmt>__name__<eq>'__main__'<block_start>unittest.TextTestRunner(verbosity=2).run(suite())<block_end>
<import_stmt>datetime<import_from_stmt>api utils<import_from_stmt>abc ABC abstractmethod<import_from_stmt>twisted.internet reactor<import_from_stmt>strategies.strategy Strategy<import_from_stmt>models.order Order<class_stmt>Exchange(ABC)<block_start>currency:str<line_sep>asset:str<line_sep>strategy:Strategy<def_stmt>__init__ self key:str secret:str<block_start>self.apiKey=key<line_sep>self.apiSecret=secret<line_sep>self.name=<none><line_sep>self.client=<none><line_sep>self.socketManager=<none><line_sep>self.socket=<none><line_sep>self.currency=''<line_sep>self.asset=''<line_sep>self.strategy=<none><block_end><def_stmt>set_currency self symbol:str<block_start>self.currency=symbol<block_end><def_stmt>set_asset self symbol:str<block_start>self.asset=symbol<block_end><def_stmt>set_strategy self strategy:Strategy<block_start>self.strategy=strategy<block_end><def_stmt>compute_symbol_pair self<block_start><return>utils.format_pair(self.currency self.asset)<block_end># abstract methods # Override to set current exchange symbol pair notation (default with _ separator currency_asset ex: eur_btc) @abstractmethod<def_stmt>get_symbol self<block_start><return>self.compute_symbol_pair()<block_end># Get current symbol ticker @abstractmethod<def_stmt>symbol_ticker self<block_start><pass><block_end># Get current symbol ticker candle for given interval @abstractmethod<def_stmt>symbol_ticker_candle self interval<block_start><pass><block_end># Get current symbol historic value @abstractmethod<def_stmt>historical_symbol_ticker_candle self start:datetime end=<none> interval=60<block_start><pass><block_end># Get balance for a given currency @abstractmethod<def_stmt>get_asset_balance self currency<block_start><pass><block_end># Create an exchange order @abstractmethod<def_stmt>order self order:Order<block_start><pass><block_end># Create an exchange test order @abstractmethod<def_stmt>test_order self order:Order<block_start><pass><block_end># Check an exchange order status @abstractmethod<def_stmt>check_order self orderId<block_start><pass><block_end># Cancel an exchange order @abstractmethod<def_stmt>cancel_order self orderId<block_start><pass><block_end># WebSocket related methods @abstractmethod<def_stmt>get_socket_manager self purchase<block_start><pass><block_end>@abstractmethod<def_stmt>websocket_event_handler self msg<block_start><pass><block_end><def_stmt>start_socket self<block_start>print('Starting WebSocket connection...')<line_sep>self.socketManager.start()<block_end><def_stmt>close_socket self<block_start>self.socketManager.stop_socket(self.socket)<line_sep>self.socketManager.close()<line_sep># properly terminate WebSocket reactor.stop()<block_end>@abstractmethod<def_stmt>start_symbol_ticker_socket self symbol:str<block_start><pass><block_end><block_end>
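# A minimal sketch of a concrete adapter built on the abstract Exchange above.
# "PaperExchange" and its canned return values are hypothetical; they only
# illustrate which methods a real exchange integration has to provide.
class PaperExchange(Exchange):
    def get_symbol(self):
        return self.compute_symbol_pair()  # e.g. "eur_btc" with the default notation

    def symbol_ticker(self):
        return {'symbol': self.get_symbol(), 'price': 0.0}

    def symbol_ticker_candle(self, interval):
        return []

    def historical_symbol_ticker_candle(self, start, end=None, interval=60):
        return []

    def get_asset_balance(self, currency):
        return 0.0

    def order(self, order):
        return order

    def test_order(self, order):
        return order

    def check_order(self, orderId):
        return {'id': orderId, 'status': 'FILLED'}

    def cancel_order(self, orderId):
        return True

    def get_socket_manager(self, purchase):
        return None

    def websocket_event_handler(self, msg):
        print(msg)

    def start_symbol_ticker_socket(self, symbol):
        pass


# Illustrative wiring only; the key/secret strings are placeholders.
exchange = PaperExchange('api-key', 'api-secret')
exchange.set_currency('eur')
exchange.set_asset('btc')
print(exchange.symbol_ticker())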
<import_from_stmt>.brightness_contrast_node BrightnessContrastNode<line_sep>
<import_from_future_stmt> print_function<import_stmt>os<import_stmt>os.path<as>osp<import_stmt>platform<import_from_stmt>setuptools setup Extension Command<import_from_stmt>setuptools.command.build_ext build_ext<import_from_stmt>shutil which<import_stmt>subprocess<as>sp<import_stmt>sys<line_sep>__CMAKE_OVERRIDE_FLAGS__={}<class_stmt>CMakeExtension(Extension)<block_start><def_stmt>__init__ self name<block_start>super(CMakeExtension self).__init__(name sources=[])<block_end><block_end><class_stmt>CMakeOverride(Command)<block_start>description='Overrides CMake variables for build'<line_sep>user_options=[('settings=' 's' 'CMake variable override: <KEY>:<VALUE>:<KEY>:<VALUE>...')]<def_stmt>initialize_options self<block_start>self.settings=''<block_end><def_stmt>finalize_options self<block_start><pass><block_end><def_stmt>run self<block_start><global>__CMAKE_OVERRIDE_FLAGS__<line_sep>overrides=self.settings.split(':')<for_stmt>i range(0 len(overrides) 2)<block_start>print('Overriding %s with %s'%(overrides[i] overrides[i+1]))<line_sep>__CMAKE_OVERRIDE_FLAGS__[overrides[i]]=overrides[i+1]<block_end><block_end><block_end><class_stmt>CMakeBuildExt(build_ext)<block_start><def_stmt>run self<block_start><for_stmt>ext self.extensions<block_start>self.build_extension(ext)<block_end><block_end><def_stmt>build_extension self ext<block_start>print('Building '+ext.name)<line_sep>outdir=osp.abspath(osp.dirname(self.get_ext_fullpath(ext.name)))<line_sep>args=['-DCMAKE_LIBRARY_OUTPUT_DIRECTORY='+outdir]<if_stmt><not>osp.isdir(outdir)<block_start>os.makedirs(outdir)<block_end>args<augadd>['-DGPU_RESAMPLING_CONFIGFILE_DIR='+outdir]<line_sep>args<augadd>['-DCMAKE_BUILD_TYPE='+('Debug'<if>self.debug<else>'Release')]<if_stmt>platform.system()<eq>'Linux'<and>any(dist<in>platform.dist()<for>dist ('Debian' 'Ubuntu'))# Need to find compilers that play nice with nvcc; # this assumes compatible versions have been linked to # /PATH/TO/cuda/bin/cc and /PATH/TO/cuda/bin/c++, and # that they appear first on the search path. <block_start><if_stmt><not>'CMAKE_C_COMPILER'<in>__CMAKE_OVERRIDE_FLAGS__<block_start>args<augadd>['-DCMAKE_C_COMPILER='+which('cc')]<block_end><if_stmt><not>'CMAKE_CXX_COMPILER'<in>__CMAKE_OVERRIDE_FLAGS__<block_start>args<augadd>['-DCMAKE_CXX_COMPILER='+which('c++')]<block_end><block_end><for_stmt>key,val __CMAKE_OVERRIDE_FLAGS__.items()<block_start>args<augadd>['-D'+key+'='+val]<block_end>args<augadd>[osp.join(osp.dirname(osp.abspath(__file__)) 'niftyreg_gpu_resampler')]<if_stmt><not>osp.isdir(self.build_temp)<block_start>os.makedirs(self.build_temp)<block_end>print('Building in '+str(self.build_temp)+': cmake '+' '.join(args))<line_sep>sp.call(['cmake']+args cwd=self.build_temp)<line_sep>sp.call(['cmake']+args cwd=self.build_temp)<line_sep>sp.call(['cmake' '--build' self.build_temp])<block_end><block_end>setup(name='niftyreg_gpu_resampler' description='A NiftyNet image resampling sub-module powered by NiftyReg '<concat>'GPU code.' packages=['.'] ext_modules=[CMakeExtension('niftyreg_gpu_resampler')] cmdclass={'override':CMakeOverride 'build_ext':CMakeBuildExt} zip_safe=<false> )<line_sep>
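# Hypothetical invocation sketch for the build script above: the custom
# "override" command collects colon-separated KEY:VALUE pairs, which
# build_ext later forwards to CMake as -DKEY=VALUE flags. Both commands run
# in the same process, so the overrides carry over. The compiler paths shown
# are placeholders, not project defaults:
#
#   python setup.py override \
#       --settings=CMAKE_C_COMPILER:/usr/local/cuda/bin/cc:CMAKE_CXX_COMPILER:/usr/local/cuda/bin/c++ \
#       build_ext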
<import_from_stmt>confidant.services keymanager<def_stmt>test_get_key_id mocker<block_start>mocker.patch('confidant.services.keymanager._KEY_METADATA' {})<line_sep>mock_auth_client=mocker.Mock()<line_sep>mock_auth_client.describe_key=mocker.Mock(return_value={'KeyMetadata':{'KeyId':'mockid'}})<line_sep>mocker.patch('confidant.services.keymanager._get_auth_kms_client' return_value=mock_auth_client )<assert_stmt>keymanager.get_key_id('mockalias')<eq>'mockid'<block_end><def_stmt>test_get_key_id_cached mocker<block_start>mocker.patch('confidant.services.keymanager._KEY_METADATA' {'mockalias':{'KeyMetadata':{'KeyId':'mockid'}}})<line_sep>mock_auth_client=mocker.Mock()<line_sep>mock_auth_client.describe_key=mocker.Mock()<line_sep>mocker.patch('confidant.services.keymanager._get_auth_kms_client' return_value=mock_auth_client )<line_sep>mock_auth_client.describe_key=mocker.Mock()<assert_stmt>keymanager.get_key_id('mockalias')<eq>'mockid'<block_end><def_stmt>test_create_datakey_mocked mocker<block_start>fernet_mock=mocker.patch('cryptography.fernet.Fernet.generate_key')<line_sep>fernet_mock.return_value='mocked_fernet_key'<line_sep>mocker.patch('confidant.services.keymanager.settings.USE_ENCRYPTION' <false>)<line_sep>ret=keymanager.create_datakey({})<assert_stmt>fernet_mock.called<is><true><line_sep># Assert that we got a dict returned where the ciphertext and plaintext # keys are equal <assert_stmt>ret['ciphertext']<eq>ret['plaintext']<line_sep># Assert ciphertext is mocked_fernet_key <assert_stmt>ret['ciphertext']<eq>'mocked_fernet_key'<block_end><def_stmt>test_decrypt_datakey_mocked mocker<block_start>mocker.patch('confidant.services.keymanager.settings.USE_ENCRYPTION' <false>)<line_sep>ret=keymanager.decrypt_datakey('mocked_fernet_key')<line_sep># Ensure we get the same value out that we sent in. <assert_stmt>ret<eq>'mocked_fernet_key'<block_end><def_stmt>test_create_datakey_with_encryption mocker<block_start>cd_mock=mocker.patch('confidant.services.keymanager.cryptolib.create_datakey')<line_sep>cmd_mock=mocker.patch('confidant.services.keymanager.cryptolib.create_mock_datakey')<line_sep>mocker.patch('confidant.services.keymanager.settings.USE_ENCRYPTION' <true>)<line_sep>context={'from':'confidant-development' 'to':'confidant-development'}<line_sep>keymanager.create_datakey(context)<line_sep># Assert that create_datakey was called and create_mock_datakey was # not called. <assert_stmt>cd_mock.called<is><true><assert_stmt>cmd_mock.called<is><false><block_end><def_stmt>test_decrypt_datakey_with_encryption mocker<block_start>dd_mock=mocker.patch('confidant.services.keymanager.cryptolib.decrypt_datakey')<line_sep>dmd_mock=mocker.patch('confidant.services.keymanager.cryptolib.decrypt_mock_datakey')<line_sep>mocker.patch('confidant.services.keymanager.settings.USE_ENCRYPTION' <true>)<line_sep>context={'from':'confidant-development' 'to':'confidant-development'}<line_sep>keymanager.decrypt_datakey(b'encrypted' context)<line_sep># Assert that decrypt_datakey was called and decrypt_mock_datakey was # not called. <assert_stmt>dd_mock.called<is><true><assert_stmt>dmd_mock.called<is><false><block_end>
# $Id: f25618704b7ebe12c191cc1a51055c26db731b85 $ """ Gadfly extended database driver. """<line_sep>__docformat__="restructuredtext en"<line_sep># --------------------------------------------------------------------------- # Imports # --------------------------------------------------------------------------- <import_stmt>os<import_stmt>sys<import_from_stmt>grizzled.db.base Cursor DB DBDriver Error Warning TableMetadata IndexMetadata RDBMSMetadata <line_sep># --------------------------------------------------------------------------- # Exports # --------------------------------------------------------------------------- # --------------------------------------------------------------------------- # Classes # --------------------------------------------------------------------------- <class_stmt>GadflyCursor(Cursor)<block_start><def_stmt>__init__ self real_cursor driver<block_start>self.real_cursor=real_cursor<line_sep>self.driver=driver<block_end>@property<def_stmt>rowcount self<block_start>total=len(self.real_cursor.fetchall())<line_sep>self.real_cursor.reset_results()<line_sep><return>total<block_end>@property<def_stmt>description self<block_start><return>self.real_cursor.description<block_end><def_stmt>close self<block_start><try_stmt><block_start>self.real_cursor.close()<block_end><except_stmt><block_start><raise>Error(sys.exc_info()[1])<block_end><block_end><def_stmt>execute self statement parameters=<none><block_start><try_stmt><block_start><if_stmt>parameters<block_start>result=self.real_cursor.execute(statement parameters)<block_end><else_stmt><block_start>result=self.real_cursor.execute(statement)<block_end><return>result<block_end><except_stmt><block_start><raise>Error(sys.exc_info()[1])<block_end><block_end><def_stmt>executemany self statement *parameters<block_start><try_stmt><block_start><return>self.real_cursor.executemany(statement *parameters)<block_end><except_stmt><block_start><raise>Error(sys.exc_info()[1])<block_end><block_end><def_stmt>fetchall self<block_start><try_stmt><block_start><return>self.real_cursor.fetchall()<block_end><except_stmt><block_start><raise>Error(sys.exc_info()[1])<block_end><block_end><def_stmt>fetchone self<block_start><try_stmt><block_start><return>self.real_cursor.fetchone()<block_end><except_stmt><block_start>s=sys.exc_info()[1]<if_stmt>(type(s)<eq>str)<and>(s.startswith('no more'))<block_start><return><none><block_end><raise>Error(s)<block_end><block_end><def_stmt>fetchmany self n<block_start><try_stmt><block_start><return>self.real_cursor.fetchmany(n)<block_end><except_stmt><block_start>s=sys.exc_info()[1]<if_stmt>(type(s)<eq>str)<and>(s.startswith('no more'))<block_start><return><none><block_end><raise>Error(s)<block_end><block_end><block_end><class_stmt>GadflyDB(DB)<block_start><def_stmt>__init__ self db driver<block_start>DB.__init__(self db driver)<line_sep>self.__db=db<line_sep>self.__driver=driver<block_end><def_stmt>cursor self<block_start><return>Cursor(GadflyCursor(self.__db.cursor() self.__driver) self.__driver)<block_end><block_end><class_stmt>GadflyDriver(DBDriver)<block_start>"""DB Driver for Gadfly, a pure Python RDBMS"""<def_stmt>__init__ self<block_start>gadfly=self.get_import()<line_sep>gadfly.error=Exception()<block_end><def_stmt>get_import self<block_start><import_stmt>gadfly<line_sep><return>gadfly<block_end><def_stmt>get_display_name self<block_start><return>"Gadfly"<block_end><def_stmt>connect self host=<none> port=<none> user='' password='' 
database='default'<block_start>gadfly=self.get_import()<line_sep>directory=os.path.dirname(database)<line_sep>database=os.path.basename(database)<if_stmt>database.endswith('.gfd')<block_start>database=database[:-4]<block_end><try_stmt><block_start>g=gadfly.gadfly()<line_sep>g.startup(database directory)<line_sep><return>GadflyDB(g self)<block_end><except_stmt>IOError<block_start><raise>Error(sys.exc_info()[1])<block_end><block_end><def_stmt>get_tables self cursor<block_start>cursor.execute('SELECT table_name FROM __table_names__ '<concat>'WHERE is_view = 0')<line_sep>table_names=[]<for_stmt>row cursor.fetchall()<block_start>table_names<augadd>[row[0]]<block_end><return>table_names<block_end><def_stmt>get_rdbms_metadata self cursor<block_start><import_stmt>gadfly<line_sep>version='.'.join([str(i)<for>i gadfly.version_info])<line_sep><return>RDBMSMetadata('gadfly' 'gadfly' version)<block_end><def_stmt>get_table_metadata self table cursor<block_start>self._ensure_valid_table(cursor table)<line_sep>cursor.execute("SELECT column_name FROM __columns__ "<concat>"WHERE table_name = '%s'"%table.upper())<line_sep>result=[]<line_sep>column_names=[]<for_stmt>row cursor.fetchall()<block_start>result<augadd>[TableMetadata(row[0] 'object' <none> <none> <none> <true>)]<block_end><return>result<block_end><def_stmt>get_index_metadata self table cursor<block_start>self._ensure_valid_table(cursor table)<line_sep>cursor.execute("SELECT is_unique, index_name FROM __indices__ "<concat>"WHERE table_name = '%s'"%table.upper())<line_sep>indexes=[]<line_sep>result=[]<for_stmt>row cursor.fetchall()<block_start>indexes.append(row)<block_end><for_stmt>unique,index_name indexes<block_start>cursor.execute("SELECT column_name FROM __indexcols__ "<concat>"WHERE index_name = '%s'"%index_name)<line_sep>cols=[]<for_stmt>row cursor.fetchall()<block_start>cols.append(row[0])<block_end><if_stmt>unique<block_start>description='UNIQUE'<block_end><else_stmt><block_start>description='NON-UNIQUE'<block_end>result.append(IndexMetadata(index_name cols description))<block_end><return>result<block_end><def_stmt>_is_valid_table self cursor table_name<block_start>tables=self.get_tables(cursor)<line_sep><return>table_name.upper()<in>tables<block_end><block_end>
# # Copyright 2018 Analytics Zoo Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # <import_from_stmt>zoo.orca OrcaContext<import_from_stmt>zoo.common.nncontext init_nncontext<class_stmt>elastic_search<block_start>""" Primary DataFrame-based interface for loading data from Elasticsearch, defining the API used to read data from ES into a Spark DataFrame. """<def_stmt>__init__ self<block_start><pass><block_end>@staticmethod<def_stmt>read_df esConfig esResource schema=<none><block_start>""" Read the data from Elasticsearch into a Spark DataFrame. :param esConfig: Dictionary which represents the configuration for Elasticsearch (e.g. ip, port, etc.). :param esResource: resource (index/type) in Elasticsearch. :param schema: Optional. Defines the schema of the Spark DataFrame. If each column in ES holds a single value, the schema does not need to be set. :return: Spark DataFrame. Each row represents a document in ES. """<line_sep>sc=init_nncontext()<line_sep>spark=OrcaContext.get_spark_session()<line_sep>reader=spark.read.format("org.elasticsearch.spark.sql")<for_stmt>key esConfig<block_start>reader.option(key esConfig[key])<block_end><if_stmt>schema<block_start>reader.schema(schema)<block_end>df=reader.load(esResource)<line_sep><return>df<block_end>@staticmethod<def_stmt>flatten_df df<block_start>fields=elastic_search.flatten(df.schema)<line_sep>flatten_df=df.select(fields)<line_sep><return>flatten_df<block_end>@staticmethod<def_stmt>flatten schema prefix=<none><block_start><import_from_stmt>pyspark.sql.types StructType<line_sep>fields=[]<for_stmt>field schema.fields<block_start>name=prefix+'.'+field.name<if>prefix<else>field.name<line_sep>dtype=field.dataType<if_stmt>isinstance(dtype StructType)<block_start>fields<augadd>elastic_search.flatten(dtype prefix=name)<block_end><else_stmt><block_start>fields.append(name)<block_end><block_end><return>fields<block_end>@staticmethod<def_stmt>write_df esConfig esResource df<block_start>""" Write the Spark DataFrame to Elasticsearch. :param esConfig: Dictionary which represents the configuration for Elasticsearch (e.g. ip, port, etc.). :param esResource: resource (index/type) in Elasticsearch. :param df: Spark DataFrame that will be saved. """<line_sep>wdf=df.write.format("org.elasticsearch.spark.sql").option("es.resource" esResource)<for_stmt>key esConfig<block_start>wdf.option(key esConfig[key])<block_end>wdf.save()<block_end>@staticmethod<def_stmt>read_rdd esConfig esResource=<none> filter=<none> esQuery=<none><block_start>""" Read the data from Elasticsearch into a Spark RDD. :param esConfig: Dictionary which represents the configuration for Elasticsearch (e.g. ip, port, es query, etc.). :param esResource: Optional. resource (index/type) in Elasticsearch. It can also be set in esConfig. :param filter: Optional. Request only the given fields from Elasticsearch. :param esQuery: Optional. 
es query :return: Spark RDD """<line_sep>sc=init_nncontext()<if_stmt>"es.resource"<not><in>esConfig<block_start>esConfig["es.resource"]=esResource<block_end><if_stmt>filter<is><not><none><block_start>esConfig["es.read.source.filter"]=filter<block_end><if_stmt>esQuery<is><not><none><block_start>esConfig["es.query"]=esQuery<block_end>rdd=sc.newAPIHadoopRDD("org.elasticsearch.hadoop.mr.EsInputFormat" "org.apache.hadoop.io.NullWritable" "org.elasticsearch.hadoop.mr.LinkedMapWritable" conf=esConfig)<line_sep><return>rdd<block_end><block_end>
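# A short usage sketch for the helpers above. The host, port, index name and
# schema are placeholders for illustration, not values required by the API.
from pyspark.sql.types import StructType, StructField, StringType, IntegerType

es_config = {
    "es.nodes": "localhost",   # assumed Elasticsearch host
    "es.port": "9200",         # assumed Elasticsearch port
}
schema = StructType([
    StructField("user", StringType(), True),
    StructField("age", IntegerType(), True),
])

# Read an index into a Spark DataFrame, flatten any nested struct columns,
# then write the result back to another (hypothetical) index.
df = elastic_search.read_df(es_config, "people/_doc", schema)
flat_df = elastic_search.flatten_df(df)
elastic_search.write_df(es_config, "people_copy/_doc", flat_df)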
""" Getting the data ================ In this section, we will dicuss how to get functional data to use in scikit-fda. We will briefly describe the :class:`~skfda.representation.grid.FDataGrid` class, which is the type that scikit-fda uses for storing and working with functional data in discretized form. We will discuss also how to import functional data from several sources and show how to fetch and load existing datasets popular in the :term:`FDA` literature. .. Disable isort isort:skip_file """<line_sep># Author: <NAME> # License: MIT # # sphinx_gallery_thumbnail_number = 6 ############################################################################## # The FDataGrid class # ------------------- # # In order to use scikit-fda, first we need functional data to analyze. # A common case is to have each functional observation measured at the same # points. # This kind of functional data is easily representable in scikit-fda using # the :class:`~skfda.representation.grid.FDataGrid` class. # The :class:`~skfda.representation.grid.FDataGrid` has two important # attributes: ``data_matrix`` and ``grid_points``. # # The attribute ``grid_points`` is a tuple with the same length as the # number of domain dimensions (that is, one for curves, two for surfaces...). # Each of its elements is a 1D numpy :class:`~numpy.ndarray` containing the # grid points for that particular dimension, # .. math:: # ((t_1, \ldots, t_{M_i}))_{i=1}^p, # where :math:`M_i` is the number of measurement points for each "argument" # or domain coordinate of the function :math:`i` and :math:`p` is the domain # dimension. # # The attribute ``data_matrix`` is a # numpy :class:`~numpy.ndarray` containing the measured values of the # functions in the grid spanned by the grid points. For functions # :math:`\{x_i: \mathbb{R}^p \to \mathbb{R}^q\}_{i=1}^N` this is a tensor # with dimensions :math:`N \times M_1 \times \ldots \times M_p \times q`. ############################################################################## # In order to create a :class:`~skfda.representation.grid.FDataGrid`, these # attributes may be provided. The attributes are converted to # :class:`~numpy.ndarray` when necessary. # # .. note:: # # The grid points can be omitted, # and in that case their number is inferred from the dimensions of # ``data_matrix`` and they are automatically assigned as equispaced points # in the unitary cube in the domain set. # # In the common case of functions with domain dimension of 1, the list of # grid points can be passed directly as ``grid_points``. # # If the codomain dimension is 1, the last dimension of ``data_matrix`` # can be dropped. ############################################################################## # The following example shows the creation of a # :class:`~skfda.representation.grid.FDataGrid` with two functions (curves) # :math:`\{x_i: \mathbb{R} \to \mathbb{R}\}, i=1,2` measured at the same # (non-equispaced) points. 
<import_stmt>skfda<import_stmt>matplotlib.pyplot<as>plt<line_sep>grid_points=[0 0.2 0.5 0.9 1]# Grid points of the curves data_matrix=[[0 0.2 0.5 0.9 1] # First observation [0 0.04 0.25 0.81 1] # Second observation ]<line_sep>fd=skfda.FDataGrid(data_matrix=data_matrix grid_points=grid_points )<line_sep>fd.plot()<line_sep>plt.show()<line_sep>############################################################################## # Advanced example # ^^^^^^^^^^^^^^^^ # # In order to better understand the FDataGrid structure, you can consider the # following example, in which a :class:`~skfda.representation.grid.FDataGrid` # object is created, containing just one function (vector-valued surface) # :math:`x: \mathbb{R}^2 \to \mathbb{R}^4`. grid_points_surface=[[0.2 0.5 0.7] # Measurement points in first domain dimension [0 1.5] # Measurement points in second domain dimension ]<line_sep>data_matrix_surface=[# First observation [# 0.2 [# Value at (0.2, 0) [1 2 3 4.1] # Value at (0.2, 1.5) [0 1 -1.3 2] ] # 0.5 [# Value at (0.5, 0) [-2 0 5.5 7] # Value at (0.5, 1.5) [2 1.1 -1 -2] ] # 0.7 [# Value at (0.7, 0) [0 0 1.1 1] # Value at (0.7, 1.5) [-3 5 -0.5 -2] ] ] # This example has only one observation. Next observations would be # added here. ]<line_sep>fd=skfda.FDataGrid(data_matrix=data_matrix_surface grid_points=grid_points_surface )<line_sep>fd.plot()<line_sep>plt.show()<line_sep>############################################################################## # Importing data # -------------- # # Usually one does not construct manually the functions, but instead uses # measurements already formatted in a common format, such as comma-separated # values (CSV), attribute-relation file format (ARFF) or Matlab and R formats. # # If your data is in one of these formats, you can import it into a numpy # array using the IO functions available in # `Numpy <https://numpy.org/devdocs/reference/routines.io.html>`_ (for simple # text-based or binary formats, such as CSV) or in # `Scipy <https://docs.scipy.org/doc/scipy/reference/io.html>`_ (for Matlab, # Fortran or ARFF files). For importing data in the R format one can also # use the package `RData <https://rdata.readthedocs.io>`_ with is already a # dependency of scikit-fda, as it is used to load the example datasets. ############################################################################## # Once your data has been introduced as a :class:`~numpy.ndarray` instance, # you will need to give it the proper dimensions and use it to instantiate # a functional data object. ############################################################################## # .. note:: # # :class:`Pandas DataFrames <pandas.DataFrame>` are also popular as # datasets containers in the Python scientific ecosystem. If you have # data in a Pandas DataFrame, you can extract its content as a Numpy # array using the method :meth:`~pandas.DataFrame.to_numpy` of the # DataFrame. ############################################################################## # As an example, we will load the # :func:`digits dataset <sklearn.datasets.load_digits>` of scikit-learn, which # is a preprocessed subset of the MNIST dataset, containing digit images. The # data is already a numpy array. As the data has been flattened into a 1D # vector of pixels, we need to reshape the arrays to their original 8x8 shape. # Then this array can be used to construct the digits as surfaces. 
<import_from_stmt>sklearn.datasets load_digits<line_sep>X,y=load_digits(return_X_y=<true>)<line_sep>X=X.reshape(-1 8 8)<line_sep>fd=skfda.FDataGrid(X)<line_sep># Plot the first 2 observations fd[0].plot()<line_sep>fd[1].plot()<line_sep>plt.show()<line_sep>############################################################################## # Common datasets # --------------- # # scikit-fda can download and import for you several of the most popular # datasets in the :term:`FDA` literature, such as the Berkeley Growth # dataset (function :func:`~skfda.datasets.fetch_growth`) or the Canadian # Weather dataset (function :func:`~skfda.datasets.fetch_weather`). These # datasets are often useful as benchmarks, in order to compare results # between different algorithms, or simply as examples to use in teaching or # research. X,y=skfda.datasets.fetch_growth(return_X_y=<true>)<line_sep>X.plot(group=y)<line_sep>plt.show()<line_sep>############################################################################## # Datasets from CRAN # ^^^^^^^^^^^^^^^^^^ # # If you want to work with a dataset for which no fetching function exist, and # you know that is available inside a R package in the CRAN repository, you # can try using the function :func:`~skfda.datasets.fetch_cran`. This function # will load the package, fetch the dataset and convert it to Python objects # using the packages # `scikit-datasets <https://github.com/daviddiazvico/scikit-datasets>`_ and # `RData <https://rdata.readthedocs.io>`_. As datasets in CRAN follow no # particular structure, you will need to know how it is structured internally # in order to use it properly. ############################################################################## # .. note:: # # Functional data objects from some packages, such as # `fda.usc <https://cran.r-project.org/web/packages/fda.usc/index.html>`_ # are automatically recognized as such and converted to # :class:`~skfda.representation.grid.FDataGrid` instances. This # behaviour can be disabled or customized to work with more packages. data=skfda.datasets.fetch_cran("MCO" "fda.usc")<line_sep>data["MCO"]["intact"].plot()<line_sep>plt.show()<line_sep>############################################################################## # Datasets from the UEA & UCR Time Series Classification Repository # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ # # The `UEA & UCR Time Series Classification Repository # <http://www.timeseriesclassification.com/>`_ is a popular repository # for classification problems involving time series data. The datasets used # can be considered also as functional observations, where the functions # involved have domain dimension of 1, and the grid points are # equispaced. Thus, they have also been used in the :term:`FDA` literature. # The original UCR datasets are univariate time series, while the new UEA # datasets incorporate also vector-valued data. # In scikit-fda, the function :func:`~skfda.datasets.fetch_ucr` can be used # to obtain both kinds of datasets as # :class:`~skfda.representation.grid.FDataGrid` instances. 
# Load ArrowHead dataset from UCR dataset=skfda.datasets.fetch_ucr("ArrowHead")<line_sep>dataset["data"].plot()<line_sep>plt.show()<line_sep>############################################################################## # Load BasicMotions dataset from UEA dataset=skfda.datasets.fetch_ucr("BasicMotions")<line_sep>dataset["data"].plot()<line_sep>plt.show()<line_sep>############################################################################## # Synthetic data # -------------- # # Sometimes it is not enough to have real-world data at your disposal. # Perhaps the messy nature of real-world data makes difficult to detect when # a particular algorithm has a strange behaviour. Perhaps you want to see how # it performs under a simplified model. Maybe you want to see what happens # when your data has particular characteristics, for which no dataset is # available. Or maybe you only want to illustrate a concept without having # to introduce a particular set of data. # # In those cases, the ability to use generated data is desirable. To aid this # use case, scikit-learn provides several functions that generate data # according to some model. These functions are in the # :doc:`datasets </modules/datasets>` module and have the prefix ``make_``. # Maybe the most useful of those are the functions # :func:`skfda.datasets.make_gaussian_process` and # :func:`skfda.datasets.make_gaussian` which can be used to generate Gaussian # processes and Gaussian fields with different covariance functions. <import_stmt>numpy<as>np<line_sep>cov=skfda.misc.covariances.Exponential(length_scale=0.1)<line_sep>fd=skfda.datasets.make_gaussian_process(start=0 stop=4 n_samples=5 n_features=100 mean=<lambda>t:np.power(t 2) cov=cov )<line_sep>fd.plot()<line_sep>plt.show()<line_sep>############################################################################## # In order to know all the available functionalities to load existing and # synthetic datasets it is recommended to look at the documentation of the # :doc:`datasets </modules/datasets>` module.
<import_stmt>tensorflow<as>tf<import_stmt>sys<import_stmt>os<line_sep>BASE_DIR=os.path.dirname(os.path.abspath(__file__))<line_sep>sys.path.append(BASE_DIR)<line_sep>sys.path.append(os.path.join(BASE_DIR '../utils'))<import_stmt>sph3gcn_util<as>s3g_util<def_stmt>normalize_xyz points<block_start>points<augsub>tf.reduce_mean(points axis=1 keepdims=<true>)<line_sep>scale=tf.reduce_max(tf.reduce_sum(tf.square(points) axis=-1 keepdims=<true>) axis=1 keepdims=<true>)<line_sep>scale=tf.sqrt(scale name='normalize')<line_sep>points<augdiv>scale<line_sep><return>points<block_end><def_stmt>_separable_conv3d_block net list_channels bin_size nn_index nn_count filt_idx name depth_multiplier=<none> weight_decay=<none> reuse=<none> with_bn=<true> with_bias=<true> is_training=<none><block_start><for_stmt>l,num_out_channels enumerate(list_channels)<block_start>scope=name+'_'+str(l+1)# number from 1, not 0 net=s3g_util.separable_conv3d(net num_out_channels bin_size depth_multiplier[l] scope nn_index nn_count filt_idx weight_decay=weight_decay with_bn=with_bn with_bias=with_bias reuse=reuse is_training=is_training)<block_end><return>net<block_end><def_stmt>get_model points is_training config=<none><block_start>""" Classification Network, input is BxNx3, output Bx40 """<line_sep>batch_size=points.get_shape()[0].value<line_sep>num_point=points.get_shape()[1].value<line_sep>end_points={}<assert_stmt>(num_point<eq>config.num_input)<if_stmt>config.normalize<block_start>points=normalize_xyz(points)<block_end>xyz=points<line_sep>query=tf.reduce_mean(xyz axis=1 keepdims=<true>)# the global viewing point reuse=<none><line_sep>net=s3g_util.pointwise_conv3d(xyz config.mlp 'mlp1' weight_decay=config.weight_decay with_bn=config.with_bn with_bias=config.with_bias reuse=reuse is_training=is_training)<line_sep>global_feat=[]<for_stmt>l range(len(config.radius))<block_start><if_stmt>config.use_raw<block_start>net=tf.concat([net xyz] axis=-1)<block_end># the neighbor information is the same within xyz_pose_1 and xyz_pose_2. 
# Therefore, we compute it with xyz_pose_1, and apply it to xyz_pose_2 as well intra_idx,intra_cnt,intra_dst,indices=s3g_util.build_graph(xyz config.radius[l] config.nn_uplimit[l] config.num_sample[l] sample_method=config.sample)<line_sep>filt_idx=s3g_util.spherical_kernel(xyz xyz intra_idx intra_cnt intra_dst config.radius[l] kernel=config.kernel)<line_sep>net=_separable_conv3d_block(net config.channels[l] config.binSize intra_idx intra_cnt filt_idx 'conv'+str(l+1) config.multiplier[l] reuse=reuse weight_decay=config.weight_decay with_bn=config.with_bn with_bias=config.with_bias is_training=is_training)<if_stmt>config.num_sample[l]<g>1# ==================================gather_nd==================================== <block_start>xyz=tf.gather_nd(xyz indices)<line_sep>inter_idx=tf.gather_nd(intra_idx indices)<line_sep>inter_cnt=tf.gather_nd(intra_cnt indices)<line_sep>inter_dst=tf.gather_nd(intra_dst indices)<line_sep># =====================================END======================================= net=s3g_util.pool3d(net inter_idx inter_cnt method=config.pool_method scope='pool'+str(l+1))<block_end>global_maxpool=tf.reduce_max(net axis=1 keepdims=<true>)<line_sep>global_feat.append(global_maxpool)<block_end># =============================global feature extraction in the final layer============================= global_radius=100.0# global_radius(>=2.0) should connect all points to each point in the cloud nn_idx,nn_cnt,nn_dst=s3g_util.build_global_graph(xyz query global_radius)<line_sep>filt_idx=s3g_util.spherical_kernel(xyz query nn_idx nn_cnt nn_dst global_radius kernel=[8 2 1])<line_sep>net=s3g_util.separable_conv3d(net config.global_channels 17 config.global_multiplier 'global_conv' nn_idx nn_cnt filt_idx reuse=reuse weight_decay=config.weight_decay with_bn=config.with_bn with_bias=config.with_bias is_training=is_training)<line_sep>global_feat.append(net)<line_sep>net=tf.concat(global_feat axis=2)<line_sep># ===================================================================================================== # MLP on global point cloud vector net=tf.reshape(net [batch_size -1])<line_sep>net=s3g_util.fully_connected(net 512 scope='fc1' weight_decay=config.weight_decay with_bn=config.with_bn with_bias=config.with_bias is_training=is_training)<line_sep>net=tf.layers.dropout(net 0.5 training=is_training name='fc1_dp')<line_sep>net=s3g_util.fully_connected(net 256 scope='fc2' weight_decay=config.weight_decay with_bn=config.with_bn with_bias=config.with_bias is_training=is_training)<line_sep>net=tf.layers.dropout(net 0.5 training=is_training name='fc2_dp')<line_sep>net=s3g_util.fully_connected(net config.num_cls scope='logits' with_bn=<false> with_bias=config.with_bias activation_fn=<none> is_training=is_training)<line_sep><return>net end_points<block_end><def_stmt>get_loss pred label end_points<block_start>""" pred: B*NUM_CLASSES, label: B, """<line_sep>loss=tf.nn.sparse_softmax_cross_entropy_with_logits(logits=pred labels=label)<line_sep>classify_loss=tf.reduce_mean(loss)<line_sep>tf.summary.scalar('classify loss' classify_loss)<line_sep>tf.add_to_collection('losses' classify_loss)<line_sep><return>classify_loss<block_end>
# uncompyle6 version 3.7.2 # Python bytecode 3.7 (3394) # Decompiled from: Python 3.7.3 (default, Apr 24 2019, 15:29:51) [MSC v.1915 64 bit (AMD64)] # Embedded file name: extract__one_file_exe__pyinstaller\_test_file.py __author__='ipetrash'<def_stmt>say <block_start>print('Hello World!')<block_end><if_stmt>__name__<eq>'__main__'<block_start>say()<block_end>
# Copyright (c) OpenMMLab. All rights reserved. <import_from_stmt>typing Dict List Optional Tuple Union<import_stmt>mmcv<import_stmt>numpy<as>np<import_stmt>torch<import_stmt>torch.nn<as>nn<import_from_stmt>mmcls.core.visualization imshow_infos<import_from_stmt>mmcls.datasets.pipelines Compose<import_from_stmt>mmcls.models build_classifier<import_from_stmt>mmcv.parallel collate scatter<import_from_stmt>mmcv.runner load_checkpoint<import_from_stmt>mmfewshot.classification.models BaseMetricClassifier<def_stmt>init_classifier config:Union[str mmcv.Config] checkpoint:Optional[str]=<none> device:str='cuda:0' options:Optional[Dict]=<none><arrow>nn.Module<block_start>"""Prepare a few shot classifier from config file. Args: config (str or :obj:`mmcv.Config`): Config file path or the config object. checkpoint (str | None): Checkpoint path. If left as None, the model will not load any weights. Default: None. device (str): Runtime device. Default: 'cuda:0'. options (dict | None): Options to override some settings in the used config. Default: None. Returns: nn.Module: The constructed classifier. """<if_stmt>isinstance(config str)<block_start>config=mmcv.Config.fromfile(config)<block_end><elif_stmt><not>isinstance(config mmcv.Config)<block_start><raise>TypeError('config must be a filename or Config object, '<concat>f'but got {type(config)}')<block_end><if_stmt>options<is><not><none><block_start>config.merge_from_dict(options)<block_end>model=build_classifier(config.model)<if_stmt>checkpoint<is><not><none><block_start>map_loc='cpu'<if>device<eq>'cpu'<else><none><line_sep>load_checkpoint(model checkpoint map_location=map_loc)<block_end># save the config in the model for convenience in later use model.cfg=config<line_sep>model.to(device)<line_sep>model.eval()<line_sep><return>model<block_end><def_stmt>process_support_images model:nn.Module support_imgs:List[str] support_labels:List[str]<arrow><none><block_start>"""Process support images. Args: model (nn.Module): Classifier model. support_imgs (list[str]): The image filenames. support_labels (list[str]): The class names of support images. """<line_sep>cfg=model.cfg<line_sep>device=next(model.parameters()).device# model device # build the data pipeline pipeline=cfg.data.test.dataset.pipeline<if_stmt>pipeline[0]['type']<ne>'LoadImageFromFile'<block_start>pipeline[0]['type']='LoadImageFromFile'<block_end>test_pipeline=Compose(pipeline)<line_sep>model.CLASSES=list(set(support_labels))<line_sep>cat_to_id={cat:i<for>i,cat enumerate(model.CLASSES)}<line_sep>model.before_forward_support()<line_sep># forward support images <with_stmt>torch.no_grad()<block_start><for_stmt>img,label zip(support_imgs support_labels)<block_start>data=dict(img_info=dict(filename=img) gt_label=np.array(cat_to_id[label] dtype=np.int64) img_prefix=<none>)<line_sep>data=test_pipeline(data)<line_sep>data=collate([data] samples_per_gpu=1)<if_stmt>next(model.parameters()).is_cuda# scatter to specified GPU <block_start>data=scatter(data [device])[0]<block_end>model(mode='support' **data)<block_end><block_end>model.before_forward_query()<block_end><def_stmt>inference_classifier model:nn.Module query_img:str<arrow>Dict<block_start>"""Inference single image with the classifier. Args: model (nn.Module): The loaded classifier. query_img (str): The image filename. Returns: dict: The classification results that contains `pred_score` of each class. 
"""<line_sep># only support methods without fine-tuning <if_stmt>isinstance(model BaseMetricClassifier)<block_start>cfg=model.cfg<line_sep>device=next(model.parameters()).device# model device # build the data pipeline pipeline=cfg.data.test.dataset.pipeline<if_stmt>pipeline[0]['type']<ne>'LoadImageFromFile'<block_start>pipeline[0]['type']='LoadImageFromFile'<block_end>test_pipeline=Compose(pipeline)<line_sep>data=dict(img_info=dict(filename=query_img) gt_label=np.array(-1 dtype=np.int64) img_prefix=<none>)<line_sep>data=test_pipeline(data)<line_sep>data=collate([data] samples_per_gpu=1)<if_stmt>next(model.parameters()).is_cuda# scatter to specified GPU <block_start>data=scatter(data [device])[0]<block_end># inference image <with_stmt>torch.no_grad()<block_start>scores=model(mode='query' img=data['img'])[0]<line_sep>result={model.CLASSES[i]:float(scores[i])<for>i range(scores.shape[0])}<block_end><return>result<block_end><else_stmt><block_start><raise>TypeError('currently, inference only support metric based methods')<block_end><block_end><def_stmt>show_result_pyplot img:str result:Dict fig_size:Tuple[int]=(15 10) wait_time:int=0 out_file:Optional[str]=<none><arrow>np.ndarray<block_start>"""Visualize the classification results on the image. Args: img (str): Image filename. result (dict): The classification result. fig_size (tuple): Figure size of the pyplot figure. Default: (15, 10). wait_time (int): How many seconds to display the image. Default: 0. out_file (str | None): Default: None Returns: np.ndarray: pyplot figure. """<line_sep>img=mmcv.imread(img)<line_sep>img=img.copy()<line_sep>img=imshow_infos(img result text_color='white' font_size=25 row_width=20 win_name='' show=<true> fig_size=fig_size wait_time=wait_time out_file=out_file)<line_sep><return>img<block_end>
""" Support Legacy API password auth provider. It will be removed when auth system production ready """<import_from_future_stmt> annotations<import_from_stmt>collections.abc Mapping<import_stmt>hmac<import_from_stmt>typing Any cast<import_stmt>voluptuous<as>vol<import_from_stmt>homeassistant.core callback<import_from_stmt>homeassistant.data_entry_flow FlowResult<import_from_stmt>homeassistant.exceptions HomeAssistantError<import_stmt>homeassistant.helpers.config_validation<as>cv<import_from_stmt>. AUTH_PROVIDER_SCHEMA AUTH_PROVIDERS AuthProvider LoginFlow<import_from_stmt>..models Credentials UserMeta<line_sep>AUTH_PROVIDER_TYPE="legacy_api_password"<line_sep>CONF_API_PASSWORD="<PASSWORD>"<line_sep>CONFIG_SCHEMA=AUTH_PROVIDER_SCHEMA.extend({vol.Required(CONF_API_PASSWORD):cv.string} extra=vol.PREVENT_EXTRA)<line_sep>LEGACY_USER_NAME="Legacy API password user"<class_stmt>InvalidAuthError(HomeAssistantError)<block_start>"""Raised when submitting invalid authentication."""<block_end>@AUTH_PROVIDERS.register(AUTH_PROVIDER_TYPE)<class_stmt>LegacyApiPasswordAuthProvider(AuthProvider)<block_start>"""An auth provider support legacy api_password."""<line_sep>DEFAULT_TITLE="Legacy API Password"<line_sep>@property<def_stmt>api_password self<arrow>str<block_start>"""Return api_password."""<line_sep><return>str(self.config[CONF_API_PASSWORD])<block_end><async_keyword><def_stmt>async_login_flow self context:dict[str Any]|<none><arrow>LoginFlow<block_start>"""Return a flow to login."""<line_sep><return>LegacyLoginFlow(self)<block_end>@callback<def_stmt>async_validate_login self password:str<arrow><none><block_start>"""Validate password."""<line_sep>api_password=str(self.config[CONF_API_PASSWORD])<if_stmt><not>hmac.compare_digest(api_password.encode("utf-8") password.encode("utf-8"))<block_start><raise>InvalidAuthError<block_end><block_end><async_keyword><def_stmt>async_get_or_create_credentials self flow_result:Mapping[str str]<arrow>Credentials<block_start>"""Return credentials for this login."""<line_sep>credentials=<await>self.async_credentials()<if_stmt>credentials<block_start><return>credentials[0]<block_end><return>self.async_create_credentials({})<block_end><async_keyword><def_stmt>async_user_meta_for_credentials self credentials:Credentials<arrow>UserMeta<block_start>""" Return info for the user. Will be used to populate info when creating a new user. """<line_sep><return>UserMeta(name=LEGACY_USER_NAME is_active=<true>)<block_end><block_end><class_stmt>LegacyLoginFlow(LoginFlow)<block_start>"""Handler for the login flow."""<async_keyword><def_stmt>async_step_init self user_input:dict[str str]|<none>=<none><arrow>FlowResult<block_start>"""Handle the step of the form."""<line_sep>errors={}<if_stmt>user_input<is><not><none><block_start><try_stmt><block_start>cast(LegacyApiPasswordAuthProvider self._auth_provider).async_validate_login(user_input["password"])<block_end><except_stmt>InvalidAuthError<block_start>errors["base"]="invalid_auth"<block_end><if_stmt><not>errors<block_start><return><await>self.async_finish({})<block_end><block_end><return>self.async_show_form(step_id="init" data_schema=vol.Schema({vol.Required("password"):str}) errors=errors )<block_end><block_end>
<import_from_future_stmt> print_function unicode_literals absolute_import division<import_stmt>numpy<as>np<import_from_stmt>scipy.ndimage.interpolation zoom<import_from_stmt>.care_standard CARE<import_from_stmt>..data PercentileNormalizer PadAndCropResizer<import_from_stmt>..utils _raise axes_dict<class_stmt>UpsamplingCARE(CARE)<block_start>"""CARE network for combined image restoration and upsampling of one dimension. Extends :class:`csbdeep.models.CARE` by replacing prediction (:func:`predict`, :func:`predict_probabilistic`) to first upsample Z before image restoration. """<def_stmt>predict self img axes factor normalizer=PercentileNormalizer() resizer=PadAndCropResizer() n_tiles=<none><block_start>"""Apply neural network to raw image with low-resolution Z axis. See :func:`CARE.predict` for documentation. Parameters ---------- factor : float Upsampling factor for Z axis. It is important that this is chosen in correspondence to the subsampling factor used during training data generation. """<line_sep>img=self._upsample(img axes factor)<line_sep><return>super(UpsamplingCARE self).predict(img axes normalizer resizer n_tiles)<block_end><def_stmt>predict_probabilistic self img axes factor normalizer=PercentileNormalizer() resizer=PadAndCropResizer() n_tiles=<none><block_start>"""Apply neural network to raw image with low-resolution Z axis for probabilistic prediction. See :func:`CARE.predict_probabilistic` for documentation. Parameters ---------- factor : float Upsampling factor for Z axis. It is important that this is chosen in correspondence to the subsampling factor used during training data generation. """<line_sep>img=self._upsample(img axes factor)<line_sep><return>super(UpsamplingCARE self).predict_probabilistic(img axes normalizer resizer n_tiles)<block_end>@staticmethod<def_stmt>_upsample img axes factor axis='Z'<block_start>factors=np.ones(img.ndim)<line_sep>factors[axes_dict(axes)[axis]]=factor<line_sep><return>zoom(img factors order=1)<block_end><block_end>
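# A small sketch of the axis-selective upsampling used above: a (Z, Y, X)
# stack is linearly interpolated only along Z by the chosen factor. The
# shapes and the factor are illustrative only.
import numpy as np

stack = np.random.rand(16, 64, 64)                     # coarse Z sampling
upsampled = UpsamplingCARE._upsample(stack, 'ZYX', 4)  # -> shape (64, 64, 64)
print(stack.shape, '->', upsampled.shape)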
# Copyright (c) Microsoft Corporation. # Licensed under the MIT license. <import_stmt>torch<import_from_stmt>.stage0 Stage0<import_from_stmt>.stage1 Stage1<import_from_stmt>.stage2 Stage2<import_from_stmt>.stage3 Stage3<import_from_stmt>.stage4 Stage4<import_from_stmt>.stage5 Stage5<import_from_stmt>.stage6 Stage6<import_from_stmt>.stage7 Stage7<class_stmt>GNMT16Partitioned(torch.nn.Module)<block_start><def_stmt>__init__ self<block_start>super(GNMT16Partitioned self).__init__()<line_sep>self.stage0=Stage0()<line_sep>self.stage1=Stage1()<line_sep>self.stage2=Stage2()<line_sep>self.stage3=Stage3()<line_sep>self.stage4=Stage4()<line_sep>self.stage5=Stage5()<line_sep>self.stage6=Stage6()<line_sep>self.stage7=Stage7()<block_end><def_stmt>forward self input0 input1 input2<block_start>(out1 out0)=self.stage0(input0 input1)<line_sep>(out4 out5)=self.stage1(out1 out0)<line_sep>(out13 out14)=self.stage2(input1 out4 out5 input2)<line_sep>(out13_1 out15 out16)=self.stage3(out13 out14)<line_sep>(out13_2 out18 out19)=self.stage4(out13_1 out15 out16)<line_sep>(out13_3 out20 out21)=self.stage5(out13_2 out18 out19)<line_sep>out23=self.stage6(out13_3 out20 out21)<line_sep>out24=self.stage7(out23)<line_sep><return>out24<block_end><block_end>
<import_stmt>argparse<import_stmt>os<import_stmt>mmcv<import_stmt>cv2<import_stmt>numpy<as>np<import_from_stmt>tqdm tqdm<import_stmt>matplotlib.pyplot<as>plt<import_from_stmt>mmdet.datasets build_dataloader build_dataset<import_from_stmt>mmdet.utils.general_utils mkdir<import_from_stmt>tools.condlanenet.common COLORS<def_stmt>parse_args <block_start>parser=argparse.ArgumentParser(description='MMDet test detector')<line_sep>parser.add_argument('--config' required=<true> help='test config file path')<line_sep>parser.add_argument('--show' required=<true> help='show results')<line_sep>parser.add_argument('--max_show_num' type=int default=50 help='show results')<line_sep>args=parser.parse_args()<line_sep><return>args<block_end><def_stmt>mask_to_rgb mask<block_start>h,w=mask.shape<line_sep>rgb=np.zeros([h w 3] dtype=np.uint8)<for_stmt>i range(np.max(mask)+1)<block_start>rgb[mask<eq>i]=COLORS[i]<block_end><return>rgb<block_end><def_stmt>vis_one data# image <block_start>img=data['img'].data[0].detach().cpu().numpy()[0 : : :]<line_sep>norm_cfg=data['img_metas'].data[0][0]['img_norm_cfg']<line_sep>downscale=data['img_metas'].data[0][0]['down_scale']<line_sep>hm_downscale=data['img_metas'].data[0][0]['hm_down_scale']<line_sep>img=img.transpose(1 2 0)<line_sep>img=(img<times>norm_cfg['std'])+norm_cfg['mean']<line_sep>img=img.astype(np.uint8)<line_sep># hm gt_hm=data['gt_hm'].data[0].detach().cpu().numpy()[0 : : :]<times>255<line_sep>vis_hm=np.zeros_like(gt_hm[0])<for_stmt>i range(gt_hm.shape[0])<block_start>vis_hm<augadd>gt_hm[i : :]<block_end>gt_masks=data['img_metas'].data[0][0]['gt_masks']<line_sep>vis_img=np.zeros(img.shape np.uint8)<line_sep>vis_img[:]=img[:]<for_stmt>i,gt_info enumerate(gt_masks)<block_start>points=gt_info['points']<line_sep>mask_infos=gt_info['gt_masks']<for_stmt>color_idx,mask_info enumerate(mask_infos)<block_start>row=mask_info['row']<line_sep>row_range=mask_info['range']<for_stmt>coord_y,(coord_x valid) enumerate(zip(row row_range[0]))<block_start><if_stmt>valid<block_start>coord_y<augmul>downscale<line_sep>coord_x<augmul>downscale<line_sep>coord_x=int(coord_x)<line_sep>coord_y=int(coord_y)<line_sep>cv2.circle(vis_img (coord_x coord_y) 3 color=COLORS[color_idx+1] thickness=-1)<block_end><block_end>points=mask_info['points']<for_stmt>p points<block_start>cv2.circle(vis_img (hm_downscale<times>p[0] hm_downscale<times>p[1]) 3 COLORS[1] -1)<line_sep>cv2.circle(vis_img (hm_downscale<times>p[0] hm_downscale<times>p[1]) 1 (0 0 0) -1)<block_end>img=vis_img<block_end><block_end><return>img vis_hm<block_end><def_stmt>main <block_start>args=parse_args()<line_sep>mkdir(args.show)<line_sep># build the dataloader cfg=mmcv.Config.fromfile(args.config)<line_sep>dataset=build_dataset(cfg.data.train)<line_sep>data_loader=build_dataloader(dataset samples_per_gpu=1 workers_per_gpu=cfg.data['workers_per_gpu'] dist=<false> shuffle=<false>)<for_stmt>index,data tqdm(enumerate(data_loader))<block_start>file_name=data['img_metas'].data[0][0]['filename']<line_sep>save_name=os.path.splitext(os.path.basename(file_name))[0]<line_sep>print(index file_name)<line_sep>vis_img,vis_hm=vis_one(data)<line_sep>vis_img_dir=os.path.join(args.show '{}_img.png'.format(save_name))<line_sep>vis_hm_dir=os.path.join(args.show '{}_hm.png'.format(save_name))<line_sep>cv2.imwrite(vis_img_dir vis_img)<line_sep>cv2.imwrite(vis_hm_dir vis_hm)<if_stmt>index<ge>args.max_show_num<block_start><break><block_end><block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>main()<block_end>
<import_stmt>torch<import_from_stmt>torch nn optim<import_from_stmt>torch.nn functional<as>F<import_from_stmt>torch_optimizer DiffGrad AdamP RAdam<class_stmt>ReplaceGrad(torch.autograd.Function)<block_start>@staticmethod<def_stmt>forward ctx x_forward x_backward<block_start>ctx.shape=x_backward.shape<line_sep><return>x_forward<block_end>@staticmethod<def_stmt>backward ctx grad_in<block_start><return><none> grad_in.sum_to_size(ctx.shape)<block_end><block_end>replace_grad=ReplaceGrad.apply<class_stmt>ClampWithGrad(torch.autograd.Function)<block_start>@staticmethod<def_stmt>forward ctx input min max<block_start>ctx.min=min<line_sep>ctx.max=max<line_sep>ctx.save_for_backward(input)<line_sep><return>input.clamp(min max)<block_end>@staticmethod<def_stmt>backward ctx grad_in<block_start>input,=ctx.saved_tensors<line_sep><return>grad_in<times>(grad_in<times>(input-input.clamp(ctx.min ctx.max))<ge>0) <none> <none><block_end><block_end>clamp_with_grad=ClampWithGrad.apply<def_stmt>get_opt opt_name opt_lr<block_start><if_stmt>opt_name<eq>"Adam"<block_start>opt=optim.Adam([z] lr=opt_lr)# LR=0.1 (Default) <block_end><elif_stmt>opt_name<eq>"AdamW"<block_start>opt=optim.AdamW([z] lr=opt_lr)<block_end><elif_stmt>opt_name<eq>"Adagrad"<block_start>opt=optim.Adagrad([z] lr=opt_lr)<block_end><elif_stmt>opt_name<eq>"Adamax"<block_start>opt=optim.Adamax([z] lr=opt_lr)<block_end><elif_stmt>opt_name<eq>"DiffGrad"<block_start>opt=DiffGrad([z] lr=opt_lr eps=1e-9 weight_decay=1e-9)# NR: Playing for reasons <block_end><elif_stmt>opt_name<eq>"AdamP"<block_start>opt=AdamP([z] lr=opt_lr)<block_end><elif_stmt>opt_name<eq>"RAdam"<block_start>opt=RAdam([z] lr=opt_lr)<block_end><elif_stmt>opt_name<eq>"RMSprop"<block_start>opt=optim.RMSprop([z] lr=opt_lr)<block_end><else_stmt><block_start>print("Unknown optimiser. Are choices broken?")<line_sep>opt=optim.Adam([z] lr=opt_lr)<block_end><return>opt<block_end><def_stmt>vector_quantize x codebook<block_start>d=x.pow(2).sum(dim=-1 keepdim=<true>)+codebook.pow(2).sum(dim=1)-2<times>[email protected]<line_sep>indices=d.argmin(-1)<line_sep>x_q=F.one_hot(indices codebook.shape[0]).to(d.dtype)@codebook<line_sep><return>replace_grad(x_q x)<block_end>
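# A quick sketch of the straight-through quantization above: each row of `x`
# is snapped to its nearest codebook entry in the forward pass, while the
# gradient flows back to `x` unchanged via replace_grad. Sizes are arbitrary.
torch.manual_seed(0)
codebook = torch.randn(512, 16)            # 512 entries of dimension 16
x = torch.randn(4, 16, requires_grad=True)

x_q = vector_quantize(x, codebook)
x_q.sum().backward()
print(x_q.shape, x.grad.shape)             # both torch.Size([4, 16])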
<import_stmt>numpy<as>np<import_from_stmt>scipy.constants mu_0 epsilon_0<line_sep># useful params <def_stmt>omega freq<block_start>"""Angular frequency, omega"""<line_sep><return>2.0<times>np.pi<times>freq<block_end><def_stmt>k freq sigma mu=mu_0 eps=epsilon_0<block_start>""" Eq 1.47 - 1.49 in Ward and Hohmann """<line_sep>w=omega(freq)<line_sep>alp=w<times>np.sqrt(mu<times>eps/2<times>(np.sqrt(1.0+(sigma/(eps<times>w))<power>2)+1))<line_sep>beta=w<times>np.sqrt(mu<times>eps/2<times>(np.sqrt(1.0+(sigma/(eps<times>w))<power>2)-1))<line_sep><return>alp-1j<times>beta<block_end><def_stmt>TriangleFun time ta tb<block_start>""" Triangular Waveform * time: 1D array for time * ta: time at peak * tb: time at step-off """<line_sep>out=np.zeros(time.size)<line_sep>out[time<le>ta]=1/ta<times>time[time<le>ta]<line_sep>out[(time<g>ta)&(time<l>tb)]=(-1/(tb-ta)<times>(time[(time<g>ta)&(time<l>tb)]-tb))<line_sep><return>out<block_end><def_stmt>TriangleFunDeriv time ta tb<block_start>""" Derivative of Triangular Waveform """<line_sep>out=np.zeros(time.size)<line_sep>out[time<le>ta]=1/ta<line_sep>out[(time<g>ta)&(time<l>tb)]=-1/(tb-ta)<line_sep><return>out<block_end><def_stmt>SineFun time ta<block_start>""" Sine Waveform * time: 1D array for time * ta: Pulse Period """<line_sep>out=np.zeros(time.size)<line_sep>out[time<le>ta]=np.sin(1.0/ta<times>np.pi<times>time[time<le>ta])<line_sep><return>out<block_end><def_stmt>SineFunDeriv time ta<block_start>""" Derivative of Sine Waveform """<line_sep>out=np.zeros(time.size)<line_sep>out[time<le>ta]=1.0/ta<times>np.pi<times>np.cos(1.0/ta<times>np.pi<times>time[time<le>ta])<line_sep><return>out<block_end><def_stmt>VTEMFun time ta tb a<block_start>""" VTEM Waveform * time: 1D array for time * ta: time at peak of exponential part * tb: time at step-off """<line_sep>out=np.zeros(time.size)<line_sep>out[time<le>ta]=(1-np.exp(-a<times>time[time<le>ta]/ta))/(1-np.exp(-a))<line_sep>out[(time<g>ta)&(time<l>tb)]=(-1/(tb-ta)<times>(time[(time<g>ta)&(time<l>tb)]-tb))<line_sep><return>out<block_end>
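For reference, the complex wavenumber returned by `k` above is a direct transcription of the Ward & Hohmann expressions (Eqs. 1.47–1.49), with \(\omega = 2\pi f\) and \(\mu\), \(\varepsilon\) defaulting to the free-space constants as in the code:

\[
k = \alpha - i\beta,\qquad
\alpha = \omega\sqrt{\frac{\mu\varepsilon}{2}\left(\sqrt{1+\frac{\sigma^{2}}{\varepsilon^{2}\omega^{2}}}+1\right)},\qquad
\beta = \omega\sqrt{\frac{\mu\varepsilon}{2}\left(\sqrt{1+\frac{\sigma^{2}}{\varepsilon^{2}\omega^{2}}}-1\right)}
\]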
<import_stmt>devpy<import_from_stmt>.log autolog# noqa <import_from_stmt>.tb color_traceback# noqa __version__="0.1.8"<def_stmt>dev_mode color_traceback=<true> autolog=<true># noqa <block_start><if_stmt>color_traceback<block_start>devpy.color_traceback()<block_end><if_stmt>autolog<block_start><return>devpy.autolog()<block_end><block_end>
<import_stmt>numpy<as>np<import_stmt>pytest<import_stmt>pandas<as>pd<import_from_stmt>pandas DataFrame Series Timestamp date_range<import_stmt>pandas._testing<as>tm<class_stmt>TestDataFrameDiff<block_start><def_stmt>test_diff self datetime_frame<block_start>the_diff=datetime_frame.diff(1)<line_sep>tm.assert_series_equal(the_diff["A"] datetime_frame["A"]-datetime_frame["A"].shift(1))<line_sep># int dtype a=10000000000000000<line_sep>b=a+1<line_sep>s=Series([a b])<line_sep>rs=DataFrame({"s":s}).diff()<assert_stmt>rs.s[1]<eq>1<line_sep># mixed numeric tf=datetime_frame.astype("float32")<line_sep>the_diff=tf.diff(1)<line_sep>tm.assert_series_equal(the_diff["A"] tf["A"]-tf["A"].shift(1))<line_sep># GH#10907 df=pd.DataFrame({"y":pd.Series([2]) "z":pd.Series([3])})<line_sep>df.insert(0 "x" 1)<line_sep>result=df.diff(axis=1)<line_sep>expected=pd.DataFrame({"x":np.nan "y":pd.Series(1) "z":pd.Series(1)}).astype("float64")<line_sep>tm.assert_frame_equal(result expected)<block_end>@pytest.mark.parametrize("tz" [<none> "UTC"])<def_stmt>test_diff_datetime_axis0 self tz# GH#18578 <block_start>df=DataFrame({0:date_range("2010" freq="D" periods=2 tz=tz) 1:date_range("2010" freq="D" periods=2 tz=tz) })<line_sep>result=df.diff(axis=0)<line_sep>expected=DataFrame({0:pd.TimedeltaIndex(["NaT" "1 days"]) 1:pd.TimedeltaIndex(["NaT" "1 days"]) })<line_sep>tm.assert_frame_equal(result expected)<block_end>@pytest.mark.parametrize("tz" [<none> "UTC"])<def_stmt>test_diff_datetime_axis1 self tz# GH#18578 <block_start>df=DataFrame({0:date_range("2010" freq="D" periods=2 tz=tz) 1:date_range("2010" freq="D" periods=2 tz=tz) })<if_stmt>tz<is><none><block_start>result=df.diff(axis=1)<line_sep>expected=DataFrame({0:pd.TimedeltaIndex(["NaT" "NaT"]) 1:pd.TimedeltaIndex(["0 days" "0 days"]) })<line_sep>tm.assert_frame_equal(result expected)<block_end><else_stmt><block_start><with_stmt>pytest.raises(NotImplementedError)<block_start>result=df.diff(axis=1)<block_end><block_end><block_end><def_stmt>test_diff_timedelta self# GH#4533 <block_start>df=DataFrame(dict(time=[Timestamp("20130101 9:01") Timestamp("20130101 9:02")] value=[1.0 2.0] ))<line_sep>res=df.diff()<line_sep>exp=DataFrame([[pd.NaT np.nan] [pd.Timedelta("00:01:00") 1]] columns=["time" "value"])<line_sep>tm.assert_frame_equal(res exp)<block_end><def_stmt>test_diff_mixed_dtype self<block_start>df=DataFrame(np.random.randn(5 3))<line_sep>df["A"]=np.array([1 2 3 4 5] dtype=object)<line_sep>result=df.diff()<assert_stmt>result[0].dtype<eq>np.float64<block_end><def_stmt>test_diff_neg_n self datetime_frame<block_start>rs=datetime_frame.diff(-1)<line_sep>xp=datetime_frame-datetime_frame.shift(-1)<line_sep>tm.assert_frame_equal(rs xp)<block_end><def_stmt>test_diff_float_n self datetime_frame<block_start>rs=datetime_frame.diff(1.0)<line_sep>xp=datetime_frame.diff(1)<line_sep>tm.assert_frame_equal(rs xp)<block_end><def_stmt>test_diff_axis self# GH#9727 <block_start>df=DataFrame([[1.0 2.0] [3.0 4.0]])<line_sep>tm.assert_frame_equal(df.diff(axis=1) DataFrame([[np.nan 1.0] [np.nan 1.0]]))<line_sep>tm.assert_frame_equal(df.diff(axis=0) DataFrame([[np.nan np.nan] [2.0 2.0]]))<block_end><block_end>
<import_stmt>pytest<import_from_stmt>lagom Container<import_from_stmt>lagom.exceptions DuplicateDefinition<class_stmt>InitialDep<block_start><pass><block_end><class_stmt>SomeMockForTesting(InitialDep)<block_start><pass><block_end><class_stmt>SomeMockThatDoesntEventExtend<block_start><pass><block_end><def_stmt>test_deps_can_be_overridden_by_a_child_class container:Container<block_start>container.define(InitialDep <lambda>:SomeMockForTesting())<line_sep>resolved=container.resolve(InitialDep)<assert_stmt>type(resolved)<eq>SomeMockForTesting<block_end><def_stmt>test_deps_can_be_overridden_by_anything container:Container<block_start>container.define(InitialDep <lambda>:SomeMockThatDoesntEventExtend())# type: ignore resolved=container.resolve(InitialDep)<assert_stmt>type(resolved)<eq>SomeMockThatDoesntEventExtend<block_end><def_stmt>test_explicit_definitions_can_only_be_made_once container:Container<block_start>container.define(InitialDep <lambda>:SomeMockForTesting())<with_stmt>pytest.raises(DuplicateDefinition)<block_start>container.define(InitialDep <lambda>:SomeMockThatDoesntEventExtend()# type: ignore )<block_end><block_end>
""" The MIT License (MIT) Copyright (c) 2015-present Rapptz Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """<import_stmt>copy<import_stmt>functools<import_stmt>inspect<import_stmt>re<import_stmt>unicodedata<import_from_stmt>collections OrderedDict<import_from_stmt>typing TYPE_CHECKING Any List Dict Optional Iterable Callable Sequence Union Tuple <import_from_stmt>fortnitepy.typedefs MaybeCoro<import_from_stmt>fortnitepy.party ClientParty<import_from_stmt>fortnitepy.friend Friend<import_from_stmt>.core Group Command<import_from_stmt>.errors CommandError<import_from_stmt>.context Context<import_from_stmt>.cog Cog<if_stmt>TYPE_CHECKING<block_start><import_from_stmt>.bot Bot<block_end>__all__=('Paginator' 'HelpCommand' 'FortniteHelpCommand' )<line_sep>_IS_ASCII=re.compile(r'^[\x00-\x7f]+$')<def_stmt>_string_width string:str * _IS_ASCII:Any=_IS_ASCII<arrow>int<block_start>"""Returns string's width."""<line_sep>match=_IS_ASCII.match(string)<if_stmt>match<block_start><return>match.endpos<block_end>UNICODE_WIDE_CHAR_TYPE='WFA'<line_sep>width=0<line_sep>func=unicodedata.east_asian_width<for_stmt>char string<block_start>width<augadd>2<if>func(char)<in>UNICODE_WIDE_CHAR_TYPE<else>1<block_end><return>width<block_end><async_keyword><def_stmt>maybe_coroutine func:MaybeCoro *args:list **kwargs:dict<arrow>Any<block_start>value=func(*args **kwargs)<if_stmt>inspect.isawaitable(value)<block_start><return><await>value<block_end><else_stmt><block_start><return>value<block_end><block_end><class_stmt>Paginator<block_start>"""A class that aids in paginating code blocks for Fortnite messages. .. container:: operations .. describe:: len(x) Returns the total number of characters in the paginator. Attributes ----------- prefix: :class:`str` The prefix inserted to every page. suffix: :class:`str` The suffix appended at the end of every page. max_size: :class:`int` The maximum amount of codepoints allowed in a page. 
"""<def_stmt>__init__ self prefix:str='' suffix:str='' max_size:int=10000<arrow><none><block_start>self.prefix=prefix<line_sep>self.suffix=suffix<line_sep>self.max_size=max_size<line_sep>self.clear()<block_end><def_stmt>clear self<arrow><none><block_start>"""Clears the paginator to have no pages."""<if_stmt>self.prefix<is><not><none><block_start>self._current_page=[self.prefix]<line_sep>self._count=len(self.prefix)<block_end><else_stmt><block_start>self._current_page=[]<line_sep>self._count=0<block_end>self._pages=[]<block_end>@property<def_stmt>_prefix_len self<arrow>int<block_start><return>len(self.prefix)<if>self.prefix<else>0<block_end>@property<def_stmt>_suffix_len self<arrow>int<block_start><return>len(self.suffix)<if>self.suffix<else>0<block_end><def_stmt>add_page self text:str<arrow><none><block_start>"""Adds a page to the paginator with no additional checks done."""<line_sep>self._pages.append(text)<block_end><def_stmt>add_line self line:str='' * empty:bool=<false><arrow><none><block_start>"""Adds a line to the current page. If the line exceeds the :attr:`max_size` then an exception is raised. Parameters ----------- line: :class:`str` The line to add. empty: :class:`bool` Indicates if another empty line should be added. Raises ------ RuntimeError The line was too big for the current :attr:`max_size`. """<line_sep>max_page_size=self.max_size-self._prefix_len-self._suffix_len<if_stmt>len(line)<g>max_page_size<block_start><raise>RuntimeError('Line exceeds maximum page size '<concat>'{}'.format(max_page_size))<block_end><if_stmt>self._count+len(line)+1<g>self.max_size-self._suffix_len<block_start>self.close_page()<block_end>self._count<augadd>len(line)+1<line_sep>self._current_page.append(line)<if_stmt>empty<block_start>self._current_page.append('')<line_sep>self._count<augadd>1<block_end><block_end><def_stmt>close_page self<arrow><none><block_start>"""Prematurely terminate a page."""<if_stmt>self.suffix<is><not><none><block_start>self._current_page.append(self.suffix)<block_end>self._pages.append('\n'.join(self._current_page))<if_stmt>self.prefix<is><not><none><block_start>self._current_page=[]<line_sep>self._count=len(self.prefix)<block_end><else_stmt><block_start>self._current_page=[]<line_sep>self._count=0<block_end><block_end><def_stmt>__len__ self<arrow>int<block_start>total=sum(len(p)<for>p self._pages)<line_sep><return>total+self._count<block_end>@property<def_stmt>pages self<arrow>List[str]<block_start>"""Returns the rendered list of pages."""<if_stmt>len(self._current_page)<g>(0<if>self.prefix<is><none><else>1)<block_start>self.close_page()<block_end><return>self._pages<block_end><def_stmt>__repr__ self<arrow>str<block_start>fmt=('<Paginator prefix: {0.prefix} suffix: {0.suffix} max_size: '<concat>'{0.max_size} count: {0._count}>')<line_sep><return>fmt.format(self)<block_end><block_end><def_stmt>_not_overridden func:MaybeCoro<arrow>MaybeCoro<block_start>func.__fnpy_help_command_not_overridden__=<true><line_sep><return>func<block_end><class_stmt>_HelpCommandImpl(Command)<block_start><def_stmt>__init__ self inject:Command *args:list **kwargs:dict<arrow><none><block_start>super().__init__(inject.command_callback *args **kwargs)<line_sep>self._original=inject<line_sep>self._injected=inject<block_end><async_keyword><def_stmt>prepare self 
ctx:Context<arrow><none><block_start>self._injected=injected=self._original.copy()<line_sep>injected.context=ctx<line_sep>self.callback=injected.command_callback<line_sep>error_handler=injected.help_command_error_handler<if_stmt><not>hasattr(error_handler '__fnpy_help_command_not_overridden__')<block_start><if_stmt>self.cog<is><not><none><block_start>self.error_handler=self._error_handler_cog_implementation<block_end><else_stmt><block_start>self.error_handler=error_handler<block_end><block_end><await>super().prepare(ctx)<block_end><async_keyword><def_stmt>_parse_arguments self ctx:Context<arrow><none># Make the parser think we don't have a cog so it doesn't # inject the parameter into `ctx.args`. <block_start>original_cog=self.cog<line_sep>self.cog=<none><try_stmt><block_start><await>super()._parse_arguments(ctx)<block_end><finally_stmt><block_start>self.cog=original_cog<block_end><block_end><async_keyword><def_stmt>_error_handler_cog_implementation self _ ctx:Context error:Exception<arrow><none><block_start><await>self._injected.help_command_error_handler(ctx error)<block_end>@property<def_stmt>clean_params self<arrow>OrderedDict<block_start>result=self.params.copy()<try_stmt><block_start>result.popitem(last=<false>)<block_end><except_stmt>Exception<block_start><raise>ValueError('Missing context parameter')<from><none><block_end><else_stmt><block_start><return>result<block_end><block_end><def_stmt>_inject_into_cog self cog:Cog<arrow><none># Warning: hacky # Make the cog think that get_commands returns this command # as well if we inject it without modifying __cog_commands__ # since that's used for the injection and ejection of cogs. <block_start><def_stmt>wrapped_get_commands * _original=cog.get_commands<block_start>ret=_original()<line_sep>ret.append(self)<line_sep><return>ret<block_end># Ditto here <def_stmt>wrapped_walk_commands * _original=cog.walk_commands<block_start><yield><from>_original()<line_sep><yield>self<block_end>functools.update_wrapper(wrapped_get_commands cog.get_commands)<line_sep>functools.update_wrapper(wrapped_walk_commands cog.walk_commands)<line_sep>cog.get_commands=wrapped_get_commands<line_sep>cog.walk_commands=wrapped_walk_commands<line_sep>self.cog=cog<block_end><def_stmt>_eject_cog self<arrow><none><block_start><if_stmt>self.cog<is><none><block_start><return><block_end># revert back into their original methods cog=self.cog<line_sep>cog.get_commands=cog.get_commands.__wrapped__<line_sep>cog.walk_commands=cog.walk_commands.__wrapped__<line_sep>self.cog=<none><block_end><block_end><class_stmt>HelpCommand<block_start>r"""The base implementation for help command formatting. .. note:: Internally instances of this class are deep copied every time the command itself is invoked to prevent a race condition mentioned in discord.py issue 2123. This means that relying on the state of this class to be the same between command invocations would not work as expected. Attributes ----------- context: Optional[:class:`Context`] The context that invoked this help formatter. This is generally set after the help command assigned, :func:`command_callback`\, has been called. show_hidden: :class:`bool` Specifies if hidden commands should be shown in the output. Defaults to ``False``. verify_checks: :class:`bool` Specifies if commands should have their :attr:`.Command.checks` called and verified. Defaults to ``True``. command_attrs: :class:`dict` A dictionary of options to pass in for the construction of the help command. 
This allows you to change the command behaviour without actually changing the implementation of the command. The attributes will be the same as the ones passed in the :class:`.Command` constructor. """<def_stmt>__new__ cls *args:list **kwargs:dict<arrow>'HelpCommand'# To prevent race conditions of a single instance while also allowing # for settings to be passed the original arguments passed must be # assigned to allow for easier copies (which will be made when the # help command is actually called) # see discord.py issue 2123 <block_start>self=super().__new__(cls)<line_sep># Shallow copies cannot be used in this case since it is not unusual # to pass instances that need state, e.g. Paginator or what have you # into the function. The keys can be safely copied as-is since they're # 99.99% certain of being string keys deepcopy=copy.deepcopy<line_sep>self.__original_kwargs__={k:deepcopy(v)<for>k,v kwargs.items()}<line_sep>self.__original_args__=deepcopy(args)<line_sep><return>self<block_end><def_stmt>__init__ self **options:dict<arrow><none><block_start>self.show_hidden=options.pop('show_hidden' <false>)<line_sep>self.verify_checks=options.pop('verify_checks' <true>)<line_sep>self.command_attrs=attrs=options.pop('command_attrs' {})<line_sep>attrs.setdefault('name' 'help')<line_sep>attrs.setdefault('help' 'Shows this message')<line_sep>self.context=<none><line_sep>self._command_impl=<none><block_end><def_stmt>copy self<arrow>'HelpCommand'<block_start>o=self.__class__(*self.__original_args__ **self.__original_kwargs__)<line_sep>o._command_impl=self._command_impl<line_sep><return>o<block_end><def_stmt>_add_to_bot self bot:'Bot'<arrow><none><block_start>command=_HelpCommandImpl(self **self.command_attrs)<line_sep>bot.add_command(command)<line_sep>self._command_impl=command<block_end><def_stmt>_remove_from_bot self bot:'Bot'<arrow><none><block_start>bot.remove_command(self._command_impl.name)<line_sep>self._command_impl._eject_cog()<line_sep>self._command_impl=<none><block_end><def_stmt>get_bot_mapping self<arrow>Dict[Optional[Cog] List[Command]]<block_start>"""Retrieves the bot mapping passed to :meth:`send_bot_help`."""<line_sep>bot=self.context.bot<line_sep>mapping={cog:cog.get_commands()<for>cog bot.cogs.values()}<line_sep>mapping[<none>]=[c<for>c bot.all_commands.values()<if>c.cog<is><none>]<line_sep><return>mapping<block_end>@property<def_stmt>command_prefix self<arrow>str<block_start>"""The prefix used to invoke the help command."""<line_sep><return>self.context.prefix<block_end>@property<def_stmt>invoked_with self<arrow>str<block_start>"""Similar to :attr:`Context.invoked_with` except properly handles the case where :meth:`Context.send_help` is used. If the help command was used regularly then this returns the :attr:`Context.invoked_with` attribute. Otherwise, if it the help command was called using :meth:`Context.send_help` then it returns the internal command name of the help command. Returns --------- :class:`str` The command name that triggered this invocation. """<line_sep>command_name=self._command_impl.name<line_sep>ctx=self.context<if_stmt>(ctx<is><none><or>ctx.command<is><none><or>ctx.command.qualified_name<ne>command_name)<block_start><return>command_name<block_end><return>ctx.invoked_with<block_end><def_stmt>get_command_signature self command:Command<arrow>str<block_start>"""Retrieves the signature portion of the help page. Parameters ---------- command: :class:`Command` The command to get the signature of. Returns ------- :class:`str` The signature for the command. 
"""<line_sep>parent=command.full_parent_name<if_stmt>len(command.aliases)<g>0<block_start>aliases='|'.join(command.aliases)<line_sep>fmt='[%s|%s]'%(command.name aliases)<if_stmt>parent<block_start>fmt=parent+' '+fmt<block_end>alias=fmt<block_end><else_stmt><block_start>alias=command.name<if><not>parent<else>parent+' '+command.name<block_end><return>'%s%s %s'%(self.command_prefix alias command.signature)<block_end>@property<def_stmt>cog self<arrow>Optional[Cog]<block_start>"""A property for retrieving or setting the cog for the help command. When a cog is set for the help command, it is as-if the help command belongs to that cog. All cog special methods will apply to the help command and it will be automatically unset on unload. To unbind the cog from the help command, you can set it to ``None``. Returns -------- Optional[:class:`Cog`] The cog that is currently set for the help command. """<line_sep><return>self._command_impl.cog<block_end>@cog.setter<def_stmt>cog self cog:Cog<arrow><none># Remove whatever cog is currently valid, if any <block_start>self._command_impl._eject_cog()<line_sep># If a new cog is set then inject it. <if_stmt>cog<is><not><none><block_start>self._command_impl._inject_into_cog(cog)<block_end><block_end><def_stmt>command_not_found self string:str<arrow>str<block_start>"""|maybecoro| A method called when a command is not found in the help command. This is useful to override for i18n. Defaults to ``No command called {0} found.`` Parameters ------------ string: :class:`str` The string that contains the invalid command. Note that this has had mentions removed to prevent abuse. Returns --------- :class:`str` The string to use when a command has not been found. """<line_sep><return>'No command called "{}" found.'.format(string)<block_end><def_stmt>subcommand_not_found self command:Command string:str<arrow>str<block_start>"""|maybecoro| A method called when a command did not have a subcommand requested in the help command. This is useful to override for i18n. Defaults to either: - ``'Command "{command.qualified_name}" has no subcommands.'`` - If there is no subcommand in the ``command`` parameter. - ``'Command "{command.qualified_name}" has no subcommand named {string}'`` - If the ``command`` parameter has subcommands but not one named ``string``. Parameters ------------ command: :class:`Command` The command that did not have the subcommand requested. string: :class:`str` The string that contains the invalid subcommand. Returns --------- :class:`str` The string to use when the command did not have the subcommand requested. """<line_sep># noqa <if_stmt>isinstance(command Group)<and>len(command.all_commands)<g>0<block_start><return>('Command "{0.qualified_name}" has no subcommand named '<concat>'{1}'.format(command string))<block_end><return>'Command "{0.qualified_name}" has no subcommands.'.format(command)<block_end><async_keyword><def_stmt>filter_commands self commands:Iterable[Command] * sort:bool=<false> key:Optional[Callable]=<none><arrow>List[Command]<block_start>"""|coro| Returns a filtered list of commands and optionally sorts them. This takes into account the :attr:`verify_checks` and :attr:`show_hidden` attributes. Parameters ------------ commands: Iterable[:class:`Command`] An iterable of commands that are getting filtered. sort: :class:`bool` Whether to sort the result. key: Optional[Callable[:class:`Command`, Any]] An optional key function to pass to :func:`py:sorted` that takes a :class:`Command` as its sole parameter. 
If ``sort`` is passed as ``True`` then this will default as the command name. Returns --------- List[:class:`Command`] A list of commands that passed the filter. """<if_stmt>sort<and>key<is><none><block_start>key=<lambda>c:c.name<block_end># noqa <if_stmt>self.show_hidden<block_start>iterator=commands<block_end><else_stmt><block_start>iterator=filter(<lambda>c:<not>c.hidden commands)<block_end><if_stmt><not>self.verify_checks# if we do not need to verify the checks then we can just # run it straight through normally without using await. <block_start><return>sorted(iterator key=key)<if>sort<else>list(iterator)<block_end># if we're here then we need to check every command if it can run <async_keyword><def_stmt>predicate cmd<block_start><try_stmt><block_start><return><await>cmd.can_run(self.context)<block_end><except_stmt>CommandError<block_start><return><false><block_end><block_end>ret=[]<for_stmt>cmd iterator<block_start>valid=<await>predicate(cmd)<if_stmt>valid<block_start>ret.append(cmd)<block_end><block_end><if_stmt>sort<block_start>ret.sort(key=key)<block_end><return>ret<block_end><def_stmt>get_max_size self commands:Sequence[Command]<arrow>int<block_start>"""Returns the largest name length of the specified command list. Parameters ------------ commands: Sequence[:class:`Command`] A sequence of commands to check for the largest size. Returns -------- :class:`int` The maximum width of the commands. """<line_sep>as_lengths=(_string_width(c.name)<for>c commands)<line_sep><return>max(as_lengths default=0)<block_end><def_stmt>get_destination self<arrow>Union[Friend ClientParty]<block_start>"""Returns either :class:`fortnitepy.Friend` or :class:`fortnitepy.ClientParty` where the help command will be output. You can override this method to customise the behaviour. By default this returns the context's destination. """<line_sep><return>self.context.get_destination()<block_end><async_keyword><def_stmt>send_error_message self error:Exception<arrow><none><block_start>"""|coro| Handles the implementation when an error happens in the help command. For example, the result of :meth:`command_not_found` or :meth:`command_has_no_subcommand_found` will be passed here. You can override this method to customise the behaviour. By default, this sends the error message to the destination specified by :meth:`get_destination`. .. note:: You can access the invocation context with :attr:`HelpCommand.context`. Parameters ------------ error: :class:`str` The error message to display to the user. """<line_sep>destination=self.get_destination()<line_sep><await>destination.send(error)<block_end>@_not_overridden<async_keyword><def_stmt>help_command_error_handler self ctx:Context error:Exception<arrow><none><block_start>"""|coro| The help command's error handler, as specified by :ref:`ext_commands_error_handler`. Useful to override if you need some specific behaviour when the error handler is called. By default this method does nothing and just propagates to the default error handlers. Parameters ------------ ctx: :class:`Context` The invocation context. error: :class:`CommandError` The error that was raised. """<line_sep><pass><block_end><async_keyword><def_stmt>send_bot_help self page:int<arrow><none><block_start>"""|coro| Handles the implementation of the bot command page in the help command. This function is called when the help command is called with no arguments. It should be noted that this method does not return anything -- rather the actual message sending should be done inside this method. 
Well behaved subclasses should use :meth:`get_destination` to know where to send, as this is a customisation point for other users. You can override this method to customise the behaviour. .. note:: You can access the invocation context with :attr:`HelpCommand.context`. Also, the commands in the mapping are not filtered. To do the filtering you will have to call :meth:`filter_commands` yourself. Parameters ---------- page: :class:`int` The page to send. """<line_sep><return><none><block_end><async_keyword><def_stmt>send_cog_help self cog:Cog page:int<arrow><none><block_start>"""|coro| Handles the implementation of the cog page in the help command. This function is called when the help command is called with a cog as the argument. It should be noted that this method does not return anything -- rather the actual message sending should be done inside this method. Well behaved subclasses should use :meth:`get_destination` to know where to send, as this is a customisation point for other users. You can override this method to customise the behaviour. .. note:: You can access the invocation context with :attr:`HelpCommand.context`. To get the commands that belong to this cog see :meth:`Cog.get_commands`. The commands returned not filtered. To do the filtering you will have to call :meth:`filter_commands` yourself. Parameters ----------- cog: :class:`Cog` The cog that was requested for help. page: :class:`int` The page to send. """<line_sep><return><none><block_end><async_keyword><def_stmt>send_group_help self group:Group<arrow><none><block_start>"""|coro| Handles the implementation of the group page in the help command. This function is called when the help command is called with a group as the argument. It should be noted that this method does not return anything -- rather the actual message sending should be done inside this method. Well behaved subclasses should use :meth:`get_destination` to know where to send, as this is a customisation point for other users. You can override this method to customise the behaviour. .. note:: You can access the invocation context with :attr:`HelpCommand.context`. To get the commands that belong to this group without aliases see :attr:`Group.commands`. The commands returned not filtered. To do the filtering you will have to call :meth:`filter_commands` yourself. Parameters ----------- group: :class:`Group` The group that was requested for help. """<line_sep><return><none><block_end><async_keyword><def_stmt>send_command_help self command:Command<arrow><none><block_start>"""|coro| Handles the implementation of the single command page in the help command. It should be noted that this method does not return anything -- rather the actual message sending should be done inside this method. Well behaved subclasses should use :meth:`get_destination` to know where to send, as this is a customisation point for other users. You can override this method to customise the behaviour. .. note:: You can access the invocation context with :attr:`HelpCommand.context`. .. admonition:: Showing Help :class: helpful There are certain attributes and methods that are helpful for a help command to show such as the following: - :attr:`Command.help` - :attr:`Command.brief` - :attr:`Command.short_doc` - :attr:`Command.description` - :meth:`get_command_signature` There are more than just these attributes but feel free to play around with these to help you get started to get the output that you want. Parameters ----------- command: :class:`Command` The command that was requested for help. 
"""<line_sep><return><none><block_end><async_keyword><def_stmt>prepare_help_command self ctx:Context command:Optional[Command]=<none><arrow><none><block_start>"""|coro| A low level method that can be used to prepare the help command before it does anything. For example, if you need to prepare some state in your subclass before the command does its processing then this would be the place to do it. The default implementation does nothing. .. note:: This is called *inside* the help command callback body. So all the usual rules that happen inside apply here as well. Parameters ----------- ctx: :class:`Context` The invocation context. command: Optional[:class:`str`] The argument passed to the help command. """<line_sep><pass><block_end># Not typehinting because its a command callback <async_keyword><def_stmt>command_callback self ctx * command=<none> page:int=1<block_start>"""|coro| The actual implementation of the help command. It is not recommended to override this method and instead change the behaviour through the methods that actually get dispatched. - :meth:`send_bot_help` - :meth:`send_cog_help` - :meth:`send_group_help` - :meth:`send_command_help` - :meth:`get_destination` - :meth:`command_not_found` - :meth:`subcommand_not_found` - :meth:`send_error_message` - :meth:`on_help_command_error` - :meth:`prepare_help_command` """<line_sep># page will never get a value but we just include it here for # the param list. The actual conversion is done below. <if_stmt>command<is><not><none><block_start>split=command.split()<try_stmt><block_start>page=int(split[-1])<block_end><except_stmt>ValueError<block_start>page=1<line_sep>new=command<block_end><else_stmt><block_start>new=<none><if>len(split)<eq>1<else>' '.join(split[:-1])<block_end><block_end><else_stmt><block_start>new=command<block_end><await>self.prepare_help_command(ctx command)<line_sep>bot=ctx.bot<if_stmt>new<is><none># mapping = self.get_bot_mapping() <block_start><return><await>self.send_bot_help(page)<block_end># Check if it's a cog <if_stmt><not>command.startswith(self.command_prefix)<block_start>cog=bot.get_cog(new)<if_stmt>cog<is><not><none><block_start><return><await>self.send_cog_help(cog page)<block_end><block_end><if_stmt>command.startswith(self.command_prefix)<block_start>command=command[len(self.command_prefix):]<block_end>maybe_coro=maybe_coroutine<line_sep># If it's not a cog then it's a command. # Since we want to have detailed errors when someone # passes an invalid subcommand, we need to walk through # the command group chain ourselves. 
keys=command.split(' ')<line_sep>cmd=bot.all_commands.get(keys[0])<if_stmt>cmd<is><none><block_start>string=<await>maybe_coro(self.command_not_found keys[0])<line_sep><return><await>self.send_error_message(string)<block_end><for_stmt>key keys[1:]<block_start><try_stmt><block_start>found=cmd.all_commands.get(key)<block_end><except_stmt>AttributeError<block_start>string=<await>maybe_coro(self.subcommand_not_found cmd key)<line_sep><return><await>self.send_error_message(string)<block_end><else_stmt><block_start><if_stmt>found<is><none><block_start>string=<await>maybe_coro(self.subcommand_not_found cmd key)<line_sep><return><await>self.send_error_message(string)<block_end>cmd=found<block_end><block_end><if_stmt>isinstance(cmd Group)<block_start><return><await>self.send_group_help(cmd)<block_end><else_stmt><block_start><return><await>self.send_command_help(cmd)<block_end><block_end><block_end><class_stmt>FortniteHelpCommand(HelpCommand)<block_start>"""The implementation of the default help command. This inherits from :class:`HelpCommand`. It extends it with the following attributes. Attributes ------------ dm_help: Optional[:class:`bool`] A tribool that indicates if the help command should DM the user instead of sending it to the channel it received it from. If the boolean is set to ``True``, then all help output is DM'd. If ``False``, none of the help output is DM'd. paginator: :class:`Paginator` The paginator used to paginate the help command output. commands_title: :class:`str` The commands title. Defaults to ``Commands:``. cog_title: :class:`str` The cog title. Defaults to ``Category:``. usage_title: :class:`str` The usage title. Defaults to ``Usage:``. description_title: :class:`str` The description title. Defaults to ``Description:``. help_title: :class:`str` The help title. Defaults to ``Help:``. sub_commands_title: :class:`str` The sub commands title. Defaults to ``Help Commands:``. no_category_heading: :class:`str` The text to use as heading if no category (cog) is found for the command. Defaults to ``No Category``. height: :class:`int` The maximum number of lines to fit. Defaults to ``15``. width: :class:`int` The maximum number of characters that fit in a line. Defaults to ``60``. indent: :class:`int` How much to indent the commands and other text from a title. Defaults to ``4``. title_prefix: :class:`str` The prefix to use for the help title. Defaults to `` +``. title_suffix: :class:`str` The suffix to use for the help title. Defaults to ``+``. title_char: :class:`str` The char to use for the help title. Defaults to ``=``. line_prefix: :class:`str` The prefix to use for all lines. Defaults to `` ``. (Three spaces) line_suffix: :class:`str` The prefix to use for all lines. Defaults to ````. (Empty) footer_prefix: :class:`str` The prefix to use for the help footer. Defaults to `` +``. footer_suffix: :class:`str` The suffix to use for the help footer. Defaults to ``+``. footer_char: :class:`str` The char to use for the help footer. Defaults to ``=``. 
"""<def_stmt>__init__ self **options:dict<arrow><none><block_start>self.dm_help=options.pop('dm_help' <false>)<line_sep>self.paginator=options.pop('paginator' <none>)<line_sep>self.commands_title=options.pop('commands_title' 'Commands:')<line_sep>self.cog_title=options.pop('cog_title' 'Category:')<line_sep>self.usage_title=options.pop('usage_title' 'Usage:')<line_sep>self.description_title=options.pop('description_title' 'Description:')# noqa self.help_title=options.pop('help_title' 'Help:')<line_sep>self.sub_commands_title=options.pop('sub_commands_title' 'Sub Commands:')# noqa self.no_category=options.pop('no_category_heading' 'No Category')<line_sep>self.height=options.pop('height' 15)<line_sep>self.width=options.pop('width' 60)<line_sep>self.indent=options.pop('indent' 4)<line_sep>self.title_prefix=options.pop('title_prefix' ' +')<line_sep>self.title_suffix=options.pop('title_suffix' '+')<line_sep>self.title_char=options.pop('title_char' '=')<line_sep>self.line_prefix=options.pop('line_prefix' ' ')<line_sep>self.line_suffix=options.pop('line_suffix' '')<line_sep>self.footer_prefix=options.pop('footer_prefix' ' +')<line_sep>self.footer_suffix=options.pop('footer_suffix' '+')<line_sep>self.footer_char=options.pop('footer_char' '=')<if_stmt>self.paginator<is><none><block_start>self.paginator=Paginator()<block_end>super().__init__(**options)<block_end><def_stmt>get_command_name self command:Command<arrow>str<block_start>"""Gets the name of a command. This method can be overridden for custom text. Parameters ---------- command: :class:`.Command` The command to get the name for. Returns ------- :class:`str` | The command name. | Defaults to ``self.command_prefix + command.qualified_name`` """<line_sep><return>self.command_prefix+command.qualified_name<block_end><def_stmt>get_sub_command_name self sub_command:Command<arrow>str<block_start>"""Gets the name of a sub command. This method can be overridden for custom text. Parameters ---------- sub_command: :class:`.Command` The sub command to get the name for. Returns ------- :class:`str` | The sub command name. | Defaults to ``{self.command_prefix} {sub_command.qualified_name}`` """<line_sep># noqa <return>self.command_prefix+sub_command.qualified_name<block_end><def_stmt>get_bot_header self page_num:int pages_amount:int<arrow>str<block_start>"""Gets the name of a sub command. This method can be overridden for custom text. Parameters ---------- page_num: :class:`int` The page being built. pages_amount: :class:`int` The amount of pages available. Returns ------- :class:`str` | The sub command name. | Defaults to ``{self.command_prefix} {sub_command.qualified_name}`` """<line_sep># noqa <return>'{0} - {1} / {2}'.format('All Commands' page_num pages_amount)<block_end><def_stmt>get_bot_footer self page_num:int pages_amount:str<arrow>str<block_start>"""Gets the text to appear in the footer when :meth:`send_bot_help()` is called. This method can be overridden for custom text. Parameters ---------- page_num: :class:`int` The page being built. pages_amount: :class:`int` The amount of pages available. Returns ------- :class:`str` | The bot footer. | Defaults to ```` (Empty) """<line_sep><return>''<block_end><def_stmt>get_command_header self command:Command<arrow>str<block_start>"""Gets the text to appear in the header when :meth:`send_command_help()` is called. This method can be overridden for custom text. Parameters ---------- command: :class:`.Command` The command to get the header for. Returns ------- :class:`str` | The header text. 
| Defaults to ``Command | {self.command_prefix}{command.qualified_name}`` """<line_sep># noqa <return>'Command | {0}{1}'.format(self.command_prefix command.qualified_name)<block_end><def_stmt>get_command_footer self command:Command<arrow>str<block_start>"""Gets the text to appear in the footer when :meth:`send_command_help()` is called. This method can be overridden for custom text. Parameters ---------- command: :class:`.Command` The command to get the footer for. Returns ------- :class:`str` | The footer text. | Defaults to ```` (Empty) """<line_sep><return>''<block_end><def_stmt>get_group_header self group:Group<arrow>str<block_start>"""Gets the text to appear in the header when :meth:`send_group_help()` is called. This method can be overridden for custom text. Parameters ---------- command: :class:`.Group` The group to get the header for. Returns ------- :class:`str` | The header text. | Defaults to ``Command | {self.command_prefix}{group.qualified_name}`` """<line_sep># noqa <return>'Command | {0}{1}'.format(self.command_prefix group.qualified_name)<block_end><def_stmt>get_group_footer self group:Group<arrow>str<block_start>"""Gets the text to appear in the footer when :meth:`send_group_help()` is called. This method can be overridden for custom text. Parameters ---------- command: :class:`.Group` The group to get the footer for. Returns ------- :class:`str` | The footer text. | Defaults to ```` (Empty) """<line_sep><return>''<block_end><def_stmt>get_cog_header self cog:Cog page_num:int pages_amount:int<arrow>str<block_start>"""Gets the text to appear in the header when :meth:`send_cog_help()` is called. This method can be overridden for custom text. Parameters ---------- cog: :class:`.Cog` The cog to get the header for. page_num: :class:`int` The page being built. pages_amount: :class:`int` The amount of pages available. Returns ------- :class:`str` | The header text. | Defaults to ``Category | {cog.qualified_name} - {page_num} / {pages_amount}`` """<line_sep># noqa <return>'Category | {0} - {1} / {2}'.format(cog.qualified_name page_num pages_amount)<block_end><def_stmt>get_cog_footer self cog:Cog page_num:int pages_amount:int<arrow>str<block_start>"""Gets the text to appear in the footer when :meth:`send_cog_help()` is called. This method can be overridden for custom text. Parameters ---------- cog: :class:`.Cog` The cog to get the footer for. page_num: :class:`int` The page being built. pages_amount: :class:`int` The amount of pages available. Returns ------- :class:`str` | The footer text. 
| Defaults to ``{self.command_prefix}{self.invoked_with} {cog.qualified_name} <page> | {self.command_prefix}{self.invoked_with} <command>`` """<line_sep># noqa <return>'{0}{1} {2} <page> | {0}{1} <command>'.format(self.command_prefix self.invoked_with cog.qualified_name)<block_end><def_stmt>shorten_text self text:str max_len:int dot_amount:int=3<arrow>str<block_start>"""Shortens text to fit into the :attr:`width`."""<if_stmt>len(text)<g>max_len<block_start><return>text[:max_len-dot_amount]+'.'<times>dot_amount<block_end><return>text<block_end><def_stmt>construct_title self t:str<arrow>str<block_start>_title=' '+t+' '<if>t<else>''<line_sep>w=self.width-len(self.title_prefix)-len(self.title_suffix)<line_sep><return>'{0}{1:{2}^{3}}{4}'.format(self.title_prefix _title self.title_char w self.title_suffix)<block_end><def_stmt>construct_footer self f:str<arrow>str<block_start>_footer=' '+f+' '<if>f<else>''<line_sep>w=self.width-len(self.footer_prefix)-len(self.footer_suffix)<line_sep><return>'{0}{1:{2}^{3}}{4}'.format(self.footer_prefix _footer self.footer_char w self.footer_suffix)<block_end><def_stmt>fix_too_long self string:str length:int start_length:int<arrow>Tuple[str List[str]]<block_start>first=string[:start_length-1]<line_sep>string=string[start_length-1:]<line_sep><return>(first [string[0+i:length-1+i]<for>i range(0 len(string) length-1)])<block_end><def_stmt>chunkstring self string:str length:int<arrow>List[str]<block_start>lines=[]<line_sep>curr=''<line_sep>split=string.split()<for_stmt>c,word enumerate(split 1)<block_start>spaces=1<if>c<ne>len(split)<else>0<if_stmt>len(word)+spaces<g>length<block_start>space_left=(length-len(curr))<line_sep>start_length=space_left<if>space_left<g>5<else>0<line_sep>first,too_long=self.fix_too_long(word length start_length)<if_stmt>first<block_start>curr<augadd>first+'-'<block_end><if_stmt>curr<block_start>lines.append(curr)<line_sep>curr=''<block_end><for_stmt>cc,new enumerate(too_long 1)<block_start><if_stmt>cc<ne>len(too_long)<block_start>new<augadd>'-'<line_sep>lines.append(new)<block_end><else_stmt><block_start>curr<augadd>new<block_end><block_end><continue><block_end><if_stmt>len(curr)+len(word)<g>length<block_start>lines.append(curr[:-1])<line_sep>curr=''<block_end>curr<augadd>word+' '<block_end><if_stmt>curr<block_start>lines.append(curr)<block_end><return>lines<block_end><def_stmt>construct_single_line self text:str extra_indent:int=0<arrow>str<block_start>prefix=self.line_prefix+' '<times>extra_indent<line_sep>suffix=self.line_suffix<line_sep>w=self.width-len(prefix)-len(suffix)<line_sep><return>'{0}{1:<{2}}{3}'.format(prefix text w suffix)<block_end><def_stmt>construct_category self name:str brief:str extra_indent:int=0 raw:bool=<false><arrow>List[str]<block_start>prefix=self.line_prefix+' '<times>extra_indent<line_sep>suffix=self.line_suffix<line_sep>indent=self.indent<line_sep>w=self.width-len(prefix)-len(suffix)<line_sep>name_line='{0}{1:<{2}}{3}'.format(prefix self.shorten_text(name w) w suffix)<line_sep>brief_w=w-indent<line_sep>lines=[name_line]<if_stmt><not>raw<block_start>gen=self.chunkstring(brief brief_w)<block_end><else_stmt><block_start>gen=brief.splitlines()<block_end><for_stmt>c,line enumerate(gen 2)<block_start>fmt='{0}{1}{2:<{3}}{4}'.format(prefix ' '<times>indent line brief_w suffix)<if_stmt>c<eq>self.height-2<block_start>to_cut=3+len(suffix)<line_sep>new=fmt[:to_cut]+'...'+suffix<line_sep>lines.append(new)<line_sep><break><block_end>lines.append(fmt)<block_end><return>lines<block_end><async_keyword><def_stmt>send_pages 
self<arrow><none><block_start>"""A helper utility to send the page output from :attr:`paginator` to the destination. """<line_sep>destination=self.get_destination()<for_stmt>page self.paginator.pages<block_start><await>destination.send(page)<block_end><block_end><async_keyword><def_stmt>send_page self page_num:int<arrow><none><block_start>"""A helper utility to send a page output from :attr:`paginator` to the destination. """<line_sep>pages=self.paginator.pages<if_stmt>page_num<le>0<or>page_num<g>len(pages)<block_start><return><await>self.send_error_message('Could not find the page you were looking for')<block_end>destination=self.get_destination()<line_sep><await>destination.send(pages[page_num-1])<block_end><def_stmt>get_destination self<arrow>Union[Friend ClientParty]<block_start>ctx=self.context<if_stmt>self.dm_help<is><true><block_start><return>ctx.author<block_end><elif_stmt>(self.dm_help<is><none><and>len(self.paginator)<g>self.dm_help_threshold)<block_start><return>ctx.author<block_end><else_stmt><block_start><return>ctx.get_destination()<block_end><block_end><async_keyword><def_stmt>prepare_help_command self ctx:Context command:Command<arrow><none><block_start>self.paginator.clear()<line_sep><await>super().prepare_help_command(ctx command)<block_end><def_stmt>construct_command_help self command:Command<arrow>List[str]<block_start>fmt={}<if_stmt>command.cog<block_start>fmt[self.cog_title]=command.cog.qualified_name<block_end>fmt[self.usage_title]=self.get_command_signature(command)<if_stmt>command.description<block_start>fmt[self.description_title]=command.description<block_end>result=[]<for_stmt>title,value fmt.items()<block_start>lines=self.construct_category(title value)<line_sep>result.extend(lines)<block_end><if_stmt>command.help<block_start>title=self.help_title<line_sep>value=command.help<line_sep>lines=self.construct_category(title value raw=<true>)<line_sep>result.extend(lines)<block_end><return>result<block_end><async_keyword><def_stmt>send_bot_help self page:int<arrow><none><block_start>ctx=self.context<line_sep>bot=ctx.bot<line_sep>no_category='\u200b{0.no_category}:'.format(self)<def_stmt>get_category command * no_category=no_category<block_start>cog=command.cog<line_sep><return>cog.qualified_name<if>cog<is><not><none><else>no_category<block_end>filtered=<await>self.filter_commands(bot.commands sort=<true> key=get_category)<line_sep>chunks=[]<line_sep>curr=[]<if_stmt>bot.description<block_start>parts=self.construct_category(self.description_title bot.description)<line_sep>curr.extend(parts)<block_end><for_stmt>command filtered<block_start>name=self.get_command_name(command)<line_sep>brief=command.brief<or>''<line_sep>lines=self.construct_category(name brief)<if_stmt>len(lines)+len(curr)<g>self.height-2<block_start>chunks.append(curr)<line_sep>curr=[]<block_end>curr.extend(lines)<block_end><if_stmt>curr<block_start>chunks.append(curr)<block_end>chunks_length=len(chunks)<for_stmt>c,chunk enumerate(chunks 1)<block_start>footer_fmt=self.get_bot_footer(c chunks_length)<or>''<line_sep>page_chunks=[self.construct_title(self.get_bot_header(c chunks_length)<or>'') *chunk self.construct_footer(footer_fmt.format(self.command_prefix self.invoked_with ))]<line_sep>self.paginator.add_page('\u200b\n'+'\n'.join(page_chunks))<block_end><await>self.send_page(page)<block_end><async_keyword><def_stmt>send_command_help self 
command:Command<arrow><none><block_start>result=self.construct_command_help(command)<line_sep>title=self.construct_title(self.get_command_header(command)<or>'')<line_sep>footer=self.construct_footer(self.get_command_footer(command)<or>'')<line_sep>self.paginator.add_page('\u200b\n'+'\n'.join([title *result footer]))<line_sep><await>self.send_pages()<block_end><async_keyword><def_stmt>send_group_help self group:Group<arrow><none><block_start>result=self.construct_command_help(group)<line_sep>filtered=<await>self.filter_commands(group.commands sort=<true>)<for_stmt>c,command enumerate(filtered)<block_start><if_stmt>c<eq>0<block_start>title=self.sub_commands_title<line_sep>result.append('\n'+self.construct_single_line(title))<block_end>name=self.get_sub_command_name(command)<line_sep>brief=command.brief<or>''<line_sep>lines=self.construct_category(name brief extra_indent=self.indent)<line_sep>result.extend(lines)<block_end>title=self.construct_title(self.get_group_header(group))<line_sep>footer=self.construct_footer('')<line_sep>self.paginator.add_page('\u200b\n'+'\n'.join([title *result footer]))<line_sep><await>self.send_pages()<block_end><async_keyword><def_stmt>send_cog_help self cog:Cog page:str<arrow><none><block_start>filtered=<await>self.filter_commands(cog.get_commands() sort=<true>)<line_sep>chunks=[]<line_sep>curr=[]<if_stmt>cog.description<block_start>parts=self.construct_category(self.description_title cog.description)<line_sep>curr.extend(parts)<block_end><for_stmt>c,command enumerate(filtered)<block_start><if_stmt>c<eq>0<block_start>title=self.commands_title<line_sep>pre='\n'<if>curr<else>''<line_sep>curr.append(pre+self.construct_single_line(title))<block_end>name=self.get_command_name(command)<line_sep>brief=command.brief<or>''<line_sep>lines=self.construct_category(name brief extra_indent=self.indent)<if_stmt>len(lines)+len(curr)<g>self.height-2<block_start>chunks.append(curr)<line_sep>curr=[]<block_end>curr.extend(lines)<block_end><if_stmt>curr<block_start>chunks.append(curr)<block_end>chunks_length=len(chunks)<for_stmt>c,chunk enumerate(chunks 1)<block_start>title=self.construct_title(self.get_cog_header(cog c chunks_length)<or>'')<line_sep>fmt=self.get_cog_footer(cog c chunks_length)<or>''<line_sep>footer=self.construct_footer(fmt)<line_sep>page_chunks=[title *chunk footer]<line_sep>self.paginator.add_page('\u200b\n'+'\n'.join(page_chunks))<block_end><await>self.send_page(page)<block_end><block_end>
<import_from_stmt>bs4 BeautifulSoup<import_stmt>requests<line_sep>url="https://github.com/Py-Contributors/awesomeScripts/blob/master/README.md"<line_sep>page=requests.get(url)<line_sep>pagetext=page.text<def_stmt>save_project <block_start>soup=BeautifulSoup(pagetext "lxml")<line_sep>table=soup.find("table")<line_sep>list_of_rows=[]<for_stmt>row table.findAll('tr')<block_start>list_of_cells=[]<for_stmt>cell row.findAll(["th" "td"])<block_start>text=cell.text<line_sep>list_of_cells.append(text)<block_end>list_of_rows.append(list_of_cells)<block_end>file=open("projects.csv" "w")<for_stmt>item list_of_rows<block_start>file.write(",".join(item))<line_sep>file.write("\n")<block_end>file.close()<block_end>
<import_from_future_stmt> absolute_import<import_stmt>torch<line_sep>#overrides <import_from_stmt>allennlp.modules.similarity_functions.dot_product DotProductSimilarity<import_from_stmt>allennlp.modules.similarity_functions.similarity_function SimilarityFunction<import_from_stmt>allennlp.modules.matrix_attention.matrix_attention MatrixAttention<class_stmt>LegacyMatrixAttention(MatrixAttention)<block_start>u""" The legacy implementation of ``MatrixAttention``. It should be considered deprecated as it uses much more memory than the newer specialized ``MatrixAttention`` modules. Parameters ---------- similarity_function: ``SimilarityFunction``, optional (default=``DotProductSimilarity``) The similarity function to use when computing the attention. """<def_stmt>__init__ self similarity_function=<none><block_start>super(LegacyMatrixAttention self).__init__()<line_sep>self._similarity_function=similarity_function<or>DotProductSimilarity()<block_end>#overrides <def_stmt>forward self matrix_1 matrix_2<block_start>tiled_matrix_1=matrix_1.unsqueeze(2).expand(matrix_1.size()[0] matrix_1.size()[1] matrix_2.size()[1] matrix_1.size()[2])<line_sep>tiled_matrix_2=matrix_2.unsqueeze(1).expand(matrix_2.size()[0] matrix_1.size()[1] matrix_2.size()[1] matrix_2.size()[2])<line_sep><return>self._similarity_function(tiled_matrix_1 tiled_matrix_2)<block_end><block_end>LegacyMatrixAttention=MatrixAttention.register(u"legacy")(LegacyMatrixAttention)<line_sep>
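A minimal shape check for the module above (a sketch, assuming `LegacyMatrixAttention` as defined above is importable): with the default `DotProductSimilarity`, the output is a matrix of pairwise scores between every row of the first input and every row of the second, which is why the tiled intermediate tensors make this implementation memory-hungry.

import torch

attention = LegacyMatrixAttention()          # defaults to DotProductSimilarity
matrix_1 = torch.randn(2, 5, 7)              # (batch_size, num_rows_1, embedding_dim)
matrix_2 = torch.randn(2, 9, 7)              # (batch_size, num_rows_2, embedding_dim)
print(attention(matrix_1, matrix_2).shape)   # torch.Size([2, 5, 9])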
# -*- coding: utf-8 -*- # Generated by Django 1.10.1 on 2016-09-08 10:41 <import_from_future_stmt> unicode_literals<import_from_stmt>django.db migrations models<class_stmt>Migration(migrations.Migration)<block_start>dependencies=[('core' '0001_initial') ]<line_sep>operations=[migrations.AlterField(model_name='document' name='applicant_id' field=models.IntegerField(db_index=<true> verbose_name='Applicant ID') ) migrations.AlterField(model_name='document' name='cnpj_cpf' field=models.CharField(db_index=<true> max_length=14 verbose_name='CNPJ or CPF') ) migrations.AlterField(model_name='document' name='congressperson_id' field=models.IntegerField(db_index=<true> verbose_name='Congressperson ID') ) migrations.AlterField(model_name='document' name='congressperson_name' field=models.CharField(max_length=128 verbose_name='Congressperson name') ) migrations.AlterField(model_name='document' name='document_id' field=models.IntegerField(db_index=<true> verbose_name='Document ID') ) migrations.AlterField(model_name='document' name='document_number' field=models.CharField(max_length=128 verbose_name='Document number') ) migrations.AlterField(model_name='document' name='document_type' field=models.IntegerField(db_index=<true> verbose_name='Document type') ) migrations.AlterField(model_name='document' name='document_value' field=models.DecimalField(db_index=<true> decimal_places=3 max_digits=10 verbose_name='Document value') ) migrations.AlterField(model_name='document' name='leg_of_the_trip' field=models.CharField(max_length=128 verbose_name='Leg of the trip') ) migrations.AlterField(model_name='document' name='month' field=models.IntegerField(db_index=<true> verbose_name='Month') ) migrations.AlterField(model_name='document' name='net_value' field=models.DecimalField(db_index=<true> decimal_places=3 max_digits=10 verbose_name='Net value') ) migrations.AlterField(model_name='document' name='party' field=models.CharField(db_index=<true> max_length=16 verbose_name='Party') ) migrations.AlterField(model_name='document' name='passenger' field=models.CharField(max_length=128 verbose_name='Passenger') ) migrations.AlterField(model_name='document' name='reimbursement_number' field=models.IntegerField(db_index=<true> verbose_name='Reimbursement number') ) migrations.AlterField(model_name='document' name='reimbursement_value' field=models.DecimalField(db_index=<true> decimal_places=3 max_digits=10 verbose_name='Reimbusrsement value') ) migrations.AlterField(model_name='document' name='remark_value' field=models.DecimalField(db_index=<true> decimal_places=3 max_digits=10 verbose_name='Remark value') ) migrations.AlterField(model_name='document' name='subquota_description' field=models.CharField(max_length=128 verbose_name='Subquota descrition') ) migrations.AlterField(model_name='document' name='subquota_group_description' field=models.CharField(max_length=128 verbose_name='Subquota group description') ) migrations.AlterField(model_name='document' name='subquota_group_id' field=models.IntegerField(db_index=<true> verbose_name='Subquota group ID') ) migrations.AlterField(model_name='document' name='subquota_number' field=models.IntegerField(db_index=<true> verbose_name='Subquote ID') ) migrations.AlterField(model_name='document' name='term' field=models.IntegerField(db_index=<true> verbose_name='Term') ) migrations.AlterField(model_name='document' name='year' field=models.IntegerField(db_index=<true> verbose_name='Year') ) ]<block_end>
# Copyright (C) 2018-2022 Intel Corporation # SPDX-License-Identifier: Apache-2.0 <import_from_stmt>openvino.tools.mo.utils.graph Node<import_from_stmt>openvino.tools.mo.utils.ir_reader.extender Extender<class_stmt>VariadicSplit_extender(Extender)<block_start>op='VariadicSplit'<line_sep>@staticmethod<def_stmt>extend op:Node<block_start>op['out_ports_count']=len(op.ports)<block_end><block_end>
<class_stmt>Solution<block_start><def_stmt>findComplement self num:int<arrow>int<block_start>res=i=0<while_stmt>num<block_start><if_stmt><not>num&1<block_start>res<augor>1<lshift>i<block_end>num=num<rshift>1<line_sep>i<augadd>1<block_end><return>res<block_end><block_end><class_stmt>Solution<block_start><def_stmt>findComplement self num:int<arrow>int<block_start>i=1<while_stmt>i<le>num<block_start>i=i<lshift>1<block_end><return>(i-1)^num<block_end><block_end><class_stmt>Solution<block_start><def_stmt>findComplement self num:int<arrow>int<block_start>copy=num<line_sep>i=0<while_stmt>copy<ne>0<block_start>copy<augrshift>1<line_sep>num<augxor>(1<lshift>i)<line_sep>i<augadd>1<line_sep><block_end><return>num<line_sep><block_end><block_end><class_stmt>Solution<block_start><def_stmt>findComplement self num:int<arrow>int<block_start>mask=1<while_stmt>(mask<l>num)<block_start>mask=(mask<lshift>1)|1<block_end><return>~num&mask<block_end><block_end><class_stmt>Solution<block_start><def_stmt>findComplement self num:int<arrow>int<block_start>n=0<while_stmt>(n<l>num)<block_start>n=(n<lshift>1)|1<line_sep><block_end><return>n-num<line_sep><block_end><block_end>
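# Quick sanity check (added for illustration): every variant computes the complement within the
# number's own bit length, e.g. 5 (0b101) -> 2 (0b010). Note that only the last class definition
# above remains bound to the name Solution at module level.
<for_stmt>num (1 2 5 10)<block_start><assert_stmt>Solution().findComplement(num)<eq>(1<lshift>num.bit_length())-1-num<block_end>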
<import_from_stmt>abc ABC<import_stmt>copy<import_from_stmt>collections OrderedDict<import_from_stmt>typing Any Union Optional Dict List<import_stmt>torch<import_from_stmt>.agent_plugin register_plugin<class_stmt>BaseAgent(ABC)<block_start>r""" Overview: the base agent class Interfaces: __init__, forward, mode, state_dict, load_state_dict, reset """<def_stmt>__init__ self model:torch.nn.Module plugin_cfg:Union[OrderedDict <none>]<arrow><none><block_start>r""" Overview: init the model and register plugins Arguments: - model (:obj:`torch.nn.Module`): the model of the agent - plugin_cfg (:obj:`Union[OrderedDict, None]`): the plugin config to register """<line_sep>self._model=model<line_sep>self._plugin_cfg=plugin_cfg<line_sep>register_plugin(self plugin_cfg)<block_end><def_stmt>forward self data:Any param:Optional[dict]=<none><arrow>Any<block_start>r""" Overview: forward method will call the foward method of the agent's model Arguments: - data (:obj:`Any`): the input data - param (:obj:`dict` or None): the optinal parameters, default set to None Returns: - output (:obj:`Any`): the output calculated by model """<if_stmt>param<is><not><none><block_start><return>self._model(data **param)<block_end><else_stmt><block_start><return>self._model(data)<block_end><block_end><def_stmt>mode self train:bool<arrow><none><block_start>r""" Overview: call the model's function accordingly Arguments: - train (:obj:`bool`): whether to call the train method or eval method """<if_stmt>train<block_start>self._model.train()<block_end><else_stmt><block_start>self._model.eval()<block_end><block_end>@property<def_stmt>model self<arrow>torch.nn.Module<block_start><return>self._model<block_end>@model.setter<def_stmt>model self _model:torch.nn.Module<arrow><none><block_start>self._model=_model<block_end><def_stmt>state_dict self<arrow>dict<block_start>r""" Overview: return the state_dict Returns: - ret (:obj:`dict`): the returned state_dict, while the ret['model'] is the model's state_dict """<line_sep><return>{'model':self._model.state_dict()}<block_end><def_stmt>load_state_dict self state_dict:dict<arrow><none><block_start>r""" Overview: load the state_dict to model Arguments: - state_dict (:obj:`dict`): the input state_dict the model will load """<line_sep>self._model.load_state_dict(state_dict['model'])<block_end><def_stmt>reset self<arrow><none><block_start><pass><block_end><block_end>model_plugin_cfg_set=set(['main' 'target' 'teacher'])<class_stmt>AgentAggregator(object)<block_start>r""" Overview: the AgentAggregator helps to build an agent according to the given input Interfaces: __init__, __getattr__ """<def_stmt>__init__ self agent_type:type model:Union[torch.nn.Module List[torch.nn.Module]] plugin_cfg:Dict[str OrderedDict]<arrow><none><block_start>r""" Overview: __init__ of the AgentAggregator will get a class with multi agents in ._agent Arguments: - agent_type (:obj:`type`): the based class type of the agents in ._agent - model (:obj:`torch.nn.Module`): the model of agents - plugin_cfg (:obj:`Dict[str, OrderedDict])`): the plugin configs of agents """<assert_stmt>issubclass(agent_type BaseAgent)<assert_stmt>set(plugin_cfg.keys()).issubset(model_plugin_cfg_set) '{}-{}'.format(set(plugin_cfg.keys()) model_plugin_cfg_set)<if_stmt>isinstance(model torch.nn.Module)<block_start><if_stmt>len(plugin_cfg)<eq>1<block_start>model=[model]<block_end><else_stmt><block_start>model=[model]+[copy.deepcopy(model)<for>_ range(len(plugin_cfg)-1)]<block_end><block_end>self._agent={}<for_stmt>i,k 
enumerate(plugin_cfg)<block_start>self._agent[k]=agent_type(model[i] plugin_cfg[k])<block_end><block_end><def_stmt>__getattr__ self key:str<arrow>Any<block_start>r""" Overview: get the attribute specified by ``key`` Arguments: - key (:obj:`str`): the key to query Returns: - ret (:obj:`Any`): the returned attribute .. note:: in usage, if you want to get the attribute "attr" in agent[k], you should query k + "_" + "attr" """<if_stmt>len(self._agent)<eq>1<block_start><return>getattr(self._agent['main'] key)<block_end><else_stmt><block_start>name='main'<for_stmt>k self._agent<block_start><if_stmt>key.startswith(k)<block_start>name=k<line_sep>key=key.split(k+'_')[1]<line_sep><break><block_end><block_end><return>getattr(self._agent[name] key)<block_end><block_end><block_end>
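# Usage sketch (illustrative only): with more than one sub-agent, attribute access is routed by
# name prefix, so agg.target_forward(data) resolves to forward() on the 'target' agent while
# unprefixed names go to 'main'. The model and the empty plugin configs are placeholders and
# assume register_plugin treats an empty config as a no-op.
<import_stmt>torch<line_sep>model=torch.nn.Linear(4 2)<line_sep>agg=AgentAggregator(BaseAgent model {'main':OrderedDict() 'target':OrderedDict()})<line_sep>out=agg.target_forward(torch.randn(3 4))<line_sep>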
<import_from_stmt>conans AutoToolsBuildEnvironment ConanFile tools<import_from_stmt>conans.errors ConanInvalidConfiguration<import_from_stmt>contextlib contextmanager<import_stmt>os<import_stmt>textwrap<line_sep>required_conan_version=">=1.33.0"<class_stmt>CrashpadConan(ConanFile)<block_start>name="crashpad"<line_sep>description="Crashpad is a crash-reporting system."<line_sep>url="https://github.com/conan-io/conan-center-index"<line_sep>topics=("conan" "crashpad" "crash" "error" "stacktrace" "collecting" "reporting")<line_sep>license="Apache-2.0"<line_sep>homepage="https://chromium.googlesource.com/crashpad/crashpad/+/master/README.md"<line_sep>provides="crashpad" "mini_chromium"<line_sep>settings="os" "arch" "compiler" "build_type"<line_sep>options={"fPIC":[<true> <false>] "http_transport":["libcurl" "socket" <none>] "with_tls":["openssl" <false>] }<line_sep>default_options={"fPIC":<true> "http_transport":<none> "with_tls":"openssl" }<line_sep>exports_sources="patches/*"<line_sep>@property<def_stmt>_source_subfolder self<block_start><return>"source_subfolder"<block_end><def_stmt>_minimum_compiler_cxx14 self<block_start><return>{"apple-clang":10 "gcc":5 "clang":"3.9" "Visual Studio":14 }.get(str(self.settings.compiler))<block_end><def_stmt>config_options self<block_start><if_stmt>self.settings.os<eq>"Windows"<block_start><del_stmt>self.options.fPIC<block_end><if_stmt>self.settings.os<in>("Linux" "FreeBSD")<block_start>self.options.http_transport="libcurl"<block_end><elif_stmt>self.settings.os<eq>"Android"<block_start>self.options.http_transport="socket"<block_end><block_end><def_stmt>build_requirements self<block_start>self.build_requires("ninja/1.10.2")<line_sep>self.build_requires("gn/cci.20210429")<block_end><def_stmt>requirements self# FIXME: use mini_chromium conan package instead of embedded package (if possible) <block_start>self.requires("zlib/1.2.11")<if_stmt>self.settings.os<in>("Linux" "FreeBSD")<block_start>self.requires("linux-syscall-support/cci.20200813")<block_end><if_stmt>self.options.http_transport<ne>"socket"<block_start><del_stmt>self.options.with_tls<block_end><if_stmt>self.options.http_transport<eq>"libcurl"<block_start>self.requires("libcurl/7.75.0")<block_end><if_stmt>self.options.get_safe("with_tls")<eq>"openssl"<block_start>self.requires("openssl/1.1.1k")<block_end><block_end><def_stmt>validate self<block_start><if_stmt>self.settings.compiler<eq>"Visual Studio"<block_start><if_stmt>self.options.http_transport<in>("libcurl" "socket")<block_start><raise>ConanInvalidConfiguration("http_transport={} is not valid when building with Visual Studio".format(self.options.http_transport))<block_end><block_end><if_stmt>self.options.http_transport<eq>"libcurl"<block_start><if_stmt><not>self.options["libcurl"].shared# FIXME: is this true? 
<block_start>self.output.warn("crashpad needs a shared libcurl library")<block_end><block_end>min_compiler_version=self._minimum_compiler_cxx14()<if_stmt>min_compiler_version<block_start><if_stmt>tools.Version(self.settings.compiler.version)<l>min_compiler_version<block_start><raise>ConanInvalidConfiguration("crashpad needs a c++14 capable compiler, version >= {}".format(min_compiler_version))<block_end><block_end><else_stmt><block_start>self.output.warn("This recipe does not know about the current compiler and assumes it has sufficient c++14 supports.")<block_end><if_stmt>self.settings.compiler.cppstd<block_start>tools.check_min_cppstd(self 14)<block_end><block_end><def_stmt>source self<block_start>tools.get(**self.conan_data["sources"][self.version]["url"]["crashpad"] destination=self._source_subfolder strip_root=<true>)<line_sep>tools.get(**self.conan_data["sources"][self.version]["url"]["mini_chromium"] destination=os.path.join(self._source_subfolder "third_party" "mini_chromium" "mini_chromium") strip_root=<true>)<block_end>@property<def_stmt>_gn_os self<block_start><if_stmt>tools.is_apple_os(self.settings.os)<block_start><if_stmt>self.settings.os<eq>"Macos"<block_start><return>"mac"<block_end><else_stmt><block_start><return>"ios"<block_end><block_end><return>{"Windows":"win" }.get(str(self.settings.os) str(self.settings.os).lower())<block_end>@property<def_stmt>_gn_arch self<block_start><return>{"x86_64":"x64" "armv8":"aarch64" "x86":"x86" }.get(str(self.settings.arch) str(self.settings.arch))<block_end>@contextmanager<def_stmt>_build_context self<block_start><if_stmt>self.settings.compiler<eq>"Visual Studio"<block_start><with_stmt>tools.vcvars(self.settings)<block_start><yield><block_end><block_end><else_stmt><block_start>env_defaults={}<if_stmt>self.settings.compiler<eq>"gcc"<block_start>env_defaults.update({"CC":"gcc" "CXX":"g++" "LD":"g++" })<block_end><elif_stmt>self.settings.compiler<in>("clang" "apple-clang")<block_start>env_defaults.update({"CC":"clang" "CXX":"clang++" "LD":"clang++" })<block_end>env={}<for_stmt>key,value env_defaults.items()<block_start><if_stmt><not>tools.get_env(key)<block_start>env[key]=value<block_end><block_end><with_stmt>tools.environment_append(env)<block_start><yield><block_end><block_end><block_end>@property<def_stmt>_http_transport_impl self<block_start><if_stmt>str(self.options.http_transport)<eq>"None"<block_start><return>""<block_end><else_stmt><block_start><return>str(self.options.http_transport)<block_end><block_end><def_stmt>build self<block_start><for_stmt>patch self.conan_data.get("patches" {}).get(self.version [])<block_start>tools.patch(**patch)<block_end><if_stmt>self.settings.compiler<eq>"Visual Studio"<block_start>tools.replace_in_file(os.path.join(self._source_subfolder "third_party" "zlib" "BUILD.gn") "libs = [ \"z\" ]" "libs = [ {} ]".format(", ".join("\"{}.lib\"".format(l)<for>l self.deps_cpp_info["zlib"].libs)))<block_end><if_stmt>self.settings.compiler<eq>"gcc"<block_start>toolchain_path=os.path.join(self._source_subfolder "third_party" "mini_chromium" "mini_chromium" "build" "config" "BUILD.gn")<line_sep># Remove gcc-incompatible compiler arguments <for_stmt>comp_arg ("-Wheader-hygiene" "-Wnewline-eof" "-Wstring-conversion" "-Wexit-time-destructors" "-fobjc-call-cxx-cdtors" "-Wextra-semi" "-Wimplicit-fallthrough")<block_start>tools.replace_in_file(toolchain_path "\"{}\"".format(comp_arg) "\"\"")<block_end><block_end>autotools=AutoToolsBuildEnvironment(self)<line_sep>extra_cflags=autotools.flags+["-D{}".format(d)<for>d 
autotools.defines]<line_sep>extra_cflags_c=[]<line_sep>extra_cflags_cc=autotools.cxx_flags<line_sep>extra_ldflags=autotools.link_flags<if_stmt>self.options.get_safe("fPIC")<block_start>extra_cflags.append("-fPIC")<block_end>extra_cflags.extend("-I {}".format(inc)<for>inc autotools.include_paths)<line_sep>extra_ldflags.extend("-{}{}".format("LIBPATH:"<if>self.settings.compiler<eq>"Visual Studio"<else>"L " libdir)<for>libdir autotools.library_paths)<if_stmt>self.settings.compiler<eq>"clang"<block_start><if_stmt>self.settings.compiler.get_safe("libcxx")<block_start>stdlib={"libstdc++11":"libstdc++" }.get(str(self.settings.compiler.libcxx) str(self.settings.compiler.libcxx))<line_sep>extra_cflags_cc.append("-stdlib={}".format(stdlib))<line_sep>extra_ldflags.append("-stdlib={}".format(stdlib))<block_end><block_end>gn_args=["host_os=\\\"{}\\\"".format(self._gn_os) "host_cpu=\\\"{}\\\"".format(self._gn_arch) "is_debug={}".format(str(self.settings.build_type<eq>"Debug").lower()) "crashpad_http_transport_impl=\\\"{}\\\"".format(self._http_transport_impl) "crashpad_use_boringssl_for_http_transport_socket={}".format(str(self.options.get_safe("with_tls" <false>)<ne><false>).lower()) "extra_cflags=\\\"{}\\\"".format(" ".join(extra_cflags)) "extra_cflags_c=\\\"{}\\\"".format(" ".join(extra_cflags_c)) "extra_cflags_cc=\\\"{}\\\"".format(" ".join(extra_cflags_cc)) "extra_ldflags=\\\"{}\\\"".format(" ".join(extra_ldflags)) ]<with_stmt>tools.chdir(self._source_subfolder)<block_start><with_stmt>self._build_context()<block_start>self.run("gn gen out/Default --args=\"{}\"".format(" ".join(gn_args)) run_environment=<true>)<line_sep>targets=["client" "minidump" "crashpad_handler" "snapshot"]<if_stmt>self.settings.os<eq>"Windows"<block_start>targets.append("crashpad_handler_com")<block_end>self.run("ninja -C out/Default {targets} -j{parallel}".format(targets=" ".join(targets) parallel=tools.cpu_count()) run_environment=<true>)<block_end><block_end><def_stmt>lib_filename name<block_start>prefix,suffix=("" ".lib")<if>self.settings.compiler<eq>"Visual Studio"<else>("lib" ".a")<line_sep><return>"{}{}{}".format(prefix name suffix)<block_end>tools.rename(os.path.join(self._source_subfolder "out" "Default" "obj" "client" lib_filename("common")) os.path.join(self._source_subfolder "out" "Default" "obj" "client" lib_filename("client_common")))<line_sep>tools.rename(os.path.join(self._source_subfolder "out" "Default" "obj" "handler" lib_filename("common")) os.path.join(self._source_subfolder "out" "Default" "obj" "handler" lib_filename("handler_common")))<block_end><def_stmt>package self<block_start>self.copy("LICENSE" src=self._source_subfolder dst="licenses")<line_sep>self.copy("*.h" src=os.path.join(self._source_subfolder "client") dst=os.path.join("include" "client"))<line_sep>self.copy("*.h" src=os.path.join(self._source_subfolder "util") dst=os.path.join("include" "util"))<line_sep>self.copy("*.h" src=os.path.join(self._source_subfolder "third_party" "mini_chromium" "mini_chromium" "base") dst=os.path.join("include" "base"))<line_sep>self.copy("*.h" src=os.path.join(self._source_subfolder "third_party" "mini_chromium" "mini_chromium" "build") dst=os.path.join("include" "build"))<line_sep>self.copy("*.h" src=os.path.join(self._source_subfolder "out" "Default" "gen" "build") dst=os.path.join("include" "build"))<line_sep>self.copy("*.a" src=os.path.join(self._source_subfolder "out" "Default") dst="lib" keep_path=<false>)<line_sep>self.copy("*.lib" src=os.path.join(self._source_subfolder "out" "Default") dst="lib" 
keep_path=<false>)<line_sep>self.copy("crashpad_handler" src=os.path.join(self._source_subfolder "out" "Default") dst="bin" keep_path=<false>)<line_sep>self.copy("crashpad_handler.exe" src=os.path.join(self._source_subfolder "out" "Default") dst="bin" keep_path=<false>)<line_sep>self.copy("crashpad_handler_com.com" src=os.path.join(self._source_subfolder "out" "Default") dst="bin" keep_path=<false>)<if_stmt>self.settings.os<eq>"Windows"<block_start>tools.rename(os.path.join(self.package_folder "bin" "crashpad_handler_com.com") os.path.join(self.package_folder "bin" "crashpad_handler.com"))<block_end># Remove accidentally copied libraries. These are used by the executables, not by the libraries. tools.remove_files_by_mask(os.path.join(self.package_folder "lib") "*getopt*")<line_sep>tools.save(os.path.join(self.package_folder "lib" "cmake" "crashpad-cxx.cmake") textwrap.dedent("""\ if(TARGET crashpad::mini_chromium_base) target_compile_features(crashpad::mini_chromium_base INTERFACE cxx_std_14) endif() """))<block_end><def_stmt>package_info self<block_start>self.cpp_info.components["mini_chromium_base"].libs=["base"]<line_sep>self.cpp_info.components["mini_chromium_base"].build_modules=[os.path.join(self.package_folder "lib" "cmake" "crashpad-cxx.cmake")]<line_sep>self.cpp_info.components["mini_chromium_base"].builddirs=[os.path.join("lib" "cmake")]<if_stmt>tools.is_apple_os(self.settings.os)<block_start><if_stmt>self.settings.os<eq>"Macos"<block_start>self.cpp_info.components["mini_chromium_base"].frameworks=["ApplicationServices" "CoreFoundation" "Foundation" "IOKit" "Security"]<block_end><else_stmt># iOS <block_start>self.cpp_info.components["mini_chromium_base"].frameworks=["CoreFoundation" "CoreGraphics" "CoreText" "Foundation" "Security"]<block_end><block_end>self.cpp_info.components["util"].libs=["util"]<line_sep>self.cpp_info.components["util"].requires=["mini_chromium_base" "zlib::zlib"]<if_stmt>tools.is_apple_os(self.settings.os)<block_start>self.cpp_info.components["util"].libs.append("mig_output")<block_end><if_stmt>self.settings.os<in>("Linux" "FreeBSD")<block_start>self.cpp_info.components["util"].libs.append("compat")<line_sep>self.cpp_info.components["util"].requires.append("linux-syscall-support::linux-syscall-support")<block_end><if_stmt>self.settings.os<eq>"Windows"<block_start>self.cpp_info.components["util"].system_libs.extend(["dbghelp" "rpcrt4"])<block_end><if_stmt>self.options.http_transport<eq>"libcurl"<block_start>self.cpp_info.components["util"].requires.append("libcurl::libcurl")<block_end><elif_stmt>self.options.get_safe("with_tls")<eq>"openssl"<block_start>self.cpp_info.components["util"].requires.append("openssl::openssl")<block_end><if_stmt>self.settings.os<eq>"Macos"<block_start>self.cpp_info.components["util"].frameworks.extend(["CoreFoundation" "Foundation" "IOKit"])<line_sep>self.cpp_info.components["util"].system_libs.append("bsm")<block_end>self.cpp_info.components["client_common"].libs=["client_common"]<line_sep>self.cpp_info.components["client_common"].requires=["util" "mini_chromium_base"]<line_sep>self.cpp_info.components["client"].libs=["client"]<line_sep>self.cpp_info.components["client"].requires=["util" "mini_chromium_base" 
"client_common"]<if_stmt>self.settings.os<eq>"Windows"<block_start>self.cpp_info.components["client"].system_libs.append("rpcrt4")<block_end>self.cpp_info.components["context"].libs=["context"]<line_sep>self.cpp_info.components["context"].requires=["util"]<line_sep>self.cpp_info.components["snapshot"].libs=["snapshot"]<line_sep>self.cpp_info.components["snapshot"].requires=["client_common" "mini_chromium_base" "util"]<if_stmt>tools.is_apple_os(self.settings.os)<block_start>self.cpp_info.components["snapshot"].frameworks.extend(["OpenCL"])<block_end>self.cpp_info.components["format"].libs=["format"]<line_sep>self.cpp_info.components["format"].requires=["snapshot" "mini_chromium_base" "util"]<line_sep>self.cpp_info.components["minidump"].libs=["minidump"]<line_sep>self.cpp_info.components["minidump"].requires=["snapshot" "mini_chromium_base" "util"]<line_sep>self.cpp_info.components["handler_common"].libs=["handler_common"]<line_sep>self.cpp_info.components["handler_common"].requires=["client_common" "snapshot" "util"]<line_sep>self.cpp_info.components["handler"].libs=["handler"]<line_sep>self.cpp_info.components["handler"].requires=["client" "util" "handler_common" "minidump" "snapshot"]<line_sep>bin_path=os.path.join(self.package_folder "bin")<line_sep>self.output.info("Appending PATH environment variable: {}".format(bin_path))<line_sep>self.env_info.PATH.append(bin_path)<block_end><block_end>
<import_stmt>music21<import_stmt>torch<import_stmt>numpy<as>np<try_stmt><block_start><import_from_stmt>apex.optimizers FusedAdam<block_end><except_stmt><block_start><import_from_stmt>torch.optim Adam<as>FusedAdam<block_end><import_from_stmt>fastai.distributed *<import_from_stmt>fastai.callbacks SaveModelCallback<import_from_stmt>fastai.text.models.transformer *<import_stmt>sys<line_sep>sys.path.insert(0 '..')<import_from_stmt>musicautobot.music_transformer *<import_stmt>argparse<line_sep>parser=argparse.ArgumentParser()<line_sep>parser.add_argument('--path' type=str default='../data/numpy/')<line_sep>parser.add_argument('--data_file' type=str default='musicitem_data_save.pkl')<line_sep>parser.add_argument('--save' type=str default='first_run')<line_sep>parser.add_argument('--load' type=str default=<none>)<line_sep>parser.add_argument("--local_rank" type=int default=0)<line_sep>parser.add_argument("--batch_size" type=int default=12)<line_sep>parser.add_argument("--mem_len" type=int default=512)<line_sep>parser.add_argument("--bptt" type=int default=512)<line_sep>parser.add_argument("--num_workers" type=int default=1)<line_sep>parser.add_argument('--half' action='store_true' help='Use half precision')<line_sep>parser.add_argument('--lamb' action='store_true' help='Use lamb optimizer')<line_sep>parser.add_argument('--wd' type=float default=1e-3 help='weight decay for adam')<line_sep>parser.add_argument('--epochs' type=int default=5 help='num epochs')<line_sep>parser.add_argument('--lr' type=float default=1e-3 help='learning rate')<line_sep>parser.add_argument('--div_factor' type=int default=10 help='learning rate div factor')<line_sep>parser.add_argument('--config' type=str default='default_config' help='serve.py config name')<line_sep>parser.add_argument('--no_transpose' action='store_true' help='No transpose data augmentation')<line_sep>parser.add_argument('--parallel' action='store_true' help='Run in dataparallel')<line_sep>parser.add_argument('--mask_steps' type=int default=1 help='Attention mask - max number of random steps. 
Basically teacher forcing')<line_sep>args=parser.parse_args()<line_sep>is_distributed=num_distrib()<g>0<if_stmt>args.local_rank<ne>0<block_start>f=open('/dev/null' 'w')<line_sep>sys.stdout=f<block_end><if_stmt>is_distributed<block_start>torch.cuda.set_device(args.local_rank)<line_sep>torch.distributed.init_process_group(backend='nccl' init_method='env://')<block_end>path=Path(args.path)<import_from_stmt>musicautobot config<line_sep>config=getattr(config args.config)()<line_sep>config['encode_position']=<true><line_sep>config['mask_steps']=args.mask_steps<line_sep>transpose_range=<none><if>args.no_transpose<else>(0 12)<line_sep>data=load_data(path args.data_file encode_position=config['encode_position'] dl_tfms=[batch_position_tfm] bs=args.batch_size bptt=args.bptt transpose_range=transpose_range num_workers=args.num_workers)<line_sep>eps=1e-2<if>args.half<else>1e-6<line_sep>opt_func=partial(FusedAdam betas=(0.9 0.99) eps=eps)<if_stmt>args.lamb<block_start><import_from_stmt>musicautobot.utils.lamb Lamb<line_sep>opt_func=partial(Lamb eps=eps)<block_end>load_path=path/args.load<if>args.load<else><none><line_sep>learn=music_model_learner(data config=config drop_mult=1.5 opt_func=opt_func pretrained_path=load_path)<if_stmt><not>args.half<block_start>learn.clip_grad(1.0)<block_end><if_stmt>args.save<block_start>save_path=path/learn.model_dir/args.save<line_sep>save_path.parent.mkdir(parents=<true> exist_ok=<true>)<block_end><if_stmt>args.half<block_start>learn=learn.to_fp16(clip=1.0 dynamic=<true> max_scale=2<power>18)<block_end><if_stmt>is_distributed<block_start>learn=learn.to_distributed(args.local_rank cache_dir=path/'dist_logs')<block_end><if_stmt>args.parallel<block_start>learn=learn.to_parallel()<block_end><if_stmt>args.local_rank<eq>0<block_start>learn.callbacks.append(SaveModelCallback(learn name=f'{args.save}_best'))<block_end>learn.fit_one_cycle(args.epochs args.lr div_factor=args.div_factor pct_start=0.2 final_div=200 wd=args.wd)<if_stmt>args.local_rank<eq>0<block_start>learn.save(f'{args.save}' config=config)<block_end>
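# Example invocation (illustrative only; the script name, paths and hyper-parameters are not taken
# from this repository). The script reads --local_rank, so it can be driven by the standard
# torch.distributed launcher:
#   python -m torch.distributed.launch --nproc_per_node=2 train.py --path ../data/numpy/ \
#       --data_file musicitem_data_save.pkl --batch_size 16 --bptt 512 --half --epochs 10 \
#       --lr 1e-4 --save my_run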
"""Helpers for bleparser"""<import_from_stmt>uuid UUID<def_stmt>to_uuid uuid:str<arrow>str<block_start>"""Return formatted UUID"""<line_sep><return>str(UUID(''.join(f'{i:02X}'<for>i uuid)))<block_end><def_stmt>to_mac addr:str<arrow>str<block_start>"""Return formatted MAC address"""<line_sep><return>':'.join(f'{i:02X}'<for>i addr)<block_end><def_stmt>to_unformatted_mac addr:int<block_start>"""Return unformatted MAC address"""<line_sep><return>''.join(f'{i:02X}'<for>i addr[:])<block_end>
<import_from_stmt>torchnlp._third_party.weighted_random_sampler WeightedRandomSampler<import_from_stmt>torchnlp.utils identity<class_stmt>BalancedSampler(WeightedRandomSampler)<block_start>""" Weighted sampler with respect for an element's class. Args: data (iterable) get_class (callable, optional): Get the class of an item relative to the entire dataset. get_weight (callable, optional): Define a weight for each item other than one. kwargs: Additional key word arguments passed onto `WeightedRandomSampler`. Example: >>> from torchnlp.samplers import DeterministicSampler >>> >>> data = ['a', 'b', 'c'] + ['c'] * 100 >>> sampler = BalancedSampler(data, num_samples=3) >>> sampler = DeterministicSampler(sampler, random_seed=12) >>> [data[i] for i in sampler] ['c', 'b', 'a'] """<def_stmt>__init__ self data_source get_class=identity get_weight=<lambda>x:1 **kwargs<block_start>classified=[get_class(item)<for>item data_source]<line_sep>weighted=[float(get_weight(item))<for>item data_source]<line_sep>class_totals={k:sum([w<for>c,w zip(classified weighted)<if>k<eq>c])<for>k set(classified)}<line_sep>weights=[w/class_totals[c]<if>w<g>0<else>0.0<for>c,w zip(classified weighted)]<line_sep>super().__init__(weights=weights **kwargs)<block_end><block_end>
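# Sketch (illustrative data): balancing a labelled toy dataset by its second tuple element, so
# the rare class 'b' is drawn about as often as the frequent class 'a'.
data=[('x1' 'a') ('x2' 'a') ('x3' 'a') ('x4' 'b')]<line_sep>sampler=BalancedSampler(data get_class=<lambda>item:item[1] num_samples=4)<line_sep>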
<import_stmt>os<import_stmt>librosa<import_stmt>subprocess<import_stmt>tempfile<import_stmt>io<import_stmt>pysrt<import_from_stmt>pysrt SubRipTime<import_stmt>string<import_stmt>random<import_stmt>chardet<import_stmt>re<import_from_stmt>datetime timedelta<import_stmt>numpy<as>np<import_stmt>sklearn<import_from_stmt>.ffmpeg Transcode<import_from_stmt>.log logger<class_stmt>Media<block_start>""" Media class represents a media file on disk for which the content can be analyzed and retrieved. """<line_sep># List of supported media formats FORMATS=['.mkv' '.mp4' '.wmv' '.avi' '.flv']<line_sep># The frequency of the generated audio FREQ=16000<line_sep># The number of coefficients to extract from the mfcc N_MFCC=13<line_sep># The number of samples in each mfcc coefficient HOP_LEN=512.0<line_sep># The length (seconds) of each item in the mfcc analysis LEN_MFCC=HOP_LEN/FREQ<def_stmt>__init__ self filepath subtitles=<none><block_start>prefix,ext=os.path.splitext(filepath)<if_stmt>ext<eq>'.srt'<block_start><return>self.from_srt(filepath)<block_end><if_stmt><not>ext<block_start><raise>ValueError('unknown file: "{}"'.format(filepath))<block_end><if_stmt>ext<not><in>Media.FORMATS<block_start><raise>ValueError('filetype {} not supported: "{}"'.format(ext filepath))<block_end>self.__subtitles=subtitles<line_sep>self.filepath=os.path.abspath(filepath)<line_sep>self.filename=os.path.basename(prefix)<line_sep>self.extension=ext<line_sep>self.offset=timedelta()<block_end><def_stmt>from_srt self filepath<block_start>prefix,ext=os.path.splitext(filepath)<if_stmt>ext<ne>'.srt'<block_start><raise>ValueError('filetype must be .srt format')<block_end>prefix=os.path.basename(re.sub(r'\.\w\w$' '' prefix))<line_sep>dir=os.path.dirname(filepath)<for_stmt>f os.listdir(dir)<block_start>_,ext=os.path.splitext(f)<if_stmt>f.startswith(prefix)<and>ext<in>Media.FORMATS<block_start><return>self.__init__(os.path.join(dir f) subtitles=[filepath])<block_end><block_end><raise>ValueError('no media for subtitle: "{}"'.format(filepath))<block_end><def_stmt>subtitles self<block_start><if_stmt>self.__subtitles<is><not><none><block_start><for_stmt>s self.__subtitles<block_start><yield>Subtitle(self s)<block_end><block_end><else_stmt><block_start>dir=os.path.dirname(self.filepath)<for_stmt>f os.listdir(dir)<block_start><if_stmt>f.endswith('.srt')<and>f.startswith(self.filename)<block_start><yield>Subtitle(self os.path.join(dir f))<block_end><block_end><block_end><block_end><def_stmt>mfcc self duration=60<times>15 seek=<true><block_start>transcode=Transcode(self.filepath duration=duration seek=seek)<line_sep>self.offset=transcode.start<line_sep>print("Transcoding...")<line_sep>transcode.run()<line_sep>y,sr=librosa.load(transcode.output sr=Media.FREQ)<line_sep>print("Analysing...")<line_sep>self.mfcc=librosa.feature.mfcc(y=y sr=sr hop_length=int(Media.HOP_LEN) n_mfcc=int(Media.N_MFCC))<line_sep>os.remove(transcode.output)<line_sep><return>self.mfcc<block_end><block_end><class_stmt>Subtitle<block_start>""" Subtitle class represnets an .srt file on disk and provides functionality to inspect and manipulate the subtitle content """<def_stmt>__init__ self media path<block_start>self.media=media<line_sep>self.path=path<line_sep>self.subs=pysrt.open(self.path encoding=self._find_encoding())<block_end><def_stmt>labels self subs=<none><block_start><if_stmt>self.media.mfcc<is><none><block_start><raise>RuntimeError("Must analyse mfcc before generating labels")<block_end>samples=len(self.media.mfcc[0])<line_sep>labels=np.zeros(samples)<for_stmt>sub 
self.subs<if>subs<is><none><else>subs<block_start>start=timeToPos(sub.start-self.offset())<line_sep>end=timeToPos(sub.end-self.offset())+1<for_stmt>i range(start end)<block_start><if_stmt>i<ge>0<and>i<l>len(labels)<block_start>labels[i]=1<block_end><block_end><block_end><return>labels<block_end><def_stmt>_find_encoding self<block_start>data=<none><with_stmt>open(self.path "rb")<as>f<block_start>data=f.read()<block_end>det=chardet.detect(data)<line_sep><return>det.get("encoding")<block_end><def_stmt>offset self<block_start>d=self.media.offset<line_sep>hours,remainder=divmod(d.seconds 3600)<line_sep>minutes,seconds=divmod(remainder 60)<line_sep><return>SubRipTime(hours=hours minutes=minutes seconds=seconds milliseconds=d.microseconds/1000)<block_end><def_stmt>logloss self pred actual margin=12<block_start>blocks=secondsToBlocks(margin)<line_sep>logloss=np.ones(blocks<times>2)<line_sep>indices=np.ones(blocks<times>2)<line_sep>nonzero=np.nonzero(actual)[0]<line_sep>begin=max(nonzero[0]-blocks 0)<line_sep>end=min(nonzero[-1]+blocks len(actual)-1)<line_sep>pred=pred[begin:end]<line_sep>actual=actual[begin:end]<for_stmt>i,offset enumerate(range(-blocks blocks))<block_start>snippet=np.roll(actual offset)<try_stmt><block_start>logloss[i]=sklearn.metrics.log_loss(snippet[blocks:-blocks] pred[blocks:-blocks])<block_end><except_stmt>(ValueError RuntimeWarning)<block_start><pass><block_end>indices[i]=offset<block_end><return>indices logloss<block_end><def_stmt>sync self net safe=<true> margin=12 plot=<true><block_start>secs=0.0<line_sep>labels=self.labels()<line_sep>mfcc=self.media.mfcc.T<line_sep>mfcc=mfcc[<ellipsis> np.newaxis]<line_sep>pred=net.predict(mfcc)<line_sep>x,y=self.logloss(pred labels margin=margin)<line_sep>accept=<true><if_stmt>safe<block_start>mean=np.mean(y)<line_sep>sd=np.std(y)<line_sep>accept=np.min(y)<l>mean-sd<block_end><if_stmt>accept<block_start>secs=blocksToSeconds(x[np.argmin(y)])<line_sep>print("Shift {} seconds:".format(secs))<line_sep>self.subs.shift(seconds=secs)<line_sep>self.subs.save(self.path encoding='utf-8')<if_stmt>secs<ne>0.0<block_start>logger.info('{}: {}s'.format(self.path secs))<block_end><block_end><if_stmt>plot<block_start>self.plot_logloss(x y)<block_end><return>secs<block_end><def_stmt>sync_all self net margin=16 plot=<true><block_start>secs=0.0<line_sep>mfcc=self.media.mfcc.T<line_sep>mfcc=mfcc[<ellipsis> np.newaxis]<line_sep>pred=net.predict(mfcc)<line_sep>print("Fitting...")<line_sep>self.__sync_all_rec(self.subs pred)<line_sep>self.clean()<line_sep>self.subs.save(self.path encoding='utf-8')<block_end><def_stmt>__sync_all_rec self subs pred margin=16<block_start><if_stmt>len(subs)<l>3<block_start><return><block_end>labels=self.labels(subs=subs)<if_stmt>np.unique(labels).size<le>1<block_start><return><block_end>x,y=self.logloss(pred labels margin=max(margin 0.25))<line_sep>#self.plot_logloss(x,y) #self.plot_labels(labels, pred) secs=blocksToSeconds(x[np.argmin(y)])<line_sep>subs.shift(seconds=secs)<line_sep># call recursively middle=subs[len(subs)<floordiv>2]<line_sep>left=subs.slice(ends_before=middle.start)<line_sep>right=subs.slice(starts_after=middle.start)<line_sep>self.__sync_all_rec(left pred margin=margin/2)<line_sep>self.__sync_all_rec(right pred margin=margin/2)<block_end><def_stmt>clean self<block_start><for_stmt>i,s enumerate(self.subs)<block_start><if_stmt>i<ge>len(self.subs)-1<block_start><return><block_end>next=self.subs[i+1]<if_stmt>s.end<g>next.start<block_start>s.end=next.start<block_end><block_end><block_end><def_stmt>plot_logloss self 
x y<block_start><import_stmt>matplotlib.pyplot<as>plt<line_sep>plt.figure()<line_sep>plt.plot(x y)<line_sep>plt.title('logloss over shifts')<line_sep>plt.ylabel('logloss')<line_sep>plt.xlabel('shifts')<line_sep>plt.legend(['logloss'] loc='upper left')<line_sep>plt.show()<block_end><def_stmt>plot_labels self labels pred<block_start><import_stmt>matplotlib.pyplot<as>plt<line_sep>plt.figure()<line_sep>plt.plot([i<for>i range(0 len(labels))] labels label='labels')<line_sep>plt.title('labels vs predictions')<line_sep>plt.ylabel('value')<line_sep>plt.xlabel('time')<line_sep>plt.legend(['labels'] loc='upper left')<line_sep>plt.figure()<line_sep>plt.plot([i<for>i range(0 len(pred))] pred label='pred')<line_sep>plt.title('labels vs predictions')<line_sep>plt.ylabel('value')<line_sep>plt.xlabel('time')<line_sep>plt.legend(['pred'] loc='upper left')<line_sep>plt.show()<block_end><block_end># Convert timestamp to seconds <def_stmt>timeToSec t<block_start>total_sec=float(t.milliseconds)/1000<line_sep>total_sec<augadd>t.seconds<line_sep>total_sec<augadd>t.minutes<times>60<line_sep>total_sec<augadd>t.hours<times>60<times>60<line_sep><return>total_sec<block_end># Return timestamp from cell position <def_stmt>timeToPos t freq=Media.FREQ hop_len=Media.HOP_LEN<block_start><return>round(timeToSec(t)/(hop_len/freq))<block_end><def_stmt>secondsToBlocks s hop_len=Media.HOP_LEN freq=Media.FREQ<block_start><return>int(float(s)/(hop_len/freq))<block_end><def_stmt>blocksToSeconds h freq=Media.FREQ hop_len=Media.HOP_LEN<block_start><return>float(h)<times>(hop_len/freq)<block_end>
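# Worked example: with FREQ=16000 and HOP_LEN=512 each MFCC frame covers 512/16000 = 0.032 s,
# so a 2 second shift corresponds to int(2/0.032) = 62 frames, and 62 frames map back to 1.984 s.
<assert_stmt>secondsToBlocks(2)<eq>62<assert_stmt>abs(blocksToSeconds(62)-1.984)<l>1e-9<line_sep>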
<import_stmt>sys<line_sep>sys.path.append("D:\\repositories/openml-python")<import_stmt>openml<if_stmt>__name__<eq>'__main__'<block_start>suite=openml.study.get_suite(218)<line_sep>tag='study_218'<for_stmt>taskid suite.tasks<block_start>print('collecting t/' taskid)<line_sep>task=openml.tasks.get_task(taskid download_data=<false>)<line_sep>#task.push_tag(tag) print('collecting d/' task.dataset_id)<line_sep>dataset=openml.datasets.get_dataset(task.dataset_id download_data=<false>)<line_sep>print('tagging')<line_sep>#dataset.push_tag(tag) <block_end><block_end>
<import_from_stmt>fireo.fields errors<import_from_stmt>fireo.fields.base_field Field<import_stmt>re<class_stmt>TextField(Field)<block_start>"""Text field for Models Define text for models allowed_attributes = ['max_length', 'to_lowercase'] Examples -------- class User(Model): age = TextField() """<line_sep>allowed_attributes=['max_length' 'to_lowercase' 'format']<def_stmt>__init__ self *args **kwargs<block_start>super().__init__(*args **kwargs)<line_sep>self.format_type=<none><line_sep>self.supported_types=['title' 'upper' 'lower' 'capitalize']<block_end><def_stmt>attr_format self attr_val field_val<block_start>self.format_type=attr_val<line_sep><return>field_val<block_end><def_stmt>attr_max_length self attr_val field_val<block_start>"""Method for attribute max_length"""<line_sep><return>field_val[:attr_val]<block_end><def_stmt>attr_to_lowercase self attr_val field_val<block_start>"""Method for attribute to_lowercase Convert text into lowercase """<if_stmt>attr_val<block_start><return>field_val.lower()<if>field_val<is><not><none><else><none><block_end><return>field_val<block_end><def_stmt>_titlecase self s<block_start><return>re.sub(r"[A-Za-z]+('[A-Za-z]+)?" <lambda>mo:mo.group(0)[0].upper()+mo.group(0)[1:].lower() s)<block_end># override method <def_stmt>db_value self val<block_start><if_stmt>type(val)<is>str<or>val<is><none># check if user defined to set the value as lower case <block_start><if_stmt>self.model_cls._meta.to_lowercase<block_start><return>val.lower()<if>val<is><not><none><else><none><block_end><return>val<block_end><raise>errors.InvalidFieldType(f'Invalid field type. Field "{self.name}" expected {str}, '<concat>f'got {type(val)}')<block_end># override method <def_stmt>field_value self val# check if val is None then there is no need to run these functions # just return back the None value <block_start><if_stmt>val<is><none><block_start><return>val<block_end>self.field_attribute.parse(val run_only=['format'])<if_stmt>self.format_type<block_start><if_stmt>self.format_type<in>self.supported_types<block_start><if_stmt>self.format_type<eq>'title'<block_start><return>self._titlecase(val)<block_end><if_stmt>self.format_type<eq>'upper'<block_start><return>val.upper()<block_end><if_stmt>self.format_type<eq>'lower'<block_start><return>val.lower()<block_end><if_stmt>self.format_type<eq>'capitalize'<block_start><return>val.capitalize()<block_end><block_end><raise>errors.AttributeTypeError(f'Invalid attribute type. Inside Field "{self.name}", '<concat>f'"format" type must be one of them "{self.supported_types}".')<block_end><return>val<block_end><block_end>
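# Illustrative usage (the model and field names are made up): declaring a text field with a
# title-case format and a 120 character limit.
<import_from_stmt>fireo.models Model<class_stmt>Article(Model)<block_start>title=TextField(format='title' max_length=120)<block_end>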
# -*- coding:utf-8 -*- # ! ./usr/bin/env python # __author__ = 'zzp' <import_stmt>shutil<import_stmt>argparse<import_stmt>numpy<as>np<line_sep>parser=argparse.ArgumentParser(description='Analysis siamfc tune results')<line_sep>parser.add_argument('--path' default='logs/gene_adjust_rpn.log' help='tune result path')<line_sep>parser.add_argument('--dataset' default='VOT2018' help='test dataset')<line_sep>parser.add_argument('--save_path' default='logs' help='log file save path')<def_stmt>collect_results args<block_start><if_stmt><not>args.path.endswith('txt')<block_start>name=args.path.split('.')[0]<line_sep>name=name+'.txt'<line_sep>shutil.copy(args.path name)<line_sep>args.path=name<block_end>fin=open(args.path 'r')<line_sep>lines=fin.readlines()<line_sep>penalty_k=[]<line_sep>scale_lr=[]<line_sep>wi=[]<line_sep>sz=[]<line_sep>bz=[]<line_sep>eao=[]<line_sep>count=0# total numbers <for_stmt>line lines<block_start><if_stmt><not>line.startswith('penalty_k')<block_start><pass><block_end><else_stmt># print(line) <block_start>count<augadd>1<line_sep>temp0,temp1,temp2,temp3,temp4,temp5=line.split(',')<line_sep>penalty_k.append(float(temp0.split(': ')[-1]))<line_sep>scale_lr.append(float(temp1.split(': ')[-1]))<line_sep>wi.append(float(temp2.split(': ')[-1]))<line_sep>sz.append(float(temp3.split(': ')[-1]))<line_sep>bz.append(float(temp4.split(': ')[-1]))<line_sep>eao.append(float(temp5.split(': ')[-1]))<block_end><block_end># find max eao=np.array(eao)<line_sep>max_idx=np.argmax(eao)<line_sep>max_eao=eao[max_idx]<line_sep>print('{} params group have been tested'.format(count))<line_sep>print('penalty_k: {:.4f}, scale_lr: {:.4f}, wi: {:.4f}, small_sz: {}, big_sz: {}, auc: {}'.format(penalty_k[max_idx] scale_lr[max_idx] wi[max_idx] sz[max_idx] bz[max_idx] max_eao))<block_end><if_stmt>__name__<eq>'__main__'<block_start>args=parser.parse_args()<line_sep>collect_results(args)<block_end>
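# Example of a log line this parser expects (values are illustrative; the format is inferred from
# the split(',') / split(': ') calls above):
#   penalty_k: 0.10, scale_lr: 0.30, wi: 0.25, small_sz: 255, big_sz: 271, eao: 0.41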
# Copyright 2020 Nokia Software. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """create new tables for the dynamic actions and code sources Revision ID: 001 Revises: None Create Date: 2020-09-30 12:02:51.935368 """<line_sep># revision identifiers, used by Alembic. revision='040'<line_sep>down_revision='039'<import_from_stmt>alembic op<import_from_stmt>mistral.db.sqlalchemy types<as>st<import_stmt>sqlalchemy<as>sa<def_stmt>upgrade <block_start>op.create_table('code_sources' sa.Column('id' sa.String(length=36) nullable=<false>) sa.Column('name' sa.String(length=255) nullable=<false>) sa.Column('project_id' sa.String(length=80) nullable=<true>) sa.Column('namespace' sa.String(length=255) nullable=<true>) sa.Column('content' sa.TEXT nullable=<false>) sa.Column('version' sa.Integer nullable=<false>) sa.Column('tags' st.JsonEncoded() nullable=<true>) sa.Column('scope' sa.String(length=80) nullable=<true>) sa.Column('created_at' sa.DateTime() nullable=<true>) sa.Column('updated_at' sa.DateTime() nullable=<true>) sa.PrimaryKeyConstraint('id') sa.UniqueConstraint('name' 'namespace' 'project_id') sa.Index('code_sources_project_id' 'project_id') sa.Index('code_sources_scope' 'scope'))<line_sep>op.create_table('dynamic_action_definitions' sa.Column('id' sa.String(length=36) nullable=<false>) sa.Column('name' sa.String(length=255) nullable=<false>) sa.Column('class_name' sa.String(length=255) nullable=<false>) sa.Column('scope' sa.String(length=80) nullable=<true>) sa.Column('project_id' sa.String(length=80) nullable=<true>) sa.Column('code_source_id' sa.String(length=36) nullable=<false>) sa.Column('code_source_name' sa.String(length=255) nullable=<false>) sa.Column('namespace' sa.String(length=255) nullable=<true>) sa.Column('created_at' sa.DateTime() nullable=<true>) sa.Column('updated_at' sa.DateTime() nullable=<true>) sa.PrimaryKeyConstraint('id') sa.ForeignKeyConstraint(['code_source_id'] ['code_sources.id'] ondelete='CASCADE') sa.UniqueConstraint('name' 'namespace' 'project_id') sa.Index('dynamic_action_definitions_project_id' 'project_id') sa.Index('dynamic_action_definitions_scope' 'scope') )<block_end>
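# Sketch of a matching downgrade step (not part of this revision): it would simply drop the two
# tables in reverse dependency order.
<def_stmt>downgrade <block_start>op.drop_table('dynamic_action_definitions')<line_sep>op.drop_table('code_sources')<block_end>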
<import_stmt>sys<import_from_stmt>pyspark SparkConf<import_from_stmt>collections namedtuple<import_from_stmt>pyspark.sql SparkSession<import_from_stmt>lib.logger Log4j<line_sep>SurveyRecord=namedtuple("SurveyRecord" ["Age" "Gender" "Country" "State"])<if_stmt>__name__<eq>"__main__"<block_start>conf=SparkConf().setMaster("local[3]").setAppName("HelloRDD")<line_sep># sc = SparkContext(conf=conf) spark=SparkSession.builder.config(conf=conf).getOrCreate()<line_sep>sc=spark.sparkContext<line_sep>logger=Log4j(spark)<if_stmt>len(sys.argv)<ne>2<block_start>logger.error("Usage: HelloSpark <filename>")<line_sep>sys.exit(-1)<block_end>linesRDD=sc.textFile(sys.argv[1])<line_sep>partitionedRDD=linesRDD.repartition(2)<line_sep>colsRDD=partitionedRDD.map(<lambda>line:line.replace('"' '').split(","))<line_sep>selectRDD=colsRDD.map(<lambda>cols:SurveyRecord(int(cols[1]) cols[2] cols[3] cols[4]))<line_sep>filteredRDD=selectRDD.filter(<lambda>r:r.Age<l>40)<line_sep>kvRDD=filteredRDD.map(<lambda>r:(r.Country 1))<line_sep>countRDD=kvRDD.reduceByKey(<lambda>v1 v2:v1+v2)<line_sep>colsList=countRDD.collect()<for_stmt>x colsList<block_start>logger.info(x)<block_end><block_end>
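# Illustrative input row (the column layout is assumed from the indices used above, with
# cols[1:5] mapping onto Age, Gender, Country and State); the job then counts respondents
# younger than 40 per country:
#   "2014-08-27 11:29:31","37","Female","United States","IL",...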
# # Copyright (c) 2021, NVIDIA CORPORATION. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # <import_stmt>io<import_from_stmt>threading Thread<import_stmt>numpy<as>np<import_from_stmt>pyarrow parquet<as>pq<try_stmt><block_start><import_stmt>cudf<import_from_stmt>cudf.core.column as_column build_categorical_column<block_end><except_stmt>ImportError<block_start>cudf=<none><block_end># # Parquet-Specific Utilities # <def_stmt>_optimized_read_partition_remote fs pieces columns index categories=() partitions=() **kwargs# This is a specialized version of `CudfEngine.read_partition` # for remote filesystems. This implementation is intended to # replace the upstream `read_partition` classmethod until # remote-filesystem handling is optimized in cudf/dask-cudf <block_start><if_stmt>columns<is><not><none><block_start>columns=list(columns)<block_end><if_stmt>isinstance(index list)<block_start>columns<augadd>index<block_end># Check that this is a single-piece read on a non-local filesystem <if_stmt><not>isinstance(pieces list)<block_start>pieces=[pieces]<block_end><if_stmt>len(pieces)<g>1<block_start><raise>ValueError("The `_custom_read_partition` code path is not designed to "<concat>"handle a multi-element `pieces` argument.")<block_end><if_stmt>cudf.utils.ioutils._is_local_filesystem(fs)<block_start><raise>ValueError("The `_custom_read_partition` code path is not intended "<concat>"for use on local filesystems.")<block_end># Unpack contents of the single piece <if_stmt>isinstance(pieces[0] str)<block_start>path=pieces[0]<line_sep>row_group=<none><line_sep>partition_keys=[]<block_end><else_stmt><block_start>(path row_group partition_keys)=pieces[0]<block_end># Call optimized read utility df=_optimized_read_remote(path row_group columns fs **kwargs)<line_sep># # Code below is directly copied from cudf-21.08 # <if_stmt>index<and>(index[0]<in>df.columns)<block_start>df=df.set_index(index[0])<block_end><elif_stmt>index<is><false><and>set(df.index.names).issubset(columns)# If index=False, we need to make sure all of the # names in `columns` are actually in `df.columns` <block_start>df.reset_index(inplace=<true>)<block_end><if_stmt>partition_keys<block_start><if_stmt>partitions<is><none><block_start><raise>ValueError("Must pass partition sets")<block_end><for_stmt>i,(name index2) enumerate(partition_keys)<block_start>categories=[val.as_py()<for>val partitions.levels[i].dictionary]<line_sep>col=as_column(index2).as_frame().repeat(len(df))._data[<none>]<line_sep>df[name]=build_categorical_column(categories=categories codes=as_column(col.base_data dtype=col.dtype) size=col.size offset=col.offset ordered=<false> )<block_end><block_end><return>df<block_end><def_stmt>_optimized_read_remote path row_groups columns fs **kwargs<block_start><if_stmt>row_groups<is><not><none><and><not>isinstance(row_groups list)<block_start>row_groups=[row_groups]<block_end># Get byte-ranges that are known to contain the # required data for this read byte_ranges,footer,file_size=_get_parquet_byte_ranges(path row_groups columns fs 
**kwargs)<line_sep># Transfer the required byte-ranges with fsspec. # Store these blocks in a local dummy buffer dummy_buffer=_fsspec_data_transfer(path fs byte_ranges=byte_ranges footer=footer file_size=file_size add_par1_magic=<true> **kwargs )<line_sep># Call cudf.read_parquet on the dummy buffer strings_to_cats=kwargs.get("strings_to_categorical" <false>)<line_sep>df=cudf.read_parquet(io.BytesIO(dummy_buffer) engine="cudf" columns=columns row_groups=row_groups strings_to_categorical=strings_to_cats **kwargs.get("read" {}) )<del_stmt>dummy_buffer<line_sep><return>df<block_end><def_stmt>_get_parquet_byte_ranges path rgs columns fs bytes_per_thread=256_000_000 **kwargs # The purpose of this utility is to return a list # of byte ranges (in path) that are known to contain # the data needed to read `columns` and `rgs` # Step 0 - Get size of file <block_start>file_size=fs.size(path)<line_sep># Return early if the file is too small to merit # optimized data transfer <if_stmt>file_size<le>bytes_per_thread<block_start><return><none> <none> file_size<block_end># Step 1 - Get 32 KB from tail of file. # # This "sample size" can be tunable, but should # always be >= 8 bytes (so we can read the footer size) tail_size=32_000<line_sep>footer_sample=fs.tail(path tail_size)<line_sep># Step 2 - Read the footer size and re-read a larger # tail if necessary footer_size=int.from_bytes(footer_sample[-8:-4] "little")<if_stmt>tail_size<l>(footer_size+8)<block_start>footer_sample=fs.tail(path footer_size+8)<block_end># Step 3 - Collect required byte ranges byte_ranges=[]<line_sep>md=pq.ParquetFile(io.BytesIO(footer_sample)).metadata<for_stmt>r range(md.num_row_groups)# Skip this row-group if we are targeting # specific row-groups <block_start><if_stmt>rgs<is><none><or>r<in>rgs<block_start>row_group=md.row_group(r)<for_stmt>c range(row_group.num_columns)<block_start>column=row_group.column(c)<line_sep>name=column.path_in_schema<line_sep># Skip this column if we are targeting a # specific columns <if_stmt>columns<is><none><or>name<in>columns<block_start>file_offset0=column.dictionary_page_offset<if_stmt>file_offset0<is><none><block_start>file_offset0=column.data_page_offset<block_end>num_bytes=column.total_uncompressed_size<line_sep>byte_ranges.append((file_offset0 num_bytes))<block_end><block_end><block_end><block_end><return>byte_ranges footer_sample file_size<block_end># # General Fsspec Data-transfer Optimization Code # <def_stmt>_fsspec_data_transfer path_or_fob fs byte_ranges=<none> footer=<none> file_size=<none> add_par1_magic=<none> bytes_per_thread=256_000_000 max_gap=64_000 mode="rb" **kwargs # Calculate total file size <block_start>file_size=file_size<or>fs.size(path_or_fob)<line_sep># Check if a direct read makes the most sense <if_stmt><not>byte_ranges<and>bytes_per_thread<ge>file_size<block_start><return>fs.open(path_or_fob mode=mode cache_type="none").read()<block_end># Threaded read into "dummy" buffer buf=np.zeros(file_size dtype="b")<if_stmt>byte_ranges# Optimize/merge the ranges <block_start>byte_ranges=_merge_ranges(byte_ranges max_block=bytes_per_thread max_gap=max_gap )<line_sep># Call multi-threaded data transfer of # remote byte-ranges to local buffer _read_byte_ranges(path_or_fob byte_ranges buf fs **kwargs )<line_sep># Add Header & Footer bytes <if_stmt>footer<is><not><none><block_start>footer_size=len(footer)<line_sep>buf[-footer_size:]=np.frombuffer(footer[-footer_size:] dtype="b")<block_end># Add parquet magic bytes (optional) 
<if_stmt>add_par1_magic<block_start>buf[:4]=np.frombuffer(b"PAR1" dtype="b")<if_stmt>footer<is><none><block_start>buf[-4:]=np.frombuffer(b"PAR1" dtype="b")<block_end><block_end><block_end><else_stmt><block_start>byte_ranges=[(b min(bytes_per_thread file_size-b))<for>b range(0 file_size bytes_per_thread)]<line_sep>_read_byte_ranges(path_or_fob byte_ranges buf fs **kwargs )<block_end><return>buf.tobytes()<block_end><def_stmt>_merge_ranges byte_ranges max_block=256_000_000 max_gap=64_000# Simple utility to merge small/adjacent byte ranges <block_start>new_ranges=[]<if_stmt><not>byte_ranges# Early return <block_start><return>new_ranges<block_end>offset,size=byte_ranges[0]<for_stmt>(new_offset new_size) byte_ranges[1:]<block_start>gap=new_offset-(offset+size)<if_stmt>gap<g>max_gap<or>(size+new_size+gap)<g>max_block# Gap is too large or total read is too large <block_start>new_ranges.append((offset size))<line_sep>offset=new_offset<line_sep>size=new_size<line_sep><continue><block_end>size<augadd>new_size+gap<block_end>new_ranges.append((offset size))<line_sep><return>new_ranges<block_end><def_stmt>_assign_block fs path_or_fob local_buffer offset nbytes<block_start><with_stmt>fs.open(path_or_fob mode="rb" cache_type="none")<as>fob<block_start>fob.seek(offset)<line_sep>local_buffer[offset:offset+nbytes]=np.frombuffer(fob.read(nbytes) dtype="b" )<block_end><block_end><def_stmt>_read_byte_ranges path_or_fob ranges local_buffer fs **kwargs <block_start>workers=[]<for_stmt>(offset nbytes) ranges<block_start><if_stmt>len(ranges)<g>1<block_start>workers.append(Thread(target=_assign_block args=(fs path_or_fob local_buffer offset nbytes)))<line_sep>workers[-1].start()<block_end><else_stmt><block_start>_assign_block(fs path_or_fob local_buffer offset nbytes)<block_end><block_end><for_stmt>worker workers<block_start>worker.join()<block_end><block_end>
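# Worked example: nearby byte ranges are coalesced, while ranges separated by more than max_gap
# stay apart.
<assert_stmt>_merge_ranges([(0 10) (15 10) (1_000_000 10)] max_gap=64_000)<eq>[(0 25) (1_000_000 10)]<line_sep>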
<import_from_stmt>typing Optional Any<import_from_stmt>hwt.hdl.types.defs INT STR BOOL SLICE FLOAT64<import_from_stmt>hwt.hdl.types.hdlType HdlType<import_from_stmt>hwt.hdl.value HValue<import_from_stmt>hwt.hdl.variables SignalItem<import_from_stmt>hwt.synthesizer.interfaceLevel.mainBases InterfaceBase<line_sep>defaultPyConversions={int:INT str:STR bool:BOOL slice:SLICE float:FLOAT64}<def_stmt>toHVal op:Any suggestedType:Optional[HdlType]=<none><block_start>"""Convert python or hdl value/signal object to hdl value/signal object"""<if_stmt>isinstance(op HValue)<or>isinstance(op SignalItem)<block_start><return>op<block_end><elif_stmt>isinstance(op InterfaceBase)<block_start><return>op._sig<block_end><else_stmt><block_start><if_stmt>suggestedType<is><not><none><block_start><return>suggestedType.from_py(op)<block_end><if_stmt>isinstance(op int)<block_start><if_stmt>op<ge>1<lshift>31<block_start><raise>TypeError(f"Number {op:d} is too big to fit in 32 bit integer of HDL"<concat>" use Bits type instead")<block_end><elif_stmt>op<l>-(1<lshift>31)<block_start><raise>TypeError(f"Number {op:d} is too small to fit in 32 bit integer"<concat>" of HDL use Bits type instead")<block_end><block_end><try_stmt><block_start>hType=defaultPyConversions[type(op)]<block_end><except_stmt>KeyError<block_start>hType=<none><block_end><if_stmt>hType<is><none><block_start><raise>TypeError(f"Unknown hardware type for instance of {op.__class__}")<block_end><return>hType.from_py(op)<block_end><block_end>
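# Illustrative usage: plain Python literals are wrapped into HDL values of the matching default
# type (int -> INT, bool -> BOOL, str -> STR); integers outside the signed 32-bit range raise
# TypeError unless a wider suggestedType is given.
v_int=toHVal(3)<line_sep>v_flag=toHVal(<true>)<line_sep>v_text=toHVal("ready")<line_sep>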
"""Copyright 2022 Google LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """<line_sep># [START drive_list_appdata] <import_from_future_stmt> print_function<import_stmt>google.auth<import_from_stmt>googleapiclient.discovery build<import_from_stmt>googleapiclient.errors HttpError<def_stmt>list_appdata <block_start>"""List all files inserted in the application data folder prints file titles with Ids. Returns : List of items Load pre-authorized user credentials from the environment. TODO(developer) - See https://developers.google.com/identity for guides on implementing OAuth2 for the application. """<line_sep>creds,_=google.auth.default()<try_stmt># call drive api client <block_start>service=build('drive' 'v3' credentials=creds)<line_sep># pylint: disable=maybe-no-member response=service.files().list(spaces='appDataFolder' fields='nextPageToken, files(id, '<concat>'name)' pageSize=10).execute()<for_stmt>file response.get('files' [])# Process change <block_start>print(F'Found file: {file.get("name")}, {file.get("id")}')<block_end><block_end><except_stmt>HttpError<as>error<block_start>print(F'An error occurred: {error}')<line_sep>response=<none><block_end><return>response.get('files')<block_end><if_stmt>__name__<eq>'__main__'<block_start>list_appdata()<block_end># [END drive_list_appdata]
<import_from_stmt>datetime timedelta<import_from_stmt>django.apps apps<import_from_stmt>django.conf settings<import_from_stmt>django.core.exceptions ObjectDoesNotExist<import_from_stmt>django.db models<import_from_stmt>django.utils timezone<import_from_stmt>django.utils.functional cached_property<import_from_stmt>.util random_number_token<class_stmt>DeviceManager(models.Manager)<block_start>""" The :class:`~django.db.models.Manager` object installed as ``Device.objects``. """<def_stmt>devices_for_user self user confirmed=<none><block_start>""" Returns a queryset for all devices of this class that belong to the given user. :param user: The user. :type user: :class:`~django.contrib.auth.models.User` :param confirmed: If ``None``, all matching devices are returned. Otherwise, this can be any true or false value to limit the query to confirmed or unconfirmed devices, respectively. """<line_sep>devices=self.model.objects.filter(user=user)<if_stmt>confirmed<is><not><none><block_start>devices=devices.filter(confirmed=bool(confirmed))<block_end><return>devices<block_end><block_end><class_stmt>Device(models.Model)<block_start>""" Abstract base model for a :term:`device` attached to a user. Plugins must subclass this to define their OTP models. .. _unsaved_device_warning: .. warning:: OTP devices are inherently stateful. For example, verifying a token is logically a mutating operation on the device, which may involve incrementing a counter or otherwise consuming a token. A device must be committed to the database before it can be used in any way. .. attribute:: user *ForeignKey*: Foreign key to your user model, as configured by :setting:`AUTH_USER_MODEL` (:class:`~django.contrib.auth.models.User` by default). .. attribute:: name *CharField*: A human-readable name to help the user identify their devices. .. attribute:: confirmed *BooleanField*: A boolean value that tells us whether this device has been confirmed as valid. It defaults to ``True``, but subclasses or individual deployments can force it to ``False`` if they wish to create a device and then ask the user for confirmation. As a rule, built-in APIs that enumerate devices will only include those that are confirmed. .. attribute:: objects A :class:`~django_otp.models.DeviceManager`. """<line_sep>user=models.ForeignKey(getattr(settings 'AUTH_USER_MODEL' 'auth.User') help_text="The user that this device belongs to." on_delete=models.CASCADE)<line_sep>name=models.CharField(max_length=64 help_text="The human-readable name of this device.")<line_sep>confirmed=models.BooleanField(default=<true> help_text="Is this device ready for use?")<line_sep>objects=DeviceManager()<class_stmt>Meta<block_start>abstract=<true><block_end><def_stmt>__str__ self<block_start><try_stmt><block_start>user=self.user<block_end><except_stmt>ObjectDoesNotExist<block_start>user=<none><block_end><return>"{0} ({1})".format(self.name user)<block_end>@property<def_stmt>persistent_id self<block_start>""" A stable device identifier for forms and APIs. """<line_sep><return>'{0}/{1}'.format(self.model_label() self.id)<block_end>@classmethod<def_stmt>model_label cls<block_start>""" Returns an identifier for this Django model class. This is just the standard "<app_label>.<model_name>" form. 
"""<line_sep><return>'{0}.{1}'.format(cls._meta.app_label cls._meta.model_name)<block_end>@classmethod<def_stmt>from_persistent_id cls persistent_id for_verify=<false><block_start>""" Loads a device from its persistent id:: device == Device.from_persistent_id(device.persistent_id) :param bool for_verify: If ``True``, we'll load the device with :meth:`~django.db.models.query.QuerySet.select_for_update` to prevent concurrent verifications from succeeding. In which case, this must be called inside a transaction. """<line_sep>device=<none><try_stmt><block_start>model_label,device_id=persistent_id.rsplit('/' 1)<line_sep>app_label,model_name=model_label.split('.')<line_sep>device_cls=apps.get_model(app_label model_name)<if_stmt>issubclass(device_cls Device)<block_start>device_set=device_cls.objects.filter(id=int(device_id))<if_stmt>for_verify<block_start>device_set=device_set.select_for_update()<block_end>device=device_set.first()<block_end><block_end><except_stmt>(ValueError LookupError)<block_start><pass><block_end><return>device<block_end><def_stmt>is_interactive self<block_start>""" Returns ``True`` if this is an interactive device. The default implementation returns ``True`` if :meth:`~django_otp.models.Device.generate_challenge` has been overridden, but subclasses are welcome to provide smarter implementations. :rtype: bool """<line_sep><return><not>hasattr(self.generate_challenge 'stub')<block_end><def_stmt>generate_challenge self<block_start>""" Generates a challenge value that the user will need to produce a token. This method is permitted to have side effects, such as transmitting information to the user through some other channel (email or SMS, perhaps). And, of course, some devices may need to commit the challenge to the database. :returns: A message to the user. This should be a string that fits comfortably in the template ``'OTP Challenge: {0}'``. This may return ``None`` if this device is not interactive. :rtype: string or ``None`` :raises: Any :exc:`~exceptions.Exception` is permitted. Callers should trap ``Exception`` and report it to the user. """<line_sep><return><none><block_end>generate_challenge.stub=<true><def_stmt>verify_is_allowed self<block_start>""" Checks whether it is permissible to call :meth:`verify_token`. If it is allowed, returns ``(True, None)``. Otherwise returns ``(False, data_dict)``, where ``data_dict`` contains extra information, defined by the implementation. This method can be used to implement throttling or locking, for example. Client code should check this method before calling :meth:`verify_token` and report problems to the user. To report specific problems, the data dictionary can return include a ``'reason'`` member with a value from the constants in :class:`VerifyNotAllowed`. Otherwise, an ``'error_message'`` member should be provided with an error message. :meth:`verify_token` should also call this method and return False if verification is not allowed. :rtype: (bool, dict or ``None``) """<line_sep><return>(<true> <none>)<block_end><def_stmt>verify_token self token<block_start>""" Verifies a token. As a rule, the token should no longer be valid if this returns ``True``. :param str token: The OTP token provided by the user. :rtype: bool """<line_sep><return><false><block_end><block_end><class_stmt>SideChannelDevice(Device)<block_start>""" Abstract base model for a side-channel :term:`device` attached to a user. This model implements token generation, verification and expiration, so the concrete devices only have to implement delivery. 
"""<line_sep>token=models.CharField(max_length=16 blank=<true> null=<true>)<line_sep>valid_until=models.DateTimeField(default=timezone.now help_text="The timestamp of the moment of expiry of the saved token.")<class_stmt>Meta<block_start>abstract=<true><block_end><def_stmt>generate_token self length=6 valid_secs=300 commit=<true><block_start>""" Generates a token of the specified length, then sets it on the model and sets the expiration of the token on the model. Pass 'commit=False' to avoid calling self.save(). :param int length: Number of decimal digits in the generated token. :param int valid_secs: Amount of seconds the token should be valid. :param bool commit: Whether to autosave the generated token. """<line_sep>self.token=random_number_token(length)<line_sep>self.valid_until=timezone.now()+timedelta(seconds=valid_secs)<if_stmt>commit<block_start>self.save()<block_end><block_end><def_stmt>verify_token self token<block_start>""" Verifies a token by content and expiry. On success, the token is cleared and the device saved. :param str token: The OTP token provided by the user. :rtype: bool """<line_sep>_now=timezone.now()<if_stmt>(self.token<is><not><none>)<and>(token<eq>self.token)<and>(_now<l>self.valid_until)<block_start>self.token=<none><line_sep>self.valid_until=_now<line_sep>self.save()<line_sep><return><true><block_end><else_stmt><block_start><return><false><block_end><block_end><block_end><class_stmt>VerifyNotAllowed<block_start>""" Constants that may be returned in the ``reason`` member of the extra information dictionary returned by :meth:`~django_otp.models.Device.verify_is_allowed` .. data:: N_FAILED_ATTEMPTS Indicates that verification is disallowed because of ``n`` successive failed attempts. The data dictionary should include the value of ``n`` in member ``failure_count`` """<line_sep>N_FAILED_ATTEMPTS='N_FAILED_ATTEMPTS'<block_end><class_stmt>ThrottlingMixin(models.Model)<block_start>""" Mixin class for models that need throttling behaviour. Implements exponential back-off. """<line_sep># This mixin is not publicly documented, but is used internally to avoid # code duplication. Subclasses must implement get_throttle_factor(), and # must use the verify_is_allowed(), throttle_reset() and # throttle_increment() methods from within their verify_token() method. throttling_failure_timestamp=models.DateTimeField(null=<true> blank=<true> default=<none> help_text="A timestamp of the last failed verification attempt. Null if last attempt succeeded.")<line_sep>throttling_failure_count=models.PositiveIntegerField(default=0 help_text="Number of successive failed attempts.")<def_stmt>verify_is_allowed self<block_start>""" If verification is allowed, returns ``(True, None)``. Otherwise, returns ``(False, data_dict)``. ``data_dict`` contains further information. Currently it can be:: {'reason': VerifyNotAllowed.N_FAILED_ATTEMPTS, 'failure_count': n } where ``n`` is the number of successive failures. See :class:`~django_otp.models.VerifyNotAllowed`. """<if_stmt>(self.throttling_enabled<and>self.throttling_failure_count<g>0<and>self.throttling_failure_timestamp<is><not><none>)<block_start>now=timezone.now()<line_sep>delay=(now-self.throttling_failure_timestamp).total_seconds()<line_sep># Required delays should be 1, 2, 4, 8 ... 
delay_required=self.get_throttle_factor()<times>(2<power>(self.throttling_failure_count-1))<if_stmt>delay<l>delay_required<block_start><return>(<false> {'reason':VerifyNotAllowed.N_FAILED_ATTEMPTS 'failure_count':self.throttling_failure_count 'locked_until':self.throttling_failure_timestamp+timedelta(seconds=delay_required)})<block_end><block_end><return>super().verify_is_allowed()<block_end><def_stmt>throttle_reset self commit=<true><block_start>""" Call this method to reset throttling (normally when a verify attempt succeeded). Pass 'commit=False' to avoid calling self.save(). """<line_sep>self.throttling_failure_timestamp=<none><line_sep>self.throttling_failure_count=0<if_stmt>commit<block_start>self.save()<block_end><block_end><def_stmt>throttle_increment self commit=<true><block_start>""" Call this method to increase throttling (normally when a verify attempt failed). Pass 'commit=False' to avoid calling self.save(). """<line_sep>self.throttling_failure_timestamp=timezone.now()<line_sep>self.throttling_failure_count<augadd>1<if_stmt>commit<block_start>self.save()<block_end><block_end>@cached_property<def_stmt>throttling_enabled self<block_start><return>self.get_throttle_factor()<g>0<block_end><def_stmt>get_throttle_factor self# pragma: no cover <block_start><raise>NotImplementedError()<block_end><class_stmt>Meta<block_start>abstract=<true><block_end><block_end>
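# Hedged usage sketch, not part of django-otp itself: the persistent-id round trip
# documented on Device above. `some_device` stands for any saved device instance;
# because for_verify=True loads the row with select_for_update(), the lookup is
# wrapped in a transaction, as the from_persistent_id() docstring requires.
from django.db import transaction


def reload_for_verification(some_device):
    # persistent_id has the form "<app_label>.<model_name>/<pk>";
    # from_persistent_id() reverses it and returns None for unknown ids.
    with transaction.atomic():
        return Device.from_persistent_id(some_device.persistent_id, for_verify=True)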
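# Hedged sketch, not shipped with this module: a minimal concrete device tying the
# classes above together, following the ThrottlingMixin comment -- the subclass
# implements get_throttle_factor() and calls verify_is_allowed(), throttle_reset()
# and throttle_increment() from within verify_token(). The model name
# EmailTokenDevice, the app label 'myapp' and the deliver_token() hook are
# illustrative assumptions, not django-otp APIs.
class EmailTokenDevice(ThrottlingMixin, SideChannelDevice):
    """Side-channel device that throttles failed verifications exponentially."""

    class Meta:
        app_label = 'myapp'  # assumed app label; a real plugin lives in its own app

    def get_throttle_factor(self):
        # Required delays become factor * 1, 2, 4, 8 ... seconds.
        return 1

    def deliver_token(self):
        # generate_token() stores a fresh token and its expiry on the model;
        # delivery (email, SMS, ...) is all a SideChannelDevice subclass has to add.
        self.generate_token(length=6, valid_secs=300)
        # e.g. send the value of self.token to the user here.

    def verify_token(self, token):
        allowed, _ = self.verify_is_allowed()
        if not allowed:
            return False
        verified = super().verify_token(token)  # SideChannelDevice: content + expiry
        if verified:
            self.throttle_reset()
        else:
            self.throttle_increment()
        return verified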